Compare commits

...

21 Commits

Author SHA1 Message Date
Jamison Lahman
b359e13281 fix(citations): enable citation sidebar w/ web_search-only assistants (#7888) 2026-01-27 13:26:29 -08:00
Justin Tahara
717f410a4a fix(llm): Hide private models from Agent Creation (#7873) 2026-01-27 12:21:06 -08:00
SubashMohan
ada0946a62 fix(layout): adjust footer margin and prevent page refresh on chatsession drop (#7759) 2026-01-27 11:57:18 -08:00
Jamison Lahman
eb2ac8f5a3 fix(fe): inline code text wraps (#7574) 2026-01-27 11:33:03 -08:00
Nikolas Garza
fbeb57c592 fix(slack): Extract person names and filter garbage in query expansion (#7632) 2026-01-27 11:26:52 -08:00
Nikolas Garza
d6da9c9b85 fix: scroll to bottom when loading existing conversations (#7614) 2026-01-27 11:26:52 -08:00
Nikolas Garza
5aea2e223e fix(billing): remove grandfathered pricing option when subscription lapses (#7583) 2026-01-27 11:26:52 -08:00
Nikolas Garza
1ff91de07e fix: deflake chat user journey test (#7646) 2026-01-27 11:18:27 -08:00
Nikolas Garza
b3dbc69faf fix(tests): use crawler-friendly search query in Exa integration test (#7746) 2026-01-27 11:13:01 -08:00
Yuhong Sun
431597b0f9 fix: LiteLLM Azure models don't stream (#7761) 2026-01-27 10:49:17 -08:00
Yuhong Sun
51b4e5f2fb fix: Azure OpenAI Tool Calls (#7727) 2026-01-27 10:49:17 -08:00
Justin Tahara
9afa04a26b fix(ui): Coda Logo (#7656) 2026-01-26 17:43:54 -08:00
Justin Tahara
70a3a9c0cd fix(ui): User Groups Connectors Fix (#7658) 2026-01-26 17:43:45 -08:00
Justin Tahara
080165356c fix(ui): First Connector Result (#7657) 2026-01-26 17:43:35 -08:00
Justin Tahara
3ae974bdf6 fix(ui): Fix Token Rate Limits Page (#7659) 2026-01-26 17:42:57 -08:00
Justin Tahara
1471658151 fix(vertex ai): Extra Args for Opus 4.5 (#7586) 2026-01-26 17:42:43 -08:00
Justin Tahara
3e85e9c1a3 feat(desktop): Domain Configuration (#7655) 2026-01-26 17:12:33 -08:00
Justin Tahara
851033be5f feat(desktop): Properly Sign Mac App (#7608) 2026-01-26 17:12:24 -08:00
Jamison Lahman
91e974a6cc chore(desktop): make artifact filename version-agnostic (#7679) 2026-01-26 16:20:39 -08:00
Jamison Lahman
38ba4f8a1c chore(deployments): fix region (#7640) 2026-01-26 16:20:39 -08:00
Jamison Lahman
6f02473064 chore(deployments): fetch secrets from AWS (#7584) 2026-01-26 16:20:39 -08:00
27 changed files with 1206 additions and 479 deletions

View File

@@ -8,7 +8,9 @@ on:
# Set restrictive default permissions for all jobs. Jobs that need more permissions
# should explicitly declare them.
permissions: {}
permissions:
# Required for OIDC authentication with AWS
id-token: write # zizmor: ignore[excessive-permissions]
env:
EDGE_TAG: ${{ startsWith(github.ref_name, 'nightly-latest') }}
@@ -150,16 +152,30 @@ jobs:
if: always() && needs.check-version-tag.result == 'failure' && github.event_name != 'workflow_dispatch'
runs-on: ubuntu-slim
timeout-minutes: 10
environment: release
steps:
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
MONITOR_DEPLOYMENTS_WEBHOOK, deploy/monitor-deployments-webhook
parse-json-secrets: true
- name: Send Slack notification
uses: ./.github/actions/slack-notify
with:
webhook-url: ${{ secrets.MONITOR_DEPLOYMENTS_WEBHOOK }}
webhook-url: ${{ env.MONITOR_DEPLOYMENTS_WEBHOOK }}
failed-jobs: "• check-version-tag"
title: "🚨 Version Tag Check Failed"
ref-name: ${{ github.ref_name }}
@@ -168,6 +184,7 @@ jobs:
needs: determine-builds
if: needs.determine-builds.outputs.build-desktop == 'true'
permissions:
id-token: write
contents: write
actions: read
strategy:
@@ -185,12 +202,33 @@ jobs:
runs-on: ${{ matrix.platform }}
timeout-minutes: 90
environment: release
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6.0.1
with:
# NOTE: persist-credentials is needed for tauri-action to create GitHub releases.
persist-credentials: true # zizmor: ignore[artipacked]
- name: Configure AWS credentials
if: startsWith(matrix.platform, 'macos-')
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
if: startsWith(matrix.platform, 'macos-')
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
APPLE_ID, deploy/apple-id
APPLE_PASSWORD, deploy/apple-password
APPLE_CERTIFICATE, deploy/apple-certificate
APPLE_CERTIFICATE_PASSWORD, deploy/apple-certificate-password
KEYCHAIN_PASSWORD, deploy/keychain-password
APPLE_TEAM_ID, deploy/apple-team-id
parse-json-secrets: true
- name: install dependencies (ubuntu only)
if: startsWith(matrix.platform, 'ubuntu-')
run: |
@@ -285,15 +323,40 @@ jobs:
Write-Host "Versions set to: $VERSION"
- name: Import Apple Developer Certificate
if: startsWith(matrix.platform, 'macos-')
run: |
echo $APPLE_CERTIFICATE | base64 --decode > certificate.p12
security create-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
security default-keychain -s build.keychain
security unlock-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
security set-keychain-settings -t 3600 -u build.keychain
security import certificate.p12 -k build.keychain -P "$APPLE_CERTIFICATE_PASSWORD" -T /usr/bin/codesign
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k "$KEYCHAIN_PASSWORD" build.keychain
security find-identity -v -p codesigning build.keychain
- name: Verify Certificate
if: startsWith(matrix.platform, 'macos-')
run: |
CERT_INFO=$(security find-identity -v -p codesigning build.keychain | grep -E "(Developer ID Application|Apple Distribution|Apple Development)" | head -n 1)
CERT_ID=$(echo "$CERT_INFO" | awk -F'"' '{print $2}')
echo "CERT_ID=$CERT_ID" >> $GITHUB_ENV
echo "Certificate imported."
- uses: tauri-apps/tauri-action@73fb865345c54760d875b94642314f8c0c894afa # ratchet:tauri-apps/tauri-action@action-v0.6.1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
APPLE_ID: ${{ env.APPLE_ID }}
APPLE_PASSWORD: ${{ env.APPLE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ env.CERT_ID }}
APPLE_TEAM_ID: ${{ env.APPLE_TEAM_ID }}
with:
tagName: ${{ needs.determine-builds.outputs.is-test-run != 'true' && 'v__VERSION__' || format('v0.0.0-dev+{0}', needs.determine-builds.outputs.short-sha) }}
releaseName: ${{ needs.determine-builds.outputs.is-test-run != 'true' && 'v__VERSION__' || format('v0.0.0-dev+{0}', needs.determine-builds.outputs.short-sha) }}
releaseBody: "See the assets to download this version and install."
releaseDraft: true
prerelease: false
assetNamePattern: "[name]_[arch][ext]"
args: ${{ matrix.args }}
build-web-amd64:
@@ -305,6 +368,7 @@ jobs:
- run-id=${{ github.run_id }}-web-amd64
- extras=ecr-cache
timeout-minutes: 90
environment: release
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
@@ -317,6 +381,20 @@ jobs:
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Docker meta
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
@@ -331,8 +409,8 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push AMD64
id: build
@@ -363,6 +441,7 @@ jobs:
- run-id=${{ github.run_id }}-web-arm64
- extras=ecr-cache
timeout-minutes: 90
environment: release
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
@@ -375,6 +454,20 @@ jobs:
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Docker meta
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
@@ -389,8 +482,8 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push ARM64
id: build
@@ -423,19 +516,34 @@ jobs:
- run-id=${{ github.run_id }}-merge-web
- extras=ecr-cache
timeout-minutes: 90
environment: release
env:
REGISTRY_IMAGE: onyxdotapp/onyx-web-server
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Docker meta
id: meta
@@ -471,6 +579,7 @@ jobs:
- run-id=${{ github.run_id }}-web-cloud-amd64
- extras=ecr-cache
timeout-minutes: 90
environment: release
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
@@ -483,6 +592,20 @@ jobs:
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Docker meta
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
@@ -497,8 +620,8 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push AMD64
id: build
@@ -537,6 +660,7 @@ jobs:
- run-id=${{ github.run_id }}-web-cloud-arm64
- extras=ecr-cache
timeout-minutes: 90
environment: release
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
@@ -549,6 +673,20 @@ jobs:
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Docker meta
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
@@ -563,8 +701,8 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push ARM64
id: build
@@ -605,19 +743,34 @@ jobs:
- run-id=${{ github.run_id }}-merge-web-cloud
- extras=ecr-cache
timeout-minutes: 90
environment: release
env:
REGISTRY_IMAGE: onyxdotapp/onyx-web-server-cloud
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Docker meta
id: meta
@@ -650,6 +803,7 @@ jobs:
- run-id=${{ github.run_id }}-backend-amd64
- extras=ecr-cache
timeout-minutes: 90
environment: release
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
@@ -662,6 +816,20 @@ jobs:
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Docker meta
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
@@ -676,8 +844,8 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push AMD64
id: build
@@ -707,6 +875,7 @@ jobs:
- run-id=${{ github.run_id }}-backend-arm64
- extras=ecr-cache
timeout-minutes: 90
environment: release
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
@@ -719,6 +888,20 @@ jobs:
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Docker meta
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
@@ -733,8 +916,8 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push ARM64
id: build
@@ -766,19 +949,34 @@ jobs:
- run-id=${{ github.run_id }}-merge-backend
- extras=ecr-cache
timeout-minutes: 90
environment: release
env:
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-backend-cloud' || 'onyxdotapp/onyx-backend' }}
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Docker meta
id: meta
@@ -815,6 +1013,7 @@ jobs:
- volume=40gb
- extras=ecr-cache
timeout-minutes: 90
environment: release
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
@@ -827,6 +1026,20 @@ jobs:
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Docker meta
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
@@ -843,8 +1056,8 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push AMD64
id: build
@@ -879,6 +1092,7 @@ jobs:
- volume=40gb
- extras=ecr-cache
timeout-minutes: 90
environment: release
outputs:
digest: ${{ steps.build.outputs.digest }}
env:
@@ -891,6 +1105,20 @@ jobs:
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Docker meta
id: meta
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
@@ -907,8 +1135,8 @@ jobs:
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Build and push ARM64
id: build
@@ -944,19 +1172,34 @@ jobs:
- run-id=${{ github.run_id }}-merge-model-server
- extras=ecr-cache
timeout-minutes: 90
environment: release
env:
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-model-server-cloud' || 'onyxdotapp/onyx-model-server' }}
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
username: ${{ env.DOCKER_USERNAME }}
password: ${{ env.DOCKER_TOKEN }}
- name: Docker meta
id: meta
@@ -994,11 +1237,26 @@ jobs:
- run-id=${{ github.run_id }}-trivy-scan-web
- extras=ecr-cache
timeout-minutes: 90
environment: release
env:
REGISTRY_IMAGE: onyxdotapp/onyx-web-server
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Run Trivy vulnerability scanner
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3
with:
@@ -1014,8 +1272,8 @@ jobs:
docker run --rm -v $HOME/.cache/trivy:/root/.cache/trivy \
-e TRIVY_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-db:2" \
-e TRIVY_JAVA_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-java-db:1" \
-e TRIVY_USERNAME="${{ secrets.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ secrets.DOCKER_TOKEN }}" \
-e TRIVY_USERNAME="${{ env.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ env.DOCKER_TOKEN }}" \
aquasec/trivy@sha256:a22415a38938a56c379387a8163fcb0ce38b10ace73e593475d3658d578b2436 \
image \
--skip-version-check \
@@ -1034,11 +1292,26 @@ jobs:
- run-id=${{ github.run_id }}-trivy-scan-web-cloud
- extras=ecr-cache
timeout-minutes: 90
environment: release
env:
REGISTRY_IMAGE: onyxdotapp/onyx-web-server-cloud
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Run Trivy vulnerability scanner
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3
with:
@@ -1054,8 +1327,8 @@ jobs:
docker run --rm -v $HOME/.cache/trivy:/root/.cache/trivy \
-e TRIVY_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-db:2" \
-e TRIVY_JAVA_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-java-db:1" \
-e TRIVY_USERNAME="${{ secrets.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ secrets.DOCKER_TOKEN }}" \
-e TRIVY_USERNAME="${{ env.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ env.DOCKER_TOKEN }}" \
aquasec/trivy@sha256:a22415a38938a56c379387a8163fcb0ce38b10ace73e593475d3658d578b2436 \
image \
--skip-version-check \
@@ -1074,6 +1347,7 @@ jobs:
- run-id=${{ github.run_id }}-trivy-scan-backend
- extras=ecr-cache
timeout-minutes: 90
environment: release
env:
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-backend-cloud' || 'onyxdotapp/onyx-backend' }}
steps:
@@ -1084,6 +1358,20 @@ jobs:
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Run Trivy vulnerability scanner
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3
with:
@@ -1100,8 +1388,8 @@ jobs:
-v ${{ github.workspace }}/backend/.trivyignore:/tmp/.trivyignore:ro \
-e TRIVY_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-db:2" \
-e TRIVY_JAVA_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-java-db:1" \
-e TRIVY_USERNAME="${{ secrets.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ secrets.DOCKER_TOKEN }}" \
-e TRIVY_USERNAME="${{ env.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ env.DOCKER_TOKEN }}" \
aquasec/trivy@sha256:a22415a38938a56c379387a8163fcb0ce38b10ace73e593475d3658d578b2436 \
image \
--skip-version-check \
@@ -1121,11 +1409,26 @@ jobs:
- run-id=${{ github.run_id }}-trivy-scan-model-server
- extras=ecr-cache
timeout-minutes: 90
environment: release
env:
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-model-server-cloud' || 'onyxdotapp/onyx-model-server' }}
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
DOCKER_USERNAME, deploy/docker-username
DOCKER_TOKEN, deploy/docker-token
parse-json-secrets: true
- name: Run Trivy vulnerability scanner
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3
with:
@@ -1141,8 +1444,8 @@ jobs:
docker run --rm -v $HOME/.cache/trivy:/root/.cache/trivy \
-e TRIVY_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-db:2" \
-e TRIVY_JAVA_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-java-db:1" \
-e TRIVY_USERNAME="${{ secrets.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ secrets.DOCKER_TOKEN }}" \
-e TRIVY_USERNAME="${{ env.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ env.DOCKER_TOKEN }}" \
aquasec/trivy@sha256:a22415a38938a56c379387a8163fcb0ce38b10ace73e593475d3658d578b2436 \
image \
--skip-version-check \
@@ -1170,12 +1473,26 @@ jobs:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 90
environment: release
steps:
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
- name: Get AWS Secrets
uses: aws-actions/aws-secretsmanager-get-secrets@a9a7eb4e2f2871d30dc5b892576fde60a2ecc802
with:
secret-ids: |
MONITOR_DEPLOYMENTS_WEBHOOK, deploy/monitor-deployments-webhook
parse-json-secrets: true
- name: Determine failed jobs
id: failed-jobs
shell: bash
@@ -1241,7 +1558,7 @@ jobs:
- name: Send Slack notification
uses: ./.github/actions/slack-notify
with:
webhook-url: ${{ secrets.MONITOR_DEPLOYMENTS_WEBHOOK }}
webhook-url: ${{ env.MONITOR_DEPLOYMENTS_WEBHOOK }}
failed-jobs: ${{ steps.failed-jobs.outputs.jobs }}
title: "🚨 Deployment Workflow Failed"
ref-name: ${{ github.ref_name }}

View File

@@ -567,6 +567,23 @@ def extract_content_words_from_recency_query(
return content_words_filtered[:MAX_CONTENT_WORDS]
def _is_valid_keyword_query(line: str) -> bool:
"""Check if a line looks like a valid keyword query vs explanatory text.
Returns False for lines that appear to be LLM explanations rather than keywords.
"""
# Reject lines that start with parentheses (explanatory notes)
if line.startswith("("):
return False
# Reject lines that are too long (likely sentences, not keywords)
# Keywords should be short - reject if > 50 chars or > 6 words
if len(line) > 50 or len(line.split()) > 6:
return False
return True
def expand_query_with_llm(query_text: str, llm: LLM) -> list[str]:
"""Use LLM to expand query into multiple search variations.
@@ -589,10 +606,18 @@ def expand_query_with_llm(query_text: str, llm: LLM) -> list[str]:
response_clean = _parse_llm_code_block_response(response)
# Split into lines and filter out empty lines
rephrased_queries = [
raw_queries = [
line.strip() for line in response_clean.split("\n") if line.strip()
]
# Filter out lines that look like explanatory text rather than keywords
rephrased_queries = [q for q in raw_queries if _is_valid_keyword_query(q)]
# Log if we filtered out garbage
if len(raw_queries) != len(rephrased_queries):
filtered_out = set(raw_queries) - set(rephrased_queries)
logger.warning(f"Filtered out non-keyword LLM responses: {filtered_out}")
# If no queries generated, use empty query
if not rephrased_queries:
logger.debug("No content keywords extracted from query expansion")

View File

@@ -369,6 +369,8 @@ def _patch_openai_responses_chunk_parser() -> None:
# New output item added
output_item = parsed_chunk.get("item", {})
if output_item.get("type") == "function_call":
# Track that we've received tool calls via streaming
self._has_streamed_tool_calls = True
return GenericStreamingChunk(
text="",
tool_use=ChatCompletionToolCallChunk(
@@ -394,6 +396,8 @@ def _patch_openai_responses_chunk_parser() -> None:
elif event_type == "response.function_call_arguments.delta":
content_part: Optional[str] = parsed_chunk.get("delta", None)
if content_part:
# Track that we've received tool calls via streaming
self._has_streamed_tool_calls = True
return GenericStreamingChunk(
text="",
tool_use=ChatCompletionToolCallChunk(
@@ -491,22 +495,72 @@ def _patch_openai_responses_chunk_parser() -> None:
elif event_type == "response.completed":
# Final event signaling all output items (including parallel tool calls) are done
# Check if we already received tool calls via streaming events
# There is an issue where OpenAI (not via Azure) will give back the tool calls streamed out as tokens
# But on Azure, it's only given out all at once. OpenAI also happens to give back the tool calls in the
# response.completed event so we need to throw it out here or there are duplicate tool calls.
has_streamed_tool_calls = getattr(self, "_has_streamed_tool_calls", False)
response_data = parsed_chunk.get("response", {})
# Determine finish reason based on response content
finish_reason = "stop"
if response_data.get("output"):
for item in response_data["output"]:
if isinstance(item, dict) and item.get("type") == "function_call":
finish_reason = "tool_calls"
break
return GenericStreamingChunk(
text="",
tool_use=None,
is_finished=True,
finish_reason=finish_reason,
usage=None,
output_items = response_data.get("output", [])
# Check if there are function_call items in the output
has_function_calls = any(
isinstance(item, dict) and item.get("type") == "function_call"
for item in output_items
)
if has_function_calls and not has_streamed_tool_calls:
# Azure's Responses API returns all tool calls in response.completed
# without streaming them incrementally. Extract them here.
from litellm.types.utils import (
Delta,
ModelResponseStream,
StreamingChoices,
)
tool_calls = []
for idx, item in enumerate(output_items):
if isinstance(item, dict) and item.get("type") == "function_call":
tool_calls.append(
ChatCompletionToolCallChunk(
id=item.get("call_id"),
index=idx,
type="function",
function=ChatCompletionToolCallFunctionChunk(
name=item.get("name"),
arguments=item.get("arguments", ""),
),
)
)
return ModelResponseStream(
choices=[
StreamingChoices(
index=0,
delta=Delta(tool_calls=tool_calls),
finish_reason="tool_calls",
)
]
)
elif has_function_calls:
# Tool calls were already streamed, just signal completion
return GenericStreamingChunk(
text="",
tool_use=None,
is_finished=True,
finish_reason="tool_calls",
usage=None,
)
else:
return GenericStreamingChunk(
text="",
tool_use=None,
is_finished=True,
finish_reason="stop",
usage=None,
)
else:
pass
@@ -631,6 +685,40 @@ def _patch_openai_responses_transform_response() -> None:
LiteLLMResponsesTransformationHandler.transform_response = _patched_transform_response # type: ignore[method-assign]
def _patch_azure_responses_should_fake_stream() -> None:
    """
    Patches AzureOpenAIResponsesAPIConfig.should_fake_stream to always return False.

    By default, LiteLLM uses "fake streaming" (MockResponsesAPIStreamingIterator) for models
    not in its database. This causes Azure custom model deployments to buffer the entire
    response before yielding, resulting in poor time-to-first-token.

    Azure's Responses API supports native streaming, so we override this to always use
    real streaming (SyncResponsesAPIStreamingIterator).
    """
    from litellm.llms.azure.responses.transformation import (
        AzureOpenAIResponsesAPIConfig,
    )

    # Idempotence guard: if our replacement is already installed, do nothing.
    installed_name = getattr(
        AzureOpenAIResponsesAPIConfig.should_fake_stream, "__name__", ""
    )
    if installed_name == "_patched_should_fake_stream":
        return

    def _patched_should_fake_stream(
        self: Any,
        model: Optional[str],
        stream: Optional[bool],
        custom_llm_provider: Optional[str] = None,
    ) -> bool:
        # Azure Responses API supports native streaming - never fake it
        return False

    # Tag the replacement so the guard above can recognize it on re-entry.
    _patched_should_fake_stream.__name__ = "_patched_should_fake_stream"
    AzureOpenAIResponsesAPIConfig.should_fake_stream = _patched_should_fake_stream  # type: ignore[method-assign]
def apply_monkey_patches() -> None:
"""
Apply all necessary monkey patches to LiteLLM for compatibility.
@@ -640,12 +728,13 @@ def apply_monkey_patches() -> None:
- Patching OllamaChatCompletionResponseIterator.chunk_parser for streaming content
- Patching OpenAiResponsesToChatCompletionStreamIterator.chunk_parser for OpenAI Responses API
- Patching LiteLLMResponsesTransformationHandler.transform_response for non-streaming responses
- Patching LiteLLMResponsesTransformationHandler._convert_content_str_to_input_text for tool content types
- Patching AzureOpenAIResponsesAPIConfig.should_fake_stream to enable native streaming
"""
_patch_ollama_transform_request()
_patch_ollama_chunk_parser()
_patch_openai_responses_chunk_parser()
_patch_openai_responses_transform_response()
_patch_azure_responses_should_fake_stream()
def _extract_reasoning_content(message: dict) -> Tuple[Optional[str], Optional[str]]:

View File

@@ -301,6 +301,12 @@ class LitellmLLM(LLM):
)
is_ollama = self._model_provider == LlmProviderNames.OLLAMA_CHAT
is_mistral = self._model_provider == LlmProviderNames.MISTRAL
is_vertex_ai = self._model_provider == LlmProviderNames.VERTEX_AI
# Vertex Anthropic Opus 4.5 rejects output_config (LiteLLM maps reasoning_effort).
# Keep this guard until LiteLLM/Vertex accept the field for this model.
is_vertex_opus_4_5 = (
is_vertex_ai and "claude-opus-4-5" in self.config.model_name.lower()
)
#########################
# Build arguments
@@ -331,12 +337,16 @@ class LitellmLLM(LLM):
# Temperature
temperature = 1 if is_reasoning else self._temperature
if stream:
if stream and not is_vertex_opus_4_5:
optional_kwargs["stream_options"] = {"include_usage": True}
# Use configured default if not provided (if not set in env, low)
reasoning_effort = reasoning_effort or ReasoningEffort(DEFAULT_REASONING_EFFORT)
if is_reasoning and reasoning_effort != ReasoningEffort.OFF:
if (
is_reasoning
and reasoning_effort != ReasoningEffort.OFF
and not is_vertex_opus_4_5
):
if is_openai_model:
# OpenAI API does not accept reasoning params for GPT 5 chat models
# (neither reasoning nor reasoning_effort are accepted)

View File

@@ -1,30 +1,39 @@
from onyx.configs.app_configs import MAX_SLACK_QUERY_EXPANSIONS
SLACK_QUERY_EXPANSION_PROMPT = f"""
Rewrite the user's query and, if helpful, split it into at most {MAX_SLACK_QUERY_EXPANSIONS} \
keyword-only queries, so that Slack's keyword search yields the best matches.
Rewrite the user's query into at most {MAX_SLACK_QUERY_EXPANSIONS} keyword-only queries for Slack's keyword search.
Keep in mind the Slack's search behavior:
- Pure keyword AND search (no semantics).
- Word order matters.
- More words = fewer matches, so keep each query concise.
- IMPORTANT: Prefer simple 1-2 word queries over longer multi-word queries.
Slack search behavior:
- Pure keyword AND search (no semantics)
- More words = fewer matches, so keep queries concise (1-3 words)
Critical: Extract ONLY keywords that would actually appear in Slack message content.
ALWAYS include:
- Person names (e.g., "Sarah Chen", "Mike Johnson") - people search for messages from/about specific people
- Project/product names, technical terms, proper nouns
- Actual content words: "performance", "bug", "deployment", "API", "error"
DO NOT include:
- Meta-words: "topics", "conversations", "discussed", "summary", "messages", "big", "main", "talking"
- Temporal: "today", "yesterday", "week", "month", "recent", "past", "last"
- Channels/Users: "general", "eng-general", "engineering", "@username"
DO include:
- Actual content: "performance", "bug", "deployment", "API", "database", "error", "feature"
- Meta-words: "topics", "conversations", "discussed", "summary", "messages"
- Temporal: "today", "yesterday", "week", "month", "recent", "last"
- Channel names: "general", "eng-general", "random"
Examples:
Query: "what are the big topics in eng-general this week?"
Output:
Query: "messages with Sarah about the deployment"
Output:
Sarah deployment
Sarah
deployment
Query: "what did Mike say about the budget?"
Output:
Mike budget
Mike
budget
Query: "performance issues in eng-general"
Output:
performance issues
@@ -41,7 +50,7 @@ Now process this query:
{{query}}
Output:
Output (keywords only, one per line, NO explanations or commentary):
"""
SLACK_DATE_EXTRACTION_PROMPT = """

View File

@@ -410,26 +410,20 @@ def list_llm_provider_basics(
all_providers = fetch_existing_llm_providers(db_session)
user_group_ids = fetch_user_group_ids(db_session, user) if user else set()
is_admin = user and user.role == UserRole.ADMIN
is_admin = user is not None and user.role == UserRole.ADMIN
accessible_providers = []
for provider in all_providers:
# Include all public providers
if provider.is_public:
accessible_providers.append(LLMProviderDescriptor.from_model(provider))
continue
# Include restricted providers user has access to via groups
if is_admin:
# Admins see all providers
accessible_providers.append(LLMProviderDescriptor.from_model(provider))
elif provider.groups:
# User must be in at least one of the provider's groups
if user_group_ids.intersection({g.id for g in provider.groups}):
accessible_providers.append(LLMProviderDescriptor.from_model(provider))
elif not provider.personas:
# No restrictions = accessible
# Use centralized access control logic with persona=None since we're
# listing providers without a specific persona context. This correctly:
# - Includes all public providers
# - Includes providers user can access via group membership
# - Excludes persona-only restricted providers (requires specific persona)
# - Excludes non-public providers with no restrictions (admin-only)
if can_user_access_llm_provider(
provider, user_group_ids, persona=None, is_admin=is_admin
):
accessible_providers.append(LLMProviderDescriptor.from_model(provider))
end_time = datetime.now(timezone.utc)

View File

@@ -309,6 +309,63 @@ def test_get_llm_for_persona_falls_back_when_access_denied(
assert fallback_llm.config.model_name == default_provider.default_model_name
def test_list_llm_provider_basics_excludes_non_public_unrestricted(
    users: tuple[DATestUser, DATestUser],
) -> None:
    """Non-public providers with no group/persona restrictions are admin-only.

    Regression test for the bug where non-public providers with no
    restrictions were incorrectly shown to all users via /llm/provider
    instead of being visible to admins only.
    """
    admin_user, basic_user = users

    # A public provider: should be visible to every user.
    public_provider = LLMProviderManager.create(
        name="public-provider",
        is_public=True,
        set_as_default=True,
        user_performing_action=admin_user,
    )
    # A non-public provider with no group/persona restrictions: admin-only.
    non_public_provider = LLMProviderManager.create(
        name="non-public-unrestricted",
        is_public=False,
        groups=[],
        personas=[],
        set_as_default=False,
        user_performing_action=admin_user,
    )

    def _visible_provider_names(user: DATestUser) -> list[str]:
        # Fetch the provider names the given user can list via the endpoint.
        response = requests.get(
            f"{API_SERVER_URL}/llm/provider",
            headers=user.headers,
        )
        assert response.status_code == 200
        return [p["name"] for p in response.json()]

    # Basic user: sees the public provider only.
    basic_names = _visible_provider_names(basic_user)
    assert public_provider.name in basic_names
    assert non_public_provider.name not in basic_names

    # Admin user: sees both providers.
    admin_names = _visible_provider_names(admin_user)
    assert public_provider.name in admin_names
    assert non_public_provider.name in admin_names
def test_provider_delete_clears_persona_references(reset: None) -> None:
"""Test that deleting a provider automatically clears persona references."""
admin_user = UserManager.create(name="admin_user")

View File

@@ -270,7 +270,7 @@ def test_web_search_endpoints_with_exa(
provider_id = _activate_exa_provider(admin_user)
assert isinstance(provider_id, int)
search_request = {"queries": ["latest ai research news"], "max_results": 3}
search_request = {"queries": ["wikipedia python programming"], "max_results": 3}
lite_response = requests.post(
f"{API_SERVER_URL}/web-search/search-lite",

View File

@@ -409,6 +409,53 @@ def test_multiple_tool_calls_streaming(default_multi_llm: LitellmLLM) -> None:
)
def test_vertex_stream_omits_stream_options() -> None:
    """Vertex Opus 4.5 streaming must not pass `stream_options` to litellm."""
    opus_model = "claude-opus-4-5@20251101"
    llm = LitellmLLM(
        api_key="test_key",
        timeout=30,
        model_provider=LlmProviderNames.VERTEX_AI,
        model_name=opus_model,
        max_input_tokens=get_max_input_tokens(
            model_provider=LlmProviderNames.VERTEX_AI,
            model_name=opus_model,
        ),
    )

    with patch("litellm.completion") as mock_completion:
        mock_completion.return_value = []
        messages: LanguageModelInput = [UserMessage(content="Hi")]
        # Drain the generator so litellm.completion is actually invoked.
        list(llm.stream(messages))

    call_kwargs = mock_completion.call_args.kwargs
    assert "stream_options" not in call_kwargs
def test_vertex_opus_4_5_omits_reasoning_effort() -> None:
    """Vertex Opus 4.5 must not receive `reasoning_effort`, even when reasoning."""
    opus_model = "claude-opus-4-5@20251101"
    llm = LitellmLLM(
        api_key="test_key",
        timeout=30,
        model_provider=LlmProviderNames.VERTEX_AI,
        model_name=opus_model,
        max_input_tokens=get_max_input_tokens(
            model_provider=LlmProviderNames.VERTEX_AI,
            model_name=opus_model,
        ),
    )

    # Force the reasoning-model path so the guard (not model detection)
    # is what keeps reasoning_effort out of the call.
    with (
        patch("litellm.completion") as mock_completion,
        patch("onyx.llm.multi_llm.model_is_reasoning_model", return_value=True),
    ):
        mock_completion.return_value = []
        messages: LanguageModelInput = [UserMessage(content="Hi")]
        # Drain the generator so litellm.completion is actually invoked.
        list(llm.stream(messages))

    call_kwargs = mock_completion.call_args.kwargs
    assert "reasoning_effort" not in call_kwargs
def test_user_identity_metadata_enabled(default_multi_llm: LitellmLLM) -> None:
with (
patch("litellm.completion") as mock_completion,

View File

@@ -21,9 +21,9 @@ use tauri::{
webview::PageLoadPayload, AppHandle, Manager, Webview, WebviewUrl, WebviewWindowBuilder,
};
use tauri_plugin_global_shortcut::{Code, GlobalShortcutExt, Modifiers, Shortcut};
use url::Url;
#[cfg(target_os = "macos")]
use tokio::time::sleep;
use url::Url;
#[cfg(target_os = "macos")]
use window_vibrancy::{apply_vibrancy, NSVisualEffectMaterial};
@@ -76,39 +76,25 @@ fn get_config_path() -> Option<PathBuf> {
}
/// Load config from file, or create default if it doesn't exist
fn load_config() -> AppConfig {
fn load_config() -> (AppConfig, bool) {
let config_path = match get_config_path() {
Some(path) => path,
None => {
eprintln!("Could not determine config directory, using defaults");
return AppConfig::default();
return (AppConfig::default(), false);
}
};
if config_path.exists() {
match fs::read_to_string(&config_path) {
Ok(contents) => match serde_json::from_str(&contents) {
Ok(config) => {
return config;
}
Err(e) => {
eprintln!("Failed to parse config: {}, using defaults", e);
}
},
Err(e) => {
eprintln!("Failed to read config: {}, using defaults", e);
}
}
} else {
// Create default config file
if let Err(e) = save_config(&AppConfig::default()) {
eprintln!("Failed to create default config: {}", e);
} else {
println!("Created default config at {:?}", config_path);
}
if !config_path.exists() {
return (AppConfig::default(), false);
}
AppConfig::default()
match fs::read_to_string(&config_path) {
Ok(contents) => match serde_json::from_str(&contents) {
Ok(config) => (config, true),
Err(_) => (AppConfig::default(), false),
},
Err(_) => (AppConfig::default(), false),
}
}
/// Save config to file
@@ -128,7 +114,11 @@ fn save_config(config: &AppConfig) -> Result<(), String> {
}
// Global config state
struct ConfigState(RwLock<AppConfig>);
struct ConfigState {
config: RwLock<AppConfig>,
config_initialized: RwLock<bool>,
app_base_url: RwLock<Option<Url>>,
}
fn focus_main_window(app: &AppHandle) {
if let Some(window) = app.get_webview_window("main") {
@@ -142,7 +132,7 @@ fn focus_main_window(app: &AppHandle) {
fn trigger_new_chat(app: &AppHandle) {
let state = app.state::<ConfigState>();
let server_url = state.0.read().unwrap().server_url.clone();
let server_url = state.config.read().unwrap().server_url.clone();
if let Some(window) = app.get_webview_window("main") {
let url = format!("{}/chat", server_url);
@@ -152,7 +142,7 @@ fn trigger_new_chat(app: &AppHandle) {
fn trigger_new_window(app: &AppHandle) {
let state = app.state::<ConfigState>();
let server_url = state.0.read().unwrap().server_url.clone();
let server_url = state.config.read().unwrap().server_url.clone();
let handle = app.clone();
tauri::async_runtime::spawn(async move {
@@ -206,6 +196,30 @@ fn open_docs() {
}
}
/// Navigate the main window to the bundled settings page (index.html),
/// flagged via the `#settings` fragment so the page shows the settings panel.
fn open_settings(app: &AppHandle) {
    let state = app.state::<ConfigState>();
    let settings_url = state
        .app_base_url
        .read()
        .unwrap()
        .as_ref()
        .cloned()
        // `map` instead of `and_then(.. Some(..))`: the closure always
        // produces a value, so wrapping in Some was redundant.
        .map(|mut url| {
            url.set_query(None);
            url.set_fragment(Some("settings"));
            url.set_path("/");
            url
        })
        // Fall back to the default Tauri origin if the base URL was never captured.
        .or_else(|| Url::parse("tauri://localhost/#settings").ok());

    if let Some(window) = app.get_webview_window("main") {
        if let Some(url) = settings_url {
            let _ = window.navigate(url);
        }
    }
}
// ============================================================================
// Tauri Commands
// ============================================================================
@@ -213,7 +227,27 @@ fn open_docs() {
/// Get the current server URL
#[tauri::command]
fn get_server_url(state: tauri::State<ConfigState>) -> String {
state.0.read().unwrap().server_url.clone()
state.config.read().unwrap().server_url.clone()
}
#[derive(Serialize)]
struct BootstrapState {
    server_url: String,
    config_exists: bool,
}

/// Get the server URL plus whether a config file exists
#[tauri::command]
fn get_bootstrap_state(state: tauri::State<ConfigState>) -> BootstrapState {
    // Take each lock briefly and copy out what we need.
    let url = state.config.read().unwrap().server_url.clone();
    let initialized = *state.config_initialized.read().unwrap();

    // Report an existing config only when it was loaded/saved this session
    // AND the file is still present on disk (short-circuits the disk check
    // when not initialized).
    let exists = initialized
        && get_config_path().map(|path| path.exists()).unwrap_or(false);

    BootstrapState {
        server_url: url,
        config_exists: exists,
    }
}
/// Set a new server URL and save to config
@@ -224,9 +258,10 @@ fn set_server_url(state: tauri::State<ConfigState>, url: String) -> Result<Strin
return Err("URL must start with http:// or https://".to_string());
}
let mut config = state.0.write().unwrap();
let mut config = state.config.write().unwrap();
config.server_url = url.trim_end_matches('/').to_string();
save_config(&config)?;
*state.config_initialized.write().unwrap() = true;
Ok(config.server_url.clone())
}
@@ -315,7 +350,7 @@ fn open_config_directory() -> Result<(), String> {
/// Navigate to a specific path on the configured server
#[tauri::command]
fn navigate_to(window: tauri::WebviewWindow, state: tauri::State<ConfigState>, path: &str) {
let base_url = state.0.read().unwrap().server_url.clone();
let base_url = state.config.read().unwrap().server_url.clone();
let url = format!("{}{}", base_url, path);
let _ = window.eval(&format!("window.location.href = '{}'", url));
}
@@ -341,7 +376,7 @@ fn go_forward(window: tauri::WebviewWindow) {
/// Open a new window
#[tauri::command]
async fn new_window(app: AppHandle, state: tauri::State<'_, ConfigState>) -> Result<(), String> {
let server_url = state.0.read().unwrap().server_url.clone();
let server_url = state.config.read().unwrap().server_url.clone();
let window_label = format!("onyx-{}", uuid::Uuid::new_v4());
let builder = WebviewWindowBuilder::new(
@@ -385,9 +420,10 @@ async fn new_window(app: AppHandle, state: tauri::State<'_, ConfigState>) -> Res
/// Reset config to defaults
#[tauri::command]
fn reset_config(state: tauri::State<ConfigState>) -> Result<(), String> {
let mut config = state.0.write().unwrap();
let mut config = state.config.write().unwrap();
*config = AppConfig::default();
save_config(&config)?;
*state.config_initialized.write().unwrap() = true;
Ok(())
}
@@ -423,7 +459,7 @@ fn setup_shortcuts(app: &AppHandle) -> Result<(), Box<dyn std::error::Error>> {
let forward = Shortcut::new(Some(Modifiers::SUPER), Code::BracketRight);
let new_window_shortcut = Shortcut::new(Some(Modifiers::SUPER | Modifiers::SHIFT), Code::KeyN);
let show_app = Shortcut::new(Some(Modifiers::SUPER | Modifiers::SHIFT), Code::Space);
let open_settings = Shortcut::new(Some(Modifiers::SUPER), Code::Comma);
let open_settings_shortcut = Shortcut::new(Some(Modifiers::SUPER), Code::Comma);
let app_handle = app.clone();
@@ -435,7 +471,7 @@ fn setup_shortcuts(app: &AppHandle) -> Result<(), Box<dyn std::error::Error>> {
forward,
new_window_shortcut,
show_app,
open_settings,
open_settings_shortcut,
];
#[cfg(not(target_os = "macos"))]
@@ -446,7 +482,7 @@ fn setup_shortcuts(app: &AppHandle) -> Result<(), Box<dyn std::error::Error>> {
forward,
new_window_shortcut,
show_app,
open_settings,
open_settings_shortcut,
];
app.global_shortcut().on_shortcuts(
@@ -463,9 +499,8 @@ fn setup_shortcuts(app: &AppHandle) -> Result<(), Box<dyn std::error::Error>> {
let _ = window.eval("window.history.back()");
} else if shortcut == &forward {
let _ = window.eval("window.history.forward()");
} else if shortcut == &open_settings {
// Open config file for editing
let _ = open_config_file();
} else if shortcut == &open_settings_shortcut {
open_settings(&app_handle);
}
}
@@ -495,6 +530,7 @@ fn setup_app_menu(app: &AppHandle) -> tauri::Result<()> {
true,
Some("CmdOrCtrl+Shift+N"),
)?;
let settings_item = MenuItem::with_id(app, "open_settings", "Settings...", true, Some("CmdOrCtrl+Comma"))?;
let docs_item = MenuItem::with_id(app, "open_docs", "Onyx Documentation", true, None::<&str>)?;
if let Some(file_menu) = menu
@@ -503,12 +539,13 @@ fn setup_app_menu(app: &AppHandle) -> tauri::Result<()> {
.filter_map(|item| item.as_submenu().cloned())
.find(|submenu| submenu.text().ok().as_deref() == Some("File"))
{
file_menu.insert_items(&[&new_chat_item, &new_window_item], 0)?;
file_menu.insert_items(&[&new_chat_item, &new_window_item, &settings_item], 0)?;
} else {
let file_menu = SubmenuBuilder::new(app, "File")
.items(&[
&new_chat_item,
&new_window_item,
&settings_item,
&PredefinedMenuItem::close_window(app, None)?,
])
.build()?;
@@ -625,22 +662,20 @@ fn setup_tray_icon(app: &AppHandle) -> tauri::Result<()> {
fn main() {
// Load config at startup
let config = load_config();
let server_url = config.server_url.clone();
println!("Starting Onyx Desktop");
println!("Server URL: {}", server_url);
if let Some(path) = get_config_path() {
println!("Config file: {:?}", path);
}
let (config, config_initialized) = load_config();
tauri::Builder::default()
.plugin(tauri_plugin_shell::init())
.plugin(tauri_plugin_global_shortcut::Builder::new().build())
.plugin(tauri_plugin_window_state::Builder::default().build())
.manage(ConfigState(RwLock::new(config)))
.manage(ConfigState {
config: RwLock::new(config),
config_initialized: RwLock::new(config_initialized),
app_base_url: RwLock::new(None),
})
.invoke_handler(tauri::generate_handler![
get_server_url,
get_bootstrap_state,
set_server_url,
get_config_path_cmd,
open_config_file,
@@ -657,6 +692,7 @@ fn main() {
"open_docs" => open_docs(),
"new_chat" => trigger_new_chat(app),
"new_window" => trigger_new_window(app),
"open_settings" => open_settings(app),
_ => {}
})
.setup(move |app| {
@@ -675,7 +711,7 @@ fn main() {
eprintln!("Failed to setup tray icon: {}", e);
}
// Update main window URL to configured server and inject title bar
// Setup main window with vibrancy effect
if let Some(window) = app.get_webview_window("main") {
// Apply vibrancy effect for translucent glass look
#[cfg(target_os = "macos")]
@@ -683,14 +719,12 @@ fn main() {
let _ = apply_vibrancy(&window, NSVisualEffectMaterial::Sidebar, None, None);
}
if let Ok(target) = Url::parse(&server_url) {
if let Ok(current) = window.url() {
if current != target {
let _ = window.navigate(target);
}
} else {
let _ = window.navigate(target);
}
if let Ok(url) = window.url() {
let mut base_url = url;
base_url.set_query(None);
base_url.set_fragment(None);
base_url.set_path("/");
*app.state::<ConfigState>().app_base_url.write().unwrap() = Some(base_url);
}
#[cfg(target_os = "macos")]

View File

@@ -14,7 +14,7 @@
{
"title": "Onyx",
"label": "main",
"url": "https://cloud.onyx.app",
"url": "index.html",
"width": 1200,
"height": 800,
"minWidth": 800,
@@ -52,7 +52,7 @@
"entitlements": null,
"exceptionDomain": "cloud.onyx.app",
"minimumSystemVersion": "10.15",
"signingIdentity": "-",
"signingIdentity": null,
"dmg": {
"windowSize": {
"width": 660,

View File

@@ -4,28 +4,43 @@
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Onyx</title>
<link
href="https://fonts.googleapis.com/css2?family=Hanken+Grotesk:wght@400;500;600;700&display=swap"
rel="stylesheet"
/>
<style>
:root {
--background-900: #f5f5f5;
--background-800: #ffffff;
--text-light-05: rgba(0, 0, 0, 0.95);
--text-light-03: rgba(0, 0, 0, 0.6);
--white-10: rgba(0, 0, 0, 0.1);
--white-15: rgba(0, 0, 0, 0.15);
--white-20: rgba(0, 0, 0, 0.2);
--white-30: rgba(0, 0, 0, 0.3);
--font-hanken-grotesk: "Hanken Grotesk", -apple-system,
BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
}
* {
box-sizing: border-box;
margin: 0;
padding: 0;
box-sizing: border-box;
}
body {
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto,
Oxygen, Ubuntu, sans-serif;
background: linear-gradient(135deg, #1a1a2e 0%, #16213e 100%);
color: #fff;
font-family: var(--font-hanken-grotesk);
background: linear-gradient(135deg, #f5f5f5 0%, #ffffff 100%);
min-height: 100vh;
color: var(--text-light-05);
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
padding: 20px;
-webkit-user-select: none;
user-select: none;
}
/* Draggable titlebar area for macOS */
.titlebar {
position: fixed;
top: 0;
@@ -33,198 +48,451 @@
right: 0;
height: 28px;
-webkit-app-region: drag;
z-index: 10000;
}
.container {
text-align: center;
padding: 2rem;
.settings-container {
max-width: 500px;
width: 100%;
opacity: 0;
transform: translateY(8px);
pointer-events: none;
transition:
opacity 0.18s ease,
transform 0.18s ease;
}
.logo {
width: 80px;
height: 80px;
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
border-radius: 20px;
margin: 0 auto 1.5rem;
body.show-settings .settings-container {
opacity: 1;
transform: translateY(0);
pointer-events: auto;
}
.settings-panel {
background: linear-gradient(
to bottom,
rgba(255, 255, 255, 0.95),
rgba(245, 245, 245, 0.95)
);
backdrop-filter: blur(24px);
border-radius: 16px;
border: 1px solid var(--white-10);
overflow: hidden;
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1);
}
.settings-header {
padding: 24px;
border-bottom: 1px solid var(--white-10);
display: flex;
align-items: center;
gap: 12px;
}
.settings-icon {
width: 40px;
height: 40px;
border-radius: 12px;
background: white;
display: flex;
align-items: center;
justify-content: center;
font-size: 2.5rem;
font-weight: bold;
overflow: hidden;
}
h1 {
font-size: 2rem;
margin-bottom: 0.5rem;
.settings-icon svg {
width: 24px;
height: 24px;
color: #000;
}
.settings-title {
font-size: 20px;
font-weight: 600;
color: var(--text-light-05);
}
p {
color: #a0a0a0;
margin-bottom: 2rem;
.settings-content {
padding: 24px;
}
.loading {
.settings-section {
margin-bottom: 32px;
}
.settings-section:last-child {
margin-bottom: 0;
}
.section-title {
font-size: 11px;
font-weight: 600;
text-transform: uppercase;
letter-spacing: 0.05em;
color: var(--text-light-03);
margin-bottom: 12px;
}
.settings-group {
background: rgba(0, 0, 0, 0.03);
border-radius: 16px;
padding: 4px;
}
.setting-row {
display: flex;
gap: 0.5rem;
justify-content: center;
margin-bottom: 2rem;
justify-content: space-between;
align-items: center;
padding: 12px;
}
.loading span {
width: 10px;
height: 10px;
background: #667eea;
border-radius: 50%;
animation: bounce 1.4s ease-in-out infinite;
.setting-row-content {
display: flex;
flex-direction: column;
gap: 4px;
flex: 1;
}
.loading span:nth-child(1) {
animation-delay: 0s;
}
.loading span:nth-child(2) {
animation-delay: 0.2s;
}
.loading span:nth-child(3) {
animation-delay: 0.4s;
.setting-label {
font-size: 14px;
font-weight: 400;
color: var(--text-light-05);
}
@keyframes bounce {
0%,
80%,
100% {
transform: scale(0.8);
opacity: 0.5;
}
40% {
transform: scale(1.2);
opacity: 1;
}
.setting-description {
font-size: 12px;
color: var(--text-light-03);
}
.btn {
background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
color: white;
border: none;
padding: 0.75rem 2rem;
.setting-divider {
height: 1px;
background: var(--white-10);
margin: 0 4px;
}
.input-field {
width: 100%;
padding: 10px 12px;
border: 1px solid var(--white-10);
border-radius: 8px;
font-size: 1rem;
cursor: pointer;
transition:
transform 0.2s,
box-shadow 0.2s;
font-size: 14px;
background: rgba(0, 0, 0, 0.05);
color: var(--text-light-05);
font-family: var(--font-hanken-grotesk);
transition: all 0.2s;
-webkit-app-region: no-drag;
}
.btn:hover {
transform: translateY(-2px);
box-shadow: 0 4px 20px rgba(102, 126, 234, 0.4);
.input-field:focus {
outline: none;
border-color: var(--white-30);
background: rgba(0, 0, 0, 0.08);
box-shadow: 0 0 0 2px rgba(0, 0, 0, 0.05);
}
.shortcuts {
margin-top: 3rem;
padding: 1.5rem;
background: rgba(255, 255, 255, 0.05);
border-radius: 12px;
text-align: left;
.input-field::placeholder {
color: var(--text-light-03);
}
.shortcuts h3 {
font-size: 0.875rem;
text-transform: uppercase;
letter-spacing: 0.05em;
color: #a0a0a0;
margin-bottom: 1rem;
.input-field.error {
border-color: #ef4444;
}
.shortcut {
display: flex;
justify-content: space-between;
padding: 0.5rem 0;
border-bottom: 1px solid rgba(255, 255, 255, 0.1);
.error-message {
color: #ef4444;
font-size: 12px;
margin-top: 4px;
padding-left: 12px;
display: none;
}
.shortcut:last-child {
border-bottom: none;
.error-message.visible {
display: block;
}
.shortcut-key {
font-family:
SF Mono,
Monaco,
monospace;
background: rgba(255, 255, 255, 0.1);
padding: 0.25rem 0.5rem;
.toggle-switch {
position: relative;
display: inline-block;
width: 44px;
height: 24px;
flex-shrink: 0;
}
.toggle-switch input {
opacity: 0;
width: 0;
height: 0;
}
.toggle-slider {
position: absolute;
cursor: pointer;
top: 0;
left: 0;
right: 0;
bottom: 0;
background-color: rgba(0, 0, 0, 0.15);
transition: 0.3s;
border-radius: 24px;
}
.toggle-slider:before {
position: absolute;
content: "";
height: 18px;
width: 18px;
left: 3px;
bottom: 3px;
background-color: white;
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.2);
transition: 0.3s;
border-radius: 50%;
}
input:checked + .toggle-slider {
background-color: rgba(0, 0, 0, 0.3);
}
input:checked + .toggle-slider:before {
transform: translateX(20px);
}
.button {
padding: 12px 24px;
border-radius: 8px;
border: none;
cursor: pointer;
font-size: 14px;
font-weight: 600;
transition: all 0.2s;
font-family: var(--font-hanken-grotesk);
width: 100%;
margin-top: 24px;
-webkit-app-region: no-drag;
}
.button.primary {
background: #286df8;
color: white;
}
.button.primary:hover {
background: #1e5cd6;
box-shadow: 0 4px 12px rgba(40, 109, 248, 0.3);
}
.button.primary:disabled {
opacity: 0.5;
cursor: not-allowed;
box-shadow: none;
}
kbd {
background: rgba(0, 0, 0, 0.1);
border: 1px solid var(--white-10);
border-radius: 4px;
font-size: 0.75rem;
padding: 2px 6px;
font-family: monospace;
font-weight: 500;
color: var(--text-light-05);
font-size: 11px;
}
</style>
</head>
<body>
<div class="titlebar"></div>
<div class="container">
<div class="logo">O</div>
<h1>Onyx</h1>
<p>Connecting to Onyx Cloud...</p>
<div class="settings-container">
<div class="settings-panel">
<div class="settings-header">
<div class="settings-icon">
<svg
viewBox="0 0 56 56"
xmlns="http://www.w3.org/2000/svg"
fill="currentColor"
>
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="M28 0 10.869 7.77 28 15.539l17.131-7.77L28 0Zm0 40.461-17.131 7.77L28 56l17.131-7.77L28 40.461Zm20.231-29.592L56 28.001l-7.769 17.131L40.462 28l7.769-17.131ZM15.538 28 7.77 10.869 0 28l7.769 17.131L15.538 28Z"
/>
</svg>
</div>
<h1 class="settings-title">Settings</h1>
</div>
<div class="loading">
<span></span>
<span></span>
<span></span>
</div>
<div class="settings-content">
<section class="settings-section">
<div class="section-title">GENERAL</div>
<div class="settings-group">
<div class="setting-row">
<div class="setting-row-content">
<label class="setting-label" for="onyxDomain"
>Root Domain</label
>
<div class="setting-description">
The root URL for your Onyx instance
</div>
</div>
</div>
<div class="setting-divider"></div>
<div class="setting-row" style="padding: 12px">
<input
type="text"
id="onyxDomain"
class="input-field"
placeholder="https://cloud.onyx.app"
autocomplete="off"
autocorrect="off"
autocapitalize="off"
spellcheck="false"
/>
</div>
<div class="error-message" id="errorMessage">
Please enter a valid URL starting with http:// or https://
</div>
</div>
</section>
<button
class="btn"
onclick="window.location.href='https://cloud.onyx.app'"
>
Open Onyx Cloud
</button>
<p style="margin-top: 1.5rem; font-size: 0.875rem; color: #666">
Self-hosted? Press
<span
class="shortcut-key"
style="display: inline; padding: 0.15rem 0.4rem"
>⌘ ,</span
>
to configure your server URL.
</p>
<div class="shortcuts">
<h3>Keyboard Shortcuts</h3>
<div class="shortcut">
<span>New Chat</span>
<span class="shortcut-key">⌘ N</span>
</div>
<div class="shortcut">
<span>New Window</span>
<span class="shortcut-key">⌘ ⇧ N</span>
</div>
<div class="shortcut">
<span>Reload</span>
<span class="shortcut-key">⌘ R</span>
</div>
<div class="shortcut">
<span>Back</span>
<span class="shortcut-key">⌘ [</span>
</div>
<div class="shortcut">
<span>Forward</span>
<span class="shortcut-key">⌘ ]</span>
</div>
<div class="shortcut">
<span>Settings / Config</span>
<span class="shortcut-key">⌘ ,</span>
<button class="button primary" id="saveBtn">Save & Connect</button>
</div>
</div>
</div>
<script>
// Import the Tauri invoke API used to talk to the Rust backend.
const { invoke } = window.__TAURI__.core;

// Fallback server URL used when no domain has been configured yet.
const DEFAULT_DOMAIN = "https://cloud.onyx.app";
let currentServerUrl = "";

// Cached DOM elements for the settings form.
const domainInput = document.getElementById("onyxDomain");
const errorMessage = document.getElementById("errorMessage");
const saveBtn = document.getElementById("saveBtn");

// NOTE: the previous unconditional `setTimeout` redirect to Onyx Cloud was
// removed. It fired ~1.5s after load even when init() had decided to show
// the settings modal (first launch or an explicit "Settings" request),
// yanking the user to the cloud before they could enter a server URL.
// init() already performs the redirect for configured, non-settings launches.
// Reveal the settings UI by toggling the body-level visibility class
// that the page's CSS keys off of.
function showSettings() {
  const { classList } = document.body;
  classList.add("show-settings");
}
// Boot sequence: load the persisted config from the backend, then either
// surface the settings modal (explicit request or first launch) or jump
// straight to the configured server.
async function init() {
  try {
    const bootstrap = await invoke("get_bootstrap_state");
    currentServerUrl = bootstrap.server_url;

    // Pre-fill the input with the saved URL, falling back to the cloud default.
    domainInput.value = currentServerUrl || DEFAULT_DOMAIN;

    // Detect an explicit settings request (Settings menu item / shortcut).
    const params = new URLSearchParams(window.location.search);
    const explicitlyRequested =
      window.location.hash === "#settings" ||
      params.get("settings") === "true";

    if (explicitlyRequested) {
      // User opened settings on purpose; let them edit and save.
      showSettings();
      return;
    }

    if (!bootstrap.config_exists || !currentServerUrl) {
      // First launch (no config on disk) - require the user to configure.
      showSettings();
      return;
    }

    // Configured and not an explicit settings visit: go to the server.
    window.location.href = currentServerUrl;
  } catch (error) {
    // Could not read bootstrap state - fall back to the cloud default and
    // let the user confirm or change it in the settings modal.
    domainInput.value = DEFAULT_DOMAIN;
    showSettings();
  }
}
// Validate a user-supplied server URL.
// Returns { valid: true, url } with the trimmed URL on success, or
// { valid: false, error } with a user-facing message on failure.
function validateUrl(url) {
  const trimmedUrl = url.trim();
  if (!trimmedUrl) {
    return { valid: false, error: "URL cannot be empty" };
  }
  // Scheme check is case-insensitive so inputs like "HTTP://host" (which
  // the URL parser accepts) are not rejected; the user's original text is
  // preserved in the returned url.
  const lowered = trimmedUrl.toLowerCase();
  if (!lowered.startsWith("http://") && !lowered.startsWith("https://")) {
    return {
      valid: false,
      error: "URL must start with http:// or https://",
    };
  }
  try {
    // Structural validation (host present, no malformed characters).
    new URL(trimmedUrl);
    return { valid: true, url: trimmedUrl };
  } catch {
    return { valid: false, error: "Please enter a valid URL" };
  }
}
// Surface a validation error: show the message and highlight the input.
function showError(message) {
  errorMessage.textContent = message;
  errorMessage.classList.add("visible");
  domainInput.classList.add("error");
}
// Reset any visible validation error state on the form.
function clearError() {
  errorMessage.classList.remove("visible");
  domainInput.classList.remove("error");
}
// Persist the entered server URL via the backend, then navigate to it.
// On failure, shows the error inline and re-enables the save button.
async function saveConfiguration() {
  clearError();

  const result = validateUrl(domainInput.value);
  if (!result.valid) {
    showError(result.error);
    return;
  }

  try {
    // Lock the button while the backend call is in flight.
    saveBtn.disabled = true;
    saveBtn.textContent = "Saving...";

    // Ask the Rust side to persist the URL.
    await invoke("set_server_url", { url: result.url });

    // Saved successfully - load the configured server (its login page).
    window.location.href = result.url;
  } catch (error) {
    showError(error || "Failed to save configuration");
    saveBtn.disabled = false;
    saveBtn.textContent = "Save & Connect";
  }
}
// Wire up form interactions.
domainInput.addEventListener("input", clearError);
// `keydown` instead of the deprecated `keypress` event; the Enter-key
// behavior is identical.
domainInput.addEventListener("keydown", (e) => {
  if (e.key === "Enter") {
    saveConfiguration();
  }
});
saveBtn.addEventListener("click", saveConfiguration);

// Run init() once the DOM is ready (immediately if it already is).
if (document.readyState === "loading") {
  document.addEventListener("DOMContentLoaded", init);
} else {
  init();
}
</script>
</body>
</html>

View File

@@ -2,8 +2,6 @@
// This script injects a draggable title bar that matches Onyx design system
(function () {
console.log("[Onyx Desktop] Title bar script loaded");
const TITLEBAR_ID = "onyx-desktop-titlebar";
const TITLEBAR_HEIGHT = 36;
const STYLE_ID = "onyx-desktop-titlebar-style";
@@ -31,12 +29,7 @@
try {
await invoke("start_drag_window");
return;
} catch (err) {
console.error(
"[Onyx Desktop] Failed to start dragging via invoke:",
err,
);
}
} catch (err) {}
}
const appWindow =
@@ -46,14 +39,7 @@
if (appWindow?.startDragging) {
try {
await appWindow.startDragging();
} catch (err) {
console.error(
"[Onyx Desktop] Failed to start dragging via appWindow:",
err,
);
}
} else {
console.error("[Onyx Desktop] No Tauri drag API available.");
} catch (err) {}
}
}
@@ -177,7 +163,6 @@
function mountTitleBar() {
if (!document.body) {
console.error("[Onyx Desktop] document.body not found");
return;
}
@@ -193,7 +178,6 @@
const titleBar = buildTitleBar();
document.body.insertBefore(titleBar, document.body.firstChild);
injectStyles();
console.log("[Onyx Desktop] Title bar injected");
}
function syncViewportHeight() {

Binary file not shown.

Before

Width:  |  Height:  |  Size: 548 B

After

Width:  |  Height:  |  Size: 581 B

View File

@@ -101,7 +101,7 @@ function Main() {
};
return (
<Section alignItems="stretch">
<Section alignItems="stretch" justifyContent="start" height="auto">
{popup}
<Text>

View File

@@ -25,7 +25,6 @@ import { useDocumentSets } from "@/lib/hooks/useDocumentSets";
import { useAgents } from "@/hooks/useAgents";
import { ChatPopup } from "@/app/chat/components/ChatPopup";
import ExceptionTraceModal from "@/components/modals/ExceptionTraceModal";
import { SEARCH_TOOL_ID } from "@/app/chat/components/tools/constants";
import { useUser } from "@/components/user/UserProvider";
import NoAssistantModal from "@/components/modals/NoAssistantModal";
import TextView from "@/components/chat/TextView";
@@ -378,9 +377,7 @@ export default function ChatPage({ firstMessage }: ChatPageProps) {
const retrievalEnabled = useMemo(() => {
if (liveAssistant) {
return liveAssistant.tools.some(
(tool) => tool.in_code_tool_id === SEARCH_TOOL_ID
);
return personaIncludesRetrieval(liveAssistant);
}
return false;
}, [liveAssistant]);

View File

@@ -68,7 +68,7 @@ export const CodeBlock = memo(function CodeBlock({
"bg-background-tint-00",
"rounded",
"text-xs",
"inline-block",
"inline",
"whitespace-pre-wrap",
"break-words",
"py-0.5",

View File

@@ -41,7 +41,8 @@ export const ConnectorMultiSelect = ({
(connector) => !selectedIds.includes(connector.cc_pair_id)
);
const allConnectorsSelected = unselectedConnectors.length === 0;
const allConnectorsSelected =
connectors.length > 0 && unselectedConnectors.length === 0;
const filteredUnselectedConnectors = unselectedConnectors.filter(
(connector) => {
@@ -50,17 +51,8 @@ export const ConnectorMultiSelect = ({
}
);
useEffect(() => {
if (allConnectorsSelected && open) {
setOpen(false);
inputRef.current?.blur();
setSearchQuery("");
}
}, [allConnectorsSelected, open]);
useEffect(() => {
if (allConnectorsSelected) {
inputRef.current?.blur();
setSearchQuery("");
}
}, [allConnectorsSelected, selectedIds]);
@@ -111,7 +103,7 @@ export const ConnectorMultiSelect = ({
? "All connectors selected"
: placeholder;
const isInputDisabled = disabled || allConnectorsSelected;
const isInputDisabled = disabled;
return (
<div className="flex flex-col w-full space-y-2 mb-4">
@@ -129,32 +121,39 @@ export const ConnectorMultiSelect = ({
value={searchQuery}
disabled={isInputDisabled}
onChange={(e) => {
setSearchQuery(e.target.value);
setOpen(true);
}}
onFocus={() => {
if (!allConnectorsSelected) {
setSearchQuery(e.target.value);
setOpen(true);
}
}}
onFocus={() => {
setOpen(true);
}}
onKeyDown={handleKeyDown}
className={
allConnectorsSelected
? "rounded-12 bg-background-neutral-01"
: "rounded-12"
}
className="rounded-12"
/>
{open && !allConnectorsSelected && (
{open && (
<div
ref={dropdownRef}
className="absolute z-50 w-full mt-1 rounded-12 border border-border-02 bg-background-neutral-00 shadow-md default-scrollbar max-h-[300px] overflow-auto"
>
{filteredUnselectedConnectors.length === 0 ? (
<div className="py-4 text-center text-xs text-text-03">
{searchQuery
? "No matching connectors found"
: "No more connectors available"}
{allConnectorsSelected ? (
<div className="py-4 px-3">
<Text as="p" text03 className="text-center text-xs">
All available connectors have been selected. Remove connectors
below to add different ones.
</Text>
</div>
) : filteredUnselectedConnectors.length === 0 ? (
<div className="py-4 px-3">
<Text as="p" text03 className="text-center text-xs">
{searchQuery
? "No matching connectors found"
: connectors.length === 0
? "No private connectors available. Create a private connector first."
: "No more connectors available"}
</Text>
</div>
) : (
<div>

View File

@@ -28,13 +28,10 @@ export default function SourceTile({
w-40
cursor-pointer
shadow-md
bg-background-tint-00
hover:bg-background-tint-02
relative
${
preSelect
? "bg-background-tint-01 subtle-pulse"
: "bg-background-tint-00"
}
${preSelect ? "subtle-pulse" : ""}
`}
href={navigationUrl as Route}
>

View File

@@ -13,7 +13,6 @@ import React, {
const DEFAULT_ANCHOR_OFFSET_PX = 16; // 1rem
const DEFAULT_FADE_THRESHOLD_PX = 80; // 5rem
const DEFAULT_BUTTON_THRESHOLD_PX = 32; // 2rem
const SCROLL_DEBOUNCE_MS = 100;
const FADE_OVERLAY_HEIGHT = "h-8"; // 2rem
export interface ScrollState {
@@ -30,8 +29,8 @@ export interface ChatScrollContainerProps {
children: React.ReactNode;
/**
* CSS selector for the element to anchor at top (e.g., "#message-123")
* When set, positions this element at top with spacer below content
* CSS selector for the anchor element (e.g., "#message-123")
* Used to scroll to a specific message position
*/
anchorSelector?: string;
@@ -90,7 +89,6 @@ const ChatScrollContainer = React.memo(
const scrolledForSessionRef = useRef<string | null>(null);
const prevAnchorSelectorRef = useRef<string | null>(null);
const [spacerHeight, setSpacerHeight] = useState(0);
const [hasContentAbove, setHasContentAbove] = useState(false);
const [hasContentBelow, setHasContentBelow] = useState(false);
const [isAtBottom, setIsAtBottom] = useState(true);
@@ -110,22 +108,6 @@ const ChatScrollContainer = React.memo(
const isStreamingRef = useRef(isStreaming);
isStreamingRef.current = isStreaming;
// Calculate spacer height to position anchor at top
const calcSpacerHeight = useCallback(
(anchorElement: HTMLElement): number => {
if (!endDivRef.current || !scrollContainerRef.current) return 0;
const contentEnd = endDivRef.current.offsetTop;
const contentFromAnchor = contentEnd - anchorElement.offsetTop;
return Math.max(
0,
scrollContainerRef.current.clientHeight -
contentFromAnchor -
anchorOffsetPx
);
},
[anchorOffsetPx]
);
// Get current scroll state
const getScrollState = useCallback((): ScrollState => {
const container = scrollContainerRef.current;
@@ -226,17 +208,7 @@ const ChatScrollContainer = React.memo(
// Update button visibility based on actual position
onScrollButtonVisibilityChangeRef.current?.(!state.isAtBottom);
}
// Recalculate spacer for non-auto-scroll mode during user scroll
if (!autoScrollRef.current && anchorSelector && endDivRef.current) {
const anchorElement = container.querySelector(
anchorSelector
) as HTMLElement;
if (anchorElement) {
setSpacerHeight(calcSpacerHeight(anchorElement));
}
}
}, [anchorSelector, calcSpacerHeight, updateScrollState, getScrollState]);
}, [updateScrollState, getScrollState]);
// Watch for content changes (MutationObserver + ResizeObserver)
useEffect(() => {
@@ -253,16 +225,6 @@ const ChatScrollContainer = React.memo(
// Capture whether we were at bottom BEFORE content changed
const wasAtBottom = isAtBottomRef.current;
// Update spacer for non-auto-scroll mode
if (!autoScrollRef.current && anchorSelector) {
const anchorElement = container.querySelector(
anchorSelector
) as HTMLElement;
if (anchorElement) {
setSpacerHeight(calcSpacerHeight(anchorElement));
}
}
// Auto-scroll: follow content if we were at bottom
if (autoScrollRef.current && wasAtBottom) {
// scrollToBottom handles isAutoScrollingRef and ref updates
@@ -290,7 +252,7 @@ const ChatScrollContainer = React.memo(
resizeObserver.disconnect();
if (rafId) cancelAnimationFrame(rafId);
};
}, [anchorSelector, calcSpacerHeight, updateScrollState, scrollToBottom]);
}, [updateScrollState, scrollToBottom]);
// Handle session changes and anchor changes
useEffect(() => {
@@ -329,13 +291,6 @@ const ChatScrollContainer = React.memo(
return;
}
// Calculate spacer
if (!autoScrollRef.current) {
setSpacerHeight(calcSpacerHeight(anchorElement));
} else {
setSpacerHeight(0);
}
// Determine scroll behavior
// New session with existing content = instant, new anchor = smooth
const isLoadingExistingContent =
@@ -344,12 +299,21 @@ const ChatScrollContainer = React.memo(
? "instant"
: "smooth";
// Defer scroll to next tick so spacer height takes effect
// Defer scroll to next tick for layout to settle
const timeoutId = setTimeout(() => {
const targetScrollTop = Math.max(
0,
anchorElement.offsetTop - anchorOffsetPx
);
let targetScrollTop: number;
// When loading an existing conversation, scroll to bottom
// Otherwise (e.g., anchor change during conversation), scroll to anchor
if (isLoadingExistingContent) {
targetScrollTop = container.scrollHeight - container.clientHeight;
} else {
targetScrollTop = Math.max(
0,
anchorElement.offsetTop - anchorOffsetPx
);
}
container.scrollTo({ top: targetScrollTop, behavior });
// Update prevScrollTopRef so scroll direction is measured from new position
@@ -357,9 +321,8 @@ const ChatScrollContainer = React.memo(
updateScrollState();
// When autoScroll is on, assume we're "at bottom" after positioning
// so that MutationObserver will continue auto-scrolling
if (autoScrollRef.current) {
// Mark as "at bottom" after scrolling to bottom so auto-scroll continues
if (isLoadingExistingContent || autoScrollRef.current) {
isAtBottomRef.current = true;
}
@@ -369,13 +332,7 @@ const ChatScrollContainer = React.memo(
}, 0);
return () => clearTimeout(timeoutId);
}, [
sessionId,
anchorSelector,
anchorOffsetPx,
calcSpacerHeight,
updateScrollState,
]);
}, [sessionId, anchorSelector, anchorOffsetPx, updateScrollState]);
return (
<div className="flex flex-col flex-1 min-h-0 w-full relative overflow-hidden mb-[7.5rem]">
@@ -400,13 +357,8 @@ const ChatScrollContainer = React.memo(
>
{children}
{/* End marker - before spacer so we can measure content end */}
{/* End marker to measure content end */}
<div ref={endDivRef} />
{/* Spacer to allow scrolling anchor to top */}
{spacerHeight > 0 && (
<div style={{ height: spacerHeight }} aria-hidden="true" />
)}
</div>
</div>
</div>

View File

@@ -113,7 +113,7 @@ const MessageList = React.memo(
);
return (
<div className="w-[min(50rem,100%)] px-6">
<div className="w-[min(50rem,100%)] h-full px-6 rounded-2xl backdrop-blur-md">
<Spacer />
{messages.map((message, i) => {
const messageReactComponentKey = `message-${message.nodeId}`;

View File

@@ -3,8 +3,6 @@
import { useState } from "react";
import Link from "next/link";
import ErrorPageLayout from "@/components/errorPages/ErrorPageLayout";
import { fetchCustomerPortal } from "@/lib/billing/utils";
import { useRouter } from "next/navigation";
import Button from "@/refresh-components/buttons/Button";
import InlineExternalLink from "@/refresh-components/InlineExternalLink";
import { logout } from "@/lib/user";
@@ -33,37 +31,6 @@ const fetchResubscriptionSession = async () => {
export default function AccessRestricted() {
const [isLoading, setIsLoading] = useState(false);
const [error, setError] = useState<string | null>(null);
const router = useRouter();
const handleManageSubscription = async () => {
setIsLoading(true);
setError(null);
try {
const response = await fetchCustomerPortal();
if (!response.ok) {
const errorData = await response.json();
throw new Error(
`Failed to create customer portal session: ${
errorData.message || response.statusText
}`
);
}
const { url } = await response.json();
if (!url) {
throw new Error("No portal URL returned from the server");
}
router.push(url);
} catch (error) {
console.error("Error creating customer portal session:", error);
setError("Error opening customer portal. Please try again later.");
} finally {
setIsLoading(false);
}
};
const handleResubscribe = async () => {
setIsLoading(true);
@@ -119,13 +86,6 @@ export default function AccessRestricted() {
<Button onClick={handleResubscribe} disabled={isLoading}>
{isLoading ? "Loading..." : "Resubscribe"}
</Button>
<Button
secondary
onClick={handleManageSubscription}
disabled={isLoading}
>
Manage Existing Subscription
</Button>
<Button
secondary
onClick={async () => {

View File

@@ -871,7 +871,7 @@ export const MicrosoftIconSVG = createLogoIcon(microsoftSVG);
export const MistralIcon = createLogoIcon(mistralSVG);
export const MixedBreadIcon = createLogoIcon(mixedBreadSVG);
export const NomicIcon = createLogoIcon(nomicSVG);
export const CodaIcon = createLogoIcon(codaIcon, { monochromatic: true });
export const CodaIcon = createLogoIcon(codaIcon);
export const NotionIcon = createLogoIcon(notionIcon, { monochromatic: true });
export const OCIStorageIcon = createLogoIcon(OCIStorageSVG);
export const OllamaIcon = createLogoIcon(ollamaIcon);

View File

@@ -119,7 +119,7 @@ function AppFooter() {
}](https://www.onyx.app/) - Open Source AI Platform`;
return (
<footer className="w-full flex flex-row justify-center items-center gap-2 pb-2">
<footer className="w-full flex flex-row justify-center items-center gap-2 pb-2 mt-auto">
<MinimalMarkdown
content={customFooterContent}
className={cn("max-w-full text-center")}

View File

@@ -442,7 +442,7 @@ const ChatButton = memo(
>
<Popover.Anchor>
<SidebarTab
href={`/chat?chatId=${chatSession.id}`}
href={isDragging ? undefined : `/chat?chatId=${chatSession.id}`}
onClick={handleClick}
transient={active}
rightChildren={rightMenu}

View File

@@ -582,18 +582,10 @@ test.describe("End-to-End Default Assistant Flow", () => {
await page.waitForLoadState("networkidle");
// Verify greeting message appears
const greetingElement = await page.waitForSelector(
'[data-testid="onyx-logo"]',
{ timeout: 5000 }
);
expect(greetingElement).toBeTruthy();
await expect(page.locator('[data-testid="onyx-logo"]')).toBeVisible();
// Verify Onyx logo is displayed
const logoElement = await page.waitForSelector(
'[data-testid="onyx-logo"]',
{ timeout: 5000 }
);
expect(logoElement).toBeTruthy();
await expect(page.locator('[data-testid="onyx-logo"]')).toBeVisible();
// Send a message using the chat input
await sendMessage(page, "Hello, can you help me?");
@@ -608,10 +600,6 @@ test.describe("End-to-End Default Assistant Flow", () => {
await startNewChat(page);
// Verify we're back to default assistant with greeting
const newGreeting = await page.waitForSelector(
'[data-testid="onyx-logo"]',
{ timeout: 5000 }
);
expect(newGreeting).toBeTruthy();
await expect(page.locator('[data-testid="onyx-logo"]')).toBeVisible();
});
});

View File

@@ -27,9 +27,9 @@ export async function waitForUnifiedGreeting(page: Page): Promise<string> {
// Ensure the Action Management popover is open
export async function openActionManagement(page: Page): Promise<void> {
const actionToggle = page.locator(TOOL_IDS.actionToggle);
await actionToggle.waitFor({ timeout: 5000 });
await actionToggle.waitFor();
await actionToggle.click();
await page.locator(TOOL_IDS.options).waitFor({ timeout: 5000 });
await page.locator(TOOL_IDS.options).waitFor();
}
// Check presence of the Action Management toggle