Compare commits

..

24 Commits

Author SHA1 Message Date
Dane Urban
f6d6bd3b3c Prevent the removal and hiding of default model 2026-03-05 19:10:22 -08:00
Jamison Lahman
f59aaa902d chore(playwright): tighten how elements are hidden (#9117) 2026-03-05 23:58:07 +00:00
Nikolas Garza
57349bdbd1 chore: OnyxError cleanup (#9071)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2026-03-05 23:21:38 +00:00
Wenxi
192639a801 chore: bump recommended models (#9112) 2026-03-05 23:02:18 +00:00
Jamison Lahman
c10ffbb464 fix(safari): chat background blur ignores text (#9111)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2026-03-05 15:54:25 -08:00
dependabot[bot]
091f41fd1f chore(deps): bump google-cloud-aiplatform from 1.121.0 to 1.133.0 (#8658)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-03-05 22:54:42 +00:00
dependabot[bot]
45d77be4eb chore(deps): bump ajv in /backend/onyx/server/features/build/sandbox/kubernetes/docker/templates/outputs/web (#8655)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-05 15:14:50 -08:00
dependabot[bot]
413fa85134 chore(deps): bump minimatch in /backend/onyx/server/features/build/sandbox/kubernetes/docker/templates/outputs/web (#8828)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-05 15:13:57 -08:00
dependabot[bot]
108cde4f55 chore(deps): bump j178/prek-action from 1.0.12 to 1.1.1 (#8477)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-03-05 15:13:00 -08:00
dependabot[bot]
f88ce32bd4 chore(deps): bump @hono/node-server from 1.19.9 to 1.19.10 in /backend/onyx/server/features/build/sandbox/kubernetes/docker/templates/outputs/web (#9048)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-05 14:51:22 -08:00
dependabot[bot]
911f3439ea chore(deps): bump helm/kind-action from 1.13.0 to 1.14.0 (#8917)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-05 14:50:06 -08:00
dependabot[bot]
b02590d2b2 chore(deps): bump aws-actions/configure-aws-credentials from 5.1.1 to 6.0.0 (#8478)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-05 14:49:29 -08:00
dependabot[bot]
2d75b4b1f8 chore(deps): bump dompurify from 3.3.1 to 3.3.2 in /widget (#9106)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-03-05 14:45:53 -08:00
dependabot[bot]
7e3f7d01c2 chore(deps): bump authlib from 1.6.6 to 1.6.7 (#9049)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-03-05 22:14:44 +00:00
Jamison Lahman
9d6ce26ea3 fix(fe): show modal body on Safari/desktop (#9035) 2026-03-05 21:35:43 +00:00
roshan
41713d42a2 chore: upgrade golangci-lint to v2.10.1 for Go 1.26 support (#9107)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-05 21:22:56 +00:00
roshan
8afc283410 fix(chrome-extension): open login in new tab when session expires (#9091)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2026-03-05 21:18:21 +00:00
Jamison Lahman
b5c873077e chore(devtools): upgrade ods: 0.6.2->0.6.3 (#9105) 2026-03-05 21:04:51 +00:00
Jamison Lahman
20a4dd32eb chore(devtools): pull release branch and support PR # args (#9102)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2026-03-05 12:37:51 -08:00
Jamison Lahman
fde0d44bc1 chore(devtools): upgrade ods to go 1.26 (#9103) 2026-03-05 20:24:57 +00:00
Jamison Lahman
8fd91b6e83 chore(devtools): ods desktop (#9100) 2026-03-05 19:38:02 +00:00
Justin Tahara
8247fdd45b fix(llm): Handle Bedrock tool content in message history without toolConfig (#9063) 2026-03-05 19:06:35 +00:00
Jamison Lahman
8c5859ba4d fix(fe): disable projects modal button unless project is named (#9093) 2026-03-05 10:29:15 -08:00
Jamison Lahman
62ef6f59bb chore(playwright): screenshot tests for user settings pages (#9078) 2026-03-05 08:35:46 -08:00
52 changed files with 1396 additions and 502 deletions

View File

@@ -213,7 +213,7 @@ jobs:
- name: Configure AWS credentials
if: startsWith(matrix.platform, 'macos-')
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -384,7 +384,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -458,7 +458,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -527,7 +527,7 @@ jobs:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -597,7 +597,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -679,7 +679,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -756,7 +756,7 @@ jobs:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -823,7 +823,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -896,7 +896,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -964,7 +964,7 @@ jobs:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1034,7 +1034,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1107,7 +1107,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1176,7 +1176,7 @@ jobs:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1246,7 +1246,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1326,7 +1326,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1400,7 +1400,7 @@ jobs:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1465,7 +1465,7 @@ jobs:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1520,7 +1520,7 @@ jobs:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1580,7 +1580,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -1637,7 +1637,7 @@ jobs:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2

View File

@@ -71,7 +71,7 @@ jobs:
- name: Create kind cluster
if: steps.list-changed.outputs.changed == 'true'
uses: helm/kind-action@92086f6be054225fa813e0a4b13787fc9088faab # ratchet:helm/kind-action@v1.13.0
uses: helm/kind-action@ef37e7f390d99f746eb8b610417061a60e82a6cc # ratchet:helm/kind-action@v1.14.0
- name: Pre-install cluster status check
if: steps.list-changed.outputs.changed == 'true'

View File

@@ -461,7 +461,7 @@ jobs:
# --- Visual Regression Diff ---
- name: Configure AWS credentials
if: always()
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2

View File

@@ -38,9 +38,9 @@ jobs:
- name: Install node dependencies
working-directory: ./web
run: npm ci
- uses: j178/prek-action@9d6a3097e0c1865ecce00cfb89fe80f2ee91b547 # ratchet:j178/prek-action@v1
- uses: j178/prek-action@0bb87d7f00b0c99306c8bcb8b8beba1eb581c037 # ratchet:j178/prek-action@v1
with:
prek-version: '0.2.21'
prek-version: '0.3.4'
extra-args: ${{ github.event_name == 'pull_request' && format('--from-ref {0} --to-ref {1}', github.event.pull_request.base.sha, github.event.pull_request.head.sha) || github.event_name == 'merge_group' && format('--from-ref {0} --to-ref {1}', github.event.merge_group.base_sha, github.event.merge_group.head_sha) || github.ref_name == 'main' && '--all-files' || '' }}
- name: Check Actions
uses: giner/check-actions@28d366c7cbbe235f9624a88aa31a628167eee28c # ratchet:giner/check-actions@v1.0.1

View File

@@ -73,7 +73,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -116,7 +116,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -158,7 +158,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -264,7 +264,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2

View File

@@ -110,7 +110,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -180,7 +180,7 @@ jobs:
persist-credentials: false
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2
@@ -244,7 +244,7 @@ jobs:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@61815dcd50bd041e203e49132bacad1fd04d2708
uses: aws-actions/configure-aws-credentials@8df5847569e6427dd6c4fb1cf565c83acfa8afa7
with:
role-to-assume: ${{ secrets.AWS_OIDC_ROLE_ARN }}
aws-region: us-east-2

View File

@@ -119,9 +119,10 @@ repos:
]
- repo: https://github.com/golangci/golangci-lint
rev: 9f61b0f53f80672872fced07b6874397c3ed197b # frozen: v2.7.2
rev: 5d1e709b7be35cb2025444e19de266b056b7b7ee # frozen: v2.10.1
hooks:
- id: golangci-lint
language_version: "1.26.0"
entry: bash -c "find tools/ -name go.mod -print0 | xargs -0 -I{} bash -c 'cd \"$(dirname {})\" && golangci-lint run ./...'"
- repo: https://github.com/astral-sh/ruff-pre-commit

View File

@@ -246,7 +246,11 @@ async def get_billing_information(
)
except OnyxError as e:
# Open circuit breaker on connection failures (self-hosted only)
if e.status_code in (502, 503, 504):
if e.status_code in (
OnyxErrorCode.BAD_GATEWAY.status_code,
OnyxErrorCode.SERVICE_UNAVAILABLE.status_code,
OnyxErrorCode.GATEWAY_TIMEOUT.status_code,
):
_open_billing_circuit()
raise

View File

@@ -36,7 +36,6 @@ from onyx.db.memory import add_memory
from onyx.db.memory import update_memory_at_index
from onyx.db.memory import UserMemoryContext
from onyx.db.models import Persona
from onyx.llm.constants import LlmProviderNames
from onyx.llm.interfaces import LLM
from onyx.llm.interfaces import LLMUserIdentity
from onyx.llm.interfaces import ToolChoiceOptions
@@ -84,28 +83,6 @@ def _looks_like_xml_tool_call_payload(text: str | None) -> bool:
)
def _should_keep_bedrock_tool_definitions(
llm: object, simple_chat_history: list[ChatMessageSimple]
) -> bool:
"""Bedrock requires tool config when history includes toolUse/toolResult blocks."""
model_provider = getattr(getattr(llm, "config", None), "model_provider", None)
if model_provider not in {
LlmProviderNames.BEDROCK,
LlmProviderNames.BEDROCK_CONVERSE,
}:
return False
return any(
(
msg.message_type == MessageType.ASSISTANT
and msg.tool_calls
and len(msg.tool_calls) > 0
)
or msg.message_type == MessageType.TOOL_CALL_RESPONSE
for msg in simple_chat_history
)
def _try_fallback_tool_extraction(
llm_step_result: LlmStepResult,
tool_choice: ToolChoiceOptions,
@@ -686,12 +663,7 @@ def run_llm_loop(
elif out_of_cycles or ran_image_gen:
# Last cycle, no tools allowed, just answer!
tool_choice = ToolChoiceOptions.NONE
# Bedrock requires tool config in requests that include toolUse/toolResult history.
final_tools = (
tools
if _should_keep_bedrock_tool_definitions(llm, simple_chat_history)
else []
)
final_tools = []
else:
tool_choice = ToolChoiceOptions.AUTO
final_tools = tools

View File

@@ -267,10 +267,34 @@ def upsert_llm_provider(
mc.name for mc in llm_provider_upsert_request.model_configurations
}
default_model = fetch_default_llm_model(db_session)
# Build a lookup of requested visibility by model name
requested_visibility = {
mc.name: mc.is_visible
for mc in llm_provider_upsert_request.model_configurations
}
# Delete removed models
removed_ids = [
mc.id for name, mc in existing_by_name.items() if name not in models_to_exist
]
# Prevent removing and hiding the default model
if default_model:
for name, mc in existing_by_name.items():
if mc.id == default_model.id:
if name not in models_to_exist:
raise ValueError(
f"Cannot remove the default model '{name}'. "
"Please change the default model before removing."
)
if not requested_visibility.get(name, True):
raise ValueError(
f"Cannot hide the default model '{name}'. "
"Please change the default model before hiding."
)
if removed_ids:
db_session.query(ModelConfiguration).filter(
ModelConfiguration.id.in_(removed_ids)

View File

@@ -48,10 +48,11 @@ class OnyxError(Exception):
*,
status_code_override: int | None = None,
) -> None:
resolved_message = message or error_code.code
super().__init__(resolved_message)
self.error_code = error_code
self.message = message or error_code.code
self.message = resolved_message
self._status_code_override = status_code_override
super().__init__(self.message)
@property
def status_code(self) -> int:

View File

@@ -2516,6 +2516,10 @@
"model_vendor": "openai",
"model_version": "2025-10-06"
},
"gpt-5.4": {
"display_name": "GPT-5.4",
"model_vendor": "openai"
},
"gpt-5.2-pro-2025-12-11": {
"display_name": "GPT-5.2 Pro",
"model_vendor": "openai",

View File

@@ -92,6 +92,98 @@ def _prompt_to_dicts(prompt: LanguageModelInput) -> list[dict[str, Any]]:
return [prompt.model_dump(exclude_none=True)]
def _normalize_content(raw: Any) -> str:
"""Normalize a message content field to a plain string.
Content can be a string, None, or a list of content-block dicts
(e.g. [{"type": "text", "text": "..."}]).
"""
if raw is None:
return ""
if isinstance(raw, str):
return raw
if isinstance(raw, list):
return "\n".join(
block.get("text", "") if isinstance(block, dict) else str(block)
for block in raw
)
return str(raw)
def _strip_tool_content_from_messages(
messages: list[dict[str, Any]],
) -> list[dict[str, Any]]:
"""Convert tool-related messages to plain text.
Bedrock's Converse API requires toolConfig when messages contain
toolUse/toolResult content blocks. When no tools are provided for the
current request, we must convert any tool-related history into plain text
to avoid the "toolConfig field must be defined" error.
This is the same approach used by _OllamaHistoryMessageFormatter.
"""
result: list[dict[str, Any]] = []
for msg in messages:
role = msg.get("role")
tool_calls = msg.get("tool_calls")
if role == "assistant" and tool_calls:
# Convert structured tool calls to text representation
tool_call_lines = []
for tc in tool_calls:
func = tc.get("function", {})
name = func.get("name", "unknown")
args = func.get("arguments", "{}")
tc_id = tc.get("id", "")
tool_call_lines.append(
f"[Tool Call] name={name} id={tc_id} args={args}"
)
existing_content = _normalize_content(msg.get("content"))
parts = (
[existing_content] + tool_call_lines
if existing_content
else tool_call_lines
)
new_msg = {
"role": "assistant",
"content": "\n".join(parts),
}
result.append(new_msg)
elif role == "tool":
# Convert tool response to user message with text content
tool_call_id = msg.get("tool_call_id", "")
content = _normalize_content(msg.get("content"))
tool_result_text = f"[Tool Result] id={tool_call_id}\n{content}"
# Merge into previous user message if it is also a converted
# tool result to avoid consecutive user messages (Bedrock requires
# strict user/assistant alternation).
if (
result
and result[-1]["role"] == "user"
and "[Tool Result]" in result[-1].get("content", "")
):
result[-1]["content"] += "\n\n" + tool_result_text
else:
result.append({"role": "user", "content": tool_result_text})
else:
result.append(msg)
return result
def _messages_contain_tool_content(messages: list[dict[str, Any]]) -> bool:
"""Check if any messages contain tool-related content blocks."""
for msg in messages:
if msg.get("role") == "tool":
return True
if msg.get("role") == "assistant" and msg.get("tool_calls"):
return True
return False
def _is_vertex_model_rejecting_output_config(model_name: str) -> bool:
normalized_model_name = model_name.lower()
return any(
@@ -404,13 +496,30 @@ class LitellmLLM(LLM):
else nullcontext()
)
with env_ctx:
messages = _prompt_to_dicts(prompt)
# Bedrock's Converse API requires toolConfig when messages
# contain toolUse/toolResult content blocks. When no tools are
# provided for this request but the history contains tool
# content from previous turns, strip it to plain text.
is_bedrock = self._model_provider in {
LlmProviderNames.BEDROCK,
LlmProviderNames.BEDROCK_CONVERSE,
}
if (
is_bedrock
and not tools
and _messages_contain_tool_content(messages)
):
messages = _strip_tool_content_from_messages(messages)
response = litellm.completion(
mock_response=get_llm_mock_response() or MOCK_LLM_RESPONSE,
model=model,
base_url=self._api_base or None,
api_version=self._api_version or None,
custom_llm_provider=self._custom_llm_provider or None,
messages=_prompt_to_dicts(prompt),
messages=messages,
tools=tools,
tool_choice=tool_choice,
stream=stream,

View File

@@ -1,12 +1,12 @@
{
"version": "1.1",
"updated_at": "2026-02-05T00:00:00Z",
"updated_at": "2026-03-05T00:00:00Z",
"providers": {
"openai": {
"default_model": { "name": "gpt-5.2" },
"default_model": { "name": "gpt-5.4" },
"additional_visible_models": [
{ "name": "gpt-5-mini" },
{ "name": "gpt-4.1" }
{ "name": "gpt-5.4" },
{ "name": "gpt-5.2" }
]
},
"anthropic": {

View File

@@ -961,9 +961,9 @@
"license": "MIT"
},
"node_modules/@hono/node-server": {
"version": "1.19.9",
"resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.9.tgz",
"integrity": "sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==",
"version": "1.19.10",
"resolved": "https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.10.tgz",
"integrity": "sha512-hZ7nOssGqRgyV3FVVQdfi+U4q02uB23bpnYpdvNXkYTRRyWx84b7yf1ans+dnJ/7h41sGL3CeQTfO+ZGxuO+Iw==",
"license": "MIT",
"engines": {
"node": ">=18.14.1"
@@ -1573,27 +1573,6 @@
}
}
},
"node_modules/@isaacs/balanced-match": {
"version": "4.0.1",
"resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz",
"integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==",
"license": "MIT",
"engines": {
"node": "20 || >=22"
}
},
"node_modules/@isaacs/brace-expansion": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.1.tgz",
"integrity": "sha512-WMz71T1JS624nWj2n2fnYAuPovhv7EUhk69R6i9dsVyzxt5eM3bjwvgk9L+APE1TRscGysAVMANkB0jh0LQZrQ==",
"license": "MIT",
"dependencies": {
"@isaacs/balanced-match": "^4.0.1"
},
"engines": {
"node": "20 || >=22"
}
},
"node_modules/@jridgewell/gen-mapping": {
"version": "0.3.13",
"resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz",
@@ -1680,9 +1659,9 @@
}
},
"node_modules/@modelcontextprotocol/sdk/node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"version": "8.18.0",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.18.0.tgz",
"integrity": "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==",
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.3",
@@ -3855,6 +3834,27 @@
"path-browserify": "^1.0.1"
}
},
"node_modules/@ts-morph/common/node_modules/balanced-match": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
"integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==",
"license": "MIT",
"engines": {
"node": "18 || 20 || >=22"
}
},
"node_modules/@ts-morph/common/node_modules/brace-expansion": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
"license": "MIT",
"dependencies": {
"balanced-match": "^4.0.2"
},
"engines": {
"node": "18 || 20 || >=22"
}
},
"node_modules/@ts-morph/common/node_modules/fast-glob": {
"version": "3.3.3",
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz",
@@ -3884,15 +3884,15 @@
}
},
"node_modules/@ts-morph/common/node_modules/minimatch": {
"version": "10.1.1",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz",
"integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==",
"version": "10.2.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
"integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
"license": "BlueOak-1.0.0",
"dependencies": {
"@isaacs/brace-expansion": "^5.0.0"
"brace-expansion": "^5.0.2"
},
"engines": {
"node": "20 || >=22"
"node": "18 || 20 || >=22"
},
"funding": {
"url": "https://github.com/sponsors/isaacs"
@@ -4234,13 +4234,13 @@
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/minimatch": {
"version": "9.0.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz",
"integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==",
"version": "9.0.9",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.9.tgz",
"integrity": "sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==",
"dev": true,
"license": "ISC",
"dependencies": {
"brace-expansion": "^2.0.1"
"brace-expansion": "^2.0.2"
},
"engines": {
"node": ">=16 || 14 >=14.17"
@@ -4619,9 +4619,9 @@
}
},
"node_modules/ajv": {
"version": "6.12.6",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz",
"integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==",
"version": "6.14.0",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-6.14.0.tgz",
"integrity": "sha512-IWrosm/yrn43eiKqkfkHis7QioDleaXQHdDVPKg0FSwwd/DuvyX79TZnFOnYpB7dcsFAMmtFztZuXPDvSePkFw==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -4653,9 +4653,9 @@
}
},
"node_modules/ajv-formats/node_modules/ajv": {
"version": "8.17.1",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz",
"integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==",
"version": "8.18.0",
"resolved": "https://registry.npmjs.org/ajv/-/ajv-8.18.0.tgz",
"integrity": "sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==",
"license": "MIT",
"dependencies": {
"fast-deep-equal": "^3.1.3",
@@ -8831,9 +8831,9 @@
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
"integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
"version": "3.1.5",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
"integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
"dev": true,
"license": "ISC",
"dependencies": {

View File

@@ -5,6 +5,7 @@ from typing import cast
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from sqlalchemy.orm import Session
from onyx.auth.users import current_admin_user
@@ -27,8 +28,6 @@ from onyx.db.feedback import update_document_boost_for_user
from onyx.db.feedback import update_document_hidden_for_user
from onyx.db.index_attempt import cancel_indexing_attempts_for_ccpair
from onyx.db.models import User
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.file_store.file_store import get_default_file_store
from onyx.key_value_store.factory import get_kv_store
from onyx.key_value_store.interface import KvKeyNotFoundError
@@ -125,11 +124,11 @@ def validate_existing_genai_api_key(
try:
llm = get_default_llm(timeout=10)
except ValueError:
raise OnyxError(OnyxErrorCode.NOT_FOUND, "LLM not setup")
raise HTTPException(status_code=404, detail="LLM not setup")
error = test_llm(llm)
if error:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, error)
raise HTTPException(status_code=400, detail=error)
# Mark check as successful
curr_time = datetime.now(tz=timezone.utc)
@@ -160,7 +159,10 @@ def create_deletion_attempt_for_connector_id(
f"'{credential_id}' does not exist. Has it already been deleted?"
)
logger.error(error)
raise OnyxError(OnyxErrorCode.CONNECTOR_NOT_FOUND, error)
raise HTTPException(
status_code=404,
detail=error,
)
# Cancel any scheduled indexing attempts
cancel_indexing_attempts_for_ccpair(
@@ -176,9 +178,9 @@ def create_deletion_attempt_for_connector_id(
# connector_credential_pair=cc_pair, db_session=db_session
# )
# if deletion_attempt_disallowed_reason:
# raise OnyxError(
# OnyxErrorCode.VALIDATION_ERROR,
# deletion_attempt_disallowed_reason,
# raise HTTPException(
# status_code=400,
# detail=deletion_attempt_disallowed_reason,
# )
# mark as deleting

View File

@@ -2,6 +2,8 @@
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from fastapi import status
from sqlalchemy.orm import Session
from onyx.auth.users import current_admin_user
@@ -22,8 +24,6 @@ from onyx.db.discord_bot import update_discord_channel_config
from onyx.db.discord_bot import update_guild_config
from onyx.db.engine.sql_engine import get_session
from onyx.db.models import User
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.server.manage.discord_bot.models import DiscordBotConfigCreateRequest
from onyx.server.manage.discord_bot.models import DiscordBotConfigResponse
from onyx.server.manage.discord_bot.models import DiscordChannelConfigResponse
@@ -47,14 +47,14 @@ def _check_bot_config_api_access() -> None:
- When DISCORD_BOT_TOKEN env var is set (managed via env)
"""
if AUTH_TYPE == AuthType.CLOUD:
raise OnyxError(
OnyxErrorCode.UNAUTHORIZED,
"Discord bot configuration is managed by Onyx on Cloud.",
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Discord bot configuration is managed by Onyx on Cloud.",
)
if DISCORD_BOT_TOKEN:
raise OnyxError(
OnyxErrorCode.UNAUTHORIZED,
"Discord bot is configured via environment variables. API access disabled.",
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Discord bot is configured via environment variables. API access disabled.",
)
@@ -92,9 +92,9 @@ def create_bot_request(
bot_token=request.bot_token,
)
except ValueError:
raise OnyxError(
OnyxErrorCode.CONFLICT,
"Discord bot config already exists. Delete it first to create a new one.",
raise HTTPException(
status_code=status.HTTP_409_CONFLICT,
detail="Discord bot config already exists. Delete it first to create a new one.",
)
db_session.commit()
@@ -117,7 +117,7 @@ def delete_bot_config_endpoint(
"""
deleted = delete_discord_bot_config(db_session)
if not deleted:
raise OnyxError(OnyxErrorCode.NOT_FOUND, "Bot config not found")
raise HTTPException(status_code=404, detail="Bot config not found")
# Also delete the service API key used by the Discord bot
delete_discord_service_api_key(db_session)
@@ -144,7 +144,7 @@ def delete_service_api_key_endpoint(
"""
deleted = delete_discord_service_api_key(db_session)
if not deleted:
raise OnyxError(OnyxErrorCode.NOT_FOUND, "Service API key not found")
raise HTTPException(status_code=404, detail="Service API key not found")
db_session.commit()
return {"deleted": True}
@@ -189,7 +189,7 @@ def get_guild_config(
"""Get specific guild config."""
config = get_guild_config_by_internal_id(db_session, internal_id=config_id)
if not config:
raise OnyxError(OnyxErrorCode.NOT_FOUND, "Guild config not found")
raise HTTPException(status_code=404, detail="Guild config not found")
return DiscordGuildConfigResponse.model_validate(config)
@@ -203,7 +203,7 @@ def update_guild_request(
"""Update guild config."""
config = get_guild_config_by_internal_id(db_session, internal_id=config_id)
if not config:
raise OnyxError(OnyxErrorCode.NOT_FOUND, "Guild config not found")
raise HTTPException(status_code=404, detail="Guild config not found")
config = update_guild_config(
db_session,
@@ -228,7 +228,7 @@ def delete_guild_request(
"""
deleted = delete_guild_config(db_session, config_id)
if not deleted:
raise OnyxError(OnyxErrorCode.NOT_FOUND, "Guild config not found")
raise HTTPException(status_code=404, detail="Guild config not found")
# On Cloud, delete service API key when all guilds are removed
if AUTH_TYPE == AuthType.CLOUD:
@@ -254,9 +254,9 @@ def list_channel_configs(
"""List whitelisted channels for a guild."""
guild_config = get_guild_config_by_internal_id(db_session, internal_id=config_id)
if not guild_config:
raise OnyxError(OnyxErrorCode.NOT_FOUND, "Guild config not found")
raise HTTPException(status_code=404, detail="Guild config not found")
if not guild_config.guild_id:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, "Guild not yet registered")
raise HTTPException(status_code=400, detail="Guild not yet registered")
configs = get_channel_configs(db_session, config_id)
return [DiscordChannelConfigResponse.model_validate(c) for c in configs]
@@ -278,7 +278,7 @@ def update_channel_request(
db_session, guild_config_id, channel_config_id
)
if not config:
raise OnyxError(OnyxErrorCode.NOT_FOUND, "Channel config not found")
raise HTTPException(status_code=404, detail="Channel config not found")
config = update_discord_channel_config(
db_session,

View File

@@ -3,6 +3,7 @@ import re
import requests
from fastapi import APIRouter
from fastapi import HTTPException
from onyx import __version__
from onyx.auth.users import anonymous_user_enabled
@@ -15,8 +16,6 @@ from onyx.configs.constants import DEV_VERSION_PATTERN
from onyx.configs.constants import PUBLIC_API_TAGS
from onyx.configs.constants import STABLE_VERSION_PATTERN
from onyx.db.auth import get_user_count
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.server.manage.models import AllVersions
from onyx.server.manage.models import AuthTypeResponse
from onyx.server.manage.models import ContainerVersions
@@ -105,14 +104,14 @@ def get_versions() -> AllVersions:
# Ensure we have at least one tag of each type
if not dev_tags:
raise OnyxError(
OnyxErrorCode.INTERNAL_ERROR,
"No valid dev versions found matching pattern v(number).(number).(number)-beta.(number)",
raise HTTPException(
status_code=500,
detail="No valid dev versions found matching pattern v(number).(number).(number)-beta.(number)",
)
if not stable_tags:
raise OnyxError(
OnyxErrorCode.INTERNAL_ERROR,
"No valid stable versions found matching pattern v(number).(number).(number)",
raise HTTPException(
status_code=500,
detail="No valid stable versions found matching pattern v(number).(number).(number)",
)
# Sort common tags and get the latest one

View File

@@ -1,5 +1,6 @@
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from sqlalchemy.orm import Session
from onyx.auth.users import current_admin_user
@@ -14,8 +15,6 @@ from onyx.db.llm import remove_llm_provider__no_commit
from onyx.db.models import LLMProvider as LLMProviderModel
from onyx.db.models import ModelConfiguration
from onyx.db.models import User
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.image_gen.exceptions import ImageProviderCredentialsError
from onyx.image_gen.factory import get_image_generation_provider
from onyx.image_gen.factory import validate_credentials
@@ -75,9 +74,9 @@ def _build_llm_provider_request(
# Clone mode: Only use API key from source provider
source_provider = db_session.get(LLMProviderModel, source_llm_provider_id)
if not source_provider:
raise OnyxError(
OnyxErrorCode.NOT_FOUND,
f"Source LLM provider with id {source_llm_provider_id} not found",
raise HTTPException(
status_code=404,
detail=f"Source LLM provider with id {source_llm_provider_id} not found",
)
_validate_llm_provider_change(
@@ -111,9 +110,9 @@ def _build_llm_provider_request(
)
if not provider:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"No provider or source llm provided",
raise HTTPException(
status_code=400,
detail="No provider or source llm provided",
)
credentials = ImageGenerationProviderCredentials(
@@ -125,9 +124,9 @@ def _build_llm_provider_request(
)
if not validate_credentials(provider, credentials):
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"Incorrect credentials for {provider}",
raise HTTPException(
status_code=400,
detail=f"Incorrect credentials for {provider}",
)
return LLMProviderUpsertRequest(
@@ -216,9 +215,9 @@ def test_image_generation(
LLMProviderModel, test_request.source_llm_provider_id
)
if not source_provider:
raise OnyxError(
OnyxErrorCode.NOT_FOUND,
f"Source LLM provider with id {test_request.source_llm_provider_id} not found",
raise HTTPException(
status_code=404,
detail=f"Source LLM provider with id {test_request.source_llm_provider_id} not found",
)
_validate_llm_provider_change(
@@ -237,9 +236,9 @@ def test_image_generation(
provider = source_provider.provider
if provider is None:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"No provider or source llm provided",
raise HTTPException(
status_code=400,
detail="No provider or source llm provided",
)
try:
@@ -258,14 +257,14 @@ def test_image_generation(
),
)
except ValueError:
raise OnyxError(
OnyxErrorCode.NOT_FOUND,
f"Invalid image generation provider: {provider}",
raise HTTPException(
status_code=404,
detail=f"Invalid image generation provider: {provider}",
)
except ImageProviderCredentialsError:
raise OnyxError(
OnyxErrorCode.UNAUTHENTICATED,
"Invalid image generation credentials",
raise HTTPException(
status_code=401,
detail="Invalid image generation credentials",
)
quality = _get_test_quality_for_model(test_request.model_name)
@@ -277,15 +276,15 @@ def test_image_generation(
n=1,
quality=quality,
)
except OnyxError:
except HTTPException:
raise
except Exception as e:
# Log only exception type to avoid exposing sensitive data
# (LiteLLM errors may contain URLs with API keys or auth tokens)
logger.warning(f"Image generation test failed: {type(e).__name__}")
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"Image generation test failed: {type(e).__name__}",
raise HTTPException(
status_code=400,
detail=f"Image generation test failed: {type(e).__name__}",
)
@@ -310,9 +309,9 @@ def create_config(
db_session, config_create.image_provider_id
)
if existing_config:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"ImageGenerationConfig with image_provider_id '{config_create.image_provider_id}' already exists",
raise HTTPException(
status_code=400,
detail=f"ImageGenerationConfig with image_provider_id '{config_create.image_provider_id}' already exists",
)
try:
@@ -346,10 +345,10 @@ def create_config(
db_session.commit()
db_session.refresh(config)
return ImageGenerationConfigView.from_model(config)
except OnyxError:
except HTTPException:
raise
except Exception as e:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(e))
raise HTTPException(status_code=400, detail=str(e))
@admin_router.get("/config")
@@ -374,9 +373,9 @@ def get_config_credentials(
"""
config = get_image_generation_config(db_session, image_provider_id)
if not config:
raise OnyxError(
OnyxErrorCode.NOT_FOUND,
f"ImageGenerationConfig with image_provider_id {image_provider_id} not found",
raise HTTPException(
status_code=404,
detail=f"ImageGenerationConfig with image_provider_id {image_provider_id} not found",
)
return ImageGenerationCredentials.from_model(config)
@@ -402,9 +401,9 @@ def update_config(
# 1. Get existing config
existing_config = get_image_generation_config(db_session, image_provider_id)
if not existing_config:
raise OnyxError(
OnyxErrorCode.NOT_FOUND,
f"ImageGenerationConfig with image_provider_id {image_provider_id} not found",
raise HTTPException(
status_code=404,
detail=f"ImageGenerationConfig with image_provider_id {image_provider_id} not found",
)
old_llm_provider_id = existing_config.model_configuration.llm_provider_id
@@ -473,10 +472,10 @@ def update_config(
db_session.refresh(existing_config)
return ImageGenerationConfigView.from_model(existing_config)
except OnyxError:
except HTTPException:
raise
except Exception as e:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(e))
raise HTTPException(status_code=400, detail=str(e))
@admin_router.delete("/config/{image_provider_id}")
@@ -490,9 +489,9 @@ def delete_config(
# Get the config first to find the associated LLM provider
existing_config = get_image_generation_config(db_session, image_provider_id)
if not existing_config:
raise OnyxError(
OnyxErrorCode.NOT_FOUND,
f"ImageGenerationConfig with image_provider_id {image_provider_id} not found",
raise HTTPException(
status_code=404,
detail=f"ImageGenerationConfig with image_provider_id {image_provider_id} not found",
)
llm_provider_id = existing_config.model_configuration.llm_provider_id
@@ -504,10 +503,10 @@ def delete_config(
remove_llm_provider__no_commit(db_session, llm_provider_id)
db_session.commit()
except OnyxError:
except HTTPException:
raise
except ValueError as e:
raise OnyxError(OnyxErrorCode.NOT_FOUND, str(e))
raise HTTPException(status_code=404, detail=str(e))
@admin_router.post("/config/{image_provider_id}/default")
@@ -520,7 +519,7 @@ def set_config_as_default(
try:
set_default_image_generation_config(db_session, image_provider_id)
except ValueError as e:
raise OnyxError(OnyxErrorCode.NOT_FOUND, str(e))
raise HTTPException(status_code=404, detail=str(e))
@admin_router.delete("/config/{image_provider_id}/default")
@@ -533,4 +532,4 @@ def unset_config_as_default(
try:
unset_default_image_generation_config(db_session, image_provider_id)
except ValueError as e:
raise OnyxError(OnyxErrorCode.NOT_FOUND, str(e))
raise HTTPException(status_code=404, detail=str(e))

View File

@@ -1,5 +1,7 @@
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from fastapi import status
from sqlalchemy.orm import Session
from onyx.auth.users import current_admin_user
@@ -25,8 +27,6 @@ from onyx.db.search_settings import update_current_search_settings
from onyx.db.search_settings import update_search_settings_status
from onyx.document_index.factory import get_all_document_indices
from onyx.document_index.factory import get_default_document_index
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.file_processing.unstructured import delete_unstructured_api_key
from onyx.file_processing.unstructured import get_unstructured_api_key
from onyx.file_processing.unstructured import update_unstructured_api_key
@@ -58,9 +58,9 @@ def set_new_search_settings(
# Disallow contextual RAG for cloud deployments.
if MULTI_TENANT and search_settings_new.enable_contextual_rag:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"Contextual RAG disabled in Onyx Cloud",
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Contextual RAG disabled in Onyx Cloud",
)
# Validate cloud provider exists or create new LiteLLM provider.
@@ -70,9 +70,9 @@ def set_new_search_settings(
)
if cloud_provider is None:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"No embedding provider exists for cloud embedding type {search_settings_new.provider_type}",
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"No embedding provider exists for cloud embedding type {search_settings_new.provider_type}",
)
validate_contextual_rag_model(
@@ -188,7 +188,7 @@ def delete_search_settings_endpoint(
search_settings_id=deletion_request.search_settings_id,
)
except ValueError as e:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(e))
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(e))
@router.get("/get-current-search-settings")
@@ -238,9 +238,9 @@ def update_saved_search_settings(
) -> None:
# Disallow contextual RAG for cloud deployments
if MULTI_TENANT and search_settings.enable_contextual_rag:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"Contextual RAG disabled in Onyx Cloud",
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Contextual RAG disabled in Onyx Cloud",
)
validate_contextual_rag_model(
@@ -294,7 +294,7 @@ def validate_contextual_rag_model(
model_name=model_name,
db_session=db_session,
):
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, error_msg)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=error_msg)
def _validate_contextual_rag_model(

View File

@@ -1,5 +1,6 @@
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from sqlalchemy.orm import Session
from onyx.auth.users import current_admin_user
@@ -20,8 +21,6 @@ from onyx.db.slack_channel_config import fetch_slack_channel_configs
from onyx.db.slack_channel_config import insert_slack_channel_config
from onyx.db.slack_channel_config import remove_slack_channel_config
from onyx.db.slack_channel_config import update_slack_channel_config
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.onyxbot.slack.config import validate_channel_name
from onyx.server.manage.models import SlackBot
from onyx.server.manage.models import SlackBotCreationRequest
@@ -64,7 +63,10 @@ def _form_channel_config(
current_slack_bot_id=slack_channel_config_creation_request.slack_bot_id,
)
except ValueError as e:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(e))
raise HTTPException(
status_code=400,
detail=str(e),
)
if respond_tag_only and respond_member_group_list:
raise ValueError(
@@ -121,7 +123,10 @@ def create_slack_channel_config(
)
if channel_config["channel_name"] is None:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, "Channel name is required")
raise HTTPException(
status_code=400,
detail="Channel name is required",
)
persona_id = None
if slack_channel_config_creation_request.persona_id is not None:
@@ -166,7 +171,10 @@ def patch_slack_channel_config(
db_session=db_session, slack_channel_config_id=slack_channel_config_id
)
if existing_slack_channel_config is None:
raise OnyxError(OnyxErrorCode.NOT_FOUND, "Slack channel config not found")
raise HTTPException(
status_code=404,
detail="Slack channel config not found",
)
existing_persona_id = existing_slack_channel_config.persona_id
if existing_persona_id is not None:

View File

@@ -13,6 +13,7 @@ from email_validator import validate_email
from fastapi import APIRouter
from fastapi import Body
from fastapi import Depends
from fastapi import HTTPException
from fastapi import Query
from fastapi import Request
from fastapi.responses import StreamingResponse
@@ -72,8 +73,6 @@ from onyx.db.users import get_page_of_filtered_users
from onyx.db.users import get_total_filtered_users_count
from onyx.db.users import get_user_by_email
from onyx.db.users import validate_user_role_update
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.key_value_store.factory import get_kv_store
from onyx.redis.redis_pool import get_raw_redis_client
from onyx.server.documents.models import PaginatedReturn
@@ -125,7 +124,7 @@ def set_user_role(
email=user_role_update_request.user_email, db_session=db_session
)
if not user_to_update:
raise OnyxError(OnyxErrorCode.USER_NOT_FOUND, "User not found")
raise HTTPException(status_code=404, detail="User not found")
current_role = user_to_update.role
requested_role = user_role_update_request.new_role
@@ -140,9 +139,9 @@ def set_user_role(
)
if user_to_update.id == current_user.id:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"An admin cannot demote themselves from admin role!",
raise HTTPException(
status_code=400,
detail="An admin cannot demote themselves from admin role!",
)
if requested_role == UserRole.CURATOR:
@@ -387,9 +386,9 @@ def bulk_invite_users(
new_invited_emails.append(email_info.normalized)
except (EmailUndeliverableError, EmailNotValidError) as e:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"Invalid email address: {email} - {str(e)}",
raise HTTPException(
status_code=400,
detail=f"Invalid email address: {email} - {str(e)}",
)
# Count only new users (not already invited or existing) that need seats
@@ -406,9 +405,9 @@ def bulk_invite_users(
if MULTI_TENANT and is_tenant_on_trial_fn(tenant_id):
current_invited = len(already_invited)
if current_invited + len(emails_needing_seats) > NUM_FREE_TRIAL_USER_INVITES:
raise OnyxError(
OnyxErrorCode.UNAUTHORIZED,
"You have hit your invite limit. "
raise HTTPException(
status_code=403,
detail="You have hit your invite limit. "
"Please upgrade for unlimited invites.",
)
@@ -503,16 +502,14 @@ def deactivate_user_api(
db_session: Session = Depends(get_session),
) -> None:
if current_user.email == user_email.user_email:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR, "You cannot deactivate yourself"
)
raise HTTPException(status_code=400, detail="You cannot deactivate yourself")
user_to_deactivate = get_user_by_email(
email=user_email.user_email, db_session=db_session
)
if not user_to_deactivate:
raise OnyxError(OnyxErrorCode.USER_NOT_FOUND, "User not found")
raise HTTPException(status_code=404, detail="User not found")
if user_to_deactivate.is_active is False:
logger.warning("{} is already deactivated".format(user_to_deactivate.email))
@@ -537,15 +534,14 @@ async def delete_user(
email=user_email.user_email, db_session=db_session
)
if not user_to_delete:
raise OnyxError(OnyxErrorCode.USER_NOT_FOUND, "User not found")
raise HTTPException(status_code=404, detail="User not found")
if user_to_delete.is_active is True:
logger.warning(
"{} must be deactivated before deleting".format(user_to_delete.email)
)
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"User must be deactivated before deleting",
raise HTTPException(
status_code=400, detail="User must be deactivated before deleting"
)
# Detach the user from the current session
@@ -569,7 +565,7 @@ async def delete_user(
except Exception as e:
db_session.rollback()
logger.error(f"Error deleting user {user_to_delete.email}: {str(e)}")
raise OnyxError(OnyxErrorCode.INTERNAL_ERROR, "Error deleting user")
raise HTTPException(status_code=500, detail="Error deleting user")
@router.patch("/manage/admin/activate-user", tags=PUBLIC_API_TAGS)
@@ -582,7 +578,7 @@ def activate_user_api(
email=user_email.user_email, db_session=db_session
)
if not user_to_activate:
raise OnyxError(OnyxErrorCode.USER_NOT_FOUND, "User not found")
raise HTTPException(status_code=404, detail="User not found")
if user_to_activate.is_active is True:
logger.warning("{} is already activated".format(user_to_activate.email))

View File

@@ -1,8 +1,7 @@
import requests
from fastapi import HTTPException
from onyx.configs.constants import SLACK_USER_TOKEN_PREFIX
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
SLACK_API_URL = "https://slack.com/api/auth.test"
SLACK_CONNECTIONS_OPEN_URL = "https://slack.com/api/apps.connections.open"
@@ -13,15 +12,15 @@ def validate_bot_token(bot_token: str) -> bool:
response = requests.post(SLACK_API_URL, headers=headers)
if response.status_code != 200:
raise OnyxError(
OnyxErrorCode.INTERNAL_ERROR, "Error communicating with Slack API."
raise HTTPException(
status_code=500, detail="Error communicating with Slack API."
)
data = response.json()
if not data.get("ok", False):
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"Invalid bot token: {data.get('error', 'Unknown error')}",
raise HTTPException(
status_code=400,
detail=f"Invalid bot token: {data.get('error', 'Unknown error')}",
)
return True
@@ -32,15 +31,15 @@ def validate_app_token(app_token: str) -> bool:
response = requests.post(SLACK_CONNECTIONS_OPEN_URL, headers=headers)
if response.status_code != 200:
raise OnyxError(
OnyxErrorCode.INTERNAL_ERROR, "Error communicating with Slack API."
raise HTTPException(
status_code=500, detail="Error communicating with Slack API."
)
data = response.json()
if not data.get("ok", False):
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"Invalid app token: {data.get('error', 'Unknown error')}",
raise HTTPException(
status_code=400,
detail=f"Invalid app token: {data.get('error', 'Unknown error')}",
)
return True
@@ -55,16 +54,16 @@ def validate_user_token(user_token: str | None) -> None:
Returns:
None is valid and will return successfully.
Raises:
OnyxError: If the token is invalid or missing required fields
HTTPException: If the token is invalid or missing required fields
"""
if not user_token:
# user_token is optional, so None or empty string is valid
return
if not user_token.startswith(SLACK_USER_TOKEN_PREFIX):
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"Invalid user token format. User OAuth tokens must start with '{SLACK_USER_TOKEN_PREFIX}'",
raise HTTPException(
status_code=400,
detail=f"Invalid user token format. User OAuth tokens must start with '{SLACK_USER_TOKEN_PREFIX}'",
)
# Test the token with Slack API to ensure it's valid
@@ -72,13 +71,13 @@ def validate_user_token(user_token: str | None) -> None:
response = requests.post(SLACK_API_URL, headers=headers)
if response.status_code != 200:
raise OnyxError(
OnyxErrorCode.INTERNAL_ERROR, "Error communicating with Slack API."
raise HTTPException(
status_code=500, detail="Error communicating with Slack API."
)
data = response.json()
if not data.get("ok", False):
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"Invalid user token: {data.get('error', 'Unknown error')}",
raise HTTPException(
status_code=400,
detail=f"Invalid user token: {data.get('error', 'Unknown error')}",
)

View File

@@ -2,6 +2,7 @@ from __future__ import annotations
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from fastapi import Response
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.orm import Session
@@ -25,8 +26,6 @@ from onyx.db.web_search import set_active_web_content_provider
from onyx.db.web_search import set_active_web_search_provider
from onyx.db.web_search import upsert_web_content_provider
from onyx.db.web_search import upsert_web_search_provider
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.server.manage.web_search.models import WebContentProviderTestRequest
from onyx.server.manage.web_search.models import WebContentProviderUpsertRequest
from onyx.server.manage.web_search.models import WebContentProviderView
@@ -87,9 +86,9 @@ def upsert_search_provider_endpoint(
and request.id is not None
and existing_by_name.id != request.id
):
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"A search provider named '{request.name}' already exists.",
raise HTTPException(
status_code=400,
detail=f"A search provider named '{request.name}' already exists.",
)
provider = upsert_web_search_provider(
@@ -194,16 +193,16 @@ def test_search_provider(
request.provider_type, db_session
)
if existing_provider is None or not existing_provider.api_key:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"No stored API key found for this provider type.",
raise HTTPException(
status_code=400,
detail="No stored API key found for this provider type.",
)
api_key = existing_provider.api_key.get_value(apply_mask=False)
if requires_key and not api_key:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"API key is required. Either provide api_key or set use_stored_key to true.",
raise HTTPException(
status_code=400,
detail="API key is required. Either provide api_key or set use_stored_key to true.",
)
try:
@@ -213,21 +212,20 @@ def test_search_provider(
config=request.config or {},
)
except ValueError as exc:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(exc)) from exc
raise HTTPException(status_code=400, detail=str(exc)) from exc
if provider is None:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"Unable to build provider configuration.",
raise HTTPException(
status_code=400, detail="Unable to build provider configuration."
)
# Run the API client's test_connection method to ensure the connection is valid.
try:
return provider.test_connection()
except OnyxError:
except HTTPException:
raise
except Exception as e:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(e)) from e
raise HTTPException(status_code=400, detail=str(e)) from e
@admin_router.get("/content-providers", response_model=list[WebContentProviderView])
@@ -261,9 +259,9 @@ def upsert_content_provider_endpoint(
and request.id is not None
and existing_by_name.id != request.id
):
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"A content provider named '{request.name}' already exists.",
raise HTTPException(
status_code=400,
detail=f"A content provider named '{request.name}' already exists.",
)
provider = upsert_web_content_provider(
@@ -381,9 +379,9 @@ def test_content_provider(
request.provider_type, db_session
)
if existing_provider is None or not existing_provider.api_key:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"No stored API key found for this provider type.",
raise HTTPException(
status_code=400,
detail="No stored API key found for this provider type.",
)
if MULTI_TENANT:
stored_base_url = (
@@ -391,17 +389,17 @@ def test_content_provider(
)
request_base_url = request.config.base_url
if request_base_url != stored_base_url:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"Base URL cannot differ from stored provider when using stored API key",
raise HTTPException(
status_code=400,
detail="Base URL cannot differ from stored provider when using stored API key",
)
api_key = existing_provider.api_key.get_value(apply_mask=False)
if not api_key:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"API key is required. Either provide api_key or set use_stored_key to true.",
raise HTTPException(
status_code=400,
detail="API key is required. Either provide api_key or set use_stored_key to true.",
)
try:
@@ -411,12 +409,11 @@ def test_content_provider(
config=request.config,
)
except ValueError as exc:
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(exc)) from exc
raise HTTPException(status_code=400, detail=str(exc)) from exc
if provider is None:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"Unable to build provider configuration.",
raise HTTPException(
status_code=400, detail="Unable to build provider configuration."
)
# Actually test the API key by making a real content fetch call
@@ -428,11 +425,11 @@ def test_content_provider(
if not test_results or not any(
result.scrape_successful for result in test_results
):
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"API key validation failed: content fetch returned no results.",
raise HTTPException(
status_code=400,
detail="API key validation failed: content fetch returned no results.",
)
except OnyxError:
except HTTPException:
raise
except Exception as e:
error_msg = str(e)
@@ -441,13 +438,13 @@ def test_content_provider(
or "key" in error_msg.lower()
or "auth" in error_msg.lower()
):
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"Invalid API key: {error_msg}",
raise HTTPException(
status_code=400,
detail=f"Invalid API key: {error_msg}",
) from e
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"API key validation failed: {error_msg}",
raise HTTPException(
status_code=400,
detail=f"API key validation failed: {error_msg}",
) from e
logger.info(

View File

@@ -65,7 +65,7 @@ attrs==25.4.0
# jsonschema
# referencing
# zeep
authlib==1.6.6
authlib==1.6.7
# via fastmcp
babel==2.17.0
# via courlan
@@ -109,9 +109,7 @@ brotli==1.2.0
bytecode==0.17.0
# via ddtrace
cachetools==6.2.2
# via
# google-auth
# py-key-value-aio
# via py-key-value-aio
caio==0.9.25
# via aiofile
celery==5.5.1
@@ -190,6 +188,7 @@ courlan==1.3.2
cryptography==46.0.5
# via
# authlib
# google-auth
# msal
# msoffcrypto-tool
# pdfminer-six
@@ -306,7 +305,7 @@ google-api-core==2.28.1
# google-cloud-storage
google-api-python-client==2.86.0
# via onyx
google-auth==2.43.0
google-auth==2.48.0
# via
# google-api-core
# google-api-python-client
@@ -325,7 +324,7 @@ google-auth-httplib2==0.1.0
# onyx
google-auth-oauthlib==1.0.0
# via onyx
google-cloud-aiplatform==1.121.0
google-cloud-aiplatform==1.133.0
# via onyx
google-cloud-bigquery==3.38.0
# via google-cloud-aiplatform
@@ -1002,9 +1001,7 @@ sendgrid==6.12.5
sentry-sdk==2.14.0
# via onyx
shapely==2.0.6
# via
# google-cloud-aiplatform
# onyx
# via onyx
shellingham==1.5.4
# via typer
simple-salesforce==1.12.6

View File

@@ -59,8 +59,6 @@ botocore==1.39.11
# s3transfer
brotli==1.2.0
# via onyx
cachetools==6.2.2
# via google-auth
celery-types==0.19.0
# via onyx
certifi==2025.11.12
@@ -100,7 +98,9 @@ comm==0.2.3
contourpy==1.3.3
# via matplotlib
cryptography==46.0.5
# via pyjwt
# via
# google-auth
# pyjwt
cycler==0.12.1
# via matplotlib
debugpy==1.8.17
@@ -152,7 +152,7 @@ google-api-core==2.28.1
# google-cloud-core
# google-cloud-resource-manager
# google-cloud-storage
google-auth==2.43.0
google-auth==2.48.0
# via
# google-api-core
# google-cloud-aiplatform
@@ -162,7 +162,7 @@ google-auth==2.43.0
# google-cloud-storage
# google-genai
# kubernetes
google-cloud-aiplatform==1.121.0
google-cloud-aiplatform==1.133.0
# via onyx
google-cloud-bigquery==3.38.0
# via google-cloud-aiplatform
@@ -311,13 +311,12 @@ numpy==2.4.1
# contourpy
# matplotlib
# pandas-stubs
# shapely
# voyageai
oauthlib==3.2.2
# via
# kubernetes
# requests-oauthlib
onyx-devtools==0.6.2
onyx-devtools==0.6.3
# via onyx
openai==2.14.0
# via
@@ -510,8 +509,6 @@ s3transfer==0.13.1
# via boto3
sentry-sdk==2.14.0
# via onyx
shapely==2.0.6
# via google-cloud-aiplatform
six==1.17.0
# via
# kubernetes

View File

@@ -53,8 +53,6 @@ botocore==1.39.11
# s3transfer
brotli==1.2.0
# via onyx
cachetools==6.2.2
# via google-auth
certifi==2025.11.12
# via
# httpcore
@@ -79,7 +77,9 @@ colorama==0.4.6 ; sys_platform == 'win32'
# click
# tqdm
cryptography==46.0.5
# via pyjwt
# via
# google-auth
# pyjwt
decorator==5.2.1
# via retry
discord-py==2.4.0
@@ -111,7 +111,7 @@ google-api-core==2.28.1
# google-cloud-core
# google-cloud-resource-manager
# google-cloud-storage
google-auth==2.43.0
google-auth==2.48.0
# via
# google-api-core
# google-cloud-aiplatform
@@ -121,7 +121,7 @@ google-auth==2.43.0
# google-cloud-storage
# google-genai
# kubernetes
google-cloud-aiplatform==1.121.0
google-cloud-aiplatform==1.133.0
# via onyx
google-cloud-bigquery==3.38.0
# via google-cloud-aiplatform
@@ -221,9 +221,7 @@ multidict==6.7.0
# aiohttp
# yarl
numpy==2.4.1
# via
# shapely
# voyageai
# via voyageai
oauthlib==3.2.2
# via
# kubernetes
@@ -345,8 +343,6 @@ s3transfer==0.13.1
# via boto3
sentry-sdk==2.14.0
# via onyx
shapely==2.0.6
# via google-cloud-aiplatform
six==1.17.0
# via
# kubernetes

View File

@@ -57,8 +57,6 @@ botocore==1.39.11
# s3transfer
brotli==1.2.0
# via onyx
cachetools==6.2.2
# via google-auth
celery==5.5.1
# via sentry-sdk
certifi==2025.11.12
@@ -95,7 +93,9 @@ colorama==0.4.6 ; sys_platform == 'win32'
# click
# tqdm
cryptography==46.0.5
# via pyjwt
# via
# google-auth
# pyjwt
decorator==5.2.1
# via retry
discord-py==2.4.0
@@ -136,7 +136,7 @@ google-api-core==2.28.1
# google-cloud-core
# google-cloud-resource-manager
# google-cloud-storage
google-auth==2.43.0
google-auth==2.48.0
# via
# google-api-core
# google-cloud-aiplatform
@@ -146,7 +146,7 @@ google-auth==2.43.0
# google-cloud-storage
# google-genai
# kubernetes
google-cloud-aiplatform==1.121.0
google-cloud-aiplatform==1.133.0
# via onyx
google-cloud-bigquery==3.38.0
# via google-cloud-aiplatform
@@ -263,7 +263,6 @@ numpy==2.4.1
# onyx
# scikit-learn
# scipy
# shapely
# transformers
# voyageai
nvidia-cublas-cu12==12.8.4.1 ; platform_machine == 'x86_64' and sys_platform == 'linux'
@@ -452,8 +451,6 @@ sentry-sdk==2.14.0
# via onyx
setuptools==80.9.0 ; python_full_version >= '3.12'
# via torch
shapely==2.0.6
# via google-cloud-aiplatform
six==1.17.0
# via
# kubernetes

View File

@@ -0,0 +1,238 @@
"""
Tests that the default model cannot be removed or hidden via provider upsert.
When a model is set as the default (for any flow type), attempts to remove it
from the provider's model list or set its visibility to False should raise a
ValueError (which the API layer converts to OnyxError VALIDATION_ERROR).
"""
from collections.abc import Generator
from uuid import uuid4
import pytest
from sqlalchemy.orm import Session
from onyx.db.llm import fetch_existing_llm_provider
from onyx.db.llm import remove_llm_provider
from onyx.db.llm import update_default_provider
from onyx.db.llm import update_default_vision_provider
from onyx.db.llm import upsert_llm_provider
from onyx.llm.constants import LlmProviderNames
from onyx.server.manage.llm.models import LLMProviderUpsertRequest
from onyx.server.manage.llm.models import LLMProviderView
from onyx.server.manage.llm.models import ModelConfigurationUpsertRequest
def _create_test_provider(
    db_session: Session,
    name: str,
    models: list[ModelConfigurationUpsertRequest] | None = None,
) -> LLMProviderView:
    """Create (or update) an OpenAI test provider with the given model configs.

    When ``models`` is None, two defaults are used: gpt-4o (vision-capable,
    visible) and gpt-4o-mini (visible, no image input).
    """
    default_models = [
        ModelConfigurationUpsertRequest(
            name="gpt-4o", is_visible=True, supports_image_input=True
        ),
        ModelConfigurationUpsertRequest(
            name="gpt-4o-mini", is_visible=True, supports_image_input=False
        ),
    ]
    request = LLMProviderUpsertRequest(
        name=name,
        provider=LlmProviderNames.OPENAI,
        api_key="sk-test-key-00000000000000000000000000000000000",
        api_key_changed=True,
        model_configurations=models if models is not None else default_models,
    )
    return upsert_llm_provider(request, db_session=db_session)
def _cleanup_provider(db_session: Session, name: str) -> None:
    """Delete the provider with the given name, if it still exists."""
    existing = fetch_existing_llm_provider(name=name, db_session=db_session)
    if existing is not None:
        remove_llm_provider(db_session, existing.id)
@pytest.fixture
def provider_name() -> Generator[str, None, None]:
    """Yield a unique provider name so tests never collide on shared state."""
    unique_suffix = uuid4().hex[:8]
    yield f"test-provider-{unique_suffix}"
class TestDefaultModelProtection:
    """Tests that the default model cannot be removed or hidden.

    Each test creates its own uniquely-named provider and cleans it up in a
    ``finally`` block; rollback runs first because a failed upsert may leave
    the session in an aborted state.
    """

    def test_cannot_remove_default_text_model(
        self,
        db_session: Session,
        provider_name: str,
    ) -> None:
        """Removing the default text model from a provider should raise ValueError."""
        try:
            provider = _create_test_provider(db_session, provider_name)
            update_default_provider(provider.id, "gpt-4o", db_session)

            # Try to update the provider without the default model
            with pytest.raises(ValueError, match="Cannot remove the default model"):
                upsert_llm_provider(
                    LLMProviderUpsertRequest(
                        id=provider.id,
                        name=provider_name,
                        provider=LlmProviderNames.OPENAI,
                        api_key="sk-test-key-00000000000000000000000000000000000",
                        api_key_changed=True,
                        # gpt-4o (the default) is deliberately missing here
                        model_configurations=[
                            ModelConfigurationUpsertRequest(
                                name="gpt-4o-mini", is_visible=True
                            ),
                        ],
                    ),
                    db_session=db_session,
                )
        finally:
            # Reset the session before cleanup — the failed upsert above may
            # have left it dirty.
            db_session.rollback()
            _cleanup_provider(db_session, provider_name)

    def test_cannot_hide_default_text_model(
        self,
        db_session: Session,
        provider_name: str,
    ) -> None:
        """Setting is_visible=False on the default text model should raise ValueError."""
        try:
            provider = _create_test_provider(db_session, provider_name)
            update_default_provider(provider.id, "gpt-4o", db_session)

            # Try to hide the default model
            with pytest.raises(ValueError, match="Cannot hide the default model"):
                upsert_llm_provider(
                    LLMProviderUpsertRequest(
                        id=provider.id,
                        name=provider_name,
                        provider=LlmProviderNames.OPENAI,
                        api_key="sk-test-key-00000000000000000000000000000000000",
                        api_key_changed=True,
                        model_configurations=[
                            # Default model kept, but made invisible
                            ModelConfigurationUpsertRequest(
                                name="gpt-4o", is_visible=False
                            ),
                            ModelConfigurationUpsertRequest(
                                name="gpt-4o-mini", is_visible=True
                            ),
                        ],
                    ),
                    db_session=db_session,
                )
        finally:
            db_session.rollback()
            _cleanup_provider(db_session, provider_name)

    def test_cannot_remove_default_vision_model(
        self,
        db_session: Session,
        provider_name: str,
    ) -> None:
        """Removing the default vision model from a provider should raise ValueError."""
        try:
            provider = _create_test_provider(db_session, provider_name)
            # Set gpt-4o as both the text and vision default
            update_default_provider(provider.id, "gpt-4o", db_session)
            update_default_vision_provider(provider.id, "gpt-4o", db_session)

            # Try to remove the default vision model
            with pytest.raises(ValueError, match="Cannot remove the default model"):
                upsert_llm_provider(
                    LLMProviderUpsertRequest(
                        id=provider.id,
                        name=provider_name,
                        provider=LlmProviderNames.OPENAI,
                        api_key="sk-test-key-00000000000000000000000000000000000",
                        api_key_changed=True,
                        model_configurations=[
                            ModelConfigurationUpsertRequest(
                                name="gpt-4o-mini", is_visible=True
                            ),
                        ],
                    ),
                    db_session=db_session,
                )
        finally:
            db_session.rollback()
            _cleanup_provider(db_session, provider_name)

    def test_can_remove_non_default_model(
        self,
        db_session: Session,
        provider_name: str,
    ) -> None:
        """Removing a non-default model should succeed."""
        try:
            provider = _create_test_provider(db_session, provider_name)
            update_default_provider(provider.id, "gpt-4o", db_session)

            # Remove gpt-4o-mini (not default) — should succeed
            updated = upsert_llm_provider(
                LLMProviderUpsertRequest(
                    id=provider.id,
                    name=provider_name,
                    provider=LlmProviderNames.OPENAI,
                    api_key="sk-test-key-00000000000000000000000000000000000",
                    api_key_changed=True,
                    model_configurations=[
                        ModelConfigurationUpsertRequest(
                            name="gpt-4o", is_visible=True, supports_image_input=True
                        ),
                    ],
                ),
                db_session=db_session,
            )
            model_names = {mc.name for mc in updated.model_configurations}
            assert "gpt-4o" in model_names
            assert "gpt-4o-mini" not in model_names
        finally:
            db_session.rollback()
            _cleanup_provider(db_session, provider_name)

    def test_can_hide_non_default_model(
        self,
        db_session: Session,
        provider_name: str,
    ) -> None:
        """Hiding a non-default model should succeed."""
        try:
            provider = _create_test_provider(db_session, provider_name)
            update_default_provider(provider.id, "gpt-4o", db_session)

            # Hide gpt-4o-mini (not default) — should succeed
            updated = upsert_llm_provider(
                LLMProviderUpsertRequest(
                    id=provider.id,
                    name=provider_name,
                    provider=LlmProviderNames.OPENAI,
                    api_key="sk-test-key-00000000000000000000000000000000000",
                    api_key_changed=True,
                    model_configurations=[
                        ModelConfigurationUpsertRequest(
                            name="gpt-4o", is_visible=True, supports_image_input=True
                        ),
                        ModelConfigurationUpsertRequest(
                            name="gpt-4o-mini", is_visible=False
                        ),
                    ],
                ),
                db_session=db_session,
            )
            model_visibility = {
                mc.name: mc.is_visible for mc in updated.model_configurations
            }
            assert model_visibility["gpt-4o"] is True
            assert model_visibility["gpt-4o-mini"] is False
        finally:
            db_session.rollback()
            _cleanup_provider(db_session, provider_name)

View File

@@ -2,7 +2,6 @@
import pytest
from onyx.chat.llm_loop import _should_keep_bedrock_tool_definitions
from onyx.chat.llm_loop import _try_fallback_tool_extraction
from onyx.chat.llm_loop import construct_message_history
from onyx.chat.models import ChatLoadedFile
@@ -14,22 +13,11 @@ from onyx.chat.models import LlmStepResult
from onyx.chat.models import ToolCallSimple
from onyx.configs.constants import MessageType
from onyx.file_store.models import ChatFileType
from onyx.llm.constants import LlmProviderNames
from onyx.llm.interfaces import ToolChoiceOptions
from onyx.server.query_and_chat.placement import Placement
from onyx.tools.models import ToolCallKickoff
class _StubConfig:
def __init__(self, model_provider: str) -> None:
self.model_provider = model_provider
class _StubLLM:
def __init__(self, model_provider: str) -> None:
self.config = _StubConfig(model_provider=model_provider)
def create_message(
content: str, message_type: MessageType, token_count: int | None = None
) -> ChatMessageSimple:
@@ -946,37 +934,6 @@ class TestForgottenFileMetadata:
assert "moby_dick.txt" in forgotten.message
class TestBedrockToolConfigGuard:
def test_bedrock_with_tool_history_keeps_tool_definitions(self) -> None:
llm = _StubLLM(LlmProviderNames.BEDROCK)
history = [
create_message("Question", MessageType.USER, 5),
create_assistant_with_tool_call("tc_1", "search", 5),
create_tool_response("tc_1", "Tool output", 5),
]
assert _should_keep_bedrock_tool_definitions(llm, history) is True
def test_bedrock_without_tool_history_does_not_keep_tool_definitions(self) -> None:
llm = _StubLLM(LlmProviderNames.BEDROCK)
history = [
create_message("Question", MessageType.USER, 5),
create_message("Answer", MessageType.ASSISTANT, 5),
]
assert _should_keep_bedrock_tool_definitions(llm, history) is False
def test_non_bedrock_with_tool_history_does_not_keep_tool_definitions(self) -> None:
llm = _StubLLM(LlmProviderNames.OPENAI)
history = [
create_message("Question", MessageType.USER, 5),
create_assistant_with_tool_call("tc_1", "search", 5),
create_tool_response("tc_1", "Tool output", 5),
]
assert _should_keep_bedrock_tool_definitions(llm, history) is False
class TestFallbackToolExtraction:
def _tool_defs(self) -> list[dict]:
return [

View File

@@ -1214,3 +1214,218 @@ def test_multithreaded_invoke_without_custom_config_skips_env_lock() -> None:
# The env lock context manager should never have been called
mock_env_lock.assert_not_called()
# ---- Tests for Bedrock tool content stripping ----
def test_messages_contain_tool_content_with_tool_role() -> None:
from onyx.llm.multi_llm import _messages_contain_tool_content
messages: list[dict[str, Any]] = [
{"role": "user", "content": "Hello"},
{"role": "assistant", "content": "I'll search for that."},
{"role": "tool", "content": "search results", "tool_call_id": "tc_1"},
]
assert _messages_contain_tool_content(messages) is True
def test_messages_contain_tool_content_with_tool_calls() -> None:
from onyx.llm.multi_llm import _messages_contain_tool_content
messages: list[dict[str, Any]] = [
{"role": "user", "content": "Hello"},
{
"role": "assistant",
"content": None,
"tool_calls": [
{
"id": "tc_1",
"type": "function",
"function": {"name": "search", "arguments": "{}"},
}
],
},
]
assert _messages_contain_tool_content(messages) is True
def test_messages_contain_tool_content_without_tools() -> None:
from onyx.llm.multi_llm import _messages_contain_tool_content
messages: list[dict[str, Any]] = [
{"role": "user", "content": "Hello"},
{"role": "assistant", "content": "Hi there!"},
]
assert _messages_contain_tool_content(messages) is False
def test_strip_tool_content_converts_assistant_tool_calls_to_text() -> None:
from onyx.llm.multi_llm import _strip_tool_content_from_messages
messages: list[dict[str, Any]] = [
{"role": "user", "content": "Search for cats"},
{
"role": "assistant",
"content": "Let me search.",
"tool_calls": [
{
"id": "tc_1",
"type": "function",
"function": {
"name": "search",
"arguments": '{"query": "cats"}',
},
}
],
},
{
"role": "tool",
"content": "Found 3 results about cats.",
"tool_call_id": "tc_1",
},
{"role": "assistant", "content": "Here are the results."},
]
result = _strip_tool_content_from_messages(messages)
assert len(result) == 4
# First message unchanged
assert result[0] == {"role": "user", "content": "Search for cats"}
# Assistant with tool calls → plain text
assert result[1]["role"] == "assistant"
assert "tool_calls" not in result[1]
assert "Let me search." in result[1]["content"]
assert "[Tool Call]" in result[1]["content"]
assert "search" in result[1]["content"]
assert "tc_1" in result[1]["content"]
# Tool response → user message
assert result[2]["role"] == "user"
assert "[Tool Result]" in result[2]["content"]
assert "tc_1" in result[2]["content"]
assert "Found 3 results about cats." in result[2]["content"]
# Final assistant message unchanged
assert result[3] == {"role": "assistant", "content": "Here are the results."}
def test_strip_tool_content_handles_assistant_with_no_text_content() -> None:
from onyx.llm.multi_llm import _strip_tool_content_from_messages
messages: list[dict[str, Any]] = [
{
"role": "assistant",
"content": None,
"tool_calls": [
{
"id": "tc_1",
"type": "function",
"function": {"name": "search", "arguments": "{}"},
}
],
},
]
result = _strip_tool_content_from_messages(messages)
assert result[0]["role"] == "assistant"
assert "[Tool Call]" in result[0]["content"]
assert "tool_calls" not in result[0]
def test_strip_tool_content_passes_through_non_tool_messages() -> None:
from onyx.llm.multi_llm import _strip_tool_content_from_messages
messages: list[dict[str, Any]] = [
{"role": "system", "content": "You are helpful."},
{"role": "user", "content": "Hello"},
{"role": "assistant", "content": "Hi!"},
]
result = _strip_tool_content_from_messages(messages)
assert result == messages
def test_strip_tool_content_handles_list_content_blocks() -> None:
from onyx.llm.multi_llm import _strip_tool_content_from_messages
messages: list[dict[str, Any]] = [
{
"role": "assistant",
"content": [{"type": "text", "text": "Searching now."}],
"tool_calls": [
{
"id": "tc_1",
"type": "function",
"function": {"name": "search", "arguments": "{}"},
}
],
},
{
"role": "tool",
"content": [
{"type": "text", "text": "result A"},
{"type": "text", "text": "result B"},
],
"tool_call_id": "tc_1",
},
]
result = _strip_tool_content_from_messages(messages)
# Assistant: list content flattened + tool call appended
assert result[0]["role"] == "assistant"
assert "Searching now." in result[0]["content"]
assert "[Tool Call]" in result[0]["content"]
assert isinstance(result[0]["content"], str)
# Tool: list content flattened into user message
assert result[1]["role"] == "user"
assert "result A" in result[1]["content"]
assert "result B" in result[1]["content"]
assert isinstance(result[1]["content"], str)
def test_strip_tool_content_merges_consecutive_tool_results() -> None:
"""Bedrock requires strict user/assistant alternation. Multiple parallel
tool results must be merged into a single user message."""
from onyx.llm.multi_llm import _strip_tool_content_from_messages
messages: list[dict[str, Any]] = [
{"role": "user", "content": "weather and news?"},
{
"role": "assistant",
"content": None,
"tool_calls": [
{
"id": "tc_1",
"type": "function",
"function": {"name": "search_weather", "arguments": "{}"},
},
{
"id": "tc_2",
"type": "function",
"function": {"name": "search_news", "arguments": "{}"},
},
],
},
{"role": "tool", "content": "sunny 72F", "tool_call_id": "tc_1"},
{"role": "tool", "content": "headline news", "tool_call_id": "tc_2"},
{"role": "assistant", "content": "Here are the results."},
]
result = _strip_tool_content_from_messages(messages)
# user, assistant (flattened), user (merged tool results), assistant
assert len(result) == 4
roles = [m["role"] for m in result]
assert roles == ["user", "assistant", "user", "assistant"]
# Both tool results merged into one user message
merged = result[2]["content"]
assert "tc_1" in merged
assert "sunny 72F" in merged
assert "tc_2" in merged
assert "headline news" in merged

View File

@@ -11,7 +11,7 @@ dependencies = [
"aioboto3==15.1.0",
"cohere==5.6.1",
"fastapi==0.133.1",
"google-cloud-aiplatform==1.121.0",
"google-cloud-aiplatform==1.133.0",
"google-genai==1.52.0",
"litellm==1.81.6",
"openai==2.14.0",
@@ -144,7 +144,7 @@ dev = [
"matplotlib==3.10.8",
"mypy-extensions==1.0.0",
"mypy==1.13.0",
"onyx-devtools==0.6.2",
"onyx-devtools==0.6.3",
"openapi-generator-cli==7.17.0",
"pandas-stubs~=2.3.3",
"pre-commit==3.2.2",

View File

@@ -6,6 +6,7 @@ import (
"os"
"os/exec"
"regexp"
"strconv"
"strings"
log "github.com/sirupsen/logrus"
@@ -33,11 +34,15 @@ func NewCherryPickCommand() *cobra.Command {
opts := &CherryPickOptions{}
cmd := &cobra.Command{
Use: "cherry-pick <commit-sha> [<commit-sha>...]",
Use: "cherry-pick <commit-or-pr> [<commit-or-pr>...]",
Aliases: []string{"cp"},
Short: "Cherry-pick one or more commits to a release branch",
Short: "Cherry-pick one or more commits (or PRs) to a release branch",
Long: `Cherry-pick one or more commits to a release branch and create a PR.
Arguments can be commit SHAs or GitHub PR numbers. A purely numeric argument
with fewer than 6 digits is treated as a PR number and resolved to its merge
commit automatically.
This command will:
1. Find the nearest stable version tag
2. Fetch the corresponding release branch(es)
@@ -54,7 +59,8 @@ If a cherry-pick hits a merge conflict, resolve it manually, then run:
Example usage:
$ ods cherry-pick foo123 bar456 --release 2.5 --release 2.6
$ ods cp foo123 --release 2.5`,
$ ods cp foo123 --release 2.5
$ ods cp 1234 --release 2.5 # cherry-pick merge commit of PR #1234`,
Args: func(cmd *cobra.Command, args []string) error {
cont, _ := cmd.Flags().GetBool("continue")
if cont {
@@ -90,11 +96,12 @@ Example usage:
func runCherryPick(cmd *cobra.Command, args []string, opts *CherryPickOptions) {
git.CheckGitHubCLI()
commitSHAs := args
// Resolve any PR numbers (e.g. "1234") to their merge commit SHAs
commitSHAs, labels := resolveArgs(args)
if len(commitSHAs) == 1 {
log.Debugf("Cherry-picking commit: %s", commitSHAs[0])
log.Debugf("Cherry-picking %s (%s)", labels[0], commitSHAs[0])
} else {
log.Debugf("Cherry-picking %d commits: %s", len(commitSHAs), strings.Join(commitSHAs, ", "))
log.Debugf("Cherry-picking %d commits: %s", len(commitSHAs), strings.Join(labels, ", "))
}
if opts.DryRun {
@@ -294,6 +301,11 @@ func runCherryPickContinue() {
log.Infof("Resuming cherry-pick (original branch: %s, releases: %v)", state.OriginalBranch, state.Releases)
// If a rebase is in progress (REBASE_HEAD exists), it must be resolved first
if git.IsRebaseInProgress() {
log.Fatal("A git rebase is in progress. Resolve it first:\n To continue: git rebase --continue\n To abort: git rebase --abort\nThen re-run: ods cherry-pick --continue")
}
// If git cherry-pick is still in progress (CHERRY_PICK_HEAD exists), continue it
if git.IsCherryPickInProgress() {
log.Info("Continuing in-progress cherry-pick...")
@@ -327,6 +339,23 @@ func cherryPickToRelease(commitSHAs, commitMessages []string, branchSuffix, vers
return "", fmt.Errorf("failed to checkout existing hotfix branch: %w", err)
}
// Only rebase when the branch has no unique commits (pure fast-forward).
// If unique commits exist (e.g. after --continue resolved a cherry-pick
// conflict), rebasing would re-apply them and risk the same conflicts.
remoteRef := fmt.Sprintf("origin/%s", releaseBranch)
uniqueCount, err := git.CountUniqueCommits(hotfixBranch, remoteRef)
if err != nil {
log.Warnf("Could not determine unique commits, skipping rebase: %v", err)
} else if uniqueCount == 0 {
log.Infof("Rebasing %s onto %s", hotfixBranch, releaseBranch)
if err := git.RunCommand("rebase", "--quiet", remoteRef); err != nil {
_ = git.RunCommand("rebase", "--abort")
return "", fmt.Errorf("failed to rebase hotfix branch onto %s (rebase aborted, re-run to retry): %w", releaseBranch, err)
}
} else {
log.Infof("Branch %s has %d unique commit(s), skipping rebase", hotfixBranch, uniqueCount)
}
// Check which commits need to be cherry-picked
commitsToCherry := []string{}
for _, sha := range commitSHAs {
@@ -364,7 +393,6 @@ func cherryPickToRelease(commitSHAs, commitMessages []string, branchSuffix, vers
return "", nil
}
// Push the hotfix branch
log.Infof("Pushing hotfix branch: %s", hotfixBranch)
pushArgs := []string{"push", "-u", "origin", hotfixBranch}
if noVerify {
@@ -432,6 +460,40 @@ func performCherryPick(commitSHAs []string) error {
return nil
}
// isPRNumber returns true if the argument looks like a GitHub PR number
// (purely numeric with fewer than 6 digits).
func isPRNumber(arg string) bool {
	if len(arg) == 0 || len(arg) >= 6 {
		return false
	}
	value, err := strconv.Atoi(arg)
	if err != nil {
		return false
	}
	return value > 0
}
// resolveArgs resolves arguments that may be PR numbers into commit SHAs.
// Returns the resolved commit SHAs and a display-friendly label for logging
// (e.g. "PR #1234" instead of raw SHA).
func resolveArgs(args []string) (commitSHAs []string, labels []string) {
	commitSHAs = make([]string, len(args))
	labels = make([]string, len(args))
	for idx, raw := range args {
		if !isPRNumber(raw) {
			// Already a commit-ish argument; pass through untouched.
			commitSHAs[idx] = raw
			labels[idx] = raw
			continue
		}
		log.Infof("Resolving PR #%s to merge commit...", raw)
		sha, err := git.ResolvePRToMergeCommit(raw)
		if err != nil {
			log.Fatalf("Failed to resolve PR #%s: %v", raw, err)
		}
		log.Infof("PR #%s → %s", raw, sha)
		commitSHAs[idx] = sha
		labels[idx] = fmt.Sprintf("PR #%s", raw)
	}
	return commitSHAs, labels
}
// normalizeVersion ensures the version has a 'v' prefix
func normalizeVersion(version string) string {
if !strings.HasPrefix(version, "v") {

144
tools/ods/cmd/desktop.go Normal file
View File

@@ -0,0 +1,144 @@
package cmd
import (
"encoding/json"
"errors"
"fmt"
"os"
"os/exec"
"path/filepath"
"sort"
"strings"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/onyx-dot-app/onyx/tools/ods/internal/paths"
)
// desktopPackageJSON models the subset of desktop/package.json this command
// reads: just the "scripts" map (script name -> shell command).
type desktopPackageJSON struct {
	Scripts map[string]string `json:"scripts"`
}
// NewDesktopCommand creates a command that runs npm scripts from the desktop directory.
func NewDesktopCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "desktop <script> [args...]",
Short: "Run desktop/package.json npm scripts",
Long: desktopHelpDescription(),
Args: cobra.MinimumNArgs(1),
ValidArgsFunction: func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
if len(args) > 0 {
return nil, cobra.ShellCompDirectiveNoFileComp
}
return desktopScriptNames(), cobra.ShellCompDirectiveNoFileComp
},
Run: func(cmd *cobra.Command, args []string) {
runDesktopScript(args)
},
}
cmd.Flags().SetInterspersed(false)
return cmd
}
// runDesktopScript invokes "npm run <script> [-- args...]" inside the desktop
// directory, wiring stdio through and propagating the child's exit code.
func runDesktopScript(args []string) {
	dir, err := desktopDir()
	if err != nil {
		log.Fatalf("Failed to find desktop directory: %v", err)
	}

	scriptName, scriptArgs := args[0], args[1:]
	// Drop a user-supplied leading "--"; our own separator is added below.
	if len(scriptArgs) > 0 && scriptArgs[0] == "--" {
		scriptArgs = scriptArgs[1:]
	}

	npmArgs := []string{"run", scriptName}
	if len(scriptArgs) > 0 {
		// npm requires "--" to forward flags to the underlying script.
		npmArgs = append(npmArgs, "--")
		npmArgs = append(npmArgs, scriptArgs...)
	}

	log.Debugf("Running in %s: npm %v", dir, npmArgs)
	child := exec.Command("npm", npmArgs...)
	child.Dir = dir
	child.Stdout = os.Stdout
	child.Stderr = os.Stderr
	child.Stdin = os.Stdin

	if err := child.Run(); err != nil {
		// For wrapped commands, preserve the child process's exit code and
		// avoid duplicating already-printed stderr output.
		var exitErr *exec.ExitError
		if errors.As(err, &exitErr) {
			if code := exitErr.ExitCode(); code != -1 {
				os.Exit(code)
			}
		}
		log.Fatalf("Failed to run npm: %v", err)
	}
}
// desktopScriptNames returns the sorted script names from desktop/package.json,
// or nil when the file cannot be loaded (completion then degrades gracefully).
func desktopScriptNames() []string {
	scripts, err := loadDesktopScripts()
	if err != nil {
		return nil
	}
	names := make([]string, 0, len(scripts))
	for scriptName := range scripts {
		names = append(names, scriptName)
	}
	sort.Strings(names)
	return names
}
// desktopHelpDescription builds the command's long help text, appending the
// currently available npm scripts when package.json can be read.
func desktopHelpDescription() string {
	description := `Run npm scripts from desktop/package.json.
Examples:
ods desktop dev
ods desktop build
ods desktop build:dmg`
	scripts := desktopScriptNames()
	// Fall back to a placeholder when package.json is missing or unparsable.
	if len(scripts) == 0 {
		return description + "\n\nAvailable scripts: (unable to load)"
	}
	return description + "\n\nAvailable scripts:\n " + strings.Join(scripts, "\n ")
}
// loadDesktopScripts reads desktop/package.json and returns its "scripts" map.
// A missing or unparsable file yields an error; a file with no scripts yields nil.
func loadDesktopScripts() (map[string]string, error) {
	dir, err := desktopDir()
	if err != nil {
		return nil, err
	}
	pkgPath := filepath.Join(dir, "package.json")

	raw, err := os.ReadFile(pkgPath)
	if err != nil {
		return nil, fmt.Errorf("failed to read %s: %w", pkgPath, err)
	}

	var pkg desktopPackageJSON
	if err := json.Unmarshal(raw, &pkg); err != nil {
		return nil, fmt.Errorf("failed to parse %s: %w", pkgPath, err)
	}
	// A nil Scripts map and an absent "scripts" key are equivalent here.
	return pkg.Scripts, nil
}
// desktopDir resolves the repository's desktop/ directory from the git root.
func desktopDir() (string, error) {
	gitRoot, err := paths.GitRoot()
	if err != nil {
		return "", err
	}
	return filepath.Join(gitRoot, "desktop"), nil
}

View File

@@ -50,6 +50,7 @@ func NewRootCommand() *cobra.Command {
cmd.AddCommand(NewPullCommand())
cmd.AddCommand(NewRunCICommand())
cmd.AddCommand(NewScreenshotDiffCommand())
cmd.AddCommand(NewDesktopCommand())
cmd.AddCommand(NewWebCommand())
cmd.AddCommand(NewWhoisCommand())

View File

@@ -1,14 +1,14 @@
module github.com/onyx-dot-app/onyx/tools/ods
go 1.24.11
go 1.26.0
require (
github.com/sirupsen/logrus v1.9.3
github.com/spf13/cobra v1.10.1
github.com/spf13/pflag v1.0.9
)
require (
github.com/inconshreveable/mousetrap v1.1.0 // indirect
github.com/spf13/pflag v1.0.9 // indirect
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 // indirect
)

View File

@@ -6,6 +6,7 @@ import (
"os"
"os/exec"
"path/filepath"
"strconv"
"strings"
log "github.com/sirupsen/logrus"
@@ -173,6 +174,26 @@ func IsCherryPickInProgress() bool {
return cmd.Run() == nil
}
// CountUniqueCommits returns the number of commits on branch that are not on upstream.
func CountUniqueCommits(branch, upstream string) (int, error) {
	revRange := fmt.Sprintf("%s..%s", upstream, branch)
	out, err := exec.Command("git", "rev-list", "--count", revRange).Output()
	if err != nil {
		return 0, fmt.Errorf("git rev-list --count failed: %w", err)
	}
	count, err := strconv.Atoi(strings.TrimSpace(string(out)))
	if err != nil {
		return 0, fmt.Errorf("failed to parse commit count: %w", err)
	}
	return count, nil
}
// IsRebaseInProgress checks if a rebase is currently in progress
func IsRebaseInProgress() bool {
	// REBASE_HEAD only resolves while a rebase is underway.
	probe := exec.Command("git", "rev-parse", "--verify", "--quiet", "REBASE_HEAD")
	return probe.Run() == nil
}
// HasStagedChanges checks if there are staged changes in the index
func HasStagedChanges() bool {
cmd := exec.Command("git", "diff", "--quiet", "--cached")
@@ -216,6 +237,23 @@ func IsCommitAppliedOnBranch(commitSHA, branchName string) bool {
return false
}
// ResolvePRToMergeCommit resolves a GitHub PR number to its merge commit SHA
func ResolvePRToMergeCommit(prNumber string) (string, error) {
	raw, err := exec.Command(
		"gh", "pr", "view", prNumber, "--json", "mergeCommit", "--jq", ".mergeCommit.oid",
	).Output()
	if err != nil {
		// Surface gh's captured stderr when available; it usually explains the failure.
		if exitErr, ok := err.(*exec.ExitError); ok {
			return "", fmt.Errorf("gh pr view failed: %w: %s", err, string(exitErr.Stderr))
		}
		return "", fmt.Errorf("gh pr view failed: %w", err)
	}
	sha := strings.TrimSpace(string(raw))
	// jq emits "null" for an unmerged PR's missing mergeCommit.
	if sha == "" || sha == "null" {
		return "", fmt.Errorf("PR #%s has no merge commit (is it merged?)", prNumber)
	}
	return sha, nil
}
// RunCherryPickContinue runs git cherry-pick --continue --no-edit
func RunCherryPickContinue() error {
return RunCommandVerboseOnError("cherry-pick", "--continue", "--no-edit")

View File

@@ -1,5 +1,5 @@
[build-system]
requires = ["hatchling", "go-bin~=1.24.11", "manygo"]
requires = ["hatchling", "go-bin~=1.26.0", "manygo"]
build-backend = "hatchling.build"
[project]

49
uv.lock generated
View File

@@ -453,14 +453,14 @@ wheels = [
[[package]]
name = "authlib"
version = "1.6.6"
version = "1.6.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
]
sdist = { url = "https://files.pythonhosted.org/packages/bb/9b/b1661026ff24bc641b76b78c5222d614776b0c085bcfdac9bd15a1cb4b35/authlib-1.6.6.tar.gz", hash = "sha256:45770e8e056d0f283451d9996fbb59b70d45722b45d854d58f32878d0a40c38e", size = 164894, upload-time = "2025-12-12T08:01:41.464Z" }
sdist = { url = "https://files.pythonhosted.org/packages/49/dc/ed1681bf1339dd6ea1ce56136bad4baabc6f7ad466e375810702b0237047/authlib-1.6.7.tar.gz", hash = "sha256:dbf10100011d1e1b34048c9d120e83f13b35d69a826ae762b93d2fb5aafc337b", size = 164950, upload-time = "2026-02-06T14:04:14.171Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/54/51/321e821856452f7386c4e9df866f196720b1ad0c5ea1623ea7399969ae3b/authlib-1.6.6-py2.py3-none-any.whl", hash = "sha256:7d9e9bc535c13974313a87f53e8430eb6ea3d1cf6ae4f6efcd793f2e949143fd", size = 244005, upload-time = "2025-12-12T08:01:40.209Z" },
{ url = "https://files.pythonhosted.org/packages/f8/00/3ed12264094ec91f534fae429945efbaa9f8c666f3aa7061cc3b2a26a0cd/authlib-1.6.7-py2.py3-none-any.whl", hash = "sha256:c637340d9a02789d2efa1d003a7437d10d3e565237bcb5fcbc6c134c7b95bab0", size = 244115, upload-time = "2026-02-06T14:04:12.141Z" },
]
[[package]]
@@ -756,12 +756,20 @@ sdist = { url = "https://files.pythonhosted.org/packages/92/88/b8527e1b00c1811db
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/90/543f556fcfcfa270713eef906b6352ab048e1e557afec12925c991dc93c2/caio-0.9.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d6956d9e4a27021c8bd6c9677f3a59eb1d820cc32d0343cea7961a03b1371965", size = 36839, upload-time = "2025-12-26T15:21:40.267Z" },
{ url = "https://files.pythonhosted.org/packages/51/3b/36f3e8ec38dafe8de4831decd2e44c69303d2a3892d16ceda42afed44e1b/caio-0.9.25-cp311-cp311-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bf84bfa039f25ad91f4f52944452a5f6f405e8afab4d445450978cd6241d1478", size = 80255, upload-time = "2025-12-26T15:22:20.271Z" },
{ url = "https://files.pythonhosted.org/packages/df/ce/65e64867d928e6aff1b4f0e12dba0ef6d5bf412c240dc1df9d421ac10573/caio-0.9.25-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:ae3d62587332bce600f861a8de6256b1014d6485cfd25d68c15caf1611dd1f7c", size = 80052, upload-time = "2026-03-04T22:08:20.402Z" },
{ url = "https://files.pythonhosted.org/packages/46/90/e278863c47e14ec58309aa2e38a45882fbe67b4cc29ec9bc8f65852d3e45/caio-0.9.25-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fc220b8533dcf0f238a6b1a4a937f92024c71e7b10b5a2dfc1c73604a25709bc", size = 78273, upload-time = "2026-03-04T22:08:21.368Z" },
{ url = "https://files.pythonhosted.org/packages/d3/25/79c98ebe12df31548ba4eaf44db11b7cad6b3e7b4203718335620939083c/caio-0.9.25-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fb7ff95af4c31ad3f03179149aab61097a71fd85e05f89b4786de0359dffd044", size = 36983, upload-time = "2025-12-26T15:21:36.075Z" },
{ url = "https://files.pythonhosted.org/packages/a3/2b/21288691f16d479945968a0a4f2856818c1c5be56881d51d4dac9b255d26/caio-0.9.25-cp312-cp312-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:97084e4e30dfa598449d874c4d8e0c8d5ea17d2f752ef5e48e150ff9d240cd64", size = 82012, upload-time = "2025-12-26T15:22:20.983Z" },
{ url = "https://files.pythonhosted.org/packages/03/c4/8a1b580875303500a9c12b9e0af58cb82e47f5bcf888c2457742a138273c/caio-0.9.25-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:4fa69eba47e0f041b9d4f336e2ad40740681c43e686b18b191b6c5f4c5544bfb", size = 81502, upload-time = "2026-03-04T22:08:22.381Z" },
{ url = "https://files.pythonhosted.org/packages/d1/1c/0fe770b8ffc8362c48134d1592d653a81a3d8748d764bec33864db36319d/caio-0.9.25-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:6bebf6f079f1341d19f7386db9b8b1f07e8cc15ae13bfdaff573371ba0575d69", size = 80200, upload-time = "2026-03-04T22:08:23.382Z" },
{ url = "https://files.pythonhosted.org/packages/31/57/5e6ff127e6f62c9f15d989560435c642144aa4210882f9494204bc892305/caio-0.9.25-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d6c2a3411af97762a2b03840c3cec2f7f728921ff8adda53d7ea2315a8563451", size = 36979, upload-time = "2025-12-26T15:21:35.484Z" },
{ url = "https://files.pythonhosted.org/packages/a3/9f/f21af50e72117eb528c422d4276cbac11fb941b1b812b182e0a9c70d19c5/caio-0.9.25-cp313-cp313-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0998210a4d5cd5cb565b32ccfe4e53d67303f868a76f212e002a8554692870e6", size = 81900, upload-time = "2025-12-26T15:22:21.919Z" },
{ url = "https://files.pythonhosted.org/packages/9c/12/c39ae2a4037cb10ad5eb3578eb4d5f8c1a2575c62bba675f3406b7ef0824/caio-0.9.25-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:1a177d4777141b96f175fe2c37a3d96dec7911ed9ad5f02bac38aaa1c936611f", size = 81523, upload-time = "2026-03-04T22:08:25.187Z" },
{ url = "https://files.pythonhosted.org/packages/22/59/f8f2e950eb4f1a5a3883e198dca514b9d475415cb6cd7b78b9213a0dd45a/caio-0.9.25-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9ed3cfb28c0e99fec5e208c934e5c157d0866aa9c32aa4dc5e9b6034af6286b7", size = 80243, upload-time = "2026-03-04T22:08:26.449Z" },
{ url = "https://files.pythonhosted.org/packages/69/ca/a08fdc7efdcc24e6a6131a93c85be1f204d41c58f474c42b0670af8c016b/caio-0.9.25-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fab6078b9348e883c80a5e14b382e6ad6aabbc4429ca034e76e730cf464269db", size = 36978, upload-time = "2025-12-26T15:21:41.055Z" },
{ url = "https://files.pythonhosted.org/packages/5e/6c/d4d24f65e690213c097174d26eda6831f45f4734d9d036d81790a27e7b78/caio-0.9.25-cp314-cp314-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:44a6b58e52d488c75cfaa5ecaa404b2b41cc965e6c417e03251e868ecd5b6d77", size = 81832, upload-time = "2025-12-26T15:22:22.757Z" },
{ url = "https://files.pythonhosted.org/packages/87/a4/e534cf7d2d0e8d880e25dd61e8d921ffcfe15bd696734589826f5a2df727/caio-0.9.25-cp314-cp314-manylinux_2_34_aarch64.whl", hash = "sha256:628a630eb7fb22381dd8e3c8ab7f59e854b9c806639811fc3f4310c6bd711d79", size = 81565, upload-time = "2026-03-04T22:08:27.483Z" },
{ url = "https://files.pythonhosted.org/packages/3f/ed/bf81aeac1d290017e5e5ac3e880fd56ee15e50a6d0353986799d1bc5cfd5/caio-0.9.25-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:0ba16aa605ccb174665357fc729cf500679c2d94d5f1458a6f0d5ca48f2060a7", size = 80071, upload-time = "2026-03-04T22:08:28.751Z" },
{ url = "https://files.pythonhosted.org/packages/86/93/1f76c8d1bafe3b0614e06b2195784a3765bbf7b0a067661af9e2dd47fc33/caio-0.9.25-py3-none-any.whl", hash = "sha256:06c0bb02d6b929119b1cfbe1ca403c768b2013a369e2db46bfa2a5761cf82e40", size = 19087, upload-time = "2025-12-26T15:22:00.221Z" },
]
@@ -2123,16 +2131,16 @@ wheels = [
[[package]]
name = "google-auth"
version = "2.43.0"
version = "2.48.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cachetools" },
{ name = "cryptography" },
{ name = "pyasn1-modules" },
{ name = "rsa" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ff/ef/66d14cf0e01b08d2d51ffc3c20410c4e134a1548fc246a6081eae585a4fe/google_auth-2.43.0.tar.gz", hash = "sha256:88228eee5fc21b62a1b5fe773ca15e67778cb07dc8363adcb4a8827b52d81483", size = 296359, upload-time = "2025-11-06T00:13:36.587Z" }
sdist = { url = "https://files.pythonhosted.org/packages/0c/41/242044323fbd746615884b1c16639749e73665b718209946ebad7ba8a813/google_auth-2.48.0.tar.gz", hash = "sha256:4f7e706b0cd3208a3d940a19a822c37a476ddba5450156c3e6624a71f7c841ce", size = 326522, upload-time = "2026-01-26T19:22:47.157Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6f/d1/385110a9ae86d91cc14c5282c61fe9f4dc41c0b9f7d423c6ad77038c4448/google_auth-2.43.0-py2.py3-none-any.whl", hash = "sha256:af628ba6fa493f75c7e9dbe9373d148ca9f4399b5ea29976519e0a3848eddd16", size = 223114, upload-time = "2025-11-06T00:13:35.209Z" },
{ url = "https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl", hash = "sha256:2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f", size = 236499, upload-time = "2026-01-26T19:22:45.099Z" },
]
[[package]]
@@ -2164,7 +2172,7 @@ wheels = [
[[package]]
name = "google-cloud-aiplatform"
version = "1.121.0"
version = "1.133.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "docstring-parser" },
@@ -2178,12 +2186,11 @@ dependencies = [
{ name = "proto-plus" },
{ name = "protobuf" },
{ name = "pydantic" },
{ name = "shapely" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b1/86/d1bad9a342122f0f5913cd8b7758ab340aac3f579cffb800d294da605a7c/google_cloud_aiplatform-1.121.0.tar.gz", hash = "sha256:65710396238fa461dbea9b2af9ed23f95458d70d9684e75519c7c9c1601ff308", size = 9705200, upload-time = "2025-10-15T20:27:59.262Z" }
sdist = { url = "https://files.pythonhosted.org/packages/d4/be/31ce7fd658ddebafbe5583977ddee536b2bacc491ad10b5a067388aec66f/google_cloud_aiplatform-1.133.0.tar.gz", hash = "sha256:3a6540711956dd178daaab3c2c05db476e46d94ac25912b8cf4f59b00b058ae0", size = 9921309, upload-time = "2026-01-08T22:11:25.079Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bd/f6/806b39f86f912133a3071ffa9ff99801a12868216069e26c83a48943116b/google_cloud_aiplatform-1.121.0-py2.py3-none-any.whl", hash = "sha256:1e7105dfd17963207e966550c9544264508efdfded29cf4924c5b86ff4a22efd", size = 8067568, upload-time = "2025-10-15T20:27:54.842Z" },
{ url = "https://files.pythonhosted.org/packages/01/5b/ef74ff65aebb74eaba51078e33ddd897247ba0d1197fd5a7953126205519/google_cloud_aiplatform-1.133.0-py2.py3-none-any.whl", hash = "sha256:dfc81228e987ca10d1c32c7204e2131b3c8d6b7c8e0b4e23bf7c56816bc4c566", size = 8184595, upload-time = "2026-01-08T22:11:22.067Z" },
]
[[package]]
@@ -4622,7 +4629,7 @@ requires-dist = [
{ name = "google-api-python-client", marker = "extra == 'backend'", specifier = "==2.86.0" },
{ name = "google-auth-httplib2", marker = "extra == 'backend'", specifier = "==0.1.0" },
{ name = "google-auth-oauthlib", marker = "extra == 'backend'", specifier = "==1.0.0" },
{ name = "google-cloud-aiplatform", specifier = "==1.121.0" },
{ name = "google-cloud-aiplatform", specifier = "==1.133.0" },
{ name = "google-genai", specifier = "==1.52.0" },
{ name = "hatchling", marker = "extra == 'dev'", specifier = "==1.28.0" },
{ name = "httpcore", marker = "extra == 'backend'", specifier = "==1.0.9" },
@@ -4655,7 +4662,7 @@ requires-dist = [
{ name = "numpy", marker = "extra == 'model-server'", specifier = "==2.4.1" },
{ name = "oauthlib", marker = "extra == 'backend'", specifier = "==3.2.2" },
{ name = "office365-rest-python-client", marker = "extra == 'backend'", specifier = "==2.6.2" },
{ name = "onyx-devtools", marker = "extra == 'dev'", specifier = "==0.6.2" },
{ name = "onyx-devtools", marker = "extra == 'dev'", specifier = "==0.6.3" },
{ name = "openai", specifier = "==2.14.0" },
{ name = "openapi-generator-cli", marker = "extra == 'dev'", specifier = "==7.17.0" },
{ name = "openinference-instrumentation", marker = "extra == 'backend'", specifier = "==0.1.42" },
@@ -4760,20 +4767,20 @@ requires-dist = [{ name = "onyx", extras = ["backend", "dev", "ee"], editable =
[[package]]
name = "onyx-devtools"
version = "0.6.2"
version = "0.6.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "fastapi" },
{ name = "openapi-generator-cli" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/20/d9f6089616044b0fb6e097cbae82122de24f3acd97820be4868d5c28ee3f/onyx_devtools-0.6.2-py3-none-any.whl", hash = "sha256:e48d14695d39d62ec3247a4c76ea56604bc5fb635af84c4ff3e9628bcc67b4fb", size = 3785941, upload-time = "2026-02-25T22:33:43.585Z" },
{ url = "https://files.pythonhosted.org/packages/d6/f5/f754a717f6b011050eb52ef09895cfa2f048f567f4aa3d5e0f773657dea4/onyx_devtools-0.6.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:505f9910a04868ab62d99bb483dc37c9f4ad94fa80e6ac0e6a10b86351c31420", size = 3832182, upload-time = "2026-02-25T22:33:43.283Z" },
{ url = "https://files.pythonhosted.org/packages/6a/35/6e653398c62078e87ebb0d03dc944df6691d92ca427c92867309d2d803b7/onyx_devtools-0.6.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:edec98e3acc0fa22cf9102c2070409ea7bcf99d7ded72bd8cb184ece8171c36a", size = 3576948, upload-time = "2026-02-25T22:33:42.962Z" },
{ url = "https://files.pythonhosted.org/packages/3c/97/cff707c5c3d2acd714365b1023f0100676abc99816a29558319e8ef01d5f/onyx_devtools-0.6.2-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:97abab61216866cdccd8c0a7e27af328776083756ce4fb57c4bd723030449e3b", size = 3439359, upload-time = "2026-02-25T22:33:44.684Z" },
{ url = "https://files.pythonhosted.org/packages/fc/98/3b768d18e5599178834b966b447075626d224e048d6eb264d89d19abacb4/onyx_devtools-0.6.2-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:681b038ab6f1457409d14b2490782c7a8014fc0f0f1b9cd69bb2b7199f99aef1", size = 3785959, upload-time = "2026-02-25T22:33:44.342Z" },
{ url = "https://files.pythonhosted.org/packages/d6/38/9b047f9e61c14ccf22b8f386c7a57da3965f90737453f3a577a97da45cdf/onyx_devtools-0.6.2-py3-none-win_amd64.whl", hash = "sha256:a2063be6be104b50a7538cf0d26c7f7ab9159d53327dd6f3e91db05d793c95f3", size = 3878776, upload-time = "2026-02-25T22:33:45.229Z" },
{ url = "https://files.pythonhosted.org/packages/9d/0f/742f644bae84f5f8f7b500094a2f58da3ff8027fc739944622577e2e2850/onyx_devtools-0.6.2-py3-none-win_arm64.whl", hash = "sha256:00fb90a49a15c932b5cacf818b1b4918e5b5c574bde243dc1828b57690dd5046", size = 3501112, upload-time = "2026-02-25T22:33:41.512Z" },
{ url = "https://files.pythonhosted.org/packages/84/e2/e7619722c3ccd18eb38100f776fb3dd6b4ae0fbbee09fca5af7c69a279b5/onyx_devtools-0.6.3-py3-none-any.whl", hash = "sha256:d3a5422945d9da12cafc185f64b39f6e727ee4cc92b37427deb7a38f9aad4966", size = 3945381, upload-time = "2026-03-05T20:39:25.896Z" },
{ url = "https://files.pythonhosted.org/packages/f2/09/513d2dabedc1e54ad4376830fc9b34a3d9c164bdbcdedfcdbb8b8154dc5a/onyx_devtools-0.6.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:efe300e9f3a2e7ae75f88a4f9e0a5c4c471478296cb1615b6a1f03d247582e13", size = 3978761, upload-time = "2026-03-05T20:39:28.822Z" },
{ url = "https://files.pythonhosted.org/packages/39/41/e757602a0de032d74ed01c7ee57f30e57728fb9cd4f922f50d2affda3889/onyx_devtools-0.6.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:594066eed3f917cfab5a8c7eac3d4a210df30259f2049f664787749709345e19", size = 3665378, upload-time = "2026-03-05T20:44:22.696Z" },
{ url = "https://files.pythonhosted.org/packages/33/1c/c93b65d0b32e202596a2647922a75c7011cb982f899ddfcfd171f792c58f/onyx_devtools-0.6.3-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:384ef66030b55c0fd68b3898782b5b4b868ff3de119569dfc8544e2ce534b98a", size = 3540890, upload-time = "2026-03-05T20:39:28.886Z" },
{ url = "https://files.pythonhosted.org/packages/f4/33/760eb656013f7f0cdff24570480d3dc4e52bbd8e6147ea1e8cf6fad7554f/onyx_devtools-0.6.3-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:82e218f3a49f64910c2c4c34d5dc12d1ea1520a27e0b0f6e4c0949ff9abaf0e1", size = 3945396, upload-time = "2026-03-05T20:39:34.323Z" },
{ url = "https://files.pythonhosted.org/packages/1a/eb/f54b3675c464df8a51194ff75afc97c2417659e3a209dc46948b47c28860/onyx_devtools-0.6.3-py3-none-win_amd64.whl", hash = "sha256:8af614ae7229290ef2417cb85270184a1e826ed9a3a34658da93851edb36df57", size = 4045936, upload-time = "2026-03-05T20:39:28.375Z" },
{ url = "https://files.pythonhosted.org/packages/04/b8/5bee38e748f3d4b8ec935766224db1bbc1214c91092e5822c080fccd9130/onyx_devtools-0.6.3-py3-none-win_arm64.whl", hash = "sha256:717589db4b42528d33ae96f8006ee6aad3555034dcfee724705b6576be6a6ec4", size = 3608268, upload-time = "2026-03-05T20:39:28.731Z" },
]
[[package]]

View File

@@ -7,7 +7,7 @@ import SidebarTab from "@/refresh-components/buttons/SidebarTab";
import { SvgSliders } from "@opal/icons";
import { useUser } from "@/providers/UserProvider";
import { useAuthType } from "@/lib/hooks";
import { AuthType } from "@/lib/constants";
import { Section } from "@/layouts/general-layouts";
interface LayoutProps {
children: React.ReactNode;
@@ -28,9 +28,12 @@ export default function Layout({ children }: LayoutProps) {
<SettingsLayouts.Header icon={SvgSliders} title="Settings" separator />
<SettingsLayouts.Body>
<div className="grid grid-cols-[auto_1fr]">
<Section flexDirection="row" alignItems="start" gap={1.5}>
{/* Left: Tab Navigation */}
<div className="flex flex-col px-2 w-[12.5rem]">
<div
data-testid="settings-left-tab-navigation"
className="flex flex-col px-2 min-w-[12.5rem]"
>
<SidebarTab
href="/app/settings/general"
selected={pathname === "/app/settings/general"}
@@ -60,8 +63,8 @@ export default function Layout({ children }: LayoutProps) {
</div>
{/* Right: Tab Content */}
<div className="px-4">{children}</div>
</div>
{children}
</Section>
</SettingsLayouts.Body>
</SettingsLayouts.Root>
</AppLayouts.Root>

View File

@@ -68,7 +68,9 @@ export default function CreateProjectModal({
<Button prominence="secondary" onClick={() => modal.toggle(false)}>
Cancel
</Button>
<Button onClick={handleSubmit}>Create Project</Button>
<Button disabled={!projectName.trim()} onClick={handleSubmit}>
Create Project
</Button>
</Modal.Footer>
</Modal.Content>
</Modal>

View File

@@ -0,0 +1,64 @@
"use client";
import { useEffect, useState } from "react";
/**
 * Browser and platform flags derived from `navigator.userAgent`.
 *
 * Flags are not all mutually exclusive: `isChrome` implies `isChromium`,
 * and a device may set both a browser flag and a platform flag
 * (e.g. `isSafari` + `isIOS`).
 */
export interface BrowserInfo {
  isSafari: boolean;
  isFirefox: boolean;
  isChrome: boolean;
  // True for Chrome itself or any UA advertising "Chromium".
  isChromium: boolean;
  isEdge: boolean;
  isOpera: boolean;
  // Platform flags (independent of the browser flags above).
  isIOS: boolean;
  isMac: boolean;
  isWindows: boolean;
}
// All-false baseline used for the initial state. Because detection only runs
// in a client-side effect, this is also what the server render and the first
// client render see — keeping SSR and hydration output identical.
const DEFAULT_BROWSER_INFO: BrowserInfo = {
  isSafari: false,
  isFirefox: false,
  isChrome: false,
  isChromium: false,
  isEdge: false,
  isOpera: false,
  isIOS: false,
  isMac: false,
  isWindows: false,
};
/**
 * React hook exposing browser/platform detection from the user agent.
 *
 * Returns the all-false defaults on the server and during the first client
 * render, then updates exactly once after mount with the sniffed values
 * (this avoids SSR/hydration mismatches, at the cost of one extra render).
 */
export default function useBrowserInfo(): BrowserInfo {
  const [browserInfo, setBrowserInfo] =
    useState<BrowserInfo>(DEFAULT_BROWSER_INFO);

  useEffect(() => {
    const ua = window.navigator.userAgent;

    // Exclusion order matters: Edge and Opera UAs also contain "Chrome",
    // and nearly every UA contains "Safari", so the more specific brands
    // must be ruled out before the generic ones.
    const isEdge = /Edg/i.test(ua);
    const isOpera = /OPR|Opera/i.test(ua);
    const isFirefox = /Firefox|FxiOS/i.test(ua);
    const isChrome = /Chrome|CriOS/i.test(ua) && !isEdge && !isOpera;
    const isChromium = /Chromium/i.test(ua) || isChrome;
    const isSafari =
      /Safari/i.test(ua) && !isChromium && !isEdge && !isOpera && !isFirefox;

    setBrowserInfo({
      isSafari,
      isFirefox,
      isChrome,
      isChromium,
      isEdge,
      isOpera,
      // NOTE(review): iPadOS 13+ reports a Macintosh UA by default, so such
      // iPads match isMac rather than isIOS here — confirm if that matters
      // to callers.
      isIOS: /iPhone|iPad|iPod/i.test(ua),
      isMac: /Macintosh|Mac OS X/i.test(ua),
      isWindows: /Win/i.test(ua),
    });
  }, []);

  return browserInfo;
}

View File

@@ -64,6 +64,7 @@ import { AppMode, useAppMode } from "@/providers/AppModeProvider";
import useAppFocus from "@/hooks/useAppFocus";
import { useQueryController } from "@/providers/QueryControllerProvider";
import { usePaidEnterpriseFeaturesEnabled } from "@/components/settings/usePaidEnterpriseFeaturesEnabled";
import useBrowserInfo from "@/hooks/useBrowserInfo";
/**
* App Header Component
@@ -527,8 +528,16 @@ function Root({ children, enableBackground }: AppRootProps) {
const { hasBackground, appBackgroundUrl } = useAppBackground();
const { resolvedTheme } = useTheme();
const appFocus = useAppFocus();
const { isSafari } = useBrowserInfo();
const isLightMode = resolvedTheme === "light";
const showBackground = hasBackground && enableBackground;
const horizontalBlurMask = `linear-gradient(
to right,
transparent 0%,
black max(0%, calc(50% - 25rem)),
black min(100%, calc(50% + 25rem)),
transparent 100%
)`;
return (
/* NOTE: Some elements, markdown tables in particular, refer to this `@container` in order to
@@ -568,25 +577,25 @@ function Root({ children, enableBackground }: AppRootProps) {
{showBackground && appFocus.isChat() && (
<>
<div className="absolute inset-0 backdrop-blur-[1px] pointer-events-none" />
<div
className="absolute z-0 inset-0 backdrop-blur-md transition-all duration-600 pointer-events-none"
style={{
maskImage: `linear-gradient(
to right,
transparent 0%,
black max(0%, calc(50% - 25rem)),
black min(100%, calc(50% + 25rem)),
transparent 100%
)`,
WebkitMaskImage: `linear-gradient(
to right,
transparent 0%,
black max(0%, calc(50% - 25rem)),
black min(100%, calc(50% + 25rem)),
transparent 100%
)`,
}}
/>
{isSafari ? (
<div
className="absolute z-0 inset-0 bg-cover bg-center bg-fixed pointer-events-none"
style={{
backgroundImage: `url(${appBackgroundUrl})`,
filter: "blur(16px)",
maskImage: horizontalBlurMask,
WebkitMaskImage: horizontalBlurMask,
}}
/>
) : (
<div
className="absolute z-0 inset-0 backdrop-blur-md transition-all duration-600 pointer-events-none"
style={{
maskImage: horizontalBlurMask,
WebkitMaskImage: horizontalBlurMask,
}}
/>
)}
</>
)}

View File

@@ -35,7 +35,7 @@ export const widthClassmap: Record<Length, string> = {
export const heightClassmap: Record<Length, string> = {
auto: "h-auto",
fit: "h-fit",
full: "h-full",
full: "h-full min-h-0",
};
/**

View File

@@ -515,10 +515,16 @@ const ModalBody = React.forwardRef<HTMLDivElement, ModalBodyProps>(
ref={ref}
className={cn(
twoTone && "bg-background-tint-01",
"h-full min-h-0 overflow-y-auto w-full"
"flex-auto min-h-0 overflow-y-auto w-full"
)}
>
<Section padding={1} gap={1} alignItems="start" {...props}>
<Section
height="auto"
padding={1}
gap={1}
alignItems="start"
{...props}
>
{children}
</Section>
</div>

View File

@@ -13,6 +13,7 @@ import { usePathname, useRouter } from "next/navigation";
import { SvgAlertTriangle, SvgLogOut } from "@opal/icons";
import { Content } from "@opal/layouts";
import { useCurrentUser } from "@/hooks/useCurrentUser";
import { getExtensionContext } from "@/lib/extension/utils";
export default function AppHealthBanner() {
const router = useRouter();
@@ -39,7 +40,18 @@ export default function AppHealthBanner() {
// Function to handle the "Log in" button click
function handleLogin() {
setShowLoggedOutModal(false);
router.push("/auth/login");
const { isExtension } = getExtensionContext();
if (isExtension) {
// In the Chrome extension, open login in a new tab so OAuth popups
// work correctly (the extension iframe has no navigable URL origin).
window.open(
window.location.origin + "/auth/login",
"_blank",
"noopener,noreferrer"
);
} else {
router.push("/auth/login");
}
}
// Function to set up expiration timeout

View File

@@ -188,50 +188,42 @@ export default function ShareChatSessionModal({
<Section
justifyContent="start"
alignItems="stretch"
gap={1}
height="auto"
gap={0.12}
>
<Section
justifyContent="start"
alignItems="stretch"
height="auto"
gap={0.12}
>
<PrivacyOption
icon={SvgLock}
title="Private"
description="Only you have access to this chat."
selected={selectedPrivacy === "private"}
onClick={() => setSelectedPrivacy("private")}
ariaLabel="share-modal-option-private"
/>
<PrivacyOption
icon={SvgUsers}
title="Your Organization"
description="Anyone in your organization can view this chat."
selected={selectedPrivacy === "public"}
onClick={() => setSelectedPrivacy("public")}
ariaLabel="share-modal-option-public"
/>
</Section>
{isShared && (
<div aria-label="share-modal-link-input">
<InputTypeIn
readOnly
value={shareLink}
rightSection={
<CopyIconButton
getCopyText={() => shareLink}
tooltip="Copy link"
size="sm"
aria-label="share-modal-copy-link"
/>
}
/>
</div>
)}
<PrivacyOption
icon={SvgLock}
title="Private"
description="Only you have access to this chat."
selected={selectedPrivacy === "private"}
onClick={() => setSelectedPrivacy("private")}
ariaLabel="share-modal-option-private"
/>
<PrivacyOption
icon={SvgUsers}
title="Your Organization"
description="Anyone in your organization can view this chat."
selected={selectedPrivacy === "public"}
onClick={() => setSelectedPrivacy("public")}
ariaLabel="share-modal-option-public"
/>
</Section>
{isShared && (
<InputTypeIn
aria-label="share-modal-link-input"
readOnly
value={shareLink}
rightSection={
<CopyIconButton
getCopyText={() => shareLink}
tooltip="Copy link"
size="sm"
aria-label="share-modal-copy-link"
/>
}
/>
)}
</Modal.Body>
<Modal.Footer>
{!isShared && (

View File

@@ -156,10 +156,7 @@ test.describe("Share Chat Session Modal", () => {
expect(patchBody).toEqual({ sharing_status: "public" });
const linkInput = dialog.locator('[aria-label="share-modal-link-input"]');
await expect(linkInput).toBeVisible({ timeout: 5000 });
const inputValue = await linkInput.locator("input").inputValue();
expect(inputValue).toContain("/app/shared/");
await expect(linkInput).toHaveValue(/\/app\/shared\//, { timeout: 5000 });
await expect(submitButton).toHaveText("Copy Link");
await expect(dialog.getByText("Chat shared")).toBeVisible();

View File

@@ -0,0 +1,36 @@
import { expect, test } from "@playwright/test";
import { THEMES, setThemeBeforeNavigation } from "@tests/e2e/utils/theme";
import { expectScreenshot } from "@tests/e2e/utils/visualRegression";
test.use({ storageState: "admin_auth.json" });
// Visual-regression coverage for every settings tab, in both color themes.
for (const theme of THEMES) {
  test.describe(`Settings pages (${theme} mode)`, () => {
    // Apply the theme via init script so the very first paint already uses it.
    test.beforeEach(async ({ page }) => {
      await setThemeBeforeNavigation(page, theme);
    });

    test("should screenshot each settings tab", async ({ page }) => {
      await page.goto("/app/settings");
      await page.waitForLoadState("networkidle");

      // Discover the tabs dynamically so new settings pages are covered
      // without touching this spec.
      const tabNav = page.getByTestId("settings-left-tab-navigation");
      const tabLinks = tabNav.locator("a");
      const tabCount = await tabLinks.count();
      expect(tabCount).toBeGreaterThan(0);

      for (let index = 0; index < tabCount; index++) {
        const tabLink = tabLinks.nth(index);

        // Derive a stable screenshot slug from the tab's route; fall back to
        // the positional index when the link has no href.
        const href = await tabLink.getAttribute("href");
        const slug = href ? href.replace("/app/settings/", "") : `tab-${index}`;

        await tabLink.click();
        await page.waitForLoadState("networkidle");
        await expectScreenshot(page, {
          name: `settings-${theme}-${slug}`,
        });
      }
    });
  });
}

View File

@@ -165,11 +165,6 @@ export async function expectScreenshot(
threshold,
} = options;
// Wait for any in-flight CSS animations / transitions to settle so that
// screenshots are deterministic (e.g. slide-in card animations on the
// onboarding flow).
await waitForAnimations(page);
// Merge default hide selectors with per-call selectors
const allHideSelectors = [...DEFAULT_HIDE_SELECTORS, ...hide];
@@ -178,7 +173,10 @@ export async function expectScreenshot(
if (allHideSelectors.length > 0) {
styleHandle = await page.addStyleTag({
content: allHideSelectors
.map((selector) => `${selector} { visibility: hidden !important; }`)
.map(
(selector) =>
`${selector} { visibility: hidden !important; opacity: 0 !important; pointer-events: none !important; }`
)
.join("\n"),
});
}
@@ -190,6 +188,11 @@ export async function expectScreenshot(
page.locator(selector)
);
// Wait for any in-flight CSS animations / transitions to settle so that
// screenshots are deterministic (e.g. slide-in card animations on the
// onboarding flow).
await waitForAnimations(page);
// Build the screenshot name array (Playwright expects string[])
const nameArg = name ? [name + ".png"] : undefined;
@@ -253,10 +256,6 @@ export async function expectElementScreenshot(
const page = locator.page();
// Wait for any in-flight CSS animations / transitions to settle so that
// element screenshots are deterministic (same reasoning as expectScreenshot).
await waitForAnimations(page);
// Merge default hide selectors with per-call selectors
const allHideSelectors = [...DEFAULT_HIDE_SELECTORS, ...hide];
@@ -265,7 +264,10 @@ export async function expectElementScreenshot(
if (allHideSelectors.length > 0) {
styleHandle = await page.addStyleTag({
content: allHideSelectors
.map((selector) => `${selector} { visibility: hidden !important; }`)
.map(
(selector) =>
`${selector} { visibility: hidden !important; opacity: 0 !important; pointer-events: none !important; }`
)
.join("\n"),
});
}
@@ -277,6 +279,10 @@ export async function expectElementScreenshot(
page.locator(selector)
);
// Wait for any in-flight CSS animations / transitions to settle so that
// element screenshots are deterministic (same reasoning as expectScreenshot).
await waitForAnimations(page);
// Build the screenshot name array (Playwright expects string[])
const nameArg = name ? [name + ".png"] : undefined;

View File

@@ -8,7 +8,7 @@
"name": "onyx-chat-widget",
"version": "1.0.0",
"dependencies": {
"dompurify": "^3.0.0",
"dompurify": "^3.3.2",
"lit": "^3.1.0",
"marked": "^12.0.0"
},
@@ -860,10 +860,13 @@
"license": "MIT"
},
"node_modules/dompurify": {
"version": "3.3.1",
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.1.tgz",
"integrity": "sha512-qkdCKzLNtrgPFP1Vo+98FRzJnBRGe4ffyCea9IwHB1fyxPOeNTHpLKYGd4Uk9xvNoH0ZoOjwZxNptyMwqrId1Q==",
"version": "3.3.2",
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.3.2.tgz",
"integrity": "sha512-6obghkliLdmKa56xdbLOpUZ43pAR6xFy1uOrxBaIDjT+yaRuuybLjGS9eVBoSR/UPU5fq3OXClEHLJNGvbxKpQ==",
"license": "(MPL-2.0 OR Apache-2.0)",
"engines": {
"node": ">=20"
},
"optionalDependencies": {
"@types/trusted-types": "^2.0.7"
}

View File

@@ -17,7 +17,7 @@
"type-check": "tsc --noEmit"
},
"dependencies": {
"dompurify": "^3.0.0",
"dompurify": "^3.3.2",
"lit": "^3.1.0",
"marked": "^12.0.0"
},