Compare commits

..

1 Commits

Author SHA1 Message Date
Raunak Bhagat
51b8ac96d6 fix(opal): move default type="button" from Button to InteractiveContainer
Button no longer sets type="button" by default — this is now handled
by InteractiveContainer, ensuring all interactive elements get the
correct default type regardless of which component wraps them.
2026-03-13 10:58:19 -07:00
24 changed files with 75 additions and 791 deletions

3
.github/CODEOWNERS vendored
View File

@@ -8,6 +8,3 @@
# Agent context files
/CLAUDE.md @Weves
/AGENTS.md @Weves
# Beta cherry-pick workflow owners
/.github/workflows/post-merge-beta-cherry-pick.yml @justin-tahara @jmelahman

View File

@@ -1,14 +1,11 @@
name: "Slack Notify"
description: "Sends a Slack notification for workflow events"
name: "Slack Notify on Failure"
description: "Sends a Slack notification when a workflow fails"
inputs:
webhook-url:
description: "Slack webhook URL (can also use SLACK_WEBHOOK_URL env var)"
required: false
details:
description: "Additional message body content"
required: false
failed-jobs:
description: "Deprecated alias for details"
description: "List of failed job names (newline-separated)"
required: false
title:
description: "Title for the notification"
@@ -24,7 +21,6 @@ runs:
shell: bash
env:
SLACK_WEBHOOK_URL: ${{ inputs.webhook-url }}
DETAILS: ${{ inputs.details }}
FAILED_JOBS: ${{ inputs.failed-jobs }}
TITLE: ${{ inputs.title }}
REF_NAME: ${{ inputs.ref-name }}
@@ -48,18 +44,6 @@ runs:
REF_NAME="$GITHUB_REF_NAME"
fi
if [ -z "$DETAILS" ]; then
DETAILS="$FAILED_JOBS"
fi
normalize_multiline() {
printf '%s' "$1" | awk 'BEGIN { ORS=""; first=1 } { if (!first) printf "\\n"; printf "%s", $0; first=0 }'
}
DETAILS="$(normalize_multiline "$DETAILS")"
REF_NAME="$(normalize_multiline "$REF_NAME")"
TITLE="$(normalize_multiline "$TITLE")"
# Escape JSON special characters
escape_json() {
local input="$1"
@@ -75,12 +59,12 @@ runs:
}
REF_NAME_ESC=$(escape_json "$REF_NAME")
DETAILS_ESC=$(escape_json "$DETAILS")
FAILED_JOBS_ESC=$(escape_json "$FAILED_JOBS")
WORKFLOW_URL_ESC=$(escape_json "$WORKFLOW_URL")
TITLE_ESC=$(escape_json "$TITLE")
# Build JSON payload piece by piece
# Note: DETAILS_ESC already contains \n sequences that should remain as \n in JSON
# Note: FAILED_JOBS_ESC already contains \n sequences that should remain as \n in JSON
PAYLOAD="{"
PAYLOAD="${PAYLOAD}\"text\":\"${TITLE_ESC}\","
PAYLOAD="${PAYLOAD}\"blocks\":[{"
@@ -95,10 +79,10 @@ runs:
PAYLOAD="${PAYLOAD}{\"type\":\"mrkdwn\",\"text\":\"*Run ID:*\\n#${RUN_NUMBER}\"}"
PAYLOAD="${PAYLOAD}]"
PAYLOAD="${PAYLOAD}}"
if [ -n "$DETAILS" ]; then
if [ -n "$FAILED_JOBS" ]; then
PAYLOAD="${PAYLOAD},{"
PAYLOAD="${PAYLOAD}\"type\":\"section\","
PAYLOAD="${PAYLOAD}\"text\":{\"type\":\"mrkdwn\",\"text\":\"${DETAILS_ESC}\"}"
PAYLOAD="${PAYLOAD}\"text\":{\"type\":\"mrkdwn\",\"text\":\"*Failed Jobs:*\\n${FAILED_JOBS_ESC}\"}"
PAYLOAD="${PAYLOAD}}"
fi
PAYLOAD="${PAYLOAD},{"
@@ -115,3 +99,4 @@ runs:
curl -X POST -H 'Content-type: application/json' \
--data "$PAYLOAD" \
"$SLACK_WEBHOOK_URL"

View File

@@ -37,27 +37,10 @@ jobs:
PR_BODY: ${{ github.event.pull_request.body }}
MERGE_COMMIT_SHA: ${{ github.event.pull_request.merge_commit_sha }}
MERGED_BY: ${{ github.event.pull_request.merged_by.login }}
# Explicit merger allowlist used because pull_request_target runs with
# the default GITHUB_TOKEN, which cannot reliably read org/team
# membership for this repository context.
ALLOWED_MERGERS: |
acaprau
bo-onyx
danelegend
duo-onyx
evan-onyx
jessicasingh7
jmelahman
joachim-danswer
justin-tahara
nmgarza5
raunakab
rohoswagger
subash-mohan
trial2onyx
wenxi-onyx
weves
yuhongsun96
# GitHub team slug authorized to trigger cherry-picks (e.g. "core-eng").
# For private/secret teams the GITHUB_TOKEN may need org:read scope;
# visible teams work with the default token.
ALLOWED_TEAM: "onyx-core-team"
run: |
echo "pr_number=${PR_NUMBER}" >> "$GITHUB_OUTPUT"
echo "merged_by=${MERGED_BY}" >> "$GITHUB_OUTPUT"
@@ -81,11 +64,19 @@ jobs:
echo "merge_commit_sha=${MERGE_COMMIT_SHA}" >> "$GITHUB_OUTPUT"
normalized_merged_by="$(printf '%s' "${MERGED_BY}" | tr '[:upper:]' '[:lower:]')"
normalized_allowed_mergers="$(printf '%s\n' "${ALLOWED_MERGERS}" | tr '[:upper:]' '[:lower:]')"
if ! printf '%s\n' "${normalized_allowed_mergers}" | grep -Fxq "${normalized_merged_by}"; then
echo "gate_error=not-allowed-merger" >> "$GITHUB_OUTPUT"
echo "::error::${MERGED_BY} is not in the explicit cherry-pick merger allowlist. Failing cherry-pick gate."
member_state_file="$(mktemp)"
member_err_file="$(mktemp)"
if ! gh api "orgs/${GITHUB_REPOSITORY_OWNER}/teams/${ALLOWED_TEAM}/memberships/${MERGED_BY}" --jq '.state' >"${member_state_file}" 2>"${member_err_file}"; then
api_err="$(tr '\n' ' ' < "${member_err_file}" | sed 's/[[:space:]]\+/ /g' | cut -c1-300)"
echo "gate_error=team-api-error" >> "$GITHUB_OUTPUT"
echo "::error::Team membership API call failed for ${MERGED_BY} in ${ALLOWED_TEAM}: ${api_err}"
exit 1
fi
member_state="$(cat "${member_state_file}")"
if [ "${member_state}" != "active" ]; then
echo "gate_error=not-team-member" >> "$GITHUB_OUTPUT"
echo "::error::${MERGED_BY} is not an active member of team ${ALLOWED_TEAM} (state: ${member_state}). Failing cherry-pick gate."
exit 1
fi
@@ -99,7 +90,6 @@ jobs:
contents: write
pull-requests: write
outputs:
cherry_pick_pr_url: ${{ steps.run_cherry_pick.outputs.pr_url }}
cherry_pick_reason: ${{ steps.run_cherry_pick.outputs.reason }}
cherry_pick_details: ${{ steps.run_cherry_pick.outputs.details }}
runs-on: ubuntu-latest
@@ -147,11 +137,7 @@ jobs:
fi
if [ "${exit_code}" -eq 0 ]; then
pr_url="$(sed -n 's/^.*PR created successfully: \(https:\/\/github\.com\/[^[:space:]]\+\/pull\/[0-9]\+\).*$/\1/p' "$output_file" | tail -n 1)"
echo "status=success" >> "$GITHUB_OUTPUT"
if [ -n "${pr_url}" ]; then
echo "pr_url=${pr_url}" >> "$GITHUB_OUTPUT"
fi
exit 0
fi
@@ -177,54 +163,6 @@ jobs:
echo "::error::Automated cherry-pick failed (${CHERRY_PICK_REASON})."
exit 1
notify-slack-on-cherry-pick-success:
needs:
- resolve-cherry-pick-request
- cherry-pick-to-latest-release
if: needs.resolve-cherry-pick-request.outputs.should_cherrypick == 'true' && needs.resolve-cherry-pick-request.result == 'success' && needs.cherry-pick-to-latest-release.result == 'success'
runs-on: ubuntu-slim
timeout-minutes: 10
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Fail if Slack webhook secret is missing
env:
CHERRY_PICK_PRS_WEBHOOK: ${{ secrets.CHERRY_PICK_PRS_WEBHOOK }}
run: |
if [ -z "${CHERRY_PICK_PRS_WEBHOOK}" ]; then
echo "::error::CHERRY_PICK_PRS_WEBHOOK is not configured."
exit 1
fi
- name: Build cherry-pick success summary
id: success-summary
env:
SOURCE_PR_NUMBER: ${{ needs.resolve-cherry-pick-request.outputs.pr_number }}
MERGE_COMMIT_SHA: ${{ needs.resolve-cherry-pick-request.outputs.merge_commit_sha }}
CHERRY_PICK_PR_URL: ${{ needs.cherry-pick-to-latest-release.outputs.cherry_pick_pr_url }}
run: |
source_pr_url="https://github.com/${GITHUB_REPOSITORY}/pull/${SOURCE_PR_NUMBER}"
details="*Cherry-pick PR opened successfully.*\\n• source PR: ${source_pr_url}"
if [ -n "${CHERRY_PICK_PR_URL}" ]; then
details="${details}\\n• cherry-pick PR: ${CHERRY_PICK_PR_URL}"
fi
if [ -n "${MERGE_COMMIT_SHA}" ]; then
details="${details}\\n• merge SHA: ${MERGE_COMMIT_SHA}"
fi
echo "details=${details}" >> "$GITHUB_OUTPUT"
- name: Notify #cherry-pick-prs about cherry-pick success
uses: ./.github/actions/slack-notify
with:
webhook-url: ${{ secrets.CHERRY_PICK_PRS_WEBHOOK }}
details: ${{ steps.success-summary.outputs.details }}
title: "✅ Automated Cherry-Pick PR Opened"
ref-name: ${{ github.event.pull_request.base.ref }}
notify-slack-on-cherry-pick-failure:
needs:
- resolve-cherry-pick-request
@@ -261,8 +199,10 @@ jobs:
reason_text="cherry-pick command failed"
if [ "${GATE_ERROR}" = "missing-merge-commit-sha" ]; then
reason_text="requested cherry-pick but merge commit SHA was missing"
elif [ "${GATE_ERROR}" = "not-allowed-merger" ]; then
reason_text="merger is not in the explicit cherry-pick allowlist"
elif [ "${GATE_ERROR}" = "team-api-error" ]; then
reason_text="team membership lookup failed while validating cherry-pick permissions"
elif [ "${GATE_ERROR}" = "not-team-member" ]; then
reason_text="merger is not an active member of the allowed team"
elif [ "${CHERRY_PICK_REASON}" = "output-capture-failed" ]; then
reason_text="failed to capture cherry-pick output for classification"
elif [ "${CHERRY_PICK_REASON}" = "merge-conflict" ]; then
@@ -289,6 +229,6 @@ jobs:
uses: ./.github/actions/slack-notify
with:
webhook-url: ${{ secrets.CHERRY_PICK_PRS_WEBHOOK }}
details: ${{ steps.failure-summary.outputs.jobs }}
failed-jobs: ${{ steps.failure-summary.outputs.jobs }}
title: "🚨 Automated Cherry-Pick Failed"
ref-name: ${{ github.event.pull_request.base.ref }}

View File

@@ -1,104 +0,0 @@
"""add_hook_and_hook_execution_log_tables
Revision ID: 689433b0d8de
Revises: 93a2e195e25c
Create Date: 2026-03-13 11:25:06.547474
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID as PGUUID
# revision identifiers, used by Alembic.
revision = "689433b0d8de"
down_revision = "93a2e195e25c"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"hook",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(), nullable=False),
sa.Column(
"hook_point",
sa.Enum("document_ingestion", "query_processing", native_enum=False),
nullable=False,
),
sa.Column("endpoint_url", sa.Text(), nullable=True),
sa.Column("api_key", sa.LargeBinary(), nullable=True),
sa.Column("is_reachable", sa.Boolean(), nullable=True),
sa.Column(
"fail_strategy",
sa.Enum("hard", "soft", native_enum=False),
nullable=False,
server_default="hard",
),
sa.Column("timeout_seconds", sa.Float(), nullable=False, server_default="30.0"),
sa.Column(
"is_active", sa.Boolean(), nullable=False, server_default=sa.text("false")
),
sa.Column(
"deleted", sa.Boolean(), nullable=False, server_default=sa.text("false")
),
sa.Column("creator_id", PGUUID(as_uuid=True), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(["creator_id"], ["user.id"], ondelete="SET NULL"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index(
"ix_hook_one_active_per_point",
"hook",
["hook_point"],
unique=True,
postgresql_where=sa.text("is_active = true AND deleted = false"),
)
op.create_table(
"hook_execution_log",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("hook_id", sa.Integer(), nullable=False),
sa.Column(
"hook_point",
sa.Enum("document_ingestion", "query_processing", native_enum=False),
nullable=False,
),
sa.Column("error_message", sa.Text(), nullable=True),
sa.Column("status_code", sa.Integer(), nullable=True),
sa.Column("duration_ms", sa.Integer(), nullable=True),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(["hook_id"], ["hook.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
)
op.create_index("ix_hook_execution_log_hook_id", "hook_execution_log", ["hook_id"])
op.create_index(
"ix_hook_execution_log_created_at", "hook_execution_log", ["created_at"]
)
def downgrade() -> None:
op.drop_index("ix_hook_execution_log_created_at", table_name="hook_execution_log")
op.drop_index("ix_hook_execution_log_hook_id", table_name="hook_execution_log")
op.drop_table("hook_execution_log")
op.drop_index("ix_hook_one_active_per_point", table_name="hook")
op.drop_table("hook")

View File

@@ -282,23 +282,6 @@ def _log_and_raise_for_status(response: requests.Response) -> None:
raise
GRAPH_INVALID_REQUEST_CODE = "invalidRequest"
def _is_graph_invalid_request(response: requests.Response) -> bool:
"""Return True if the response body is the generic Graph API
``{"error": {"code": "invalidRequest", "message": "Invalid request"}}``
shape. This particular error has no actionable inner error code and is
returned by the site-pages endpoint when a page has a corrupt canvas layout
(e.g. duplicate web-part IDs — see SharePoint/sp-dev-docs#8822)."""
try:
body = response.json()
except Exception:
return False
error = body.get("error", {})
return error.get("code") == GRAPH_INVALID_REQUEST_CODE
def load_certificate_from_pfx(pfx_data: bytes, password: str) -> CertificateData | None:
"""Load certificate from .pfx file for MSAL authentication"""
try:
@@ -1269,35 +1252,19 @@ class SharepointConnector(
site.execute_query()
site_id = site.id
site_pages_base = (
f"{self.graph_api_base}/sites/{site_id}/pages/microsoft.graph.sitePage"
page_url: str | None = (
f"{self.graph_api_base}/sites/{site_id}" f"/pages/microsoft.graph.sitePage"
)
page_url: str | None = site_pages_base
params: dict[str, str] | None = {"$expand": "canvasLayout"}
total_yielded = 0
yielded_ids: set[str] = set()
while page_url:
try:
data = self._graph_api_get_json(page_url, params)
except HTTPError as e:
if e.response is not None and e.response.status_code == 404:
if e.response.status_code == 404:
logger.warning(f"Site page not found: {page_url}")
break
if (
e.response is not None
and e.response.status_code == 400
and _is_graph_invalid_request(e.response)
):
logger.warning(
f"$expand=canvasLayout on the LIST endpoint returned 400 "
f"for site {site_descriptor.url}. Falling back to "
f"per-page expansion."
)
yield from self._fetch_site_pages_individually(
site_pages_base, start, end, skip_ids=yielded_ids
)
return
raise
params = None # nextLink already embeds query params
@@ -1306,98 +1273,12 @@ class SharepointConnector(
if not _site_page_in_time_window(page, start, end):
continue
total_yielded += 1
page_id = page.get("id")
if page_id:
yielded_ids.add(page_id)
yield page
page_url = data.get("@odata.nextLink")
logger.debug(f"Yielded {total_yielded} site pages for {site_descriptor.url}")
def _fetch_site_pages_individually(
self,
site_pages_base: str,
start: datetime | None = None,
end: datetime | None = None,
skip_ids: set[str] | None = None,
) -> Generator[dict[str, Any], None, None]:
"""Fallback for _fetch_site_pages: list pages without $expand, then
expand canvasLayout on each page individually.
The Graph API's LIST endpoint can return 400 when $expand=canvasLayout
is used and *any* page in the site has a corrupt canvas layout (e.g.
duplicate web part IDs — see SharePoint/sp-dev-docs#8822). Since the
LIST expansion is all-or-nothing, a single bad page poisons the entire
response. This method works around it by fetching metadata first, then
expanding each page individually so only the broken page loses its
canvas content.
``skip_ids`` contains page IDs already yielded by the caller before the
fallback was triggered, preventing duplicates.
"""
page_url: str | None = site_pages_base
total_yielded = 0
_skip_ids = skip_ids or set()
while page_url:
try:
data = self._graph_api_get_json(page_url)
except HTTPError as e:
if e.response is not None and e.response.status_code == 404:
break
raise
for page in data.get("value", []):
if not _site_page_in_time_window(page, start, end):
continue
page_id = page.get("id")
if page_id and page_id in _skip_ids:
continue
if not page_id:
total_yielded += 1
yield page
continue
expanded = self._try_expand_single_page(site_pages_base, page_id, page)
total_yielded += 1
yield expanded
page_url = data.get("@odata.nextLink")
logger.debug(
f"Yielded {total_yielded} site pages (per-page expansion fallback)"
)
def _try_expand_single_page(
self,
site_pages_base: str,
page_id: str,
fallback_page: dict[str, Any],
) -> dict[str, Any]:
"""Try to GET a single page with $expand=canvasLayout. On 400, return
the metadata-only fallback so the page is still indexed (without canvas
content)."""
pages_collection = site_pages_base.removesuffix("/microsoft.graph.sitePage")
single_url = f"{pages_collection}/{page_id}/microsoft.graph.sitePage"
try:
return self._graph_api_get_json(single_url, {"$expand": "canvasLayout"})
except HTTPError as e:
if (
e.response is not None
and e.response.status_code == 400
and _is_graph_invalid_request(e.response)
):
page_name = fallback_page.get("name", page_id)
logger.warning(
f"$expand=canvasLayout failed for page '{page_name}' "
f"({page_id}). Indexing metadata only."
)
return fallback_page
raise
def _acquire_token(self) -> dict[str, Any]:
"""
Acquire token via MSAL

View File

@@ -304,13 +304,3 @@ class LLMModelFlowType(str, PyEnum):
CHAT = "chat"
VISION = "vision"
CONTEXTUAL_RAG = "contextual_rag"
class HookPoint(str, PyEnum):
DOCUMENT_INGESTION = "document_ingestion"
QUERY_PROCESSING = "query_processing"
class HookFailStrategy(str, PyEnum):
HARD = "hard" # exception propagates, pipeline aborts
SOFT = "soft" # log error, return original input, pipeline continues

View File

@@ -64,8 +64,6 @@ from onyx.db.enums import (
BuildSessionStatus,
EmbeddingPrecision,
HierarchyNodeType,
HookFailStrategy,
HookPoint,
IndexingMode,
OpenSearchDocumentMigrationStatus,
OpenSearchTenantMigrationStatus,
@@ -5174,94 +5172,3 @@ class CacheStore(Base):
expires_at: Mapped[datetime.datetime | None] = mapped_column(
DateTime(timezone=True), nullable=True
)
class Hook(Base):
"""Pairs a HookPoint with a customer-provided API endpoint.
At most one Hook per HookPoint can be active at a time, enforced by a
partial unique index on (hook_point) where is_active=true AND deleted=false.
"""
__tablename__ = "hook"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
name: Mapped[str] = mapped_column(String, nullable=False)
hook_point: Mapped[HookPoint] = mapped_column(
Enum(HookPoint, native_enum=False), nullable=False
)
endpoint_url: Mapped[str | None] = mapped_column(Text, nullable=True)
api_key: Mapped[SensitiveValue[str] | None] = mapped_column(
EncryptedString(), nullable=True
)
is_reachable: Mapped[bool | None] = mapped_column(
Boolean, nullable=True, default=None
) # null = never validated, true = last check passed, false = last check failed
fail_strategy: Mapped[HookFailStrategy] = mapped_column(
Enum(HookFailStrategy, native_enum=False),
nullable=False,
default=HookFailStrategy.HARD,
server_default=HookFailStrategy.HARD.value,
)
timeout_seconds: Mapped[float] = mapped_column(
Float, nullable=False, default=30.0, server_default="30.0"
)
is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
deleted: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
creator_id: Mapped[UUID | None] = mapped_column(
PGUUID(as_uuid=True),
ForeignKey("user.id", ondelete="SET NULL"),
nullable=True,
)
created_at: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), server_default=func.now(), nullable=False
)
updated_at: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True),
server_default=func.now(),
onupdate=func.now(),
nullable=False,
)
creator: Mapped["User | None"] = relationship("User", foreign_keys=[creator_id])
execution_logs: Mapped[list["HookExecutionLog"]] = relationship(
"HookExecutionLog", back_populates="hook", cascade="all, delete-orphan"
)
__table_args__ = (
Index(
"ix_hook_one_active_per_point",
"hook_point",
unique=True,
postgresql_where=(is_active == True) & (deleted == False), # noqa: E712
),
)
class HookExecutionLog(Base):
"""Records each failed hook execution for health monitoring and debugging.
Only failures are logged. Retention: rows older than 30 days are deleted
by a nightly Celery task.
"""
__tablename__ = "hook_execution_log"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
hook_id: Mapped[int] = mapped_column(
Integer,
ForeignKey("hook.id", ondelete="CASCADE"),
nullable=False,
index=True,
)
hook_point: Mapped[HookPoint] = mapped_column(
Enum(HookPoint, native_enum=False), nullable=False
) # denormalized for query convenience
error_message: Mapped[str | None] = mapped_column(Text, nullable=True)
status_code: Mapped[int | None] = mapped_column(Integer, nullable=True)
duration_ms: Mapped[int | None] = mapped_column(Integer, nullable=True)
created_at: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), server_default=func.now(), nullable=False, index=True
)
hook: Mapped["Hook"] = relationship("Hook", back_populates="execution_logs")

View File

@@ -33,10 +33,6 @@ logger = setup_logger()
admin_router = APIRouter(prefix="/admin/voice")
VOICE_PROVIDER_VALIDATION_FAILURE_MESSAGE = (
"Connection test failed. Please verify your API key and settings."
)
def _validate_voice_api_base(provider_type: str, api_base: str | None) -> str | None:
"""Validate and normalize provider api_base / target URI."""
@@ -140,7 +136,7 @@ async def upsert_voice_provider_endpoint(
logger.error(f"Voice provider credential validation failed on save: {e}")
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
VOICE_PROVIDER_VALIDATION_FAILURE_MESSAGE,
str(e),
) from e
db_session.commit()
@@ -267,7 +263,7 @@ async def test_voice_provider(
logger.error(f"Voice provider connection test failed: {e}")
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
VOICE_PROVIDER_VALIDATION_FAILURE_MESSAGE,
str(e),
) from e
logger.info(f"Voice provider test succeeded for {request.provider_type}.")

View File

@@ -1,43 +1,33 @@
"""Unit tests for SharepointConnector._fetch_site_pages error handling.
"""Unit tests for SharepointConnector._fetch_site_pages 404 handling.
Covers 404 handling (classic sites / no modern pages) and 400
canvasLayout fallback (corrupt pages causing $expand=canvasLayout to
fail on the LIST endpoint).
The Graph Pages API returns 404 for classic sites or sites without
modern pages enabled. _fetch_site_pages should gracefully skip these
rather than crashing the entire indexing run.
"""
from __future__ import annotations
import json
from typing import Any
import pytest
from requests import Response
from requests.exceptions import HTTPError
from onyx.connectors.sharepoint.connector import GRAPH_INVALID_REQUEST_CODE
from onyx.connectors.sharepoint.connector import SharepointConnector
from onyx.connectors.sharepoint.connector import SiteDescriptor
SITE_URL = "https://tenant.sharepoint.com/sites/ClassicSite"
FAKE_SITE_ID = "tenant.sharepoint.com,abc123,def456"
PAGES_COLLECTION = f"https://graph.microsoft.com/v1.0/sites/{FAKE_SITE_ID}/pages"
SITE_PAGES_BASE = f"{PAGES_COLLECTION}/microsoft.graph.sitePage"
def _site_descriptor() -> SiteDescriptor:
return SiteDescriptor(url=SITE_URL, drive_name=None, folder_path=None)
def _make_http_error(
status_code: int,
error_code: str = "itemNotFound",
message: str = "Item not found",
) -> HTTPError:
body = {"error": {"code": error_code, "message": message}}
def _make_http_error(status_code: int) -> HTTPError:
response = Response()
response.status_code = status_code
response._content = json.dumps(body).encode()
response.headers["Content-Type"] = "application/json"
response._content = b'{"error":{"code":"itemNotFound","message":"Item not found"}}'
return HTTPError(response=response)
@@ -187,139 +177,3 @@ class TestFetchSitePages404:
pages = list(connector._fetch_site_pages(_site_descriptor()))
assert len(pages) == 1
assert pages[0]["id"] == "page-1"
class TestFetchSitePages400Fallback:
"""When $expand=canvasLayout on the LIST endpoint returns 400
invalidRequest, _fetch_site_pages should fall back to listing
without expansion, then expanding each page individually."""
GOOD_PAGE: dict[str, Any] = {
"id": "good-1",
"name": "Good.aspx",
"title": "Good Page",
"lastModifiedDateTime": "2025-06-01T00:00:00Z",
}
BAD_PAGE: dict[str, Any] = {
"id": "bad-1",
"name": "Bad.aspx",
"title": "Bad Page",
"lastModifiedDateTime": "2025-06-01T00:00:00Z",
}
GOOD_PAGE_EXPANDED: dict[str, Any] = {
**GOOD_PAGE,
"canvasLayout": {"horizontalSections": []},
}
def test_fallback_expands_good_pages_individually(
self, monkeypatch: pytest.MonkeyPatch
) -> None:
"""On 400 from the LIST expand, the connector should list without
expand, then GET each page individually with $expand=canvasLayout."""
connector = _setup_connector(monkeypatch)
good_page = self.GOOD_PAGE
bad_page = self.BAD_PAGE
good_page_expanded = self.GOOD_PAGE_EXPANDED
def fake_get_json(
self: SharepointConnector, # noqa: ARG001
url: str,
params: dict[str, str] | None = None,
) -> dict[str, Any]:
if url == SITE_PAGES_BASE and params == {"$expand": "canvasLayout"}:
raise _make_http_error(
400, GRAPH_INVALID_REQUEST_CODE, "Invalid request"
)
if url == SITE_PAGES_BASE and params is None:
return {"value": [good_page, bad_page]}
expand_params = {"$expand": "canvasLayout"}
if url == f"{PAGES_COLLECTION}/good-1/microsoft.graph.sitePage":
assert params == expand_params, f"Expected $expand params, got {params}"
return good_page_expanded
if url == f"{PAGES_COLLECTION}/bad-1/microsoft.graph.sitePage":
assert params == expand_params, f"Expected $expand params, got {params}"
raise _make_http_error(
400, GRAPH_INVALID_REQUEST_CODE, "Invalid request"
)
raise AssertionError(f"Unexpected call: {url} {params}")
_patch_graph_api_get_json(monkeypatch, fake_get_json)
pages = list(connector._fetch_site_pages(_site_descriptor()))
assert len(pages) == 2
assert pages[0].get("canvasLayout") is not None
assert pages[1].get("canvasLayout") is None
assert pages[1]["id"] == "bad-1"
def test_mid_pagination_400_does_not_duplicate(
self, monkeypatch: pytest.MonkeyPatch
) -> None:
"""If the first paginated batch succeeds but a later nextLink
returns 400, pages from the first batch must not be re-yielded
by the fallback."""
connector = _setup_connector(monkeypatch)
good_page = self.GOOD_PAGE
good_page_expanded = self.GOOD_PAGE_EXPANDED
bad_page = self.BAD_PAGE
second_page = {
"id": "page-2",
"name": "Second.aspx",
"title": "Second Page",
"lastModifiedDateTime": "2025-06-01T00:00:00Z",
}
next_link = "https://graph.microsoft.com/v1.0/next-page-link"
def fake_get_json(
self: SharepointConnector, # noqa: ARG001
url: str,
params: dict[str, str] | None = None,
) -> dict[str, Any]:
if url == SITE_PAGES_BASE and params == {"$expand": "canvasLayout"}:
return {
"value": [good_page],
"@odata.nextLink": next_link,
}
if url == next_link:
raise _make_http_error(
400, GRAPH_INVALID_REQUEST_CODE, "Invalid request"
)
if url == SITE_PAGES_BASE and params is None:
return {"value": [good_page, bad_page, second_page]}
expand_params = {"$expand": "canvasLayout"}
if url == f"{PAGES_COLLECTION}/good-1/microsoft.graph.sitePage":
assert params == expand_params, f"Expected $expand params, got {params}"
return good_page_expanded
if url == f"{PAGES_COLLECTION}/bad-1/microsoft.graph.sitePage":
assert params == expand_params, f"Expected $expand params, got {params}"
raise _make_http_error(
400, GRAPH_INVALID_REQUEST_CODE, "Invalid request"
)
if url == f"{PAGES_COLLECTION}/page-2/microsoft.graph.sitePage":
assert params == expand_params, f"Expected $expand params, got {params}"
return {**second_page, "canvasLayout": {"horizontalSections": []}}
raise AssertionError(f"Unexpected call: {url} {params}")
_patch_graph_api_get_json(monkeypatch, fake_get_json)
pages = list(connector._fetch_site_pages(_site_descriptor()))
ids = [p["id"] for p in pages]
assert ids == ["good-1", "bad-1", "page-2"]
def test_non_invalid_request_400_still_raises(
self, monkeypatch: pytest.MonkeyPatch
) -> None:
"""A 400 with a different error code (not invalidRequest) should
propagate, not trigger the fallback."""
connector = _setup_connector(monkeypatch)
def fake_get_json(
self: SharepointConnector, # noqa: ARG001
url: str, # noqa: ARG001
params: dict[str, str] | None = None, # noqa: ARG001
) -> dict[str, Any]:
raise _make_http_error(400, "badRequest", "Something else went wrong")
_patch_graph_api_get_json(monkeypatch, fake_get_json)
with pytest.raises(HTTPError):
list(connector._fetch_site_pages(_site_descriptor()))

View File

@@ -55,7 +55,7 @@ function Button({
children,
rightIcon: RightIcon,
size = "lg",
type = "button",
type,
width,
tooltip,
tooltipSide = "top",

View File

@@ -97,7 +97,7 @@ interface InteractiveContainerProps
*/
function InteractiveContainer({
ref,
type,
type = "button",
border,
roundingVariant = "default",
heightVariant = "lg",

6
web/package-lock.json generated
View File

@@ -53,7 +53,7 @@
"formik": "^2.2.9",
"highlight.js": "^11.11.1",
"js-cookie": "^3.0.5",
"katex": "^0.16.38",
"katex": "^0.16.17",
"linguist-languages": "^9.3.1",
"lodash": "^4.17.23",
"lowlight": "^3.3.0",
@@ -12794,9 +12794,7 @@
}
},
"node_modules/katex": {
"version": "0.16.38",
"resolved": "https://registry.npmjs.org/katex/-/katex-0.16.38.tgz",
"integrity": "sha512-cjHooZUmIAUmDsHBN+1n8LaZdpmbj03LtYeYPyuYB7OuloiaeaV6N4LcfjcnHVzGWjVQmKrxxTrpDcmSzEZQwQ==",
"version": "0.16.25",
"funding": [
"https://opencollective.com/katex",
"https://github.com/sponsors/katex"

View File

@@ -71,7 +71,7 @@
"formik": "^2.2.9",
"highlight.js": "^11.11.1",
"js-cookie": "^3.0.5",
"katex": "^0.16.38",
"katex": "^0.16.17",
"linguist-languages": "^9.3.1",
"lodash": "^4.17.23",
"lowlight": "^3.3.0",

View File

@@ -175,9 +175,6 @@ const AgentMessage = React.memo(function AgentMessage({
// Streaming TTS integration
const { streamTTS, resetTTS, stopTTS } = useVoiceMode();
const ttsCompletedRef = useRef(false);
const hasStreamedIncompleteRef = useRef(false);
const hasObservedPacketGrowthRef = useRef(false);
const lastSeenPacketCountRef = useRef(packetCount ?? rawPackets.length);
const streamTTSRef = useRef(streamTTS);
// Keep streamTTS ref in sync without triggering effect re-runs
@@ -189,12 +186,6 @@ const AgentMessage = React.memo(function AgentMessage({
// Uses ref for streamTTS to avoid re-triggering when its identity changes
// Note: packetCount is used instead of rawPackets because the array is mutated in place
useLayoutEffect(() => {
const effectivePacketCount = packetCount ?? rawPackets.length;
if (effectivePacketCount > lastSeenPacketCountRef.current) {
hasObservedPacketGrowthRef.current = true;
}
lastSeenPacketCountRef.current = effectivePacketCount;
// Skip if we've already finished TTS for this message
if (ttsCompletedRef.current) return;
@@ -205,22 +196,13 @@ const AgentMessage = React.memo(function AgentMessage({
}
const textContent = removeThinkingTokens(getTextContent(rawPackets));
if (!(typeof textContent === "string" && textContent.length > 0)) return;
if (typeof textContent === "string" && textContent.length > 0) {
streamTTSRef.current(textContent, isComplete, nodeId);
// Only autoplay messages that were observed streaming in this lifecycle.
// Prevents historical, already-complete chats from re-triggering read-aloud on mount.
if (!isComplete) {
if (!hasObservedPacketGrowthRef.current) {
return;
// Mark as completed once the message is done streaming
if (isComplete) {
ttsCompletedRef.current = true;
}
hasStreamedIncompleteRef.current = true;
streamTTSRef.current(textContent, false, nodeId);
return;
}
if (hasStreamedIncompleteRef.current) {
streamTTSRef.current(textContent, true, nodeId);
ttsCompletedRef.current = true;
}
}, [packetCount, isComplete, rawPackets, nodeId, stopPacketSeen, stopReason]); // packetCount triggers on new packets since rawPackets is mutated in place
@@ -234,9 +216,6 @@ const AgentMessage = React.memo(function AgentMessage({
// Reset TTS completed flag when nodeId changes (new message)
useEffect(() => {
ttsCompletedRef.current = false;
hasStreamedIncompleteRef.current = false;
hasObservedPacketGrowthRef.current = false;
lastSeenPacketCountRef.current = packetCount ?? rawPackets.length;
}, [nodeId]);
// Reset TTS when component unmounts or nodeId changes

View File

@@ -2,7 +2,7 @@
import { useCallback, useState, useEffect, useRef, useMemo } from "react";
import { useRouter } from "next/navigation";
import { track, AnalyticsEvent } from "@/lib/analytics";
import { usePostHog } from "posthog-js/react";
import {
useSession,
useSessionId,
@@ -61,6 +61,7 @@ export default function BuildChatPanel({
existingSessionId,
}: BuildChatPanelProps) {
const router = useRouter();
const posthog = usePostHog();
const outputPanelOpen = useOutputPanelOpen();
const session = useSession();
const sessionId = useSessionId();
@@ -253,7 +254,7 @@ export default function BuildChatPanel({
return;
}
track(AnalyticsEvent.SENT_CRAFT_MESSAGE);
posthog?.capture("sent_craft_message");
if (hasSession && sessionId) {
// Existing session flow
@@ -366,6 +367,7 @@ export default function BuildChatPanel({
hasUploadingFiles,
limits,
refreshLimits,
posthog,
]
);

View File

@@ -2,7 +2,7 @@
import { useEffect } from "react";
import { motion } from "motion/react";
import { track, AnalyticsEvent } from "@/lib/analytics";
import { usePostHog } from "posthog-js/react";
import { OnyxLogoTypeIcon } from "@/components/icons/icons";
import Text from "@/refresh-components/texts/Text";
import BigButton from "@/app/craft/components/BigButton";
@@ -16,10 +16,12 @@ export default function BuildModeIntroContent({
onClose,
onTryBuildMode,
}: BuildModeIntroContentProps) {
const posthog = usePostHog();
// Track when user sees the craft intro
useEffect(() => {
track(AnalyticsEvent.SAW_CRAFT_INTRO);
}, []);
posthog?.capture("saw_craft_intro");
}, [posthog]);
return (
<div className="absolute inset-0 flex flex-col items-center justify-center pointer-events-none">
@@ -73,7 +75,7 @@ export default function BuildModeIntroContent({
className="!border-white !text-white hover:!bg-white/10 active:!bg-white/20 !w-[160px]"
onClick={(e) => {
e.stopPropagation();
track(AnalyticsEvent.CLICKED_GO_HOME);
posthog?.capture("clicked_go_home");
onClose();
}}
>
@@ -84,7 +86,7 @@ export default function BuildModeIntroContent({
className="!bg-white !text-black hover:!bg-gray-200 active:!bg-gray-300 !w-[160px]"
onClick={(e) => {
e.stopPropagation();
track(AnalyticsEvent.CLICKED_TRY_CRAFT);
posthog?.capture("clicked_try_craft");
onTryBuildMode();
}}
>

View File

@@ -1,11 +1,7 @@
"use client";
import { useState, useEffect, useMemo } from "react";
import {
track,
AnalyticsEvent,
LLMProviderConfiguredSource,
} from "@/lib/analytics";
import { usePostHog } from "posthog-js/react";
import { SvgArrowRight, SvgArrowLeft, SvgX } from "@opal/icons";
import { cn } from "@/lib/utils";
import Text from "@/refresh-components/texts/Text";
@@ -116,6 +112,8 @@ export default function BuildOnboardingModal({
onLlmComplete,
onClose,
}: BuildOnboardingModalProps) {
const posthog = usePostHog();
// Compute steps based on mode
const steps = useMemo(
() => getStepsForMode(mode, isAdmin, allProvidersConfigured, hasUserInfo),
@@ -285,12 +283,6 @@ export default function BuildOnboardingModal({
modelName: selectedModel,
});
track(AnalyticsEvent.CONFIGURED_LLM_PROVIDER, {
provider: currentProviderConfig.providerName,
is_creation: true,
source: LLMProviderConfiguredSource.CRAFT_ONBOARDING,
});
setConnectionStatus("success");
} catch (error) {
console.error("Error connecting LLM provider:", error);
@@ -355,7 +347,7 @@ export default function BuildOnboardingModal({
level: level || undefined,
});
track(AnalyticsEvent.COMPLETED_CRAFT_ONBOARDING);
posthog?.capture("completed_craft_onboarding");
onClose();
} catch (error) {
console.error("Error completing onboarding:", error);
@@ -473,7 +465,7 @@ export default function BuildOnboardingModal({
<button
type="button"
onClick={() => {
track(AnalyticsEvent.COMPLETED_CRAFT_USER_INFO, {
posthog?.capture("completed_craft_user_info", {
first_name: firstName.trim(),
last_name: lastName.trim() || undefined,
work_area: workArea,

View File

@@ -58,7 +58,7 @@ import {
useRouter,
useSearchParams,
} from "next/navigation";
import { track, AnalyticsEvent } from "@/lib/analytics";
import { usePostHog } from "posthog-js/react";
import { getExtensionContext } from "@/lib/extension/utils";
import useChatSessions from "@/hooks/useChatSessions";
import { usePinnedAgents } from "@/hooks/useAgents";
@@ -147,6 +147,7 @@ export default function useChatController({
const { forcedToolIds } = useForcedTools();
const { fetchProjects, setCurrentMessageFiles, beginUpload } =
useProjectsContext();
const posthog = usePostHog();
// Use selectors to access only the specific fields we need
const currentSessionId = useChatSessionStore(
@@ -763,8 +764,8 @@ export default function useChatController({
.user_message_id;
// Track extension queries in PostHog (reuses isExtension/extensionContext from above)
if (isExtension) {
track(AnalyticsEvent.EXTENSION_CHAT_QUERY, {
if (isExtension && posthog) {
posthog.capture("extension_chat_query", {
extension_context: extensionContext,
assistant_id: liveAgent?.id,
has_files: effectiveFileDescriptors.length > 0,

View File

@@ -1,70 +0,0 @@
import posthog from "posthog-js";
// ─── Event Registry ────────────────────────────────────────────────────────
// All tracked event names. Add new events here to get type-safe tracking.
/**
 * Registry of all analytics event names captured via PostHog.
 *
 * Add new events here — and map their properties in
 * `AnalyticsEventProperties` — to get type-safe tracking through `track()`.
 * Values are the snake_case event names as they appear in PostHog.
 */
export enum AnalyticsEvent {
  CONFIGURED_LLM_PROVIDER = "configured_llm_provider",
  COMPLETED_CRAFT_ONBOARDING = "completed_craft_onboarding",
  COMPLETED_CRAFT_USER_INFO = "completed_craft_user_info",
  SENT_CRAFT_MESSAGE = "sent_craft_message",
  SAW_CRAFT_INTRO = "saw_craft_intro",
  CLICKED_GO_HOME = "clicked_go_home",
  CLICKED_TRY_CRAFT = "clicked_try_craft",
  CLICKED_CRAFT_IN_SIDEBAR = "clicked_craft_in_sidebar",
  RELEASE_NOTIFICATION_CLICKED = "release_notification_clicked",
  EXTENSION_CHAT_QUERY = "extension_chat_query",
}
// ─── Shared Enums ──────────────────────────────────────────────────────────
/**
 * Where an LLM provider configuration was initiated from.
 *
 * Sent as the `source` property of CONFIGURED_LLM_PROVIDER events so the
 * admin page, chat onboarding, and craft onboarding flows can be
 * distinguished in analytics.
 */
export enum LLMProviderConfiguredSource {
  ADMIN_PAGE = "admin_page",
  CHAT_ONBOARDING = "chat_onboarding",
  CRAFT_ONBOARDING = "craft_onboarding",
}
// ─── Event Property Types ──────────────────────────────────────────────────
// Maps each event to its required properties. Use `void` for events with no
// properties — this makes the second argument to `track()` optional for those
// events while requiring it for events that carry data.
/**
 * Maps each `AnalyticsEvent` to the property payload it must be sent with.
 *
 * Events mapped to `void` carry no properties; `track()`'s conditional
 * rest-tuple signature makes its second argument optional for those events
 * and required for all others.
 */
interface AnalyticsEventProperties {
  [AnalyticsEvent.CONFIGURED_LLM_PROVIDER]: {
    // Provider name, or "custom" for providers outside the known set
    // (see admin-page call site).
    provider: string;
    // True when a provider is created, false when an existing one is updated.
    is_creation: boolean;
    source: LLMProviderConfiguredSource;
  };
  [AnalyticsEvent.COMPLETED_CRAFT_ONBOARDING]: void;
  [AnalyticsEvent.COMPLETED_CRAFT_USER_INFO]: {
    first_name: string;
    last_name: string | undefined;
    work_area: string | undefined;
    level: string | undefined;
  };
  [AnalyticsEvent.SENT_CRAFT_MESSAGE]: void;
  [AnalyticsEvent.SAW_CRAFT_INTRO]: void;
  [AnalyticsEvent.CLICKED_GO_HOME]: void;
  [AnalyticsEvent.CLICKED_TRY_CRAFT]: void;
  [AnalyticsEvent.CLICKED_CRAFT_IN_SIDEBAR]: void;
  [AnalyticsEvent.RELEASE_NOTIFICATION_CLICKED]: {
    // Release version from the notification, when present.
    version: string | undefined;
  };
  [AnalyticsEvent.EXTENSION_CHAT_QUERY]: {
    extension_context: string | null | undefined;
    assistant_id: number | undefined;
    has_files: boolean;
    deep_research: boolean;
  };
}
// ─── Typed Track Function ──────────────────────────────────────────────────
/**
 * Type-safe wrapper around `posthog.capture`.
 *
 * The conditional rest-tuple signature makes the `properties` argument
 * optional for events whose property map is `void` and mandatory for events
 * that carry data. Events with no properties are captured with an empty
 * properties object.
 */
export function track<E extends AnalyticsEvent>(
  ...args: AnalyticsEventProperties[E] extends void
    ? [event: E]
    : [event: E, properties: AnalyticsEventProperties[E]]
): void {
  const event = args[0];
  const maybeProps = (args as [E, Record<string, unknown>?])[1];
  posthog.capture(event, maybeProps === undefined ? {} : maybeProps);
}

View File

@@ -1,5 +1,4 @@
import {
LLMProviderName,
LLMProviderView,
ModelConfiguration,
WellKnownLLMProviderDescriptor,
@@ -13,11 +12,6 @@ import { toast } from "@/hooks/useToast";
import * as Yup from "yup";
import isEqual from "lodash/isEqual";
import { ScopedMutator } from "swr";
import {
track,
AnalyticsEvent,
LLMProviderConfiguredSource,
} from "@/lib/analytics";
// Common class names for the Form component across all LLM provider forms
export const LLM_FORM_CLASS_NAME = "flex flex-col gap-y-4 items-stretch mt-6";
@@ -305,12 +299,5 @@ export const submitLLMProvider = async <T extends BaseLLMFormValues>({
toast.success(successMsg);
}
const knownProviders = new Set<string>(Object.values(LLMProviderName));
track(AnalyticsEvent.CONFIGURED_LLM_PROVIDER, {
provider: knownProviders.has(providerName) ? providerName : "custom",
is_creation: !existingLlmProvider,
source: LLMProviderConfiguredSource.ADMIN_PAGE,
});
setSubmitting(false);
};

View File

@@ -1,11 +1,6 @@
"use client";
import React, { useState, useMemo, ReactNode } from "react";
import {
track,
AnalyticsEvent,
LLMProviderConfiguredSource,
} from "@/lib/analytics";
import { Form, Formik, FormikProps } from "formik";
import * as Yup from "yup";
import ProviderModal from "@/components/modals/ProviderModal";
@@ -269,12 +264,6 @@ export function OnboardingFormWrapper<T extends Record<string, any>>({
}
}
track(AnalyticsEvent.CONFIGURED_LLM_PROVIDER, {
provider: isCustomProvider ? "custom" : llmDescriptor?.name ?? "",
is_creation: true,
source: LLMProviderConfiguredSource.CHAT_ONBOARDING,
});
// Update onboarding state
onboardingActions?.updateData({
llmProviders: [

View File

@@ -72,7 +72,6 @@ import BuildModeIntroBackground from "@/app/craft/components/IntroBackground";
import BuildModeIntroContent from "@/app/craft/components/IntroContent";
import { CRAFT_PATH } from "@/app/craft/v1/constants";
import { usePostHog } from "posthog-js/react";
import { track, AnalyticsEvent } from "@/lib/analytics";
import { motion, AnimatePresence } from "motion/react";
import { Notification, NotificationType } from "@/interfaces/settings";
import { errorHandlingFetcher } from "@/lib/fetcher";
@@ -528,7 +527,7 @@ const MemoizedAppSidebarInner = memo(
icon={SvgDevKit}
folded={folded}
href={CRAFT_PATH}
onClick={() => track(AnalyticsEvent.CLICKED_CRAFT_IN_SIDEBAR)}
onClick={() => posthog?.capture("clicked_craft_in_sidebar")}
>
Craft
</SidebarTab>

View File

@@ -3,7 +3,7 @@
import useSWR from "swr";
import { useRouter } from "next/navigation";
import { Route } from "next";
import { track, AnalyticsEvent } from "@/lib/analytics";
import { usePostHog } from "posthog-js/react";
import { Notification, NotificationType } from "@/interfaces/settings";
import { errorHandlingFetcher } from "@/lib/fetcher";
import Text from "@/refresh-components/texts/Text";
@@ -38,6 +38,7 @@ export default function NotificationsPopover({
onShowBuildIntro,
}: NotificationsPopoverProps) {
const router = useRouter();
const posthog = usePostHog();
const {
data: notifications,
mutate,
@@ -61,7 +62,7 @@ export default function NotificationsPopover({
// Track release notes clicks
if (notification.notif_type === NotificationType.RELEASE_NOTES) {
track(AnalyticsEvent.RELEASE_NOTIFICATION_CLICKED, {
posthog?.capture("release_notification_clicked", {
version: notification.additional_data?.version,
});
}

View File

@@ -96,22 +96,6 @@ Key benefits include:
- **Flexibility**: Connect any data source via custom connectors
- **Extensibility**: Open-source codebase with active community`;
// Fixture: an AI response mixing LaTeX math with plain text. Exercises four
// rendering cases: inline math (\( … \)), display math (\[ … \]), a bare
// currency "$" that must NOT be treated as a math delimiter, and a fenced
// latex block that must stay a code block with its raw source intact.
const LATEX_AI_RESPONSE = `Here is a mix of math and plain text:
Inline math should render cleanly: \\(E = mc^2\\).
Display math should render on its own line:
\\[
\\int_0^1 x^2 \\, dx = \\frac{1}{3}
\\]
This currency value should stay plain text: $100.
And this LaTeX source should remain a code block:
\`\`\`latex
\\int_0^1 x^2 \\, dx = \\frac{1}{3}
\`\`\``;
interface MockDocument {
document_id: string;
semantic_identifier: string;
@@ -458,32 +442,6 @@ for (const theme of THEMES) {
`chat-markdown-code-response-${theme}`
);
});
// Verifies LaTeX handling in AI responses: inline/display math is rendered
// by KaTeX, a bare dollar amount stays plain text, and a fenced latex block
// keeps its raw source. Mocked via LATEX_AI_RESPONSE; runs once per theme.
test("AI response with LaTeX math renders correctly", async ({
  page,
}) => {
  await openChat(page);
  await mockChatEndpoint(page, LATEX_AI_RESPONSE);
  await sendMessage(page, "Show me inline and block math");
  const aiMessage = page.getByTestId("onyx-ai-message").first();
  // Snapshot is taken before the text assertions so a failed expectation
  // still leaves a themed visual artifact to inspect.
  await screenshotChatContainer(
    page,
    `chat-latex-math-response-${theme}`
  );
  await expect(aiMessage).toContainText("Inline math should render");
  // The "$100." currency value must not be mistaken for a math delimiter.
  await expect(aiMessage).toContainText(
    "This currency value should stay plain text: $100."
  );
  // Exactly two rendered formulas expected: one inline, one display.
  await expect(aiMessage.locator(".katex")).toHaveCount(2);
  await expect(aiMessage.locator(".katex-display")).toBeVisible();
  // The fenced latex block must retain its unrendered source.
  await expect(aiMessage.getByRole("code")).toContainText(
    "\\int_0^1 x^2 \\, dx = \\frac{1}{3}"
  );
});
});
test.describe("Multi-Turn Conversation", () => {