Compare commits

..

5 Commits

Author SHA1 Message Date
Nik
ce629d2734 WIP: GenUI framework integration — chat-level toggle, backend dual-emit, session loading
Progress on GenUI structured UI rendering:
- Moved genui packages from packages/genui/ to web/lib/ (genui-core, genui-react, genui-onyx)
- Backend: dual-emit AgentResponseDelta + GenUIDelta when GENUI_ENABLED
- Backend: session_loading emits GenUI packets for historical messages
- Frontend: chat-level toggle (zustand store + header button)
- Frontend: GenUIRenderer with loading indicator during generation
- Frontend: GenUIToggleHandler routes between text and structured views
- Still WIP: streaming shows raw GenUI syntax as markdown (needs backend fix to
  stop emitting AgentResponseDelta content when GenUI mode is active)
2026-03-16 09:55:43 -07:00
Nik
9693123b7d feat: add GenUI structured UI rendering framework
Standalone framework for LLM-generated structured UI at packages/genui/.
Three packages: @onyx/genui (core parser/registry/prompt), @onyx/genui-react
(React renderer), @onyx/genui-onyx (Onyx component bindings).

Core: line-oriented markup parser with streaming support, recursive descent
parser, variable resolution, Zod-based prop validation, and auto prompt
generation from component schemas. 208 tests passing.

Onyx bindings: 16 components bound to real Opal/refresh-components (Text,
Button, Card, Tag, Table, Code, Divider, Stack/Row/Column, Image, Link,
Alert, List, IconButton, Input).

Integration: GENUI_START/GENUI_DELTA streaming packets (frontend + backend),
GenUIRenderer wired into chat pipeline via findRenderer(), genui_prompt
parameter added to build_system_prompt().
2026-03-15 12:55:03 -07:00
Yuhong Sun
34356a5853 Fix sidebar 2026-03-13 13:56:47 -07:00
Yuhong Sun
82fb535015 Done 2026-03-13 13:55:47 -07:00
Yuhong Sun
6bb9a4970b Small touchups in UI 2026-03-13 13:55:47 -07:00
234 changed files with 13716 additions and 5147 deletions

View File

@@ -10,9 +10,6 @@ inputs:
failed-jobs:
description: "Deprecated alias for details"
required: false
mention:
description: "GitHub username to resolve to a Slack @-mention. Replaces {mention} in details."
required: false
title:
description: "Title for the notification"
required: false
@@ -29,7 +26,6 @@ runs:
SLACK_WEBHOOK_URL: ${{ inputs.webhook-url }}
DETAILS: ${{ inputs.details }}
FAILED_JOBS: ${{ inputs.failed-jobs }}
MENTION_USER: ${{ inputs.mention }}
TITLE: ${{ inputs.title }}
REF_NAME: ${{ inputs.ref-name }}
REPO: ${{ github.repository }}
@@ -56,27 +52,6 @@ runs:
DETAILS="$FAILED_JOBS"
fi
# Resolve {mention} placeholder if a GitHub username was provided.
# Looks up the username in user-mappings.json (co-located with this action)
# and replaces {mention} with <@SLACK_ID> for a Slack @-mention.
# Falls back to the plain GitHub username if not found in the mapping.
if [ -n "$MENTION_USER" ]; then
MAPPINGS_FILE="${GITHUB_ACTION_PATH}/user-mappings.json"
slack_id="$(jq -r --arg gh "$MENTION_USER" 'to_entries[] | select(.value | ascii_downcase == ($gh | ascii_downcase)) | .key' "$MAPPINGS_FILE" 2>/dev/null | head -1)"
if [ -n "$slack_id" ]; then
mention_text="<@${slack_id}>"
else
mention_text="${MENTION_USER}"
fi
DETAILS="${DETAILS//\{mention\}/$mention_text}"
TITLE="${TITLE//\{mention\}/}"
else
DETAILS="${DETAILS//\{mention\}/}"
TITLE="${TITLE//\{mention\}/}"
fi
normalize_multiline() {
printf '%s' "$1" | awk 'BEGIN { ORS=""; first=1 } { if (!first) printf "\\n"; printf "%s", $0; first=0 }'
}

View File

@@ -1,18 +0,0 @@
{
"U05SAGZPEA1": "yuhongsun96",
"U05SAH6UGUD": "Weves",
"U07PWEQB7A5": "evan-onyx",
"U07V1SM68KF": "joachim-danswer",
"U08JZ9N3QNN": "raunakab",
"U08L24NCLJE": "Subash-Mohan",
"U090B9M07B2": "wenxi-onyx",
"U094RASDP0Q": "duo-onyx",
"U096L8ZQ85B": "justin-tahara",
"U09AHV8UBQX": "jessicasingh7",
"U09KAL5T3C2": "nmgarza5",
"U09KPGVQ70R": "acaprau",
"U09QR8KTSJH": "rohoswagger",
"U09RB4NTXA4": "jmelahman",
"U0A6K9VCY6A": "Danelegend",
"U0AGC4KH71A": "Bo-Onyx"
}

View File

@@ -455,7 +455,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -529,7 +529,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -607,7 +607,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -668,7 +668,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -750,7 +750,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -836,7 +836,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -894,7 +894,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -967,7 +967,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -1044,7 +1044,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -1105,7 +1105,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -1178,7 +1178,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -1256,7 +1256,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -1317,7 +1317,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -1397,7 +1397,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |
@@ -1480,7 +1480,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ needs.determine-builds.outputs.is-test-run == 'true' && env.RUNS_ON_ECR_CACHE || env.REGISTRY_IMAGE }}
flavor: |

View File

@@ -207,7 +207,7 @@ jobs:
CHERRY_PICK_PR_URL: ${{ needs.cherry-pick-to-latest-release.outputs.cherry_pick_pr_url }}
run: |
source_pr_url="https://github.com/${GITHUB_REPOSITORY}/pull/${SOURCE_PR_NUMBER}"
details="*Cherry-pick PR opened successfully.*\\n• author: {mention}\\n• source PR: ${source_pr_url}"
details="*Cherry-pick PR opened successfully.*\\n• source PR: ${source_pr_url}"
if [ -n "${CHERRY_PICK_PR_URL}" ]; then
details="${details}\\n• cherry-pick PR: ${CHERRY_PICK_PR_URL}"
fi
@@ -221,7 +221,6 @@ jobs:
uses: ./.github/actions/slack-notify
with:
webhook-url: ${{ secrets.CHERRY_PICK_PRS_WEBHOOK }}
mention: ${{ needs.resolve-cherry-pick-request.outputs.merged_by }}
details: ${{ steps.success-summary.outputs.details }}
title: "✅ Automated Cherry-Pick PR Opened"
ref-name: ${{ github.event.pull_request.base.ref }}
@@ -276,21 +275,20 @@ jobs:
else
failed_job_label="cherry-pick-to-latest-release"
fi
details="• author: {mention}\\n• ${failed_job_label}\\n• source PR: ${source_pr_url}\\n• reason: ${reason_text}"
failed_jobs="• ${failed_job_label}\\n• source PR: ${source_pr_url}\\n• reason: ${reason_text}"
if [ -n "${MERGE_COMMIT_SHA}" ]; then
details="${details}\\n• merge SHA: ${MERGE_COMMIT_SHA}"
failed_jobs="${failed_jobs}\\n• merge SHA: ${MERGE_COMMIT_SHA}"
fi
if [ -n "${details_excerpt}" ]; then
details="${details}\\n• excerpt: ${details_excerpt}"
failed_jobs="${failed_jobs}\\n• excerpt: ${details_excerpt}"
fi
echo "details=${details}" >> "$GITHUB_OUTPUT"
echo "jobs=${failed_jobs}" >> "$GITHUB_OUTPUT"
- name: Notify #cherry-pick-prs about cherry-pick failure
uses: ./.github/actions/slack-notify
with:
webhook-url: ${{ secrets.CHERRY_PICK_PRS_WEBHOOK }}
mention: ${{ needs.resolve-cherry-pick-request.outputs.merged_by }}
details: ${{ steps.failure-summary.outputs.details }}
details: ${{ steps.failure-summary.outputs.jobs }}
title: "🚨 Automated Cherry-Pick Failed"
ref-name: ${{ github.event.pull_request.base.ref }}

View File

@@ -105,7 +105,7 @@ jobs:
- name: Upload build artifacts
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: desktop-build-${{ matrix.platform }}-${{ github.run_id }}
path: |

View File

@@ -174,7 +174,7 @@ jobs:
- name: Upload Docker logs
if: failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-logs-${{ matrix.test-dir }}
path: docker-logs/

View File

@@ -25,7 +25,7 @@ jobs:
outputs:
modules: ${{ steps.set-modules.outputs.modules }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
with:
persist-credentials: false
- id: set-modules
@@ -39,7 +39,7 @@ jobs:
matrix:
modules: ${{ fromJSON(needs.detect-modules.outputs.modules) }}
steps:
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- uses: actions/setup-go@4dc6199c7b1a012772edbd06daecab0f50c9053c # zizmor: ignore[cache-poisoning]

View File

@@ -466,7 +466,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs-${{ matrix.edition }}-${{ matrix.test-dir.name }}
path: ${{ github.workspace }}/docker-compose.log
@@ -587,7 +587,7 @@ jobs:
- name: Upload logs (onyx-lite)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs-onyx-lite
path: ${{ github.workspace }}/docker-compose-onyx-lite.log
@@ -725,7 +725,7 @@ jobs:
- name: Upload logs (multi-tenant)
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs-multitenant
path: ${{ github.workspace }}/docker-compose-multitenant.log

View File

@@ -44,7 +44,7 @@ jobs:
- name: Upload coverage reports
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: jest-coverage-${{ github.run_id }}
path: ./web/coverage

View File

@@ -445,7 +445,7 @@ jobs:
run: |
npx playwright test --project ${PROJECT}
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
# Includes test results and trace.zip files
@@ -454,7 +454,7 @@ jobs:
retention-days: 30
- name: Upload screenshots
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
name: playwright-screenshots-${{ matrix.project }}-${{ github.run_id }}
@@ -534,7 +534,7 @@ jobs:
"s3://${PLAYWRIGHT_S3_BUCKET}/reports/pr-${PR_NUMBER}/${RUN_ID}/${PROJECT}/"
- name: Upload visual diff summary
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
name: screenshot-diff-summary-${{ matrix.project }}
@@ -543,7 +543,7 @@ jobs:
retention-days: 5
- name: Upload visual diff report artifact
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
name: screenshot-diff-report-${{ matrix.project }}-${{ github.run_id }}
@@ -590,7 +590,7 @@ jobs:
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-logs-${{ matrix.project }}-${{ github.run_id }}
path: ${{ github.workspace }}/docker-compose.log
@@ -674,7 +674,7 @@ jobs:
working-directory: ./web
run: npx playwright test --project lite
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
- uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
if: always()
with:
name: playwright-test-results-lite-${{ github.run_id }}
@@ -692,7 +692,7 @@ jobs:
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-logs-lite-${{ github.run_id }}
path: ${{ github.workspace }}/docker-compose.log

View File

@@ -122,7 +122,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs
path: ${{ github.workspace }}/docker-compose.log

View File

@@ -319,7 +319,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
with:
name: docker-all-logs-nightly-${{ matrix.provider }}-llm-provider
path: |

View File

@@ -125,7 +125,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -195,7 +195,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
@@ -268,7 +268,7 @@ jobs:
- name: Docker meta
id: meta
uses: docker/metadata-action@030e881283bb7a6894de51c315a6bfe6a94e05cf # ratchet:docker/metadata-action@v6.0.0
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # ratchet:docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |

32
.vscode/launch.json vendored
View File

@@ -15,7 +15,7 @@
{
"name": "Run All Onyx Services",
"configurations": [
"Web Server",
// "Web Server",
"Model Server",
"API Server",
"MCP Server",
@@ -95,7 +95,7 @@
"LOG_LEVEL": "DEBUG",
"PYTHONUNBUFFERED": "1"
},
"args": ["model_server.main:app", "--reload", "--port", "9000"],
"args": ["model_server.main:app", "--reload", "--port", "9010"],
"presentation": {
"group": "2"
},
@@ -113,7 +113,7 @@
"LOG_LEVEL": "DEBUG",
"PYTHONUNBUFFERED": "1"
},
"args": ["onyx.main:app", "--reload", "--port", "8080"],
"args": ["onyx.main:app", "--reload", "--port", "8090"],
"presentation": {
"group": "2"
},
@@ -165,7 +165,7 @@
"envFile": "${workspaceFolder}/.vscode/.env",
"env": {
"MCP_SERVER_ENABLED": "true",
"MCP_SERVER_PORT": "8090",
"MCP_SERVER_PORT": "8100",
"MCP_SERVER_CORS_ORIGINS": "http://localhost:*",
"LOG_LEVEL": "DEBUG",
"PYTHONUNBUFFERED": "1"
@@ -174,7 +174,7 @@
"onyx.mcp_server.api:mcp_app",
"--reload",
"--port",
"8090",
"8100",
"--timeout-graceful-shutdown",
"0"
],
@@ -526,10 +526,7 @@
"type": "node",
"request": "launch",
"runtimeExecutable": "uv",
"runtimeArgs": [
"sync",
"--all-extras"
],
"runtimeArgs": ["sync", "--all-extras"],
"cwd": "${workspaceFolder}",
"console": "integratedTerminal",
"presentation": {
@@ -653,14 +650,7 @@
"type": "node",
"request": "launch",
"runtimeExecutable": "uv",
"runtimeArgs": [
"run",
"--with",
"onyx-devtools",
"ods",
"db",
"upgrade"
],
"runtimeArgs": ["run", "--with", "onyx-devtools", "ods", "db", "upgrade"],
"cwd": "${workspaceFolder}",
"console": "integratedTerminal",
"presentation": {
@@ -679,7 +669,11 @@
"PYTHONUNBUFFERED": "1",
"PYTHONPATH": "backend"
},
"args": ["--filename", "backend/generated/openapi.json", "--generate-python-client"]
"args": [
"--filename",
"backend/generated/openapi.json",
"--generate-python-client"
]
},
{
// script to debug multi tenant db issues
@@ -708,7 +702,7 @@
"name": "Debug React Web App in Chrome",
"type": "chrome",
"request": "launch",
"url": "http://localhost:3000",
"url": "http://localhost:3010",
"webRoot": "${workspaceFolder}/web"
}
]

View File

@@ -1,103 +0,0 @@
"""add_hook_and_hook_execution_log_tables
Revision ID: 689433b0d8de
Revises: 93a2e195e25c
Create Date: 2026-03-13 11:25:06.547474
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import UUID as PGUUID
# revision identifiers, used by Alembic.
revision = "689433b0d8de"
down_revision = "93a2e195e25c"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the ``hook`` and ``hook_execution_log`` tables.

    ``hook`` stores per-hook-point webhook configuration (endpoint URL,
    stored API key, failure strategy, timeout), with a partial unique
    index enforcing at most one non-deleted hook per ``hook_point``.
    ``hook_execution_log`` records the outcome of individual hook runs.
    """
    op.create_table(
        "hook",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column(
            # Pipeline stage the hook attaches to; native_enum=False stores a
            # plain string so adding values later needs no DB-level enum change.
            "hook_point",
            sa.Enum("document_ingestion", "query_processing", native_enum=False),
            nullable=False,
        ),
        sa.Column("endpoint_url", sa.Text(), nullable=True),
        # LargeBinary rather than Text — presumably the key is stored
        # encrypted by the application layer; confirm at the call sites.
        sa.Column("api_key", sa.LargeBinary(), nullable=True),
        sa.Column("is_reachable", sa.Boolean(), nullable=True),
        sa.Column(
            # One of "hard" / "soft" — how a failing hook run is treated.
            "fail_strategy",
            sa.Enum("hard", "soft", native_enum=False),
            nullable=False,
        ),
        sa.Column("timeout_seconds", sa.Float(), nullable=False),
        sa.Column(
            "is_active", sa.Boolean(), nullable=False, server_default=sa.text("false")
        ),
        sa.Column(
            # Soft-delete flag; also the predicate of the partial index below.
            "deleted", sa.Boolean(), nullable=False, server_default=sa.text("false")
        ),
        sa.Column("creator_id", PGUUID(as_uuid=True), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        # Keep the hook row if its creating user is deleted.
        sa.ForeignKeyConstraint(["creator_id"], ["user.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )
    # Partial unique index: at most one *non-deleted* hook per hook_point.
    op.create_index(
        "ix_hook_one_non_deleted_per_point",
        "hook",
        ["hook_point"],
        unique=True,
        postgresql_where=sa.text("deleted = false"),
    )
    op.create_table(
        "hook_execution_log",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("hook_id", sa.Integer(), nullable=False),
        sa.Column(
            "is_success",
            sa.Boolean(),
            nullable=False,
        ),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column("status_code", sa.Integer(), nullable=True),
        sa.Column("duration_ms", sa.Integer(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        # Execution logs have no meaning without their hook; cascade delete.
        sa.ForeignKeyConstraint(["hook_id"], ["hook.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    # Support lookups by hook and time-ordered queries over the log.
    op.create_index("ix_hook_execution_log_hook_id", "hook_execution_log", ["hook_id"])
    op.create_index(
        "ix_hook_execution_log_created_at", "hook_execution_log", ["created_at"]
    )
def downgrade() -> None:
    """Drop the hook tables and their indexes, in reverse creation order.

    The dependent ``hook_execution_log`` (FK to ``hook``) is removed first
    so the ``hook`` table can be dropped cleanly.
    """
    op.drop_index("ix_hook_execution_log_created_at", table_name="hook_execution_log")
    op.drop_index("ix_hook_execution_log_hook_id", table_name="hook_execution_log")
    op.drop_table("hook_execution_log")
    op.drop_index("ix_hook_one_non_deleted_per_point", table_name="hook")
    op.drop_table("hook")

View File

@@ -118,7 +118,9 @@ JWT_PUBLIC_KEY_URL: str | None = os.getenv("JWT_PUBLIC_KEY_URL", None)
SUPER_USERS = json.loads(os.environ.get("SUPER_USERS", "[]"))
SUPER_CLOUD_API_KEY = os.environ.get("SUPER_CLOUD_API_KEY", "api_key")
POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY")
# The posthog client does not accept empty API keys or hosts however it fails silently
# when the capture is called. These defaults prevent Posthog issues from breaking the Onyx app
POSTHOG_API_KEY = os.environ.get("POSTHOG_API_KEY") or "FooBar"
POSTHOG_HOST = os.environ.get("POSTHOG_HOST") or "https://us.i.posthog.com"
POSTHOG_DEBUG_LOGS_ENABLED = (
os.environ.get("POSTHOG_DEBUG_LOGS_ENABLED", "").lower() == "true"

View File

@@ -34,9 +34,6 @@ class PostHogFeatureFlagProvider(FeatureFlagProvider):
Returns:
True if the feature is enabled for the user, False otherwise.
"""
if not posthog:
return False
try:
posthog.set(
distinct_id=user_id,

View File

@@ -29,6 +29,7 @@ from onyx.configs.app_configs import OPENAI_DEFAULT_API_KEY
from onyx.configs.app_configs import OPENROUTER_DEFAULT_API_KEY
from onyx.configs.app_configs import VERTEXAI_DEFAULT_CREDENTIALS
from onyx.configs.app_configs import VERTEXAI_DEFAULT_LOCATION
from onyx.configs.constants import MilestoneRecordType
from onyx.db.engine.sql_engine import get_session_with_shared_schema
from onyx.db.engine.sql_engine import get_session_with_tenant
from onyx.db.image_generation import create_default_image_gen_config_from_api_key
@@ -58,6 +59,7 @@ from onyx.server.manage.llm.models import LLMProviderUpsertRequest
from onyx.server.manage.llm.models import ModelConfigurationUpsertRequest
from onyx.setup import setup_onyx
from onyx.utils.logger import setup_logger
from onyx.utils.telemetry import mt_cloud_telemetry
from shared_configs.configs import MULTI_TENANT
from shared_configs.configs import POSTGRES_DEFAULT_SCHEMA
from shared_configs.configs import TENANT_ID_PREFIX
@@ -69,9 +71,7 @@ logger = setup_logger()
async def get_or_provision_tenant(
email: str,
referral_source: str | None = None,
request: Request | None = None,
email: str, referral_source: str | None = None, request: Request | None = None
) -> str:
"""
Get existing tenant ID for an email or create a new tenant if none exists.
@@ -693,6 +693,12 @@ async def assign_tenant_to_user(
try:
add_users_to_tenant([email], tenant_id)
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=email,
event=MilestoneRecordType.TENANT_CREATED,
)
except Exception:
logger.exception(f"Failed to assign tenant {tenant_id} to user {email}")
raise Exception("Failed to assign tenant to user")

View File

@@ -9,7 +9,6 @@ from ee.onyx.configs.app_configs import POSTHOG_API_KEY
from ee.onyx.configs.app_configs import POSTHOG_DEBUG_LOGS_ENABLED
from ee.onyx.configs.app_configs import POSTHOG_HOST
from onyx.utils.logger import setup_logger
from shared_configs.configs import MULTI_TENANT
logger = setup_logger()
@@ -19,19 +18,12 @@ def posthog_on_error(error: Any, items: Any) -> None:
logger.error(f"PostHog error: {error}, items: {items}")
posthog: Posthog | None = None
if POSTHOG_API_KEY:
posthog = Posthog(
project_api_key=POSTHOG_API_KEY,
host=POSTHOG_HOST,
debug=POSTHOG_DEBUG_LOGS_ENABLED,
on_error=posthog_on_error,
)
elif MULTI_TENANT:
logger.warning(
"POSTHOG_API_KEY is not set but MULTI_TENANT is enabled — "
"PostHog telemetry and feature flags will be disabled"
)
posthog = Posthog(
project_api_key=POSTHOG_API_KEY,
host=POSTHOG_HOST,
debug=POSTHOG_DEBUG_LOGS_ENABLED,
on_error=posthog_on_error,
)
# For cross referencing between cloud and www Onyx sites
# NOTE: These clients are separate because they are separate posthog projects.
@@ -68,7 +60,7 @@ def capture_and_sync_with_alternate_posthog(
logger.error(f"Error capturing marketing posthog event: {e}")
try:
if posthog and (cloud_user_id := props.get("onyx_cloud_user_id")):
if cloud_user_id := props.get("onyx_cloud_user_id"):
cloud_props = props.copy()
cloud_props.pop("onyx_cloud_user_id", None)

View File

@@ -1,5 +1,3 @@
from typing import Any
from ee.onyx.utils.posthog_client import posthog
from onyx.utils.logger import setup_logger
@@ -7,27 +5,12 @@ logger = setup_logger()
def event_telemetry(
distinct_id: str, event: str, properties: dict[str, Any] | None = None
distinct_id: str, event: str, properties: dict | None = None
) -> None:
"""Capture and send an event to PostHog, flushing immediately."""
if not posthog:
return
logger.info(f"Capturing PostHog event: {distinct_id} {event} {properties}")
try:
posthog.capture(distinct_id, event, properties)
posthog.flush()
except Exception as e:
logger.error(f"Error capturing PostHog event: {e}")
def identify_user(distinct_id: str, properties: dict[str, Any] | None = None) -> None:
"""Create/update a PostHog person profile, flushing immediately."""
if not posthog:
return
try:
posthog.identify(distinct_id, properties)
posthog.flush()
except Exception as e:
logger.error(f"Error identifying PostHog user: {e}")

View File

@@ -19,7 +19,6 @@ from typing import Optional
from typing import Protocol
from typing import Tuple
from typing import TypeVar
from urllib.parse import urlparse
import jwt
from email_validator import EmailNotValidError
@@ -135,7 +134,6 @@ from onyx.redis.redis_pool import retrieve_ws_token_data
from onyx.server.settings.store import load_settings
from onyx.server.utils import BasicAuthenticationError
from onyx.utils.logger import setup_logger
from onyx.utils.telemetry import mt_cloud_identify
from onyx.utils.telemetry import mt_cloud_telemetry
from onyx.utils.telemetry import optional_telemetry
from onyx.utils.telemetry import RecordType
@@ -794,12 +792,6 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
except Exception:
logger.exception("Error deleting anonymous user cookie")
tenant_id = CURRENT_TENANT_ID_CONTEXTVAR.get()
mt_cloud_identify(
distinct_id=str(user.id),
properties={"email": user.email, "tenant_id": tenant_id},
)
async def on_after_register(
self, user: User, request: Optional[Request] = None
) -> None:
@@ -818,25 +810,12 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
user_count = await get_user_count()
logger.debug(f"Current tenant user count: {user_count}")
# Ensure a PostHog person profile exists for this user.
mt_cloud_identify(
distinct_id=str(user.id),
properties={"email": user.email, "tenant_id": tenant_id},
)
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.USER_SIGNED_UP,
)
if user_count == 1:
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
event=MilestoneRecordType.TENANT_CREATED,
)
finally:
CURRENT_TENANT_ID_CONTEXTVAR.reset(token)
@@ -1673,33 +1652,6 @@ async def _get_user_from_token_data(token_data: dict) -> User | None:
return user
_LOOPBACK_HOSTNAMES = frozenset({"localhost", "127.0.0.1", "::1"})
def _is_same_origin(actual: str, expected: str) -> bool:
"""Compare two origins for the WebSocket CSWSH check.
Scheme and hostname must match exactly. Port must also match, except
when the hostname is a loopback address (localhost / 127.0.0.1 / ::1),
where port is ignored. On loopback, all ports belong to the same
operator, so port differences carry no security significance — the
CSWSH threat is remote origins, not local ones.
"""
a = urlparse(actual.rstrip("/"))
e = urlparse(expected.rstrip("/"))
if a.scheme != e.scheme or a.hostname != e.hostname:
return False
if a.hostname in _LOOPBACK_HOSTNAMES:
return True
actual_port = a.port or (443 if a.scheme == "https" else 80)
expected_port = e.port or (443 if e.scheme == "https" else 80)
return actual_port == expected_port
async def current_user_from_websocket(
websocket: WebSocket,
token: str = Query(..., description="WebSocket authentication token"),
@@ -1719,15 +1671,19 @@ async def current_user_from_websocket(
This applies the same auth checks as current_user() for HTTP endpoints.
"""
# Check Origin header to prevent Cross-Site WebSocket Hijacking (CSWSH).
# Browsers always send Origin on WebSocket connections.
# Check Origin header to prevent Cross-Site WebSocket Hijacking (CSWSH)
# Browsers always send Origin on WebSocket connections
origin = websocket.headers.get("origin")
expected_origin = WEB_DOMAIN.rstrip("/")
if not origin:
logger.warning("WS auth: missing Origin header")
raise BasicAuthenticationError(detail="Access denied. Missing origin.")
if not _is_same_origin(origin, WEB_DOMAIN):
logger.warning(f"WS auth: origin mismatch. Expected {WEB_DOMAIN}, got {origin}")
actual_origin = origin.rstrip("/")
if actual_origin != expected_origin:
logger.warning(
f"WS auth: origin mismatch. Expected {expected_origin}, got {actual_origin}"
)
raise BasicAuthenticationError(detail="Access denied. Invalid origin.")
# Validate WS token in Redis (single-use, deleted after retrieval)

View File

@@ -29,8 +29,6 @@ from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryQueues
from onyx.configs.constants import OnyxCeleryTask
from onyx.configs.constants import OnyxRedisLocks
from onyx.connectors.factory import ConnectorMissingException
from onyx.connectors.factory import identify_connector_class
from onyx.connectors.factory import instantiate_connector
from onyx.connectors.interfaces import HierarchyConnector
from onyx.connectors.models import HierarchyNode as PydanticHierarchyNode
@@ -57,26 +55,6 @@ logger = setup_logger()
HIERARCHY_FETCH_INTERVAL_SECONDS = 24 * 60 * 60
def _connector_supports_hierarchy_fetching(
cc_pair: ConnectorCredentialPair,
) -> bool:
"""Return True only for connectors whose class implements HierarchyConnector."""
try:
connector_class = identify_connector_class(
cc_pair.connector.source,
)
except ConnectorMissingException as e:
task_logger.warning(
"Skipping hierarchy fetching enqueue for source=%s input_type=%s: %s",
cc_pair.connector.source,
cc_pair.connector.input_type,
str(e),
)
return False
return issubclass(connector_class, HierarchyConnector)
def _is_hierarchy_fetching_due(cc_pair: ConnectorCredentialPair) -> bool:
"""Returns boolean indicating if hierarchy fetching is due for this connector.
@@ -208,10 +186,7 @@ def check_for_hierarchy_fetching(self: Task, *, tenant_id: str) -> int | None:
cc_pair_id=cc_pair_id,
)
if not cc_pair or not _connector_supports_hierarchy_fetching(cc_pair):
continue
if not _is_hierarchy_fetching_due(cc_pair):
if not cc_pair or not _is_hierarchy_fetching_due(cc_pair):
continue
task_id = _try_creating_hierarchy_fetching_task(

View File

@@ -0,0 +1,112 @@
"""
GenUI system prompt for LLM integration.
This prompt teaches the LLM to output structured UI using GenUI Lang.
It's generated from the Onyx component library definitions and kept
in sync with the frontend @onyx/genui-onyx library.
TODO: Auto-generate this from the frontend library at build time
instead of maintaining a static copy.
"""
GENUI_SYSTEM_PROMPT = """# Structured UI Output (GenUI Lang)
When the user's request benefits from structured UI (tables, cards, buttons, layouts), respond using GenUI Lang — a compact, line-oriented markup. Otherwise respond in plain markdown.
## Syntax
Each line declares a variable: `name = expression`
Expressions:
- `ComponentName(arg1, arg2, key: value)` — component with positional or named args
- `[a, b, c]` — array
- `{key: value}` — object
- `"string"`, `42`, `true`, `false`, `null` — literals
- `variableName` — reference to a previously defined variable
Rules:
- PascalCase identifiers are component types
- camelCase identifiers are variable references
- Positional args map to props in the order defined below
- The last statement is the root element (or explicitly name one variable `root`)
- Expressions inside unclosed brackets/parens may continue across multiple lines
- Lines that don't match `name = expression` are treated as plain text
## Available Components
### Layout
- `Stack(children?: unknown[], gap?: "none" | "xs" | "sm" | "md" | "lg" | "xl", align?: "start" | "center" | "end" | "stretch")` — Vertical stack layout — arranges children top to bottom
- `Row(children?: unknown[], gap?: "none" | "xs" | "sm" | "md" | "lg" | "xl", align?: "start" | "center" | "end" | "stretch", wrap?: boolean)` — Horizontal row layout — arranges children left to right
- `Column(children?: unknown[], width?: string)` — A column within a Row, with optional width control
- `Card(title?: string, padding?: "none" | "sm" | "md" | "lg")` — A container card with optional title and padding
- `Divider(spacing?: "sm" | "md" | "lg")` — A horizontal separator line
### Content
- `Text(children: string, headingH1?: boolean, headingH2?: boolean, headingH3?: boolean, muted?: boolean, mono?: boolean, bold?: boolean)` — Displays text with typography variants
- `Tag(title: string, color?: "green" | "purple" | "blue" | "gray" | "amber", size?: "sm" | "md")` — A small label tag with color
- `Table(columns: string[], rows: unknown[][], compact?: boolean)` — A data table with columns and rows
- `Code(children: string, language?: string, showCopyButton?: boolean)` — A code block with optional copy button
- `Image(src: string, alt?: string, width?: string, height?: string)` — Displays an image
- `Link(children: string, href: string, external?: boolean)` — A clickable hyperlink
- `List(items: string[], ordered?: boolean)` — An ordered or unordered list
### Interactive
- `Button(children: string, main?: boolean, action?: boolean, danger?: boolean, primary?: boolean, secondary?: boolean, tertiary?: boolean, size?: "lg" | "md", actionId?: string, disabled?: boolean)` — An interactive button that triggers an action
- `IconButton(icon: string, tooltip?: string, main?: boolean, action?: boolean, danger?: boolean, primary?: boolean, secondary?: boolean, actionId?: string, disabled?: boolean)` — A button that displays an icon with an optional tooltip
- `Input(placeholder?: string, value?: string, actionId?: string, readOnly?: boolean)` — A text input field
### Feedback
- `Alert(text: string, description?: string, level?: "default" | "info" | "success" | "warning" | "error", showIcon?: boolean)` — A status message banner (info, success, warning, error)
## Output Format
**CRITICAL: Output GenUI Lang directly as plain text. Do NOT wrap it in code fences (no ```genui or ``` blocks). The output is parsed as a streaming language, not displayed as code.**
## Streaming Guidelines
- Define variables before referencing them
- Each line is independently parseable — the UI updates as each line completes
- Keep variable names short and descriptive
- Build up complex UIs incrementally: define data first, then layout
## Examples
### Search results with table
```
title = Text("Search Results", headingH2: true)
row1 = ["Onyx Docs", Tag("PDF", color: "blue"), "2024-01-15"]
row2 = ["API Guide", Tag("MD", color: "green"), "2024-02-01"]
results = Table(["Name", "Type", "Date"], [row1, row2])
action = Button("View All", main: true, primary: true, actionId: "viewAll")
root = Stack([title, results, action], gap: "md")
```
### Status card with actions
```
status = Alert("Pipeline completed successfully", level: "success")
stats = Row([
Text("Processed: 1,234 docs"),
Text("Duration: 2m 34s", muted: true)
], gap: "lg")
actions = Row([
Button("View Results", main: true, primary: true, actionId: "viewResults"),
Button("Run Again", action: true, secondary: true, actionId: "rerun")
], gap: "sm")
root = Stack([status, stats, actions], gap: "md")
```
### Simple info display
```
root = Card(title: "Document Summary")
```
## Additional Guidelines
- Use Stack for vertical layouts and Row for horizontal layouts
- For tables, pass column headers as a string array and rows as arrays of values
- Tags are great for showing status, categories, or labels inline
- Use Alert for important status messages — choose the right level (info, success, warning, error)
- Buttons need an actionId to trigger events — the UI framework handles the callback
- Keep layouts simple — prefer flat structures over deeply nested ones
- For search results or document lists, use Table with relevant columns
- Use Card to visually group related content"""

View File

@@ -13,6 +13,7 @@ from onyx.chat.citation_processor import CitationMode
from onyx.chat.citation_processor import DynamicCitationProcessor
from onyx.chat.citation_utils import update_citation_processor_from_tool_response
from onyx.chat.emitter import Emitter
from onyx.chat.genui_prompt import GENUI_SYSTEM_PROMPT
from onyx.chat.llm_step import extract_tool_calls_from_response_text
from onyx.chat.llm_step import run_llm_step
from onyx.chat.models import ChatMessageSimple
@@ -26,6 +27,7 @@ from onyx.chat.prompt_utils import build_system_prompt
from onyx.chat.prompt_utils import (
get_default_base_system_prompt,
)
from onyx.configs.app_configs import GENUI_ENABLED
from onyx.configs.app_configs import INTEGRATION_TESTS_MODE
from onyx.configs.constants import DocumentSource
from onyx.configs.constants import MessageType
@@ -699,6 +701,7 @@ def run_llm_loop(
tools=tools,
should_cite_documents=should_cite_documents
or always_cite_documents,
genui_prompt=GENUI_SYSTEM_PROMPT if GENUI_ENABLED else None,
)
system_prompt = ChatMessageSimple(
message=system_prompt_str,
@@ -792,6 +795,7 @@ def run_llm_loop(
final_documents=gathered_documents,
user_identity=user_identity,
pre_answer_processing_time=pre_answer_processing_time,
use_genui=GENUI_ENABLED,
)
if has_reasoned:
reasoning_cycles += 1

View File

@@ -48,6 +48,8 @@ from onyx.server.query_and_chat.placement import Placement
from onyx.server.query_and_chat.streaming_models import AgentResponseDelta
from onyx.server.query_and_chat.streaming_models import AgentResponseStart
from onyx.server.query_and_chat.streaming_models import CitationInfo
from onyx.server.query_and_chat.streaming_models import GenUIDelta
from onyx.server.query_and_chat.streaming_models import GenUIStart
from onyx.server.query_and_chat.streaming_models import Packet
from onyx.server.query_and_chat.streaming_models import ReasoningDelta
from onyx.server.query_and_chat.streaming_models import ReasoningDone
@@ -931,6 +933,7 @@ def run_llm_step_pkt_generator(
is_deep_research: bool = False,
pre_answer_processing_time: float | None = None,
timeout_override: int | None = None,
use_genui: bool = False,
) -> Generator[Packet, None, tuple[LlmStepResult, bool]]:
"""Run an LLM step and stream the response as packets.
NOTE: DO NOT TOUCH THIS FUNCTION BEFORE ASKING YUHONG, this is very finicky and
@@ -966,6 +969,8 @@ def run_llm_step_pkt_generator(
pre_answer_processing_time: Optional time spent processing before the
answer started, recorded in state_container for analytics.
timeout_override: Optional timeout override for the LLM call.
use_genui: If True, emit GenUIStart/GenUIDelta packets instead of
AgentResponseStart/AgentResponseDelta.
Yields:
Packet: Streaming packets containing:
@@ -1112,6 +1117,7 @@ def run_llm_step_pkt_generator(
pre_answer_processing_time
)
# Always emit AgentResponseStart for text rendering
yield Packet(
placement=_current_placement(),
obj=AgentResponseStart(
@@ -1119,9 +1125,30 @@ def run_llm_step_pkt_generator(
pre_answer_processing_seconds=pre_answer_processing_time,
),
)
# When GenUI is enabled, also emit GenUIStart so the
# frontend can offer both text and structured views.
if use_genui:
yield Packet(
placement=_current_placement(),
obj=GenUIStart(),
)
answer_start = True
if citation_processor:
if use_genui:
accumulated_answer += content_chunk
if state_container:
state_container.set_answer_tokens(accumulated_answer)
# Emit both text and GenUI deltas so the frontend can
# toggle between plain text and structured rendering.
yield Packet(
placement=_current_placement(),
obj=AgentResponseDelta(content=content_chunk),
)
yield Packet(
placement=_current_placement(),
obj=GenUIDelta(content=content_chunk),
)
elif citation_processor:
yield from _emit_citation_results(
citation_processor.process_token(content_chunk)
)
@@ -1338,6 +1365,7 @@ def run_llm_step(
is_deep_research: bool = False,
pre_answer_processing_time: float | None = None,
timeout_override: int | None = None,
use_genui: bool = False,
) -> tuple[LlmStepResult, bool]:
"""Wrapper around run_llm_step_pkt_generator that consumes packets and emits them.
@@ -1361,6 +1389,7 @@ def run_llm_step(
is_deep_research=is_deep_research,
pre_answer_processing_time=pre_answer_processing_time,
timeout_override=timeout_override,
use_genui=use_genui,
)
while True:

View File

@@ -490,13 +490,13 @@ def handle_stream_message_objects(
# Milestone tracking, most devs using the API don't need to understand this
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id) if not user.is_anonymous else tenant_id,
distinct_id=user.email if not user.is_anonymous else tenant_id,
event=MilestoneRecordType.MULTIPLE_ASSISTANTS,
)
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id) if not user.is_anonymous else tenant_id,
distinct_id=user.email if not user.is_anonymous else tenant_id,
event=MilestoneRecordType.USER_MESSAGE_SENT,
properties={
"origin": new_msg_req.origin.value,

View File

@@ -200,6 +200,7 @@ def build_system_prompt(
tools: Sequence[Tool] | None = None,
should_cite_documents: bool = False,
include_all_guidance: bool = False,
genui_prompt: str | None = None,
) -> str:
"""Should only be called with the default behavior system prompt.
If the user has replaced the default behavior prompt with their custom agent prompt, do not call this function.
@@ -288,4 +289,7 @@ def build_system_prompt(
if tool_guidance_sections:
system_prompt += TOOL_SECTION_HEADER + "\n".join(tool_guidance_sections)
if genui_prompt:
system_prompt += "\n\n" + genui_prompt
return system_prompt

View File

@@ -957,7 +957,7 @@ ENTERPRISE_EDITION_ENABLED = (
#####
# Image Generation Configuration (DEPRECATED)
# These environment variables will be deprecated soon.
# To configure image generation, please visit the Image Generation page in the Admin Panel.
# To configure image generation, please visit the Image Generation page in the Admin Settings.
#####
# Azure Image Configurations
AZURE_IMAGE_API_VERSION = os.environ.get("AZURE_IMAGE_API_VERSION") or os.environ.get(
@@ -1046,10 +1046,14 @@ POD_NAMESPACE = os.environ.get("POD_NAMESPACE")
DEV_MODE = os.environ.get("DEV_MODE", "").lower() == "true"
HOOK_ENABLED = os.environ.get("HOOK_ENABLED", "").lower() == "true"
INTEGRATION_TESTS_MODE = os.environ.get("INTEGRATION_TESTS_MODE", "").lower() == "true"
#####
# GenUI Configuration
#####
# Enable GenUI structured UI rendering in chat responses
GENUI_ENABLED = os.environ.get("GENUI_ENABLED", "").lower() == "true"
#####
# Captcha Configuration (for cloud signup protection)
#####

View File

@@ -304,13 +304,3 @@ class LLMModelFlowType(str, PyEnum):
CHAT = "chat"
VISION = "vision"
CONTEXTUAL_RAG = "contextual_rag"
class HookPoint(str, PyEnum):
DOCUMENT_INGESTION = "document_ingestion"
QUERY_PROCESSING = "query_processing"
class HookFailStrategy(str, PyEnum):
HARD = "hard" # exception propagates, pipeline aborts
SOFT = "soft" # log error, return original input, pipeline continues

View File

@@ -64,8 +64,6 @@ from onyx.db.enums import (
BuildSessionStatus,
EmbeddingPrecision,
HierarchyNodeType,
HookFailStrategy,
HookPoint,
IndexingMode,
OpenSearchDocumentMigrationStatus,
OpenSearchTenantMigrationStatus,
@@ -5180,90 +5178,3 @@ class CacheStore(Base):
expires_at: Mapped[datetime.datetime | None] = mapped_column(
DateTime(timezone=True), nullable=True
)
class Hook(Base):
"""Pairs a HookPoint with a customer-provided API endpoint.
At most one non-deleted Hook per HookPoint is allowed, enforced by a
partial unique index on (hook_point) where deleted=false.
"""
__tablename__ = "hook"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
name: Mapped[str] = mapped_column(String, nullable=False)
hook_point: Mapped[HookPoint] = mapped_column(
Enum(HookPoint, native_enum=False), nullable=False
)
endpoint_url: Mapped[str | None] = mapped_column(Text, nullable=True)
api_key: Mapped[SensitiveValue[str] | None] = mapped_column(
EncryptedString(), nullable=True
)
is_reachable: Mapped[bool | None] = mapped_column(
Boolean, nullable=True, default=None
) # null = never validated, true = last check passed, false = last check failed
fail_strategy: Mapped[HookFailStrategy] = mapped_column(
Enum(HookFailStrategy, native_enum=False),
nullable=False,
default=HookFailStrategy.HARD,
)
timeout_seconds: Mapped[float] = mapped_column(Float, nullable=False, default=30.0)
is_active: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
deleted: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
creator_id: Mapped[UUID | None] = mapped_column(
PGUUID(as_uuid=True),
ForeignKey("user.id", ondelete="SET NULL"),
nullable=True,
)
created_at: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), server_default=func.now(), nullable=False
)
updated_at: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True),
server_default=func.now(),
onupdate=func.now(),
nullable=False,
)
creator: Mapped["User | None"] = relationship("User", foreign_keys=[creator_id])
execution_logs: Mapped[list["HookExecutionLog"]] = relationship(
"HookExecutionLog", back_populates="hook", cascade="all, delete-orphan"
)
__table_args__ = (
Index(
"ix_hook_one_non_deleted_per_point",
"hook_point",
unique=True,
postgresql_where=(deleted == False), # noqa: E712
),
)
class HookExecutionLog(Base):
"""Records hook executions for health monitoring and debugging.
Currently only failures are logged; the is_success column exists so
success logging can be added later without a schema change.
Retention: rows older than 30 days are deleted by a nightly Celery task.
"""
__tablename__ = "hook_execution_log"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
hook_id: Mapped[int] = mapped_column(
Integer,
ForeignKey("hook.id", ondelete="CASCADE"),
nullable=False,
index=True,
)
is_success: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
error_message: Mapped[str | None] = mapped_column(Text, nullable=True)
status_code: Mapped[int | None] = mapped_column(Integer, nullable=True)
duration_ms: Mapped[int | None] = mapped_column(Integer, nullable=True)
created_at: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), server_default=func.now(), nullable=False, index=True
)
hook: Mapped["Hook"] = relationship("Hook", back_populates="execution_logs")

View File

@@ -35,8 +35,6 @@ class OnyxErrorCode(Enum):
INSUFFICIENT_PERMISSIONS = ("INSUFFICIENT_PERMISSIONS", 403)
ADMIN_ONLY = ("ADMIN_ONLY", 403)
EE_REQUIRED = ("EE_REQUIRED", 403)
SINGLE_TENANT_ONLY = ("SINGLE_TENANT_ONLY", 403)
ENV_VAR_GATED = ("ENV_VAR_GATED", 403)
# ------------------------------------------------------------------
# Validation / Bad Request (400)

View File

@@ -1,26 +0,0 @@
from onyx.configs.app_configs import HOOK_ENABLED
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from shared_configs.configs import MULTI_TENANT
def require_hook_enabled() -> None:
"""FastAPI dependency that gates all hook management endpoints.
Hooks are only available in single-tenant / self-hosted deployments with
HOOK_ENABLED=true explicitly set. Two layers of protection:
1. MULTI_TENANT check — rejects even if HOOK_ENABLED is accidentally set true
2. HOOK_ENABLED flag — explicit opt-in by the operator
Use as: Depends(require_hook_enabled)
"""
if MULTI_TENANT:
raise OnyxError(
OnyxErrorCode.SINGLE_TENANT_ONLY,
"Hooks are not available in multi-tenant deployments",
)
if not HOOK_ENABLED:
raise OnyxError(
OnyxErrorCode.ENV_VAR_GATED,
"Hooks are not enabled. Set HOOK_ENABLED=true to enable.",
)

View File

@@ -1319,7 +1319,7 @@ def get_connector_indexing_status(
# Track admin page visit for analytics
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.VISITED_ADMIN_PAGE,
)
@@ -1533,7 +1533,7 @@ def create_connector_from_model(
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.CREATED_CONNECTOR,
)
@@ -1611,7 +1611,7 @@ def create_connector_with_mock_credential(
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.CREATED_CONNECTOR,
)
return response
@@ -1915,7 +1915,9 @@ def submit_connector_request(
if not connector_name:
raise HTTPException(status_code=400, detail="Connector name cannot be empty")
# Get user identifier for telemetry
user_email = user.email
distinct_id = user_email or tenant_id
# Track connector request via PostHog telemetry (Cloud only)
from shared_configs.configs import MULTI_TENANT
@@ -1923,11 +1925,11 @@ def submit_connector_request(
if MULTI_TENANT:
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=distinct_id,
event=MilestoneRecordType.REQUESTED_CONNECTOR,
properties={
"connector_name": connector_name,
"user_email": user.email,
"user_email": user_email,
},
)

View File

@@ -314,7 +314,7 @@ def create_persona(
)
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=str(user.id),
distinct_id=user.email,
event=MilestoneRecordType.CREATED_ASSISTANT,
)

View File

@@ -81,7 +81,6 @@ from onyx.server.manage.llm.models import VisionProviderResponse
from onyx.server.manage.llm.utils import generate_bedrock_display_name
from onyx.server.manage.llm.utils import generate_ollama_display_name
from onyx.server.manage.llm.utils import infer_vision_support
from onyx.server.manage.llm.utils import is_embedding_model
from onyx.server.manage.llm.utils import is_reasoning_model
from onyx.server.manage.llm.utils import is_valid_bedrock_model
from onyx.server.manage.llm.utils import ModelMetadata
@@ -1375,10 +1374,6 @@ def get_litellm_available_models(
try:
model_details = LitellmModelDetails.model_validate(model)
# Skip embedding models
if is_embedding_model(model_details.id):
continue
results.append(
LitellmFinalModelResponse(
provider_name=model_details.owned_by,

View File

@@ -366,18 +366,3 @@ def extract_vendor_from_model_name(model_name: str, provider: str) -> str | None
return None
return None
def is_embedding_model(model_name: str) -> bool:
"""Checks for if a model is an embedding model"""
from litellm import get_model_info
try:
# get_model_info raises on unknown models
# default to False
model_info = get_model_info(model_name)
except Exception:
return False
is_embedding_mode = model_info.get("mode") == "embedding"
return is_embedding_mode

View File

@@ -561,7 +561,7 @@ def handle_send_chat_message(
tenant_id = get_current_tenant_id()
mt_cloud_telemetry(
tenant_id=tenant_id,
distinct_id=tenant_id if user.is_anonymous else str(user.id),
distinct_id=tenant_id if user.is_anonymous else user.email,
event=MilestoneRecordType.RAN_QUERY,
)

View File

@@ -9,6 +9,7 @@ from pydantic import ValidationError
from sqlalchemy.orm import Session
from onyx.chat.citation_utils import extract_citation_order_from_text
from onyx.configs.app_configs import GENUI_ENABLED
from onyx.configs.constants import MessageType
from onyx.context.search.models import SavedSearchDoc
from onyx.context.search.models import SearchDoc
@@ -29,6 +30,8 @@ from onyx.server.query_and_chat.streaming_models import CustomToolStart
from onyx.server.query_and_chat.streaming_models import FileReaderResult
from onyx.server.query_and_chat.streaming_models import FileReaderStart
from onyx.server.query_and_chat.streaming_models import GeneratedImage
from onyx.server.query_and_chat.streaming_models import GenUIDelta
from onyx.server.query_and_chat.streaming_models import GenUIStart
from onyx.server.query_and_chat.streaming_models import ImageGenerationFinal
from onyx.server.query_and_chat.streaming_models import ImageGenerationToolStart
from onyx.server.query_and_chat.streaming_models import IntermediateReportDelta
@@ -89,6 +92,16 @@ def create_message_packets(
)
)
# When GenUI is enabled, also emit GenUIStart so the frontend
# can offer both text and structured views for old conversations.
if GENUI_ENABLED:
packets.append(
Packet(
placement=Placement(turn_index=turn_index),
obj=GenUIStart(),
)
)
packets.append(
Packet(
placement=Placement(turn_index=turn_index),
@@ -98,6 +111,16 @@ def create_message_packets(
),
)
if GENUI_ENABLED:
packets.append(
Packet(
placement=Placement(turn_index=turn_index),
obj=GenUIDelta(
content=message_text,
),
),
)
packets.append(
Packet(
placement=Placement(turn_index=turn_index),

View File

@@ -55,6 +55,9 @@ class StreamingType(Enum):
INTERMEDIATE_REPORT_DELTA = "intermediate_report_delta"
INTERMEDIATE_REPORT_CITED_DOCS = "intermediate_report_cited_docs"
GENUI_START = "genui_start"
GENUI_DELTA = "genui_delta"
class BaseObj(BaseModel):
type: str = ""
@@ -367,6 +370,18 @@ class IntermediateReportCitedDocs(BaseObj):
cited_docs: list[SearchDoc] | None = None
################################################
# GenUI Packets
################################################
class GenUIStart(BaseObj):
type: Literal["genui_start"] = StreamingType.GENUI_START.value
class GenUIDelta(BaseObj):
type: Literal["genui_delta"] = StreamingType.GENUI_DELTA.value
content: str
################################################
# Packet Object
################################################
@@ -415,6 +430,9 @@ PacketObj = Union[
IntermediateReportStart,
IntermediateReportDelta,
IntermediateReportCitedDocs,
# GenUI Packets
GenUIStart,
GenUIDelta,
]

View File

@@ -2,7 +2,6 @@ import contextvars
import threading
import uuid
from enum import Enum
from typing import Any
import requests
@@ -153,7 +152,7 @@ def mt_cloud_telemetry(
tenant_id: str,
distinct_id: str,
event: MilestoneRecordType,
properties: dict[str, Any] | None = None,
properties: dict | None = None,
) -> None:
if not MULTI_TENANT:
return
@@ -174,18 +173,3 @@ def mt_cloud_telemetry(
attribute="event_telemetry",
fallback=noop_fallback,
)(distinct_id, event, all_properties)
def mt_cloud_identify(
distinct_id: str,
properties: dict[str, Any] | None = None,
) -> None:
"""Create/update a PostHog person profile (Cloud only)."""
if not MULTI_TENANT:
return
fetch_versioned_implementation_with_fallback(
module="onyx.utils.telemetry",
attribute="identify_user",
fallback=noop_fallback,
)(distinct_id, properties)

View File

@@ -65,7 +65,7 @@ attrs==25.4.0
# jsonschema
# referencing
# zeep
authlib==1.6.9
authlib==1.6.7
# via fastmcp
azure-cognitiveservices-speech==1.38.0
# via onyx
@@ -737,7 +737,7 @@ pygithub==2.5.0
# via onyx
pygments==2.19.2
# via rich
pyjwt==2.12.0
pyjwt==2.11.0
# via
# fastapi-users
# mcp

View File

@@ -353,7 +353,7 @@ pygments==2.19.2
# via
# ipython
# ipython-pygments-lexers
pyjwt==2.12.0
pyjwt==2.11.0
# via mcp
pyparsing==3.2.5
# via matplotlib

View File

@@ -218,7 +218,7 @@ pydantic-core==2.33.2
# via pydantic
pydantic-settings==2.12.0
# via mcp
pyjwt==2.12.0
pyjwt==2.11.0
# via mcp
python-dateutil==2.8.2
# via

View File

@@ -308,7 +308,7 @@ pydantic-core==2.33.2
# via pydantic
pydantic-settings==2.12.0
# via mcp
pyjwt==2.12.0
pyjwt==2.11.0
# via mcp
python-dateutil==2.8.2
# via

View File

@@ -45,21 +45,6 @@ npx playwright test <TEST_NAME>
Shared fixtures live in `backend/tests/conftest.py`. Test subdirectories can define
their own `conftest.py` for directory-scoped fixtures.
## Running Tests Repeatedly (`pytest-repeat`)
Use `pytest-repeat` to catch flaky tests by running them multiple times:
```bash
# Run a specific test 50 times
pytest --count=50 backend/tests/unit/path/to/test.py::test_name
# Stop on first failure with -x
pytest --count=50 -x backend/tests/unit/path/to/test.py::test_name
# Repeat an entire test file
pytest --count=10 backend/tests/unit/path/to/test_file.py
```
## Best Practices
### Use `enable_ee` fixture instead of inlining

View File

@@ -1,120 +0,0 @@
import pytest
from onyx.auth.users import _is_same_origin
class TestExactMatch:
"""Origins that are textually identical should always match."""
@pytest.mark.parametrize(
"origin",
[
"http://localhost:3000",
"https://app.example.com",
"https://app.example.com:8443",
"http://127.0.0.1:8080",
],
)
def test_identical_origins(self, origin: str) -> None:
assert _is_same_origin(origin, origin)
class TestLoopbackPortRelaxation:
"""On loopback addresses, port differences should be ignored."""
@pytest.mark.parametrize(
"actual,expected",
[
("http://localhost:3001", "http://localhost:3000"),
("http://localhost:8080", "http://localhost:3000"),
("http://localhost", "http://localhost:3000"),
("http://127.0.0.1:3001", "http://127.0.0.1:3000"),
("http://[::1]:3001", "http://[::1]:3000"),
],
)
def test_loopback_different_ports_accepted(
self, actual: str, expected: str
) -> None:
assert _is_same_origin(actual, expected)
@pytest.mark.parametrize(
"actual,expected",
[
("https://localhost:3001", "http://localhost:3000"),
("http://localhost:3001", "https://localhost:3000"),
],
)
def test_loopback_different_scheme_rejected(
self, actual: str, expected: str
) -> None:
assert not _is_same_origin(actual, expected)
def test_loopback_hostname_mismatch_rejected(self) -> None:
assert not _is_same_origin("http://localhost:3001", "http://127.0.0.1:3000")
class TestNonLoopbackStrictPort:
"""Non-loopback origins must match scheme, hostname, AND port."""
def test_different_port_rejected(self) -> None:
assert not _is_same_origin(
"https://app.example.com:8443", "https://app.example.com"
)
def test_different_hostname_rejected(self) -> None:
assert not _is_same_origin("https://evil.com", "https://app.example.com")
def test_different_scheme_rejected(self) -> None:
assert not _is_same_origin("http://app.example.com", "https://app.example.com")
def test_same_port_explicit(self) -> None:
assert _is_same_origin(
"https://app.example.com:443", "https://app.example.com:443"
)
class TestDefaultPortNormalization:
"""Port should be normalized so that omitted default port == explicit default port."""
def test_http_implicit_vs_explicit_80(self) -> None:
assert _is_same_origin("http://example.com", "http://example.com:80")
def test_http_explicit_80_vs_implicit(self) -> None:
assert _is_same_origin("http://example.com:80", "http://example.com")
def test_https_implicit_vs_explicit_443(self) -> None:
assert _is_same_origin("https://example.com", "https://example.com:443")
def test_https_explicit_443_vs_implicit(self) -> None:
assert _is_same_origin("https://example.com:443", "https://example.com")
def test_http_non_default_port_vs_implicit_rejected(self) -> None:
assert not _is_same_origin("http://example.com:8080", "http://example.com")
class TestTrailingSlash:
"""Trailing slashes should not affect comparison."""
def test_trailing_slash_on_actual(self) -> None:
assert _is_same_origin("https://app.example.com/", "https://app.example.com")
def test_trailing_slash_on_expected(self) -> None:
assert _is_same_origin("https://app.example.com", "https://app.example.com/")
def test_trailing_slash_on_both(self) -> None:
assert _is_same_origin("https://app.example.com/", "https://app.example.com/")
class TestCSWSHScenarios:
"""Realistic attack scenarios that must be rejected."""
def test_remote_attacker_rejected(self) -> None:
assert not _is_same_origin("https://evil.com", "http://localhost:3000")
def test_remote_attacker_same_port_rejected(self) -> None:
assert not _is_same_origin("http://evil.com:3000", "http://localhost:3000")
def test_remote_attacker_matching_hostname_different_port(self) -> None:
assert not _is_same_origin(
"https://app.example.com:9999", "https://app.example.com"
)

View File

@@ -1,194 +0,0 @@
from unittest.mock import MagicMock
from unittest.mock import patch
from onyx.background.celery.tasks.hierarchyfetching.tasks import (
_connector_supports_hierarchy_fetching,
)
from onyx.background.celery.tasks.hierarchyfetching.tasks import (
check_for_hierarchy_fetching,
)
from onyx.connectors.factory import ConnectorMissingException
from onyx.connectors.interfaces import BaseConnector
from onyx.connectors.interfaces import HierarchyConnector
from onyx.connectors.interfaces import HierarchyOutput
from onyx.connectors.interfaces import SecondsSinceUnixEpoch
TASKS_MODULE = "onyx.background.celery.tasks.hierarchyfetching.tasks"
class _NonHierarchyConnector(BaseConnector):
def load_credentials(self, credentials: dict) -> dict | None: # noqa: ARG002
return None
class _HierarchyCapableConnector(HierarchyConnector):
def load_credentials(self, credentials: dict) -> dict | None: # noqa: ARG002
return None
def load_hierarchy(
self,
start: SecondsSinceUnixEpoch, # noqa: ARG002
end: SecondsSinceUnixEpoch, # noqa: ARG002
) -> HierarchyOutput:
return
yield
def _build_cc_pair_mock() -> MagicMock:
cc_pair = MagicMock()
cc_pair.connector.source = "mock-source"
cc_pair.connector.input_type = "mock-input-type"
return cc_pair
def _build_redis_mock_with_lock() -> tuple[MagicMock, MagicMock]:
redis_client = MagicMock()
lock = MagicMock()
lock.acquire.return_value = True
lock.owned.return_value = True
redis_client.lock.return_value = lock
return redis_client, lock
@patch(f"{TASKS_MODULE}.identify_connector_class")
def test_connector_supports_hierarchy_fetching_false_for_non_hierarchy_connector(
mock_identify_connector_class: MagicMock,
) -> None:
mock_identify_connector_class.return_value = _NonHierarchyConnector
assert _connector_supports_hierarchy_fetching(_build_cc_pair_mock()) is False
mock_identify_connector_class.assert_called_once_with("mock-source")
@patch(f"{TASKS_MODULE}.task_logger.warning")
@patch(f"{TASKS_MODULE}.identify_connector_class")
def test_connector_supports_hierarchy_fetching_false_when_class_missing(
    mock_identify_connector_class: MagicMock,
    mock_warning: MagicMock,
) -> None:
    # When the connector class cannot be resolved at all, the helper must
    # degrade gracefully: report "unsupported" and log a warning, not raise.
    mock_identify_connector_class.side_effect = ConnectorMissingException("missing")
    assert _connector_supports_hierarchy_fetching(_build_cc_pair_mock()) is False
    mock_warning.assert_called_once()
@patch(f"{TASKS_MODULE}.identify_connector_class")
def test_connector_supports_hierarchy_fetching_true_for_supported_connector(
    mock_identify_connector_class: MagicMock,
) -> None:
    # A HierarchyConnector subclass must be reported as supported, with the
    # class lookup keyed by the cc_pair's source.
    mock_identify_connector_class.return_value = _HierarchyCapableConnector
    assert _connector_supports_hierarchy_fetching(_build_cc_pair_mock()) is True
    mock_identify_connector_class.assert_called_once_with("mock-source")
@patch(f"{TASKS_MODULE}._try_creating_hierarchy_fetching_task")
@patch(f"{TASKS_MODULE}._is_hierarchy_fetching_due")
@patch(f"{TASKS_MODULE}.get_connector_credential_pair_from_id")
@patch(f"{TASKS_MODULE}.fetch_indexable_standard_connector_credential_pair_ids")
@patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
@patch(f"{TASKS_MODULE}.get_redis_client")
@patch(f"{TASKS_MODULE}._connector_supports_hierarchy_fetching")
def test_check_for_hierarchy_fetching_skips_unsupported_connectors(
    mock_supports_hierarchy_fetching: MagicMock,
    mock_get_redis_client: MagicMock,
    mock_get_session: MagicMock,
    mock_fetch_cc_pair_ids: MagicMock,
    mock_get_cc_pair: MagicMock,
    mock_is_due: MagicMock,
    mock_try_create_task: MagicMock,
) -> None:
    # When the connector does not support hierarchy fetching, the cc_pair is
    # skipped entirely: no due-check, no task creation, zero tasks reported.
    redis_client, lock = _build_redis_mock_with_lock()
    mock_get_redis_client.return_value = redis_client
    mock_get_session.return_value.__enter__.return_value = MagicMock()
    mock_fetch_cc_pair_ids.return_value = [123]
    mock_get_cc_pair.return_value = _build_cc_pair_mock()
    mock_supports_hierarchy_fetching.return_value = False
    # Due even though unsupported — proves the support check short-circuits.
    mock_is_due.return_value = True
    task_app = MagicMock()
    with patch.object(check_for_hierarchy_fetching, "app", task_app):
        result = check_for_hierarchy_fetching.run(tenant_id="test-tenant")
    assert result == 0
    mock_is_due.assert_not_called()
    mock_try_create_task.assert_not_called()
    # The beat lock must always be released, even when nothing was scheduled.
    lock.release.assert_called_once()
@patch(f"{TASKS_MODULE}._try_creating_hierarchy_fetching_task")
@patch(f"{TASKS_MODULE}._is_hierarchy_fetching_due")
@patch(f"{TASKS_MODULE}.get_connector_credential_pair_from_id")
@patch(f"{TASKS_MODULE}.fetch_indexable_standard_connector_credential_pair_ids")
@patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
@patch(f"{TASKS_MODULE}.get_redis_client")
@patch(f"{TASKS_MODULE}._connector_supports_hierarchy_fetching")
def test_check_for_hierarchy_fetching_creates_task_for_supported_due_connector(
    mock_supports_hierarchy_fetching: MagicMock,
    mock_get_redis_client: MagicMock,
    mock_get_session: MagicMock,
    mock_fetch_cc_pair_ids: MagicMock,
    mock_get_cc_pair: MagicMock,
    mock_is_due: MagicMock,
    mock_try_create_task: MagicMock,
) -> None:
    # Happy path: a supported, due connector results in exactly one created
    # task, with the task-creation helper receiving the full context.
    redis_client, lock = _build_redis_mock_with_lock()
    cc_pair = _build_cc_pair_mock()
    db_session = MagicMock()
    mock_get_redis_client.return_value = redis_client
    mock_get_session.return_value.__enter__.return_value = db_session
    mock_fetch_cc_pair_ids.return_value = [123]
    mock_get_cc_pair.return_value = cc_pair
    mock_supports_hierarchy_fetching.return_value = True
    mock_is_due.return_value = True
    mock_try_create_task.return_value = "task-id"
    task_app = MagicMock()
    with patch.object(check_for_hierarchy_fetching, "app", task_app):
        result = check_for_hierarchy_fetching.run(tenant_id="test-tenant")
    assert result == 1
    mock_is_due.assert_called_once_with(cc_pair)
    mock_try_create_task.assert_called_once_with(
        celery_app=task_app,
        cc_pair=cc_pair,
        db_session=db_session,
        r=redis_client,
        tenant_id="test-tenant",
    )
    # The beat lock must always be released after scheduling.
    lock.release.assert_called_once()
@patch(f"{TASKS_MODULE}._try_creating_hierarchy_fetching_task")
@patch(f"{TASKS_MODULE}._is_hierarchy_fetching_due")
@patch(f"{TASKS_MODULE}.get_connector_credential_pair_from_id")
@patch(f"{TASKS_MODULE}.fetch_indexable_standard_connector_credential_pair_ids")
@patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
@patch(f"{TASKS_MODULE}.get_redis_client")
@patch(f"{TASKS_MODULE}._connector_supports_hierarchy_fetching")
def test_check_for_hierarchy_fetching_skips_supported_connector_when_not_due(
    mock_supports_hierarchy_fetching: MagicMock,
    mock_get_redis_client: MagicMock,
    mock_get_session: MagicMock,
    mock_fetch_cc_pair_ids: MagicMock,
    mock_get_cc_pair: MagicMock,
    mock_is_due: MagicMock,
    mock_try_create_task: MagicMock,
) -> None:
    # A supported connector that is not yet due gets its due-check evaluated
    # but must not have a task created for it.
    redis_client, lock = _build_redis_mock_with_lock()
    cc_pair = _build_cc_pair_mock()
    mock_get_redis_client.return_value = redis_client
    mock_get_session.return_value.__enter__.return_value = MagicMock()
    mock_fetch_cc_pair_ids.return_value = [123]
    mock_get_cc_pair.return_value = cc_pair
    mock_supports_hierarchy_fetching.return_value = True
    mock_is_due.return_value = False
    task_app = MagicMock()
    with patch.object(check_for_hierarchy_fetching, "app", task_app):
        result = check_for_hierarchy_fetching.run(tenant_id="test-tenant")
    assert result == 0
    mock_is_due.assert_called_once_with(cc_pair)
    mock_try_create_task.assert_not_called()
    # The beat lock must always be released, even when nothing was scheduled.
    lock.release.assert_called_once()

View File

@@ -1,40 +0,0 @@
"""Unit tests for the hooks feature gate."""
from unittest.mock import patch
import pytest
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.hooks.api_dependencies import require_hook_enabled
class TestRequireHookEnabled:
    """Tests for the require_hook_enabled feature-gate dependency.

    Hooks are gated on two conditions: the deployment must be single-tenant
    and the HOOK_ENABLED flag must be set. Each violation maps to a distinct
    OnyxErrorCode with HTTP 403.
    """

    def test_raises_when_multi_tenant(self) -> None:
        # Even with the flag on, multi-tenant deployments must be rejected.
        with (
            patch("onyx.hooks.api_dependencies.MULTI_TENANT", True),
            patch("onyx.hooks.api_dependencies.HOOK_ENABLED", True),
        ):
            with pytest.raises(OnyxError) as exc_info:
                require_hook_enabled()
        assert exc_info.value.error_code is OnyxErrorCode.SINGLE_TENANT_ONLY
        assert exc_info.value.status_code == 403
        assert "multi-tenant" in exc_info.value.detail

    def test_raises_when_flag_disabled(self) -> None:
        # Single-tenant but flag off: rejected with the env-var-gated code,
        # and the detail names the flag so operators know what to set.
        with (
            patch("onyx.hooks.api_dependencies.MULTI_TENANT", False),
            patch("onyx.hooks.api_dependencies.HOOK_ENABLED", False),
        ):
            with pytest.raises(OnyxError) as exc_info:
                require_hook_enabled()
        assert exc_info.value.error_code is OnyxErrorCode.ENV_VAR_GATED
        assert exc_info.value.status_code == 403
        assert "HOOK_ENABLED" in exc_info.value.detail

    def test_passes_when_enabled_single_tenant(self) -> None:
        # Both conditions satisfied: the dependency is a no-op.
        with (
            patch("onyx.hooks.api_dependencies.MULTI_TENANT", False),
            patch("onyx.hooks.api_dependencies.HOOK_ENABLED", True),
        ):
            require_hook_enabled()  # must not raise

View File

@@ -3,7 +3,6 @@
from onyx.server.manage.llm.utils import generate_bedrock_display_name
from onyx.server.manage.llm.utils import generate_ollama_display_name
from onyx.server.manage.llm.utils import infer_vision_support
from onyx.server.manage.llm.utils import is_embedding_model
from onyx.server.manage.llm.utils import is_reasoning_model
from onyx.server.manage.llm.utils import is_valid_bedrock_model
from onyx.server.manage.llm.utils import strip_openrouter_vendor_prefix
@@ -210,35 +209,3 @@ class TestIsReasoningModel:
is_reasoning_model("anthropic/claude-3-5-sonnet", "Claude 3.5 Sonnet")
is False
)
class TestIsEmbeddingModel:
    """Verify is_embedding_model classifies model names correctly.

    Known embedding families (OpenAI, Cohere, Bedrock Titan) must return
    True; chat and image models, plus unknown custom models, return False.
    """

    def test_openai_embedding_ada(self) -> None:
        verdict = is_embedding_model("text-embedding-ada-002")
        assert verdict is True

    def test_openai_embedding_3_small(self) -> None:
        verdict = is_embedding_model("text-embedding-3-small")
        assert verdict is True

    def test_openai_embedding_3_large(self) -> None:
        verdict = is_embedding_model("text-embedding-3-large")
        assert verdict is True

    def test_cohere_embed_model(self) -> None:
        verdict = is_embedding_model("embed-english-v3.0")
        assert verdict is True

    def test_bedrock_titan_embed(self) -> None:
        verdict = is_embedding_model("amazon.titan-embed-text-v1")
        assert verdict is True

    def test_gpt4o_not_embedding(self) -> None:
        verdict = is_embedding_model("gpt-4o")
        assert verdict is False

    def test_gpt4_not_embedding(self) -> None:
        verdict = is_embedding_model("gpt-4")
        assert verdict is False

    def test_dall_e_not_embedding(self) -> None:
        verdict = is_embedding_model("dall-e-3")
        assert verdict is False

    def test_unknown_custom_model_not_embedding(self) -> None:
        """Custom/local models not in litellm's model DB should default to False."""
        verdict = is_embedding_model("my-custom-local-model-v1")
        assert verdict is False

View File

@@ -17,7 +17,7 @@ def test_mt_cloud_telemetry_noop_when_not_multi_tenant(monkeypatch: Any) -> None
telemetry_utils.mt_cloud_telemetry(
tenant_id="tenant-1",
distinct_id="12345678-1234-1234-1234-123456789abc",
distinct_id="user@example.com",
event=MilestoneRecordType.USER_MESSAGE_SENT,
properties={"origin": "web"},
)
@@ -40,7 +40,7 @@ def test_mt_cloud_telemetry_calls_event_telemetry_when_multi_tenant(
telemetry_utils.mt_cloud_telemetry(
tenant_id="tenant-1",
distinct_id="12345678-1234-1234-1234-123456789abc",
distinct_id="user@example.com",
event=MilestoneRecordType.USER_MESSAGE_SENT,
properties={"origin": "web"},
)
@@ -51,52 +51,7 @@ def test_mt_cloud_telemetry_calls_event_telemetry_when_multi_tenant(
fallback=telemetry_utils.noop_fallback,
)
event_telemetry.assert_called_once_with(
"12345678-1234-1234-1234-123456789abc",
"user@example.com",
MilestoneRecordType.USER_MESSAGE_SENT,
{"origin": "web", "tenant_id": "tenant-1"},
)
def test_mt_cloud_identify_noop_when_not_multi_tenant(monkeypatch: Any) -> None:
fetch_impl = Mock()
monkeypatch.setattr(
telemetry_utils,
"fetch_versioned_implementation_with_fallback",
fetch_impl,
)
monkeypatch.setattr("onyx.utils.telemetry.MULTI_TENANT", False)
telemetry_utils.mt_cloud_identify(
distinct_id="12345678-1234-1234-1234-123456789abc",
properties={"email": "user@example.com"},
)
fetch_impl.assert_not_called()
def test_mt_cloud_identify_calls_identify_user_when_multi_tenant(
monkeypatch: Any,
) -> None:
identify_user = Mock()
fetch_impl = Mock(return_value=identify_user)
monkeypatch.setattr(
telemetry_utils,
"fetch_versioned_implementation_with_fallback",
fetch_impl,
)
monkeypatch.setattr("onyx.utils.telemetry.MULTI_TENANT", True)
telemetry_utils.mt_cloud_identify(
distinct_id="12345678-1234-1234-1234-123456789abc",
properties={"email": "user@example.com"},
)
fetch_impl.assert_called_once_with(
module="onyx.utils.telemetry",
attribute="identify_user",
fallback=telemetry_utils.noop_fallback,
)
identify_user.assert_called_once_with(
"12345678-1234-1234-1234-123456789abc",
{"email": "user@example.com"},
)

View File

@@ -32,17 +32,15 @@ def test_run_with_timeout_raises_on_timeout(slow: float, timeout: float) -> None
"""Test that a function that exceeds timeout raises TimeoutError"""
def slow_function() -> None:
time.sleep(slow)
time.sleep(slow) # Sleep for 2 seconds
start = time.monotonic()
with pytest.raises(TimeoutError) as exc_info:
run_with_timeout(timeout, slow_function)
elapsed = time.monotonic() - start
start = time.time()
run_with_timeout(timeout, slow_function) # Set timeout to 0.1 seconds
end = time.time()
assert end - start >= timeout
assert end - start < (slow + timeout) / 2
assert f"timed out after {timeout} seconds" in str(exc_info.value)
assert elapsed >= timeout
# Should return around the timeout duration, not the full sleep duration
assert elapsed == pytest.approx(timeout, abs=0.8)
@pytest.mark.filterwarnings("ignore::pytest.PytestUnhandledThreadExceptionWarning")

View File

@@ -15,9 +15,8 @@
# -f docker-compose.dev.yml up -d --wait
#
# This overlay:
# - Moves Vespa (index), both model servers, OpenSearch, MinIO,
# Redis (cache), and the background worker to profiles so they do
# not start by default
# - Moves Vespa (index), both model servers, code-interpreter, Redis (cache),
# and the background worker to profiles so they do not start by default
# - Makes depends_on references to removed services optional
# - Sets DISABLE_VECTOR_DB=true on the api_server
# - Uses PostgreSQL for caching and auth instead of Redis
@@ -28,8 +27,7 @@
# --profile inference Inference model server
# --profile background Background worker (Celery) — also needs redis
# --profile redis Redis cache
# --profile opensearch OpenSearch
# --profile s3-filestore MinIO (S3-compatible file store)
# --profile code-interpreter Code interpreter
# =============================================================================
name: onyx
@@ -40,9 +38,6 @@ services:
index:
condition: service_started
required: false
opensearch:
condition: service_started
required: false
cache:
condition: service_started
required: false
@@ -89,10 +84,4 @@ services:
inference_model_server:
profiles: ["inference"]
# OpenSearch is not needed in lite mode (no indexing).
opensearch:
profiles: ["opensearch"]
# MinIO is not needed in lite mode (Postgres handles file storage).
minio:
profiles: ["s3-filestore"]
code-interpreter: {}

View File

@@ -1,8 +1,8 @@
#!/bin/bash
set -euo pipefail
set -e
# Expected resource requirements (overridden below if --lite)
# Expected resource requirements
EXPECTED_DOCKER_RAM_GB=10
EXPECTED_DISK_GB=32
@@ -10,11 +10,6 @@ EXPECTED_DISK_GB=32
SHUTDOWN_MODE=false
DELETE_DATA_MODE=false
INCLUDE_CRAFT=false # Disabled by default, use --include-craft to enable
LITE_MODE=false # Disabled by default, use --lite to enable
USE_LOCAL_FILES=false # Disabled by default, use --local to skip downloading config files
NO_PROMPT=false
DRY_RUN=false
VERBOSE=false
while [[ $# -gt 0 ]]; do
case $1 in
@@ -30,26 +25,6 @@ while [[ $# -gt 0 ]]; do
INCLUDE_CRAFT=true
shift
;;
--lite)
LITE_MODE=true
shift
;;
--local)
USE_LOCAL_FILES=true
shift
;;
--no-prompt)
NO_PROMPT=true
shift
;;
--dry-run)
DRY_RUN=true
shift
;;
--verbose)
VERBOSE=true
shift
;;
--help|-h)
echo "Onyx Installation Script"
echo ""
@@ -57,23 +32,15 @@ while [[ $# -gt 0 ]]; do
echo ""
echo "Options:"
echo " --include-craft Enable Onyx Craft (AI-powered web app building)"
echo " --lite Deploy Onyx Lite (no Vespa, Redis, or model servers)"
echo " --local Use existing config files instead of downloading from GitHub"
echo " --shutdown Stop (pause) Onyx containers"
echo " --delete-data Remove all Onyx data (containers, volumes, and files)"
echo " --no-prompt Run non-interactively with defaults (for CI/automation)"
echo " --dry-run Show what would be done without making changes"
echo " --verbose Show detailed output for debugging"
echo " --help, -h Show this help message"
echo ""
echo "Examples:"
echo " $0 # Install Onyx"
echo " $0 --lite # Install Onyx Lite (minimal deployment)"
echo " $0 --include-craft # Install Onyx with Craft enabled"
echo " $0 --shutdown # Pause Onyx services"
echo " $0 --delete-data # Completely remove Onyx and all data"
echo " $0 --local # Re-run using existing config files on disk"
echo " $0 --no-prompt # Non-interactive install with defaults"
exit 0
;;
*)
@@ -84,129 +51,8 @@ while [[ $# -gt 0 ]]; do
esac
done
if [[ "$VERBOSE" = true ]]; then
set -x
fi
if [[ "$LITE_MODE" = true ]] && [[ "$INCLUDE_CRAFT" = true ]]; then
echo "ERROR: --lite and --include-craft cannot be used together."
echo "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
# When --lite is passed as a flag, lower resource thresholds early (before the
# resource check). When lite is chosen interactively, the thresholds are adjusted
# inside the new-deployment flow, after the resource check has already passed
# with the standard thresholds — which is the safer direction.
if [[ "$LITE_MODE" = true ]]; then
EXPECTED_DOCKER_RAM_GB=4
EXPECTED_DISK_GB=16
fi
INSTALL_ROOT="${INSTALL_PREFIX:-onyx_data}"
LITE_COMPOSE_FILE="docker-compose.onyx-lite.yml"
# Build the -f flags for docker compose.
# Pass "true" as $1 to auto-detect a previously-downloaded lite overlay
# (used by shutdown/delete-data so users don't need to remember --lite).
# Without the argument, the lite overlay is only included when --lite was
# explicitly passed — preventing install/start from silently staying in
# lite mode just because the file exists on disk from a prior run.
compose_file_args() {
local auto_detect="${1:-false}"
local args="-f docker-compose.yml"
if [[ "$LITE_MODE" = true ]] || { [[ "$auto_detect" = true ]] && [[ -f "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}" ]]; }; then
args="$args -f ${LITE_COMPOSE_FILE}"
fi
echo "$args"
}
# --- Downloader detection (curl with wget fallback) ---
DOWNLOADER=""
detect_downloader() {
if command -v curl &> /dev/null; then
DOWNLOADER="curl"
return 0
fi
if command -v wget &> /dev/null; then
DOWNLOADER="wget"
return 0
fi
echo "ERROR: Neither curl nor wget found. Please install one and retry."
exit 1
}
detect_downloader
download_file() {
local url="$1"
local output="$2"
if [[ "$DOWNLOADER" == "curl" ]]; then
curl -fsSL --retry 3 --retry-delay 2 --retry-connrefused -o "$output" "$url"
else
wget -q --tries=3 --timeout=20 -O "$output" "$url"
fi
}
# Ensures a required file is present. With --local, verifies the file exists on
# disk. Otherwise, downloads it from the given URL. Returns 0 on success, 1 on
# failure (caller should handle the exit).
ensure_file() {
local path="$1"
local url="$2"
local desc="$3"
if [[ "$USE_LOCAL_FILES" = true ]]; then
if [[ -f "$path" ]]; then
print_success "Using existing ${desc}"
return 0
fi
print_error "Required file missing: ${desc} (${path})"
return 1
fi
print_info "Downloading ${desc}..."
if download_file "$url" "$path" 2>/dev/null; then
print_success "${desc} downloaded"
return 0
fi
print_error "Failed to download ${desc}"
print_info "Please ensure you have internet connection and try again"
return 1
}
# --- Interactive prompt helpers ---
is_interactive() {
[[ "$NO_PROMPT" = false ]] && [[ -t 0 ]]
}
prompt_or_default() {
local prompt_text="$1"
local default_value="$2"
if is_interactive; then
read -p "$prompt_text" -r REPLY
if [[ -z "$REPLY" ]]; then
REPLY="$default_value"
fi
else
REPLY="$default_value"
fi
}
prompt_yn_or_default() {
local prompt_text="$1"
local default_value="$2"
if is_interactive; then
read -p "$prompt_text" -n 1 -r
echo ""
if [[ -z "$REPLY" ]]; then
REPLY="$default_value"
fi
else
REPLY="$default_value"
fi
}
# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
@@ -265,7 +111,7 @@ if [ "$SHUTDOWN_MODE" = true ]; then
fi
# Stop containers (without removing them)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args true) stop)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml stop)
if [ $? -eq 0 ]; then
print_success "Onyx containers stopped (paused)"
else
@@ -294,17 +140,12 @@ if [ "$DELETE_DATA_MODE" = true ]; then
echo " • All downloaded files and configurations"
echo " • All user data and documents"
echo ""
if is_interactive; then
read -p "Are you sure you want to continue? Type 'DELETE' to confirm: " -r
echo ""
if [ "$REPLY" != "DELETE" ]; then
print_info "Operation cancelled."
exit 0
fi
else
print_error "Cannot confirm destructive operation in non-interactive mode."
print_info "Run interactively or remove the ${INSTALL_ROOT} directory manually."
exit 1
read -p "Are you sure you want to continue? Type 'DELETE' to confirm: " -r
echo ""
if [ "$REPLY" != "DELETE" ]; then
print_info "Operation cancelled."
exit 0
fi
print_info "Removing Onyx containers and volumes..."
@@ -323,7 +164,7 @@ if [ "$DELETE_DATA_MODE" = true ]; then
fi
# Stop and remove containers with volumes
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args true) down -v)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml down -v)
if [ $? -eq 0 ]; then
print_success "Onyx containers and volumes removed"
else
@@ -345,117 +186,6 @@ if [ "$DELETE_DATA_MODE" = true ]; then
exit 0
fi
# --- Auto-install Docker (Linux only) ---
# Runs before the banner so a group-based re-exec doesn't repeat it.
install_docker_linux() {
local distro_id=""
if [[ -f /etc/os-release ]]; then
distro_id="$(. /etc/os-release && echo "${ID:-}")"
fi
case "$distro_id" in
amzn)
print_info "Detected Amazon Linux — installing Docker via package manager..."
if command -v dnf &> /dev/null; then
sudo dnf install -y docker
else
sudo yum install -y docker
fi
;;
*)
print_info "Installing Docker via get.docker.com..."
download_file "https://get.docker.com" /tmp/get-docker.sh
sudo sh /tmp/get-docker.sh
rm -f /tmp/get-docker.sh
;;
esac
sudo systemctl start docker 2>/dev/null || sudo service docker start 2>/dev/null || true
sudo systemctl enable docker 2>/dev/null || true
}
# Detect OS (including WSL)
IS_WSL=false
if [[ -n "${WSL_DISTRO_NAME:-}" ]] || grep -qi microsoft /proc/version 2>/dev/null; then
IS_WSL=true
fi
# Dry-run: show plan and exit
if [[ "$DRY_RUN" = true ]]; then
print_info "Dry run mode — showing what would happen:"
echo " • Install root: ${INSTALL_ROOT}"
echo " • Lite mode: ${LITE_MODE}"
echo " • Include Craft: ${INCLUDE_CRAFT}"
echo " • OS type: ${OSTYPE:-unknown} (WSL: ${IS_WSL})"
echo " • Downloader: ${DOWNLOADER}"
echo ""
print_success "Dry run complete (no changes made)"
exit 0
fi
if ! command -v docker &> /dev/null; then
if [[ "$OSTYPE" == "linux-gnu"* ]] || [[ -n "${WSL_DISTRO_NAME:-}" ]]; then
install_docker_linux
if ! command -v docker &> /dev/null; then
print_error "Docker installation failed."
echo " Visit: https://docs.docker.com/get-docker/"
exit 1
fi
print_success "Docker installed successfully"
fi
fi
# --- Auto-install Docker Compose plugin (Linux only) ---
if command -v docker &> /dev/null \
&& ! docker compose version &> /dev/null \
&& ! command -v docker-compose &> /dev/null \
&& { [[ "$OSTYPE" == "linux-gnu"* ]] || [[ -n "${WSL_DISTRO_NAME:-}" ]]; }; then
print_info "Docker Compose not found — installing plugin..."
COMPOSE_ARCH="$(uname -m)"
COMPOSE_URL="https://github.com/docker/compose/releases/latest/download/docker-compose-linux-${COMPOSE_ARCH}"
COMPOSE_DIR="/usr/local/lib/docker/cli-plugins"
COMPOSE_TMP="$(mktemp)"
sudo mkdir -p "$COMPOSE_DIR"
if download_file "$COMPOSE_URL" "$COMPOSE_TMP"; then
sudo mv "$COMPOSE_TMP" "$COMPOSE_DIR/docker-compose"
sudo chmod +x "$COMPOSE_DIR/docker-compose"
if docker compose version &> /dev/null; then
print_success "Docker Compose plugin installed"
else
print_error "Docker Compose plugin installed but not detected."
echo " Visit: https://docs.docker.com/compose/install/"
exit 1
fi
else
rm -f "$COMPOSE_TMP"
print_error "Failed to download Docker Compose plugin."
echo " Visit: https://docs.docker.com/compose/install/"
exit 1
fi
fi
# On Linux, ensure the current user can talk to the Docker daemon without
# sudo. If necessary, add them to the "docker" group and re-exec the
# script under that group so the rest of the install proceeds normally.
if command -v docker &> /dev/null \
&& { [[ "$OSTYPE" == "linux-gnu"* ]] || [[ -n "${WSL_DISTRO_NAME:-}" ]]; } \
&& [[ "$(id -u)" -ne 0 ]] \
&& ! docker info &> /dev/null; then
if [[ "${_ONYX_REEXEC:-}" = "1" ]]; then
print_error "Cannot connect to Docker after group re-exec."
print_info "Log out and back in, then run the script again."
exit 1
fi
if ! getent group docker &> /dev/null; then
sudo groupadd docker
fi
print_info "Adding $USER to the docker group..."
sudo usermod -aG docker "$USER"
print_info "Re-launching with docker group active..."
exec sg docker -c "_ONYX_REEXEC=1 bash $(printf '%q ' "$0" "$@")"
fi
# ASCII Art Banner
echo ""
echo -e "${BLUE}${BOLD}"
@@ -479,7 +209,8 @@ echo "2. Check your system resources (Docker, memory, disk space)"
echo "3. Guide you through deployment options (version, authentication)"
echo ""
if is_interactive; then
# Only prompt for acknowledgment if running interactively
if [ -t 0 ]; then
echo -e "${YELLOW}${BOLD}Please acknowledge and press Enter to continue...${NC}"
read -r
echo ""
@@ -529,35 +260,41 @@ else
exit 1
fi
# Returns 0 if $1 <= $2, 1 if $1 > $2
# Handles missing or non-numeric parts gracefully (treats them as 0)
# Function to compare version numbers
version_compare() {
local version1="${1:-0.0.0}"
local version2="${2:-0.0.0}"
# Returns 0 if $1 <= $2, 1 if $1 > $2
local version1=$1
local version2=$2
local v1_major v1_minor v1_patch v2_major v2_minor v2_patch
v1_major=$(echo "$version1" | cut -d. -f1)
v1_minor=$(echo "$version1" | cut -d. -f2)
v1_patch=$(echo "$version1" | cut -d. -f3)
v2_major=$(echo "$version2" | cut -d. -f1)
v2_minor=$(echo "$version2" | cut -d. -f2)
v2_patch=$(echo "$version2" | cut -d. -f3)
# Split versions into components
local v1_major=$(echo $version1 | cut -d. -f1)
local v1_minor=$(echo $version1 | cut -d. -f2)
local v1_patch=$(echo $version1 | cut -d. -f3)
# Default non-numeric or empty parts to 0
[[ "$v1_major" =~ ^[0-9]+$ ]] || v1_major=0
[[ "$v1_minor" =~ ^[0-9]+$ ]] || v1_minor=0
[[ "$v1_patch" =~ ^[0-9]+$ ]] || v1_patch=0
[[ "$v2_major" =~ ^[0-9]+$ ]] || v2_major=0
[[ "$v2_minor" =~ ^[0-9]+$ ]] || v2_minor=0
[[ "$v2_patch" =~ ^[0-9]+$ ]] || v2_patch=0
local v2_major=$(echo $version2 | cut -d. -f1)
local v2_minor=$(echo $version2 | cut -d. -f2)
local v2_patch=$(echo $version2 | cut -d. -f3)
if [ "$v1_major" -lt "$v2_major" ]; then return 0
elif [ "$v1_major" -gt "$v2_major" ]; then return 1; fi
# Compare major version
if [ "$v1_major" -lt "$v2_major" ]; then
return 0
elif [ "$v1_major" -gt "$v2_major" ]; then
return 1
fi
if [ "$v1_minor" -lt "$v2_minor" ]; then return 0
elif [ "$v1_minor" -gt "$v2_minor" ]; then return 1; fi
# Compare minor version
if [ "$v1_minor" -lt "$v2_minor" ]; then
return 0
elif [ "$v1_minor" -gt "$v2_minor" ]; then
return 1
fi
[ "$v1_patch" -le "$v2_patch" ]
# Compare patch version
if [ "$v1_patch" -le "$v2_patch" ]; then
return 0
else
return 1
fi
}
# Check Docker daemon
@@ -599,20 +336,10 @@ fi
# Convert to GB for display
if [ "$MEMORY_MB" -gt 0 ]; then
MEMORY_GB=$(awk "BEGIN {printf \"%.1f\", $MEMORY_MB / 1024}")
if [ "$(awk "BEGIN {print ($MEMORY_MB >= 1024)}")" = "1" ]; then
MEMORY_DISPLAY="~${MEMORY_GB}GB"
else
MEMORY_DISPLAY="${MEMORY_MB}MB"
fi
if [[ "$OSTYPE" == "darwin"* ]]; then
print_info "Docker memory allocation: ${MEMORY_DISPLAY}"
else
print_info "System memory: ${MEMORY_DISPLAY} (Docker uses host memory directly)"
fi
MEMORY_GB=$((MEMORY_MB / 1024))
print_info "Docker memory allocation: ~${MEMORY_GB}GB"
else
print_warning "Could not determine memory allocation"
MEMORY_DISPLAY="unknown"
print_warning "Could not determine Docker memory allocation"
MEMORY_MB=0
fi
@@ -631,7 +358,7 @@ RESOURCE_WARNING=false
EXPECTED_RAM_MB=$((EXPECTED_DOCKER_RAM_GB * 1024))
if [ "$MEMORY_MB" -gt 0 ] && [ "$MEMORY_MB" -lt "$EXPECTED_RAM_MB" ]; then
print_warning "Less than ${EXPECTED_DOCKER_RAM_GB}GB RAM available (found: ${MEMORY_DISPLAY})"
print_warning "Docker has less than ${EXPECTED_DOCKER_RAM_GB}GB RAM allocated (found: ~${MEMORY_GB}GB)"
RESOURCE_WARNING=true
fi
@@ -642,10 +369,10 @@ fi
if [ "$RESOURCE_WARNING" = true ]; then
echo ""
print_warning "Onyx recommends at least ${EXPECTED_DOCKER_RAM_GB}GB RAM and ${EXPECTED_DISK_GB}GB disk space for optimal performance in standard mode."
print_warning "Lite mode requires less resources (1-4GB RAM, 8-16GB disk depending on usage), but does not include a vector database."
print_warning "Onyx recommends at least ${EXPECTED_DOCKER_RAM_GB}GB RAM and ${EXPECTED_DISK_GB}GB disk space for optimal performance."
echo ""
read -p "Do you want to continue anyway? (y/N): " -n 1 -r
echo ""
prompt_yn_or_default "Do you want to continue anyway? (Y/n): " "y"
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
print_info "Installation cancelled. Please allocate more resources and try again."
exit 1
@@ -658,89 +385,117 @@ print_step "Creating directory structure"
if [ -d "${INSTALL_ROOT}" ]; then
print_info "Directory structure already exists"
print_success "Using existing ${INSTALL_ROOT} directory"
else
mkdir -p "${INSTALL_ROOT}/deployment"
mkdir -p "${INSTALL_ROOT}/data/nginx/local"
print_success "Directory structure created"
fi
mkdir -p "${INSTALL_ROOT}/deployment"
mkdir -p "${INSTALL_ROOT}/data/nginx/local"
print_success "Directory structure created"
# Ensure all required configuration files are present
# Download all required files
print_step "Downloading Onyx configuration files"
print_info "This step downloads all necessary configuration files from GitHub..."
echo ""
print_info "Downloading the following files:"
echo " • docker-compose.yml - Main Docker Compose configuration"
echo " • env.template - Environment variables template"
echo " • nginx/app.conf.template - Nginx web server configuration"
echo " • nginx/run-nginx.sh - Nginx startup script"
echo " • README.md - Documentation and setup instructions"
echo ""
# Download Docker Compose file
COMPOSE_FILE="${INSTALL_ROOT}/deployment/docker-compose.yml"
print_info "Downloading docker-compose.yml..."
if curl -fsSL -o "$COMPOSE_FILE" "${GITHUB_RAW_URL}/docker-compose.yml" 2>/dev/null; then
print_success "Docker Compose file downloaded successfully"
# Check if Docker Compose version is older than 2.24.0 and show warning
# Skip check for dev builds (assume they're recent enough)
if [ "$COMPOSE_VERSION" != "dev" ] && version_compare "$COMPOSE_VERSION" "2.24.0"; then
print_warning "Docker Compose version $COMPOSE_VERSION is older than 2.24.0"
echo ""
print_warning "The docker-compose.yml file uses the newer env_file format that requires Docker Compose 2.24.0 or later."
echo ""
print_info "To use this configuration with your current Docker Compose version, you have two options:"
echo ""
echo "1. Upgrade Docker Compose to version 2.24.0 or later (recommended)"
echo " Visit: https://docs.docker.com/compose/install/"
echo ""
echo "2. Manually replace all env_file sections in docker-compose.yml"
echo " Change from:"
echo " env_file:"
echo " - path: .env"
echo " required: false"
echo " To:"
echo " env_file: .env"
echo ""
print_warning "The installation will continue, but may fail if Docker Compose cannot parse the file."
echo ""
read -p "Do you want to continue anyway? (y/N): " -n 1 -r
echo ""
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
print_info "Installation cancelled. Please upgrade Docker Compose or manually edit the docker-compose.yml file."
exit 1
fi
print_info "Proceeding with installation despite Docker Compose version compatibility issues..."
fi
else
print_error "Failed to download Docker Compose file"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
# Download env.template file
ENV_TEMPLATE="${INSTALL_ROOT}/deployment/env.template"
print_info "Downloading env.template..."
if curl -fsSL -o "$ENV_TEMPLATE" "${GITHUB_RAW_URL}/env.template" 2>/dev/null; then
print_success "Environment template downloaded successfully"
else
print_error "Failed to download env.template"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
# Download nginx config files
NGINX_BASE_URL="https://raw.githubusercontent.com/onyx-dot-app/onyx/main/deployment/data/nginx"
if [[ "$USE_LOCAL_FILES" = true ]]; then
print_step "Verifying existing configuration files"
# Download app.conf.template
NGINX_CONFIG="${INSTALL_ROOT}/data/nginx/app.conf.template"
print_info "Downloading nginx configuration template..."
if curl -fsSL -o "$NGINX_CONFIG" "$NGINX_BASE_URL/app.conf.template" 2>/dev/null; then
print_success "Nginx configuration template downloaded"
else
print_step "Downloading Onyx configuration files"
print_info "This step downloads all necessary configuration files from GitHub..."
print_error "Failed to download nginx configuration template"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
ensure_file "${INSTALL_ROOT}/deployment/docker-compose.yml" \
"${GITHUB_RAW_URL}/docker-compose.yml" "docker-compose.yml" || exit 1
# Check Docker Compose version compatibility after obtaining docker-compose.yml
if [ "$COMPOSE_VERSION" != "dev" ] && version_compare "$COMPOSE_VERSION" "2.24.0"; then
print_warning "Docker Compose version $COMPOSE_VERSION is older than 2.24.0"
echo ""
print_warning "The docker-compose.yml file uses the newer env_file format that requires Docker Compose 2.24.0 or later."
echo ""
print_info "To use this configuration with your current Docker Compose version, you have two options:"
echo ""
echo "1. Upgrade Docker Compose to version 2.24.0 or later (recommended)"
echo " Visit: https://docs.docker.com/compose/install/"
echo ""
echo "2. Manually replace all env_file sections in docker-compose.yml"
echo " Change from:"
echo " env_file:"
echo " - path: .env"
echo " required: false"
echo " To:"
echo " env_file: .env"
echo ""
print_warning "The installation will continue, but may fail if Docker Compose cannot parse the file."
echo ""
prompt_yn_or_default "Do you want to continue anyway? (Y/n): " "y"
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
print_info "Installation cancelled. Please upgrade Docker Compose or manually edit the docker-compose.yml file."
exit 1
fi
print_info "Proceeding with installation despite Docker Compose version compatibility issues..."
# Download run-nginx.sh script
NGINX_RUN_SCRIPT="${INSTALL_ROOT}/data/nginx/run-nginx.sh"
print_info "Downloading nginx startup script..."
if curl -fsSL -o "$NGINX_RUN_SCRIPT" "$NGINX_BASE_URL/run-nginx.sh" 2>/dev/null; then
chmod +x "$NGINX_RUN_SCRIPT"
print_success "Nginx startup script downloaded and made executable"
else
print_error "Failed to download nginx startup script"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
# Handle lite overlay: ensure it if --lite, clean up stale copies otherwise
if [[ "$LITE_MODE" = true ]]; then
ensure_file "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}" \
"${GITHUB_RAW_URL}/${LITE_COMPOSE_FILE}" "${LITE_COMPOSE_FILE}" || exit 1
elif [[ -f "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}" ]]; then
if [[ -f "${INSTALL_ROOT}/deployment/.env" ]]; then
print_warning "Existing lite overlay found but --lite was not passed."
prompt_yn_or_default "Remove lite overlay and switch to standard mode? (y/N): " "n"
if [[ ! $REPLY =~ ^[Yy]$ ]]; then
print_info "Keeping existing lite overlay. Pass --lite to keep using lite mode."
LITE_MODE=true
else
rm -f "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}"
print_info "Removed lite overlay (switching to standard mode)"
fi
else
rm -f "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}"
print_info "Removed previous lite overlay (switching to standard mode)"
fi
# Download README file
README_FILE="${INSTALL_ROOT}/README.md"
print_info "Downloading README.md..."
if curl -fsSL -o "$README_FILE" "${GITHUB_RAW_URL}/README.md" 2>/dev/null; then
print_success "README.md downloaded successfully"
else
print_error "Failed to download README.md"
print_info "Please ensure you have internet connection and try again"
exit 1
fi
ensure_file "${INSTALL_ROOT}/deployment/env.template" \
"${GITHUB_RAW_URL}/env.template" "env.template" || exit 1
ensure_file "${INSTALL_ROOT}/data/nginx/app.conf.template" \
"$NGINX_BASE_URL/app.conf.template" "nginx/app.conf.template" || exit 1
ensure_file "${INSTALL_ROOT}/data/nginx/run-nginx.sh" \
"$NGINX_BASE_URL/run-nginx.sh" "nginx/run-nginx.sh" || exit 1
chmod +x "${INSTALL_ROOT}/data/nginx/run-nginx.sh"
ensure_file "${INSTALL_ROOT}/README.md" \
"${GITHUB_RAW_URL}/README.md" "README.md" || exit 1
# Create empty local directory marker (if needed)
touch "${INSTALL_ROOT}/data/nginx/local/.gitkeep"
print_success "All configuration files ready"
print_success "All configuration files downloaded successfully"
# Set up deployment configuration
print_step "Setting up deployment configs"
@@ -758,7 +513,7 @@ if [ -d "${INSTALL_ROOT}/deployment" ] && [ -f "${INSTALL_ROOT}/deployment/docke
if [ -n "$COMPOSE_CMD" ]; then
# Check if any containers are running
RUNNING_CONTAINERS=$(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args true) ps -q 2>/dev/null | wc -l)
RUNNING_CONTAINERS=$(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml ps -q 2>/dev/null | wc -l)
if [ "$RUNNING_CONTAINERS" -gt 0 ]; then
print_error "Onyx services are currently running!"
echo ""
@@ -779,7 +534,7 @@ if [ -f "$ENV_FILE" ]; then
echo "• Press Enter to restart with current configuration"
echo "• Type 'update' to update to a newer version"
echo ""
prompt_or_default "Choose an option [default: restart]: " ""
read -p "Choose an option [default: restart]: " -r
echo ""
if [ "$REPLY" = "update" ]; then
@@ -788,30 +543,26 @@ if [ -f "$ENV_FILE" ]; then
echo "• Press Enter for latest (recommended)"
echo "• Type a specific tag (e.g., v0.1.0)"
echo ""
# If --include-craft was passed, default to craft-latest
if [ "$INCLUDE_CRAFT" = true ]; then
prompt_or_default "Enter tag [default: craft-latest]: " "craft-latest"
VERSION="$REPLY"
read -p "Enter tag [default: craft-latest]: " -r VERSION
else
prompt_or_default "Enter tag [default: latest]: " "latest"
VERSION="$REPLY"
read -p "Enter tag [default: latest]: " -r VERSION
fi
echo ""
if [ "$INCLUDE_CRAFT" = true ] && [ "$VERSION" = "craft-latest" ]; then
print_info "Selected: craft-latest (Craft enabled)"
elif [ "$VERSION" = "latest" ]; then
print_info "Selected: Latest version"
if [ -z "$VERSION" ]; then
if [ "$INCLUDE_CRAFT" = true ]; then
VERSION="craft-latest"
print_info "Selected: craft-latest (Craft enabled)"
else
VERSION="latest"
print_info "Selected: Latest version"
fi
else
print_info "Selected: $VERSION"
fi
# Reject craft image tags when running in lite mode
if [[ "$LITE_MODE" = true ]] && [[ "${VERSION:-}" == craft-* ]]; then
print_error "Cannot use a craft image tag (${VERSION}) with --lite."
print_info "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
# Update .env file with new version
print_info "Updating configuration for version $VERSION..."
if grep -q "^IMAGE_TAG=" "$ENV_FILE"; then
@@ -830,67 +581,13 @@ if [ -f "$ENV_FILE" ]; then
fi
print_success "Configuration updated for upgrade"
else
# Reject restarting a craft deployment in lite mode
EXISTING_TAG=$(grep "^IMAGE_TAG=" "$ENV_FILE" | head -1 | cut -d'=' -f2 | tr -d ' "'"'"'')
if [[ "$LITE_MODE" = true ]] && [[ "${EXISTING_TAG:-}" == craft-* ]]; then
print_error "Cannot restart a craft deployment (${EXISTING_TAG}) with --lite."
print_info "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
print_info "Keeping existing configuration..."
print_success "Will restart with current settings"
fi
# Ensure COMPOSE_PROFILES is cleared when running in lite mode on an
# existing .env (the template ships with s3-filestore enabled).
if [[ "$LITE_MODE" = true ]] && grep -q "^COMPOSE_PROFILES=.*s3-filestore" "$ENV_FILE" 2>/dev/null; then
sed -i.bak 's/^COMPOSE_PROFILES=.*/COMPOSE_PROFILES=/' "$ENV_FILE" 2>/dev/null || true
print_success "Cleared COMPOSE_PROFILES for lite mode"
fi
else
print_info "No existing .env file found. Setting up new deployment..."
echo ""
# Ask for deployment mode (standard vs lite) unless already set via --lite flag
if [[ "$LITE_MODE" = false ]]; then
print_info "Which deployment mode would you like?"
echo ""
echo " 1) Standard - Full deployment with search, connectors, and RAG"
echo " 2) Lite - Minimal deployment (no Vespa, Redis, or model servers)"
echo " LLM chat, tools, file uploads, and Projects still work"
echo ""
prompt_or_default "Choose a mode (1 or 2) [default: 1]: " "1"
echo ""
case "$REPLY" in
2)
LITE_MODE=true
print_info "Selected: Lite mode"
ensure_file "${INSTALL_ROOT}/deployment/${LITE_COMPOSE_FILE}" \
"${GITHUB_RAW_URL}/${LITE_COMPOSE_FILE}" "${LITE_COMPOSE_FILE}" || exit 1
;;
*)
print_info "Selected: Standard mode"
;;
esac
else
print_info "Deployment mode: Lite (set via --lite flag)"
fi
# Validate lite + craft combination (could now be set interactively)
if [[ "$LITE_MODE" = true ]] && [[ "$INCLUDE_CRAFT" = true ]]; then
print_error "--include-craft cannot be used with Lite mode."
print_info "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
# Adjust resource expectations for lite mode
if [[ "$LITE_MODE" = true ]]; then
EXPECTED_DOCKER_RAM_GB=4
EXPECTED_DISK_GB=16
fi
# Ask for version
print_info "Which tag would you like to deploy?"
echo ""
@@ -898,21 +595,23 @@ else
echo "• Press Enter for craft-latest (recommended for Craft)"
echo "• Type a specific tag (e.g., craft-v1.0.0)"
echo ""
prompt_or_default "Enter tag [default: craft-latest]: " "craft-latest"
VERSION="$REPLY"
read -p "Enter tag [default: craft-latest]: " -r VERSION
else
echo "• Press Enter for latest (recommended)"
echo "• Type a specific tag (e.g., v0.1.0)"
echo ""
prompt_or_default "Enter tag [default: latest]: " "latest"
VERSION="$REPLY"
read -p "Enter tag [default: latest]: " -r VERSION
fi
echo ""
if [ "$INCLUDE_CRAFT" = true ] && [ "$VERSION" = "craft-latest" ]; then
print_info "Selected: craft-latest (Craft enabled)"
elif [ "$VERSION" = "latest" ]; then
print_info "Selected: Latest tag"
if [ -z "$VERSION" ]; then
if [ "$INCLUDE_CRAFT" = true ]; then
VERSION="craft-latest"
print_info "Selected: craft-latest (Craft enabled)"
else
VERSION="latest"
print_info "Selected: Latest tag"
fi
else
print_info "Selected: $VERSION"
fi
@@ -946,13 +645,6 @@ else
# Use basic auth by default
AUTH_SCHEMA="basic"
# Reject craft image tags when running in lite mode (must check before writing .env)
if [[ "$LITE_MODE" = true ]] && [[ "${VERSION:-}" == craft-* ]]; then
print_error "Cannot use a craft image tag (${VERSION}) with --lite."
print_info "Craft requires services (Vespa, Redis, background workers) that lite mode disables."
exit 1
fi
# Create .env file from template
print_info "Creating .env file with your selections..."
cp "$ENV_TEMPLATE" "$ENV_FILE"
@@ -962,13 +654,6 @@ else
sed -i.bak "s/^IMAGE_TAG=.*/IMAGE_TAG=$VERSION/" "$ENV_FILE"
print_success "IMAGE_TAG set to $VERSION"
# In lite mode, clear COMPOSE_PROFILES so profiled services (MinIO, etc.)
# stay disabled — the template ships with s3-filestore enabled by default.
if [[ "$LITE_MODE" = true ]]; then
sed -i.bak 's/^COMPOSE_PROFILES=.*/COMPOSE_PROFILES=/' "$ENV_FILE" 2>/dev/null || true
print_success "Cleared COMPOSE_PROFILES for lite mode"
fi
# Configure basic authentication (default)
sed -i.bak 's/^AUTH_TYPE=.*/AUTH_TYPE=basic/' "$ENV_FILE" 2>/dev/null || true
print_success "Basic authentication enabled in configuration"
@@ -1089,7 +774,7 @@ print_step "Pulling Docker images"
print_info "This may take several minutes depending on your internet connection..."
echo ""
print_info "Downloading Docker images (this may take a while)..."
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args) pull --quiet)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml pull --quiet)
if [ $? -eq 0 ]; then
print_success "Docker images downloaded successfully"
else
@@ -1103,9 +788,9 @@ print_info "Launching containers..."
echo ""
if [ "$USE_LATEST" = true ]; then
print_info "Force pulling latest images and recreating containers..."
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args) up -d --pull always --force-recreate)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml up -d --pull always --force-recreate)
else
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args) up -d)
(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml up -d)
fi
if [ $? -ne 0 ]; then
print_error "Failed to start Onyx services"
@@ -1127,7 +812,7 @@ echo ""
# Check for restart loops
print_info "Checking container health status..."
RESTART_ISSUES=false
CONTAINERS=$(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD $(compose_file_args) ps -q 2>/dev/null)
CONTAINERS=$(cd "${INSTALL_ROOT}/deployment" && $COMPOSE_CMD -f docker-compose.yml ps -q 2>/dev/null)
for CONTAINER in $CONTAINERS; do
PROJECT_NAME="$(basename "$INSTALL_ROOT")_deployment_"
@@ -1156,7 +841,7 @@ if [ "$RESTART_ISSUES" = true ]; then
print_error "Some containers are experiencing issues!"
echo ""
print_info "Please check the logs for more information:"
echo " (cd \"${INSTALL_ROOT}/deployment\" && $COMPOSE_CMD $(compose_file_args) logs)"
echo " (cd \"${INSTALL_ROOT}/deployment\" && $COMPOSE_CMD -f docker-compose.yml logs)"
echo ""
print_info "If the issue persists, please contact: founders@onyx.app"
@@ -1175,12 +860,8 @@ check_onyx_health() {
echo ""
while [ $attempt -le $max_attempts ]; do
local http_code=""
if [[ "$DOWNLOADER" == "curl" ]]; then
http_code=$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$port" 2>/dev/null || echo "000")
else
http_code=$(wget -q --spider -S "http://localhost:$port" 2>&1 | grep "HTTP/" | tail -1 | awk '{print $2}' || echo "000")
fi
# Check for successful HTTP responses (200, 301, 302, etc.)
local http_code=$(curl -s -o /dev/null -w "%{http_code}" "http://localhost:$port")
if echo "$http_code" | grep -qE "^(200|301|302|303|307|308)$"; then
return 0
fi
@@ -1236,18 +917,6 @@ print_info "If authentication is enabled, you can create your admin account here
echo " • Visit http://localhost:${HOST_PORT}/auth/signup to create your admin account"
echo " • The first user created will automatically have admin privileges"
echo ""
if [[ "$LITE_MODE" = true ]]; then
echo ""
print_info "Running in Lite mode — the following services are NOT started:"
echo " • Vespa (vector database)"
echo " • Redis (cache)"
echo " • Model servers (embedding/inference)"
echo " • Background workers (Celery)"
echo ""
print_info "Connectors and RAG search are disabled. LLM chat, tools, user file"
print_info "uploads, Projects, Agent knowledge, and code interpreter still work."
fi
echo ""
print_info "Refer to the README in the ${INSTALL_ROOT} directory for more information."
echo ""
print_info "For help or issues, contact: founders@onyx.app"

12
uv.lock generated
View File

@@ -453,14 +453,14 @@ wheels = [
[[package]]
name = "authlib"
version = "1.6.9"
version = "1.6.7"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
]
sdist = { url = "https://files.pythonhosted.org/packages/af/98/00d3dd826d46959ad8e32af2dbb2398868fd9fd0683c26e56d0789bd0e68/authlib-1.6.9.tar.gz", hash = "sha256:d8f2421e7e5980cc1ddb4e32d3f5fa659cfaf60d8eaf3281ebed192e4ab74f04", size = 165134, upload-time = "2026-03-02T07:44:01.998Z" }
sdist = { url = "https://files.pythonhosted.org/packages/49/dc/ed1681bf1339dd6ea1ce56136bad4baabc6f7ad466e375810702b0237047/authlib-1.6.7.tar.gz", hash = "sha256:dbf10100011d1e1b34048c9d120e83f13b35d69a826ae762b93d2fb5aafc337b", size = 164950, upload-time = "2026-02-06T14:04:14.171Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/53/23/b65f568ed0c22f1efacb744d2db1a33c8068f384b8c9b482b52ebdbc3ef6/authlib-1.6.9-py2.py3-none-any.whl", hash = "sha256:f08b4c14e08f0861dc18a32357b33fbcfd2ea86cfe3fe149484b4d764c4a0ac3", size = 244197, upload-time = "2026-03-02T07:44:00.307Z" },
{ url = "https://files.pythonhosted.org/packages/f8/00/3ed12264094ec91f534fae429945efbaa9f8c666f3aa7061cc3b2a26a0cd/authlib-1.6.7-py2.py3-none-any.whl", hash = "sha256:c637340d9a02789d2efa1d003a7437d10d3e565237bcb5fcbc6c134c7b95bab0", size = 244115, upload-time = "2026-02-06T14:04:12.141Z" },
]
[[package]]
@@ -5643,11 +5643,11 @@ wheels = [
[[package]]
name = "pyjwt"
version = "2.12.0"
version = "2.11.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a8/10/e8192be5f38f3e8e7e046716de4cae33d56fd5ae08927a823bb916be36c1/pyjwt-2.12.0.tar.gz", hash = "sha256:2f62390b667cd8257de560b850bb5a883102a388829274147f1d724453f8fb02", size = 102511, upload-time = "2026-03-12T17:15:30.831Z" }
sdist = { url = "https://files.pythonhosted.org/packages/5c/5a/b46fa56bf322901eee5b0454a34343cdbdae202cd421775a8ee4e42fd519/pyjwt-2.11.0.tar.gz", hash = "sha256:35f95c1f0fbe5d5ba6e43f00271c275f7a1a4db1dab27bf708073b75318ea623", size = 98019, upload-time = "2026-01-30T19:59:55.694Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/15/70/70f895f404d363d291dcf62c12c85fdd47619ad9674ac0f53364d035925a/pyjwt-2.12.0-py3-none-any.whl", hash = "sha256:9bb459d1bdd0387967d287f5656bf7ec2b9a26645d1961628cda1764e087fd6e", size = 29700, upload-time = "2026-03-12T17:15:29.257Z" },
{ url = "https://files.pythonhosted.org/packages/6f/01/c26ce75ba460d5cd503da9e13b21a33804d38c2165dec7b716d06b13010c/pyjwt-2.11.0-py3-none-any.whl", hash = "sha256:94a6bde30eb5c8e04fee991062b534071fd1439ef58d2adc9ccb823e7bcd0469", size = 28224, upload-time = "2026-01-30T19:59:54.539Z" },
]
[package.optional-dependencies]

1838
web/lib/genui-core/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,12 @@
{
"name": "@onyx/genui",
"version": "0.1.0",
"private": true,
"description": "Framework-agnostic structured UI rendering — parser, registry, prompt generation",
"exports": {
".": {
"types": "./src/index.ts",
"default": "./src/index.ts"
}
}
}

View File

@@ -0,0 +1,33 @@
import { z } from "zod";
import type { ComponentDef } from "./types";
/** Shape of the configuration object accepted by {@link defineComponent}. */
interface DefineComponentConfig<T extends z.ZodObject<z.ZodRawShape>> {
  name: string;
  description: string;
  props: T;
  component: unknown;
  group?: string;
}

/**
 * Define a GenUI component with typed props via Zod schema.
 *
 * The `component` field is framework-agnostic (typed as `unknown` in core);
 * React bindings narrow it to `React.FC`.
 *
 * @throws if `name` is not PascalCase (must start with an uppercase letter
 *         and contain only alphanumerics).
 */
export function defineComponent<T extends z.ZodObject<z.ZodRawShape>>(
  config: DefineComponentConfig<T>
): ComponentDef<T> {
  const { name, description, props, component, group } = config;

  // Component names double as parser tokens, so enforce PascalCase up front.
  const pascalCase = /^[A-Z][a-zA-Z0-9]*$/;
  if (!pascalCase.test(name)) {
    throw new Error(
      `Component name "${name}" must be PascalCase (start with uppercase, alphanumeric only)`
    );
  }

  return { name, description, props, component, group };
}

View File

@@ -0,0 +1,74 @@
// ── Types ──
export type {
Token,
ASTNode,
ComponentNode,
ArgumentNode,
ArrayNode,
ObjectNode,
LiteralNode,
ReferenceNode,
ElementNode,
TextElementNode,
ResolvedNode,
Statement,
ParseError,
ParseResult,
ComponentDef,
ParamDef,
ParamMap,
Library,
PromptOptions,
ActionEvent,
} from "./types";
export { TokenType } from "./types";
// ── Component & Library ──
export { defineComponent } from "./component";
export { createLibrary } from "./library";
// ── Parser ──
export { Tokenizer } from "./parser/tokenizer";
export { Parser } from "./parser/parser";
export { autoClose } from "./parser/autoclose";
export { resolveReferences } from "./parser/resolver";
export { validateAndTransform } from "./parser/validator";
export { createStreamingParser } from "./parser/streaming";
export type { StreamParser } from "./parser/streaming";
// ── Prompt ──
export { generatePrompt } from "./prompt/generator";
export { zodToTypeString, schemaToSignature } from "./prompt/introspector";
// ── Convenience: one-shot parse ──
import type { Library, ParseResult, ElementNode, ASTNode } from "./types";
import { Parser } from "./parser/parser";
import { resolveReferences } from "./parser/resolver";
import { validateAndTransform } from "./parser/validator";
/**
* One-shot parse: tokenize → parse → resolve → validate.
*/
export function parse(input: string, library: Library): ParseResult {
const parser = Parser.fromSource(input);
const { statements, errors: parseErrors } = parser.parse();
const { root, errors: resolveErrors } = resolveReferences(statements);
const allErrors = [...parseErrors, ...resolveErrors];
let rootElement: ElementNode | null = null;
if (root) {
const { element, errors: validateErrors } = validateAndTransform(
root,
library
);
rootElement = element;
allErrors.push(...validateErrors);
}
return {
statements,
root: rootElement as ASTNode | null,
errors: allErrors,
};
}

View File

@@ -0,0 +1,208 @@
import { describe, it, expect } from "vitest";
import { z } from "zod";
import { defineComponent, createLibrary, parse } from "./index";
/**
* Integration tests: end-to-end from source → parsed element tree.
*/
/**
 * Build a small five-component library (Text, Button, Tag, Table, Stack)
 * used by the integration suites below. Schemas are deliberately minimal —
 * just enough surface to exercise positional args, named args, arrays,
 * nesting, and enums.
 */
function makeTestLibrary() {
  return createLibrary([
    // Simple text node: one required positional (children) + one boolean flag.
    defineComponent({
      name: "Text",
      description: "Displays text",
      props: z.object({
        children: z.string(),
        headingH2: z.boolean().optional(),
      }),
      component: null,
      group: "Content",
    }),
    // Interactive component with an actionId hook.
    defineComponent({
      name: "Button",
      description: "Interactive button",
      props: z.object({
        children: z.string(),
        main: z.boolean().optional(),
        primary: z.boolean().optional(),
        actionId: z.string().optional(),
      }),
      component: null,
      group: "Interactive",
    }),
    // Enum-typed prop (color) to test enum validation paths.
    defineComponent({
      name: "Tag",
      description: "Label tag",
      props: z.object({
        title: z.string(),
        color: z.enum(["green", "purple", "blue", "gray", "amber"]).optional(),
      }),
      component: null,
      group: "Content",
    }),
    // Nested-array props: rows hold arbitrary cell values (incl. components).
    defineComponent({
      name: "Table",
      description: "Data table",
      props: z.object({
        columns: z.array(z.string()),
        rows: z.array(z.array(z.unknown())),
      }),
      component: null,
      group: "Content",
    }),
    // Container component: children is an array of arbitrary child nodes.
    defineComponent({
      name: "Stack",
      description: "Vertical layout",
      props: z.object({
        children: z.array(z.unknown()).optional(),
        gap: z.enum(["none", "xs", "sm", "md", "lg", "xl"]).optional(),
      }),
      component: null,
      group: "Layout",
    }),
  ]);
}
// End-to-end coverage of the public parse() entry point: full pipeline from
// GenUI source text to a validated element tree.
describe("Integration: parse()", () => {
  it("parses the spec example end-to-end", () => {
    const lib = makeTestLibrary();
    // Multi-statement program with variables, nested components, and arrays.
    const input = `title = Text("Search Results", headingH2: true)
row1 = ["Onyx Docs", Tag("PDF", color: "blue"), "2024-01-15"]
row2 = ["API Guide", Tag("MD", color: "green"), "2024-02-01"]
results = Table(["Name", "Type", "Date"], [row1, row2])
action = Button("View All", main: true, primary: true, actionId: "viewAll")
root = Stack([title, results, action], gap: "md")`;
    const result = parse(input, lib);
    expect(result.root).not.toBeNull();
    // Root should be a Stack element
    if (result.root && "component" in result.root) {
      expect((result.root as any).component).toBe("Stack");
    }
  });
  it("parses a single component", () => {
    const lib = makeTestLibrary();
    const result = parse('x = Text("Hello World")', lib);
    expect(result.root).not.toBeNull();
    // Ignore "Unknown …" diagnostics; only hard errors should be absent.
    expect(
      result.errors.filter((e) => !e.message.includes("Unknown"))
    ).toHaveLength(0);
  });
  it("handles unknown components gracefully", () => {
    const lib = makeTestLibrary();
    const result = parse('x = UnknownWidget("test")', lib);
    // Unknown component still yields a tree plus a diagnostic, not a crash.
    expect(result.root).not.toBeNull();
    expect(
      result.errors.some((e) => e.message.includes("Unknown component"))
    ).toBe(true);
  });
  it("handles empty input", () => {
    const lib = makeTestLibrary();
    const result = parse("", lib);
    expect(result.root).toBeNull();
    expect(result.errors).toHaveLength(0);
  });
});

// Prompt generation: the library should render an LLM-facing grammar with
// every component signature, syntax rules, and optional sections.
describe("Integration: library.prompt()", () => {
  it("generates a prompt with component signatures", () => {
    const lib = makeTestLibrary();
    const prompt = lib.prompt();
    expect(prompt).toContain("GenUI Lang");
    expect(prompt).toContain("Text(");
    expect(prompt).toContain("Button(");
    expect(prompt).toContain("Tag(");
    expect(prompt).toContain("Table(");
    expect(prompt).toContain("Stack(");
  });
  it("includes syntax rules", () => {
    const lib = makeTestLibrary();
    const prompt = lib.prompt();
    expect(prompt).toContain("PascalCase");
    expect(prompt).toContain("camelCase");
    expect(prompt).toContain("positional");
  });
  it("includes streaming guidelines by default", () => {
    const lib = makeTestLibrary();
    const prompt = lib.prompt();
    expect(prompt).toContain("Streaming");
  });
  it("can disable streaming guidelines", () => {
    const lib = makeTestLibrary();
    const prompt = lib.prompt({ streaming: false });
    expect(prompt).not.toContain("Streaming Guidelines");
  });
  it("includes custom examples", () => {
    const lib = makeTestLibrary();
    const prompt = lib.prompt({
      examples: [{ description: "Test example", code: 'x = Text("test")' }],
    });
    expect(prompt).toContain("Test example");
    expect(prompt).toContain('x = Text("test")');
  });
});

// Name validation at definition time (see defineComponent's PascalCase check).
describe("Integration: defineComponent", () => {
  it("rejects non-PascalCase names", () => {
    expect(() =>
      defineComponent({
        name: "button",
        description: "Invalid",
        props: z.object({}),
        component: null,
      })
    ).toThrow("PascalCase");
  });
  it("accepts valid PascalCase names", () => {
    expect(() =>
      defineComponent({
        name: "MyWidget",
        description: "Valid",
        props: z.object({}),
        component: null,
      })
    ).not.toThrow();
  });
});

// Library construction invariants: unique names, lookup, and param metadata.
describe("Integration: createLibrary", () => {
  it("rejects duplicate component names", () => {
    const comp = defineComponent({
      name: "Foo",
      description: "Foo",
      props: z.object({}),
      component: null,
    });
    expect(() => createLibrary([comp, comp])).toThrow("Duplicate");
  });
  it("resolves components by name", () => {
    const lib = makeTestLibrary();
    expect(lib.resolve("Text")).toBeDefined();
    expect(lib.resolve("NonExistent")).toBeUndefined();
  });
  it("generates param map", () => {
    const lib = makeTestLibrary();
    const paramMap = lib.paramMap();
    const textParams = paramMap.get("Text");
    expect(textParams).toBeDefined();
    // Param order follows the Zod shape's key insertion order.
    expect(textParams![0]!.name).toBe("children");
    expect(textParams![0]!.required).toBe(true);
  });
});

View File

@@ -0,0 +1,84 @@
import { z } from "zod";
import type {
ComponentDef,
Library,
ParamDef,
ParamMap,
PromptOptions,
} from "./types";
import { generatePrompt } from "./prompt/generator";
/**
 * Build ordered param definitions from a Zod object schema.
 * Ordering matches the shape key order (which is insertion order in JS objects).
 */
function buildParamDefs(schema: z.ZodObject<z.ZodRawShape>): ParamDef[] {
  const defs: ParamDef[] = [];
  for (const [name, raw] of Object.entries(schema.shape)) {
    const zodType = raw as z.ZodTypeAny;
    // `.isOptional()` is the sole required/optional signal; no defaults here.
    defs.push({
      name,
      required: !zodType.isOptional(),
      zodType,
    });
  }
  return defs;
}
interface CreateLibraryOptions {
  /** Default prompt options merged with per-call options */
  defaultPromptOptions?: PromptOptions;
}

/**
 * Create a component library from an array of component definitions.
 *
 * @throws if two definitions share the same name.
 */
export function createLibrary(
  components: ComponentDef[],
  options?: CreateLibraryOptions
): Library {
  // Registry keyed by component name; names must be unique.
  const registry = new Map<string, ComponentDef>();
  for (const def of components) {
    if (registry.has(def.name)) {
      throw new Error(`Duplicate component name: "${def.name}"`);
    }
    registry.set(def.name, def);
  }

  // Lazily-filled cache; built once on first paramMap() call.
  const paramCache = new Map<string, ParamDef[]>();

  return {
    components: registry,

    resolve: (name: string): ComponentDef | undefined => registry.get(name),

    prompt(promptOptions?: PromptOptions): string {
      const defaults = options?.defaultPromptOptions;
      // Per-call options win for scalar fields; list fields are concatenated
      // (defaults first) so both sources contribute rules/examples.
      const merged: PromptOptions = {
        ...defaults,
        ...promptOptions,
        additionalRules: [
          ...(defaults?.additionalRules ?? []),
          ...(promptOptions?.additionalRules ?? []),
        ],
        examples: [
          ...(defaults?.examples ?? []),
          ...(promptOptions?.examples ?? []),
        ],
      };
      return generatePrompt(this, merged);
    },

    paramMap(): ParamMap {
      if (paramCache.size === 0) {
        for (const [name, def] of registry) {
          paramCache.set(name, buildParamDefs(def.props));
        }
      }
      return paramCache;
    },
  };
}

View File

@@ -0,0 +1,321 @@
import { describe, it, expect } from "vitest";
import { z } from "zod";
import { defineComponent, createLibrary, parse } from "./index";
/**
 * Smoke test that mirrors the Onyx library assembly.
 * Verifies all 16 component definitions register without errors
 * and the library generates a valid prompt.
 */
describe("Onyx Library Assembly (smoke test)", () => {
  // Re-define all components exactly as onyx/src/components/ does,
  // to verify the schemas are valid without needing the onyx package import.
  // NOTE(review): these duplicates must be kept in sync with the real
  // bindings by hand — drift here would silently weaken the smoke test.
  const components = [
    // ── Layout ──
    defineComponent({
      name: "Stack",
      description: "Vertical stack layout",
      group: "Layout",
      props: z.object({
        children: z.array(z.unknown()).optional(),
        gap: z.enum(["none", "xs", "sm", "md", "lg", "xl"]).optional(),
        align: z.enum(["start", "center", "end", "stretch"]).optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Row",
      description: "Horizontal row layout",
      group: "Layout",
      props: z.object({
        children: z.array(z.unknown()).optional(),
        gap: z.enum(["none", "xs", "sm", "md", "lg", "xl"]).optional(),
        align: z.enum(["start", "center", "end", "stretch"]).optional(),
        wrap: z.boolean().optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Column",
      description: "A column within a Row",
      group: "Layout",
      props: z.object({
        children: z.array(z.unknown()).optional(),
        width: z.string().optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Card",
      description: "Container card",
      group: "Layout",
      props: z.object({
        title: z.string().optional(),
        padding: z.enum(["none", "sm", "md", "lg"]).optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Divider",
      description: "Horizontal separator",
      group: "Layout",
      props: z.object({
        spacing: z.enum(["sm", "md", "lg"]).optional(),
      }),
      component: null,
    }),
    // ── Content ──
    defineComponent({
      name: "Text",
      description: "Displays text with typography variants",
      group: "Content",
      props: z.object({
        children: z.string(),
        headingH1: z.boolean().optional(),
        headingH2: z.boolean().optional(),
        headingH3: z.boolean().optional(),
        muted: z.boolean().optional(),
        mono: z.boolean().optional(),
        bold: z.boolean().optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Tag",
      description: "Label tag with color",
      group: "Content",
      props: z.object({
        title: z.string(),
        color: z.enum(["green", "purple", "blue", "gray", "amber"]).optional(),
        size: z.enum(["sm", "md"]).optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Table",
      description: "Data table",
      group: "Content",
      props: z.object({
        columns: z.array(z.string()),
        rows: z.array(z.array(z.unknown())),
        compact: z.boolean().optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Code",
      description: "Code block",
      group: "Content",
      props: z.object({
        children: z.string(),
        language: z.string().optional(),
        showCopyButton: z.boolean().optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Image",
      description: "Displays an image",
      group: "Content",
      props: z.object({
        src: z.string(),
        alt: z.string().optional(),
        width: z.string().optional(),
        height: z.string().optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Link",
      description: "Hyperlink",
      group: "Content",
      props: z.object({
        children: z.string(),
        href: z.string(),
        external: z.boolean().optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "List",
      description: "Ordered or unordered list",
      group: "Content",
      props: z.object({
        items: z.array(z.string()),
        ordered: z.boolean().optional(),
      }),
      component: null,
    }),
    // ── Interactive ──
    defineComponent({
      name: "Button",
      description: "Interactive button",
      group: "Interactive",
      props: z.object({
        children: z.string(),
        main: z.boolean().optional(),
        action: z.boolean().optional(),
        danger: z.boolean().optional(),
        primary: z.boolean().optional(),
        secondary: z.boolean().optional(),
        tertiary: z.boolean().optional(),
        size: z.enum(["lg", "md"]).optional(),
        actionId: z.string().optional(),
        disabled: z.boolean().optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "IconButton",
      description: "Icon button with tooltip",
      group: "Interactive",
      props: z.object({
        icon: z.string(),
        tooltip: z.string().optional(),
        main: z.boolean().optional(),
        action: z.boolean().optional(),
        danger: z.boolean().optional(),
        primary: z.boolean().optional(),
        secondary: z.boolean().optional(),
        actionId: z.string().optional(),
        disabled: z.boolean().optional(),
      }),
      component: null,
    }),
    defineComponent({
      name: "Input",
      description: "Text input field",
      group: "Interactive",
      props: z.object({
        placeholder: z.string().optional(),
        value: z.string().optional(),
        actionId: z.string().optional(),
        readOnly: z.boolean().optional(),
      }),
      component: null,
    }),
    // ── Feedback ──
    defineComponent({
      name: "Alert",
      description: "Status message banner",
      group: "Feedback",
      props: z.object({
        text: z.string(),
        description: z.string().optional(),
        level: z
          .enum(["default", "info", "success", "warning", "error"])
          .optional(),
        showIcon: z.boolean().optional(),
      }),
      component: null,
    }),
  ];
  it("registers all 16 components without errors", () => {
    expect(() => createLibrary(components)).not.toThrow();
  });
  it("creates a library with exactly 16 components", () => {
    const lib = createLibrary(components);
    expect(lib.components.size).toBe(16);
  });
  it("resolves every component by name", () => {
    const lib = createLibrary(components);
    const names = [
      "Stack",
      "Row",
      "Column",
      "Card",
      "Divider",
      "Text",
      "Tag",
      "Table",
      "Code",
      "Image",
      "Link",
      "List",
      "Button",
      "IconButton",
      "Input",
      "Alert",
    ];
    for (const name of names) {
      expect(lib.resolve(name)).toBeDefined();
    }
  });
  it("generates param map for all components", () => {
    const lib = createLibrary(components);
    const paramMap = lib.paramMap();
    expect(paramMap.size).toBe(16);
    // Verify a few specific param orderings
    const textParams = paramMap.get("Text")!;
    expect(textParams[0]!.name).toBe("children");
    expect(textParams[0]!.required).toBe(true);
    const buttonParams = paramMap.get("Button")!;
    expect(buttonParams[0]!.name).toBe("children");
    expect(buttonParams.find((p) => p.name === "actionId")).toBeDefined();
    const tagParams = paramMap.get("Tag")!;
    expect(tagParams[0]!.name).toBe("title");
    expect(tagParams[0]!.required).toBe(true);
  });
  it("generates a prompt containing all component signatures", () => {
    const lib = createLibrary(components);
    const prompt = lib.prompt();
    // Every component name should appear
    for (const [name] of lib.components) {
      expect(prompt).toContain(name);
    }
    // Should contain group headers
    expect(prompt).toContain("Layout");
    expect(prompt).toContain("Content");
    expect(prompt).toContain("Interactive");
    expect(prompt).toContain("Feedback");
    // Should have syntax section
    expect(prompt).toContain("Syntax");
    expect(prompt).toContain("Streaming");
  });
  it("generates a prompt with correct Button signature", () => {
    const lib = createLibrary(components);
    const prompt = lib.prompt();
    // Button should show its required `children` param and optional params
    expect(prompt).toContain("Button(children: string");
    expect(prompt).toContain("actionId?");
  });
  it("parses a complex GenUI input using the full library", () => {
    const lib = createLibrary(components);
    // Representative program exercising layout nesting, tables, enums,
    // multi-line component calls, and variable references.
    const input = `heading = Text("Dashboard", headingH1: true)
status = Alert("All systems operational", level: "success")
row1 = ["API Server", Tag("Running", color: "green"), "99.9%"]
row2 = ["Database", Tag("Running", color: "green"), "99.8%"]
row3 = ["Cache", Tag("Warning", color: "amber"), "95.2%"]
table = Table(["Service", "Status", "Uptime"], [row1, row2, row3])
actions = Row([
Button("Refresh", main: true, primary: true, actionId: "refresh"),
Button("Settings", action: true, secondary: true, actionId: "settings")
], gap: "sm")
divider = Divider(spacing: "md")
code = Code("curl https://api.example.com/health", language: "bash")
root = Stack([heading, status, table, divider, actions, code], gap: "md")`;
    const result = parse(input, lib);
    expect(result.root).not.toBeNull();
    expect(result.statements.length).toBeGreaterThanOrEqual(9);
    // Should have no critical errors (unknown components, etc.)
    const criticalErrors = result.errors.filter(
      (e: { message: string }) => !e.message.includes("Unknown")
    );
    expect(criticalErrors).toHaveLength(0);
  });
});

View File

@@ -0,0 +1,42 @@
import { describe, it, expect } from "vitest";
import { autoClose } from "./autoclose";
// Spec for the streaming auto-closer: every case feeds a partial line and
// expects the matching closers appended in reverse-nesting order.
describe("autoClose", () => {
  it("closes unmatched parentheses", () => {
    expect(autoClose('Button("hello"')).toBe('Button("hello")');
  });
  it("closes unmatched brackets", () => {
    expect(autoClose('["a", "b"')).toBe('["a", "b"]');
  });
  it("closes unmatched braces", () => {
    expect(autoClose('{name: "test"')).toBe('{name: "test"}');
  });
  it("closes unmatched strings", () => {
    expect(autoClose('"hello')).toBe('"hello"');
  });
  it("closes nested brackets", () => {
    expect(autoClose("Foo([1, 2")).toBe("Foo([1, 2])");
  });
  it("handles already closed input", () => {
    expect(autoClose('Button("ok")')).toBe('Button("ok")');
  });
  it("handles empty input", () => {
    expect(autoClose("")).toBe("");
  });
  it("handles escaped quotes inside strings", () => {
    // The \" inside the string must not terminate it.
    expect(autoClose('"hello \\"world')).toBe('"hello \\"world"');
  });
  it("handles deeply nested structures", () => {
    expect(autoClose('Stack([Row([Text("hi"')).toBe(
      'Stack([Row([Text("hi")])])'
    );
  });
});

View File

@@ -0,0 +1,63 @@
/**
* Auto-close unmatched brackets and strings for streaming partial input.
*
* When the LLM is mid-stream, the last line may be incomplete — e.g. an
* unclosed `(`, `[`, `{`, or string. We append the matching closers so the
* parser can produce a valid (partial) tree from what we have so far.
*/
export function autoClose(input: string): string {
  // Stack of the closers we still owe; innermost scope is last.
  const closers: string[] = [];
  // Quote character of the string literal we are currently inside, or null.
  let inString: string | null = null;
  // True when the previous character (inside a string) was a backslash.
  let escaped = false;
  for (let i = 0; i < input.length; i++) {
    const ch = input[i]!;
    if (inString !== null) {
      // Backslash escapes are only meaningful inside string literals; the
      // tokenizer skips a bare "\" outside strings, so handling escapes
      // globally (as before) would wrongly swallow brackets after a "\".
      if (escaped) {
        escaped = false;
        continue;
      }
      if (ch === "\\") {
        escaped = true;
        continue;
      }
      if (ch === inString) {
        inString = null;
        closers.pop(); // remove the pending string closer
      }
      continue;
    }
    if (ch === '"' || ch === "'") {
      inString = ch;
      closers.push(ch);
      continue;
    }
    switch (ch) {
      case "(":
        closers.push(")");
        break;
      case "[":
        closers.push("]");
        break;
      case "{":
        closers.push("}");
        break;
      case ")":
      case "]":
      case "}":
        // Pop the matching opener if present; stray closers are ignored so
        // malformed input can never underflow the stack.
        if (closers.length > 0 && closers[closers.length - 1] === ch) {
          closers.pop();
        }
        break;
    }
  }
  // Innermost scopes close first, so append in reverse push order.
  return input + closers.reverse().join("");
}

View File

@@ -0,0 +1,542 @@
import { describe, it, expect } from "vitest";
import { Tokenizer } from "./tokenizer";
import { Parser } from "./parser";
import { TokenType } from "../types";
// ── Helpers ──

/** Run the tokenizer over `input` and return the complete token stream. */
const tokenize = (input: string) => new Tokenizer(input).tokenize();

/** Token types only — convenient for asserting stream shape. */
const tokenTypes = (input: string): TokenType[] =>
  tokenize(input).map(({ type }) => type);

/** Token values only — convenient for asserting stream content. */
const tokenValues = (input: string): string[] =>
  tokenize(input).map(({ value }) => value);

/** Parse `input` from scratch, returning statements and collected errors. */
const parseStatements = (input: string) => Parser.fromSource(input).parse();
// ────────────────────────────────────────────────────────────
// Tokenizer edge cases
// ────────────────────────────────────────────────────────────
// Edge-case coverage for the Tokenizer: empty/whitespace input, unicode,
// long literals, bracket-depth newline suppression, escapes, error recovery,
// and line/column tracking.
describe("Tokenizer edge cases", () => {
  it("handles empty string", () => {
    const tokens = tokenize("");
    expect(tokens).toHaveLength(1);
    expect(tokens[0]!.type).toBe(TokenType.EOF);
  });
  it("handles only whitespace (spaces and tabs)", () => {
    const tokens = tokenize(" \t\t ");
    expect(tokens).toHaveLength(1);
    expect(tokens[0]!.type).toBe(TokenType.EOF);
  });
  it("handles only newlines", () => {
    const types = tokenTypes("\n\n\n");
    // Each newline at bracket depth 0 produces a Newline token
    expect(types.filter((t) => t === TokenType.Newline).length).toBe(3);
    expect(types[types.length - 1]).toBe(TokenType.EOF);
  });
  it("handles unicode in string literals (emoji)", () => {
    const tokens = tokenize('"hello \u{1F680}\u{1F525}"');
    const str = tokens.find((t) => t.type === TokenType.String);
    expect(str).toBeDefined();
    expect(str!.value).toBe("hello \u{1F680}\u{1F525}");
  });
  it("handles unicode in string literals (CJK characters)", () => {
    const tokens = tokenize('"\u4F60\u597D\u4E16\u754C"');
    const str = tokens.find((t) => t.type === TokenType.String);
    expect(str!.value).toBe("\u4F60\u597D\u4E16\u754C");
  });
  it("handles very long string literals (1000+ chars)", () => {
    const longContent = "a".repeat(2000);
    const tokens = tokenize(`"${longContent}"`);
    const str = tokens.find((t) => t.type === TokenType.String);
    expect(str!.value).toBe(longContent);
    expect(str!.value.length).toBe(2000);
  });
  it("handles deeply nested brackets (10+ levels)", () => {
    const open = "(".repeat(15);
    const close = ")".repeat(15);
    const input = `Foo${open}${close}`;
    const tokens = tokenize(input);
    const lParens = tokens.filter((t) => t.type === TokenType.LParen);
    const rParens = tokens.filter((t) => t.type === TokenType.RParen);
    expect(lParens).toHaveLength(15);
    expect(rParens).toHaveLength(15);
  });
  it("suppresses newlines inside brackets", () => {
    const input = '(\n\n"hello"\n\n)';
    const types = tokenTypes(input);
    // Newlines inside brackets should be suppressed
    expect(types).not.toContain(TokenType.Newline);
    expect(types).toContain(TokenType.LParen);
    expect(types).toContain(TokenType.String);
    expect(types).toContain(TokenType.RParen);
  });
  it("handles single-quoted strings", () => {
    const tokens = tokenize("'hello world'");
    const str = tokens.find((t) => t.type === TokenType.String);
    expect(str!.value).toBe("hello world");
  });
  it("handles double-quoted strings", () => {
    const tokens = tokenize('"hello world"');
    const str = tokens.find((t) => t.type === TokenType.String);
    expect(str!.value).toBe("hello world");
  });
  it("handles single quotes inside double-quoted strings without escaping", () => {
    const tokens = tokenize('"it\'s fine"');
    // The \' escape yields a literal '
    const str = tokens.find((t) => t.type === TokenType.String);
    expect(str!.value).toBe("it's fine");
  });
  it("handles negative decimals (-3.14)", () => {
    const tokens = tokenize("-3.14");
    const num = tokens.find((t) => t.type === TokenType.Number);
    expect(num).toBeDefined();
    expect(num!.value).toBe("-3.14");
  });
  it("handles negative integers", () => {
    const tokens = tokenize("-42");
    const num = tokens.find((t) => t.type === TokenType.Number);
    expect(num!.value).toBe("-42");
  });
  it("handles multiple consecutive comments", () => {
    const input = "// comment 1\n// comment 2\n// comment 3\nx";
    const tokens = tokenize(input);
    // Comments are skipped; we should get newlines and the identifier
    const identifiers = tokens.filter((t) => t.type === TokenType.Identifier);
    expect(identifiers).toHaveLength(1);
    expect(identifiers[0]!.value).toBe("x");
  });
  it("handles comment at end of file (no trailing newline)", () => {
    const input = "x = 1\n// trailing comment";
    const tokens = tokenize(input);
    // Should not crash, last token is EOF
    expect(tokens[tokens.length - 1]!.type).toBe(TokenType.EOF);
    // The identifier and number should be present
    expect(
      tokens.some((t) => t.type === TokenType.Identifier && t.value === "x")
    ).toBe(true);
    expect(
      tokens.some((t) => t.type === TokenType.Number && t.value === "1")
    ).toBe(true);
  });
  it("handles all escape sequences in strings", () => {
    const input = '"\\n\\t\\\\\\"\\\'"';
    const tokens = tokenize(input);
    const str = tokens.find((t) => t.type === TokenType.String);
    expect(str!.value).toBe("\n\t\\\"'");
  });
  it("handles unknown escape sequences by preserving the escaped char", () => {
    const tokens = tokenize('"\\x"');
    const str = tokens.find((t) => t.type === TokenType.String);
    expect(str!.value).toBe("x");
  });
  it("handles unterminated string (EOF inside string)", () => {
    // Should not throw; tokenizer consumes until EOF
    const tokens = tokenize('"unterminated');
    const str = tokens.find((t) => t.type === TokenType.String);
    expect(str).toBeDefined();
    expect(str!.value).toBe("unterminated");
  });
  it("handles bracket depth never going below zero on unmatched closing brackets", () => {
    // Extra closing parens should not crash
    const tokens = tokenize(")))]]]");
    expect(tokens[tokens.length - 1]!.type).toBe(TokenType.EOF);
  });
  it("skips unknown characters silently", () => {
    const tokens = tokenize("@ # $ %");
    // All unknown chars are skipped, only EOF remains
    expect(tokens).toHaveLength(1);
    expect(tokens[0]!.type).toBe(TokenType.EOF);
  });
  it("tracks line and column correctly across newlines", () => {
    const tokens = tokenize("x\ny");
    const x = tokens.find((t) => t.value === "x");
    const y = tokens.find((t) => t.value === "y");
    expect(x!.line).toBe(1);
    expect(x!.column).toBe(1);
    expect(y!.line).toBe(2);
    expect(y!.column).toBe(1);
  });
  it("treats identifier starting with underscore as valid", () => {
    const tokens = tokenize("_foo _bar123");
    const idents = tokens.filter((t) => t.type === TokenType.Identifier);
    expect(idents).toHaveLength(2);
    expect(idents[0]!.value).toBe("_foo");
    expect(idents[1]!.value).toBe("_bar123");
  });
  it("tokenizes number with trailing dot as number then unknown", () => {
    // "42." => number "42." (reads the dot as part of decimal), then EOF
    const tokens = tokenize("42.");
    const num = tokens.find((t) => t.type === TokenType.Number);
    expect(num!.value).toBe("42.");
  });
});
// ────────────────────────────────────────────────────────────
// Parser edge cases
// ────────────────────────────────────────────────────────────
// Edge-case coverage for the Parser: error recovery, trailing commas,
// nesting depth, PascalCase component vs camelCase reference distinction,
// literal keywords, and mixed named/positional arguments.
describe("Parser edge cases", () => {
  it("handles empty input", () => {
    const { statements, errors } = parseStatements("");
    expect(statements).toHaveLength(0);
    expect(errors).toHaveLength(0);
  });
  it("handles single identifier with no assignment (error recovery)", () => {
    const { statements, errors } = parseStatements("foo");
    // Should produce an error because it expects `=` after identifier
    expect(errors.length).toBeGreaterThan(0);
    expect(errors[0]!.message).toContain("Expected Equals");
  });
  it("handles assignment with no value (error recovery)", () => {
    const { statements, errors } = parseStatements("x =");
    // Should produce an error because there's no expression after `=`
    expect(errors.length).toBeGreaterThan(0);
  });
  it("parses component with 0 args: Foo()", () => {
    const { statements, errors } = parseStatements("x = Foo()");
    expect(errors).toHaveLength(0);
    expect(statements).toHaveLength(1);
    const node = statements[0]!.value;
    expect(node.kind).toBe("component");
    if (node.kind === "component") {
      expect(node.name).toBe("Foo");
      expect(node.args).toHaveLength(0);
    }
  });
  it("parses component with only named args", () => {
    const { statements, errors } = parseStatements("x = Foo(a: 1, b: 2)");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    if (node.kind === "component") {
      expect(node.args).toHaveLength(2);
      expect(node.args[0]!.key).toBe("a");
      expect(node.args[0]!.value).toEqual({ kind: "literal", value: 1 });
      expect(node.args[1]!.key).toBe("b");
      expect(node.args[1]!.value).toEqual({ kind: "literal", value: 2 });
    }
  });
  it("parses deeply nested components", () => {
    const { statements, errors } = parseStatements('x = A(B(C(D("deep"))))');
    expect(errors).toHaveLength(0);
    const a = statements[0]!.value;
    expect(a.kind).toBe("component");
    if (a.kind === "component") {
      expect(a.name).toBe("A");
      const b = a.args[0]!.value;
      expect(b.kind).toBe("component");
      if (b.kind === "component") {
        expect(b.name).toBe("B");
        const c = b.args[0]!.value;
        expect(c.kind).toBe("component");
        if (c.kind === "component") {
          expect(c.name).toBe("C");
          const d = c.args[0]!.value;
          expect(d.kind).toBe("component");
          if (d.kind === "component") {
            expect(d.name).toBe("D");
            expect(d.args[0]!.value).toEqual({
              kind: "literal",
              value: "deep",
            });
          }
        }
      }
    }
  });
  it("parses array of arrays", () => {
    const { statements, errors } = parseStatements("x = [[1, 2], [3, 4]]");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    expect(node.kind).toBe("array");
    if (node.kind === "array") {
      expect(node.elements).toHaveLength(2);
      const first = node.elements[0]!;
      expect(first.kind).toBe("array");
      if (first.kind === "array") {
        expect(first.elements).toEqual([
          { kind: "literal", value: 1 },
          { kind: "literal", value: 2 },
        ]);
      }
      const second = node.elements[1]!;
      if (second.kind === "array") {
        expect(second.elements).toEqual([
          { kind: "literal", value: 3 },
          { kind: "literal", value: 4 },
        ]);
      }
    }
  });
  it("parses object with string keys (including spaces)", () => {
    const { statements, errors } = parseStatements(
      'x = {"key with spaces": 1, "another key": 2}'
    );
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    expect(node.kind).toBe("object");
    if (node.kind === "object") {
      expect(node.entries).toHaveLength(2);
      expect(node.entries[0]!.key).toBe("key with spaces");
      expect(node.entries[0]!.value).toEqual({ kind: "literal", value: 1 });
      expect(node.entries[1]!.key).toBe("another key");
    }
  });
  it("handles trailing newlines gracefully", () => {
    const { statements, errors } = parseStatements('x = "hello"\n\n\n');
    expect(errors).toHaveLength(0);
    expect(statements).toHaveLength(1);
  });
  it("handles leading newlines gracefully", () => {
    const { statements, errors } = parseStatements('\n\n\nx = "hello"');
    expect(errors).toHaveLength(0);
    expect(statements).toHaveLength(1);
    expect(statements[0]!.name).toBe("x");
  });
  it("handles multiple empty lines between statements", () => {
    const { statements, errors } = parseStatements("x = 1\n\n\n\n\ny = 2");
    expect(errors).toHaveLength(0);
    expect(statements).toHaveLength(2);
    expect(statements[0]!.name).toBe("x");
    expect(statements[1]!.name).toBe("y");
  });
  it("treats PascalCase identifiers as components, not keywords: True", () => {
    // `True` is PascalCase, so it should be parsed as a component call (not boolean)
    // when followed by parens
    const { statements, errors } = parseStatements("x = True()");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    expect(node.kind).toBe("component");
    if (node.kind === "component") {
      expect(node.name).toBe("True");
    }
  });
  it("treats PascalCase identifiers as components: Null", () => {
    const { statements, errors } = parseStatements("x = Null()");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    expect(node.kind).toBe("component");
    if (node.kind === "component") {
      expect(node.name).toBe("Null");
    }
  });
  it("treats lowercase 'true' as boolean literal, not reference", () => {
    const { statements } = parseStatements("x = true");
    expect(statements[0]!.value).toEqual({ kind: "literal", value: true });
  });
  it("treats lowercase 'null' as null literal, not reference", () => {
    const { statements } = parseStatements("x = null");
    expect(statements[0]!.value).toEqual({ kind: "literal", value: null });
  });
  it("handles very long identifier names", () => {
    const longName = "a".repeat(500);
    const { statements, errors } = parseStatements(`${longName} = 42`);
    expect(errors).toHaveLength(0);
    expect(statements[0]!.name).toBe(longName);
  });
  it("parses mixed named and positional args", () => {
    const { statements, errors } = parseStatements(
      'x = Foo("pos", named: "val", "pos2")'
    );
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    if (node.kind === "component") {
      expect(node.args).toHaveLength(3);
      // First: positional
      expect(node.args[0]!.key).toBeNull();
      expect(node.args[0]!.value).toEqual({ kind: "literal", value: "pos" });
      // Second: named
      expect(node.args[1]!.key).toBe("named");
      expect(node.args[1]!.value).toEqual({ kind: "literal", value: "val" });
      // Third: positional
      expect(node.args[2]!.key).toBeNull();
      expect(node.args[2]!.value).toEqual({ kind: "literal", value: "pos2" });
    }
  });
  it("handles trailing comma in component args", () => {
    const { statements, errors } = parseStatements("x = Foo(1, 2,)");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    if (node.kind === "component") {
      expect(node.args).toHaveLength(2);
    }
  });
  it("handles trailing comma in arrays", () => {
    const { statements, errors } = parseStatements("x = [1, 2, 3,]");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    if (node.kind === "array") {
      expect(node.elements).toHaveLength(3);
    }
  });
  it("handles trailing comma in objects", () => {
    const { statements, errors } = parseStatements("x = {a: 1, b: 2,}");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    if (node.kind === "object") {
      expect(node.entries).toHaveLength(2);
    }
  });
  it("recovers from error and parses subsequent statements", () => {
    const { statements, errors } = parseStatements("bad\ny = 42");
    // First statement is invalid (no `=`), second is valid
    expect(errors.length).toBeGreaterThan(0);
    expect(statements).toHaveLength(1);
    expect(statements[0]!.name).toBe("y");
    expect(statements[0]!.value).toEqual({ kind: "literal", value: 42 });
  });
  it("parses camelCase identifier as reference", () => {
    const { statements, errors } = parseStatements("x = myRef");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    expect(node.kind).toBe("reference");
    if (node.kind === "reference") {
      expect(node.name).toBe("myRef");
    }
  });
  it("parses PascalCase identifier without parens as reference", () => {
    // PascalCase but no `(` following => treated as a reference, not component
    const { statements, errors } = parseStatements("x = MyComponent");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    expect(node.kind).toBe("reference");
    if (node.kind === "reference") {
      expect(node.name).toBe("MyComponent");
    }
  });
  it("parses empty array", () => {
    const { statements, errors } = parseStatements("x = []");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    expect(node.kind).toBe("array");
    if (node.kind === "array") {
      expect(node.elements).toHaveLength(0);
    }
  });
  it("parses empty object", () => {
    const { statements, errors } = parseStatements("x = {}");
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    expect(node.kind).toBe("object");
    if (node.kind === "object") {
      expect(node.entries).toHaveLength(0);
    }
  });
  it("parses component as named arg value", () => {
    const { statements, errors } = parseStatements(
      'x = Layout(header: Header("Title"))'
    );
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    if (node.kind === "component") {
      expect(node.name).toBe("Layout");
      expect(node.args[0]!.key).toBe("header");
      const headerVal = node.args[0]!.value;
      expect(headerVal.kind).toBe("component");
      if (headerVal.kind === "component") {
        expect(headerVal.name).toBe("Header");
      }
    }
  });
  it("parses negative number in expression position", () => {
    const { statements, errors } = parseStatements("x = -3.14");
    expect(errors).toHaveLength(0);
    expect(statements[0]!.value).toEqual({ kind: "literal", value: -3.14 });
  });
  it("parses component with array arg", () => {
    const { statements, errors } = parseStatements(
      "x = List(items: [1, 2, 3])"
    );
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    if (node.kind === "component") {
      expect(node.args[0]!.key).toBe("items");
      expect(node.args[0]!.value.kind).toBe("array");
    }
  });
  it("handles comments between statements", () => {
    const input = "x = 1\n// comment\ny = 2";
    const { statements, errors } = parseStatements(input);
    expect(errors).toHaveLength(0);
    expect(statements).toHaveLength(2);
  });
  it("handles comment on the same line as a statement", () => {
    // The comment eats everything after //, so `x = 1` is before the comment on a new line
    const input = "// header comment\nx = 1";
    const { statements, errors } = parseStatements(input);
    expect(errors).toHaveLength(0);
    expect(statements).toHaveLength(1);
    expect(statements[0]!.name).toBe("x");
  });
  it("parses object with mixed identifier and string keys", () => {
    const { statements, errors } = parseStatements(
      'x = {name: "Alice", "full name": "Alice B"}'
    );
    expect(errors).toHaveLength(0);
    const node = statements[0]!.value;
    if (node.kind === "object") {
      expect(node.entries[0]!.key).toBe("name");
      expect(node.entries[1]!.key).toBe("full name");
    }
  });
});

View File

@@ -0,0 +1,7 @@
// Public entry point for the GenUI parser package: tokenizing, parsing,
// stream-friendly auto-closing, reference resolution, and validation.
export { Tokenizer } from "./tokenizer";
export { Parser } from "./parser";
export { autoClose } from "./autoclose";
export { resolveReferences } from "./resolver";
export { validateAndTransform } from "./validator";
export { createStreamingParser } from "./streaming";
export type { StreamParser } from "./streaming";

View File

@@ -0,0 +1,132 @@
import { describe, it, expect } from "vitest";
import { Parser } from "./parser";
// Happy-path coverage for the Parser: each literal kind, component calls
// (positional/named/nested), arrays, objects, references, multi-statement
// programs, trailing commas, error recovery, and the full spec example.
describe("Parser", () => {
  function parseStatements(input: string) {
    const parser = Parser.fromSource(input);
    return parser.parse();
  }
  it("parses a simple literal assignment", () => {
    const { statements, errors } = parseStatements('x = "hello"');
    expect(errors).toHaveLength(0);
    expect(statements).toHaveLength(1);
    expect(statements[0]!.name).toBe("x");
    expect(statements[0]!.value).toEqual({ kind: "literal", value: "hello" });
  });
  it("parses number literals", () => {
    const { statements } = parseStatements("n = 42");
    expect(statements[0]!.value).toEqual({ kind: "literal", value: 42 });
  });
  it("parses boolean literals", () => {
    const { statements } = parseStatements("b = true");
    expect(statements[0]!.value).toEqual({ kind: "literal", value: true });
  });
  it("parses null", () => {
    const { statements } = parseStatements("x = null");
    expect(statements[0]!.value).toEqual({ kind: "literal", value: null });
  });
  it("parses a component call with positional args", () => {
    const { statements, errors } = parseStatements('btn = Button("Click me")');
    expect(errors).toHaveLength(0);
    expect(statements[0]!.value).toEqual({
      kind: "component",
      name: "Button",
      args: [{ key: null, value: { kind: "literal", value: "Click me" } }],
    });
  });
  it("parses a component call with named args", () => {
    const { statements } = parseStatements('t = Tag("PDF", color: "blue")');
    const comp = statements[0]!.value;
    expect(comp.kind).toBe("component");
    if (comp.kind === "component") {
      expect(comp.args).toHaveLength(2);
      expect(comp.args[0]!.key).toBeNull();
      expect(comp.args[1]!.key).toBe("color");
    }
  });
  it("parses nested components", () => {
    const { statements, errors } = parseStatements(
      'row = Row([Button("A"), Button("B")])'
    );
    expect(errors).toHaveLength(0);
    const comp = statements[0]!.value;
    expect(comp.kind).toBe("component");
  });
  it("parses arrays", () => {
    const { statements } = parseStatements('items = ["a", "b", "c"]');
    expect(statements[0]!.value).toEqual({
      kind: "array",
      elements: [
        { kind: "literal", value: "a" },
        { kind: "literal", value: "b" },
        { kind: "literal", value: "c" },
      ],
    });
  });
  it("parses objects", () => {
    const { statements } = parseStatements('opts = {name: "test", count: 5}');
    expect(statements[0]!.value).toEqual({
      kind: "object",
      entries: [
        { key: "name", value: { kind: "literal", value: "test" } },
        { key: "count", value: { kind: "literal", value: 5 } },
      ],
    });
  });
  it("parses variable references", () => {
    const { statements } = parseStatements("ref = myVar");
    expect(statements[0]!.value).toEqual({ kind: "reference", name: "myVar" });
  });
  it("parses multiple statements", () => {
    const { statements, errors } = parseStatements(
      'title = Text("Hello")\nbtn = Button("Click")'
    );
    expect(errors).toHaveLength(0);
    expect(statements).toHaveLength(2);
    expect(statements[0]!.name).toBe("title");
    expect(statements[1]!.name).toBe("btn");
  });
  it("handles trailing commas", () => {
    const { statements, errors } = parseStatements('x = Button("a", "b",)');
    expect(errors).toHaveLength(0);
    const comp = statements[0]!.value;
    if (comp.kind === "component") {
      expect(comp.args).toHaveLength(2);
    }
  });
  it("recovers from parse errors", () => {
    const { statements, errors } = parseStatements(
      '!!invalid!!\ny = Text("valid")'
    );
    expect(errors.length).toBeGreaterThan(0);
    // Should still parse the valid second line
    expect(statements.length).toBeGreaterThanOrEqual(1);
  });
  it("parses the full example from the spec", () => {
    const input = `title = Text("Search Results", headingH2: true)
row1 = ["Onyx Docs", Tag("PDF", color: "blue"), "2024-01-15"]
row2 = ["API Guide", Tag("MD", color: "green"), "2024-02-01"]
results = Table(["Name", "Type", "Date"], [row1, row2])
action = Button("View All", main: true, primary: true, actionId: "viewAll")
root = Stack([title, results, action], gap: "md")`;
    const { statements, errors } = parseStatements(input);
    expect(errors).toHaveLength(0);
    expect(statements).toHaveLength(6);
    expect(statements[5]!.name).toBe("root");
  });
});

View File

@@ -0,0 +1,305 @@
import type {
ASTNode,
ArgumentNode,
Statement,
ParseError,
Token,
} from "../types";
import { TokenType } from "../types";
import { Tokenizer } from "./tokenizer";
/**
* Recursive descent parser for GenUI Lang.
*
* Grammar:
* program = statement*
* statement = identifier "=" expression NEWLINE
* expression = component | array | object | literal | reference
* component = PascalCase "(" arglist? ")"
* arglist = arg ("," arg)*
* arg = namedArg | expression
* namedArg = identifier ":" expression
* array = "[" (expression ("," expression)*)? "]"
* object = "{" (pair ("," pair)*)? "}"
* pair = (identifier | string) ":" expression
* literal = string | number | boolean | null
* reference = camelCase identifier (doesn't start with uppercase)
*/
export class Parser {
  // Full token stream produced by the Tokenizer (terminated by an EOF token).
  private tokens: Token[];
  // Index of the current token within `tokens`.
  private pos = 0;
  // Errors accumulated during recovery; returned from parse().
  private errors: ParseError[] = [];
  constructor(tokens: Token[]) {
    this.tokens = tokens;
  }
  /** Convenience factory: tokenize `source` and wrap the result in a Parser. */
  static fromSource(source: string): Parser {
    const tokenizer = new Tokenizer(source);
    return new Parser(tokenizer.tokenize());
  }
  /**
   * Parse the whole token stream into assignment statements.
   *
   * Error recovery: a ParseErrorException aborts only the current statement;
   * the parser records the error, skips to the next line, and keeps going,
   * so one bad line never loses the rest of the program.
   */
  parse(): { statements: Statement[]; errors: ParseError[] } {
    const statements: Statement[] = [];
    this.skipNewlines();
    while (!this.isAtEnd()) {
      try {
        const stmt = this.parseStatement();
        if (stmt) {
          statements.push(stmt);
        }
      } catch (e) {
        if (e instanceof ParseErrorException) {
          this.errors.push(e.toParseError());
        }
        // NOTE(review): a non-ParseErrorException throw is silently dropped
        // here; consider rethrowing unexpected errors — verify intent.
        // Skip to next line to recover
        this.skipToNextStatement();
      }
      this.skipNewlines();
    }
    return { statements, errors: this.errors };
  }
  /** statement = identifier "=" expression. Throws ParseErrorException on mismatch. */
  private parseStatement(): Statement | null {
    if (this.isAtEnd()) return null;
    const ident = this.expect(TokenType.Identifier);
    this.expect(TokenType.Equals);
    const value = this.parseExpression();
    return { name: ident.value, value };
  }
  /** expression = component | array | object | literal | reference. */
  private parseExpression(): ASTNode {
    const token = this.current();
    if (token.type === TokenType.LBracket) {
      return this.parseArray();
    }
    if (token.type === TokenType.LBrace) {
      return this.parseObject();
    }
    if (token.type === TokenType.String) {
      this.advance();
      return { kind: "literal", value: token.value };
    }
    if (token.type === TokenType.Number) {
      this.advance();
      return { kind: "literal", value: Number(token.value) };
    }
    if (token.type === TokenType.Boolean) {
      this.advance();
      return { kind: "literal", value: token.value === "true" };
    }
    if (token.type === TokenType.Null) {
      this.advance();
      return { kind: "literal", value: null };
    }
    if (token.type === TokenType.Identifier) {
      // PascalCase + "(" means a component call; anything else is a
      // variable reference (including PascalCase without parens).
      const isPascalCase = /^[A-Z]/.test(token.value);
      if (isPascalCase && this.peek()?.type === TokenType.LParen) {
        return this.parseComponent();
      }
      // camelCase identifier = variable reference
      this.advance();
      return { kind: "reference", name: token.value };
    }
    throw this.error(`Unexpected token: ${token.type} "${token.value}"`);
  }
  /** component = PascalCase "(" arglist? ")". Trailing commas are allowed. */
  private parseComponent(): ASTNode {
    const name = this.expect(TokenType.Identifier);
    this.expect(TokenType.LParen);
    const args: ArgumentNode[] = [];
    if (this.current().type !== TokenType.RParen) {
      args.push(this.parseArg());
      while (this.current().type === TokenType.Comma) {
        this.advance(); // skip comma
        if (this.current().type === TokenType.RParen) break; // trailing comma
        args.push(this.parseArg());
      }
    }
    this.expect(TokenType.RParen);
    return { kind: "component", name: name.value, args };
  }
  /** arg = identifier ":" expression (named) | expression (positional). */
  private parseArg(): ArgumentNode {
    // Look ahead: if we see `identifier ":"`, it's a named arg
    if (
      this.current().type === TokenType.Identifier &&
      this.peek()?.type === TokenType.Colon
    ) {
      // But only if the identifier is NOT PascalCase (which would be a component)
      const isPascalCase = /^[A-Z]/.test(this.current().value);
      if (!isPascalCase) {
        const key = this.current().value;
        this.advance(); // identifier
        this.advance(); // colon
        const value = this.parseExpression();
        return { key, value };
      }
    }
    // Positional argument
    const value = this.parseExpression();
    return { key: null, value };
  }
  /** array = "[" (expression ("," expression)*)? "]". Trailing commas allowed. */
  private parseArray(): ASTNode {
    this.expect(TokenType.LBracket);
    const elements: ASTNode[] = [];
    if (this.current().type !== TokenType.RBracket) {
      elements.push(this.parseExpression());
      while (this.current().type === TokenType.Comma) {
        this.advance();
        if (this.current().type === TokenType.RBracket) break;
        elements.push(this.parseExpression());
      }
    }
    this.expect(TokenType.RBracket);
    return { kind: "array", elements };
  }
  /** object = "{" (pair ("," pair)*)? "}". Trailing commas allowed. */
  private parseObject(): ASTNode {
    this.expect(TokenType.LBrace);
    const entries: { key: string; value: ASTNode }[] = [];
    if (this.current().type !== TokenType.RBrace) {
      entries.push(this.parseObjectEntry());
      while (this.current().type === TokenType.Comma) {
        this.advance();
        if (this.current().type === TokenType.RBrace) break;
        entries.push(this.parseObjectEntry());
      }
    }
    this.expect(TokenType.RBrace);
    return { kind: "object", entries };
  }
  /** pair = (identifier | string) ":" expression. String keys may contain spaces. */
  private parseObjectEntry(): { key: string; value: ASTNode } {
    let key: string;
    if (this.current().type === TokenType.String) {
      key = this.current().value;
      this.advance();
    } else if (this.current().type === TokenType.Identifier) {
      key = this.current().value;
      this.advance();
    } else {
      throw this.error(`Expected object key, got ${this.current().type}`);
    }
    this.expect(TokenType.Colon);
    const value = this.parseExpression();
    return { key, value };
  }
  // ── Helpers ──
  /** Current token; a synthetic EOF token if `pos` ran past the end. */
  private current(): Token {
    return (
      this.tokens[this.pos] ?? {
        type: TokenType.EOF,
        value: "",
        offset: -1,
        line: -1,
        column: -1,
      }
    );
  }
  /** One-token lookahead; undefined at the end of the stream. */
  private peek(): Token | undefined {
    return this.tokens[this.pos + 1];
  }
  /** Return the current token and move past it (clamped at the end). */
  private advance(): Token {
    const token = this.current();
    if (this.pos < this.tokens.length) this.pos++;
    return token;
  }
  /** Consume and return a token of `type`, or throw a ParseErrorException. */
  private expect(type: TokenType): Token {
    const token = this.current();
    if (token.type !== type) {
      throw this.error(`Expected ${type}, got ${token.type} "${token.value}"`);
    }
    this.advance();
    return token;
  }
  private isAtEnd(): boolean {
    return this.current().type === TokenType.EOF;
  }
  private skipNewlines(): void {
    while (this.current().type === TokenType.Newline) {
      this.advance();
    }
  }
  /** Recovery: discard tokens up to (and through) the next newline run. */
  private skipToNextStatement(): void {
    while (!this.isAtEnd() && this.current().type !== TokenType.Newline) {
      this.advance();
    }
    this.skipNewlines();
  }
  /** Build (not throw) a ParseErrorException pinned to the current token. */
  private error(message: string): ParseErrorException {
    const token = this.current();
    return new ParseErrorException(
      message,
      token.line,
      token.column,
      token.offset
    );
  }
}
/**
 * Internal throwable used by the Parser for recoverable syntax errors,
 * carrying the source position of the offending token.
 */
class ParseErrorException extends Error {
  constructor(
    message: string,
    public line: number,
    public column: number,
    public offset: number
  ) {
    super(message);
  }

  /** Convert to the plain ParseError record surfaced in parse() results. */
  toParseError(): ParseError {
    return {
      message: this.message,
      line: this.line,
      column: this.column,
      offset: this.offset,
    };
  }
}

View File

@@ -0,0 +1,100 @@
import { describe, it, expect } from "vitest";
import { resolveReferences } from "./resolver";
import type { Statement } from "../types";
// Tests for the reference-resolution pass: substitution of variable
// references, cycle detection, unknown-reference passthrough, and root
// selection ("root" binding wins, otherwise the last statement).
describe("resolveReferences", () => {
  it("resolves simple variable references", () => {
    const statements: Statement[] = [
      { name: "a", value: { kind: "literal", value: "hello" } },
      { name: "b", value: { kind: "reference", name: "a" } },
    ];
    const { resolved, errors } = resolveReferences(statements);
    expect(errors).toHaveLength(0);
    expect(resolved.get("b")).toEqual({ kind: "literal", value: "hello" });
  });
  it("resolves nested references in components", () => {
    const statements: Statement[] = [
      { name: "label", value: { kind: "literal", value: "Click me" } },
      {
        name: "btn",
        value: {
          kind: "component",
          name: "Button",
          args: [{ key: null, value: { kind: "reference", name: "label" } }],
        },
      },
    ];
    const { resolved, errors } = resolveReferences(statements);
    expect(errors).toHaveLength(0);
    const btn = resolved.get("btn");
    expect(btn?.kind).toBe("component");
    if (btn?.kind === "component") {
      expect(btn.args[0]!.value).toEqual({
        kind: "literal",
        value: "Click me",
      });
    }
  });
  it("detects circular references", () => {
    const statements: Statement[] = [
      { name: "a", value: { kind: "reference", name: "b" } },
      { name: "b", value: { kind: "reference", name: "a" } },
    ];
    const { errors } = resolveReferences(statements);
    expect(errors.some((e) => e.message.includes("Circular"))).toBe(true);
  });
  it("leaves unknown references as-is", () => {
    const statements: Statement[] = [
      { name: "x", value: { kind: "reference", name: "unknown" } },
    ];
    const { resolved, errors } = resolveReferences(statements);
    expect(errors).toHaveLength(0);
    expect(resolved.get("x")).toEqual({ kind: "reference", name: "unknown" });
  });
  it("uses last statement as root by default", () => {
    const statements: Statement[] = [
      { name: "a", value: { kind: "literal", value: 1 } },
      { name: "b", value: { kind: "literal", value: 2 } },
    ];
    const { root } = resolveReferences(statements);
    expect(root).toEqual({ kind: "literal", value: 2 });
  });
  it("uses statement named 'root' as root", () => {
    const statements: Statement[] = [
      { name: "root", value: { kind: "literal", value: "I am root" } },
      { name: "other", value: { kind: "literal", value: "not root" } },
    ];
    const { root } = resolveReferences(statements);
    expect(root).toEqual({ kind: "literal", value: "I am root" });
  });
  it("resolves references in arrays", () => {
    const statements: Statement[] = [
      { name: "item", value: { kind: "literal", value: "hello" } },
      {
        name: "list",
        value: {
          kind: "array",
          elements: [{ kind: "reference", name: "item" }],
        },
      },
    ];
    const { resolved } = resolveReferences(statements);
    const list = resolved.get("list");
    if (list?.kind === "array") {
      expect(list.elements[0]).toEqual({ kind: "literal", value: "hello" });
    }
  });
});

View File

@@ -0,0 +1,135 @@
import type { ASTNode, Statement, ParseError } from "../types";
/**
 * Resolve variable references across a list of statements.
 *
 * Each statement binds `name = expression`; later expressions may refer to
 * earlier names. Every ReferenceNode that points at a known definition is
 * substituted with the resolved subtree (cycles are reported as errors and
 * replaced with a null literal inside resolveNode).
 *
 * @param statements - ordered `name = value` bindings from the parser
 * @returns resolved map of name -> resolved AST, the chosen root node
 *          (the statement named "root" if present, otherwise the last
 *          statement, otherwise null), and any resolution errors
 */
export function resolveReferences(statements: Statement[]): {
  resolved: Map<string, ASTNode>;
  root: ASTNode | null;
  errors: ParseError[];
} {
  const errors: ParseError[] = [];

  // Definition table: later duplicates overwrite earlier ones, matching
  // Map insertion semantics.
  const defTable = new Map<string, ASTNode>(
    statements.map((s) => [s.name, s.value])
  );

  const resolved = new Map<string, ASTNode>();

  // Resolve each binding with a fresh in-progress set for cycle detection.
  for (const { name, value } of statements) {
    const result = resolveNode(value, defTable, resolved, new Set<string>(), errors);
    resolved.set(name, result);
  }

  // Pick the root: explicit "root" binding wins, else the last statement.
  let root: ASTNode | null = null;
  if (resolved.has("root")) {
    root = resolved.get("root")!;
  } else if (statements.length > 0) {
    const finalName = statements[statements.length - 1]!.name;
    root = resolved.get(finalName) ?? null;
  }

  return { resolved, root, errors };
}
/**
 * Recursively substitute reference nodes with their definitions.
 *
 * @param node - AST node to resolve
 * @param definitions - name -> raw (unresolved) definition
 * @param resolved - memo of already-resolved names (shared across calls)
 * @param resolving - names currently on the resolution stack (cycle guard)
 * @param errors - sink for circular-reference errors
 * @returns the node with all reachable references substituted; unknown
 *          references are returned unchanged (they may be defined later
 *          during streaming), and cycles resolve to a null literal
 */
function resolveNode(
  node: ASTNode,
  definitions: Map<string, ASTNode>,
  resolved: Map<string, ASTNode>,
  resolving: Set<string>,
  errors: ParseError[]
): ASTNode {
  // Shared recursion helper so container cases stay terse.
  const recur = (child: ASTNode): ASTNode =>
    resolveNode(child, definitions, resolved, resolving, errors);

  // Literals are already fully resolved.
  if (node.kind === "literal") {
    return node;
  }

  if (node.kind === "reference") {
    const { name } = node;

    // Memoized result from an earlier resolution.
    const cached = resolved.get(name);
    if (cached !== undefined) {
      return cached;
    }

    // Name is already on the resolution stack -> cycle.
    if (resolving.has(name)) {
      errors.push({
        message: `Circular reference detected: "${name}"`,
        line: 0,
        column: 0,
      });
      return { kind: "literal", value: null };
    }

    // Unknown name: keep the reference node (may be defined in a later chunk).
    const definition = definitions.get(name);
    if (definition === undefined) {
      return node;
    }

    resolving.add(name);
    const result = recur(definition);
    resolving.delete(name);
    resolved.set(name, result);
    return result;
  }

  if (node.kind === "component") {
    // Rebuild the node with each argument value resolved.
    return {
      ...node,
      args: node.args.map((arg) => ({ ...arg, value: recur(arg.value) })),
    };
  }

  if (node.kind === "array") {
    return { ...node, elements: node.elements.map(recur) };
  }

  // Remaining kind: object — resolve every entry value.
  return {
    ...node,
    entries: node.entries.map((entry) => ({
      ...entry,
      value: recur(entry.value),
    })),
  };
}

View File

@@ -0,0 +1,280 @@
import { describe, it, expect } from "vitest";
import { z } from "zod";
import { createStreamingParser } from "./streaming";
import { createLibrary } from "../library";
import { defineComponent } from "../component";
import { autoClose } from "./autoclose";
/**
 * Build the three-component library (Text, Button, Stack) used by the
 * streaming edge-case tests. Each definition carries a Zod props schema;
 * `component` is null because no rendering happens in these tests.
 */
function makeTestLibrary() {
  const text = defineComponent({
    name: "Text",
    description: "Displays text",
    props: z.object({ children: z.string() }),
    component: null,
  });
  const button = defineComponent({
    name: "Button",
    description: "Clickable button",
    props: z.object({
      children: z.string(),
      main: z.boolean().optional(),
      actionId: z.string().optional(),
    }),
    component: null,
  });
  const stack = defineComponent({
    name: "Stack",
    description: "Vertical stack layout",
    props: z.object({
      children: z.array(z.unknown()).optional(),
      gap: z.string().optional(),
    }),
    component: null,
  });
  return createLibrary([text, button, stack]);
}
// Chunk-boundary robustness: the streaming parser must converge to the same
// AST regardless of how the input is sliced (per-character, mid-token,
// mid-escape, empty chunks, huge chunks).
describe("Streaming edge cases", () => {
  it("single character at a time streaming", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    const input = 'title = Text("Hello")\n';
    let result;
    for (const ch of input) {
      result = parser.push(ch);
    }
    expect(result!.statements).toHaveLength(1);
    expect(result!.statements[0]!.name).toBe("title");
    expect(result!.root).not.toBeNull();
  });
  it("token split across chunks — component name", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    // "Text" split as "Tex" + "t"
    parser.push("a = Tex");
    const result = parser.push('t("hello")\n');
    expect(result.statements).toHaveLength(1);
    expect(result.statements[0]!.value).toMatchObject({
      kind: "component",
      name: "Text",
    });
  });
  it("string split mid-escape sequence", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    // Split right before the escaped quote
    parser.push('a = Text("hel');
    const result = parser.push('lo \\"world\\"")\n');
    expect(result.statements).toHaveLength(1);
    expect(result.root).not.toBeNull();
  });
  it("multi-line component split across chunks", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    // The streaming parser splits on newlines, so multi-line expressions
    // need to be on a single line or use variables. Test that a long
    // single-line expression streamed in chunks works correctly.
    parser.push('root = Stack([Text("line 1"), Text("line');
    const result = parser.push(' 2")])\n');
    expect(result.statements).toHaveLength(1);
    expect(result.root).not.toBeNull();
  });
  it("empty chunks do not corrupt state", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    parser.push("");
    parser.push('a = Text("hi")');
    parser.push("");
    parser.push("");
    const result = parser.push("\n");
    expect(result.statements).toHaveLength(1);
    expect(result.statements[0]!.name).toBe("a");
  });
  it("very large single chunk with multiple complete statements", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    const lines =
      Array.from({ length: 50 }, (_, i) => `v${i} = Text("item ${i}")`).join(
        "\n"
      ) + "\n";
    const result = parser.push(lines);
    expect(result.statements).toHaveLength(50);
    expect(result.root).not.toBeNull();
  });
  it("interleaved complete and partial lines", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    // Complete line followed by partial
    parser.push('a = Text("done")\nb = Text("part');
    let result = parser.result();
    // "a" is cached complete, "b" is partial but auto-closed
    expect(result.statements.length).toBeGreaterThanOrEqual(1);
    // Now finish the partial and add another complete
    result = parser.push('ial")\nc = Text("also done")\n');
    expect(result.statements).toHaveLength(3);
    expect(result.statements.map((s) => s.name)).toEqual(["a", "b", "c"]);
  });
  it("variable reference before definition — streaming order matters", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    // Reference "label" before it's defined
    parser.push("root = Stack([label])\n");
    let result = parser.result();
    // At this point "label" is an unresolved reference — should not crash
    expect(result.statements).toHaveLength(1);
    expect(result.errors.length).toBeGreaterThanOrEqual(0);
    // Now define it
    result = parser.push('label = Text("Hi")\n');
    // After defining, root should pick it up via resolution
    expect(result.statements).toHaveLength(2);
  });
  it("repeated push after complete response is idempotent", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    const full = 'a = Text("done")\n';
    const first = parser.push(full);
    // Push empty strings — result should remain stable
    const second = parser.push("");
    const third = parser.push("");
    expect(second.statements).toEqual(first.statements);
    expect(third.statements).toEqual(first.statements);
    expect(third.root).toEqual(first.root);
  });
  it("unicode characters split across chunk boundaries", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    // JS strings are UTF-16, so multi-byte chars like emoji are fine as
    // string splits — but let's verify the parser handles them gracefully
    parser.push('a = Text("hello ');
    parser.push("🌍");
    parser.push(" world");
    const result = parser.push('")\n');
    expect(result.statements).toHaveLength(1);
    expect(result.root).not.toBeNull();
  });
  it("unicode CJK characters streamed char by char", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    const input = 'a = Text("你好世界")\n';
    let result;
    for (const ch of input) {
      result = parser.push(ch);
    }
    expect(result!.statements).toHaveLength(1);
    expect(result!.root).not.toBeNull();
  });
});
// autoClose() appends the closers (quotes/brackets) needed to make a partial
// line parseable. These cases pin down bracket nesting, string awareness,
// escapes, and tolerance of mismatched closers.
describe("autoClose additional edge cases", () => {
  it("mixed bracket types — ([{", () => {
    const result = autoClose("([{");
    expect(result).toBe("([{}])");
  });
  it("string containing bracket chars is not counted", () => {
    // The ( inside the string should not produce a closer
    const result = autoClose('"hello (world"');
    // String is closed, paren inside string is ignored — no extra closers
    expect(result).toBe('"hello (world"');
  });
  it("unclosed string containing bracket chars", () => {
    // Unclosed string with brackets inside — brackets are ignored, string gets closed
    const result = autoClose('"hello (world');
    expect(result).toBe('"hello (world"');
  });
  it("only opening brackets — (((", () => {
    const result = autoClose("(((");
    expect(result).toBe("((()))");
  });
  it("alternating open/close with extras — (()(", () => {
    const result = autoClose("(()(");
    // Stack: push ( → push ( → pop for ) → push ( → closers left: (, (
    expect(result).toBe("(()())");
  });
  it("all bracket types deeply nested", () => {
    const result = autoClose("({[");
    expect(result).toBe("({[]})");
  });
  it("partial close leaves remaining openers", () => {
    // ( [ ] — bracket closed, paren still open
    const result = autoClose("([]");
    expect(result).toBe("([])");
  });
  it("escaped quote at end of string does not close it", () => {
    // The backslash escapes the quote, so the string is still open
    const result = autoClose('"hello\\');
    // escaped flag is set, next char would be escaped — string still open
    expect(result).toBe('"hello\\"');
  });
  it("single quotes work the same as double quotes", () => {
    const result = autoClose("'hello");
    expect(result).toBe("'hello'");
  });
  it("mixed string types — only the active one matters", () => {
    // Double-quoted string containing a single quote — single quote is literal
    const result = autoClose("\"it's");
    expect(result).toBe('"it\'s"');
  });
  it("empty string input returns empty", () => {
    expect(autoClose("")).toBe("");
  });
  it("already balanced input returns unchanged", () => {
    expect(autoClose("({[]})")).toBe("({[]})");
  });
  it("mismatched close bracket is tolerated", () => {
    // A ] with no matching [ — should not crash, just ignored
    const result = autoClose("(]");
    // The ] doesn't match (, so it's ignored — ( still needs closing
    expect(result).toBe("(])");
  });
});

View File

@@ -0,0 +1,101 @@
import { describe, it, expect } from "vitest";
import { z } from "zod";
import { createStreamingParser } from "./streaming";
import { createLibrary } from "../library";
import { defineComponent } from "../component";
/**
 * Minimal Text/Button/Stack library for the StreamingParser suite.
 * Schemas mirror the edge-case suite's fixtures; `component` is null since
 * nothing is rendered here.
 */
function makeTestLibrary() {
  const text = defineComponent({
    name: "Text",
    description: "Text",
    props: z.object({ children: z.string() }),
    component: null,
  });
  const button = defineComponent({
    name: "Button",
    description: "Button",
    props: z.object({
      children: z.string(),
      main: z.boolean().optional(),
      actionId: z.string().optional(),
    }),
    component: null,
  });
  const stack = defineComponent({
    name: "Stack",
    description: "Stack",
    props: z.object({
      children: z.array(z.unknown()).optional(),
      gap: z.string().optional(),
    }),
    component: null,
  });
  return createLibrary([text, button, stack]);
}
// Happy-path coverage of the StreamParser interface: push/result/reset,
// incremental chunks, and the complete-line caching behavior.
describe("StreamingParser", () => {
  it("parses a complete response", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    const result = parser.push('title = Text("Hello World")\n');
    expect(result.statements).toHaveLength(1);
    expect(result.root).not.toBeNull();
  });
  it("handles incremental streaming", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    // First chunk — partial line
    let result = parser.push('title = Text("He');
    expect(result.statements.length).toBeGreaterThanOrEqual(0);
    // Complete the line
    result = parser.push('llo World")\n');
    expect(result.statements).toHaveLength(1);
  });
  it("handles multi-line streaming", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    parser.push('a = Text("Line 1")\n');
    const result = parser.push('b = Text("Line 2")\n');
    expect(result.statements).toHaveLength(2);
  });
  it("caches complete lines and only re-parses partial", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    // First complete line
    parser.push('a = Text("First")\n');
    // Partial second line — should still have first line cached
    const result = parser.push('b = Text("Sec');
    expect(result.statements.length).toBeGreaterThanOrEqual(1);
  });
  // NOTE(review): the name says "resets on shorter input" but the test calls
  // reset() explicitly; it actually verifies reset() clears all cached state.
  it("resets on shorter input", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    parser.push('a = Text("Hello")\n');
    parser.reset();
    const result = parser.push('x = Text("Fresh")\n');
    expect(result.statements).toHaveLength(1);
    expect(result.statements[0]!.name).toBe("x");
  });
  it("result() returns last parse result", () => {
    const lib = makeTestLibrary();
    const parser = createStreamingParser(lib);
    parser.push('a = Text("Hello")\n');
    const result = parser.result();
    expect(result.statements).toHaveLength(1);
  });
});

View File

@@ -0,0 +1,112 @@
import type {
ASTNode,
ElementNode,
Library,
ParseError,
ParseResult,
Statement,
} from "../types";
import { Parser } from "./parser";
import { autoClose } from "./autoclose";
import { resolveReferences } from "./resolver";
import { validateAndTransform } from "./validator";
/**
* Streaming parser for GenUI Lang.
*
* Design: each `push(chunk)` appends to the buffer. We split on newlines,
* cache results for complete lines, and re-parse only the last (partial) line
* with auto-closing applied.
*
* This gives us O(1) work per chunk for complete lines and O(n) only for the
* current partial line — ideal for LLM token-by-token streaming.
*/
export interface StreamParser {
  /** Append a chunk of source text and return the re-parsed result. */
  push(chunk: string): ParseResult;
  /** Return the result of the most recent push() without re-parsing. */
  result(): ParseResult;
  /** Discard all buffered input and cached parse state. */
  reset(): void;
}
export function createStreamingParser(library: Library): StreamParser {
let buffer = "";
let cachedStatements: Statement[] = [];
let cachedLineCount = 0;
let lastResult: ParseResult = { statements: [], root: null, errors: [] };
function parseAll(): ParseResult {
const allErrors: ParseError[] = [];
// Split into lines
const lines = buffer.split("\n");
const completeLines = lines.slice(0, -1);
const partialLine = lines[lines.length - 1] ?? "";
// Re-use cached statements for lines we've already parsed
const newCompleteCount = completeLines.length;
if (newCompleteCount > cachedLineCount) {
// Parse new complete lines
const newLines = completeLines.slice(cachedLineCount).join("\n");
if (newLines.trim()) {
const parser = Parser.fromSource(newLines);
const { statements, errors } = parser.parse();
cachedStatements = [...cachedStatements, ...statements];
allErrors.push(...errors);
}
cachedLineCount = newCompleteCount;
}
// Parse partial line with auto-closing
let partialStatements: Statement[] = [];
if (partialLine.trim()) {
const closed = autoClose(partialLine);
const parser = Parser.fromSource(closed);
const { statements, errors } = parser.parse();
partialStatements = statements;
// Don't report errors for partial lines — they're expected during streaming
void errors;
}
const allStatements = [...cachedStatements, ...partialStatements];
// Resolve references
const { root, errors: resolveErrors } = resolveReferences(allStatements);
allErrors.push(...resolveErrors);
// Transform to element tree
let rootElement: ElementNode | null = null;
if (root) {
const { element, errors: validateErrors } = validateAndTransform(
root,
library
);
rootElement = element;
allErrors.push(...validateErrors);
}
lastResult = {
statements: allStatements,
root: rootElement as ASTNode | null,
errors: allErrors,
};
return lastResult;
}
return {
push(chunk: string): ParseResult {
buffer += chunk;
return parseAll();
},
result(): ParseResult {
return lastResult;
},
reset(): void {
buffer = "";
cachedStatements = [];
cachedLineCount = 0;
lastResult = { statements: [], root: null, errors: [] };
},
};
}

View File

@@ -0,0 +1,111 @@
import { describe, it, expect } from "vitest";
import { Tokenizer } from "./tokenizer";
import { TokenType } from "../types";
describe("Tokenizer", () => {
  // Helper: tokenize and project to token types only.
  function tokenTypes(input: string): TokenType[] {
    return new Tokenizer(input).tokenize().map((t) => t.type);
  }
  // Helper: tokenize and project to token values only.
  // NOTE(review): currently unused by the tests in this suite.
  function tokenValues(input: string): string[] {
    return new Tokenizer(input).tokenize().map((t) => t.value);
  }
  it("tokenizes a simple assignment", () => {
    const tokens = new Tokenizer('x = "hello"').tokenize();
    expect(tokens.map((t) => t.type)).toEqual([
      TokenType.Identifier,
      TokenType.Equals,
      TokenType.String,
      TokenType.EOF,
    ]);
    expect(tokens[0]!.value).toBe("x");
    // String token value is unquoted
    expect(tokens[2]!.value).toBe("hello");
  });
  it("tokenizes a component call", () => {
    expect(tokenTypes('Button("Click me", main: true)')).toEqual([
      TokenType.Identifier,
      TokenType.LParen,
      TokenType.String,
      TokenType.Comma,
      TokenType.Identifier,
      TokenType.Colon,
      TokenType.Boolean,
      TokenType.RParen,
      TokenType.EOF,
    ]);
  });
  it("tokenizes arrays", () => {
    expect(tokenTypes('["a", "b", "c"]')).toEqual([
      TokenType.LBracket,
      TokenType.String,
      TokenType.Comma,
      TokenType.String,
      TokenType.Comma,
      TokenType.String,
      TokenType.RBracket,
      TokenType.EOF,
    ]);
  });
  it("tokenizes objects", () => {
    expect(tokenTypes('{name: "Alice", age: 30}')).toEqual([
      TokenType.LBrace,
      TokenType.Identifier,
      TokenType.Colon,
      TokenType.String,
      TokenType.Comma,
      TokenType.Identifier,
      TokenType.Colon,
      TokenType.Number,
      TokenType.RBrace,
      TokenType.EOF,
    ]);
  });
  it("tokenizes numbers including negatives and decimals", () => {
    const tokens = new Tokenizer("42 -7 3.14").tokenize();
    const numbers = tokens.filter((t) => t.type === TokenType.Number);
    expect(numbers.map((t) => t.value)).toEqual(["42", "-7", "3.14"]);
  });
  it("tokenizes booleans and null", () => {
    expect(tokenTypes("true false null")).toEqual([
      TokenType.Boolean,
      TokenType.Boolean,
      TokenType.Null,
      TokenType.EOF,
    ]);
  });
  it("handles escaped strings", () => {
    const tokens = new Tokenizer('"hello \\"world\\""').tokenize();
    expect(tokens[0]!.value).toBe('hello "world"');
  });
  it("emits newlines only at bracket depth 0", () => {
    // Inside parens — newlines suppressed
    const inside = tokenTypes('Foo(\n"a",\n"b"\n)');
    expect(inside.filter((t) => t === TokenType.Newline)).toHaveLength(0);
    // At top level — newlines emitted
    const outside = tokenTypes("x = 1\ny = 2");
    expect(outside.filter((t) => t === TokenType.Newline)).toHaveLength(1);
  });
  it("skips line comments", () => {
    const tokens = new Tokenizer(
      "x = 1 // this is a comment\ny = 2"
    ).tokenize();
    const idents = tokens.filter((t) => t.type === TokenType.Identifier);
    expect(idents.map((t) => t.value)).toEqual(["x", "y"]);
  });
  it("tracks line and column numbers", () => {
    const tokens = new Tokenizer("x = 1\ny = 2").tokenize();
    const y = tokens.find((t) => t.value === "y");
    expect(y?.line).toBe(2);
  });
});

View File

@@ -0,0 +1,294 @@
import { Token, TokenType } from "../types";
const WHITESPACE = /[ \t\r]/;
const DIGIT = /[0-9]/;
const IDENT_START = /[a-zA-Z_]/;
const IDENT_CHAR = /[a-zA-Z0-9_]/;
/**
 * Tokenizer for GenUI Lang source text.
 *
 * Produces a flat token list terminated by EOF. Newline tokens are emitted
 * only at bracket depth 0 (so multi-line calls inside (), [], {} read as one
 * logical line); `//` line comments and unknown characters are skipped.
 */
export class Tokenizer {
  private input: string;
  private pos = 0;
  // 1-based line/column of the character at `pos`.
  private line = 1;
  private column = 1;
  // Nesting depth of ()/[]/{} — suppresses Newline tokens when > 0.
  private bracketDepth = 0;
  constructor(input: string) {
    this.input = input;
  }
  /**
   * Scan the whole input and return all tokens, ending with an EOF token.
   */
  tokenize(): Token[] {
    const tokens: Token[] = [];
    while (this.pos < this.input.length) {
      this.skipWhitespace();
      if (this.pos >= this.input.length) break;
      const ch = this.input[this.pos]!;
      // Comments — skip to end of line
      if (ch === "/" && this.input[this.pos + 1] === "/") {
        this.skipLineComment();
        continue;
      }
      if (ch === "\n") {
        // Newlines only matter at bracket depth 0
        if (this.bracketDepth === 0) {
          tokens.push(this.makeToken(TokenType.Newline, "\n"));
        }
        this.advance();
        this.line++;
        this.column = 1;
        continue;
      }
      if (ch === '"' || ch === "'") {
        tokens.push(this.readString(ch));
        continue;
      }
      // A number is a digit, or a "-" immediately followed by a digit
      // (the language has no binary minus operator).
      if (
        DIGIT.test(ch) ||
        (ch === "-" && this.peek(1) !== undefined && DIGIT.test(this.peek(1)!))
      ) {
        tokens.push(this.readNumber());
        continue;
      }
      if (IDENT_START.test(ch)) {
        tokens.push(this.readIdentifier());
        continue;
      }
      switch (ch) {
        case "=":
          tokens.push(this.makeToken(TokenType.Equals, "="));
          this.advance();
          break;
        case ":":
          tokens.push(this.makeToken(TokenType.Colon, ":"));
          this.advance();
          break;
        case ",":
          tokens.push(this.makeToken(TokenType.Comma, ","));
          this.advance();
          break;
        case "(":
          this.bracketDepth++;
          tokens.push(this.makeToken(TokenType.LParen, "("));
          this.advance();
          break;
        case ")":
          // Clamp at 0 so stray closers cannot drive the depth negative.
          this.bracketDepth = Math.max(0, this.bracketDepth - 1);
          tokens.push(this.makeToken(TokenType.RParen, ")"));
          this.advance();
          break;
        case "[":
          this.bracketDepth++;
          tokens.push(this.makeToken(TokenType.LBracket, "["));
          this.advance();
          break;
        case "]":
          this.bracketDepth = Math.max(0, this.bracketDepth - 1);
          tokens.push(this.makeToken(TokenType.RBracket, "]"));
          this.advance();
          break;
        case "{":
          this.bracketDepth++;
          tokens.push(this.makeToken(TokenType.LBrace, "{"));
          this.advance();
          break;
        case "}":
          this.bracketDepth = Math.max(0, this.bracketDepth - 1);
          tokens.push(this.makeToken(TokenType.RBrace, "}"));
          this.advance();
          break;
        default:
          // Skip unknown characters
          this.advance();
          break;
      }
    }
    tokens.push(this.makeToken(TokenType.EOF, ""));
    return tokens;
  }
  // Skip spaces/tabs/CR (not \n — newlines are significant at depth 0).
  private skipWhitespace(): void {
    while (
      this.pos < this.input.length &&
      WHITESPACE.test(this.input[this.pos]!)
    ) {
      this.advance();
    }
  }
  // Consume a // comment up to (not including) the terminating newline.
  private skipLineComment(): void {
    while (this.pos < this.input.length && this.input[this.pos] !== "\n") {
      this.advance();
    }
  }
  /**
   * Read a quoted string starting at the opening quote. Handles \n, \t, \\,
   * \" and \' escapes; any other escaped character is kept literally.
   * An unterminated string (EOF before the closing quote) still yields a
   * String token with whatever was read.
   */
  private readString(quote: string): Token {
    const startOffset = this.pos;
    const startLine = this.line;
    const startCol = this.column;
    this.advance(); // skip opening quote
    let value = "";
    while (this.pos < this.input.length) {
      const ch = this.input[this.pos]!;
      if (ch === "\\") {
        this.advance();
        if (this.pos < this.input.length) {
          const escaped = this.input[this.pos]!;
          switch (escaped) {
            case "n":
              value += "\n";
              break;
            case "t":
              value += "\t";
              break;
            case "\\":
              value += "\\";
              break;
            case '"':
              value += '"';
              break;
            case "'":
              value += "'";
              break;
            default:
              value += escaped;
          }
          this.advance();
        }
        continue;
      }
      if (ch === quote) {
        this.advance(); // skip closing quote
        break;
      }
      if (ch === "\n") {
        // Literal newline inside a string: track position manually since
        // advance() only bumps the column.
        this.line++;
        this.column = 0;
      }
      value += ch;
      this.advance();
    }
    return {
      type: TokenType.String,
      value,
      offset: startOffset,
      line: startLine,
      column: startCol,
    };
  }
  /**
   * Read an integer or decimal number, with an optional leading "-".
   * The value is kept as the raw source text (no numeric conversion here).
   */
  private readNumber(): Token {
    const startOffset = this.pos;
    const startLine = this.line;
    const startCol = this.column;
    let value = "";
    if (this.input[this.pos] === "-") {
      value += "-";
      this.advance();
    }
    while (this.pos < this.input.length && DIGIT.test(this.input[this.pos]!)) {
      value += this.input[this.pos]!;
      this.advance();
    }
    if (this.pos < this.input.length && this.input[this.pos] === ".") {
      value += ".";
      this.advance();
      while (
        this.pos < this.input.length &&
        DIGIT.test(this.input[this.pos]!)
      ) {
        value += this.input[this.pos]!;
        this.advance();
      }
    }
    return {
      type: TokenType.Number,
      value,
      offset: startOffset,
      line: startLine,
      column: startCol,
    };
  }
  /**
   * Read an identifier; "true"/"false" become Boolean tokens and "null"
   * becomes a Null token.
   */
  private readIdentifier(): Token {
    const startOffset = this.pos;
    const startLine = this.line;
    const startCol = this.column;
    let value = "";
    while (
      this.pos < this.input.length &&
      IDENT_CHAR.test(this.input[this.pos]!)
    ) {
      value += this.input[this.pos]!;
      this.advance();
    }
    // Check for keywords
    if (value === "true" || value === "false") {
      return {
        type: TokenType.Boolean,
        value,
        offset: startOffset,
        line: startLine,
        column: startCol,
      };
    }
    if (value === "null") {
      return {
        type: TokenType.Null,
        value,
        offset: startOffset,
        line: startLine,
        column: startCol,
      };
    }
    return {
      type: TokenType.Identifier,
      value,
      offset: startOffset,
      line: startLine,
      column: startCol,
    };
  }
  // Build a token at the CURRENT position (call before advance()).
  private makeToken(type: TokenType, value: string): Token {
    return {
      type,
      value,
      offset: this.pos,
      line: this.line,
      column: this.column,
    };
  }
  // Move one character forward; line tracking is handled by callers.
  private advance(): void {
    this.pos++;
    this.column++;
  }
  // Look ahead without consuming; undefined past end of input.
  private peek(offset: number): string | undefined {
    return this.input[this.pos + offset];
  }
}

View File

@@ -0,0 +1,473 @@
import { describe, it, expect } from "vitest";
import { z } from "zod";
import { validateAndTransform } from "./validator";
import { defineComponent } from "../component";
import { createLibrary } from "../library";
import type { ASTNode, ComponentNode, Library } from "../types";
// ── Test library setup ──
// Component with one required and one enum-constrained optional prop.
const ButtonDef = defineComponent({
  name: "Button",
  description: "A clickable button",
  props: z.object({
    label: z.string(),
    variant: z.enum(["primary", "secondary"]).optional(),
  }),
  component: null,
});
// Component whose single prop is the conventional "children" slot.
const TextDef = defineComponent({
  name: "Text",
  description: "A text element",
  props: z.object({
    children: z.string(),
  }),
  component: null,
});
// Two string props — used to test positional-arg ordering.
const CardDef = defineComponent({
  name: "Card",
  description: "A card container",
  props: z.object({
    title: z.string(),
    subtitle: z.string().optional(),
  }),
  component: null,
});
// All-optional props.
const InputDef = defineComponent({
  name: "Input",
  description: "A text input",
  props: z.object({
    placeholder: z.string().optional(),
    disabled: z.boolean().optional(),
  }),
  component: null,
});
// Zero-prop component.
const EmptyDef = defineComponent({
  name: "Divider",
  description: "A divider with no props",
  props: z.object({}),
  component: null,
});
// Fresh library containing all five fixture components.
function makeLibrary(): Library {
  return createLibrary([ButtonDef, TextDef, CardDef, InputDef, EmptyDef]);
}
// ── Helpers for building AST nodes ──
// lit/comp/ref/arr/obj construct the AST node shapes the validator consumes,
// keeping the test bodies terse.
function lit(value: string | number | boolean | null): ASTNode {
  return { kind: "literal", value };
}
function comp(
  name: string,
  args: { key: string | null; value: ASTNode }[]
): ComponentNode {
  return { kind: "component", name, args };
}
function ref(name: string): ASTNode {
  return { kind: "reference", name };
}
function arr(elements: ASTNode[]): ASTNode {
  return { kind: "array", elements };
}
function obj(entries: { key: string; value: ASTNode }[]): ASTNode {
  return { kind: "object", entries };
}
// ── Tests ──
describe("validateAndTransform", () => {
const library = makeLibrary();
describe("positional argument mapping", () => {
it("maps first positional arg to first prop", () => {
const node = comp("Button", [{ key: null, value: lit("Click me") }]);
const { element, errors } = validateAndTransform(node, library);
expect(element).not.toBeNull();
expect(element!.component).toBe("Button");
expect(element!.props.label).toBe("Click me");
// variant is optional so no validation error for missing it
expect(errors).toHaveLength(0);
});
it("maps second positional arg to second prop", () => {
const node = comp("Button", [
{ key: null, value: lit("Click me") },
{ key: null, value: lit("secondary") },
]);
const { element, errors } = validateAndTransform(node, library);
expect(element!.props.label).toBe("Click me");
expect(element!.props.variant).toBe("secondary");
expect(errors).toHaveLength(0);
});
it("maps multiple positional args in order for Card", () => {
const node = comp("Card", [
{ key: null, value: lit("My Title") },
{ key: null, value: lit("My Subtitle") },
]);
const { element, errors } = validateAndTransform(node, library);
expect(element!.props.title).toBe("My Title");
expect(element!.props.subtitle).toBe("My Subtitle");
expect(errors).toHaveLength(0);
});
});
describe("named arguments", () => {
it("passes named args through correctly", () => {
const node = comp("Button", [
{ key: "label", value: lit("OK") },
{ key: "variant", value: lit("primary") },
]);
const { element, errors } = validateAndTransform(node, library);
expect(element!.props.label).toBe("OK");
expect(element!.props.variant).toBe("primary");
expect(errors).toHaveLength(0);
});
it("handles named args in any order", () => {
const node = comp("Button", [
{ key: "variant", value: lit("secondary") },
{ key: "label", value: lit("Cancel") },
]);
const { element, errors } = validateAndTransform(node, library);
expect(element!.props.label).toBe("Cancel");
expect(element!.props.variant).toBe("secondary");
expect(errors).toHaveLength(0);
});
});
describe("mixed positional + named arguments", () => {
it("maps positional first then named", () => {
const node = comp("Button", [
{ key: null, value: lit("Submit") },
{ key: "variant", value: lit("primary") },
]);
const { element, errors } = validateAndTransform(node, library);
expect(element!.props.label).toBe("Submit");
expect(element!.props.variant).toBe("primary");
expect(errors).toHaveLength(0);
});
});
describe("unknown component", () => {
it("produces error but still returns element", () => {
const node = comp("Nonexistent", [{ key: null, value: lit("hello") }]);
const { element, errors } = validateAndTransform(node, library);
expect(element).not.toBeNull();
expect(element!.component).toBe("Nonexistent");
expect(errors).toHaveLength(1);
expect(errors[0]!.message).toContain('Unknown component: "Nonexistent"');
});
it("still assigns positional args as generic props for unknown component", () => {
// Unknown component has no paramDefs, so all positional args become children
const node = comp("Foo", [
{ key: null, value: comp("Button", [{ key: null, value: lit("hi") }]) },
]);
const { element } = validateAndTransform(node, library);
// The positional arg should become a child since there are no param defs
expect(element!.children).toHaveLength(1);
expect(element!.children[0]!.component).toBe("Button");
});
it("passes named args through even for unknown component", () => {
const node = comp("Unknown", [{ key: "title", value: lit("hey") }]);
const { element } = validateAndTransform(node, library);
expect(element!.props.title).toBe("hey");
});
});
describe("literal string wrapping", () => {
it("wraps a literal string in a Text element", () => {
const node = lit("Hello world");
const { element, errors } = validateAndTransform(node, library);
expect(element).not.toBeNull();
expect(element!.component).toBe("Text");
expect(element!.props.children).toBe("Hello world");
expect(element!.children).toHaveLength(0);
expect(errors).toHaveLength(0);
});
it("returns null for non-string literals", () => {
const numNode = lit(42);
expect(validateAndTransform(numNode, library).element).toBeNull();
const boolNode = lit(true);
expect(validateAndTransform(boolNode, library).element).toBeNull();
const nullNode = lit(null);
expect(validateAndTransform(nullNode, library).element).toBeNull();
});
});
describe("array wrapping", () => {
it("wraps array in a Stack element", () => {
const node = arr([
comp("Button", [{ key: null, value: lit("A") }]),
comp("Button", [{ key: null, value: lit("B") }]),
]);
const { element, errors } = validateAndTransform(node, library);
expect(element).not.toBeNull();
expect(element!.component).toBe("Stack");
expect(element!.children).toHaveLength(2);
expect(element!.children[0]!.component).toBe("Button");
expect(element!.children[1]!.component).toBe("Button");
expect(errors).toHaveLength(0);
});
it("filters out null elements from array children", () => {
// A number literal returns null from transformNode
const node = arr([
comp("Button", [{ key: null, value: lit("OK") }]),
lit(42),
]);
const { element } = validateAndTransform(node, library);
expect(element!.component).toBe("Stack");
expect(element!.children).toHaveLength(1);
});
it("wraps empty array as empty Stack", () => {
const node = arr([]);
const { element } = validateAndTransform(node, library);
expect(element!.component).toBe("Stack");
expect(element!.children).toHaveLength(0);
});
});
describe("object literal", () => {
it("produces error and returns null", () => {
const node = obj([{ key: "a", value: lit("b") }]);
const { element, errors } = validateAndTransform(node, library);
expect(element).toBeNull();
expect(errors).toHaveLength(1);
expect(errors[0]!.message).toContain("Object literal cannot be rendered");
});
});
describe("unresolved reference", () => {
  it("produces __Unresolved placeholder", () => {
    const { element, errors } = validateAndTransform(ref("someVar"), library);
    expect(errors).toHaveLength(0);
    expect(element).not.toBeNull();
    const placeholder = element!;
    expect(placeholder.component).toBe("__Unresolved");
    expect(placeholder.props.name).toBe("someVar");
    expect(placeholder.children).toHaveLength(0);
  });
});
describe("nested components", () => {
  it("handles component as a named arg value", () => {
    // A component used as a prop value is converted into an ElementNode
    // object by astToValue rather than becoming a child.
    const inner = comp("Button", [{ key: null, value: lit("Inner") }]);
    const card = comp("Card", [{ key: "title", value: inner }]);
    const { element } = validateAndTransform(card, library);
    expect(element!.props.title).toEqual({
      kind: "element",
      component: "Button",
      props: { label: "Inner" },
      children: [],
    });
  });
  it("handles component as positional arg", () => {
    const inner = comp("Text", [{ key: null, value: lit("hello") }]);
    const card = comp("Card", [{ key: null, value: inner }]);
    const { element } = validateAndTransform(card, library);
    // The first positional argument maps to Card's "title" param.
    expect(element!.props.title).toEqual(
      expect.objectContaining({ kind: "element", component: "Text" })
    );
  });
});
describe("Zod validation errors", () => {
  it("reports error when required prop is missing", () => {
    // Button requires a string label; pass no args at all.
    const { element, errors } = validateAndTransform(
      comp("Button", []),
      library
    );
    expect(element).not.toBeNull(); // the element is still produced
    expect(element!.component).toBe("Button");
    expect(errors.length).toBeGreaterThan(0);
    expect(errors.some((err) => err.message.includes("Button"))).toBe(true);
  });
  it("reports error for wrong type", () => {
    // label expects a string — a numeric literal should be rejected.
    const badLabel = comp("Button", [{ key: null, value: lit(42) }]);
    const { element, errors } = validateAndTransform(badLabel, library);
    expect(element).not.toBeNull();
    expect(errors.length).toBeGreaterThan(0);
    expect(errors.some((err) => err.message.includes("label"))).toBe(true);
  });
  it("reports error for invalid enum value", () => {
    const badVariant = comp("Button", [
      { key: null, value: lit("OK") },
      { key: null, value: lit("invalid-variant") },
    ]);
    const { element, errors } = validateAndTransform(badVariant, library);
    expect(element).not.toBeNull();
    expect(errors.length).toBeGreaterThan(0);
    expect(errors.some((err) => err.message.includes("variant"))).toBe(true);
  });
  it("reports error for wrong boolean type", () => {
    // Input.disabled expects a boolean — a string should be rejected.
    const badDisabled = comp("Input", [
      { key: "disabled", value: lit("yes") },
    ]);
    const { element, errors } = validateAndTransform(badDisabled, library);
    expect(element).not.toBeNull();
    expect(errors.length).toBeGreaterThan(0);
    expect(errors.some((err) => err.message.includes("disabled"))).toBe(true);
  });
});
describe("extra positional args beyond param count", () => {
  it("treats extra positional args as children", () => {
    // Button declares two params (label, variant); the third positional
    // argument has no param slot and therefore becomes a child element.
    const overflow = comp("Text", [{ key: null, value: lit("extra") }]);
    const { element } = validateAndTransform(
      comp("Button", [
        { key: null, value: lit("Click") },
        { key: null, value: lit("primary") },
        { key: null, value: overflow },
      ]),
      library
    );
    expect(element!.children).toHaveLength(1);
    expect(element!.children[0]!.component).toBe("Text");
  });
  it("handles multiple extra positional args as children", () => {
    const extras = [
      comp("Button", [{ key: null, value: lit("A") }]),
      comp("Button", [{ key: null, value: lit("B") }]),
    ];
    const { element } = validateAndTransform(
      comp("Button", [
        { key: null, value: lit("Parent") },
        { key: null, value: lit("primary") },
        ...extras.map((value) => ({ key: null, value })),
      ]),
      library
    );
    expect(element!.children).toHaveLength(2);
  });
  it("skips null children from extra positional args", () => {
    // A number literal transforms to null and must not appear as a child.
    const { element } = validateAndTransform(
      comp("Button", [
        { key: null, value: lit("Click") },
        { key: null, value: lit("primary") },
        { key: null, value: lit(999) },
      ]),
      library
    );
    expect(element!.children).toHaveLength(0);
  });
});
describe("component with no args", () => {
  it("renders component with empty props and no errors when all props are optional", () => {
    const { element, errors } = validateAndTransform(
      comp("Input", []),
      library
    );
    expect(errors).toHaveLength(0);
    expect(element).not.toBeNull();
    const input = element!;
    expect(input.component).toBe("Input");
    expect(input.props).toEqual({});
    expect(input.children).toHaveLength(0);
  });
  it("renders component with empty props and no children for empty schema", () => {
    const { element, errors } = validateAndTransform(
      comp("Divider", []),
      library
    );
    expect(errors).toHaveLength(0);
    expect(element!.component).toBe("Divider");
    expect(element!.props).toEqual({});
  });
});
describe("astToValue edge cases", () => {
  it("converts array prop values with nested components", () => {
    // Components inside an array prop become ElementNode objects.
    const arrayProp = arr([comp("Button", [{ key: null, value: lit("A") }])]);
    const { element } = validateAndTransform(
      comp("Card", [{ key: "title", value: arrayProp }]),
      library
    );
    const title = element!.props.title as unknown[];
    expect(Array.isArray(title)).toBe(true);
    expect(title[0]).toEqual(
      expect.objectContaining({ kind: "element", component: "Button" })
    );
  });
  it("converts object prop values to plain objects", () => {
    const objectProp = obj([
      { key: "text", value: lit("hello") },
      { key: "bold", value: lit(true) },
    ]);
    const { element } = validateAndTransform(
      comp("Card", [{ key: "title", value: objectProp }]),
      library
    );
    expect(element!.props.title).toEqual({ text: "hello", bold: true });
  });
  it("converts reference in prop to { __ref: name } placeholder", () => {
    const { element } = validateAndTransform(
      comp("Card", [{ key: "title", value: ref("myVar") }]),
      library
    );
    expect(element!.props.title).toEqual({ __ref: "myVar" });
  });
  it("converts literal prop values directly", () => {
    const { element, errors } = validateAndTransform(
      comp("Input", [
        { key: "placeholder", value: lit("Type here") },
        { key: "disabled", value: lit(false) },
      ]),
      library
    );
    expect(errors).toHaveLength(0);
    expect(element!.props.placeholder).toBe("Type here");
    expect(element!.props.disabled).toBe(false);
  });
});
});

View File

@@ -0,0 +1,179 @@
import type {
ASTNode,
ComponentNode,
ElementNode,
ParseError,
ParamDef,
} from "../types";
import type { Library } from "../types";
/**
* Convert a resolved AST into an ElementNode tree.
*
* - Maps positional arguments to named props using ParamDef ordering
* - Validates prop values against Zod schemas
* - Unknown components produce errors but still render (as generic elements)
*/
export function validateAndTransform(
  node: ASTNode,
  library: Library
): { element: ElementNode | null; errors: ParseError[] } {
  // Errors are collected as a side channel while the tree is transformed.
  const collected: ParseError[] = [];
  const root = transformNode(node, library, collected);
  return { element: root, errors: collected };
}
/** Recursively convert one AST node into a renderable ElementNode (or null). */
function transformNode(
  node: ASTNode,
  library: Library,
  errors: ParseError[]
): ElementNode | null {
  if (node.kind === "component") {
    return transformComponent(node, library, errors);
  }
  if (node.kind === "literal") {
    // Only string literals are renderable — they become Text elements.
    // Numbers, booleans, and null have no visual form.
    if (typeof node.value !== "string") {
      return null;
    }
    return {
      kind: "element",
      component: "Text",
      props: { children: node.value },
      children: [],
    };
  }
  if (node.kind === "array") {
    // A root-level array renders as a Stack of its renderable elements;
    // non-renderable entries (null results) are filtered out.
    const children: ElementNode[] = [];
    for (const el of node.elements) {
      const child = transformNode(el, library, errors);
      if (child !== null) {
        children.push(child);
      }
    }
    return { kind: "element", component: "Stack", props: {}, children };
  }
  if (node.kind === "object") {
    // Plain objects have no component mapping — report and skip.
    errors.push({
      message: "Object literal cannot be rendered as a component",
      line: 0,
      column: 0,
    });
    return null;
  }
  // node.kind === "reference": the resolver left it unbound — emit a
  // placeholder element so the renderer can surface it.
  return {
    kind: "element",
    component: "__Unresolved",
    props: { name: node.name },
    children: [],
  };
}
/**
 * Transform a ComponentNode into an ElementNode: positional args are mapped
 * to named props via the library's ParamDef ordering, overflow positionals
 * become children, and the assembled props are validated against the
 * component's Zod schema. Unknown components are reported but still render.
 */
function transformComponent(
  node: ComponentNode,
  library: Library,
  errors: ParseError[]
): ElementNode {
  const def = library.resolve(node.name);
  const positionalParams = def
    ? library.paramMap().get(node.name) ?? []
    : [];
  const props: Record<string, unknown> = {};
  const children: ElementNode[] = [];
  let nextPositional = 0;
  for (const arg of node.args) {
    if (arg.key !== null) {
      // Named argument — assign straight to the prop bag.
      props[arg.key] = astToValue(arg.value, library, errors, children);
      continue;
    }
    // Positional argument — consume the next declared param slot.
    const param = positionalParams[nextPositional] as ParamDef | undefined;
    nextPositional++;
    if (param) {
      props[param.name] = astToValue(arg.value, library, errors, children);
    } else {
      // No param slot left: extra positionals become child elements
      // (non-renderable values yield null and are skipped).
      const child = transformNode(arg.value, library, errors);
      if (child) {
        children.push(child);
      }
    }
  }
  if (!def) {
    errors.push({
      message: `Unknown component: "${node.name}"`,
      line: 0,
      column: 0,
    });
  } else {
    // Validation failures are reported but never block rendering.
    const parsed = def.props.safeParse(props);
    if (!parsed.success) {
      for (const issue of parsed.error.issues) {
        errors.push({
          message: `${node.name}: ${issue.path.join(".")}: ${issue.message}`,
          line: 0,
          column: 0,
        });
      }
    }
  }
  return {
    kind: "element",
    component: node.name,
    props,
    children,
  };
}
/**
* Convert an AST node to a plain JS value for use as a prop.
*/
function astToValue(
  node: ASTNode,
  library: Library,
  errors: ParseError[],
  children: ElementNode[]
): unknown {
  if (node.kind === "literal") {
    return node.value;
  }
  if (node.kind === "component") {
    // Components used as prop values become ElementNode objects.
    return transformComponent(node, library, errors);
  }
  if (node.kind === "array") {
    return node.elements.map((el) =>
      el.kind === "component"
        ? transformComponent(el, library, errors)
        : astToValue(el, library, errors, children)
    );
  }
  if (node.kind === "object") {
    const result: Record<string, unknown> = {};
    for (const { key, value } of node.entries) {
      result[key] = astToValue(value, library, errors, children);
    }
    return result;
  }
  // Reference that survived resolution — keep a marker for the renderer.
  return { __ref: node.name };
}

View File

@@ -0,0 +1,114 @@
import type { Library, PromptOptions } from "../types";
import { schemaToSignature } from "./introspector";
/**
* Auto-generate a system prompt section from a component library.
*
* The generated prompt teaches the LLM:
* 1. The GenUI Lang syntax
* 2. Available components with signatures
* 3. Streaming guidelines
* 4. User-provided examples and rules
*/
export function generatePrompt(
  library: Library,
  options?: PromptOptions
): string {
  const sections: string[] = [
    // ── Header ──
    `# Structured UI Output (GenUI Lang)
When the user's request benefits from structured UI (tables, cards, buttons, layouts), respond using GenUI Lang — a compact, line-oriented markup. Otherwise respond in plain markdown.`,
    // ── Syntax ──
    `## Syntax
Each line declares a variable: \`name = expression\`
Expressions:
- \`ComponentName(arg1, arg2, key: value)\` — component with positional or named args
- \`[a, b, c]\` — array
- \`{key: value}\` — object
- \`"string"\`, \`42\`, \`true\`, \`false\`, \`null\` — literals
- \`variableName\` — reference to a previously defined variable
Rules:
- PascalCase identifiers are component types
- camelCase identifiers are variable references
- Positional args map to props in the order defined below
- The last statement is the root element (or name one \`root\`)
- Lines inside brackets/parens can span multiple lines
- Lines that don't match \`name = expression\` are treated as plain text`,
  ];
  // ── Components ── one bullet per component, grouped under "###" headers.
  const componentLines = groupComponents(library).flatMap(
    ([group, components]) => {
      const bullets = components.map(
        (c) => `- \`${schemaToSignature(c.name, c.props)}\`${c.description}`
      );
      return group ? [`\n### ${group}`, ...bullets] : bullets;
    }
  );
  sections.push(`## Available Components\n${componentLines.join("\n")}`);
  // ── Streaming Guidelines ── included unless explicitly disabled.
  if (options?.streaming !== false) {
    sections.push(`## Streaming Guidelines
- Define variables before referencing them
- Each line is independently parseable — the UI updates as each line completes
- Keep variable names short and descriptive
- Build up complex UIs incrementally: define data first, then layout`);
  }
  // ── Examples ──
  if (options?.examples?.length) {
    sections.push(
      `## Examples\n\n${options.examples
        .map((ex) => `### ${ex.description}\n\`\`\`\n${ex.code}\n\`\`\``)
        .join("\n\n")}`
    );
  }
  // ── Additional Rules ──
  if (options?.additionalRules?.length) {
    sections.push(
      `## Additional Guidelines\n\n${options.additionalRules
        .map((r) => `- ${r}`)
        .join("\n")}`
    );
  }
  return sections.join("\n\n");
}
/**
 * Group a library's components by their optional `group` label.
 *
 * Preserves first-seen group order and per-group insertion order (Map
 * iteration is insertion-ordered). Components with no group land under the
 * `undefined` key.
 *
 * Cleanup: the previous version declared a dead `result` array whose only
 * use was a `typeof result` type query, with the `ComponentEntry` alias
 * declared after that use — the alias is now declared first and used
 * directly as the Map's value type.
 */
function groupComponents(library: Library): [
  string | undefined,
  {
    name: string;
    description: string;
    props: import("zod").ZodObject<import("zod").ZodRawShape>;
  }[],
][] {
  type ComponentEntry = {
    name: string;
    description: string;
    props: import("zod").ZodObject<import("zod").ZodRawShape>;
  };
  const groups = new Map<string | undefined, ComponentEntry[]>();
  for (const comp of library.components.values()) {
    const bucket = groups.get(comp.group);
    if (bucket) {
      bucket.push(comp);
    } else {
      groups.set(comp.group, [comp]);
    }
  }
  return Array.from(groups.entries());
}

View File

@@ -0,0 +1,2 @@
export { generatePrompt } from "./generator";
export { zodToTypeString, schemaToSignature } from "./introspector";

View File

@@ -0,0 +1,75 @@
import { describe, it, expect } from "vitest";
import { z } from "zod";
import { zodToTypeString, schemaToSignature } from "./introspector";
describe("zodToTypeString", () => {
it("handles primitives", () => {
expect(zodToTypeString(z.string())).toBe("string");
expect(zodToTypeString(z.number())).toBe("number");
expect(zodToTypeString(z.boolean())).toBe("boolean");
expect(zodToTypeString(z.null())).toBe("null");
});
it("handles optional", () => {
expect(zodToTypeString(z.string().optional())).toBe("string?");
});
it("handles nullable", () => {
expect(zodToTypeString(z.string().nullable())).toBe("string | null");
});
it("handles enums", () => {
expect(zodToTypeString(z.enum(["a", "b", "c"]))).toBe('"a" | "b" | "c"');
});
it("handles arrays", () => {
expect(zodToTypeString(z.array(z.string()))).toBe("string[]");
});
it("handles objects", () => {
const schema = z.object({
name: z.string(),
age: z.number().optional(),
});
expect(zodToTypeString(schema)).toBe("{ name: string, age?: number }");
});
it("handles defaults", () => {
expect(zodToTypeString(z.string().default("hello"))).toBe("string?");
});
});
describe("schemaToSignature", () => {
  it("generates a function-like signature", () => {
    const buttonProps = z.object({
      label: z.string(),
      main: z.boolean().optional(),
      primary: z.boolean().optional(),
    });
    const sig = schemaToSignature("Button", buttonProps);
    expect(sig).toBe(
      "Button(label: string, main?: boolean, primary?: boolean)"
    );
  });
  it("handles required-only params", () => {
    const tagProps = z.object({
      title: z.string(),
      color: z.string(),
    });
    expect(schemaToSignature("Tag", tagProps)).toBe(
      "Tag(title: string, color: string)"
    );
  });
  it("handles enum params", () => {
    const widgetProps = z.object({
      size: z.enum(["sm", "md", "lg"]).optional(),
    });
    expect(schemaToSignature("Widget", widgetProps)).toBe(
      'Widget(size?: "sm" | "md" | "lg")'
    );
  });
});

View File

@@ -0,0 +1,140 @@
import { z } from "zod";
/**
* Convert a Zod schema to a human-readable type string for LLM prompts.
*
* Uses `_def.typeName` instead of `instanceof` to avoid issues with
* multiple Zod copies in the module graph.
*/
export function zodToTypeString(schema: z.ZodTypeAny): string {
  // Top-level calls are never already inside an optional wrapper.
  const insideOptional = false;
  return describeType(schema, insideOptional);
}
/**
 * Core recursion for zodToTypeString: render one Zod schema node as a
 * human-readable type string. Dispatches on `_def.typeName` (not
 * `instanceof`) so multiple Zod copies in the module graph stay compatible.
 *
 * @param isOptionalContext true when an enclosing wrapper already renders
 *   the value as optional, so ZodDefault must not append a second "?".
 */
function describeType(
  schema: z.ZodTypeAny,
  isOptionalContext: boolean
): string {
  const typeName = schema._def?.typeName as string | undefined;
  switch (typeName) {
    // ── Wrappers are unwrapped before anything else ──
    case "ZodOptional":
      return `${describeType(
        (schema as z.ZodOptional<z.ZodTypeAny>).unwrap(),
        true
      )}?`;
    case "ZodNullable":
      return `${describeType(
        (schema as z.ZodNullable<z.ZodTypeAny>).unwrap(),
        false
      )} | null`;
    case "ZodDefault": {
      // A default makes the value optional for callers; only append "?"
      // when no enclosing wrapper has done so already.
      const inner = describeType(
        (schema as z.ZodDefault<z.ZodTypeAny>).removeDefault(),
        true
      );
      return isOptionalContext ? inner : `${inner}?`;
    }
    // ── Primitives ──
    case "ZodString":
      return "string";
    case "ZodNumber":
      return "number";
    case "ZodBoolean":
      return "boolean";
    case "ZodNull":
      return "null";
    // ── Enums and literals ──
    case "ZodEnum": {
      const values = (schema as z.ZodEnum<[string, ...string[]]>)
        .options as string[];
      return values.map((v) => `"${v}"`).join(" | ");
    }
    case "ZodNativeEnum":
      return "enum";
    case "ZodLiteral": {
      const val = (schema as z.ZodLiteral<unknown>).value;
      return typeof val === "string" ? `"${val}"` : String(val);
    }
    // ── Containers ──
    case "ZodArray": {
      const inner = describeType(
        (schema as z.ZodArray<z.ZodTypeAny>).element,
        false
      );
      // Parenthesize unions/intersections so "[]" binds correctly.
      return inner.includes("|") || inner.includes("&")
        ? `(${inner})[]`
        : `${inner}[]`;
    }
    case "ZodObject": {
      const shape = (schema as z.ZodObject<z.ZodRawShape>).shape as Record<
        string,
        z.ZodTypeAny
      >;
      // Optionality moves onto the key ("age?:"), so any trailing "?" on
      // the value's own type text is stripped.
      const fields = Object.entries(shape).map(([key, val]) => {
        const typeStr = describeType(val, false).replace(/\?$/, "");
        return `${key}${val.isOptional() ? "?" : ""}: ${typeStr}`;
      });
      return `{ ${fields.join(", ")} }`;
    }
    case "ZodUnion": {
      const options = (schema as z.ZodUnion<[z.ZodTypeAny, ...z.ZodTypeAny[]]>)
        .options;
      return options
        .map((o: z.ZodTypeAny) => describeType(o, false))
        .join(" | ");
    }
    case "ZodRecord":
      return `Record<string, ${describeType(
        (schema as z.ZodRecord).element,
        false
      )}>`;
    case "ZodTuple": {
      const items = (schema as z.ZodTuple<[z.ZodTypeAny, ...z.ZodTypeAny[]]>)
        .items;
      return `[${items
        .map((i: z.ZodTypeAny) => describeType(i, false))
        .join(", ")}]`;
    }
    // ── Top types and fallback ──
    case "ZodAny":
      return "any";
    case "ZodUnknown":
      return "unknown";
    default:
      return "unknown";
  }
}
/**
* Generate a function-signature-style string for a component's props schema.
*
* Example: `Button(label: string, main?: boolean, primary?: boolean)`
*/
export function schemaToSignature(
  name: string,
  schema: z.ZodObject<z.ZodRawShape>
): string {
  const params: string[] = [];
  for (const [key, zodType] of Object.entries(schema.shape)) {
    const type = zodType as z.ZodTypeAny;
    // Optionality is shown on the param name, so strip any trailing "?"
    // from the rendered type text.
    const typeStr = zodToTypeString(type).replace(/\?$/, "");
    params.push(`${key}${type.isOptional() ? "?" : ""}: ${typeStr}`);
  }
  return `${name}(${params.join(", ")})`;
}

View File

@@ -0,0 +1,156 @@
import { z } from "zod";
// ── Token types produced by the tokenizer ──
export enum TokenType {
Identifier = "Identifier",
String = "String",
Number = "Number",
Boolean = "Boolean",
Null = "Null",
Equals = "Equals",
Colon = "Colon",
Comma = "Comma",
LParen = "LParen",
RParen = "RParen",
LBracket = "LBracket",
RBracket = "RBracket",
LBrace = "LBrace",
RBrace = "RBrace",
Newline = "Newline",
EOF = "EOF",
}
export interface Token {
type: TokenType;
value: string;
offset: number;
line: number;
column: number;
}
// ── AST nodes produced by the parser ──
export type ASTNode =
| ComponentNode
| ArrayNode
| ObjectNode
| LiteralNode
| ReferenceNode;
export interface ComponentNode {
kind: "component";
name: string;
args: ArgumentNode[];
}
export interface ArgumentNode {
key: string | null; // null = positional
value: ASTNode;
}
export interface ArrayNode {
kind: "array";
elements: ASTNode[];
}
export interface ObjectNode {
kind: "object";
entries: { key: string; value: ASTNode }[];
}
export interface LiteralNode {
kind: "literal";
value: string | number | boolean | null;
}
export interface ReferenceNode {
kind: "reference";
name: string;
}
// ── Resolved element tree (post-resolution) ──
export interface ElementNode {
kind: "element";
component: string;
props: Record<string, unknown>;
children: ElementNode[];
}
export interface TextElementNode {
kind: "text";
content: string;
}
export type ResolvedNode = ElementNode | TextElementNode;
// ── Statement = one line binding ──
export interface Statement {
name: string;
value: ASTNode;
}
// ── Parse result ──
export interface ParseError {
message: string;
line: number;
column: number;
offset?: number;
}
export interface ParseResult {
statements: Statement[];
root: ASTNode | null;
errors: ParseError[];
}
// ── Component definition ──
export interface ComponentDef<
T extends z.ZodObject<z.ZodRawShape> = z.ZodObject<z.ZodRawShape>,
> {
name: string;
description: string;
props: T;
component: unknown; // framework-agnostic — React renderer narrows this
group?: string;
}
// ── Param mapping (for positional → named resolution) ──
export interface ParamDef {
name: string;
required: boolean;
description?: string;
zodType: z.ZodTypeAny;
}
export type ParamMap = Map<string, ParamDef[]>;
// ── Library ──
export interface Library {
components: ReadonlyMap<string, ComponentDef>;
resolve(name: string): ComponentDef | undefined;
prompt(options?: PromptOptions): string;
paramMap(): ParamMap;
}
export interface PromptOptions {
/** Extra rules or guidelines appended to the prompt */
additionalRules?: string[];
/** Example GenUI Lang snippets */
examples?: { description: string; code: string }[];
/** If true, include streaming guidelines */
streaming?: boolean;
}
// ── Action events (from interactive components) ──
export interface ActionEvent {
actionId: string;
payload?: Record<string, unknown>;
}

View File

@@ -0,0 +1,22 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "ESNext",
"moduleResolution": "bundler",
"lib": ["ES2022"],
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true,
"forceConsistentCasingInFileNames": true,
"declaration": true,
"declarationMap": true,
"sourceMap": true,
"outDir": "dist",
"rootDir": "src",
"noUncheckedIndexedAccess": true,
"noUnusedLocals": true,
"noUnusedParameters": true
},
"include": ["src/**/*.ts"],
"exclude": ["node_modules", "dist", "**/*.test.ts"]
}

View File

@@ -0,0 +1,7 @@
import { defineConfig } from "vitest/config";
// Vitest configuration: run every *.test.ts file under src/.
const config = defineConfig({
  test: {
    include: ["src/**/*.test.ts"],
  },
});
export default config;

View File

@@ -0,0 +1,17 @@
{
"name": "@onyx/genui-onyx",
"version": "0.1.0",
"private": true,
"description": "Onyx component bindings for GenUI",
"exports": {
".": {
"types": "./src/index.ts",
"default": "./src/index.ts"
}
},
"scripts": {
"test": "vitest run",
"test:watch": "vitest",
"typecheck": "tsc --noEmit"
}
}

View File

@@ -0,0 +1,46 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import Message from "@/refresh-components/messages/Message";
// Binds the GenUI "Alert" component to the Onyx Message primitive.
export const alertComponent = defineComponent({
  name: "Alert",
  description: "A status message banner (info, success, warning, error)",
  group: "Feedback",
  props: z.object({
    text: z.string().describe("Alert message text"),
    description: z.string().optional().describe("Additional description"),
    level: z
      .enum(["default", "info", "success", "warning", "error"])
      .optional()
      .describe("Alert severity level"),
    showIcon: z.boolean().optional().describe("Show status icon"),
  }),
  component: ({
    props,
  }: {
    props: {
      text: string;
      description?: string;
      level?: "default" | "info" | "success" | "warning" | "error";
      showIcon?: boolean;
    };
  }) => {
    // Message takes one boolean flag per severity; derive all of them
    // from the single (defaulted) level value and spread them in.
    const severity = props.level ?? "default";
    const severityFlags = {
      default: severity === "default",
      info: severity === "info",
      success: severity === "success",
      warning: severity === "warning",
      error: severity === "error",
    };
    return (
      <Message
        static
        text={props.text}
        description={props.description}
        {...severityFlags}
        icon={props.showIcon !== false}
        close={false}
      />
    );
  },
});

View File

@@ -0,0 +1,62 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import Button from "@/refresh-components/buttons/Button";
import { useTriggerAction } from "@onyx/genui-react";
export const buttonComponent = defineComponent({
name: "Button",
description: "An interactive button that triggers an action",
group: "Interactive",
props: z.object({
children: z.string().describe("Button label text"),
main: z.boolean().optional().describe("Main variant styling"),
action: z.boolean().optional().describe("Action variant styling"),
danger: z.boolean().optional().describe("Danger/destructive variant"),
primary: z.boolean().optional().describe("Primary sub-variant"),
secondary: z.boolean().optional().describe("Secondary sub-variant"),
tertiary: z.boolean().optional().describe("Tertiary sub-variant"),
size: z.enum(["lg", "md"]).optional().describe("Button size"),
actionId: z
.string()
.optional()
.describe("Action identifier for event handling"),
disabled: z.boolean().optional().describe("Disable the button"),
}),
component: ({
props,
}: {
props: {
children: string;
main?: boolean;
action?: boolean;
danger?: boolean;
primary?: boolean;
secondary?: boolean;
tertiary?: boolean;
size?: "lg" | "md";
actionId?: string;
disabled?: boolean;
};
}) => {
const triggerAction = useTriggerAction();
return (
<Button
main={props.main}
action={props.action}
danger={props.danger}
primary={props.primary}
secondary={props.secondary}
tertiary={props.tertiary}
size={props.size}
disabled={props.disabled}
onClick={
props.actionId ? () => triggerAction(props.actionId!) : undefined
}
>
{props.children}
</Button>
);
},
});

View File

@@ -0,0 +1,34 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import Card from "@/refresh-components/cards/Card";
import Text from "@/refresh-components/texts/Text";
// Binds the GenUI "Card" component to the Onyx Card primitive: an optional
// heading rendered above arbitrary child content.
export const cardComponent = defineComponent({
  name: "Card",
  description: "A container card with optional title and padding",
  group: "Layout",
  props: z.object({
    title: z.string().optional().describe("Card heading"),
    // NOTE(review): `padding` is declared in the schema but never applied
    // below — Card always renders with variant="primary". Confirm whether
    // padding should be forwarded or removed from the schema.
    padding: z
      .enum(["none", "sm", "md", "lg"])
      .optional()
      .describe("Inner padding"),
  }),
  component: ({
    props,
    children,
  }: {
    props: { title?: string; padding?: string };
    children?: React.ReactNode;
  }) => (
    <Card variant="primary">
      {props.title && (
        <Text headingH3 text05>
          {props.title}
        </Text>
      )}
      {children}
    </Card>
  ),
});

View File

@@ -0,0 +1,30 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import Code from "@/refresh-components/Code";
// Binds the GenUI "Code" component to the Onyx Code block primitive.
export const codeComponent = defineComponent({
  name: "Code",
  description: "A code block with optional copy button",
  group: "Content",
  props: z.object({
    children: z.string().describe("The code content"),
    // NOTE(review): `language` is declared here but not forwarded to <Code>
    // below, so syntax highlighting may not take effect — confirm whether
    // Code accepts a language prop.
    language: z
      .string()
      .optional()
      .describe("Programming language for syntax highlighting"),
    showCopyButton: z
      .boolean()
      .optional()
      .describe("Show copy-to-clipboard button"),
  }),
  // The code string is passed as Code's children; only showCopyButton is
  // forwarded.
  component: ({
    props,
  }: {
    props: {
      children: string;
      language?: string;
      showCopyButton?: boolean;
    };
  }) => <Code showCopyButton={props.showCopyButton}>{props.children}</Code>,
});

View File

@@ -0,0 +1,19 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import Separator from "@/refresh-components/Separator";
// Binds the GenUI "Divider" component to the Onyx Separator primitive.
export const dividerComponent = defineComponent({
  name: "Divider",
  description: "A horizontal separator line",
  group: "Layout",
  props: z.object({
    // NOTE(review): only "sm" has an effect (it sets noPadding); "md" and
    // "lg" render identically to omitting the prop — confirm intended.
    spacing: z
      .enum(["sm", "md", "lg"])
      .optional()
      .describe("Vertical spacing around the divider"),
  }),
  component: ({ props }: { props: { spacing?: string } }) => (
    <Separator noPadding={props.spacing === "sm"} />
  ),
});

View File

@@ -0,0 +1,89 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import IconButton from "@/refresh-components/buttons/IconButton";
import { useTriggerAction } from "@onyx/genui-react";
import {
SvgCopy,
SvgDownload,
SvgExternalLink,
SvgMoreHorizontal,
SvgPlus,
SvgRefreshCw,
SvgSearch,
SvgSettings,
SvgTrash,
SvgX,
} from "@opal/icons";
import type { IconFunctionComponent } from "@opal/types";
// Maps the string icon names accepted by IconButton's "icon" prop to the
// corresponding Opal SVG components. Unknown names fall back to
// SvgMoreHorizontal where this map is consumed below.
const iconMap: Record<string, IconFunctionComponent> = {
  copy: SvgCopy,
  download: SvgDownload,
  "external-link": SvgExternalLink,
  more: SvgMoreHorizontal,
  plus: SvgPlus,
  refresh: SvgRefreshCw,
  search: SvgSearch,
  settings: SvgSettings,
  trash: SvgTrash,
  close: SvgX,
};
export const iconButtonComponent = defineComponent({
name: "IconButton",
description: "A button that displays an icon with an optional tooltip",
group: "Interactive",
props: z.object({
icon: z
.string()
.describe(
"Icon name (copy, download, external-link, more, plus, refresh, search, settings, trash, close)"
),
tooltip: z.string().optional().describe("Tooltip text on hover"),
main: z.boolean().optional().describe("Main variant styling"),
action: z.boolean().optional().describe("Action variant styling"),
danger: z.boolean().optional().describe("Danger/destructive variant"),
primary: z.boolean().optional().describe("Primary sub-variant"),
secondary: z.boolean().optional().describe("Secondary sub-variant"),
actionId: z
.string()
.optional()
.describe("Action identifier for event handling"),
disabled: z.boolean().optional().describe("Disable the button"),
}),
component: ({
props,
}: {
props: {
icon: string;
tooltip?: string;
main?: boolean;
action?: boolean;
danger?: boolean;
primary?: boolean;
secondary?: boolean;
actionId?: string;
disabled?: boolean;
};
}) => {
const triggerAction = useTriggerAction();
const IconComponent = iconMap[props.icon] ?? SvgMoreHorizontal;
return (
<IconButton
icon={IconComponent}
tooltip={props.tooltip}
main={props.main}
action={props.action}
danger={props.danger}
primary={props.primary}
secondary={props.secondary}
disabled={props.disabled}
onClick={
props.actionId ? () => triggerAction(props.actionId!) : undefined
}
/>
);
},
});

View File

@@ -0,0 +1,39 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import PreviewImage from "@/refresh-components/PreviewImage";
// Binds the GenUI "Image" component to the Onyx PreviewImage primitive.
export const imageComponent = defineComponent({
  name: "Image",
  description: "Displays an image",
  group: "Content",
  props: z.object({
    src: z.string().describe("Image URL"),
    alt: z.string().optional().describe("Alt text for accessibility"),
    width: z.string().optional().describe("CSS width"),
    height: z.string().optional().describe("CSS height"),
  }),
  component: ({
    props,
  }: {
    props: {
      src: string;
      alt?: string;
      width?: string;
      height?: string;
    };
  }) => (
    <PreviewImage
      src={props.src}
      alt={props.alt ?? ""}
      // NOTE(review): these Tailwind arbitrary-value classes are built at
      // runtime, so the build-time Tailwind scanner cannot see them and the
      // matching CSS may never be generated. Consider inline styles —
      // confirm against the project's Tailwind setup.
      className={
        [
          props.width ? `w-[${props.width}]` : undefined,
          props.height ? `h-[${props.height}]` : undefined,
        ]
          .filter(Boolean)
          .join(" ") || undefined
      }
    />
  ),
});

View File

@@ -0,0 +1,59 @@
import React, { useState, useCallback } from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import InputTypeIn from "@/refresh-components/inputs/InputTypeIn";
import { useTriggerAction } from "@onyx/genui-react";
// Binds the GenUI "Input" component to the Onyx InputTypeIn primitive.
// The field is a controlled input seeded from props.value; pressing Enter
// fires the bound action with the current text.
export const inputComponent = defineComponent({
  name: "Input",
  description: "A text input field",
  group: "Interactive",
  props: z.object({
    placeholder: z.string().optional().describe("Placeholder text"),
    value: z.string().optional().describe("Initial value"),
    actionId: z
      .string()
      .optional()
      .describe("Action identifier for value changes"),
    readOnly: z.boolean().optional().describe("Make the input read-only"),
  }),
  component: ({
    props,
  }: {
    props: {
      placeholder?: string;
      value?: string;
      actionId?: string;
      readOnly?: boolean;
    };
  }) => {
    const triggerAction = useTriggerAction();
    // props.value only seeds the initial render; later prop changes do not
    // reset the field (useState ignores its argument after the first call).
    const [value, setValue] = useState(props.value ?? "");
    const handleChange = useCallback(
      (e: React.ChangeEvent<HTMLInputElement>) => {
        setValue(e.target.value);
      },
      []
    );
    // Enter submits the current value through the action channel (if bound).
    const handleKeyDown = useCallback(
      (e: React.KeyboardEvent<HTMLInputElement>) => {
        if (e.key === "Enter" && props.actionId) {
          triggerAction(props.actionId, { value });
        }
      },
      [props.actionId, triggerAction, value]
    );
    // NOTE(review): readOnly only switches the visual variant; onChange
    // still updates local state here. Confirm InputTypeIn's "readOnly"
    // variant actually blocks editing.
    return (
      <InputTypeIn
        placeholder={props.placeholder}
        value={value}
        onChange={handleChange}
        onKeyDown={handleKeyDown}
        variant={props.readOnly ? "readOnly" : "primary"}
      />
    );
  },
});

View File

@@ -0,0 +1,114 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import { cn } from "@/lib/utils";
// Tailwind gap utilities keyed by the GenUI size token ("none" … "xl").
const gapMap: Record<string, string> = {
  none: "gap-0",
  xs: "gap-1",
  sm: "gap-2",
  md: "gap-4",
  lg: "gap-6",
  xl: "gap-8",
};
// Tailwind cross-axis alignment utilities keyed by the GenUI align token.
const alignMap: Record<string, string> = {
  start: "items-start",
  center: "items-center",
  end: "items-end",
  stretch: "items-stretch",
};
// Shared prop schemas used by both the Stack and Row layout components.
const gapSchema = z
  .enum(["none", "xs", "sm", "md", "lg", "xl"])
  .optional()
  .describe("Gap between children");
const alignSchema = z
  .enum(["start", "center", "end", "stretch"])
  .optional()
  .describe("Cross-axis alignment");
/**
 * GenUI binding for a vertical flex stack. Children flow top to bottom;
 * gap defaults to "sm", alignment is applied only when requested.
 */
export const stackComponent = defineComponent({
  name: "Stack",
  description: "Vertical stack layout — arranges children top to bottom",
  group: "Layout",
  props: z.object({
    children: z.array(z.unknown()).optional().describe("Child elements"),
    gap: gapSchema,
    align: alignSchema,
  }),
  component: ({
    props,
  }: {
    props: { children?: React.ReactNode[]; gap?: string; align?: string };
  }) => {
    const classes = cn(
      "flex flex-col",
      gapMap[props.gap ?? "sm"],
      props.align ? alignMap[props.align] : undefined
    );
    return <div className={classes}>{props.children}</div>;
  },
});
/**
 * GenUI binding for a horizontal flex row. Children flow left to right;
 * gap defaults to "sm", alignment and wrapping are opt-in.
 */
export const rowComponent = defineComponent({
  name: "Row",
  description: "Horizontal row layout — arranges children left to right",
  group: "Layout",
  props: z.object({
    children: z.array(z.unknown()).optional().describe("Child elements"),
    gap: gapSchema,
    align: alignSchema,
    wrap: z.boolean().optional().describe("Allow wrapping to next line"),
  }),
  component: ({
    props,
  }: {
    props: {
      children?: React.ReactNode[];
      gap?: string;
      align?: string;
      wrap?: boolean;
    };
  }) => {
    const classes = cn(
      "flex flex-row",
      gapMap[props.gap ?? "sm"],
      props.align ? alignMap[props.align] : undefined,
      props.wrap ? "flex-wrap" : undefined
    );
    return <div className={classes}>{props.children}</div>;
  },
});
/**
 * GenUI binding for a column inside a Row. Width is applied via inline CSS
 * (arbitrary values like "50%" or "200px" can't be Tailwind classes).
 */
export const columnComponent = defineComponent({
  name: "Column",
  description: "A column within a Row, with optional width control",
  group: "Layout",
  props: z.object({
    children: z.array(z.unknown()).optional().describe("Child elements"),
    width: z
      .string()
      .optional()
      .describe("CSS width (e.g. '50%', '200px', 'auto')"),
  }),
  component: ({
    props,
  }: {
    props: { children?: React.ReactNode[]; width?: string };
  }) => {
    const sizing = props.width ? { width: props.width } : undefined;
    return (
      <div className="flex flex-col" style={sizing}>
        {props.children}
      </div>
    );
  },
});

View File

@@ -0,0 +1,43 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import InlineExternalLink from "@/refresh-components/InlineExternalLink";
import Text from "@/refresh-components/texts/Text";
/**
 * GenUI binding for a hyperlink. Links are treated as external (rendered
 * through InlineExternalLink) unless `external` is explicitly false, in
 * which case a plain same-tab anchor is used.
 */
export const linkComponent = defineComponent({
  name: "Link",
  description: "A clickable hyperlink",
  group: "Content",
  props: z.object({
    children: z.string().describe("Link text"),
    href: z.string().describe("URL to link to"),
    external: z.boolean().optional().describe("Open in new tab"),
  }),
  component: ({
    props,
  }: {
    props: {
      children: string;
      href: string;
      external?: boolean;
    };
  }) => {
    // Shared label styling for both branches.
    const label = (
      <Text mainContentBody text05 as="span" className="underline">
        {props.children}
      </Text>
    );

    if (props.external === false) {
      return (
        <a href={props.href} className="underline">
          {label}
        </a>
      );
    }

    return <InlineExternalLink href={props.href}>{label}</InlineExternalLink>;
  },
});

View File

@@ -0,0 +1,45 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import Text from "@/refresh-components/texts/Text";
/**
 * GenUI binding for a bulleted or numbered list of plain-text items.
 */
export const listComponent = defineComponent({
  name: "List",
  description: "An ordered or unordered list",
  group: "Content",
  props: z.object({
    items: z.array(z.string()).describe("List item texts"),
    ordered: z
      .boolean()
      .optional()
      .describe("Use numbered list instead of bullets"),
  }),
  component: ({
    props,
  }: {
    props: {
      items: string[];
      ordered?: boolean;
    };
  }) => {
    // <ol> with decimal markers when ordered, otherwise <ul> with bullets.
    const ListTag = props.ordered ? "ol" : "ul";
    const markerClass = props.ordered ? "list-decimal" : "list-disc";
    return (
      <ListTag className={`${markerClass} pl-6 space-y-1`}>
        {(props.items ?? []).map((entry, idx) => (
          <li key={idx}>
            <Text mainContentBody text05 as="span">
              {entry}
            </Text>
          </li>
        ))}
      </ListTag>
    );
  },
});

View File

@@ -0,0 +1,103 @@
import React from "react";
import { z } from "zod";
import { defineComponent, type ElementNode } from "@onyx/genui";
import Text from "@/refresh-components/texts/Text";
/**
* Lightweight table renderer for GenUI.
*
* We don't use DataTable here because it requires TanStack column definitions
* and typed data — overkill for LLM-generated tables. Instead we render a
* simple HTML table styled with Onyx design tokens.
*/
/**
 * GenUI binding for a simple data table. Header labels come from `columns`;
 * each entry of `rows` is an array of cell values rendered via renderCell.
 */
export const tableComponent = defineComponent({
  name: "Table",
  description: "A data table with columns and rows",
  group: "Content",
  props: z.object({
    columns: z.array(z.string()).describe("Column header labels"),
    rows: z
      .array(z.array(z.unknown()))
      .describe("Row data as arrays of values"),
    compact: z.boolean().optional().describe("Use compact row height"),
  }),
  component: ({
    props,
  }: {
    props: {
      columns: string[];
      rows: unknown[][];
      compact?: boolean;
    };
  }) => {
    // Compact mode shrinks vertical padding only.
    const pad = props.compact ? "px-3 py-1.5" : "px-3 py-2.5";
    const headerLabels = props.columns ?? [];
    const bodyRows = props.rows ?? [];

    const headerRow = (
      <tr className="bg-background-neutral-01">
        {headerLabels.map((label, idx) => (
          <th
            key={idx}
            className={`${pad} text-left border-b border-border-01`}
          >
            <Text mainUiAction text03>
              {label}
            </Text>
          </th>
        ))}
      </tr>
    );

    const dataRows = bodyRows.map((row, rowIdx) => {
      // Defensive: a resolver may hand us a single value (or a rendered
      // element) instead of an array — treat it as a one-cell row.
      const cells = Array.isArray(row) ? row : [row];
      return (
        <tr
          key={rowIdx}
          className="border-b border-border-01 last:border-b-0"
        >
          {cells.map((cell, cellIdx) => (
            <td key={cellIdx} className={pad}>
              {renderCell(cell)}
            </td>
          ))}
        </tr>
      );
    });

    return (
      <div className="w-full overflow-x-auto rounded-12 border border-border-01">
        <table className="w-full border-collapse">
          <thead>{headerRow}</thead>
          <tbody>{dataRows}</tbody>
        </table>
      </div>
    );
  },
});
/**
 * Render a single table-cell value.
 *
 * Already-rendered React elements (produced by NodeRenderer) pass through
 * untouched; string/number/boolean primitives are stringified into body
 * text; anything else (null, undefined, plain objects) renders as an empty
 * muted Text placeholder.
 */
function renderCell(cell: unknown): React.ReactNode {
  if (React.isValidElement(cell)) {
    return cell;
  }
  switch (typeof cell) {
    case "string":
    case "number":
    case "boolean":
      return (
        <Text mainContentBody text05>
          {String(cell)}
        </Text>
      );
    default:
      return (
        <Text mainContentBody text03>
        </Text>
      );
  }
}

View File

@@ -0,0 +1,46 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import { Tag } from "@opal/components";
// Single source of truth for the accepted tag colors: the zod enum. Both the
// runtime validation Set and the TagColor type are derived from it, so the
// three can never drift apart (previously the color list was written out
// three separate times).
const tagColorSchema = z.enum(["green", "purple", "blue", "gray", "amber"]);
type TagColor = z.infer<typeof tagColorSchema>;
const VALID_TAG_COLORS = new Set<string>(tagColorSchema.options);

/**
 * GenUI binding for a small colored label tag. Unknown or missing colors
 * fall back to "gray" so LLM-produced values can never crash the Tag
 * component.
 */
export const tagComponent = defineComponent({
  name: "Tag",
  description: "A small label tag with color",
  group: "Content",
  props: z.object({
    title: z.string().describe("Tag text"),
    color: tagColorSchema.optional().describe("Tag color"),
    size: z.enum(["sm", "md"]).optional().describe("Tag size"),
  }),
  component: ({
    props,
  }: {
    props: {
      title: string;
      color?: string;
      size?: "sm" | "md";
    };
  }) => {
    // Defensive re-check at render time: a variable-resolved value may
    // bypass schema validation, so unknown colors become "gray".
    const safeColor: TagColor =
      props.color && VALID_TAG_COLORS.has(props.color)
        ? (props.color as TagColor)
        : "gray";
    return (
      <Tag title={props.title ?? ""} color={safeColor} size={props.size} />
    );
  },
});

View File

@@ -0,0 +1,63 @@
import React from "react";
import { z } from "zod";
import { defineComponent } from "@onyx/genui";
import Text from "@/refresh-components/texts/Text";
/**
 * GenUI binding for the Text typography primitive.
 *
 * Headings render as <p> (the Text primitive supplies heading typography);
 * everything else renders inline as <span>. The plain body style is applied
 * only when no explicit variant flag was set.
 */
export const textComponent = defineComponent({
  name: "Text",
  description: "Displays text with typography variants",
  group: "Content",
  props: z.object({
    children: z.string().describe("The text content"),
    headingH1: z.boolean().optional().describe("Heading level 1"),
    headingH2: z.boolean().optional().describe("Heading level 2"),
    headingH3: z.boolean().optional().describe("Heading level 3"),
    muted: z.boolean().optional().describe("Muted/secondary style"),
    mono: z.boolean().optional().describe("Monospace font"),
    bold: z.boolean().optional().describe("Bold emphasis"),
  }),
  component: ({
    props,
  }: {
    props: {
      children: string;
      headingH1?: boolean;
      headingH2?: boolean;
      headingH3?: boolean;
      muted?: boolean;
      mono?: boolean;
      bold?: boolean;
    };
  }) => {
    // The original four-way ternary produced "p" for any heading level and
    // "span" otherwise — collapse that into one flag.
    const isHeading = Boolean(
      props.headingH1 || props.headingH2 || props.headingH3
    );
    const useBodyStyle =
      !isHeading && !props.muted && !props.mono && !props.bold;
    return (
      <Text
        as={isHeading ? ("p" as const) : ("span" as const)}
        headingH1={props.headingH1}
        headingH2={props.headingH2}
        headingH3={props.headingH3}
        mainContentMuted={props.muted}
        mainContentMono={props.mono}
        mainContentEmphasis={props.bold}
        mainContentBody={useBodyStyle}
        text05
      >
        {props.children}
      </Text>
    );
  },
});

Some files were not shown because too many files have changed in this diff Show More