Compare commits

..

12 Commits

Author SHA1 Message Date
Jamison Lahman
7332adb1e6 fix(copy-button): fall back when Clipboard API unavailable (#10080) 2026-04-10 22:49:56 -07:00
Nikolas Garza
0ab1b76765 Revert "feat(chat): smooth character-level streaming (#10076) to release v3.2" (#10082) 2026-04-10 20:49:39 -07:00
github-actions[bot]
40cd0a78a3 feat(chat): smooth character-level streaming (#10076) to release v3.2 (#10081)
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
2026-04-10 20:41:49 -07:00
github-actions[bot]
28d8c5de46 fix(chat): model selection + multi-model follow-up correctness (#10075) to release v3.2 (#10078) 2026-04-10 17:25:00 -07:00
github-actions[bot]
004092767f fix(mcp): prevent masked OAuth credentials from being stored on re-auth (#10066) to release v3.2 (#10069)
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-04-10 14:47:17 -07:00
Nikolas Garza
eb4689a669 fix(chat): hide ModelSelector in search mode (#10052) to release v3.2 (#10068) 2026-04-10 12:43:05 -07:00
github-actions[bot]
47dd8973c1 fix(scim): add advisory lock to prevent seat limit race condition (#10048) to release v3.2 (#10065)
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
2026-04-10 12:05:14 -07:00
github-actions[bot]
a1403ef78c feat(slack-bot): make agent selector searchable (#10036) to release v3.2 (#10038)
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
2026-04-10 12:04:51 -07:00
github-actions[bot]
f96b9d6804 fix(license): exclude service account users from seat count (#10053) to release v3.2 (#10061)
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
2026-04-10 12:04:30 -07:00
github-actions[bot]
711651276c fix(LLM config): resolve API Key before fetching models (#10056) to release v3.2 (#10057)
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-04-10 00:02:33 -07:00
github-actions[bot]
3731110cf9 feat(federated): full thread replies + direct URL fetch in Slack search (#9940) to release v3.2 (#10050)
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
2026-04-09 18:24:02 -07:00
Evan Lohn
8fb7a8718e fix: jira bulk issue fetch batching (#10044) 2026-04-09 20:50:41 -04:00
70 changed files with 58 additions and 3791 deletions

View File

@@ -1,62 +0,0 @@
# Onyx devcontainer image. Base is Ubuntu 26.04, pinned by digest for
# reproducible builds.
FROM ubuntu:26.04@sha256:cc925e589b7543b910fea57a240468940003fbfc0515245a495dd0ad8fe7cef1
# Single layer: core CLI tooling, Node.js 20 (NodeSource), and the GitHub CLI
# (via its signed apt repo), then drop apt caches to keep the image small.
# iptables/ipset/iproute2/dnsutils support the init-firewall.sh default-deny setup.
RUN apt-get update && apt-get install -y --no-install-recommends \
curl \
fd-find \
fzf \
git \
jq \
less \
make \
neovim \
openssh-client \
python3-venv \
ripgrep \
sudo \
ca-certificates \
iptables \
ipset \
iproute2 \
dnsutils \
unzip \
wget \
zsh \
&& curl -fsSL https://deb.nodesource.com/setup_20.x | bash - \
&& apt-get install -y nodejs \
&& install -m 0755 -d /etc/apt/keyrings \
&& curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg -o /etc/apt/keyrings/githubcli-archive-keyring.gpg \
&& chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" > /etc/apt/sources.list.d/github-cli.list \
&& apt-get update \
&& apt-get install -y --no-install-recommends gh \
&& apt-get clean && rm -rf /var/lib/apt/lists/*
# fd-find installs as fdfind on Debian/Ubuntu — symlink to fd
RUN ln -sf "$(which fdfind)" /usr/local/bin/fd
# Install uv (Python package manager)
COPY --from=ghcr.io/astral-sh/uv:latest /uv /uvx /usr/local/bin/
# Create non-root dev user with passwordless sudo
RUN useradd -m -s /bin/zsh dev && \
echo "dev ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/dev && \
chmod 0440 /etc/sudoers.d/dev
# Marker env var so shells and tools can detect they run inside the devcontainer.
ENV DEVCONTAINER=true
# Pre-create the workspace mount point owned by the dev user; the repo is
# bind-mounted here at runtime (see devcontainer.json workspaceMount).
RUN mkdir -p /workspace && \
chown -R dev:dev /workspace
WORKDIR /workspace
# Install Claude Code
ARG CLAUDE_CODE_VERSION=latest
RUN npm install -g @anthropic-ai/claude-code@${CLAUDE_CODE_VERSION}
# Configure zsh — source the repo-local zshrc so shell customization
# doesn't require an image rebuild.
RUN chsh -s /bin/zsh root && \
for rc in /root/.zshrc /home/dev/.zshrc; do \
echo '[ -f /workspace/.devcontainer/zshrc ] && . /workspace/.devcontainer/zshrc' >> "$rc"; \
done && \
chown dev:dev /home/dev/.zshrc

View File

@@ -1,86 +0,0 @@
# Onyx Dev Container
A containerized development environment for working on Onyx.
## What's included
- Ubuntu 26.04 base image
- Node.js 20, uv, Claude Code
- GitHub CLI (`gh`)
- Neovim, ripgrep, fd, fzf, jq, make, wget, unzip
- Zsh as default shell (sources host `~/.zshrc` if available)
- Python venv auto-activation
- Network firewall (default-deny, whitelists npm, GitHub, Anthropic APIs, Sentry, and VS Code update servers)
## Usage
### CLI (`ods dev`)
The [`ods` devtools CLI](../tools/ods/README.md) provides workspace-aware wrappers
for all devcontainer operations (also available as `ods dc`):
```bash
# Start the container
ods dev up
# Open a shell
ods dev into
# Run a command
ods dev exec npm test
# Stop the container
ods dev stop
```
### Restarting the container
```bash
# Restart the container
ods dev restart
# Pull the latest published image and recreate
ods dev rebuild
```
## Image
The devcontainer uses a prebuilt image published to `onyxdotapp/onyx-devcontainer`.
The tag is pinned in `devcontainer.json` — no local build is required.
To build the image locally (e.g. while iterating on the Dockerfile):
```bash
docker buildx bake devcontainer
```
The `devcontainer` target is defined in `docker-bake.hcl` at the repo root.
## User & permissions
The container runs as the `dev` user by default (`remoteUser` in devcontainer.json).
An init script (`init-dev-user.sh`) runs at container start to ensure the active
user has read/write access to the bind-mounted workspace:
- **Standard Docker** — `dev`'s UID/GID is remapped to match the workspace owner,
so file permissions work seamlessly.
- **Rootless Docker** — The workspace appears as root-owned (UID 0) inside the
container due to user-namespace mapping. `ods dev up` auto-detects rootless Docker
and sets `DEVCONTAINER_REMOTE_USER=root` so the container runs as root — which
maps back to your host user via the user namespace. New files are owned by your
host UID and no ACL workarounds are needed.
To override the auto-detection, set `DEVCONTAINER_REMOTE_USER` before running
`ods dev up`.
## Firewall
The container starts with a default-deny firewall (`init-firewall.sh`) that only allows outbound traffic to:
- npm registry
- GitHub
- Anthropic API
- Sentry
- VS Code update servers
This requires the `NET_ADMIN` and `NET_RAW` capabilities, which are added via `runArgs` in `devcontainer.json`.

View File

@@ -1,23 +0,0 @@
{
"name": "Onyx Dev Sandbox",
"image": "onyxdotapp/onyx-devcontainer@sha256:12184169c5bcc9cca0388286d5ffe504b569bc9c37bfa631b76ee8eee2064055",
"runArgs": ["--cap-add=NET_ADMIN", "--cap-add=NET_RAW"],
"mounts": [
"source=${localEnv:HOME}/.claude,target=/home/dev/.claude,type=bind",
"source=${localEnv:HOME}/.claude.json,target=/home/dev/.claude.json,type=bind",
"source=${localEnv:HOME}/.zshrc,target=/home/dev/.zshrc.host,type=bind,readonly",
"source=${localEnv:HOME}/.gitconfig,target=/home/dev/.gitconfig,type=bind,readonly",
"source=${localEnv:HOME}/.config/nvim,target=/home/dev/.config/nvim,type=bind,readonly",
"source=onyx-devcontainer-cache,target=/home/dev/.cache,type=volume",
"source=onyx-devcontainer-local,target=/home/dev/.local,type=volume"
],
"containerEnv": {
"SSH_AUTH_SOCK": "/tmp/ssh-agent.sock"
},
"remoteUser": "${localEnv:DEVCONTAINER_REMOTE_USER:dev}",
"updateRemoteUserUID": false,
"workspaceMount": "source=${localWorkspaceFolder},target=/workspace,type=bind,consistency=delegated",
"workspaceFolder": "/workspace",
"postStartCommand": "sudo bash /workspace/.devcontainer/init-dev-user.sh && sudo bash /workspace/.devcontainer/init-firewall.sh",
"waitFor": "postStartCommand"
}

View File

@@ -1,107 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Remap the dev user's UID/GID to match the workspace owner so that
# bind-mounted files are accessible without running as root.
#
# Standard Docker: Workspace is owned by the host user's UID (e.g. 1000).
# We remap dev to that UID -- fast and seamless.
#
# Rootless Docker: Workspace appears as root-owned (UID 0) inside the
# container due to user-namespace mapping. Requires
# DEVCONTAINER_REMOTE_USER=root (set automatically by
# ods dev up). Container root IS the host user, so
# bind-mounts and named volumes are symlinked into /root.
WORKSPACE=/workspace
TARGET_USER=dev
# Script runs via sudo (see devcontainer.json postStartCommand), so SUDO_USER
# is the user the container actually runs as; fall back to dev.
REMOTE_USER="${SUDO_USER:-$TARGET_USER}"
# Numeric owner/group of the bind-mounted workspace, and dev's current ids.
WS_UID=$(stat -c '%u' "$WORKSPACE")
WS_GID=$(stat -c '%g' "$WORKSPACE")
DEV_UID=$(id -u "$TARGET_USER")
DEV_GID=$(id -g "$TARGET_USER")
# devcontainer.json bind-mounts and named volumes target /home/dev regardless
# of remoteUser. When running as root ($HOME=/root), Phase 1 bridges the gap
# with symlinks from ACTIVE_HOME → MOUNT_HOME.
MOUNT_HOME=/home/"$TARGET_USER"
if [ "$REMOTE_USER" = "root" ]; then
ACTIVE_HOME="/root"
else
ACTIVE_HOME="$MOUNT_HOME"
fi
# ── Phase 1: home directory setup ───────────────────────────────────
# ~/.local and ~/.cache are named Docker volumes mounted under MOUNT_HOME.
mkdir -p "$MOUNT_HOME"/.local/state "$MOUNT_HOME"/.local/share
# When running as root, symlink bind-mounts and named volumes into /root
# so that $HOME-relative tools (Claude Code, git, etc.) find them.
if [ "$ACTIVE_HOME" != "$MOUNT_HOME" ]; then
for item in .claude .cache .local; do
[ -d "$MOUNT_HOME/$item" ] || continue
# A pre-existing real directory would shadow the mount — replace it
# with a symlink (and warn, since its contents are discarded).
if [ -e "$ACTIVE_HOME/$item" ] && [ ! -L "$ACTIVE_HOME/$item" ]; then
echo "warning: replacing $ACTIVE_HOME/$item with symlink to $MOUNT_HOME/$item" >&2
rm -rf "$ACTIVE_HOME/$item"
fi
ln -sfn "$MOUNT_HOME/$item" "$ACTIVE_HOME/$item"
done
# Symlink files (not directories).
for file in .claude.json .gitconfig .zshrc.host; do
[ -f "$MOUNT_HOME/$file" ] && ln -sf "$MOUNT_HOME/$file" "$ACTIVE_HOME/$file"
done
# Nested mount: .config/nvim
if [ -d "$MOUNT_HOME/.config/nvim" ]; then
mkdir -p "$ACTIVE_HOME/.config"
if [ -e "$ACTIVE_HOME/.config/nvim" ] && [ ! -L "$ACTIVE_HOME/.config/nvim" ]; then
echo "warning: replacing $ACTIVE_HOME/.config/nvim with symlink" >&2
rm -rf "$ACTIVE_HOME/.config/nvim"
fi
ln -sfn "$MOUNT_HOME/.config/nvim" "$ACTIVE_HOME/.config/nvim"
fi
fi
# ── Phase 2: workspace access ───────────────────────────────────────
# Root always has workspace access; Phase 1 handled home setup.
if [ "$REMOTE_USER" = "root" ]; then
exit 0
fi
# Already matching -- nothing to do.
if [ "$WS_UID" = "$DEV_UID" ] && [ "$WS_GID" = "$DEV_GID" ]; then
exit 0
fi
if [ "$WS_UID" != "0" ]; then
# ── Standard Docker ──────────────────────────────────────────────
# Workspace is owned by a non-root UID (the host user).
# Remap dev's UID/GID to match.
# Note: remap failures are warnings, not fatal — the container still
# starts, just possibly with permission friction on the workspace.
if [ "$DEV_GID" != "$WS_GID" ]; then
if ! groupmod -g "$WS_GID" "$TARGET_USER" 2>&1; then
echo "warning: failed to remap $TARGET_USER GID to $WS_GID" >&2
fi
fi
if [ "$DEV_UID" != "$WS_UID" ]; then
if ! usermod -u "$WS_UID" -g "$WS_GID" "$TARGET_USER" 2>&1; then
echo "warning: failed to remap $TARGET_USER UID to $WS_UID" >&2
fi
fi
# usermod changes ownership of the home dir's top level only; fix up
# everything under it so the remapped user owns its own files.
if ! chown -R "$TARGET_USER":"$TARGET_USER" "$MOUNT_HOME" 2>&1; then
echo "warning: failed to chown $MOUNT_HOME" >&2
fi
else
# ── Rootless Docker ──────────────────────────────────────────────
# Workspace is root-owned (UID 0) due to user-namespace mapping.
# The supported path is remoteUser=root (set DEVCONTAINER_REMOTE_USER=root),
# which is handled above. If we reach here, the user is running as dev
# under rootless Docker without the override.
echo "error: rootless Docker detected but remoteUser is not root." >&2
echo " Set DEVCONTAINER_REMOTE_USER=root before starting the container," >&2
echo " or use 'ods dev up' which sets it automatically." >&2
exit 1
fi

View File

@@ -1,105 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Default-deny egress firewall for the devcontainer: flush existing rules,
# build an ipset of allowed destination IPs (GitHub ranges + resolved domains
# + the Docker gateway), then DROP everything else. Requires NET_ADMIN/NET_RAW
# (granted via runArgs in devcontainer.json). IPv4 only.
echo "Setting up firewall..."
# Preserve docker dns resolution
DOCKER_DNS_RULES=$(iptables-save | grep -E "^-A.*-d 127.0.0.11/32" || true)
# Flush all rules
iptables -t nat -F
iptables -t nat -X
iptables -t mangle -F
iptables -t mangle -X
iptables -F
iptables -X
# Restore docker dns rules
# NOTE(review): iptables-restore normally expects "*table ... COMMIT" framing;
# these grepped bare "-A" lines may not restore as intended — confirm on a
# running container (`iptables-save` after start).
if [ -n "$DOCKER_DNS_RULES" ]; then
echo "$DOCKER_DNS_RULES" | iptables-restore -n
fi
# Create ipset for allowed destinations
ipset create allowed-domains hash:net || true
ipset flush allowed-domains
# Fetch GitHub IP ranges (IPv4 only -- ipset hash:net and iptables are IPv4)
GITHUB_IPS=$(curl -s https://api.github.com/meta | jq -r '.api[]' 2>/dev/null | grep -v ':' || echo "")
for ip in $GITHUB_IPS; do
if ! ipset add allowed-domains "$ip" -exist 2>&1; then
echo "warning: failed to add GitHub IP $ip to allowlist" >&2
fi
done
# Resolve allowed domains
# NOTE(review): domains are resolved once at startup; IPs behind rotating DNS
# may drift while the container runs — restart the firewall script to refresh.
ALLOWED_DOMAINS=(
"registry.npmjs.org"
"api.anthropic.com"
"api-staging.anthropic.com"
"files.anthropic.com"
"sentry.io"
"update.code.visualstudio.com"
"pypi.org"
"files.pythonhosted.org"
"go.dev"
"storage.googleapis.com"
"static.rust-lang.org"
)
for domain in "${ALLOWED_DOMAINS[@]}"; do
# getent ahosts returns one address per line; drop IPv6 (contains ':').
IPS=$(getent ahosts "$domain" 2>/dev/null | awk '{print $1}' | grep -v ':' | sort -u || echo "")
for ip in $IPS; do
if ! ipset add allowed-domains "$ip/32" -exist 2>&1; then
echo "warning: failed to add $domain ($ip) to allowlist" >&2
fi
done
done
# Allow traffic to the Docker gateway so the container can reach host services
# (e.g. the Onyx stack at localhost:3000, localhost:8080, etc.)
DOCKER_GATEWAY=$(ip -4 route show default | awk '{print $3}')
if [ -n "$DOCKER_GATEWAY" ]; then
if ! ipset add allowed-domains "$DOCKER_GATEWAY/32" -exist 2>&1; then
echo "warning: failed to add Docker gateway $DOCKER_GATEWAY to allowlist" >&2
fi
fi
# Set default policies to DROP
iptables -P FORWARD DROP
iptables -P INPUT DROP
iptables -P OUTPUT DROP
# Allow established connections
iptables -A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
iptables -A OUTPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
# Allow loopback
iptables -A INPUT -i lo -j ACCEPT
iptables -A OUTPUT -o lo -j ACCEPT
# Allow DNS
iptables -A OUTPUT -p udp --dport 53 -j ACCEPT
iptables -A OUTPUT -p tcp --dport 53 -j ACCEPT
# Allow outbound to allowed destinations
iptables -A OUTPUT -m set --match-set allowed-domains dst -j ACCEPT
# Reject unauthorized outbound
# (REJECT rather than DROP so blocked tools fail fast instead of hanging.)
iptables -A OUTPUT -j REJECT --reject-with icmp-host-unreachable
# Validate firewall configuration
# Best-effort smoke test: known-blocked sites should be unreachable and
# GitHub should still work. Failures only warn; they never abort startup.
echo "Validating firewall configuration..."
BLOCKED_SITES=("example.com" "google.com" "facebook.com")
for site in "${BLOCKED_SITES[@]}"; do
if timeout 2 ping -c 1 "$site" &>/dev/null; then
echo "Warning: $site is still reachable"
fi
done
if ! timeout 5 curl -s https://api.github.com/meta > /dev/null; then
echo "Warning: GitHub API is not accessible"
fi
echo "Firewall setup complete"

View File

@@ -1,10 +0,0 @@
# Devcontainer zshrc — loaded for both the root and dev accounts.
# Edit here to customize the shell; no image rebuild required.

# Activate the workspace Python virtualenv when one exists.
[ -f /workspace/.venv/bin/activate ] && . /workspace/.venv/bin/activate

# Pull in the host's zshrc when it was bind-mounted into the container.
if [ -f ~/.zshrc.host ]; then
. ~/.zshrc.host
fi

View File

@@ -42,9 +42,6 @@ from onyx.connectors.google_drive.file_retrieval import (
get_all_files_in_my_drive_and_shared,
)
from onyx.connectors.google_drive.file_retrieval import get_external_access_for_folder
from onyx.connectors.google_drive.file_retrieval import (
get_files_by_web_view_links_batch,
)
from onyx.connectors.google_drive.file_retrieval import get_files_in_shared_drive
from onyx.connectors.google_drive.file_retrieval import get_folder_metadata
from onyx.connectors.google_drive.file_retrieval import get_root_folder_id
@@ -73,13 +70,11 @@ from onyx.connectors.interfaces import CheckpointedConnectorWithPermSync
from onyx.connectors.interfaces import CheckpointOutput
from onyx.connectors.interfaces import GenerateSlimDocumentOutput
from onyx.connectors.interfaces import NormalizationResult
from onyx.connectors.interfaces import Resolver
from onyx.connectors.interfaces import SecondsSinceUnixEpoch
from onyx.connectors.interfaces import SlimConnectorWithPermSync
from onyx.connectors.models import ConnectorFailure
from onyx.connectors.models import ConnectorMissingCredentialError
from onyx.connectors.models import Document
from onyx.connectors.models import DocumentFailure
from onyx.connectors.models import EntityFailure
from onyx.connectors.models import HierarchyNode
from onyx.connectors.models import SlimDocument
@@ -207,9 +202,7 @@ class DriveIdStatus(Enum):
class GoogleDriveConnector(
SlimConnectorWithPermSync,
CheckpointedConnectorWithPermSync[GoogleDriveCheckpoint],
Resolver,
SlimConnectorWithPermSync, CheckpointedConnectorWithPermSync[GoogleDriveCheckpoint]
):
def __init__(
self,
@@ -1672,82 +1665,6 @@ class GoogleDriveConnector(
start, end, checkpoint, include_permissions=True
)
@override
def resolve_errors(
self,
errors: list[ConnectorFailure],
include_permissions: bool = False,
) -> Generator[Document | ConnectorFailure | HierarchyNode, None, None]:
"""Re-fetch the documents named by prior ConnectorFailures via the Drive batch API.

Yields a Document for each successfully re-fetched file, a fresh
ConnectorFailure for each file that still cannot be retrieved, and
HierarchyNode objects for newly-walked ancestor folders.
Failures without a failed_document (entity failures) are skipped.
Requires load_credentials to have been called first.
"""
if self._creds is None or self._primary_admin_email is None:
raise RuntimeError(
"Credentials missing, should not call this method before calling load_credentials"
)
logger.info(f"Resolving {len(errors)} errors")
# document_id here is the file's webViewLink (see get_files_by_web_view_links_batch).
doc_ids = [
failure.failed_document.document_id
for failure in errors
if failure.failed_document
]
service = get_drive_service(self.creds, self.primary_admin_email)
# Permission fields are also needed when domain-link-only files are excluded.
field_type = (
DriveFileFieldType.WITH_PERMISSIONS
if include_permissions or self.exclude_domain_link_only
else DriveFileFieldType.STANDARD
)
batch_result = get_files_by_web_view_links_batch(service, doc_ids, field_type)
# Surface per-file retrieval errors as new ConnectorFailures; the caller is
# responsible for replacing the old failures with these.
for doc_id, error in batch_result.errors.items():
yield ConnectorFailure(
failed_document=DocumentFailure(
document_id=doc_id,
document_link=doc_id,
),
failure_message=f"Failed to retrieve file during error resolution: {error}",
exception=error,
)
permission_sync_context = (
PermissionSyncContext(
primary_admin_email=self.primary_admin_email,
google_domain=self.google_domain,
)
if include_permissions
else None
)
retrieved_files = [
RetrievedDriveFile(
drive_file=file,
user_email=self.primary_admin_email,
completion_stage=DriveRetrievalStage.DONE,
)
for file in batch_result.files.values()
]
# Walk and yield ancestor folders for the re-fetched files. Fresh
# ThreadSafeSets mean no ancestors are considered already-seen here.
yield from self._get_new_ancestors_for_files(
files=retrieved_files,
seen_hierarchy_node_raw_ids=ThreadSafeSet(),
fully_walked_hierarchy_node_raw_ids=ThreadSafeSet(),
permission_sync_context=permission_sync_context,
add_prefix=True,
)
# Convert files to Documents in parallel; each call may itself return a
# ConnectorFailure, or None (which is filtered out below).
func_with_args = [
(
self._convert_retrieved_file_to_document,
(rf, permission_sync_context),
)
for rf in retrieved_files
]
results = cast(
list[Document | ConnectorFailure | None],
run_functions_tuples_in_parallel(func_with_args, max_workers=8),
)
for result in results:
if result is not None:
yield result
def _extract_slim_docs_from_google_drive(
self,
checkpoint: GoogleDriveCheckpoint,

View File

@@ -9,7 +9,6 @@ from urllib.parse import urlparse
from googleapiclient.discovery import Resource # type: ignore
from googleapiclient.errors import HttpError # type: ignore
from googleapiclient.http import BatchHttpRequest # type: ignore
from onyx.access.models import ExternalAccess
from onyx.connectors.google_drive.constants import DRIVE_FOLDER_TYPE
@@ -61,8 +60,6 @@ SLIM_FILE_FIELDS = (
)
FOLDER_FIELDS = "nextPageToken, files(id, name, permissions, modifiedTime, webViewLink, shortcutDetails)"
MAX_BATCH_SIZE = 100
HIERARCHY_FIELDS = "id, name, parents, webViewLink, mimeType, driveId"
HIERARCHY_FIELDS_WITH_PERMISSIONS = (
@@ -219,7 +216,7 @@ def get_external_access_for_folder(
def _get_fields_for_file_type(field_type: DriveFileFieldType) -> str:
"""Get the appropriate fields string for files().list() based on the field type enum."""
"""Get the appropriate fields string based on the field type enum"""
if field_type == DriveFileFieldType.SLIM:
return SLIM_FILE_FIELDS
elif field_type == DriveFileFieldType.WITH_PERMISSIONS:
@@ -228,25 +225,6 @@ def _get_fields_for_file_type(field_type: DriveFileFieldType) -> str:
return FILE_FIELDS
def _extract_single_file_fields(list_fields: str) -> str:
"""Convert a files().list() fields string to one suitable for files().get().
List fields look like "nextPageToken, files(field1, field2, ...)"
Single-file fields should be just "field1, field2, ..."
"""
start = list_fields.find("files(")
if start == -1:
return list_fields
inner_start = start + len("files(")
inner_end = list_fields.rfind(")")
return list_fields[inner_start:inner_end]
def _get_single_file_fields(field_type: DriveFileFieldType) -> str:
"""Get the appropriate fields string for files().get() based on the field type enum."""
# Reuse the list-style fields constant and strip the "files(...)" wrapper.
return _extract_single_file_fields(_get_fields_for_file_type(field_type))
def _get_files_in_parent(
service: Resource,
parent_id: str,
@@ -558,74 +536,3 @@ def get_file_by_web_view_link(
)
.execute()
)
class BatchRetrievalResult:
"""Result of a batch file retrieval, separating successes from errors."""
def __init__(self) -> None:
# request id (the file's webViewLink) -> retrieved file metadata
self.files: dict[str, GoogleDriveFileType] = {}
# request id (the file's webViewLink) -> exception raised for that file
self.errors: dict[str, Exception] = {}
def get_files_by_web_view_links_batch(
service: GoogleDriveService,
web_view_links: list[str],
field_type: DriveFileFieldType,
) -> BatchRetrievalResult:
"""Retrieve multiple Google Drive files by webViewLink using the batch API.
Returns a BatchRetrievalResult containing successful file retrievals
and errors for any files that could not be fetched.
Automatically splits into chunks of MAX_BATCH_SIZE.
"""
fields = _get_single_file_fields(field_type)
# Common case: everything fits in a single batch request.
if len(web_view_links) <= MAX_BATCH_SIZE:
return _get_files_by_web_view_links_batch(service, web_view_links, fields)
# Otherwise issue one batch per MAX_BATCH_SIZE chunk and merge the results.
combined = BatchRetrievalResult()
for i in range(0, len(web_view_links), MAX_BATCH_SIZE):
chunk = web_view_links[i : i + MAX_BATCH_SIZE]
chunk_result = _get_files_by_web_view_links_batch(service, chunk, fields)
combined.files.update(chunk_result.files)
combined.errors.update(chunk_result.errors)
return combined
def _get_files_by_web_view_links_batch(
service: GoogleDriveService,
web_view_links: list[str],
fields: str,
) -> BatchRetrievalResult:
"""Single-batch implementation.

Caller must ensure len(web_view_links) <= MAX_BATCH_SIZE. Never raises for
an individual file: per-file errors (bad link, API error) are recorded in
result.errors keyed by the webViewLink.
"""
result = BatchRetrievalResult()
# Invoked once per sub-request after batch.execute(); request_id is the
# webViewLink we registered below.
def callback(
request_id: str,
response: GoogleDriveFileType,
exception: Exception | None,
) -> None:
if exception:
logger.warning(f"Error retrieving file {request_id}: {exception}")
result.errors[request_id] = exception
else:
result.files[request_id] = response
batch = cast(BatchHttpRequest, service.new_batch_http_request(callback=callback))
for web_view_link in web_view_links:
try:
file_id = _extract_file_id_from_web_view_link(web_view_link)
request = service.files().get(
fileId=file_id,
supportsAllDrives=True,
fields=fields,
)
batch.add(request, request_id=web_view_link)
except ValueError as e:
# Unparseable link: record the error without aborting the batch.
logger.warning(f"Failed to extract file ID from {web_view_link}: {e}")
result.errors[web_view_link] = e
batch.execute()
return result

View File

@@ -298,22 +298,6 @@ class CheckpointedConnectorWithPermSync(CheckpointedConnector[CT]):
raise NotImplementedError
class Resolver(BaseConnector):
"""Mixin for connectors that can re-fetch the documents named in prior failures."""
@abc.abstractmethod
def resolve_errors(
self,
errors: list[ConnectorFailure],
include_permissions: bool = False,
) -> Generator[Document | ConnectorFailure | HierarchyNode, None, None]:
"""Attempts to yield back ALL the documents described by the errors, no checkpointing.
Caller's responsibility is to delete the old ConnectorFailures and replace with the new ones.
If include_permissions is True, the documents will have permissions synced.
May also yield HierarchyNode objects for ancestor folders of resolved documents.
"""
raise NotImplementedError
class HierarchyConnector(BaseConnector):
@abc.abstractmethod
def load_hierarchy(

View File

@@ -254,7 +254,7 @@ oauthlib==3.2.2
# via
# kubernetes
# requests-oauthlib
onyx-devtools==0.7.5
onyx-devtools==0.7.3
openai==2.14.0
# via
# litellm

View File

@@ -1,239 +0,0 @@
"""Tests for GoogleDriveConnector.resolve_errors against real Google Drive."""
import json
import os
from collections.abc import Callable
from unittest.mock import patch
from onyx.connectors.google_drive.connector import GoogleDriveConnector
from onyx.connectors.models import ConnectorFailure
from onyx.connectors.models import Document
from onyx.connectors.models import DocumentFailure
from onyx.connectors.models import HierarchyNode
from tests.daily.connectors.google_drive.consts_and_utils import ADMIN_EMAIL
from tests.daily.connectors.google_drive.consts_and_utils import (
ALL_EXPECTED_HIERARCHY_NODES,
)
from tests.daily.connectors.google_drive.consts_and_utils import FOLDER_1_ID
from tests.daily.connectors.google_drive.consts_and_utils import SHARED_DRIVE_1_ID
_DRIVE_ID_MAPPING_PATH = os.path.join(
os.path.dirname(__file__), "drive_id_mapping.json"
)
def _load_web_view_links(file_ids: list[int]) -> list[str]:
"""Map integer test-file ids to their Drive links via the drive_id_mapping.json fixture.

Raises KeyError if an id is missing from the mapping.
"""
with open(_DRIVE_ID_MAPPING_PATH) as f:
mapping: dict[str, str] = json.load(f)
return [mapping[str(fid)] for fid in file_ids]
def _build_failures(web_view_links: list[str]) -> list[ConnectorFailure]:
"""Wrap each link in a synthetic ConnectorFailure for feeding to resolve_errors.

The link serves as both document_id and document_link, matching how the
connector keys batch retrieval by webViewLink.
"""
return [
ConnectorFailure(
failed_document=DocumentFailure(
document_id=link,
document_link=link,
),
failure_message=f"Synthetic failure for {link}",
)
for link in web_view_links
]
# Patch the Unstructured API key lookup — presumably so text extraction stays
# local instead of calling the Unstructured service; verify against
# extract_file_text. Same pattern on every test below.
@patch("onyx.file_processing.extract_file_text.get_unstructured_api_key")
def test_resolve_single_file(
mock_api_key: None, # noqa: ARG001
google_drive_service_acct_connector_factory: Callable[..., GoogleDriveConnector],
) -> None:
"""Resolve a single known file and verify we get back exactly one Document."""
connector = google_drive_service_acct_connector_factory(
primary_admin_email=ADMIN_EMAIL,
include_shared_drives=True,
shared_drive_urls=None,
include_my_drives=True,
my_drive_emails=None,
shared_folder_urls=None,
include_files_shared_with_me=False,
)
web_view_links = _load_web_view_links([0])
failures = _build_failures(web_view_links)
results = list(connector.resolve_errors(failures))
# Partition the mixed generator output by type.
docs = [r for r in results if isinstance(r, Document)]
new_failures = [r for r in results if isinstance(r, ConnectorFailure)]
hierarchy_nodes = [r for r in results if isinstance(r, HierarchyNode)]
assert len(docs) == 1
assert len(new_failures) == 0
assert docs[0].semantic_identifier == "file_0.txt"
# Should yield at least one hierarchy node (the file's parent folder chain)
assert len(hierarchy_nodes) > 0
@patch("onyx.file_processing.extract_file_text.get_unstructured_api_key")
def test_resolve_multiple_files(
mock_api_key: None, # noqa: ARG001
google_drive_service_acct_connector_factory: Callable[..., GoogleDriveConnector],
) -> None:
"""Resolve multiple files across different folders via batch API."""
connector = google_drive_service_acct_connector_factory(
primary_admin_email=ADMIN_EMAIL,
include_shared_drives=True,
shared_drive_urls=None,
include_my_drives=True,
my_drive_emails=None,
shared_folder_urls=None,
include_files_shared_with_me=False,
)
# Pick files from different folders: admin files (0-4), shared drive 1 (20-24), folder_2 (45-49)
file_ids = [0, 1, 20, 21, 45]
web_view_links = _load_web_view_links(file_ids)
failures = _build_failures(web_view_links)
results = list(connector.resolve_errors(failures))
docs = [r for r in results if isinstance(r, Document)]
new_failures = [r for r in results if isinstance(r, ConnectorFailure)]
hierarchy_nodes = [r for r in results if isinstance(r, HierarchyNode)]
assert len(new_failures) == 0
# Every requested file must come back, no extras (set comparison ignores order).
retrieved_names = {doc.semantic_identifier for doc in docs}
expected_names = {f"file_{fid}.txt" for fid in file_ids}
assert expected_names == retrieved_names
# Files span multiple folders, so we should get hierarchy nodes
assert len(hierarchy_nodes) > 0
@patch("onyx.file_processing.extract_file_text.get_unstructured_api_key")
def test_resolve_hierarchy_nodes_are_valid(
mock_api_key: None, # noqa: ARG001
google_drive_service_acct_connector_factory: Callable[..., GoogleDriveConnector],
) -> None:
"""Verify that hierarchy nodes from resolve_errors match expected structure."""
connector = google_drive_service_acct_connector_factory(
primary_admin_email=ADMIN_EMAIL,
include_shared_drives=True,
shared_drive_urls=None,
include_my_drives=True,
my_drive_emails=None,
shared_folder_urls=None,
include_files_shared_with_me=False,
)
# File in folder_1 (inside shared_drive_1) — should walk up to shared_drive_1 root
web_view_links = _load_web_view_links([25])
failures = _build_failures(web_view_links)
results = list(connector.resolve_errors(failures))
hierarchy_nodes = [r for r in results if isinstance(r, HierarchyNode)]
node_ids = {node.raw_node_id for node in hierarchy_nodes}
# File 25 is in folder_1 which is inside shared_drive_1.
# The parent walk must yield at least these two ancestors.
assert (
FOLDER_1_ID in node_ids
), f"Expected folder_1 ({FOLDER_1_ID}) in hierarchy nodes, got: {node_ids}"
assert (
SHARED_DRIVE_1_ID in node_ids
), f"Expected shared_drive_1 ({SHARED_DRIVE_1_ID}) in hierarchy nodes, got: {node_ids}"
# For any node we have expectations for, check name and type; nodes outside
# the expected set are tolerated (the walk may surface extra ancestors).
for node in hierarchy_nodes:
if node.raw_node_id not in ALL_EXPECTED_HIERARCHY_NODES:
continue
expected = ALL_EXPECTED_HIERARCHY_NODES[node.raw_node_id]
assert node.display_name == expected.display_name, (
f"Display name mismatch for {node.raw_node_id}: "
f"expected '{expected.display_name}', got '{node.display_name}'"
)
assert node.node_type == expected.node_type, (
f"Node type mismatch for {node.raw_node_id}: "
f"expected '{expected.node_type}', got '{node.node_type}'"
)
@patch("onyx.file_processing.extract_file_text.get_unstructured_api_key")
def test_resolve_with_invalid_link(
mock_api_key: None, # noqa: ARG001
google_drive_service_acct_connector_factory: Callable[..., GoogleDriveConnector],
) -> None:
"""Resolve with a mix of valid and invalid links — invalid ones yield ConnectorFailure."""
connector = google_drive_service_acct_connector_factory(
primary_admin_email=ADMIN_EMAIL,
include_shared_drives=True,
shared_drive_urls=None,
include_my_drives=True,
my_drive_emails=None,
shared_folder_urls=None,
include_files_shared_with_me=False,
)
valid_links = _load_web_view_links([0])
invalid_link = "https://drive.google.com/file/d/NONEXISTENT_FILE_ID_12345"
failures = _build_failures(valid_links + [invalid_link])
results = list(connector.resolve_errors(failures))
docs = [r for r in results if isinstance(r, Document)]
new_failures = [r for r in results if isinstance(r, ConnectorFailure)]
# Valid file resolves normally despite the bad link in the same batch.
assert len(docs) == 1
assert docs[0].semantic_identifier == "file_0.txt"
# Bad link is re-reported as a failure keyed by the original link.
assert len(new_failures) == 1
assert new_failures[0].failed_document is not None
assert new_failures[0].failed_document.document_id == invalid_link
@patch("onyx.file_processing.extract_file_text.get_unstructured_api_key")
def test_resolve_empty_errors(
mock_api_key: None, # noqa: ARG001
google_drive_service_acct_connector_factory: Callable[..., GoogleDriveConnector],
) -> None:
"""Resolving an empty error list should yield nothing."""
connector = google_drive_service_acct_connector_factory(
primary_admin_email=ADMIN_EMAIL,
include_shared_drives=True,
shared_drive_urls=None,
include_my_drives=True,
my_drive_emails=None,
shared_folder_urls=None,
include_files_shared_with_me=False,
)
results = list(connector.resolve_errors([]))
assert len(results) == 0
@patch("onyx.file_processing.extract_file_text.get_unstructured_api_key")
def test_resolve_entity_failures_are_skipped(
mock_api_key: None, # noqa: ARG001
google_drive_service_acct_connector_factory: Callable[..., GoogleDriveConnector],
) -> None:
"""Entity failures (not document failures) should be skipped by resolve_errors."""
from onyx.connectors.models import EntityFailure
connector = google_drive_service_acct_connector_factory(
primary_admin_email=ADMIN_EMAIL,
include_shared_drives=True,
shared_drive_urls=None,
include_my_drives=True,
my_drive_emails=None,
shared_folder_urls=None,
include_files_shared_with_me=False,
)
# A failure with failed_entity set (and no failed_document) has no link to
# re-fetch, so resolve_errors should ignore it entirely.
entity_failure = ConnectorFailure(
failed_entity=EntityFailure(entity_id="some_stage"),
failure_message="retrieval failure",
)
results = list(connector.resolve_errors([entity_failure]))
assert len(results) == 0

View File

@@ -1,769 +0,0 @@
"""Unit tests for Chunker._chunk_document_with_sections.
These tests use a fake character-level tokenizer so every char counts as
exactly one token. This makes token-limit arithmetic deterministic and lets
us exercise every branch of the method without pulling real embedding
models into the test.
"""
import pytest
from onyx.configs.constants import DocumentSource
from onyx.configs.constants import SECTION_SEPARATOR
from onyx.connectors.models import IndexingDocument
from onyx.connectors.models import Section
from onyx.indexing import chunker as chunker_module
from onyx.indexing.chunker import Chunker
from onyx.natural_language_processing.utils import BaseTokenizer
class CharTokenizer(BaseTokenizer):
    """Maps every character to exactly one token.

    Deterministic and trivial to reason about, which keeps the token-budget
    arithmetic in the tests below exact.
    """

    def encode(self, string: str) -> list[int]:
        # one token per character, encoded as its code point
        return list(map(ord, string))

    def tokenize(self, string: str) -> list[str]:
        return [ch for ch in string]

    def decode(self, tokens: list[int]) -> str:
        return "".join(map(chr, tokens))
# Token budget used by every test below. With the char-level tokenizer each
# character counts as one token, and 200 sits comfortably above BLURB_SIZE
# (128) so the blurb splitter behaves normally on small inputs.
CHUNK_LIMIT = 200
def _make_chunker(
    chunk_token_limit: int = CHUNK_LIMIT,
    enable_multipass: bool = False,
) -> Chunker:
    """Build a Chunker wired to the deterministic char-level tokenizer.

    Large chunks and contextual RAG are disabled so only the plain
    section-chunking path is exercised.
    """
    return Chunker(
        tokenizer=CharTokenizer(),
        chunk_token_limit=chunk_token_limit,
        enable_multipass=enable_multipass,
        enable_large_chunks=False,
        enable_contextual_rag=False,
    )
def _make_doc(
    sections: list[Section],
    title: str | None = "Test Doc",
    doc_id: str = "doc1",
) -> IndexingDocument:
    """Wrap pre-processed sections in a minimal IndexingDocument.

    The raw `sections` field is deliberately left empty: the method under
    test only ever reads `processed_sections`.
    """
    return IndexingDocument(
        id=doc_id,
        source=DocumentSource.WEB,
        semantic_identifier=doc_id,
        title=title,
        metadata={},
        sections=[],
        processed_sections=sections,
    )
# --- Empty / degenerate input -------------------------------------------------
def test_empty_processed_sections_returns_single_empty_safety_chunk() -> None:
    """Zero sections must still yield one empty chunk via the trailing
    `or not chunks` safety branch, with all passthrough fields intact."""
    ch = _make_chunker()
    document = _make_doc(sections=[])
    result = ch._chunk_document_with_sections(
        document=document,
        sections=[],
        title_prefix="TITLE\n",
        metadata_suffix_semantic="meta_sem",
        metadata_suffix_keyword="meta_kw",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    only = result[0]
    assert only.content == ""
    assert only.chunk_id == 0
    assert only.title_prefix == "TITLE\n"
    assert only.metadata_suffix_semantic == "meta_sem"
    assert only.metadata_suffix_keyword == "meta_kw"
    # the safety chunk falls back to the default link offsets
    assert only.source_links == {0: ""}
def test_empty_section_on_first_position_without_title_is_skipped() -> None:
    """Untitled doc + empty first section: the guard
    `(not document.title or section_idx > 0)` drops it, leaving only the
    trailing safety chunk."""
    ch = _make_chunker()
    document = _make_doc(sections=[Section(text="", link="l0")], title=None)
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    # section skipped → no real content, but the safety branch still emits
    # one empty chunk
    assert [c.content for c in result] == [""]
def test_empty_section_on_later_position_is_skipped_even_with_title() -> None:
    """Any empty section at index > 0 is dropped, title or no title."""
    ch = _make_chunker()
    document = _make_doc(
        sections=[
            Section(text="Alpha.", link="l0"),
            Section(text="", link="l1"),  # expected to be dropped
            Section(text="Beta.", link="l2"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    content = result[0].content
    assert "Alpha." in content
    assert "Beta." in content
    # the dropped section's link never reaches the offsets map
    assert "l1" not in (result[0].source_links or {}).values()
# --- Single text section ------------------------------------------------------
def test_single_small_text_section_becomes_one_chunk() -> None:
    """One short section fits the budget → exactly one chunk, fields intact."""
    ch = _make_chunker()
    document = _make_doc(sections=[Section(text="Hello world.", link="https://a")])
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="TITLE\n",
        metadata_suffix_semantic="ms",
        metadata_suffix_keyword="mk",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    only = result[0]
    assert only.content == "Hello world."
    assert only.source_links == {0: "https://a"}
    assert (
        only.title_prefix,
        only.metadata_suffix_semantic,
        only.metadata_suffix_keyword,
    ) == ("TITLE\n", "ms", "mk")
    assert only.section_continuation is False
    assert only.image_file_id is None
# --- Multiple text sections combined -----------------------------------------
def test_multiple_small_sections_combine_into_one_chunk() -> None:
    """Several tiny sections share one chunk, joined by SECTION_SEPARATOR."""
    ch = _make_chunker()
    parts = ["Part one.", "Part two.", "Part three."]
    document = _make_doc(
        sections=[
            Section(text="Part one.", link="l1"),
            Section(text="Part two.", link="l2"),
            Section(text="Part three.", link="l3"),
        ]
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    assert result[0].content == SECTION_SEPARATOR.join(parts)
    # Offsets are keyed by the shared_precompare_cleanup length of the text
    # accumulated *before* each append:
    #   "" → 0, "partone" → 7, "partoneparttwo" → 14
    assert result[0].source_links == {0: "l1", 7: "l2", 14: "l3"}
def test_sections_overflow_into_second_chunk() -> None:
    """Two 120-token sections cannot share a 200-token budget: the first is
    finalized as its own chunk and the second opens a new one."""
    ch = _make_chunker()
    # char-level tokenizer: 120 chars == 120 tokens; two of them plus the
    # separator exceed the 200-token limit and force a flush
    first = "A" * 120
    second = "B" * 120
    document = _make_doc(
        sections=[
            Section(text=first, link="la"),
            Section(text=second, link="lb"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert [c.content for c in result] == [first, second]
    # neither chunk is a continuation (the second starts a fresh section)
    assert all(c.section_continuation is False for c in result)
    # sequential chunk ids from zero
    assert [c.chunk_id for c in result] == [0, 1]
    # links routed to the right chunk
    assert result[0].source_links == {0: "la"}
    assert result[1].source_links == {0: "lb"}
# --- Image section handling --------------------------------------------------
def test_image_only_section_produces_single_chunk_with_image_id() -> None:
    """An image section yields a standalone chunk tagged with its file id."""
    ch = _make_chunker()
    document = _make_doc(
        sections=[
            Section(
                text="summary of image",
                link="https://img",
                image_file_id="img-abc",
            )
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    only = result[0]
    assert only.image_file_id == "img-abc"
    assert only.content == "summary of image"
    assert only.source_links == {0: "https://img"}
def test_image_section_flushes_pending_text_and_creates_its_own_chunk() -> None:
    """Buffered text ahead of an image is flushed first, the image gets its
    own chunk, and trailing text starts fresh afterwards."""
    ch = _make_chunker()
    document = _make_doc(
        sections=[
            Section(text="Pending text.", link="ltext"),
            Section(
                text="image summary",
                link="limage",
                image_file_id="img-1",
            ),
            Section(text="Trailing text.", link="ltail"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    expected = [
        ("Pending text.", None, "ltext"),  # 0: flushed pending buffer
        ("image summary", "img-1", "limage"),  # 1: the image's own chunk
        ("Trailing text.", None, "ltail"),  # 2: fresh chunk after the image
    ]
    assert len(result) == len(expected)
    for chunk, (content, image_id, link) in zip(result, expected):
        assert chunk.content == content
        assert chunk.image_file_id == image_id
        assert chunk.source_links == {0: link}
def test_image_section_without_link_gets_empty_links_dict() -> None:
    """A link-less image section passes {} as links, and _create_chunk
    replaces an empty dict with the {0: ""} fallback."""
    ch = _make_chunker()
    document = _make_doc(
        sections=[
            Section(text="img", link=None, image_file_id="img-xyz"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    assert result[0].image_file_id == "img-xyz"
    assert result[0].source_links == {0: ""}
# --- Oversized section splitting ---------------------------------------------
def test_oversized_section_is_split_across_multiple_chunks() -> None:
    """Text longer than content_token_limit goes through chunk_splitter and
    comes back as several chunks; only the first is not a continuation."""
    ch = _make_chunker()
    # Well over CHUNK_LIMIT (200) chars, built from short sentences so
    # chonkie's SentenceChunker can split at clean boundaries.
    section_text = (
        "Alpha beta gamma. Delta epsilon zeta. Eta theta iota. "
        "Kappa lambda mu. Nu xi omicron. Pi rho sigma. Tau upsilon phi. "
        "Chi psi omega. One two three. Four five six. Seven eight nine. "
        "Ten eleven twelve. Thirteen fourteen fifteen. "
        "Sixteen seventeen eighteen. Nineteen twenty."
    )
    assert len(section_text) > CHUNK_LIMIT
    document = _make_doc(
        sections=[Section(text=section_text, link="big-link")],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) >= 2
    # continuation flags: first fresh, every later one a continuation
    flags = [c.section_continuation for c in result]
    assert flags[0] is False
    assert all(flags[1:])
    # every split carries the section's link
    for chunk in result:
        assert chunk.source_links == {0: "big-link"}
    # Stitched back together, the pieces should still contain the original
    # words (modulo boundary whitespace differences).
    joined = "".join(chunk.content for chunk in result)
    for word in ("Alpha", "omega", "twenty"):
        assert word in joined
def test_oversized_section_flushes_pending_text_first() -> None:
    """A buffered small section is flushed as its own chunk before an
    oversized section's splits are emitted."""
    ch = _make_chunker()
    pending = "Pending buffered text."
    big = (
        "Alpha beta gamma. Delta epsilon zeta. Eta theta iota. "
        "Kappa lambda mu. Nu xi omicron. Pi rho sigma. Tau upsilon phi. "
        "Chi psi omega. One two three. Four five six. Seven eight nine. "
        "Ten eleven twelve. Thirteen fourteen fifteen. Sixteen seventeen."
    )
    assert len(big) > CHUNK_LIMIT
    document = _make_doc(
        sections=[
            Section(text=pending, link="l-pending"),
            Section(text=big, link="l-big"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) >= 2
    # head chunk is exactly the flushed pending text
    head = result[0]
    assert head.content == pending
    assert head.source_links == {0: "l-pending"}
    assert head.section_continuation is False
    # everything after the head belongs to the oversized section
    for chunk in result[1:]:
        assert chunk.source_links == {0: "l-big"}
    # within that section: first split fresh, the rest continuations
    assert result[1].section_continuation is False
    assert all(c.section_continuation for c in result[2:])
# --- Title prefix / metadata propagation -------------------------------------
def test_title_prefix_and_metadata_propagate_to_all_chunks() -> None:
    """Every emitted chunk must carry the same title prefix and metadata
    suffixes that were passed in."""
    ch = _make_chunker()
    document = _make_doc(
        sections=[
            Section(text="A" * 120, link="la"),
            Section(text="B" * 120, link="lb"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="MY_TITLE\n",
        metadata_suffix_semantic="MS",
        metadata_suffix_keyword="MK",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 2
    for chunk in result:
        assert (
            chunk.title_prefix,
            chunk.metadata_suffix_semantic,
            chunk.metadata_suffix_keyword,
        ) == ("MY_TITLE\n", "MS", "MK")
# --- chunk_id monotonicity ---------------------------------------------------
def test_chunk_ids_are_sequential_starting_at_zero() -> None:
    """chunk_id values must count 0, 1, 2, ... in emission order."""
    ch = _make_chunker()
    document = _make_doc(
        sections=[
            Section(text="A" * 120, link="la"),
            Section(text="B" * 120, link="lb"),
            Section(text="C" * 120, link="lc"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    for position, chunk in enumerate(result):
        assert chunk.chunk_id == position
# --- Overflow accumulation behavior ------------------------------------------
def test_overflow_flush_then_subsequent_section_joins_new_chunk() -> None:
    """Once an overflow flush opens a second chunk, the next section that
    fits must join that chunk rather than spawn a third."""
    ch = _make_chunker()
    # 120 + 120 > 200, so the first two sections land in separate chunks;
    # the 20-char third section should ride along with the second.
    document = _make_doc(
        sections=[
            Section(text="A" * 120, link="la"),
            Section(text="B" * 120, link="lb"),
            Section(text="C" * 20, link="lc"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 2
    assert result[0].content == "A" * 120
    assert result[1].content == ("B" * 120) + SECTION_SEPARATOR + ("C" * 20)
    # second chunk offsets: lb at 0, lc at precompare-len("B" * 120) == 120
    assert result[1].source_links == {0: "lb", 120: "lc"}
def test_small_section_after_oversized_starts_a_fresh_chunk() -> None:
    """After an oversized section empties the internal accumulator, a
    following small section must open a brand-new chunk instead of being
    absorbed into the last split."""
    ch = _make_chunker()
    big = (
        "Alpha beta gamma. Delta epsilon zeta. Eta theta iota. "
        "Kappa lambda mu. Nu xi omicron. Pi rho sigma. Tau upsilon phi. "
        "Chi psi omega. One two three. Four five six. Seven eight nine. "
        "Ten eleven twelve. Thirteen fourteen fifteen. Sixteen seventeen."
    )
    assert len(big) > CHUNK_LIMIT
    document = _make_doc(
        sections=[
            Section(text=big, link="l-big"),
            Section(text="Tail text.", link="l-tail"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) >= 2
    # the very last chunk is the tail, fresh (not a continuation)
    tail = result[-1]
    assert tail.content == "Tail text."
    assert tail.source_links == {0: "l-tail"}
    assert tail.section_continuation is False
    # no oversized chunk ever picked up the tail's link
    for chunk in result[:-1]:
        assert chunk.source_links == {0: "l-big"}
# --- STRICT_CHUNK_TOKEN_LIMIT fallback path ----------------------------------
def test_strict_chunk_token_limit_subdivides_oversized_split(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """With STRICT_CHUNK_TOKEN_LIMIT on, a splitter output still larger than
    content_token_limit (one long no-period run) must fall back to
    _split_oversized_chunk."""
    monkeypatch.setattr(chunker_module, "STRICT_CHUNK_TOKEN_LIMIT", True)
    ch = _make_chunker()
    # 500 chars with no sentence boundary: chonkie returns one oversized
    # piece (> 200 tokens), which triggers the fallback path.
    run = "a" * 500
    document = _make_doc(sections=[Section(text=run, link="l-run")])
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    # ceil(500 / 200) == 3 sub-chunks
    assert len(result) == 3
    # continuation flags follow is_continuation=(j != 0)
    assert [c.section_continuation for c in result] == [False, True, True]
    # all sub-chunks carry the section link
    for chunk in result:
        assert chunk.source_links == {0: "l-run"}
    # Deliberately NOT asserting each chunk fits content_token_limit:
    # _split_oversized_chunk re-joins tokens with " ", so short tokens can
    # push the joined text past the limit. This test pins the window
    # slicing, not the post-join length.
def test_strict_chunk_token_limit_disabled_allows_oversized_split(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
    """Same pathological input with STRICT off: the oversized split is
    emitted untouched as a single chunk (pins current behavior)."""
    monkeypatch.setattr(chunker_module, "STRICT_CHUNK_TOKEN_LIMIT", False)
    ch = _make_chunker()
    run = "a" * 500
    document = _make_doc(sections=[Section(text=run, link="l-run")])
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    assert result[0].content == run
    assert result[0].section_continuation is False
# --- First-section-with-empty-text-but-document-has-title edge case ----------
def test_first_empty_section_with_title_is_processed_not_skipped() -> None:
    """When the doc HAS a title, an empty FIRST section survives the guard
    `(not document.title or section_idx > 0)`. Pinned here so a refactor
    cannot silently change it."""
    ch = _make_chunker()
    document = _make_doc(
        sections=[
            Section(text="", link="l0"),  # empty but kept: idx 0 + titled doc
            Section(text="Real content.", link="l1"),
        ],
        title="Has A Title",
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    assert result[0].content == "Real content."
    # The empty section registered a link offset at 0, but because
    # shared_precompare_cleanup("") is still "", the same key is reused
    # (and overwritten) when "Real content." lands. End state: {0: "l1"}.
    assert result[0].source_links == {0: "l1"}
# --- clean_text is applied to section text -----------------------------------
def test_clean_text_strips_control_chars_from_section_content() -> None:
    """clean_text() must run before text enters the accumulator — guards
    against a refactor dropping the call."""
    ch = _make_chunker()
    # NUL and BEL are control chars below 0x20 and neither \n nor \t, so
    # clean_text is expected to strip them.
    dirty = "Hello\x00 World\x07!"
    document = _make_doc(sections=[Section(text=dirty, link="l1")])
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    assert result[0].content == "Hello World!"
# --- None-valued fields ------------------------------------------------------
def test_section_with_none_text_behaves_like_empty_string() -> None:
    """A None section.text is coerced via `str(section.text or "")`, so it
    behaves exactly like an empty section — dropped at any index > 0."""
    ch = _make_chunker()
    document = _make_doc(
        sections=[
            Section(text="Alpha.", link="la"),
            Section(text=None, link="lnone"),  # idx 1 → dropped
            Section(text="Beta.", link="lb"),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    assert len(result) == 1
    content = result[0].content
    assert "Alpha." in content
    assert "Beta." in content
    assert "lnone" not in (result[0].source_links or {}).values()
# --- Trailing empty chunk suppression ----------------------------------------
def test_no_trailing_empty_chunk_when_last_section_was_image() -> None:
    """An image as the final section emits its own chunk and resets the
    text buffer, so the `or not chunks` safety branch must NOT fire —
    chunks is already non-empty. Pinned explicitly."""
    ch = _make_chunker()
    document = _make_doc(
        sections=[
            Section(text="Leading text.", link="ltext"),
            Section(
                text="img summary", link="limg", image_file_id="img-final"
            ),
        ],
    )
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    # exactly two chunks — crucially, no dangling third empty chunk
    assert len(result) == 2
    assert result[0].content == "Leading text."
    assert result[0].image_file_id is None
    assert result[1].content == "img summary"
    assert result[1].image_file_id == "img-final"
def test_no_trailing_empty_chunk_when_last_section_was_oversized() -> None:
    """Oversized-section splits drain the accumulator completely, so the
    trailing safety branch must stay a no-op — no "" chunk at the tail."""
    ch = _make_chunker()
    big = (
        "Alpha beta gamma. Delta epsilon zeta. Eta theta iota. "
        "Kappa lambda mu. Nu xi omicron. Pi rho sigma. Tau upsilon phi. "
        "Chi psi omega. One two three. Four five six. Seven eight nine. "
        "Ten eleven twelve. Thirteen fourteen fifteen. Sixteen seventeen."
    )
    assert len(big) > CHUNK_LIMIT
    document = _make_doc(sections=[Section(text=big, link="l-big")])
    result = ch._chunk_document_with_sections(
        document=document,
        sections=document.processed_sections,
        title_prefix="",
        metadata_suffix_semantic="",
        metadata_suffix_keyword="",
        content_token_limit=CHUNK_LIMIT,
    )
    # every chunk carries real text — no blank chunk anywhere
    for chunk in result:
        assert chunk.content.strip()

View File

@@ -1,17 +1,3 @@
# OAuth callback page must be served by the web server (Next.js),
# not the MCP server. Exact match takes priority over the regex below.
location = /mcp/oauth/callback {
# Forward the original client address, scheme, host, and port so the
# upstream app sees the real request context instead of the proxy's.
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header X-Forwarded-Port $server_port;
proxy_set_header Host $host;
# Speak HTTP/1.1 to the upstream (NOTE(review): presumably for upstream
# keep-alive — confirm against the upstream block's settings).
proxy_http_version 1.1;
# Do not rewrite Location/Refresh headers coming back from the upstream.
proxy_redirect off;
proxy_pass http://web_server;
}
# MCP Server - Model Context Protocol for LLM integrations
# Match /mcp, /mcp/, or /mcp/* but NOT /mcpserver, /mcpapi, etc.
location ~ ^/mcp(/.*)?$ {

View File

@@ -5,7 +5,7 @@ home: https://www.onyx.app/
sources:
- "https://github.com/onyx-dot-app/onyx"
type: application
version: 0.4.44
version: 0.4.40
appVersion: latest
annotations:
category: Productivity

View File

@@ -1,349 +0,0 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": { "type": "grafana", "uid": "-- Grafana --" },
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 1,
"id": null,
"links": [],
"liveNow": true,
"panels": [
{
"title": "Client-Side Search Latency (P50 / P95 / P99)",
"description": "End-to-end latency as measured by the Python client, including network round-trip and serialization overhead.",
"type": "timeseries",
"gridPos": { "h": 10, "w": 12, "x": 0, "y": 0 },
"id": 1,
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"fieldConfig": {
"defaults": {
"color": { "mode": "palette-classic" },
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisLabel": "seconds",
"axisPlacement": "auto",
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "dashed" }
},
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "yellow", "value": 0.5 },
{ "color": "red", "value": 2.0 }
]
},
"unit": "s",
"min": 0
},
"overrides": []
},
"targets": [
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.5, sum by (le) (rate(onyx_opensearch_search_client_duration_seconds_bucket[5m])))",
"legendFormat": "P50",
"refId": "A"
},
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.95, sum by (le) (rate(onyx_opensearch_search_client_duration_seconds_bucket[5m])))",
"legendFormat": "P95",
"refId": "B"
},
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.99, sum by (le) (rate(onyx_opensearch_search_client_duration_seconds_bucket[5m])))",
"legendFormat": "P99",
"refId": "C"
}
]
},
{
"title": "Server-Side Search Latency (P50 / P95 / P99)",
"description": "OpenSearch server-side execution time from the 'took' field in the response. Does not include network or client-side overhead.",
"type": "timeseries",
"gridPos": { "h": 10, "w": 12, "x": 12, "y": 0 },
"id": 2,
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"fieldConfig": {
"defaults": {
"color": { "mode": "palette-classic" },
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisLabel": "seconds",
"axisPlacement": "auto",
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "dashed" }
},
"thresholds": {
"mode": "absolute",
"steps": [
{ "color": "green", "value": null },
{ "color": "yellow", "value": 0.5 },
{ "color": "red", "value": 2.0 }
]
},
"unit": "s",
"min": 0
},
"overrides": []
},
"targets": [
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.5, sum by (le) (rate(onyx_opensearch_search_server_duration_seconds_bucket[5m])))",
"legendFormat": "P50",
"refId": "A"
},
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.95, sum by (le) (rate(onyx_opensearch_search_server_duration_seconds_bucket[5m])))",
"legendFormat": "P95",
"refId": "B"
},
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.99, sum by (le) (rate(onyx_opensearch_search_server_duration_seconds_bucket[5m])))",
"legendFormat": "P99",
"refId": "C"
}
]
},
{
"title": "Client-Side Latency by Search Type (P95)",
"description": "P95 client-side latency broken down by search type (hybrid, keyword, semantic, random, doc_id_retrieval).",
"type": "timeseries",
"gridPos": { "h": 10, "w": 12, "x": 0, "y": 10 },
"id": 3,
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"fieldConfig": {
"defaults": {
"color": { "mode": "palette-classic" },
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisLabel": "seconds",
"axisPlacement": "auto",
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"unit": "s",
"min": 0
},
"overrides": []
},
"targets": [
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.95, sum by (search_type, le) (rate(onyx_opensearch_search_client_duration_seconds_bucket[5m])))",
"legendFormat": "{{ search_type }}",
"refId": "A"
}
]
},
{
"title": "Search Throughput by Type",
"description": "Searches per second broken down by search type.",
"type": "timeseries",
"gridPos": { "h": 10, "w": 12, "x": 12, "y": 10 },
"id": 4,
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"fieldConfig": {
"defaults": {
"color": { "mode": "palette-classic" },
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisLabel": "searches/s",
"axisPlacement": "auto",
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "normal" },
"thresholdsStyle": { "mode": "off" }
},
"unit": "ops",
"min": 0
},
"overrides": []
},
"targets": [
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "sum by (search_type) (rate(onyx_opensearch_search_total[5m]))",
"legendFormat": "{{ search_type }}",
"refId": "A"
}
]
},
{
"title": "Concurrent Searches In Progress",
"description": "Number of OpenSearch searches currently in flight, broken down by search type. Summed across all instances.",
"type": "timeseries",
"gridPos": { "h": 10, "w": 12, "x": 0, "y": 20 },
"id": 5,
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"fieldConfig": {
"defaults": {
"color": { "mode": "palette-classic" },
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisLabel": "searches",
"axisPlacement": "auto",
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "normal" },
"thresholdsStyle": { "mode": "off" }
},
"min": 0
},
"overrides": []
},
"targets": [
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "sum by (search_type) (onyx_opensearch_searches_in_progress)",
"legendFormat": "{{ search_type }}",
"refId": "A"
}
]
},
{
"title": "Client vs Server Latency Overhead (P50)",
"description": "Difference between client-side and server-side P50 latency. Reveals network, serialization, and untracked OpenSearch overhead.",
"type": "timeseries",
"gridPos": { "h": 10, "w": 12, "x": 12, "y": 20 },
"id": 6,
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"fieldConfig": {
"defaults": {
"color": { "mode": "palette-classic" },
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisLabel": "seconds",
"axisPlacement": "auto",
"drawStyle": "line",
"fillOpacity": 0,
"gradientMode": "none",
"lineInterpolation": "smooth",
"lineWidth": 2,
"pointSize": 5,
"scaleDistribution": { "type": "linear" },
"showPoints": "never",
"spanNulls": false,
"stacking": { "group": "A", "mode": "none" },
"thresholdsStyle": { "mode": "off" }
},
"unit": "s",
"min": 0
},
"overrides": []
},
"targets": [
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.5, sum by (le) (rate(onyx_opensearch_search_client_duration_seconds_bucket[5m]))) - histogram_quantile(0.5, sum by (le) (rate(onyx_opensearch_search_server_duration_seconds_bucket[5m])))",
"legendFormat": "Client - Server overhead (P50)",
"refId": "A"
},
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.5, sum by (le) (rate(onyx_opensearch_search_client_duration_seconds_bucket[5m])))",
"legendFormat": "Client P50",
"refId": "B"
},
{
"datasource": { "type": "prometheus", "uid": "${DS_PROMETHEUS}" },
"expr": "histogram_quantile(0.5, sum by (le) (rate(onyx_opensearch_search_server_duration_seconds_bucket[5m])))",
"legendFormat": "Server P50",
"refId": "C"
}
]
}
],
"refresh": "5s",
"schemaVersion": 37,
"style": "dark",
"tags": ["onyx", "opensearch", "search", "latency"],
"templating": {
"list": [
{
"current": {
"text": "Prometheus",
"value": "prometheus"
},
"includeAll": false,
"name": "DS_PROMETHEUS",
"options": [],
"query": "prometheus",
"refresh": 1,
"type": "datasource"
}
]
},
"time": { "from": "now-60m", "to": "now" },
"timepicker": {
"refresh_intervals": ["5s", "10s", "30s", "1m"]
},
"timezone": "",
"title": "Onyx OpenSearch Search Latency",
"uid": "onyx-opensearch-search-latency",
"version": 0,
"weekStart": ""
}

View File

@@ -1,606 +0,0 @@
{
"id": null,
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "grafana",
"uid": "-- Grafana --"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"type": "dashboard"
}
]
},
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"links": [],
"panels": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 18,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"insertNulls": false,
"lineInterpolation": "linear",
"lineWidth": 2,
"pointSize": 4,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "orange",
"value": 10
},
{
"color": "red",
"value": 50
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 10,
"w": 24,
"x": 0,
"y": 0
},
"id": 1,
"options": {
"legend": {
"calcs": ["lastNotNull", "max"],
"displayMode": "table",
"placement": "right",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "desc"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "onyx_celery_queue_depth{queue=~\"$queue\"}",
"legendFormat": "{{queue}}",
"range": true,
"refId": "A"
}
],
"title": "Queue Depth by Queue",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "orange",
"value": 20
},
{
"color": "red",
"value": 100
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 4,
"w": 6,
"x": 0,
"y": 10
},
"id": 2,
"options": {
"colorMode": "background",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "sum(onyx_celery_queue_depth)",
"refId": "A"
}
],
"title": "Total Queued Tasks",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "orange",
"value": 20
},
{
"color": "red",
"value": 100
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 4,
"w": 6,
"x": 6,
"y": 10
},
"id": 3,
"options": {
"colorMode": "background",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "onyx_celery_unacked_tasks",
"refId": "A"
}
],
"title": "Unacked Tasks",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "orange",
"value": 10
},
{
"color": "red",
"value": 50
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 4,
"w": 6,
"x": 12,
"y": 10
},
"id": 4,
"options": {
"colorMode": "background",
"graphMode": "none",
"justifyMode": "center",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "onyx_celery_queue_depth{queue=\"docprocessing\"}",
"refId": "A"
}
],
"title": "Docprocessing Queue",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "orange",
"value": 10
},
{
"color": "red",
"value": 50
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 4,
"w": 6,
"x": 18,
"y": 10
},
"id": 5,
"options": {
"colorMode": "background",
"graphMode": "none",
"justifyMode": "center",
"orientation": "auto",
"reduceOptions": {
"calcs": ["lastNotNull"],
"fields": "",
"values": false
},
"textMode": "auto"
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "onyx_celery_queue_depth{queue=\"connector_doc_fetching\"}",
"refId": "A"
}
],
"title": "Docfetching Queue",
"type": "stat"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisBorderShow": false,
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "bars",
"fillOpacity": 80,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineWidth": 1,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"stacking": {
"group": "A",
"mode": "none"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "orange",
"value": 10
},
{
"color": "red",
"value": 50
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 10,
"w": 12,
"x": 0,
"y": 14
},
"id": 6,
"options": {
"legend": {
"calcs": ["lastNotNull"],
"displayMode": "list",
"placement": "bottom",
"showLegend": false
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "topk(10, onyx_celery_queue_depth)",
"legendFormat": "{{queue}}",
"range": true,
"refId": "A"
}
],
"title": "Top 10 Queue Backlogs",
"type": "timeseries"
},
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"fieldConfig": {
"defaults": {
"custom": {
"align": "auto",
"cellOptions": {
"type": "auto"
},
"inspect": false
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "orange",
"value": 10
},
{
"color": "red",
"value": 50
}
]
}
},
"overrides": []
},
"gridPos": {
"h": 10,
"w": 12,
"x": 12,
"y": 14
},
"id": 7,
"options": {
"cellHeight": "sm",
"footer": {
"countRows": false,
"fields": "",
"reducer": ["sum"],
"show": false
},
"showHeader": true,
"sortBy": [
{
"desc": true,
"displayName": "Value"
}
]
},
"targets": [
{
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"expr": "sort_desc(onyx_celery_queue_depth)",
"format": "table",
"instant": true,
"refId": "A"
}
],
"title": "Current Queue Depth",
"transformations": [
{
"id": "labelsToFields",
"options": {
"mode": "columns"
}
}
],
"type": "table"
}
],
"refresh": "30s",
"schemaVersion": 39,
"style": "dark",
"tags": ["onyx", "redis", "celery"],
"templating": {
"list": [
{
"current": {
"selected": true,
"text": "Prometheus",
"value": "Prometheus"
},
"hide": 0,
"includeAll": false,
"label": "Datasource",
"name": "DS_PROMETHEUS",
"options": [],
"query": "prometheus",
"refresh": 1,
"regex": "",
"type": "datasource"
},
{
"allValue": ".*",
"current": {
"selected": true,
"text": "All",
"value": ".*"
},
"datasource": {
"type": "prometheus",
"uid": "${DS_PROMETHEUS}"
},
"definition": "label_values(onyx_celery_queue_depth, queue)",
"hide": 0,
"includeAll": true,
"label": "Queue",
"multi": true,
"name": "queue",
"options": [],
"query": {
"query": "label_values(onyx_celery_queue_depth, queue)",
"refId": "StandardVariableQuery"
},
"refresh": 2,
"regex": "",
"sort": 1,
"type": "query"
}
]
},
"time": {
"from": "now-6h",
"to": "now"
},
"timepicker": {},
"timezone": "",
"title": "Onyx Redis Queues",
"uid": "onyx-redis-queues",
"version": 1,
"weekStart": ""
}

View File

@@ -1,23 +0,0 @@
{{- if .Values.monitoring.serviceMonitors.enabled }}
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
name: {{ include "onyx.fullname" . }}-api
labels:
{{- include "onyx.labels" . | nindent 4 }}
{{- with .Values.monitoring.serviceMonitors.labels }}
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
namespaceSelector:
matchNames:
- {{ .Release.Namespace }}
selector:
matchLabels:
app: {{ .Values.api.deploymentLabels.app }}
endpoints:
- port: api-server-port
path: /metrics
interval: 30s
scrapeTimeout: 10s
{{- end }}

View File

@@ -74,29 +74,4 @@ spec:
interval: 30s
scrapeTimeout: 10s
{{- end }}
{{- if gt (int .Values.celery_worker_heavy.replicaCount) 0 }}
---
apiVersion: monitoring.coreos.com/v1
kind: ServiceMonitor
metadata:
name: {{ include "onyx.fullname" . }}-celery-worker-heavy
labels:
{{- include "onyx.labels" . | nindent 4 }}
{{- with .Values.monitoring.serviceMonitors.labels }}
{{- toYaml . | nindent 4 }}
{{- end }}
spec:
namespaceSelector:
matchNames:
- {{ .Release.Namespace }}
selector:
matchLabels:
app: {{ .Values.celery_worker_heavy.deploymentLabels.app }}
metrics: "true"
endpoints:
- port: metrics
path: /metrics
interval: 30s
scrapeTimeout: 10s
{{- end }}
{{- end }}

View File

@@ -12,30 +12,4 @@ metadata:
data:
onyx-indexing-pipeline.json: |
{{- .Files.Get "dashboards/indexing-pipeline.json" | nindent 4 }}
---
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ include "onyx.fullname" . }}-opensearch-search-latency-dashboard
labels:
{{- include "onyx.labels" . | nindent 4 }}
grafana_dashboard: "1"
annotations:
grafana_folder: "Onyx"
data:
onyx-opensearch-search-latency.json: |
{{- .Files.Get "dashboards/opensearch-search-latency.json" | nindent 4 }}
---
apiVersion: v1
kind: ConfigMap
metadata:
name: {{ include "onyx.fullname" . }}-redis-queues-dashboard
labels:
{{- include "onyx.labels" . | nindent 4 }}
grafana_dashboard: "1"
annotations:
grafana_folder: "Onyx"
data:
onyx-redis-queues.json: |
{{- .Files.Get "dashboards/redis-queues.json" | nindent 4 }}
{{- end }}

View File

@@ -1,30 +0,0 @@
{{- if and .Values.ingress.enabled .Values.mcpServer.enabled -}}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: {{ include "onyx.fullname" . }}-ingress-mcp-oauth-callback
annotations:
{{- if not .Values.ingress.className }}
kubernetes.io/ingress.class: nginx
{{- end }}
cert-manager.io/cluster-issuer: {{ include "onyx.fullname" . }}-letsencrypt
spec:
{{- if .Values.ingress.className }}
ingressClassName: {{ .Values.ingress.className }}
{{- end }}
rules:
- host: {{ .Values.ingress.api.host }}
http:
paths:
- path: /mcp/oauth/callback
pathType: Exact
backend:
service:
name: {{ include "onyx.fullname" . }}-webserver
port:
number: {{ .Values.webserver.service.servicePort }}
tls:
- hosts:
- {{ .Values.ingress.api.host }}
secretName: {{ include "onyx.fullname" . }}-ingress-mcp-oauth-callback-tls
{{- end }}

View File

@@ -42,22 +42,6 @@ data:
client_max_body_size 5G;
{{- if .Values.mcpServer.enabled }}
# OAuth callback page must be served by the web server (Next.js),
# not the MCP server. Exact match takes priority over the regex below.
location = /mcp/oauth/callback {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header Host $host;
proxy_http_version 1.1;
proxy_redirect off;
proxy_connect_timeout {{ .Values.nginx.timeouts.connect }}s;
proxy_send_timeout {{ .Values.nginx.timeouts.send }}s;
proxy_read_timeout {{ .Values.nginx.timeouts.read }}s;
proxy_pass http://web_server;
}
# MCP Server - Model Context Protocol for LLM integrations
# Match /mcp, /mcp/, or /mcp/* but NOT /mcpserver, /mcpapi, etc.
location ~ ^/mcp(/.*)?$ {

View File

@@ -264,7 +264,7 @@ monitoring:
# The sidecar must be configured with label selector: grafana_dashboard=1
enabled: false
serviceMonitors:
# -- Set to true to deploy ServiceMonitor resources for API server and Celery worker metrics endpoints.
# -- Set to true to deploy ServiceMonitor resources for Celery worker metrics endpoints.
# Requires the Prometheus Operator CRDs (included in kube-prometheus-stack).
# Use `labels` to match your Prometheus CR's serviceMonitorSelector (e.g. release: onyx-monitoring).
enabled: false
@@ -296,7 +296,7 @@ nginx:
# The ingress-nginx subchart doesn't auto-detect our custom ConfigMap changes.
# Workaround: Helm upgrade will restart if the following annotation value changes.
podAnnotations:
onyx.app/nginx-config-version: "4"
onyx.app/nginx-config-version: "3"
# Propagate DOMAIN into nginx so server_name continues to use the same env var
extraEnvs:

View File

@@ -22,10 +22,6 @@ variable "CLI_REPOSITORY" {
default = "onyxdotapp/onyx-cli"
}
variable "DEVCONTAINER_REPOSITORY" {
default = "onyxdotapp/onyx-devcontainer"
}
variable "TAG" {
default = "latest"
}
@@ -94,16 +90,3 @@ target "cli" {
tags = ["${CLI_REPOSITORY}:${TAG}"]
}
target "devcontainer" {
context = ".devcontainer"
dockerfile = "Dockerfile"
cache-from = [
"type=registry,ref=${DEVCONTAINER_REPOSITORY}:latest",
"type=registry,ref=${DEVCONTAINER_REPOSITORY}:edge",
]
cache-to = ["type=inline"]
tags = ["${DEVCONTAINER_REPOSITORY}:${TAG}"]
}

View File

@@ -148,7 +148,7 @@ dev = [
"matplotlib==3.10.8",
"mypy-extensions==1.0.0",
"mypy==1.13.0",
"onyx-devtools==0.7.5",
"onyx-devtools==0.7.3",
"openapi-generator-cli==7.17.0",
"pandas-stubs~=2.3.3",
"pre-commit==3.2.2",

View File

@@ -244,54 +244,6 @@ ods web lint
ods web test --watch
```
### `dev` - Devcontainer Management
Manage the Onyx devcontainer. Also available as `ods dc`.
Requires the [devcontainer CLI](https://github.com/devcontainers/cli) (`npm install -g @devcontainers/cli`).
```shell
ods dev <subcommand>
```
**Subcommands:**
- `up` - Start the devcontainer (pulls the image if needed)
- `into` - Open a zsh shell inside the running devcontainer
- `exec` - Run an arbitrary command inside the devcontainer
- `restart` - Remove and recreate the devcontainer
- `rebuild` - Pull the latest published image and recreate
- `stop` - Stop the running devcontainer
The devcontainer image is published to `onyxdotapp/onyx-devcontainer` and
referenced by tag in `.devcontainer/devcontainer.json` — no local build needed.
**Examples:**
```shell
# Start the devcontainer
ods dev up
# Open a shell
ods dev into
# Run a command
ods dev exec -- npm test
# Restart the container
ods dev restart
# Pull latest image and recreate
ods dev rebuild
# Stop the container
ods dev stop
# Same commands work with the dc alias
ods dc up
ods dc into
```
### `db` - Database Administration
Manage PostgreSQL database dumps, restores, and migrations.

View File

@@ -1,34 +0,0 @@
package cmd
import (
"github.com/spf13/cobra"
)
// NewDevCommand creates the parent dev command for devcontainer operations.
func NewDevCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "dev",
Aliases: []string{"dc"},
Short: "Manage the devcontainer",
Long: `Manage the Onyx devcontainer.
Wraps the devcontainer CLI with workspace-aware defaults.
Commands:
up Start the devcontainer
into Open a shell inside the running devcontainer
exec Run a command inside the devcontainer
restart Remove and recreate the devcontainer
rebuild Pull the latest image and recreate
stop Stop the running devcontainer`,
}
cmd.AddCommand(newDevUpCommand())
cmd.AddCommand(newDevIntoCommand())
cmd.AddCommand(newDevExecCommand())
cmd.AddCommand(newDevRestartCommand())
cmd.AddCommand(newDevRebuildCommand())
cmd.AddCommand(newDevStopCommand())
return cmd
}

View File

@@ -1,29 +0,0 @@
package cmd
import (
"github.com/spf13/cobra"
)
func newDevExecCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "exec [--] <command> [args...]",
Short: "Run a command inside the devcontainer",
Long: `Run an arbitrary command inside the running devcontainer.
All arguments are treated as positional (flags like -it are passed through).
Examples:
ods dev exec npm test
ods dev exec -- ls -la
ods dev exec -it echo hello`,
Args: cobra.MinimumNArgs(1),
DisableFlagParsing: true,
Run: func(cmd *cobra.Command, args []string) {
if len(args) > 0 && args[0] == "--" {
args = args[1:]
}
runDevExec(args)
},
}
return cmd
}

View File

@@ -1,53 +0,0 @@
package cmd
import (
"os"
"os/exec"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/onyx-dot-app/onyx/tools/ods/internal/paths"
)
func newDevIntoCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "into",
Short: "Open a shell inside the running devcontainer",
Long: `Open an interactive zsh shell inside the running devcontainer.
Examples:
ods dev into`,
Run: func(cmd *cobra.Command, args []string) {
runDevExec([]string{"zsh"})
},
}
return cmd
}
// runDevExec executes "devcontainer exec --workspace-folder <root> <command...>".
func runDevExec(command []string) {
checkDevcontainerCLI()
ensureDockerSock()
ensureRemoteUser()
root, err := paths.GitRoot()
if err != nil {
log.Fatalf("Failed to find git root: %v", err)
}
args := []string{"exec", "--workspace-folder", root}
args = append(args, command...)
log.Debugf("Running: devcontainer %v", args)
c := exec.Command("devcontainer", args...)
c.Stdout = os.Stdout
c.Stderr = os.Stderr
c.Stdin = os.Stdin
if err := c.Run(); err != nil {
log.Fatalf("devcontainer exec failed: %v", err)
}
}

View File

@@ -1,41 +0,0 @@
package cmd
import (
"os"
"os/exec"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
func newDevRebuildCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "rebuild",
Short: "Pull the latest devcontainer image and recreate",
Long: `Pull the latest devcontainer image and recreate the container.
Use after the published image has been updated or after changing devcontainer.json.
Examples:
ods dev rebuild`,
Run: func(cmd *cobra.Command, args []string) {
runDevRebuild()
},
}
return cmd
}
func runDevRebuild() {
image := devcontainerImage()
log.Infof("Pulling %s...", image)
pull := exec.Command("docker", "pull", image)
pull.Stdout = os.Stdout
pull.Stderr = os.Stderr
if err := pull.Run(); err != nil {
log.Warnf("Failed to pull image (continuing with local copy): %v", err)
}
runDevcontainer("up", []string{"--remove-existing-container"})
}

View File

@@ -1,23 +0,0 @@
package cmd
import (
"github.com/spf13/cobra"
)
func newDevRestartCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "restart",
Short: "Remove and recreate the devcontainer",
Long: `Remove the existing devcontainer and recreate it.
Uses the cached image — for a full image rebuild, use "ods dev rebuild".
Examples:
ods dev restart`,
Run: func(cmd *cobra.Command, args []string) {
runDevcontainer("up", []string{"--remove-existing-container"})
},
}
return cmd
}

View File

@@ -1,56 +0,0 @@
package cmd
import (
"os/exec"
"strings"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/onyx-dot-app/onyx/tools/ods/internal/paths"
)
func newDevStopCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "stop",
Short: "Stop the running devcontainer",
Long: `Stop the running devcontainer.
Examples:
ods dev stop`,
Run: func(cmd *cobra.Command, args []string) {
runDevStop()
},
}
return cmd
}
func runDevStop() {
root, err := paths.GitRoot()
if err != nil {
log.Fatalf("Failed to find git root: %v", err)
}
// Find the container by the devcontainer label
out, err := exec.Command(
"docker", "ps", "-q",
"--filter", "label=devcontainer.local_folder="+root,
).Output()
if err != nil {
log.Fatalf("Failed to find devcontainer: %v", err)
}
containerID := strings.TrimSpace(string(out))
if containerID == "" {
log.Info("No running devcontainer found")
return
}
log.Infof("Stopping devcontainer %s...", containerID)
c := exec.Command("docker", "stop", containerID)
if err := c.Run(); err != nil {
log.Fatalf("Failed to stop devcontainer: %v", err)
}
log.Info("Devcontainer stopped")
}

View File

@@ -1,223 +0,0 @@
package cmd
import (
"encoding/json"
"fmt"
"os"
"os/exec"
"path/filepath"
"runtime"
"strings"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/onyx-dot-app/onyx/tools/ods/internal/paths"
)
func newDevUpCommand() *cobra.Command {
cmd := &cobra.Command{
Use: "up",
Short: "Start the devcontainer",
Long: `Start the devcontainer, pulling the image if needed.
Examples:
ods dev up`,
Run: func(cmd *cobra.Command, args []string) {
runDevcontainer("up", nil)
},
}
return cmd
}
// devcontainerImage reads the image field from .devcontainer/devcontainer.json.
func devcontainerImage() string {
root, err := paths.GitRoot()
if err != nil {
log.Fatalf("Failed to find git root: %v", err)
}
data, err := os.ReadFile(filepath.Join(root, ".devcontainer", "devcontainer.json"))
if err != nil {
log.Fatalf("Failed to read devcontainer.json: %v", err)
}
var cfg struct {
Image string `json:"image"`
}
if err := json.Unmarshal(data, &cfg); err != nil {
log.Fatalf("Failed to parse devcontainer.json: %v", err)
}
if cfg.Image == "" {
log.Fatal("No image field in devcontainer.json")
}
return cfg.Image
}
// checkDevcontainerCLI ensures the devcontainer CLI is installed.
func checkDevcontainerCLI() {
if _, err := exec.LookPath("devcontainer"); err != nil {
log.Fatal("devcontainer CLI is not installed. Install it with: npm install -g @devcontainers/cli")
}
}
// ensureDockerSock sets the DOCKER_SOCK environment variable if not already set.
// Used by ensureRemoteUser to detect rootless Docker.
func ensureDockerSock() {
if os.Getenv("DOCKER_SOCK") != "" {
return
}
sock := detectDockerSock()
if err := os.Setenv("DOCKER_SOCK", sock); err != nil {
log.Fatalf("Failed to set DOCKER_SOCK: %v", err)
}
}
// detectDockerSock returns the path to the Docker socket on the host.
func detectDockerSock() string {
// Prefer explicit DOCKER_HOST (strip unix:// prefix if present).
if dh := os.Getenv("DOCKER_HOST"); dh != "" {
const prefix = "unix://"
if len(dh) > len(prefix) && dh[:len(prefix)] == prefix {
return dh[len(prefix):]
}
// Only bare paths (starting with /) are valid socket paths.
// Non-unix schemes (e.g. tcp://) can't be bind-mounted.
if len(dh) > 0 && dh[0] == '/' {
return dh
}
log.Warnf("DOCKER_HOST=%q is not a unix socket path; falling back to local socket detection", dh)
}
// Linux rootless Docker: $XDG_RUNTIME_DIR/docker.sock
if runtime.GOOS == "linux" {
if xdg := os.Getenv("XDG_RUNTIME_DIR"); xdg != "" {
sock := filepath.Join(xdg, "docker.sock")
if _, err := os.Stat(sock); err == nil {
return sock
}
}
}
// macOS Docker Desktop: ~/.docker/run/docker.sock
if runtime.GOOS == "darwin" {
if home, err := os.UserHomeDir(); err == nil {
sock := filepath.Join(home, ".docker", "run", "docker.sock")
if _, err := os.Stat(sock); err == nil {
return sock
}
}
}
// Fallback: standard socket path (Linux with standard Docker, macOS symlink)
return "/var/run/docker.sock"
}
// worktreeGitMount returns a --mount flag value that makes a git worktree's
// .git reference resolve inside the container. In a worktree, .git is a file
// containing "gitdir: /path/to/main/.git/worktrees/<name>", so we need the
// main repo's .git directory to exist at the same absolute host path inside
// the container.
//
// Returns ("", false) when the workspace is not a worktree.
func worktreeGitMount(root string) (string, bool) {
dotgit := filepath.Join(root, ".git")
info, err := os.Lstat(dotgit)
if err != nil || info.IsDir() {
return "", false // regular repo or no .git
}
// .git is a file — parse the gitdir path.
out, err := exec.Command("git", "-C", root, "rev-parse", "--git-common-dir").Output()
if err != nil {
log.Warnf("Failed to detect git common dir: %v", err)
return "", false
}
commonDir := strings.TrimSpace(string(out))
// Resolve to absolute path.
if !filepath.IsAbs(commonDir) {
commonDir = filepath.Join(root, commonDir)
}
commonDir, _ = filepath.EvalSymlinks(commonDir)
mount := fmt.Sprintf("type=bind,source=%s,target=%s", commonDir, commonDir)
log.Debugf("Worktree detected — mounting main .git: %s", commonDir)
return mount, true
}
// sshAgentMount returns a --mount flag value that forwards the host's SSH agent
// socket into the container. Returns ("", false) when SSH_AUTH_SOCK is unset or
// the socket is not accessible.
func sshAgentMount() (string, bool) {
sock := os.Getenv("SSH_AUTH_SOCK")
if sock == "" {
log.Debug("SSH_AUTH_SOCK not set — skipping SSH agent forwarding")
return "", false
}
if _, err := os.Stat(sock); err != nil {
log.Debugf("SSH_AUTH_SOCK=%s not accessible: %v", sock, err)
return "", false
}
mount := fmt.Sprintf("type=bind,source=%s,target=/tmp/ssh-agent.sock", sock)
log.Debugf("Forwarding SSH agent: %s", sock)
return mount, true
}
// ensureRemoteUser sets DEVCONTAINER_REMOTE_USER when rootless Docker is
// detected. Container root maps to the host user in rootless mode, so running
// as root inside the container avoids the UID mismatch on new files.
// Must be called after ensureDockerSock.
func ensureRemoteUser() {
if os.Getenv("DEVCONTAINER_REMOTE_USER") != "" {
return
}
if runtime.GOOS == "linux" {
sock := os.Getenv("DOCKER_SOCK")
xdg := os.Getenv("XDG_RUNTIME_DIR")
// Heuristic: rootless Docker on Linux typically places its socket
// under $XDG_RUNTIME_DIR. If DOCKER_SOCK was set to a custom path
// outside XDG_RUNTIME_DIR, set DEVCONTAINER_REMOTE_USER=root manually.
if xdg != "" && strings.HasPrefix(sock, xdg) {
log.Debug("Rootless Docker detected — setting DEVCONTAINER_REMOTE_USER=root")
if err := os.Setenv("DEVCONTAINER_REMOTE_USER", "root"); err != nil {
log.Warnf("Failed to set DEVCONTAINER_REMOTE_USER: %v", err)
}
}
}
}
// runDevcontainer executes "devcontainer <action> --workspace-folder <root> [extraArgs...]".
func runDevcontainer(action string, extraArgs []string) {
checkDevcontainerCLI()
ensureDockerSock()
ensureRemoteUser()
root, err := paths.GitRoot()
if err != nil {
log.Fatalf("Failed to find git root: %v", err)
}
args := []string{action, "--workspace-folder", root}
if mount, ok := worktreeGitMount(root); ok {
args = append(args, "--mount", mount)
}
if mount, ok := sshAgentMount(); ok {
args = append(args, "--mount", mount)
}
args = append(args, extraArgs...)
log.Debugf("Running: devcontainer %v", args)
c := exec.Command("devcontainer", args...)
c.Stdout = os.Stdout
c.Stderr = os.Stderr
c.Stdin = os.Stdin
if err := c.Run(); err != nil {
log.Fatalf("devcontainer %s failed: %v", action, err)
}
}

View File

@@ -53,7 +53,6 @@ func NewRootCommand() *cobra.Command {
cmd.AddCommand(NewRunCICommand())
cmd.AddCommand(NewScreenshotDiffCommand())
cmd.AddCommand(NewDesktopCommand())
cmd.AddCommand(NewDevCommand())
cmd.AddCommand(NewWebCommand())
cmd.AddCommand(NewLatestStableTagCommand())
cmd.AddCommand(NewWhoisCommand())

16
uv.lock generated
View File

@@ -4511,7 +4511,7 @@ dev = [
{ name = "matplotlib", specifier = "==3.10.8" },
{ name = "mypy", specifier = "==1.13.0" },
{ name = "mypy-extensions", specifier = "==1.0.0" },
{ name = "onyx-devtools", specifier = "==0.7.5" },
{ name = "onyx-devtools", specifier = "==0.7.3" },
{ name = "openapi-generator-cli", specifier = "==7.17.0" },
{ name = "pandas-stubs", specifier = "~=2.3.3" },
{ name = "pre-commit", specifier = "==3.2.2" },
@@ -4554,19 +4554,19 @@ model-server = [
[[package]]
name = "onyx-devtools"
version = "0.7.5"
version = "0.7.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "fastapi" },
{ name = "openapi-generator-cli" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/cb/f8/844e34f5126ae40fff0d012bba0b28f031f8871062759bb3789eae4f5e0a/onyx_devtools-0.7.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b3cd434c722ae48a1f651748a9f094711b29d1a9f37fbbadef3144f2cdb0f16d", size = 4238900, upload-time = "2026-04-10T07:02:16.382Z" },
{ url = "https://files.pythonhosted.org/packages/2d/97/d1db725f900b199fa3f7a7a7c9b51ae75d4b18755c924f00f06a7703e552/onyx_devtools-0.7.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c50e3d76d4f8cc4faa6250e758d42f0249067f0e17bc82b99c6c00dd48114393", size = 3913672, upload-time = "2026-04-10T07:02:17.46Z" },
{ url = "https://files.pythonhosted.org/packages/31/83/e11bedb0a1321b63c844a418be1990c172ed363c6ee612978c3a38df71f1/onyx_devtools-0.7.5-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:ec01aeaaa14854b0933bb85bbfc51184599d3dbf1c0097ff59c1c72db8222a5a", size = 3779585, upload-time = "2026-04-10T07:02:16.31Z" },
{ url = "https://files.pythonhosted.org/packages/b3/85/128d25cd35c1adc436dcff9ab4f2c20cf29528d09415280c1230ff0ca993/onyx_devtools-0.7.5-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:586d50ecb6dcea95611135e4cd4529ebedd8ab84a41b1adf3be1280a48dc52af", size = 4201962, upload-time = "2026-04-10T07:02:14.466Z" },
{ url = "https://files.pythonhosted.org/packages/99/5d/83c80f918b399fea998cd41bfe90bda733eda77e133ca4dc1e9ce18a9b4a/onyx_devtools-0.7.5-py3-none-win_amd64.whl", hash = "sha256:c45d80f0093ba738120b77c4c0bde13843e33d786ae8608eb10490f06183d89b", size = 4320088, upload-time = "2026-04-10T07:02:17.09Z" },
{ url = "https://files.pythonhosted.org/packages/26/bf/b9c85cc61981bd71c0f1cbb50192763b11788a7c8636b1e01f750251c92c/onyx_devtools-0.7.5-py3-none-win_arm64.whl", hash = "sha256:9852a7cc29939371e016b794f2cffdb88680280d857d24c191c5188884416a3d", size = 3858839, upload-time = "2026-04-10T07:02:20.098Z" },
{ url = "https://files.pythonhosted.org/packages/72/64/c75be8ab325896cc64bccd0e1e139a03ce305bf05598967922d380fc4694/onyx_devtools-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:675e2fdbd8d291fba4b8a6dfcf2bc94c56d22d11f395a9f0d0c3c0e5b39d7f9b", size = 4220613, upload-time = "2026-04-09T00:04:36.624Z" },
{ url = "https://files.pythonhosted.org/packages/ae/1f/589ff6bd446c4498f5bcdfd2a315709e91fc15edf5440c91ff64cbf0800f/onyx_devtools-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:bf3993de8ba02d6c2f1ab12b5b9b965e005040b37502f97db8a7d88d9b0cde4b", size = 3897867, upload-time = "2026-04-09T00:04:40.781Z" },
{ url = "https://files.pythonhosted.org/packages/10/c0/53c9173eefc13218707282c5b99753960d039684994c3b3caf90ce286094/onyx_devtools-0.7.3-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:6138a94084bed05c674ad210a0bc4006c43bc4384e8eb54d469233de85c72bd7", size = 3762408, upload-time = "2026-04-09T00:04:41.592Z" },
{ url = "https://files.pythonhosted.org/packages/d2/37/69fadb65112854a596d200f704da94b837817d4dd0f46cb4482dc0309c94/onyx_devtools-0.7.3-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:90dac91b0cdc32eb8861f6e83545009a34c439fd3c41fc7dd499acd0105b660e", size = 4184427, upload-time = "2026-04-09T00:04:41.525Z" },
{ url = "https://files.pythonhosted.org/packages/bd/45/91c829ccb45f1a15e7c9641eccc6dd154adb540e03c7dee2a8f28cea24d0/onyx_devtools-0.7.3-py3-none-win_amd64.whl", hash = "sha256:abc68d70bec06e349481beec4b212de28a1a8b7ed6ef3b41daf7093ee10b44f3", size = 4299935, upload-time = "2026-04-09T00:04:40.262Z" },
{ url = "https://files.pythonhosted.org/packages/cc/30/c5adcb8e3b46b71d8d92c3f9ee0c1d0bc5e2adc9f46e93931f21b36a3ee4/onyx_devtools-0.7.3-py3-none-win_arm64.whl", hash = "sha256:9e4411cadc5e81fabc9ed991402e3b4b40f02800681299c277b2142e5af0dcee", size = 3840228, upload-time = "2026-04-09T00:04:39.708Z" },
]
[[package]]

View File

@@ -1,6 +1,6 @@
import "@opal/components/cards/card/styles.css";
import type { PaddingVariants, RoundingVariants } from "@opal/types";
import { paddingVariants, cardRoundingVariants } from "@opal/shared";
import { cardPaddingVariants, cardRoundingVariants } from "@opal/shared";
import { cn } from "@opal/utils";
// ---------------------------------------------------------------------------
@@ -79,7 +79,7 @@ function Card({
ref,
children,
}: CardProps) {
const padding = paddingVariants[paddingProp];
const padding = cardPaddingVariants[paddingProp];
const rounding = cardRoundingVariants[roundingProp];
return (

View File

@@ -1,6 +1,6 @@
import "@opal/components/cards/select-card/styles.css";
import type { PaddingVariants, RoundingVariants } from "@opal/types";
import { paddingVariants, cardRoundingVariants } from "@opal/shared";
import { cardPaddingVariants, cardRoundingVariants } from "@opal/shared";
import { cn } from "@opal/utils";
import { Interactive, type InteractiveStatefulProps } from "@opal/core";
@@ -78,7 +78,7 @@ function SelectCard({
children,
...statefulProps
}: SelectCardProps) {
const padding = paddingVariants[paddingProp];
const padding = cardPaddingVariants[paddingProp];
const rounding = cardRoundingVariants[roundingProp];
return (

View File

@@ -1,82 +0,0 @@
import React from "react";
import type { Meta, StoryObj } from "@storybook/react";
import { Divider } from "@opal/components/divider/components";
const meta: Meta<typeof Divider> = {
title: "opal/components/Divider",
component: Divider,
tags: ["autodocs"],
};
export default meta;
type Story = StoryObj<typeof Divider>;
export const Plain: Story = {
render: () => <Divider />,
};
export const Vertical: Story = {
render: () => (
<div
style={{ display: "flex", alignItems: "stretch", height: 64, gap: 16 }}
>
<span>Left</span>
<Divider orientation="vertical" />
<span>Right</span>
</div>
),
};
export const NoPadding: Story = {
render: () => <Divider paddingParallel="fit" paddingPerpendicular="fit" />,
};
export const CustomPadding: Story = {
render: () => <Divider paddingParallel="lg" paddingPerpendicular="sm" />,
};
export const VerticalNoPadding: Story = {
render: () => (
<div
style={{ display: "flex", alignItems: "stretch", height: 64, gap: 16 }}
>
<span>Left</span>
<Divider
orientation="vertical"
paddingParallel="fit"
paddingPerpendicular="fit"
/>
<span>Right</span>
</div>
),
};
export const WithTitle: Story = {
render: () => <Divider title="Section" />,
};
export const WithDescription: Story = {
render: () => (
<Divider description="Additional configuration options for power users." />
),
};
export const Foldable: Story = {
render: () => (
<Divider title="Advanced Options" foldable defaultOpen={false}>
<div style={{ padding: "0.5rem 0" }}>
<p>This content is revealed when the divider is expanded.</p>
</div>
</Divider>
),
};
export const FoldableDefaultOpen: Story = {
render: () => (
<Divider title="Details" foldable defaultOpen>
<div style={{ padding: "0.5rem 0" }}>
<p>This starts open by default.</p>
</div>
</Divider>
),
};

View File

@@ -1,77 +0,0 @@
# Divider
**Import:** `import { Divider } from "@opal/components";`
A horizontal rule that optionally displays a title, description, or foldable content section.
## Props
The component uses a discriminated union with four variants. `title` and `description` are mutually exclusive; `foldable` requires `title`.
### Bare divider
A plain line with no title or description.
| Prop | Type | Default | Description |
|---|---|---|---|
| `orientation` | `"horizontal" \| "vertical"` | `"horizontal"` | Direction of the line |
| `paddingParallel` | `PaddingVariants` | `"sm"` | Padding along the line direction (0.5rem) |
| `paddingPerpendicular` | `PaddingVariants` | `"xs"` | Padding perpendicular to the line (0.25rem) |
### Titled divider
| Prop | Type | Default | Description |
|---|---|---|---|
| `title` | `string \| RichStr` | **(required)** | Label to the left of the line |
### Described divider
| Prop | Type | Default | Description |
|---|---|---|---|
| `description` | `string \| RichStr` | **(required)** | Text below the line |
### Foldable divider
| Prop | Type | Default | Description |
|---|---|---|---|
| `title` | `string \| RichStr` | **(required)** | Label to the left of the line |
| `foldable` | `true` | **(required)** | Enables fold/expand behavior |
| `open` | `boolean` | — | Controlled open state |
| `defaultOpen` | `boolean` | `false` | Uncontrolled initial open state |
| `onOpenChange` | `(open: boolean) => void` | — | Callback when toggled |
| `children` | `ReactNode` | — | Content revealed when open |
## Usage Examples
```tsx
import { Divider } from "@opal/components";
// Plain horizontal line
<Divider />
// Vertical line
<Divider orientation="vertical" />
// No padding
<Divider paddingParallel="fit" paddingPerpendicular="fit" />
// Custom padding
<Divider paddingParallel="lg" paddingPerpendicular="sm" />
// With title
<Divider title="Advanced" />
// With description
<Divider description="Additional configuration options." />
// Foldable
<Divider title="Advanced Options" foldable>
<p>Hidden content here</p>
</Divider>
// Controlled foldable
const [open, setOpen] = useState(false);
<Divider title="Details" foldable open={open} onOpenChange={setOpen}>
<p>Controlled content</p>
</Divider>
```

View File

@@ -1,199 +0,0 @@
"use client";
import "@opal/components/divider/styles.css";
import { useState, useCallback } from "react";
import type { PaddingVariants, RichStr } from "@opal/types";
import { Button, Text } from "@opal/components";
import { SvgChevronRight } from "@opal/icons";
import { Interactive } from "@opal/core";
import { cn } from "@opal/utils";
import { paddingXVariants, paddingYVariants } from "@opal/shared";
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
interface DividerSharedProps {
ref?: React.Ref<HTMLDivElement>;
title?: never;
description?: never;
foldable?: false;
orientation?: never;
paddingParallel?: never;
paddingPerpendicular?: never;
open?: never;
defaultOpen?: never;
onOpenChange?: never;
children?: never;
}
/** Plain line — no title, no description. */
type DividerBareProps = Omit<
DividerSharedProps,
"orientation" | "paddingParallel" | "paddingPerpendicular"
> & {
/** Orientation of the line. Default: `"horizontal"`. */
orientation?: "horizontal" | "vertical";
/** Padding along the line direction. Default: `"sm"` (0.5rem). */
paddingParallel?: PaddingVariants;
/** Padding perpendicular to the line. Default: `"xs"` (0.25rem). */
paddingPerpendicular?: PaddingVariants;
};
/** Line with a title to the left. */
type DividerTitledProps = Omit<DividerSharedProps, "title"> & {
title: string | RichStr;
};
/** Line with a description below. */
type DividerDescribedProps = Omit<DividerSharedProps, "description"> & {
/** Description rendered below the divider line. */
description: string | RichStr;
};
/** Foldable — requires title, reveals children. */
type DividerFoldableProps = Omit<
DividerSharedProps,
"title" | "foldable" | "open" | "defaultOpen" | "onOpenChange" | "children"
> & {
/** Title is required when foldable. */
title: string | RichStr;
foldable: true;
/** Controlled open state. */
open?: boolean;
/** Uncontrolled default open state. */
defaultOpen?: boolean;
/** Callback when open state changes. */
onOpenChange?: (open: boolean) => void;
/** Content revealed when open. */
children?: React.ReactNode;
};
type DividerProps =
| DividerBareProps
| DividerTitledProps
| DividerDescribedProps
| DividerFoldableProps;
// ---------------------------------------------------------------------------
// Divider
// ---------------------------------------------------------------------------
function Divider(props: DividerProps) {
if (props.foldable) {
return <FoldableDivider {...props} />;
}
const {
ref,
title,
description,
orientation = "horizontal",
paddingParallel = "sm",
paddingPerpendicular = "xs",
} = props;
if (orientation === "vertical") {
return (
<div
ref={ref}
className={cn(
"opal-divider-vertical",
paddingXVariants[paddingPerpendicular],
paddingYVariants[paddingParallel]
)}
>
<div className="opal-divider-line-vertical" />
</div>
);
}
return (
<div
ref={ref}
className={cn(
"opal-divider",
paddingXVariants[paddingParallel],
paddingYVariants[paddingPerpendicular]
)}
>
<div className="opal-divider-row">
{title && (
<div className="opal-divider-title">
<Text font="secondary-body" color="text-03" nowrap>
{title}
</Text>
</div>
)}
<div className="opal-divider-line" />
</div>
{description && (
<div className="opal-divider-description">
<Text font="secondary-body" color="text-03">
{description}
</Text>
</div>
)}
</div>
);
}
// ---------------------------------------------------------------------------
// FoldableDivider (internal)
// ---------------------------------------------------------------------------
function FoldableDivider({
title,
open: controlledOpen,
defaultOpen = false,
onOpenChange,
children,
}: DividerFoldableProps) {
const [internalOpen, setInternalOpen] = useState(defaultOpen);
const isControlled = controlledOpen !== undefined;
const isOpen = isControlled ? controlledOpen : internalOpen;
const toggle = useCallback(() => {
const next = !isOpen;
if (!isControlled) setInternalOpen(next);
onOpenChange?.(next);
}, [isOpen, isControlled, onOpenChange]);
return (
<>
<Interactive.Stateless
variant="default"
prominence="tertiary"
interaction={isOpen ? "hover" : "rest"}
onClick={toggle}
>
<Interactive.Container
roundingVariant="sm"
heightVariant="fit"
widthVariant="full"
>
<div className="opal-divider">
<div className="opal-divider-row">
<div className="opal-divider-title">
<Text font="secondary-body" color="inherit" nowrap>
{title}
</Text>
</div>
<div className="opal-divider-line" />
<div className="opal-divider-chevron" data-open={isOpen}>
<Button
icon={SvgChevronRight}
size="sm"
prominence="tertiary"
/>
</div>
</div>
</div>
</Interactive.Container>
</Interactive.Stateless>
{isOpen && children}
</>
);
}
export { Divider, type DividerProps };

View File

@@ -1,52 +0,0 @@
/* ---------------------------------------------------------------------------
Divider
A horizontal rule with optional title, foldable chevron, or description.
Padding is controlled via Tailwind classes applied by the component.
--------------------------------------------------------------------------- */
/* ── Horizontal ─────────────────────────────────────────────────────────────── */
.opal-divider {
@apply flex flex-col w-full;
gap: 0.75rem;
}
.opal-divider-row {
@apply flex flex-row items-center w-full;
gap: 2px;
padding: 0px;
}
.opal-divider-title {
@apply flex flex-col justify-center;
padding: 0px 2px;
}
.opal-divider-line {
@apply flex-1 h-px bg-border-01;
}
.opal-divider-description {
padding: 0px 2px;
}
/* ── Vertical orientation ───────────────────────────────────────────────────── */
.opal-divider-vertical {
@apply flex flex-row h-full;
}
.opal-divider-line-vertical {
@apply flex-1 w-px bg-border-01;
}
/* ── Foldable chevron ───────────────────────────────────────────────────────── */
.opal-divider-chevron {
@apply transition-transform duration-200 ease-in-out;
}
.opal-divider-chevron[data-open="true"] {
transform: rotate(90deg);
}

View File

@@ -54,12 +54,6 @@ export {
type TagColor,
} from "@opal/components/tag/components";
/* Divider */
export {
Divider,
type DividerProps,
} from "@opal/components/divider/components";
/* Card */
export {
Card,

View File

@@ -10,7 +10,7 @@ const SvgAnthropic = ({ size, ...props }: IconProps) => (
>
<path
d="M36.1779 9.78003H29.1432L41.9653 42.2095H49L36.1779 9.78003ZM15.8221 9.78003L3 42.2095H10.1844L12.8286 35.4243H26.2495L28.8438 42.2095H36.0282L23.2061 9.78003H15.8221ZM15.1236 29.3874L19.5141 18.0121L23.9046 29.3874H15.1236Z"
fill="var(--text-05)"
fill="currentColor"
/>
</svg>
);

View File

@@ -12,7 +12,7 @@ const SvgAws = ({ size, ...props }: IconProps) => (
<title>AWS</title>
<path
d="M14.6195 23.2934C14.6195 23.9333 14.7233 24.4522 14.8443 24.8326C14.9827 25.2131 15.1556 25.6282 15.3978 26.0778C15.4842 26.2162 15.5188 26.3546 15.5188 26.4756C15.5188 26.6486 15.4151 26.8215 15.1902 26.9945L14.1007 27.7208C13.945 27.8246 13.7894 27.8765 13.651 27.8765C13.4781 27.8765 13.3051 27.79 13.1322 27.6344C12.89 27.3749 12.6825 27.0982 12.5096 26.8215C12.3366 26.5275 12.1637 26.1989 11.9734 25.8011C10.6245 27.3922 8.92958 28.1878 6.88881 28.1878C5.43606 28.1878 4.27731 27.7727 3.42988 26.9426C2.58244 26.1124 2.15007 25.0056 2.15007 23.622C2.15007 22.152 2.66891 20.9586 3.72389 20.0593C4.77886 19.16 6.17973 18.7103 7.96108 18.7103C8.54909 18.7103 9.15441 18.7622 9.79431 18.8487C10.4342 18.9352 11.0914 19.0735 11.7832 19.2292V17.9667C11.7832 16.6523 11.5065 15.7356 10.9703 15.1995C10.4169 14.6634 9.483 14.404 8.15132 14.404C7.546 14.404 6.9234 14.4731 6.28349 14.6288C5.64359 14.7844 5.02098 14.9747 4.41567 15.2168C4.13896 15.3379 3.93142 15.407 3.81036 15.4416C3.6893 15.4762 3.60282 15.4935 3.53364 15.4935C3.29152 15.4935 3.17046 15.3206 3.17046 14.9574V14.1099C3.17046 13.8332 3.20505 13.6257 3.29152 13.5046C3.37799 13.3836 3.53364 13.2625 3.77577 13.1414C4.38108 12.8301 5.10746 12.5707 5.9549 12.3632C6.80233 12.1384 7.70165 12.0346 8.65286 12.0346C10.7109 12.0346 12.2156 12.5015 13.1841 13.4355C14.1353 14.3694 14.6195 15.7875 14.6195 17.6899V23.2934ZM7.63248 25.9222C8.2032 25.9222 8.79122 25.8184 9.41383 25.6109C10.0364 25.4034 10.5899 25.0229 11.0568 24.504C11.3335 24.1754 11.5411 23.8122 11.6448 23.3972C11.7486 22.9821 11.8178 22.4806 11.8178 21.8925V21.1662C11.3162 21.0451 10.7801 20.9413 10.2267 20.8722C9.67325 20.803 9.13711 20.7684 8.60098 20.7684C7.44224 20.7684 6.5948 20.9932 6.02407 21.4602C5.45335 21.9271 5.17664 22.5843 5.17664 23.4491C5.17664 24.2619 5.38417 24.8672 5.81654 25.2823C6.23161 25.7147 6.83692 25.9222 7.63248 25.9222ZM21.5201 27.79C21.2088 27.79 21.0012 27.7381 20.8629 27.6171C20.7245 27.5133 20.6035 27.2712 20.4997 
26.9426L16.4355 13.5738C16.3317 13.2279 16.2798 13.0031 16.2798 12.882C16.2798 12.6053 16.4182 12.4497 16.6949 12.4497H18.3897C18.7183 12.4497 18.9432 12.5015 19.0642 12.6226C19.2026 12.7264 19.3064 12.9685 19.4101 13.2971L22.3156 24.7462L25.0136 13.2971C25.1001 12.9512 25.2038 12.7264 25.3422 12.6226C25.4806 12.5188 25.7227 12.4497 26.034 12.4497H27.4176C27.7462 12.4497 27.971 12.5015 28.1093 12.6226C28.2477 12.7264 28.3688 12.9685 28.4379 13.2971L31.1705 24.8845L34.1625 13.2971C34.2662 12.9512 34.3873 12.7264 34.5084 12.6226C34.6467 12.5188 34.8716 12.4497 35.1829 12.4497H36.7913C37.068 12.4497 37.2236 12.588 37.2236 12.882C37.2236 12.9685 37.2063 13.055 37.189 13.1587C37.1717 13.2625 37.1372 13.4009 37.068 13.5911L32.9 26.9599C32.7962 27.3058 32.6751 27.5306 32.5368 27.6344C32.3984 27.7381 32.1736 27.8073 31.8796 27.8073H30.3922C30.0636 27.8073 29.8388 27.7554 29.7004 27.6344C29.5621 27.5133 29.441 27.2885 29.3719 26.9426L26.6912 15.7875L24.0278 26.9253C23.9413 27.2712 23.8376 27.496 23.6992 27.6171C23.5609 27.7381 23.3187 27.79 23.0074 27.79H21.5201ZM43.7437 28.257C42.8444 28.257 41.9451 28.1532 41.0803 27.9457C40.2156 27.7381 39.5411 27.5133 39.0914 27.2539C38.8147 27.0982 38.6245 26.9253 38.5553 26.7696C38.4861 26.614 38.4515 26.441 38.4515 26.2854V25.4034C38.4515 25.0402 38.5899 24.8672 38.8493 24.8672C38.9531 24.8672 39.0569 24.8845 39.1606 24.9191C39.2644 24.9537 39.42 25.0229 39.593 25.0921C40.181 25.3515 40.8209 25.559 41.4954 25.6974C42.1872 25.8357 42.8617 25.9049 43.5535 25.9049C44.643 25.9049 45.4905 25.7147 46.0785 25.3342C46.6665 24.9537 46.9778 24.4003 46.9778 23.6912C46.9778 23.2069 46.8222 22.8092 46.5109 22.4806C46.1996 22.152 45.6115 21.858 44.7641 21.5812L42.2564 20.803C40.9939 20.4052 40.0599 19.8172 39.4892 19.0389C38.9185 18.278 38.6245 17.4305 38.6245 16.5312C38.6245 15.8048 38.7801 15.1649 39.0914 14.6115C39.4027 14.0581 39.8178 13.5738 40.3367 13.1933C40.8555 12.7956 41.4435 12.5015 42.1353 12.294C42.8271 12.0865 43.5535 12 44.3144 
12C44.6949 12 45.0927 12.0173 45.4732 12.0692C45.871 12.1211 46.2341 12.1902 46.5973 12.2594C46.9432 12.3459 47.2718 12.4324 47.5831 12.5361C47.8944 12.6399 48.1366 12.7437 48.3095 12.8474C48.5516 12.9858 48.7246 13.1242 48.8283 13.2798C48.9321 13.4182 48.984 13.6084 48.984 13.8505V14.6634C48.984 15.0266 48.8456 15.2168 48.5862 15.2168C48.4479 15.2168 48.223 15.1476 47.929 15.0093C46.9432 14.5596 45.8364 14.3348 44.6084 14.3348C43.6227 14.3348 42.8444 14.4904 42.3083 14.819C41.7721 15.1476 41.4954 15.6492 41.4954 16.3583C41.4954 16.8425 41.6684 17.2576 42.0142 17.5862C42.3601 17.9148 43 18.2434 43.9167 18.5374L46.3725 19.3156C47.6177 19.7134 48.517 20.2668 49.0532 20.9759C49.5893 21.685 49.8487 22.4979 49.8487 23.3972C49.8487 24.1408 49.6931 24.8153 49.3991 25.4034C49.0878 25.9914 48.6727 26.5102 48.1366 26.9253C47.6004 27.3577 46.9605 27.669 46.2168 27.8938C45.4386 28.1359 44.6257 28.257 43.7437 28.257Z"
className="fill-[#252F3E] dark:fill-text-05"
fill="#252F3E"
/>
<path
fillRule="evenodd"

View File

@@ -1,25 +0,0 @@
import type { IconProps } from "@opal/types";
const SvgCohere = ({ size, ...props }: IconProps) => (
<svg
width={size}
height={size}
viewBox="0 0 52 52"
fill="none"
xmlns="http://www.w3.org/2000/svg"
{...props}
>
<path
d="M18.256 30.224C19.4293 30.224 21.776 30.1653 25.0613 28.816C28.8747 27.232 36.384 24.416 41.84 21.4827C45.6533 19.4293 47.296 16.7307 47.296 13.0933C47.296 8.10667 43.248 4 38.2027 4H17.0827C9.86667 4 4 9.86667 4 17.0827C4 24.2987 9.51467 30.224 18.256 30.224Z"
fill="#39594D"
/>
<path
d="M21.8347 39.2C21.8347 35.68 23.9467 32.4533 27.232 31.104L33.8613 28.3467C40.608 25.5893 48 30.5173 48 37.792C48 43.424 43.424 48 37.792 48H30.576C25.7653 48 21.8347 44.0693 21.8347 39.2Z"
fill="#D18EE2"
/>
<path
d="M11.568 31.9253C7.40267 31.9253 4 35.328 4 39.4933V40.4907C4 44.5973 7.40267 48 11.568 48C15.7333 48 19.136 44.5973 19.136 40.432V39.4347C19.0773 35.328 15.7333 31.9253 11.568 31.9253Z"
fill="#FF7759"
/>
</svg>
);
export default SvgCohere;

View File

@@ -3,7 +3,6 @@ export { default as SvgAws } from "@opal/logos/aws";
export { default as SvgAzure } from "@opal/logos/azure";
export { default as SvgBifrost } from "@opal/logos/bifrost";
export { default as SvgClaude } from "@opal/logos/claude";
export { default as SvgCohere } from "@opal/logos/cohere";
export { default as SvgDeepseek } from "@opal/logos/deepseek";
export { default as SvgDiscord } from "@opal/logos/discord";
export { default as SvgGemini } from "@opal/logos/gemini";
@@ -12,7 +11,6 @@ export { default as SvgLitellm } from "@opal/logos/litellm";
export { default as SvgLmStudio } from "@opal/logos/lm-studio";
export { default as SvgMicrosoft } from "@opal/logos/microsoft";
export { default as SvgMistral } from "@opal/logos/mistral";
export { default as SvgNomic } from "@opal/logos/nomic";
export { default as SvgOllama } from "@opal/logos/ollama";
export { default as SvgOnyxLogo } from "@opal/logos/onyx-logo";
export { default as SvgOnyxLogoTyped } from "@opal/logos/onyx-logo-typed";
@@ -21,4 +19,3 @@ export { default as SvgOpenai } from "@opal/logos/openai";
export { default as SvgOpenrouter } from "@opal/logos/openrouter";
export { default as SvgQwen } from "@opal/logos/qwen";
export { default as SvgSlack } from "@opal/logos/slack";
export { default as SvgVoyage } from "@opal/logos/voyage";

View File

@@ -1,21 +0,0 @@
import type { IconProps } from "@opal/types";
const SvgNomic = ({ size, ...props }: IconProps) => (
<svg
width={size}
height={size}
viewBox="0 0 52 52"
fill="none"
xmlns="http://www.w3.org/2000/svg"
{...props}
>
<path
d="M35.858 6.31995H46V45.6709H35.6146C32.0852 36.8676 25.1481 27.7804 15.7363 24.8189V6.31995H25.4726C26.5274 12.7296 30.1618 18.3744 35.858 21.6546V6.31995Z"
fill="var(--text-05)"
/>
<path
d="M15.7363 24.8189V45.6709H6L6 30.0927C9.05968 27.6167 11.9635 25.8737 15.7363 24.8189Z"
fill="var(--text-05)"
/>
</svg>
);
export default SvgNomic;

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +1,7 @@
import React from "react";
import type { IconProps } from "@opal/types";
const SvgOpenai = ({ size, ...props }: IconProps) => {
const SvgOpenAI = ({ size, ...props }: IconProps) => {
const clipId = React.useId();
return (
<svg
@@ -15,16 +15,16 @@ const SvgOpenai = ({ size, ...props }: IconProps) => {
<g clipPath={`url(#${clipId})`}>
<path
d="M6.27989 5.99136V4.58828C6.27989 4.4701 6.32383 4.38143 6.42625 4.32242L9.22206 2.69783C9.60266 2.4763 10.0564 2.37296 10.5247 2.37296C12.2813 2.37296 13.3937 3.74654 13.3937 5.20864C13.3937 5.31199 13.3937 5.43016 13.379 5.54833L10.4808 3.83506C10.3052 3.73172 10.1295 3.73172 9.95386 3.83506L6.27989 5.99136ZM12.8082 11.4561V8.10334C12.8082 7.89651 12.7203 7.74883 12.5447 7.64548L8.87071 5.48918L10.071 4.79498C10.1734 4.73597 10.2613 4.73597 10.3637 4.79498L13.1595 6.41959C13.9647 6.89226 14.5061 7.89651 14.5061 8.87124C14.5061 9.99365 13.8476 11.0277 12.8082 11.4561ZM5.41629 8.50218L4.21603 7.7933C4.11361 7.73429 4.06967 7.64563 4.06967 7.52745V4.27824C4.06967 2.69797 5.26993 1.50157 6.89473 1.50157C7.50955 1.50157 8.08029 1.70841 8.56345 2.07761L5.67991 3.76136C5.5043 3.86471 5.41643 4.01239 5.41643 4.21923L5.41629 8.50218ZM7.99984 10.0086L6.27988 9.03389V6.96624L7.99984 5.99151L9.71963 6.96624V9.03389L7.99984 10.0086ZM9.10494 14.4985C8.49012 14.4985 7.91938 14.2917 7.43622 13.9226L10.3197 12.2387C10.4953 12.1354 10.5832 11.9878 10.5832 11.7809V7.4978L11.7982 8.20668C11.9006 8.2657 11.9445 8.35436 11.9445 8.47254V11.7218C11.9445 13.302 10.7296 14.4985 9.10494 14.4985ZM5.63583 11.205L2.84002 9.58041C2.03489 9.10771 1.4934 8.10348 1.4934 7.12875C1.4934 5.99151 2.16672 4.97244 3.20591 4.5441V7.91148C3.20591 8.11831 3.29379 8.26599 3.46939 8.36934L7.12882 10.5108L5.92856 11.205C5.82613 11.264 5.73825 11.264 5.63583 11.205ZM5.47491 13.6272C3.82088 13.6272 2.60592 12.3717 2.60592 10.821C2.60592 10.7028 2.62061 10.5846 2.63517 10.4665L5.51871 12.1502C5.69432 12.2535 5.87006 12.2535 6.04567 12.1502L9.71964 10.0088V11.4119C9.71964 11.53 9.67571 11.6186 9.57328 11.6777L6.77746 13.3023C6.39688 13.5238 5.94323 13.6272 5.47491 13.6272ZM9.10494 15.3846C10.8761 15.3846 12.3544 14.1145 12.6912 12.4307C14.3305 12.0024 15.3845 10.4516 15.3845 8.87139C15.3845 7.8375 14.9453 6.83326 14.1549 6.10955C14.2281 5.79937 14.2721 5.48918 14.2721 5.17914C14.2721 3.06718 12.5741 1.48677 
10.6126 1.48677C10.2175 1.48677 9.83689 1.54578 9.4563 1.67878C8.79753 1.02891 7.88999 0.615387 6.89473 0.615387C5.12357 0.615387 3.64528 1.88548 3.30848 3.56923C1.66914 3.99756 0.615234 5.54834 0.615234 7.1286C0.615234 8.1625 1.05431 9.16673 1.84474 9.89044C1.77155 10.2006 1.72762 10.5108 1.72762 10.8209C1.72762 12.9328 3.42558 14.5132 5.38704 14.5132C5.78218 14.5132 6.16278 14.4542 6.54336 14.3213C7.20198 14.9711 8.10953 15.3846 9.10494 15.3846Z"
fill="var(--text-05)"
fill="currentColor"
/>
</g>
<defs>
<clipPath id={clipId}>
<rect width="16" height="16" fill="var(--text-05)" />
<rect width="16" height="16" fill="white" />
</clipPath>
</defs>
</svg>
);
};
export default SvgOpenai;
export default SvgOpenAI;

View File

@@ -14,7 +14,7 @@ const SvgOpenrouter = ({ size, ...props }: IconProps) => (
fillRule="evenodd"
clipRule="evenodd"
d="M33.6 0L48 8.19239V8.36602L33.6 16.4V12.2457L31.8202 12.1858C29.7043 12.1299 28.6014 12.1898 27.2887 12.4053C25.1628 12.7546 23.2168 13.5569 21.001 15.1035L16.6733 18.1071C16.1059 18.4962 15.6843 18.7776 15.3147 19.0151L14.2857 19.6577L13.4925 20.1247L14.2617 20.5837L15.3207 21.2583C16.2717 21.8849 17.6583 22.8469 20.7173 24.9823C22.9351 26.529 24.8791 27.3312 27.005 27.6805L27.6044 27.7703C28.991 27.9519 30.7029 27.9579 33.6 27.8362V23.6L48 31.7198V31.8934L33.6 40V36.284L31.9041 36.3279C29.1349 36.4117 27.6344 36.3319 25.6344 36.0046C22.2498 35.4458 19.1209 34.1566 15.8821 31.8954L11.5704 28.9019C11.0745 28.5603 10.5715 28.2289 10.0619 27.908L9.12887 27.3492C8.62495 27.0592 8.11878 26.7731 7.61039 26.491C5.81019 25.4912 1.12488 24.2658 0 24.2658V15.836C1.12687 15.822 6.09391 14.5946 7.89011 13.5928L9.92008 12.4353L10.7952 11.8884C11.6503 11.3296 12.9371 10.4396 16.1618 8.19039C19.4006 5.92925 22.5275 4.63803 25.9141 4.08123C28.2158 3.70204 29.9237 3.65614 33.6 3.80582V0Z"
fill="var(--text-05)"
fill="currentColor"
/>
</svg>
);

View File

@@ -1,17 +0,0 @@
import type { IconProps } from "@opal/types";
const SvgVoyage = ({ size, ...props }: IconProps) => (
<svg
width={size}
height={size}
viewBox="0 0 52 52"
fill="none"
xmlns="http://www.w3.org/2000/svg"
{...props}
>
<path
d="M14.1848 8V8.11C14.1408 8.24212 14.1139 8.37935 14.1048 8.51833C14.0865 8.70167 14.0782 8.865 14.0782 9.01C14.0782 9.575 14.1498 10.2017 14.2915 10.8933C14.4532 11.5683 14.7482 12.4133 15.1765 13.4333L27.0515 40.71L38.5248 13.65C38.7932 12.9767 39.0798 12.24 39.3832 11.4383C39.6865 10.6383 39.8382 9.82833 39.8382 9.00833C39.8444 8.70074 39.7901 8.39492 39.6782 8.10833V8H45.1732V8.11C44.8332 8.455 44.4232 9.07333 43.9398 9.96667C43.4565 10.8583 42.9298 11.9583 42.3582 13.27L26.9982 48H24.8532L10.2982 14.6083C9.95818 13.825 9.60151 13.07 9.22484 12.3417C8.86818 11.6133 8.52818 10.9583 8.20818 10.375C7.88484 9.775 7.59984 9.275 7.34984 8.875C7.19246 8.61074 7.02226 8.35432 6.83984 8.10667V8H14.1848Z"
className="fill-[#012E33] dark:fill-text-05"
/>
</svg>
);
export default SvgVoyage;

View File

@@ -100,7 +100,7 @@ const heightVariants: Record<ExtremaSizeVariants, string> = {
// - SelectCard (paddingVariant, roundingVariant)
// ---------------------------------------------------------------------------
const paddingVariants: Record<PaddingVariants, string> = {
const cardPaddingVariants: Record<PaddingVariants, string> = {
lg: "p-6",
md: "p-4",
sm: "p-2",
@@ -109,24 +109,6 @@ const paddingVariants: Record<PaddingVariants, string> = {
fit: "p-0",
};
const paddingXVariants: Record<PaddingVariants, string> = {
lg: "px-6",
md: "px-4",
sm: "px-2",
xs: "px-1",
"2xs": "px-0.5",
fit: "px-0",
};
const paddingYVariants: Record<PaddingVariants, string> = {
lg: "py-6",
md: "py-4",
sm: "py-2",
xs: "py-1",
"2xs": "py-0.5",
fit: "py-0",
};
const cardRoundingVariants: Record<RoundingVariants, string> = {
lg: "rounded-16",
md: "rounded-12",
@@ -140,9 +122,7 @@ export {
type OverridableExtremaSizeVariants,
type SizeVariants,
containerSizeVariants,
paddingVariants,
paddingXVariants,
paddingYVariants,
cardPaddingVariants,
cardRoundingVariants,
widthVariants,
heightVariants,

8
web/package-lock.json generated
View File

@@ -56,7 +56,7 @@
"js-cookie": "^3.0.5",
"katex": "^0.16.38",
"linguist-languages": "^9.3.1",
"lodash": "^4.18.1",
"lodash": "^4.17.23",
"lowlight": "^3.3.0",
"lucide-react": "^0.454.0",
"mdast-util-find-and-replace": "^3.0.1",
@@ -12936,9 +12936,9 @@
}
},
"node_modules/lodash": {
"version": "4.18.1",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz",
"integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==",
"version": "4.17.23",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
"integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
"license": "MIT"
},
"node_modules/lodash-es": {

View File

@@ -74,7 +74,7 @@
"js-cookie": "^3.0.5",
"katex": "^0.16.38",
"linguist-languages": "^9.3.1",
"lodash": "^4.18.1",
"lodash": "^4.17.23",
"lowlight": "^3.3.0",
"lucide-react": "^0.454.0",
"mdast-util-find-and-replace": "^3.0.1",

View File

@@ -1,7 +1,6 @@
import Modal from "@/refresh-components/Modal";
import { Button } from "@opal/components";
import { CloudEmbeddingModel } from "../../../../components/embedding/interfaces";
import { markdown } from "@opal/utils";
import { SvgCheck } from "@opal/icons";
export interface AlreadyPickedModalProps {
@@ -18,7 +17,7 @@ export default function AlreadyPickedModal({
<Modal.Content width="sm" height="sm">
<Modal.Header
icon={SvgCheck}
title={markdown(`*${model.model_name}* already chosen`)}
title={`${model.model_name} already chosen`}
description="You can select a different one if you want!"
onClose={onClose}
/>

View File

@@ -12,7 +12,6 @@ import {
getFormattedProviderName,
} from "@/components/embedding/interfaces";
import { EMBEDDING_PROVIDERS_ADMIN_URL } from "@/lib/llmConfig/constants";
import { markdown } from "@opal/utils";
import { mutate } from "swr";
import { SWR_KEYS } from "@/lib/swr-keys";
import { testEmbedding } from "@/app/admin/embeddings/pages/utils";
@@ -173,11 +172,9 @@ export default function ChangeCredentialsModal({
<Modal.Content>
<Modal.Header
icon={SvgSettings}
title={markdown(
`Modify your *${getFormattedProviderName(
provider.provider_type
)}* ${isProxy ? "configuration" : "key"}`
)}
title={`Modify your ${getFormattedProviderName(
provider.provider_type
)} ${isProxy ? "Configuration" : "key"}`}
onClose={onCancel}
/>
<Modal.Body>

View File

@@ -7,7 +7,6 @@ import {
getFormattedProviderName,
} from "../../../../components/embedding/interfaces";
import { SvgTrash } from "@opal/icons";
import { markdown } from "@opal/utils";
export interface DeleteCredentialsModalProps {
modelProvider: CloudEmbeddingProvider;
@@ -25,11 +24,9 @@ export default function DeleteCredentialsModal({
<Modal.Content width="sm" height="sm">
<Modal.Header
icon={SvgTrash}
title={markdown(
`Delete *${getFormattedProviderName(
modelProvider.provider_type
)}* credentials?`
)}
title={`Delete ${getFormattedProviderName(
modelProvider.provider_type
)} Credentials?`}
onClose={onCancel}
/>
<Modal.Body>

View File

@@ -12,7 +12,6 @@ import {
} from "@/components/embedding/interfaces";
import { EMBEDDING_PROVIDERS_ADMIN_URL } from "@/lib/llmConfig/constants";
import Modal from "@/refresh-components/Modal";
import { markdown } from "@opal/utils";
import { SvgSettings } from "@opal/icons";
import SimpleLoader from "@/refresh-components/loaders/SimpleLoader";
export interface ProviderCreationModalProps {
@@ -186,11 +185,9 @@ export default function ProviderCreationModal({
<Modal.Content width="sm" height="sm">
<Modal.Header
icon={SvgSettings}
title={markdown(
`Configure *${getFormattedProviderName(
selectedProvider.provider_type
)}*`
)}
title={`Configure ${getFormattedProviderName(
selectedProvider.provider_type
)}`}
onClose={onCancel}
/>
<Modal.Body>

View File

@@ -2,7 +2,6 @@ import Modal from "@/refresh-components/Modal";
import { Button } from "@opal/components";
import Text from "@/refresh-components/texts/Text";
import { CloudEmbeddingModel } from "@/components/embedding/interfaces";
import { markdown } from "@opal/utils";
import { SvgServer } from "@opal/icons";
export interface SelectModelModalProps {
@@ -21,7 +20,7 @@ export default function SelectModelModal({
<Modal.Content width="sm" height="sm">
<Modal.Header
icon={SvgServer}
title={markdown(`Select *${model.model_name}*`)}
title={`Select ${model.model_name}`}
onClose={onCancel}
/>
<Modal.Body>

View File

@@ -1,7 +1,6 @@
"use client";
import { toast } from "@/hooks/useToast";
import { markdown } from "@opal/utils";
import EmbeddingModelSelection from "../EmbeddingModelSelectionForm";
import { useCallback, useEffect, useMemo, useState, useRef } from "react";
@@ -539,9 +538,7 @@ export default function EmbeddingForm() {
<Modal.Content>
<Modal.Header
icon={SvgAlertTriangle}
title={markdown(
`Are you sure you want to select *${selectedProvider.model_name}*?`
)}
title={`Are you sure you want to select ${selectedProvider.model_name}?`}
onClose={() => setShowPoorModel(false)}
/>
<Modal.Body>

View File

@@ -61,7 +61,7 @@ function QueryHistoryTableRow({
key={chatSessionMinimal.id}
className="hover:bg-accent-background cursor-pointer relative select-none"
>
<TableCell className="max-w-xs">
<TableCell>
<Text className="whitespace-normal line-clamp-5">
{chatSessionMinimal.first_user_message ||
chatSessionMinimal.name ||

View File

@@ -137,7 +137,7 @@ function DeleteConfirmModal({ hook, onDelete }: DeleteConfirmModalProps) {
<Modal.Header
// TODO(@raunakab): replace the colour of this SVG with red.
icon={SvgTrash}
title={markdown(`Delete *${hook.name}*`)}
title={`Delete ${hook.name}`}
onClose={onClose}
/>
<Modal.Body>

View File

@@ -5,7 +5,6 @@ import { usePathname, useRouter } from "next/navigation";
import * as InputLayouts from "@/layouts/input-layouts";
import { Section, AttachmentItemLayout } from "@/layouts/general-layouts";
import { Content, ContentAction } from "@opal/layouts";
import { markdown } from "@opal/utils";
import { Formik, Form } from "formik";
import * as Yup from "yup";
import {
@@ -1557,7 +1556,7 @@ function FederatedConnectorCard({
{showDisconnectConfirmation && (
<ConfirmationModalLayout
icon={SvgUnplug}
title={markdown(`Disconnect *${sourceMetadata.displayName}*`)}
title={`Disconnect ${sourceMetadata.displayName}`}
onClose={() => setShowDisconnectConfirmation(false)}
submit={
<Button

View File

@@ -4,7 +4,7 @@ import { useCallback, useState } from "react";
import { Button } from "@opal/components";
// TODO(@raunakab): migrate to Opal LineItemButton once it supports danger variant
import LineItem from "@/refresh-components/buttons/LineItem";
import { cn, markdown } from "@opal/utils";
import { cn } from "@opal/utils";
import {
SvgMoreHorizontal,
SvgEdit,
@@ -341,7 +341,7 @@ export default function AgentRowActions({
{unlistOpen && (
<ConfirmationModalLayout
icon={SvgEyeOff}
title={markdown(`Unlist *${agent.name}*`)}
title={`Unlist ${agent.name}`}
onClose={isSubmitting ? undefined : () => setUnlistOpen(false)}
submit={
<Button

View File

@@ -347,7 +347,7 @@ export default function ImageGenerationContent() {
{disconnectProvider && (
<ConfirmationModalLayout
icon={SvgUnplug}
title={markdown(`Disconnect *${disconnectProvider.title}*`)}
title={`Disconnect ${disconnectProvider.title}`}
description="This will remove the stored credentials for this provider."
onClose={() => {
setDisconnectProvider(null);

View File

@@ -201,7 +201,7 @@ function VoiceDisconnectModal({
return (
<ConfirmationModalLayout
icon={SvgUnplug}
title={markdown(`Disconnect *${disconnectTarget.providerLabel}*`)}
title={`Disconnect ${disconnectTarget.providerLabel}`}
description="Voice models"
onClose={onClose}
submit={

View File

@@ -9,7 +9,6 @@ import Modal from "@/refresh-components/Modal";
import { Button } from "@opal/components";
import { SvgArrowExchange } from "@opal/icons";
import { markdown } from "@opal/utils";
import { SvgOnyxLogo } from "@opal/logos";
import type { IconProps } from "@opal/types";
@@ -82,7 +81,7 @@ export const WebProviderSetupModal = memo(
<Modal.Content width="sm" preventAccidentalClose>
<Modal.Header
icon={LogoArrangement}
title={markdown(`Set up *${providerLabel}*`)}
title={`Set up ${providerLabel}`}
description={description}
onClose={onClose}
/>

View File

@@ -7,7 +7,6 @@ import Text from "@/refresh-components/texts/Text";
import { Section } from "@/layouts/general-layouts";
import * as SettingsLayouts from "@/layouts/settings-layouts";
import { Content, Card } from "@opal/layouts";
import { markdown } from "@opal/utils";
import useSWR from "swr";
import { errorHandlingFetcher, FetchError } from "@/lib/fetcher";
import { SWR_KEYS } from "@/lib/swr-keys";
@@ -147,7 +146,7 @@ function WebSearchDisconnectModal({
return (
<ConfirmationModalLayout
icon={SvgUnplug}
title={markdown(`Disconnect *${disconnectTarget.label}*`)}
title={`Disconnect ${disconnectTarget.label}`}
description="This will remove the stored credentials for this provider."
onClose={onClose}
submit={

View File

@@ -5,7 +5,6 @@ import Modal from "@/refresh-components/Modal";
import { Button } from "@opal/components";
import Text from "@/refresh-components/texts/Text";
import { cn } from "@/lib/utils";
import { markdown } from "@opal/utils";
import { SvgUnplug } from "@opal/icons";
interface DisconnectEntityModalProps {
isOpen: boolean;
@@ -52,7 +51,7 @@ export default function DisconnectEntityModal({
icon={({ className }) => (
<SvgUnplug className={cn(className, "stroke-action-danger-05")} />
)}
title={markdown(`Disconnect *${name}*`)}
title={`Disconnect ${name}`}
onClose={onClose}
/>

View File

@@ -10,7 +10,6 @@ import InputSelect from "@/refresh-components/inputs/InputSelect";
import InputTypeIn from "@/refresh-components/inputs/InputTypeIn";
import PasswordInputTypeIn from "@/refresh-components/inputs/PasswordInputTypeIn";
import { Button } from "@opal/components";
import { markdown } from "@opal/utils";
import CopyIconButton from "@/refresh-components/buttons/CopyIconButton";
import Text from "@/refresh-components/texts/Text";
import { Formik, Form } from "formik";
@@ -318,11 +317,7 @@ export default function MCPAuthenticationModal({
<Modal.Content width="sm" height="lg" skipOverlay={skipOverlay}>
<Modal.Header
icon={SvgArrowExchange}
title={
mcpServer
? markdown(`Authenticate *${mcpServer.name}*`)
: "Authenticate MCP Server"
}
title={`Authenticate ${mcpServer?.name || "MCP Server"}`}
description="Authenticate your connection to start using the MCP server."
/>

View File

@@ -4,7 +4,6 @@ import React, { useEffect, useRef, useState } from "react";
import { Formik, Form, useFormikContext } from "formik";
import type { FormikConfig } from "formik";
import { cn } from "@/lib/utils";
import { markdown } from "@opal/utils";
import { Interactive } from "@opal/core";
import { usePaidEnterpriseFeaturesEnabled } from "@/components/settings/usePaidEnterpriseFeaturesEnabled";
import { useAgents } from "@/hooks/useAgents";
@@ -721,7 +720,7 @@ function ModalWrapperInner({
} = getProvider(providerName);
const title = llmProvider
? markdown(`Configure *${llmProvider.name}*`)
? `Configure "${llmProvider.name}"`
: `Set up ${providerProductName}`;
const description =
descriptionOverride ??