Compare commits

..

58 Commits

Author SHA1 Message Date
Jamison Lahman
060db5414d chore(fe): opal button implements responsiveHideText 2026-02-25 11:55:09 -08:00
Jamison Lahman
aef009cc97 chore(fe): foldable buttons display text via tooltip when disabled (#8735) 2026-02-25 18:39:53 +00:00
Evan Lohn
18d1ea1770 fix: sharepoint driveItem perm sync (#8698) 2026-02-25 18:29:26 +00:00
Bo-Onyx
f336ad00f4 fix(user invitation): failed but no warning. (#8731)
Co-authored-by: Bo Yang <boyang@Bos-MacBook-Pro.local>
2026-02-25 17:23:39 +00:00
SubashMohan
0558e687d9 fix: persist onboarding dismissal in localStorage with user-specific keys (#8674) 2026-02-25 06:22:17 +00:00
roshan
784a99e24a updated demo data (#8748) 2026-02-24 19:59:46 -08:00
Justin Tahara
da1f5a11f4 chore(cherry-pick): Alerting on Failed Cherry-Picks (#8744) 2026-02-25 02:09:19 +00:00
Justin Tahara
5633805890 chore(devtools): Upgrade ods from 0.6.0 -> 0.6.1 (#8743) 2026-02-25 02:01:20 +00:00
Danelegend
0817b45ae1 feat: Get code interpreter config route (#8739) 2026-02-25 01:49:30 +00:00
Justin Tahara
af0e4bdebc fix(slack): Cleaning up URL Links (#8569) 2026-02-25 01:42:12 +00:00
Justin Tahara
4cd2320732 chore(cherry-pick): Add Github Label for PRs (#8736) 2026-02-25 00:46:12 +00:00
Danelegend
90a361f0e1 feat: code interpreter routes (#8670) 2026-02-24 16:27:10 -08:00
Justin Tahara
194efde97b chore(llm): Scaffolding for Nightly LLM Tests (#8704) 2026-02-25 00:06:24 +00:00
Danelegend
d922a42262 feat: code interpreter docker default deploy (#8672) 2026-02-24 23:51:19 +00:00
Danelegend
f00c3a486e feat: default deploy code interpreter - helm & bump version 0.3.0 (#8685) 2026-02-24 23:40:46 +00:00
Danelegend
192080c9e4 feat: default deploy code interpreter - restart_script (#8686) 2026-02-24 23:40:36 +00:00
Justin Tahara
c5787dc073 chore(image): Update test to be for Dall E 3 instead of 2 (#8732) 2026-02-24 22:53:31 +00:00
Justin Tahara
d424d6462c fix(sanitization): Centralizing DB Filters (#8730) 2026-02-24 22:28:25 +00:00
Jamison Lahman
ecea86deb6 chore(fe): only left input items flex (#8734) 2026-02-24 22:25:04 +00:00
Jamison Lahman
a5c1f50a8a chore(fe): update disabled "select" button color (#8733) 2026-02-24 22:03:52 +00:00
roshan
4a04cfd486 feat(craft): make output/ files downloadable from Artifacts tab (#8721)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2026-02-24 21:49:59 +00:00
Nikolas Garza
f22e9628db feat(scim): add additional entra id fields to ScimUserMapping (#8728) 2026-02-24 20:23:21 +00:00
Jamison Lahman
255ba10af6 chore(chat): consolidate chat message whitespacing style (#8696) 2026-02-24 20:02:28 +00:00
Justin Tahara
563202a080 feat(image): support Azure historical image context edits (#8726) 2026-02-24 19:21:30 +00:00
Evan Lohn
1062dc0743 fix: graph client env (#8727) 2026-02-24 18:46:49 +00:00
Justin Tahara
0826348568 feat(image): support OpenAI historical image context edits (#8725) 2026-02-24 18:45:56 +00:00
Justin Tahara
375079136d chore(cherry-pick): Assign merged-by user on beta cherry-pick PR (#8723) 2026-02-24 18:27:48 +00:00
Jamison Lahman
82aad5e253 fix(welcome): add back agent description (#8716) 2026-02-24 17:27:23 +00:00
Jamison Lahman
beb1c49c69 fix(fe): inline code-blocks respect header font-size (#8691) 2026-02-24 17:03:21 +00:00
Jamison Lahman
c4556515be fix(fe): rm non-admin-confirmation max-width (#8693) 2026-02-24 17:03:05 +00:00
SubashMohan
a4387f230b fix(popover): prevent viewport overflow with dynamic max-height and collision padding (#8675) 2026-02-24 10:27:36 +00:00
Evan Lohn
d91e452658 chore: version bumps for client libs (#8720) 2026-02-24 08:13:37 +00:00
Danelegend
dd274f8667 feat: code interpreter supports streaming (#8663) 2026-02-24 06:07:36 +00:00
roshan
2c82f0da16 fix(craft): delete S3 snapshot files when deleting a craft (#8718)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-24 05:58:29 +00:00
Raunak Bhagat
26101636f2 refactor: add new ContentAction component (#8695) 2026-02-24 05:13:18 +00:00
roshan
5e2c0c6cf4 fix(nrf): hide search toggle when search mode is unavailable (#8717)
Co-authored-by: Claude Opus 4.6 <noreply@anthropic.com>
2026-02-23 20:43:19 -08:00
roshan
33b64db498 fix(extensions): fix base url for chrome extension to (#8714) 2026-02-23 20:18:05 -08:00
roshan
b925cc1a56 feat(chrome-extension): add tab reading to side panel (#8571)
Co-authored-by: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-02-24 01:17:57 +00:00
Danelegend
bac4b7c945 fix: preview markdown formatting (#8667) 2026-02-24 01:13:52 +00:00
Evan Lohn
6f6ef1e657 chore: coerce doc metadata (#8703) 2026-02-24 01:12:11 +00:00
Danelegend
885c69f460 feat: Improve csv preview modal (#8702) 2026-02-24 01:00:20 +00:00
Danelegend
4b837303ff feat(code-interpreter): Seed code interpreter server row (#8701) 2026-02-24 00:59:49 +00:00
Justin Tahara
d856a9befb fix(projects): Guardrails for Project User Files (#8644) 2026-02-24 00:21:57 +00:00
Justin Tahara
adade353c5 fix(api): Improving the API handling of threads (#8573) 2026-02-24 00:04:21 +00:00
Nikolas Garza
3cb6ec2f85 fix: patch prometheus metrics in daily test fixture (#8699) 2026-02-24 00:02:56 +00:00
Wenxi
691eebf00a fix: remove user info requirement for craft onboarding modal (#8697) 2026-02-23 23:52:17 +00:00
Danelegend
905b6633e6 chore: preview modal (#8665) 2026-02-23 23:40:55 +00:00
Justin Tahara
fd088196ff fix(search): Improve Speed (#8430) 2026-02-23 22:45:18 +00:00
Jamison Lahman
cafbf5b8be chore(playwright): warn user if setup takes longer than usual (#8690) 2026-02-23 22:23:58 +00:00
roshan
1235181559 fix(ui): Clean up NRF settings button styling (#8678)
Co-authored-by: Claude <noreply@anthropic.com>
2026-02-23 21:25:43 +00:00
Justin Tahara
caa2e45632 fix(db): Multitenant Schema migration update (#8679) 2026-02-23 21:25:26 +00:00
Justin Tahara
9c62e03120 chore(ods): Automated Cherry-pick backport (#8642) 2026-02-23 21:15:09 +00:00
Nikolas Garza
0937305064 feat(scim): Okta compatibility + provider abstraction (#8568) 2026-02-23 21:09:18 +00:00
Wenxi
e4c06570e3 fix: domain rules for signup on cloud (#8671) 2026-02-23 20:27:37 +00:00
roshan
78fc7c86d7 fix: Handle unauthenticated state gracefully on NRF page (#8491)
Co-authored-by: Claude <noreply@anthropic.com>
2026-02-23 19:26:38 +00:00
Raunak Bhagat
84d3aea847 refactor: migrate Web Search page to SettingsLayouts + Content (#8662) 2026-02-23 13:38:37 +00:00
Danelegend
00a404d3cd feat: Add code interpreter server db model (#8669) 2026-02-23 05:09:59 +00:00
Wenxi
787cf90d96 chore: set trial api usage to 0 and show ui (#8664) 2026-02-23 01:41:23 +00:00
143 changed files with 4961 additions and 1802 deletions

View File

@@ -11,6 +11,11 @@ permissions:
jobs:
cherry-pick-to-latest-release:
outputs:
should_cherrypick: ${{ steps.gate.outputs.should_cherrypick }}
pr_number: ${{ steps.gate.outputs.pr_number }}
cherry_pick_reason: ${{ steps.run_cherry_pick.outputs.reason }}
cherry_pick_details: ${{ steps.run_cherry_pick.outputs.details }}
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
@@ -36,9 +41,13 @@ jobs:
exit 0
fi
# Read the PR body and check whether the helper checkbox is checked.
pr_body="$(gh api "repos/${GITHUB_REPOSITORY}/pulls/${pr_number}" --jq '.body // ""')"
# Read the PR once so we can gate behavior and infer preferred actor.
pr_json="$(gh api "repos/${GITHUB_REPOSITORY}/pulls/${pr_number}")"
pr_body="$(printf '%s' "$pr_json" | jq -r '.body // ""')"
merged_by="$(printf '%s' "$pr_json" | jq -r '.merged_by.login // ""')"
echo "pr_number=$pr_number" >> "$GITHUB_OUTPUT"
echo "merged_by=$merged_by" >> "$GITHUB_OUTPUT"
if echo "$pr_body" | grep -qiE "\\[x\\][[:space:]]*(\\[[^]]+\\][[:space:]]*)?Please cherry-pick this PR to the latest release version"; then
echo "should_cherrypick=true" >> "$GITHUB_OUTPUT"
@@ -71,9 +80,82 @@ jobs:
git config user.email "github-actions[bot]@users.noreply.github.com"
- name: Create cherry-pick PR to latest release
id: run_cherry_pick
if: steps.gate.outputs.should_cherrypick == 'true'
continue-on-error: true
env:
GH_TOKEN: ${{ github.token }}
GITHUB_TOKEN: ${{ github.token }}
CHERRY_PICK_ASSIGNEE: ${{ steps.gate.outputs.merged_by }}
run: |
uv run --no-sync --with onyx-devtools ods cherry-pick "${GITHUB_SHA}" --yes --no-verify
set -o pipefail
output_file="$(mktemp)"
uv run --no-sync --with onyx-devtools ods cherry-pick "${GITHUB_SHA}" --yes --no-verify 2>&1 | tee "$output_file"
exit_code="${PIPESTATUS[0]}"
if [ "${exit_code}" -eq 0 ]; then
echo "status=success" >> "$GITHUB_OUTPUT"
exit 0
fi
echo "status=failure" >> "$GITHUB_OUTPUT"
reason="command-failed"
if grep -qiE "merge conflict during cherry-pick|CONFLICT|could not apply|cherry-pick in progress with staged changes" "$output_file"; then
reason="merge-conflict"
fi
echo "reason=${reason}" >> "$GITHUB_OUTPUT"
{
echo "details<<EOF"
tail -n 40 "$output_file"
echo "EOF"
} >> "$GITHUB_OUTPUT"
- name: Mark workflow as failed if cherry-pick failed
if: steps.gate.outputs.should_cherrypick == 'true' && steps.run_cherry_pick.outputs.status == 'failure'
run: |
echo "::error::Automated cherry-pick failed (${{ steps.run_cherry_pick.outputs.reason }})."
exit 1
notify-slack-on-cherry-pick-failure:
needs:
- cherry-pick-to-latest-release
if: always() && needs.cherry-pick-to-latest-release.outputs.should_cherrypick == 'true' && needs.cherry-pick-to-latest-release.result != 'success'
runs-on: ubuntu-slim
timeout-minutes: 10
steps:
- name: Checkout
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Build cherry-pick failure summary
id: failure-summary
env:
SOURCE_PR_NUMBER: ${{ needs.cherry-pick-to-latest-release.outputs.pr_number }}
CHERRY_PICK_REASON: ${{ needs.cherry-pick-to-latest-release.outputs.cherry_pick_reason }}
CHERRY_PICK_DETAILS: ${{ needs.cherry-pick-to-latest-release.outputs.cherry_pick_details }}
run: |
source_pr_url="https://github.com/${GITHUB_REPOSITORY}/pull/${SOURCE_PR_NUMBER}"
reason_text="cherry-pick command failed"
if [ "${CHERRY_PICK_REASON}" = "merge-conflict" ]; then
reason_text="merge conflict during cherry-pick"
fi
details_excerpt="$(printf '%s' "${CHERRY_PICK_DETAILS}" | tail -n 8 | tr '\n' ' ' | sed "s/[[:space:]]\\+/ /g" | sed "s/\"/'/g" | cut -c1-350)"
failed_jobs="• cherry-pick-to-latest-release\\n• source PR: ${source_pr_url}\\n• reason: ${reason_text}"
if [ -n "${details_excerpt}" ]; then
failed_jobs="${failed_jobs}\\n• excerpt: ${details_excerpt}"
fi
echo "jobs=${failed_jobs}" >> "$GITHUB_OUTPUT"
- name: Notify #cherry-pick-prs about cherry-pick failure
uses: ./.github/actions/slack-notify
with:
webhook-url: ${{ secrets.CHERRY_PICK_PRS_WEBHOOK }}
failed-jobs: ${{ steps.failure-summary.outputs.jobs }}
title: "🚨 Automated Cherry-Pick Failed"
ref-name: ${{ github.ref_name }}
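
For reference, the failure gate above reduces to a small classification step over the captured command output. A minimal Python sketch of the same logic (the function name is illustrative, not part of the workflow):

import re

_CONFLICT_MARKERS = re.compile(
    r"merge conflict during cherry-pick|CONFLICT|could not apply"
    r"|cherry-pick in progress with staged changes",
    re.IGNORECASE,
)

def classify_cherry_pick_failure(output: str) -> str:
    # Mirrors the workflow's grep: conflict markers map to "merge-conflict",
    # anything else stays the generic "command-failed".
    return "merge-conflict" if _CONFLICT_MARKERS.search(output) else "command-failed"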

View File

@@ -116,7 +116,6 @@ jobs:
run: |
cat <<EOF > deployment/docker_compose/.env
COMPOSE_PROFILES=s3-filestore,opensearch-enabled
CODE_INTERPRETER_BETA_ENABLED=true
DISABLE_TELEMETRY=true
OPENSEARCH_FOR_ONYX_ENABLED=true
EOF

View File

@@ -20,6 +20,7 @@ env:
# Test Environment Variables
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
SLACK_BOT_TOKEN_TEST_SPACE: ${{ secrets.SLACK_BOT_TOKEN_TEST_SPACE }}
CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
@@ -423,6 +424,7 @@ jobs:
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e EXA_API_KEY=${EXA_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e SLACK_BOT_TOKEN_TEST_SPACE=${SLACK_BOT_TOKEN_TEST_SPACE} \
-e CONFLUENCE_TEST_SPACE_URL=${CONFLUENCE_TEST_SPACE_URL} \
-e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \
-e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \
@@ -443,6 +445,7 @@ jobs:
-e TEST_WEB_HOSTNAME=test-runner \
-e MOCK_CONNECTOR_SERVER_HOST=mock_connector_server \
-e MOCK_CONNECTOR_SERVER_PORT=8001 \
-e ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=${{ matrix.edition == 'ee' && 'true' || 'false' }} \
${{ env.RUNS_ON_ECR_CACHE }}:integration-test-${{ github.run_id }} \
/app/tests/integration/${{ matrix.test-dir.path }}
@@ -701,6 +704,7 @@ jobs:
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e EXA_API_KEY=${EXA_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e SLACK_BOT_TOKEN_TEST_SPACE=${SLACK_BOT_TOKEN_TEST_SPACE} \
-e TEST_WEB_HOSTNAME=test-runner \
-e AUTH_TYPE=cloud \
-e MULTI_TENANT=true \

View File

@@ -548,7 +548,7 @@ class in the utils over directly calling the APIs with a library like `requests`
calling the utilities directly (e.g. do NOT create admin users with
`admin_user = UserManager.create(name="admin_user")`, instead use the `admin_user` fixture).
A great example of this type of test is `backend/tests/integration/dev_apis/test_simple_chat_api.py`.
A great example of this type of test is `backend/tests/integration/tests/streaming_endpoints/test_chat_stream.py`.
To run them:

View File

@@ -0,0 +1,29 @@
"""code interpreter seed
Revision ID: 07b98176f1de
Revises: 7cb492013621
Create Date: 2026-02-23 15:55:07.606784
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "07b98176f1de"
down_revision = "7cb492013621"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Seed the single instance of code_interpreter_server
# NOTE: There should always be exactly one code_interpreter_server row
op.execute(
sa.text("INSERT INTO code_interpreter_server (server_enabled) VALUES (true)")
)
def downgrade() -> None:
op.execute(sa.text("DELETE FROM code_interpreter_server"))

View File

@@ -0,0 +1,48 @@
"""add enterprise and name fields to scim_user_mapping
Revision ID: 7616121f6e97
Revises: 07b98176f1de
Create Date: 2026-02-23 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "7616121f6e97"
down_revision = "07b98176f1de"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"scim_user_mapping",
sa.Column("department", sa.String(), nullable=True),
)
op.add_column(
"scim_user_mapping",
sa.Column("manager", sa.String(), nullable=True),
)
op.add_column(
"scim_user_mapping",
sa.Column("given_name", sa.String(), nullable=True),
)
op.add_column(
"scim_user_mapping",
sa.Column("family_name", sa.String(), nullable=True),
)
op.add_column(
"scim_user_mapping",
sa.Column("scim_emails_json", sa.Text(), nullable=True),
)
def downgrade() -> None:
op.drop_column("scim_user_mapping", "scim_emails_json")
op.drop_column("scim_user_mapping", "family_name")
op.drop_column("scim_user_mapping", "given_name")
op.drop_column("scim_user_mapping", "manager")
op.drop_column("scim_user_mapping", "department")

View File

@@ -5,8 +5,10 @@ from uuid import UUID
import httpx
import sqlalchemy as sa
from celery import Celery
from celery import shared_task
from celery import Task
from redis import Redis
from redis.lock import Lock as RedisLock
from retry import retry
from sqlalchemy import select
@@ -24,12 +26,14 @@ from onyx.configs.constants import CELERY_GENERIC_BEAT_LOCK_TIMEOUT
from onyx.configs.constants import CELERY_USER_FILE_PROCESSING_LOCK_TIMEOUT
from onyx.configs.constants import CELERY_USER_FILE_PROCESSING_TASK_EXPIRES
from onyx.configs.constants import CELERY_USER_FILE_PROJECT_SYNC_LOCK_TIMEOUT
from onyx.configs.constants import CELERY_USER_FILE_PROJECT_SYNC_TASK_EXPIRES
from onyx.configs.constants import DocumentSource
from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryQueues
from onyx.configs.constants import OnyxCeleryTask
from onyx.configs.constants import OnyxRedisLocks
from onyx.configs.constants import USER_FILE_PROCESSING_MAX_QUEUE_DEPTH
from onyx.configs.constants import USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH
from onyx.connectors.file.connector import LocalFileConnector
from onyx.connectors.models import Document
from onyx.connectors.models import HierarchyNode
@@ -75,10 +79,58 @@ def _user_file_project_sync_lock_key(user_file_id: str | UUID) -> str:
return f"{OnyxRedisLocks.USER_FILE_PROJECT_SYNC_LOCK_PREFIX}:{user_file_id}"
def _user_file_project_sync_queued_key(user_file_id: str | UUID) -> str:
return f"{OnyxRedisLocks.USER_FILE_PROJECT_SYNC_QUEUED_PREFIX}:{user_file_id}"
def _user_file_delete_lock_key(user_file_id: str | UUID) -> str:
return f"{OnyxRedisLocks.USER_FILE_DELETE_LOCK_PREFIX}:{user_file_id}"
def get_user_file_project_sync_queue_depth(celery_app: Celery) -> int:
redis_celery: Redis = celery_app.broker_connection().channel().client # type: ignore
return celery_get_queue_length(
OnyxCeleryQueues.USER_FILE_PROJECT_SYNC, redis_celery
)
def enqueue_user_file_project_sync_task(
*,
celery_app: Celery,
redis_client: Redis,
user_file_id: str | UUID,
tenant_id: str,
priority: OnyxCeleryPriority = OnyxCeleryPriority.HIGH,
) -> bool:
"""Enqueue a project-sync task if no matching queued task already exists."""
queued_key = _user_file_project_sync_queued_key(user_file_id)
# NX+EX gives us atomic dedupe and a self-healing TTL.
queued_guard_set = redis_client.set(
queued_key,
1,
nx=True,
ex=CELERY_USER_FILE_PROJECT_SYNC_TASK_EXPIRES,
)
if not queued_guard_set:
return False
try:
celery_app.send_task(
OnyxCeleryTask.PROCESS_SINGLE_USER_FILE_PROJECT_SYNC,
kwargs={"user_file_id": str(user_file_id), "tenant_id": tenant_id},
queue=OnyxCeleryQueues.USER_FILE_PROJECT_SYNC,
priority=priority,
expires=CELERY_USER_FILE_PROJECT_SYNC_TASK_EXPIRES,
)
except Exception:
# Roll back the queued guard if task publish fails.
redis_client.delete(queued_key)
raise
return True
@retry(tries=3, delay=1, backoff=2, jitter=(0.0, 1.0))
def _visit_chunks(
*,
@@ -632,8 +684,8 @@ def process_single_user_file_delete(
ignore_result=True,
)
def check_for_user_file_project_sync(self: Task, *, tenant_id: str) -> None:
"""Scan for user files with PROJECT_SYNC status and enqueue per-file tasks."""
task_logger.info("check_for_user_file_project_sync - Starting")
"""Scan for user files needing project sync and enqueue per-file tasks."""
task_logger.info("Starting")
redis_client = get_redis_client(tenant_id=tenant_id)
lock: RedisLock = redis_client.lock(
@@ -645,7 +697,16 @@ def check_for_user_file_project_sync(self: Task, *, tenant_id: str) -> None:
return None
enqueued = 0
skipped_guard = 0
try:
queue_depth = get_user_file_project_sync_queue_depth(self.app)
if queue_depth > USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH:
task_logger.warning(
f"Queue depth {queue_depth} exceeds "
f"{USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH}, skipping enqueue for tenant={tenant_id}"
)
return None
with get_session_with_current_tenant() as db_session:
user_file_ids = (
db_session.execute(
@@ -661,19 +722,23 @@ def check_for_user_file_project_sync(self: Task, *, tenant_id: str) -> None:
)
for user_file_id in user_file_ids:
self.app.send_task(
OnyxCeleryTask.PROCESS_SINGLE_USER_FILE_PROJECT_SYNC,
kwargs={"user_file_id": str(user_file_id), "tenant_id": tenant_id},
queue=OnyxCeleryQueues.USER_FILE_PROJECT_SYNC,
if not enqueue_user_file_project_sync_task(
celery_app=self.app,
redis_client=redis_client,
user_file_id=user_file_id,
tenant_id=tenant_id,
priority=OnyxCeleryPriority.HIGH,
)
):
skipped_guard += 1
continue
enqueued += 1
finally:
if lock.owned():
lock.release()
task_logger.info(
f"check_for_user_file_project_sync - Enqueued {enqueued} tasks for tenant={tenant_id}"
f"Enqueued {enqueued} "
f"Skipped guard {skipped_guard} tasks for tenant={tenant_id}"
)
return None
@@ -692,6 +757,8 @@ def process_single_user_file_project_sync(
)
redis_client = get_redis_client(tenant_id=tenant_id)
redis_client.delete(_user_file_project_sync_queued_key(user_file_id))
file_lock: RedisLock = redis_client.lock(
_user_file_project_sync_lock_key(user_file_id),
timeout=CELERY_USER_FILE_PROJECT_SYNC_LOCK_TIMEOUT,
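
The queued-guard dedupe used throughout this file reduces to one atomic Redis call. A minimal standalone sketch (the key prefix and TTL are placeholders):

from redis import Redis

def try_mark_queued(redis_client: Redis, user_file_id: str, ttl_seconds: int = 60) -> bool:
    # SET with nx=True + ex=ttl is atomic: only the first producer within the
    # TTL succeeds, and the key self-heals by expiring if never consumed.
    return bool(
        redis_client.set(f"queued:{user_file_id}", 1, nx=True, ex=ttl_seconds)
    )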

View File

@@ -58,6 +58,8 @@ from onyx.file_store.document_batch_storage import DocumentBatchStorage
from onyx.file_store.document_batch_storage import get_document_batch_storage
from onyx.indexing.indexing_heartbeat import IndexingHeartbeatInterface
from onyx.indexing.indexing_pipeline import index_doc_batch_prepare
from onyx.indexing.postgres_sanitization import sanitize_document_for_postgres
from onyx.indexing.postgres_sanitization import sanitize_hierarchy_nodes_for_postgres
from onyx.redis.redis_hierarchy import cache_hierarchy_nodes_batch
from onyx.redis.redis_hierarchy import ensure_source_node_exists
from onyx.redis.redis_hierarchy import get_node_id_from_raw_id
@@ -156,36 +158,7 @@ def strip_null_characters(doc_batch: list[Document]) -> list[Document]:
logger.warning(
f"doc {doc.id} too large, Document size: {sys.getsizeof(doc)}"
)
cleaned_doc = doc.model_copy()
# Postgres cannot handle NUL characters in text fields
if "\x00" in cleaned_doc.id:
logger.warning(f"NUL characters found in document ID: {cleaned_doc.id}")
cleaned_doc.id = cleaned_doc.id.replace("\x00", "")
if cleaned_doc.title and "\x00" in cleaned_doc.title:
logger.warning(
f"NUL characters found in document title: {cleaned_doc.title}"
)
cleaned_doc.title = cleaned_doc.title.replace("\x00", "")
if "\x00" in cleaned_doc.semantic_identifier:
logger.warning(
f"NUL characters found in document semantic identifier: {cleaned_doc.semantic_identifier}"
)
cleaned_doc.semantic_identifier = cleaned_doc.semantic_identifier.replace(
"\x00", ""
)
for section in cleaned_doc.sections:
if section.link is not None:
section.link = section.link.replace("\x00", "")
# since text can be longer, just replace to avoid double scan
if isinstance(section, TextSection) and section.text is not None:
section.text = section.text.replace("\x00", "")
cleaned_batch.append(cleaned_doc)
cleaned_batch.append(sanitize_document_for_postgres(doc))
return cleaned_batch
@@ -602,10 +575,13 @@ def connector_document_extraction(
# Process hierarchy nodes batch - upsert to Postgres and cache in Redis
if hierarchy_node_batch:
hierarchy_node_batch_cleaned = (
sanitize_hierarchy_nodes_for_postgres(hierarchy_node_batch)
)
with get_session_with_current_tenant() as db_session:
upserted_nodes = upsert_hierarchy_nodes_batch(
db_session=db_session,
nodes=hierarchy_node_batch,
nodes=hierarchy_node_batch_cleaned,
source=db_connector.source,
commit=True,
is_connector_public=is_connector_public,
@@ -624,7 +600,7 @@ def connector_document_extraction(
)
logger.debug(
f"Persisted and cached {len(hierarchy_node_batch)} hierarchy nodes "
f"Persisted and cached {len(hierarchy_node_batch_cleaned)} hierarchy nodes "
f"for attempt={index_attempt_id}"
)

View File

@@ -30,6 +30,7 @@ from onyx.configs.constants import DocumentSource
from onyx.configs.constants import MessageType
from onyx.context.search.models import SearchDoc
from onyx.context.search.models import SearchDocsResponse
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.memory import add_memory
from onyx.db.memory import update_memory_at_index
from onyx.db.memory import UserMemoryContext
@@ -656,7 +657,12 @@ def run_llm_loop(
fallback_extraction_attempted: bool = False
citation_mapping: dict[int, str] = {} # Maps citation_num -> document_id/URL
default_base_system_prompt: str = get_default_base_system_prompt(db_session)
# Fetch this in a short-lived session so the long-running stream loop does
# not pin a connection just to keep read state alive.
with get_session_with_current_tenant() as prompt_db_session:
default_base_system_prompt: str = get_default_base_system_prompt(
prompt_db_session
)
system_prompt = None
custom_agent_prompt_msg = None

View File

@@ -856,6 +856,11 @@ def handle_stream_message_objects(
reserved_tokens=reserved_token_count,
)
# Release any read transaction before entering the long-running LLM stream.
# Without this, the request-scoped session can keep a connection checked out
# for the full stream duration.
db_session.commit()
# The stream generator can resume on a different worker thread after early yields.
# Set this right before launching the LLM loop so run_in_background copies the right context.
if new_msg_req.mock_llm_response is not None:
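
Both of the changes above follow the same pattern: read what you need from the database up front, release the connection, then run the long-lived stream. A generic sketch of that pattern (all three callables are stand-ins, not project helpers):

from collections.abc import Callable, Iterator
from typing import Any

def stream_without_pinned_connection(
    open_session: Callable[[], Any],
    fetch_state: Callable[[Any], str],
    run_llm: Callable[[str], Iterator[str]],
) -> Iterator[str]:
    with open_session() as db_session:
        state = fetch_state(db_session)
        db_session.commit()  # release the read transaction before streaming
    # No session is held here, so the pool stays free for the stream's duration.
    yield from run_llm(state)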

View File

@@ -210,10 +210,10 @@ AUTH_COOKIE_EXPIRE_TIME_SECONDS = int(
REQUIRE_EMAIL_VERIFICATION = (
os.environ.get("REQUIRE_EMAIL_VERIFICATION", "").lower() == "true"
)
SMTP_SERVER = os.environ.get("SMTP_SERVER") or "smtp.gmail.com"
SMTP_SERVER = os.environ.get("SMTP_SERVER") or ""
SMTP_PORT = int(os.environ.get("SMTP_PORT") or "587")
SMTP_USER = os.environ.get("SMTP_USER", "your-email@gmail.com")
SMTP_PASS = os.environ.get("SMTP_PASS", "your-gmail-password")
SMTP_USER = os.environ.get("SMTP_USER") or ""
SMTP_PASS = os.environ.get("SMTP_PASS") or ""
EMAIL_FROM = os.environ.get("EMAIL_FROM") or SMTP_USER
SENDGRID_API_KEY = os.environ.get("SENDGRID_API_KEY") or ""

View File

@@ -167,6 +167,14 @@ CELERY_USER_FILE_PROCESSING_TASK_EXPIRES = 60 # 1 minute (in seconds)
# beat generator stops adding more. Prevents unbounded queue growth when workers
# fall behind.
USER_FILE_PROCESSING_MAX_QUEUE_DEPTH = 500
# How long a queued user-file-project-sync task remains valid.
# Should be short enough to discard stale queue entries under load while still
# allowing workers enough time to pick up new tasks.
CELERY_USER_FILE_PROJECT_SYNC_TASK_EXPIRES = 60 # 1 minute (in seconds)
# Max queue depth before user-file-project-sync producers stop enqueuing.
# This applies backpressure when workers are falling behind.
USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH = 500
CELERY_USER_FILE_PROJECT_SYNC_LOCK_TIMEOUT = 5 * 60 # 5 minutes (in seconds)
@@ -459,6 +467,7 @@ class OnyxRedisLocks:
USER_FILE_QUEUED_PREFIX = "da_lock:user_file_queued"
USER_FILE_PROJECT_SYNC_BEAT_LOCK = "da_lock:check_user_file_project_sync_beat"
USER_FILE_PROJECT_SYNC_LOCK_PREFIX = "da_lock:user_file_project_sync"
USER_FILE_PROJECT_SYNC_QUEUED_PREFIX = "da_lock:user_file_project_sync_queued"
USER_FILE_DELETE_BEAT_LOCK = "da_lock:check_user_file_delete_beat"
USER_FILE_DELETE_LOCK_PREFIX = "da_lock:user_file_delete"

View File

@@ -0,0 +1,96 @@
"""Inverse mapping from user-facing Microsoft host URLs to the SDK's AzureEnvironment.
The office365 library's GraphClient requires an ``AzureEnvironment`` string
(e.g. ``"Global"``, ``"GCC High"``) to route requests to the correct national
cloud. Our connectors instead expose free-text ``authority_host`` and
``graph_api_host`` fields so the frontend doesn't need to know about SDK
internals.
This module bridges the gap: given the two host URLs the user configured, it
resolves the matching ``AzureEnvironment`` value (and the implied SharePoint
domain suffix) so callers can pass ``environment=…`` to ``GraphClient``.
"""
from office365.graph_client import AzureEnvironment # type: ignore[import-untyped]
from pydantic import BaseModel
from onyx.connectors.exceptions import ConnectorValidationError
class MicrosoftGraphEnvironment(BaseModel):
"""One row of the inverse mapping."""
environment: str
graph_host: str
authority_host: str
sharepoint_domain_suffix: str
_ENVIRONMENTS: list[MicrosoftGraphEnvironment] = [
MicrosoftGraphEnvironment(
environment=AzureEnvironment.Global,
graph_host="https://graph.microsoft.com",
authority_host="https://login.microsoftonline.com",
sharepoint_domain_suffix="sharepoint.com",
),
MicrosoftGraphEnvironment(
environment=AzureEnvironment.USGovernmentHigh,
graph_host="https://graph.microsoft.us",
authority_host="https://login.microsoftonline.us",
sharepoint_domain_suffix="sharepoint.us",
),
MicrosoftGraphEnvironment(
environment=AzureEnvironment.USGovernmentDoD,
graph_host="https://dod-graph.microsoft.us",
authority_host="https://login.microsoftonline.us",
sharepoint_domain_suffix="sharepoint.us",
),
MicrosoftGraphEnvironment(
environment=AzureEnvironment.China,
graph_host="https://microsoftgraph.chinacloudapi.cn",
authority_host="https://login.chinacloudapi.cn",
sharepoint_domain_suffix="sharepoint.cn",
),
MicrosoftGraphEnvironment(
environment=AzureEnvironment.Germany,
graph_host="https://graph.microsoft.de",
authority_host="https://login.microsoftonline.de",
sharepoint_domain_suffix="sharepoint.de",
),
]
_GRAPH_HOST_INDEX: dict[str, MicrosoftGraphEnvironment] = {
env.graph_host: env for env in _ENVIRONMENTS
}
def resolve_microsoft_environment(
graph_api_host: str,
authority_host: str,
) -> MicrosoftGraphEnvironment:
"""Return the ``MicrosoftGraphEnvironment`` that matches the supplied hosts.
Raises ``ConnectorValidationError`` when the combination is unknown or
internally inconsistent (e.g. a GCC-High graph host paired with a
commercial authority host).
"""
graph_api_host = graph_api_host.rstrip("/")
authority_host = authority_host.rstrip("/")
env = _GRAPH_HOST_INDEX.get(graph_api_host)
if env is None:
known = ", ".join(sorted(_GRAPH_HOST_INDEX))
raise ConnectorValidationError(
f"Unsupported Microsoft Graph API host '{graph_api_host}'. "
f"Recognised hosts: {known}"
)
if env.authority_host != authority_host:
raise ConnectorValidationError(
f"Authority host '{authority_host}' is inconsistent with "
f"graph API host '{graph_api_host}'. "
f"Expected authority host '{env.authority_host}' "
f"for the {env.environment} environment."
)
return env
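
A quick usage sketch of the resolver above:

from onyx.connectors.exceptions import ConnectorValidationError
from onyx.connectors.microsoft_graph_env import resolve_microsoft_environment

env = resolve_microsoft_environment(
    "https://graph.microsoft.us", "https://login.microsoftonline.us"
)
assert env.sharepoint_domain_suffix == "sharepoint.us"

# Pairing a GCC-High graph host with the commercial authority host fails fast:
try:
    resolve_microsoft_environment(
        "https://graph.microsoft.us", "https://login.microsoftonline.com"
    )
except ConnectorValidationError as exc:
    print(exc)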

View File

@@ -6,6 +6,7 @@ from typing import cast
from pydantic import BaseModel
from pydantic import Field
from pydantic import field_validator
from pydantic import model_validator
from onyx.access.models import ExternalAccess
@@ -167,6 +168,14 @@ class DocumentBase(BaseModel):
# list of strings.
metadata: dict[str, str | list[str]]
@field_validator("metadata", mode="before")
@classmethod
def _coerce_metadata_values(cls, v: dict[str, Any]) -> dict[str, str | list[str]]:
return {
key: [str(item) for item in val] if isinstance(val, list) else str(val)
for key, val in v.items()
}
# UTC time
doc_updated_at: datetime | None = None
chunk_count: int | None = None
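
In isolation, the validator's coercion behaves like this (a standalone mirror of the classmethod above):

def coerce_metadata_values(v: dict) -> dict:
    # Lists become lists of strings; everything else becomes a single string.
    return {
        key: [str(item) for item in val] if isinstance(val, list) else str(val)
        for key, val in v.items()
    }

assert coerce_metadata_values({"pages": 3, "tags": ["a", 7]}) == {
    "pages": "3",
    "tags": ["a", "7"],
}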

View File

@@ -47,6 +47,7 @@ from onyx.connectors.interfaces import GenerateSlimDocumentOutput
from onyx.connectors.interfaces import IndexingHeartbeatInterface
from onyx.connectors.interfaces import SecondsSinceUnixEpoch
from onyx.connectors.interfaces import SlimConnectorWithPermSync
from onyx.connectors.microsoft_graph_env import resolve_microsoft_environment
from onyx.connectors.models import BasicExpertInfo
from onyx.connectors.models import ConnectorCheckpoint
from onyx.connectors.models import ConnectorFailure
@@ -146,7 +147,9 @@ class DriveItemData(BaseModel):
self.id,
ResourcePath("items", ResourcePath(self.drive_id, ResourcePath("drives"))),
)
return DriveItem(graph_client, path)
item = DriveItem(graph_client, path)
item.set_property("id", self.id)
return item
# The office365 library's ClientContext caches the access token from its
@@ -837,10 +840,20 @@ class SharepointConnector(
self._cached_rest_ctx: ClientContext | None = None
self._cached_rest_ctx_url: str | None = None
self._cached_rest_ctx_created_at: float = 0.0
self.authority_host = authority_host.rstrip("/")
self.graph_api_host = graph_api_host.rstrip("/")
resolved_env = resolve_microsoft_environment(graph_api_host, authority_host)
self._azure_environment = resolved_env.environment
self.authority_host = resolved_env.authority_host
self.graph_api_host = resolved_env.graph_host
self.graph_api_base = f"{self.graph_api_host}/v1.0"
self.sharepoint_domain_suffix = sharepoint_domain_suffix
self.sharepoint_domain_suffix = resolved_env.sharepoint_domain_suffix
if sharepoint_domain_suffix != resolved_env.sharepoint_domain_suffix:
logger.warning(
f"Configured sharepoint_domain_suffix '{sharepoint_domain_suffix}' "
f"differs from the expected suffix '{resolved_env.sharepoint_domain_suffix}' "
f"for the {resolved_env.environment} environment. "
f"Using '{resolved_env.sharepoint_domain_suffix}'."
)
def validate_connector_settings(self) -> None:
# Validate that at least one content type is enabled
@@ -1592,6 +1605,7 @@ class SharepointConnector(
if certificate_data is None:
raise RuntimeError("Failed to load certificate")
logger.info(f"Creating MSAL app with authority url {authority_url}")
self.msal_app = msal.ConfidentialClientApplication(
authority=authority_url,
client_id=sp_client_id,
@@ -1623,7 +1637,9 @@ class SharepointConnector(
raise ConnectorValidationError("Failed to acquire token for graph")
return token
self._graph_client = GraphClient(_acquire_token_for_graph)
self._graph_client = GraphClient(
_acquire_token_for_graph, environment=self._azure_environment
)
if auth_method == SharepointAuthMethod.CERTIFICATE.value:
org = self.graph_client.organization.get().execute_query()
if not org or len(org) == 0:

View File

@@ -23,6 +23,7 @@ from onyx.connectors.interfaces import CheckpointOutput
from onyx.connectors.interfaces import GenerateSlimDocumentOutput
from onyx.connectors.interfaces import SecondsSinceUnixEpoch
from onyx.connectors.interfaces import SlimConnectorWithPermSync
from onyx.connectors.microsoft_graph_env import resolve_microsoft_environment
from onyx.connectors.models import ConnectorCheckpoint
from onyx.connectors.models import ConnectorFailure
from onyx.connectors.models import ConnectorMissingCredentialError
@@ -73,8 +74,11 @@ class TeamsConnector(
self.msal_app: msal.ConfidentialClientApplication | None = None
self.max_workers = max_workers
self.requested_team_list: list[str] = teams
self.authority_host = authority_host.rstrip("/")
self.graph_api_host = graph_api_host.rstrip("/")
resolved_env = resolve_microsoft_environment(graph_api_host, authority_host)
self._azure_environment = resolved_env.environment
self.authority_host = resolved_env.authority_host
self.graph_api_host = resolved_env.graph_host
# impls for BaseConnector
@@ -106,7 +110,9 @@ class TeamsConnector(
return token
self.graph_client = GraphClient(_acquire_token_func)
self.graph_client = GraphClient(
_acquire_token_func, environment=self._azure_environment
)
return None
def validate_connector_settings(self) -> None:

View File

@@ -0,0 +1,21 @@
from sqlalchemy import select
from sqlalchemy.orm import Session
from onyx.db.models import CodeInterpreterServer
def fetch_code_interpreter_server(
db_session: Session,
) -> CodeInterpreterServer:
server = db_session.scalars(select(CodeInterpreterServer)).one()
return server
def update_code_interpreter_server_enabled(
db_session: Session,
enabled: bool,
) -> CodeInterpreterServer:
server = db_session.scalars(select(CodeInterpreterServer)).one()
server.server_enabled = enabled
db_session.commit()
return server
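
A brief usage sketch of these helpers (engine is a placeholder; note that .one() raises unless the seed migration above left exactly one row):

from sqlalchemy.orm import Session

from onyx.db.code_interpreter import fetch_code_interpreter_server
from onyx.db.code_interpreter import update_code_interpreter_server_enabled

def disable_code_interpreter(engine) -> None:
    # engine is a stand-in SQLAlchemy engine; real callers would use the
    # app's session factory instead.
    with Session(engine) as db_session:
        update_code_interpreter_server_enabled(db_session, enabled=False)
        assert fetch_code_interpreter_server(db_session).server_enabled is False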

View File

@@ -4940,6 +4940,11 @@ class ScimUserMapping(Base):
ForeignKey("user.id", ondelete="CASCADE"), unique=True, nullable=False
)
scim_username: Mapped[str | None] = mapped_column(String, nullable=True)
department: Mapped[str | None] = mapped_column(String, nullable=True)
manager: Mapped[str | None] = mapped_column(String, nullable=True)
given_name: Mapped[str | None] = mapped_column(String, nullable=True)
family_name: Mapped[str | None] = mapped_column(String, nullable=True)
scim_emails_json: Mapped[str | None] = mapped_column(Text, nullable=True)
created_at: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True), server_default=func.now(), nullable=False

View File

@@ -2,6 +2,7 @@ import random
from datetime import datetime
from datetime import timedelta
from logging import getLogger
from uuid import UUID
from onyx.configs.constants import MessageType
from onyx.db.chat import create_chat_session
@@ -13,18 +14,26 @@ from onyx.db.models import ChatSession
logger = getLogger(__name__)
def seed_chat_history(num_sessions: int, num_messages: int, days: int) -> None:
def seed_chat_history(
num_sessions: int,
num_messages: int,
days: int,
user_id: UUID | None = None,
persona_id: int | None = None,
) -> None:
"""Utility function to seed chat history for testing.
num_sessions: the number of sessions to seed
num_messages: the number of messages to seed per session
days: the number of days back from the current time over which to randomize
the session times.
user_id: optional user to associate with sessions
persona_id: optional persona/assistant to associate with sessions
"""
with get_session_with_current_tenant() as db_session:
logger.info(f"Seeding {num_sessions} sessions.")
for y in range(0, num_sessions):
create_chat_session(db_session, f"pytest_session_{y}", None, None)
create_chat_session(db_session, f"pytest_session_{y}", user_id, persona_id)
# randomize all session times
logger.info(f"Seeding {num_messages} messages per session.")

View File

@@ -12,6 +12,9 @@ if TYPE_CHECKING:
class AzureImageGenerationProvider(ImageGenerationProvider):
_GPT_IMAGE_MODEL_PREFIX = "gpt-image-"
_DALL_E_2_MODEL_NAME = "dall-e-2"
def __init__(
self,
api_key: str,
@@ -53,6 +56,25 @@ class AzureImageGenerationProvider(ImageGenerationProvider):
deployment_name=credentials.deployment_name,
)
@property
def supports_reference_images(self) -> bool:
return True
@property
def max_reference_images(self) -> int:
# Azure GPT image models support up to 16 input images for edits.
return 16
def _normalize_model_name(self, model: str) -> str:
return model.rsplit("/", 1)[-1]
def _model_supports_image_edits(self, model: str) -> bool:
normalized_model = self._normalize_model_name(model)
return (
normalized_model.startswith(self._GPT_IMAGE_MODEL_PREFIX)
or normalized_model == self._DALL_E_2_MODEL_NAME
)
def generate_image(
self,
prompt: str,
@@ -60,14 +82,44 @@ class AzureImageGenerationProvider(ImageGenerationProvider):
size: str,
n: int,
quality: str | None = None,
reference_images: list[ReferenceImage] | None = None, # noqa: ARG002
reference_images: list[ReferenceImage] | None = None,
**kwargs: Any,
) -> ImageGenerationResponse:
from litellm import image_generation
deployment = self._deployment_name or model
model_name = f"azure/{deployment}"
if reference_images:
if not self._model_supports_image_edits(model):
raise ValueError(
f"Model '{model}' does not support image edits with reference images."
)
normalized_model = self._normalize_model_name(model)
if (
normalized_model == self._DALL_E_2_MODEL_NAME
and len(reference_images) > 1
):
raise ValueError(
"Model 'dall-e-2' only supports a single reference image for edits."
)
from litellm import image_edit
return image_edit(
image=[image.data for image in reference_images],
prompt=prompt,
model=model_name,
api_key=self._api_key,
api_base=self._api_base,
api_version=self._api_version,
size=size,
n=n,
quality=quality,
**kwargs,
)
from litellm import image_generation
return image_generation(
prompt=prompt,
model=model_name,

View File

@@ -12,6 +12,9 @@ if TYPE_CHECKING:
class OpenAIImageGenerationProvider(ImageGenerationProvider):
_GPT_IMAGE_MODEL_PREFIX = "gpt-image-"
_DALL_E_2_MODEL_NAME = "dall-e-2"
def __init__(
self,
api_key: str,
@@ -39,6 +42,25 @@ class OpenAIImageGenerationProvider(ImageGenerationProvider):
api_base=credentials.api_base,
)
@property
def supports_reference_images(self) -> bool:
return True
@property
def max_reference_images(self) -> int:
# GPT image models support up to 16 input images for edits.
return 16
def _normalize_model_name(self, model: str) -> str:
return model.rsplit("/", 1)[-1]
def _model_supports_image_edits(self, model: str) -> bool:
normalized_model = self._normalize_model_name(model)
return (
normalized_model.startswith(self._GPT_IMAGE_MODEL_PREFIX)
or normalized_model == self._DALL_E_2_MODEL_NAME
)
def generate_image(
self,
prompt: str,
@@ -46,9 +68,38 @@ class OpenAIImageGenerationProvider(ImageGenerationProvider):
size: str,
n: int,
quality: str | None = None,
reference_images: list[ReferenceImage] | None = None, # noqa: ARG002
reference_images: list[ReferenceImage] | None = None,
**kwargs: Any,
) -> ImageGenerationResponse:
if reference_images:
if not self._model_supports_image_edits(model):
raise ValueError(
f"Model '{model}' does not support image edits with reference images."
)
normalized_model = self._normalize_model_name(model)
if (
normalized_model == self._DALL_E_2_MODEL_NAME
and len(reference_images) > 1
):
raise ValueError(
"Model 'dall-e-2' only supports a single reference image for edits."
)
from litellm import image_edit
return image_edit(
image=[image.data for image in reference_images],
prompt=prompt,
model=model,
api_key=self._api_key,
api_base=self._api_base,
size=size,
n=n,
quality=quality,
**kwargs,
)
from litellm import image_generation
return image_generation(
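
Both providers share the same gating helper; a standalone mirror shows its behavior:

_GPT_IMAGE_MODEL_PREFIX = "gpt-image-"
_DALL_E_2_MODEL_NAME = "dall-e-2"

def model_supports_image_edits(model: str) -> bool:
    # Strip any "provider/" or deployment prefix, then allow gpt-image-*
    # models or dall-e-2 (dall-e-3 has no edits endpoint).
    normalized = model.rsplit("/", 1)[-1]
    return (
        normalized.startswith(_GPT_IMAGE_MODEL_PREFIX)
        or normalized == _DALL_E_2_MODEL_NAME
    )

assert model_supports_image_edits("azure/gpt-image-1")
assert model_supports_image_edits("dall-e-2")
assert not model_supports_image_edits("dall-e-3")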

View File

@@ -49,6 +49,7 @@ from onyx.indexing.embedder import IndexingEmbedder
from onyx.indexing.models import DocAwareChunk
from onyx.indexing.models import IndexingBatchAdapter
from onyx.indexing.models import UpdatableChunkData
from onyx.indexing.postgres_sanitization import sanitize_documents_for_postgres
from onyx.indexing.vector_db_insertion import write_chunks_to_vector_db_with_backoff
from onyx.llm.factory import get_default_llm_with_vision
from onyx.llm.factory import get_llm_for_contextual_rag
@@ -228,6 +229,8 @@ def index_doc_batch_prepare(
) -> DocumentBatchPrepareContext | None:
"""Sets up the documents in the relational DB (source of truth) for permissions, metadata, etc.
This precedes indexing them into the actual document index."""
documents = sanitize_documents_for_postgres(documents)
# Create a trimmed list of docs that don't have a newer updated at
# Shortcuts the time-consuming flow on connector index retries
document_ids: list[str] = [document.id for document in documents]

View File

@@ -0,0 +1,150 @@
from typing import Any
from onyx.access.models import ExternalAccess
from onyx.connectors.models import BasicExpertInfo
from onyx.connectors.models import Document
from onyx.connectors.models import HierarchyNode
def _sanitize_string(value: str) -> str:
return value.replace("\x00", "")
def _sanitize_json_like(value: Any) -> Any:
if isinstance(value, str):
return _sanitize_string(value)
if isinstance(value, list):
return [_sanitize_json_like(item) for item in value]
if isinstance(value, tuple):
return tuple(_sanitize_json_like(item) for item in value)
if isinstance(value, dict):
sanitized: dict[Any, Any] = {}
for key, nested_value in value.items():
cleaned_key = _sanitize_string(key) if isinstance(key, str) else key
sanitized[cleaned_key] = _sanitize_json_like(nested_value)
return sanitized
return value
def _sanitize_expert_info(expert: BasicExpertInfo) -> BasicExpertInfo:
return expert.model_copy(
update={
"display_name": (
_sanitize_string(expert.display_name)
if expert.display_name is not None
else None
),
"first_name": (
_sanitize_string(expert.first_name)
if expert.first_name is not None
else None
),
"middle_initial": (
_sanitize_string(expert.middle_initial)
if expert.middle_initial is not None
else None
),
"last_name": (
_sanitize_string(expert.last_name)
if expert.last_name is not None
else None
),
"email": (
_sanitize_string(expert.email) if expert.email is not None else None
),
}
)
def _sanitize_external_access(external_access: ExternalAccess) -> ExternalAccess:
return ExternalAccess(
external_user_emails={
_sanitize_string(email) for email in external_access.external_user_emails
},
external_user_group_ids={
_sanitize_string(group_id)
for group_id in external_access.external_user_group_ids
},
is_public=external_access.is_public,
)
def sanitize_document_for_postgres(document: Document) -> Document:
cleaned_doc = document.model_copy(deep=True)
cleaned_doc.id = _sanitize_string(cleaned_doc.id)
cleaned_doc.semantic_identifier = _sanitize_string(cleaned_doc.semantic_identifier)
if cleaned_doc.title is not None:
cleaned_doc.title = _sanitize_string(cleaned_doc.title)
if cleaned_doc.parent_hierarchy_raw_node_id is not None:
cleaned_doc.parent_hierarchy_raw_node_id = _sanitize_string(
cleaned_doc.parent_hierarchy_raw_node_id
)
cleaned_doc.metadata = {
_sanitize_string(key): (
[_sanitize_string(item) for item in value]
if isinstance(value, list)
else _sanitize_string(value)
)
for key, value in cleaned_doc.metadata.items()
}
if cleaned_doc.doc_metadata is not None:
cleaned_doc.doc_metadata = _sanitize_json_like(cleaned_doc.doc_metadata)
if cleaned_doc.primary_owners is not None:
cleaned_doc.primary_owners = [
_sanitize_expert_info(expert) for expert in cleaned_doc.primary_owners
]
if cleaned_doc.secondary_owners is not None:
cleaned_doc.secondary_owners = [
_sanitize_expert_info(expert) for expert in cleaned_doc.secondary_owners
]
if cleaned_doc.external_access is not None:
cleaned_doc.external_access = _sanitize_external_access(
cleaned_doc.external_access
)
for section in cleaned_doc.sections:
if section.link is not None:
section.link = _sanitize_string(section.link)
if section.text is not None:
section.text = _sanitize_string(section.text)
if section.image_file_id is not None:
section.image_file_id = _sanitize_string(section.image_file_id)
return cleaned_doc
def sanitize_documents_for_postgres(documents: list[Document]) -> list[Document]:
return [sanitize_document_for_postgres(document) for document in documents]
def sanitize_hierarchy_node_for_postgres(node: HierarchyNode) -> HierarchyNode:
cleaned_node = node.model_copy(deep=True)
cleaned_node.raw_node_id = _sanitize_string(cleaned_node.raw_node_id)
cleaned_node.display_name = _sanitize_string(cleaned_node.display_name)
if cleaned_node.raw_parent_id is not None:
cleaned_node.raw_parent_id = _sanitize_string(cleaned_node.raw_parent_id)
if cleaned_node.link is not None:
cleaned_node.link = _sanitize_string(cleaned_node.link)
if cleaned_node.external_access is not None:
cleaned_node.external_access = _sanitize_external_access(
cleaned_node.external_access
)
return cleaned_node
def sanitize_hierarchy_nodes_for_postgres(
nodes: list[HierarchyNode],
) -> list[HierarchyNode]:
return [sanitize_hierarchy_node_for_postgres(node) for node in nodes]
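
The motivation is that Postgres text columns reject NUL (0x00) bytes, so everything is scrubbed before upsert. The nested sanitizer behaves like this (a slightly simplified standalone mirror of _sanitize_json_like, omitting the tuple branch):

def sanitize_json_like(value):
    if isinstance(value, str):
        return value.replace("\x00", "")
    if isinstance(value, list):
        return [sanitize_json_like(item) for item in value]
    if isinstance(value, dict):
        return {
            (sanitize_json_like(k) if isinstance(k, str) else k): sanitize_json_like(v)
            for k, v in value.items()
        }
    return value

assert sanitize_json_like({"a\x00key": ["b\x00c", 1]}) == {"akey": ["bc", 1]}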

View File

@@ -97,6 +97,9 @@ from onyx.server.features.web_search.api import router as web_search_router
from onyx.server.federated.api import router as federated_router
from onyx.server.kg.api import admin_router as kg_admin_router
from onyx.server.manage.administrative import router as admin_router
from onyx.server.manage.code_interpreter.api import (
admin_router as code_interpreter_admin_router,
)
from onyx.server.manage.discord_bot.api import router as discord_bot_router
from onyx.server.manage.embedding.api import admin_router as embedding_admin_router
from onyx.server.manage.embedding.api import basic_router as embedding_router
@@ -421,6 +424,9 @@ def get_application(lifespan_override: Lifespan | None = None) -> FastAPI:
include_router_with_global_prefix_prepended(application, llm_admin_router)
include_router_with_global_prefix_prepended(application, kg_admin_router)
include_router_with_global_prefix_prepended(application, llm_router)
include_router_with_global_prefix_prepended(
application, code_interpreter_admin_router
)
include_router_with_global_prefix_prepended(
application, image_generation_admin_router
)

View File

@@ -1,14 +1,68 @@
import re
from typing import Any
from mistune import create_markdown
from mistune import HTMLRenderer
_CITATION_LINK_PATTERN = re.compile(r"\[\[\d+\]\]\(")
def _extract_link_destination(message: str, start_idx: int) -> tuple[str, int | None]:
"""Extract markdown link destination, allowing nested parentheses in the URL."""
depth = 0
i = start_idx
while i < len(message):
curr = message[i]
if curr == "\\":
i += 2
continue
if curr == "(":
depth += 1
elif curr == ")":
if depth == 0:
return message[start_idx:i], i
depth -= 1
i += 1
return message[start_idx:], None
def _normalize_citation_link_destinations(message: str) -> str:
"""Wrap citation URLs in angle brackets so markdown parsers handle parentheses safely."""
if "[[" not in message:
return message
normalized_parts: list[str] = []
cursor = 0
while match := _CITATION_LINK_PATTERN.search(message, cursor):
normalized_parts.append(message[cursor : match.end()])
destination_start = match.end()
destination, end_idx = _extract_link_destination(message, destination_start)
if end_idx is None:
normalized_parts.append(message[destination_start:])
return "".join(normalized_parts)
already_wrapped = destination.startswith("<") and destination.endswith(">")
if destination and not already_wrapped:
destination = f"<{destination}>"
normalized_parts.append(destination)
normalized_parts.append(")")
cursor = end_idx + 1
normalized_parts.append(message[cursor:])
return "".join(normalized_parts)
def format_slack_message(message: str | None) -> str:
if message is None:
return ""
md = create_markdown(renderer=SlackRenderer(), plugins=["strikethrough"])
result = md(message)
normalized_message = _normalize_citation_link_destinations(message)
result = md(normalized_message)
# With HTMLRenderer, result is always str (not AST list)
assert isinstance(result, str)
return result
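
For example, a citation URL containing parentheses now survives parsing (calling the helper defined above):

message = "See [[1]](https://en.wikipedia.org/wiki/Foo_(bar)) for details."
normalized = _normalize_citation_link_destinations(message)
assert normalized == "See [[1]](<https://en.wikipedia.org/wiki/Foo_(bar)>) for details."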

View File

@@ -762,6 +762,43 @@ def download_webapp(
)
@router.get("/{session_id}/download-directory/{path:path}")
def download_directory(
session_id: UUID,
path: str,
user: User = Depends(current_user),
db_session: Session = Depends(get_session),
) -> Response:
"""
Download a directory as a zip file.
Returns the specified directory as a zip archive.
"""
user_id: UUID = user.id
session_manager = SessionManager(db_session)
try:
result = session_manager.download_directory(session_id, user_id, path)
except ValueError as e:
error_message = str(e)
if "path traversal" in error_message.lower():
raise HTTPException(status_code=403, detail="Access denied")
raise HTTPException(status_code=400, detail=error_message)
if result is None:
raise HTTPException(status_code=404, detail="Directory not found")
zip_bytes, filename = result
return Response(
content=zip_bytes,
media_type="application/zip",
headers={
"Content-Disposition": f'attachment; filename="{filename}"',
},
)
@router.post("/{session_id}/upload", response_model=UploadResponse)
def upload_file_endpoint(
session_id: UUID,
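
Client-side, the new download-directory route can be exercised roughly like this (base URL, route prefix, and auth cookie are all placeholders, not the project's actual values):

import requests

resp = requests.get(
    "https://onyx.example.com/api/<router-prefix>/<session_id>/download-directory/output",
    cookies={"session": "<auth-cookie>"},  # placeholder auth
    timeout=30,
)
resp.raise_for_status()
with open("output.zip", "wb") as fh:
    fh.write(resp.content)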

View File

@@ -107,27 +107,23 @@ def get_or_create_craft_connector(db_session: Session, user: User) -> tuple[int,
)
for cc_pair in cc_pairs:
if cc_pair.connector.source == DocumentSource.CRAFT_FILE:
if (
cc_pair.connector.source == DocumentSource.CRAFT_FILE
and cc_pair.creator_id == user.id
):
return cc_pair.connector.id, cc_pair.credential.id
# Check for orphaned connector (created but cc_pair creation failed previously)
# No cc_pair for this user — find or create the shared CRAFT_FILE connector
existing_connectors = fetch_connectors(
db_session, sources=[DocumentSource.CRAFT_FILE]
)
orphaned_connector = None
connector_id: int | None = None
for conn in existing_connectors:
if conn.name != USER_LIBRARY_CONNECTOR_NAME:
continue
if not conn.credentials:
orphaned_connector = conn
if conn.name == USER_LIBRARY_CONNECTOR_NAME:
connector_id = conn.id
break
if orphaned_connector:
connector_id = orphaned_connector.id
logger.info(
f"Found orphaned User Library connector {connector_id}, completing setup"
)
else:
if connector_id is None:
connector_data = ConnectorBase(
name=USER_LIBRARY_CONNECTOR_NAME,
source=DocumentSource.CRAFT_FILE,

View File

@@ -68,6 +68,7 @@ from onyx.server.features.build.db.sandbox import create_sandbox__no_commit
from onyx.server.features.build.db.sandbox import get_running_sandbox_count_by_tenant
from onyx.server.features.build.db.sandbox import get_sandbox_by_session_id
from onyx.server.features.build.db.sandbox import get_sandbox_by_user_id
from onyx.server.features.build.db.sandbox import get_snapshots_for_session
from onyx.server.features.build.db.sandbox import update_sandbox_heartbeat
from onyx.server.features.build.db.sandbox import update_sandbox_status__no_commit
from onyx.server.features.build.sandbox import get_sandbox_manager
@@ -646,16 +647,30 @@ class SessionManager:
if sandbox and sandbox.status.is_active():
# Quick health check to verify sandbox is actually responsive
if self._sandbox_manager.health_check(sandbox.id, timeout=5.0):
# AND verify the session workspace still exists on disk
# (it may have been wiped if the sandbox was re-provisioned)
is_healthy = self._sandbox_manager.health_check(sandbox.id, timeout=5.0)
workspace_exists = (
is_healthy
and self._sandbox_manager.session_workspace_exists(
sandbox.id, existing.id
)
)
if is_healthy and workspace_exists:
logger.info(
f"Returning existing empty session {existing.id} for user {user_id}"
)
return existing
else:
elif not is_healthy:
logger.warning(
f"Empty session {existing.id} has unhealthy sandbox {sandbox.id}. "
f"Deleting and creating fresh session."
)
else:
logger.warning(
f"Empty session {existing.id} workspace missing in sandbox "
f"{sandbox.id}. Deleting and creating fresh session."
)
else:
logger.warning(
f"Empty session {existing.id} has no active sandbox "
@@ -1035,6 +1050,23 @@ class SessionManager:
# workspace cleanup fails (e.g., if pod is already terminated)
logger.warning(f"Failed to cleanup session workspace {session_id}: {e}")
# Delete snapshot files from S3 before removing DB records
snapshots = get_snapshots_for_session(self._db_session, session_id)
if snapshots:
from onyx.file_store.file_store import get_default_file_store
from onyx.server.features.build.sandbox.manager.snapshot_manager import (
SnapshotManager,
)
snapshot_manager = SnapshotManager(get_default_file_store())
for snapshot in snapshots:
try:
snapshot_manager.delete_snapshot(snapshot.storage_path)
except Exception as e:
logger.warning(
f"Failed to delete snapshot file {snapshot.storage_path}: {e}"
)
# Delete session (uses flush, caller commits)
return delete_build_session__no_commit(session_id, user_id, self._db_session)
@@ -1903,6 +1935,94 @@ class SessionManager:
return zip_buffer.getvalue(), filename
def download_directory(
self,
session_id: UUID,
user_id: UUID,
path: str,
) -> tuple[bytes, str] | None:
"""
Create a zip file of an arbitrary directory in the session workspace.
Args:
session_id: The session UUID
user_id: The user ID to verify ownership
path: Relative path to the directory (within session workspace)
Returns:
Tuple of (zip_bytes, filename) or None if session not found
Raises:
ValueError: If path traversal attempted or path is not a directory
"""
# Verify session ownership
session = get_build_session(session_id, user_id, self._db_session)
if session is None:
return None
sandbox = get_sandbox_by_user_id(self._db_session, user_id)
if sandbox is None:
return None
# Check if directory exists
try:
self._sandbox_manager.list_directory(
sandbox_id=sandbox.id,
session_id=session_id,
path=path,
)
except ValueError:
return None
# Recursively collect all files
def collect_files(dir_path: str) -> list[tuple[str, str]]:
"""Collect all files recursively, returning (full_path, arcname) tuples."""
files: list[tuple[str, str]] = []
try:
entries = self._sandbox_manager.list_directory(
sandbox_id=sandbox.id,
session_id=session_id,
path=dir_path,
)
for entry in entries:
if entry.is_directory:
files.extend(collect_files(entry.path))
else:
# arcname is relative to the target directory
prefix_len = len(path) + 1 # +1 for the separating slash
arcname = entry.path[prefix_len:]
files.append((entry.path, arcname))
except ValueError:
pass
return files
file_list = collect_files(path)
# Create zip file in memory
zip_buffer = io.BytesIO()
with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zip_file:
for full_path, arcname in file_list:
try:
content = self._sandbox_manager.read_file(
sandbox_id=sandbox.id,
session_id=session_id,
path=full_path,
)
zip_file.writestr(arcname, content)
except ValueError:
pass
zip_buffer.seek(0)
# Use the directory name for the zip filename
dir_name = Path(path).name
safe_name = "".join(
c if c.isalnum() or c in ("-", "_", ".") else "_" for c in dir_name
)
filename = f"{safe_name}.zip"
return zip_buffer.getvalue(), filename
# =========================================================================
# File System Operations
# =========================================================================
@@ -1937,11 +2057,18 @@ class SessionManager:
return None
# Use sandbox manager to list directory (works for both local and K8s)
raw_entries = self._sandbox_manager.list_directory(
sandbox_id=sandbox.id,
session_id=session_id,
path=path,
)
# If the directory doesn't exist (e.g., session workspace not yet loaded),
# return an empty listing rather than erroring out.
try:
raw_entries = self._sandbox_manager.list_directory(
sandbox_id=sandbox.id,
session_id=session_id,
path=path,
)
except ValueError as e:
if "path traversal" in str(e).lower():
raise
return DirectoryListing(path=path, entries=[])
# Filter hidden files and directories
entries: list[FileSystemEntry] = [

View File

@@ -12,11 +12,18 @@ from pydantic import BaseModel
from sqlalchemy.orm import Session
from onyx.auth.users import current_user
from onyx.background.celery.tasks.user_file_processing.tasks import (
enqueue_user_file_project_sync_task,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
get_user_file_project_sync_queue_depth,
)
from onyx.background.celery.versioned_apps.client import app as client_app
from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryQueues
from onyx.configs.constants import OnyxCeleryTask
from onyx.configs.constants import PUBLIC_API_TAGS
from onyx.configs.constants import USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH
from onyx.db.engine.sql_engine import get_session
from onyx.db.enums import UserFileStatus
from onyx.db.models import ChatSession
@@ -27,6 +34,7 @@ from onyx.db.models import UserProject
from onyx.db.persona import get_personas_by_ids
from onyx.db.projects import get_project_token_count
from onyx.db.projects import upload_files_to_user_files_with_indexing
from onyx.redis.redis_pool import get_redis_client
from onyx.server.features.projects.models import CategorizedFilesSnapshot
from onyx.server.features.projects.models import ChatSessionRequest
from onyx.server.features.projects.models import TokenCountResponse
@@ -47,6 +55,33 @@ class UserFileDeleteResult(BaseModel):
assistant_names: list[str] = []
def _trigger_user_file_project_sync(user_file_id: UUID, tenant_id: str) -> None:
queue_depth = get_user_file_project_sync_queue_depth(client_app)
if queue_depth > USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH:
logger.warning(
f"Skipping immediate project sync for user_file_id={user_file_id} due to "
f"queue depth {queue_depth}>{USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH}. "
"It will be picked up by beat later."
)
return
redis_client = get_redis_client(tenant_id=tenant_id)
enqueued = enqueue_user_file_project_sync_task(
celery_app=client_app,
redis_client=redis_client,
user_file_id=user_file_id,
tenant_id=tenant_id,
priority=OnyxCeleryPriority.HIGHEST,
)
if not enqueued:
logger.info(
f"Skipped duplicate project sync enqueue for user_file_id={user_file_id}"
)
return
logger.info(f"Triggered project sync for user_file_id={user_file_id}")
@router.get("", tags=PUBLIC_API_TAGS)
def get_projects(
user: User = Depends(current_user),
@@ -189,15 +224,7 @@ def unlink_user_file_from_project(
db_session.commit()
tenant_id = get_current_tenant_id()
task = client_app.send_task(
OnyxCeleryTask.PROCESS_SINGLE_USER_FILE_PROJECT_SYNC,
kwargs={"user_file_id": user_file.id, "tenant_id": tenant_id},
queue=OnyxCeleryQueues.USER_FILE_PROJECT_SYNC,
priority=OnyxCeleryPriority.HIGHEST,
)
logger.info(
f"Triggered project sync for user_file_id={user_file.id} with task_id={task.id}"
)
_trigger_user_file_project_sync(user_file.id, tenant_id)
return Response(status_code=204)
@@ -241,15 +268,7 @@ def link_user_file_to_project(
db_session.commit()
tenant_id = get_current_tenant_id()
task = client_app.send_task(
OnyxCeleryTask.PROCESS_SINGLE_USER_FILE_PROJECT_SYNC,
kwargs={"user_file_id": user_file.id, "tenant_id": tenant_id},
queue=OnyxCeleryQueues.USER_FILE_PROJECT_SYNC,
priority=OnyxCeleryPriority.HIGHEST,
)
logger.info(
f"Triggered project sync for user_file_id={user_file.id} with task_id={task.id}"
)
_trigger_user_file_project_sync(user_file.id, tenant_id)
return UserFileSnapshot.from_model(user_file)

View File

@@ -0,0 +1,47 @@
from fastapi import APIRouter
from fastapi import Depends
from sqlalchemy.orm import Session
from onyx.auth.users import current_admin_user
from onyx.db.code_interpreter import fetch_code_interpreter_server
from onyx.db.code_interpreter import update_code_interpreter_server_enabled
from onyx.db.engine.sql_engine import get_session
from onyx.db.models import User
from onyx.server.manage.code_interpreter.models import CodeInterpreterServer
from onyx.server.manage.code_interpreter.models import CodeInterpreterServerHealth
from onyx.tools.tool_implementations.python.code_interpreter_client import (
CodeInterpreterClient,
)
admin_router = APIRouter(prefix="/admin/code-interpreter")
@admin_router.get("/health")
def get_code_interpreter_health(
_: User = Depends(current_admin_user),
) -> CodeInterpreterServerHealth:
try:
client = CodeInterpreterClient()
return CodeInterpreterServerHealth(healthy=client.health())
except ValueError:
return CodeInterpreterServerHealth(healthy=False)
@admin_router.get("")
def get_code_interpreter(
_: User = Depends(current_admin_user), db_session: Session = Depends(get_session)
) -> CodeInterpreterServer:
ci_server = fetch_code_interpreter_server(db_session)
return CodeInterpreterServer(enabled=ci_server.server_enabled)
@admin_router.put("")
def update_code_interpreter(
update: CodeInterpreterServer,
_: User = Depends(current_admin_user),
db_session: Session = Depends(get_session),
) -> None:
update_code_interpreter_server_enabled(
db_session=db_session,
enabled=update.enabled,
)
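# Illustrative admin-client usage of the three routes above (base URL and
# auth header are assumptions for the sketch; the integration tests further
# below use the test harness's admin headers instead):
#
#   import requests
#
#   BASE_URL = "http://localhost:8080/admin/code-interpreter"  # deployment-specific
#   HEADERS = {"Authorization": "Bearer <admin-api-key>"}      # assumed auth scheme
#
#   requests.get(f"{BASE_URL}/health", headers=HEADERS).json()  # {"healthy": ...}
#   requests.put(BASE_URL, json={"enabled": False}, headers=HEADERS)
#   requests.get(BASE_URL, headers=HEADERS).json()              # {"enabled": false}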

View File

@@ -0,0 +1,9 @@
from pydantic import BaseModel
class CodeInterpreterServer(BaseModel):
enabled: bool
class CodeInterpreterServerHealth(BaseModel):
healthy: bool

View File

@@ -35,6 +35,18 @@ if TYPE_CHECKING:
pass
class EmailInviteStatus(str, Enum):
SENT = "SENT"
NOT_CONFIGURED = "NOT_CONFIGURED"
SEND_FAILED = "SEND_FAILED"
DISABLED = "DISABLED"
class BulkInviteResponse(BaseModel):
invited_count: int
email_invite_status: EmailInviteStatus
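# Example serialized response (illustrative values):
#   {"invited_count": 3, "email_invite_status": "SENT"}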
class VersionResponse(BaseModel):
backend_version: str

View File

@@ -36,6 +36,7 @@ from onyx.configs.app_configs import AUTH_BACKEND
from onyx.configs.app_configs import AUTH_TYPE
from onyx.configs.app_configs import AuthBackend
from onyx.configs.app_configs import DEV_MODE
from onyx.configs.app_configs import EMAIL_CONFIGURED
from onyx.configs.app_configs import ENABLE_EMAIL_INVITES
from onyx.configs.app_configs import NUM_FREE_TRIAL_USER_INVITES
from onyx.configs.app_configs import REDIS_AUTH_KEY_PREFIX
@@ -78,8 +79,10 @@ from onyx.server.documents.models import PaginatedReturn
from onyx.server.features.projects.models import UserFileSnapshot
from onyx.server.manage.models import AllUsersResponse
from onyx.server.manage.models import AutoScrollRequest
from onyx.server.manage.models import BulkInviteResponse
from onyx.server.manage.models import ChatBackgroundRequest
from onyx.server.manage.models import DefaultAppModeRequest
from onyx.server.manage.models import EmailInviteStatus
from onyx.server.manage.models import MemoryItem
from onyx.server.manage.models import PersonalizationUpdateRequest
from onyx.server.manage.models import TenantInfo
@@ -368,7 +371,7 @@ def bulk_invite_users(
emails: list[str] = Body(..., embed=True),
current_user: User = Depends(current_admin_user),
db_session: Session = Depends(get_session),
) -> int:
) -> BulkInviteResponse:
"""emails are string validated. If any email fails validation, no emails are
invited and an exception is raised."""
tenant_id = get_current_tenant_id()
@@ -427,34 +430,41 @@ def bulk_invite_users(
number_of_invited_users = write_invited_users(all_emails)
# send out email invitations only to new users (not already invited or existing)
if ENABLE_EMAIL_INVITES:
if not ENABLE_EMAIL_INVITES:
email_invite_status = EmailInviteStatus.DISABLED
elif not EMAIL_CONFIGURED:
email_invite_status = EmailInviteStatus.NOT_CONFIGURED
else:
try:
for email in emails_needing_seats:
send_user_email_invite(email, current_user, AUTH_TYPE)
email_invite_status = EmailInviteStatus.SENT
except Exception as e:
logger.error(f"Error sending email invite to invited users: {e}")
email_invite_status = EmailInviteStatus.SEND_FAILED
if not MULTI_TENANT or DEV_MODE:
return number_of_invited_users
if MULTI_TENANT and not DEV_MODE:
# for billing purposes, write to the control plane about the number of new users
try:
logger.info("Registering tenant users")
fetch_ee_implementation_or_noop(
"onyx.server.tenants.billing", "register_tenant_users", None
)(tenant_id, get_live_users_count(db_session))
except Exception as e:
logger.error(f"Failed to register tenant users: {str(e)}")
logger.info(
"Reverting changes: removing users from tenant and resetting invited users"
)
write_invited_users(initial_invited_users) # Reset to original state
fetch_ee_implementation_or_noop(
"onyx.server.tenants.user_mapping", "remove_users_from_tenant", None
)(new_invited_emails, tenant_id)
raise e
# for billing purposes, write to the control plane about the number of new users
try:
logger.info("Registering tenant users")
fetch_ee_implementation_or_noop(
"onyx.server.tenants.billing", "register_tenant_users", None
)(tenant_id, get_live_users_count(db_session))
return number_of_invited_users
except Exception as e:
logger.error(f"Failed to register tenant users: {str(e)}")
logger.info(
"Reverting changes: removing users from tenant and resetting invited users"
)
write_invited_users(initial_invited_users) # Reset to original state
fetch_ee_implementation_or_noop(
"onyx.server.tenants.user_mapping", "remove_users_from_tenant", None
)(new_invited_emails, tenant_id)
raise e
return BulkInviteResponse(
invited_count=number_of_invited_users,
email_invite_status=email_invite_status,
)
@router.patch("/manage/admin/remove-invited-user", tags=PUBLIC_API_TAGS)

View File

@@ -587,6 +587,7 @@ def handle_send_chat_message(
request.headers
),
mcp_headers=chat_message_req.mcp_headers,
additional_context=chat_message_req.additional_context,
external_state_container=state_container,
)
result = gather_stream_full(packets, state_container)
@@ -609,6 +610,7 @@ def handle_send_chat_message(
request.headers
),
mcp_headers=chat_message_req.mcp_headers,
additional_context=chat_message_req.additional_context,
external_state_container=state_container,
):
yield get_json_line(obj.model_dump())

View File

@@ -125,6 +125,11 @@ class SendMessageRequest(BaseModel):
# - No CitationInfo packets are emitted during streaming
include_citations: bool = True
# Additional context injected into the LLM call but NOT stored in the DB
# (not shown in chat history). Used e.g. by the Chrome extension to pass
# the current tab URL when "Read this tab" is enabled.
additional_context: str | None = None
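# Illustrative request body from such a client (values are examples only):
#   {"message": "Summarize this page",
#    "chat_session_id": "<existing-session-uuid>",
#    "additional_context": "URL: https://example.com\n<page text>"}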
@model_validator(mode="after")
def check_chat_session_id_or_info(self) -> "SendMessageRequest":
# If neither is provided, default to creating a new chat session using the

View File

@@ -1,5 +1,8 @@
import json
from collections.abc import Generator
from typing import Literal
from typing import TypedDict
from typing import Union
import requests
from pydantic import BaseModel
@@ -36,6 +39,39 @@ class ExecuteResponse(BaseModel):
files: list[WorkspaceFile]
class StreamOutputEvent(BaseModel):
"""SSE 'output' event: a chunk of stdout or stderr"""
stream: Literal["stdout", "stderr"]
data: str
class StreamResultEvent(BaseModel):
"""SSE 'result' event: final execution result"""
exit_code: int | None
timed_out: bool
duration_ms: int
files: list[WorkspaceFile]
class StreamErrorEvent(BaseModel):
"""SSE 'error' event: execution-level error"""
message: str
StreamEvent = Union[StreamOutputEvent, StreamResultEvent, StreamErrorEvent]
_SSE_EVENT_MAP: dict[
str, type[StreamOutputEvent | StreamResultEvent | StreamErrorEvent]
] = {
"output": StreamOutputEvent,
"result": StreamResultEvent,
"error": StreamErrorEvent,
}
class CodeInterpreterClient:
"""Client for Code Interpreter service"""
@@ -45,6 +81,34 @@ class CodeInterpreterClient:
self.base_url = base_url.rstrip("/")
self.session = requests.Session()
def _build_payload(
self,
code: str,
stdin: str | None,
timeout_ms: int,
files: list[FileInput] | None,
) -> dict:
payload: dict = {
"code": code,
"timeout_ms": timeout_ms,
}
if stdin is not None:
payload["stdin"] = stdin
if files:
payload["files"] = files
return payload
def health(self) -> bool:
"""Check if the Code Interpreter service is healthy"""
url = f"{self.base_url}/health"
try:
response = self.session.get(url, timeout=5)
response.raise_for_status()
return response.json().get("status") == "ok"
except Exception as e:
logger.warning(f"Exception caught when checking health, e={e}")
return False
def execute(
self,
code: str,
@@ -52,25 +116,110 @@ class CodeInterpreterClient:
timeout_ms: int = 30000,
files: list[FileInput] | None = None,
) -> ExecuteResponse:
"""Execute Python code"""
"""Execute Python code (batch)"""
url = f"{self.base_url}/v1/execute"
payload = {
"code": code,
"timeout_ms": timeout_ms,
}
if stdin is not None:
payload["stdin"] = stdin
if files:
payload["files"] = files
payload = self._build_payload(code, stdin, timeout_ms, files)
response = self.session.post(url, json=payload, timeout=timeout_ms / 1000 + 10)
response.raise_for_status()
return ExecuteResponse(**response.json())
def execute_streaming(
self,
code: str,
stdin: str | None = None,
timeout_ms: int = 30000,
files: list[FileInput] | None = None,
) -> Generator[StreamEvent, None, None]:
"""Execute Python code with streaming SSE output.
Yields StreamEvent objects (StreamOutputEvent, StreamResultEvent,
StreamErrorEvent) as execution progresses. Falls back to batch
execution if the streaming endpoint is not available (older
code-interpreter versions).
"""
url = f"{self.base_url}/v1/execute/stream"
payload = self._build_payload(code, stdin, timeout_ms, files)
response = self.session.post(
url,
json=payload,
stream=True,
timeout=timeout_ms / 1000 + 10,
)
if response.status_code == 404:
logger.info(
"Streaming endpoint not available, falling back to batch execution"
)
response.close()
yield from self._batch_as_stream(code, stdin, timeout_ms, files)
return
response.raise_for_status()
yield from self._parse_sse(response)
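# Minimal consumption sketch (illustrative; PythonTool further below is the
# real caller):
#
#   client = CodeInterpreterClient()
#   for event in client.execute_streaming(code='print("hi")'):
#       if isinstance(event, StreamOutputEvent):
#           print(event.stream, event.data, end="")
#       elif isinstance(event, StreamResultEvent):
#           print("exit:", event.exit_code, "timed_out:", event.timed_out)
#       elif isinstance(event, StreamErrorEvent):
#           raise RuntimeError(event.message)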
def _parse_sse(
self, response: requests.Response
) -> Generator[StreamEvent, None, None]:
"""Parse SSE streaming response into StreamEvent objects.
Expected format per event:
event: <type>
data: <json>
<blank line>
"""
event_type: str | None = None
data_lines: list[str] = []
for line in response.iter_lines(decode_unicode=True):
if line is None:
continue
if line == "":
# Blank line marks end of an SSE event
if event_type is not None and data_lines:
data = "\n".join(data_lines)
model_cls = _SSE_EVENT_MAP.get(event_type)
if model_cls is not None:
yield model_cls(**json.loads(data))
else:
logger.warning(f"Unknown SSE event type: {event_type}")
event_type = None
data_lines = []
elif line.startswith("event:"):
event_type = line[len("event:") :].strip()
elif line.startswith("data:"):
data_lines.append(line[len("data:") :].strip())
if event_type is not None or data_lines:
logger.warning(
f"SSE stream ended with incomplete event: "
f"event_type={event_type}, data_lines={data_lines}"
)
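# Illustrative stream this parser accepts (two events, values are examples):
#
#   event: output
#   data: {"stream": "stdout", "data": "hi\n"}
#
#   event: result
#   data: {"exit_code": 0, "timed_out": false, "duration_ms": 12, "files": []}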
def _batch_as_stream(
self,
code: str,
stdin: str | None,
timeout_ms: int,
files: list[FileInput] | None,
) -> Generator[StreamEvent, None, None]:
"""Execute via batch endpoint and yield results as stream events."""
result = self.execute(code, stdin, timeout_ms, files)
if result.stdout:
yield StreamOutputEvent(stream="stdout", data=result.stdout)
if result.stderr:
yield StreamOutputEvent(stream="stderr", data=result.stderr)
yield StreamResultEvent(
exit_code=result.exit_code,
timed_out=result.timed_out,
duration_ms=result.duration_ms,
files=result.files,
)
def upload_file(self, file_content: bytes, filename: str) -> str:
"""Upload file to Code Interpreter and return file_id"""
url = f"{self.base_url}/v1/files"

View File

@@ -28,6 +28,15 @@ from onyx.tools.tool_implementations.python.code_interpreter_client import (
CodeInterpreterClient,
)
from onyx.tools.tool_implementations.python.code_interpreter_client import FileInput
from onyx.tools.tool_implementations.python.code_interpreter_client import (
StreamErrorEvent,
)
from onyx.tools.tool_implementations.python.code_interpreter_client import (
StreamOutputEvent,
)
from onyx.tools.tool_implementations.python.code_interpreter_client import (
StreamResultEvent,
)
from onyx.utils.logger import setup_logger
@@ -181,19 +190,50 @@ class PythonTool(Tool[PythonToolOverrideKwargs]):
try:
logger.debug(f"Executing code: {code}")
# Execute code with timeout
response = client.execute(
# Execute code with streaming (falls back to batch if unavailable)
stdout_parts: list[str] = []
stderr_parts: list[str] = []
result_event: StreamResultEvent | None = None
for event in client.execute_streaming(
code=code,
timeout_ms=CODE_INTERPRETER_DEFAULT_TIMEOUT_MS,
files=files_to_stage or None,
)
):
if isinstance(event, StreamOutputEvent):
if event.stream == "stdout":
stdout_parts.append(event.data)
else:
stderr_parts.append(event.data)
# Emit incremental delta to frontend
self.emitter.emit(
Packet(
placement=placement,
obj=PythonToolDelta(
stdout=event.data if event.stream == "stdout" else "",
stderr=event.data if event.stream == "stderr" else "",
),
)
)
elif isinstance(event, StreamResultEvent):
result_event = event
elif isinstance(event, StreamErrorEvent):
raise RuntimeError(f"Code interpreter error: {event.message}")
if result_event is None:
raise RuntimeError(
"Code interpreter stream ended without a result event"
)
full_stdout = "".join(stdout_parts)
full_stderr = "".join(stderr_parts)
# Truncate output for LLM consumption
truncated_stdout = _truncate_output(
response.stdout, CODE_INTERPRETER_MAX_OUTPUT_LENGTH, "stdout"
full_stdout, CODE_INTERPRETER_MAX_OUTPUT_LENGTH, "stdout"
)
truncated_stderr = _truncate_output(
response.stderr, CODE_INTERPRETER_MAX_OUTPUT_LENGTH, "stderr"
full_stderr, CODE_INTERPRETER_MAX_OUTPUT_LENGTH, "stderr"
)
# Handle generated files
@@ -202,7 +242,7 @@ class PythonTool(Tool[PythonToolOverrideKwargs]):
file_ids_to_cleanup: list[str] = []
file_store = get_default_file_store()
for workspace_file in response.files:
for workspace_file in result_event.files:
if workspace_file.kind != "file" or not workspace_file.file_id:
continue
@@ -258,26 +298,23 @@ class PythonTool(Tool[PythonToolOverrideKwargs]):
f"Failed to delete Code Interpreter staged file {file_mapping['file_id']}: {e}"
)
# Emit delta with stdout/stderr and generated files
self.emitter.emit(
Packet(
placement=placement,
obj=PythonToolDelta(
stdout=truncated_stdout,
stderr=truncated_stderr,
file_ids=generated_file_ids,
),
# Emit file_ids once files are processed
if generated_file_ids:
self.emitter.emit(
Packet(
placement=placement,
obj=PythonToolDelta(file_ids=generated_file_ids),
)
)
)
# Build result
result = LlmPythonExecutionResult(
stdout=truncated_stdout,
stderr=truncated_stderr,
exit_code=response.exit_code,
timed_out=response.timed_out,
exit_code=result_event.exit_code,
timed_out=result_event.timed_out,
generated_files=generated_files,
error=None if response.exit_code == 0 else truncated_stderr,
error=None if result_event.exit_code == 0 else truncated_stderr,
)
# Serialize result for LLM

View File

@@ -6,6 +6,8 @@ aioboto3==15.1.0
# via onyx
aiobotocore==2.24.0
# via aioboto3
aiofile==3.9.0
# via py-key-value-aio
aiofiles==25.1.0
# via
# aioboto3
@@ -40,8 +42,10 @@ anyio==4.11.0
# httpx
# mcp
# openai
# py-key-value-aio
# sse-starlette
# starlette
# watchfiles
argon2-cffi==23.1.0
# via pwdlib
argon2-cffi-bindings==25.1.0
@@ -74,9 +78,7 @@ backports-tarfile==1.2.0 ; python_full_version < '3.12'
bcrypt==4.3.0
# via pwdlib
beartype==0.22.6
# via
# py-key-value-aio
# py-key-value-shared
# via py-key-value-aio
beautifulsoup4==4.12.3
# via
# atlassian-python-api
@@ -110,6 +112,8 @@ cachetools==6.2.2
# via
# google-auth
# py-key-value-aio
caio==0.9.25
# via aiofile
celery==5.5.1
# via onyx
certifi==2025.11.12
@@ -170,7 +174,6 @@ cloudpickle==3.1.2
# via
# dask
# distributed
# pydocket
cobble==0.1.4
# via mammoth
cohere==5.6.1
@@ -218,8 +221,6 @@ deprecated==1.3.1
# pygithub
discord-py==2.4.0
# via onyx
diskcache==5.6.3
# via py-key-value-aio
distributed==2026.1.1
# via onyx
distro==1.9.0
@@ -256,8 +257,6 @@ exceptiongroup==1.3.0
# via
# braintrust
# fastmcp
fakeredis==2.33.0
# via pydocket
fastapi==0.128.0
# via
# fastapi-limiter
@@ -273,7 +272,7 @@ fastapi-users-db-sqlalchemy==7.0.0
# via onyx
fastavro==1.12.1
# via cohere
fastmcp==2.14.2
fastmcp==3.0.2
# via onyx
fastuuid==0.14.0
# via litellm
@@ -478,7 +477,9 @@ jsonpatch==1.33
jsonpointer==3.0.0
# via jsonpatch
jsonref==1.1.0
# via onyx
# via
# fastmcp
# onyx
jsonschema==4.25.1
# via
# litellm
@@ -513,8 +514,6 @@ locket==1.0.0
# via
# distributed
# partd
lupa==2.6
# via fakeredis
lxml==5.3.0
# via
# htmldate
@@ -556,7 +555,7 @@ marshmallow==3.26.2
# via dataclasses-json
matrix-client==0.3.2
# via zulip
mcp==1.25.0
mcp==1.26.0
# via
# claude-agent-sdk
# fastmcp
@@ -613,7 +612,7 @@ oauthlib==3.2.2
# kubernetes
# onyx
# requests-oauthlib
office365-rest-python-client==2.5.9
office365-rest-python-client==2.6.2
# via onyx
olefile==0.47
# via
@@ -642,22 +641,16 @@ opensearch-py==3.0.0
opentelemetry-api==1.39.1
# via
# ddtrace
# fastmcp
# langfuse
# openinference-instrumentation
# opentelemetry-exporter-otlp-proto-http
# opentelemetry-exporter-prometheus
# opentelemetry-instrumentation
# opentelemetry-sdk
# opentelemetry-semantic-conventions
# pydocket
opentelemetry-exporter-otlp-proto-common==1.39.1
# via opentelemetry-exporter-otlp-proto-http
opentelemetry-exporter-otlp-proto-http==1.39.1
# via langfuse
opentelemetry-exporter-prometheus==0.60b1
# via pydocket
opentelemetry-instrumentation==0.60b1
# via pydocket
opentelemetry-proto==1.39.1
# via
# onyx
@@ -668,17 +661,15 @@ opentelemetry-sdk==1.39.1
# langfuse
# openinference-instrumentation
# opentelemetry-exporter-otlp-proto-http
# opentelemetry-exporter-prometheus
opentelemetry-semantic-conventions==0.60b1
# via
# opentelemetry-instrumentation
# opentelemetry-sdk
# via opentelemetry-sdk
orjson==3.11.4 ; platform_python_implementation != 'PyPy'
# via langsmith
packaging==24.2
# via
# dask
# distributed
# fastmcp
# google-cloud-aiplatform
# google-cloud-bigquery
# huggingface-hub
@@ -689,7 +680,6 @@ packaging==24.2
# langsmith
# marshmallow
# onnxruntime
# opentelemetry-instrumentation
# pytest
# pywikibot
pandas==2.3.3
@@ -702,8 +692,6 @@ passlib==1.7.4
# via onyx
pathable==0.4.4
# via jsonschema-path
pathvalidate==3.3.1
# via py-key-value-aio
pdfminer-six==20251107
# via markitdown
pillow==12.1.1
@@ -723,9 +711,7 @@ ply==3.11
prometheus-client==0.23.1
# via
# onyx
# opentelemetry-exporter-prometheus
# prometheus-fastapi-instrumentator
# pydocket
prometheus-fastapi-instrumentator==7.1.0
# via onyx
prompt-toolkit==3.0.52
@@ -764,12 +750,8 @@ pwdlib==0.3.0
# via fastapi-users
py==1.11.0
# via retry
py-key-value-aio==0.3.0
# via
# fastmcp
# pydocket
py-key-value-shared==0.3.0
# via py-key-value-aio
py-key-value-aio==0.4.4
# via fastmcp
pyairtable==3.0.1
# via onyx
pyasn1==0.6.2
@@ -806,8 +788,6 @@ pydantic-core==2.33.2
# via pydantic
pydantic-settings==2.12.0
# via mcp
pydocket==0.16.3
# via fastmcp
pyee==13.0.0
# via playwright
pygithub==2.5.0
@@ -879,8 +859,6 @@ python-http-client==3.3.7
# via sendgrid
python-iso639==2025.11.16
# via unstructured
python-json-logger==4.0.0
# via pydocket
python-magic==0.4.27
# via unstructured
python-multipart==0.0.22
@@ -918,6 +896,7 @@ pyyaml==6.0.3
# via
# dask
# distributed
# fastmcp
# huggingface-hub
# jsonschema-path
# kubernetes
@@ -928,11 +907,8 @@ rapidfuzz==3.13.0
# unstructured
redis==5.0.8
# via
# fakeredis
# fastapi-limiter
# onyx
# py-key-value-aio
# pydocket
referencing==0.36.2
# via
# jsonschema
@@ -1007,7 +983,6 @@ rich==14.2.0
# via
# cyclopts
# fastmcp
# pydocket
# rich-rst
# typer
rich-rst==1.3.2
@@ -1056,9 +1031,7 @@ sniffio==1.3.1
# anyio
# openai
sortedcontainers==2.4.0
# via
# distributed
# fakeredis
# via distributed
soupsieve==2.8
# via beautifulsoup4
sqlalchemy==2.0.15
@@ -1124,9 +1097,7 @@ tqdm==4.67.1
trafilatura==1.12.2
# via onyx
typer==0.20.0
# via
# mcp
# pydocket
# via mcp
types-awscrt==0.28.4
# via botocore-stubs
types-openpyxl==3.0.4.7
@@ -1162,11 +1133,10 @@ typing-extensions==4.15.0
# opentelemetry-exporter-otlp-proto-http
# opentelemetry-sdk
# opentelemetry-semantic-conventions
# py-key-value-shared
# py-key-value-aio
# pyairtable
# pydantic
# pydantic-core
# pydocket
# pyee
# pygithub
# python-docx
@@ -1234,6 +1204,8 @@ vine==5.1.0
# kombu
voyageai==0.2.3
# via onyx
watchfiles==1.1.1
# via fastmcp
wcwidth==0.2.14
# via prompt-toolkit
webencodings==0.5.1
@@ -1254,7 +1226,6 @@ wrapt==1.17.3
# deprecated
# langfuse
# openinference-instrumentation
# opentelemetry-instrumentation
# unstructured
xlrd==2.0.2
# via markitdown

View File

@@ -288,7 +288,7 @@ matplotlib-inline==0.2.1
# via
# ipykernel
# ipython
mcp==1.25.0
mcp==1.26.0
# via claude-agent-sdk
multidict==6.7.0
# via
@@ -317,7 +317,7 @@ oauthlib==3.2.2
# via
# kubernetes
# requests-oauthlib
onyx-devtools==0.6.0
onyx-devtools==0.6.1
# via onyx
openai==2.14.0
# via

View File

@@ -211,7 +211,7 @@ litellm==1.81.6
# via onyx
markupsafe==3.0.3
# via jinja2
mcp==1.25.0
mcp==1.26.0
# via claude-agent-sdk
monotonic==1.6
# via posthog

View File

@@ -246,7 +246,7 @@ litellm==1.81.6
# via onyx
markupsafe==3.0.3
# via jinja2
mcp==1.25.0
mcp==1.26.0
# via claude-agent-sdk
mpmath==1.3.0
# via sympy

View File

@@ -3,8 +3,8 @@ set -e
cleanup() {
echo "Error occurred. Cleaning up..."
docker stop onyx_postgres onyx_vespa onyx_redis onyx_minio 2>/dev/null || true
docker rm onyx_postgres onyx_vespa onyx_redis onyx_minio 2>/dev/null || true
docker stop onyx_postgres onyx_vespa onyx_redis onyx_minio onyx_code_interpreter 2>/dev/null || true
docker rm onyx_postgres onyx_vespa onyx_redis onyx_minio onyx_code_interpreter 2>/dev/null || true
}
# Trap errors and output a message, then cleanup
@@ -20,8 +20,8 @@ MINIO_VOLUME=${4:-""} # Default is empty if not provided
# Stop and remove the existing containers
echo "Stopping and removing existing containers..."
docker stop onyx_postgres onyx_vespa onyx_redis onyx_minio 2>/dev/null || true
docker rm onyx_postgres onyx_vespa onyx_redis onyx_minio 2>/dev/null || true
docker stop onyx_postgres onyx_vespa onyx_redis onyx_minio onyx_code_interpreter 2>/dev/null || true
docker rm onyx_postgres onyx_vespa onyx_redis onyx_minio onyx_code_interpreter 2>/dev/null || true
# Start the PostgreSQL container with optional volume
echo "Starting PostgreSQL container..."
@@ -55,6 +55,10 @@ else
docker run --detach --name onyx_minio --publish 9004:9000 --publish 9005:9001 -e MINIO_ROOT_USER=minioadmin -e MINIO_ROOT_PASSWORD=minioadmin minio/minio server /data --console-address ":9001"
fi
# Start the Code Interpreter container
echo "Starting Code Interpreter container..."
docker run --detach --name onyx_code_interpreter --publish 8000:8000 --user root -v /var/run/docker.sock:/var/run/docker.sock onyxdotapp/code-interpreter:latest bash ./entrypoint.sh code-interpreter-api
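# Sanity check (illustrative): once the container is up, the health endpoint
# should respond on the published port:
#   curl -sf http://localhost:8000/health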
# Ensure alembic runs in the correct directory (backend/)
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PARENT_DIR="$(dirname "$SCRIPT_DIR")"

View File

@@ -9,6 +9,7 @@ from collections.abc import AsyncGenerator
from collections.abc import Generator
from contextlib import asynccontextmanager
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from dotenv import load_dotenv
@@ -46,11 +47,15 @@ def mock_current_admin_user() -> MagicMock:
@pytest.fixture(scope="function")
def client() -> Generator[TestClient, None, None]:
# Initialize TestClient with the FastAPI app using a no-op test lifespan
# Initialize TestClient with the FastAPI app using a no-op test lifespan.
# Patch out prometheus metrics setup to avoid "Duplicated timeseries in
# CollectorRegistry" errors when multiple tests each create a new app
# (prometheus registers metrics globally and rejects duplicate names).
get_app = fetch_versioned_implementation(
module="onyx.main", attribute="get_application"
)
app: FastAPI = get_app(lifespan_override=test_lifespan)
with patch("onyx.main.setup_prometheus_metrics"):
app: FastAPI = get_app(lifespan_override=test_lifespan)
# Override the database session dependency with a mock
# (these tests don't actually need DB access)

View File

@@ -990,6 +990,27 @@ class _MockCIHandler(BaseHTTPRequestHandler):
self._respond_json(
200, {"file_id": f"mock-ci-file-{self.server._file_counter}"}
)
elif self.path == "/v1/execute/stream":
if self.server.streaming_enabled:
self._respond_sse(
[
(
"output",
{"stream": "stdout", "data": "mock output\n"},
),
(
"result",
{
"exit_code": 0,
"timed_out": False,
"duration_ms": 50,
"files": [],
},
),
]
)
else:
self._respond_json(404, {"error": "not found"})
elif self.path == "/v1/execute":
self._respond_json(
200,
@@ -1027,6 +1048,17 @@ class _MockCIHandler(BaseHTTPRequestHandler):
self.end_headers()
self.wfile.write(payload)
def _respond_sse(self, events: list[tuple[str, dict[str, Any]]]) -> None:
frames = []
for event_type, data in events:
frames.append(f"event: {event_type}\ndata: {json.dumps(data)}\n\n")
payload = "".join(frames).encode()
self.send_response(200)
self.send_header("Content-Type", "text/event-stream")
self.send_header("Content-Length", str(len(payload)))
self.end_headers()
self.wfile.write(payload)
def log_message(self, format: str, *args: Any) -> None: # noqa: A002
pass
@@ -1038,6 +1070,7 @@ class MockCodeInterpreterServer(HTTPServer):
super().__init__(("localhost", 0), _MockCIHandler)
self.captured_requests: list[CapturedRequest] = []
self._file_counter = 0
self.streaming_enabled: bool = True
@property
def url(self) -> str:
@@ -1168,17 +1201,19 @@ def test_code_interpreter_receives_chat_files(
finally:
ci_mod.CodeInterpreterClient.__init__.__defaults__ = original_defaults
# Verify: file uploaded, code executed, staged file cleaned up
# Verify: file uploaded, code executed via streaming, staged file cleaned up
assert len(mock_ci_server.get_requests(method="POST", path="/v1/files")) == 1
assert len(mock_ci_server.get_requests(method="POST", path="/v1/execute")) == 1
assert (
len(mock_ci_server.get_requests(method="POST", path="/v1/execute/stream")) == 1
)
delete_requests = mock_ci_server.get_requests(method="DELETE")
assert len(delete_requests) == 1
assert delete_requests[0].path.startswith("/v1/files/")
execute_body = mock_ci_server.get_requests(method="POST", path="/v1/execute")[
0
].json_body()
execute_body = mock_ci_server.get_requests(
method="POST", path="/v1/execute/stream"
)[0].json_body()
assert execute_body["code"] == code
assert len(execute_body["files"]) == 1
assert execute_body["files"][0]["path"] == "data.csv"
@@ -1284,7 +1319,9 @@ def test_code_interpreter_replay_packets_include_code_and_output(
db_session=db_session,
)
assert len(mock_ci_server.get_requests(method="POST", path="/v1/execute")) == 1
assert (
len(mock_ci_server.get_requests(method="POST", path="/v1/execute/stream")) == 1
)
# The response contains `packets` — a list of packet-lists, one per
# assistant message. We should have exactly one assistant message.
@@ -1313,3 +1350,76 @@ def test_code_interpreter_replay_packets_include_code_and_output(
delta_obj = delta_packets[0].obj
assert isinstance(delta_obj, PythonToolDelta)
assert "mock output" in delta_obj.stdout
def test_code_interpreter_streaming_fallback_to_batch(
db_session: Session,
mock_ci_server: MockCodeInterpreterServer,
_attach_python_tool_to_default_persona: None,
initialize_file_store: None, # noqa: ARG001
) -> None:
"""When the streaming endpoint is not available (older code-interpreter),
execute_streaming should fall back to the batch /v1/execute endpoint."""
mock_ci_server.captured_requests.clear()
mock_ci_server._file_counter = 0
mock_ci_server.streaming_enabled = False
mock_url = mock_ci_server.url
user = create_test_user(db_session, "ci_fallback_test")
chat_session = create_chat_session(db_session=db_session, user=user)
code = 'print("fallback test")'
msg_req = SendMessageRequest(
message="Print fallback test",
chat_session_id=chat_session.id,
stream=True,
)
original_defaults = ci_mod.CodeInterpreterClient.__init__.__defaults__
with (
use_mock_llm() as mock_llm,
patch(
"onyx.tools.tool_implementations.python.python_tool.CODE_INTERPRETER_BASE_URL",
mock_url,
),
patch(
"onyx.tools.tool_implementations.python.code_interpreter_client.CODE_INTERPRETER_BASE_URL",
mock_url,
),
):
mock_llm.add_response(
LLMToolCallResponse(
tool_name="python",
tool_call_id="call_fallback",
tool_call_argument_tokens=[json.dumps({"code": code})],
)
)
mock_llm.forward_till_end()
ci_mod.CodeInterpreterClient.__init__.__defaults__ = (mock_url,)
try:
packets = list(
handle_stream_message_objects(
new_msg_req=msg_req, user=user, db_session=db_session
)
)
finally:
ci_mod.CodeInterpreterClient.__init__.__defaults__ = original_defaults
mock_ci_server.streaming_enabled = True
# Streaming was attempted first (returned 404), then fell back to batch
assert (
len(mock_ci_server.get_requests(method="POST", path="/v1/execute/stream")) == 1
)
assert len(mock_ci_server.get_requests(method="POST", path="/v1/execute")) == 1
# Verify output still made it through
delta_packets = [
p
for p in packets
if isinstance(p, Packet) and isinstance(p.obj, PythonToolDelta)
]
assert len(delta_packets) >= 1
first_delta = delta_packets[0].obj
assert isinstance(first_delta, PythonToolDelta)
assert "mock output" in first_delta.stdout

View File

@@ -38,5 +38,5 @@ COPY --from=openapi-client /local/onyx_openapi_client /app/generated/onyx_openap
ENV PYTHONPATH=/app
ENTRYPOINT ["pytest", "-s"]
ENTRYPOINT ["pytest", "-s", "-rs"]
CMD ["/app/tests/integration", "--ignore=/app/tests/integration/multitenant_tests"]

View File

@@ -1,3 +1,4 @@
import time
from datetime import datetime
from urllib.parse import urlencode
from uuid import UUID
@@ -8,8 +9,10 @@ from requests.models import CaseInsensitiveDict
from ee.onyx.server.query_history.models import ChatSessionMinimal
from ee.onyx.server.query_history.models import ChatSessionSnapshot
from onyx.configs.constants import QAFeedbackType
from onyx.db.enums import TaskStatus
from onyx.server.documents.models import PaginatedReturn
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.constants import MAX_DELAY
from tests.integration.common_utils.test_models import DATestUser
@@ -69,9 +72,42 @@ class QueryHistoryManager:
if end_time:
query_params["end"] = end_time.isoformat()
response = requests.get(
url=f"{API_SERVER_URL}/admin/query-history-csv?{urlencode(query_params, doseq=True)}",
start_response = requests.post(
url=f"{API_SERVER_URL}/admin/query-history/start-export?{urlencode(query_params, doseq=True)}",
headers=user_performing_action.headers,
)
response.raise_for_status()
return response.headers, response.content.decode()
start_response.raise_for_status()
request_id = start_response.json()["request_id"]
deadline = time.time() + MAX_DELAY
while time.time() < deadline:
status_response = requests.get(
url=f"{API_SERVER_URL}/admin/query-history/export-status",
params={"request_id": request_id},
headers=user_performing_action.headers,
)
status_response.raise_for_status()
status = status_response.json()["status"]
if status == TaskStatus.SUCCESS:
break
if status == TaskStatus.FAILURE:
raise RuntimeError("Query history export task failed")
time.sleep(2)
else:
raise TimeoutError(
f"Query history export not completed within {MAX_DELAY} seconds"
)
download_response = requests.get(
url=f"{API_SERVER_URL}/admin/query-history/download",
params={"request_id": request_id},
headers=user_performing_action.headers,
)
download_response.raise_for_status()
if not download_response.content:
raise RuntimeError(
"Query history CSV download returned zero-length content"
)
return download_response.headers, download_response.content.decode()

View File

@@ -6,16 +6,26 @@ import pytest
from onyx.connectors.slack.models import ChannelType
from tests.integration.connector_job_tests.slack.slack_api_utils import SlackManager
# from tests.load_env_vars import load_env_vars
# load_env_vars()
SLACK_ADMIN_EMAIL = os.environ.get("SLACK_ADMIN_EMAIL", "evan@onyx.app")
SLACK_TEST_USER_1_EMAIL = os.environ.get("SLACK_TEST_USER_1_EMAIL", "evan+1@onyx.app")
SLACK_TEST_USER_2_EMAIL = os.environ.get("SLACK_TEST_USER_2_EMAIL", "justin@onyx.app")
@pytest.fixture()
def slack_test_setup() -> Generator[tuple[ChannelType, ChannelType], None, None]:
slack_client = SlackManager.get_slack_client(os.environ["SLACK_BOT_TOKEN"])
def _provision_slack_channels(
bot_token: str,
) -> Generator[tuple[ChannelType, ChannelType], None, None]:
slack_client = SlackManager.get_slack_client(bot_token)
auth_info = slack_client.auth_test()
print(f"\nSlack workspace: {auth_info.get('team')} ({auth_info.get('url')})")
user_map = SlackManager.build_slack_user_email_id_map(slack_client)
admin_user_id = user_map["admin@example.com"]
if SLACK_ADMIN_EMAIL not in user_map:
raise KeyError(
f"'{SLACK_ADMIN_EMAIL}' not found in Slack workspace. "
f"Available emails: {sorted(user_map.keys())}"
)
admin_user_id = user_map[SLACK_ADMIN_EMAIL]
(
public_channel,
@@ -27,5 +37,16 @@ def slack_test_setup() -> Generator[tuple[ChannelType, ChannelType], None, None]
yield public_channel, private_channel
# This part will always run after the test, even if it fails
SlackManager.cleanup_after_test(slack_client=slack_client, test_id=run_id)
@pytest.fixture()
def slack_test_setup() -> Generator[tuple[ChannelType, ChannelType], None, None]:
yield from _provision_slack_channels(os.environ["SLACK_BOT_TOKEN"])
@pytest.fixture()
def slack_perm_sync_test_setup() -> (
Generator[tuple[ChannelType, ChannelType], None, None]
):
yield from _provision_slack_channels(os.environ["SLACK_BOT_TOKEN_TEST_SPACE"])

View File

@@ -22,6 +22,9 @@ from tests.integration.common_utils.test_models import DATestConnector
from tests.integration.common_utils.test_models import DATestCredential
from tests.integration.common_utils.test_models import DATestUser
from tests.integration.common_utils.vespa import vespa_fixture
from tests.integration.connector_job_tests.slack.conftest import SLACK_ADMIN_EMAIL
from tests.integration.connector_job_tests.slack.conftest import SLACK_TEST_USER_1_EMAIL
from tests.integration.connector_job_tests.slack.conftest import SLACK_TEST_USER_2_EMAIL
from tests.integration.connector_job_tests.slack.slack_api_utils import SlackManager
@@ -34,26 +37,24 @@ from tests.integration.connector_job_tests.slack.slack_api_utils import SlackMan
def test_slack_permission_sync(
reset: None, # noqa: ARG001
vespa_client: vespa_fixture, # noqa: ARG001
slack_test_setup: tuple[ChannelType, ChannelType],
slack_perm_sync_test_setup: tuple[ChannelType, ChannelType],
) -> None:
public_channel, private_channel = slack_test_setup
public_channel, private_channel = slack_perm_sync_test_setup
# Creating an admin user (first user created is automatically an admin)
admin_user: DATestUser = UserManager.create(
email="admin@example.com",
email=SLACK_ADMIN_EMAIL,
)
# Creating a non-admin user
test_user_1: DATestUser = UserManager.create(
email="test_user_1@example.com",
email=SLACK_TEST_USER_1_EMAIL,
)
# Creating a non-admin user
test_user_2: DATestUser = UserManager.create(
email="test_user_2@example.com",
email=SLACK_TEST_USER_2_EMAIL,
)
slack_client = SlackManager.get_slack_client(os.environ["SLACK_BOT_TOKEN"])
bot_token = os.environ["SLACK_BOT_TOKEN_TEST_SPACE"]
slack_client = SlackManager.get_slack_client(bot_token)
email_id_map = SlackManager.build_slack_user_email_id_map(slack_client)
admin_user_id = email_id_map[admin_user.email]
@@ -63,7 +64,7 @@ def test_slack_permission_sync(
credential: DATestCredential = CredentialManager.create(
source=DocumentSource.SLACK,
credential_json={
"slack_bot_token": os.environ["SLACK_BOT_TOKEN"],
"slack_bot_token": bot_token,
},
user_performing_action=admin_user,
)
@@ -73,6 +74,7 @@ def test_slack_permission_sync(
source=DocumentSource.SLACK,
connector_specific_config={
"channels": [public_channel["name"], private_channel["name"]],
"include_bot_messages": True,
},
access_type=AccessType.SYNC,
groups=[],
@@ -102,14 +104,11 @@ def test_slack_permission_sync(
public_message = "Steve's favorite number is 809752"
private_message = "Sara's favorite number is 346794"
# Add messages to channels
print(f"\n Adding public message to channel: {public_message}")
SlackManager.add_message_to_channel(
slack_client=slack_client,
channel=public_channel,
message=public_message,
)
print(f"\n Adding private message to channel: {private_message}")
SlackManager.add_message_to_channel(
slack_client=slack_client,
channel=private_channel,
@@ -127,7 +126,9 @@ def test_slack_permission_sync(
user_performing_action=admin_user,
)
# Run permission sync
# Run permission sync. Since initial_index_should_sync=True for Slack,
# permissions were already set during indexing above — the explicit sync
# should find no changes to apply.
CCPairManager.sync(
cc_pair=cc_pair,
user_performing_action=admin_user,
@@ -135,59 +136,38 @@ def test_slack_permission_sync(
CCPairManager.wait_for_sync(
cc_pair=cc_pair,
after=before,
number_of_updated_docs=2,
number_of_updated_docs=0,
user_performing_action=admin_user,
should_wait_for_group_sync=False,
should_wait_for_vespa_sync=False,
)
# Search as admin with access to both channels
print("\nSearching as admin user")
onyx_doc_message_strings = DocumentSearchManager.search_documents(
# Verify admin can see messages from both channels
admin_docs = DocumentSearchManager.search_documents(
query="favorite number",
user_performing_action=admin_user,
)
print(
"\n documents retrieved by admin user: ",
onyx_doc_message_strings,
)
assert public_message in admin_docs
assert private_message in admin_docs
# Ensure admin user can see messages from both channels
assert public_message in onyx_doc_message_strings
assert private_message in onyx_doc_message_strings
# Search as test_user_2 with access to only the public channel
print("\n Searching as test_user_2")
onyx_doc_message_strings = DocumentSearchManager.search_documents(
# Verify test_user_2 can only see public channel messages
user_2_docs = DocumentSearchManager.search_documents(
query="favorite number",
user_performing_action=test_user_2,
)
print(
"\n documents retrieved by test_user_2: ",
onyx_doc_message_strings,
)
assert public_message in user_2_docs
assert private_message not in user_2_docs
# Ensure test_user_2 can only see messages from the public channel
assert public_message in onyx_doc_message_strings
assert private_message not in onyx_doc_message_strings
# Search as test_user_1 with access to both channels
print("\n Searching as test_user_1")
onyx_doc_message_strings = DocumentSearchManager.search_documents(
# Verify test_user_1 can see both channels (member of private channel)
user_1_docs = DocumentSearchManager.search_documents(
query="favorite number",
user_performing_action=test_user_1,
)
print(
"\n documents retrieved by test_user_1 before being removed from private channel: ",
onyx_doc_message_strings,
)
assert public_message in user_1_docs
assert private_message in user_1_docs
# Ensure test_user_1 can see messages from both channels
assert public_message in onyx_doc_message_strings
assert private_message in onyx_doc_message_strings
# ----------------------MAKE THE CHANGES--------------------------
print("\n Removing test_user_1 from the private channel")
before = datetime.now(timezone.utc)
# Remove test_user_1 from the private channel
before = datetime.now(timezone.utc)
desired_channel_members = [admin_user]
SlackManager.set_channel_members(
slack_client=slack_client,
@@ -206,24 +186,16 @@ def test_slack_permission_sync(
after=before,
number_of_updated_docs=1,
user_performing_action=admin_user,
should_wait_for_group_sync=False,
)
# ----------------------------VERIFY THE CHANGES---------------------------
# Ensure test_user_1 can no longer see messages from the private channel
# Search as test_user_1 with access to only the public channel
onyx_doc_message_strings = DocumentSearchManager.search_documents(
# Verify test_user_1 can no longer see private channel after removal
user_1_docs = DocumentSearchManager.search_documents(
query="favorite number",
user_performing_action=test_user_1,
)
print(
"\n documents retrieved by test_user_1 after being removed from private channel: ",
onyx_doc_message_strings,
)
# Ensure test_user_1 can only see messages from the public channel
assert public_message in onyx_doc_message_strings
assert private_message not in onyx_doc_message_strings
assert public_message in user_1_docs
assert private_message not in user_1_docs
# NOTE(rkuo): it isn't yet clear if the reason these were previously xfail'd
@@ -235,21 +207,19 @@ def test_slack_permission_sync(
def test_slack_group_permission_sync(
reset: None, # noqa: ARG001
vespa_client: vespa_fixture, # noqa: ARG001
slack_test_setup: tuple[ChannelType, ChannelType],
slack_perm_sync_test_setup: tuple[ChannelType, ChannelType],
) -> None:
"""
This test ensures that permission sync overrides onyx group access.
"""
public_channel, private_channel = slack_test_setup
public_channel, private_channel = slack_perm_sync_test_setup
# Creating an admin user (first user created is automatically an admin)
admin_user: DATestUser = UserManager.create(
email="admin@example.com",
email=SLACK_ADMIN_EMAIL,
)
# Creating a non-admin user
test_user_1: DATestUser = UserManager.create(
email="test_user_1@example.com",
email=SLACK_TEST_USER_1_EMAIL,
)
# Create a user group and adding the non-admin user to it
@@ -264,7 +234,8 @@ def test_slack_group_permission_sync(
user_performing_action=admin_user,
)
slack_client = SlackManager.get_slack_client(os.environ["SLACK_BOT_TOKEN"])
bot_token = os.environ["SLACK_BOT_TOKEN_TEST_SPACE"]
slack_client = SlackManager.get_slack_client(bot_token)
email_id_map = SlackManager.build_slack_user_email_id_map(slack_client)
admin_user_id = email_id_map[admin_user.email]
@@ -282,7 +253,7 @@ def test_slack_group_permission_sync(
credential = CredentialManager.create(
source=DocumentSource.SLACK,
credential_json={
"slack_bot_token": os.environ["SLACK_BOT_TOKEN"],
"slack_bot_token": bot_token,
},
user_performing_action=admin_user,
)
@@ -294,6 +265,7 @@ def test_slack_group_permission_sync(
source=DocumentSource.SLACK,
connector_specific_config={
"channels": [private_channel["name"]],
"include_bot_messages": True,
},
access_type=AccessType.SYNC,
groups=[user_group.id],
@@ -326,7 +298,8 @@ def test_slack_group_permission_sync(
user_performing_action=admin_user,
)
# Run permission sync
# Run permission sync. Since initial_index_should_sync=True for Slack,
# permissions were already set during indexing — no changes expected.
CCPairManager.sync(
cc_pair=cc_pair,
user_performing_action=admin_user,
@@ -334,8 +307,10 @@ def test_slack_group_permission_sync(
CCPairManager.wait_for_sync(
cc_pair=cc_pair,
after=before,
number_of_updated_docs=1,
number_of_updated_docs=0,
user_performing_action=admin_user,
should_wait_for_group_sync=False,
should_wait_for_vespa_sync=False,
)
# Verify admin can see the message

View File

@@ -5,22 +5,17 @@ from fastapi import FastAPI
from fastapi.responses import PlainTextResponse
from fastmcp import FastMCP
from fastmcp.server.auth import StaticTokenVerifier
from fastmcp.server.server import FunctionTool
def make_many_tools(mcp: FastMCP) -> list[FunctionTool]:
def make_tool(i: int) -> FunctionTool:
def make_many_tools(mcp: FastMCP) -> None:
def make_tool(i: int) -> None:
@mcp.tool(name=f"tool_{i}", description=f"Get secret value {i}")
def tool_name(name: str) -> str: # noqa: ARG001
"""Get secret value."""
return f"Secret value {200 - i}!"
return tool_name
tools = []
for i in range(100):
tools.append(make_tool(i))
return tools
make_tool(i)
if __name__ == "__main__":

View File

@@ -28,7 +28,6 @@ from fastmcp import FastMCP
from fastmcp.server.auth import AccessToken
from fastmcp.server.auth import TokenVerifier
from fastmcp.server.dependencies import get_access_token
from fastmcp.server.server import FunctionTool
# Google's tokeninfo endpoint for validating access tokens
GOOGLE_TOKENINFO_URL = "https://oauth2.googleapis.com/tokeninfo"
@@ -148,24 +147,19 @@ class GoogleOAuthTokenVerifier(TokenVerifier):
await self._http_client.aclose()
def make_tools(mcp: FastMCP) -> list[FunctionTool]:
def make_tools(mcp: FastMCP) -> None:
"""Create test tools for the MCP server."""
tools: list[FunctionTool] = []
@mcp.tool(name="echo", description="Echo back the input message")
def echo(message: str) -> str:
"""Echo the message back to the caller."""
return f"You said: {message}"
tools.append(echo)
@mcp.tool(name="get_secret", description="Get a secret value (requires auth)")
def get_secret(secret_name: str) -> str:
"""Get a secret value. This proves the token was validated."""
return f"Secret value for '{secret_name}': super-secret-value-12345"
tools.append(get_secret)
@mcp.tool(name="whoami", description="Get information about the authenticated user")
async def whoami() -> dict[str, Any]:
"""Get information about the authenticated user from their Google token."""
@@ -182,9 +176,6 @@ def make_tools(mcp: FastMCP) -> list[FunctionTool]:
"access_type": tok.claims.get("access_type"),
}
tools.append(whoami)
# Add some numbered tools for testing tool discovery
for i in range(5):
@mcp.tool(name=f"oauth_tool_{i}", description=f"Test tool number {i}")
@@ -192,10 +183,6 @@ def make_tools(mcp: FastMCP) -> list[FunctionTool]:
"""A numbered test tool."""
return f"Tool {_i} says hello to {name}!"
tools.append(numbered_tool)
return tools
if __name__ == "__main__":
port = int(sys.argv[1] if len(sys.argv) > 1 else "8006")

View File

@@ -2,7 +2,6 @@ import os
import sys
from fastmcp import FastMCP
from fastmcp.server.server import FunctionTool
mcp = FastMCP("My HTTP MCP")
@@ -13,19 +12,15 @@ def hello(name: str) -> str:
return f"Hello, {name}!"
def make_many_tools() -> list[FunctionTool]:
def make_tool(i: int) -> FunctionTool:
def make_many_tools() -> None:
def make_tool(i: int) -> None:
@mcp.tool(name=f"tool_{i}", description=f"Get secret value {i}")
def tool_name(name: str) -> str: # noqa: ARG001
"""Get secret value."""
return f"Secret value {100 - i}!"
return tool_name
tools = []
for i in range(100):
tools.append(make_tool(i))
return tools
make_tool(i)
if __name__ == "__main__":

View File

@@ -15,7 +15,6 @@ from fastapi.responses import Response
from fastmcp import FastMCP
from fastmcp.server.auth.providers.jwt import JWTVerifier
from fastmcp.server.dependencies import get_access_token
from fastmcp.server.server import FunctionTool
from starlette.middleware.base import BaseHTTPMiddleware
# uncomment for debug logs
@@ -37,18 +36,15 @@ Enable authorization code and store the client id and secret.
"""
def make_many_tools(mcp: FastMCP) -> list[FunctionTool]:
def make_tool(i: int) -> FunctionTool:
def make_many_tools(mcp: FastMCP) -> None:
def make_tool(i: int) -> None:
@mcp.tool(name=f"tool_{i}", description=f"Get secret value {i}")
def tool_name(name: str) -> str: # noqa: ARG001
"""Get secret value."""
return f"Secret value {500 - i}!"
return tool_name
tools = []
for i in range(100):
tools.append(make_tool(i))
make_tool(i)
@mcp.tool
async def whoami() -> dict[str, Any]:
@@ -59,9 +55,6 @@ def make_many_tools(mcp: FastMCP) -> list[FunctionTool]:
"claims": tok.claims if tok else {},
}
tools.append(whoami)
return tools
# ---------- FASTAPI APP ----------

View File

@@ -10,7 +10,6 @@ from fastmcp import FastMCP
from fastmcp.server.auth.auth import AccessToken
from fastmcp.server.auth.auth import TokenVerifier
from fastmcp.server.dependencies import get_access_token
from fastmcp.server.server import FunctionTool
# pip install fastmcp bcrypt
@@ -93,19 +92,15 @@ class ApiKeyVerifier(TokenVerifier):
# ---- server -----------------------------------------------------------------
def make_many_tools(mcp: FastMCP) -> list[FunctionTool]:
def make_tool(i: int) -> FunctionTool:
def make_many_tools(mcp: FastMCP) -> None:
def make_tool(i: int) -> None:
@mcp.tool(name=f"tool_{i}", description=f"Get secret value {i}")
def tool_name(name: str) -> str: # noqa: ARG001
"""Get secret value."""
return f"Secret value {400 - i}!"
return tool_name
tools = []
for i in range(100):
tools.append(make_tool(i))
return tools
make_tool(i)
if __name__ == "__main__":

View File

@@ -4,75 +4,84 @@ import time
import pytest
import requests
from onyx.db.chat import delete_chat_session
from onyx.db.chat import get_chat_sessions_older_than
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from tests.integration.common_utils.managers.chat import ChatSessionManager
from tests.integration.common_utils.managers.settings import SettingsManager
from tests.integration.common_utils.test_models import DATestLLMProvider
from tests.integration.common_utils.test_models import DATestSettings
from tests.integration.common_utils.test_models import DATestUser
RETENTION_SECONDS = 10
def _run_ttl_cleanup(retention_days: int) -> None:
"""Directly execute TTL cleanup logic, bypassing Celery task infrastructure."""
with get_session_with_current_tenant() as db_session:
old_chat_sessions = get_chat_sessions_older_than(retention_days, db_session)
for user_id, session_id in old_chat_sessions:
with get_session_with_current_tenant() as db_session:
delete_chat_session(
user_id,
session_id,
db_session,
include_deleted=True,
hard_delete=True,
)
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="Chat retention tests are enterprise only",
)
def test_chat_retention(reset: None, admin_user: DATestUser) -> None: # noqa: ARG001
def test_chat_retention(
reset: None, admin_user: DATestUser, llm_provider: DATestLLMProvider # noqa: ARG001
) -> None:
"""Test that chat sessions are deleted after the retention period expires."""
# Set chat retention period to 10 seconds
retention_days = 10 / 86400 # 10 seconds in days (10 / 24 / 60 / 60)
retention_days = RETENTION_SECONDS / 86400  # 10 seconds expressed in days
settings = DATestSettings(maximum_chat_retention_days=retention_days)
SettingsManager.update_settings(settings, user_performing_action=admin_user)
# Create a chat session
chat_session = ChatSessionManager.create(
persona_id=0,
description="Test chat retention",
user_performing_action=admin_user,
)
# Send a message
ChatSessionManager.send_message(
response = ChatSessionManager.send_message(
chat_session_id=chat_session.id,
message="This message should be deleted soon",
user_performing_action=admin_user,
)
assert (
response.error is None
), f"Chat response should not have an error: {response.error}"
# Verify the chat session exists
chat_history = ChatSessionManager.get_chat_history(
chat_session=chat_session,
user_performing_action=admin_user,
)
assert len(chat_history) > 0, "Chat session should have messages"
# Wait for TTL task to run (give it ~60 seconds)
print("Waiting for chat retention TTL task to run...")
max_wait_time = 60 # maximum time to wait in seconds
start_time = time.time()
# Wait for the retention period to elapse, then directly run TTL cleanup
time.sleep(RETENTION_SECONDS + 2)
_run_ttl_cleanup(retention_days)
# Verify the chat session was deleted
session_deleted = False
try:
chat_history = ChatSessionManager.get_chat_history(
chat_session=chat_session,
user_performing_action=admin_user,
)
session_deleted = len(chat_history) == 0
except requests.exceptions.HTTPError as e:
if e.response.status_code in (404, 400):
session_deleted = True
else:
raise
while not session_deleted and (time.time() - start_time < max_wait_time):
# Check if chat session is deleted
try:
# Attempt to get chat history - this should 404
chat_history = ChatSessionManager.get_chat_history(
chat_session=chat_session,
user_performing_action=admin_user,
)
# If we got no messages or an empty response, session might be deleted
if not chat_history:
session_deleted = True
break
except requests.exceptions.HTTPError as e:
# If we get a 404 or other error, the session is gone
if e.response.status_code in (404, 400):
session_deleted = True
break
raise # Re-raise other errors
# Wait a bit before checking again
time.sleep(5)
print(f"Waited {time.time() - start_time:.1f} seconds for chat deletion...")
# Assert that the chat session was deleted
assert session_deleted, "Chat session was not deleted within the expected time"
assert session_deleted, "Chat session was not deleted after retention period"

View File

@@ -0,0 +1,96 @@
import requests
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.test_models import DATestUser
CODE_INTERPRETER_URL = f"{API_SERVER_URL}/admin/code-interpreter"
CODE_INTERPRETER_HEALTH_URL = f"{CODE_INTERPRETER_URL}/health"
def test_get_code_interpreter_health_as_admin(
admin_user: DATestUser,
) -> None:
"""Health endpoint should return a JSON object with a 'healthy' boolean."""
response = requests.get(
CODE_INTERPRETER_HEALTH_URL,
headers=admin_user.headers,
)
assert response.status_code == 200
data = response.json()
assert "healthy" in data
assert isinstance(data["healthy"], bool)
def test_get_code_interpreter_status_as_admin(
admin_user: DATestUser,
) -> None:
"""GET endpoint should return a JSON object with an 'enabled' boolean."""
response = requests.get(
CODE_INTERPRETER_URL,
headers=admin_user.headers,
)
assert response.status_code == 200
data = response.json()
assert "enabled" in data
assert isinstance(data["enabled"], bool)
def test_update_code_interpreter_disable_and_enable(
admin_user: DATestUser,
) -> None:
"""PUT endpoint should update the enabled flag and persist across reads."""
# Disable
response = requests.put(
CODE_INTERPRETER_URL,
json={"enabled": False},
headers=admin_user.headers,
)
assert response.status_code == 200
# Verify disabled
response = requests.get(
CODE_INTERPRETER_URL,
headers=admin_user.headers,
)
assert response.status_code == 200
assert response.json()["enabled"] is False
# Re-enable
response = requests.put(
CODE_INTERPRETER_URL,
json={"enabled": True},
headers=admin_user.headers,
)
assert response.status_code == 200
# Verify enabled
response = requests.get(
CODE_INTERPRETER_URL,
headers=admin_user.headers,
)
assert response.status_code == 200
assert response.json()["enabled"] is True
def test_code_interpreter_endpoints_require_admin(
basic_user: DATestUser,
) -> None:
"""All code interpreter endpoints should reject non-admin users."""
health_response = requests.get(
CODE_INTERPRETER_HEALTH_URL,
headers=basic_user.headers,
)
assert health_response.status_code == 403
get_response = requests.get(
CODE_INTERPRETER_URL,
headers=basic_user.headers,
)
assert get_response.status_code == 403
put_response = requests.put(
CODE_INTERPRETER_URL,
json={"enabled": True},
headers=basic_user.headers,
)
assert put_response.status_code == 403

View File

@@ -1,195 +0,0 @@
import os
import pytest
import requests
from onyx.configs.constants import MessageType
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.managers.api_key import APIKeyManager
from tests.integration.common_utils.managers.cc_pair import CCPairManager
from tests.integration.common_utils.managers.document import DocumentManager
from tests.integration.common_utils.managers.llm_provider import LLMProviderManager
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.test_models import DATestAPIKey
from tests.integration.common_utils.test_models import DATestCCPair
from tests.integration.common_utils.test_models import DATestUser
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="/chat/send-message-simple-with-history is enterprise only",
)
def test_all_stream_chat_message_objects_outputs(reset: None) -> None: # noqa: ARG001
# Creating an admin user (first user created is automatically an admin)
admin_user: DATestUser = UserManager.create(name="admin_user")
# create connector
cc_pair_1: DATestCCPair = CCPairManager.create_from_scratch(
user_performing_action=admin_user,
)
api_key: DATestAPIKey = APIKeyManager.create(
user_performing_action=admin_user,
)
LLMProviderManager.create(user_performing_action=admin_user)
# SEEDING DOCUMENTS
cc_pair_1.documents = []
cc_pair_1.documents.append(
DocumentManager.seed_doc_with_content(
cc_pair=cc_pair_1,
content="Pablo's favorite color is blue",
api_key=api_key,
)
)
cc_pair_1.documents.append(
DocumentManager.seed_doc_with_content(
cc_pair=cc_pair_1,
content="Chris's favorite color is red",
api_key=api_key,
)
)
cc_pair_1.documents.append(
DocumentManager.seed_doc_with_content(
cc_pair=cc_pair_1,
content="Pika's favorite color is green",
api_key=api_key,
)
)
# TESTING RESPONSE FOR QUESTION 1
response = requests.post(
f"{API_SERVER_URL}/chat/send-message-simple-with-history",
json={
"messages": [
{
"message": "What is Pablo's favorite color?",
"role": MessageType.USER.value,
}
],
"persona_id": 0,
},
headers=admin_user.headers,
)
assert response.status_code == 200
response_json = response.json()
# check that the answer is correct
answer_1 = response_json["answer"]
assert "blue" in answer_1.lower()
# FLAKY - check that the llm selected a document
# assert 0 in response_json["llm_selected_doc_indices"]
# check that the final context documents are correct
# (it should contain all documents because there aren't enough to exclude any)
assert 0 in response_json["final_context_doc_indices"]
assert 1 in response_json["final_context_doc_indices"]
assert 2 in response_json["final_context_doc_indices"]
# FLAKY - check that the cited documents are correct
# assert cc_pair_1.documents[0].id in response_json["cited_documents"].values()
# flakiness likely due to non-deterministic rephrasing
# FLAKY - check that the top documents are correct
# assert response_json["top_documents"][0]["document_id"] == cc_pair_1.documents[0].id
print("response 1/3 passed")
# TESTING RESPONSE FOR QUESTION 2
response = requests.post(
f"{API_SERVER_URL}/chat/send-message-simple-with-history",
json={
"messages": [
{
"message": "What is Pablo's favorite color?",
"role": MessageType.USER.value,
},
{
"message": answer_1,
"role": MessageType.ASSISTANT.value,
},
{
"message": "What is Chris's favorite color?",
"role": MessageType.USER.value,
},
],
"persona_id": 0,
},
headers=admin_user.headers,
)
assert response.status_code == 200
response_json = response.json()
# check that the answer is correct
answer_2 = response_json["answer"]
assert "red" in answer_2.lower()
# FLAKY - check that the llm selected a document
# assert 0 in response_json["llm_selected_doc_indices"]
# check that the final context documents are correct
# (it should contain all documents because there aren't enough to exclude any)
assert 0 in response_json["final_context_doc_indices"]
assert 1 in response_json["final_context_doc_indices"]
assert 2 in response_json["final_context_doc_indices"]
# FLAKY - check that the cited documents are correct
# assert cc_pair_1.documents[1].id in response_json["cited_documents"].values()
# flakiness likely due to non-deterministic rephrasing
# FLAKY - check that the top documents are correct
# assert response_json["top_documents"][0]["document_id"] == cc_pair_1.documents[1].id
print("response 2/3 passed")
# TESTING RESPONSE FOR QUESTION 3
response = requests.post(
f"{API_SERVER_URL}/chat/send-message-simple-with-history",
json={
"messages": [
{
"message": "What is Pablo's favorite color?",
"role": MessageType.USER.value,
},
{
"message": answer_1,
"role": MessageType.ASSISTANT.value,
},
{
"message": "What is Chris's favorite color?",
"role": MessageType.USER.value,
},
{
"message": answer_2,
"role": MessageType.ASSISTANT.value,
},
{
"message": "What is Pika's favorite color?",
"role": MessageType.USER.value,
},
],
"persona_id": 0,
},
headers=admin_user.headers,
)
assert response.status_code == 200
response_json = response.json()
# check that the answer is correct
answer_3 = response_json["answer"]
assert "green" in answer_3.lower()
# FLAKY - check that the llm selected a document
# assert 0 in response_json["llm_selected_doc_indices"]
# check that the final context documents are correct
# (it should contain all documents because there aren't enough to exclude any)
assert 0 in response_json["final_context_doc_indices"]
assert 1 in response_json["final_context_doc_indices"]
assert 2 in response_json["final_context_doc_indices"]
# FLAKY - check that the cited documents are correct
# assert cc_pair_1.documents[2].id in response_json["cited_documents"].values()
# flakiness likely due to non-deterministic rephrasing
# FLAKY - check that the top documents are correct
# assert response_json["top_documents"][0]["document_id"] == cc_pair_1.documents[2].id
print("response 3/3 passed")

View File

@@ -1,250 +0,0 @@
import json
import os
import pytest
import requests
from onyx.configs.constants import MessageType
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.constants import NUM_DOCS
from tests.integration.common_utils.test_models import DATestLLMProvider
from tests.integration.common_utils.test_models import DATestUser
from tests.integration.conftest import DocumentBuilderType
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="/chat/send-message-simple-with-history tests are enterprise only",
)
def test_send_message_simple_with_history(
reset: None, # noqa: ARG001
admin_user: DATestUser,
llm_provider: DATestLLMProvider, # noqa: ARG001
document_builder: DocumentBuilderType,
) -> None:
# Create NUM_DOCS documents with dummy content using the document builder
content_list = [f"Document {i} content" for i in range(NUM_DOCS)]
docs = document_builder(content_list)
response = requests.post(
f"{API_SERVER_URL}/chat/send-message-simple-with-history",
json={
"messages": [
{
"message": docs[0].content,
"role": MessageType.USER.value,
}
],
"persona_id": 0,
},
headers=admin_user.headers,
)
assert response.status_code == 200
response_json = response.json()
# Check that the top document is the correct document
assert response_json["top_documents"][0]["document_id"] == docs[0].id
# assert that the metadata is correct
for doc in docs:
found_doc = next(
(x for x in response_json["top_documents"] if x["document_id"] == doc.id),
None,
)
assert found_doc
assert found_doc["metadata"]["document_id"] == doc.id
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="/chat/send-message-simple-with-history tests are enterprise only",
)
def test_using_reference_docs_with_simple_with_history_api_flow(
reset: None, # noqa: ARG001
admin_user: DATestUser,
llm_provider: DATestLLMProvider, # noqa: ARG001
document_builder: DocumentBuilderType,
) -> None:
# SEEDING DOCUMENTS
docs = document_builder(
[
"Chris's favorite color is blue",
"Hagen's favorite color is red",
"Pablo's favorite color is green",
]
)
# SENDING MESSAGE 1
response = requests.post(
f"{API_SERVER_URL}/chat/send-message-simple-with-history",
json={
"messages": [
{
"message": "What is Pablo's favorite color?",
"role": MessageType.USER.value,
}
],
"persona_id": 0,
},
headers=admin_user.headers,
)
assert response.status_code == 200
response_json = response.json()
# get the db_doc_id of the top document to use as a search doc id for second message
first_db_doc_id = response_json["top_documents"][0]["db_doc_id"]
# SENDING MESSAGE 2
response = requests.post(
f"{API_SERVER_URL}/chat/send-message-simple-with-history",
json={
"messages": [
{
"message": "What is Pablo's favorite color?",
"role": MessageType.USER.value,
}
],
"persona_id": 0,
"search_doc_ids": [first_db_doc_id],
},
headers=admin_user.headers,
)
assert response.status_code == 200
response_json = response.json()
# make sure there is an answer
assert response_json["answer"]
# This ensures the document we think we are referencing when we send the search_doc_ids in the second
# message is the document that we expect it to be
assert response_json["top_documents"][0]["document_id"] == docs[2].id
@pytest.mark.skip(reason="We don't support this anymore with the DR flow :(")
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="/chat/send-message-simple-with-history tests are enterprise only",
)
def test_send_message_simple_with_history_strict_json(
reset: None, # noqa: ARG001
admin_user: DATestUser,
llm_provider: DATestLLMProvider, # noqa: ARG001
) -> None:
response = requests.post(
f"{API_SERVER_URL}/chat/send-message-simple-with-history",
json={
# intentionally irrelevant prompt to ensure that the
# structured response format is actually used
"messages": [
{
"message": "What is green?",
"role": MessageType.USER.value,
}
],
"persona_id": 0,
"structured_response_format": {
"type": "json_schema",
"json_schema": {
"name": "presidents",
"schema": {
"type": "object",
"properties": {
"presidents": {
"type": "array",
"items": {"type": "string"},
"description": "List of the first three US presidents",
}
},
"required": ["presidents"],
"additionalProperties": False,
},
"strict": True,
},
},
},
headers=admin_user.headers,
)
assert response.status_code == 200
response_json = response.json()
# Check that the answer is present
assert "answer" in response_json
assert response_json["answer"] is not None
# helper: strip optional markdown code fences around the JSON answer
def clean_json_string(json_string: str) -> str:
return json_string.strip().removeprefix("```json").removesuffix("```").strip()
# Attempt to parse the answer as JSON
try:
clean_answer = clean_json_string(response_json["answer"])
parsed_answer = json.loads(clean_answer)
# NOTE: do not check content, just the structure
assert isinstance(parsed_answer, dict)
assert "presidents" in parsed_answer
assert isinstance(parsed_answer["presidents"], list)
for president in parsed_answer["presidents"]:
assert isinstance(president, str)
except json.JSONDecodeError:
assert (
False
), f"The answer is not a valid JSON object - '{response_json['answer']}'"
# Check that the answer_citationless is also valid JSON
assert "answer_citationless" in response_json
assert response_json["answer_citationless"] is not None
try:
clean_answer_citationless = clean_json_string(
response_json["answer_citationless"]
)
parsed_answer_citationless = json.loads(clean_answer_citationless)
assert isinstance(parsed_answer_citationless, dict)
except json.JSONDecodeError:
assert False, "The answer_citationless is not a valid JSON object"
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="/query/answer-with-citation tests are enterprise only",
)
def test_answer_with_citation_api(
reset: None, # noqa: ARG001
admin_user: DATestUser,
llm_provider: DATestLLMProvider, # noqa: ARG001
document_builder: DocumentBuilderType,
) -> None:
# create docs
docs = document_builder(["Chris' favorite color is green"])
# send a message
response = requests.post(
f"{API_SERVER_URL}/query/answer-with-citation",
json={
"messages": [
{
"message": "What is Chris' favorite color? Make sure to cite the document.",
"role": MessageType.USER.value,
}
],
"persona_id": 0,
},
headers=admin_user.headers,
cookies=admin_user.cookies,
)
assert response.status_code == 200
response_json = response.json()
assert response_json["answer"]
has_correct_citation = False
for citation in response_json["citations"]:
if citation["document_id"] == docs[0].id:
has_correct_citation = True
break
assert has_correct_citation

View File

@@ -2,7 +2,6 @@ import os
import uuid
from datetime import datetime
from datetime import timezone
from unittest.mock import patch
import httpx
import pytest
@@ -12,6 +11,7 @@ from onyx.configs.constants import DocumentSource
from onyx.connectors.mock_connector.connector import EXTERNAL_USER_EMAILS
from onyx.connectors.mock_connector.connector import EXTERNAL_USER_GROUP_IDS
from onyx.connectors.mock_connector.connector import MockConnectorCheckpoint
from onyx.connectors.models import Document
from onyx.connectors.models import InputType
from onyx.db.document import get_documents_by_ids
from onyx.db.engine.sql_engine import get_session_with_current_tenant
@@ -25,128 +25,16 @@ from tests.integration.common_utils.managers.cc_pair import CCPairManager
from tests.integration.common_utils.managers.document import DocumentManager
from tests.integration.common_utils.managers.index_attempt import IndexAttemptManager
from tests.integration.common_utils.test_document_utils import create_test_document
from tests.integration.common_utils.test_models import DATestCCPair
from tests.integration.common_utils.test_models import DATestUser
from tests.integration.common_utils.vespa import vespa_fixture
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="Permission sync is enterprise only",
)
def test_mock_connector_initial_permission_sync(
def _setup_mock_connector(
mock_server_client: httpx.Client,
vespa_client: vespa_fixture,
admin_user: DATestUser,
) -> None:
"""Test that the MockConnector fetches and sets permissions during initial indexing when AccessType.SYNC is used"""
# Set up mock server behavior
doc_uuid = uuid.uuid4()
test_doc = create_test_document(doc_id=f"test-doc-{doc_uuid}")
response = mock_server_client.post(
"/set-behavior",
json=[
{
"documents": [test_doc.model_dump(mode="json")],
"checkpoint": MockConnectorCheckpoint(has_more=False).model_dump(
mode="json"
),
"failures": [],
}
],
)
assert response.status_code == 200
# Create CC Pair with SYNC access type to enable permissions during indexing
cc_pair = CCPairManager.create_from_scratch(
name=f"mock-connector-permissions-{uuid.uuid4()}",
source=DocumentSource.MOCK_CONNECTOR,
input_type=InputType.POLL,
connector_specific_config={
"mock_server_host": MOCK_CONNECTOR_SERVER_HOST,
"mock_server_port": MOCK_CONNECTOR_SERVER_PORT,
},
access_type=AccessType.SYNC, # This enables permissions during indexing
user_performing_action=admin_user,
)
# Wait for index attempt to start
index_attempt = IndexAttemptManager.wait_for_index_attempt_start(
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
# Wait for index attempt to finish
IndexAttemptManager.wait_for_index_attempt_completion(
index_attempt_id=index_attempt.id,
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
# Validate status
finished_index_attempt = IndexAttemptManager.get_index_attempt_by_id(
index_attempt_id=index_attempt.id,
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
assert finished_index_attempt.status == IndexingStatus.SUCCESS
# Verify document was indexed
with get_session_with_current_tenant() as db_session:
documents = DocumentManager.fetch_documents_for_cc_pair(
cc_pair_id=cc_pair.id,
db_session=db_session,
vespa_client=vespa_client,
)
assert len(documents) == 1
assert documents[0].id == test_doc.id
# Verify no errors occurred
errors = IndexAttemptManager.get_index_attempt_errors_for_cc_pair(
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
assert len(errors) == 0
# Verify permissions were set during indexing by checking the document in the database
with get_session_with_current_tenant() as db_session:
db_docs = get_documents_by_ids(
db_session=db_session,
document_ids=[test_doc.id],
)
assert len(db_docs) == 1
db_doc = db_docs[0]
assert db_doc.external_user_emails is not None
assert db_doc.external_user_group_ids is not None
# Check the specific permissions that MockConnector sets
assert set(db_doc.external_user_emails) == EXTERNAL_USER_EMAILS
assert set(db_doc.external_user_group_ids) == EXTERNAL_USER_GROUP_IDS
# Verify the document is not public (as set by MockConnector)
assert db_doc.is_public is False
# Verify that the cc_pair was marked as permissions synced
updated_cc_pair_info = CCPairManager.get_single(
cc_pair.id, user_performing_action=admin_user
)
assert updated_cc_pair_info is not None
assert updated_cc_pair_info.last_full_permission_sync is not None
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="Permission sync attempt tracking is enterprise only",
)
def test_permission_sync_attempt_tracking_integration(
mock_server_client: httpx.Client,
vespa_client: vespa_fixture, # noqa: ARG001
admin_user: DATestUser,
) -> None:
"""Test that permission sync attempts are properly tracked during real sync workflows."""
) -> tuple[DATestCCPair, Document]:
"""Common setup: create a test doc, configure mock server, create cc_pair, wait for indexing."""
doc_uuid = uuid.uuid4()
test_doc = create_test_document(doc_id=f"test-doc-{doc_uuid}")
@@ -165,7 +53,7 @@ def test_permission_sync_attempt_tracking_integration(
assert response.status_code == 200
cc_pair = CCPairManager.create_from_scratch(
name=f"mock-connector-attempt-tracking-{uuid.uuid4()}",
name=f"mock-connector-{uuid.uuid4()}",
source=DocumentSource.MOCK_CONNECTOR,
input_type=InputType.POLL,
connector_specific_config={
@@ -187,6 +75,95 @@ def test_permission_sync_attempt_tracking_integration(
user_performing_action=admin_user,
)
finished = IndexAttemptManager.get_index_attempt_by_id(
index_attempt_id=index_attempt.id,
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
assert finished.status == IndexingStatus.SUCCESS
return cc_pair, test_doc
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="Permission sync is enterprise only",
)
def test_mock_connector_initial_permission_sync(
mock_server_client: httpx.Client,
vespa_client: vespa_fixture,
admin_user: DATestUser,
) -> None:
"""Test that the MockConnector fetches and sets permissions during initial indexing
when AccessType.SYNC is used."""
cc_pair, test_doc = _setup_mock_connector(mock_server_client, admin_user)
with get_session_with_current_tenant() as db_session:
documents = DocumentManager.fetch_documents_for_cc_pair(
cc_pair_id=cc_pair.id,
db_session=db_session,
vespa_client=vespa_client,
)
assert len(documents) == 1
assert documents[0].id == test_doc.id
errors = IndexAttemptManager.get_index_attempt_errors_for_cc_pair(
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
assert len(errors) == 0
with get_session_with_current_tenant() as db_session:
db_docs = get_documents_by_ids(
db_session=db_session,
document_ids=[test_doc.id],
)
assert len(db_docs) == 1
db_doc = db_docs[0]
assert db_doc.external_user_emails is not None
assert db_doc.external_user_group_ids is not None
assert set(db_doc.external_user_emails) == EXTERNAL_USER_EMAILS
assert set(db_doc.external_user_group_ids) == EXTERNAL_USER_GROUP_IDS
assert db_doc.is_public is False
# After initial indexing, the beat task detects last_time_perm_sync is None
# and triggers a doc permission sync. Explicitly trigger it to avoid
# waiting for the 30s beat interval.
before = datetime.now(timezone.utc)
CCPairManager.sync(
cc_pair=cc_pair,
user_performing_action=admin_user,
)
CCPairManager.wait_for_sync(
cc_pair=cc_pair,
after=before,
number_of_updated_docs=1,
user_performing_action=admin_user,
should_wait_for_group_sync=False,
should_wait_for_vespa_sync=False,
)
updated_cc_pair_info = CCPairManager.get_single(
cc_pair.id, user_performing_action=admin_user
)
assert updated_cc_pair_info is not None
assert updated_cc_pair_info.last_full_permission_sync is not None
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="Permission sync attempt tracking is enterprise only",
)
def test_permission_sync_attempt_tracking_integration(
mock_server_client: httpx.Client,
vespa_client: vespa_fixture, # noqa: ARG001
admin_user: DATestUser,
) -> None:
"""Test that permission sync attempts are properly tracked during real sync workflows."""
cc_pair, _test_doc = _setup_mock_connector(mock_server_client, admin_user)
before = datetime.now(timezone.utc)
CCPairManager.sync(
cc_pair=cc_pair,
@@ -198,6 +175,8 @@ def test_permission_sync_attempt_tracking_integration(
after=before,
number_of_updated_docs=1,
user_performing_action=admin_user,
should_wait_for_group_sync=False,
should_wait_for_vespa_sync=False,
)
with get_session_with_current_tenant() as db_session:
@@ -219,88 +198,6 @@ def test_permission_sync_attempt_tracking_integration(
)
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="Permission sync attempt tracking is enterprise only",
)
def test_permission_sync_attempt_tracking_with_mocked_failure(
mock_server_client: httpx.Client,
vespa_client: vespa_fixture, # noqa: ARG001
admin_user: DATestUser,
) -> None:
"""Test that permission sync attempts are properly tracked when sync fails."""
doc_uuid = uuid.uuid4()
test_doc = create_test_document(doc_id=f"test-doc-{doc_uuid}")
response = mock_server_client.post(
"/set-behavior",
json=[
{
"documents": [test_doc.model_dump(mode="json")],
"checkpoint": MockConnectorCheckpoint(has_more=False).model_dump(
mode="json"
),
"failures": [],
}
],
)
assert response.status_code == 200
cc_pair = CCPairManager.create_from_scratch(
name=f"mock-connector-attempt-failure-{uuid.uuid4()}",
source=DocumentSource.MOCK_CONNECTOR,
input_type=InputType.POLL,
connector_specific_config={
"mock_server_host": MOCK_CONNECTOR_SERVER_HOST,
"mock_server_port": MOCK_CONNECTOR_SERVER_PORT,
},
access_type=AccessType.SYNC,
user_performing_action=admin_user,
)
index_attempt = IndexAttemptManager.wait_for_index_attempt_start(
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
IndexAttemptManager.wait_for_index_attempt_completion(
index_attempt_id=index_attempt.id,
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
# Mock the permission sync to force a failure and verify attempt tracking
with patch(
"ee.onyx.background.celery.tasks.doc_permission_syncing.tasks.validate_ccpair_for_user"
) as mock_validate:
mock_validate.side_effect = Exception("Validation failed for testing")
try:
before = datetime.now(timezone.utc)
CCPairManager.sync(
cc_pair=cc_pair,
user_performing_action=admin_user,
)
CCPairManager.wait_for_sync(
cc_pair=cc_pair,
after=before,
number_of_updated_docs=0,
user_performing_action=admin_user,
)
except Exception:
pass
with get_session_with_current_tenant() as db_session:
attempt = db_session.execute(
select(DocPermissionSyncAttempt).where(
DocPermissionSyncAttempt.connector_credential_pair_id == cc_pair.id
)
).scalar_one()
assert attempt.status == PermissionSyncStatus.FAILED
@pytest.mark.skipif(
os.environ.get("ENABLE_PAID_ENTERPRISE_EDITION_FEATURES", "").lower() != "true",
reason="Permission sync attempt tracking is enterprise only",
@@ -311,45 +208,8 @@ def test_permission_sync_attempt_status_success(
admin_user: DATestUser,
) -> None:
"""Test that permission sync attempts are marked as SUCCESS when sync completes without errors."""
doc_uuid = uuid.uuid4()
test_doc = create_test_document(doc_id=f"test-doc-{doc_uuid}")
response = mock_server_client.post(
"/set-behavior",
json=[
{
"documents": [test_doc.model_dump(mode="json")],
"checkpoint": MockConnectorCheckpoint(has_more=False).model_dump(
mode="json"
),
"failures": [],
}
],
)
assert response.status_code == 200
cc_pair = CCPairManager.create_from_scratch(
name=f"mock-connector-success-{uuid.uuid4()}",
source=DocumentSource.MOCK_CONNECTOR,
input_type=InputType.POLL,
connector_specific_config={
"mock_server_host": MOCK_CONNECTOR_SERVER_HOST,
"mock_server_port": MOCK_CONNECTOR_SERVER_PORT,
},
access_type=AccessType.SYNC,
user_performing_action=admin_user,
)
index_attempt = IndexAttemptManager.wait_for_index_attempt_start(
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
IndexAttemptManager.wait_for_index_attempt_completion(
index_attempt_id=index_attempt.id,
cc_pair_id=cc_pair.id,
user_performing_action=admin_user,
)
cc_pair, _test_doc = _setup_mock_connector(mock_server_client, admin_user)
before = datetime.now(timezone.utc)
CCPairManager.sync(
@@ -362,6 +222,8 @@ def test_permission_sync_attempt_status_success(
after=before,
number_of_updated_docs=1,
user_performing_action=admin_user,
should_wait_for_group_sync=False,
should_wait_for_vespa_sync=False,
)
with get_session_with_current_tenant() as db_session:

View File

@@ -6,11 +6,14 @@ from sqlalchemy.orm import Session
from onyx.context.search.enums import RecencyBiasSetting
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.enums import LLMModelFlowType
from onyx.db.llm import can_user_access_llm_provider
from onyx.db.llm import fetch_user_group_ids
from onyx.db.models import LLMModelFlow
from onyx.db.models import LLMProvider as LLMProviderModel
from onyx.db.models import LLMProvider__Persona
from onyx.db.models import LLMProvider__UserGroup
from onyx.db.models import ModelConfiguration
from onyx.db.models import Persona
from onyx.db.models import User
from onyx.db.models import User__UserGroup
@@ -267,6 +270,24 @@ def test_get_llm_for_persona_falls_back_when_access_denied(
provider_name=restricted_provider.name,
)
# Set up ModelConfiguration + LLMModelFlow so get_default_llm() can
# resolve the default provider when the fallback path is triggered.
default_model_config = ModelConfiguration(
llm_provider_id=default_provider.id,
name=default_provider.default_model_name,
is_visible=True,
)
db_session.add(default_model_config)
db_session.flush()
db_session.add(
LLMModelFlow(
model_configuration_id=default_model_config.id,
llm_model_flow_type=LLMModelFlowType.CHAT,
is_default=True,
)
)
db_session.flush()
access_group = UserGroup(name="persona-group")
db_session.add(access_group)
db_session.flush()

View File

@@ -0,0 +1,322 @@
import json
import os
import time
from uuid import uuid4
import pytest
import requests
from pydantic import BaseModel
from pydantic import ConfigDict
from onyx.configs import app_configs
from onyx.configs.constants import DocumentSource
from onyx.tools.constants import SEARCH_TOOL_ID
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.managers.cc_pair import CCPairManager
from tests.integration.common_utils.managers.chat import ChatSessionManager
from tests.integration.common_utils.managers.tool import ToolManager
from tests.integration.common_utils.test_models import DATestUser
from tests.integration.common_utils.test_models import ToolName
_ENV_PROVIDER = "NIGHTLY_LLM_PROVIDER"
_ENV_MODELS = "NIGHTLY_LLM_MODELS"
_ENV_API_KEY = "NIGHTLY_LLM_API_KEY"
_ENV_API_BASE = "NIGHTLY_LLM_API_BASE"
_ENV_CUSTOM_CONFIG_JSON = "NIGHTLY_LLM_CUSTOM_CONFIG_JSON"
_ENV_STRICT = "NIGHTLY_LLM_STRICT"
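# Illustrative configuration (example values, not defaults):
#   NIGHTLY_LLM_PROVIDER=openrouter
#   NIGHTLY_LLM_MODELS=model-a,model-b   # comma-separated
#   NIGHTLY_LLM_API_KEY=...
#   NIGHTLY_LLM_STRICT=true              # fail instead of skip on misconfiguration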
class NightlyProviderConfig(BaseModel):
model_config = ConfigDict(frozen=True)
provider: str
model_names: list[str]
api_key: str | None
api_base: str | None
custom_config: dict[str, str] | None
strict: bool
def _env_true(env_var: str, default: bool = False) -> bool:
value = os.environ.get(env_var)
if value is None:
return default
return value.strip().lower() in {"1", "true", "yes", "on"}
def _split_csv_env(env_var: str) -> list[str]:
return [
part.strip() for part in os.environ.get(env_var, "").split(",") if part.strip()
]
def _load_provider_config() -> NightlyProviderConfig:
provider = os.environ.get(_ENV_PROVIDER, "").strip().lower()
model_names = _split_csv_env(_ENV_MODELS)
api_key = os.environ.get(_ENV_API_KEY) or None
api_base = os.environ.get(_ENV_API_BASE) or None
strict = _env_true(_ENV_STRICT, default=False)
custom_config: dict[str, str] | None = None
custom_config_json = os.environ.get(_ENV_CUSTOM_CONFIG_JSON, "").strip()
if custom_config_json:
parsed = json.loads(custom_config_json)
if not isinstance(parsed, dict):
raise ValueError(f"{_ENV_CUSTOM_CONFIG_JSON} must be a JSON object")
custom_config = {str(key): str(value) for key, value in parsed.items()}
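# For ollama_chat, surface the API key through custom_config as OLLAMA_API_KEY
# when no explicit custom config was provided.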
if provider == "ollama_chat" and api_key and not custom_config:
custom_config = {"OLLAMA_API_KEY": api_key}
return NightlyProviderConfig(
provider=provider,
model_names=model_names,
api_key=api_key,
api_base=api_base,
custom_config=custom_config,
strict=strict,
)
def _skip_or_fail(strict: bool, message: str) -> None:
if strict:
pytest.fail(message)
pytest.skip(message)
def _validate_provider_config(config: NightlyProviderConfig) -> None:
if not config.provider:
_skip_or_fail(strict=config.strict, message=f"{_ENV_PROVIDER} must be set")
if not config.model_names:
_skip_or_fail(
strict=config.strict,
message=f"{_ENV_MODELS} must include at least one model",
)
if config.provider != "ollama_chat" and not config.api_key:
_skip_or_fail(
strict=config.strict,
message=(f"{_ENV_API_KEY} is required for provider '{config.provider}'"),
)
if config.provider == "ollama_chat" and not (
config.api_base or _default_api_base_for_provider(config.provider)
):
_skip_or_fail(
strict=config.strict,
message=(f"{_ENV_API_BASE} is required for provider '{config.provider}'"),
)
def _assert_integration_mode_enabled() -> None:
assert (
app_configs.INTEGRATION_TESTS_MODE is True
), "Integration tests require INTEGRATION_TESTS_MODE=true."
def _seed_connector_for_search_tool(admin_user: DATestUser) -> None:
# SearchTool is only exposed when at least one non-default connector exists.
CCPairManager.create_from_scratch(
source=DocumentSource.INGESTION_API,
user_performing_action=admin_user,
)
def _get_internal_search_tool_id(admin_user: DATestUser) -> int:
tools = ToolManager.list_tools(user_performing_action=admin_user)
for tool in tools:
if tool.in_code_tool_id == SEARCH_TOOL_ID:
return tool.id
raise AssertionError("SearchTool must exist for this test")
def _default_api_base_for_provider(provider: str) -> str | None:
if provider == "openrouter":
return "https://openrouter.ai/api/v1"
if provider == "ollama_chat":
# host.docker.internal works when tests are running inside the integration test container.
return "http://host.docker.internal:11434"
return None
def _create_provider_payload(
provider: str,
provider_name: str,
model_name: str,
api_key: str | None,
api_base: str | None,
custom_config: dict[str, str] | None,
) -> dict:
return {
"name": provider_name,
"provider": provider,
"api_key": api_key,
"api_base": api_base,
"custom_config": custom_config,
"default_model_name": model_name,
"is_public": True,
"groups": [],
"personas": [],
"model_configurations": [{"name": model_name, "is_visible": True}],
"api_key_changed": bool(api_key),
"custom_config_changed": bool(custom_config),
}
def _ensure_provider_is_default(provider_id: int, admin_user: DATestUser) -> None:
list_response = requests.get(
f"{API_SERVER_URL}/admin/llm/provider",
headers=admin_user.headers,
)
list_response.raise_for_status()
providers = list_response.json()
current_default = next(
(provider for provider in providers if provider.get("is_default_provider")),
None,
)
assert (
current_default is not None
), "Expected a default provider after setting provider as default"
assert (
current_default["id"] == provider_id
), f"Expected provider {provider_id} to be default, found {current_default['id']}"
def _run_chat_assertions(
admin_user: DATestUser,
search_tool_id: int,
provider: str,
model_name: str,
) -> None:
last_error: str | None = None
# Retry once to reduce transient nightly flakes due to provider-side blips.
for attempt in range(1, 3):
chat_session = ChatSessionManager.create(user_performing_action=admin_user)
response = ChatSessionManager.send_message(
chat_session_id=chat_session.id,
message=(
"Use internal_search to search for 'nightly-provider-regression-sentinel', "
"then summarize the result in one short sentence."
),
user_performing_action=admin_user,
forced_tool_ids=[search_tool_id],
)
if response.error is None:
used_internal_search = any(
used_tool.tool_name == ToolName.INTERNAL_SEARCH
for used_tool in response.used_tools
)
debug_has_internal_search = any(
debug_tool_call.tool_name == "internal_search"
for debug_tool_call in response.tool_call_debug
)
has_answer = bool(response.full_message.strip())
if used_internal_search and debug_has_internal_search and has_answer:
return
last_error = (
f"attempt={attempt} provider={provider} model={model_name} "
f"used_internal_search={used_internal_search} "
f"debug_internal_search={debug_has_internal_search} "
f"has_answer={has_answer} "
f"tool_call_debug={response.tool_call_debug}"
)
else:
last_error = (
f"attempt={attempt} provider={provider} model={model_name} "
f"stream_error={response.error.error}"
)
time.sleep(attempt)
pytest.fail(f"Chat/tool-call assertions failed: {last_error}")
def _create_and_test_provider_for_model(
admin_user: DATestUser,
config: NightlyProviderConfig,
model_name: str,
search_tool_id: int,
) -> None:
provider_name = f"nightly-{config.provider}-{uuid4().hex[:12]}"
resolved_api_base = config.api_base or _default_api_base_for_provider(
config.provider
)
provider_payload = _create_provider_payload(
provider=config.provider,
provider_name=provider_name,
model_name=model_name,
api_key=config.api_key,
api_base=resolved_api_base,
custom_config=config.custom_config,
)
test_response = requests.post(
f"{API_SERVER_URL}/admin/llm/test",
headers=admin_user.headers,
json=provider_payload,
)
assert test_response.status_code == 200, (
f"Provider test endpoint failed for provider={config.provider} "
f"model={model_name}: {test_response.status_code} {test_response.text}"
)
create_response = requests.put(
f"{API_SERVER_URL}/admin/llm/provider?is_creation=true",
headers=admin_user.headers,
json=provider_payload,
)
assert create_response.status_code == 200, (
f"Provider creation failed for provider={config.provider} "
f"model={model_name}: {create_response.status_code} {create_response.text}"
)
provider_id = create_response.json()["id"]
try:
set_default_response = requests.post(
f"{API_SERVER_URL}/admin/llm/provider/{provider_id}/default",
headers=admin_user.headers,
)
assert set_default_response.status_code == 200, (
f"Setting default provider failed for provider={config.provider} "
f"model={model_name}: {set_default_response.status_code} "
f"{set_default_response.text}"
)
_ensure_provider_is_default(provider_id=provider_id, admin_user=admin_user)
_run_chat_assertions(
admin_user=admin_user,
search_tool_id=search_tool_id,
provider=config.provider,
model_name=model_name,
)
finally:
requests.delete(
f"{API_SERVER_URL}/admin/llm/provider/{provider_id}",
headers=admin_user.headers,
)
def test_nightly_provider_chat_workflow(admin_user: DATestUser) -> None:
"""Nightly regression test for provider setup + default selection + chat tool calls."""
_assert_integration_mode_enabled()
config = _load_provider_config()
_validate_provider_config(config)
_seed_connector_for_search_tool(admin_user)
search_tool_id = _get_internal_search_tool_id(admin_user)
for model_name in config.model_names:
_create_and_test_provider_for_model(
admin_user=admin_user,
config=config,
model_name=model_name,
search_tool_id=search_tool_id,
)

View File

@@ -6,7 +6,7 @@ the permissions of the curator manipulating connector-credential pairs.
import os
import pytest
from requests.exceptions import HTTPError
from onyx_openapi_client.exceptions import ApiException # type: ignore[import-untyped,unused-ignore,import-not-found]
from onyx.db.enums import AccessType
from onyx.server.documents.models import DocumentSource
@@ -93,20 +93,9 @@ def test_cc_pair_permissions(reset: None) -> None: # noqa: ARG001
"""Tests for things Curators should not be able to do"""
# Curators should not be able to create a public cc pair
with pytest.raises(HTTPError):
CCPairManager.create(
connector_id=connector_1.id,
credential_id=credential_1.id,
name="invalid_cc_pair_1",
access_type=AccessType.PUBLIC,
groups=[user_group_1.id],
user_performing_action=curator,
)
# Curators should not be able to create a cc
# pair for a user group they are not a curator of
with pytest.raises(HTTPError):
with pytest.raises(ApiException):
CCPairManager.create(
connector_id=connector_1.id,
credential_id=credential_1.id,
@@ -118,7 +107,7 @@ def test_cc_pair_permissions(reset: None) -> None: # noqa: ARG001
# Curators should not be able to create a cc
# pair without an attached user group
with pytest.raises(HTTPError):
with pytest.raises(ApiException):
CCPairManager.create(
connector_id=connector_1.id,
credential_id=credential_1.id,
@@ -144,7 +133,7 @@ def test_cc_pair_permissions(reset: None) -> None: # noqa: ARG001
# Curators should not be able to create a cc
# pair for a user group that the credential does not belong to
with pytest.raises(HTTPError):
with pytest.raises(ApiException):
CCPairManager.create(
connector_id=connector_1.id,
credential_id=credential_2.id,
@@ -156,6 +145,16 @@ def test_cc_pair_permissions(reset: None) -> None: # noqa: ARG001
"""Tests for things Curators should be able to do"""
# Re-create connector since the credential_2 validation error above
# triggers connector deletion in the exception handler
connector_1 = ConnectorManager.create(
name="admin_owned_connector_2",
source=DocumentSource.CONFLUENCE,
groups=[user_group_1.id],
access_type=AccessType.PRIVATE,
user_performing_action=admin_user,
)
# Curators should be able to create a private
# cc pair for a user group they are a curator of
valid_cc_pair = CCPairManager.create(

View File

@@ -59,17 +59,7 @@ def test_connector_permissions(reset: None) -> None: # noqa: ARG001
"""Tests for things Curators should not be able to do"""
# Curators should not be able to create a public connector
with pytest.raises(HTTPError):
ConnectorManager.create(
name="invalid_connector_1",
source=DocumentSource.CONFLUENCE,
groups=[user_group_1.id],
access_type=AccessType.PUBLIC,
user_performing_action=curator,
)
# Curators should not be able to create a connector for a
# user group they are not a curator of
with pytest.raises(HTTPError):
ConnectorManager.create(
@@ -133,12 +123,12 @@ def test_connector_permissions(reset: None) -> None: # noqa: ARG001
user_performing_action=curator,
)
# Test that curator cannot create a public connector
with pytest.raises(HTTPError):
ConnectorManager.create(
name="invalid_connector_4",
source=DocumentSource.CONFLUENCE,
groups=[user_group_1.id],
access_type=AccessType.PUBLIC,
user_performing_action=curator,
)
# Curators should be able to create a public connector
public_connector = ConnectorManager.create(
name="curator_public_connector",
source=DocumentSource.CONFLUENCE,
groups=[user_group_1.id],
access_type=AccessType.PUBLIC,
user_performing_action=curator,
)
assert public_connector.id is not None

View File

@@ -58,16 +58,6 @@ def test_credential_permissions(reset: None) -> None: # noqa: ARG001
"""Tests for things Curators should not be able to do"""
# Curators should not be able to create a public credential
with pytest.raises(HTTPError):
CredentialManager.create(
name="invalid_credential_1",
source=DocumentSource.CONFLUENCE,
groups=[user_group_1.id],
curator_public=True,
user_performing_action=curator,
)
# Curators should not be able to create a credential for a user group they are not a curator of
with pytest.raises(HTTPError):
CredentialManager.create(
@@ -113,3 +103,16 @@ def test_credential_permissions(reset: None) -> None: # noqa: ARG001
verify_deleted=True,
user_performing_action=curator,
)
# Curators should be able to create a public credential
public_credential = CredentialManager.create(
name="curator_public_credential",
source=DocumentSource.CONFLUENCE,
groups=[user_group_1.id],
curator_public=True,
user_performing_action=curator,
)
CredentialManager.verify(
credential=public_credential,
user_performing_action=curator,
)

View File

@@ -70,10 +70,11 @@ def test_doc_set_permissions_setup(reset: None) -> None: # noqa: ARG001
"""Tests for things Curators/Admins should not be able to do"""
# Test that curator cannot create a non-public document set for the group they don't curate
with pytest.raises(HTTPError):
DocumentSetManager.create(
name="Invalid Document Set 1",
is_public=False,
groups=[user_group_2.id],
cc_pair_ids=[public_cc_pair.id],
user_performing_action=curator,

View File

@@ -6,12 +6,14 @@ from datetime import timedelta
from datetime import timezone
from io import BytesIO
from io import StringIO
from uuid import UUID
from zipfile import ZipFile
import pytest
import requests
from ee.onyx.db.usage_export import UsageReportMetadata
from onyx.configs.constants import DEFAULT_PERSONA_ID
from onyx.db.seeding.chat_history_seeding import seed_chat_history
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.test_models import DATestUser
@@ -26,7 +28,13 @@ class TestUsageExportAPI:
self, reset: None, admin_user: DATestUser # noqa: ARG002
) -> None:
# Seed some chat history data for the report
seed_chat_history(num_sessions=10, num_messages=4, days=30)
seed_chat_history(
num_sessions=10,
num_messages=4,
days=30,
user_id=UUID(admin_user.id),
persona_id=DEFAULT_PERSONA_ID,
)
# Get initial list of reports
initial_response = requests.get(
@@ -76,7 +84,13 @@ class TestUsageExportAPI:
self, reset: None, admin_user: DATestUser # noqa: ARG002
) -> None:
# Seed some chat history data
seed_chat_history(num_sessions=20, num_messages=4, days=60)
seed_chat_history(
num_sessions=20,
num_messages=4,
days=60,
user_id=UUID(admin_user.id),
persona_id=DEFAULT_PERSONA_ID,
)
# Get initial list of reports
initial_response = requests.get(
@@ -148,7 +162,13 @@ class TestUsageExportAPI:
self, reset: None, admin_user: DATestUser # noqa: ARG002
) -> None:
# First generate a report to ensure we have at least one
seed_chat_history(num_sessions=5, num_messages=4, days=30)
seed_chat_history(
num_sessions=5,
num_messages=4,
days=30,
user_id=UUID(admin_user.id),
persona_id=DEFAULT_PERSONA_ID,
)
# Get initial count
initial_response = requests.get(
@@ -204,7 +224,13 @@ class TestUsageExportAPI:
self, reset: None, admin_user: DATestUser # noqa: ARG002
) -> None:
# First generate a report
seed_chat_history(num_sessions=5, num_messages=4, days=30)
seed_chat_history(
num_sessions=5,
num_messages=4,
days=30,
user_id=UUID(admin_user.id),
persona_id=DEFAULT_PERSONA_ID,
)
# Get initial reports count
initial_response = requests.get(
@@ -352,7 +378,13 @@ class TestUsageExportAPI:
self, reset: None, admin_user: DATestUser # noqa: ARG002
) -> None:
# Seed some data
seed_chat_history(num_sessions=10, num_messages=4, days=30)
seed_chat_history(
num_sessions=10,
num_messages=4,
days=30,
user_id=UUID(admin_user.id),
persona_id=DEFAULT_PERSONA_ID,
)
# Get initial count of reports
initial_response = requests.get(

View File

@@ -25,6 +25,11 @@ def test_add_users_to_group(reset: None) -> None: # noqa: ARG001
user_performing_action=admin_user,
)
UserGroupManager.wait_for_sync(
user_performing_action=admin_user,
user_groups_to_check=[user_group],
)
updated_user_group = UserGroupManager.add_users(
user_group=user_group,
user_ids=[user_to_add.id],

View File

@@ -0,0 +1,168 @@
from unittest.mock import MagicMock
from unittest.mock import patch
from uuid import uuid4
import pytest
from onyx.background.celery.tasks.user_file_processing.tasks import (
_user_file_project_sync_queued_key,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
check_for_user_file_project_sync,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
enqueue_user_file_project_sync_task,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
process_single_user_file_project_sync,
)
from onyx.configs.constants import CELERY_USER_FILE_PROJECT_SYNC_TASK_EXPIRES
from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryQueues
from onyx.configs.constants import OnyxCeleryTask
from onyx.configs.constants import USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH
def _build_redis_mock_with_lock() -> tuple[MagicMock, MagicMock]:
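# Build a MagicMock redis client whose lock() hands back an already-acquired,
# owned lock, mimicking the happy path of the real client.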
redis_client = MagicMock()
lock = MagicMock()
lock.acquire.return_value = True
lock.owned.return_value = True
redis_client.lock.return_value = lock
return redis_client, lock
@patch(
"onyx.background.celery.tasks.user_file_processing.tasks."
"get_user_file_project_sync_queue_depth"
)
@patch("onyx.background.celery.tasks.user_file_processing.tasks.get_redis_client")
def test_check_for_user_file_project_sync_applies_queue_backpressure(
mock_get_redis_client: MagicMock,
mock_get_queue_depth: MagicMock,
) -> None:
redis_client, lock = _build_redis_mock_with_lock()
mock_get_redis_client.return_value = redis_client
mock_get_queue_depth.return_value = USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH + 1
task_app = MagicMock()
with patch.object(check_for_user_file_project_sync, "app", task_app):
check_for_user_file_project_sync.run(tenant_id="test-tenant")
task_app.send_task.assert_not_called()
lock.release.assert_called_once()
@patch(
"onyx.background.celery.tasks.user_file_processing.tasks."
"enqueue_user_file_project_sync_task"
)
@patch(
"onyx.background.celery.tasks.user_file_processing.tasks."
"get_user_file_project_sync_queue_depth"
)
@patch(
"onyx.background.celery.tasks.user_file_processing.tasks."
"get_session_with_current_tenant"
)
@patch("onyx.background.celery.tasks.user_file_processing.tasks.get_redis_client")
def test_check_for_user_file_project_sync_skips_duplicates(
mock_get_redis_client: MagicMock,
mock_get_session: MagicMock,
mock_get_queue_depth: MagicMock,
mock_enqueue: MagicMock,
) -> None:
redis_client, lock = _build_redis_mock_with_lock()
mock_get_redis_client.return_value = redis_client
mock_get_queue_depth.return_value = 0
user_file_id_one = uuid4()
user_file_id_two = uuid4()
session = MagicMock()
session.execute.return_value.scalars.return_value.all.return_value = [
user_file_id_one,
user_file_id_two,
]
mock_get_session.return_value.__enter__.return_value = session
mock_enqueue.side_effect = [True, False]
task_app = MagicMock()
with patch.object(check_for_user_file_project_sync, "app", task_app):
check_for_user_file_project_sync.run(tenant_id="test-tenant")
assert mock_enqueue.call_count == 2
lock.release.assert_called_once()
def test_enqueue_user_file_project_sync_task_sets_guard_and_expiry() -> None:
redis_client = MagicMock()
redis_client.set.return_value = True
celery_app = MagicMock()
user_file_id = str(uuid4())
enqueued = enqueue_user_file_project_sync_task(
celery_app=celery_app,
redis_client=redis_client,
user_file_id=user_file_id,
tenant_id="test-tenant",
priority=OnyxCeleryPriority.HIGHEST,
)
assert enqueued is True
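# The guard key is set with NX so only the first enqueue per user_file_id
# succeeds until the key expires alongside the task.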
redis_client.set.assert_called_once_with(
_user_file_project_sync_queued_key(user_file_id),
1,
nx=True,
ex=CELERY_USER_FILE_PROJECT_SYNC_TASK_EXPIRES,
)
celery_app.send_task.assert_called_once_with(
OnyxCeleryTask.PROCESS_SINGLE_USER_FILE_PROJECT_SYNC,
kwargs={"user_file_id": user_file_id, "tenant_id": "test-tenant"},
queue=OnyxCeleryQueues.USER_FILE_PROJECT_SYNC,
priority=OnyxCeleryPriority.HIGHEST,
expires=CELERY_USER_FILE_PROJECT_SYNC_TASK_EXPIRES,
)
def test_enqueue_user_file_project_sync_task_rolls_back_guard_on_publish_failure() -> (
None
):
redis_client = MagicMock()
redis_client.set.return_value = True
celery_app = MagicMock()
celery_app.send_task.side_effect = RuntimeError("publish failed")
user_file_id = str(uuid4())
with pytest.raises(RuntimeError):
enqueue_user_file_project_sync_task(
celery_app=celery_app,
redis_client=redis_client,
user_file_id=user_file_id,
tenant_id="test-tenant",
)
redis_client.delete.assert_called_once_with(
_user_file_project_sync_queued_key(user_file_id)
)
@patch("onyx.background.celery.tasks.user_file_processing.tasks.get_redis_client")
def test_process_single_user_file_project_sync_clears_queued_guard_on_pickup(
mock_get_redis_client: MagicMock,
) -> None:
redis_client = MagicMock()
lock = MagicMock()
lock.acquire.return_value = False
redis_client.lock.return_value = lock
mock_get_redis_client.return_value = redis_client
user_file_id = str(uuid4())
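# Even though the per-file lock is not acquired here, pickup should still
# clear the queued guard so the beat task can re-enqueue the file later.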
process_single_user_file_project_sync.run(
user_file_id=user_file_id,
tenant_id="test-tenant",
)
redis_client.delete.assert_called_once_with(
_user_file_project_sync_queued_key(user_file_id)
)

View File

@@ -0,0 +1,95 @@
from onyx.configs.constants import DocumentSource
from onyx.connectors.models import Document
from onyx.connectors.models import DocumentBase
from onyx.connectors.models import TextSection
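# Document/DocumentBase coerce metadata values to str (or list[str]); the
# tests below pin that behavior across scalar, list, dict, and None inputs.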
def _minimal_doc_kwargs(metadata: dict) -> dict:
return {
"id": "test-doc",
"sections": [TextSection(text="hello", link="http://example.com")],
"source": DocumentSource.NOT_APPLICABLE,
"semantic_identifier": "Test Doc",
"metadata": metadata,
}
def test_int_values_coerced_to_str() -> None:
doc = Document(**_minimal_doc_kwargs({"count": 42}))
assert doc.metadata == {"count": "42"}
def test_float_values_coerced_to_str() -> None:
doc = Document(**_minimal_doc_kwargs({"score": 3.14}))
assert doc.metadata == {"score": "3.14"}
def test_bool_values_coerced_to_str() -> None:
doc = Document(**_minimal_doc_kwargs({"active": True}))
assert doc.metadata == {"active": "True"}
def test_list_of_ints_coerced_to_list_of_str() -> None:
doc = Document(**_minimal_doc_kwargs({"ids": [1, 2, 3]}))
assert doc.metadata == {"ids": ["1", "2", "3"]}
def test_list_of_mixed_types_coerced_to_list_of_str() -> None:
doc = Document(**_minimal_doc_kwargs({"tags": ["a", 1, True, 2.5]}))
assert doc.metadata == {"tags": ["a", "1", "True", "2.5"]}
def test_list_of_dicts_coerced_to_list_of_str() -> None:
raw = {"nested": [{"key": "val"}, {"key2": "val2"}]}
doc = Document(**_minimal_doc_kwargs(raw))
assert doc.metadata == {"nested": ["{'key': 'val'}", "{'key2': 'val2'}"]}
def test_dict_value_coerced_to_str() -> None:
raw = {"info": {"inner_key": "inner_val"}}
doc = Document(**_minimal_doc_kwargs(raw))
assert doc.metadata == {"info": "{'inner_key': 'inner_val'}"}
def test_none_value_coerced_to_str() -> None:
doc = Document(**_minimal_doc_kwargs({"empty": None}))
assert doc.metadata == {"empty": "None"}
def test_already_valid_str_values_unchanged() -> None:
doc = Document(**_minimal_doc_kwargs({"key": "value"}))
assert doc.metadata == {"key": "value"}
def test_already_valid_list_of_str_unchanged() -> None:
doc = Document(**_minimal_doc_kwargs({"tags": ["a", "b", "c"]}))
assert doc.metadata == {"tags": ["a", "b", "c"]}
def test_empty_metadata_unchanged() -> None:
doc = Document(**_minimal_doc_kwargs({}))
assert doc.metadata == {}
def test_mixed_metadata_values() -> None:
raw = {
"str_val": "hello",
"int_val": 99,
"list_val": [1, "two", 3.0],
"dict_val": {"nested": True},
}
doc = Document(**_minimal_doc_kwargs(raw))
assert doc.metadata == {
"str_val": "hello",
"int_val": "99",
"list_val": ["1", "two", "3.0"],
"dict_val": "{'nested': True}",
}
def test_coercion_works_on_base_class() -> None:
kwargs = _minimal_doc_kwargs({"count": 42})
kwargs.pop("source")
kwargs.pop("id")
doc = DocumentBase(**kwargs)
assert doc.metadata == {"count": "42"}

View File

@@ -0,0 +1,52 @@
import pytest
from office365.graph_client import AzureEnvironment # type: ignore[import-untyped]
from onyx.connectors.exceptions import ConnectorValidationError
from onyx.connectors.microsoft_graph_env import resolve_microsoft_environment
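# resolve_microsoft_environment maps a (graph host, authority host) pair to an
# AzureEnvironment plus SharePoint domain suffix, rejecting mismatched or
# unknown hosts.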
def test_resolve_global_defaults() -> None:
env = resolve_microsoft_environment(
"https://graph.microsoft.com", "https://login.microsoftonline.com"
)
assert env.environment == AzureEnvironment.Global
assert env.sharepoint_domain_suffix == "sharepoint.com"
def test_resolve_gcc_high() -> None:
env = resolve_microsoft_environment(
"https://graph.microsoft.us", "https://login.microsoftonline.us"
)
assert env.environment == AzureEnvironment.USGovernmentHigh
assert env.graph_host == "https://graph.microsoft.us"
assert env.authority_host == "https://login.microsoftonline.us"
assert env.sharepoint_domain_suffix == "sharepoint.us"
def test_resolve_dod() -> None:
env = resolve_microsoft_environment(
"https://dod-graph.microsoft.us", "https://login.microsoftonline.us"
)
assert env.environment == AzureEnvironment.USGovernmentDoD
assert env.sharepoint_domain_suffix == "sharepoint.us"
def test_trailing_slashes_are_stripped() -> None:
env = resolve_microsoft_environment(
"https://graph.microsoft.us/", "https://login.microsoftonline.us/"
)
assert env.environment == AzureEnvironment.USGovernmentHigh
def test_mismatched_authority_raises() -> None:
with pytest.raises(ConnectorValidationError, match="inconsistent"):
resolve_microsoft_environment(
"https://graph.microsoft.us", "https://login.microsoftonline.com"
)
def test_unknown_graph_host_raises() -> None:
with pytest.raises(ConnectorValidationError, match="Unsupported"):
resolve_microsoft_environment(
"https://graph.example.com", "https://login.example.com"
)

View File

@@ -1,10 +1,12 @@
import json
from unittest.mock import patch
import pytest
from onyx.image_gen.exceptions import ImageProviderCredentialsError
from onyx.image_gen.factory import get_image_generation_provider
from onyx.image_gen.interfaces import ImageGenerationProviderCredentials
from onyx.image_gen.interfaces import ReferenceImage
from onyx.image_gen.providers.azure_img_gen import AzureImageGenerationProvider
from onyx.image_gen.providers.openai_img_gen import OpenAIImageGenerationProvider
from onyx.image_gen.providers.vertex_img_gen import VertexImageGenerationProvider
@@ -45,6 +47,8 @@ def test_build_openai_provider_from_api_key_and_base() -> None:
assert isinstance(image_gen_provider, OpenAIImageGenerationProvider)
assert image_gen_provider._api_key == "test"
assert image_gen_provider._api_base == "test"
assert image_gen_provider.supports_reference_images is True
assert image_gen_provider.max_reference_images == 16
def test_build_openai_provider_fails_no_api_key() -> None:
@@ -73,6 +77,8 @@ def test_build_azure_provider_from_api_key_and_base_and_version() -> None:
assert image_gen_provider._api_key == "test"
assert image_gen_provider._api_base == "test"
assert image_gen_provider._api_version == "test"
assert image_gen_provider.supports_reference_images is True
assert image_gen_provider.max_reference_images == 16
def test_build_azure_provider_fails_missing_credential() -> None:
@@ -133,3 +139,195 @@ def test_build_vertex_provider_with_missing_project_id() -> None:
with pytest.raises(ImageProviderCredentialsError):
get_image_generation_provider("vertex_ai", credentials)
def test_openai_provider_uses_image_generation_without_reference_images() -> None:
provider = OpenAIImageGenerationProvider(
api_key="test-key",
api_base="test-base",
)
expected_response = object()
with (
patch("litellm.image_generation", return_value=expected_response) as mock_gen,
patch("litellm.image_edit") as mock_edit,
):
response = provider.generate_image(
prompt="draw a mountain",
model="gpt-image-1",
size="1024x1024",
n=1,
quality="high",
)
assert response is expected_response
mock_gen.assert_called_once()
mock_edit.assert_not_called()
def test_openai_provider_uses_image_edit_with_reference_images() -> None:
provider = OpenAIImageGenerationProvider(
api_key="test-key",
api_base="test-base",
)
reference_images = [
ReferenceImage(data=b"image-1-bytes", mime_type="image/png"),
ReferenceImage(data=b"image-2-bytes", mime_type="image/jpeg"),
]
expected_response = object()
with (
patch("litellm.image_generation") as mock_gen,
patch("litellm.image_edit", return_value=expected_response) as mock_edit,
):
response = provider.generate_image(
prompt="make this look watercolor",
model="gpt-image-1",
size="1024x1024",
n=1,
quality="high",
reference_images=reference_images,
)
assert response is expected_response
mock_gen.assert_not_called()
mock_edit.assert_called_once()
assert mock_edit.call_args.kwargs["image"] == [
b"image-1-bytes",
b"image-2-bytes",
]
def test_openai_provider_rejects_reference_images_for_unsupported_model() -> None:
provider = OpenAIImageGenerationProvider(api_key="test-key")
with pytest.raises(ValueError):
provider.generate_image(
prompt="edit this image",
model="dall-e-3",
size="1024x1024",
n=1,
reference_images=[ReferenceImage(data=b"image-1", mime_type="image/png")],
)
def test_openai_provider_rejects_multiple_reference_images_for_dalle3() -> None:
provider = OpenAIImageGenerationProvider(api_key="test-key")
with pytest.raises(
ValueError,
match="does not support image edits with reference images",
):
provider.generate_image(
prompt="edit this image",
model="dall-e-3",
size="1024x1024",
n=1,
reference_images=[
ReferenceImage(data=b"image-1", mime_type="image/png"),
ReferenceImage(data=b"image-2", mime_type="image/png"),
],
)
def test_azure_provider_uses_image_generation_without_reference_images() -> None:
provider = AzureImageGenerationProvider(
api_key="test-key",
api_base="https://azure.example.com",
api_version="2024-05-01-preview",
deployment_name="img-deployment",
)
expected_response = object()
with (
patch("litellm.image_generation", return_value=expected_response) as mock_gen,
patch("litellm.image_edit") as mock_edit,
):
response = provider.generate_image(
prompt="draw a skyline",
model="gpt-image-1",
size="1024x1024",
n=1,
quality="high",
)
assert response is expected_response
mock_gen.assert_called_once()
mock_edit.assert_not_called()
assert mock_gen.call_args.kwargs["model"] == "azure/img-deployment"
def test_azure_provider_uses_image_edit_with_reference_images() -> None:
provider = AzureImageGenerationProvider(
api_key="test-key",
api_base="https://azure.example.com",
api_version="2024-05-01-preview",
deployment_name="img-deployment",
)
reference_images = [
ReferenceImage(data=b"image-1-bytes", mime_type="image/png"),
ReferenceImage(data=b"image-2-bytes", mime_type="image/jpeg"),
]
expected_response = object()
with (
patch("litellm.image_generation") as mock_gen,
patch("litellm.image_edit", return_value=expected_response) as mock_edit,
):
response = provider.generate_image(
prompt="make this noir style",
model="gpt-image-1",
size="1024x1024",
n=1,
quality="high",
reference_images=reference_images,
)
assert response is expected_response
mock_gen.assert_not_called()
mock_edit.assert_called_once()
assert mock_edit.call_args.kwargs["model"] == "azure/img-deployment"
assert mock_edit.call_args.kwargs["image"] == [
b"image-1-bytes",
b"image-2-bytes",
]
def test_azure_provider_rejects_reference_images_for_unsupported_model() -> None:
provider = AzureImageGenerationProvider(
api_key="test-key",
api_base="https://azure.example.com",
api_version="2024-05-01-preview",
)
with pytest.raises(ValueError):
provider.generate_image(
prompt="edit this image",
model="dall-e-3",
size="1024x1024",
n=1,
reference_images=[ReferenceImage(data=b"image-1", mime_type="image/png")],
)
def test_azure_provider_rejects_multiple_reference_images_for_dalle3() -> None:
provider = AzureImageGenerationProvider(
api_key="test-key",
api_base="https://azure.example.com",
api_version="2024-05-01-preview",
)
with pytest.raises(
ValueError,
match="does not support image edits with reference images",
):
provider.generate_image(
prompt="edit this image",
model="dall-e-3",
size="1024x1024",
n=1,
reference_images=[
ReferenceImage(data=b"image-1", mime_type="image/png"),
ReferenceImage(data=b"image-2", mime_type="image/png"),
],
)
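
The scenarios above (generation without references, edit with references, and the dall-e-3 rejections) all reduce to one dispatch decision inside `generate_image`. A hedged sketch of that decision follows; `EDIT_CAPABLE_MODELS` and `model_prefix` are illustrative assumptions, not the providers' real attributes, and the call shapes are inferred only from what the mocks assert:

```python
# Sketch of the dispatch the tests above exercise; the real providers in
# onyx.image_gen.providers wrap this in classes and credential handling.
from typing import Any, Sequence

import litellm

EDIT_CAPABLE_MODELS = {"gpt-image-1"}  # assumption: dall-e-3 is generation-only


def generate_image(
    prompt: str,
    model: str,
    size: str,
    n: int,
    quality: str | None = None,
    reference_images: Sequence[Any] | None = None,
    model_prefix: str = "",  # Azure providers prepend "azure/<deployment>"
) -> Any:
    full_model = f"{model_prefix}{model}" if model_prefix else model
    if reference_images:
        if model not in EDIT_CAPABLE_MODELS:
            raise ValueError(
                f"{model} does not support image edits with reference images"
            )
        # Reference images are forwarded as raw bytes, in order
        # (the tests assert kwargs["image"] is the list of byte payloads).
        return litellm.image_edit(
            model=full_model,
            prompt=prompt,
            image=[ref.data for ref in reference_images],
            size=size,
            n=n,
        )
    return litellm.image_generation(
        model=full_model, prompt=prompt, size=size, n=n, quality=quality
    )
```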

View File

@@ -0,0 +1,159 @@
from pytest import MonkeyPatch
from onyx.access.models import ExternalAccess
from onyx.connectors.models import BasicExpertInfo
from onyx.connectors.models import Document
from onyx.connectors.models import DocumentSource
from onyx.connectors.models import HierarchyNode
from onyx.connectors.models import IndexAttemptMetadata
from onyx.connectors.models import TextSection
from onyx.db.enums import HierarchyNodeType
from onyx.indexing import indexing_pipeline
from onyx.indexing.postgres_sanitization import sanitize_document_for_postgres
from onyx.indexing.postgres_sanitization import sanitize_hierarchy_node_for_postgres
def test_sanitize_document_for_postgres_removes_nul_bytes() -> None:
document = Document(
id="doc\x00-id",
source=DocumentSource.FILE,
semantic_identifier="sem\x00-id",
title="ti\x00tle",
parent_hierarchy_raw_node_id="parent\x00-id",
sections=[TextSection(link="lin\x00k", text="te\x00xt")],
metadata={"ke\x00y": "va\x00lue", "list\x00key": ["a\x00", "b"]},
doc_metadata={
"j\x00son": {
"in\x00ner": "va\x00l",
"arr": ["x\x00", {"dee\x00p": "y\x00"}],
}
},
primary_owners=[BasicExpertInfo(display_name="Ali\x00ce", email="a\x00@x.com")],
secondary_owners=[BasicExpertInfo(first_name="Bo\x00b", last_name="Sm\x00ith")],
external_access=ExternalAccess(
external_user_emails={"user\x00@example.com"},
external_user_group_ids={"gro\x00up-1"},
is_public=False,
),
)
sanitized = sanitize_document_for_postgres(document)
assert sanitized.id == "doc-id"
assert sanitized.semantic_identifier == "sem-id"
assert sanitized.title == "title"
assert sanitized.parent_hierarchy_raw_node_id == "parent-id"
assert sanitized.sections[0].link == "link"
assert sanitized.sections[0].text == "text"
assert sanitized.metadata == {"key": "value", "listkey": ["a", "b"]}
assert sanitized.doc_metadata == {
"json": {"inner": "val", "arr": ["x", {"deep": "y"}]}
}
assert sanitized.primary_owners is not None
assert sanitized.primary_owners[0].display_name == "Alice"
assert sanitized.primary_owners[0].email == "a@x.com"
assert sanitized.secondary_owners is not None
assert sanitized.secondary_owners[0].first_name == "Bob"
assert sanitized.secondary_owners[0].last_name == "Smith"
assert sanitized.external_access is not None
assert sanitized.external_access.external_user_emails == {"user@example.com"}
assert sanitized.external_access.external_user_group_ids == {"group-1"}
# Ensure original document is not mutated
assert document.id == "doc\x00-id"
assert document.metadata == {"ke\x00y": "va\x00lue", "list\x00key": ["a\x00", "b"]}
def test_sanitize_hierarchy_node_for_postgres_removes_nul_bytes() -> None:
node = HierarchyNode(
raw_node_id="raw\x00-id",
raw_parent_id="paren\x00t-id",
display_name="fol\x00der",
link="https://exa\x00mple.com",
node_type=HierarchyNodeType.FOLDER,
external_access=ExternalAccess(
external_user_emails={"a\x00@example.com"},
external_user_group_ids={"g\x00-1"},
is_public=True,
),
)
sanitized = sanitize_hierarchy_node_for_postgres(node)
assert sanitized.raw_node_id == "raw-id"
assert sanitized.raw_parent_id == "parent-id"
assert sanitized.display_name == "folder"
assert sanitized.link == "https://example.com"
assert sanitized.external_access is not None
assert sanitized.external_access.external_user_emails == {"a@example.com"}
assert sanitized.external_access.external_user_group_ids == {"g-1"}
def test_index_doc_batch_prepare_sanitizes_before_db_ops(
monkeypatch: MonkeyPatch,
) -> None:
document = Document(
id="doc\x00id",
source=DocumentSource.FILE,
semantic_identifier="sem\x00id",
sections=[TextSection(text="content", link="li\x00nk")],
metadata={"ke\x00y": "va\x00lue"},
)
captured: dict[str, object] = {}
def _get_documents_by_ids(db_session: object, document_ids: list[str]) -> list:
_ = db_session, document_ids
return []
monkeypatch.setattr(
indexing_pipeline, "get_documents_by_ids", _get_documents_by_ids
)
def _capture_upsert_documents_in_db(**kwargs: object) -> None:
captured["upsert_documents"] = kwargs["documents"]
monkeypatch.setattr(
indexing_pipeline, "_upsert_documents_in_db", _capture_upsert_documents_in_db
)
def _capture_doc_cc_pair(*args: object) -> None:
captured["cc_pair_doc_ids"] = args[3]
monkeypatch.setattr(
indexing_pipeline,
"upsert_document_by_connector_credential_pair",
_capture_doc_cc_pair,
)
def _noop_link_hierarchy_nodes_to_documents(
db_session: object,
document_ids: list[str],
source: DocumentSource,
commit: bool,
) -> int:
_ = db_session, document_ids, source, commit
return 0
monkeypatch.setattr(
indexing_pipeline,
"link_hierarchy_nodes_to_documents",
_noop_link_hierarchy_nodes_to_documents,
)
context = indexing_pipeline.index_doc_batch_prepare(
documents=[document],
index_attempt_metadata=IndexAttemptMetadata(connector_id=1, credential_id=2),
db_session=object(), # type: ignore[arg-type]
ignore_time_skip=True,
)
assert context is not None
assert context.updatable_docs[0].id == "docid"
assert context.updatable_docs[0].semantic_identifier == "semid"
assert context.updatable_docs[0].metadata == {"key": "value"}
assert captured["cc_pair_doc_ids"] == ["docid"]
upsert_documents = captured["upsert_documents"]
assert isinstance(upsert_documents, list)
assert upsert_documents[0].id == "docid"
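
Both sanitizers come down to the same recursion: walk every string reachable from the model, drop NUL bytes, and return a copy so the original is never mutated. A minimal sketch of that core (`strip_nul` is an illustrative name; the real helpers operate on copies of the pydantic models):

```python
# Illustrative recursion only; not the actual onyx.indexing helpers.
from typing import Any


def strip_nul(value: Any) -> Any:
    """Return a copy of value with "\\x00" removed from every nested string."""
    if isinstance(value, str):
        return value.replace("\x00", "")
    if isinstance(value, dict):
        return {strip_nul(k): strip_nul(v) for k, v in value.items()}
    if isinstance(value, list):
        return [strip_nul(v) for v in value]
    if isinstance(value, set):
        return {strip_nul(v) for v in value}
    return value


assert strip_nul({"ke\x00y": ["a\x00", {"dee\x00p": "y\x00"}]}) == {
    "key": ["a", {"deep": "y"}]
}
```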

View File

@@ -0,0 +1,52 @@
from onyx.onyxbot.slack.formatting import _normalize_citation_link_destinations
from onyx.onyxbot.slack.formatting import format_slack_message
from onyx.onyxbot.slack.utils import remove_slack_text_interactions
from onyx.utils.text_processing import decode_escapes
def test_normalize_citation_link_wraps_url_with_parentheses() -> None:
message = (
"See [[1]](https://example.com/Access%20ID%20Card(s)%20Guide.pdf) for details."
)
normalized = _normalize_citation_link_destinations(message)
assert (
"See [[1]](<https://example.com/Access%20ID%20Card(s)%20Guide.pdf>) for details."
== normalized
)
def test_normalize_citation_link_keeps_existing_angle_brackets() -> None:
message = "[[1]](<https://example.com/Access%20ID%20Card(s)%20Guide.pdf>)"
normalized = _normalize_citation_link_destinations(message)
assert message == normalized
def test_normalize_citation_link_handles_multiple_links() -> None:
message = (
"[[1]](https://example.com/(USA)%20Guide.pdf) "
"[[2]](https://example.com/Plan(s)%20Overview.pdf)"
)
normalized = _normalize_citation_link_destinations(message)
assert "[[1]](<https://example.com/(USA)%20Guide.pdf>)" in normalized
assert "[[2]](<https://example.com/Plan(s)%20Overview.pdf>)" in normalized
def test_format_slack_message_keeps_parenthesized_citation_links_intact() -> None:
message = (
"Download [[1]](https://example.com/(USA)%20Access%20ID%20Card(s)%20Guide.pdf)"
)
formatted = format_slack_message(message)
rendered = decode_escapes(remove_slack_text_interactions(formatted))
assert (
"<https://example.com/(USA)%20Access%20ID%20Card(s)%20Guide.pdf|[1]>"
in rendered
)
assert "|[1]>%20Access%20ID%20Card" not in rendered

View File

@@ -1,10 +1,12 @@
"""Test bulk invite limit for free trial tenants."""
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from fastapi import HTTPException
from onyx.server.manage.models import EmailInviteStatus
from onyx.server.manage.users import bulk_invite_users
@@ -33,6 +35,7 @@ def test_trial_tenant_cannot_exceed_invite_limit(*_mocks: None) -> None:
@patch("onyx.server.manage.users.get_invited_users", return_value=[])
@patch("onyx.server.manage.users.get_all_users", return_value=[])
@patch("onyx.server.manage.users.write_invited_users", return_value=3)
@patch("onyx.server.manage.users.enforce_seat_limit")
@patch("onyx.server.manage.users.NUM_FREE_TRIAL_USER_INVITES", 5)
@patch(
"onyx.server.manage.users.fetch_ee_implementation_or_noop",
@@ -44,4 +47,69 @@ def test_trial_tenant_can_invite_within_limit(*_mocks: None) -> None:
result = bulk_invite_users(emails=emails)
assert result == 3
assert result.invited_count == 3
assert result.email_invite_status == EmailInviteStatus.DISABLED
# --- email_invite_status tests ---
_COMMON_PATCHES = [
patch("onyx.server.manage.users.MULTI_TENANT", False),
patch("onyx.server.manage.users.get_current_tenant_id", return_value="test_tenant"),
patch("onyx.server.manage.users.get_invited_users", return_value=[]),
patch("onyx.server.manage.users.get_all_users", return_value=[]),
patch("onyx.server.manage.users.write_invited_users", return_value=1),
patch("onyx.server.manage.users.enforce_seat_limit"),
]
def _with_common_patches(fn: object) -> object:
for p in reversed(_COMMON_PATCHES):
fn = p(fn) # type: ignore
return fn
@_with_common_patches
@patch("onyx.server.manage.users.ENABLE_EMAIL_INVITES", False)
def test_email_invite_status_disabled(*_mocks: None) -> None:
"""When email invites are disabled, status is disabled."""
result = bulk_invite_users(emails=["user@example.com"])
assert result.email_invite_status == EmailInviteStatus.DISABLED
@_with_common_patches
@patch("onyx.server.manage.users.ENABLE_EMAIL_INVITES", True)
@patch("onyx.server.manage.users.EMAIL_CONFIGURED", False)
def test_email_invite_status_not_configured(*_mocks: None) -> None:
"""When email invites are enabled but no server is configured, status is not_configured."""
result = bulk_invite_users(emails=["user@example.com"])
assert result.email_invite_status == EmailInviteStatus.NOT_CONFIGURED
@_with_common_patches
@patch("onyx.server.manage.users.ENABLE_EMAIL_INVITES", True)
@patch("onyx.server.manage.users.EMAIL_CONFIGURED", True)
@patch("onyx.server.manage.users.send_user_email_invite")
def test_email_invite_status_sent(mock_send: MagicMock, *_mocks: None) -> None:
"""When email invites are enabled and configured, status is sent."""
result = bulk_invite_users(emails=["user@example.com"])
mock_send.assert_called_once()
assert result.email_invite_status == EmailInviteStatus.SENT
@_with_common_patches
@patch("onyx.server.manage.users.ENABLE_EMAIL_INVITES", True)
@patch("onyx.server.manage.users.EMAIL_CONFIGURED", True)
@patch(
"onyx.server.manage.users.send_user_email_invite",
side_effect=Exception("SMTP auth failed"),
)
def test_email_invite_status_send_failed(*_mocks: None) -> None:
"""When email sending throws, status is send_failed and invite is still saved."""
result = bulk_invite_users(emails=["user@example.com"])
assert result.email_invite_status == EmailInviteStatus.SEND_FAILED
assert result.invited_count == 1
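
The four tests encode a simple status ladder: disabled beats not-configured, and a send attempt only happens once both flags pass. A sketch of that ladder, where the enum string values and the exact control flow are assumptions (only the flag names and statuses come from the patches and assertions above):

```python
# Sketch of the email_invite_status decision the tests above pin down.
from enum import Enum
from typing import Callable


class EmailInviteStatus(str, Enum):
    DISABLED = "disabled"
    NOT_CONFIGURED = "not_configured"
    SENT = "sent"
    SEND_FAILED = "send_failed"


def determine_invite_status(
    enable_email_invites: bool,
    email_configured: bool,
    send_invite: Callable[[], None],
) -> EmailInviteStatus:
    if not enable_email_invites:
        return EmailInviteStatus.DISABLED
    if not email_configured:
        return EmailInviteStatus.NOT_CONFIGURED
    try:
        send_invite()
    except Exception:
        # Sending failed, but the invite record is already persisted,
        # matching the invited_count assertion in the last test.
        return EmailInviteStatus.SEND_FAILED
    return EmailInviteStatus.SENT
```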

View File

@@ -0,0 +1,173 @@
"""Unit tests for CodeInterpreterClient streaming-to-batch fallback.
When the streaming endpoint (/v1/execute/stream) returns 404 — e.g. because the
code-interpreter service is an older version that doesn't support streaming — the
client should transparently fall back to the batch endpoint (/v1/execute) and
convert the batch response into the same stream-event interface.
"""
from __future__ import annotations
from unittest.mock import MagicMock
from unittest.mock import patch
from onyx.tools.tool_implementations.python.code_interpreter_client import (
CodeInterpreterClient,
)
from onyx.tools.tool_implementations.python.code_interpreter_client import FileInput
from onyx.tools.tool_implementations.python.code_interpreter_client import (
StreamOutputEvent,
)
from onyx.tools.tool_implementations.python.code_interpreter_client import (
StreamResultEvent,
)
def _make_batch_response(
stdout: str = "",
stderr: str = "",
exit_code: int = 0,
timed_out: bool = False,
duration_ms: int = 50,
) -> MagicMock:
"""Build a mock ``requests.Response`` for the batch /v1/execute endpoint."""
resp = MagicMock()
resp.status_code = 200
resp.raise_for_status = MagicMock()
resp.json.return_value = {
"stdout": stdout,
"stderr": stderr,
"exit_code": exit_code,
"timed_out": timed_out,
"duration_ms": duration_ms,
"files": [],
}
return resp
def _make_404_response() -> MagicMock:
"""Build a mock ``requests.Response`` that returns 404 (streaming not found)."""
resp = MagicMock()
resp.status_code = 404
return resp
def test_execute_streaming_fallback_to_batch_on_404() -> None:
"""When /v1/execute/stream returns 404, the client should fall back to
/v1/execute and yield equivalent StreamEvent objects."""
client = CodeInterpreterClient(base_url="http://fake:9000")
stream_resp = _make_404_response()
batch_resp = _make_batch_response(
stdout="hello world\n",
stderr="a warning\n",
)
urls_called: list[str] = []
def mock_post(url: str, **_kwargs: object) -> MagicMock:
urls_called.append(url)
if url.endswith("/v1/execute/stream"):
return stream_resp
if url.endswith("/v1/execute"):
return batch_resp
raise AssertionError(f"Unexpected URL: {url}")
with patch.object(client.session, "post", side_effect=mock_post):
events = list(client.execute_streaming(code="print('hello world')"))
# Streaming endpoint was attempted first, then batch
assert len(urls_called) == 2
assert urls_called[0].endswith("/v1/execute/stream")
assert urls_called[1].endswith("/v1/execute")
# The 404 response must be closed before making the batch call
stream_resp.close.assert_called_once()
# _batch_as_stream yields: stdout event, stderr event, result event
assert len(events) == 3
assert isinstance(events[0], StreamOutputEvent)
assert events[0].stream == "stdout"
assert events[0].data == "hello world\n"
assert isinstance(events[1], StreamOutputEvent)
assert events[1].stream == "stderr"
assert events[1].data == "a warning\n"
assert isinstance(events[2], StreamResultEvent)
assert events[2].exit_code == 0
assert not events[2].timed_out
assert events[2].duration_ms == 50
assert events[2].files == []
def test_execute_streaming_fallback_stdout_only() -> None:
"""Fallback with only stdout (no stderr) should yield two events:
one StreamOutputEvent for stdout and one StreamResultEvent."""
client = CodeInterpreterClient(base_url="http://fake:9000")
stream_resp = _make_404_response()
batch_resp = _make_batch_response(stdout="result: 42\n")
def mock_post(url: str, **_kwargs: object) -> MagicMock:
if url.endswith("/v1/execute/stream"):
return stream_resp
if url.endswith("/v1/execute"):
return batch_resp
raise AssertionError(f"Unexpected URL: {url}")
with patch.object(client.session, "post", side_effect=mock_post):
events = list(client.execute_streaming(code="print(42)"))
# No stderr → only stdout + result
assert len(events) == 2
assert isinstance(events[0], StreamOutputEvent)
assert events[0].stream == "stdout"
assert events[0].data == "result: 42\n"
assert isinstance(events[1], StreamResultEvent)
assert events[1].exit_code == 0
def test_execute_streaming_fallback_preserves_files_param() -> None:
"""When falling back, the files parameter must be forwarded to the
batch endpoint so staged files are still available for execution."""
client = CodeInterpreterClient(base_url="http://fake:9000")
stream_resp = _make_404_response()
batch_resp = _make_batch_response(stdout="ok\n")
captured_payloads: list[dict] = []
def mock_post(url: str, **kwargs: object) -> MagicMock:
if "json" in kwargs:
captured_payloads.append(kwargs["json"]) # type: ignore[arg-type]
if url.endswith("/v1/execute/stream"):
return stream_resp
if url.endswith("/v1/execute"):
return batch_resp
raise AssertionError(f"Unexpected URL: {url}")
files_input: list[FileInput] = [{"path": "data.csv", "file_id": "file-abc123"}]
with patch.object(client.session, "post", side_effect=mock_post):
events = list(
client.execute_streaming(
code="import pandas",
files=files_input,
)
)
# Both the streaming attempt and the batch fallback should include files
assert len(captured_payloads) == 2
for payload in captured_payloads:
assert payload["files"] == files_input
assert payload["code"] == "import pandas"
# Should still yield valid events
assert any(isinstance(e, StreamResultEvent) for e in events)
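
These three tests pin down the whole fallback contract; a condensed sketch of the client side they imply is below. The names mirror the tests, but the body is an assumption: the real `CodeInterpreterClient` also handles timeouts, non-404 errors, and actual SSE parsing on the streaming path.

```python
# Condensed sketch inferred from the tests above, not the shipped client.
from dataclasses import dataclass, field
from typing import Any, Iterator

import requests


@dataclass
class StreamOutputEvent:
    stream: str  # "stdout" or "stderr"
    data: str


@dataclass
class StreamResultEvent:
    exit_code: int
    timed_out: bool
    duration_ms: int
    files: list = field(default_factory=list)


class CodeInterpreterClient:
    def __init__(self, base_url: str) -> None:
        self.base_url = base_url.rstrip("/")
        self.session = requests.Session()

    def execute_streaming(
        self, code: str, files: list | None = None
    ) -> Iterator[Any]:
        payload = {"code": code, "files": files or []}
        resp = self.session.post(
            f"{self.base_url}/v1/execute/stream", json=payload, stream=True
        )
        if resp.status_code == 404:
            # Older service without the streaming route: close the 404
            # response first, call the batch endpoint with the same payload
            # (so the files param is preserved), then re-shape the result.
            resp.close()
            batch = self.session.post(f"{self.base_url}/v1/execute", json=payload)
            batch.raise_for_status()
            yield from self._batch_as_stream(batch.json())
            return
        raise NotImplementedError("the real client parses SSE events here")

    @staticmethod
    def _batch_as_stream(result: dict) -> Iterator[Any]:
        # Empty stdout/stderr produce no output event, which is why the
        # stdout-only test sees exactly two events.
        if result.get("stdout"):
            yield StreamOutputEvent(stream="stdout", data=result["stdout"])
        if result.get("stderr"):
            yield StreamOutputEvent(stream="stderr", data=result["stderr"])
        yield StreamResultEvent(
            exit_code=result["exit_code"],
            timed_out=result["timed_out"],
            duration_ms=result["duration_ms"],
            files=result.get("files", []),
        )
```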

View File

@@ -487,16 +487,7 @@ services:
code-interpreter:
image: onyxdotapp/code-interpreter:${CODE_INTERPRETER_IMAGE_TAG:-latest}
entrypoint: ["/bin/bash", "-c"]
command: >
"
if [ \"$${CODE_INTERPRETER_BETA_ENABLED}\" = \"True\" ] || [ \"$${CODE_INTERPRETER_BETA_ENABLED}\" = \"true\" ]; then
exec bash ./entrypoint.sh code-interpreter-api;
else
echo 'Skipping code interpreter';
exec tail -f /dev/null;
fi
"
command: ["bash", "./entrypoint.sh", "code-interpreter-api"]
restart: unless-stopped
env_file:
- path: .env

View File

@@ -69,6 +69,4 @@ services:
inference_model_server:
profiles: ["inference"]
# Code interpreter is not needed in minimal mode.
code-interpreter:
profiles: ["code-interpreter"]
code-interpreter: {}

View File

@@ -315,16 +315,7 @@ services:
code-interpreter:
image: onyxdotapp/code-interpreter:${CODE_INTERPRETER_IMAGE_TAG:-latest}
entrypoint: ["/bin/bash", "-c"]
command: >
"
if [ \"$${CODE_INTERPRETER_BETA_ENABLED}\" = \"True\" ] || [ \"$${CODE_INTERPRETER_BETA_ENABLED}\" = \"true\" ]; then
exec bash ./entrypoint.sh code-interpreter-api;
else
echo 'Skipping code interpreter';
exec tail -f /dev/null;
fi
"
command: ["bash", "./entrypoint.sh", "code-interpreter-api"]
restart: unless-stopped
env_file:
- path: .env

View File

@@ -352,16 +352,7 @@ services:
code-interpreter:
image: onyxdotapp/code-interpreter:${CODE_INTERPRETER_IMAGE_TAG:-latest}
entrypoint: ["/bin/bash", "-c"]
command: >
"
if [ \"$${CODE_INTERPRETER_BETA_ENABLED}\" = \"True\" ] || [ \"$${CODE_INTERPRETER_BETA_ENABLED}\" = \"true\" ]; then
exec bash ./entrypoint.sh code-interpreter-api;
else
echo 'Skipping code interpreter';
exec tail -f /dev/null;
fi
"
command: ["bash", "./entrypoint.sh", "code-interpreter-api"]
restart: unless-stopped
env_file:
- path: .env

View File

@@ -527,16 +527,7 @@ services:
code-interpreter:
image: onyxdotapp/code-interpreter:${CODE_INTERPRETER_IMAGE_TAG:-latest}
entrypoint: ["/bin/bash", "-c"]
command: >
"
if [ \"$${CODE_INTERPRETER_BETA_ENABLED}\" = \"True\" ] || [ \"$${CODE_INTERPRETER_BETA_ENABLED}\" = \"true\" ]; then
exec bash ./entrypoint.sh code-interpreter-api;
else
echo 'Skipping code interpreter';
exec tail -f /dev/null;
fi
"
command: ["bash", "./entrypoint.sh", "code-interpreter-api"]
restart: unless-stopped
env_file:
- path: .env

View File

@@ -19,6 +19,6 @@ dependencies:
version: 5.4.0
- name: code-interpreter
repository: https://onyx-dot-app.github.io/python-sandbox/
version: 0.2.1
digest: sha256:aedc211d9732c934be8b79735b62f8caa9bcd235e03fd0dd10b49e0a13ed15b7
generated: "2026-02-20T11:19:47.957449-08:00"
version: 0.3.0
digest: sha256:cf8f01906d46034962c6ce894770621ee183ac761e6942951118aeb48540eddd
generated: "2026-02-24T10:59:38.78318-08:00"

View File

@@ -45,6 +45,6 @@ dependencies:
repository: https://charts.min.io/
condition: minio.enabled
- name: code-interpreter
version: 0.2.1
version: 0.3.0
repository: https://onyx-dot-app.github.io/python-sandbox/
condition: codeInterpreter.enabled

View File

@@ -957,7 +957,7 @@ minio:
# Code Interpreter - Python code execution service (beta feature)
codeInterpreter:
enabled: false # Disabled by default (beta feature)
enabled: true
replicaCount: 1

View File

@@ -1,7 +1,7 @@
{
"manifest_version": 3,
"name": "Onyx",
"version": "1.0",
"version": "1.1",
"description": "Onyx lets you research, create, and automate with LLMs powered by your team's unique knowledge",
"permissions": [
"sidePanel",

View File

@@ -43,8 +43,12 @@ async function openSidePanel(tabId) {
}
}
function encodeUserPrompt(text) {
return encodeURIComponent(text).replace(/\(/g, "%28").replace(/\)/g, "%29");
}
async function sendToOnyx(info, tab) {
const selectedText = encodeURIComponent(info.selectionText);
const selectedText = encodeUserPrompt(info.selectionText);
const currentUrl = encodeURIComponent(tab.url);
try {
@@ -153,6 +157,23 @@ chrome.commands.onCommand.addListener(async (command) => {
}
});
async function sendActiveTabUrlToPanel() {
try {
const [tab] = await chrome.tabs.query({
active: true,
lastFocusedWindow: true,
});
if (tab?.url) {
chrome.runtime.sendMessage({
action: ACTIONS.TAB_URL_UPDATED,
url: tab.url,
});
}
} catch (error) {
console.error("[Onyx SW] Error sending tab URL:", error);
}
}
chrome.runtime.onMessage.addListener((request, sender, sendResponse) => {
if (request.action === ACTIONS.GET_CURRENT_ONYX_DOMAIN) {
chrome.storage.local.get(
@@ -188,7 +209,7 @@ chrome.runtime.onMessage.addListener((request, sender, sendResponse) => {
chrome.storage.local.get(
{ [CHROME_SPECIFIC_STORAGE_KEYS.ONYX_DOMAIN]: DEFAULT_ONYX_DOMAIN },
(result) => {
const encodedText = encodeURIComponent(selectedText);
const encodedText = encodeUserPrompt(selectedText);
const onyxDomain = result[CHROME_SPECIFIC_STORAGE_KEYS.ONYX_DOMAIN];
const url = `${onyxDomain}${SIDE_PANEL_PATH}?user-prompt=${encodedText}`;
@@ -222,6 +243,15 @@ chrome.runtime.onMessage.addListener((request, sender, sendResponse) => {
}
return true;
}
if (request.action === ACTIONS.TAB_READING_ENABLED) {
chrome.storage.session.set({ tabReadingEnabled: true });
sendActiveTabUrlToPanel();
return false;
}
if (request.action === ACTIONS.TAB_READING_DISABLED) {
chrome.storage.session.set({ tabReadingEnabled: false });
return false;
}
});
chrome.storage.onChanged.addListener((changes, namespace) => {
@@ -273,4 +303,40 @@ chrome.omnibox.onInputChanged.addListener((text, suggest) => {
}
});
chrome.tabs.onActivated.addListener(async (activeInfo) => {
const result = await chrome.storage.session.get({ tabReadingEnabled: false });
if (!result.tabReadingEnabled) return;
try {
const tab = await chrome.tabs.get(activeInfo.tabId);
if (tab.url) {
chrome.runtime.sendMessage({
action: ACTIONS.TAB_URL_UPDATED,
url: tab.url,
});
}
} catch (error) {
console.error("[Onyx SW] Error on tab activated:", error);
}
});
chrome.tabs.onUpdated.addListener(async (tabId, changeInfo, tab) => {
if (!changeInfo.url) return;
const result = await chrome.storage.session.get({ tabReadingEnabled: false });
if (!result.tabReadingEnabled) return;
try {
const [activeTab] = await chrome.tabs.query({
active: true,
lastFocusedWindow: true,
});
if (activeTab?.id === tabId) {
chrome.runtime.sendMessage({
action: ACTIONS.TAB_URL_UPDATED,
url: changeInfo.url,
});
}
} catch (error) {
console.error("[Onyx SW] Error on tab updated:", error);
}
});
setupSidePanel();

View File

@@ -132,9 +132,7 @@ import { getOnyxDomain } from "../utils/storage.js";
return;
}
setIframeSrc(
items[CHROME_SPECIFIC_STORAGE_KEYS.ONYX_DOMAIN] + "/chat/nrf",
);
setIframeSrc(items[CHROME_SPECIFIC_STORAGE_KEYS.ONYX_DOMAIN] + "/nrf");
},
);
}

View File

@@ -15,6 +15,15 @@ import {
let iframeLoadTimeout;
let authRequired = false;
// Returns the origin of the Onyx app loaded in the iframe.
// We derive the origin from iframe.src so postMessage payloads
// (including tab URLs) are only delivered to the expected page.
// Throws if iframe.src is not a valid URL — this is intentional:
// postMessage must never fall back to the unsafe wildcard "*".
function getIframeOrigin() {
return new URL(iframe.src).origin;
}
async function checkPendingInput() {
try {
const result = await chrome.storage.session.get("pendingInput");
@@ -57,7 +66,7 @@ import {
type: WEB_MESSAGE.PAGE_CHANGE,
url: pageUrl,
},
"*",
getIframeOrigin(),
);
currentUrl = pageUrl;
}
@@ -76,15 +85,34 @@ import {
}
function handleMessage(event) {
// Only trust messages from the Onyx app iframe.
// Check both source identity and origin so that a cross-origin page
// navigated to inside the iframe cannot send privileged extension
// messages (e.g. TAB_READING_ENABLED) after iframe.src changes.
// getIframeOrigin() throws if iframe.src is not yet a valid URL —
// catching it here fails closed (message is rejected, not processed).
if (event.source !== iframe.contentWindow) return;
try {
if (event.origin !== getIframeOrigin()) return;
} catch {
return;
}
if (event.data.type === CHROME_MESSAGE.ONYX_APP_LOADED) {
clearTimeout(iframeLoadTimeout);
iframeLoaded = true;
showIframe();
if (iframe.contentWindow) {
iframe.contentWindow.postMessage({ type: "PANEL_READY" }, "*");
iframe.contentWindow.postMessage(
{ type: "PANEL_READY" },
getIframeOrigin(),
);
}
} else if (event.data.type === CHROME_MESSAGE.AUTH_REQUIRED) {
authRequired = true;
} else if (event.data.type === CHROME_MESSAGE.TAB_READING_ENABLED) {
chrome.runtime.sendMessage({ action: ACTIONS.TAB_READING_ENABLED });
} else if (event.data.type === CHROME_MESSAGE.TAB_READING_DISABLED) {
chrome.runtime.sendMessage({ action: ACTIONS.TAB_READING_DISABLED });
}
}
@@ -117,6 +145,13 @@ import {
setIframeSrc(request.url, request.pageUrl);
} else if (request.action === ACTIONS.UPDATE_PAGE_URL) {
sendWebsiteToIframe(request.pageUrl);
} else if (request.action === ACTIONS.TAB_URL_UPDATED) {
if (iframe.contentWindow) {
iframe.contentWindow.postMessage(
{ type: CHROME_MESSAGE.TAB_URL_UPDATED, url: request.url },
getIframeOrigin(),
);
}
}
});

View File

@@ -5,7 +5,7 @@ export const THEMES = {
export const DEFAULT_ONYX_DOMAIN = "http://localhost:3000";
export const SIDE_PANEL_PATH = "/chat/nrf/side-panel";
export const SIDE_PANEL_PATH = "/nrf/side-panel";
export const ACTIONS = {
GET_SELECTED_TEXT: "getSelectedText",
@@ -17,6 +17,9 @@ export const ACTIONS = {
OPEN_SIDE_PANEL_WITH_INPUT: "openSidePanelWithInput",
OPEN_ONYX_WITH_INPUT: "openOnyxWithInput",
CLOSE_SIDE_PANEL: "closeSidePanel",
TAB_URL_UPDATED: "tabUrlUpdated",
TAB_READING_ENABLED: "tabReadingEnabled",
TAB_READING_DISABLED: "tabReadingDisabled",
};
export const CHROME_SPECIFIC_STORAGE_KEYS = {
@@ -36,6 +39,9 @@ export const CHROME_MESSAGE = {
LOAD_NEW_CHAT_PAGE: "LOAD_NEW_CHAT_PAGE",
LOAD_NEW_PAGE: "LOAD_NEW_PAGE",
AUTH_REQUIRED: "AUTH_REQUIRED",
TAB_READING_ENABLED: "TAB_READING_ENABLED",
TAB_READING_DISABLED: "TAB_READING_DISABLED",
TAB_URL_UPDATED: "TAB_URL_UPDATED",
};
export const WEB_MESSAGE = {

View File

@@ -49,7 +49,7 @@ backend = [
"fastapi-users==15.0.4",
"fastapi-users-db-sqlalchemy==7.0.0",
"fastapi-limiter==0.1.6",
"fastmcp==2.14.2",
"fastmcp==3.0.2",
"filelock==3.20.3",
"google-api-python-client==2.86.0",
"google-auth-httplib2==0.1.0",
@@ -71,10 +71,10 @@ backend = [
"lxml==5.3.0",
"Mako==1.2.4",
"markitdown[pdf, docx, pptx, xlsx, xls]==0.1.2",
"mcp[cli]==1.25.0",
"mcp[cli]==1.26.0",
"msal==1.34.0",
"msoffcrypto-tool==5.4.2",
"Office365-REST-Python-Client==2.5.9",
"Office365-REST-Python-Client==2.6.2",
"oauthlib==3.2.2",
# NOTE: This is frozen to avoid https://foss.heptapod.net/openpyxl/openpyxl/-/issues/2147
"openpyxl==3.0.10",
@@ -144,7 +144,7 @@ dev = [
"matplotlib==3.10.8",
"mypy-extensions==1.0.0",
"mypy==1.13.0",
"onyx-devtools==0.6.0",
"onyx-devtools==0.6.1",
"openapi-generator-cli==7.17.0",
"pandas-stubs~=2.3.3",
"pre-commit==3.2.2",

View File

@@ -222,6 +222,7 @@ ods run-ci 7353
### `cherry-pick` - Backport Commits to Release Branches
Cherry-pick one or more commits to release branches and automatically create PRs.
Cherry-pick PRs created by this command are labeled `cherry-pick 🍒`.
```shell
ods cherry-pick <commit-sha> [<commit-sha>...] [--release <version>]

View File

@@ -2,20 +2,26 @@ package cmd
import (
"fmt"
"io"
"os"
"os/exec"
"regexp"
"strings"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"github.com/onyx-dot-app/onyx/tools/ods/internal/git"
"github.com/onyx-dot-app/onyx/tools/ods/internal/prompt"
)
const cherryPickPRLabel = "cherry-pick 🍒"
// CherryPickOptions holds options for the cherry-pick command
type CherryPickOptions struct {
Releases []string
Assignees []string
DryRun bool
Yes bool
NoVerify bool
@@ -73,6 +79,7 @@ Example usage:
cmd.Flags().BoolVar(&opts.Continue, "continue", false, "Resume a cherry-pick after manual conflict resolution")
cmd.Flags().StringSliceVar(&opts.Releases, "release", []string{}, "Release version(s) to cherry-pick to (e.g., 1.0, v1.1). 'v' prefix is optional. Can be specified multiple times.")
cmd.Flags().StringSliceVar(&opts.Assignees, "assignee", nil, "GitHub assignee(s) for the created PR. Can be specified multiple times or as comma-separated values.")
cmd.Flags().BoolVar(&opts.DryRun, "dry-run", false, "Perform all local operations but skip pushing to remote and creating PRs")
cmd.Flags().BoolVar(&opts.Yes, "yes", false, "Skip confirmation prompts and automatically proceed")
cmd.Flags().BoolVar(&opts.NoVerify, "no-verify", false, "Skip pre-commit and commit-msg hooks for cherry-pick and push")
@@ -192,11 +199,18 @@ func runCherryPick(cmd *cobra.Command, args []string, opts *CherryPickOptions) {
}
// Save state so --continue can resume if a conflict occurs
assignees, err := resolveAssignees(cmd, opts.Assignees)
if err != nil {
git.RestoreStash(stashResult)
log.Fatalf("Failed to parse assignees: %v", err)
}
state := &git.CherryPickState{
OriginalBranch: originalBranch,
CommitSHAs: commitSHAs,
CommitMessages: commitMessages,
Releases: releases,
Assignees: assignees,
Stashed: stashResult.Stashed,
NoVerify: opts.NoVerify,
DryRun: opts.DryRun,
@@ -227,7 +241,7 @@ func finishCherryPick(state *git.CherryPickState, stashResult *git.StashResult)
log.Infof("Processing release %s", release)
prTitleWithRelease := fmt.Sprintf("%s to release %s", state.PRTitle, release)
prURL, err := cherryPickToRelease(state.CommitSHAs, state.CommitMessages, state.BranchSuffix, release, prTitleWithRelease, state.DryRun, state.NoVerify)
prURL, err := cherryPickToRelease(state.CommitSHAs, state.CommitMessages, state.BranchSuffix, release, prTitleWithRelease, state.Assignees, state.DryRun, state.NoVerify)
if err != nil {
if strings.Contains(err.Error(), "merge conflict") {
if stashResult.Stashed {
@@ -295,7 +309,7 @@ func runCherryPickContinue() {
}
// cherryPickToRelease cherry-picks one or more commits to a specific release branch
func cherryPickToRelease(commitSHAs, commitMessages []string, branchSuffix, version, prTitle string, dryRun, noVerify bool) (string, error) {
func cherryPickToRelease(commitSHAs, commitMessages []string, branchSuffix, version, prTitle string, assignees []string, dryRun, noVerify bool) (string, error) {
releaseBranch := fmt.Sprintf("release/%s", version)
hotfixBranch := fmt.Sprintf("hotfix/%s-%s", branchSuffix, version)
@@ -362,7 +376,7 @@ func cherryPickToRelease(commitSHAs, commitMessages []string, branchSuffix, vers
// Create PR using GitHub CLI
log.Info("Creating PR...")
prURL, err := createCherryPickPR(hotfixBranch, releaseBranch, prTitle, commitSHAs, commitMessages)
prURL, err := createCherryPickPR(hotfixBranch, releaseBranch, prTitle, commitSHAs, commitMessages, assignees)
if err != nil {
return "", fmt.Errorf("failed to create PR: %w", err)
}
@@ -456,7 +470,7 @@ func findNearestStableTag(commitSHA string) (string, error) {
}
// createCherryPickPR creates a pull request for cherry-picks using the GitHub CLI
func createCherryPickPR(headBranch, baseBranch, title string, commitSHAs, commitMessages []string) (string, error) {
func createCherryPickPR(headBranch, baseBranch, title string, commitSHAs, commitMessages, assignees []string) (string, error) {
var body string
// Collect all original PR numbers for the summary
@@ -492,12 +506,20 @@ func createCherryPickPR(headBranch, baseBranch, title string, commitSHAs, commit
body += "\n\n"
body += "- [x] [Optional] Override Linear Check\n"
cmd := exec.Command("gh", "pr", "create",
args := []string{
"pr", "create",
"--base", baseBranch,
"--head", headBranch,
"--title", title,
"--body", body,
)
"--label", cherryPickPRLabel,
}
for _, assignee := range assignees {
args = append(args, "--assignee", assignee)
}
cmd := exec.Command("gh", args...)
output, err := cmd.Output()
if err != nil {
@@ -510,3 +532,44 @@ func createCherryPickPR(headBranch, baseBranch, title string, commitSHAs, commit
prURL := strings.TrimSpace(string(output))
return prURL, nil
}
func parseCSVEnv(name string) ([]string, error) {
raw := strings.TrimSpace(os.Getenv(name))
if raw == "" {
return nil, nil
}
fs := pflag.NewFlagSet("csv-env", pflag.ContinueOnError)
fs.SetOutput(io.Discard)
values := []string{}
fs.StringSliceVar(&values, "value", nil, "")
if err := fs.Set("value", raw); err != nil {
return nil, fmt.Errorf("failed to parse %s=%q: %w", name, raw, err)
}
return dedupeNonEmpty(values), nil
}
func resolveAssignees(cmd *cobra.Command, flagAssignees []string) ([]string, error) {
if cmd.Flags().Changed("assignee") {
return dedupeNonEmpty(flagAssignees), nil
}
return parseCSVEnv("CHERRY_PICK_ASSIGNEE")
}
func dedupeNonEmpty(values []string) []string {
out := make([]string, 0, len(values))
seen := make(map[string]struct{}, len(values))
for _, value := range values {
trimmed := strings.TrimSpace(value)
if trimmed == "" {
continue
}
if _, exists := seen[trimmed]; exists {
continue
}
seen[trimmed] = struct{}{}
out = append(out, trimmed)
}
return out
}

View File

@@ -227,6 +227,7 @@ type CherryPickState struct {
CommitSHAs []string `json:"commit_shas"`
CommitMessages []string `json:"commit_messages"`
Releases []string `json:"releases"`
Assignees []string `json:"assignees,omitempty"`
CompletedReleases []string `json:"completed_releases,omitempty"`
Stashed bool `json:"stashed"`
NoVerify bool `json:"no_verify"`

View File

@@ -78,6 +78,7 @@ func TestCherryPickStateRoundTrip(t *testing.T) {
CommitSHAs: []string{"abc123", "def456"},
CommitMessages: []string{"fix: something", "feat: another"},
Releases: []string{"v2.12"},
Assignees: []string{"alice", "bob"},
Stashed: true,
NoVerify: false,
DryRun: true,
@@ -106,6 +107,9 @@ func TestCherryPickStateRoundTrip(t *testing.T) {
if loaded.DryRun != state.DryRun {
t.Errorf("DryRun = %v, want %v", loaded.DryRun, state.DryRun)
}
if len(loaded.Assignees) != len(state.Assignees) {
t.Errorf("Assignees len = %d, want %d", len(loaded.Assignees), len(state.Assignees))
}
CleanCherryPickState()

uv.lock generated
View File

@@ -86,6 +86,18 @@ boto3 = [
{ name = "boto3" },
]
[[package]]
name = "aiofile"
version = "3.9.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "caio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/67/e2/d7cb819de8df6b5c1968a2756c3cb4122d4fa2b8fc768b53b7c9e5edb646/aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b", size = 17943, upload-time = "2024-10-08T10:39:35.846Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/50/25/da1f0b4dd970e52bf5a36c204c107e11a0c6d3ed195eba0bfbc664c312b2/aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa", size = 19539, upload-time = "2024-10-08T10:39:32.955Z" },
]
[[package]]
name = "aiofiles"
version = "25.1.0"
@@ -736,6 +748,23 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e6/46/eb6eca305c77a4489affe1c5d8f4cae82f285d9addd8de4ec084a7184221/cachetools-6.2.2-py3-none-any.whl", hash = "sha256:6c09c98183bf58560c97b2abfcedcbaf6a896a490f534b031b661d3723b45ace", size = 11503, upload-time = "2025-11-13T17:42:50.232Z" },
]
[[package]]
name = "caio"
version = "0.9.25"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/92/88/b8527e1b00c1811db339a1df8bd1ae49d146fcea9d6a5c40e3a80aaeb38d/caio-0.9.25.tar.gz", hash = "sha256:16498e7f81d1d0f5a4c0ad3f2540e65fe25691376e0a5bd367f558067113ed10", size = 26781, upload-time = "2025-12-26T15:21:36.501Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ec/90/543f556fcfcfa270713eef906b6352ab048e1e557afec12925c991dc93c2/caio-0.9.25-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d6956d9e4a27021c8bd6c9677f3a59eb1d820cc32d0343cea7961a03b1371965", size = 36839, upload-time = "2025-12-26T15:21:40.267Z" },
{ url = "https://files.pythonhosted.org/packages/51/3b/36f3e8ec38dafe8de4831decd2e44c69303d2a3892d16ceda42afed44e1b/caio-0.9.25-cp311-cp311-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bf84bfa039f25ad91f4f52944452a5f6f405e8afab4d445450978cd6241d1478", size = 80255, upload-time = "2025-12-26T15:22:20.271Z" },
{ url = "https://files.pythonhosted.org/packages/d3/25/79c98ebe12df31548ba4eaf44db11b7cad6b3e7b4203718335620939083c/caio-0.9.25-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:fb7ff95af4c31ad3f03179149aab61097a71fd85e05f89b4786de0359dffd044", size = 36983, upload-time = "2025-12-26T15:21:36.075Z" },
{ url = "https://files.pythonhosted.org/packages/a3/2b/21288691f16d479945968a0a4f2856818c1c5be56881d51d4dac9b255d26/caio-0.9.25-cp312-cp312-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:97084e4e30dfa598449d874c4d8e0c8d5ea17d2f752ef5e48e150ff9d240cd64", size = 82012, upload-time = "2025-12-26T15:22:20.983Z" },
{ url = "https://files.pythonhosted.org/packages/31/57/5e6ff127e6f62c9f15d989560435c642144aa4210882f9494204bc892305/caio-0.9.25-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d6c2a3411af97762a2b03840c3cec2f7f728921ff8adda53d7ea2315a8563451", size = 36979, upload-time = "2025-12-26T15:21:35.484Z" },
{ url = "https://files.pythonhosted.org/packages/a3/9f/f21af50e72117eb528c422d4276cbac11fb941b1b812b182e0a9c70d19c5/caio-0.9.25-cp313-cp313-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0998210a4d5cd5cb565b32ccfe4e53d67303f868a76f212e002a8554692870e6", size = 81900, upload-time = "2025-12-26T15:22:21.919Z" },
{ url = "https://files.pythonhosted.org/packages/69/ca/a08fdc7efdcc24e6a6131a93c85be1f204d41c58f474c42b0670af8c016b/caio-0.9.25-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fab6078b9348e883c80a5e14b382e6ad6aabbc4429ca034e76e730cf464269db", size = 36978, upload-time = "2025-12-26T15:21:41.055Z" },
{ url = "https://files.pythonhosted.org/packages/5e/6c/d4d24f65e690213c097174d26eda6831f45f4734d9d036d81790a27e7b78/caio-0.9.25-cp314-cp314-manylinux2010_x86_64.manylinux2014_x86_64.manylinux_2_12_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:44a6b58e52d488c75cfaa5ecaa404b2b41cc965e6c417e03251e868ecd5b6d77", size = 81832, upload-time = "2025-12-26T15:22:22.757Z" },
{ url = "https://files.pythonhosted.org/packages/86/93/1f76c8d1bafe3b0614e06b2195784a3765bbf7b0a067661af9e2dd47fc33/caio-0.9.25-py3-none-any.whl", hash = "sha256:06c0bb02d6b929119b1cfbe1ca403c768b2013a369e2db46bfa2a5761cf82e40", size = 19087, upload-time = "2025-12-26T15:22:00.221Z" },
]
[[package]]
name = "celery"
version = "5.5.1"
@@ -1448,15 +1477,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/23/10/3c44e9331a5ec3bae8b2919d51f611a5b94e179563b1b89eb6423a8f43eb/discord.py-2.4.0-py3-none-any.whl", hash = "sha256:b8af6711c70f7e62160bfbecb55be699b5cb69d007426759ab8ab06b1bd77d1d", size = 1125988, upload-time = "2024-06-22T01:20:19.764Z" },
]
[[package]]
name = "diskcache"
version = "5.6.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3f/21/1c1ffc1a039ddcc459db43cc108658f32c57d271d7289a2794e401d0fdb6/diskcache-5.6.3.tar.gz", hash = "sha256:2c3a3fa2743d8535d832ec61c2054a1641f41775aa7c556758a109941e33e4fc", size = 67916, upload-time = "2023-08-31T06:12:00.316Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3f/27/4570e78fc0bf5ea0ca45eb1de3818a23787af9b390c0b0a0033a1b8236f9/diskcache-5.6.3-py3-none-any.whl", hash = "sha256:5e31b2d5fbad117cc363ebaf6b689474db18a1f6438bc82358b024abd4c2ca19", size = 45550, upload-time = "2023-08-31T06:11:58.822Z" },
]
[[package]]
name = "distlib"
version = "0.4.0"
@@ -1666,24 +1686,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/46/ec/91a434c8a53d40c3598966621dea9c50512bec6ce8e76fa1751015e74cef/faker-40.1.2-py3-none-any.whl", hash = "sha256:93503165c165d330260e4379fd6dc07c94da90c611ed3191a0174d2ab9966a42", size = 1985633, upload-time = "2026-01-13T20:51:47.982Z" },
]
[[package]]
name = "fakeredis"
version = "2.33.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "redis" },
{ name = "sortedcontainers" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5f/f9/57464119936414d60697fcbd32f38909bb5688b616ae13de6e98384433e0/fakeredis-2.33.0.tar.gz", hash = "sha256:d7bc9a69d21df108a6451bbffee23b3eba432c21a654afc7ff2d295428ec5770", size = 175187, upload-time = "2025-12-16T19:45:52.269Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/6e/78/a850fed8aeef96d4a99043c90b818b2ed5419cd5b24a4049fd7cfb9f1471/fakeredis-2.33.0-py3-none-any.whl", hash = "sha256:de535f3f9ccde1c56672ab2fdd6a8efbc4f2619fc2f1acc87b8737177d71c965", size = 119605, upload-time = "2025-12-16T19:45:51.08Z" },
]
[package.optional-dependencies]
lua = [
{ name = "lupa" },
]
[[package]]
name = "fastapi"
version = "0.128.0"
@@ -1785,29 +1787,33 @@ wheels = [
[[package]]
name = "fastmcp"
version = "2.14.2"
version = "3.0.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "authlib" },
{ name = "cyclopts" },
{ name = "exceptiongroup" },
{ name = "httpx" },
{ name = "jsonref" },
{ name = "jsonschema-path" },
{ name = "mcp" },
{ name = "openapi-pydantic" },
{ name = "opentelemetry-api" },
{ name = "packaging" },
{ name = "platformdirs" },
{ name = "py-key-value-aio", extra = ["disk", "keyring", "memory"] },
{ name = "py-key-value-aio", extra = ["filetree", "keyring", "memory"] },
{ name = "pydantic", extra = ["email"] },
{ name = "pydocket" },
{ name = "pyperclip" },
{ name = "python-dotenv" },
{ name = "pyyaml" },
{ name = "rich" },
{ name = "uvicorn" },
{ name = "watchfiles" },
{ name = "websockets" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d1/1e/e3528227688c248283f6d86869b1e900563ffc223eff00f4f923d2750365/fastmcp-2.14.2.tar.gz", hash = "sha256:bd23d1b808b6f446444f10114dac468b11bfb9153ed78628f5619763d0cf573e", size = 8272966, upload-time = "2025-12-31T15:26:13.433Z" }
sdist = { url = "https://files.pythonhosted.org/packages/11/6b/1a7ec89727797fb07ec0928e9070fa2f45e7b35718e1fe01633a34c35e45/fastmcp-3.0.2.tar.gz", hash = "sha256:6bd73b4a3bab773ee6932df5249dcbcd78ed18365ed0aeeb97bb42702a7198d7", size = 17239351, upload-time = "2026-02-22T16:32:28.843Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0d/67/8456d39484fcb7afd0defed21918e773ed59a98b39e5b633328527c88367/fastmcp-2.14.2-py3-none-any.whl", hash = "sha256:e33cd622e1ebd5110af6a981804525b6cd41072e3c7d68268ed69ef3be651aca", size = 413279, upload-time = "2025-12-31T15:26:11.178Z" },
{ url = "https://files.pythonhosted.org/packages/0a/5a/f410a9015cfde71adf646dab4ef2feae49f92f34f6050fcfb265eb126b30/fastmcp-3.0.2-py3-none-any.whl", hash = "sha256:f513d80d4b30b54749fe8950116b1aab843f3c293f5cb971fc8665cb48dbb028", size = 606268, upload-time = "2026-02-22T16:32:30.992Z" },
]
[[package]]
@@ -3353,69 +3359,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/db/bc/83e112abc66cd466c6b83f99118035867cecd41802f8d044638aa78a106e/locket-1.0.0-py2.py3-none-any.whl", hash = "sha256:b6c819a722f7b6bd955b80781788e4a66a55628b858d347536b7e81325a3a5e3", size = 4398, upload-time = "2022-04-20T22:04:42.23Z" },
]
[[package]]
name = "lupa"
version = "2.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/b8/1c/191c3e6ec6502e3dbe25a53e27f69a5daeac3e56de1f73c0138224171ead/lupa-2.6.tar.gz", hash = "sha256:9a770a6e89576be3447668d7ced312cd6fd41d3c13c2462c9dc2c2ab570e45d9", size = 7240282, upload-time = "2025-10-24T07:20:29.738Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ca/29/1f66907c1ebf1881735afa695e646762c674f00738ebf66d795d59fc0665/lupa-2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6d988c0f9331b9f2a5a55186701a25444ab10a1432a1021ee58011499ecbbdd5", size = 962875, upload-time = "2025-10-24T07:17:39.107Z" },
{ url = "https://files.pythonhosted.org/packages/e6/67/4a748604be360eb9c1c215f6a0da921cd1a2b44b2c5951aae6fb83019d3a/lupa-2.6-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:ebe1bbf48259382c72a6fe363dea61a0fd6fe19eab95e2ae881e20f3654587bf", size = 1935390, upload-time = "2025-10-24T07:17:41.427Z" },
{ url = "https://files.pythonhosted.org/packages/ac/0c/8ef9ee933a350428b7bdb8335a37ef170ab0bb008bbf9ca8f4f4310116b6/lupa-2.6-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:a8fcee258487cf77cdd41560046843bb38c2e18989cd19671dd1e2596f798306", size = 992193, upload-time = "2025-10-24T07:17:43.231Z" },
{ url = "https://files.pythonhosted.org/packages/65/46/e6c7facebdb438db8a65ed247e56908818389c1a5abbf6a36aab14f1057d/lupa-2.6-cp311-cp311-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:561a8e3be800827884e767a694727ed8482d066e0d6edfcbf423b05e63b05535", size = 1165844, upload-time = "2025-10-24T07:17:45.437Z" },
{ url = "https://files.pythonhosted.org/packages/1c/26/9f1154c6c95f175ccbf96aa96c8f569c87f64f463b32473e839137601a8b/lupa-2.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af880a62d47991cae78b8e9905c008cbfdc4a3a9723a66310c2634fc7644578c", size = 1048069, upload-time = "2025-10-24T07:17:47.181Z" },
{ url = "https://files.pythonhosted.org/packages/68/67/2cc52ab73d6af81612b2ea24c870d3fa398443af8e2875e5befe142398b1/lupa-2.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:80b22923aa4023c86c0097b235615f89d469a0c4eee0489699c494d3367c4c85", size = 2079079, upload-time = "2025-10-24T07:17:49.755Z" },
{ url = "https://files.pythonhosted.org/packages/2e/dc/f843f09bbf325f6e5ee61730cf6c3409fc78c010d968c7c78acba3019ca7/lupa-2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:153d2cc6b643f7efb9cfc0c6bb55ec784d5bac1a3660cfc5b958a7b8f38f4a75", size = 1071428, upload-time = "2025-10-24T07:17:51.991Z" },
{ url = "https://files.pythonhosted.org/packages/2e/60/37533a8d85bf004697449acb97ecdacea851acad28f2ad3803662487dd2a/lupa-2.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3fa8777e16f3ded50b72967dc17e23f5a08e4f1e2c9456aff2ebdb57f5b2869f", size = 1181756, upload-time = "2025-10-24T07:17:53.752Z" },
{ url = "https://files.pythonhosted.org/packages/e4/f2/cf29b20dbb4927b6a3d27c339ac5d73e74306ecc28c8e2c900b2794142ba/lupa-2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8dbdcbe818c02a2f56f5ab5ce2de374dab03e84b25266cfbaef237829bc09b3f", size = 2175687, upload-time = "2025-10-24T07:17:56.228Z" },
{ url = "https://files.pythonhosted.org/packages/94/7c/050e02f80c7131b63db1474bff511e63c545b5a8636a24cbef3fc4da20b6/lupa-2.6-cp311-cp311-win32.whl", hash = "sha256:defaf188fde8f7a1e5ce3a5e6d945e533b8b8d547c11e43b96c9b7fe527f56dc", size = 1412592, upload-time = "2025-10-24T07:17:59.062Z" },
{ url = "https://files.pythonhosted.org/packages/6f/9a/6f2af98aa5d771cea661f66c8eb8f53772ec1ab1dfbce24126cfcd189436/lupa-2.6-cp311-cp311-win_amd64.whl", hash = "sha256:9505ae600b5c14f3e17e70f87f88d333717f60411faca1ddc6f3e61dce85fa9e", size = 1669194, upload-time = "2025-10-24T07:18:01.647Z" },
{ url = "https://files.pythonhosted.org/packages/94/86/ce243390535c39d53ea17ccf0240815e6e457e413e40428a658ea4ee4b8d/lupa-2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47ce718817ef1cc0c40d87c3d5ae56a800d61af00fbc0fad1ca9be12df2f3b56", size = 951707, upload-time = "2025-10-24T07:18:03.884Z" },
{ url = "https://files.pythonhosted.org/packages/86/85/cedea5e6cbeb54396fdcc55f6b741696f3f036d23cfaf986d50d680446da/lupa-2.6-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7aba985b15b101495aa4b07112cdc08baa0c545390d560ad5cfde2e9e34f4d58", size = 1916703, upload-time = "2025-10-24T07:18:05.6Z" },
{ url = "https://files.pythonhosted.org/packages/24/be/3d6b5f9a8588c01a4d88129284c726017b2089f3a3fd3ba8bd977292fea0/lupa-2.6-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:b766f62f95b2739f2248977d29b0722e589dcf4f0ccfa827ccbd29f0148bd2e5", size = 985152, upload-time = "2025-10-24T07:18:08.561Z" },
{ url = "https://files.pythonhosted.org/packages/eb/23/9f9a05beee5d5dce9deca4cb07c91c40a90541fc0a8e09db4ee670da550f/lupa-2.6-cp312-cp312-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:00a934c23331f94cb51760097ebfab14b005d55a6b30a2b480e3c53dd2fa290d", size = 1159599, upload-time = "2025-10-24T07:18:10.346Z" },
{ url = "https://files.pythonhosted.org/packages/40/4e/e7c0583083db9d7f1fd023800a9767d8e4391e8330d56c2373d890ac971b/lupa-2.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21de9f38bd475303e34a042b7081aabdf50bd9bafd36ce4faea2f90fd9f15c31", size = 1038686, upload-time = "2025-10-24T07:18:12.112Z" },
{ url = "https://files.pythonhosted.org/packages/1c/9f/5a4f7d959d4feba5e203ff0c31889e74d1ca3153122be4a46dca7d92bf7c/lupa-2.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cf3bda96d3fc41237e964a69c23647d50d4e28421111360274d4799832c560e9", size = 2071956, upload-time = "2025-10-24T07:18:14.572Z" },
{ url = "https://files.pythonhosted.org/packages/92/34/2f4f13ca65d01169b1720176aedc4af17bc19ee834598c7292db232cb6dc/lupa-2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5a76ead245da54801a81053794aa3975f213221f6542d14ec4b859ee2e7e0323", size = 1057199, upload-time = "2025-10-24T07:18:16.379Z" },
{ url = "https://files.pythonhosted.org/packages/35/2a/5f7d2eebec6993b0dcd428e0184ad71afb06a45ba13e717f6501bfed1da3/lupa-2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8dd0861741caa20886ddbda0a121d8e52fb9b5bb153d82fa9bba796962bf30e8", size = 1173693, upload-time = "2025-10-24T07:18:18.153Z" },
{ url = "https://files.pythonhosted.org/packages/e4/29/089b4d2f8e34417349af3904bb40bec40b65c8731f45e3fd8d497ca573e5/lupa-2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:239e63948b0b23023f81d9a19a395e768ed3da6a299f84e7963b8f813f6e3f9c", size = 2164394, upload-time = "2025-10-24T07:18:20.403Z" },
{ url = "https://files.pythonhosted.org/packages/f3/1b/79c17b23c921f81468a111cad843b076a17ef4b684c4a8dff32a7969c3f0/lupa-2.6-cp312-cp312-win32.whl", hash = "sha256:325894e1099499e7a6f9c351147661a2011887603c71086d36fe0f964d52d1ce", size = 1420647, upload-time = "2025-10-24T07:18:23.368Z" },
{ url = "https://files.pythonhosted.org/packages/b8/15/5121e68aad3584e26e1425a5c9a79cd898f8a152292059e128c206ee817c/lupa-2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c735a1ce8ee60edb0fe71d665f1e6b7c55c6021f1d340eb8c865952c602cd36f", size = 1688529, upload-time = "2025-10-24T07:18:25.523Z" },
{ url = "https://files.pythonhosted.org/packages/28/1d/21176b682ca5469001199d8b95fa1737e29957a3d185186e7a8b55345f2e/lupa-2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:663a6e58a0f60e7d212017d6678639ac8df0119bc13c2145029dcba084391310", size = 947232, upload-time = "2025-10-24T07:18:27.878Z" },
{ url = "https://files.pythonhosted.org/packages/ce/4c/d327befb684660ca13cf79cd1f1d604331808f9f1b6fb6bf57832f8edf80/lupa-2.6-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:d1f5afda5c20b1f3217a80e9bc1b77037f8a6eb11612fd3ada19065303c8f380", size = 1908625, upload-time = "2025-10-24T07:18:29.944Z" },
{ url = "https://files.pythonhosted.org/packages/66/8e/ad22b0a19454dfd08662237a84c792d6d420d36b061f239e084f29d1a4f3/lupa-2.6-cp313-cp313-macosx_11_0_x86_64.whl", hash = "sha256:26f2b3c085fe76e9119e48c1013c1cccdc1f51585d456858290475aa38e7089e", size = 981057, upload-time = "2025-10-24T07:18:31.553Z" },
{ url = "https://files.pythonhosted.org/packages/5c/48/74859073ab276bd0566c719f9ca0108b0cfc1956ca0d68678d117d47d155/lupa-2.6-cp313-cp313-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:60d2f902c7b96fb8ab98493dcff315e7bb4d0b44dc9dd76eb37de575025d5685", size = 1156227, upload-time = "2025-10-24T07:18:33.981Z" },
{ url = "https://files.pythonhosted.org/packages/09/6c/0e9ded061916877253c2266074060eb71ed99fb21d73c8c114a76725bce2/lupa-2.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a02d25dee3a3250967c36590128d9220ae02f2eda166a24279da0b481519cbff", size = 1035752, upload-time = "2025-10-24T07:18:36.32Z" },
{ url = "https://files.pythonhosted.org/packages/dd/ef/f8c32e454ef9f3fe909f6c7d57a39f950996c37a3deb7b391fec7903dab7/lupa-2.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6eae1ee16b886b8914ff292dbefbf2f48abfbdee94b33a88d1d5475e02423203", size = 2069009, upload-time = "2025-10-24T07:18:38.072Z" },
{ url = "https://files.pythonhosted.org/packages/53/dc/15b80c226a5225815a890ee1c11f07968e0aba7a852df41e8ae6fe285063/lupa-2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0edd5073a4ee74ab36f74fe61450148e6044f3952b8d21248581f3c5d1a58be", size = 1056301, upload-time = "2025-10-24T07:18:40.165Z" },
{ url = "https://files.pythonhosted.org/packages/31/14/2086c1425c985acfb30997a67e90c39457122df41324d3c179d6ee2292c6/lupa-2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0c53ee9f22a8a17e7d4266ad48e86f43771951797042dd51d1494aaa4f5f3f0a", size = 1170673, upload-time = "2025-10-24T07:18:42.426Z" },
{ url = "https://files.pythonhosted.org/packages/10/e5/b216c054cf86576c0191bf9a9f05de6f7e8e07164897d95eea0078dca9b2/lupa-2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:de7c0f157a9064a400d828789191a96da7f4ce889969a588b87ec80de9b14772", size = 2162227, upload-time = "2025-10-24T07:18:46.112Z" },
{ url = "https://files.pythonhosted.org/packages/59/2f/33ecb5bedf4f3bc297ceacb7f016ff951331d352f58e7e791589609ea306/lupa-2.6-cp313-cp313-win32.whl", hash = "sha256:ee9523941ae0a87b5b703417720c5d78f72d2f5bc23883a2ea80a949a3ed9e75", size = 1419558, upload-time = "2025-10-24T07:18:48.371Z" },
{ url = "https://files.pythonhosted.org/packages/f9/b4/55e885834c847ea610e111d87b9ed4768f0afdaeebc00cd46810f25029f6/lupa-2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b1335a5835b0a25ebdbc75cf0bda195e54d133e4d994877ef025e218c2e59db9", size = 1683424, upload-time = "2025-10-24T07:18:50.976Z" },
{ url = "https://files.pythonhosted.org/packages/66/9d/d9427394e54d22a35d1139ef12e845fd700d4872a67a34db32516170b746/lupa-2.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:dcb6d0a3264873e1653bc188499f48c1fb4b41a779e315eba45256cfe7bc33c1", size = 953818, upload-time = "2025-10-24T07:18:53.378Z" },
{ url = "https://files.pythonhosted.org/packages/10/41/27bbe81953fb2f9ecfced5d9c99f85b37964cfaf6aa8453bb11283983721/lupa-2.6-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:a37e01f2128f8c36106726cb9d360bac087d58c54b4522b033cc5691c584db18", size = 1915850, upload-time = "2025-10-24T07:18:55.259Z" },
{ url = "https://files.pythonhosted.org/packages/a3/98/f9ff60db84a75ba8725506bbf448fb085bc77868a021998ed2a66d920568/lupa-2.6-cp314-cp314-macosx_11_0_x86_64.whl", hash = "sha256:458bd7e9ff3c150b245b0fcfbb9bd2593d1152ea7f0a7b91c1d185846da033fe", size = 982344, upload-time = "2025-10-24T07:18:57.05Z" },
{ url = "https://files.pythonhosted.org/packages/41/f7/f39e0f1c055c3b887d86b404aaf0ca197b5edfd235a8b81b45b25bac7fc3/lupa-2.6-cp314-cp314-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:052ee82cac5206a02df77119c325339acbc09f5ce66967f66a2e12a0f3211cad", size = 1156543, upload-time = "2025-10-24T07:18:59.251Z" },
{ url = "https://files.pythonhosted.org/packages/9e/9c/59e6cffa0d672d662ae17bd7ac8ecd2c89c9449dee499e3eb13ca9cd10d9/lupa-2.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96594eca3c87dd07938009e95e591e43d554c1dbd0385be03c100367141db5a8", size = 1047974, upload-time = "2025-10-24T07:19:01.449Z" },
{ url = "https://files.pythonhosted.org/packages/23/c6/a04e9cef7c052717fcb28fb63b3824802488f688391895b618e39be0f684/lupa-2.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8faddd9d198688c8884091173a088a8e920ecc96cda2ffed576a23574c4b3f6", size = 2073458, upload-time = "2025-10-24T07:19:03.369Z" },
{ url = "https://files.pythonhosted.org/packages/e6/10/824173d10f38b51fc77785228f01411b6ca28826ce27404c7c912e0e442c/lupa-2.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:daebb3a6b58095c917e76ba727ab37b27477fb926957c825205fbda431552134", size = 1067683, upload-time = "2025-10-24T07:19:06.2Z" },
{ url = "https://files.pythonhosted.org/packages/b6/dc/9692fbcf3c924d9c4ece2d8d2f724451ac2e09af0bd2a782db1cef34e799/lupa-2.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f3154e68972befe0f81564e37d8142b5d5d79931a18309226a04ec92487d4ea3", size = 1171892, upload-time = "2025-10-24T07:19:08.544Z" },
{ url = "https://files.pythonhosted.org/packages/84/ff/e318b628d4643c278c96ab3ddea07fc36b075a57383c837f5b11e537ba9d/lupa-2.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e4dadf77b9fedc0bfa53417cc28dc2278a26d4cbd95c29f8927ad4d8fe0a7ef9", size = 2166641, upload-time = "2025-10-24T07:19:10.485Z" },
{ url = "https://files.pythonhosted.org/packages/12/f7/a6f9ec2806cf2d50826980cdb4b3cffc7691dc6f95e13cc728846d5cb793/lupa-2.6-cp314-cp314-win32.whl", hash = "sha256:cb34169c6fa3bab3e8ac58ca21b8a7102f6a94b6a5d08d3636312f3f02fafd8f", size = 1456857, upload-time = "2025-10-24T07:19:37.989Z" },
{ url = "https://files.pythonhosted.org/packages/c5/de/df71896f25bdc18360fdfa3b802cd7d57d7fede41a0e9724a4625b412c85/lupa-2.6-cp314-cp314-win_amd64.whl", hash = "sha256:b74f944fe46c421e25d0f8692aef1e842192f6f7f68034201382ac440ef9ea67", size = 1731191, upload-time = "2025-10-24T07:19:40.281Z" },
{ url = "https://files.pythonhosted.org/packages/47/3c/a1f23b01c54669465f5f4c4083107d496fbe6fb45998771420e9aadcf145/lupa-2.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:0e21b716408a21ab65723f8841cf7f2f37a844b7a965eeabb785e27fca4099cf", size = 999343, upload-time = "2025-10-24T07:19:12.519Z" },
{ url = "https://files.pythonhosted.org/packages/c5/6d/501994291cb640bfa2ccf7f554be4e6914afa21c4026bd01bff9ca8aac57/lupa-2.6-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:589db872a141bfff828340079bbdf3e9a31f2689f4ca0d88f97d9e8c2eae6142", size = 2000730, upload-time = "2025-10-24T07:19:14.869Z" },
{ url = "https://files.pythonhosted.org/packages/53/a5/457ffb4f3f20469956c2d4c4842a7675e884efc895b2f23d126d23e126cc/lupa-2.6-cp314-cp314t-macosx_11_0_x86_64.whl", hash = "sha256:cd852a91a4a9d4dcbb9a58100f820a75a425703ec3e3f049055f60b8533b7953", size = 1021553, upload-time = "2025-10-24T07:19:17.123Z" },
{ url = "https://files.pythonhosted.org/packages/51/6b/36bb5a5d0960f2a5c7c700e0819abb76fd9bf9c1d8a66e5106416d6e9b14/lupa-2.6-cp314-cp314t-manylinux2010_i686.manylinux_2_12_i686.manylinux_2_28_i686.whl", hash = "sha256:0334753be028358922415ca97a64a3048e4ed155413fc4eaf87dd0a7e2752983", size = 1133275, upload-time = "2025-10-24T07:19:20.51Z" },
{ url = "https://files.pythonhosted.org/packages/19/86/202ff4429f663013f37d2229f6176ca9f83678a50257d70f61a0a97281bf/lupa-2.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:661d895cd38c87658a34780fac54a690ec036ead743e41b74c3fb81a9e65a6aa", size = 1038441, upload-time = "2025-10-24T07:19:22.509Z" },
{ url = "https://files.pythonhosted.org/packages/a7/42/d8125f8e420714e5b52e9c08d88b5329dfb02dcca731b4f21faaee6cc5b5/lupa-2.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aa58454ccc13878cc177c62529a2056be734da16369e451987ff92784994ca7", size = 2058324, upload-time = "2025-10-24T07:19:24.979Z" },
{ url = "https://files.pythonhosted.org/packages/2b/2c/47bf8b84059876e877a339717ddb595a4a7b0e8740bacae78ba527562e1c/lupa-2.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1425017264e470c98022bba8cff5bd46d054a827f5df6b80274f9cc71dafd24f", size = 1060250, upload-time = "2025-10-24T07:19:27.262Z" },
{ url = "https://files.pythonhosted.org/packages/c2/06/d88add2b6406ca1bdec99d11a429222837ca6d03bea42ca75afa169a78cb/lupa-2.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:224af0532d216e3105f0a127410f12320f7c5f1aa0300bdf9646b8d9afb0048c", size = 1151126, upload-time = "2025-10-24T07:19:29.522Z" },
{ url = "https://files.pythonhosted.org/packages/b4/a0/89e6a024c3b4485b89ef86881c9d55e097e7cb0bdb74efb746f2fa6a9a76/lupa-2.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9abb98d5a8fd27c8285302e82199f0e56e463066f88f619d6594a450bf269d80", size = 2153693, upload-time = "2025-10-24T07:19:31.379Z" },
{ url = "https://files.pythonhosted.org/packages/b6/36/a0f007dc58fc1bbf51fb85dcc82fcb1f21b8c4261361de7dab0e3d8521ef/lupa-2.6-cp314-cp314t-win32.whl", hash = "sha256:1849efeba7a8f6fb8aa2c13790bee988fd242ae404bd459509640eeea3d1e291", size = 1590104, upload-time = "2025-10-24T07:19:33.514Z" },
{ url = "https://files.pythonhosted.org/packages/7d/5e/db903ce9cf82c48d6b91bf6d63ae4c8d0d17958939a4e04ba6b9f38b8643/lupa-2.6-cp314-cp314t-win_amd64.whl", hash = "sha256:fc1498d1a4fc028bc521c26d0fad4ca00ed63b952e32fb95949bda76a04bad52", size = 1913818, upload-time = "2025-10-24T07:19:36.039Z" },
]
[[package]]
name = "lxml"
version = "5.3.0"
@@ -3790,7 +3733,7 @@ wheels = [
[[package]]
name = "mcp"
version = "1.25.0"
version = "1.26.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -3808,9 +3751,9 @@ dependencies = [
{ name = "typing-inspection" },
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" }
sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" },
{ url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" },
]
[package.optional-dependencies]
@@ -4408,7 +4351,7 @@ wheels = [
[[package]]
name = "office365-rest-python-client"
version = "2.5.9"
version = "2.6.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "msal" },
@@ -4416,9 +4359,9 @@ dependencies = [
{ name = "requests" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e5/7d/7219ab9e1091024a74ee32cef0205a7bd44d3058d08a4948d30c58793eee/Office365-REST-Python-Client-2.5.9.tar.gz", hash = "sha256:95c1c01d6a52c1bd3fcd8cc914c373d2c3e1578173070e65f325da9c5ba6f5ad", size = 605076, upload-time = "2024-05-06T19:38:38.98Z" }
sdist = { url = "https://files.pythonhosted.org/packages/bc/04/6dce2d581c54a8e55a3b128cf79a93821a68a62bb9a956e65476c5bb247e/office365_rest_python_client-2.6.2.tar.gz", hash = "sha256:ce27f5a1c0cc3ff97041ccd9b386145692be4c64739f243f7d6ac3edbe0a3c46", size = 659460, upload-time = "2025-05-11T10:24:21.895Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b7/47/d01eaa338abadcbabf69b913d91ffb54a7f62f538ceb402cd2cf572115a8/Office365_REST_Python_Client-2.5.9-py3-none-any.whl", hash = "sha256:79b2718572763492f6fd351ad388089bdb3fde07fe5c1f92d716255ceadc0e47", size = 1168849, upload-time = "2024-05-06T19:38:32.534Z" },
{ url = "https://files.pythonhosted.org/packages/3a/a4/611155711f8af347875c15b8b83f5fd9e978bd4de45f90085b9a583b684d/Office365_REST_Python_Client-2.6.2-py3-none-any.whl", hash = "sha256:06fc6829c39b503897caa9d881db419d7f97a8e4f1c95c4c2d12db36ea6c955d", size = 1337139, upload-time = "2025-05-11T10:24:18.926Z" },
]
[[package]]
@@ -4673,7 +4616,7 @@ requires-dist = [
{ name = "fastapi-limiter", marker = "extra == 'backend'", specifier = "==0.1.6" },
{ name = "fastapi-users", marker = "extra == 'backend'", specifier = "==15.0.4" },
{ name = "fastapi-users-db-sqlalchemy", marker = "extra == 'backend'", specifier = "==7.0.0" },
{ name = "fastmcp", marker = "extra == 'backend'", specifier = "==2.14.2" },
{ name = "fastmcp", marker = "extra == 'backend'", specifier = "==3.0.2" },
{ name = "filelock", marker = "extra == 'backend'", specifier = "==3.20.3" },
{ name = "google-api-python-client", marker = "extra == 'backend'", specifier = "==2.86.0" },
{ name = "google-auth-httplib2", marker = "extra == 'backend'", specifier = "==0.1.0" },
@@ -4701,7 +4644,7 @@ requires-dist = [
{ name = "manygo", marker = "extra == 'dev'", specifier = "==0.2.0" },
{ name = "markitdown", extras = ["pdf", "docx", "pptx", "xlsx", "xls"], marker = "extra == 'backend'", specifier = "==0.1.2" },
{ name = "matplotlib", marker = "extra == 'dev'", specifier = "==3.10.8" },
{ name = "mcp", extras = ["cli"], marker = "extra == 'backend'", specifier = "==1.25.0" },
{ name = "mcp", extras = ["cli"], marker = "extra == 'backend'", specifier = "==1.26.0" },
{ name = "mistune", marker = "extra == 'backend'", specifier = "==3.2.0" },
{ name = "msal", marker = "extra == 'backend'", specifier = "==1.34.0" },
{ name = "msoffcrypto-tool", marker = "extra == 'backend'", specifier = "==5.4.2" },
@@ -4710,8 +4653,8 @@ requires-dist = [
{ name = "nest-asyncio", marker = "extra == 'backend'", specifier = "==1.6.0" },
{ name = "numpy", marker = "extra == 'model-server'", specifier = "==2.4.1" },
{ name = "oauthlib", marker = "extra == 'backend'", specifier = "==3.2.2" },
{ name = "office365-rest-python-client", marker = "extra == 'backend'", specifier = "==2.5.9" },
{ name = "onyx-devtools", marker = "extra == 'dev'", specifier = "==0.6.0" },
{ name = "office365-rest-python-client", marker = "extra == 'backend'", specifier = "==2.6.2" },
{ name = "onyx-devtools", marker = "extra == 'dev'", specifier = "==0.6.1" },
{ name = "openai", specifier = "==2.14.0" },
{ name = "openapi-generator-cli", marker = "extra == 'dev'", specifier = "==7.17.0" },
{ name = "openinference-instrumentation", marker = "extra == 'backend'", specifier = "==0.1.42" },
@@ -4816,20 +4759,20 @@ requires-dist = [{ name = "onyx", extras = ["backend", "dev", "ee"], editable =
[[package]]
name = "onyx-devtools"
version = "0.6.0"
version = "0.6.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "fastapi" },
{ name = "openapi-generator-cli" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/fa/f9/79d66c1f06e4d1dca0a9df30afcd65ec1a69219fdf17c45349396d1ec668/onyx_devtools-0.6.0-py3-none-any.whl", hash = "sha256:26049075a6d3eb794f44c1bbe55a7cfc0c5427de681ed29319064e2deb956a15", size = 3777572, upload-time = "2026-02-19T23:05:51.823Z" },
{ url = "https://files.pythonhosted.org/packages/40/37/0abff5ab8d79c90f9d57eeaf4998f668145b01e81da0307df56c3b15d16c/onyx_devtools-0.6.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a7c00f2f1924c231b2480edcd3b6aa83398e13e4587c213fe1c97e0f6d3cfce1", size = 3822965, upload-time = "2026-02-19T23:06:02.992Z" },
{ url = "https://files.pythonhosted.org/packages/59/79/a8c23e456b7f1bb4cb741875af6c323fba11d5ef1ba121ea8b44587c236f/onyx_devtools-0.6.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:0e67fc47dfffb510826a6487dd5029a65b4a5b3f8a42e0e1208b6faee353518c", size = 3570391, upload-time = "2026-02-19T23:05:48.853Z" },
{ url = "https://files.pythonhosted.org/packages/c5/c5/d166bf2c98b80fd83d76abe88e57d63a8cb55880ba40a3d34c831361e3cf/onyx_devtools-0.6.0-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:0fdbd085f82788b900620424798d04dc1b10c3b1baf9be821ac178adc41c6858", size = 3432611, upload-time = "2026-02-19T23:05:51.924Z" },
{ url = "https://files.pythonhosted.org/packages/18/8e/c53fb7f7781acbf37ca80ebcee5d1274d54c6d853606adefc517df715f9a/onyx_devtools-0.6.0-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:3915ad5ea245e597a8ad91bd2ba5efc2b6a336ca59c7f3670bd89530cc9ab00f", size = 3777586, upload-time = "2026-02-19T23:05:51.877Z" },
{ url = "https://files.pythonhosted.org/packages/e5/57/194ded4aa5151d96911b021829e015370b4f1fc7493ac584d445fd96f97b/onyx_devtools-0.6.0-py3-none-win_amd64.whl", hash = "sha256:478cdae03ae2e797345396397318446622c7472df0a7d9dbd58d3e96489198b2", size = 3871835, upload-time = "2026-02-19T23:05:51.209Z" },
{ url = "https://files.pythonhosted.org/packages/3c/e9/cc7d204b9b1103b2f33f8f62d29076083f40f44697b398e83b3d44daca23/onyx_devtools-0.6.0-py3-none-win_arm64.whl", hash = "sha256:4bff060fd5f017ddceaf753252e0bc16699922d9a0a88506a56505aad4580824", size = 3492854, upload-time = "2026-02-19T23:05:51.856Z" },
{ url = "https://files.pythonhosted.org/packages/bf/3c/fc0c152ecc403b8d4c929eacc7ea4c3d6cba2094f3cfa51d9e5c4d3bda3d/onyx_devtools-0.6.1-py3-none-any.whl", hash = "sha256:a9ad90ca4536ebe9aaeb604f82c418f3fd148100f14cca7749df0d076ee5c4b0", size = 3781440, upload-time = "2026-02-25T00:59:03.565Z" },
{ url = "https://files.pythonhosted.org/packages/fd/1c/2df5a06eed5490057f0852153940142f9987ff9b865c9c185b733fa360b1/onyx_devtools-0.6.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:769a656737e2389312e8e24bf3e9dd559dcb00160f323228dfe34d005ab47af3", size = 3827421, upload-time = "2026-02-25T00:58:59.672Z" },
{ url = "https://files.pythonhosted.org/packages/a2/e3/389644eb9ba0a3cfa975cc015a48140702b05abc9093542b2a3ba6cc5cc1/onyx_devtools-0.6.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:93886332e97e6efa5f3d7a1d1e4facf1442d301df379f65dfc2a328ed43c8f39", size = 3573060, upload-time = "2026-02-25T00:59:02.582Z" },
{ url = "https://files.pythonhosted.org/packages/68/fe/dd0f32e08f7e7fb1861a28b82431e0a43cf6ab33e04fb2938f4ee20c891b/onyx_devtools-0.6.1-py3-none-manylinux_2_17_aarch64.whl", hash = "sha256:cf896e420c78c08c541135473627ffcab0a0156e0e462e71bcb476f560c324fa", size = 3435936, upload-time = "2026-02-25T00:59:02.313Z" },
{ url = "https://files.pythonhosted.org/packages/bb/3a/4376cba6adcf86b9fc55f146493450955497d988920eaa37a8aec9f9f897/onyx_devtools-0.6.1-py3-none-manylinux_2_17_x86_64.whl", hash = "sha256:4cb5a1b44a4e74c2fc68164a5caa34bce3f6d2dd5639e48438c1d04f09c4c7c6", size = 3781457, upload-time = "2026-02-25T00:59:02.126Z" },
{ url = "https://files.pythonhosted.org/packages/9d/0d/d2ecf7edc02354d16d9a1d9bd7d8d35f46cdde08b86635ba02075e4d3c7c/onyx_devtools-0.6.1-py3-none-win_amd64.whl", hash = "sha256:0c6c6a667851b9ab215980f1b391216bc2f157c8a29d0cfa96c32c6d10116a5c", size = 3875146, upload-time = "2026-02-25T00:59:02.364Z" },
{ url = "https://files.pythonhosted.org/packages/c5/c3/04783dcfad36b18f48befb6d85bf4f9a9f36fd4cd6e08077676c72c9c504/onyx_devtools-0.6.1-py3-none-win_arm64.whl", hash = "sha256:f095e58b4dad0671c7127a452c5d5f411f55070ebf586a2e47f9193ab753ce44", size = 3496971, upload-time = "2026-02-25T00:59:17.98Z" },
]
[[package]]
@@ -4967,35 +4910,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/95/f1/b27d3e2e003cd9a3592c43d099d2ed8d0a947c15281bf8463a256db0b46c/opentelemetry_exporter_otlp_proto_http-1.39.1-py3-none-any.whl", hash = "sha256:d9f5207183dd752a412c4cd564ca8875ececba13be6e9c6c370ffb752fd59985", size = 19641, upload-time = "2025-12-11T13:32:22.248Z" },
]
[[package]]
name = "opentelemetry-exporter-prometheus"
version = "0.60b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-sdk" },
{ name = "prometheus-client" },
]
sdist = { url = "https://files.pythonhosted.org/packages/14/39/7dafa6fff210737267bed35a8855b6ac7399b9e582b8cf1f25f842517012/opentelemetry_exporter_prometheus-0.60b1.tar.gz", hash = "sha256:a4011b46906323f71724649d301b4dc188aaa068852e814f4df38cc76eac616b", size = 14976, upload-time = "2025-12-11T13:32:42.944Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9b/0d/4be6bf5477a3eb3d917d2f17d3c0b6720cd6cb97898444a61d43cc983f5c/opentelemetry_exporter_prometheus-0.60b1-py3-none-any.whl", hash = "sha256:49f59178de4f4590e3cef0b8b95cf6e071aae70e1f060566df5546fad773b8fd", size = 13019, upload-time = "2025-12-11T13:32:23.974Z" },
]
[[package]]
name = "opentelemetry-instrumentation"
version = "0.60b1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "opentelemetry-api" },
{ name = "opentelemetry-semantic-conventions" },
{ name = "packaging" },
{ name = "wrapt" },
]
sdist = { url = "https://files.pythonhosted.org/packages/41/0f/7e6b713ac117c1f5e4e3300748af699b9902a2e5e34c9cf443dde25a01fa/opentelemetry_instrumentation-0.60b1.tar.gz", hash = "sha256:57ddc7974c6eb35865af0426d1a17132b88b2ed8586897fee187fd5b8944bd6a", size = 31706, upload-time = "2025-12-11T13:36:42.515Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/77/d2/6788e83c5c86a2690101681aeef27eeb2a6bf22df52d3f263a22cee20915/opentelemetry_instrumentation-0.60b1-py3-none-any.whl", hash = "sha256:04480db952b48fb1ed0073f822f0ee26012b7be7c3eac1a3793122737c78632d", size = 33096, upload-time = "2025-12-11T13:35:33.067Z" },
]
[[package]]
name = "opentelemetry-proto"
version = "1.39.1"
@@ -5237,15 +5151,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" },
]
[[package]]
name = "pathvalidate"
version = "3.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fa/2a/52a8da6fe965dea6192eb716b357558e103aea0a1e9a8352ad575a8406ca/pathvalidate-3.3.1.tar.gz", hash = "sha256:b18c07212bfead624345bb8e1d6141cdcf15a39736994ea0b94035ad2b1ba177", size = 63262, upload-time = "2025-06-15T09:07:20.736Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/70/875f4a23bfc4731703a5835487d0d2fb999031bd415e7d17c0ae615c18b7/pathvalidate-3.3.1-py3-none-any.whl", hash = "sha256:5263baab691f8e1af96092fa5137ee17df5bdfbd6cff1fcac4d6ef4bc2e1735f", size = 24305, upload-time = "2025-06-15T09:07:19.117Z" },
]
[[package]]
name = "pdfminer-six"
version = "20251107"
@@ -5709,21 +5614,21 @@ wheels = [
[[package]]
name = "py-key-value-aio"
version = "0.3.0"
version = "0.4.4"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "beartype" },
{ name = "py-key-value-shared" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/93/ce/3136b771dddf5ac905cc193b461eb67967cf3979688c6696e1f2cdcde7ea/py_key_value_aio-0.3.0.tar.gz", hash = "sha256:858e852fcf6d696d231266da66042d3355a7f9871650415feef9fca7a6cd4155", size = 50801, upload-time = "2025-11-17T16:50:04.711Z" }
sdist = { url = "https://files.pythonhosted.org/packages/04/3c/0397c072a38d4bc580994b42e0c90c5f44f679303489e4376289534735e5/py_key_value_aio-0.4.4.tar.gz", hash = "sha256:e3012e6243ed7cc09bb05457bd4d03b1ba5c2b1ca8700096b3927db79ffbbe55", size = 92300, upload-time = "2026-02-16T21:21:43.245Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/99/10/72f6f213b8f0bce36eff21fda0a13271834e9eeff7f9609b01afdc253c79/py_key_value_aio-0.3.0-py3-none-any.whl", hash = "sha256:1c781915766078bfd608daa769fefb97e65d1d73746a3dfb640460e322071b64", size = 96342, upload-time = "2025-11-17T16:50:03.801Z" },
{ url = "https://files.pythonhosted.org/packages/32/69/f1b537ee70b7def42d63124a539ed3026a11a3ffc3086947a1ca6e861868/py_key_value_aio-0.4.4-py3-none-any.whl", hash = "sha256:18e17564ecae61b987f909fc2cd41ee2012c84b4b1dcb8c055cf8b4bc1bf3f5d", size = 152291, upload-time = "2026-02-16T21:21:44.241Z" },
]
[package.optional-dependencies]
- disk = [
- { name = "diskcache" },
- { name = "pathvalidate" },
+ filetree = [
+ { name = "aiofile" },
+ { name = "anyio" },
]
keyring = [
{ name = "keyring" },
@@ -5731,22 +5636,6 @@ keyring = [
memory = [
{ name = "cachetools" },
]
redis = [
{ name = "redis" },
]
[[package]]
name = "py-key-value-shared"
version = "0.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "beartype" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/7b/e4/1971dfc4620a3a15b4579fe99e024f5edd6e0967a71154771a059daff4db/py_key_value_shared-0.3.0.tar.gz", hash = "sha256:8fdd786cf96c3e900102945f92aa1473138ebe960ef49da1c833790160c28a4b", size = 11666, upload-time = "2025-11-17T16:50:06.849Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/51/e4/b8b0a03ece72f47dce2307d36e1c34725b7223d209fc679315ffe6a4e2c3/py_key_value_shared-0.3.0-py3-none-any.whl", hash = "sha256:5b0efba7ebca08bb158b1e93afc2f07d30b8f40c2fc12ce24a4c0d84f42f9298", size = 19560, upload-time = "2025-11-17T16:50:05.954Z" },
]
[[package]]
name = "pyairtable"
@@ -5911,29 +5800,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" },
]
[[package]]
name = "pydocket"
version = "0.16.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cloudpickle" },
{ name = "fakeredis", extra = ["lua"] },
{ name = "opentelemetry-api" },
{ name = "opentelemetry-exporter-prometheus" },
{ name = "opentelemetry-instrumentation" },
{ name = "prometheus-client" },
{ name = "py-key-value-aio", extra = ["memory", "redis"] },
{ name = "python-json-logger" },
{ name = "redis" },
{ name = "rich" },
{ name = "typer" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e0/c5/61dcfce4d50b66a3f09743294d37fab598b81bb0975054b7f732da9243ec/pydocket-0.16.3.tar.gz", hash = "sha256:78e9da576de09e9f3f410d2471ef1c679b7741ddd21b586c97a13872b69bd265", size = 297080, upload-time = "2025-12-23T23:37:33.32Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/94/93b7f5981aa04f922e0d9ce7326a4587866ec7e39f7c180ffcf408e66ee8/pydocket-0.16.3-py3-none-any.whl", hash = "sha256:e2b50925356e7cd535286255195458ac7bba15f25293356651b36d223db5dd7c", size = 67087, upload-time = "2025-12-23T23:37:31.829Z" },
]
[[package]]
name = "pyee"
version = "13.0.0"
@@ -6268,15 +6134,6 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b5/2d/563849c31e58eb2e273fa0c391a7d9987db32f4d9152fe6ecdac0a8ffe93/python_iso639-2025.11.16-py3-none-any.whl", hash = "sha256:65f6ac6c6d8e8207f6175f8bf7fff7db486c6dc5c1d8866c2b77d2a923370896", size = 167818, upload-time = "2025-11-16T21:53:35.36Z" },
]
[[package]]
name = "python-json-logger"
version = "4.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/29/bf/eca6a3d43db1dae7070f70e160ab20b807627ba953663ba07928cdd3dc58/python_json_logger-4.0.0.tar.gz", hash = "sha256:f58e68eb46e1faed27e0f574a55a0455eecd7b8a5b88b85a784519ba3cff047f", size = 17683, upload-time = "2025-10-06T04:15:18.984Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/51/e5/fecf13f06e5e5f67e8837d777d1bc43fac0ed2b77a676804df5c34744727/python_json_logger-4.0.0-py3-none-any.whl", hash = "sha256:af09c9daf6a813aa4cc7180395f50f2a9e5fa056034c9953aec92e381c5ba1e2", size = 15548, upload-time = "2025-10-06T04:15:17.553Z" },
]
[[package]]
name = "python-magic"
version = "0.4.27"
@@ -8064,6 +7921,93 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/bf/7c/43fb4689fe287eceb701f389863aab35211835d63bbb9a798cfefa80d7de/voyageai-0.2.3-py3-none-any.whl", hash = "sha256:59c4958bd991e83cedb5a82d5e14ac698ce67e42713ea10467631a48ee272b15", size = 19748, upload-time = "2024-05-29T08:12:44.968Z" },
]
[[package]]
name = "watchfiles"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c2/c9/8869df9b2a2d6c59d79220a4db37679e74f807c559ffe5265e08b227a210/watchfiles-1.1.1.tar.gz", hash = "sha256:a173cb5c16c4f40ab19cecf48a534c409f7ea983ab8fed0741304a1c0a31b3f2", size = 94440, upload-time = "2025-10-14T15:06:21.08Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1f/f8/2c5f479fb531ce2f0564eda479faecf253d886b1ab3630a39b7bf7362d46/watchfiles-1.1.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:f57b396167a2565a4e8b5e56a5a1c537571733992b226f4f1197d79e94cf0ae5", size = 406529, upload-time = "2025-10-14T15:04:32.899Z" },
{ url = "https://files.pythonhosted.org/packages/fe/cd/f515660b1f32f65df671ddf6f85bfaca621aee177712874dc30a97397977/watchfiles-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:421e29339983e1bebc281fab40d812742268ad057db4aee8c4d2bce0af43b741", size = 394384, upload-time = "2025-10-14T15:04:33.761Z" },
{ url = "https://files.pythonhosted.org/packages/7b/c3/28b7dc99733eab43fca2d10f55c86e03bd6ab11ca31b802abac26b23d161/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e43d39a741e972bab5d8100b5cdacf69db64e34eb19b6e9af162bccf63c5cc6", size = 448789, upload-time = "2025-10-14T15:04:34.679Z" },
{ url = "https://files.pythonhosted.org/packages/4a/24/33e71113b320030011c8e4316ccca04194bf0cbbaeee207f00cbc7d6b9f5/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f537afb3276d12814082a2e9b242bdcf416c2e8fd9f799a737990a1dbe906e5b", size = 460521, upload-time = "2025-10-14T15:04:35.963Z" },
{ url = "https://files.pythonhosted.org/packages/f4/c3/3c9a55f255aa57b91579ae9e98c88704955fa9dac3e5614fb378291155df/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2cd9e04277e756a2e2d2543d65d1e2166d6fd4c9b183f8808634fda23f17b14", size = 488722, upload-time = "2025-10-14T15:04:37.091Z" },
{ url = "https://files.pythonhosted.org/packages/49/36/506447b73eb46c120169dc1717fe2eff07c234bb3232a7200b5f5bd816e9/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3f58818dc0b07f7d9aa7fe9eb1037aecb9700e63e1f6acfed13e9fef648f5d", size = 596088, upload-time = "2025-10-14T15:04:38.39Z" },
{ url = "https://files.pythonhosted.org/packages/82/ab/5f39e752a9838ec4d52e9b87c1e80f1ee3ccdbe92e183c15b6577ab9de16/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb9f66367023ae783551042d31b1d7fd422e8289eedd91f26754a66f44d5cff", size = 472923, upload-time = "2025-10-14T15:04:39.666Z" },
{ url = "https://files.pythonhosted.org/packages/af/b9/a419292f05e302dea372fa7e6fda5178a92998411f8581b9830d28fb9edb/watchfiles-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aebfd0861a83e6c3d1110b78ad54704486555246e542be3e2bb94195eabb2606", size = 456080, upload-time = "2025-10-14T15:04:40.643Z" },
{ url = "https://files.pythonhosted.org/packages/b0/c3/d5932fd62bde1a30c36e10c409dc5d54506726f08cb3e1d8d0ba5e2bc8db/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fac835b4ab3c6487b5dbad78c4b3724e26bcc468e886f8ba8cc4306f68f6701", size = 629432, upload-time = "2025-10-14T15:04:41.789Z" },
{ url = "https://files.pythonhosted.org/packages/f7/77/16bddd9779fafb795f1a94319dc965209c5641db5bf1edbbccace6d1b3c0/watchfiles-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:399600947b170270e80134ac854e21b3ccdefa11a9529a3decc1327088180f10", size = 623046, upload-time = "2025-10-14T15:04:42.718Z" },
{ url = "https://files.pythonhosted.org/packages/46/ef/f2ecb9a0f342b4bfad13a2787155c6ee7ce792140eac63a34676a2feeef2/watchfiles-1.1.1-cp311-cp311-win32.whl", hash = "sha256:de6da501c883f58ad50db3a32ad397b09ad29865b5f26f64c24d3e3281685849", size = 271473, upload-time = "2025-10-14T15:04:43.624Z" },
{ url = "https://files.pythonhosted.org/packages/94/bc/f42d71125f19731ea435c3948cad148d31a64fccde3867e5ba4edee901f9/watchfiles-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:35c53bd62a0b885bf653ebf6b700d1bf05debb78ad9292cf2a942b23513dc4c4", size = 287598, upload-time = "2025-10-14T15:04:44.516Z" },
{ url = "https://files.pythonhosted.org/packages/57/c9/a30f897351f95bbbfb6abcadafbaca711ce1162f4db95fc908c98a9165f3/watchfiles-1.1.1-cp311-cp311-win_arm64.whl", hash = "sha256:57ca5281a8b5e27593cb7d82c2ac927ad88a96ed406aa446f6344e4328208e9e", size = 277210, upload-time = "2025-10-14T15:04:45.883Z" },
{ url = "https://files.pythonhosted.org/packages/74/d5/f039e7e3c639d9b1d09b07ea412a6806d38123f0508e5f9b48a87b0a76cc/watchfiles-1.1.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8c89f9f2f740a6b7dcc753140dd5e1ab9215966f7a3530d0c0705c83b401bd7d", size = 404745, upload-time = "2025-10-14T15:04:46.731Z" },
{ url = "https://files.pythonhosted.org/packages/a5/96/a881a13aa1349827490dab2d363c8039527060cfcc2c92cc6d13d1b1049e/watchfiles-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bd404be08018c37350f0d6e34676bd1e2889990117a2b90070b3007f172d0610", size = 391769, upload-time = "2025-10-14T15:04:48.003Z" },
{ url = "https://files.pythonhosted.org/packages/4b/5b/d3b460364aeb8da471c1989238ea0e56bec24b6042a68046adf3d9ddb01c/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8526e8f916bb5b9a0a777c8317c23ce65de259422bba5b31325a6fa6029d33af", size = 449374, upload-time = "2025-10-14T15:04:49.179Z" },
{ url = "https://files.pythonhosted.org/packages/b9/44/5769cb62d4ed055cb17417c0a109a92f007114a4e07f30812a73a4efdb11/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2edc3553362b1c38d9f06242416a5d8e9fe235c204a4072e988ce2e5bb1f69f6", size = 459485, upload-time = "2025-10-14T15:04:50.155Z" },
{ url = "https://files.pythonhosted.org/packages/19/0c/286b6301ded2eccd4ffd0041a1b726afda999926cf720aab63adb68a1e36/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30f7da3fb3f2844259cba4720c3fc7138eb0f7b659c38f3bfa65084c7fc7abce", size = 488813, upload-time = "2025-10-14T15:04:51.059Z" },
{ url = "https://files.pythonhosted.org/packages/c7/2b/8530ed41112dd4a22f4dcfdb5ccf6a1baad1ff6eed8dc5a5f09e7e8c41c7/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8979280bdafff686ba5e4d8f97840f929a87ed9cdf133cbbd42f7766774d2aa", size = 594816, upload-time = "2025-10-14T15:04:52.031Z" },
{ url = "https://files.pythonhosted.org/packages/ce/d2/f5f9fb49489f184f18470d4f99f4e862a4b3e9ac2865688eb2099e3d837a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dcc5c24523771db3a294c77d94771abcfcb82a0e0ee8efd910c37c59ec1b31bb", size = 475186, upload-time = "2025-10-14T15:04:53.064Z" },
{ url = "https://files.pythonhosted.org/packages/cf/68/5707da262a119fb06fbe214d82dd1fe4a6f4af32d2d14de368d0349eb52a/watchfiles-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db5d7ae38ff20153d542460752ff397fcf5c96090c1230803713cf3147a6803", size = 456812, upload-time = "2025-10-14T15:04:55.174Z" },
{ url = "https://files.pythonhosted.org/packages/66/ab/3cbb8756323e8f9b6f9acb9ef4ec26d42b2109bce830cc1f3468df20511d/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:28475ddbde92df1874b6c5c8aaeb24ad5be47a11f87cde5a28ef3835932e3e94", size = 630196, upload-time = "2025-10-14T15:04:56.22Z" },
{ url = "https://files.pythonhosted.org/packages/78/46/7152ec29b8335f80167928944a94955015a345440f524d2dfe63fc2f437b/watchfiles-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:36193ed342f5b9842edd3532729a2ad55c4160ffcfa3700e0d54be496b70dd43", size = 622657, upload-time = "2025-10-14T15:04:57.521Z" },
{ url = "https://files.pythonhosted.org/packages/0a/bf/95895e78dd75efe9a7f31733607f384b42eb5feb54bd2eb6ed57cc2e94f4/watchfiles-1.1.1-cp312-cp312-win32.whl", hash = "sha256:859e43a1951717cc8de7f4c77674a6d389b106361585951d9e69572823f311d9", size = 272042, upload-time = "2025-10-14T15:04:59.046Z" },
{ url = "https://files.pythonhosted.org/packages/87/0a/90eb755f568de2688cb220171c4191df932232c20946966c27a59c400850/watchfiles-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:91d4c9a823a8c987cce8fa2690923b069966dabb196dd8d137ea2cede885fde9", size = 288410, upload-time = "2025-10-14T15:05:00.081Z" },
{ url = "https://files.pythonhosted.org/packages/36/76/f322701530586922fbd6723c4f91ace21364924822a8772c549483abed13/watchfiles-1.1.1-cp312-cp312-win_arm64.whl", hash = "sha256:a625815d4a2bdca61953dbba5a39d60164451ef34c88d751f6c368c3ea73d404", size = 278209, upload-time = "2025-10-14T15:05:01.168Z" },
{ url = "https://files.pythonhosted.org/packages/bb/f4/f750b29225fe77139f7ae5de89d4949f5a99f934c65a1f1c0b248f26f747/watchfiles-1.1.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:130e4876309e8686a5e37dba7d5e9bc77e6ed908266996ca26572437a5271e18", size = 404321, upload-time = "2025-10-14T15:05:02.063Z" },
{ url = "https://files.pythonhosted.org/packages/2b/f9/f07a295cde762644aa4c4bb0f88921d2d141af45e735b965fb2e87858328/watchfiles-1.1.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5f3bde70f157f84ece3765b42b4a52c6ac1a50334903c6eaf765362f6ccca88a", size = 391783, upload-time = "2025-10-14T15:05:03.052Z" },
{ url = "https://files.pythonhosted.org/packages/bc/11/fc2502457e0bea39a5c958d86d2cb69e407a4d00b85735ca724bfa6e0d1a/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e0b1fe858430fc0251737ef3824c54027bedb8c37c38114488b8e131cf8219", size = 449279, upload-time = "2025-10-14T15:05:04.004Z" },
{ url = "https://files.pythonhosted.org/packages/e3/1f/d66bc15ea0b728df3ed96a539c777acfcad0eb78555ad9efcaa1274688f0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f27db948078f3823a6bb3b465180db8ebecf26dd5dae6f6180bd87383b6b4428", size = 459405, upload-time = "2025-10-14T15:05:04.942Z" },
{ url = "https://files.pythonhosted.org/packages/be/90/9f4a65c0aec3ccf032703e6db02d89a157462fbb2cf20dd415128251cac0/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059098c3a429f62fc98e8ec62b982230ef2c8df68c79e826e37b895bc359a9c0", size = 488976, upload-time = "2025-10-14T15:05:05.905Z" },
{ url = "https://files.pythonhosted.org/packages/37/57/ee347af605d867f712be7029bb94c8c071732a4b44792e3176fa3c612d39/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfb5862016acc9b869bb57284e6cb35fdf8e22fe59f7548858e2f971d045f150", size = 595506, upload-time = "2025-10-14T15:05:06.906Z" },
{ url = "https://files.pythonhosted.org/packages/a8/78/cc5ab0b86c122047f75e8fc471c67a04dee395daf847d3e59381996c8707/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:319b27255aacd9923b8a276bb14d21a5f7ff82564c744235fc5eae58d95422ae", size = 474936, upload-time = "2025-10-14T15:05:07.906Z" },
{ url = "https://files.pythonhosted.org/packages/62/da/def65b170a3815af7bd40a3e7010bf6ab53089ef1b75d05dd5385b87cf08/watchfiles-1.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c755367e51db90e75b19454b680903631d41f9e3607fbd941d296a020c2d752d", size = 456147, upload-time = "2025-10-14T15:05:09.138Z" },
{ url = "https://files.pythonhosted.org/packages/57/99/da6573ba71166e82d288d4df0839128004c67d2778d3b566c138695f5c0b/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c22c776292a23bfc7237a98f791b9ad3144b02116ff10d820829ce62dff46d0b", size = 630007, upload-time = "2025-10-14T15:05:10.117Z" },
{ url = "https://files.pythonhosted.org/packages/a8/51/7439c4dd39511368849eb1e53279cd3454b4a4dbace80bab88feeb83c6b5/watchfiles-1.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:3a476189be23c3686bc2f4321dd501cb329c0a0469e77b7b534ee10129ae6374", size = 622280, upload-time = "2025-10-14T15:05:11.146Z" },
{ url = "https://files.pythonhosted.org/packages/95/9c/8ed97d4bba5db6fdcdb2b298d3898f2dd5c20f6b73aee04eabe56c59677e/watchfiles-1.1.1-cp313-cp313-win32.whl", hash = "sha256:bf0a91bfb5574a2f7fc223cf95eeea79abfefa404bf1ea5e339c0c1560ae99a0", size = 272056, upload-time = "2025-10-14T15:05:12.156Z" },
{ url = "https://files.pythonhosted.org/packages/1f/f3/c14e28429f744a260d8ceae18bf58c1d5fa56b50d006a7a9f80e1882cb0d/watchfiles-1.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:52e06553899e11e8074503c8e716d574adeeb7e68913115c4b3653c53f9bae42", size = 288162, upload-time = "2025-10-14T15:05:13.208Z" },
{ url = "https://files.pythonhosted.org/packages/dc/61/fe0e56c40d5cd29523e398d31153218718c5786b5e636d9ae8ae79453d27/watchfiles-1.1.1-cp313-cp313-win_arm64.whl", hash = "sha256:ac3cc5759570cd02662b15fbcd9d917f7ecd47efe0d6b40474eafd246f91ea18", size = 277909, upload-time = "2025-10-14T15:05:14.49Z" },
{ url = "https://files.pythonhosted.org/packages/79/42/e0a7d749626f1e28c7108a99fb9bf524b501bbbeb9b261ceecde644d5a07/watchfiles-1.1.1-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:563b116874a9a7ce6f96f87cd0b94f7faf92d08d0021e837796f0a14318ef8da", size = 403389, upload-time = "2025-10-14T15:05:15.777Z" },
{ url = "https://files.pythonhosted.org/packages/15/49/08732f90ce0fbbc13913f9f215c689cfc9ced345fb1bcd8829a50007cc8d/watchfiles-1.1.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3ad9fe1dae4ab4212d8c91e80b832425e24f421703b5a42ef2e4a1e215aff051", size = 389964, upload-time = "2025-10-14T15:05:16.85Z" },
{ url = "https://files.pythonhosted.org/packages/27/0d/7c315d4bd5f2538910491a0393c56bf70d333d51bc5b34bee8e68e8cea19/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce70f96a46b894b36eba678f153f052967a0d06d5b5a19b336ab0dbbd029f73e", size = 448114, upload-time = "2025-10-14T15:05:17.876Z" },
{ url = "https://files.pythonhosted.org/packages/c3/24/9e096de47a4d11bc4df41e9d1e61776393eac4cb6eb11b3e23315b78b2cc/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cb467c999c2eff23a6417e58d75e5828716f42ed8289fe6b77a7e5a91036ca70", size = 460264, upload-time = "2025-10-14T15:05:18.962Z" },
{ url = "https://files.pythonhosted.org/packages/cc/0f/e8dea6375f1d3ba5fcb0b3583e2b493e77379834c74fd5a22d66d85d6540/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:836398932192dae4146c8f6f737d74baeac8b70ce14831a239bdb1ca882fc261", size = 487877, upload-time = "2025-10-14T15:05:20.094Z" },
{ url = "https://files.pythonhosted.org/packages/ac/5b/df24cfc6424a12deb41503b64d42fbea6b8cb357ec62ca84a5a3476f654a/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:743185e7372b7bc7c389e1badcc606931a827112fbbd37f14c537320fca08620", size = 595176, upload-time = "2025-10-14T15:05:21.134Z" },
{ url = "https://files.pythonhosted.org/packages/8f/b5/853b6757f7347de4e9b37e8cc3289283fb983cba1ab4d2d7144694871d9c/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:afaeff7696e0ad9f02cbb8f56365ff4686ab205fcf9c4c5b6fdfaaa16549dd04", size = 473577, upload-time = "2025-10-14T15:05:22.306Z" },
{ url = "https://files.pythonhosted.org/packages/e1/f7/0a4467be0a56e80447c8529c9fce5b38eab4f513cb3d9bf82e7392a5696b/watchfiles-1.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7eb7da0eb23aa2ba036d4f616d46906013a68caf61b7fdbe42fc8b25132e77", size = 455425, upload-time = "2025-10-14T15:05:23.348Z" },
{ url = "https://files.pythonhosted.org/packages/8e/e0/82583485ea00137ddf69bc84a2db88bd92ab4a6e3c405e5fb878ead8d0e7/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:831a62658609f0e5c64178211c942ace999517f5770fe9436be4c2faeba0c0ef", size = 628826, upload-time = "2025-10-14T15:05:24.398Z" },
{ url = "https://files.pythonhosted.org/packages/28/9a/a785356fccf9fae84c0cc90570f11702ae9571036fb25932f1242c82191c/watchfiles-1.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:f9a2ae5c91cecc9edd47e041a930490c31c3afb1f5e6d71de3dc671bfaca02bf", size = 622208, upload-time = "2025-10-14T15:05:25.45Z" },
{ url = "https://files.pythonhosted.org/packages/c3/f4/0872229324ef69b2c3edec35e84bd57a1289e7d3fe74588048ed8947a323/watchfiles-1.1.1-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:d1715143123baeeaeadec0528bb7441103979a1d5f6fd0e1f915383fea7ea6d5", size = 404315, upload-time = "2025-10-14T15:05:26.501Z" },
{ url = "https://files.pythonhosted.org/packages/7b/22/16d5331eaed1cb107b873f6ae1b69e9ced582fcf0c59a50cd84f403b1c32/watchfiles-1.1.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:39574d6370c4579d7f5d0ad940ce5b20db0e4117444e39b6d8f99db5676c52fd", size = 390869, upload-time = "2025-10-14T15:05:27.649Z" },
{ url = "https://files.pythonhosted.org/packages/b2/7e/5643bfff5acb6539b18483128fdc0ef2cccc94a5b8fbda130c823e8ed636/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7365b92c2e69ee952902e8f70f3ba6360d0d596d9299d55d7d386df84b6941fb", size = 449919, upload-time = "2025-10-14T15:05:28.701Z" },
{ url = "https://files.pythonhosted.org/packages/51/2e/c410993ba5025a9f9357c376f48976ef0e1b1aefb73b97a5ae01a5972755/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bfff9740c69c0e4ed32416f013f3c45e2ae42ccedd1167ef2d805c000b6c71a5", size = 460845, upload-time = "2025-10-14T15:05:30.064Z" },
{ url = "https://files.pythonhosted.org/packages/8e/a4/2df3b404469122e8680f0fcd06079317e48db58a2da2950fb45020947734/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b27cf2eb1dda37b2089e3907d8ea92922b673c0c427886d4edc6b94d8dfe5db3", size = 489027, upload-time = "2025-10-14T15:05:31.064Z" },
{ url = "https://files.pythonhosted.org/packages/ea/84/4587ba5b1f267167ee715b7f66e6382cca6938e0a4b870adad93e44747e6/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:526e86aced14a65a5b0ec50827c745597c782ff46b571dbfe46192ab9e0b3c33", size = 595615, upload-time = "2025-10-14T15:05:32.074Z" },
{ url = "https://files.pythonhosted.org/packages/6a/0f/c6988c91d06e93cd0bb3d4a808bcf32375ca1904609835c3031799e3ecae/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04e78dd0b6352db95507fd8cb46f39d185cf8c74e4cf1e4fbad1d3df96faf510", size = 474836, upload-time = "2025-10-14T15:05:33.209Z" },
{ url = "https://files.pythonhosted.org/packages/b4/36/ded8aebea91919485b7bbabbd14f5f359326cb5ec218cd67074d1e426d74/watchfiles-1.1.1-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c85794a4cfa094714fb9c08d4a218375b2b95b8ed1666e8677c349906246c05", size = 455099, upload-time = "2025-10-14T15:05:34.189Z" },
{ url = "https://files.pythonhosted.org/packages/98/e0/8c9bdba88af756a2fce230dd365fab2baf927ba42cd47521ee7498fd5211/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:74d5012b7630714b66be7b7b7a78855ef7ad58e8650c73afc4c076a1f480a8d6", size = 630626, upload-time = "2025-10-14T15:05:35.216Z" },
{ url = "https://files.pythonhosted.org/packages/2a/84/a95db05354bf2d19e438520d92a8ca475e578c647f78f53197f5a2f17aaf/watchfiles-1.1.1-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:8fbe85cb3201c7d380d3d0b90e63d520f15d6afe217165d7f98c9c649654db81", size = 622519, upload-time = "2025-10-14T15:05:36.259Z" },
{ url = "https://files.pythonhosted.org/packages/1d/ce/d8acdc8de545de995c339be67711e474c77d643555a9bb74a9334252bd55/watchfiles-1.1.1-cp314-cp314-win32.whl", hash = "sha256:3fa0b59c92278b5a7800d3ee7733da9d096d4aabcfabb9a928918bd276ef9b9b", size = 272078, upload-time = "2025-10-14T15:05:37.63Z" },
{ url = "https://files.pythonhosted.org/packages/c4/c9/a74487f72d0451524be827e8edec251da0cc1fcf111646a511ae752e1a3d/watchfiles-1.1.1-cp314-cp314-win_amd64.whl", hash = "sha256:c2047d0b6cea13b3316bdbafbfa0c4228ae593d995030fda39089d36e64fc03a", size = 287664, upload-time = "2025-10-14T15:05:38.95Z" },
{ url = "https://files.pythonhosted.org/packages/df/b8/8ac000702cdd496cdce998c6f4ee0ca1f15977bba51bdf07d872ebdfc34c/watchfiles-1.1.1-cp314-cp314-win_arm64.whl", hash = "sha256:842178b126593addc05acf6fce960d28bc5fae7afbaa2c6c1b3a7b9460e5be02", size = 277154, upload-time = "2025-10-14T15:05:39.954Z" },
{ url = "https://files.pythonhosted.org/packages/47/a8/e3af2184707c29f0f14b1963c0aace6529f9d1b8582d5b99f31bbf42f59e/watchfiles-1.1.1-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:88863fbbc1a7312972f1c511f202eb30866370ebb8493aef2812b9ff28156a21", size = 403820, upload-time = "2025-10-14T15:05:40.932Z" },
{ url = "https://files.pythonhosted.org/packages/c0/ec/e47e307c2f4bd75f9f9e8afbe3876679b18e1bcec449beca132a1c5ffb2d/watchfiles-1.1.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:55c7475190662e202c08c6c0f4d9e345a29367438cf8e8037f3155e10a88d5a5", size = 390510, upload-time = "2025-10-14T15:05:41.945Z" },
{ url = "https://files.pythonhosted.org/packages/d5/a0/ad235642118090f66e7b2f18fd5c42082418404a79205cdfca50b6309c13/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f53fa183d53a1d7a8852277c92b967ae99c2d4dcee2bfacff8868e6e30b15f7", size = 448408, upload-time = "2025-10-14T15:05:43.385Z" },
{ url = "https://files.pythonhosted.org/packages/df/85/97fa10fd5ff3332ae17e7e40e20784e419e28521549780869f1413742e9d/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6aae418a8b323732fa89721d86f39ec8f092fc2af67f4217a2b07fd3e93c6101", size = 458968, upload-time = "2025-10-14T15:05:44.404Z" },
{ url = "https://files.pythonhosted.org/packages/47/c2/9059c2e8966ea5ce678166617a7f75ecba6164375f3b288e50a40dc6d489/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f096076119da54a6080e8920cbdaac3dbee667eb91dcc5e5b78840b87415bd44", size = 488096, upload-time = "2025-10-14T15:05:45.398Z" },
{ url = "https://files.pythonhosted.org/packages/94/44/d90a9ec8ac309bc26db808a13e7bfc0e4e78b6fc051078a554e132e80160/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00485f441d183717038ed2e887a7c868154f216877653121068107b227a2f64c", size = 596040, upload-time = "2025-10-14T15:05:46.502Z" },
{ url = "https://files.pythonhosted.org/packages/95/68/4e3479b20ca305cfc561db3ed207a8a1c745ee32bf24f2026a129d0ddb6e/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a55f3e9e493158d7bfdb60a1165035f1cf7d320914e7b7ea83fe22c6023b58fc", size = 473847, upload-time = "2025-10-14T15:05:47.484Z" },
{ url = "https://files.pythonhosted.org/packages/4f/55/2af26693fd15165c4ff7857e38330e1b61ab8c37d15dc79118cdba115b7a/watchfiles-1.1.1-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c91ed27800188c2ae96d16e3149f199d62f86c7af5f5f4d2c61a3ed8cd3666c", size = 455072, upload-time = "2025-10-14T15:05:48.928Z" },
{ url = "https://files.pythonhosted.org/packages/66/1d/d0d200b10c9311ec25d2273f8aad8c3ef7cc7ea11808022501811208a750/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:311ff15a0bae3714ffb603e6ba6dbfba4065ab60865d15a6ec544133bdb21099", size = 629104, upload-time = "2025-10-14T15:05:49.908Z" },
{ url = "https://files.pythonhosted.org/packages/e3/bd/fa9bb053192491b3867ba07d2343d9f2252e00811567d30ae8d0f78136fe/watchfiles-1.1.1-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:a916a2932da8f8ab582f242c065f5c81bed3462849ca79ee357dd9551b0e9b01", size = 622112, upload-time = "2025-10-14T15:05:50.941Z" },
{ url = "https://files.pythonhosted.org/packages/d3/8e/e500f8b0b77be4ff753ac94dc06b33d8f0d839377fee1b78e8c8d8f031bf/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:db476ab59b6765134de1d4fe96a1a9c96ddf091683599be0f26147ea1b2e4b88", size = 408250, upload-time = "2025-10-14T15:06:10.264Z" },
{ url = "https://files.pythonhosted.org/packages/bd/95/615e72cd27b85b61eec764a5ca51bd94d40b5adea5ff47567d9ebc4d275a/watchfiles-1.1.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:89eef07eee5e9d1fda06e38822ad167a044153457e6fd997f8a858ab7564a336", size = 396117, upload-time = "2025-10-14T15:06:11.28Z" },
{ url = "https://files.pythonhosted.org/packages/c9/81/e7fe958ce8a7fb5c73cc9fb07f5aeaf755e6aa72498c57d760af760c91f8/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ce19e06cbda693e9e7686358af9cd6f5d61312ab8b00488bc36f5aabbaf77e24", size = 450493, upload-time = "2025-10-14T15:06:12.321Z" },
{ url = "https://files.pythonhosted.org/packages/6e/d4/ed38dd3b1767193de971e694aa544356e63353c33a85d948166b5ff58b9e/watchfiles-1.1.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e6f39af2eab0118338902798b5aa6664f46ff66bc0280de76fca67a7f262a49", size = 457546, upload-time = "2025-10-14T15:06:13.372Z" },
]
[[package]]
name = "wcwidth"
version = "0.2.14"


@@ -3,9 +3,9 @@ import "@opal/components/tooltip.css";
import {
Interactive,
type InteractiveBaseProps,
- type InteractiveContainerHeightVariant,
type InteractiveContainerWidthVariant,
} from "@opal/core";
+ import type { SizeVariant } from "@opal/shared";
import type { TooltipSide } from "@opal/components";
import type { IconFunctionComponent } from "@opal/types";
import * as TooltipPrimitive from "@radix-ui/react-tooltip";
@@ -22,7 +22,7 @@ const iconVariants = {
function iconWrapper(
Icon: IconFunctionComponent | undefined,
- size: InteractiveContainerHeightVariant,
+ size: SizeVariant,
includeSpacer: boolean
) {
const { padding: p, size: s } = iconVariants[size];
@@ -75,8 +75,11 @@ type ButtonContentProps =
type ButtonProps = InteractiveBaseProps &
ButtonContentProps & {
- /** Size preset — controls gap, text size, and Container height/rounding. */
- size?: InteractiveContainerHeightVariant;
+ /**
+ * Size preset — controls gap, text size, and Container height/rounding.
+ * Uses the shared `SizeVariant` scale from `@opal/shared`.
+ */
+ size?: SizeVariant;
/** HTML button type. When provided, Container renders a `<button>` element. */
type?: "submit" | "button" | "reset";
@@ -89,6 +92,9 @@ type ButtonProps = InteractiveBaseProps &
/** Which side the tooltip appears on. */
tooltipSide?: TooltipSide;
+ /** Hide button text and right icon on small screens and show at `md` and above. */
+ responsiveHideText?: boolean;
};
// ---------------------------------------------------------------------------
@@ -105,6 +111,7 @@ function Button({
width,
tooltip,
tooltipSide = "top",
+ responsiveHideText = false,
...interactiveBaseProps
}: ButtonProps) {
const isLarge = size === "lg";
@@ -113,7 +120,8 @@ function Button({
<span
className={cn(
"opal-button-label",
isLarge ? "font-main-ui-body " : "font-secondary-body"
isLarge ? "font-main-ui-body " : "font-secondary-body",
responsiveHideText && "hidden md:inline"
)}
>
{children}
@@ -143,13 +151,25 @@ function Button({
<div className="opal-button-foldable">
<div className="opal-button-foldable-inner">
{labelEl}
- {iconWrapper(RightIcon, size, !!children)}
+ {responsiveHideText ? (
+ <span className="hidden md:inline-flex">
+ {iconWrapper(RightIcon, size, !!children)}
+ </span>
+ ) : (
+ iconWrapper(RightIcon, size, !!children)
+ )}
</div>
</div>
) : (
<>
{labelEl}
- {iconWrapper(RightIcon, size, !!children)}
+ {responsiveHideText ? (
+ <span className="hidden md:inline-flex">
+ {iconWrapper(RightIcon, size, !!children)}
+ </span>
+ ) : (
+ iconWrapper(RightIcon, size, !!children)
+ )}
</>
)}
</div>
@@ -157,7 +177,13 @@ function Button({
</Interactive.Base>
);
- if (!tooltip) return button;
+ const resolvedTooltip =
+ tooltip ??
+ (foldable && interactiveBaseProps.disabled && children
+ ? children
+ : undefined);
+ if (!resolvedTooltip) return button;
return (
<TooltipPrimitive.Root>
@@ -168,7 +194,7 @@ function Button({
side={tooltipSide}
sideOffset={4}
>
- {tooltip}
+ {resolvedTooltip}
</TooltipPrimitive.Content>
</TooltipPrimitive.Portal>
</TooltipPrimitive.Root>
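Taken together, the button changes above do two things: the new `responsiveHideText` prop hides the label and right icon below the `md` breakpoint, and a foldable button that is disabled now falls back to showing its label as the tooltip when no explicit `tooltip` is passed. A minimal usage sketch — the import paths, `SendIcon`, and the `rightIcon`/`foldable` prop names are assumptions inferred from identifiers in this diff, not confirmed by it:

// Hypothetical consumer of the updated Button (import paths and
// rightIcon/foldable prop names are assumed, not shown in this diff).
import { Button } from "@opal/components";
import { SendIcon } from "@opal/icons";

function Toolbar() {
  return (
    <>
      {/* responsiveHideText: label and right icon are hidden below `md`
          and shown at `md` and above. */}
      <Button size="md" rightIcon={SendIcon} responsiveHideText>
        Send
      </Button>
      {/* Foldable + disabled with no explicit tooltip: the label ("Send")
          is surfaced as the tooltip content instead. */}
      <Button foldable disabled rightIcon={SendIcon}>
        Send
      </Button>
    </>
  );
}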


@@ -4,7 +4,6 @@ export {
type InteractiveBaseProps,
type InteractiveBaseVariantProps,
type InteractiveContainerProps,
type InteractiveContainerHeightVariant,
type InteractiveContainerWidthVariant,
type InteractiveContainerRoundingVariant,
} from "@opal/core/interactive/components";


@@ -3,6 +3,7 @@ import React from "react";
import { Slot } from "@radix-ui/react-slot";
import { cn } from "@opal/utils";
import type { WithoutStyles } from "@opal/types";
+ import { sizeVariants, type SizeVariant } from "@opal/shared";
// ---------------------------------------------------------------------------
// Types
@@ -38,31 +39,6 @@ type InteractiveBaseVariantProps =
selected?: never;
};
- /**
- * Height presets for `Interactive.Container`.
- *
- * - `"lg"` — 2.25rem (36px), suitable for most buttons/items
- * - `"md"` — 1.75rem (28px), standard compact size
- * - `"sm"` — 1.5rem (24px), for denser UIs
- * - `"xs"` — 1.25rem (20px), for inline elements
- * - `"2xs"` — 1rem (16px), for micro elements
- * - `"fit"` — Shrink-wraps to content height (`h-fit`), for variable-height layouts
- */
- type InteractiveContainerHeightVariant =
- keyof typeof interactiveContainerSizeVariants;
- const interactiveContainerSizeVariants = {
- lg: { height: "h-[2.25rem]", minWidth: "min-w-[2.25rem]", padding: "p-2" },
- md: { height: "h-[1.75rem]", minWidth: "min-w-[1.75rem]", padding: "p-1" },
- sm: { height: "h-[1.5rem]", minWidth: "min-w-[1.5rem]", padding: "p-1" },
- xs: {
- height: "h-[1.25rem]",
- minWidth: "min-w-[1.25rem]",
- padding: "p-0.5",
- },
- "2xs": { height: "h-[1rem]", minWidth: "min-w-[1rem]", padding: "p-0.5" },
- fit: { height: "h-fit", minWidth: "", padding: "p-0" },
- } as const;
/**
* Width presets for `Interactive.Container`.
*
@@ -353,18 +329,13 @@ interface InteractiveContainerProps
roundingVariant?: InteractiveContainerRoundingVariant;
/**
- * Height preset controlling the container's vertical size.
- *
- * - `"lg"` — 2.25rem (36px), typical button/item height
- * - `"md"` — 1.75rem (28px), standard compact size
- * - `"sm"` — 1.5rem (24px), for denser UIs
- * - `"xs"` — 1.25rem (20px), for inline elements
- * - `"2xs"` — 1rem (16px), for micro elements
- * - `"fit"` — Shrink-wraps to content height (`h-fit`)
+ * Size preset controlling the container's height, min-width, and padding.
+ * Uses the shared `SizeVariant` scale from `@opal/shared`.
*
* @default "lg"
+ * @see {@link SizeVariant} for the full list of presets.
*/
- heightVariant?: InteractiveContainerHeightVariant;
+ heightVariant?: SizeVariant;
/**
* Width preset controlling the container's horizontal size.
@@ -433,8 +404,7 @@ function InteractiveContainer({
target?: string;
rel?: string;
};
- const { height, minWidth, padding } =
- interactiveContainerSizeVariants[heightVariant];
+ const { height, minWidth, padding } = sizeVariants[heightVariant];
const sharedProps = {
...rest,
className: cn(
@@ -520,7 +490,6 @@ export {
type InteractiveBaseVariantProps,
type InteractiveBaseSelectVariantProps,
type InteractiveContainerProps,
- type InteractiveContainerHeightVariant,
type InteractiveContainerWidthVariant,
type InteractiveContainerRoundingVariant,
};
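The `@opal/shared` module that now owns this scale is not part of this diff. A sketch of what its `sizeVariants` export presumably looks like, reconstructed from the deleted `interactiveContainerSizeVariants` map above — only the per-variant values are taken from this diff; the module's actual contents are an assumption:

// Assumed shape of the shared size scale in @opal/shared.
// Values copied from the removed interactiveContainerSizeVariants map.
const sizeVariants = {
  lg: { height: "h-[2.25rem]", minWidth: "min-w-[2.25rem]", padding: "p-2" },
  md: { height: "h-[1.75rem]", minWidth: "min-w-[1.75rem]", padding: "p-1" },
  sm: { height: "h-[1.5rem]", minWidth: "min-w-[1.5rem]", padding: "p-1" },
  xs: { height: "h-[1.25rem]", minWidth: "min-w-[1.25rem]", padding: "p-0.5" },
  "2xs": { height: "h-[1rem]", minWidth: "min-w-[1rem]", padding: "p-0.5" },
  fit: { height: "h-fit", minWidth: "", padding: "p-0" },
} as const;

type SizeVariant = keyof typeof sizeVariants; // "lg" | "md" | "sm" | "xs" | "2xs" | "fit"

export { sizeVariants, type SizeVariant };

Centralizing the scale in one module keeps `Button` and `Interactive.Container` from drifting apart on sizing, which is presumably why the local `InteractiveContainerHeightVariant` type and its re-exports were dropped.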


@@ -394,7 +394,7 @@
}
.interactive[data-interactive-base-variant="select"][data-disabled] {
@apply bg-transparent;
- --interactive-foreground: var(--text-02);
+ --interactive-foreground: var(--text-01);
}
.interactive[data-interactive-base-variant="select"][data-selected="true"][data-disabled] {
--interactive-foreground: var(--action-link-03);

Some files were not shown because too many files have changed in this diff.