Compare commits

..

1 Commit

Author SHA1 Message Date
Jamison Lahman
6b198eb433 fix(chat): improve LLM_SOCKET_READ_TIMEOUT user experience 2026-04-21 11:32:15 -07:00
107 changed files with 1110 additions and 3681 deletions

View File

@@ -19,7 +19,6 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
iptables \
ipset \
iproute2 \
dnsmasq \
dnsutils \
unzip \
wget \

View File

@@ -1,6 +1,6 @@
{
"name": "Onyx Dev Sandbox",
"image": "onyxdotapp/onyx-devcontainer@sha256:9aedd9b6e127c7e23c57eb05bbe14f466dec3093d26bfdb82c3299b275211419",
"image": "onyxdotapp/onyx-devcontainer@sha256:4986c9252289b660ce772b45f0488b938fe425d8114245e96ef64b273b3fcee4",
"runArgs": [
"--cap-add=NET_ADMIN",
"--cap-add=NET_RAW",

View File

@@ -1,32 +0,0 @@
# Local resolver for the devcontainer. init-firewall.sh launches dnsmasq with
# this config and points /etc/resolv.conf at 127.0.0.1.
#
# The `ipset=` directives are the reason this exists: every A record dnsmasq
# returns for a listed domain is added to the `allowed-domains` ipset that
# iptables matches on. This keeps the allowlist correct as CDN IPs rotate
# (Fastly, Cloudflare, etc.) — the one-shot getent-at-boot approach in
# init-firewall.sh cannot do that.
no-resolv
no-hosts
# Forward upstream to Docker's embedded resolver.
server=127.0.0.11
# Only answer on loopback.
listen-address=127.0.0.1
bind-interfaces
cache-size=1000
# Domains whose resolved IPs should be added to the firewall allowlist.
# Keep in sync with ALLOWED_DOMAINS in init-firewall.sh.
ipset=/github.com/api.github.com/allowed-domains
ipset=/registry.npmjs.org/deb.nodesource.com/allowed-domains
ipset=/api.anthropic.com/api-staging.anthropic.com/files.anthropic.com/allowed-domains
ipset=/sentry.io/update.code.visualstudio.com/allowed-domains
ipset=/pypi.org/files.pythonhosted.org/allowed-domains
ipset=/go.dev/proxy.golang.org/sum.golang.org/allowed-domains
ipset=/storage.googleapis.com/dl.google.com/allowed-domains
ipset=/static.rust-lang.org/index.crates.io/static.crates.io/allowed-domains
ipset=/archive.ubuntu.com/security.ubuntu.com/allowed-domains

View File

@@ -121,16 +121,4 @@ if ! timeout 5 curl -s https://api.github.com/meta > /dev/null; then
echo "Warning: GitHub API is not accessible"
fi
# Start dnsmasq and point the container resolver at it. dnsmasq's ipset=
# directives add every resolved A record for allowlisted domains into the
# `allowed-domains` ipset at resolve time, keeping the firewall in step with
# CDN IP rotation.
pkill -x dnsmasq 2>/dev/null || true
dnsmasq -C /workspace/.devcontainer/dnsmasq.conf
cat > /etc/resolv.conf <<EOF
nameserver 127.0.0.1
options edns0 trust-ad
EOF
echo "Firewall setup complete"

View File

@@ -105,7 +105,7 @@ jobs:
- name: Upload build artifacts
if: always()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: desktop-build-${{ matrix.platform }}-${{ github.run_id }}
path: |

View File

@@ -184,7 +184,7 @@ jobs:
- name: Upload Docker logs
if: failure()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: docker-logs-${{ matrix.test-dir }}
path: docker-logs/

View File

@@ -423,9 +423,6 @@ jobs:
-e ENABLE_OPENSEARCH_INDEXING_FOR_ONYX=false \
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e S3_ENDPOINT_URL=http://minio:9000 \
-e S3_AWS_ACCESS_KEY_ID=minioadmin \
-e S3_AWS_SECRET_ACCESS_KEY=minioadmin \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e EXA_API_KEY=${EXA_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
@@ -471,7 +468,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: docker-all-logs-${{ matrix.edition }}-${{ matrix.test-dir.name }}
path: ${{ github.workspace }}/docker-compose.log
@@ -592,7 +589,7 @@ jobs:
- name: Upload logs (onyx-lite)
if: always()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: docker-all-logs-onyx-lite
path: ${{ github.workspace }}/docker-compose-onyx-lite.log
@@ -730,7 +727,7 @@ jobs:
- name: Upload logs (multi-tenant)
if: always()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: docker-all-logs-multitenant
path: ${{ github.workspace }}/docker-compose-multitenant.log

View File

@@ -44,7 +44,7 @@ jobs:
- name: Upload coverage reports
if: always()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: jest-coverage-${{ github.run_id }}
path: ./web/coverage

View File

@@ -445,7 +445,7 @@ jobs:
run: |
npx playwright test --project ${PROJECT}
- uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
if: always()
with:
# Includes test results and trace.zip files
@@ -454,7 +454,7 @@ jobs:
retention-days: 30
- name: Upload screenshots
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
if: always()
with:
name: playwright-screenshots-${{ matrix.project }}-${{ github.run_id }}
@@ -534,7 +534,7 @@ jobs:
"s3://${PLAYWRIGHT_S3_BUCKET}/reports/pr-${PR_NUMBER}/${RUN_ID}/${PROJECT}/"
- name: Upload visual diff summary
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
if: always()
with:
name: screenshot-diff-summary-${{ matrix.project }}
@@ -543,7 +543,7 @@ jobs:
retention-days: 5
- name: Upload visual diff report artifact
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
if: always()
with:
name: screenshot-diff-report-${{ matrix.project }}-${{ github.run_id }}
@@ -590,7 +590,7 @@ jobs:
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: docker-logs-${{ matrix.project }}-${{ github.run_id }}
path: ${{ github.workspace }}/docker-compose.log
@@ -674,7 +674,7 @@ jobs:
working-directory: ./web
run: npx playwright test --project lite
- uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
- uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
if: always()
with:
name: playwright-test-results-lite-${{ github.run_id }}
@@ -692,7 +692,7 @@ jobs:
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: docker-logs-lite-${{ github.run_id }}
path: ${{ github.workspace }}/docker-compose.log

View File

@@ -121,7 +121,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: docker-all-logs
path: ${{ github.workspace }}/docker-compose.log

View File

@@ -319,7 +319,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@043fb46d1a93c77aae656e7c1c64a875d1fc6a0a
uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f
with:
name: docker-all-logs-nightly-${{ matrix.provider }}-llm-provider
path: |

2
.gitignore vendored
View File

@@ -61,4 +61,4 @@ node_modules
plans/
# Added context for LLMs
.claude/CLAUDE.md
onyx-llm-context/

View File

@@ -1,33 +0,0 @@
"""remove multilingual_expansion from search_settings

Revision ID: a7c3e2b1d4f8
Revises: 856bcbe14d79
Create Date: 2026-04-16

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = "a7c3e2b1d4f8"
down_revision = "856bcbe14d79"
branch_labels: None = None
depends_on: None = None


def upgrade() -> None:
    # Forward migration: drop the column outright. Any per-row data held in
    # `multilingual_expansion` is lost once this runs (see downgrade note).
    op.drop_column("search_settings", "multilingual_expansion")


def downgrade() -> None:
    # Re-create the column with an empty-array server default. NOTE(review):
    # the original values are not recoverable — rows that previously held a
    # non-empty expansion list come back as `{}` after a downgrade.
    op.add_column(
        "search_settings",
        sa.Column(
            "multilingual_expansion",
            postgresql.ARRAY(sa.String()),
            nullable=False,
            server_default="{}",
        ),
    )

View File

@@ -1,79 +0,0 @@
"""add_tenant_invite_counter_table

Revision ID: d4e7a92c1b38
Revises: 3b9f09038764
Create Date: 2026-04-20 18:00:00.000000

Adds `public.tenant_invite_counter`, the lifetime invite-quota counter used by
the trial-tenant cap in `bulk_invite_users`. One row per tenant; holds a
monotonically-incremented total of invites ever reserved by that tenant.

Why we need it:
    Trial tenants are capped at NUM_FREE_TRIAL_USER_INVITES per lifetime.
    A counter derived from the mutable KV-backed invited-users list can be
    reset by the remove-invited-user endpoint (each removal pops a KV
    entry, lowering the effective count), allowing the cap to be bypassed
    by looping invite → remove → invite. This table stores a counter that
    is only ever incremented; no endpoint decrements it, so removals do
    not free up quota.

How it works:
    Each call to `bulk_invite_users` for a trial tenant runs a single atomic
    UPSERT:

        INSERT INTO public.tenant_invite_counter (tenant_id, total_invites_sent)
        VALUES (:tid, :n)
        ON CONFLICT (tenant_id) DO UPDATE
            SET total_invites_sent = tenant_invite_counter.total_invites_sent + EXCLUDED.total_invites_sent,
                updated_at = NOW()
        RETURNING total_invites_sent;

    The UPDATE takes a row-level lock on `tenant_id`, so concurrent bulk-
    invite flows for the same tenant are serialized without an advisory
    lock. If the returned total exceeds the cap the caller ROLLBACKs so the
    reservation does not stick. Paid tenants skip this path entirely.

Deploy-time behavior:
    The table ships empty. Trial tenants with pre-existing KV invited-users
    entries are not seeded, so each one's counter starts at 0 and can
    issue one additional full batch (up to NUM_FREE_TRIAL_USER_INVITES)
    before the monotonic guard engages. Scope of the gap is bounded to
    one batch per trial tenant and does not recur; backfill was
    intentionally skipped to keep this migration pure-DDL.
"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "d4e7a92c1b38"
down_revision = "3b9f09038764"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Pure DDL — the table ships empty; see the module docstring for why no
    # backfill from pre-existing KV invited-users entries is performed.
    op.create_table(
        "tenant_invite_counter",
        # One row per tenant; tenant_id is the primary key (constraint below).
        sa.Column("tenant_id", sa.String(), nullable=False),
        # Monotonic lifetime counter — per the docstring, only ever
        # incremented by the UPSERT in `bulk_invite_users`; never decremented.
        sa.Column(
            "total_invites_sent",
            sa.Integer(),
            nullable=False,
            server_default="0",
        ),
        # Bumped to NOW() by the UPSERT on every reservation.
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("tenant_id"),
        schema="public",
    )


def downgrade() -> None:
    # Dropping the table forfeits all lifetime counts; a later re-upgrade
    # restarts every trial tenant's quota from zero (same one-batch gap the
    # docstring describes for initial deploy).
    op.drop_table("tenant_invite_counter", schema="public")

View File

@@ -2,24 +2,21 @@
Two flows share this module:
1. Email/password signup — ``UserManager.create`` verifies the token
posted with the signup body.
2. Google OAuth signup — the frontend pre-verifies a token, the backend
sets a signed cookie, and ``CaptchaCookieMiddleware`` checks the
cookie on the ``/auth/oauth/callback`` redirect.
1. Email/password signup. The token is posted with the signup body and
verified inline by ``UserManager.create``.
Verification calls the reCAPTCHA Enterprise Assessment API so rejections
can key on ``riskAnalysis.reasons`` rather than a raw 0-1 score.
``issue_captcha_cookie_value`` / ``validate_captcha_cookie_value`` sign
the OAuth cookie.
2. Google OAuth signup. The OAuth callback request originates from Google
as a browser redirect, so we cannot attach a header or body field to it
at that moment. Instead the frontend verifies a reCAPTCHA token BEFORE
redirecting to Google and we set a signed HttpOnly cookie. The cookie
is sent automatically on the callback request, where middleware checks
it. ``issue_captcha_cookie_value`` / ``validate_captcha_cookie_value``
handle the HMAC signing + expiry.
"""
import hashlib
import hmac
import time
from datetime import datetime
from datetime import timezone
from enum import StrEnum
import httpx
from pydantic import BaseModel
@@ -27,87 +24,54 @@ from pydantic import Field
from onyx.configs.app_configs import CAPTCHA_COOKIE_TTL_SECONDS
from onyx.configs.app_configs import CAPTCHA_ENABLED
from onyx.configs.app_configs import RECAPTCHA_ENTERPRISE_API_KEY
from onyx.configs.app_configs import RECAPTCHA_ENTERPRISE_PROJECT_ID
from onyx.configs.app_configs import RECAPTCHA_HOSTNAME_ALLOWLIST
from onyx.configs.app_configs import RECAPTCHA_SCORE_THRESHOLD
from onyx.configs.app_configs import RECAPTCHA_SITE_KEY
from onyx.configs.app_configs import RECAPTCHA_SECRET_KEY
from onyx.configs.app_configs import USER_AUTH_SECRET
from onyx.redis.redis_pool import get_async_redis_connection
from onyx.utils.logger import setup_logger
logger = setup_logger()
RECAPTCHA_VERIFY_URL = "https://www.google.com/recaptcha/api/siteverify"
CAPTCHA_COOKIE_NAME = "onyx_captcha_verified"
# Enterprise Assessment reason enums defined by Google — not a
# per-deployment tuning knob.
_HARD_REJECT_REASONS: frozenset[str] = frozenset(
{"AUTOMATION", "UNEXPECTED_ENVIRONMENT", "TOO_MUCH_TRAFFIC"}
)
# Matches Google's own ~2 minute token validity window.
_TOKEN_MAX_AGE_SECONDS = 120
# Google v3 tokens expire server-side at ~2 minutes, so 120s is the max useful
# replay window — after that Google would reject the token anyway.
_REPLAY_CACHE_TTL_SECONDS = 120
_REPLAY_KEY_PREFIX = "captcha:replay:"
class CaptchaAction(StrEnum):
"""Distinct per-endpoint action names. Enforced against
``tokenProperties.action`` with strict equality so a token minted for
one endpoint cannot be replayed against another."""
SIGNUP = "signup"
LOGIN = "login"
OAUTH = "oauth"
class CaptchaVerificationError(Exception):
"""Raised when captcha verification fails."""
class _TokenProperties(BaseModel):
valid: bool = False
invalid_reason: str | None = Field(default=None, alias="invalidReason")
class RecaptchaResponse(BaseModel):
"""Response from Google reCAPTCHA verification API."""
success: bool
score: float | None = None # Only present for reCAPTCHA v3
action: str | None = None
challenge_ts: str | None = None
hostname: str | None = None
create_time: str | None = Field(default=None, alias="createTime")
class _RiskAnalysis(BaseModel):
score: float = 0.0
reasons: list[str] = Field(default_factory=list)
class RecaptchaAssessmentResponse(BaseModel):
name: str | None = None
token_properties: _TokenProperties = Field(
default_factory=_TokenProperties, alias="tokenProperties"
)
risk_analysis: _RiskAnalysis = Field(
default_factory=_RiskAnalysis, alias="riskAnalysis"
)
error_codes: list[str] | None = Field(default=None, alias="error-codes")
def is_captcha_enabled() -> bool:
return (
CAPTCHA_ENABLED
and bool(RECAPTCHA_ENTERPRISE_PROJECT_ID)
and bool(RECAPTCHA_ENTERPRISE_API_KEY)
and bool(RECAPTCHA_SITE_KEY)
)
"""Check if captcha verification is enabled."""
return CAPTCHA_ENABLED and bool(RECAPTCHA_SECRET_KEY)
def _replay_cache_key(token: str) -> str:
"""Avoid storing the raw token in Redis — hash it first."""
digest = hashlib.sha256(token.encode("utf-8")).hexdigest()
return f"{_REPLAY_KEY_PREFIX}{digest}"
async def _reserve_token_or_raise(token: str) -> None:
"""Claim a token fingerprint via ``SETNX``. A concurrent replay within
the TTL returns False → raise. Redis errors fail open so a blip does
not block legitimate signups."""
"""SETNX a token fingerprint. If another caller already claimed it within
the TTL, reject as a replay. Fails open on Redis errors — losing replay
protection is strictly better than hard-failing legitimate registrations
if Redis blips."""
try:
redis = await get_async_redis_connection()
claimed = await redis.set(
@@ -128,129 +92,139 @@ async def _reserve_token_or_raise(token: str) -> None:
async def _release_token(token: str) -> None:
"""Unclaim the reservation when the failure is OURS (transport, parse),
not Google's. Google-rejected tokens stay claimed — they are dead for
their whole TTL regardless."""
"""Unclaim a previously-reserved token so a retry with the same still-valid
token is not blocked. Called when WE fail (network error talking to
Google), not when Google rejects the token — Google rejections mean the
token is permanently invalid and must stay claimed."""
try:
redis = await get_async_redis_connection()
await redis.delete(_replay_cache_key(token))
except Exception as e:
# Worst case: the user must wait up to 120s before the TTL expires
# on its own and they can retry. Still strictly better than failing
# open on the reservation side.
logger.error(f"Captcha replay cache release error (ignored): {e}")
def _check_token_freshness(create_time: str | None) -> None:
if create_time is None:
raise CaptchaVerificationError(
"Captcha verification failed: missing createTime"
)
try:
ts = datetime.fromisoformat(create_time.replace("Z", "+00:00"))
except ValueError:
logger.warning(f"Captcha createTime unparseable: {create_time!r}")
raise CaptchaVerificationError(
"Captcha verification failed: malformed createTime"
)
age_seconds = (datetime.now(timezone.utc) - ts).total_seconds()
if age_seconds > _TOKEN_MAX_AGE_SECONDS:
logger.warning(f"Captcha token stale: age={age_seconds:.1f}s")
raise CaptchaVerificationError("Captcha verification failed: token expired")
def _evaluate_assessment(
result: RecaptchaAssessmentResponse, action: CaptchaAction
async def verify_captcha_token(
token: str,
expected_action: str = "signup",
) -> None:
tp = result.token_properties
ra = result.risk_analysis
"""
Verify a reCAPTCHA token with Google's API.
if not tp.valid:
reason = tp.invalid_reason or "INVALID"
logger.warning(f"Captcha token invalid: reason={reason}")
raise CaptchaVerificationError(f"Captcha verification failed: {reason}")
Args:
token: The reCAPTCHA response token from the client
expected_action: Expected action name for v3 verification
if RECAPTCHA_HOSTNAME_ALLOWLIST and (
tp.hostname is None or tp.hostname not in RECAPTCHA_HOSTNAME_ALLOWLIST
):
logger.warning(f"Captcha hostname mismatch: {tp.hostname!r}")
raise CaptchaVerificationError("Captcha verification failed: hostname mismatch")
_check_token_freshness(tp.create_time)
if tp.action != action.value:
logger.warning(
f"Captcha action mismatch: got={tp.action!r} expected={action.value!r}"
)
raise CaptchaVerificationError("Captcha verification failed: action mismatch")
hard = _HARD_REJECT_REASONS.intersection(ra.reasons)
if hard:
logger.warning(f"Captcha hard reject: reasons={sorted(hard)} score={ra.score}")
raise CaptchaVerificationError(
f"Captcha verification failed: {', '.join(sorted(hard))}"
)
if ra.score < RECAPTCHA_SCORE_THRESHOLD:
logger.warning(
f"Captcha score below threshold: {ra.score} < {RECAPTCHA_SCORE_THRESHOLD} reasons={ra.reasons}"
)
raise CaptchaVerificationError(
"Captcha verification failed: suspicious activity detected"
)
logger.info(
f"Captcha verification passed: action={tp.action} score={ra.score} reasons={ra.reasons} hostname={tp.hostname}"
)
async def verify_captcha_token(token: str, action: CaptchaAction) -> None:
"""Reject on any of: empty token, replay, invalid token, hostname
mismatch, stale createTime, action mismatch, hard-reject reason, or
score below threshold. No silent skip on null/empty fields."""
Raises:
CaptchaVerificationError: If verification fails
"""
if not is_captcha_enabled():
return
if not token:
raise CaptchaVerificationError("Captcha token is required")
# Claim before the Google round-trip so a concurrent replay of the
# same token is rejected without both callers hitting the API.
# Claim the token first so a concurrent replay of the same value cannot
# slip through the Google round-trip window. Done BEFORE calling Google
# because even a still-valid token should only redeem once.
await _reserve_token_or_raise(token)
try:
async with httpx.AsyncClient() as client:
response = await client.post(
(
f"https://recaptchaenterprise.googleapis.com/v1/projects/{RECAPTCHA_ENTERPRISE_PROJECT_ID}/assessments"
),
params={"key": RECAPTCHA_ENTERPRISE_API_KEY},
json={
"event": {
"token": token,
"siteKey": RECAPTCHA_SITE_KEY,
"expectedAction": action.value,
}
RECAPTCHA_VERIFY_URL,
data={
"secret": RECAPTCHA_SECRET_KEY,
"response": token,
},
timeout=10.0,
)
response.raise_for_status()
result = RecaptchaAssessmentResponse(**response.json())
_evaluate_assessment(result, action)
data = response.json()
result = RecaptchaResponse(**data)
if not result.success:
error_codes = result.error_codes or ["unknown-error"]
logger.warning(f"Captcha verification failed: {error_codes}")
raise CaptchaVerificationError(
f"Captcha verification failed: {', '.join(error_codes)}"
)
# Require v3 score. Google's public test secret returns no score
# — that path must not be active in prod since it skips the only
# human-vs-bot signal. A missing score here means captcha is
# misconfigured (test secret in prod, or a v2 response slipped in
# via an action mismatch).
if result.score is None:
logger.warning(
"Captcha verification failed: siteverify returned no score (likely test secret in prod)"
)
raise CaptchaVerificationError(
"Captcha verification failed: missing score"
)
if result.score < RECAPTCHA_SCORE_THRESHOLD:
logger.warning(
f"Captcha score too low: {result.score} < {RECAPTCHA_SCORE_THRESHOLD}"
)
raise CaptchaVerificationError(
"Captcha verification failed: suspicious activity detected"
)
if result.action and result.action != expected_action:
logger.warning(
f"Captcha action mismatch: {result.action} != {expected_action}"
)
raise CaptchaVerificationError(
"Captcha verification failed: action mismatch"
)
logger.debug(
f"Captcha verification passed: score={result.score}, action={result.action}"
)
except CaptchaVerificationError:
# Definitively-bad token (Google rejected it, score too low, action
# mismatch). Keep the reservation so the same token cannot be
# retried elsewhere during the TTL window.
raise
except Exception as e:
# Anything else — network failure, JSON decode error, Pydantic
# validation error on an unexpected siteverify response shape — is
# OUR inability to verify the token, not proof the token is bad.
# Release the reservation so the user can retry with the same
# still-valid token instead of being locked out for ~120s.
logger.error(f"Captcha verification failed unexpectedly: {e}")
await _release_token(token)
raise CaptchaVerificationError("Captcha verification service unavailable")
# ---------------------------------------------------------------------------
# OAuth pre-redirect cookie helpers
# ---------------------------------------------------------------------------
def _cookie_signing_key() -> bytes:
"""Derive a dedicated HMAC key from USER_AUTH_SECRET.
Using a separate derivation keeps the captcha cookie signature from
being interchangeable with any other token that reuses USER_AUTH_SECRET.
"""
return hashlib.sha256(
f"onyx-captcha-cookie-v1::{USER_AUTH_SECRET}".encode("utf-8")
).digest()
def issue_captcha_cookie_value(now: int | None = None) -> str:
"""Return ``<expiry_epoch>.<hex_hmac>`` proving a recent captcha challenge."""
"""Produce an opaque cookie value encoding 'verified until <expiry>'.
Format: ``<expiry_epoch>.<hex_hmac>``. The presence of a valid
unexpired signature proves the browser solved a captcha challenge
recently on this origin.
"""
issued_at = now if now is not None else int(time.time())
expiry = issued_at + CAPTCHA_COOKIE_TTL_SECONDS
sig = hmac.new(
@@ -260,6 +234,19 @@ def issue_captcha_cookie_value(now: int | None = None) -> str:
def validate_captcha_cookie_value(value: str | None) -> bool:
"""Return True if the cookie value has a valid unexpired signature.
The cookie is NOT a JWT — it's a minimal two-field format produced by
``issue_captcha_cookie_value``:
<expiry_epoch_seconds>.<hex_hmac_sha256>
We split on the first ``.``, parse the expiry as an integer, recompute
the HMAC over the expiry string using the key derived from
USER_AUTH_SECRET, and compare with ``hmac.compare_digest`` to avoid
timing leaks. No base64, no JSON, no claims — anything fancier would
be overkill for a short-lived "verified recently" cookie.
"""
if not value:
return False
parts = value.split(".", 1)

View File

@@ -1,77 +0,0 @@
"""Per-IP rate limit on email/password signup."""
import ipaddress
import time

from fastapi import Request

from onyx.configs.app_configs import SIGNUP_RATE_LIMIT_ENABLED
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.redis.redis_pool import get_async_redis_connection
from onyx.utils.logger import setup_logger
from shared_configs.configs import MULTI_TENANT

logger = setup_logger()

# Fixed-window limiter parameters: at most _PER_IP_PER_HOUR signup attempts
# per client IP within each _BUCKET_SECONDS-long window. The window id is
# baked into the Redis key by `_bucket_key`, so buckets roll over naturally.
_PER_IP_PER_HOUR = 5
_BUCKET_SECONDS = 3600
_REDIS_KEY_PREFIX = "signup_rate:"
def _is_usable_client_ip(ip_str: str) -> bool:
try:
ip = ipaddress.ip_address(ip_str)
except ValueError:
return False
return ip.is_global
def _client_ip(request: Request) -> str:
    """Best-effort client address for rate-limit keying.

    Trusts the first entry of ``X-Forwarded-For`` only when it is a
    globally-routable address (per ``_is_usable_client_ip``); otherwise
    falls back to the socket peer, or the literal "unknown" when no peer
    information is available.
    """
    forwarded = request.headers.get("x-forwarded-for")
    if forwarded:
        candidates = [entry.strip() for entry in forwarded.split(",") if entry.strip()]
        if candidates and _is_usable_client_ip(candidates[0]):
            return candidates[0]
    if request.client:
        return request.client.host
    return "unknown"
def _bucket_key(ip: str) -> str:
    """Redis key naming the current fixed-length window for ``ip``.

    The integer window id changes every ``_BUCKET_SECONDS``, so each window
    gets a fresh counter key and old keys simply age out via their TTL.
    """
    window_id = int(time.time() // _BUCKET_SECONDS)
    return f"{_REDIS_KEY_PREFIX}{ip}:{window_id}"
async def enforce_signup_rate_limit(request: Request) -> None:
    """Raise OnyxError(RATE_LIMITED) when the client exceeds the signup cap."""
    # Only enforced on multi-tenant deployments with the feature flag on;
    # everywhere else this is a no-op.
    if not (MULTI_TENANT and SIGNUP_RATE_LIMIT_ENABLED):
        return
    ip = _client_ip(request)
    key = _bucket_key(ip)
    try:
        redis = await get_async_redis_connection()
        # INCR and EXPIRE are batched in one pipeline round-trip. The expiry
        # is refreshed on every attempt, which at most extends a bucket's
        # lifetime by one window beyond its natural rollover.
        pipe = redis.pipeline()
        pipe.incr(key)
        pipe.expire(key, _BUCKET_SECONDS)
        incr_result, _ = await pipe.execute()
        count = int(incr_result)
    except Exception as e:
        # Fail open: a Redis blip must not block legitimate signups.
        logger.error(f"Signup rate-limit Redis error: {e}")
        return
    if count > _PER_IP_PER_HOUR:
        logger.warning(f"Signup rate limit exceeded for ip={ip} count={count}")
        raise OnyxError(
            OnyxErrorCode.RATE_LIMITED,
            "Too many signup attempts from this network. Please wait before trying again.",
        )


# Underscore-prefixed helpers are exported deliberately. NOTE(review):
# presumably so tests can import them despite the leading underscore —
# confirm against the test suite before trimming this list.
__all__ = [
    "enforce_signup_rate_limit",
    "_PER_IP_PER_HOUR",
    "_BUCKET_SECONDS",
    "_client_ip",
    "_bucket_key",
]

View File

@@ -82,7 +82,6 @@ from onyx.auth.pat import get_hashed_pat_from_request
from onyx.auth.schemas import AuthBackend
from onyx.auth.schemas import UserCreate
from onyx.auth.schemas import UserRole
from onyx.auth.signup_rate_limit import enforce_signup_rate_limit
from onyx.configs.app_configs import AUTH_BACKEND
from onyx.configs.app_configs import AUTH_COOKIE_EXPIRE_TIME_SECONDS
from onyx.configs.app_configs import AUTH_TYPE
@@ -399,11 +398,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
)
raise
if request is not None:
await enforce_signup_rate_limit(request)
# Verify captcha if enabled (for cloud signup protection)
from onyx.auth.captcha import CaptchaAction
from onyx.auth.captcha import CaptchaVerificationError
from onyx.auth.captcha import is_captcha_enabled
from onyx.auth.captcha import verify_captcha_token
@@ -419,7 +414,9 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
captcha_token = request.headers.get("X-Captcha-Token")
try:
await verify_captcha_token(captcha_token or "", CaptchaAction.SIGNUP)
await verify_captcha_token(
captcha_token or "", expected_action="signup"
)
except CaptchaVerificationError as e:
raise OnyxError(OnyxErrorCode.INVALID_INPUT, str(e))

View File

@@ -24,7 +24,6 @@ from onyx.background.indexing.job_client import SimpleJobClient
from onyx.background.indexing.job_client import SimpleJobException
from onyx.background.indexing.run_docfetching import run_docfetching_entrypoint
from onyx.configs.constants import CELERY_INDEXING_WATCHDOG_CONNECTOR_TIMEOUT
from onyx.configs.constants import CELERY_INDEXING_WATCHDOG_SIGTERM_GRACE_SECONDS
from onyx.configs.constants import OnyxCeleryTask
from onyx.connectors.exceptions import ConnectorValidationError
from onyx.db.connector_credential_pair import get_connector_credential_pair_from_id
@@ -520,10 +519,8 @@ def docfetching_proxy_task(
)
last_memory_emit_time = current_time
# if the IndexAttempt row has been marked terminal (failed/canceled/
# succeeded) by anyone else, the spawned subprocess is no longer doing
# work that anyone cares about. Kill it so the worker thread is freed
# up and a fresh attempt can be scheduled with a clean slate.
# if the spawned task is still running, restart the check once again
# if the index attempt is not in a finished status
try:
with get_session_with_current_tenant() as db_session:
index_attempt = get_index_attempt(
@@ -536,7 +533,6 @@ def docfetching_proxy_task(
if not index_attempt.is_finished():
continue
attempt_status = index_attempt.status
except Exception:
task_logger.exception(
log_builder.build(
@@ -545,30 +541,6 @@ def docfetching_proxy_task(
)
continue
task_logger.warning(
log_builder.build(
"Indexing watchdog - IndexAttempt reached terminal status while "
"subprocess was still running; terminating subprocess",
attempt_status=str(attempt_status.value),
pid=str(job.process.pid),
)
)
result.status = (
IndexingWatchdogTerminalStatus.TERMINATED_BY_ATTEMPT_FINALIZED
)
try:
job.terminate_and_wait(CELERY_INDEXING_WATCHDOG_SIGTERM_GRACE_SECONDS)
except Exception:
task_logger.exception(
log_builder.build(
"Indexing watchdog - exception while terminating subprocess "
"after attempt finalization"
)
)
if job.process is not None:
result.exit_code = job.process.exitcode
break
except Exception as e:
result.status = IndexingWatchdogTerminalStatus.WATCHDOG_EXCEPTIONED
if isinstance(e, ConnectorValidationError):
@@ -641,7 +613,7 @@ def docfetching_proxy_task(
)
)
job.terminate_and_wait(CELERY_INDEXING_WATCHDOG_SIGTERM_GRACE_SECONDS)
job.cancel()
elif result.status == IndexingWatchdogTerminalStatus.TERMINATED_BY_ACTIVITY_TIMEOUT:
try:
with get_session_with_current_tenant() as db_session:
@@ -658,16 +630,7 @@ def docfetching_proxy_task(
"Indexing watchdog - transient exception marking index attempt as failed"
)
)
job.terminate_and_wait(CELERY_INDEXING_WATCHDOG_SIGTERM_GRACE_SECONDS)
elif (
result.status == IndexingWatchdogTerminalStatus.TERMINATED_BY_ATTEMPT_FINALIZED
):
# the IndexAttempt row was already marked terminal by whoever finalized it
# (e.g. heartbeat watchdog marking it FAILED, user requesting cancellation,
# successful completion in the spawned process before we noticed). The
# subprocess has been killed in the watchdog loop above; no further DB
# writes are needed here.
pass
job.cancel()
else:
pass

View File

@@ -49,12 +49,6 @@ class IndexingWatchdogTerminalStatus(str, Enum):
# the watchdog terminated the task due to no activity
TERMINATED_BY_ACTIVITY_TIMEOUT = "terminated_by_activity_timeout"
# the watchdog terminated the task because the IndexAttempt reached a terminal
# status (failed/canceled/succeeded) outside of the spawned subprocess. We kill
# the subprocess so it can't keep doing work for an attempt that no longer exists
# logically. The DB row already reflects the real outcome, so we don't touch it.
TERMINATED_BY_ATTEMPT_FINALIZED = "terminated_by_attempt_finalized"
# NOTE: this may actually be the same as SIGKILL, but parsed differently by python
# consolidate once we know more
OUT_OF_MEMORY = "out_of_memory"

View File

@@ -15,7 +15,7 @@ from onyx.background.celery.tasks.shared.RetryDocumentIndex import RetryDocument
from onyx.configs.constants import ONYX_CELERY_BEAT_HEARTBEAT_KEY
from onyx.configs.constants import OnyxCeleryTask
from onyx.db.document import delete_document_by_connector_credential_pair__no_commit
from onyx.db.document import delete_documents_complete
from onyx.db.document import delete_documents_complete__no_commit
from onyx.db.document import fetch_chunk_count_for_document
from onyx.db.document import get_document
from onyx.db.document import get_document_connector_count
@@ -129,10 +129,11 @@ def document_by_cc_pair_cleanup_task(
document_id=document_id,
)
delete_documents_complete(
delete_documents_complete__no_commit(
db_session=db_session,
document_ids=[document_id],
)
db_session.commit()
completion_status = OnyxCeleryTaskCompletionStatus.SUCCEEDED
elif count > 1:

View File

@@ -125,36 +125,6 @@ class SimpleJob:
return True
return False
def terminate_and_wait(self, sigterm_grace_seconds: float) -> bool:
"""Best-effort hard-kill of the spawned process.
Sends SIGTERM, waits up to `sigterm_grace_seconds` for the process to exit,
then escalates to SIGKILL if the process is still alive. Joins after each
signal so the OS can reap the child. Returns True if the process was alive
when this was called (i.e. we actually had to do something).
"""
if self.process is None:
return False
if not self.process.is_alive():
return False
pid = self.process.pid
logger.warning(
f"SimpleJob.terminate_and_wait - sending SIGTERM to job: id={self.id} pid={pid}"
)
self.process.terminate()
self.process.join(timeout=sigterm_grace_seconds)
if self.process.is_alive():
logger.warning(
f"SimpleJob.terminate_and_wait - SIGTERM grace exceeded, sending SIGKILL: "
f"id={self.id} pid={pid} grace={sigterm_grace_seconds}s"
)
self.process.kill()
self.process.join()
return True
@property
def status(self) -> JobStatusType:
if not self.process:

View File

@@ -100,6 +100,7 @@ from onyx.llm.factory import get_llm_for_persona
from onyx.llm.factory import get_llm_token_counter
from onyx.llm.interfaces import LLM
from onyx.llm.interfaces import LLMUserIdentity
from onyx.llm.multi_llm import LLMTimeoutError
from onyx.llm.override_models import LLMOverride
from onyx.llm.request_context import reset_llm_mock_response
from onyx.llm.request_context import set_llm_mock_response
@@ -1277,6 +1278,32 @@ def _run_models(
else:
if item is _MODEL_DONE:
models_remaining -= 1
elif isinstance(item, LLMTimeoutError):
model_llm = setup.llms[model_idx]
error_msg = (
"The LLM took too long to respond. "
"If you're running a local model, try increasing the "
"LLM_SOCKET_READ_TIMEOUT environment variable "
"(current default: 120 seconds)."
)
stack_trace = "".join(
traceback.format_exception(type(item), item, item.__traceback__)
)
if model_llm.config.api_key and len(model_llm.config.api_key) > 2:
stack_trace = stack_trace.replace(
model_llm.config.api_key, "[REDACTED_API_KEY]"
)
yield StreamingError(
error=error_msg,
stack_trace=stack_trace,
error_code="CONNECTION_ERROR",
is_retryable=True,
details={
"model": model_llm.config.model_name,
"provider": model_llm.config.model_provider,
"model_index": model_idx,
},
)
elif isinstance(item, Exception):
# Yield a tagged error for this model but keep the other models running.
# Do NOT decrement models_remaining — _run_model's finally always posts

View File

@@ -775,7 +775,7 @@ LEAVE_CONNECTOR_ACTIVE_ON_INITIALIZATION_FAILURE = (
== "true"
)
DEFAULT_PRUNING_FREQ = 60 * 60 * 24 * 25 # 25 days
DEFAULT_PRUNING_FREQ = 60 * 60 * 24 # Once a day
ALLOW_SIMULTANEOUS_PRUNING = (
os.environ.get("ALLOW_SIMULTANEOUS_PRUNING", "").lower() == "true"
@@ -1155,20 +1155,12 @@ INTEGRATION_TESTS_MODE = os.environ.get("INTEGRATION_TESTS_MODE", "").lower() ==
# Enable captcha verification for new user registration
CAPTCHA_ENABLED = os.environ.get("CAPTCHA_ENABLED", "").lower() == "true"
RECAPTCHA_ENTERPRISE_PROJECT_ID = os.environ.get("RECAPTCHA_ENTERPRISE_PROJECT_ID", "")
RECAPTCHA_ENTERPRISE_API_KEY = os.environ.get("RECAPTCHA_ENTERPRISE_API_KEY", "")
RECAPTCHA_SITE_KEY = os.environ.get("RECAPTCHA_SITE_KEY", "")
RECAPTCHA_HOSTNAME_ALLOWLIST = frozenset(
h.strip()
for h in os.environ.get("RECAPTCHA_HOSTNAME_ALLOWLIST", "").split(",")
if h.strip()
)
RECAPTCHA_SCORE_THRESHOLD = float(os.environ.get("RECAPTCHA_SCORE_THRESHOLD", "0.5"))
# Google reCAPTCHA secret key (server-side validation)
RECAPTCHA_SECRET_KEY = os.environ.get("RECAPTCHA_SECRET_KEY", "")
# Opt-in per-IP rate limit on /auth/register.
SIGNUP_RATE_LIMIT_ENABLED = (
os.environ.get("SIGNUP_RATE_LIMIT_ENABLED", "").lower() == "true"
)
# Minimum score threshold for reCAPTCHA v3 (0.0-1.0, higher = more likely human)
# 0.5 is the recommended default
RECAPTCHA_SCORE_THRESHOLD = float(os.environ.get("RECAPTCHA_SCORE_THRESHOLD", "0.5"))
MOCK_CONNECTOR_FILE_PATH = os.environ.get("MOCK_CONNECTOR_FILE_PATH")

View File

@@ -138,11 +138,6 @@ CELERY_PRIMARY_WORKER_LOCK_TIMEOUT = 120
# to handle hung connectors
CELERY_INDEXING_WATCHDOG_CONNECTOR_TIMEOUT = 3 * 60 * 60 # 3 hours (in seconds)
# how long the indexing watchdog waits for a spawned subprocess to exit after
# SIGTERM before escalating to SIGKILL. Kept short because we only fall into this
# code path once we have already decided the process needs to die.
CELERY_INDEXING_WATCHDOG_SIGTERM_GRACE_SECONDS = 10 # seconds
# soft timeout for the lock taken by the indexing connector run
# allows the lock to eventually expire if the managing code around it dies
# if we can get callbacks as object bytes download, we could lower this a lot.
@@ -644,15 +639,12 @@ REDIS_SOCKET_KEEPALIVE_OPTIONS = {}
REDIS_SOCKET_KEEPALIVE_OPTIONS[socket.TCP_KEEPINTVL] = 15
REDIS_SOCKET_KEEPALIVE_OPTIONS[socket.TCP_KEEPCNT] = 3
# TCP_KEEPALIVE only exists on Darwin and TCP_KEEPIDLE only exists on Linux/BSD.
# getattr keeps both branches type-checkable on either platform; any per-line
# ty suppression (scoped or bare) would itself be flagged as unused on the
# platform where the attribute actually resolves, since ty analyzes one
# platform at a time and can't model cross-platform conditional unused-ignores.
if platform.system() == "Darwin":
REDIS_SOCKET_KEEPALIVE_OPTIONS[getattr(socket, "TCP_KEEPALIVE")] = 60
REDIS_SOCKET_KEEPALIVE_OPTIONS[
socket.TCP_KEEPALIVE # ty: ignore[unresolved-attribute]
] = 60
else:
REDIS_SOCKET_KEEPALIVE_OPTIONS[getattr(socket, "TCP_KEEPIDLE")] = 60
REDIS_SOCKET_KEEPALIVE_OPTIONS[socket.TCP_KEEPIDLE] = 60
class OnyxCallTypes(str, Enum):

View File

@@ -52,7 +52,6 @@ from onyx.db.utils import DocumentRow
from onyx.db.utils import model_to_dict
from onyx.db.utils import SortOrder
from onyx.document_index.interfaces import DocumentMetadata
from onyx.file_store.staging import delete_files_best_effort
from onyx.kg.models import KGStage
from onyx.server.documents.models import ConnectorCredentialPairIdentifier
from onyx.utils.logger import setup_logger
@@ -928,22 +927,6 @@ def delete_documents__no_commit(db_session: Session, document_ids: list[str]) ->
db_session.execute(delete(DbDocument).where(DbDocument.id.in_(document_ids)))
def get_file_ids_for_document_ids(
db_session: Session,
document_ids: list[str],
) -> list[str]:
"""Return the non-null `file_id` values attached to the given documents."""
if not document_ids:
return []
rows = (
db_session.query(DbDocument.file_id)
.filter(DbDocument.id.in_(document_ids))
.filter(DbDocument.file_id.isnot(None))
.all()
)
return [row.file_id for row in rows if row.file_id is not None]
def delete_documents_complete__no_commit(
db_session: Session, document_ids: list[str]
) -> None:
@@ -987,27 +970,6 @@ def delete_documents_complete__no_commit(
delete_documents__no_commit(db_session, document_ids)
def delete_documents_complete(
db_session: Session,
document_ids: list[str],
) -> None:
"""Fully remove documents AND best-effort delete their attached files.
To be used when a document is finished and should be disposed of.
Removes the row and the potentially associated file.
"""
file_ids_to_delete = get_file_ids_for_document_ids(
db_session=db_session,
document_ids=document_ids,
)
delete_documents_complete__no_commit(
db_session=db_session,
document_ids=document_ids,
)
db_session.commit()
delete_files_best_effort(file_ids_to_delete)
def delete_all_documents_for_connector_credential_pair(
db_session: Session,
connector_id: int,
@@ -1039,9 +1001,10 @@ def delete_all_documents_for_connector_credential_pair(
if not document_ids:
break
delete_documents_complete(
delete_documents_complete__no_commit(
db_session=db_session, document_ids=list(document_ids)
)
db_session.commit()
if time.monotonic() - start_time > timeout:
raise RuntimeError("Timeout reached while deleting documents")

View File

@@ -8,13 +8,14 @@ from sqlalchemy.orm import selectinload
from sqlalchemy.orm import Session
from onyx.configs.constants import FederatedConnectorSource
from onyx.configs.constants import MASK_CREDENTIAL_CHAR
from onyx.configs.constants import MASK_CREDENTIAL_LONG_RE
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.models import DocumentSet
from onyx.db.models import FederatedConnector
from onyx.db.models import FederatedConnector__DocumentSet
from onyx.db.models import FederatedConnectorOAuthToken
from onyx.federated_connectors.factory import get_federated_connector
from onyx.utils.encryption import reject_masked_credentials
from onyx.utils.logger import setup_logger
logger = setup_logger()
@@ -46,6 +47,23 @@ def fetch_all_federated_connectors_parallel() -> list[FederatedConnector]:
return fetch_all_federated_connectors(db_session)
def _reject_masked_credentials(credentials: dict[str, Any]) -> None:
"""Raise if any credential string value contains mask placeholder characters.
mask_string() has two output formats:
- Short strings (< 14 chars): "••••••••••••" (U+2022 BULLET)
- Long strings (>= 14 chars): "abcd...wxyz" (first4 + "..." + last4)
Both must be rejected.
"""
for key, val in credentials.items():
if isinstance(val, str) and (
MASK_CREDENTIAL_CHAR in val or MASK_CREDENTIAL_LONG_RE.match(val)
):
raise ValueError(
f"Credential field '{key}' contains masked placeholder characters. Please provide the actual credential value."
)
def validate_federated_connector_credentials(
source: FederatedConnectorSource,
credentials: dict[str, Any],
@@ -67,7 +85,7 @@ def create_federated_connector(
config: dict[str, Any] | None = None,
) -> FederatedConnector:
"""Create a new federated connector with credential and config validation."""
reject_masked_credentials(credentials)
_reject_masked_credentials(credentials)
# Validate credentials before creating
if not validate_federated_connector_credentials(source, credentials):
@@ -280,7 +298,7 @@ def update_federated_connector(
)
if credentials is not None:
reject_masked_credentials(credentials)
_reject_masked_credentials(credentials)
# Validate credentials before updating
if not validate_federated_connector_credentials(

View File

@@ -2100,6 +2100,10 @@ class SearchSettings(Base):
String, nullable=True
)
multilingual_expansion: Mapped[list[str]] = mapped_column(
postgresql.ARRAY(String), default=[]
)
cloud_provider: Mapped["CloudEmbeddingProvider"] = relationship(
"CloudEmbeddingProvider",
back_populates="search_settings",
@@ -4589,25 +4593,6 @@ class TenantAnonymousUserPath(Base):
)
# Lifetime invite counter per tenant. Incremented atomically on every
# invite reservation; never decremented — removals do not free quota, so
# loops of invite → remove → invite cannot bypass the trial cap.
class TenantInviteCounter(Base):
__tablename__ = "tenant_invite_counter"
__table_args__ = {"schema": "public"}
tenant_id: Mapped[str] = mapped_column(String, primary_key=True)
total_invites_sent: Mapped[int] = mapped_column(
Integer, nullable=False, default=0, server_default="0"
)
updated_at: Mapped[datetime.datetime] = mapped_column(
DateTime(timezone=True),
server_default=func.now(),
onupdate=func.now(),
nullable=False,
)
class MCPServer(Base):
"""Model for storing MCP server configurations"""

View File

@@ -6,6 +6,7 @@ from sqlalchemy.orm import Session
from onyx.configs.model_configs import DEFAULT_DOCUMENT_ENCODER_MODEL
from onyx.configs.model_configs import DOCUMENT_ENCODER_MODEL
from onyx.context.search.models import SavedSearchSettings
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.llm import fetch_embedding_provider
from onyx.db.models import CloudEmbeddingProvider
from onyx.db.models import IndexAttempt
@@ -176,6 +177,17 @@ def get_all_search_settings(db_session: Session) -> list[SearchSettings]:
return list(all_settings)
def get_multilingual_expansion(db_session: Session | None = None) -> list[str]:
if db_session is None:
with get_session_with_current_tenant() as db_session:
search_settings = get_current_search_settings(db_session)
else:
search_settings = get_current_search_settings(db_session)
if not search_settings:
return []
return search_settings.multilingual_expansion
def update_search_settings(
current_settings: SearchSettings,
updated_settings: SavedSearchSettings,

View File

@@ -1,62 +0,0 @@
from sqlalchemy import func
from sqlalchemy import update
from sqlalchemy.dialects.postgresql import insert as pg_insert
from sqlalchemy.orm import Session
from onyx.db.models import TenantInviteCounter
def reserve_trial_invites(
shared_session: Session,
tenant_id: str,
num_invites: int,
) -> int:
"""Atomically increment the tenant's invite counter by `num_invites`.
Returns the post-increment total. The caller is expected to compare
against the trial cap and rollback the session if the total exceeds
it — the UPSERT's UPDATE leg holds a row-level lock on `tenant_id`
for the duration of the transaction, serializing concurrent reservers
for the same tenant.
"""
stmt = pg_insert(TenantInviteCounter).values(
tenant_id=tenant_id,
total_invites_sent=num_invites,
)
stmt = stmt.on_conflict_do_update(
index_elements=[TenantInviteCounter.tenant_id],
set_={
"total_invites_sent": TenantInviteCounter.total_invites_sent
+ stmt.excluded.total_invites_sent,
"updated_at": func.now(),
},
).returning(TenantInviteCounter.total_invites_sent)
return int(shared_session.execute(stmt).scalar_one())
def release_trial_invites(
shared_session: Session,
tenant_id: str,
num_invites: int,
) -> None:
"""Compensating decrement of the counter by `num_invites`, clamped at 0.
Only called when a downstream step (KV write, billing register, etc.)
fails after the counter has already been incremented, so the counter
tracks invites that actually reached the system rather than merely
reserved. The counter is monotonic with respect to user actions — no
user-facing endpoint decrements it — but it is reconciled downward by
this function when the system fails mid-flow. No-op if the tenant has
no counter row.
"""
stmt = (
update(TenantInviteCounter)
.where(TenantInviteCounter.tenant_id == tenant_id)
.values(
total_invites_sent=func.greatest(
TenantInviteCounter.total_invites_sent - num_invites, 0
),
updated_at=func.now(),
)
)
shared_session.execute(stmt)

View File

@@ -15,7 +15,6 @@ from onyx.db.models import ChatSessionSharedStatus
from onyx.db.models import FileRecord
from onyx.db.models import Persona
from onyx.db.models import Project__UserFile
from onyx.db.models import ToolCall
from onyx.db.models import UserFile
@@ -138,9 +137,6 @@ def user_can_access_chat_file(file_id: str, user_id: UUID, db_session: Session)
- The `file_id` appears in a `ChatMessage.files` descriptor of a chat
session the user owns or a session publicly shared via
`ChatSessionSharedStatus.PUBLIC`.
- The `file_id` appears in a `ToolCall.generated_images` entry on a chat
session the user owns or a publicly shared session (image-generation
outputs are persisted there, not on `ChatMessage.files`).
"""
owns_user_file = db_session.query(
select(UserFile.id)
@@ -182,22 +178,7 @@ def user_can_access_chat_file(file_id: str, user_id: UUID, db_session: Session)
)
.limit(1)
)
if db_session.execute(chat_file_stmt).first() is not None:
return True
generated_image_stmt = (
select(ToolCall.id)
.join(ChatSession, ToolCall.chat_session_id == ChatSession.id)
.where(ToolCall.generated_images.op("@>")([{"file_id": file_id}]))
.where(
or_(
ChatSession.user_id == user_id,
ChatSession.shared_status == ChatSessionSharedStatus.PUBLIC,
)
)
.limit(1)
)
return db_session.execute(generated_image_stmt).first() is not None
return db_session.execute(chat_file_stmt).first() is not None
def get_file_ids_by_user_file_ids(

View File

@@ -68,7 +68,6 @@ class OnyxErrorCode(Enum):
# ------------------------------------------------------------------
RATE_LIMITED = ("RATE_LIMITED", 429)
SEAT_LIMIT_EXCEEDED = ("SEAT_LIMIT_EXCEEDED", 402)
TRIAL_INVITE_LIMIT_EXCEEDED = ("TRIAL_INVITE_LIMIT_EXCEEDED", 403)
# ------------------------------------------------------------------
# Payload (413)

View File

@@ -66,22 +66,6 @@ def build_raw_file_callback(
return _callback
def delete_files_best_effort(file_ids: list[str]) -> None:
"""Delete a list of files from the file store, logging individual
failures rather than raising.
"""
if not file_ids:
return
file_store = get_default_file_store()
for file_id in file_ids:
try:
file_store.delete_file(file_id, error_on_missing=False)
except Exception:
logger.exception(
f"Failed to delete file_id={file_id} during document cleanup"
)
def promote_staged_file(db_session: Session, file_id: str) -> None:
"""Mark a previously-staged file as `FileOrigin.CONNECTOR`."""
update_filerecord_origin(

View File

@@ -290,7 +290,11 @@ def litellm_exception_to_error_msg(
error_code = "BUDGET_EXCEEDED"
is_retryable = False
elif isinstance(core_exception, Timeout):
error_msg = "Request timed out: The operation took too long to complete. Please try again."
error_msg = (
"The LLM took too long to respond. "
"If you're running a local model, try increasing the "
"LLM_SOCKET_READ_TIMEOUT environment variable (current default: 120 seconds)."
)
error_code = "CONNECTION_ERROR"
is_retryable = True
elif isinstance(core_exception, APIError):
@@ -531,13 +535,11 @@ def llm_max_input_tokens(
)
return GEN_AI_MODEL_FALLBACK_MAX_TOKENS
max_input_tokens = model_obj.get("max_input_tokens")
if max_input_tokens is not None:
return max_input_tokens
if "max_input_tokens" in model_obj:
return model_obj["max_input_tokens"]
max_tokens = model_obj.get("max_tokens")
if max_tokens is not None:
return max_tokens
if "max_tokens" in model_obj:
return model_obj["max_tokens"]
logger.warning(
f"No max tokens found for '{model_name}'. Falling back to {GEN_AI_MODEL_FALLBACK_MAX_TOKENS} tokens."
@@ -563,14 +565,12 @@ def get_llm_max_output_tokens(
)
return default_output_tokens
max_output_tokens = model_obj.get("max_output_tokens")
if max_output_tokens is not None:
return max_output_tokens
if "max_output_tokens" in model_obj:
return model_obj["max_output_tokens"]
# Fallback to a fraction of max_tokens if max_output_tokens is not specified
max_tokens = model_obj.get("max_tokens")
if max_tokens is not None:
return int(max_tokens * 0.1)
if "max_tokens" in model_obj:
return int(model_obj["max_tokens"] * 0.1)
logger.warning(
f"No max output tokens found for '{model_name}'. Falling back to {default_output_tokens} output tokens."
@@ -667,12 +667,10 @@ def get_bedrock_token_limit(model_id: str) -> int:
for key in [f"bedrock/{model_id}", model_id]:
if key in model_map:
model_info = model_map[key]
max_input_tokens = model_info.get("max_input_tokens")
if max_input_tokens is not None:
return max_input_tokens
max_tokens = model_info.get("max_tokens")
if max_tokens is not None:
return max_tokens
if "max_input_tokens" in model_info:
return model_info["max_input_tokens"]
if "max_tokens" in model_info:
return model_info["max_tokens"]
except Exception:
pass # Fall through to mapping

View File

@@ -65,7 +65,6 @@ from onyx.file_store.file_store import get_default_file_store
from onyx.hooks.registry import validate_registry
from onyx.server.api_key.api import router as api_key_router
from onyx.server.auth.captcha_api import CaptchaCookieMiddleware
from onyx.server.auth.captcha_api import LoginCaptchaMiddleware
from onyx.server.auth.captcha_api import router as captcha_router
from onyx.server.auth_check import check_router_auth
from onyx.server.documents.cc_pair import router as cc_pair_router
@@ -663,7 +662,6 @@ def get_application(lifespan_override: Lifespan | None = None) -> FastAPI:
# before the Google redirect. No-op unless is_captcha_enabled() is true
# (requires CAPTCHA_ENABLED=true and RECAPTCHA_SECRET_KEY set).
application.add_middleware(CaptchaCookieMiddleware)
application.add_middleware(LoginCaptchaMiddleware)
if LOG_ENDPOINT_LATENCY:
add_latency_logging_middleware(application, logger)

View File

@@ -1,15 +1,15 @@
"""API + middleware for the reCAPTCHA cookie and header flows.
"""API + middleware for the reCAPTCHA pre-OAuth cookie flow.
Three entry points are gated:
The frontend solves a reCAPTCHA v3 challenge before clicking "Continue
with Google", POSTs the token to ``/auth/captcha/oauth-verify``, and the
backend verifies it with Google and sets a signed HttpOnly cookie. The
cookie rides along on the subsequent Google OAuth callback redirect,
where ``CaptchaCookieMiddleware`` checks it. Without this cookie flow
the OAuth callback is un-gated because Google (not our frontend) issues
the request and we cannot attach a header at the redirect hop.
1. ``/auth/oauth/callback`` — the frontend pre-verifies a token and gets
a signed HttpOnly cookie (``/auth/captcha/oauth-verify``) that rides
along on the Google redirect, where ``CaptchaCookieMiddleware``
checks it.
2. ``/auth/login`` — ``LoginCaptchaMiddleware`` verifies an
``X-Captcha-Token`` header before the fastapi-users handler runs.
3. ``/auth/register`` — captcha is enforced inside
``UserManager.create`` via the body's ``captcha_token`` field.
Email/password signup has its own captcha enforcement inside
``UserManager.create``, so this module only gates the OAuth callback.
"""
from fastapi import APIRouter
@@ -20,7 +20,6 @@ from starlette.middleware.base import BaseHTTPMiddleware
from starlette.middleware.base import RequestResponseEndpoint
from onyx.auth.captcha import CAPTCHA_COOKIE_NAME
from onyx.auth.captcha import CaptchaAction
from onyx.auth.captcha import CaptchaVerificationError
from onyx.auth.captcha import is_captcha_enabled
from onyx.auth.captcha import issue_captcha_cookie_value
@@ -67,7 +66,7 @@ async def verify_oauth_captcha(
return OAuthCaptchaVerifyResponse(ok=True)
try:
await verify_captcha_token(body.token, CaptchaAction.OAUTH)
await verify_captcha_token(body.token, expected_action="oauth")
except CaptchaVerificationError as exc:
raise OnyxError(OnyxErrorCode.UNAUTHORIZED, str(exc))
@@ -117,34 +116,3 @@ class CaptchaCookieMiddleware(BaseHTTPMiddleware):
if is_guarded_callback:
response.delete_cookie(CAPTCHA_COOKIE_NAME, path="/")
return response
GUARDED_LOGIN_PATHS = frozenset({"/auth/login"})
LOGIN_CAPTCHA_HEADER = "X-Captcha-Token"
class LoginCaptchaMiddleware(BaseHTTPMiddleware):
"""Reject ``/auth/login`` requests without a valid captcha token.
Enforced before the fastapi-users handler runs, so credential-stuffing
attempts cost the attacker a fresh captcha token per try. No-op when
``is_captcha_enabled()`` is false.
"""
async def dispatch(
self, request: Request, call_next: RequestResponseEndpoint
) -> Response:
if (
request.method == "POST"
and request.url.path in GUARDED_LOGIN_PATHS
and is_captcha_enabled()
):
token = request.headers.get(LOGIN_CAPTCHA_HEADER, "")
try:
await verify_captcha_token(token, CaptchaAction.LOGIN)
except CaptchaVerificationError as exc:
return onyx_error_to_json_response(
OnyxError(OnyxErrorCode.UNAUTHORIZED, str(exc))
)
return await call_next(request)

View File

@@ -82,7 +82,6 @@ from onyx.tools.tool_implementations.mcp.mcp_client import discover_mcp_tools
from onyx.tools.tool_implementations.mcp.mcp_client import initialize_mcp_client
from onyx.tools.tool_implementations.mcp.mcp_client import log_exception_group
from onyx.utils.encryption import mask_string
from onyx.utils.encryption import reject_masked_credentials
from onyx.utils.logger import setup_logger
logger = setup_logger()
@@ -97,65 +96,30 @@ def _truncate_description(description: str | None, max_length: int = 500) -> str
return description[: max_length - 3] + "..."
def _resolve_oauth_credentials(
*,
# TODO: Replace mask-comparison approach with an explicit Unset sentinel from the
# frontend indicating whether each credential field was actually modified. The current
# approach is brittle (e.g. short credentials produce a fixed-length mask that could
# collide) and mutates request values, which is surprising. The frontend should signal
# "unchanged" vs "new value" directly rather than relying on masked-string equality.
def _restore_masked_oauth_credentials(
request_client_id: str | None,
request_client_id_changed: bool,
request_client_secret: str | None,
request_client_secret_changed: bool,
existing_client: OAuthClientInformationFull | None,
existing_client: OAuthClientInformationFull,
) -> tuple[str | None, str | None]:
"""Pick the effective client_id / client_secret for an upsert/connect.
Mirrors the LLM-provider `api_key_changed` pattern: when the frontend
flags a field as unchanged, ignore whatever value it sent (it is most
likely a masked placeholder) and reuse the stored value. When the
frontend flags a field as changed, take the request value as-is, but
defensively reject masked placeholders so a buggy client can't write
a mask to the database.
"""
resolved_id = request_client_id
if not request_client_id_changed:
resolved_id = existing_client.client_id if existing_client else None
elif resolved_id:
reject_masked_credentials({"oauth_client_id": resolved_id})
resolved_secret = request_client_secret
if not request_client_secret_changed:
resolved_secret = existing_client.client_secret if existing_client else None
elif resolved_secret:
reject_masked_credentials({"oauth_client_secret": resolved_secret})
return resolved_id, resolved_secret
def _build_oauth_admin_config_data(
*,
client_id: str | None,
client_secret: str | None,
) -> MCPConnectionData:
"""Construct the admin connection config payload for an OAuth client.
A public client legitimately has no `client_secret`, so we only require
a `client_id` to seed `client_info`. When no client_id is available we
fall through to an empty config (the OAuth provider will rely on
Dynamic Client Registration to obtain credentials).
"""
config_data = MCPConnectionData(headers={})
if not client_id:
return config_data
token_endpoint_auth_method = "client_secret_post" if client_secret else "none"
client_info = OAuthClientInformationFull(
client_id=client_id,
client_secret=client_secret,
redirect_uris=[AnyUrl(f"{WEB_DOMAIN}/mcp/oauth/callback")],
grant_types=["authorization_code", "refresh_token"],
response_types=["code"],
scope=REQUESTED_SCOPE, # TODO(evan): allow specifying scopes?
token_endpoint_auth_method=token_endpoint_auth_method,
)
config_data[MCPOAuthKeys.CLIENT_INFO.value] = client_info.model_dump(mode="json")
return config_data
"""If the frontend sent back masked credentials, restore the real stored values."""
if (
request_client_id
and existing_client.client_id
and request_client_id == mask_string(existing_client.client_id)
):
request_client_id = existing_client.client_id
if (
request_client_secret
and existing_client.client_secret
and request_client_secret == mask_string(existing_client.client_secret)
):
request_client_secret = existing_client.client_secret
return request_client_id, request_client_secret
router = APIRouter(prefix="/mcp")
@@ -456,10 +420,8 @@ async def _connect_oauth(
detail=f"Server was configured with authentication type {auth_type_str}",
)
# Resolve the effective OAuth credentials, falling back to the stored
# values for any field the frontend marked as unchanged. This protects
# against the resubmit case where the form replays masked placeholders.
existing_client: OAuthClientInformationFull | None = None
# If the frontend sent back masked credentials (unchanged by the user),
# restore the real stored values so we don't overwrite them with masks.
if mcp_server.admin_connection_config:
existing_data = extract_connection_data(
mcp_server.admin_connection_config, apply_mask=False
@@ -469,19 +431,31 @@ async def _connect_oauth(
existing_client = OAuthClientInformationFull.model_validate(
existing_client_raw
)
(
request.oauth_client_id,
request.oauth_client_secret,
) = _restore_masked_oauth_credentials(
request.oauth_client_id,
request.oauth_client_secret,
existing_client,
)
request.oauth_client_id, request.oauth_client_secret = _resolve_oauth_credentials(
request_client_id=request.oauth_client_id,
request_client_id_changed=request.oauth_client_id_changed,
request_client_secret=request.oauth_client_secret,
request_client_secret_changed=request.oauth_client_secret_changed,
existing_client=existing_client,
)
config_data = _build_oauth_admin_config_data(
client_id=request.oauth_client_id,
client_secret=request.oauth_client_secret,
)
# Create admin config with client info if provided
config_data = MCPConnectionData(headers={})
if request.oauth_client_id and request.oauth_client_secret:
client_info = OAuthClientInformationFull(
client_id=request.oauth_client_id,
client_secret=request.oauth_client_secret,
redirect_uris=[AnyUrl(f"{WEB_DOMAIN}/mcp/oauth/callback")],
grant_types=["authorization_code", "refresh_token"],
response_types=["code"],
scope=REQUESTED_SCOPE, # TODO: allow specifying scopes?
# Must specify auth method so client_secret is actually sent during token exchange
token_endpoint_auth_method="client_secret_post",
)
config_data[MCPOAuthKeys.CLIENT_INFO.value] = client_info.model_dump(
mode="json"
)
if mcp_server.admin_connection_config_id is None:
if not is_admin:
@@ -1430,20 +1404,17 @@ def _upsert_mcp_server(
if client_info_raw:
client_info = OAuthClientInformationFull.model_validate(client_info_raw)
# Resolve the effective OAuth credentials, falling back to the stored
# values for any field the frontend marked as unchanged. This protects
# the change-detection comparison below from spurious diffs caused by
# masked placeholders being replayed.
# If the frontend sent back masked credentials (unchanged by the user),
# restore the real stored values so the comparison below sees no change
# and the credentials aren't overwritten with masked strings.
if client_info and request.auth_type == MCPAuthenticationType.OAUTH:
(
request.oauth_client_id,
request.oauth_client_secret,
) = _resolve_oauth_credentials(
request_client_id=request.oauth_client_id,
request_client_id_changed=request.oauth_client_id_changed,
request_client_secret=request.oauth_client_secret,
request_client_secret_changed=request.oauth_client_secret_changed,
existing_client=client_info,
) = _restore_masked_oauth_credentials(
request.oauth_client_id,
request.oauth_client_secret,
client_info,
)
changing_connection_config = (

View File

@@ -75,23 +75,6 @@ class MCPToolCreateRequest(BaseModel):
)
oauth_client_id: Optional[str] = Field(None, description="OAuth client ID")
oauth_client_secret: Optional[str] = Field(None, description="OAuth client secret")
oauth_client_id_changed: bool = Field(
default=False,
description=(
"True if `oauth_client_id` was edited by the user. When False on an "
"update of an existing server, the stored value is reused and the "
"request value is ignored. Defaults to False for backward "
"compatibility with older clients that don't send the flag."
),
)
oauth_client_secret_changed: bool = Field(
default=False,
description=(
"True if `oauth_client_secret` was edited by the user. When False on "
"an update of an existing server, the stored value is reused and the "
"request value is ignored."
),
)
transport: MCPTransport | None = Field(
None, description="MCP transport type (STREAMABLE_HTTP or SSE)"
)
@@ -221,21 +204,6 @@ class MCPUserOAuthConnectRequest(BaseModel):
oauth_client_secret: str | None = Field(
None, description="OAuth client secret (optional for DCR)"
)
oauth_client_id_changed: bool = Field(
default=False,
description=(
"True if `oauth_client_id` was edited by the user. When False, "
"the stored value is reused and the request value is ignored. "
"Defaults to False for backward compatibility."
),
)
oauth_client_secret_changed: bool = Field(
default=False,
description=(
"True if `oauth_client_secret` was edited by the user. When False, "
"the stored value is reused and the request value is ignored."
),
)
@model_validator(mode="after")
def validate_return_path(self) -> "MCPUserOAuthConnectRequest":

View File

@@ -51,14 +51,11 @@ from onyx.configs.constants import PUBLIC_API_TAGS
from onyx.db.api_key import is_api_key_email_address
from onyx.db.auth import get_live_users_count
from onyx.db.engine.sql_engine import get_session
from onyx.db.engine.sql_engine import get_session_with_shared_schema
from onyx.db.enums import AccountType
from onyx.db.enums import Permission
from onyx.db.enums import UserFileStatus
from onyx.db.models import User
from onyx.db.models import UserFile
from onyx.db.tenant_invite_counter import release_trial_invites
from onyx.db.tenant_invite_counter import reserve_trial_invites
from onyx.db.user_preferences import activate_user
from onyx.db.user_preferences import deactivate_user
from onyx.db.user_preferences import get_all_user_assistant_specific_configs
@@ -469,31 +466,15 @@ def bulk_invite_users(
if e not in existing_users and e not in already_invited
]
# Check seat availability for new users. Must run before the counter
# reservation below — a seat-limit failure must not burn trial quota.
if emails_needing_seats:
enforce_seat_limit(db_session, seats_needed=len(emails_needing_seats))
# Enforce the trial invite cap via the monotonic `tenant_invite_counter`.
# The UPSERT holds a row-level lock on `tenant_id` during the UPDATE, so
# concurrent bulk-invite flows for the same tenant are serialized without
# an advisory lock. On reject we ROLLBACK so the reservation does not stick.
trial_invite_reservation = 0
# Limit bulk invites for trial tenants to prevent email spam
# Only count new invites, not re-invites of existing users
if MULTI_TENANT and is_tenant_on_trial_fn(tenant_id):
num_new_invites = len(emails_needing_seats)
if num_new_invites > 0:
with get_session_with_shared_schema() as shared_session:
new_total = reserve_trial_invites(
shared_session, tenant_id, num_new_invites
)
if new_total > NUM_FREE_TRIAL_USER_INVITES:
shared_session.rollback()
raise OnyxError(
OnyxErrorCode.TRIAL_INVITE_LIMIT_EXCEEDED,
"You have hit your invite limit. Please upgrade for unlimited invites.",
)
shared_session.commit()
trial_invite_reservation = num_new_invites
current_invited = len(already_invited)
if current_invited + len(emails_needing_seats) > NUM_FREE_TRIAL_USER_INVITES:
raise HTTPException(
status_code=403,
detail="You have hit your invite limit. Please upgrade for unlimited invites.",
)
enforce_invite_rate_limit(
redis_client=get_redis_client(tenant_id=tenant_id),
admin_user_id=current_user.id,
@@ -501,6 +482,10 @@ def bulk_invite_users(
tenant_id=tenant_id,
)
# Check seat availability for new users
if emails_needing_seats:
enforce_seat_limit(db_session, seats_needed=len(emails_needing_seats))
if MULTI_TENANT:
try:
fetch_ee_implementation_or_noop(
@@ -513,29 +498,7 @@ def bulk_invite_users(
initial_invited_users = get_invited_users()
all_emails = list(set(new_invited_emails) | set(initial_invited_users))
try:
number_of_invited_users = write_invited_users(all_emails)
except Exception:
# KV write failed after the counter already reserved slots. Release
# the reservation so the counter tracks invites that actually reached
# the store. Compensation failures are logged and never re-raised —
# the original KV error is what the caller needs to see.
if trial_invite_reservation > 0:
try:
with get_session_with_shared_schema() as comp_session:
release_trial_invites(
comp_session, tenant_id, trial_invite_reservation
)
comp_session.commit()
except Exception as comp_err:
logger.error(
"tenant_invite_counter release failed for tenant=%s, "
"slots burned=%d: %s",
tenant_id,
trial_invite_reservation,
comp_err,
)
raise
number_of_invited_users = write_invited_users(all_emails)
# send out email invitations only to new users (not already invited or existing)
if not ENABLE_EMAIL_INVITES:
@@ -563,31 +526,10 @@ def bulk_invite_users(
logger.info(
"Reverting changes: removing users from tenant and resetting invited users"
)
try:
write_invited_users(initial_invited_users) # Reset to original state
fetch_ee_implementation_or_noop(
"onyx.server.tenants.user_mapping", "remove_users_from_tenant", None
)(new_invited_emails, tenant_id)
finally:
# Release the counter reservation regardless of whether the KV /
# user-mapping reverts above succeeded — otherwise a double-fault
# (billing failure + revert failure) permanently inflates the
# counter for an invite batch the system considers rolled back.
if trial_invite_reservation > 0:
try:
with get_session_with_shared_schema() as comp_session:
release_trial_invites(
comp_session, tenant_id, trial_invite_reservation
)
comp_session.commit()
except Exception as comp_err:
logger.error(
"tenant_invite_counter release failed for tenant=%s, "
"slots burned=%d: %s",
tenant_id,
trial_invite_reservation,
comp_err,
)
write_invited_users(initial_invited_users) # Reset to original state
fetch_ee_implementation_or_noop(
"onyx.server.tenants.user_mapping", "remove_users_from_tenant", None
)(new_invited_emails, tenant_id)
raise e
return BulkInviteResponse(

View File

@@ -13,7 +13,7 @@ from onyx.configs.constants import PUBLIC_API_TAGS
from onyx.connectors.models import Document
from onyx.connectors.models import IndexAttemptMetadata
from onyx.db.connector_credential_pair import get_connector_credential_pair_from_id
from onyx.db.document import delete_documents_complete
from onyx.db.document import delete_documents_complete__no_commit
from onyx.db.document import get_document
from onyx.db.document import get_documents_by_cc_pair
from onyx.db.document import get_ingestion_documents
@@ -210,4 +210,5 @@ def delete_ingestion_doc(
)
# Delete from database
delete_documents_complete(db_session, [document_id])
delete_documents_complete__no_commit(db_session, [document_id])
db_session.commit()

View File

@@ -4,7 +4,6 @@ from enum import Enum
from pydantic import BaseModel
from pydantic import Field
from onyx.configs.app_configs import DEFAULT_PRUNING_FREQ
from onyx.configs.app_configs import DEFAULT_USER_FILE_MAX_UPLOAD_SIZE_MB
from onyx.configs.app_configs import DISABLE_VECTOR_DB
from onyx.configs.app_configs import MAX_ALLOWED_UPLOAD_SIZE_MB
@@ -126,7 +125,6 @@ class UserSettings(Settings):
# Hard ceiling for user_file_max_upload_size_mb, derived from env var.
max_allowed_upload_size_mb: int = MAX_ALLOWED_UPLOAD_SIZE_MB
# Factory defaults so the frontend can show a "restore default" button.
default_pruning_freq: int = DEFAULT_PRUNING_FREQ
default_user_file_max_upload_size_mb: int = DEFAULT_USER_FILE_MAX_UPLOAD_SIZE_MB
default_file_token_count_threshold_k: int = Field(
default_factory=lambda: (

View File

@@ -108,6 +108,12 @@ def setup_onyx(
logger.notice(f'Query embedding prefix: "{search_settings.query_prefix}"')
logger.notice(f'Passage embedding prefix: "{search_settings.passage_prefix}"')
if search_settings:
if search_settings.multilingual_expansion:
logger.notice(
f"Multilingual query expansion is enabled with {search_settings.multilingual_expansion}."
)
# setup Postgres with default credential, llm providers, etc.
setup_postgres(db_session)

View File

@@ -21,9 +21,10 @@ def explicit_tool_calling_supported(model_provider: str, model_name: str) -> boo
model_name=model_name,
)
if not model_obj:
return False
return bool(model_obj.get("supports_function_calling"))
model_supports = (
model_obj.get("supports_function_calling", False) if model_obj else False
)
return model_supports
def compute_tool_tokens(tool: Tool, llm_tokenizer: BaseTokenizer) -> int:

View File

@@ -1,8 +1,6 @@
from typing import Any
from onyx.configs.app_configs import ENCRYPTION_KEY_SECRET
from onyx.configs.constants import MASK_CREDENTIAL_CHAR
from onyx.configs.constants import MASK_CREDENTIAL_LONG_RE
from onyx.connectors.google_utils.shared_constants import (
DB_CREDENTIALS_AUTHENTICATION_METHOD,
)
@@ -44,52 +42,6 @@ def mask_string(sensitive_str: str) -> str:
return f"{sensitive_str[:visible_start]}...{sensitive_str[-visible_end:]}"
def is_masked_credential(value: str) -> bool:
"""Return True if the string looks like a `mask_string` placeholder.
`mask_string` has two output formats:
- Short strings (< 14 chars): "••••••••••••" (U+2022 BULLET)
- Long strings (>= 14 chars): "abcd...wxyz" (first4 + "..." + last4)
"""
return MASK_CREDENTIAL_CHAR in value or bool(MASK_CREDENTIAL_LONG_RE.match(value))
def reject_masked_credentials(credentials: dict[str, Any]) -> None:
"""Raise if any credential string value contains mask placeholder characters.
Used as a defensive net at write boundaries so that masked values
round-tripped from `mask_string` are never persisted as real credentials.
Recurses into nested dicts and lists to stay symmetric with
`mask_credential_dict`, which masks nested string values. The error
message includes a dotted path like `oauth.client_secret` or
`keys[2]` so callers can pinpoint the offending field.
"""
_reject_masked_in_dict(credentials, path="")
def _reject_masked_in_dict(credentials: dict[str, Any], path: str) -> None:
for key, val in credentials.items():
field_path = f"{path}.{key}" if path else key
_reject_masked_in_value(val, field_path)
def _reject_masked_in_value(val: Any, path: str) -> None:
if isinstance(val, str):
if is_masked_credential(val):
raise ValueError(
f"Credential field '{path}' contains masked placeholder "
"characters. Please provide the actual credential value."
)
return
if isinstance(val, dict):
_reject_masked_in_dict(val, path=path)
return
if isinstance(val, list):
for index, item in enumerate(val):
_reject_masked_in_value(item, f"{path}[{index}]")
MASK_CREDENTIALS_WHITELIST = {
DB_CREDENTIALS_AUTHENTICATION_METHOD,
"wiki_base",

View File

@@ -1,5 +1,5 @@
[pytest]
pythonpath =
pythonpath =
.
generated/onyx_openapi_client
asyncio_default_fixture_loop_scope = function
@@ -9,6 +9,7 @@ markers =
filterwarnings =
ignore::DeprecationWarning
ignore::cryptography.utils.CryptographyDeprecationWarning
ignore::PendingDeprecationWarning:ddtrace.internal.module
# .test.env is gitignored.
# After installing pytest-dotenv,
# you can use it to test credentials locally.

View File

@@ -98,6 +98,8 @@ botocore-stubs==1.40.74
braintrust==0.3.9
brotli==1.2.0
# via onyx
bytecode==0.17.0
# via ddtrace
cachetools==6.2.2
# via py-key-value-aio
caio==0.9.25
@@ -191,6 +193,7 @@ dataclasses-json==0.6.7
# via unstructured
dateparser==1.2.2
# via htmldate
ddtrace==3.10.0
decorator==5.2.1
# via retry
defusedxml==0.7.1
@@ -225,6 +228,8 @@ email-validator==2.2.0
# pydantic
emoji==2.15.0
# via unstructured
envier==0.6.1
# via ddtrace
et-xmlfile==2.0.0
# via openpyxl
events==0.5
@@ -459,6 +464,8 @@ langfuse==3.10.0
langsmith==0.7.32
# via langchain-core
lazy-imports==1.0.1
legacy-cgi==2.6.4 ; python_full_version >= '3.13'
# via ddtrace
litellm==1.81.6
# via onyx
locket==1.0.0
@@ -502,7 +509,7 @@ marshmallow==3.26.2
# via dataclasses-json
matrix-client==0.3.2
# via zulip
mcp==1.27.0
mcp==1.26.0
# via
# claude-agent-sdk
# fastmcp
@@ -571,6 +578,7 @@ openpyxl==3.0.10
opensearch-py==3.0.0
opentelemetry-api==1.39.1
# via
# ddtrace
# fastmcp
# langfuse
# openinference-instrumentation
@@ -653,6 +661,7 @@ proto-plus==1.26.1
# google-cloud-resource-manager
protobuf==6.33.5
# via
# ddtrace
# google-api-core
# google-cloud-aiplatform
# google-cloud-resource-manager
@@ -757,7 +766,7 @@ python-dateutil==2.8.2
# opensearch-py
# pandas
python-docx==1.1.2
python-dotenv==1.2.2
python-dotenv==1.1.1
# via
# braintrust
# fastmcp
@@ -772,7 +781,7 @@ python-iso639==2025.11.16
# via unstructured
python-magic==0.4.27
# via unstructured
python-multipart==0.0.26
python-multipart==0.0.22
# via
# fastapi-users
# mcp
@@ -999,6 +1008,7 @@ typing-extensions==4.15.0
# boto3-stubs
# braintrust
# cohere
# ddtrace
# exa-py
# exceptiongroup
# fastapi
@@ -1106,6 +1116,7 @@ wrapt==1.17.3
# via
# aiobotocore
# braintrust
# ddtrace
# deprecated
# langfuse
# openinference-instrumentation
@@ -1116,9 +1127,11 @@ xlsxwriter==3.2.9
# via python-pptx
xmlsec==1.3.14
# via python3-saml
xmltodict==1.0.2
# via ddtrace
xxhash==3.6.0
# via langsmith
yarl==1.23.0
yarl==1.22.0
# via aiohttp
zeep==4.3.2
# via simple-salesforce

View File

@@ -285,7 +285,7 @@ matplotlib-inline==0.2.1
# via
# ipykernel
# ipython
mcp==1.27.0
mcp==1.26.0
# via claude-agent-sdk
mdurl==0.1.2
# via markdown-it-py
@@ -438,12 +438,12 @@ python-dateutil==2.8.2
# jupyter-client
# kubernetes
# matplotlib
python-dotenv==1.2.2
python-dotenv==1.1.1
# via
# litellm
# pydantic-settings
# pytest-dotenv
python-multipart==0.0.26
python-multipart==0.0.22
# via mcp
pytokens==0.4.1
# via black
@@ -618,7 +618,7 @@ websockets==15.0.1
# via google-genai
wrapt==1.17.3
# via aiobotocore
yarl==1.23.0
yarl==1.22.0
# via aiohttp
zipp==3.23.0
# via importlib-metadata

View File

@@ -215,7 +215,7 @@ markdown-it-py==4.0.0
# via rich
markupsafe==3.0.3
# via jinja2
mcp==1.27.0
mcp==1.26.0
# via claude-agent-sdk
mdurl==0.1.2
# via markdown-it-py
@@ -305,11 +305,11 @@ python-dateutil==2.8.2
# google-cloud-bigquery
# kubernetes
# posthog
python-dotenv==1.2.2
python-dotenv==1.1.1
# via
# litellm
# pydantic-settings
python-multipart==0.0.26
python-multipart==0.0.22
# via mcp
pywin32==311 ; sys_platform == 'win32'
# via mcp
@@ -430,7 +430,7 @@ websockets==15.0.1
# via google-genai
wrapt==1.17.3
# via aiobotocore
yarl==1.23.0
yarl==1.22.0
# via aiohttp
zipp==3.23.0
# via importlib-metadata

View File

@@ -247,7 +247,7 @@ markdown-it-py==4.0.0
# via rich
markupsafe==3.0.3
# via jinja2
mcp==1.27.0
mcp==1.26.0
# via claude-agent-sdk
mdurl==0.1.2
# via markdown-it-py
@@ -390,11 +390,11 @@ python-dateutil==2.8.2
# celery
# google-cloud-bigquery
# kubernetes
python-dotenv==1.2.2
python-dotenv==1.1.1
# via
# litellm
# pydantic-settings
python-multipart==0.0.26
python-multipart==0.0.22
# via mcp
pywin32==311 ; sys_platform == 'win32'
# via mcp
@@ -559,7 +559,7 @@ websockets==15.0.1
# via google-genai
wrapt==1.17.3
# via aiobotocore
yarl==1.23.0
yarl==1.22.0
# via aiohttp
zipp==3.23.0
# via importlib-metadata

View File

@@ -1,279 +0,0 @@
"""External dependency unit tests for the file_id cleanup that runs alongside
document deletion across the three deletion paths:
1. `document_by_cc_pair_cleanup_task` (pruning + connector deletion)
2. `delete_ingestion_doc` (public ingestion API DELETE)
3. `delete_all_documents_for_connector_credential_pair` (index swap)
Each path captures attached `Document.file_id`s before the row is removed and
best-effort deletes the underlying files after the DB commit.
"""
from collections.abc import Generator
from unittest.mock import MagicMock
from unittest.mock import patch
from uuid import uuid4
import pytest
from sqlalchemy.orm import Session
from onyx.background.celery.tasks.shared.tasks import (
document_by_cc_pair_cleanup_task,
)
from onyx.connectors.models import Document
from onyx.connectors.models import IndexAttemptMetadata
from onyx.db.document import delete_all_documents_for_connector_credential_pair
from onyx.db.document import upsert_document_by_connector_credential_pair
from onyx.db.models import ConnectorCredentialPair
from onyx.indexing.indexing_pipeline import index_doc_batch_prepare
from onyx.server.onyx_api.ingestion import delete_ingestion_doc
from tests.external_dependency_unit.constants import TEST_TENANT_ID
from tests.external_dependency_unit.indexing_helpers import cleanup_cc_pair
from tests.external_dependency_unit.indexing_helpers import get_doc_row
from tests.external_dependency_unit.indexing_helpers import get_filerecord
from tests.external_dependency_unit.indexing_helpers import make_cc_pair
from tests.external_dependency_unit.indexing_helpers import make_doc
from tests.external_dependency_unit.indexing_helpers import stage_file
# ---------------------------------------------------------------------------
# Helpers (file-local)
# ---------------------------------------------------------------------------
def _index_doc(
    db_session: Session,
    doc: Document,
    attempt_metadata: IndexAttemptMetadata,
) -> None:
    """Push a single document through the upsert preparation step.

    This persists the ``Document`` row and its cc_pair mapping so the
    deletion paths under test have real DB state to find and remove.
    """
    batch = [doc]
    index_doc_batch_prepare(
        db_session=db_session,
        documents=batch,
        index_attempt_metadata=attempt_metadata,
        ignore_time_skip=True,
    )
    db_session.commit()
# ---------------------------------------------------------------------------
# Fixtures
# ---------------------------------------------------------------------------
@pytest.fixture
def cc_pair(
    db_session: Session,
    tenant_context: None,  # noqa: ARG001
    initialize_file_store: None,  # noqa: ARG001
) -> Generator[ConnectorCredentialPair, None, None]:
    """Yield a freshly created cc_pair and sweep it (and anything the test
    indexed under it) on teardown via the shared `cleanup_cc_pair` helper.

    `tenant_context` / `initialize_file_store` are dependency fixtures only;
    their values are unused (hence the ARG001 suppressions).
    """
    pair = make_cc_pair(db_session)
    try:
        yield pair
    finally:
        # Always clean up, even if the test body raised.
        cleanup_cc_pair(db_session, pair)
@pytest.fixture
def second_cc_pair(
    db_session: Session,
    tenant_context: None,  # noqa: ARG001
    initialize_file_store: None,  # noqa: ARG001
) -> Generator[ConnectorCredentialPair, None, None]:
    """A second, independent cc_pair, used to test the refcount > 1 branch
    where a document is attached to more than one connector/credential pair.

    Mirrors the `cc_pair` fixture exactly: create, yield, sweep on teardown.
    """
    pair = make_cc_pair(db_session)
    try:
        yield pair
    finally:
        cleanup_cc_pair(db_session, pair)
@pytest.fixture
def attempt_metadata(cc_pair: ConnectorCredentialPair) -> IndexAttemptMetadata:
    """Metadata tying indexed docs to the `cc_pair` fixture's connector and
    credential. `attempt_id=None` — no real IndexAttempt row is needed for
    these tests."""
    return IndexAttemptMetadata(
        connector_id=cc_pair.connector_id,
        credential_id=cc_pair.credential_id,
        attempt_id=None,
        request_id="test-request",
    )
# ---------------------------------------------------------------------------
# Tests
# ---------------------------------------------------------------------------
class TestDeleteAllDocumentsForCcPair:
    """Path 3: bulk delete during index swap (`INSTANT` switchover).

    Verifies that `delete_all_documents_for_connector_credential_pair`
    removes the Document rows AND reaps each doc's attached file record.
    """

    def test_cleans_up_files_for_all_docs(
        self,
        db_session: Session,
        cc_pair: ConnectorCredentialPair,
        attempt_metadata: IndexAttemptMetadata,
    ) -> None:
        """Two docs, each with its own staged file: both rows and both
        file records must be gone after the bulk delete."""
        file_id_a = stage_file(content=b"a")
        file_id_b = stage_file(content=b"b")
        doc_a = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id_a)
        doc_b = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id_b)
        _index_doc(db_session, doc_a, attempt_metadata)
        _index_doc(db_session, doc_b, attempt_metadata)
        # Sanity: both files exist before the delete runs.
        assert get_filerecord(db_session, file_id_a) is not None
        assert get_filerecord(db_session, file_id_b) is not None
        delete_all_documents_for_connector_credential_pair(
            db_session=db_session,
            connector_id=cc_pair.connector_id,
            credential_id=cc_pair.credential_id,
        )
        assert get_doc_row(db_session, doc_a.id) is None
        assert get_doc_row(db_session, doc_b.id) is None
        assert get_filerecord(db_session, file_id_a) is None
        assert get_filerecord(db_session, file_id_b) is None

    def test_handles_mixed_docs_with_and_without_file_ids(
        self,
        db_session: Session,
        cc_pair: ConnectorCredentialPair,
        attempt_metadata: IndexAttemptMetadata,
    ) -> None:
        """Docs without file_id should be cleanly removed — no errors,
        no spurious file_store calls."""
        file_id = stage_file()
        doc_with = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id)
        doc_without = make_doc(f"doc-{uuid4().hex[:8]}", file_id=None)
        _index_doc(db_session, doc_with, attempt_metadata)
        _index_doc(db_session, doc_without, attempt_metadata)
        delete_all_documents_for_connector_credential_pair(
            db_session=db_session,
            connector_id=cc_pair.connector_id,
            credential_id=cc_pair.credential_id,
        )
        # Both rows gone; only the doc that had a file leaves a record to reap.
        assert get_doc_row(db_session, doc_with.id) is None
        assert get_doc_row(db_session, doc_without.id) is None
        assert get_filerecord(db_session, file_id) is None
class TestDeleteIngestionDoc:
    """Path 2: public ingestion API DELETE endpoint.

    Calls the endpoint function `delete_ingestion_doc` directly (no HTTP
    layer) and checks that the doc row and its file record are removed.
    """

    def test_cleans_up_file_for_ingestion_api_doc(
        self,
        db_session: Session,
        attempt_metadata: IndexAttemptMetadata,
        tenant_context: None,  # noqa: ARG002
        initialize_file_store: None,  # noqa: ARG002
    ) -> None:
        file_id = stage_file()
        doc = make_doc(
            f"doc-{uuid4().hex[:8]}",
            file_id=file_id,
            from_ingestion_api=True,
        )
        _index_doc(db_session, doc, attempt_metadata)
        assert get_filerecord(db_session, file_id) is not None
        # Patch out Vespa — we're testing the file cleanup, not the document
        # index integration.
        with patch(
            "onyx.server.onyx_api.ingestion.get_all_document_indices",
            return_value=[],
        ):
            delete_ingestion_doc(
                document_id=doc.id,
                _=MagicMock(),  # auth dep — not used by the function body
                db_session=db_session,
            )
        assert get_doc_row(db_session, doc.id) is None
        assert get_filerecord(db_session, file_id) is None
class TestDocumentByCcPairCleanupTask:
    """Path 1: per-doc cleanup task fired by pruning / connector deletion.

    Exercises `document_by_cc_pair_cleanup_task` synchronously via
    `.apply(...)` so no Celery worker is required.
    """

    def test_count_1_branch_cleans_up_file(
        self,
        db_session: Session,
        cc_pair: ConnectorCredentialPair,
        attempt_metadata: IndexAttemptMetadata,
        full_deployment_setup: None,  # noqa: ARG002
    ) -> None:
        """When the doc has exactly one cc_pair reference, the full delete
        path runs and the attached file is reaped."""
        file_id = stage_file()
        doc = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id)
        _index_doc(db_session, doc, attempt_metadata)
        assert get_filerecord(db_session, file_id) is not None
        # Patch out Vespa interaction — no chunks were ever written, and we're
        # not testing the document index here.
        with patch(
            "onyx.background.celery.tasks.shared.tasks.get_all_document_indices",
            return_value=[],
        ):
            result = document_by_cc_pair_cleanup_task.apply(
                args=(
                    doc.id,
                    cc_pair.connector_id,
                    cc_pair.credential_id,
                    TEST_TENANT_ID,
                ),
            )
        # Include the traceback in the failure message for easier debugging.
        assert result.successful(), result.traceback
        assert get_doc_row(db_session, doc.id) is None
        assert get_filerecord(db_session, file_id) is None

    def test_count_gt_1_branch_preserves_file(
        self,
        db_session: Session,
        cc_pair: ConnectorCredentialPair,
        second_cc_pair: ConnectorCredentialPair,
        attempt_metadata: IndexAttemptMetadata,
        full_deployment_setup: None,  # noqa: ARG002
    ) -> None:
        """When the doc is referenced by another cc_pair, only the mapping
        for the detaching cc_pair is removed. The file MUST stay because
        the doc and its file are still owned by the remaining cc_pair."""
        file_id = stage_file()
        doc = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id)
        _index_doc(db_session, doc, attempt_metadata)
        # Attach the same doc to a second cc_pair so refcount becomes 2.
        upsert_document_by_connector_credential_pair(
            db_session,
            second_cc_pair.connector_id,
            second_cc_pair.credential_id,
            [doc.id],
        )
        db_session.commit()
        with patch(
            "onyx.background.celery.tasks.shared.tasks.get_all_document_indices",
            return_value=[],
        ):
            result = document_by_cc_pair_cleanup_task.apply(
                args=(
                    doc.id,
                    cc_pair.connector_id,
                    cc_pair.credential_id,
                    TEST_TENANT_ID,
                ),
            )
        assert result.successful(), result.traceback
        # Document row still exists (other cc_pair owns it).
        assert get_doc_row(db_session, doc.id) is not None
        # File MUST still exist.
        record = get_filerecord(db_session, file_id)
        assert record is not None

View File

@@ -11,21 +11,72 @@ Uses real PostgreSQL + real S3/MinIO via the file store.
"""
from collections.abc import Generator
from io import BytesIO
from uuid import uuid4
import pytest
from sqlalchemy.orm import Session
from onyx.configs.constants import DocumentSource
from onyx.configs.constants import FileOrigin
from onyx.connectors.models import Document
from onyx.connectors.models import IndexAttemptMetadata
from onyx.connectors.models import InputType
from onyx.connectors.models import TextSection
from onyx.db.enums import AccessType
from onyx.db.enums import ConnectorCredentialPairStatus
from onyx.db.file_record import get_filerecord_by_file_id_optional
from onyx.db.models import Connector
from onyx.db.models import ConnectorCredentialPair
from onyx.db.models import Credential
from onyx.db.models import Document as DBDocument
from onyx.db.models import DocumentByConnectorCredentialPair
from onyx.db.models import FileRecord
from onyx.file_store.file_store import get_default_file_store
from onyx.indexing.indexing_pipeline import index_doc_batch_prepare
from tests.external_dependency_unit.indexing_helpers import cleanup_cc_pair
from tests.external_dependency_unit.indexing_helpers import get_doc_row
from tests.external_dependency_unit.indexing_helpers import get_filerecord
from tests.external_dependency_unit.indexing_helpers import make_cc_pair
from tests.external_dependency_unit.indexing_helpers import make_doc
from tests.external_dependency_unit.indexing_helpers import stage_file
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def _make_doc(doc_id: str, file_id: str | None = None) -> Document:
    """Minimal Document for indexing-pipeline tests. MOCK_CONNECTOR avoids
    triggering the hierarchy-node linking branch (NOTION/CONFLUENCE only)."""
    return Document(
        id=doc_id,
        source=DocumentSource.MOCK_CONNECTOR,
        semantic_identifier=f"semantic-{doc_id}",
        # One tiny text section — enough content for the pipeline to accept.
        sections=[TextSection(text="content", link=None)],
        metadata={},
        file_id=file_id,
    )
def _stage_file(content: bytes = b"raw bytes") -> str:
    """Write bytes to the file store as INDEXING_STAGING and return the file_id.

    Mirrors what the connector raw_file_callback would do during fetch.
    """
    return get_default_file_store().save_file(
        content=BytesIO(content),
        display_name=None,
        file_origin=FileOrigin.INDEXING_STAGING,
        file_type="application/octet-stream",
        file_metadata={"test": True},
    )
def _get_doc_row(db_session: Session, doc_id: str) -> DBDocument | None:
    """Reload the document row fresh from DB so we see post-upsert state."""
    # Expire cached ORM objects first so the query hits the database.
    db_session.expire_all()
    query = db_session.query(DBDocument).filter(DBDocument.id == doc_id)
    return query.one_or_none()
def _get_filerecord(db_session: Session, file_id: str) -> FileRecord | None:
    """Reload the file record fresh from DB (expire first) or None if gone."""
    db_session.expire_all()
    return get_filerecord_by_file_id_optional(file_id=file_id, db_session=db_session)
# ---------------------------------------------------------------------------
@@ -39,11 +90,97 @@ def cc_pair(
tenant_context: None, # noqa: ARG001
initialize_file_store: None, # noqa: ARG001
) -> Generator[ConnectorCredentialPair, None, None]:
pair = make_cc_pair(db_session)
"""Create a connector + credential + cc_pair backing the index attempt.
Teardown sweeps everything the test created under this cc_pair: the
`document_by_connector_credential_pair` join rows, the `Document` rows
they point at, the `FileRecord` + blob for each doc's `file_id`, and
finally the cc_pair / connector / credential themselves. Without this,
every run would leave orphan rows in the dev DB and orphan blobs in
MinIO.
"""
connector = Connector(
name=f"test-connector-{uuid4().hex[:8]}",
source=DocumentSource.MOCK_CONNECTOR,
input_type=InputType.LOAD_STATE,
connector_specific_config={},
refresh_freq=None,
prune_freq=None,
indexing_start=None,
)
db_session.add(connector)
db_session.flush()
credential = Credential(
source=DocumentSource.MOCK_CONNECTOR,
credential_json={},
)
db_session.add(credential)
db_session.flush()
pair = ConnectorCredentialPair(
connector_id=connector.id,
credential_id=credential.id,
name=f"test-cc-pair-{uuid4().hex[:8]}",
status=ConnectorCredentialPairStatus.ACTIVE,
access_type=AccessType.PUBLIC,
auto_sync_options=None,
)
db_session.add(pair)
db_session.commit()
db_session.refresh(pair)
connector_id = pair.connector_id
credential_id = pair.credential_id
try:
yield pair
finally:
cleanup_cc_pair(db_session, pair)
db_session.expire_all()
# Collect every doc indexed under this cc_pair so we can delete its
# file_record + blob before dropping the Document row itself.
doc_ids: list[str] = [
row[0]
for row in db_session.query(DocumentByConnectorCredentialPair.id)
.filter(
DocumentByConnectorCredentialPair.connector_id == connector_id,
DocumentByConnectorCredentialPair.credential_id == credential_id,
)
.all()
]
file_ids: list[str] = [
row[0]
for row in db_session.query(DBDocument.file_id)
.filter(DBDocument.id.in_(doc_ids), DBDocument.file_id.isnot(None))
.all()
]
file_store = get_default_file_store()
for fid in file_ids:
try:
file_store.delete_file(fid, error_on_missing=False)
except Exception:
pass
if doc_ids:
db_session.query(DocumentByConnectorCredentialPair).filter(
DocumentByConnectorCredentialPair.id.in_(doc_ids)
).delete(synchronize_session="fetch")
db_session.query(DBDocument).filter(DBDocument.id.in_(doc_ids)).delete(
synchronize_session="fetch"
)
db_session.query(ConnectorCredentialPair).filter(
ConnectorCredentialPair.id == pair.id
).delete(synchronize_session="fetch")
db_session.query(Connector).filter(Connector.id == connector_id).delete(
synchronize_session="fetch"
)
db_session.query(Credential).filter(Credential.id == credential_id).delete(
synchronize_session="fetch"
)
db_session.commit()
@pytest.fixture
@@ -69,7 +206,7 @@ class TestNewDocuments:
db_session: Session,
attempt_metadata: IndexAttemptMetadata,
) -> None:
doc = make_doc(f"doc-{uuid4().hex[:8]}", file_id=None)
doc = _make_doc(f"doc-{uuid4().hex[:8]}", file_id=None)
index_doc_batch_prepare(
documents=[doc],
@@ -79,7 +216,7 @@ class TestNewDocuments:
)
db_session.commit()
row = get_doc_row(db_session, doc.id)
row = _get_doc_row(db_session, doc.id)
assert row is not None
assert row.file_id is None
@@ -88,8 +225,8 @@ class TestNewDocuments:
db_session: Session,
attempt_metadata: IndexAttemptMetadata,
) -> None:
file_id = stage_file()
doc = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id)
file_id = _stage_file()
doc = _make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id)
index_doc_batch_prepare(
documents=[doc],
@@ -99,10 +236,10 @@ class TestNewDocuments:
)
db_session.commit()
row = get_doc_row(db_session, doc.id)
row = _get_doc_row(db_session, doc.id)
assert row is not None and row.file_id == file_id
record = get_filerecord(db_session, file_id)
record = _get_filerecord(db_session, file_id)
assert record is not None
assert record.file_origin == FileOrigin.CONNECTOR
@@ -115,8 +252,8 @@ class TestExistingDocuments:
db_session: Session,
attempt_metadata: IndexAttemptMetadata,
) -> None:
file_id = stage_file()
doc = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id)
file_id = _stage_file()
doc = _make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id)
# First pass: inserts the row + promotes the file.
index_doc_batch_prepare(
@@ -136,11 +273,11 @@ class TestExistingDocuments:
)
db_session.commit()
record = get_filerecord(db_session, file_id)
record = _get_filerecord(db_session, file_id)
assert record is not None
assert record.file_origin == FileOrigin.CONNECTOR
row = get_doc_row(db_session, doc.id)
row = _get_doc_row(db_session, doc.id)
assert row is not None and row.file_id == file_id
def test_swapping_file_id_promotes_new_and_deletes_old(
@@ -148,8 +285,8 @@ class TestExistingDocuments:
db_session: Session,
attempt_metadata: IndexAttemptMetadata,
) -> None:
old_file_id = stage_file(content=b"old bytes")
doc = make_doc(f"doc-{uuid4().hex[:8]}", file_id=old_file_id)
old_file_id = _stage_file(content=b"old bytes")
doc = _make_doc(f"doc-{uuid4().hex[:8]}", file_id=old_file_id)
index_doc_batch_prepare(
documents=[doc],
@@ -160,8 +297,8 @@ class TestExistingDocuments:
db_session.commit()
# Re-fetch produces a new staged file_id for the same doc.
new_file_id = stage_file(content=b"new bytes")
doc_v2 = make_doc(doc.id, file_id=new_file_id)
new_file_id = _stage_file(content=b"new bytes")
doc_v2 = _make_doc(doc.id, file_id=new_file_id)
index_doc_batch_prepare(
documents=[doc_v2],
@@ -171,23 +308,23 @@ class TestExistingDocuments:
)
db_session.commit()
row = get_doc_row(db_session, doc.id)
row = _get_doc_row(db_session, doc.id)
assert row is not None and row.file_id == new_file_id
new_record = get_filerecord(db_session, new_file_id)
new_record = _get_filerecord(db_session, new_file_id)
assert new_record is not None
assert new_record.file_origin == FileOrigin.CONNECTOR
# Old file_record + S3 object are gone.
assert get_filerecord(db_session, old_file_id) is None
assert _get_filerecord(db_session, old_file_id) is None
def test_clearing_file_id_deletes_old_and_nulls_column(
self,
db_session: Session,
attempt_metadata: IndexAttemptMetadata,
) -> None:
old_file_id = stage_file()
doc = make_doc(f"doc-{uuid4().hex[:8]}", file_id=old_file_id)
old_file_id = _stage_file()
doc = _make_doc(f"doc-{uuid4().hex[:8]}", file_id=old_file_id)
index_doc_batch_prepare(
documents=[doc],
@@ -198,7 +335,7 @@ class TestExistingDocuments:
db_session.commit()
# Connector opts out on next run — yields the doc without a file_id.
doc_v2 = make_doc(doc.id, file_id=None)
doc_v2 = _make_doc(doc.id, file_id=None)
index_doc_batch_prepare(
documents=[doc_v2],
@@ -208,9 +345,9 @@ class TestExistingDocuments:
)
db_session.commit()
row = get_doc_row(db_session, doc.id)
row = _get_doc_row(db_session, doc.id)
assert row is not None and row.file_id is None
assert get_filerecord(db_session, old_file_id) is None
assert _get_filerecord(db_session, old_file_id) is None
class TestBatchHandling:
@@ -222,8 +359,8 @@ class TestBatchHandling:
attempt_metadata: IndexAttemptMetadata,
) -> None:
# Pre-seed an existing doc with a file_id we'll swap.
existing_old_id = stage_file(content=b"existing-old")
existing_doc = make_doc(f"doc-{uuid4().hex[:8]}", file_id=existing_old_id)
existing_old_id = _stage_file(content=b"existing-old")
existing_doc = _make_doc(f"doc-{uuid4().hex[:8]}", file_id=existing_old_id)
index_doc_batch_prepare(
documents=[existing_doc],
index_attempt_metadata=attempt_metadata,
@@ -234,11 +371,11 @@ class TestBatchHandling:
# Now: swap the existing one, add a brand-new doc with file_id, and a
# brand-new doc without file_id.
swap_new_id = stage_file(content=b"existing-new")
new_with_file_id = stage_file(content=b"new-with-file")
existing_v2 = make_doc(existing_doc.id, file_id=swap_new_id)
new_with = make_doc(f"doc-{uuid4().hex[:8]}", file_id=new_with_file_id)
new_without = make_doc(f"doc-{uuid4().hex[:8]}", file_id=None)
swap_new_id = _stage_file(content=b"existing-new")
new_with_file_id = _stage_file(content=b"new-with-file")
existing_v2 = _make_doc(existing_doc.id, file_id=swap_new_id)
new_with = _make_doc(f"doc-{uuid4().hex[:8]}", file_id=new_with_file_id)
new_without = _make_doc(f"doc-{uuid4().hex[:8]}", file_id=None)
index_doc_batch_prepare(
documents=[existing_v2, new_with, new_without],
@@ -249,20 +386,20 @@ class TestBatchHandling:
db_session.commit()
# Existing doc was swapped: old file gone, new file promoted.
existing_row = get_doc_row(db_session, existing_doc.id)
existing_row = _get_doc_row(db_session, existing_doc.id)
assert existing_row is not None and existing_row.file_id == swap_new_id
assert get_filerecord(db_session, existing_old_id) is None
swap_record = get_filerecord(db_session, swap_new_id)
assert _get_filerecord(db_session, existing_old_id) is None
swap_record = _get_filerecord(db_session, swap_new_id)
assert swap_record is not None
assert swap_record.file_origin == FileOrigin.CONNECTOR
# New doc with file_id: row exists, file promoted.
new_with_row = get_doc_row(db_session, new_with.id)
new_with_row = _get_doc_row(db_session, new_with.id)
assert new_with_row is not None and new_with_row.file_id == new_with_file_id
new_with_record = get_filerecord(db_session, new_with_file_id)
new_with_record = _get_filerecord(db_session, new_with_file_id)
assert new_with_record is not None
assert new_with_record.file_origin == FileOrigin.CONNECTOR
# New doc without file_id: row exists, no file_record involvement.
new_without_row = get_doc_row(db_session, new_without.id)
new_without_row = _get_doc_row(db_session, new_without.id)
assert new_without_row is not None and new_without_row.file_id is None

View File

@@ -1,190 +0,0 @@
"""Shared helpers for external-dependency indexing tests.
Three test files exercise the `Document` / `cc_pair` / `file_store` surfaces
against real Postgres + S3: `test_index_doc_batch_prepare`, `test_index_swap_workflow`,
and `test_document_deletion_file_cleanup`. The setup + teardown logic is
substantial and identical across all three, so it lives here.
Tests keep their own `cc_pair` fixture (dependencies differ per file), but
the body is just `make_cc_pair` + `cleanup_cc_pair`.
"""
from io import BytesIO
from uuid import uuid4
from sqlalchemy.orm import Session
from onyx.configs.constants import DocumentSource
from onyx.configs.constants import FileOrigin
from onyx.connectors.models import Document
from onyx.connectors.models import InputType
from onyx.connectors.models import TextSection
from onyx.db.enums import AccessType
from onyx.db.enums import ConnectorCredentialPairStatus
from onyx.db.file_record import get_filerecord_by_file_id_optional
from onyx.db.models import Connector
from onyx.db.models import ConnectorCredentialPair
from onyx.db.models import Credential
from onyx.db.models import Document as DBDocument
from onyx.db.models import DocumentByConnectorCredentialPair
from onyx.db.models import FileRecord
from onyx.file_store.file_store import get_default_file_store
def make_doc(
doc_id: str,
file_id: str | None = None,
from_ingestion_api: bool = False,
) -> Document:
"""Minimal Document for indexing-pipeline tests. MOCK_CONNECTOR avoids
triggering the hierarchy-node linking branch (NOTION/CONFLUENCE only)."""
return Document(
id=doc_id,
source=DocumentSource.MOCK_CONNECTOR,
semantic_identifier=f"semantic-{doc_id}",
sections=[TextSection(text="content", link=None)],
metadata={},
file_id=file_id,
from_ingestion_api=from_ingestion_api,
)
def stage_file(content: bytes = b"raw bytes") -> str:
"""Write bytes to the file store as INDEXING_STAGING and return the file_id.
Mirrors what the connector raw_file_callback would do during fetch.
The `{"test": True}` metadata tag lets manual cleanup scripts find
leftovers if a cleanup ever slips through.
"""
return get_default_file_store().save_file(
content=BytesIO(content),
display_name=None,
file_origin=FileOrigin.INDEXING_STAGING,
file_type="application/octet-stream",
file_metadata={"test": True},
)
def get_doc_row(db_session: Session, doc_id: str) -> DBDocument | None:
"""Reload the document row fresh from DB so we see post-upsert state."""
db_session.expire_all()
return db_session.query(DBDocument).filter(DBDocument.id == doc_id).one_or_none()
def get_filerecord(db_session: Session, file_id: str) -> FileRecord | None:
db_session.expire_all()
return get_filerecord_by_file_id_optional(file_id=file_id, db_session=db_session)
def make_cc_pair(db_session: Session) -> ConnectorCredentialPair:
"""Create a Connector + Credential + ConnectorCredentialPair for a test.
All names are UUID-suffixed so parallel test runs don't collide.
"""
connector = Connector(
name=f"test-connector-{uuid4().hex[:8]}",
source=DocumentSource.MOCK_CONNECTOR,
input_type=InputType.LOAD_STATE,
connector_specific_config={},
refresh_freq=None,
prune_freq=None,
indexing_start=None,
)
db_session.add(connector)
db_session.flush()
credential = Credential(
source=DocumentSource.MOCK_CONNECTOR,
credential_json={},
)
db_session.add(credential)
db_session.flush()
pair = ConnectorCredentialPair(
connector_id=connector.id,
credential_id=credential.id,
name=f"test-cc-pair-{uuid4().hex[:8]}",
status=ConnectorCredentialPairStatus.ACTIVE,
access_type=AccessType.PUBLIC,
auto_sync_options=None,
)
db_session.add(pair)
db_session.commit()
db_session.refresh(pair)
return pair
def cleanup_cc_pair(db_session: Session, pair: ConnectorCredentialPair) -> None:
"""Tear down everything created under `pair`.
Deletes own join rows first (FK to document has no cascade), then for any
doc that now has zero remaining cc_pair references, deletes its file +
the document row. Finally removes the cc_pair, connector, credential.
Safe against docs shared with other cc_pairs — those stay alive until
their last reference is torn down.
"""
db_session.expire_all()
connector_id = pair.connector_id
credential_id = pair.credential_id
owned_doc_ids: list[str] = [
row[0]
for row in db_session.query(DocumentByConnectorCredentialPair.id)
.filter(
DocumentByConnectorCredentialPair.connector_id == connector_id,
DocumentByConnectorCredentialPair.credential_id == credential_id,
)
.all()
]
db_session.query(DocumentByConnectorCredentialPair).filter(
DocumentByConnectorCredentialPair.connector_id == connector_id,
DocumentByConnectorCredentialPair.credential_id == credential_id,
).delete(synchronize_session="fetch")
db_session.flush()
if owned_doc_ids:
orphan_doc_ids: list[str] = [
row[0]
for row in db_session.query(DBDocument.id)
.filter(DBDocument.id.in_(owned_doc_ids))
.filter(
~db_session.query(DocumentByConnectorCredentialPair)
.filter(DocumentByConnectorCredentialPair.id == DBDocument.id)
.exists()
)
.all()
]
orphan_file_ids: list[str] = [
row[0]
for row in db_session.query(DBDocument.file_id)
.filter(
DBDocument.id.in_(orphan_doc_ids),
DBDocument.file_id.isnot(None),
)
.all()
]
file_store = get_default_file_store()
for fid in orphan_file_ids:
try:
file_store.delete_file(fid, error_on_missing=False)
except Exception:
pass
if orphan_doc_ids:
db_session.query(DBDocument).filter(
DBDocument.id.in_(orphan_doc_ids)
).delete(synchronize_session="fetch")
db_session.query(ConnectorCredentialPair).filter(
ConnectorCredentialPair.id == pair.id
).delete(synchronize_session="fetch")
db_session.query(Connector).filter(Connector.id == connector_id).delete(
synchronize_session="fetch"
)
db_session.query(Credential).filter(Credential.id == credential_id).delete(
synchronize_session="fetch"
)
db_session.commit()

View File

@@ -1,193 +0,0 @@
"""Workflow-level test for the INSTANT index swap.
When `check_and_perform_index_swap` runs against an `INSTANT` switchover, it
calls `delete_all_documents_for_connector_credential_pair` for each cc_pair.
This test exercises that full workflow end-to-end and asserts that the
attached `Document.file_id`s are also reaped — not just the document rows.
Mocks Vespa (`get_all_document_indices`) since this is testing the postgres +
file_store side effects of the swap, not the document index integration.
"""
from collections.abc import Generator
from unittest.mock import patch
from uuid import uuid4
import pytest
from sqlalchemy.orm import Session
from onyx.connectors.models import IndexAttemptMetadata
from onyx.context.search.models import SavedSearchSettings
from onyx.db.enums import EmbeddingPrecision
from onyx.db.enums import SwitchoverType
from onyx.db.models import ConnectorCredentialPair
from onyx.db.models import IndexModelStatus
from onyx.db.search_settings import create_search_settings
from onyx.db.swap_index import check_and_perform_index_swap
from onyx.indexing.indexing_pipeline import index_doc_batch_prepare
from tests.external_dependency_unit.indexing_helpers import cleanup_cc_pair
from tests.external_dependency_unit.indexing_helpers import get_doc_row
from tests.external_dependency_unit.indexing_helpers import get_filerecord
from tests.external_dependency_unit.indexing_helpers import make_cc_pair
from tests.external_dependency_unit.indexing_helpers import make_doc
from tests.external_dependency_unit.indexing_helpers import stage_file
# ---------------------------------------------------------------------------
# Helpers (file-local)
# ---------------------------------------------------------------------------
def _make_saved_search_settings(
*,
switchover_type: SwitchoverType = SwitchoverType.REINDEX,
) -> SavedSearchSettings:
return SavedSearchSettings(
model_name=f"test-embedding-model-{uuid4().hex[:8]}",
model_dim=768,
normalize=True,
query_prefix="",
passage_prefix="",
provider_type=None,
index_name=f"test_index_{uuid4().hex[:8]}",
multipass_indexing=False,
embedding_precision=EmbeddingPrecision.FLOAT,
reduced_dimension=None,
enable_contextual_rag=False,
contextual_rag_llm_name=None,
contextual_rag_llm_provider=None,
switchover_type=switchover_type,
)
# ---------------------------------------------------------------------------
# Fixtures
# ---------------------------------------------------------------------------
@pytest.fixture
def cc_pair(
db_session: Session,
tenant_context: None, # noqa: ARG001
initialize_file_store: None, # noqa: ARG001
full_deployment_setup: None, # noqa: ARG001
) -> Generator[ConnectorCredentialPair, None, None]:
pair = make_cc_pair(db_session)
try:
yield pair
finally:
cleanup_cc_pair(db_session, pair)
@pytest.fixture
def attempt_metadata(cc_pair: ConnectorCredentialPair) -> IndexAttemptMetadata:
return IndexAttemptMetadata(
connector_id=cc_pair.connector_id,
credential_id=cc_pair.credential_id,
attempt_id=None,
request_id="test-request",
)
# ---------------------------------------------------------------------------
# Tests
# ---------------------------------------------------------------------------
class TestInstantIndexSwap:
"""`SwitchoverType.INSTANT` wipes all docs for every cc_pair as part of
the swap. The associated raw files must be reaped too."""
def test_instant_swap_deletes_docs_and_files(
self,
db_session: Session,
attempt_metadata: IndexAttemptMetadata,
) -> None:
# Index two docs with attached files via the normal pipeline.
file_id_a = stage_file(content=b"alpha")
file_id_b = stage_file(content=b"beta")
doc_a = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id_a)
doc_b = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id_b)
index_doc_batch_prepare(
documents=[doc_a, doc_b],
index_attempt_metadata=attempt_metadata,
db_session=db_session,
ignore_time_skip=True,
)
db_session.commit()
# Sanity: docs and files exist before the swap.
assert get_doc_row(db_session, doc_a.id) is not None
assert get_doc_row(db_session, doc_b.id) is not None
assert get_filerecord(db_session, file_id_a) is not None
assert get_filerecord(db_session, file_id_b) is not None
# Stage a FUTURE search settings with INSTANT switchover. The next
# `check_and_perform_index_swap` call will see this and trigger the
# bulk-delete path on every cc_pair.
create_search_settings(
search_settings=_make_saved_search_settings(
switchover_type=SwitchoverType.INSTANT
),
db_session=db_session,
status=IndexModelStatus.FUTURE,
)
# Vespa is patched out — we're testing the postgres + file_store
# side effects, not the document-index integration.
with patch(
"onyx.db.swap_index.get_all_document_indices",
return_value=[],
):
old_settings = check_and_perform_index_swap(db_session)
assert old_settings is not None, "INSTANT swap should have executed"
# Documents are gone.
assert get_doc_row(db_session, doc_a.id) is None
assert get_doc_row(db_session, doc_b.id) is None
# Files are gone — the workflow's bulk-delete path correctly
# propagated through to file cleanup.
assert get_filerecord(db_session, file_id_a) is None
assert get_filerecord(db_session, file_id_b) is None
def test_instant_swap_with_mixed_docs_does_not_break(
self,
db_session: Session,
attempt_metadata: IndexAttemptMetadata,
) -> None:
"""A mix of docs with and without file_ids must all be swept up
without errors during the swap."""
file_id = stage_file()
doc_with = make_doc(f"doc-{uuid4().hex[:8]}", file_id=file_id)
doc_without = make_doc(f"doc-{uuid4().hex[:8]}", file_id=None)
index_doc_batch_prepare(
documents=[doc_with, doc_without],
index_attempt_metadata=attempt_metadata,
db_session=db_session,
ignore_time_skip=True,
)
db_session.commit()
create_search_settings(
search_settings=_make_saved_search_settings(
switchover_type=SwitchoverType.INSTANT
),
db_session=db_session,
status=IndexModelStatus.FUTURE,
)
with patch(
"onyx.db.swap_index.get_all_document_indices",
return_value=[],
):
old_settings = check_and_perform_index_swap(db_session)
assert old_settings is not None
assert get_doc_row(db_session, doc_with.id) is None
assert get_doc_row(db_session, doc_without.id) is None
assert get_filerecord(db_session, file_id) is None

View File

@@ -6,7 +6,7 @@ The script is invoked as a subprocess — the same way it would be used in
production. Tests verify exit codes and stdout messages.
Usage:
pytest -m alembic tests/integration/tests/migrations/test_run_multitenant_migrations.py -v
pytest tests/integration/tests/migrations/test_run_multitenant_migrations.py -v
"""
from __future__ import annotations
@@ -24,8 +24,6 @@ from sqlalchemy.engine import Engine
from onyx.db.engine.sql_engine import SqlEngine
pytestmark = pytest.mark.alembic
# Resolve the backend/ directory once so every helper can use it as cwd.
_BACKEND_DIR = os.path.normpath(
os.path.join(os.path.dirname(__file__), "..", "..", "..", "..")
@@ -45,13 +43,14 @@ def _run_script(
env_override: dict[str, str] | None = None,
) -> subprocess.CompletedProcess[str]:
"""Run ``python alembic/run_multitenant_migrations.py`` from the backend/ directory."""
env = {**os.environ, **(env_override or {})}
return subprocess.run(
[sys.executable, "alembic/run_multitenant_migrations.py", *extra_args],
cwd=_BACKEND_DIR,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
env={**os.environ, "PYTHONPATH": _BACKEND_DIR, **(env_override or {})},
env=env,
)
@@ -111,7 +110,6 @@ def current_head_rev() -> str:
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
text=True,
env={**os.environ, "PYTHONPATH": _BACKEND_DIR},
)
assert (
result.returncode == 0

View File

@@ -2,25 +2,14 @@
This file tests user file permissions in different scenarios:
1. Public assistant with user files - files should be accessible to all users
2. Direct file access - user files should NOT be accessible by users who don't own them
3. Image-generation tool outputs - files persisted on `ToolCall.generated_images`
must be downloadable by the chat session owner (and by anyone if the session
is publicly shared), but not by other users on private sessions.
"""
import io
from typing import NamedTuple
from uuid import UUID
from uuid import uuid4
import pytest
import requests
from onyx.configs.constants import FileOrigin
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.enums import ChatSessionSharedStatus
from onyx.db.models import ChatSession
from onyx.db.models import ToolCall
from onyx.file_store.file_store import get_default_file_store
from onyx.file_store.models import FileDescriptor
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.managers.chat import ChatSessionManager
@@ -28,7 +17,6 @@ from tests.integration.common_utils.managers.file import FileManager
from tests.integration.common_utils.managers.llm_provider import LLMProviderManager
from tests.integration.common_utils.managers.persona import PersonaManager
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.test_models import DATestChatSession
from tests.integration.common_utils.test_models import DATestPersona
from tests.integration.common_utils.test_models import DATestUser
@@ -161,140 +149,3 @@ def test_cannot_download_other_users_file_via_chat_file_endpoint(
f"when fetching file_id={file_id}"
)
assert user2_response.content != owner_response.content
# -----------------------------------------------------------------------------
# Image-generation tool output access checks
#
# Image-generation results are persisted on `ToolCall.generated_images` (JSONB),
# *not* on `ChatMessage.files`. The hardening commit `a7a5b66d6` added an
# authorization gate to `GET /chat/file/{file_id}` that did not know about that
# column, so previously-rendered images started returning 404 on chat reload.
# These tests pin the post-fix behavior end-to-end.
# -----------------------------------------------------------------------------
_IMAGE_GEN_PNG_BYTES = b"\x89PNG\r\n\x1a\n" + b"image-gen-test-bytes"
class ImageGenSetup(NamedTuple):
owner: DATestUser
intruder: DATestUser
chat_session: DATestChatSession
file_id: str
def _seed_image_gen_tool_call(chat_session_id: UUID) -> str:
"""Persist a fake image to the file store and link it via a ToolCall row,
mirroring what `ImageGenerationTool` produces at runtime."""
file_store = get_default_file_store()
file_id = file_store.save_file(
content=io.BytesIO(_IMAGE_GEN_PNG_BYTES),
display_name="GeneratedImage",
file_origin=FileOrigin.CHAT_IMAGE_GEN,
file_type="image/png",
)
with get_session_with_current_tenant() as db_session:
tool_call = ToolCall(
chat_session_id=chat_session_id,
parent_chat_message_id=None,
parent_tool_call_id=None,
turn_number=0,
tab_index=0,
tool_id=0,
tool_call_id=uuid4().hex,
tool_call_arguments={},
tool_call_response="",
tool_call_tokens=0,
generated_images=[
{
"file_id": file_id,
"url": f"/api/chat/file/{file_id}",
"revised_prompt": "a cat",
"shape": "square",
}
],
)
db_session.add(tool_call)
db_session.commit()
return file_id
@pytest.fixture
def image_gen_setup(reset: None) -> ImageGenSetup: # noqa: ARG001
"""Owner with a chat session that has an image-generation tool output."""
owner: DATestUser = UserManager.create(name="img_gen_owner")
intruder: DATestUser = UserManager.create(name="img_gen_intruder")
LLMProviderManager.create(user_performing_action=owner)
chat_session = ChatSessionManager.create(
user_performing_action=owner,
description="image gen permission test",
)
file_id = _seed_image_gen_tool_call(UUID(str(chat_session.id)))
return ImageGenSetup(
owner=owner,
intruder=intruder,
chat_session=chat_session,
file_id=file_id,
)
def test_owner_can_download_image_gen_file(
image_gen_setup: ImageGenSetup,
) -> None:
"""The chat session owner must be able to fetch an image-gen file_id stored
on `ToolCall.generated_images`. Pre-fix, this returned 404 — that 404 is
the exact regression these tests pin."""
response = requests.get(
f"{API_SERVER_URL}/chat/file/{image_gen_setup.file_id}",
headers=image_gen_setup.owner.headers,
)
assert response.status_code == 200, (
f"Owner should receive image-gen file, got {response.status_code}: "
f"{response.text}"
)
assert response.content == _IMAGE_GEN_PNG_BYTES
def test_non_owner_cannot_download_image_gen_file_in_private_session(
image_gen_setup: ImageGenSetup,
) -> None:
"""A non-owner must not be able to read an image-gen file in a PRIVATE
session — the new branch should not over-grant access."""
response = requests.get(
f"{API_SERVER_URL}/chat/file/{image_gen_setup.file_id}",
headers=image_gen_setup.intruder.headers,
)
assert response.status_code in (403, 404), (
f"Non-owner should be denied on a private session, got "
f"{response.status_code}: {response.text}"
)
assert response.content != _IMAGE_GEN_PNG_BYTES
def test_non_owner_can_download_image_gen_file_in_public_session(
image_gen_setup: ImageGenSetup,
) -> None:
"""When the chat session is publicly shared, any authenticated user must
be able to fetch its image-gen outputs — mirrors the existing
`ChatMessage.files` public-share branch."""
with get_session_with_current_tenant() as db_session:
chat_session = db_session.get(
ChatSession, UUID(str(image_gen_setup.chat_session.id))
)
assert chat_session is not None
chat_session.shared_status = ChatSessionSharedStatus.PUBLIC
db_session.commit()
response = requests.get(
f"{API_SERVER_URL}/chat/file/{image_gen_setup.file_id}",
headers=image_gen_setup.intruder.headers,
)
assert response.status_code == 200, (
f"Non-owner should be able to read image-gen file on public session, "
f"got {response.status_code}: {response.text}"
)
assert response.content == _IMAGE_GEN_PNG_BYTES

View File

@@ -1,7 +1,7 @@
import pytest
from onyx.configs.constants import MASK_CREDENTIAL_CHAR
from onyx.utils.encryption import reject_masked_credentials
from onyx.db.federated import _reject_masked_credentials
class TestRejectMaskedCredentials:
@@ -10,24 +10,24 @@ class TestRejectMaskedCredentials:
mask_string() has two output formats:
- Short strings (< 14 chars): "••••••••••••" (U+2022 BULLET)
- Long strings (>= 14 chars): "abcd...wxyz" (first4 + "..." + last4)
reject_masked_credentials must catch both.
_reject_masked_credentials must catch both.
"""
def test_rejects_fully_masked_value(self) -> None:
masked = MASK_CREDENTIAL_CHAR * 12 # "••••••••••••"
with pytest.raises(ValueError, match="masked placeholder"):
reject_masked_credentials({"client_id": masked})
_reject_masked_credentials({"client_id": masked})
def test_rejects_long_string_masked_value(self) -> None:
"""mask_string returns 'first4...last4' for long strings — the real
format used for OAuth credentials like client_id and client_secret."""
with pytest.raises(ValueError, match="masked placeholder"):
reject_masked_credentials({"client_id": "1234...7890"})
_reject_masked_credentials({"client_id": "1234...7890"})
def test_rejects_when_any_field_is_masked(self) -> None:
"""Even if client_id is real, a masked client_secret must be caught."""
with pytest.raises(ValueError, match="client_secret"):
reject_masked_credentials(
_reject_masked_credentials(
{
"client_id": "1234567890.1234567890",
"client_secret": MASK_CREDENTIAL_CHAR * 12,
@@ -36,7 +36,7 @@ class TestRejectMaskedCredentials:
def test_accepts_real_credentials(self) -> None:
# Should not raise
reject_masked_credentials(
_reject_masked_credentials(
{
"client_id": "1234567890.1234567890",
"client_secret": "test_client_secret_value",
@@ -45,63 +45,14 @@ class TestRejectMaskedCredentials:
def test_accepts_empty_dict(self) -> None:
# Should not raise — empty credentials are handled elsewhere
reject_masked_credentials({})
_reject_masked_credentials({})
def test_ignores_non_string_values(self) -> None:
# Non-string values (None, bool, int) should pass through
reject_masked_credentials(
_reject_masked_credentials(
{
"client_id": "real_value",
"redirect_uri": None,
"some_flag": True,
}
)
def test_rejects_masked_value_inside_nested_dict(self) -> None:
"""`mask_credential_dict` recurses into nested dicts; the rejection
helper must do the same so a masked nested string can't slip
through on resubmit."""
with pytest.raises(ValueError, match=r"oauth\.client_secret"):
reject_masked_credentials(
{
"name": "fine",
"oauth": {
"client_id": "1234567890.1234567890",
"client_secret": "abcd...wxyz",
},
}
)
def test_rejects_masked_value_inside_list(self) -> None:
"""`_mask_list` masks string elements; the rejection helper must
catch them too."""
with pytest.raises(ValueError, match=r"keys\[1\]"):
reject_masked_credentials(
{
"keys": ["real-key-aaaa", "abcd...wxyz", "real-key-bbbb"],
}
)
def test_rejects_masked_value_inside_list_of_dicts(self) -> None:
with pytest.raises(ValueError, match=r"sessions\[0\]\.token"):
reject_masked_credentials(
{
"sessions": [
{"token": "abcd...wxyz"},
{"token": "real-token-value"},
],
}
)
def test_accepts_deeply_nested_real_values(self) -> None:
reject_masked_credentials(
{
"oauth": {
"client_id": "real-id-value-1234",
"extras": {
"scopes": ["read", "write"],
"metadata": {"region": "us-east-1"},
},
},
}
)

View File

@@ -1,255 +0,0 @@
"""Unit tests for the reCAPTCHA Enterprise Assessment rejection ladder."""
from collections.abc import Iterator
from datetime import datetime
from datetime import timezone
from unittest.mock import AsyncMock
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from onyx.auth import captcha as captcha_module
from onyx.auth.captcha import CaptchaAction
from onyx.auth.captcha import CaptchaVerificationError
from onyx.auth.captcha import verify_captcha_token
def _fake_client(payload: dict) -> MagicMock:
resp = MagicMock()
resp.raise_for_status = MagicMock()
resp.json = MagicMock(return_value=payload)
client = MagicMock()
client.post = AsyncMock(return_value=resp)
client.__aenter__ = AsyncMock(return_value=client)
client.__aexit__ = AsyncMock(return_value=None)
return client
def _fresh_create_time() -> str:
"""An RFC3339 createTime that passes the 120s freshness check."""
return datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
def _assessment(
*,
valid: bool = True,
invalid_reason: str | None = None,
action: str = "signup",
hostname: str = "cloud.onyx.app",
create_time: str | None = None,
score: float = 0.9,
reasons: list[str] | None = None,
) -> dict:
return {
"name": "projects/154649423065/assessments/abc",
"tokenProperties": {
"valid": valid,
"invalidReason": invalid_reason,
"action": action,
"hostname": hostname,
"createTime": create_time or _fresh_create_time(),
},
"riskAnalysis": {"score": score, "reasons": reasons or []},
}
@pytest.fixture(autouse=True)
def _test_env() -> Iterator[None]:
"""Stub the Redis replay cache (covered in test_captcha_replay.py) and
populate cloud-like config so the hostname check actually fires."""
with (
patch.object(
captcha_module,
"_reserve_token_or_raise",
AsyncMock(return_value=None),
),
patch.object(captcha_module, "_release_token", AsyncMock(return_value=None)),
patch.object(
captcha_module,
"RECAPTCHA_HOSTNAME_ALLOWLIST",
frozenset({"cloud.onyx.app"}),
),
patch.object(captcha_module, "RECAPTCHA_SCORE_THRESHOLD", 0.8),
):
yield
@pytest.mark.asyncio
async def test_happy_path_passes() -> None:
client = _fake_client(_assessment())
with (
patch.object(captcha_module, "is_captcha_enabled", return_value=True),
patch.object(captcha_module.httpx, "AsyncClient", return_value=client),
):
await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_invalid_token_rejects_with_reason() -> None:
client = _fake_client(_assessment(valid=False, invalid_reason="MALFORMED"))
with (
patch.object(captcha_module, "is_captcha_enabled", return_value=True),
patch.object(captcha_module.httpx, "AsyncClient", return_value=client),
):
with pytest.raises(CaptchaVerificationError, match="MALFORMED"):
await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_hostname_mismatch_rejects() -> None:
client = _fake_client(_assessment(hostname="evil.example.com"))
with (
patch.object(captcha_module, "is_captcha_enabled", return_value=True),
patch.object(captcha_module.httpx, "AsyncClient", return_value=client),
):
with pytest.raises(CaptchaVerificationError, match="hostname"):
await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_stale_create_time_rejects() -> None:
stale = "2020-01-01T00:00:00Z"
client = _fake_client(_assessment(create_time=stale))
with (
patch.object(captcha_module, "is_captcha_enabled", return_value=True),
patch.object(captcha_module.httpx, "AsyncClient", return_value=client),
):
with pytest.raises(CaptchaVerificationError, match="token expired"):
await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_action_mismatch_rejects_strictly() -> None:
"""A signup token cannot satisfy the oauth path and vice versa."""
client = _fake_client(_assessment(action="oauth"))
with (
patch.object(captcha_module, "is_captcha_enabled", return_value=True),
patch.object(captcha_module.httpx, "AsyncClient", return_value=client),
):
with pytest.raises(CaptchaVerificationError, match="action mismatch"):
await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_empty_action_rejects() -> None:
"""Regression guard: the legacy code skipped the check when action was
falsy. Enterprise ladder must reject instead."""
client = _fake_client(_assessment(action=""))
with (
patch.object(captcha_module, "is_captcha_enabled", return_value=True),
patch.object(captcha_module.httpx, "AsyncClient", return_value=client),
):
with pytest.raises(CaptchaVerificationError, match="action mismatch"):
await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_automation_reason_rejects_even_with_high_score() -> None:
"""The key win of moving to Enterprise: a 0.9-scoring bot caught by
reasons[] still gets rejected. Legacy siteverify would have let this
through on score alone."""
client = _fake_client(_assessment(score=0.9, reasons=["AUTOMATION"]))
with (
patch.object(captcha_module, "is_captcha_enabled", return_value=True),
patch.object(captcha_module.httpx, "AsyncClient", return_value=client),
):
with pytest.raises(CaptchaVerificationError, match="AUTOMATION"):
await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_too_much_traffic_reason_rejects() -> None:
client = _fake_client(_assessment(score=0.9, reasons=["TOO_MUCH_TRAFFIC"]))
with (
patch.object(captcha_module, "is_captcha_enabled", return_value=True),
patch.object(captcha_module.httpx, "AsyncClient", return_value=client),
):
with pytest.raises(CaptchaVerificationError, match="TOO_MUCH_TRAFFIC"):
await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_unexpected_environment_reason_rejects() -> None:
client = _fake_client(_assessment(score=0.9, reasons=["UNEXPECTED_ENVIRONMENT"]))
with (
patch.object(captcha_module, "is_captcha_enabled", return_value=True),
patch.object(captcha_module.httpx, "AsyncClient", return_value=client),
):
with pytest.raises(CaptchaVerificationError, match="UNEXPECTED_ENVIRONMENT"):
await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_score_below_floor_rejects() -> None:
    """An assessment scoring below the acceptance floor is rejected."""
    fake_client = _fake_client(_assessment(score=0.1))
    with (
        patch.object(captcha_module, "is_captcha_enabled", return_value=True),
        patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
        pytest.raises(CaptchaVerificationError, match="suspicious"),
    ):
        await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_soft_reason_alone_does_not_reject() -> None:
    """LOW_CONFIDENCE_SCORE is not in the hard-reject set.

    With the score above the floor and nothing else failing, the request
    passes.
    """
    fake_client = _fake_client(_assessment(score=0.9, reasons=["LOW_CONFIDENCE_SCORE"]))
    with (
        patch.object(captcha_module, "is_captcha_enabled", return_value=True),
        patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
    ):
        # Should complete without raising.
        await verify_captcha_token("tok", CaptchaAction.SIGNUP)
@pytest.mark.asyncio
async def test_no_op_when_disabled() -> None:
    """When captcha is disabled, verification returns without any HTTP call."""
    fake_client = _fake_client(_assessment())
    with (
        patch.object(captcha_module, "is_captcha_enabled", return_value=False),
        patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
    ):
        await verify_captcha_token("tok", CaptchaAction.SIGNUP)
    # The disabled path must short-circuit before the network layer.
    fake_client.post.assert_not_awaited()
@pytest.mark.asyncio
async def test_empty_token_rejected_before_http() -> None:
    """An empty token is rejected locally, before any request is sent."""
    fake_client = _fake_client(_assessment())
    with (
        patch.object(captcha_module, "is_captcha_enabled", return_value=True),
        patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
        pytest.raises(CaptchaVerificationError, match="required"),
    ):
        await verify_captcha_token("", CaptchaAction.SIGNUP)
    # The rejection happened client-side: no HTTP call was made.
    fake_client.post.assert_not_awaited()
@pytest.mark.asyncio
async def test_post_body_has_enterprise_shape() -> None:
    """The outgoing POST targets the Enterprise assessments endpoint and
    carries the expected event payload plus the API-key query parameter."""
    fake_client = _fake_client(_assessment())
    with (
        patch.object(captcha_module, "is_captcha_enabled", return_value=True),
        patch.object(captcha_module, "RECAPTCHA_ENTERPRISE_PROJECT_ID", "test-project"),
        patch.object(captcha_module, "RECAPTCHA_SITE_KEY", "test-site-key"),
        patch.object(captcha_module, "RECAPTCHA_ENTERPRISE_API_KEY", "test-api-key"),
        patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
    ):
        await verify_captcha_token("tok", CaptchaAction.SIGNUP)

    fake_client.post.assert_awaited_once()
    awaited = fake_client.post.await_args
    assert awaited is not None
    # Enterprise endpoint is project-scoped, unlike legacy siteverify.
    expected_url = (
        "https://recaptchaenterprise.googleapis.com/v1/projects/test-project/assessments"
    )
    assert awaited.args[0] == expected_url
    payload = awaited.kwargs["json"]
    assert payload["event"] == {
        "token": "tok",
        "siteKey": "test-site-key",
        "expectedAction": "signup",
    }
    assert awaited.kwargs["params"] == {"key": "test-api-key"}

View File

@@ -10,7 +10,6 @@ import pytest
from onyx.auth import captcha as captcha_module
from onyx.auth.captcha import _replay_cache_key
from onyx.auth.captcha import _reserve_token_or_raise
from onyx.auth.captcha import CaptchaAction
from onyx.auth.captcha import CaptchaVerificationError
from onyx.auth.captcha import verify_captcha_token
@@ -70,7 +69,7 @@ def test_replay_cache_key_is_sha256_prefixed() -> None:
@pytest.mark.asyncio
async def test_reservation_released_when_google_unreachable() -> None:
"""If the Assessment API itself errors (our side, not the token's), the
"""If Google's siteverify itself errors (our side, not the token's), the
replay reservation must be released so the user can retry with the same
still-valid token instead of getting 'already used' for 120s."""
fake_redis = MagicMock()
@@ -92,7 +91,7 @@ async def test_reservation_released_when_google_unreachable() -> None:
patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
):
with pytest.raises(CaptchaVerificationError, match="service unavailable"):
await verify_captcha_token("valid-token", CaptchaAction.SIGNUP)
await verify_captcha_token("valid-token", expected_action="signup")
# The reservation was claimed and then released.
fake_redis.set.assert_awaited_once()
@@ -108,6 +107,9 @@ async def test_reservation_released_on_unexpected_response_shape() -> None:
fake_redis.set = AsyncMock(return_value=True)
fake_redis.delete = AsyncMock(return_value=1)
# Simulate Google returning something that json() still succeeds on but
# fails RecaptchaResponse validation (e.g. success=true but with a wrong
# shape that Pydantic rejects when coerced).
fake_httpx_response = MagicMock()
fake_httpx_response.raise_for_status = MagicMock()
fake_httpx_response.json = MagicMock(side_effect=ValueError("not valid JSON"))
@@ -126,7 +128,7 @@ async def test_reservation_released_on_unexpected_response_shape() -> None:
patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
):
with pytest.raises(CaptchaVerificationError, match="service unavailable"):
await verify_captcha_token("valid-token", CaptchaAction.SIGNUP)
await verify_captcha_token("valid-token", expected_action="signup")
fake_redis.set.assert_awaited_once()
fake_redis.delete.assert_awaited_once()
@@ -134,9 +136,9 @@ async def test_reservation_released_on_unexpected_response_shape() -> None:
@pytest.mark.asyncio
async def test_reservation_kept_when_google_rejects_token() -> None:
"""If Google itself says the token is invalid (tokenProperties.valid=false),
the reservation must NOT be released — that token is known-bad for its
entire lifetime and shouldn't be retryable."""
"""If Google itself says the token is invalid (success=false, or score
too low), the reservation must NOT be released — that token is known-bad
for its entire lifetime and shouldn't be retryable."""
fake_redis = MagicMock()
fake_redis.set = AsyncMock(return_value=True)
fake_redis.delete = AsyncMock(return_value=1)
@@ -145,12 +147,8 @@ async def test_reservation_kept_when_google_rejects_token() -> None:
fake_httpx_response.raise_for_status = MagicMock()
fake_httpx_response.json = MagicMock(
return_value={
"name": "projects/154649423065/assessments/abc",
"tokenProperties": {
"valid": False,
"invalidReason": "MALFORMED",
},
"riskAnalysis": {"score": 0.0, "reasons": []},
"success": False,
"error-codes": ["invalid-input-response"],
}
)
fake_client = MagicMock()
@@ -167,8 +165,8 @@ async def test_reservation_kept_when_google_rejects_token() -> None:
),
patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
):
with pytest.raises(CaptchaVerificationError, match="MALFORMED"):
await verify_captcha_token("bad-token", CaptchaAction.SIGNUP)
with pytest.raises(CaptchaVerificationError, match="invalid-input-response"):
await verify_captcha_token("bad-token", expected_action="signup")
fake_redis.set.assert_awaited_once()
fake_redis.delete.assert_not_awaited()

View File

@@ -0,0 +1,78 @@
"""Unit tests for the require-score check in verify_captcha_token."""
from unittest.mock import AsyncMock
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from onyx.auth import captcha as captcha_module
from onyx.auth.captcha import CaptchaVerificationError
from onyx.auth.captcha import verify_captcha_token
def _fake_httpx_client_returning(payload: dict) -> MagicMock:
resp = MagicMock()
resp.raise_for_status = MagicMock()
resp.json = MagicMock(return_value=payload)
client = MagicMock()
client.post = AsyncMock(return_value=resp)
client.__aenter__ = AsyncMock(return_value=client)
client.__aexit__ = AsyncMock(return_value=None)
return client
@pytest.mark.asyncio
async def test_rejects_when_score_missing() -> None:
    """A siteverify response with no score field is rejected outright.

    Closes the accidental 'test secret in prod' bypass path.
    """
    fake_client = _fake_httpx_client_returning(
        {"success": True, "hostname": "testkey.google.com"}
    )
    with (
        patch.object(captcha_module, "is_captcha_enabled", return_value=True),
        patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
        pytest.raises(CaptchaVerificationError, match="missing score"),
    ):
        await verify_captcha_token("test-token", expected_action="signup")
@pytest.mark.asyncio
async def test_accepts_when_score_present_and_above_threshold() -> None:
    """Sanity-check the happy path still works with the tighter score rule."""
    payload = {
        "success": True,
        "score": 0.9,
        "action": "signup",
        "hostname": "cloud.onyx.app",
    }
    fake_client = _fake_httpx_client_returning(payload)
    with (
        patch.object(captcha_module, "is_captcha_enabled", return_value=True),
        patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
    ):
        # Should complete without raising.
        await verify_captcha_token("fresh-token", expected_action="signup")
@pytest.mark.asyncio
async def test_rejects_when_score_below_threshold() -> None:
    """A score present but below threshold still rejects (existing behavior,
    guarding against regression from this PR's restructure)."""
    payload = {
        "success": True,
        "score": 0.1,
        "action": "signup",
        "hostname": "cloud.onyx.app",
    }
    fake_client = _fake_httpx_client_returning(payload)
    with (
        patch.object(captcha_module, "is_captcha_enabled", return_value=True),
        patch.object(captcha_module.httpx, "AsyncClient", return_value=fake_client),
        pytest.raises(CaptchaVerificationError, match="suspicious activity detected"),
    ):
        await verify_captcha_token("low-score-token", expected_action="signup")

View File

@@ -1,176 +0,0 @@
"""Unit tests for the per-IP signup rate limiter."""
from unittest.mock import AsyncMock
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from fastapi import Request
from onyx.auth import signup_rate_limit as rl
from onyx.auth.signup_rate_limit import _bucket_key
from onyx.auth.signup_rate_limit import _client_ip
from onyx.auth.signup_rate_limit import _PER_IP_PER_HOUR
from onyx.auth.signup_rate_limit import enforce_signup_rate_limit
from onyx.error_handling.exceptions import OnyxError
def _make_request(
    xff: str | None = None, client_host: str | None = "1.2.3.4"
) -> Request:
    """Construct a minimal ASGI Request for rate-limiter tests.

    `xff`, when given, populates the X-Forwarded-For header; `client_host`,
    when given, becomes the TCP peer address in the scope.
    """
    headers: list = []
    if xff is not None:
        headers.append((b"x-forwarded-for", xff.encode()))
    scope: dict = {
        "type": "http",
        "method": "POST",
        "path": "/auth/register",
        "headers": headers,
    }
    if client_host is not None:
        scope["client"] = (client_host, 54321)
    return Request(scope)
def _fake_pipeline_redis(incr_return: int) -> MagicMock:
"""Build a Redis mock whose pipeline().execute() yields [incr_return, ok]."""
pipeline = MagicMock()
pipeline.incr = MagicMock()
pipeline.expire = MagicMock()
pipeline.execute = AsyncMock(return_value=[incr_return, 1])
redis = MagicMock()
redis.pipeline = MagicMock(return_value=pipeline)
redis._pipeline = pipeline # type: ignore[attr-defined]
return redis
def test_client_ip_uses_leftmost_when_first_entry_is_public() -> None:
    """A public leftmost X-Forwarded-For entry is trusted as the client IP."""
    req = _make_request(xff="1.2.3.4, 10.0.0.42")
    assert _client_ip(req) == "1.2.3.4"
def test_client_ip_falls_back_when_leftmost_is_private() -> None:
    """A private leftmost XFF entry is distrusted; fall back to the TCP peer."""
    req = _make_request(xff="10.0.0.1, 1.2.3.4", client_host="5.6.7.8")
    assert _client_ip(req) == "5.6.7.8"
def test_client_ip_falls_back_when_leftmost_is_loopback() -> None:
    """A loopback XFF entry is distrusted; fall back to the TCP peer."""
    req = _make_request(xff="127.0.0.1", client_host="5.6.7.8")
    assert _client_ip(req) == "5.6.7.8"
def test_client_ip_falls_back_when_xff_is_malformed() -> None:
    """An unparseable XFF entry is distrusted; fall back to the TCP peer."""
    req = _make_request(xff="not-an-ip, 1.2.3.4", client_host="10.0.0.1")
    assert _client_ip(req) == "10.0.0.1"
def test_client_ip_falls_back_to_tcp_peer_when_xff_absent() -> None:
    """Without an XFF header the TCP peer address is used directly."""
    req = _make_request(xff=None, client_host="5.6.7.8")
    assert _client_ip(req) == "5.6.7.8"
def test_client_ip_handles_no_client() -> None:
    """With neither XFF nor a TCP peer, a sentinel value is returned."""
    req = _make_request(xff=None, client_host=None)
    assert _client_ip(req) == "unknown"
@pytest.mark.asyncio
async def test_disabled_when_not_multitenant() -> None:
    """Single-tenant deployments skip the limiter; Redis is never consulted."""
    req = _make_request(client_host="1.2.3.4")
    fake_redis = MagicMock()
    with (
        patch.object(rl, "MULTI_TENANT", False),
        patch.object(rl, "SIGNUP_RATE_LIMIT_ENABLED", True),
        patch.object(
            rl, "get_async_redis_connection", AsyncMock(return_value=fake_redis)
        ) as conn,
    ):
        await enforce_signup_rate_limit(req)
    # The connection factory itself must not be awaited.
    conn.assert_not_awaited()
@pytest.mark.asyncio
async def test_disabled_when_enable_flag_off() -> None:
    """The explicit enable flag also gates the limiter; Redis is untouched."""
    req = _make_request(client_host="1.2.3.4")
    fake_redis = MagicMock()
    with (
        patch.object(rl, "MULTI_TENANT", True),
        patch.object(rl, "SIGNUP_RATE_LIMIT_ENABLED", False),
        patch.object(
            rl, "get_async_redis_connection", AsyncMock(return_value=fake_redis)
        ) as conn,
    ):
        await enforce_signup_rate_limit(req)
    # The connection factory itself must not be awaited.
    conn.assert_not_awaited()
@pytest.mark.asyncio
async def test_allows_when_under_limit() -> None:
    """Counts at or below the hourly cap do not raise."""
    req = _make_request(xff="1.2.3.4, 10.0.0.1")
    # incr returning exactly the cap is still allowed (limit is inclusive).
    fake_redis = _fake_pipeline_redis(incr_return=_PER_IP_PER_HOUR)
    with (
        patch.object(rl, "MULTI_TENANT", True),
        patch.object(rl, "SIGNUP_RATE_LIMIT_ENABLED", True),
        patch.object(
            rl, "get_async_redis_connection", AsyncMock(return_value=fake_redis)
        ),
    ):
        await enforce_signup_rate_limit(req)
@pytest.mark.asyncio
async def test_rejects_when_over_limit() -> None:
    """Strictly above the cap → OnyxError.RATE_LIMITED (HTTP 429)."""
    req = _make_request(xff="1.2.3.4, 10.0.0.1")
    fake_redis = _fake_pipeline_redis(incr_return=_PER_IP_PER_HOUR + 1)
    with (
        patch.object(rl, "MULTI_TENANT", True),
        patch.object(rl, "SIGNUP_RATE_LIMIT_ENABLED", True),
        patch.object(
            rl, "get_async_redis_connection", AsyncMock(return_value=fake_redis)
        ),
    ):
        with pytest.raises(OnyxError) as exc_info:
            await enforce_signup_rate_limit(req)
    # The error maps to an HTTP 429 for the client.
    assert exc_info.value.error_code.status_code == 429
@pytest.mark.asyncio
async def test_pipeline_expire_runs_on_every_hit() -> None:
    """INCR and EXPIRE run in a single pipeline for atomicity."""
    req = _make_request(xff="1.2.3.4, 10.0.0.1")
    fake_redis = _fake_pipeline_redis(incr_return=3)
    with (
        patch.object(rl, "MULTI_TENANT", True),
        patch.object(rl, "SIGNUP_RATE_LIMIT_ENABLED", True),
        patch.object(
            rl, "get_async_redis_connection", AsyncMock(return_value=fake_redis)
        ),
    ):
        await enforce_signup_rate_limit(req)
    # EXPIRE must accompany every INCR so counters cannot live forever.
    fake_redis._pipeline.expire.assert_called_once()
@pytest.mark.asyncio
async def test_fails_open_on_redis_error() -> None:
    """Redis blip must NOT block legitimate signups."""
    req = _make_request(xff="1.2.3.4, 10.0.0.1")
    with (
        patch.object(rl, "MULTI_TENANT", True),
        patch.object(rl, "SIGNUP_RATE_LIMIT_ENABLED", True),
        patch.object(
            rl,
            "get_async_redis_connection",
            AsyncMock(side_effect=RuntimeError("redis down")),
        ),
    ):
        # Must not raise despite the connection error (fail-open).
        await enforce_signup_rate_limit(req)
def test_bucket_keys_differ_across_ips() -> None:
    """Two different IPs in the same hour must not share a counter."""
    key_a = _bucket_key("1.1.1.1")
    key_b = _bucket_key("2.2.2.2")
    assert key_a != key_b
    # The IP is embedded in the key prefix, hour bucket follows.
    assert key_a.startswith("signup_rate:1.1.1.1:")
    assert key_b.startswith("signup_rate:2.2.2.2:")

View File

@@ -1,126 +0,0 @@
"""Unit tests for `SimpleJob.terminate_and_wait`.
These tests do NOT exercise the full indexing watchdog. They only validate the
small primitive added so the watchdog can hard-kill a stuck spawned process
when its IndexAttempt has been finalized.
We need real OS processes here (multiprocessing.Process) - mocking would
defeat the purpose of testing termination semantics.
"""
import multiprocessing as mp
import os
import signal
import time
import pytest
from onyx.background.indexing.job_client import SimpleJob
def _ignore_sigterm_and_sleep_forever(ready_path: str) -> None:
    """Child entry point that ignores SIGTERM, simulating a hung connector
    that is not responsive to graceful shutdown signals.
    Writes to `ready_path` once SIGTERM is masked so the parent can avoid a
    race where it sends SIGTERM before the child has installed its handler.
    """
    signal.signal(signal.SIGTERM, signal.SIG_IGN)
    with open(ready_path, "w") as f:
        f.write("ready")
    # Spin forever; only SIGKILL can reap this child.
    while True:
        time.sleep(0.1)
def _exit_quickly_on_sigterm(ready_path: str) -> None:
    """Child entry point that handles SIGTERM by exiting cleanly."""
    def _handler(_signum: int, _frame: object) -> None:
        # os._exit avoids running interpreter teardown inside a signal handler.
        os._exit(0)
    signal.signal(signal.SIGTERM, _handler)
    with open(ready_path, "w") as f:
        f.write("ready")
    # Wait to be signalled; the handler terminates the process.
    while True:
        time.sleep(0.1)
def _wait_for_ready_file(path: str, timeout: float = 5.0) -> None:
deadline = time.monotonic() + timeout
while time.monotonic() < deadline:
if os.path.exists(path):
return
time.sleep(0.05)
raise TimeoutError(f"child never wrote ready file: {path}")
@pytest.fixture()
def ready_file(tmp_path) -> str:  # type: ignore[no-untyped-def]
    # Per-test path the child process writes to once its handlers are set up.
    return str(tmp_path / "child_ready.txt")
def test_terminate_and_wait_returns_false_when_no_process() -> None:
    """A SimpleJob with no spawned process should be a no-op."""
    job = SimpleJob(id=0, process=None, queue=None)
    # Nothing to signal: returns False without raising.
    assert job.terminate_and_wait(sigterm_grace_seconds=1.0) is False
def test_terminate_and_wait_returns_false_when_process_already_exited(
    ready_file: str,
) -> None:
    """If the child already exited cleanly, terminate_and_wait shouldn't error
    and should report that there was nothing to do."""
    ctx = mp.get_context("spawn")
    process = ctx.Process(target=_exit_quickly_on_sigterm, args=(ready_file,))
    process.start()
    _wait_for_ready_file(ready_file)
    assert process.pid is not None
    # Reap the child ourselves so the job sees an already-dead process.
    os.kill(process.pid, signal.SIGTERM)
    process.join(timeout=5.0)
    assert not process.is_alive()
    job = SimpleJob(id=1, process=process, queue=None)
    assert job.terminate_and_wait(sigterm_grace_seconds=1.0) is False
def test_terminate_and_wait_kills_responsive_child_with_sigterm(
    ready_file: str,
) -> None:
    """A responsive child should be reaped by the SIGTERM stage; we should
    not need to escalate to SIGKILL."""
    ctx = mp.get_context("spawn")
    process = ctx.Process(target=_exit_quickly_on_sigterm, args=(ready_file,))
    process.start()
    _wait_for_ready_file(ready_file)
    job = SimpleJob(id=2, process=process, queue=None)
    assert job.terminate_and_wait(sigterm_grace_seconds=5.0) is True
    assert not process.is_alive()
    # Clean handler exit (exit code 0), not a signal kill.
    assert process.exitcode == 0
def test_terminate_and_wait_escalates_to_sigkill_for_unresponsive_child(
    ready_file: str,
) -> None:
    """If the child ignores SIGTERM, terminate_and_wait must escalate to SIGKILL
    so we never leave an orphaned subprocess attached to a worker thread.
    This is the exact scenario that motivated this change: a connector
    subprocess that is hung and unresponsive to SIGTERM was tying up a
    docfetching worker thread indefinitely.
    """
    ctx = mp.get_context("spawn")
    process = ctx.Process(target=_ignore_sigterm_and_sleep_forever, args=(ready_file,))
    process.start()
    _wait_for_ready_file(ready_file)
    job = SimpleJob(id=3, process=process, queue=None)
    start = time.monotonic()
    grace = 0.5
    assert job.terminate_and_wait(sigterm_grace_seconds=grace) is True
    elapsed = time.monotonic() - start
    assert not process.is_alive()
    # Negative exitcode means "terminated by that signal number".
    assert process.exitcode == -signal.SIGKILL
    # The kill comes only after the full SIGTERM grace window has elapsed...
    assert elapsed >= grace
    # ...but must not hang indefinitely afterwards.
    assert elapsed < grace + 5.0

View File

@@ -1,117 +0,0 @@
"""Unit tests for LoginCaptchaMiddleware."""
from unittest.mock import AsyncMock
from unittest.mock import patch
from fastapi import FastAPI
from fastapi.testclient import TestClient
from onyx.auth.captcha import CaptchaAction
from onyx.auth.captcha import CaptchaVerificationError
from onyx.error_handling.exceptions import register_onyx_exception_handlers
from onyx.server.auth import captcha_api as captcha_api_module
from onyx.server.auth.captcha_api import LoginCaptchaMiddleware
def build_app() -> FastAPI:
    """Minimal app with the middleware installed plus stub auth routes."""
    app = FastAPI()
    register_onyx_exception_handlers(app)
    app.add_middleware(LoginCaptchaMiddleware)
    @app.post("/auth/login")
    async def _login() -> dict[str, str]:
        return {"status": "logged-in"}
    @app.post("/auth/register")
    async def _register() -> dict[str, str]:
        return {"status": "created"}
    # GET on the same path exists to prove only POST is gated.
    @app.get("/auth/login")
    async def _login_get() -> dict[str, str]:
        return {"status": "get-ignored"}
    return app
def test_passes_through_when_captcha_disabled() -> None:
    """With captcha disabled the middleware is a transparent pass-through."""
    app = build_app()
    client = TestClient(app)
    with patch.object(captcha_api_module, "is_captcha_enabled", return_value=False):
        res = client.post("/auth/login")
    assert res.status_code == 200
    assert res.json() == {"status": "logged-in"}
def test_rejects_when_header_missing() -> None:
    """A missing X-Captcha-Token header yields a 403 with a captcha message."""
    app = build_app()
    client = TestClient(app)
    with (
        patch.object(captcha_api_module, "is_captcha_enabled", return_value=True),
        patch.object(
            captcha_api_module,
            "verify_captcha_token",
            new=AsyncMock(
                side_effect=CaptchaVerificationError(
                    "Captcha verification failed: Captcha token is required"
                )
            ),
        ),
    ):
        res = client.post("/auth/login")
    assert res.status_code == 403
    assert "Captcha" in res.json()["detail"]
def test_rejects_on_bad_token() -> None:
    """A token that fails verification yields 403; verify runs exactly once."""
    app = build_app()
    client = TestClient(app)
    with (
        patch.object(captcha_api_module, "is_captcha_enabled", return_value=True),
        patch.object(
            captcha_api_module,
            "verify_captcha_token",
            new=AsyncMock(
                side_effect=CaptchaVerificationError(
                    "Captcha verification failed: AUTOMATION"
                )
            ),
        ) as verify_mock,
    ):
        res = client.post("/auth/login", headers={"X-Captcha-Token": "bad-token"})
    assert res.status_code == 403
    # Verified with the header token under the LOGIN action.
    verify_mock.assert_awaited_once_with("bad-token", CaptchaAction.LOGIN)
def test_passes_on_valid_token() -> None:
    """A token that verifies cleanly lets the request through to the route."""
    app = build_app()
    client = TestClient(app)
    with (
        patch.object(captcha_api_module, "is_captcha_enabled", return_value=True),
        patch.object(
            captcha_api_module,
            "verify_captcha_token",
            new=AsyncMock(return_value=None),
        ) as verify_mock,
    ):
        res = client.post("/auth/login", headers={"X-Captcha-Token": "good-token"})
    assert res.status_code == 200
    verify_mock.assert_awaited_once_with("good-token", CaptchaAction.LOGIN)
def test_does_not_gate_other_endpoints() -> None:
    """Only POST /auth/login is guarded. /auth/register and GET /auth/login pass."""
    app = build_app()
    client = TestClient(app)
    with (
        patch.object(captcha_api_module, "is_captcha_enabled", return_value=True),
        patch.object(
            captcha_api_module,
            "verify_captcha_token",
            new=AsyncMock(),
        ) as verify_mock,
    ):
        register_res = client.post("/auth/register")
        get_login_res = client.get("/auth/login")
    assert register_res.status_code == 200
    assert get_login_res.status_code == 200
    # Verification must never run for ungated routes.
    verify_mock.assert_not_awaited()

View File

@@ -1,207 +0,0 @@
"""Unit tests for the MCP OAuth credentials resolver and config builder.
These tests cover the fix for the "resubmit unchanged wipes client_info" bug
described in `plans/mcp-oauth-resubmit-empty-secret-fix.md`. The resolver
mirrors the LLM-provider `api_key_changed` pattern: when the frontend marks a
credential field as unchanged, the backend reuses the stored value instead of
overwriting it with whatever (likely masked) string the form replayed.
"""
import pytest
from mcp.shared.auth import OAuthClientInformationFull
from pydantic import AnyUrl
from onyx.server.features.mcp.api import _build_oauth_admin_config_data
from onyx.server.features.mcp.api import _resolve_oauth_credentials
from onyx.server.features.mcp.models import MCPOAuthKeys
from onyx.utils.encryption import mask_string
def _make_existing_client(
    *,
    client_id: str = "stored-client-id",
    client_secret: str | None = "stored-secret",
) -> OAuthClientInformationFull:
    """Build a stored OAuth client record as the resolver would load it.

    `client_secret=None` models a public client; the token endpoint auth
    method tracks whether a secret is present.
    """
    return OAuthClientInformationFull(
        client_id=client_id,
        client_secret=client_secret,
        redirect_uris=[AnyUrl("https://example.com/callback")],
        grant_types=["authorization_code", "refresh_token"],
        response_types=["code"],
        token_endpoint_auth_method=("client_secret_post" if client_secret else "none"),
    )
class TestResolveOAuthCredentials:
    """Tests for `_resolve_oauth_credentials` masked/unchanged handling."""

    def test_public_client_unchanged_resubmit_keeps_stored_values(self) -> None:
        """Unchanged resubmit of a public client keeps id and null secret."""
        existing = _make_existing_client(client_id="abc", client_secret=None)
        resolved_id, resolved_secret = _resolve_oauth_credentials(
            request_client_id=mask_string("abc") if len("abc") >= 14 else "abc",
            request_client_id_changed=False,
            request_client_secret="",
            request_client_secret_changed=False,
            existing_client=existing,
        )
        assert resolved_id == "abc"
        assert resolved_secret is None

    def test_confidential_client_unchanged_resubmit_keeps_stored_values(self) -> None:
        """Unchanged resubmit replays masked values; stored ones must win."""
        stored_id = "long-client-id-123456"
        stored_secret = "long-client-secret-abcdef"
        existing = _make_existing_client(
            client_id=stored_id,
            client_secret=stored_secret,
        )
        resolved_id, resolved_secret = _resolve_oauth_credentials(
            request_client_id=mask_string(stored_id),
            request_client_id_changed=False,
            request_client_secret=mask_string(stored_secret),
            request_client_secret_changed=False,
            existing_client=existing,
        )
        assert resolved_id == stored_id
        assert resolved_secret == stored_secret

    def test_only_client_id_changed_keeps_stored_secret(self) -> None:
        """Changing just the id must not clobber the stored secret."""
        existing = _make_existing_client(
            client_id="stored-id",
            client_secret="stored-secret-value",
        )
        resolved_id, resolved_secret = _resolve_oauth_credentials(
            request_client_id="brand-new-id",
            request_client_id_changed=True,
            request_client_secret=mask_string("stored-secret-value"),
            request_client_secret_changed=False,
            existing_client=existing,
        )
        assert resolved_id == "brand-new-id"
        assert resolved_secret == "stored-secret-value"

    def test_only_client_secret_changed_keeps_stored_id(self) -> None:
        """Changing just the secret must not clobber the stored id."""
        existing = _make_existing_client(
            client_id="stored-client-id-1234",
            client_secret="stored-secret",
        )
        resolved_id, resolved_secret = _resolve_oauth_credentials(
            request_client_id=mask_string("stored-client-id-1234"),
            request_client_id_changed=False,
            request_client_secret="brand-new-secret",
            request_client_secret_changed=True,
            existing_client=existing,
        )
        assert resolved_id == "stored-client-id-1234"
        assert resolved_secret == "brand-new-secret"

    def test_changed_flag_with_long_masked_value_is_rejected(self) -> None:
        """A 'changed' value that still looks masked is a client bug — reject."""
        existing = _make_existing_client(
            client_id="real-stored-id-1234",
            client_secret="real-stored-secret-1234",
        )
        with pytest.raises(ValueError, match="oauth_client_id"):
            _resolve_oauth_credentials(
                request_client_id=mask_string("some-other-long-string"),
                request_client_id_changed=True,
                request_client_secret="anything-else",
                request_client_secret_changed=True,
                existing_client=existing,
            )
        with pytest.raises(ValueError, match="oauth_client_secret"):
            _resolve_oauth_credentials(
                request_client_id="totally-fresh-id",
                request_client_id_changed=True,
                request_client_secret=mask_string("another-long-secret"),
                request_client_secret_changed=True,
                existing_client=existing,
            )

    def test_changed_flag_with_short_mask_placeholder_is_rejected(self) -> None:
        # mask_string returns "••••••••••••" for short inputs; verify both
        # mask formats trip the safety net, not just the long form.
        short_mask = mask_string("short")
        existing = _make_existing_client()
        with pytest.raises(ValueError, match="oauth_client_secret"):
            _resolve_oauth_credentials(
                request_client_id="something",
                request_client_id_changed=True,
                request_client_secret=short_mask,
                request_client_secret_changed=True,
                existing_client=existing,
            )

    def test_no_existing_client_passes_request_values_through(self) -> None:
        # Create flow: nothing is stored yet; both flags are False (the default)
        # but there's nothing to fall back to. The resolver should resolve to
        # None for both fields, leaving the caller to handle the create path
        # explicitly (which `_upsert_mcp_server` does by only invoking the
        # resolver when an `existing_client` is present).
        resolved_id, resolved_secret = _resolve_oauth_credentials(
            request_client_id="user-typed-id",
            request_client_id_changed=False,
            request_client_secret="user-typed-secret",
            request_client_secret_changed=False,
            existing_client=None,
        )
        assert resolved_id is None
        assert resolved_secret is None

    def test_no_existing_client_with_changed_flags_uses_request_values(self) -> None:
        """Create flow with explicit 'changed' flags takes the typed values."""
        resolved_id, resolved_secret = _resolve_oauth_credentials(
            request_client_id="user-typed-id",
            request_client_id_changed=True,
            request_client_secret="user-typed-secret",
            request_client_secret_changed=True,
            existing_client=None,
        )
        assert resolved_id == "user-typed-id"
        assert resolved_secret == "user-typed-secret"
class TestBuildOAuthAdminConfigData:
    """Tests for `_build_oauth_admin_config_data` client_info seeding."""

    def test_no_client_id_returns_empty_headers_only(self) -> None:
        """Without a client id there is nothing to seed — headers only."""
        config_data = _build_oauth_admin_config_data(
            client_id=None,
            client_secret=None,
        )
        assert config_data == {"headers": {}}
        assert MCPOAuthKeys.CLIENT_INFO.value not in config_data

    def test_public_client_with_no_secret_still_seeds_client_info(self) -> None:
        # Regression for the original bug: a public client (id present, secret
        # absent) used to fall through the gate and silently wipe the stored
        # client_info on resubmit.
        config_data = _build_oauth_admin_config_data(
            client_id="public-client-id",
            client_secret=None,
        )
        client_info_dict = config_data.get(MCPOAuthKeys.CLIENT_INFO.value)
        assert client_info_dict is not None
        assert client_info_dict["client_id"] == "public-client-id"
        assert client_info_dict.get("client_secret") is None
        # Public clients authenticate with no secret at the token endpoint.
        assert client_info_dict["token_endpoint_auth_method"] == "none"

    def test_confidential_client_uses_client_secret_post(self) -> None:
        """A confidential client seeds both credentials and secret_post auth."""
        config_data = _build_oauth_admin_config_data(
            client_id="confidential-id",
            client_secret="confidential-secret",
        )
        client_info_dict = config_data.get(MCPOAuthKeys.CLIENT_INFO.value)
        assert client_info_dict is not None
        assert client_info_dict["client_id"] == "confidential-id"
        assert client_info_dict["client_secret"] == "confidential-secret"
        assert client_info_dict["token_endpoint_auth_method"] == "client_secret_post"

View File

@@ -1,70 +1,36 @@
"""Test bulk invite limit for free trial tenants."""
from collections.abc import Iterator
from contextlib import contextmanager
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
from fastapi import HTTPException
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.server.manage.models import EmailInviteStatus
from onyx.server.manage.models import UserByEmail
from onyx.server.manage.users import bulk_invite_users
from onyx.server.manage.users import remove_invited_user
def _make_shared_session_mock(next_total: int) -> MagicMock:
"""Build a MagicMock mirroring `get_session_with_shared_schema`.
The production code does one `INSERT ... ON CONFLICT DO UPDATE ...
RETURNING total_invites_sent` call against the shared-schema session.
The mock plays the role of that session: calling the patched factory
returns a context manager whose `__enter__` yields a session whose
`.execute(...).scalar_one()` answers with `next_total` — the
post-increment counter value the DB would have returned.
"""
session = MagicMock()
session.execute.return_value.scalar_one.return_value = next_total
@contextmanager
def _ctx() -> Iterator[MagicMock]:
yield session
mock = MagicMock(side_effect=_ctx)
return mock
@patch(
"onyx.server.manage.users.get_session_with_shared_schema",
new_callable=lambda: _make_shared_session_mock(next_total=6),
)
@patch("onyx.server.manage.users.enforce_invite_rate_limit")
@patch("onyx.server.manage.users.MULTI_TENANT", True)
@patch("onyx.server.manage.users.is_tenant_on_trial_fn", return_value=True)
@patch("onyx.server.manage.users.get_current_tenant_id", return_value="test_tenant")
@patch("onyx.server.manage.users.get_invited_users", return_value=[])
@patch("onyx.server.manage.users.get_all_users", return_value=[])
@patch("onyx.server.manage.users.enforce_seat_limit")
@patch("onyx.server.manage.users.NUM_FREE_TRIAL_USER_INVITES", 5)
def test_trial_tenant_cannot_exceed_invite_limit(*_mocks: None) -> None:
"""Post-upsert total of 6 exceeds cap=5 — must raise OnyxError."""
"""Trial tenants cannot invite more users than the configured limit."""
emails = [f"user{i}@example.com" for i in range(6)]
with pytest.raises(OnyxError) as exc_info:
with pytest.raises(HTTPException) as exc_info:
bulk_invite_users(emails=emails, current_user=MagicMock())
assert exc_info.value.error_code == OnyxErrorCode.TRIAL_INVITE_LIMIT_EXCEEDED
assert exc_info.value.status_code == 403
assert "invite limit" in exc_info.value.detail.lower()
@patch(
"onyx.server.manage.users.get_session_with_shared_schema",
new_callable=lambda: _make_shared_session_mock(next_total=3),
)
@patch("onyx.server.manage.users.enforce_invite_rate_limit")
@patch("onyx.server.manage.users.get_redis_client")
@patch("onyx.server.manage.users.MULTI_TENANT", True)
@patch("onyx.server.manage.users.DEV_MODE", True)
@patch("onyx.server.manage.users.ENABLE_EMAIL_INVITES", False)
@@ -80,7 +46,7 @@ def test_trial_tenant_cannot_exceed_invite_limit(*_mocks: None) -> None:
return_value=lambda *_args: None,
)
def test_trial_tenant_can_invite_within_limit(*_mocks: None) -> None:
"""Post-upsert total of 3 fits under cap=5 — must succeed."""
"""Trial tenants can invite users when under the limit."""
emails = ["user1@example.com", "user2@example.com", "user3@example.com"]
result = bulk_invite_users(emails=emails, current_user=MagicMock())
@@ -89,41 +55,6 @@ def test_trial_tenant_can_invite_within_limit(*_mocks: None) -> None:
assert result.email_invite_status == EmailInviteStatus.DISABLED
@patch("onyx.server.manage.users.get_session_with_shared_schema")
@patch("onyx.server.manage.users.enforce_invite_rate_limit")
@patch("onyx.server.manage.users.MULTI_TENANT", True)
@patch("onyx.server.manage.users.DEV_MODE", True)
@patch("onyx.server.manage.users.ENABLE_EMAIL_INVITES", False)
@patch("onyx.server.manage.users.is_tenant_on_trial_fn", return_value=False)
@patch("onyx.server.manage.users.get_current_tenant_id", return_value="test_tenant")
@patch("onyx.server.manage.users.get_invited_users", return_value=[])
@patch("onyx.server.manage.users.get_all_users", return_value=[])
@patch("onyx.server.manage.users.write_invited_users", return_value=3)
@patch("onyx.server.manage.users.enforce_seat_limit")
@patch(
"onyx.server.manage.users.fetch_ee_implementation_or_noop",
return_value=lambda *_args: None,
)
def test_paid_tenant_bypasses_invite_counter(
_ee_fetch: MagicMock,
_seat_limit: MagicMock,
_write_invited: MagicMock,
_get_all_users: MagicMock,
_get_invited_users: MagicMock,
_get_tenant_id: MagicMock,
_is_trial: MagicMock,
_rate_limit: MagicMock,
mock_get_session: MagicMock,
) -> None:
"""Paid tenants must not read or write the invite counter at all."""
emails = [f"user{i}@example.com" for i in range(3)]
result = bulk_invite_users(emails=emails, current_user=MagicMock())
mock_get_session.assert_not_called()
assert result.invited_count == 3
# --- email_invite_status tests ---
_COMMON_PATCHES = [
@@ -189,7 +120,68 @@ def test_email_invite_status_send_failed(*_mocks: None) -> None:
assert result.invited_count == 1
# --- trial-only rate limit gating tests (remove-invited-user) ---
# --- trial-only rate limit gating tests ---
@patch("onyx.server.manage.users.enforce_invite_rate_limit")
@patch("onyx.server.manage.users.MULTI_TENANT", True)
@patch("onyx.server.manage.users.DEV_MODE", True)
@patch("onyx.server.manage.users.ENABLE_EMAIL_INVITES", False)
@patch("onyx.server.manage.users.is_tenant_on_trial_fn", return_value=False)
@patch("onyx.server.manage.users.get_current_tenant_id", return_value="test_tenant")
@patch("onyx.server.manage.users.get_invited_users", return_value=[])
@patch("onyx.server.manage.users.get_all_users", return_value=[])
@patch("onyx.server.manage.users.write_invited_users", return_value=3)
@patch("onyx.server.manage.users.enforce_seat_limit")
@patch(
"onyx.server.manage.users.fetch_ee_implementation_or_noop",
return_value=lambda *_args: None,
)
def test_paid_tenant_bypasses_invite_rate_limit(
_ee_fetch: MagicMock,
_seat_limit: MagicMock,
_write_invited: MagicMock,
_get_all_users: MagicMock,
_get_invited_users: MagicMock,
_get_tenant_id: MagicMock,
_is_trial: MagicMock,
mock_rate_limit: MagicMock,
) -> None:
"""Paid tenants must not hit the invite rate limiter at all."""
emails = [f"user{i}@example.com" for i in range(3)]
bulk_invite_users(emails=emails, current_user=MagicMock())
mock_rate_limit.assert_not_called()
@patch("onyx.server.manage.users.enforce_invite_rate_limit")
@patch("onyx.server.manage.users.MULTI_TENANT", True)
@patch("onyx.server.manage.users.DEV_MODE", True)
@patch("onyx.server.manage.users.ENABLE_EMAIL_INVITES", False)
@patch("onyx.server.manage.users.is_tenant_on_trial_fn", return_value=True)
@patch("onyx.server.manage.users.get_current_tenant_id", return_value="test_tenant")
@patch("onyx.server.manage.users.get_invited_users", return_value=[])
@patch("onyx.server.manage.users.get_all_users", return_value=[])
@patch("onyx.server.manage.users.write_invited_users", return_value=3)
@patch("onyx.server.manage.users.enforce_seat_limit")
@patch("onyx.server.manage.users.NUM_FREE_TRIAL_USER_INVITES", 50)
@patch(
"onyx.server.manage.users.fetch_ee_implementation_or_noop",
return_value=lambda *_args: None,
)
def test_trial_tenant_hits_invite_rate_limit(
_ee_fetch: MagicMock,
_seat_limit: MagicMock,
_write_invited: MagicMock,
_get_all_users: MagicMock,
_get_invited_users: MagicMock,
_get_tenant_id: MagicMock,
_is_trial: MagicMock,
mock_rate_limit: MagicMock,
) -> None:
"""Trial tenants must flow through the invite rate limiter."""
emails = [f"user{i}@example.com" for i in range(3)]
bulk_invite_users(emails=emails, current_user=MagicMock())
mock_rate_limit.assert_called_once()
@patch("onyx.server.manage.users.enforce_remove_invited_rate_limit")
@@ -220,7 +212,6 @@ def test_paid_tenant_bypasses_remove_invited_rate_limit(
@patch("onyx.server.manage.users.enforce_remove_invited_rate_limit")
@patch("onyx.server.manage.users.remove_user_from_invited_users", return_value=0)
@patch("onyx.server.manage.users.get_redis_client")
@patch("onyx.server.manage.users.MULTI_TENANT", True)
@patch("onyx.server.manage.users.DEV_MODE", True)
@patch("onyx.server.manage.users.is_tenant_on_trial_fn", return_value=True)
@@ -233,7 +224,6 @@ def test_trial_tenant_hits_remove_invited_rate_limit(
_ee_fetch: MagicMock,
_get_tenant_id: MagicMock,
_is_trial: MagicMock,
_get_redis: MagicMock,
_remove_from_invited: MagicMock,
mock_rate_limit: MagicMock,
) -> None:

View File

@@ -175,7 +175,7 @@ LOG_ONYX_MODEL_INTERACTIONS=False
## Gen AI Settings
# GEN_AI_MAX_TOKENS=
# LLM_SOCKET_READ_TIMEOUT=
LLM_SOCKET_READ_TIMEOUT=120
# MAX_CHUNKS_FED_TO_CHAT=
# DISABLE_LITELLM_STREAMING=
# LITELLM_EXTRA_HEADERS=

View File

@@ -1262,7 +1262,7 @@ configMap:
S3_FILE_STORE_BUCKET_NAME: ""
# Gen AI Settings
GEN_AI_MAX_TOKENS: ""
LLM_SOCKET_READ_TIMEOUT: "60"
LLM_SOCKET_READ_TIMEOUT: "120"
MAX_CHUNKS_FED_TO_CHAT: ""
# Query Options
DOC_TIME_DECAY: ""

View File

@@ -2950,9 +2950,9 @@ checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f"
[[package]]
name = "rustls-webpki"
version = "0.103.12"
version = "0.103.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8279bb85272c9f10811ae6a6c547ff594d6a7f3c6c6b02ee9726d1d0dcfcdd06"
checksum = "df33b2b81ac578cabaf06b89b0631153a3f416b0a886e8a7a1707fb51abbd1ef"
dependencies = [
"aws-lc-rs",
"ring",

View File

@@ -43,6 +43,7 @@ backend = [
"chardet==5.2.0",
"chonkie==1.0.10",
"dask==2026.1.1",
"ddtrace==3.10.0",
"discord.py==2.4.0",
"distributed==2026.1.1",
"fastapi-users==15.0.4",
@@ -74,7 +75,7 @@ backend = [
# backend/onyx/file_processing/extract_file_text.py and what impacts
# updating might have on this behavior.
"markitdown[pdf, docx, pptx, xlsx, xls]==0.1.2",
"mcp[cli]==1.27.0",
"mcp[cli]==1.26.0",
"msal==1.34.0",
"msoffcrypto-tool==5.4.2",
"Office365-REST-Python-Client==2.6.2",
@@ -99,8 +100,8 @@ backend = [
"pytest-mock==3.12.0",
"pytest-playwright==0.7.2",
"python-docx==1.1.2",
"python-dotenv==1.2.2",
"python-multipart==0.0.26",
"python-dotenv==1.1.1",
"python-multipart==0.0.22",
"pywikibot==9.0.0",
"redis==5.0.8",
"requests==2.33.0",

View File

@@ -1,210 +0,0 @@
package cmd
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"github.com/onyx-dot-app/onyx/tools/ods/internal/paths"
"github.com/spf13/cobra"
)
const (
defaultSkillSource = ".claude/skills/onyx-llm-context"
claudeSkillsDir = ".claude/skills"
claudeMDFile = ".claude/CLAUDE.md"
llmContextCloneURL = "https://github.com/onyx-dot-app/onyx-llm-context.git"
)
// NewInstallSkillCommand builds the `ods install-skill` subcommand.
//
// The command wires three flags (--source, --copy, --clone). When run it:
//  1. defaults --source to ~/.claude/skills/onyx-llm-context,
//  2. optionally git-clones onyx-llm-context there when missing (--clone),
//  3. installs enforced skills (via installEnforcedSkills) and manual
//     skills (via installManualSkills).
func NewInstallSkillCommand() *cobra.Command {
	var (
		source    string // path to the onyx-llm-context checkout
		copyMode  bool   // copy manual skills instead of symlinking
		cloneRepo bool   // clone the repo automatically when absent
	)
	cmd := &cobra.Command{
		Use:   "install-skill",
		Short: "Install onyx-llm-context skills for Claude Code",
		Long: `Install skills from onyx-llm-context into Claude Code.
Enforced skills (enforced/) are added as @imports in .claude/CLAUDE.md (project-scoped, git-ignored).
Manual skills (skills/) are symlinked into ~/.claude/skills/ and invoked via /skill-name.
By default, looks for onyx-llm-context at ~/.claude/skills/onyx-llm-context.`,
		Example: ` ods install-skill --clone
 ods install-skill --source /path/to/onyx-llm-context
 ods install-skill --copy`,
		RunE: func(cmd *cobra.Command, args []string) error {
			// Resolve the default source lazily so a $HOME lookup failure
			// only surfaces when --source was actually left unset.
			if source == "" {
				home, err := os.UserHomeDir()
				if err != nil {
					return fmt.Errorf("could not determine home directory: %w", err)
				}
				source = filepath.Join(home, defaultSkillSource)
			}
			// Missing checkout: either clone it (--clone) or fail with a hint.
			if _, err := os.Stat(source); os.IsNotExist(err) {
				if !cloneRepo {
					return fmt.Errorf("onyx-llm-context not found at %s\n Re-run with --clone to fetch it automatically", source)
				}
				_, _ = fmt.Fprintf(cmd.OutOrStdout(), "Cloning %s → %s\n", llmContextCloneURL, source)
				gitCmd := exec.Command("git", "clone", llmContextCloneURL, source)
				gitCmd.Stdout = cmd.OutOrStdout()
				gitCmd.Stderr = cmd.ErrOrStderr()
				if err := gitCmd.Run(); err != nil {
					return fmt.Errorf("git clone failed: %w", err)
				}
			}
			repoRoot, err := paths.GitRoot()
			if err != nil {
				return err
			}
			// Enforced skills go into <repo>/.claude/CLAUDE.md; manual skills
			// into ~/.claude/skills (symlink or copy, per --copy).
			if err := installEnforcedSkills(cmd, source, repoRoot); err != nil {
				return err
			}
			if err := installManualSkills(cmd, source, copyMode); err != nil {
				return err
			}
			return nil
		},
	}
	cmd.Flags().StringVar(&source, "source", "", "Path to onyx-llm-context (default: ~/.claude/skills/onyx-llm-context)")
	cmd.Flags().BoolVar(&copyMode, "copy", false, "Copy files instead of symlinking")
	cmd.Flags().BoolVar(&cloneRepo, "clone", false, fmt.Sprintf("Clone onyx-llm-context from %s if not already present", llmContextCloneURL))
	return cmd
}
// installEnforcedSkills writes @imports for all enforced/ skills into
// .claude/CLAUDE.md at the repo root.
//
// Directories under <source>/enforced that contain a SKILL.md are collected
// as "@<path>" lines. A missing enforced/ directory or an empty collection
// is a no-op; when the destination already holds exactly the generated
// content, the file is left untouched.
func installEnforcedSkills(cmd *cobra.Command, source, repoRoot string) error {
	enforcedDir := filepath.Join(source, "enforced")
	dirEntries, readErr := os.ReadDir(enforcedDir)
	if readErr != nil {
		// No enforced/ directory simply means there is nothing to do.
		if os.IsNotExist(readErr) {
			return nil
		}
		return fmt.Errorf("could not read %s: %w", enforcedDir, readErr)
	}

	imports := make([]string, 0, len(dirEntries))
	for _, de := range dirEntries {
		if !de.IsDir() {
			continue
		}
		// Only directories that actually carry a SKILL.md are imported.
		skillFile := filepath.Join(enforcedDir, de.Name(), "SKILL.md")
		if _, statErr := os.Stat(skillFile); os.IsNotExist(statErr) {
			continue
		}
		imports = append(imports, "@"+skillFile)
	}
	if len(imports) == 0 {
		return nil
	}

	destFile := filepath.Join(repoRoot, claudeMDFile)
	if err := os.MkdirAll(filepath.Join(repoRoot, ".claude"), 0o755); err != nil {
		return fmt.Errorf("could not create .claude directory: %w", err)
	}

	content := strings.Join(imports, "\n") + "\n"
	// Idempotence: skip the write when the file already matches.
	if existing, err := os.ReadFile(destFile); err == nil && string(existing) == content {
		_, _ = fmt.Fprintf(cmd.OutOrStdout(), "Up to date %s\n", destFile)
		return nil
	}
	if err := os.WriteFile(destFile, []byte(content), 0o644); err != nil {
		return fmt.Errorf("could not write %s: %w", destFile, err)
	}
	_, _ = fmt.Fprintf(cmd.OutOrStdout(), "Installed %s\n", destFile)
	return nil
}
// installManualSkills symlinks each skills/ subdirectory into ~/.claude/skills/.
//
// With copyMode the skill directory is copied instead. Otherwise any
// pre-existing destination is cleared (a symlink removed in place, a real
// directory removed recursively) and replaced by a relative symlink; if
// symlinking fails, the function falls back to copying.
func installManualSkills(cmd *cobra.Command, source string, copyMode bool) error {
	skillsDir := filepath.Join(source, "skills")
	entries, err := os.ReadDir(skillsDir)
	if err != nil {
		// No skills/ directory means there is nothing to install.
		if os.IsNotExist(err) {
			return nil
		}
		return fmt.Errorf("could not read %s: %w", skillsDir, err)
	}
	home, err := os.UserHomeDir()
	if err != nil {
		return fmt.Errorf("could not determine home directory: %w", err)
	}
	claudeSkills := filepath.Join(home, claudeSkillsDir)
	if err := os.MkdirAll(claudeSkills, 0o755); err != nil {
		return fmt.Errorf("could not create %s: %w", claudeSkills, err)
	}
	for _, entry := range entries {
		if !entry.IsDir() {
			continue
		}
		srcDir := filepath.Join(skillsDir, entry.Name())
		dstDir := filepath.Join(claudeSkills, entry.Name())
		if copyMode {
			if err := copySkill(srcDir, dstDir); err != nil {
				return fmt.Errorf("could not copy %s: %w", entry.Name(), err)
			}
			_, _ = fmt.Fprintf(cmd.OutOrStdout(), "Copied %s\n", dstDir)
			continue
		}
		// Clear any existing destination before linking: stale symlinks are
		// removed in place, real directories removed recursively.
		if fi, err := os.Lstat(dstDir); err == nil {
			if fi.Mode()&os.ModeSymlink != 0 {
				_ = os.Remove(dstDir)
			} else if err := os.RemoveAll(dstDir); err != nil {
				return fmt.Errorf("could not remove existing %s: %w", dstDir, err)
			}
		}
		// The link target is computed relative to ~/.claude/skills.
		rel, err := filepath.Rel(claudeSkills, srcDir)
		if err != nil {
			return fmt.Errorf("could not compute relative path for %s: %w", entry.Name(), err)
		}
		if err := os.Symlink(rel, dstDir); err != nil {
			// Symlink failure (e.g. unsupported filesystem) is non-fatal:
			// fall back to a full copy.
			if copyErr := copySkill(srcDir, dstDir); copyErr != nil {
				return fmt.Errorf("could not install %s: %w", entry.Name(), copyErr)
			}
			_, _ = fmt.Fprintf(cmd.OutOrStdout(), "Copied %s (symlink failed)\n", dstDir)
			continue
		}
		_, _ = fmt.Fprintf(cmd.OutOrStdout(), "Linked %s -> %s\n", dstDir, rel)
	}
	return nil
}
func copySkill(srcDir, dstDir string) error {
return filepath.WalkDir(srcDir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
rel, _ := filepath.Rel(srcDir, path)
dst := filepath.Join(dstDir, rel)
if d.IsDir() {
return os.MkdirAll(dst, 0o755)
}
content, err := os.ReadFile(path)
if err != nil {
return err
}
return os.WriteFile(dst, content, 0o644)
})
}

View File

@@ -58,7 +58,6 @@ func NewRootCommand() *cobra.Command {
cmd.AddCommand(NewLatestStableTagCommand())
cmd.AddCommand(NewWhoisCommand())
cmd.AddCommand(NewTraceCommand())
cmd.AddCommand(NewInstallSkillCommand())
return cmd
}

320
uv.lock generated
View File

@@ -746,6 +746,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/f5/10/56978295c14794b2c12007b07f3e41ba26acda9257457d7085b0bb3bb90c/brotli-1.2.0-cp314-cp314-win_amd64.whl", hash = "sha256:e7c0af964e0b4e3412a0ebf341ea26ec767fa0b4cf81abb5e897c9338b5ad6a3", size = 375639, upload-time = "2025-11-05T18:38:55.67Z" },
]
[[package]]
name = "bytecode"
version = "0.17.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/98/c4/4818b392104bd426171fc2ce9c79c8edb4019ba6505747626d0f7107766c/bytecode-0.17.0.tar.gz", hash = "sha256:0c37efa5bd158b1b873f530cceea2c645611d55bd2dc2a4758b09f185749b6fd", size = 105863, upload-time = "2025-09-03T19:55:45.703Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ce/80/379e685099841f8501a19fb58b496512ef432331fed38276c3938ab09d8e/bytecode-0.17.0-py3-none-any.whl", hash = "sha256:64fb10cde1db7ef5cc39bd414ecebd54ba3b40e1c4cf8121ca5e72f170916ff8", size = 43045, upload-time = "2025-09-03T19:55:43.879Z" },
]
[[package]]
name = "cachetools"
version = "6.2.2"
@@ -1377,6 +1386,54 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/87/22/f020c047ae1346613db9322638186468238bcfa8849b4668a22b97faad65/dateparser-1.2.2-py3-none-any.whl", hash = "sha256:5a5d7211a09013499867547023a2a0c91d5a27d15dd4dbcea676ea9fe66f2482", size = 315453, upload-time = "2025-06-26T09:29:21.412Z" },
]
[[package]]
name = "ddtrace"
version = "3.10.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "bytecode" },
{ name = "envier" },
{ name = "legacy-cgi", marker = "python_full_version >= '3.13'" },
{ name = "opentelemetry-api" },
{ name = "protobuf" },
{ name = "typing-extensions" },
{ name = "wrapt" },
{ name = "xmltodict" },
]
sdist = { url = "https://files.pythonhosted.org/packages/31/79/f0e5d00c401b9cbb771a6e6fdfc2ceaabcd384a6147fc55b6bebd4d26806/ddtrace-3.10.0.tar.gz", hash = "sha256:82a412a4320404f4d8dc1eea7a871cf9a55392685ac5e9d7fe178dc5c40e8b5c", size = 6731269, upload-time = "2025-07-03T19:56:26.419Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/89/d8/3330339982318ba7d313c40f76bda5db6adc9a046704823e8d5c7f98b06c/ddtrace-3.10.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:4704b7af76c93ae5616bc7ce225c8dc56b1b2cb78c78f64c952392f9ef920a81", size = 6885135, upload-time = "2025-07-03T19:54:29.312Z" },
{ url = "https://files.pythonhosted.org/packages/40/b5/03b5ead62875d19cb0da4bd6dd5d97b74f8320687f8e4dec59058592531c/ddtrace-3.10.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:9ab54d3d5b84d1ac2e570efdeef0dfa15add46a674bb034f8797ae9224280afa", size = 7211906, upload-time = "2025-07-03T19:54:30.826Z" },
{ url = "https://files.pythonhosted.org/packages/fb/22/cb74c4f4b56b8fc3178c62abe21b3c01aacd8caf0d3889124c6cfdc19f4f/ddtrace-3.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c843bef47601cfca1f123d57c26a4e287906ae2fda23c55c42ed0fffdd96e9e7", size = 6239310, upload-time = "2025-07-03T19:54:32.472Z" },
{ url = "https://files.pythonhosted.org/packages/0d/35/1370fd43244aff881d9a8709ffc8d82e665d1a2fc8a052926e140fc2d7c7/ddtrace-3.10.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425062d369b709083836b37c64fa4a982da5365b7604cd714d4eca1fa5eb008d", size = 2966051, upload-time = "2025-07-03T19:54:34.459Z" },
{ url = "https://files.pythonhosted.org/packages/07/4e/3a28b2acc5d145a98666303abc654b6fb89816ee7c880f8205742108389f/ddtrace-3.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e07bccea09006e1202f9e6398ac5641888308bbecc5e09b80190d01f48853086", size = 6573290, upload-time = "2025-07-03T19:54:36.077Z" },
{ url = "https://files.pythonhosted.org/packages/cf/ef/b9b29ffb3dbd6a0123b4c8648479a28b77ac8242b3f7a070016db9c94623/ddtrace-3.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0cfa3861dc0c7e80230b8f26dcf2656e2e613eb179dc947516657de437318026", size = 7188225, upload-time = "2025-07-03T19:54:38.178Z" },
{ url = "https://files.pythonhosted.org/packages/1c/ed/ae1b36a3fcf71afcde867496c4ed34053bcc18ae4eaa94bec4cd5db27a34/ddtrace-3.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1fad7e92a43ea8abcafd3a332610d157ed24d614aba2deab1af026c13b0e4b84", size = 4118801, upload-time = "2025-07-03T19:54:40.472Z" },
{ url = "https://files.pythonhosted.org/packages/f7/d7/67073f3d8b01fe359d77779583c0db8c6a57564aa2118fe37f8d381372f2/ddtrace-3.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9d3a49c5247397fbd37bffb674aefa934b238a4aa36e5cd200917a435a5a606d", size = 7611449, upload-time = "2025-07-03T19:54:42.093Z" },
{ url = "https://files.pythonhosted.org/packages/3c/d3/74f7b204a8f19dcee0a94aebaeafc28e0e0cf25b9093be8c17b20d9e3226/ddtrace-3.10.0-cp311-cp311-win32.whl", hash = "sha256:4be49a04407977e5be7d2323d7a8762ad65bb0a0c242841d292ec447fe9d3c20", size = 5733173, upload-time = "2025-07-03T19:54:43.961Z" },
{ url = "https://files.pythonhosted.org/packages/f5/25/2f48e7395dce16d2f8960150811ad78e4e8e8acdce921253474e8d4ede2d/ddtrace-3.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:8b2d8a10e494a4cdb6bb4a41a384f561413f72a9979744ebe2f89dd2af82fd48", size = 6551512, upload-time = "2025-07-03T19:54:45.739Z" },
{ url = "https://files.pythonhosted.org/packages/d4/d7/157f61f354069f8d96223961ddd5c08f6e190efab5e6d22e01b03cce330d/ddtrace-3.10.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:acc084e43e42a7c780e37bca50c57c212f0bc42d9667647d9b871a0c63916c31", size = 6877273, upload-time = "2025-07-03T19:54:47.698Z" },
{ url = "https://files.pythonhosted.org/packages/6b/44/0949b7bc23656772953788623c4ad93793f4e14535c2cd3ae4a253e6bfec/ddtrace-3.10.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:f4a3d18d3a44594cb8a40e9a369e142fc52f544bc01df7a0a627d41238238cbb", size = 7205992, upload-time = "2025-07-03T19:54:49.538Z" },
{ url = "https://files.pythonhosted.org/packages/0a/34/b1ae07406176586c20c0cb8740de32449685830fa16ffec9b960405bb618/ddtrace-3.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e1e2cb1c8f18bbc58880148f9250c828bd5fd36f996b51ee05d06c29c76ac67", size = 6223804, upload-time = "2025-07-03T19:54:51.693Z" },
{ url = "https://files.pythonhosted.org/packages/dd/c6/425f551f3d751075010afe6aaa253a7f42a8f599950c74a60d61584e4fb0/ddtrace-3.10.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:601bb4a45abc6515335e2e73db0a7361158d8801cc31db78f791b7c536ae8b97", size = 2950106, upload-time = "2025-07-03T19:54:53.519Z" },
{ url = "https://files.pythonhosted.org/packages/1c/68/9c4010e4dc870dfb3c692a03557ff73bca3b9110044878945802a053769c/ddtrace-3.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c400f67a2831f7a52f30046f16a1898e7236798115617bbdf8662cd1ae08bea1", size = 6559763, upload-time = "2025-07-03T19:54:55.353Z" },
{ url = "https://files.pythonhosted.org/packages/38/62/9deb677e3ff92757d0fa8163baec37d5a4bfb3489a66625259ea6e877930/ddtrace-3.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8b10f392a332fecbaf3f5058fb1a932eb6199aee5aa49e2d41a5b35cf4f28c88", size = 7168092, upload-time = "2025-07-03T19:54:57.336Z" },
{ url = "https://files.pythonhosted.org/packages/ba/20/057849336b4af39d903ca89d908d02a24305e1aecea45db28b91ed93f45c/ddtrace-3.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:af2544451bd2fc59a5d5caf2a032366fcd6034da1ff3f20fccca5ade5be254de", size = 4096999, upload-time = "2025-07-03T19:54:59.515Z" },
{ url = "https://files.pythonhosted.org/packages/ff/06/38d3c22c2fe3b1b45792a7e64f96bb38ec459e8f1c8ee1e0b196eff352e5/ddtrace-3.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:22be32fe541212ab912ad21971241b979b96174c225c4950299dd3889822d791", size = 7598395, upload-time = "2025-07-03T19:55:01.549Z" },
{ url = "https://files.pythonhosted.org/packages/f7/2d/33f5523c794d4762938ee4ccf9f876b480632da67b5f6daa80f89a66d7ed/ddtrace-3.10.0-cp312-cp312-win32.whl", hash = "sha256:2fe703848a68c4314200dd4bbc7a6861c7b664700c319b39566516d3eca71688", size = 5728447, upload-time = "2025-07-03T19:55:03.564Z" },
{ url = "https://files.pythonhosted.org/packages/84/a5/f68d322f43b18baafd70892aed2f1e0ece63cfd048d53b4edb865b2467b5/ddtrace-3.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:dc4b2bb321fe1589338e8676f4971bd276b8a2eae62774c03efe7c1e61534f92", size = 6546168, upload-time = "2025-07-03T19:55:05.564Z" },
{ url = "https://files.pythonhosted.org/packages/a4/df/7a2528558e55a1a119d4c19021454f70022349fb6c145eeea65b1dc992eb/ddtrace-3.10.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:2c3db0eb18e476115087cedbbc787d2c8fae9a1353b600ef8d7ec2cf44c9b62f", size = 6868784, upload-time = "2025-07-03T19:55:07.879Z" },
{ url = "https://files.pythonhosted.org/packages/5a/21/88a9ab6924431cc1657fd3e363131c5e9dd6e13f4d669a50ab8c4c31cc66/ddtrace-3.10.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:1a45e0d226dd78066868e71ab4f1b4688aeec4b8a482fb495ccfaafbfa11de87", size = 7198483, upload-time = "2025-07-03T19:55:10.468Z" },
{ url = "https://files.pythonhosted.org/packages/37/4e/6fec0110bb37a306052797512e9a080baef5043d08657402e2ac5331d0b8/ddtrace-3.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:244157a6db87efcf81dfaf319a83dfaf9afd41a5cbcdd3388a86b8537fe75cda", size = 6217912, upload-time = "2025-07-03T19:55:13.143Z" },
{ url = "https://files.pythonhosted.org/packages/9c/e6/ba3afc112099ea4d7a78fbca124f401db569e7dd7a8967b646b4f761602e/ddtrace-3.10.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b4a59a1a2ab35a0efa58be904d8a96b505ec2e67f0db7d2856715bff1189220", size = 2943999, upload-time = "2025-07-03T19:55:15.175Z" },
{ url = "https://files.pythonhosted.org/packages/06/c1/541160c7b89188acc941e2d26a086769b4ee4c437a422b20ec1646602059/ddtrace-3.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167e519c55a12a0bce8964a6d58221432f29f039dbe537092af78b2c0640205f", size = 6552727, upload-time = "2025-07-03T19:55:17.303Z" },
{ url = "https://files.pythonhosted.org/packages/1e/06/b1a9b3eb6a1333dc3c405ac90252091ae2822e2979a175d836ce33e6ad58/ddtrace-3.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffe9fff5364531ecbdae1f54992b8c05abac3a09cde4b0e4a7c6213ea6e1b89c", size = 7162545, upload-time = "2025-07-03T19:55:20.542Z" },
{ url = "https://files.pythonhosted.org/packages/ae/ec/41dbdd788e19325019af392286e94974e7c2f71848e26da755585bc9644e/ddtrace-3.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fe6439351b94cca8d5422a73ffc5db60f198c1b45c7a485bfba157cb53032a6b", size = 4093055, upload-time = "2025-07-03T19:55:23.04Z" },
{ url = "https://files.pythonhosted.org/packages/57/f6/3489c28c1ea009a31ba5137aa1daa62da313b516c41d03adbf13c02c1943/ddtrace-3.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f9a5d25107ce5364d8747e999bf0ecc9a73b51d9f95a9d8a32d728bf1008ba8b", size = 7592729, upload-time = "2025-07-03T19:55:25.16Z" },
{ url = "https://files.pythonhosted.org/packages/6a/31/eee2515cdd52c9a933fe2e1f329ba9700a14a35adcd67417e25472104a97/ddtrace-3.10.0-cp313-cp313-win32.whl", hash = "sha256:8cb6cd3edd2ccacb79ce33b7588591ea138fab9f5299b6c7340f6512658056da", size = 5724955, upload-time = "2025-07-03T19:55:29.36Z" },
{ url = "https://files.pythonhosted.org/packages/73/8f/ef14c53296fdb91cef93dd18b7a9ec0c9965ea975711cd75893639b37e2b/ddtrace-3.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:288b61ad03eae5ac23bcea298158bfdf4547dce2ff24c98b5f24e91d99114d8a", size = 6542769, upload-time = "2025-07-03T19:55:31.556Z" },
]
[[package]]
name = "debugpy"
version = "1.8.17"
@@ -1569,6 +1626,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/5e/4b5aaaabddfacfe36ba7768817bd1f71a7a810a43705e531f3ae4c690767/emoji-2.15.0-py3-none-any.whl", hash = "sha256:205296793d66a89d88af4688fa57fd6496732eb48917a87175a023c8138995eb", size = 608433, upload-time = "2025-09-21T12:13:01.197Z" },
]
[[package]]
name = "envier"
version = "0.6.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/19/e7/4fe4d3f6e21213cea9bcddc36ba60e6ae4003035f9ce8055e6a9f0322ddb/envier-0.6.1.tar.gz", hash = "sha256:3309a01bb3d8850c9e7a31a5166d5a836846db2faecb79b9cb32654dd50ca9f9", size = 10063, upload-time = "2024-10-22T09:56:47.226Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/56/e9/30493b1cc967f7c07869de4b2ab3929151a58e6bb04495015554d24b61db/envier-0.6.1-py3-none-any.whl", hash = "sha256:73609040a76be48bbcb97074d9969666484aa0de706183a6e9ef773156a8a6a9", size = 10638, upload-time = "2024-10-22T09:56:45.968Z" },
]
[[package]]
name = "et-xmlfile"
version = "2.0.0"
@@ -3286,6 +3352,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/88/37/c84d7fec58dec38564574dec8f94bb1db788598fe397f116a9d6a86d3055/lazy_imports-1.0.1-py3-none-any.whl", hash = "sha256:eb5accc33bf9987e5197e79476bbeb960b74a2c16619bdf41281b3240f730846", size = 18896, upload-time = "2025-08-09T07:15:53.7Z" },
]
[[package]]
name = "legacy-cgi"
version = "2.6.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/f4/9c/91c7d2c5ebbdf0a1a510bfa0ddeaa2fbb5b78677df5ac0a0aa51cf7125b0/legacy_cgi-2.6.4.tar.gz", hash = "sha256:abb9dfc7835772f7c9317977c63253fd22a7484b5c9bbcdca60a29dcce97c577", size = 24603, upload-time = "2025-10-27T05:20:05.395Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8c/7e/e7394eeb49a41cc514b3eb49020223666cbf40d86f5721c2f07871e6d84a/legacy_cgi-2.6.4-py3-none-any.whl", hash = "sha256:7e235ce58bf1e25d1fc9b2d299015e4e2cd37305eccafec1e6bac3fc04b878cd", size = 20035, upload-time = "2025-10-27T05:20:04.289Z" },
]
[[package]]
name = "litellm"
version = "1.81.6"
@@ -3697,7 +3772,7 @@ wheels = [
[[package]]
name = "mcp"
version = "1.27.0"
version = "1.26.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "anyio" },
@@ -3715,9 +3790,9 @@ dependencies = [
{ name = "typing-inspection" },
{ name = "uvicorn", marker = "sys_platform != 'emscripten'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/8b/eb/c0cfc62075dc6e1ec1c64d352ae09ac051d9334311ed226f1f425312848a/mcp-1.27.0.tar.gz", hash = "sha256:d3dc35a7eec0d458c1da4976a48f982097ddaab87e278c5511d5a4a56e852b83", size = 607509, upload-time = "2026-04-02T14:48:08.88Z" }
sdist = { url = "https://files.pythonhosted.org/packages/fc/6d/62e76bbb8144d6ed86e202b5edd8a4cb631e7c8130f3f4893c3f90262b10/mcp-1.26.0.tar.gz", hash = "sha256:db6e2ef491eecc1a0d93711a76f28dec2e05999f93afd48795da1c1137142c66", size = 608005, upload-time = "2026-01-24T19:40:32.468Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9c/46/f6b4ad632c67ef35209a66127e4bddc95759649dd595f71f13fba11bdf9a/mcp-1.27.0-py3-none-any.whl", hash = "sha256:5ce1fa81614958e267b21fb2aa34e0aea8e2c6ede60d52aba45fd47246b4d741", size = 215967, upload-time = "2026-04-02T14:48:07.24Z" },
{ url = "https://files.pythonhosted.org/packages/fd/d9/eaa1f80170d2b7c5ba23f3b59f766f3a0bb41155fbc32a69adfa1adaaef9/mcp-1.26.0-py3-none-any.whl", hash = "sha256:904a21c33c25aa98ddbeb47273033c435e595bbacfdb177f4bd87f6dceebe1ca", size = 233615, upload-time = "2026-01-24T19:40:30.652Z" },
]
[package.optional-dependencies]
@@ -4381,6 +4456,7 @@ backend = [
{ name = "chardet" },
{ name = "chonkie" },
{ name = "dask" },
{ name = "ddtrace" },
{ name = "discord-py" },
{ name = "distributed" },
{ name = "dropbox" },
@@ -4553,6 +4629,7 @@ backend = [
{ name = "chardet", specifier = "==5.2.0" },
{ name = "chonkie", specifier = "==1.0.10" },
{ name = "dask", specifier = "==2026.1.1" },
{ name = "ddtrace", specifier = "==3.10.0" },
{ name = "discord-py", specifier = "==2.4.0" },
{ name = "distributed", specifier = "==2026.1.1" },
{ name = "dropbox", specifier = "==12.0.2" },
@@ -4580,7 +4657,7 @@ backend = [
{ name = "lxml", specifier = "==5.3.0" },
{ name = "mako", specifier = "==1.3.11" },
{ name = "markitdown", extras = ["pdf", "docx", "pptx", "xlsx", "xls"], specifier = "==0.1.2" },
{ name = "mcp", extras = ["cli"], specifier = "==1.27.0" },
{ name = "mcp", extras = ["cli"], specifier = "==1.26.0" },
{ name = "mistune", specifier = "==3.2.0" },
{ name = "msal", specifier = "==1.34.0" },
{ name = "msoffcrypto-tool", specifier = "==5.4.2" },
@@ -4606,9 +4683,9 @@ backend = [
{ name = "pytest-playwright", specifier = "==0.7.2" },
{ name = "python-dateutil", specifier = "==2.8.2" },
{ name = "python-docx", specifier = "==1.1.2" },
{ name = "python-dotenv", specifier = "==1.2.2" },
{ name = "python-dotenv", specifier = "==1.1.1" },
{ name = "python-gitlab", specifier = "==5.6.0" },
{ name = "python-multipart", specifier = "==0.0.26" },
{ name = "python-multipart", specifier = "==0.0.22" },
{ name = "python-pptx", specifier = "==0.6.23" },
{ name = "python3-saml", specifier = "==1.15.0" },
{ name = "pywikibot", specifier = "==9.0.0" },
@@ -6024,11 +6101,11 @@ wheels = [
[[package]]
name = "python-dotenv"
version = "1.2.2"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/82/ed/0301aeeac3e5353ef3d94b6ec08bbcabd04a72018415dcb29e588514bba8/python_dotenv-1.2.2.tar.gz", hash = "sha256:2c371a91fbd7ba082c2c1dc1f8bf89ca22564a087c2c287cd9b662adde799cf3", size = 50135, upload-time = "2026-03-01T16:00:26.196Z" }
sdist = { url = "https://files.pythonhosted.org/packages/f6/b0/4bc07ccd3572a2f9df7e6782f52b0c6c90dcbb803ac4a167702d7d0dfe1e/python_dotenv-1.1.1.tar.gz", hash = "sha256:a8a6399716257f45be6a007360200409fce5cda2661e3dec71d23dc15f6189ab", size = 41978, upload-time = "2025-06-24T04:21:07.341Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0b/d7/1959b9648791274998a9c3526f6d0ec8fd2233e4d4acce81bbae76b44b2a/python_dotenv-1.2.2-py3-none-any.whl", hash = "sha256:1d8214789a24de455a8b8bd8ae6fe3c6b69a5e3d64aa8a8e5d68e694bbcb285a", size = 22101, upload-time = "2026-03-01T16:00:25.09Z" },
{ url = "https://files.pythonhosted.org/packages/5f/ed/539768cf28c661b5b068d66d96a2f155c4971a5d55684a514c1a0e0dec2f/python_dotenv-1.1.1-py3-none-any.whl", hash = "sha256:31f23644fe2602f88ff55e1f5c79ba497e01224ee7737937930c448e4d0e24dc", size = 20556, upload-time = "2025-06-24T04:21:06.073Z" },
]
[[package]]
@@ -6073,11 +6150,11 @@ wheels = [
[[package]]
name = "python-multipart"
version = "0.0.26"
version = "0.0.22"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/88/71/b145a380824a960ebd60e1014256dbb7d2253f2316ff2d73dfd8928ec2c3/python_multipart-0.0.26.tar.gz", hash = "sha256:08fadc45918cd615e26846437f50c5d6d23304da32c341f289a617127b081f17", size = 43501, upload-time = "2026-04-10T14:09:59.473Z" }
sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/22/f1925cdda983ab66fc8ec6ec8014b959262747e58bdca26a4e3d1da29d56/python_multipart-0.0.26-py3-none-any.whl", hash = "sha256:c0b169f8c4484c13b0dcf2ef0ec3a4adb255c4b7d18d8e420477d2b1dd03f185", size = 28847, upload-time = "2026-04-10T14:09:58.131Z" },
{ url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" },
]
[[package]]
@@ -8185,6 +8262,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e3/3f/75e69fa9d2084524ca4e796442d8058a78d78c64c1e8229d552c031a23b4/xmlsec-1.3.14-cp312-cp312-win_amd64.whl", hash = "sha256:d0762f4232bce2c7f6c0af329db8b821b4460bbe123a2528fb5677d03db7a4b5", size = 2441942, upload-time = "2024-04-17T19:34:10.416Z" },
]
[[package]]
name = "xmltodict"
version = "1.0.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6a/aa/917ceeed4dbb80d2f04dbd0c784b7ee7bba8ae5a54837ef0e5e062cd3cfb/xmltodict-1.0.2.tar.gz", hash = "sha256:54306780b7c2175a3967cad1db92f218207e5bc1aba697d887807c0fb68b7649", size = 25725, upload-time = "2025-09-17T21:59:26.459Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c0/20/69a0e6058bc5ea74892d089d64dfc3a62ba78917ec5e2cfa70f7c92ba3a5/xmltodict-1.0.2-py3-none-any.whl", hash = "sha256:62d0fddb0dcbc9f642745d8bbf4d81fd17d6dfaec5a15b5c1876300aad92af0d", size = 13893, upload-time = "2025-09-17T21:59:24.859Z" },
]
[[package]]
name = "xxhash"
version = "3.6.0"
@@ -8290,124 +8376,112 @@ wheels = [
[[package]]
name = "yarl"
version = "1.23.0"
version = "1.22.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "multidict" },
{ name = "propcache" },
]
sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" }
sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169, upload-time = "2025-10-06T14:12:55.963Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/aa/60da938b8f0997ba3a911263c40d82b6f645a67902a490b46f3355e10fae/yarl-1.23.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b35d13d549077713e4414f927cdc388d62e543987c572baee613bf82f11a4b99", size = 123641, upload-time = "2026-03-01T22:04:42.841Z" },
{ url = "https://files.pythonhosted.org/packages/24/84/e237607faf4e099dbb8a4f511cfd5efcb5f75918baad200ff7380635631b/yarl-1.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cbb0fef01f0c6b38cb0f39b1f78fc90b807e0e3c86a7ff3ce74ad77ce5c7880c", size = 86248, upload-time = "2026-03-01T22:04:44.757Z" },
{ url = "https://files.pythonhosted.org/packages/b2/0d/71ceabc14c146ba8ee3804ca7b3d42b1664c8440439de5214d366fec7d3a/yarl-1.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dc52310451fc7c629e13c4e061cbe2dd01684d91f2f8ee2821b083c58bd72432", size = 85988, upload-time = "2026-03-01T22:04:46.365Z" },
{ url = "https://files.pythonhosted.org/packages/8c/6c/4a90d59c572e46b270ca132aca66954f1175abd691f74c1ef4c6711828e2/yarl-1.23.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b2c6b50c7b0464165472b56b42d4c76a7b864597007d9c085e8b63e185cf4a7a", size = 100566, upload-time = "2026-03-01T22:04:47.639Z" },
{ url = "https://files.pythonhosted.org/packages/49/fb/c438fb5108047e629f6282a371e6e91cf3f97ee087c4fb748a1f32ceef55/yarl-1.23.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aafe5dcfda86c8af00386d7781d4c2181b5011b7be3f2add5e99899ea925df05", size = 92079, upload-time = "2026-03-01T22:04:48.925Z" },
{ url = "https://files.pythonhosted.org/packages/d9/13/d269aa1aed3e4f50a5a103f96327210cc5fa5dd2d50882778f13c7a14606/yarl-1.23.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9ee33b875f0b390564c1fb7bc528abf18c8ee6073b201c6ae8524aca778e2d83", size = 108741, upload-time = "2026-03-01T22:04:50.838Z" },
{ url = "https://files.pythonhosted.org/packages/85/fb/115b16f22c37ea4437d323e472945bea97301c8ec6089868fa560abab590/yarl-1.23.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4c41e021bc6d7affb3364dc1e1e5fa9582b470f283748784bd6ea0558f87f42c", size = 108099, upload-time = "2026-03-01T22:04:52.499Z" },
{ url = "https://files.pythonhosted.org/packages/9a/64/c53487d9f4968045b8afa51aed7ca44f58b2589e772f32745f3744476c82/yarl-1.23.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:99c8a9ed30f4164bc4c14b37a90208836cbf50d4ce2a57c71d0f52c7fb4f7598", size = 102678, upload-time = "2026-03-01T22:04:55.176Z" },
{ url = "https://files.pythonhosted.org/packages/85/59/cd98e556fbb2bf8fab29c1a722f67ad45c5f3447cac798ab85620d1e70af/yarl-1.23.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f2af5c81a1f124609d5f33507082fc3f739959d4719b56877ab1ee7e7b3d602b", size = 100803, upload-time = "2026-03-01T22:04:56.588Z" },
{ url = "https://files.pythonhosted.org/packages/9e/c0/b39770b56d4a9f0bb5f77e2f1763cd2d75cc2f6c0131e3b4c360348fcd65/yarl-1.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6b41389c19b07c760c7e427a3462e8ab83c4bb087d127f0e854c706ce1b9215c", size = 100163, upload-time = "2026-03-01T22:04:58.492Z" },
{ url = "https://files.pythonhosted.org/packages/e7/64/6980f99ab00e1f0ff67cb84766c93d595b067eed07439cfccfc8fb28c1a6/yarl-1.23.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:1dc702e42d0684f42d6519c8d581e49c96cefaaab16691f03566d30658ee8788", size = 93859, upload-time = "2026-03-01T22:05:00.268Z" },
{ url = "https://files.pythonhosted.org/packages/38/69/912e6c5e146793e5d4b5fe39ff5b00f4d22463dfd5a162bec565ac757673/yarl-1.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0e40111274f340d32ebcc0a5668d54d2b552a6cca84c9475859d364b380e3222", size = 108202, upload-time = "2026-03-01T22:05:02.273Z" },
{ url = "https://files.pythonhosted.org/packages/59/97/35ca6767524687ad64e5f5c31ad54bc76d585585a9fcb40f649e7e82ffed/yarl-1.23.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:4764a6a7588561a9aef92f65bda2c4fb58fe7c675c0883862e6df97559de0bfb", size = 99866, upload-time = "2026-03-01T22:05:03.597Z" },
{ url = "https://files.pythonhosted.org/packages/d3/1c/1a3387ee6d73589f6f2a220ae06f2984f6c20b40c734989b0a44f5987308/yarl-1.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:03214408cfa590df47728b84c679ae4ef00be2428e11630277be0727eba2d7cc", size = 107852, upload-time = "2026-03-01T22:05:04.986Z" },
{ url = "https://files.pythonhosted.org/packages/a4/b8/35c0750fcd5a3f781058bfd954515dd4b1eab45e218cbb85cf11132215f1/yarl-1.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:170e26584b060879e29fac213e4228ef063f39128723807a312e5c7fec28eff2", size = 102919, upload-time = "2026-03-01T22:05:06.397Z" },
{ url = "https://files.pythonhosted.org/packages/e5/1c/9a1979aec4a81896d597bcb2177827f2dbee3f5b7cc48b2d0dadb644b41d/yarl-1.23.0-cp311-cp311-win32.whl", hash = "sha256:51430653db848d258336cfa0244427b17d12db63d42603a55f0d4546f50f25b5", size = 82602, upload-time = "2026-03-01T22:05:08.444Z" },
{ url = "https://files.pythonhosted.org/packages/93/22/b85eca6fa2ad9491af48c973e4c8cf6b103a73dbb271fe3346949449fca0/yarl-1.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf49a3ae946a87083ef3a34c8f677ae4243f5b824bfc4c69672e72b3d6719d46", size = 87461, upload-time = "2026-03-01T22:05:10.145Z" },
{ url = "https://files.pythonhosted.org/packages/93/95/07e3553fe6f113e6864a20bdc53a78113cda3b9ced8784ee52a52c9f80d8/yarl-1.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:b39cb32a6582750b6cc77bfb3c49c0f8760dc18dc96ec9fb55fbb0f04e08b928", size = 82336, upload-time = "2026-03-01T22:05:11.554Z" },
{ url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" },
{ url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" },
{ url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" },
{ url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" },
{ url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" },
{ url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" },
{ url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" },
{ url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" },
{ url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" },
{ url = "https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" },
{ url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" },
{ url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" },
{ url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" },
{ url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" },
{ url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" },
{ url = "https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" },
{ url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" },
{ url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" },
{ url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" },
{ url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" },
{ url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" },
{ url = "https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" },
{ url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" },
{ url = "https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" },
{ url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" },
{ url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" },
{ url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" },
{ url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" },
{ url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = "2026-03-01T22:05:58.141Z" },
{ url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" },
{ url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" },
{ url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" },
{ url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" },
{ url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" },
{ url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" },
{ url = "https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" },
{ url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" },
{ url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" },
{ url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" },
{ url = "https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" },
{ url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" },
{ url = "https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" },
{ url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" },
{ url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" },
{ url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" },
{ url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" },
{ url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = "2026-03-01T22:06:29.048Z" },
{ url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" },
{ url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" },
{ url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" },
{ url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" },
{ url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" },
{ url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" },
{ url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" },
{ url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" },
{ url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" },
{ url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" },
{ url = "https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" },
{ url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" },
{ url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" },
{ url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" },
{ url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" },
{ url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" },
{ url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" },
{ url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" },
{ url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" },
{ url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" },
{ url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" },
{ url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" },
{ url = "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" },
{ url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" },
{ url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" },
{ url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" },
{ url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" },
{ url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" },
{ url = "https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" },
{ url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" },
{ url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" },
{ url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" },
{ url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" },
{ url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" },
{ url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" },
{ url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" },
{ url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" },
{ url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" },
{ url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" },
{ url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" },
{ url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" },
{ url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" },
{ url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" },
{ url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" },
{ url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" },
{ url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" },
{ url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" },
{ url = "https://files.pythonhosted.org/packages/68/fe/2c1f674960c376e29cb0bec1249b117d11738db92a6ccc4a530b972648db/yarl-1.22.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ea66b1c11c9150f1372f69afb6b8116f2dd7286f38e14ea71a44eee9ec51b9d", size = 368406, upload-time = "2025-10-06T14:09:21.402Z" },
{ url = "https://files.pythonhosted.org/packages/95/26/812a540e1c3c6418fec60e9bbd38e871eaba9545e94fa5eff8f4a8e28e1e/yarl-1.22.0-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3e2daa88dc91870215961e96a039ec73e4937da13cf77ce17f9cad0c18df3503", size = 336581, upload-time = "2025-10-06T14:09:22.98Z" },
{ url = "https://files.pythonhosted.org/packages/0b/f5/5777b19e26fdf98563985e481f8be3d8a39f8734147a6ebf459d0dab5a6b/yarl-1.22.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba440ae430c00eee41509353628600212112cd5018d5def7e9b05ea7ac34eb65", size = 388924, upload-time = "2025-10-06T14:09:24.655Z" },
{ url = "https://files.pythonhosted.org/packages/86/08/24bd2477bd59c0bbd994fe1d93b126e0472e4e3df5a96a277b0a55309e89/yarl-1.22.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e6438cc8f23a9c1478633d216b16104a586b9761db62bfacb6425bac0a36679e", size = 392890, upload-time = "2025-10-06T14:09:26.617Z" },
{ url = "https://files.pythonhosted.org/packages/46/00/71b90ed48e895667ecfb1eaab27c1523ee2fa217433ed77a73b13205ca4b/yarl-1.22.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c52a6e78aef5cf47a98ef8e934755abf53953379b7d53e68b15ff4420e6683d", size = 365819, upload-time = "2025-10-06T14:09:28.544Z" },
{ url = "https://files.pythonhosted.org/packages/30/2d/f715501cae832651d3282387c6a9236cd26bd00d0ff1e404b3dc52447884/yarl-1.22.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3b06bcadaac49c70f4c88af4ffcfbe3dc155aab3163e75777818092478bcbbe7", size = 363601, upload-time = "2025-10-06T14:09:30.568Z" },
{ url = "https://files.pythonhosted.org/packages/f8/f9/a678c992d78e394e7126ee0b0e4e71bd2775e4334d00a9278c06a6cce96a/yarl-1.22.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:6944b2dc72c4d7f7052683487e3677456050ff77fcf5e6204e98caf785ad1967", size = 358072, upload-time = "2025-10-06T14:09:32.528Z" },
{ url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" },
{ url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" },
{ url = "https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" },
{ url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" },
{ url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" },
{ url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" },
{ url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" },
{ url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" },
{ url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" },
{ url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940, upload-time = "2025-10-06T14:09:50.089Z" },
{ url = "https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825, upload-time = "2025-10-06T14:09:52.142Z" },
{ url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705, upload-time = "2025-10-06T14:09:54.128Z" },
{ url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518, upload-time = "2025-10-06T14:09:55.762Z" },
{ url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267, upload-time = "2025-10-06T14:09:57.958Z" },
{ url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797, upload-time = "2025-10-06T14:09:59.527Z" },
{ url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535, upload-time = "2025-10-06T14:10:01.139Z" },
{ url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" },
{ url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" },
{ url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" },
{ url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" },
{ url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" },
{ url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" },
{ url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" },
{ url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" },
{ url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" },
{ url = "https://files.pythonhosted.org/packages/61/3a/caf4e25036db0f2da4ca22a353dfeb3c9d3c95d2761ebe9b14df8fc16eb0/yarl-1.22.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b4f15793aa49793ec8d1c708ab7f9eded1aa72edc5174cae703651555ed1b601", size = 373243, upload-time = "2025-10-06T14:10:19.44Z" },
{ url = "https://files.pythonhosted.org/packages/6e/9e/51a77ac7516e8e7803b06e01f74e78649c24ee1021eca3d6a739cb6ea49c/yarl-1.22.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5542339dcf2747135c5c85f68680353d5cb9ffd741c0f2e8d832d054d41f35a", size = 342361, upload-time = "2025-10-06T14:10:21.124Z" },
{ url = "https://files.pythonhosted.org/packages/d4/f8/33b92454789dde8407f156c00303e9a891f1f51a0330b0fad7c909f87692/yarl-1.22.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5c401e05ad47a75869c3ab3e35137f8468b846770587e70d71e11de797d113df", size = 387036, upload-time = "2025-10-06T14:10:22.902Z" },
{ url = "https://files.pythonhosted.org/packages/d9/9a/c5db84ea024f76838220280f732970aa4ee154015d7f5c1bfb60a267af6f/yarl-1.22.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:243dda95d901c733f5b59214d28b0120893d91777cb8aa043e6ef059d3cddfe2", size = 397671, upload-time = "2025-10-06T14:10:24.523Z" },
{ url = "https://files.pythonhosted.org/packages/11/c9/cd8538dc2e7727095e0c1d867bad1e40c98f37763e6d995c1939f5fdc7b1/yarl-1.22.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bec03d0d388060058f5d291a813f21c011041938a441c593374da6077fe21b1b", size = 377059, upload-time = "2025-10-06T14:10:26.406Z" },
{ url = "https://files.pythonhosted.org/packages/a1/b9/ab437b261702ced75122ed78a876a6dec0a1b0f5e17a4ac7a9a2482d8abe/yarl-1.22.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b0748275abb8c1e1e09301ee3cf90c8a99678a4e92e4373705f2a2570d581273", size = 365356, upload-time = "2025-10-06T14:10:28.461Z" },
{ url = "https://files.pythonhosted.org/packages/b2/9d/8e1ae6d1d008a9567877b08f0ce4077a29974c04c062dabdb923ed98e6fe/yarl-1.22.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:47fdb18187e2a4e18fda2c25c05d8251a9e4a521edaed757fef033e7d8498d9a", size = 361331, upload-time = "2025-10-06T14:10:30.541Z" },
{ url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" },
{ url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" },
{ url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" },
{ url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" },
{ url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" },
{ url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" },
{ url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" },
{ url = "https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" },
{ url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" },
{ url = "https://files.pythonhosted.org/packages/d1/c5/7dffad5e4f2265b29c9d7ec869c369e4223166e4f9206fc2243ee9eea727/yarl-1.22.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4398557cbf484207df000309235979c79c4356518fd5c99158c7d38203c4da4f", size = 361967, upload-time = "2025-10-06T14:10:49.997Z" },
{ url = "https://files.pythonhosted.org/packages/50/b2/375b933c93a54bff7fc041e1a6ad2c0f6f733ffb0c6e642ce56ee3b39970/yarl-1.22.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2ca6fd72a8cd803be290d42f2dec5cdcd5299eeb93c2d929bf060ad9efaf5de0", size = 323949, upload-time = "2025-10-06T14:10:52.004Z" },
{ url = "https://files.pythonhosted.org/packages/66/50/bfc2a29a1d78644c5a7220ce2f304f38248dc94124a326794e677634b6cf/yarl-1.22.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ca1f59c4e1ab6e72f0a23c13fca5430f889634166be85dbf1013683e49e3278e", size = 361818, upload-time = "2025-10-06T14:10:54.078Z" },
{ url = "https://files.pythonhosted.org/packages/46/96/f3941a46af7d5d0f0498f86d71275696800ddcdd20426298e572b19b91ff/yarl-1.22.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:6c5010a52015e7c70f86eb967db0f37f3c8bd503a695a49f8d45700144667708", size = 372626, upload-time = "2025-10-06T14:10:55.767Z" },
{ url = "https://files.pythonhosted.org/packages/c1/42/8b27c83bb875cd89448e42cd627e0fb971fa1675c9ec546393d18826cb50/yarl-1.22.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d7672ecf7557476642c88497c2f8d8542f8e36596e928e9bcba0e42e1e7d71f", size = 341129, upload-time = "2025-10-06T14:10:57.985Z" },
{ url = "https://files.pythonhosted.org/packages/49/36/99ca3122201b382a3cf7cc937b95235b0ac944f7e9f2d5331d50821ed352/yarl-1.22.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3b7c88eeef021579d600e50363e0b6ee4f7f6f728cd3486b9d0f3ee7b946398d", size = 346776, upload-time = "2025-10-06T14:10:59.633Z" },
{ url = "https://files.pythonhosted.org/packages/85/b4/47328bf996acd01a4c16ef9dcd2f59c969f495073616586f78cd5f2efb99/yarl-1.22.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f4afb5c34f2c6fecdcc182dfcfc6af6cccf1aa923eed4d6a12e9d96904e1a0d8", size = 334879, upload-time = "2025-10-06T14:11:01.454Z" },
{ url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" },
{ url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" },
{ url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" },
{ url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" },
{ url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" },
{ url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" },
{ url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" },
{ url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" },
{ url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" },
{ url = "https://files.pythonhosted.org/packages/a7/bc/315a56aca762d44a6aaaf7ad253f04d996cb6b27bad34410f82d76ea8038/yarl-1.22.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3d7a87a78d46a2e3d5b72587ac14b4c16952dd0887dbb051451eceac774411e", size = 372080, upload-time = "2025-10-06T14:11:20.996Z" },
{ url = "https://files.pythonhosted.org/packages/3f/3f/08e9b826ec2e099ea6e7c69a61272f4f6da62cb5b1b63590bb80ca2e4a40/yarl-1.22.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:852863707010316c973162e703bddabec35e8757e67fcb8ad58829de1ebc8590", size = 338696, upload-time = "2025-10-06T14:11:22.847Z" },
{ url = "https://files.pythonhosted.org/packages/e3/9f/90360108e3b32bd76789088e99538febfea24a102380ae73827f62073543/yarl-1.22.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:131a085a53bfe839a477c0845acf21efc77457ba2bcf5899618136d64f3303a2", size = 387121, upload-time = "2025-10-06T14:11:24.889Z" },
{ url = "https://files.pythonhosted.org/packages/98/92/ab8d4657bd5b46a38094cfaea498f18bb70ce6b63508fd7e909bd1f93066/yarl-1.22.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:078a8aefd263f4d4f923a9677b942b445a2be970ca24548a8102689a3a8ab8da", size = 394080, upload-time = "2025-10-06T14:11:27.307Z" },
{ url = "https://files.pythonhosted.org/packages/f5/e7/d8c5a7752fef68205296201f8ec2bf718f5c805a7a7e9880576c67600658/yarl-1.22.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bca03b91c323036913993ff5c738d0842fc9c60c4648e5c8d98331526df89784", size = 372661, upload-time = "2025-10-06T14:11:29.387Z" },
{ url = "https://files.pythonhosted.org/packages/b6/2e/f4d26183c8db0bb82d491b072f3127fb8c381a6206a3a56332714b79b751/yarl-1.22.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:68986a61557d37bb90d3051a45b91fa3d5c516d177dfc6dd6f2f436a07ff2b6b", size = 364645, upload-time = "2025-10-06T14:11:31.423Z" },
{ url = "https://files.pythonhosted.org/packages/80/7c/428e5812e6b87cd00ee8e898328a62c95825bf37c7fa87f0b6bb2ad31304/yarl-1.22.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:4792b262d585ff0dff6bcb787f8492e40698443ec982a3568c2096433660c694", size = 355361, upload-time = "2025-10-06T14:11:33.055Z" },
{ url = "https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" },
{ url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" },
{ url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" },
{ url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" },
{ url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" },
{ url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" },
{ url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" },
{ url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" },
{ url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" },
{ url = "https://files.pythonhosted.org/packages/b0/ab/5b13d3e157505c43c3b43b5a776cbf7b24a02bc4cccc40314771197e3508/yarl-1.22.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1e7ce67c34138a058fd092f67d07a72b8e31ff0c9236e751957465a24b28910c", size = 361820, upload-time = "2025-10-06T14:11:52.549Z" },
{ url = "https://files.pythonhosted.org/packages/fb/76/242a5ef4677615cf95330cfc1b4610e78184400699bdda0acb897ef5e49a/yarl-1.22.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d77e1b2c6d04711478cb1c4ab90db07f1609ccf06a287d5607fcd90dc9863acf", size = 323203, upload-time = "2025-10-06T14:11:54.225Z" },
{ url = "https://files.pythonhosted.org/packages/8c/96/475509110d3f0153b43d06164cf4195c64d16999e0c7e2d8a099adcd6907/yarl-1.22.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4647674b6150d2cae088fc07de2738a84b8bcedebef29802cf0b0a82ab6face", size = 363173, upload-time = "2025-10-06T14:11:56.069Z" },
{ url = "https://files.pythonhosted.org/packages/c9/66/59db471aecfbd559a1fd48aedd954435558cd98c7d0da8b03cc6c140a32c/yarl-1.22.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:efb07073be061c8f79d03d04139a80ba33cbd390ca8f0297aae9cce6411e4c6b", size = 373562, upload-time = "2025-10-06T14:11:58.783Z" },
{ url = "https://files.pythonhosted.org/packages/03/1f/c5d94abc91557384719da10ff166b916107c1b45e4d0423a88457071dd88/yarl-1.22.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e51ac5435758ba97ad69617e13233da53908beccc6cfcd6c34bbed8dcbede486", size = 339828, upload-time = "2025-10-06T14:12:00.686Z" },
{ url = "https://files.pythonhosted.org/packages/5f/97/aa6a143d3afba17b6465733681c70cf175af89f76ec8d9286e08437a7454/yarl-1.22.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:33e32a0dd0c8205efa8e83d04fc9f19313772b78522d1bdc7d9aed706bfd6138", size = 347551, upload-time = "2025-10-06T14:12:02.628Z" },
{ url = "https://files.pythonhosted.org/packages/43/3c/45a2b6d80195959239a7b2a8810506d4eea5487dce61c2a3393e7fc3c52e/yarl-1.22.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:bf4a21e58b9cde0e401e683ebd00f6ed30a06d14e93f7c8fd059f8b6e8f87b6a", size = 334512, upload-time = "2025-10-06T14:12:04.871Z" },
{ url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" },
{ url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" },
{ url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" },
{ url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" },
{ url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" },
{ url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" },
{ url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" },
]
[[package]]

View File

@@ -81,16 +81,6 @@
@apply border;
}
/* When expanded, the header's bottom border acts as a separator between
header and body. It should always remain the default border color,
regardless of any status borderColor applied to the card. */
.opal-card-expandable-header:has(
+ .opal-card-expandable-wrapper[data-expanded="true"]
) {
@apply border-b-border-01;
}
/* ── Content wrapper: grid 0fr↔1fr animation ─────────────────────────── */
.opal-card-expandable-wrapper {

View File

@@ -14,8 +14,6 @@ and border colors.
| `icon` | `IconFunctionComponent` | per variant | Override the default variant icon |
| `title` | `string \| RichStr` | — | Main title text |
| `description` | `string \| RichStr` | — | Description below the title |
| `padding` | `"sm" \| "xs"` | `"sm"` | Padding preset for the outer card |
| `headerPadding` | `PaddingVariants` | `"fit"` | Padding around the header Content area. `"fit"` → no padding; `"sm"``p-2`. |
| `bottomChildren` | `ReactNode` | — | Content below a divider, under the main content |
| `rightChildren` | `ReactNode` | — | Content on the right side. Mutually exclusive with `onClose`. |
| `onClose` | `() => void` | — | Close button callback. When omitted, no close button is rendered. |

View File

@@ -38,9 +38,6 @@ interface MessageCardBaseProps {
/** Padding preset. @default "sm" */
padding?: Extract<PaddingVariants, "sm" | "xs">;
/** Padding around the header Content area. @default "fit" */
headerPadding?: PaddingVariants;
/**
* Content rendered below a divider, under the main content area.
* When provided, a `Divider` is inserted between the `ContentAction` and this node.
@@ -125,7 +122,6 @@ function MessageCard({
title,
description,
padding = "sm",
headerPadding = "fit",
bottomChildren,
rightChildren,
onClose,
@@ -150,22 +146,19 @@ function MessageCard({
<div
className={cn("opal-message-card", paddingVariants[padding])}
data-variant={variant}
data-opal-status-border={variant}
ref={ref}
>
<div className={paddingVariants[headerPadding]}>
<ContentAction
icon={(props) => (
<Icon {...props} className={cn(props.className, iconClass)} />
)}
title={title}
description={description}
sizePreset="main-ui"
variant="section"
padding="fit"
rightChildren={right}
/>
</div>
<ContentAction
icon={(props) => (
<Icon {...props} className={cn(props.className, iconClass)} />
)}
title={title}
description={description}
sizePreset="main-ui"
variant="section"
padding="md"
rightChildren={right}
/>
{bottomChildren && (
<>

View File

@@ -1,26 +1,25 @@
.opal-message-card {
@apply flex flex-col gap-1 w-full self-stretch rounded-16 border;
@apply flex flex-col w-full self-stretch rounded-16 border;
}
/* Variant background colors. Border *color* lives in `cards/shared.css` and
is keyed off the `data-opal-status-border` attribute. */
/* Variant colors */
.opal-message-card[data-variant="default"] {
@apply bg-background-tint-01;
@apply bg-background-tint-01 border-border-01;
}
.opal-message-card[data-variant="info"] {
@apply bg-status-info-00;
@apply bg-status-info-00 border-status-info-02;
}
.opal-message-card[data-variant="success"] {
@apply bg-status-success-00;
@apply bg-status-success-00 border-status-success-02;
}
.opal-message-card[data-variant="warning"] {
@apply bg-status-warning-00;
@apply bg-status-warning-00 border-status-warning-02;
}
.opal-message-card[data-variant="error"] {
@apply bg-status-error-00;
@apply bg-status-error-00 border-status-error-02;
}

View File

@@ -114,7 +114,6 @@ export { default as SvgMoon } from "@opal/icons/moon";
export { default as SvgMoreHorizontal } from "@opal/icons/more-horizontal";
export { default as SvgMusicSmall } from "@opal/icons/music-small";
export { default as SvgNetworkGraph } from "@opal/icons/network-graph";
export { default as SvgNoImage } from "@opal/icons/no-image";
export { default as SvgNotificationBubble } from "@opal/icons/notification-bubble";
export { default as SvgOnyxOctagon } from "@opal/icons/onyx-octagon";
export { default as SvgOrganization } from "@opal/icons/organization";
@@ -149,7 +148,6 @@ export { default as SvgSlack } from "@opal/icons/slack";
export { default as SvgSlash } from "@opal/icons/slash";
export { default as SvgSliders } from "@opal/icons/sliders";
export { default as SvgSlidersSmall } from "@opal/icons/sliders-small";
export { default as SvgSlowTime } from "@opal/icons/slow-time";
export { default as SvgSort } from "@opal/icons/sort";
export { default as SvgSortOrder } from "@opal/icons/sort-order";
export { default as SvgSparkle } from "@opal/icons/sparkle";

View File

@@ -1,20 +0,0 @@
import type { IconProps } from "@opal/types";
const SvgNoImage = ({ size, ...props }: IconProps) => (
<svg
width={size}
height={size}
viewBox="0 0 16 16"
fill="none"
xmlns="http://www.w3.org/2000/svg"
stroke="currentColor"
{...props}
>
<path
d="M11 14L6.06066 9.06072C5.47487 8.47498 4.52513 8.47498 3.93934 9.06072L2 11M11 14L12.5 13.9998C12.9142 13.9998 13.2892 13.832 13.5606 13.5606M11 14L3.5 13.9998C2.67157 13.9998 2 13.3283 2 12.4999V11M2 11V3.49998C2 3.08577 2.16789 2.71078 2.43934 2.43934M1 1L2.43934 2.43934M2.43934 2.43934L13.5606 13.5606M13.5606 13.5606L15 15M10.8033 7.30328C11.1515 7.0286 11.375 6.60288 11.375 6.12494C11.375 5.29653 10.7035 4.62496 9.875 4.62496C9.39706 4.62496 8.97135 4.84847 8.69666 5.19666M14 10.5V3.49998C14 2.67156 13.3285 2 12.5 2H5.5"
strokeWidth={1.5}
strokeLinecap="round"
strokeLinejoin="round"
/>
</svg>
);
export default SvgNoImage;

View File

@@ -1,27 +0,0 @@
import type { IconProps } from "@opal/types";
const SvgSlowTime = ({ size, ...props }: IconProps) => (
<svg
width={size}
height={size}
viewBox="0 0 16 16"
fill="none"
xmlns="http://www.w3.org/2000/svg"
stroke="currentColor"
{...props}
>
<g clipPath="url(#clip0_997_17795)">
<path
d="M8 4.00001V8.00001L11 9.5M13.1404 12.2453C11.9176 13.7243 10.0689 14.6667 7.99999 14.6667C6.70211 14.6667 5.49086 14.2958 4.46643 13.6542M14.4826 9.5624C14.6029 9.06125 14.6667 8.53806 14.6667 7.99999C14.6667 4.83387 12.4596 2.18324 9.5 1.50275M6.5 1.50275C5.76902 1.67082 5.08394 1.95908 4.46668 2.3456M2.34559 4.4667C1.95907 5.08396 1.67082 5.76903 1.50275 6.50001M1.50276 9.50001C1.67083 10.231 1.95909 10.916 2.34561 11.5333"
strokeWidth={1.5}
strokeLinecap="round"
strokeLinejoin="round"
/>
</g>
<defs>
<clipPath id="clip0_997_17795">
<rect width={16} height={16} fill="white" />
</clipPath>
</defs>
</svg>
);
export default SvgSlowTime;

View File

@@ -1,5 +1,5 @@
import type { Meta, StoryObj } from "@storybook/react";
import { Card, Content } from "@opal/layouts";
import { Card } from "@opal/layouts";
import { Button } from "@opal/components";
import {
SvgArrowExchange,
@@ -39,16 +39,12 @@ export const Default: Story = {
render: () => (
<div className="w-[28rem] border rounded-16">
<Card.Header
headerChildren={
<Content
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="Google Search"
description="Web search provider"
/>
}
topRightChildren={
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="Google Search"
description="Web search provider"
rightChildren={
<Button prominence="tertiary" rightIcon={SvgArrowExchange}>
Connect
</Button>
@@ -62,16 +58,12 @@ export const WithBothSlots: Story = {
render: () => (
<div className="w-[28rem] border rounded-16">
<Card.Header
headerChildren={
<Content
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="Google Search"
description="Currently the default provider."
/>
}
topRightChildren={
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="Google Search"
description="Currently the default provider."
rightChildren={
<Button variant="action" prominence="tertiary" icon={SvgCheckSquare}>
Current Default
</Button>
@@ -101,16 +93,12 @@ export const RightChildrenOnly: Story = {
render: () => (
<div className="w-[28rem] border rounded-16">
<Card.Header
headerChildren={
<Content
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="OpenAI"
description="Not configured"
/>
}
topRightChildren={
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="OpenAI"
description="Not configured"
rightChildren={
<Button prominence="tertiary" rightIcon={SvgArrowExchange}>
Connect
</Button>
@@ -124,15 +112,11 @@ export const NoRightChildren: Story = {
render: () => (
<div className="w-[28rem] border rounded-16">
<Card.Header
headerChildren={
<Content
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="Section Header"
description="No actions on the right."
/>
}
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="Section Header"
description="No actions on the right."
/>
</div>
),
@@ -142,16 +126,12 @@ export const LongContent: Story = {
render: () => (
<div className="w-[28rem] border rounded-16">
<Card.Header
headerChildren={
<Content
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="Very Long Provider Name That Should Truncate"
description="This is a much longer description that tests how the layout handles overflow when the content area needs to shrink."
/>
}
topRightChildren={
sizePreset="main-ui"
variant="section"
icon={SvgGlobe}
title="Very Long Provider Name That Should Truncate"
description="This is a much longer description that tests how the layout handles overflow when the content area needs to shrink."
rightChildren={
<Button variant="action" prominence="tertiary" icon={SvgCheckSquare}>
Current Default
</Button>

View File

@@ -6,62 +6,56 @@ A namespace of card layout primitives. Each sub-component handles a specific reg
## Card.Header
A flexible card header with one slot for the main header content, two stacked slots in a right-side column, and a full-width slot below.
A card header layout that pairs a [`Content`](../content/README.md) block with a right-side column and an optional full-width children slot.
### Why Card.Header?
[`ContentAction`](../content-action/README.md) provides a single right-side slot. Card headers typically need more — a primary action on top, secondary actions on the bottom, and sometimes a full-width region beneath the entire row (e.g. expandable details, search bars, secondary info).
`Card.Header` is layout-only — it intentionally doesn't bake in `Content` props. Pass a `<Content />` (or any other element) into `headerChildren` for the icon/title/description region.
[`ContentAction`](../content-action/README.md) provides a single `rightChildren` slot. Card headers typically need two distinct right-side regions — a primary action on top and secondary actions on the bottom. `Card.Header` provides this with `rightChildren` and `bottomRightChildren` slots, plus a `children` slot for full-width content below the header row (e.g., search bars, expandable tool lists).
### Props
Inherits **all** props from [`Content`](../content/README.md) (icon, title, description, sizePreset, variant, editable, onTitleChange, suffix, etc.) plus:
| Prop | Type | Default | Description |
|---|---|---|---|
| `headerChildren` | `ReactNode` | `undefined` | Content rendered in the top-left header slot — typically a `<Content />` block. |
| `headerPadding` | `"sm" \| "fit"` | `"fit"` | Padding applied around `headerChildren`. `"sm"``p-2`; `"fit"``p-0`. |
| `topRightChildren` | `ReactNode` | `undefined` | Content rendered to the right of `headerChildren` (top of right column). |
| `bottomRightChildren` | `ReactNode` | `undefined` | Content rendered below `topRightChildren` in the same column. Laid out as `flex flex-row`. |
| `bottomChildren` | `ReactNode` | `undefined` | Content rendered below the entire header (left + right columns), spanning the full width. |
| `rightChildren` | `ReactNode` | `undefined` | Content rendered to the right of the Content block (top of right column). |
| `bottomRightChildren` | `ReactNode` | `undefined` | Content rendered below `rightChildren` in the same column. Laid out as `flex flex-row`. |
| `children` | `ReactNode` | `undefined` | Content rendered below the full header row, spanning the entire width. |
### Layout Structure
```
+------------------+----------------+
| headerChildren | topRight |
+ +----------------+
| | bottomRight |
+------------------+----------------+
| bottomChildren (full width) |
+-----------------------------------+
+---------------------------------------------------------+
| [Content (p-2, self-start)] [rightChildren] |
| icon + title + description [bottomRightChildren] |
+---------------------------------------------------------+
| [children — full width] |
+---------------------------------------------------------+
```
- Outer wrapper: `flex flex-col w-full`
- Header row: `flex flex-row items-start w-full` — columns are independent in height
- Left column (headerChildren wrapper): `self-start grow min-w-0` + `headerPadding` variant (default `p-0`)grows to fill available space
- Right column: `flex flex-col items-end shrink-0`shrinks to fit its content
- `bottomChildren` wrapper: `w-full` — only rendered when provided
- Header row: `flex flex-row items-stretch w-full`
- Content area: `flex-1 min-w-0 self-start p-2`top-aligned with fixed padding
- Right column: `flex flex-col items-end shrink-0`no padding, no gap
- `bottomRightChildren` wrapper: `flex flex-row` — lays children out horizontally
- `children` wrapper: `w-full` — only rendered when children are provided
### Usage
#### Card with primary and secondary actions
```tsx
import { Card, Content } from "@opal/layouts";
import { Card } from "@opal/layouts";
import { Button } from "@opal/components";
import { SvgGlobe, SvgSettings, SvgUnplug, SvgCheckSquare } from "@opal/icons";
<Card.Header
headerChildren={
<Content
icon={SvgGlobe}
title="Google Search"
description="Web search provider"
sizePreset="main-ui"
variant="section"
/>
}
topRightChildren={
icon={SvgGlobe}
title="Google Search"
description="Web search provider"
sizePreset="main-ui"
variant="section"
rightChildren={
<Button icon={SvgCheckSquare} variant="action" prominence="tertiary">
Current Default
</Button>
@@ -79,16 +73,12 @@ import { SvgGlobe, SvgSettings, SvgUnplug, SvgCheckSquare } from "@opal/icons";
```tsx
<Card.Header
headerChildren={
<Content
icon={SvgCloud}
title="OpenAI"
description="Not configured"
sizePreset="main-ui"
variant="section"
/>
}
topRightChildren={
icon={SvgCloud}
title="OpenAI"
description="Not configured"
sizePreset="main-ui"
variant="section"
rightChildren={
<Button rightIcon={SvgArrowExchange} prominence="tertiary">
Connect
</Button>
@@ -96,36 +86,31 @@ import { SvgGlobe, SvgSettings, SvgUnplug, SvgCheckSquare } from "@opal/icons";
/>
```
#### Card with extra info beneath the header
#### Card with expandable children
```tsx
<Card.Header
headerChildren={
<Content
icon={SvgServer}
title="MCP Server"
description="12 tools available"
sizePreset="main-ui"
variant="section"
/>
}
topRightChildren={<Button icon={SvgSettings} prominence="tertiary" />}
bottomChildren={<SearchBar placeholder="Search tools..." />}
/>
icon={SvgServer}
title="MCP Server"
description="12 tools available"
sizePreset="main-ui"
variant="section"
rightChildren={<Button icon={SvgSettings} prominence="tertiary" />}
>
<SearchBar placeholder="Search tools..." />
</Card.Header>
```
#### No slots
#### No right children
```tsx
<Card.Header
headerChildren={
<Content
icon={SvgInfo}
title="Section Header"
description="Description text"
sizePreset="main-content"
variant="section"
/>
}
icon={SvgInfo}
title="Section Header"
description="Description text"
sizePreset="main-content"
variant="section"
/>
```
When both `rightChildren` and `bottomRightChildren` are omitted and no `children` are provided, the component renders only the padded `Content`.

View File

@@ -2,17 +2,10 @@
// Types
// ---------------------------------------------------------------------------
import { paddingVariants } from "@opal/shared";
import type { PaddingVariants } from "@opal/types";
import { cn } from "@opal/utils";
interface CardHeaderProps {
/** Content rendered in the top-left header slot — typically a {@link Content} block. */
headerChildren?: React.ReactNode;
/** Padding applied around `headerChildren`. @default "fit" */
headerPadding?: Extract<PaddingVariants, "sm" | "fit">;
/** Content rendered to the right of `headerChildren` (top of right column). */
topRightChildren?: React.ReactNode;
@@ -75,7 +68,6 @@ interface CardHeaderProps {
*/
function Header({
headerChildren,
headerPadding = "fit",
topRightChildren,
bottomRightChildren,
bottomChildren,
@@ -86,14 +78,7 @@ function Header({
<div className="flex flex-col w-full">
<div className="flex flex-row items-start w-full">
{headerChildren != null && (
<div
className={cn(
"self-start grow min-w-0",
paddingVariants[headerPadding]
)}
>
{headerChildren}
</div>
<div className="self-start p-2 grow min-w-0">{headerChildren}</div>
)}
{hasRight && (
<div className="flex flex-col items-end shrink-0">

View File

@@ -1,19 +0,0 @@
/**
* @opal/root — Root-level design tokens for the opal design system.
*
* Import this file once at the top of the component tree (or from shared.ts)
* to make all opal CSS custom properties available globally.
*
* App-level tokens (container widths, page widths, etc.) live in the app's
* own CSS — this file is strictly for library-owned tokens.
*/
:root {
/* ── Line heights / container sizes ──────────────────────────────────── */
--opal-line-height-lg: 2.25rem;
--opal-line-height-md: 1.75rem;
--opal-line-height-sm: 1.5rem;
--opal-line-height-xs: 1.25rem;
--opal-line-height-2xs: 1rem;
}

View File

@@ -6,8 +6,6 @@
* circular imports and gives every consumer a single source of truth.
*/
import "@opal/root.css";
import type {
SizeVariants,
OverridableExtremaSizeVariants,
@@ -23,16 +21,14 @@ import type {
* Each entry maps a named preset to Tailwind utility classes for
* `height`, `min-width`, and `padding`.
*
* Heights are driven by CSS custom properties defined in `@opal/root.css`.
*
* | Key | Height | Padding |
* |-------|-----------------------------|----------|
* | `lg` | `--opal-line-height-lg` | `p-2` |
* | `md` | `--opal-line-height-md` | `p-1` |
* | `sm` | `--opal-line-height-sm` | `p-1` |
* | `xs` | `--opal-line-height-xs` | `p-0.5` |
* | `2xs` | `--opal-line-height-2xs` | `p-0.5` |
* | `fit` | `h-fit` | `p-0` |
* | Key | Height | Padding |
* |-------|---------------|----------|
* | `lg` | 2.25rem (36px)| `p-2` |
* | `md` | 1.75rem (28px)| `p-1` |
* | `sm` | 1.5rem (24px) | `p-1` |
* | `xs` | 1.25rem (20px)| `p-0.5` |
* | `2xs` | 1rem (16px) | `p-0.5` |
* | `fit` | h-fit | `p-0` |
*/
type ContainerProperties = {
height: string;
@@ -44,31 +40,15 @@ const containerSizeVariants: Record<
ContainerProperties
> = {
fit: { height: "h-fit", minWidth: "", padding: "p-0" },
lg: {
height: "h-[var(--opal-line-height-lg)]",
minWidth: "min-w-[var(--opal-line-height-lg)]",
padding: "p-2",
},
md: {
height: "h-[var(--opal-line-height-md)]",
minWidth: "min-w-[var(--opal-line-height-md)]",
padding: "p-1",
},
sm: {
height: "h-[var(--opal-line-height-sm)]",
minWidth: "min-w-[var(--opal-line-height-sm)]",
padding: "p-1",
},
lg: { height: "h-[2.25rem]", minWidth: "min-w-[2.25rem]", padding: "p-2" },
md: { height: "h-[1.75rem]", minWidth: "min-w-[1.75rem]", padding: "p-1" },
sm: { height: "h-[1.5rem]", minWidth: "min-w-[1.5rem]", padding: "p-1" },
xs: {
height: "h-[var(--opal-line-height-xs)]",
minWidth: "min-w-[var(--opal-line-height-xs)]",
padding: "p-0.5",
},
"2xs": {
height: "h-[var(--opal-line-height-2xs)]",
minWidth: "min-w-[var(--opal-line-height-2xs)]",
height: "h-[1.25rem]",
minWidth: "min-w-[1.25rem]",
padding: "p-0.5",
},
"2xs": { height: "h-[1rem]", minWidth: "min-w-[1rem]", padding: "p-0.5" },
} as const;
// ---------------------------------------------------------------------------

22
web/package-lock.json generated
View File

@@ -5765,9 +5765,9 @@
}
},
"node_modules/@sentry/node/node_modules/brace-expansion": {
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
"license": "MIT",
"dependencies": {
"balanced-match": "^4.0.2"
@@ -7196,9 +7196,9 @@
}
},
"node_modules/@typescript-eslint/typescript-estree/node_modules/brace-expansion": {
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -8401,9 +8401,7 @@
}
},
"node_modules/brace-expansion": {
"version": "1.1.14",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.14.tgz",
"integrity": "sha512-MWPGfDxnyzKU7rNOW9SP/c50vi3xrmrua/+6hfPbCS2ABNWfx24vPidzvC7krjU/RTo235sV776ymlsMtGKj8g==",
"version": "1.1.12",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -10683,9 +10681,9 @@
}
},
"node_modules/glob/node_modules/brace-expansion": {
"version": "5.0.5",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
"integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.3.tgz",
"integrity": "sha512-fy6KJm2RawA5RcHkLa1z/ScpBeA762UF9KmZQxwIbDtRJrgLzM10depAiEQ+CXYcoiqW1/m96OAAoke2nE9EeA==",
"license": "MIT",
"dependencies": {
"balanced-match": "^4.0.2"

View File

@@ -27,12 +27,12 @@ import {
connectorConfigs,
createConnectorInitialValues,
createConnectorValidationSchema,
defaultPruneFreqHours,
defaultRefreshFreqMinutes,
isLoadState,
Connector,
ConnectorBase,
} from "@/lib/connectors/connectors";
import { useSettings } from "@/hooks/useSettings";
import Modal from "@/refresh-components/Modal";
import { GmailMain } from "@/app/admin/connectors/[connector]/pages/gmail/GmailPage";
import {
@@ -149,10 +149,6 @@ export default function AddConnector({
}, []);
const router = useRouter();
const { settings } = useSettings();
const defaultPruneFreqHours = settings.default_pruning_freq
? settings.default_pruning_freq / 3600
: 600; // 25 days fallback until settings load
// State for managing credentials and files
const [currentCredential, setCurrentCredential] =
@@ -666,7 +662,7 @@ export default function AddConnector({
{formStep === 2 && (
<CardSection>
<AdvancedFormPage defaultPruneFreqHours={defaultPruneFreqHours} />
<AdvancedFormPage />
</CardSection>
)}

View File

@@ -3,13 +3,7 @@ import NumberInput from "./ConnectorInput/NumberInput";
import { TextFormField } from "@/components/Field";
import { Button } from "@opal/components";
import { SvgTrash } from "@opal/icons";
interface AdvancedFormPageProps {
defaultPruneFreqHours?: number;
}
export default function AdvancedFormPage({
defaultPruneFreqHours = 600,
}: AdvancedFormPageProps) {
export default function AdvancedFormPage() {
return (
<div className="py-4 flex flex-col gap-y-6 rounded-lg max-w-2xl mx-auto">
<h2 className="text-2xl font-bold mb-4 text-text-800">
@@ -20,9 +14,7 @@ export default function AdvancedFormPage({
description={`
Checks all documents against the source to delete those that no longer exist.
Note: This process checks every document, so be cautious when increasing frequency.
Default is ${defaultPruneFreqHours} hours (${Math.round(
defaultPruneFreqHours / 24
)} days). Decimal hours are supported (e.g., 0.1 hours = 6 minutes).
Default is 720 hours (30 days). Decimal hours are supported (e.g., 0.1 hours = 6 minutes).
Enter 0 to disable pruning for this connector.
`}
label="Prune Frequency (hours)"

View File

@@ -44,6 +44,7 @@ export interface AdvancedSearchConfiguration {
enable_contextual_rag: boolean;
contextual_rag_llm_name: string | null;
contextual_rag_llm_provider: string | null;
multilingual_expansion: string[];
disable_rerank_for_streaming: boolean;
api_url: string | null;
num_rerank: number;

View File

@@ -101,6 +101,7 @@ const AdvancedEmbeddingFormPage = forwardRef<
contextual_rag_llm: getCurrentLLMValue,
}}
validationSchema={Yup.object().shape({
multilingual_expansion: Yup.array().of(Yup.string()),
multipass_indexing: Yup.boolean(),
enable_contextual_rag: Yup.boolean(),
contextual_rag_llm: Yup.string()
@@ -168,6 +169,7 @@ const AdvancedEmbeddingFormPage = forwardRef<
// Manually validate against the schema
Yup.object()
.shape({
multilingual_expansion: Yup.array().of(Yup.string()),
multipass_indexing: Yup.boolean(),
enable_contextual_rag: Yup.boolean(),
contextual_rag_llm: Yup.string()

View File

@@ -53,6 +53,7 @@ export default function EmbeddingForm() {
enable_contextual_rag: false,
contextual_rag_llm_name: null,
contextual_rag_llm_provider: null,
multilingual_expansion: [],
disable_rerank_for_streaming: false,
api_url: null,
num_rerank: 0,
@@ -143,6 +144,7 @@ export default function EmbeddingForm() {
enable_contextual_rag: searchSettings.enable_contextual_rag,
contextual_rag_llm_name: searchSettings.contextual_rag_llm_name,
contextual_rag_llm_provider: searchSettings.contextual_rag_llm_provider,
multilingual_expansion: searchSettings.multilingual_expansion,
disable_rerank_for_streaming:
searchSettings.disable_rerank_for_streaming,
num_rerank: searchSettings.num_rerank,

View File

@@ -130,12 +130,7 @@ export default function EmailPasswordForm({
}
}
const loginCaptchaToken = await getCaptchaToken("login");
const loginResponse = await basicLogin(
email,
values.password,
loginCaptchaToken
);
const loginResponse = await basicLogin(email, values.password);
if (loginResponse.ok) {
setApiStatus("success");
if (isSignup && shouldVerify) {

View File

@@ -23,7 +23,7 @@ export function SearchDateRangeSelector({
<TimeRangeSelector
value={value}
className={cn(
"border border-border bg-background rounded-lg flex flex-col w-64 max-h-96 overflow-y-auto overscroll-contain",
"border border-border bg-background rounded-lg flex flex-col w-64 max-h-96 overflow-y-auto flex overscroll-contain",
className
)}
timeRangeValues={timeRangeValues}

View File

@@ -74,7 +74,6 @@ export interface Settings {
max_allowed_upload_size_mb?: number;
// Factory defaults for the restore button.
default_pruning_freq?: number;
default_user_file_max_upload_size_mb?: number;
default_file_token_count_threshold_k?: number;
}

View File

@@ -1873,6 +1873,8 @@ export function createConnectorValidationSchema(
return object;
}
export const defaultPruneFreqHours = 720; // 30 days in hours
export const defaultRefreshFreqMinutes = 30; // 30 minutes
// CONNECTORS

View File

@@ -183,11 +183,6 @@ export async function upsertMCPServer(serverData: {
api_token?: string;
oauth_client_id?: string;
oauth_client_secret?: string;
// Mirrors the LLM-provider `api_key_changed` pattern: explicitly signal
// whether the OAuth credential fields were edited so the backend doesn't
// overwrite stored values with masked placeholders on resubmit.
oauth_client_id_changed?: boolean;
oauth_client_secret_changed?: boolean;
auth_template?: any;
admin_credentials?: Record<string, string>;
existing_server_id?: number;

View File

@@ -25,25 +25,19 @@ export const logout = async (): Promise<Response> => {
export const basicLogin = async (
email: string,
password: string,
captchaToken?: string
password: string
): Promise<Response> => {
const params = new URLSearchParams([
["username", email],
["password", password],
]);
const headers: Record<string, string> = {
"Content-Type": "application/x-www-form-urlencoded",
};
if (captchaToken) {
headers["X-Captcha-Token"] = captchaToken;
}
const response = await fetch("/api/auth/login", {
method: "POST",
credentials: "include",
headers,
headers: {
"Content-Type": "application/x-www-form-urlencoded",
},
body: params,
});
return response;

View File

@@ -149,20 +149,3 @@ export const LoadingTab: Story = {
</Tabs>
),
};
// ---------------------------------------------------------------------------
// Underline variant
// ---------------------------------------------------------------------------
export const Underline: Story = {
render: () => (
<Tabs defaultValue="cloud">
<Tabs.List variant="underline">
<Tabs.Trigger value="cloud">Cloud-based</Tabs.Trigger>
<Tabs.Trigger value="self">Self-hosted</Tabs.Trigger>
</Tabs.List>
<Tabs.Content value="cloud">Cloud-based models</Tabs.Content>
<Tabs.Content value="self">Self-hosted models</Tabs.Content>
</Tabs>
),
};

View File

@@ -8,19 +8,21 @@ import React, {
useCallback,
} from "react";
import * as TabsPrimitive from "@radix-ui/react-tabs";
import { mergeRefs } from "@/lib/utils";
import { cn } from "@opal/utils";
import { cn, mergeRefs } from "@/lib/utils";
import { Tooltip } from "@opal/components";
import { WithoutStyles } from "@/types";
import { Section, SectionProps } from "@/layouts/general-layouts";
import { IconProps, WithoutStyles } from "@opal/types";
import { IconProps } from "@opal/types";
import { SvgChevronLeft, SvgChevronRight } from "@opal/icons";
import { Tooltip, Button, Text } from "@opal/components";
import Text from "./texts/Text";
import { Button } from "@opal/components";
/* =============================================================================
CONTEXT
============================================================================= */
interface TabsContextValue {
variant: "contained" | "pill" | "underline";
variant: "contained" | "pill";
}
const TabsContext = React.createContext<TabsContextValue | undefined>(
@@ -67,29 +69,21 @@ const useTabsContext = () => {
============================================================================= */
/** Style classes for TabsList variants */
const PILL_LIST =
"relative flex w-full items-center pb-[5px] bg-background-tint-00 overflow-hidden";
const listVariants = {
contained: "grid w-full rounded-08 bg-background-tint-03",
pill: PILL_LIST,
underline: PILL_LIST,
pill: "relative flex w-full items-center pb-[5px] bg-background-tint-00 overflow-hidden",
} as const;
/** Base style classes for TabsTrigger variants */
const PILL_TRIGGER =
"p-1 font-secondary-action transition-all duration-200 ease-out";
const triggerBaseStyles = {
contained: "p-2 gap-2",
pill: PILL_TRIGGER,
underline: PILL_TRIGGER,
pill: "p-1 font-secondary-action transition-all duration-200 ease-out",
} as const;
/** Icon style classes for TabsTrigger variants */
const PILL_ICON = "stroke-current";
const iconVariants = {
contained: "stroke-text-03",
pill: PILL_ICON,
underline: PILL_ICON,
pill: "stroke-current",
} as const;
/* =============================================================================
@@ -303,20 +297,16 @@ function useHorizontalScroll(
function PillIndicator({
style,
rightOffset = 0,
hideBaseLine = false,
}: {
style: IndicatorStyle;
rightOffset?: number;
hideBaseLine?: boolean;
}) {
return (
<>
{!hideBaseLine && (
<div
className="absolute bottom-0 left-0 h-px bg-border-02 pointer-events-none"
style={{ right: rightOffset }}
/>
)}
<div
className="absolute bottom-0 left-0 h-px bg-border-02 pointer-events-none"
style={{ right: rightOffset }}
/>
<div
className="absolute bottom-0 h-[2px] bg-background-tint-inverted-03 z-10 pointer-events-none transition-all duration-200 ease-out"
style={{
@@ -370,7 +360,7 @@ interface TabsListProps
* - `pill`: Transparent background with a sliding underline indicator.
* Best for secondary navigation or filter-style tabs with flexible widths.
*/
variant?: "contained" | "pill" | "underline";
variant?: "contained" | "pill";
/**
* Content to render on the right side of the tab list.
@@ -425,7 +415,7 @@ const TabsList = React.forwardRef<
const scrollArrowsRef = useRef<HTMLDivElement>(null);
const rightContentRef = useRef<HTMLDivElement>(null);
const [rightOffset, setRightOffset] = useState(0);
const isPill = variant === "pill" || variant === "underline";
const isPill = variant === "pill";
const { style: indicatorStyle } = usePillIndicator(
listRef,
isPill,
@@ -539,11 +529,7 @@ const TabsList = React.forwardRef<
)}
{isPill && (
<PillIndicator
style={indicatorStyle}
rightOffset={rightOffset}
hideBaseLine={variant === "underline"}
/>
<PillIndicator style={indicatorStyle} rightOffset={rightOffset} />
)}
</TabsContext.Provider>
</TabsPrimitive.List>
@@ -572,7 +558,7 @@ interface TabsTriggerProps
* - `contained` (default): White background with shadow when active
* - `pill`: Dark pill background when active, transparent when inactive
*/
variant?: "contained" | "pill" | "underline";
variant?: "contained" | "pill";
/** Optional tooltip text to display on hover */
tooltip?: string;
@@ -631,7 +617,7 @@ const TabsTrigger = React.forwardRef<
)}
{typeof children === "string" ? (
<div className="px-0.5">
<Text color="inherit">{children}</Text>
<Text>{children}</Text>
</div>
) : (
children
@@ -663,7 +649,6 @@ const TabsTrigger = React.forwardRef<
"data-[state=active]:bg-background-tint-inverted-03",
"data-[state=active]:text-text-inverted-05",
],
variant === "underline" && ["data-[state=active]:text-text-05"],
variant === "contained" && [
"data-[state=inactive]:text-text-03",
"data-[state=inactive]:bg-transparent",
@@ -673,8 +658,7 @@ const TabsTrigger = React.forwardRef<
variant === "pill" && [
"data-[state=inactive]:bg-background-tint-00",
"data-[state=inactive]:text-text-03",
],
variant === "underline" && ["data-[state=inactive]:text-text-03"]
]
)}
{...props}
>
@@ -721,14 +705,11 @@ TabsTrigger.displayName = TabsPrimitive.Trigger.displayName;
const TabsContent = React.forwardRef<
React.ElementRef<typeof TabsPrimitive.Content>,
SectionProps & { value: string }
>(({ children, value, className, ...props }, ref) => (
>(({ children, value, ...props }, ref) => (
<TabsPrimitive.Content
ref={ref}
value={value}
className={cn(
"pt-4 focus:outline-none focus:border-theme-primary-05 w-full",
className
)}
className="pt-4 focus:outline-none focus:border-theme-primary-05 w-full"
>
<Section padding={0} {...props}>
{children}

View File

@@ -105,16 +105,6 @@ export const WithRightChildren: Story = {
},
};
export const WithWrappedDescription: Story = {
args: {
icon: SvgSettings,
children: "Re-index All Connectors",
description:
"Safest option. Continue using the current document index with existing settings until all connectors have completed a successful index attempt.",
wrapDescription: true,
},
};
export const MenuExample: Story = {
render: () => (
<div style={{ display: "flex", flexDirection: "column", gap: 2 }}>

View File

@@ -1,7 +1,6 @@
import React from "react";
import { cn } from "@/lib/utils";
import type { IconProps } from "@opal/types";
import Text from "@/refresh-components/texts/Text";
import Truncated from "@/refresh-components/texts/Truncated";
import Link from "next/link";
import type { Route } from "next";
@@ -85,8 +84,6 @@ export interface LineItemProps
icon?: React.FunctionComponent<IconProps>;
strokeIcon?: boolean;
description?: string;
/** When true, the description text wraps instead of truncating. @default false */
wrapDescription?: boolean;
rightChildren?: React.ReactNode;
href?: string;
rel?: string;
@@ -160,7 +157,6 @@ export default function LineItem({
icon: Icon,
strokeIcon = true,
description,
wrapDescription,
children,
rightChildren,
href,
@@ -275,28 +271,17 @@ export default function LineItem({
</Section>
)}
</Section>
{description &&
(wrapDescription ? (
<Text as="p" secondaryBody text03 className="text-left w-full">
{description}
</Text>
) : (
<Truncated secondaryBody text03 className="text-left w-full">
{description}
</Truncated>
))}
</>
) : description ? (
<Section flexDirection="row" gap={0.5}>
{wrapDescription ? (
<Text as="p" secondaryBody text03 className="text-left w-full">
{description}
</Text>
) : (
{description && (
<Truncated secondaryBody text03 className="text-left w-full">
{description}
</Truncated>
)}
</>
) : description ? (
<Section flexDirection="row" gap={0.5}>
<Truncated secondaryBody text03 className="text-left w-full">
{description}
</Truncated>
{rightChildren && (
<Section alignItems="end" width="fit">
{rightChildren}

View File

@@ -279,7 +279,7 @@ function OpenApiToolCard({ tool }: OpenApiToolCardProps) {
const toolFieldName = `openapi_tool_${tool.id}`;
return (
<Card border="solid" rounding="lg" padding="md">
<Card border="solid" rounding="lg" padding="sm">
<CardLayout.Header
headerChildren={
<ContentAction
@@ -337,14 +337,14 @@ function MCPServerCard({
);
} else if (hasTools) {
cardContent = (
<GeneralLayouts.Section gap={0.5} padding={0.5}>
<div className="flex flex-col gap-2 p-2">
{filteredTools.map((tool) => {
const toolDisabled =
!tool.isAvailable ||
!getFieldMeta<boolean>(`${serverFieldName}.enabled`).value;
return (
<Disabled key={tool.id} disabled={toolDisabled}>
<Card border="solid" rounding="md" padding="sm">
<Card border="solid" rounding="lg" padding="sm">
<CardLayout.Header
headerChildren={
<ContentAction
@@ -367,7 +367,7 @@ function MCPServerCard({
</Disabled>
);
})}
</GeneralLayouts.Section>
</div>
);
}
@@ -416,7 +416,6 @@ function MCPServerCard({
}
/>
}
headerPadding="sm"
bottomChildren={
<GeneralLayouts.Section flexDirection="row" gap={0.5}>
<InputTypeIn

View File

@@ -117,9 +117,9 @@ function MCPServerCard({
padding="sm"
expandedContent={
hasContent ? (
<Section gap={0.5} padding={0.5}>
<div className="flex flex-col gap-2 p-2">
{filteredTools.map((tool) => (
<Card key={tool.id} border="solid" rounding="md" padding="sm">
<Card key={tool.id} border="solid" rounding="lg" padding="sm">
<CardLayout.Header
headerChildren={
<Content
@@ -144,12 +144,11 @@ function MCPServerCard({
/>
</Card>
))}
</Section>
</div>
) : undefined
}
>
<CardLayout.Header
headerPadding="sm"
headerChildren={
<ContentAction
icon={getActionIcon(server.server_url, server.name)}
@@ -892,7 +891,7 @@ export default function ChatPreferencesPage() {
key={tool.id}
border="solid"
rounding="lg"
padding="md"
padding="sm"
>
<CardLayout.Header
headerChildren={

Some files were not shown because too many files have changed in this diff Show More