Mirror of https://github.com/onyx-dot-app/onyx.git (synced 2026-03-02 14:15:44 +00:00)

Compare commits: 10 commits, branch content-re... -> branch feat/remov...
Commits (SHA1 only; the author and date columns were empty):
654e311aa3
3e9fd9c2b9
fa2afb00c2
6fcf822ddc
6c687722b1
4b6fda0e0c
947b11899d
4912dd749f
b553828d4d
033aeeed49
@@ -114,10 +114,8 @@ jobs:
       - name: Mark workflow as failed if cherry-pick failed
         if: steps.gate.outputs.should_cherrypick == 'true' && steps.run_cherry_pick.outputs.status == 'failure'
-        env:
-          CHERRY_PICK_REASON: ${{ steps.run_cherry_pick.outputs.reason }}
         run: |
-          echo "::error::Automated cherry-pick failed (${CHERRY_PICK_REASON})."
+          echo "::error::Automated cherry-pick failed (${{ steps.run_cherry_pick.outputs.reason }})."
           exit 1

   notify-slack-on-cherry-pick-failure:
.github/workflows/pr-playwright-tests.yml (vendored, 2 changed lines)
@@ -603,7 +603,7 @@ jobs:
       pull-requests: write
     steps:
       - name: Download visual diff summaries
-        uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131
+        uses: actions/download-artifact@95815c38cf2ff2164869cbab79da8d1f422bc89e # ratchet:actions/download-artifact@v4
        with:
          pattern: screenshot-diff-summary-*
          path: summaries/
@@ -1,69 +0,0 @@
|
||||
"""add python tool on default
|
||||
|
||||
Revision ID: 57122d037335
|
||||
Revises: c0c937d5c9e5
|
||||
Create Date: 2026-02-27 10:10:40.124925
|
||||
|
||||
"""
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "57122d037335"
|
||||
down_revision = "c0c937d5c9e5"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
PYTHON_TOOL_NAME = "python"
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
|
||||
# Look up the PythonTool id
|
||||
result = conn.execute(
|
||||
sa.text("SELECT id FROM tool WHERE name = :name"),
|
||||
{"name": PYTHON_TOOL_NAME},
|
||||
).fetchone()
|
||||
|
||||
if not result:
|
||||
return
|
||||
|
||||
tool_id = result[0]
|
||||
|
||||
# Attach to the default persona (id=0) if not already attached
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"""
|
||||
INSERT INTO persona__tool (persona_id, tool_id)
|
||||
VALUES (0, :tool_id)
|
||||
ON CONFLICT DO NOTHING
|
||||
"""
|
||||
),
|
||||
{"tool_id": tool_id},
|
||||
)
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
conn = op.get_bind()
|
||||
|
||||
result = conn.execute(
|
||||
sa.text("SELECT id FROM tool WHERE name = :name"),
|
||||
{"name": PYTHON_TOOL_NAME},
|
||||
).fetchone()
|
||||
|
||||
if not result:
|
||||
return
|
||||
|
||||
conn.execute(
|
||||
sa.text(
|
||||
"""
|
||||
DELETE FROM persona__tool
|
||||
WHERE persona_id = 0 AND tool_id = :tool_id
|
||||
"""
|
||||
),
|
||||
{"tool_id": result[0]},
|
||||
)
|
||||
@@ -20,7 +20,6 @@ from ee.onyx.server.enterprise_settings.store import (
 from ee.onyx.server.enterprise_settings.store import upload_logo
 from onyx.context.search.enums import RecencyBiasSetting
 from onyx.db.engine.sql_engine import get_session_with_current_tenant
-from onyx.db.llm import fetch_existing_llm_provider
 from onyx.db.llm import update_default_provider
 from onyx.db.llm import upsert_llm_provider
 from onyx.db.models import Tool
@@ -118,38 +117,15 @@ def _seed_custom_tools(db_session: Session, tools: List[CustomToolSeed]) -> None
 def _seed_llms(
     db_session: Session, llm_upsert_requests: list[LLMProviderUpsertRequest]
 ) -> None:
-    if not llm_upsert_requests:
-        return
-
-    logger.notice("Seeding LLMs")
-    for request in llm_upsert_requests:
-        existing = fetch_existing_llm_provider(name=request.name, db_session=db_session)
-        if existing:
-            request.id = existing.id
-    seeded_providers = [
-        upsert_llm_provider(llm_upsert_request, db_session)
-        for llm_upsert_request in llm_upsert_requests
-    ]
-
-    default_provider = next(
-        (p for p in seeded_providers if p.model_configurations), None
-    )
-    if not default_provider:
-        return
-
-    visible_configs = [
-        mc for mc in default_provider.model_configurations if mc.is_visible
-    ]
-    default_config = (
-        visible_configs[0]
-        if visible_configs
-        else default_provider.model_configurations[0]
-    )
-    update_default_provider(
-        provider_id=default_provider.id,
-        model_name=default_config.name,
-        db_session=db_session,
-    )
+    if llm_upsert_requests:
+        logger.notice("Seeding LLMs")
+        seeded_providers = [
+            upsert_llm_provider(llm_upsert_request, db_session)
+            for llm_upsert_request in llm_upsert_requests
+        ]
+        update_default_provider(
+            provider_id=seeded_providers[0].id, db_session=db_session
+        )


 def _seed_personas(db_session: Session, personas: list[PersonaUpsertRequest]) -> None:
@@ -109,12 +109,6 @@ def apply_license_status_to_settings(settings: Settings) -> Settings:
     if metadata.status == _BLOCKING_STATUS:
         settings.application_status = metadata.status
         settings.ee_features_enabled = False
-    elif metadata.used_seats > metadata.seats:
-        # License is valid but seat limit exceeded
-        settings.application_status = ApplicationStatus.SEAT_LIMIT_EXCEEDED
-        settings.seat_count = metadata.seats
-        settings.used_seats = metadata.used_seats
-        settings.ee_features_enabled = True
     else:
         # Has a valid license (GRACE_PERIOD/PAYMENT_REMINDER still allow EE features)
         settings.ee_features_enabled = True
@@ -33,7 +33,6 @@ from onyx.configs.constants import MilestoneRecordType
 from onyx.db.engine.sql_engine import get_session_with_shared_schema
 from onyx.db.engine.sql_engine import get_session_with_tenant
 from onyx.db.image_generation import create_default_image_gen_config_from_api_key
-from onyx.db.llm import fetch_existing_llm_provider
 from onyx.db.llm import update_default_provider
 from onyx.db.llm import upsert_cloud_embedding_provider
 from onyx.db.llm import upsert_llm_provider
@@ -303,17 +302,12 @@ def configure_default_api_keys(db_session: Session) -> None:

     has_set_default_provider = False

-    def _upsert(request: LLMProviderUpsertRequest, default_model: str) -> None:
+    def _upsert(request: LLMProviderUpsertRequest) -> None:
         nonlocal has_set_default_provider
         try:
-            existing = fetch_existing_llm_provider(
-                name=request.name, db_session=db_session
-            )
-            if existing:
-                request.id = existing.id
             provider = upsert_llm_provider(request, db_session)
             if not has_set_default_provider:
-                update_default_provider(provider.id, default_model, db_session)
+                update_default_provider(provider.id, db_session)
                 has_set_default_provider = True
         except Exception as e:
             logger.error(f"Failed to configure {request.provider} provider: {e}")
@@ -331,13 +325,14 @@ def configure_default_api_keys(db_session: Session) -> None:
             name="OpenAI",
             provider=OPENAI_PROVIDER_NAME,
             api_key=OPENAI_DEFAULT_API_KEY,
+            default_model_name=default_model_name,
             model_configurations=_build_model_configuration_upsert_requests(
                 OPENAI_PROVIDER_NAME, recommendations
             ),
             api_key_changed=True,
             is_auto_mode=True,
         )
-        _upsert(openai_provider, default_model_name)
+        _upsert(openai_provider)

         # Create default image generation config using the OpenAI API key
         try:
@@ -366,13 +361,14 @@ def configure_default_api_keys(db_session: Session) -> None:
             name="Anthropic",
             provider=ANTHROPIC_PROVIDER_NAME,
             api_key=ANTHROPIC_DEFAULT_API_KEY,
+            default_model_name=default_model_name,
             model_configurations=_build_model_configuration_upsert_requests(
                 ANTHROPIC_PROVIDER_NAME, recommendations
             ),
             api_key_changed=True,
             is_auto_mode=True,
         )
-        _upsert(anthropic_provider, default_model_name)
+        _upsert(anthropic_provider)
     else:
         logger.info(
             "ANTHROPIC_DEFAULT_API_KEY not set, skipping Anthropic provider configuration"
@@ -397,13 +393,14 @@ def configure_default_api_keys(db_session: Session) -> None:
             name="Google Vertex AI",
             provider=VERTEXAI_PROVIDER_NAME,
             custom_config=custom_config,
+            default_model_name=default_model_name,
             model_configurations=_build_model_configuration_upsert_requests(
                 VERTEXAI_PROVIDER_NAME, recommendations
             ),
             api_key_changed=True,
             is_auto_mode=True,
         )
-        _upsert(vertexai_provider, default_model_name)
+        _upsert(vertexai_provider)
     else:
         logger.info(
             "VERTEXAI_DEFAULT_CREDENTIALS not set, skipping Vertex AI provider configuration"
@@ -435,11 +432,12 @@ def configure_default_api_keys(db_session: Session) -> None:
             name="OpenRouter",
             provider=OPENROUTER_PROVIDER_NAME,
             api_key=OPENROUTER_DEFAULT_API_KEY,
+            default_model_name=default_model_name,
             model_configurations=model_configurations,
             api_key_changed=True,
             is_auto_mode=True,
         )
-        _upsert(openrouter_provider, default_model_name)
+        _upsert(openrouter_provider)
     else:
         logger.info(
             "OPENROUTER_DEFAULT_API_KEY not set, skipping OpenRouter provider configuration"
@@ -241,8 +241,7 @@ _VECTOR_DB_BEAT_TASK_NAMES: set[str] = {
     "check-for-index-attempt-cleanup",
     "check-for-doc-permissions-sync",
     "check-for-external-group-sync",
-    "check-for-documents-for-opensearch-migration",
     "migrate-documents-from-vespa-to-opensearch",
     "migrate-chunks-from-vespa-to-opensearch",
 }

 if DISABLE_VECTOR_DB:
@@ -414,34 +414,31 @@ def _process_user_file_with_indexing(
         raise RuntimeError(f"Indexing pipeline failed for user file {user_file_id}")


-@shared_task(
-    name=OnyxCeleryTask.PROCESS_SINGLE_USER_FILE,
-    bind=True,
-    ignore_result=True,
-)
-def process_single_user_file(
-    self: Task, *, user_file_id: str, tenant_id: str  # noqa: ARG001
+def _process_user_file_impl(
+    *, user_file_id: str, tenant_id: str, redis_locking: bool
 ) -> None:
-    task_logger.info(f"process_single_user_file - Starting id={user_file_id}")
+    """Core implementation for processing a single user file.
+
+    When redis_locking=True, acquires a per-file Redis lock and clears the
+    queued-key guard (Celery path). When redis_locking=False, skips all Redis
+    operations (BackgroundTask path).
+    """
+    task_logger.info(f"_process_user_file_impl - Starting id={user_file_id}")
     start = time.monotonic()

-    redis_client = get_redis_client(tenant_id=tenant_id)
-
-    # Clear the "queued" guard set by the beat generator so that the next beat
-    # cycle can re-enqueue this file if it is still in PROCESSING state after
-    # this task completes or fails.
-    redis_client.delete(_user_file_queued_key(user_file_id))
-
-    file_lock: RedisLock = redis_client.lock(
-        _user_file_lock_key(user_file_id),
-        timeout=CELERY_USER_FILE_PROCESSING_LOCK_TIMEOUT,
-    )
-
-    if not file_lock.acquire(blocking=False):
-        task_logger.info(
-            f"process_single_user_file - Lock held, skipping user_file_id={user_file_id}"
-        )
-        return None
+    file_lock: RedisLock | None = None
+    if redis_locking:
+        redis_client = get_redis_client(tenant_id=tenant_id)
+        redis_client.delete(_user_file_queued_key(user_file_id))
+        file_lock = redis_client.lock(
+            _user_file_lock_key(user_file_id),
+            timeout=CELERY_USER_FILE_PROCESSING_LOCK_TIMEOUT,
+        )
+        if not file_lock.acquire(blocking=False):
+            task_logger.info(
+                f"_process_user_file_impl - Lock held, skipping user_file_id={user_file_id}"
+            )
+            return

     documents: list[Document] = []
     try:
@@ -449,15 +446,15 @@ def process_single_user_file(
             uf = db_session.get(UserFile, _as_uuid(user_file_id))
             if not uf:
                 task_logger.warning(
-                    f"process_single_user_file - UserFile not found id={user_file_id}"
+                    f"_process_user_file_impl - UserFile not found id={user_file_id}"
                 )
-                return None
+                return

             if uf.status != UserFileStatus.PROCESSING:
                 task_logger.info(
-                    f"process_single_user_file - Skipping id={user_file_id} status={uf.status}"
+                    f"_process_user_file_impl - Skipping id={user_file_id} status={uf.status}"
                 )
-                return None
+                return

             connector = LocalFileConnector(
                 file_locations=[uf.file_id],
@@ -471,7 +468,6 @@ def process_single_user_file(
                 [doc for doc in batch if not isinstance(doc, HierarchyNode)]
             )

-            # update the document id to userfile id in the documents
             for document in documents:
                 document.id = str(user_file_id)
                 document.source = DocumentSource.USER_FILE
@@ -493,9 +489,8 @@ def process_single_user_file(

     except Exception as e:
         task_logger.exception(
-            f"process_single_user_file - Error processing file id={user_file_id} - {e.__class__.__name__}"
+            f"_process_user_file_impl - Error processing file id={user_file_id} - {e.__class__.__name__}"
         )
         # don't update the status if the user file is being deleted
         current_user_file = db_session.get(UserFile, _as_uuid(user_file_id))
         if (
             current_user_file
@@ -504,33 +499,42 @@ def process_single_user_file(
                 uf.status = UserFileStatus.FAILED
                 db_session.add(uf)
                 db_session.commit()
-            return None
+            return

         elapsed = time.monotonic() - start
         task_logger.info(
-            f"process_single_user_file - Finished id={user_file_id} docs={len(documents)} elapsed={elapsed:.2f}s"
+            f"_process_user_file_impl - Finished id={user_file_id} docs={len(documents)} elapsed={elapsed:.2f}s"
        )
-        return None
     except Exception as e:
         # Attempt to mark the file as failed
         with get_session_with_current_tenant() as db_session:
             uf = db_session.get(UserFile, _as_uuid(user_file_id))
             if uf:
                 # don't update the status if the user file is being deleted
                 if uf.status != UserFileStatus.DELETING:
                     uf.status = UserFileStatus.FAILED
                     db_session.add(uf)
                     db_session.commit()

         task_logger.exception(
-            f"process_single_user_file - Error processing file id={user_file_id} - {e.__class__.__name__}"
+            f"_process_user_file_impl - Error processing file id={user_file_id} - {e.__class__.__name__}"
         )
         return None
     finally:
-        if file_lock.owned():
+        if file_lock is not None and file_lock.owned():
             file_lock.release()


+@shared_task(
+    name=OnyxCeleryTask.PROCESS_SINGLE_USER_FILE,
+    bind=True,
+    ignore_result=True,
+)
+def process_single_user_file(
+    self: Task, *, user_file_id: str, tenant_id: str  # noqa: ARG001
+) -> None:
+    _process_user_file_impl(
+        user_file_id=user_file_id, tenant_id=tenant_id, redis_locking=True
+    )


 @shared_task(
     name=OnyxCeleryTask.CHECK_FOR_USER_FILE_DELETE,
     soft_time_limit=300,
@@ -581,36 +585,38 @@ def check_for_user_file_delete(self: Task, *, tenant_id: str) -> None:
     return None


-@shared_task(
-    name=OnyxCeleryTask.DELETE_SINGLE_USER_FILE,
-    bind=True,
-    ignore_result=True,
-)
-def process_single_user_file_delete(
-    self: Task, *, user_file_id: str, tenant_id: str  # noqa: ARG001
+def _delete_user_file_impl(
+    *, user_file_id: str, tenant_id: str, redis_locking: bool
 ) -> None:
-    """Process a single user file delete."""
-    task_logger.info(f"process_single_user_file_delete - Starting id={user_file_id}")
-    redis_client = get_redis_client(tenant_id=tenant_id)
-    file_lock: RedisLock = redis_client.lock(
-        _user_file_delete_lock_key(user_file_id),
-        timeout=CELERY_GENERIC_BEAT_LOCK_TIMEOUT,
-    )
-    if not file_lock.acquire(blocking=False):
-        task_logger.info(
-            f"process_single_user_file_delete - Lock held, skipping user_file_id={user_file_id}"
-        )
-        return None
+    """Core implementation for deleting a single user file.
+
+    When redis_locking=True, acquires a per-file Redis lock (Celery path).
+    When redis_locking=False, skips Redis operations (BackgroundTask path).
+    """
+    task_logger.info(f"_delete_user_file_impl - Starting id={user_file_id}")
+
+    file_lock: RedisLock | None = None
+    if redis_locking:
+        redis_client = get_redis_client(tenant_id=tenant_id)
+        file_lock = redis_client.lock(
+            _user_file_delete_lock_key(user_file_id),
+            timeout=CELERY_GENERIC_BEAT_LOCK_TIMEOUT,
+        )
+        if not file_lock.acquire(blocking=False):
+            task_logger.info(
+                f"_delete_user_file_impl - Lock held, skipping user_file_id={user_file_id}"
+            )
+            return

     try:
         with get_session_with_current_tenant() as db_session:
             user_file = db_session.get(UserFile, _as_uuid(user_file_id))
             if not user_file:
                 task_logger.info(
-                    f"process_single_user_file_delete - User file not found id={user_file_id}"
+                    f"_delete_user_file_impl - User file not found id={user_file_id}"
                 )
-                return None
+                return

             # 1) Delete vector DB chunks (skip when disabled)
             if not DISABLE_VECTOR_DB:
                 if MANAGED_VESPA:
                     httpx_init_vespa_pool(
@@ -648,7 +654,6 @@ def process_single_user_file_delete(
                         chunk_count=chunk_count,
                     )

-            # 2) Delete the user-uploaded file content from filestore (blob + metadata)
             file_store = get_default_file_store()
             try:
                 file_store.delete_file(user_file.file_id)
@@ -656,26 +661,33 @@ def process_single_user_file_delete(
                     user_file_id_to_plaintext_file_name(user_file.id)
                 )
             except Exception as e:
                 # This block executed only if the file is not found in the filestore
                 task_logger.exception(
-                    f"process_single_user_file_delete - Error deleting file id={user_file.id} - {e.__class__.__name__}"
+                    f"_delete_user_file_impl - Error deleting file id={user_file.id} - {e.__class__.__name__}"
                 )

             # 3) Finally, delete the UserFile row
             db_session.delete(user_file)
             db_session.commit()
-            task_logger.info(
-                f"process_single_user_file_delete - Completed id={user_file_id}"
-            )
+            task_logger.info(f"_delete_user_file_impl - Completed id={user_file_id}")
     except Exception as e:
         task_logger.exception(
-            f"process_single_user_file_delete - Error processing file id={user_file_id} - {e.__class__.__name__}"
+            f"_delete_user_file_impl - Error processing file id={user_file_id} - {e.__class__.__name__}"
         )
         return None
     finally:
-        if file_lock.owned():
+        if file_lock is not None and file_lock.owned():
             file_lock.release()
     return None


+@shared_task(
+    name=OnyxCeleryTask.DELETE_SINGLE_USER_FILE,
+    bind=True,
+    ignore_result=True,
+)
+def process_single_user_file_delete(
+    self: Task, *, user_file_id: str, tenant_id: str  # noqa: ARG001
+) -> None:
+    _delete_user_file_impl(
+        user_file_id=user_file_id, tenant_id=tenant_id, redis_locking=True
+    )


 @shared_task(
@@ -747,32 +759,30 @@ def check_for_user_file_project_sync(self: Task, *, tenant_id: str) -> None:
     return None


-@shared_task(
-    name=OnyxCeleryTask.PROCESS_SINGLE_USER_FILE_PROJECT_SYNC,
-    bind=True,
-    ignore_result=True,
-)
-def process_single_user_file_project_sync(
-    self: Task, *, user_file_id: str, tenant_id: str  # noqa: ARG001
+def _project_sync_user_file_impl(
+    *, user_file_id: str, tenant_id: str, redis_locking: bool
 ) -> None:
-    """Process a single user file project sync."""
-    task_logger.info(
-        f"process_single_user_file_project_sync - Starting id={user_file_id}"
-    )
-
-    redis_client = get_redis_client(tenant_id=tenant_id)
-    redis_client.delete(_user_file_project_sync_queued_key(user_file_id))
-
-    file_lock: RedisLock = redis_client.lock(
-        user_file_project_sync_lock_key(user_file_id),
-        timeout=CELERY_USER_FILE_PROJECT_SYNC_LOCK_TIMEOUT,
-    )
-
-    if not file_lock.acquire(blocking=False):
-        task_logger.info(
-            f"process_single_user_file_project_sync - Lock held, skipping user_file_id={user_file_id}"
-        )
-        return None
+    """Core implementation for syncing a user file's project/persona metadata.
+
+    When redis_locking=True, acquires a per-file Redis lock and clears the
+    queued-key guard (Celery path). When redis_locking=False, skips Redis
+    operations (BackgroundTask path).
+    """
+    task_logger.info(f"_project_sync_user_file_impl - Starting id={user_file_id}")
+
+    file_lock: RedisLock | None = None
+    if redis_locking:
+        redis_client = get_redis_client(tenant_id=tenant_id)
+        redis_client.delete(_user_file_project_sync_queued_key(user_file_id))
+        file_lock = redis_client.lock(
+            user_file_project_sync_lock_key(user_file_id),
+            timeout=CELERY_USER_FILE_PROJECT_SYNC_LOCK_TIMEOUT,
+        )
+        if not file_lock.acquire(blocking=False):
+            task_logger.info(
+                f"_project_sync_user_file_impl - Lock held, skipping user_file_id={user_file_id}"
+            )
+            return

     try:
         with get_session_with_current_tenant() as db_session:
@@ -783,11 +793,10 @@ def process_single_user_file_project_sync(
             ).scalar_one_or_none()
             if not user_file:
                 task_logger.info(
-                    f"process_single_user_file_project_sync - User file not found id={user_file_id}"
+                    f"_project_sync_user_file_impl - User file not found id={user_file_id}"
                 )
-                return None
+                return

             # Sync project metadata to vector DB (skip when disabled)
             if not DISABLE_VECTOR_DB:
                 if MANAGED_VESPA:
                     httpx_init_vespa_pool(
@@ -822,7 +831,7 @@ def process_single_user_file_project_sync(
             )

             task_logger.info(
-                f"process_single_user_file_project_sync - User file id={user_file_id}"
+                f"_project_sync_user_file_impl - User file id={user_file_id}"
             )

             user_file.needs_project_sync = False
@@ -835,11 +844,21 @@ def process_single_user_file_project_sync(

     except Exception as e:
         task_logger.exception(
-            f"process_single_user_file_project_sync - Error syncing project for file id={user_file_id} - {e.__class__.__name__}"
+            f"_project_sync_user_file_impl - Error syncing project for file id={user_file_id} - {e.__class__.__name__}"
         )
         return None
     finally:
-        if file_lock.owned():
+        if file_lock is not None and file_lock.owned():
             file_lock.release()

     return None


+@shared_task(
+    name=OnyxCeleryTask.PROCESS_SINGLE_USER_FILE_PROJECT_SYNC,
+    bind=True,
+    ignore_result=True,
+)
+def process_single_user_file_project_sync(
+    self: Task, *, user_file_id: str, tenant_id: str  # noqa: ARG001
+) -> None:
+    _project_sync_user_file_impl(
+        user_file_id=user_file_id, tenant_id=tenant_id, redis_locking=True
+    )
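The refactor above splits each Celery task into a pure `_impl` function plus a thin `@shared_task` wrapper, so the same logic can also run in-process. A minimal sketch of the non-Celery path, assuming a FastAPI handler hands work to `BackgroundTasks` (the endpoint path and tenant value here are illustrative, not taken from this diff):

import uvicorn
from fastapi import BackgroundTasks, FastAPI

from onyx.background.celery.tasks.user_file_processing.tasks import (
    _process_user_file_impl,
)

app = FastAPI()


@app.post("/user/file/{user_file_id}/process")
def process_file(user_file_id: str, background_tasks: BackgroundTasks) -> dict[str, str]:
    # NO_VECTOR_DB path: run in this process after the response is sent.
    # redis_locking=False because deduplication is handled by the Postgres
    # advisory-lock semaphore in task_utils.py rather than a Redis lock.
    background_tasks.add_task(
        _process_user_file_impl,
        user_file_id=user_file_id,
        tenant_id="public",  # placeholder tenant for illustration
        redis_locking=False,
    )
    return {"status": "processing"}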
backend/onyx/background/periodic_poller.py (new file, 256 lines)
@@ -0,0 +1,256 @@
"""Periodic poller for NO_VECTOR_DB deployments.

Replaces Celery Beat and background workers with a lightweight daemon thread
that runs from the API server process. Two responsibilities:

1. Recovery polling (every 30 s): re-processes user files stuck in
   PROCESSING / DELETING / needs_sync states via the drain loops defined
   in ``task_utils.py``.

2. Periodic task execution (configurable intervals): runs LLM model updates
   and scheduled evals at their configured cadences, with Postgres advisory
   lock deduplication across multiple API server instances.
"""

import threading
import time
from collections.abc import Callable
from dataclasses import dataclass
from dataclasses import field

from sqlalchemy import text

from onyx.utils.logger import setup_logger

logger = setup_logger()

RECOVERY_INTERVAL_SECONDS = 30
PERIODIC_TASK_LOCK_BASE = 20_000


# ------------------------------------------------------------------
# Periodic task definitions
# ------------------------------------------------------------------


@dataclass
class _PeriodicTaskDef:
    name: str
    interval_seconds: float
    lock_id: int
    run_fn: Callable[[], None]
    last_run_at: float = field(default=0.0)


def _run_auto_llm_update() -> None:
    from onyx.configs.app_configs import AUTO_LLM_CONFIG_URL

    if not AUTO_LLM_CONFIG_URL:
        return

    from onyx.db.engine.sql_engine import get_session_with_current_tenant
    from onyx.llm.well_known_providers.auto_update_service import (
        sync_llm_models_from_github,
    )

    with get_session_with_current_tenant() as db_session:
        sync_llm_models_from_github(db_session)


def _run_scheduled_eval() -> None:
    from onyx.configs.app_configs import BRAINTRUST_API_KEY
    from onyx.configs.app_configs import SCHEDULED_EVAL_DATASET_NAMES
    from onyx.configs.app_configs import SCHEDULED_EVAL_PERMISSIONS_EMAIL
    from onyx.configs.app_configs import SCHEDULED_EVAL_PROJECT

    if not all(
        [
            BRAINTRUST_API_KEY,
            SCHEDULED_EVAL_PROJECT,
            SCHEDULED_EVAL_DATASET_NAMES,
            SCHEDULED_EVAL_PERMISSIONS_EMAIL,
        ]
    ):
        return

    from datetime import datetime
    from datetime import timezone

    from onyx.evals.eval import run_eval
    from onyx.evals.models import EvalConfigurationOptions

    run_timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%d")
    for dataset_name in SCHEDULED_EVAL_DATASET_NAMES:
        try:
            run_eval(
                configuration=EvalConfigurationOptions(
                    search_permissions_email=SCHEDULED_EVAL_PERMISSIONS_EMAIL,
                    dataset_name=dataset_name,
                    no_send_logs=False,
                    braintrust_project=SCHEDULED_EVAL_PROJECT,
                    experiment_name=f"{dataset_name} - {run_timestamp}",
                ),
                remote_dataset_name=dataset_name,
            )
        except Exception:
            logger.exception(
                f"Periodic poller - Failed scheduled eval for dataset {dataset_name}"
            )


def _build_periodic_tasks() -> list[_PeriodicTaskDef]:
    from onyx.configs.app_configs import AUTO_LLM_CONFIG_URL
    from onyx.configs.app_configs import AUTO_LLM_UPDATE_INTERVAL_SECONDS
    from onyx.configs.app_configs import SCHEDULED_EVAL_DATASET_NAMES

    tasks: list[_PeriodicTaskDef] = []
    if AUTO_LLM_CONFIG_URL:
        tasks.append(
            _PeriodicTaskDef(
                name="auto-llm-update",
                interval_seconds=AUTO_LLM_UPDATE_INTERVAL_SECONDS,
                lock_id=PERIODIC_TASK_LOCK_BASE,
                run_fn=_run_auto_llm_update,
            )
        )
    if SCHEDULED_EVAL_DATASET_NAMES:
        tasks.append(
            _PeriodicTaskDef(
                name="scheduled-eval",
                interval_seconds=7 * 24 * 3600,
                lock_id=PERIODIC_TASK_LOCK_BASE + 1,
                run_fn=_run_scheduled_eval,
            )
        )
    return tasks


# ------------------------------------------------------------------
# Periodic task runner with advisory lock dedup
# ------------------------------------------------------------------


def _try_run_periodic_task(task_def: _PeriodicTaskDef) -> None:
    """Run *task_def* if its interval has elapsed and no peer holds the lock."""
    now = time.monotonic()
    if now - task_def.last_run_at < task_def.interval_seconds:
        return

    from onyx.db.engine.sql_engine import get_session_with_current_tenant

    with get_session_with_current_tenant() as db_session:
        acquired = db_session.execute(
            text("SELECT pg_try_advisory_lock(:id)"),
            {"id": task_def.lock_id},
        ).scalar()
        if not acquired:
            return

        try:
            task_def.run_fn()
            task_def.last_run_at = now
        except Exception:
            logger.exception(
                f"Periodic poller - Error running periodic task {task_def.name}"
            )
        finally:
            db_session.execute(
                text("SELECT pg_advisory_unlock(:id)"),
                {"id": task_def.lock_id},
            )


# ------------------------------------------------------------------
# Recovery / drain loop runner
# ------------------------------------------------------------------


def _run_drain_loops(tenant_id: str) -> None:
    from onyx.background.task_utils import drain_delete_loop
    from onyx.background.task_utils import drain_processing_loop
    from onyx.background.task_utils import drain_project_sync_loop

    drain_processing_loop(tenant_id)
    drain_delete_loop(tenant_id)
    drain_project_sync_loop(tenant_id)


# ------------------------------------------------------------------
# Startup recovery (10g)
# ------------------------------------------------------------------


def recover_stuck_user_files(tenant_id: str) -> None:
    """Run all drain loops once to re-process files left in intermediate states.

    Called from ``lifespan()`` on startup when ``DISABLE_VECTOR_DB`` is set.
    """
    logger.info("recover_stuck_user_files - Checking for stuck user files")
    try:
        _run_drain_loops(tenant_id)
    except Exception:
        logger.exception("recover_stuck_user_files - Error during recovery")


# ------------------------------------------------------------------
# Daemon thread (10f)
# ------------------------------------------------------------------

_shutdown_event = threading.Event()
_poller_thread: threading.Thread | None = None


def _poller_loop(tenant_id: str) -> None:
    from shared_configs.configs import POSTGRES_DEFAULT_SCHEMA
    from shared_configs.contextvars import CURRENT_TENANT_ID_CONTEXTVAR

    CURRENT_TENANT_ID_CONTEXTVAR.set(POSTGRES_DEFAULT_SCHEMA)

    periodic_tasks = _build_periodic_tasks()
    logger.info(
        f"Periodic poller started with {len(periodic_tasks)} periodic task(s): "
        f"{[t.name for t in periodic_tasks]}"
    )

    while not _shutdown_event.is_set():
        try:
            _run_drain_loops(tenant_id)
        except Exception:
            logger.exception("Periodic poller - Error in recovery polling")

        for task_def in periodic_tasks:
            try:
                _try_run_periodic_task(task_def)
            except Exception:
                logger.exception(
                    f"Periodic poller - Unhandled error checking task {task_def.name}"
                )

        _shutdown_event.wait(RECOVERY_INTERVAL_SECONDS)


def start_periodic_poller(tenant_id: str) -> None:
    """Start the periodic poller daemon thread."""
    global _poller_thread  # noqa: PLW0603
    _shutdown_event.clear()
    _poller_thread = threading.Thread(
        target=_poller_loop,
        args=(tenant_id,),
        daemon=True,
        name="no-vectordb-periodic-poller",
    )
    _poller_thread.start()
    logger.info("Periodic poller thread started")


def stop_periodic_poller() -> None:
    """Signal the periodic poller to stop and wait for it to exit."""
    global _poller_thread  # noqa: PLW0603
    if _poller_thread is None:
        return
    _shutdown_event.set()
    _poller_thread.join(timeout=10)
    if _poller_thread.is_alive():
        logger.warning("Periodic poller thread did not stop within timeout")
    _poller_thread = None
    logger.info("Periodic poller thread stopped")
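The module's public surface is `recover_stuck_user_files`, `start_periodic_poller`, and `stop_periodic_poller`, and the docstring says the first is called from `lifespan()` on startup. A sketch of that wiring (the lifespan body is an assumption for illustration; only the three poller functions come from the diff):

from contextlib import asynccontextmanager

from fastapi import FastAPI

from onyx.background.periodic_poller import (
    recover_stuck_user_files,
    start_periodic_poller,
    stop_periodic_poller,
)


@asynccontextmanager
async def lifespan(app: FastAPI):
    tenant_id = "public"  # single-tenant placeholder; an assumption here
    recover_stuck_user_files(tenant_id)  # one-shot recovery on boot (10g)
    start_periodic_poller(tenant_id)  # daemon thread (10f)
    yield
    stop_periodic_poller()  # set the shutdown event, join with timeout


app = FastAPI(lifespan=lifespan)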
@@ -1,3 +1,35 @@
+"""Background task utilities.
+
+Contains query-history report helpers (used by all deployment modes) and
+in-process background task execution helpers for NO_VECTOR_DB mode:
+
+- Postgres advisory lock-based concurrency semaphore (10d)
+- Drain loops that process all pending user file work (10e)
+- Entry points wired to FastAPI BackgroundTasks (10c)
+
+Advisory locks are session-level: they persist until explicitly released via
+``pg_advisory_unlock`` or until the DB connection closes. The semaphore
+session is kept open for the entire drain loop so the slot stays held, and
+released in a ``finally`` block before the connection returns to the pool.
+"""
+
+from uuid import UUID
+
+import sqlalchemy as sa
+from sqlalchemy import select
+from sqlalchemy import text
+from sqlalchemy.orm import Session
+
+from onyx.db.enums import UserFileStatus
+from onyx.db.models import UserFile
+from onyx.utils.logger import setup_logger
+
+logger = setup_logger()
+
+# ------------------------------------------------------------------
+# Query-history report helpers (pre-existing, used by all modes)
+# ------------------------------------------------------------------
+
 QUERY_REPORT_NAME_PREFIX = "query-history"

@@ -9,3 +41,173 @@ def construct_query_history_report_name(


 def extract_task_id_from_query_history_report_name(name: str) -> str:
     return name.removeprefix(f"{QUERY_REPORT_NAME_PREFIX}-").removesuffix(".csv")
+
+
+# ------------------------------------------------------------------
+# Postgres advisory lock semaphore (NO_VECTOR_DB mode)
+# ------------------------------------------------------------------
+
+BACKGROUND_TASK_SLOT_BASE = 10_000
+BACKGROUND_TASK_MAX_CONCURRENCY = 4
+
+
+def try_acquire_semaphore_slot(db_session: Session) -> int | None:
+    """Try to acquire one of N advisory lock slots.
+
+    Returns the slot number (0-based) if acquired, ``None`` if all slots are
+    taken. ``pg_try_advisory_lock`` is non-blocking — returns ``false``
+    immediately when the lock is held by another session.
+    """
+    for slot in range(BACKGROUND_TASK_MAX_CONCURRENCY):
+        lock_id = BACKGROUND_TASK_SLOT_BASE + slot
+        acquired = db_session.execute(
+            text("SELECT pg_try_advisory_lock(:id)"),
+            {"id": lock_id},
+        ).scalar()
+        if acquired:
+            return slot
+    return None
+
+
+def release_semaphore_slot(db_session: Session, slot: int) -> None:
+    """Release a previously acquired advisory lock slot."""
+    lock_id = BACKGROUND_TASK_SLOT_BASE + slot
+    db_session.execute(
+        text("SELECT pg_advisory_unlock(:id)"),
+        {"id": lock_id},
+    )
+
+
+# ------------------------------------------------------------------
+# Work-claiming helpers (FOR UPDATE SKIP LOCKED)
+# ------------------------------------------------------------------
+
+
+def _claim_next_processing_file(db_session: Session) -> UUID | None:
+    """Claim the next file in PROCESSING status."""
+    return db_session.execute(
+        select(UserFile.id)
+        .where(UserFile.status == UserFileStatus.PROCESSING)
+        .order_by(UserFile.created_at)
+        .limit(1)
+        .with_for_update(skip_locked=True)
+    ).scalar_one_or_none()
+
+
+def _claim_next_deleting_file(db_session: Session) -> UUID | None:
+    """Claim the next file in DELETING status."""
+    return db_session.execute(
+        select(UserFile.id)
+        .where(UserFile.status == UserFileStatus.DELETING)
+        .order_by(UserFile.created_at)
+        .limit(1)
+        .with_for_update(skip_locked=True)
+    ).scalar_one_or_none()
+
+
+def _claim_next_sync_file(db_session: Session) -> UUID | None:
+    """Claim the next file needing project/persona sync."""
+    return db_session.execute(
+        select(UserFile.id)
+        .where(
+            sa.and_(
+                sa.or_(
+                    UserFile.needs_project_sync.is_(True),
+                    UserFile.needs_persona_sync.is_(True),
+                ),
+                UserFile.status == UserFileStatus.COMPLETED,
+            )
+        )
+        .order_by(UserFile.created_at)
+        .limit(1)
+        .with_for_update(skip_locked=True)
+    ).scalar_one_or_none()
+
+
+# ------------------------------------------------------------------
+# Drain loops — acquire a semaphore slot then process *all* pending work
+# ------------------------------------------------------------------
+
+
+def drain_processing_loop(tenant_id: str) -> None:
+    """Process all pending PROCESSING user files."""
+    from onyx.background.celery.tasks.user_file_processing.tasks import (
+        _process_user_file_impl,
+    )
+    from onyx.db.engine.sql_engine import get_session_with_current_tenant
+
+    with get_session_with_current_tenant() as sem_session:
+        slot = try_acquire_semaphore_slot(sem_session)
+        if slot is None:
+            logger.info("drain_processing_loop - All semaphore slots taken, skipping")
+            return
+
+        try:
+            while True:
+                with get_session_with_current_tenant() as claim_session:
+                    file_id = _claim_next_processing_file(claim_session)
+                    if file_id is None:
+                        break
+                    _process_user_file_impl(
+                        user_file_id=str(file_id),
+                        tenant_id=tenant_id,
+                        redis_locking=False,
+                    )
+        finally:
+            release_semaphore_slot(sem_session, slot)
+
+
+def drain_delete_loop(tenant_id: str) -> None:
+    """Delete all pending DELETING user files."""
+    from onyx.background.celery.tasks.user_file_processing.tasks import (
+        _delete_user_file_impl,
+    )
+    from onyx.db.engine.sql_engine import get_session_with_current_tenant
+
+    with get_session_with_current_tenant() as sem_session:
+        slot = try_acquire_semaphore_slot(sem_session)
+        if slot is None:
+            logger.info("drain_delete_loop - All semaphore slots taken, skipping")
+            return
+
+        try:
+            while True:
+                with get_session_with_current_tenant() as claim_session:
+                    file_id = _claim_next_deleting_file(claim_session)
+                    if file_id is None:
+                        break
+                    _delete_user_file_impl(
+                        user_file_id=str(file_id),
+                        tenant_id=tenant_id,
+                        redis_locking=False,
+                    )
+        finally:
+            release_semaphore_slot(sem_session, slot)
+
+
+def drain_project_sync_loop(tenant_id: str) -> None:
+    """Sync all pending project/persona metadata for user files."""
+    from onyx.background.celery.tasks.user_file_processing.tasks import (
+        _project_sync_user_file_impl,
+    )
+    from onyx.db.engine.sql_engine import get_session_with_current_tenant
+
+    with get_session_with_current_tenant() as sem_session:
+        slot = try_acquire_semaphore_slot(sem_session)
+        if slot is None:
+            logger.info("drain_project_sync_loop - All semaphore slots taken, skipping")
+            return
+
+        try:
+            while True:
+                with get_session_with_current_tenant() as claim_session:
+                    file_id = _claim_next_sync_file(claim_session)
+                    if file_id is None:
+                        break
+                    _project_sync_user_file_impl(
+                        user_file_id=str(file_id),
+                        tenant_id=tenant_id,
+                        redis_locking=False,
+                    )
+        finally:
+            release_semaphore_slot(sem_session, slot)
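The module docstring leans on the session-level semantics of Postgres advisory locks. A small, self-contained sketch of those semantics against a plain SQLAlchemy engine (the DSN and lock id are placeholders, not values from the diff):

from sqlalchemy import create_engine, text

engine = create_engine("postgresql://localhost/onyx")  # placeholder DSN

with engine.connect() as conn_a, engine.connect() as conn_b:
    # Session A grabs lock 10000 without blocking.
    assert conn_a.execute(text("SELECT pg_try_advisory_lock(10000)")).scalar()
    # Session B sees the slot as taken and returns false immediately,
    # which is what makes the non-blocking semaphore scan possible.
    assert not conn_b.execute(text("SELECT pg_try_advisory_lock(10000)")).scalar()
    # Session-level locks survive commits; they are released explicitly...
    conn_a.execute(text("SELECT pg_advisory_unlock(10000)"))
    # ...or implicitly when session A's connection closes.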
backend/onyx/cache/factory.py (new file, vendored, 45 lines)
@@ -0,0 +1,45 @@
from collections.abc import Callable

from onyx.cache.interface import CacheBackend
from onyx.cache.interface import CacheBackendType
from onyx.configs.app_configs import CACHE_BACKEND


def _build_redis_backend(tenant_id: str) -> CacheBackend:
    from onyx.cache.redis_backend import RedisCacheBackend
    from onyx.redis.redis_pool import redis_pool

    return RedisCacheBackend(redis_pool.get_client(tenant_id))


_BACKEND_BUILDERS: dict[CacheBackendType, Callable[[str], CacheBackend]] = {
    CacheBackendType.REDIS: _build_redis_backend,
    # CacheBackendType.POSTGRES will be added in a follow-up PR.
}


def get_cache_backend(*, tenant_id: str | None = None) -> CacheBackend:
    """Return a tenant-aware ``CacheBackend``.

    If *tenant_id* is ``None``, the current tenant is read from the
    thread-local context variable (same behaviour as ``get_redis_client``).
    """
    if tenant_id is None:
        from shared_configs.contextvars import get_current_tenant_id

        tenant_id = get_current_tenant_id()

    builder = _BACKEND_BUILDERS.get(CACHE_BACKEND)
    if builder is None:
        raise ValueError(
            f"Unsupported CACHE_BACKEND={CACHE_BACKEND!r}. "
            f"Supported values: {[t.value for t in CacheBackendType]}"
        )
    return builder(tenant_id)


def get_shared_cache_backend() -> CacheBackend:
    """Return a ``CacheBackend`` in the shared (cross-tenant) namespace."""
    from shared_configs.configs import DEFAULT_REDIS_PREFIX

    return get_cache_backend(tenant_id=DEFAULT_REDIS_PREFIX)
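Callers are meant to go through the factory so they stay agnostic to the configured backend. A hedged usage sketch built only on the interface above (the key names are illustrative):

from onyx.cache.factory import get_cache_backend

cache = get_cache_backend()  # tenant resolved from the context variable

# Key/value with TTL: expires five minutes after being set.
cache.set("oauth:state:abc123", "pending", ex=300)

# Distributed lock with the same acquire/owned/release shape as redis.lock.
lock = cache.lock("user_file:lock:42", timeout=60.0)
if lock.acquire(blocking=False):
    try:
        ...  # critical section
    finally:
        if lock.owned():
            lock.release()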
backend/onyx/cache/interface.py (new file, vendored, 89 lines)
@@ -0,0 +1,89 @@
import abc
from enum import Enum


class CacheBackendType(str, Enum):
    REDIS = "redis"
    POSTGRES = "postgres"


class CacheLock(abc.ABC):
    """Abstract distributed lock returned by CacheBackend.lock()."""

    @abc.abstractmethod
    def acquire(
        self,
        blocking: bool = True,
        blocking_timeout: float | None = None,
    ) -> bool:
        raise NotImplementedError

    @abc.abstractmethod
    def release(self) -> None:
        raise NotImplementedError

    @abc.abstractmethod
    def owned(self) -> bool:
        raise NotImplementedError


class CacheBackend(abc.ABC):
    """Thin abstraction over a key-value cache with TTL, locks, and blocking lists.

    Covers the subset of Redis operations used outside of Celery. When
    CACHE_BACKEND=postgres, a PostgreSQL-backed implementation is used instead.
    """

    # -- basic key/value ---------------------------------------------------

    @abc.abstractmethod
    def get(self, key: str) -> bytes | None:
        raise NotImplementedError

    @abc.abstractmethod
    def set(
        self,
        key: str,
        value: str | bytes | int | float,
        ex: int | None = None,
    ) -> None:
        raise NotImplementedError

    @abc.abstractmethod
    def delete(self, key: str) -> None:
        raise NotImplementedError

    @abc.abstractmethod
    def exists(self, key: str) -> bool:
        raise NotImplementedError

    # -- TTL ---------------------------------------------------------------

    @abc.abstractmethod
    def expire(self, key: str, seconds: int) -> None:
        raise NotImplementedError

    @abc.abstractmethod
    def ttl(self, key: str) -> int:
        """Return remaining TTL in seconds. -1 if no expiry, -2 if key missing."""
        raise NotImplementedError

    # -- distributed lock --------------------------------------------------

    @abc.abstractmethod
    def lock(self, name: str, timeout: float | None = None) -> CacheLock:
        raise NotImplementedError

    # -- blocking list (used by MCP OAuth BLPOP pattern) -------------------

    @abc.abstractmethod
    def rpush(self, key: str, value: str | bytes) -> None:
        raise NotImplementedError

    @abc.abstractmethod
    def blpop(self, keys: list[str], timeout: int = 0) -> tuple[bytes, bytes] | None:
        """Block until a value is available on one of *keys*, or *timeout* expires.

        Returns ``(key, value)`` or ``None`` on timeout.
        """
        raise NotImplementedError
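A sketch of the "MCP OAuth BLPOP pattern" the interface comment refers to: one request handler blocks waiting for a value that a callback handler pushes from another process. The key layout below is an assumption for illustration, not taken from the diff:

from onyx.cache.factory import get_cache_backend

cache = get_cache_backend()

# Callback handler (producer): push the authorization code when it arrives.
cache.rpush("mcp:oauth:session-xyz", "auth_code_123")

# Waiting handler (consumer): block up to 30 s for the code.
result = cache.blpop(["mcp:oauth:session-xyz"], timeout=30)
if result is None:
    raise TimeoutError("OAuth callback never arrived")
key, value = result  # (b"mcp:oauth:session-xyz", b"auth_code_123")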
backend/onyx/cache/redis_backend.py (new file, vendored, 92 lines)
@@ -0,0 +1,92 @@
from typing import cast

from redis.client import Redis
from redis.lock import Lock as RedisLock

from onyx.cache.interface import CacheBackend
from onyx.cache.interface import CacheLock


class RedisCacheLock(CacheLock):
    """Wraps ``redis.lock.Lock`` behind the ``CacheLock`` interface."""

    def __init__(self, lock: RedisLock) -> None:
        self._lock = lock

    def acquire(
        self,
        blocking: bool = True,
        blocking_timeout: float | None = None,
    ) -> bool:
        return bool(
            self._lock.acquire(
                blocking=blocking,
                blocking_timeout=blocking_timeout,
            )
        )

    def release(self) -> None:
        self._lock.release()

    def owned(self) -> bool:
        return bool(self._lock.owned())


class RedisCacheBackend(CacheBackend):
    """``CacheBackend`` implementation that delegates to a ``redis.Redis`` client.

    This is a thin pass-through — every method maps 1-to-1 to the underlying
    Redis command. ``TenantRedis`` key-prefixing is handled by the client
    itself (provided by ``get_redis_client``).
    """

    def __init__(self, redis_client: Redis) -> None:
        self._r = redis_client

    # -- basic key/value ---------------------------------------------------

    def get(self, key: str) -> bytes | None:
        val = self._r.get(key)
        if val is None:
            return None
        if isinstance(val, bytes):
            return val
        return str(val).encode()

    def set(
        self,
        key: str,
        value: str | bytes | int | float,
        ex: int | None = None,
    ) -> None:
        self._r.set(key, value, ex=ex)

    def delete(self, key: str) -> None:
        self._r.delete(key)

    def exists(self, key: str) -> bool:
        return bool(self._r.exists(key))

    # -- TTL ---------------------------------------------------------------

    def expire(self, key: str, seconds: int) -> None:
        self._r.expire(key, seconds)

    def ttl(self, key: str) -> int:
        return cast(int, self._r.ttl(key))

    # -- distributed lock --------------------------------------------------

    def lock(self, name: str, timeout: float | None = None) -> CacheLock:
        return RedisCacheLock(self._r.lock(name, timeout=timeout))

    # -- blocking list (MCP OAuth BLPOP pattern) ---------------------------

    def rpush(self, key: str, value: str | bytes) -> None:
        self._r.rpush(key, value)

    def blpop(self, keys: list[str], timeout: int = 0) -> tuple[bytes, bytes] | None:
        result = cast(list[bytes] | None, self._r.blpop(keys, timeout=timeout))
        if result is None:
            return None
        return (result[0], result[1])
@@ -6,6 +6,7 @@ from datetime import timezone
 from typing import cast

 from onyx.auth.schemas import AuthBackend
+from onyx.cache.interface import CacheBackendType
 from onyx.configs.constants import AuthType
 from onyx.configs.constants import QueryHistoryType
 from onyx.file_processing.enums import HtmlBasedConnectorTransformLinksStrategy

@@ -54,6 +55,12 @@ DISABLE_USER_KNOWLEDGE = os.environ.get("DISABLE_USER_KNOWLEDGE", "").lower() ==
 # are disabled but core chat, tools, user file uploads, and Projects still work.
 DISABLE_VECTOR_DB = os.environ.get("DISABLE_VECTOR_DB", "").lower() == "true"

+# Which backend to use for caching, locks, and ephemeral state.
+# "redis" (default) or "postgres" (only valid when DISABLE_VECTOR_DB=true).
+CACHE_BACKEND = CacheBackendType(
+    os.environ.get("CACHE_BACKEND", CacheBackendType.REDIS)
+)
+
 # Maximum token count for a single uploaded file. Files exceeding this are rejected.
 # Defaults to 100k tokens (or 10M when vector DB is disabled).
 _DEFAULT_FILE_TOKEN_LIMIT = 10_000_000 if DISABLE_VECTOR_DB else 100_000
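The comment constrains "postgres" to NO_VECTOR_DB deployments, but nothing in this hunk enforces it. A hypothetical startup guard consistent with that comment could look like the following (not part of the diff):

from onyx.cache.interface import CacheBackendType
from onyx.configs.app_configs import CACHE_BACKEND, DISABLE_VECTOR_DB


def validate_cache_backend() -> None:
    # Reject the unsupported combination early rather than failing at the
    # first cache call; hypothetical helper, named here for illustration.
    if CACHE_BACKEND == CacheBackendType.POSTGRES and not DISABLE_VECTOR_DB:
        raise ValueError("CACHE_BACKEND=postgres requires DISABLE_VECTOR_DB=true")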
@@ -32,8 +32,6 @@ class GongConnector(LoadConnector, PollConnector):
     BASE_URL = "https://api.gong.io"
     MAX_CALL_DETAILS_ATTEMPTS = 6
     CALL_DETAILS_DELAY = 30  # in seconds
-    # Gong API limit is 3 calls/sec — stay safely under it
-    MIN_REQUEST_INTERVAL = 0.5  # seconds between requests

     def __init__(
         self,
@@ -47,13 +45,9 @@ class GongConnector(LoadConnector, PollConnector):
         self.continue_on_fail = continue_on_fail
         self.auth_token_basic: str | None = None
         self.hide_user_info = hide_user_info
-        self._last_request_time: float = 0.0

+        # urllib3 Retry already respects the Retry-After header by default
+        # (respect_retry_after_header=True), so on 429 it will sleep for the
+        # duration Gong specifies before retrying.
         retry_strategy = Retry(
-            total=10,
+            total=5,
             backoff_factor=2,
             status_forcelist=[429, 500, 502, 503, 504],
         )
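For context on the comment added above: this is how a urllib3 Retry strategy like the one in the diff is typically mounted on a requests session, and why it makes the hand-rolled throttling removable. A sketch under that assumption (the session setup shown is standard requests usage, not copied from the connector):

import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retry_strategy = Retry(
    total=5,
    backoff_factor=2,
    status_forcelist=[429, 500, 502, 503, 504],
)
session = requests.Session()
session.mount("https://", HTTPAdapter(max_retries=retry_strategy))
# On a 429, urllib3 sleeps for the server-specified Retry-After duration
# (respect_retry_after_header defaults to True), then retries the request.
response = session.get("https://api.gong.io/v2/workspaces")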
@@ -67,24 +61,8 @@ class GongConnector(LoadConnector, PollConnector):
         url = f"{GongConnector.BASE_URL}{endpoint}"
         return url

-    def _throttled_request(
-        self, method: str, url: str, **kwargs: Any
-    ) -> requests.Response:
-        """Rate-limited request wrapper. Enforces MIN_REQUEST_INTERVAL between
-        calls to stay under Gong's 3 calls/sec limit and avoid triggering 429s."""
-        now = time.monotonic()
-        elapsed = now - self._last_request_time
-        if elapsed < self.MIN_REQUEST_INTERVAL:
-            time.sleep(self.MIN_REQUEST_INTERVAL - elapsed)
-
-        response = self._session.request(method, url, **kwargs)
-        self._last_request_time = time.monotonic()
-        return response
-
     def _get_workspace_id_map(self) -> dict[str, str]:
-        response = self._throttled_request(
-            "GET", GongConnector.make_url("/v2/workspaces")
-        )
+        response = self._session.get(GongConnector.make_url("/v2/workspaces"))
         response.raise_for_status()

         workspaces_details = response.json().get("workspaces")
@@ -128,8 +106,8 @@ class GongConnector(LoadConnector, PollConnector):
             del body["filter"]["workspaceId"]

         while True:
-            response = self._throttled_request(
-                "POST", GongConnector.make_url("/v2/calls/transcript"), json=body
+            response = self._session.post(
+                GongConnector.make_url("/v2/calls/transcript"), json=body
             )
             # If no calls in the range, just break out
             if response.status_code == 404:
@@ -164,8 +142,8 @@ class GongConnector(LoadConnector, PollConnector):
             "contentSelector": {"exposedFields": {"parties": True}},
         }

-        response = self._throttled_request(
-            "POST", GongConnector.make_url("/v2/calls/extensive"), json=body
+        response = self._session.post(
+            GongConnector.make_url("/v2/calls/extensive"), json=body
         )
         response.raise_for_status()

@@ -216,8 +194,7 @@ class GongConnector(LoadConnector, PollConnector):
         # There's a likely race condition in the API where a transcript will have a
         # call id but the call to v2/calls/extensive will not return all of the id's
         # retry with exponential backoff has been observed to mitigate this
-        # in ~2 minutes. After max attempts, proceed with whatever we have —
-        # the per-call loop below will skip missing IDs gracefully.
+        # in ~2 minutes
         current_attempt = 0
         while True:
             current_attempt += 1
@@ -236,14 +213,11 @@ class GongConnector(LoadConnector, PollConnector):
                     f"missing_call_ids={missing_call_ids}"
                 )
                 if current_attempt >= self.MAX_CALL_DETAILS_ATTEMPTS:
-                    logger.error(
-                        f"Giving up on missing call id's after "
-                        f"{self.MAX_CALL_DETAILS_ATTEMPTS} attempts: "
-                        f"missing_call_ids={missing_call_ids} — "
-                        f"proceeding with {len(call_details_map)} of "
-                        f"{len(transcript_call_ids)} calls"
-                    )
-                    break
+                    raise RuntimeError(
+                        f"Attempt count exceeded for _get_call_details_by_ids: "
+                        f"missing_call_ids={missing_call_ids} "
+                        f"max_attempts={self.MAX_CALL_DETAILS_ATTEMPTS}"
+                    )

                 wait_seconds = self.CALL_DETAILS_DELAY * pow(2, current_attempt - 1)
                 logger.warning(
@@ -202,6 +202,7 @@ def create_default_image_gen_config_from_api_key(
         api_key=api_key,
         api_base=None,
         api_version=None,
+        default_model_name=model_name,
         deployment_name=None,
         is_public=True,
     )
@@ -213,29 +213,11 @@ def upsert_llm_provider(
     llm_provider_upsert_request: LLMProviderUpsertRequest,
     db_session: Session,
 ) -> LLMProviderView:
-    existing_llm_provider: LLMProviderModel | None = None
-    if llm_provider_upsert_request.id:
-        existing_llm_provider = fetch_existing_llm_provider_by_id(
-            id=llm_provider_upsert_request.id, db_session=db_session
-        )
-        if not existing_llm_provider:
-            raise ValueError(
-                f"LLM provider with id {llm_provider_upsert_request.id} not found"
-            )
-        if existing_llm_provider.name != llm_provider_upsert_request.name:
-            raise ValueError(
-                f"LLM provider with id {llm_provider_upsert_request.id} name change not allowed"
-            )
-    else:
-        existing_llm_provider = fetch_existing_llm_provider(
-            name=llm_provider_upsert_request.name, db_session=db_session
-        )
-        if existing_llm_provider:
-            raise ValueError(
-                f"LLM provider with name '{llm_provider_upsert_request.name}'"
-                " already exists"
-            )
+    existing_llm_provider = fetch_existing_llm_provider(
+        name=llm_provider_upsert_request.name, db_session=db_session
+    )
+
     if not existing_llm_provider:
         existing_llm_provider = LLMProviderModel(name=llm_provider_upsert_request.name)
         db_session.add(existing_llm_provider)

@@ -256,7 +238,11 @@ def upsert_llm_provider(
     existing_llm_provider.api_base = api_base
     existing_llm_provider.api_version = llm_provider_upsert_request.api_version
     existing_llm_provider.custom_config = custom_config
+
+    # TODO: Remove default model name on api change
+    # Needed due to /provider/{id}/default endpoint not disclosing the default model name
+    existing_llm_provider.default_model_name = (
+        llm_provider_upsert_request.default_model_name
+    )
     existing_llm_provider.is_public = llm_provider_upsert_request.is_public
     existing_llm_provider.is_auto_mode = llm_provider_upsert_request.is_auto_mode
     existing_llm_provider.deployment_name = llm_provider_upsert_request.deployment_name
@@ -320,6 +306,15 @@ def upsert_llm_provider(
             display_name=model_config.display_name,
         )

+    default_model = fetch_default_model(db_session, LLMModelFlowType.CHAT)
+    if default_model and default_model.llm_provider_id == existing_llm_provider.id:
+        _update_default_model(
+            db_session=db_session,
+            provider_id=existing_llm_provider.id,
+            model=existing_llm_provider.default_model_name,
+            flow_type=LLMModelFlowType.CHAT,
+        )
+
     # Make sure the relationship table stays up to date
     update_group_llm_provider_relationships__no_commit(
         llm_provider_id=existing_llm_provider.id,
@@ -493,22 +488,6 @@ def fetch_existing_llm_provider(
    return provider_model


def fetch_existing_llm_provider_by_id(
    id: int, db_session: Session
) -> LLMProviderModel | None:
    provider_model = db_session.scalar(
        select(LLMProviderModel)
        .where(LLMProviderModel.id == id)
        .options(
            selectinload(LLMProviderModel.model_configurations),
            selectinload(LLMProviderModel.groups),
            selectinload(LLMProviderModel.personas),
        )
    )

    return provider_model


def fetch_embedding_provider(
    db_session: Session, provider_type: EmbeddingProvider
) -> CloudEmbeddingProviderModel | None:
@@ -625,13 +604,22 @@ def remove_llm_provider__no_commit(db_session: Session, provider_id: int) -> Non
    db_session.flush()


def update_default_provider(
    provider_id: int, model_name: str, db_session: Session
) -> None:
def update_default_provider(provider_id: int, db_session: Session) -> None:
    # Attempt to get the default_model_name from the provider first
    # TODO: Remove default_model_name check
    provider = db_session.scalar(
        select(LLMProviderModel).where(
            LLMProviderModel.id == provider_id,
        )
    )

    if provider is None:
        raise ValueError(f"LLM Provider with id={provider_id} does not exist")

    _update_default_model(
        db_session,
        provider_id,
        model_name,
        provider.default_model_name,  # type: ignore[arg-type]
        LLMModelFlowType.CHAT,
    )

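Under the new signature the default chat model is derived from the provider row's own default_model_name rather than passed by the caller. A hypothetical call site under the new shape (the request object and open session are assumed):

# Hypothetical call site; `request` is a populated LLMProviderUpsertRequest
# and `db_session` an open SQLAlchemy session.
from onyx.db.llm import update_default_provider
from onyx.db.llm import upsert_llm_provider

provider = upsert_llm_provider(
    llm_provider_upsert_request=request, db_session=db_session
)
# No model_name argument any more; the provider's stored default is used.
update_default_provider(provider_id=provider.id, db_session=db_session)
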
@@ -817,6 +805,12 @@ def sync_auto_mode_models(
        )
        changes += 1

    # In Auto mode, default model is always set from GitHub config
    default_model = llm_recommendations.get_default_model(provider.provider)
    if default_model and provider.default_model_name != default_model.name:
        provider.default_model_name = default_model.name
        changes += 1

    db_session.commit()
    return changes


@@ -1,6 +1,7 @@
import datetime
import uuid
from typing import List
from typing import TYPE_CHECKING
from uuid import UUID

from fastapi import HTTPException
@@ -10,7 +11,10 @@ from pydantic import ConfigDict
from sqlalchemy import func
from sqlalchemy.orm import Session

from onyx.background.celery.versioned_apps.client import app as client_app
if TYPE_CHECKING:
    from starlette.background import BackgroundTasks

from onyx.configs.app_configs import DISABLE_VECTOR_DB
from onyx.configs.constants import FileOrigin
from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryQueues
@@ -105,8 +109,8 @@ def upload_files_to_user_files_with_indexing(
    user: User,
    temp_id_map: dict[str, str] | None,
    db_session: Session,
    background_tasks: BackgroundTasks | None = None,
) -> CategorizedFilesResult:
    # Validate project ownership if a project_id is provided
    if project_id is not None and user is not None:
        if not check_project_ownership(project_id, user.id, db_session):
            raise HTTPException(status_code=404, detail="Project not found")
@@ -127,16 +131,27 @@ def upload_files_to_user_files_with_indexing(
        logger.warning(
            f"File {rejected_file.filename} rejected for {rejected_file.reason}"
        )
    for user_file in user_files:
        task = client_app.send_task(
            OnyxCeleryTask.PROCESS_SINGLE_USER_FILE,
            kwargs={"user_file_id": user_file.id, "tenant_id": tenant_id},
            queue=OnyxCeleryQueues.USER_FILE_PROCESSING,
            priority=OnyxCeleryPriority.HIGH,
        )
        logger.info(
            f"Triggered indexing for user_file_id={user_file.id} with task_id={task.id}"
        )

    if DISABLE_VECTOR_DB and background_tasks is not None:
        from onyx.background.task_utils import drain_processing_loop

        background_tasks.add_task(drain_processing_loop, tenant_id)
        for user_file in user_files:
            logger.info(f"Queued in-process processing for user_file_id={user_file.id}")
    else:
        from onyx.background.celery.versioned_apps.client import app as client_app

        for user_file in user_files:
            task = client_app.send_task(
                OnyxCeleryTask.PROCESS_SINGLE_USER_FILE,
                kwargs={"user_file_id": user_file.id, "tenant_id": tenant_id},
                queue=OnyxCeleryQueues.USER_FILE_PROCESSING,
                priority=OnyxCeleryPriority.HIGH,
            )
            logger.info(
                f"Triggered indexing for user_file_id={user_file.id} "
                f"with task_id={task.id}"
            )

    return CategorizedFilesResult(
        user_files=user_files,

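The reshaped upload path above picks between two execution backends at runtime. A minimal sketch of that dispatch pattern, with illustrative stand-ins for the drain loop and the Celery client (neither is the real Onyx object):

from fastapi import BackgroundTasks


def drain_fn(tenant_id: str) -> None:
    """Illustrative stand-in for the in-process drain loop."""


class _StubCeleryApp:
    """Illustrative stand-in for the Celery client app."""

    def send_task(self, name: str, kwargs: dict) -> None:
        print(f"enqueued {name} with {kwargs}")


celery_app = _StubCeleryApp()


def dispatch(
    item_ids: list[str],
    tenant_id: str,
    background_tasks: BackgroundTasks | None,
    disable_vector_db: bool,
) -> None:
    if disable_vector_db and background_tasks is not None:
        # No Celery workers in this mode: run one drain pass after the
        # HTTP response is sent, instead of one task per item.
        background_tasks.add_task(drain_fn, tenant_id)
    else:
        for item_id in item_ids:
            celery_app.send_task(
                "process_single_item",
                kwargs={"item_id": item_id, "tenant_id": tenant_id},
            )
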
@@ -32,11 +32,14 @@ from onyx.auth.schemas import UserUpdate
from onyx.auth.users import auth_backend
from onyx.auth.users import create_onyx_oauth_router
from onyx.auth.users import fastapi_users
from onyx.cache.interface import CacheBackendType
from onyx.configs.app_configs import APP_API_PREFIX
from onyx.configs.app_configs import APP_HOST
from onyx.configs.app_configs import APP_PORT
from onyx.configs.app_configs import AUTH_RATE_LIMITING_ENABLED
from onyx.configs.app_configs import AUTH_TYPE
from onyx.configs.app_configs import CACHE_BACKEND
from onyx.configs.app_configs import DISABLE_VECTOR_DB
from onyx.configs.app_configs import LOG_ENDPOINT_LATENCY
from onyx.configs.app_configs import OAUTH_CLIENT_ID
from onyx.configs.app_configs import OAUTH_CLIENT_SECRET
@@ -254,8 +257,53 @@ def include_auth_router_with_prefix(
    )


def validate_cache_backend_settings() -> None:
    """Validate that CACHE_BACKEND=postgres is only used with DISABLE_VECTOR_DB.

    The Postgres cache backend eliminates the Redis dependency, but only works
    when Celery is not running (which requires DISABLE_VECTOR_DB=true).
    """
    if CACHE_BACKEND == CacheBackendType.POSTGRES and not DISABLE_VECTOR_DB:
        raise RuntimeError(
            "CACHE_BACKEND=postgres requires DISABLE_VECTOR_DB=true. "
            "The Postgres cache backend is only supported in no-vector-DB "
            "deployments where Celery is replaced by the in-process task runner."
        )


def validate_no_vector_db_settings() -> None:
    """Validate that DISABLE_VECTOR_DB is not combined with incompatible settings.

    Raises RuntimeError if DISABLE_VECTOR_DB is set alongside MULTI_TENANT or ENABLE_CRAFT,
    since these modes require infrastructure that is removed in no-vector-DB deployments.
    """
    if not DISABLE_VECTOR_DB:
        return

    if MULTI_TENANT:
        raise RuntimeError(
            "DISABLE_VECTOR_DB cannot be used with MULTI_TENANT. "
            "Multi-tenant deployments require the vector database for "
            "per-tenant document indexing and search. Run in single-tenant "
            "mode when disabling the vector database."
        )

    from onyx.server.features.build.configs import ENABLE_CRAFT

    if ENABLE_CRAFT:
        raise RuntimeError(
            "DISABLE_VECTOR_DB cannot be used with ENABLE_CRAFT. "
            "Onyx Craft requires background workers for sandbox lifecycle "
            "management, which are removed in no-vector-DB deployments. "
            "Disable Craft (ENABLE_CRAFT=false) when disabling the vector database."
        )


@asynccontextmanager
async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:  # noqa: ARG001
    validate_no_vector_db_settings()
    validate_cache_backend_settings()

    # Set recursion limit
    if SYSTEM_RECURSION_LIMIT is not None:
        sys.setrecursionlimit(SYSTEM_RECURSION_LIMIT)
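Both validators fail fast at startup rather than letting an unsupported flag combination surface later as a confusing runtime error. A toy version of the same guard pattern, with illustrative flag values (the real checks read CACHE_BACKEND, DISABLE_VECTOR_DB, and MULTI_TENANT from app configs):

def check_settings(
    cache_backend: str, disable_vector_db: bool, multi_tenant: bool
) -> None:
    if cache_backend == "postgres" and not disable_vector_db:
        raise RuntimeError("CACHE_BACKEND=postgres requires DISABLE_VECTOR_DB=true")
    if disable_vector_db and multi_tenant:
        raise RuntimeError("DISABLE_VECTOR_DB cannot be used with MULTI_TENANT")


check_settings("postgres", disable_vector_db=True, multi_tenant=False)  # passes
# check_settings("postgres", disable_vector_db=False, multi_tenant=False)  # raises
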
@@ -324,8 +372,20 @@ async def lifespan(app: FastAPI) -> AsyncGenerator[None, None]:  # noqa: ARG001
    if AUTH_RATE_LIMITING_ENABLED:
        await setup_auth_limiter()

    if DISABLE_VECTOR_DB:
        from onyx.background.periodic_poller import recover_stuck_user_files
        from onyx.background.periodic_poller import start_periodic_poller

        recover_stuck_user_files(POSTGRES_DEFAULT_SCHEMA)
        start_periodic_poller(POSTGRES_DEFAULT_SCHEMA)

    yield

    if DISABLE_VECTOR_DB:
        from onyx.background.periodic_poller import stop_periodic_poller

        stop_periodic_poller()

    SqlEngine.reset_engine()

    if AUTH_RATE_LIMITING_ENABLED:

@@ -2,6 +2,7 @@ import json
from uuid import UUID

from fastapi import APIRouter
from fastapi import BackgroundTasks
from fastapi import Depends
from fastapi import File
from fastapi import Form
@@ -12,13 +13,7 @@ from pydantic import BaseModel
from sqlalchemy.orm import Session

from onyx.auth.users import current_user
from onyx.background.celery.tasks.user_file_processing.tasks import (
    enqueue_user_file_project_sync_task,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
    get_user_file_project_sync_queue_depth,
)
from onyx.background.celery.versioned_apps.client import app as client_app
from onyx.configs.app_configs import DISABLE_VECTOR_DB
from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryQueues
from onyx.configs.constants import OnyxCeleryTask
@@ -34,7 +29,6 @@ from onyx.db.models import UserProject
from onyx.db.persona import get_personas_by_ids
from onyx.db.projects import get_project_token_count
from onyx.db.projects import upload_files_to_user_files_with_indexing
from onyx.redis.redis_pool import get_redis_client
from onyx.server.features.projects.models import CategorizedFilesSnapshot
from onyx.server.features.projects.models import ChatSessionRequest
from onyx.server.features.projects.models import TokenCountResponse
@@ -55,7 +49,27 @@ class UserFileDeleteResult(BaseModel):
    assistant_names: list[str] = []


def _trigger_user_file_project_sync(user_file_id: UUID, tenant_id: str) -> None:
def _trigger_user_file_project_sync(
    user_file_id: UUID,
    tenant_id: str,
    background_tasks: BackgroundTasks | None = None,
) -> None:
    if DISABLE_VECTOR_DB and background_tasks is not None:
        from onyx.background.task_utils import drain_project_sync_loop

        background_tasks.add_task(drain_project_sync_loop, tenant_id)
        logger.info(f"Queued in-process project sync for user_file_id={user_file_id}")
        return

    from onyx.background.celery.tasks.user_file_processing.tasks import (
        enqueue_user_file_project_sync_task,
    )
    from onyx.background.celery.tasks.user_file_processing.tasks import (
        get_user_file_project_sync_queue_depth,
    )
    from onyx.background.celery.versioned_apps.client import app as client_app
    from onyx.redis.redis_pool import get_redis_client

    queue_depth = get_user_file_project_sync_queue_depth(client_app)
    if queue_depth > USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH:
        logger.warning(
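The hunk is cut off mid-warning, but the shape of the Celery branch is a queue-depth backpressure check before enqueueing. A small sketch of that guard with illustrative names and threshold (the real code inspects the Celery sync queue):

from collections.abc import Callable

MAX_QUEUE_DEPTH = 100  # stand-in for USER_FILE_PROJECT_SYNC_MAX_QUEUE_DEPTH


def maybe_enqueue(
    get_queue_depth: Callable[[], int],
    enqueue: Callable[[str], None],
    item_id: str,
) -> bool:
    """Enqueue unless the queue is already backed up; skipped items are
    assumed to be picked up later by a periodic sweep."""
    if get_queue_depth() > MAX_QUEUE_DEPTH:
        return False
    enqueue(item_id)
    return True
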
@@ -111,6 +125,7 @@ def create_project(

@router.post("/file/upload", tags=PUBLIC_API_TAGS)
def upload_user_files(
    bg_tasks: BackgroundTasks,
    files: list[UploadFile] = File(...),
    project_id: int | None = Form(None),
    temp_id_map: str | None = Form(None),  # JSON string mapping hashed key -> temp_id
@@ -137,12 +152,12 @@ def upload_user_files(
            user=user,
            temp_id_map=parsed_temp_id_map,
            db_session=db_session,
            background_tasks=bg_tasks if DISABLE_VECTOR_DB else None,
        )

        return CategorizedFilesSnapshot.from_result(categorized_files_result)

    except Exception as e:
        # Log error with type, message, and stack for easier debugging
        logger.exception(f"Error uploading files - {type(e).__name__}: {str(e)}")
        raise HTTPException(
            status_code=500,
@@ -192,6 +207,7 @@ def get_files_in_project(
def unlink_user_file_from_project(
    project_id: int,
    file_id: UUID,
    bg_tasks: BackgroundTasks,
    user: User = Depends(current_user),
    db_session: Session = Depends(get_session),
) -> Response:
@@ -208,7 +224,6 @@ def unlink_user_file_from_project(
    if project is None:
        raise HTTPException(status_code=404, detail="Project not found")

    user_id = user.id
    user_file = (
        db_session.query(UserFile)
        .filter(UserFile.id == file_id, UserFile.user_id == user_id)
@@ -224,7 +239,7 @@ def unlink_user_file_from_project(
    db_session.commit()

    tenant_id = get_current_tenant_id()
    _trigger_user_file_project_sync(user_file.id, tenant_id)
    _trigger_user_file_project_sync(user_file.id, tenant_id, bg_tasks)

    return Response(status_code=204)

@@ -237,6 +252,7 @@ def unlink_user_file_from_project(
def link_user_file_to_project(
    project_id: int,
    file_id: UUID,
    bg_tasks: BackgroundTasks,
    user: User = Depends(current_user),
    db_session: Session = Depends(get_session),
) -> UserFileSnapshot:
@@ -268,7 +284,7 @@ def link_user_file_to_project(
    db_session.commit()

    tenant_id = get_current_tenant_id()
    _trigger_user_file_project_sync(user_file.id, tenant_id)
    _trigger_user_file_project_sync(user_file.id, tenant_id, bg_tasks)

    return UserFileSnapshot.from_model(user_file)

@@ -426,6 +442,7 @@ def delete_project(
@router.delete("/file/{file_id}", tags=PUBLIC_API_TAGS)
def delete_user_file(
    file_id: UUID,
    bg_tasks: BackgroundTasks,
    user: User = Depends(current_user),
    db_session: Session = Depends(get_session),
) -> UserFileDeleteResult:
@@ -458,15 +475,25 @@ def delete_user_file(
    db_session.commit()

    tenant_id = get_current_tenant_id()
    task = client_app.send_task(
        OnyxCeleryTask.DELETE_SINGLE_USER_FILE,
        kwargs={"user_file_id": str(user_file.id), "tenant_id": tenant_id},
        queue=OnyxCeleryQueues.USER_FILE_DELETE,
        priority=OnyxCeleryPriority.HIGH,
    )
    logger.info(
        f"Triggered delete for user_file_id={user_file.id} with task_id={task.id}"
    )
    if DISABLE_VECTOR_DB:
        from onyx.background.task_utils import drain_delete_loop

        bg_tasks.add_task(drain_delete_loop, tenant_id)
        logger.info(f"Queued in-process delete for user_file_id={user_file.id}")
    else:
        from onyx.background.celery.versioned_apps.client import app as client_app

        task = client_app.send_task(
            OnyxCeleryTask.DELETE_SINGLE_USER_FILE,
            kwargs={"user_file_id": str(user_file.id), "tenant_id": tenant_id},
            queue=OnyxCeleryQueues.USER_FILE_DELETE,
            priority=OnyxCeleryPriority.HIGH,
        )
        logger.info(
            f"Triggered delete for user_file_id={user_file.id} "
            f"with task_id={task.id}"
        )

    return UserFileDeleteResult(
        has_associations=False, project_names=[], assistant_names=[]
    )

@@ -97,6 +97,7 @@ def _build_llm_provider_request(
        ),  # Only this from source
        api_base=api_base,  # From request
        api_version=api_version,  # From request
        default_model_name=model_name,
        deployment_name=deployment_name,  # From request
        is_public=True,
        groups=[],
@@ -135,6 +136,7 @@ def _build_llm_provider_request(
        api_key=api_key,
        api_base=api_base,
        api_version=api_version,
        default_model_name=model_name,
        deployment_name=deployment_name,
        is_public=True,
        groups=[],
@@ -166,6 +168,7 @@ def _create_image_gen_llm_provider__no_commit(
        api_key=provider_request.api_key,
        api_base=provider_request.api_base,
        api_version=provider_request.api_version,
        default_model_name=provider_request.default_model_name,
        deployment_name=provider_request.deployment_name,
        is_public=provider_request.is_public,
        custom_config=provider_request.custom_config,

@@ -22,10 +22,7 @@ from onyx.auth.users import current_chat_accessible_user
from onyx.db.engine.sql_engine import get_session
from onyx.db.enums import LLMModelFlowType
from onyx.db.llm import can_user_access_llm_provider
from onyx.db.llm import fetch_default_llm_model
from onyx.db.llm import fetch_default_vision_model
from onyx.db.llm import fetch_existing_llm_provider
from onyx.db.llm import fetch_existing_llm_provider_by_id
from onyx.db.llm import fetch_existing_llm_providers
from onyx.db.llm import fetch_existing_models
from onyx.db.llm import fetch_persona_with_groups
@@ -55,12 +52,11 @@ from onyx.llm.well_known_providers.llm_provider_options import (
)
from onyx.server.manage.llm.models import BedrockFinalModelResponse
from onyx.server.manage.llm.models import BedrockModelsRequest
from onyx.server.manage.llm.models import DefaultModel
from onyx.server.manage.llm.models import LLMCost
from onyx.server.manage.llm.models import LLMProviderDescriptor
from onyx.server.manage.llm.models import LLMProviderResponse
from onyx.server.manage.llm.models import LLMProviderUpsertRequest
from onyx.server.manage.llm.models import LLMProviderView
from onyx.server.manage.llm.models import ModelConfigurationUpsertRequest
from onyx.server.manage.llm.models import OllamaFinalModelResponse
from onyx.server.manage.llm.models import OllamaModelDetails
from onyx.server.manage.llm.models import OllamaModelsRequest
@@ -237,9 +233,12 @@ def test_llm_configuration(

    test_api_key = test_llm_request.api_key
    test_custom_config = test_llm_request.custom_config
    if test_llm_request.id:
        existing_provider = fetch_existing_llm_provider_by_id(
            id=test_llm_request.id, db_session=db_session
    if test_llm_request.name:
        # NOTE: we are querying by name. we probably should be querying by an invariant id, but
        # as it turns out the name is not editable in the UI and other code also keys off name,
        # so we won't rock the boat just yet.
        existing_provider = fetch_existing_llm_provider(
            name=test_llm_request.name, db_session=db_session
        )
        if existing_provider:
            test_custom_config = _restore_masked_custom_config_values(
@@ -269,7 +268,7 @@ def test_llm_configuration(

    llm = get_llm(
        provider=test_llm_request.provider,
        model=test_llm_request.model,
        model=test_llm_request.default_model_name,
        api_key=test_api_key,
        api_base=test_llm_request.api_base,
        api_version=test_llm_request.api_version,
@@ -304,7 +303,7 @@ def list_llm_providers(
    include_image_gen: bool = Query(False),
    _: User = Depends(current_admin_user),
    db_session: Session = Depends(get_session),
) -> LLMProviderResponse[LLMProviderView]:
) -> list[LLMProviderView]:
    start_time = datetime.now(timezone.utc)
    logger.debug("Starting to fetch LLM providers")

@@ -329,15 +328,7 @@ def list_llm_providers(
    duration = (end_time - start_time).total_seconds()
    logger.debug(f"Completed fetching LLM providers in {duration:.2f} seconds")

    return LLMProviderResponse[LLMProviderView].from_models(
        providers=llm_provider_list,
        default_text=DefaultModel.from_model_config(
            fetch_default_llm_model(db_session)
        ),
        default_vision=DefaultModel.from_model_config(
            fetch_default_vision_model(db_session)
        ),
    )
    return llm_provider_list


@admin_router.put("/provider")
@@ -353,44 +344,18 @@ def put_llm_provider(
    # validate request (e.g. if we're intending to create but the name already exists we should throw an error)
    # NOTE: may involve duplicate fetching to Postgres, but we're assuming SQLAlchemy is smart enough to cache
    # the result
    existing_provider = None
    if llm_provider_upsert_request.id:
        existing_provider = fetch_existing_llm_provider_by_id(
            id=llm_provider_upsert_request.id, db_session=db_session
        )

    # Check name constraints
    # TODO: Once port from name to id is complete, unique name will no longer be required
    if existing_provider and llm_provider_upsert_request.name != existing_provider.name:
        raise HTTPException(
            status_code=400,
            detail="Renaming providers is not currently supported",
        )

    found_provider = fetch_existing_llm_provider(
    existing_provider = fetch_existing_llm_provider(
        name=llm_provider_upsert_request.name, db_session=db_session
    )
    if found_provider is not None and found_provider is not existing_provider:
        raise HTTPException(
            status_code=400,
            detail=f"Provider with name={llm_provider_upsert_request.name} already exists",
        )

    if existing_provider and is_creation:
        raise HTTPException(
            status_code=400,
            detail=(
                f"LLM Provider with name {llm_provider_upsert_request.name} and "
                f"id={llm_provider_upsert_request.id} already exists"
            ),
            detail=f"LLM Provider with name {llm_provider_upsert_request.name} already exists",
        )
    elif not existing_provider and not is_creation:
        raise HTTPException(
            status_code=400,
            detail=(
                f"LLM Provider with name {llm_provider_upsert_request.name} and "
                f"id={llm_provider_upsert_request.id} does not exist"
            ),
            detail=f"LLM Provider with name {llm_provider_upsert_request.name} does not exist",
        )

    # SSRF Protection: Validate api_base and custom_config match stored values
@@ -428,6 +393,22 @@ def put_llm_provider(
        deduplicated_personas.append(persona_id)
    llm_provider_upsert_request.personas = deduplicated_personas

    default_model_found = False

    for model_configuration in llm_provider_upsert_request.model_configurations:
        if model_configuration.name == llm_provider_upsert_request.default_model_name:
            model_configuration.is_visible = True
            default_model_found = True

    # TODO: Remove this logic on api change
    # Believed to be a dead pathway but we want to be safe for now
    if not default_model_found:
        llm_provider_upsert_request.model_configurations.append(
            ModelConfigurationUpsertRequest(
                name=llm_provider_upsert_request.default_model_name, is_visible=True
            )
        )

    # the llm api key is sanitized when returned to clients, so the only time we
    # should get a real key is when it is explicitly changed
    if existing_provider and not llm_provider_upsert_request.api_key_changed:
@@ -457,8 +438,8 @@ def put_llm_provider(
        config = fetch_llm_recommendations_from_github()
        if config and llm_provider_upsert_request.provider in config.providers:
            # Refetch the provider to get the updated model
            updated_provider = fetch_existing_llm_provider_by_id(
                id=result.id, db_session=db_session
            updated_provider = fetch_existing_llm_provider(
                name=llm_provider_upsert_request.name, db_session=db_session
            )
            if updated_provider:
                sync_auto_mode_models(
@@ -488,29 +469,28 @@ def delete_llm_provider(
        raise HTTPException(status_code=404, detail=str(e))


@admin_router.post("/default")
@admin_router.post("/provider/{provider_id}/default")
def set_provider_as_default(
    default_model_request: DefaultModel,
    provider_id: int,
    _: User = Depends(current_admin_user),
    db_session: Session = Depends(get_session),
) -> None:
    update_default_provider(
        provider_id=default_model_request.provider_id,
        model_name=default_model_request.model_name,
        db_session=db_session,
    )
    update_default_provider(provider_id=provider_id, db_session=db_session)


@admin_router.post("/default-vision")
@admin_router.post("/provider/{provider_id}/default-vision")
def set_provider_as_default_vision(
    default_model: DefaultModel,
    provider_id: int,
    vision_model: str | None = Query(
        None, description="The default vision model to use"
    ),
    _: User = Depends(current_admin_user),
    db_session: Session = Depends(get_session),
) -> None:
    if vision_model is None:
        raise HTTPException(status_code=404, detail="Vision model not provided")
    update_default_vision_provider(
        provider_id=default_model.provider_id,
        vision_model=default_model.model_name,
        db_session=db_session,
        provider_id=provider_id, vision_model=vision_model, db_session=db_session
    )


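The provider id moves from the request body into the path, and the chat-default route no longer takes a body at all. Hypothetical client calls against the new routes (the base URL, admin prefix, auth, and ids are all assumptions, and `requests` stands in for whatever HTTP client is used):

import requests

BASE = "http://localhost:8080/api/admin/llm"  # assumed prefix

# Chat default: id in the path, no body needed any more.
requests.post(f"{BASE}/provider/1/default")

# Vision default: the model name now travels as a query parameter.
requests.post(f"{BASE}/provider/1/default-vision", params={"vision_model": "gpt-4o"})
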
@@ -536,7 +516,7 @@ def get_auto_config(
def get_vision_capable_providers(
    _: User = Depends(current_admin_user),
    db_session: Session = Depends(get_session),
) -> LLMProviderResponse[VisionProviderResponse]:
) -> list[VisionProviderResponse]:
    """Return a list of LLM providers and their models that support image input"""
    vision_models = fetch_existing_models(
        db_session=db_session, flow_types=[LLMModelFlowType.VISION]
@@ -565,13 +545,7 @@ def get_vision_capable_providers(
    ]

    logger.debug(f"Found {len(vision_provider_response)} vision-capable providers")

    return LLMProviderResponse[VisionProviderResponse].from_models(
        providers=vision_provider_response,
        default_vision=DefaultModel.from_model_config(
            fetch_default_vision_model(db_session)
        ),
    )
    return vision_provider_response


"""Endpoints for all"""
@@ -581,7 +555,7 @@ def get_vision_capable_providers(
def list_llm_provider_basics(
    user: User = Depends(current_chat_accessible_user),
    db_session: Session = Depends(get_session),
) -> LLMProviderResponse[LLMProviderDescriptor]:
) -> list[LLMProviderDescriptor]:
    """Get LLM providers accessible to the current user.

    Returns:
@@ -618,15 +592,7 @@ def list_llm_provider_basics(
        f"Completed fetching {len(accessible_providers)} user-accessible providers in {duration:.2f} seconds"
    )

    return LLMProviderResponse[LLMProviderDescriptor].from_models(
        providers=accessible_providers,
        default_text=DefaultModel.from_model_config(
            fetch_default_llm_model(db_session)
        ),
        default_vision=DefaultModel.from_model_config(
            fetch_default_vision_model(db_session)
        ),
    )
    return accessible_providers


def get_valid_model_names_for_persona(
@@ -669,7 +635,7 @@ def list_llm_providers_for_persona(
    persona_id: int,
    user: User = Depends(current_chat_accessible_user),
    db_session: Session = Depends(get_session),
) -> LLMProviderResponse[LLMProviderDescriptor]:
) -> list[LLMProviderDescriptor]:
    """Get LLM providers for a specific persona.

    Returns providers that the user can access when using this persona:
@@ -716,51 +682,7 @@ def list_llm_providers_for_persona(
        f"Completed fetching {len(llm_provider_list)} LLM providers for persona {persona_id} in {duration:.2f} seconds"
    )

    # Get the default model and vision model for the persona
    # TODO: Port persona's over to use ID
    persona_default_provider = persona.llm_model_provider_override
    persona_default_model = persona.llm_model_version_override

    default_text_model = fetch_default_llm_model(db_session)
    default_vision_model = fetch_default_vision_model(db_session)

    # Build default_text and default_vision using persona overrides when available,
    # falling back to the global defaults.
    default_text = DefaultModel.from_model_config(default_text_model)
    default_vision = DefaultModel.from_model_config(default_vision_model)

    if persona_default_provider:
        provider = fetch_existing_llm_provider(persona_default_provider, db_session)
        if provider and can_user_access_llm_provider(
            provider, user_group_ids, persona, is_admin=is_admin
        ):
            if persona_default_model:
                # Persona specifies both provider and model — use them directly
                default_text = DefaultModel(
                    provider_id=provider.id,
                    model_name=persona_default_model,
                )
            else:
                # Persona specifies only the provider — pick a visible (public) model,
                # falling back to any model on this provider
                visible_model = next(
                    (mc for mc in provider.model_configurations if mc.is_visible),
                    None,
                )
                fallback_model = visible_model or next(
                    iter(provider.model_configurations), None
                )
                if fallback_model:
                    default_text = DefaultModel(
                        provider_id=provider.id,
                        model_name=fallback_model.name,
                    )

    return LLMProviderResponse[LLMProviderDescriptor].from_models(
        providers=llm_provider_list,
        default_text=default_text,
        default_vision=default_vision,
    )
    return llm_provider_list


@admin_router.get("/provider-contextual-cost")

@@ -1,9 +1,5 @@
from __future__ import annotations

from typing import Any
from typing import Generic
from typing import TYPE_CHECKING
from typing import TypeVar

from pydantic import BaseModel
from pydantic import Field
@@ -25,22 +21,50 @@ if TYPE_CHECKING:
    ModelConfiguration as ModelConfigurationModel,
)

T = TypeVar("T", "LLMProviderDescriptor", "LLMProviderView", "VisionProviderResponse")

# TODO: Clear this up on api refactor
# There is still logic that requires sending each providers default model name
# There is no logic that requires sending the providers default vision model name
# We only send for the one that is actually the default
def get_default_llm_model_name(llm_provider_model: "LLMProviderModel") -> str:
    """Find the default conversation model name for a provider.

    Returns the model name if found, otherwise returns empty string.
    """
    for model_config in llm_provider_model.model_configurations:
        for flow in model_config.llm_model_flows:
            if flow.is_default and flow.llm_model_flow_type == LLMModelFlowType.CHAT:
                return model_config.name
    return ""


def get_default_vision_model_name(llm_provider_model: "LLMProviderModel") -> str | None:
    """Find the default vision model name for a provider.

    Returns the model name if found, otherwise returns None.
    """
    for model_config in llm_provider_model.model_configurations:
        for flow in model_config.llm_model_flows:
            if flow.is_default and flow.llm_model_flow_type == LLMModelFlowType.VISION:
                return model_config.name
    return None


class TestLLMRequest(BaseModel):
    # provider level
    id: int | None = None
    name: str | None = None
    provider: str
    model: str
    api_key: str | None = None
    api_base: str | None = None
    api_version: str | None = None
    custom_config: dict[str, str] | None = None

    # model level
    default_model_name: str
    deployment_name: str | None = None

    model_configurations: list["ModelConfigurationUpsertRequest"]

    # whether to try and use the existing API key / custom config
    api_key_changed: bool
    custom_config_changed: bool
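The two helpers resolve a provider's defaults by scanning each model configuration's flow records. A self-contained sketch of that scan over hypothetical rows (the real code walks ORM relationships and compares against an LLMModelFlowType enum rather than plain strings):

from dataclasses import dataclass, field


@dataclass
class _Flow:
    is_default: bool
    llm_model_flow_type: str  # "CHAT" or "VISION" in this sketch


@dataclass
class _ModelConfig:
    name: str
    llm_model_flows: list[_Flow] = field(default_factory=list)


def default_name_for(models: list[_ModelConfig], flow_type: str) -> str | None:
    # Same scan as the diff's helpers: the first model whose flow is
    # marked default for the requested flow type wins.
    for mc in models:
        for flow in mc.llm_model_flows:
            if flow.is_default and flow.llm_model_flow_type == flow_type:
                return mc.name
    return None


models = [
    _ModelConfig("gpt-4o-mini", [_Flow(True, "CHAT")]),
    _ModelConfig("gpt-4o", [_Flow(True, "VISION")]),
]
assert default_name_for(models, "CHAT") == "gpt-4o-mini"
assert default_name_for(models, "VISION") == "gpt-4o"
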
@@ -56,10 +80,13 @@ class LLMProviderDescriptor(BaseModel):
    """A descriptor for an LLM provider that can be safely viewed by
    non-admin users. Used when giving a list of available LLMs."""

    id: int
    name: str
    provider: str
    provider_display_name: str  # Human-friendly name like "Claude (Anthropic)"
    default_model_name: str
    is_default_provider: bool | None
    is_default_vision_provider: bool | None
    default_vision_model: str | None
    model_configurations: list["ModelConfigurationView"]

    @classmethod
@@ -72,12 +99,24 @@ class LLMProviderDescriptor(BaseModel):
        )

        provider = llm_provider_model.provider
        default_model_name = get_default_llm_model_name(llm_provider_model)
        default_vision_model = get_default_vision_model_name(llm_provider_model)

        is_default_provider = bool(default_model_name)
        is_default_vision_provider = default_vision_model is not None

        default_model_name = (
            default_model_name or llm_provider_model.default_model_name or ""
        )

        return cls(
            id=llm_provider_model.id,
            name=llm_provider_model.name,
            provider=provider,
            provider_display_name=get_provider_display_name(provider),
            default_model_name=default_model_name,
            is_default_provider=is_default_provider,
            is_default_vision_provider=is_default_vision_provider,
            default_vision_model=default_vision_model,
            model_configurations=filter_model_configurations(
                llm_provider_model.model_configurations, provider
            ),
@@ -91,17 +130,18 @@ class LLMProvider(BaseModel):
    api_base: str | None = None
    api_version: str | None = None
    custom_config: dict[str, str] | None = None
    default_model_name: str
    is_public: bool = True
    is_auto_mode: bool = False
    groups: list[int] = Field(default_factory=list)
    personas: list[int] = Field(default_factory=list)
    deployment_name: str | None = None
    default_vision_model: str | None = None


class LLMProviderUpsertRequest(LLMProvider):
    # should only be used for a "custom" provider
    # for default providers, the built-in model names are used
    id: int | None = None
    api_key_changed: bool = False
    custom_config_changed: bool = False
    model_configurations: list["ModelConfigurationUpsertRequest"] = []
@@ -117,6 +157,8 @@ class LLMProviderView(LLMProvider):
    """Stripped down representation of LLMProvider for display / limited access info only"""

    id: int
    is_default_provider: bool | None = None
    is_default_vision_provider: bool | None = None
    model_configurations: list["ModelConfigurationView"]

    @classmethod
@@ -138,6 +180,16 @@ class LLMProviderView(LLMProvider):

        provider = llm_provider_model.provider

        default_model_name = get_default_llm_model_name(llm_provider_model)
        default_vision_model = get_default_vision_model_name(llm_provider_model)

        is_default_provider = bool(default_model_name)
        is_default_vision_provider = default_vision_model is not None

        default_model_name = (
            default_model_name or llm_provider_model.default_model_name or ""
        )

        return cls(
            id=llm_provider_model.id,
            name=llm_provider_model.name,
@@ -150,6 +202,10 @@ class LLMProviderView(LLMProvider):
            api_base=llm_provider_model.api_base,
            api_version=llm_provider_model.api_version,
            custom_config=llm_provider_model.custom_config,
            default_model_name=default_model_name,
            is_default_provider=is_default_provider,
            is_default_vision_provider=is_default_vision_provider,
            default_vision_model=default_vision_model,
            is_public=llm_provider_model.is_public,
            is_auto_mode=llm_provider_model.is_auto_mode,
            groups=groups,
@@ -369,38 +425,3 @@ class OpenRouterFinalModelResponse(BaseModel):
        int | None
    )  # From OpenRouter API context_length (may be missing for some models)
    supports_image_input: bool


class DefaultModel(BaseModel):
    provider_id: int
    model_name: str

    @classmethod
    def from_model_config(
        cls, model_config: ModelConfigurationModel | None
    ) -> DefaultModel | None:
        if not model_config:
            return None
        return cls(
            provider_id=model_config.llm_provider_id,
            model_name=model_config.name,
        )


class LLMProviderResponse(BaseModel, Generic[T]):
    providers: list[T]
    default_text: DefaultModel | None = None
    default_vision: DefaultModel | None = None

    @classmethod
    def from_models(
        cls,
        providers: list[T],
        default_text: DefaultModel | None = None,
        default_vision: DefaultModel | None = None,
    ) -> LLMProviderResponse[T]:
        return cls(
            providers=providers,
            default_text=default_text,
            default_vision=default_vision,
        )

@@ -19,7 +19,6 @@ class ApplicationStatus(str, Enum):
    PAYMENT_REMINDER = "payment_reminder"
    GRACE_PERIOD = "grace_period"
    GATED_ACCESS = "gated_access"
    SEAT_LIMIT_EXCEEDED = "seat_limit_exceeded"


class Notification(BaseModel):
@@ -83,10 +82,6 @@ class Settings(BaseModel):
    # Default Assistant settings
    disable_default_assistant: bool | None = False

    # Seat usage - populated by license enforcement when seat limit is exceeded
    seat_count: int | None = None
    used_seats: int | None = None

    # OpenSearch migration
    opensearch_indexing_enabled: bool = False


@@ -25,7 +25,6 @@ from onyx.db.enums import EmbeddingPrecision
from onyx.db.index_attempt import cancel_indexing_attempts_past_model
from onyx.db.index_attempt import expire_index_attempts
from onyx.db.llm import fetch_default_llm_model
from onyx.db.llm import fetch_existing_llm_provider
from onyx.db.llm import update_default_provider
from onyx.db.llm import upsert_llm_provider
from onyx.db.search_settings import get_active_search_settings
@@ -255,18 +254,14 @@ def setup_postgres(db_session: Session) -> None:
        logger.notice("Setting up default OpenAI LLM for dev.")

        llm_model = GEN_AI_MODEL_VERSION or "gpt-4o-mini"
        provider_name = "DevEnvPresetOpenAI"
        existing = fetch_existing_llm_provider(
            name=provider_name, db_session=db_session
        )
        model_req = LLMProviderUpsertRequest(
            id=existing.id if existing else None,
            name=provider_name,
            name="DevEnvPresetOpenAI",
            provider=LlmProviderNames.OPENAI,
            api_key=GEN_AI_API_KEY,
            api_base=None,
            api_version=None,
            custom_config=None,
            default_model_name=llm_model,
            is_public=True,
            groups=[],
            model_configurations=[
@@ -278,9 +273,7 @@ def setup_postgres(db_session: Session) -> None:
        new_llm_provider = upsert_llm_provider(
            llm_provider_upsert_request=model_req, db_session=db_session
        )
        update_default_provider(
            provider_id=new_llm_provider.id, model_name=llm_model, db_session=db_session
        )
        update_default_provider(provider_id=new_llm_provider.id, db_session=db_session)


def update_default_multipass_indexing(db_session: Session) -> None:

@@ -257,7 +257,7 @@ exceptiongroup==1.3.0
    # via
    #   braintrust
    #   fastmcp
fastapi==0.133.1
fastapi==0.128.0
    # via
    #   fastapi-limiter
    #   fastapi-users
@@ -1155,7 +1155,6 @@ typing-inspect==0.9.0
    # via dataclasses-json
typing-inspection==0.4.2
    # via
    #   fastapi
    #   mcp
    #   pydantic
    #   pydantic-settings
@@ -1217,7 +1216,7 @@ websockets==15.0.1
    # via
    #   fastmcp
    #   google-genai
werkzeug==3.1.6
werkzeug==3.1.5
    # via sendgrid
wrapt==1.17.3
    # via

@@ -125,7 +125,7 @@ executing==2.2.1
    # via stack-data
faker==40.1.2
    # via onyx
fastapi==0.133.1
fastapi==0.128.0
    # via
    #   onyx
    #   onyx-devtools
@@ -619,7 +619,6 @@ typing-extensions==4.15.0
    #   typing-inspection
typing-inspection==0.4.2
    # via
    #   fastapi
    #   mcp
    #   pydantic
    #   pydantic-settings

@@ -90,7 +90,7 @@ docstring-parser==0.17.0
    # via google-cloud-aiplatform
durationpy==0.10
    # via kubernetes
fastapi==0.133.1
fastapi==0.128.0
    # via onyx
fastavro==1.12.1
    # via cohere
@@ -398,7 +398,6 @@ typing-extensions==4.15.0
    #   typing-inspection
typing-inspection==0.4.2
    # via
    #   fastapi
    #   mcp
    #   pydantic
    #   pydantic-settings

@@ -108,7 +108,7 @@ durationpy==0.10
    # via kubernetes
einops==0.8.1
    # via onyx
fastapi==0.133.1
fastapi==0.128.0
    # via
    #   onyx
    #   sentry-sdk
@@ -525,7 +525,6 @@ typing-extensions==4.15.0
    #   typing-inspection
typing-inspection==0.4.2
    # via
    #   fastapi
    #   mcp
    #   pydantic
    #   pydantic-settings


@@ -17,7 +17,7 @@ def test_bedrock_llm_configuration(client: TestClient) -> None:
    # Prepare the test request payload
    test_request: dict[str, Any] = {
        "provider": LlmProviderNames.BEDROCK,
        "model": _DEFAULT_BEDROCK_MODEL,
        "default_model_name": _DEFAULT_BEDROCK_MODEL,
        "api_key": None,
        "api_base": None,
        "api_version": None,
@@ -44,7 +44,7 @@ def test_bedrock_llm_configuration_invalid_key(client: TestClient) -> None:
    # Prepare the test request payload with invalid credentials
    test_request: dict[str, Any] = {
        "provider": LlmProviderNames.BEDROCK,
        "model": _DEFAULT_BEDROCK_MODEL,
        "default_model_name": _DEFAULT_BEDROCK_MODEL,
        "api_key": None,
        "api_base": None,
        "api_version": None,

@@ -28,6 +28,7 @@ def ensure_default_llm_provider(db_session: Session) -> None:
            provider=LlmProviderNames.OPENAI,
            api_key=os.environ.get("OPENAI_API_KEY", "test"),
            is_public=True,
            default_model_name="gpt-4o-mini",
            model_configurations=[
                ModelConfigurationUpsertRequest(
                    name="gpt-4o-mini",
@@ -40,7 +41,7 @@ def ensure_default_llm_provider(db_session: Session) -> None:
            llm_provider_upsert_request=llm_provider_request,
            db_session=db_session,
        )
        update_default_provider(provider.id, "gpt-4o-mini", db_session)
        update_default_provider(provider.id, db_session)
    except Exception as exc:  # pragma: no cover - only hits on duplicate setup issues
        # Rollback to clear the pending transaction state
        db_session.rollback()

@@ -47,6 +47,7 @@ def test_answer_with_only_anthropic_provider(
        name=provider_name,
        provider=LlmProviderNames.ANTHROPIC,
        api_key=anthropic_api_key,
        default_model_name=anthropic_model,
        is_public=True,
        groups=[],
        model_configurations=[
@@ -58,7 +59,7 @@ def test_answer_with_only_anthropic_provider(
    )

    try:
        update_default_provider(anthropic_provider.id, anthropic_model, db_session)
        update_default_provider(anthropic_provider.id, db_session)

        test_user = create_test_user(db_session, email_prefix="anthropic_only")
        chat_session = create_chat_session(

@@ -0,0 +1,219 @@
"""External dependency unit tests for startup recovery (Step 10g).

Seeds ``UserFile`` records in stuck states (PROCESSING, DELETING,
needs_project_sync) then calls ``recover_stuck_user_files`` and verifies
the drain loops pick them up via ``FOR UPDATE SKIP LOCKED``.

Uses real PostgreSQL (via ``db_session`` / ``tenant_context`` fixtures).
The per-file ``_impl`` functions are mocked so no real file store or
connector is needed — we only verify that recovery finds and dispatches
the correct files.
"""

from collections.abc import Generator
from unittest.mock import MagicMock
from unittest.mock import patch
from uuid import uuid4

import pytest
from sqlalchemy.orm import Session

from onyx.background.periodic_poller import recover_stuck_user_files
from onyx.db.enums import UserFileStatus
from onyx.db.models import UserFile
from tests.external_dependency_unit.conftest import create_test_user
from tests.external_dependency_unit.constants import TEST_TENANT_ID

# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------

_IMPL_MODULE = "onyx.background.celery.tasks.user_file_processing.tasks"


def _create_user_file(
    db_session: Session,
    user_id: object,
    *,
    status: UserFileStatus = UserFileStatus.PROCESSING,
    needs_project_sync: bool = False,
    needs_persona_sync: bool = False,
) -> UserFile:
    uf = UserFile(
        id=uuid4(),
        user_id=user_id,
        file_id=f"test_file_{uuid4().hex[:8]}",
        name=f"test_{uuid4().hex[:8]}.txt",
        file_type="text/plain",
        status=status,
        needs_project_sync=needs_project_sync,
        needs_persona_sync=needs_persona_sync,
    )
    db_session.add(uf)
    db_session.commit()
    db_session.refresh(uf)
    return uf


@pytest.fixture()
def _cleanup_user_files(db_session: Session) -> Generator[list[UserFile], None, None]:
    """Track created UserFile rows and delete them after each test."""
    created: list[UserFile] = []
    yield created
    for uf in created:
        existing = db_session.get(UserFile, uf.id)
        if existing:
            db_session.delete(existing)
    db_session.commit()


# ---------------------------------------------------------------------------
# Tests
# ---------------------------------------------------------------------------


class TestRecoverProcessingFiles:
    """Files in PROCESSING status are re-processed via the processing drain loop."""

    def test_processing_files_recovered(
        self,
        db_session: Session,
        tenant_context: None,  # noqa: ARG002
        _cleanup_user_files: list[UserFile],
    ) -> None:
        user = create_test_user(db_session, "recovery_proc")
        uf = _create_user_file(db_session, user.id, status=UserFileStatus.PROCESSING)
        _cleanup_user_files.append(uf)

        mock_impl = MagicMock()
        with patch(f"{_IMPL_MODULE}._process_user_file_impl", mock_impl):
            recover_stuck_user_files(TEST_TENANT_ID)

        called_ids = [call.kwargs["user_file_id"] for call in mock_impl.call_args_list]
        assert (
            str(uf.id) in called_ids
        ), f"Expected file {uf.id} to be recovered but got: {called_ids}"

    def test_completed_files_not_recovered(
        self,
        db_session: Session,
        tenant_context: None,  # noqa: ARG002
        _cleanup_user_files: list[UserFile],
    ) -> None:
        user = create_test_user(db_session, "recovery_comp")
        uf = _create_user_file(db_session, user.id, status=UserFileStatus.COMPLETED)
        _cleanup_user_files.append(uf)

        mock_impl = MagicMock()
        with patch(f"{_IMPL_MODULE}._process_user_file_impl", mock_impl):
            recover_stuck_user_files(TEST_TENANT_ID)

        called_ids = [call.kwargs["user_file_id"] for call in mock_impl.call_args_list]
        assert (
            str(uf.id) not in called_ids
        ), f"COMPLETED file {uf.id} should not have been recovered"


class TestRecoverDeletingFiles:
    """Files in DELETING status are recovered via the delete drain loop."""

    def test_deleting_files_recovered(
        self,
        db_session: Session,
        tenant_context: None,  # noqa: ARG002
        _cleanup_user_files: list[UserFile],
    ) -> None:
        user = create_test_user(db_session, "recovery_del")
        uf = _create_user_file(db_session, user.id, status=UserFileStatus.DELETING)
        _cleanup_user_files.append(uf)

        mock_impl = MagicMock()
        with patch(f"{_IMPL_MODULE}._delete_user_file_impl", mock_impl):
            recover_stuck_user_files(TEST_TENANT_ID)

        called_ids = [call.kwargs["user_file_id"] for call in mock_impl.call_args_list]
        assert (
            str(uf.id) in called_ids
        ), f"Expected file {uf.id} to be recovered for deletion but got: {called_ids}"


class TestRecoverSyncFiles:
    """Files needing project/persona sync are recovered via the sync drain loop."""

    def test_needs_project_sync_recovered(
        self,
        db_session: Session,
        tenant_context: None,  # noqa: ARG002
        _cleanup_user_files: list[UserFile],
    ) -> None:
        user = create_test_user(db_session, "recovery_sync")
        uf = _create_user_file(
            db_session,
            user.id,
            status=UserFileStatus.COMPLETED,
            needs_project_sync=True,
        )
        _cleanup_user_files.append(uf)

        mock_impl = MagicMock()
        with patch(f"{_IMPL_MODULE}._project_sync_user_file_impl", mock_impl):
            recover_stuck_user_files(TEST_TENANT_ID)

        called_ids = [call.kwargs["user_file_id"] for call in mock_impl.call_args_list]
        assert (
            str(uf.id) in called_ids
        ), f"Expected file {uf.id} to be recovered for sync but got: {called_ids}"

    def test_needs_persona_sync_recovered(
        self,
        db_session: Session,
        tenant_context: None,  # noqa: ARG002
        _cleanup_user_files: list[UserFile],
    ) -> None:
        user = create_test_user(db_session, "recovery_psync")
        uf = _create_user_file(
            db_session,
            user.id,
            status=UserFileStatus.COMPLETED,
            needs_persona_sync=True,
        )
        _cleanup_user_files.append(uf)

        mock_impl = MagicMock()
        with patch(f"{_IMPL_MODULE}._project_sync_user_file_impl", mock_impl):
            recover_stuck_user_files(TEST_TENANT_ID)

        called_ids = [call.kwargs["user_file_id"] for call in mock_impl.call_args_list]
        assert (
            str(uf.id) in called_ids
        ), f"Expected file {uf.id} to be recovered for persona sync but got: {called_ids}"


class TestRecoveryMultipleFiles:
    """Recovery processes all stuck files in one pass, not just the first."""

    def test_multiple_processing_files(
        self,
        db_session: Session,
        tenant_context: None,  # noqa: ARG002
        _cleanup_user_files: list[UserFile],
    ) -> None:
        user = create_test_user(db_session, "recovery_multi")
        files = []
        for _ in range(3):
            uf = _create_user_file(
                db_session, user.id, status=UserFileStatus.PROCESSING
            )
            _cleanup_user_files.append(uf)
            files.append(uf)

        mock_impl = MagicMock()
        with patch(f"{_IMPL_MODULE}._process_user_file_impl", mock_impl):
            recover_stuck_user_files(TEST_TENANT_ID)

        called_ids = {call.kwargs["user_file_id"] for call in mock_impl.call_args_list}
        expected_ids = {str(uf.id) for uf in files}
        assert expected_ids.issubset(called_ids), (
            f"Expected all {len(files)} files to be recovered. "
            f"Missing: {expected_ids - called_ids}"
        )

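The docstring above leans on FOR UPDATE SKIP LOCKED so that concurrent drain loops never claim the same row twice. A minimal SQLAlchemy sketch of that claim query (the model and status are illustrative; the real loops operate on UserFile rows):

from sqlalchemy import select
from sqlalchemy.orm import Session


def claim_stuck_rows(session: Session, model: type, status: object) -> list:
    """Lock and return up to 10 stuck rows, skipping rows another worker holds."""
    stmt = (
        select(model)
        .where(model.status == status)
        .with_for_update(skip_locked=True)  # emits FOR UPDATE SKIP LOCKED
        .limit(10)
    )
    return list(session.scalars(stmt))
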
@@ -29,7 +29,6 @@ from onyx.server.manage.llm.api import (
|
||||
test_llm_configuration as run_test_llm_configuration,
|
||||
)
|
||||
from onyx.server.manage.llm.models import LLMProviderUpsertRequest
|
||||
from onyx.server.manage.llm.models import LLMProviderView
|
||||
from onyx.server.manage.llm.models import ModelConfigurationUpsertRequest
from onyx.server.manage.llm.models import TestLLMRequest as LLMTestRequest

@@ -45,14 +44,15 @@ def _create_test_provider(
db_session: Session,
name: str,
api_key: str = "sk-test-key-00000000000000000000000000000000000",
) -> LLMProviderView:
) -> None:
"""Helper to create a test LLM provider in the database."""
return upsert_llm_provider(
upsert_llm_provider(
LLMProviderUpsertRequest(
name=name,
provider=LlmProviderNames.OPENAI,
api_key=api_key,
api_key_changed=True,
default_model_name="gpt-4o-mini",
model_configurations=[
ModelConfigurationUpsertRequest(name="gpt-4o-mini", is_visible=True)
],
@@ -102,11 +102,17 @@ class TestLLMConfigurationEndpoint:
# This should complete without exception
run_test_llm_configuration(
test_llm_request=LLMTestRequest(
name=None, # New provider (not in DB)
provider=LlmProviderNames.OPENAI,
api_key="sk-new-test-key-0000000000000000000000000000",
api_key_changed=True,
custom_config_changed=False,
model="gpt-4o-mini",
default_model_name="gpt-4o-mini",
model_configurations=[
ModelConfigurationUpsertRequest(
name="gpt-4o-mini", is_visible=True
)
],
),
_=_create_mock_admin(),
db_session=db_session,
@@ -146,11 +152,17 @@ class TestLLMConfigurationEndpoint:
with pytest.raises(HTTPException) as exc_info:
run_test_llm_configuration(
test_llm_request=LLMTestRequest(
name=None,
provider=LlmProviderNames.OPENAI,
api_key="sk-invalid-key-00000000000000000000000000",
api_key_changed=True,
custom_config_changed=False,
model="gpt-4o-mini",
default_model_name="gpt-4o-mini",
model_configurations=[
ModelConfigurationUpsertRequest(
name="gpt-4o-mini", is_visible=True
)
],
),
_=_create_mock_admin(),
db_session=db_session,
@@ -182,9 +194,7 @@ class TestLLMConfigurationEndpoint:

try:
# First, create the provider in the database
provider = _create_test_provider(
db_session, provider_name, api_key=original_api_key
)
_create_test_provider(db_session, provider_name, api_key=original_api_key)

with patch(
"onyx.server.manage.llm.api.test_llm", side_effect=mock_test_llm_capture
@@ -192,12 +202,17 @@ class TestLLMConfigurationEndpoint:
# Test with api_key_changed=False - should use stored key
run_test_llm_configuration(
test_llm_request=LLMTestRequest(
id=provider.id,
name=provider_name, # Existing provider
provider=LlmProviderNames.OPENAI,
api_key=None, # Not providing a new key
api_key_changed=False, # Using existing key
custom_config_changed=False,
model="gpt-4o-mini",
default_model_name="gpt-4o-mini",
model_configurations=[
ModelConfigurationUpsertRequest(
name="gpt-4o-mini", is_visible=True
)
],
),
_=_create_mock_admin(),
db_session=db_session,
@@ -231,9 +246,7 @@ class TestLLMConfigurationEndpoint:

try:
# First, create the provider in the database
provider = _create_test_provider(
db_session, provider_name, api_key=original_api_key
)
_create_test_provider(db_session, provider_name, api_key=original_api_key)

with patch(
"onyx.server.manage.llm.api.test_llm", side_effect=mock_test_llm_capture
@@ -241,12 +254,17 @@ class TestLLMConfigurationEndpoint:
# Test with api_key_changed=True - should use new key
run_test_llm_configuration(
test_llm_request=LLMTestRequest(
id=provider.id,
name=provider_name, # Existing provider
provider=LlmProviderNames.OPENAI,
api_key=new_api_key, # Providing a new key
api_key_changed=True, # Key is being changed
custom_config_changed=False,
model="gpt-4o-mini",
default_model_name="gpt-4o-mini",
model_configurations=[
ModelConfigurationUpsertRequest(
name="gpt-4o-mini", is_visible=True
)
],
),
_=_create_mock_admin(),
db_session=db_session,
@@ -279,7 +297,7 @@ class TestLLMConfigurationEndpoint:

try:
# First, create the provider in the database with custom_config
provider = upsert_llm_provider(
upsert_llm_provider(
LLMProviderUpsertRequest(
name=provider_name,
provider=LlmProviderNames.OPENAI,
@@ -287,6 +305,7 @@ class TestLLMConfigurationEndpoint:
api_key_changed=True,
custom_config=original_custom_config,
custom_config_changed=True,
default_model_name="gpt-4o-mini",
model_configurations=[
ModelConfigurationUpsertRequest(
name="gpt-4o-mini", is_visible=True
@@ -302,13 +321,18 @@ class TestLLMConfigurationEndpoint:
# Test with custom_config_changed=False - should use stored config
run_test_llm_configuration(
test_llm_request=LLMTestRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_key=None,
api_key_changed=False,
custom_config=None, # Not providing new config
custom_config_changed=False, # Using existing config
model="gpt-4o-mini",
default_model_name="gpt-4o-mini",
model_configurations=[
ModelConfigurationUpsertRequest(
name="gpt-4o-mini", is_visible=True
)
],
),
_=_create_mock_admin(),
db_session=db_session,
@@ -344,11 +368,17 @@ class TestLLMConfigurationEndpoint:
for model_name in test_models:
run_test_llm_configuration(
test_llm_request=LLMTestRequest(
name=None,
provider=LlmProviderNames.OPENAI,
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
custom_config_changed=False,
model=model_name,
default_model_name=model_name,
model_configurations=[
ModelConfigurationUpsertRequest(
name=model_name, is_visible=True
)
],
),
_=_create_mock_admin(),
db_session=db_session,
@@ -412,6 +442,7 @@ class TestDefaultProviderEndpoint:
provider=LlmProviderNames.OPENAI,
api_key=provider_1_api_key,
api_key_changed=True,
default_model_name=provider_1_initial_model,
model_configurations=[
ModelConfigurationUpsertRequest(name="gpt-4", is_visible=True),
ModelConfigurationUpsertRequest(name="gpt-4o", is_visible=True),
@@ -421,7 +452,7 @@ class TestDefaultProviderEndpoint:
)

# Set provider 1 as the default provider explicitly
update_default_provider(provider_1.id, provider_1_initial_model, db_session)
update_default_provider(provider_1.id, db_session)

# Step 2: Call run_test_default_provider - should use provider 1's default model
with patch(
@@ -441,6 +472,7 @@ class TestDefaultProviderEndpoint:
provider=LlmProviderNames.OPENAI,
api_key=provider_2_api_key,
api_key_changed=True,
default_model_name=provider_2_default_model,
model_configurations=[
ModelConfigurationUpsertRequest(
name="gpt-4o-mini", is_visible=True
@@ -467,11 +499,11 @@ class TestDefaultProviderEndpoint:
# Step 5: Update provider 1's default model
upsert_llm_provider(
LLMProviderUpsertRequest(
id=provider_1.id,
name=provider_1_name,
provider=LlmProviderNames.OPENAI,
api_key=provider_1_api_key,
api_key_changed=True,
default_model_name=provider_1_updated_model, # Changed
model_configurations=[
ModelConfigurationUpsertRequest(name="gpt-4", is_visible=True),
ModelConfigurationUpsertRequest(name="gpt-4o", is_visible=True),
@@ -480,9 +512,6 @@ class TestDefaultProviderEndpoint:
db_session=db_session,
)

# Set provider 1's default model to the updated model
update_default_provider(provider_1.id, provider_1_updated_model, db_session)

# Step 6: Call run_test_default_provider - should use new model on provider 1
with patch(
"onyx.server.manage.llm.api.test_llm", side_effect=mock_test_llm_capture
@@ -495,7 +524,7 @@ class TestDefaultProviderEndpoint:
captured_llms.clear()

# Step 7: Change the default provider to provider 2
update_default_provider(provider_2.id, provider_2_default_model, db_session)
update_default_provider(provider_2.id, db_session)

# Step 8: Call run_test_default_provider - should use provider 2
with patch(
@@ -567,6 +596,7 @@ class TestDefaultProviderEndpoint:
provider=LlmProviderNames.OPENAI,
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
default_model_name="gpt-4o-mini",
model_configurations=[
ModelConfigurationUpsertRequest(
name="gpt-4o-mini", is_visible=True
@@ -575,7 +605,7 @@ class TestDefaultProviderEndpoint:
),
db_session=db_session,
)
update_default_provider(provider.id, "gpt-4o-mini", db_session)
update_default_provider(provider.id, db_session)

# Test should fail
with patch(
@@ -49,6 +49,7 @@ def _create_test_provider(
api_key_changed=True,
api_base=api_base,
custom_config=custom_config,
default_model_name="gpt-4o-mini",
model_configurations=[
ModelConfigurationUpsertRequest(name="gpt-4o-mini", is_visible=True)
],
@@ -90,14 +91,14 @@ class TestLLMProviderChanges:
the API key should be blocked.
"""
try:
provider = _create_test_provider(db_session, provider_name)
_create_test_provider(db_session, provider_name)

with patch("onyx.server.manage.llm.api.MULTI_TENANT", True):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_base="https://attacker.example.com",
default_model_name="gpt-4o-mini",
)

with pytest.raises(HTTPException) as exc_info:
@@ -124,16 +125,16 @@ class TestLLMProviderChanges:
Changing api_base IS allowed when the API key is also being changed.
"""
try:
provider = _create_test_provider(db_session, provider_name)
_create_test_provider(db_session, provider_name)

with patch("onyx.server.manage.llm.api.MULTI_TENANT", True):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_key="sk-new-key-00000000000000000000000000000000000",
api_key_changed=True,
api_base="https://custom-endpoint.example.com/v1",
default_model_name="gpt-4o-mini",
)

result = put_llm_provider(
@@ -158,16 +159,14 @@ class TestLLMProviderChanges:
original_api_base = "https://original.example.com/v1"

try:
provider = _create_test_provider(
db_session, provider_name, api_base=original_api_base
)
_create_test_provider(db_session, provider_name, api_base=original_api_base)

with patch("onyx.server.manage.llm.api.MULTI_TENANT", True):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_base=original_api_base,
default_model_name="gpt-4o-mini",
)

result = put_llm_provider(
@@ -191,14 +190,14 @@ class TestLLMProviderChanges:
changes. This allows model-only updates when provider has no custom base URL.
"""
try:
view = _create_test_provider(db_session, provider_name, api_base=None)
_create_test_provider(db_session, provider_name, api_base=None)

with patch("onyx.server.manage.llm.api.MULTI_TENANT", True):
update_request = LLMProviderUpsertRequest(
id=view.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_base="",
default_model_name="gpt-4o-mini",
)

result = put_llm_provider(
@@ -224,16 +223,14 @@ class TestLLMProviderChanges:
original_api_base = "https://original.example.com/v1"

try:
provider = _create_test_provider(
db_session, provider_name, api_base=original_api_base
)
_create_test_provider(db_session, provider_name, api_base=original_api_base)

with patch("onyx.server.manage.llm.api.MULTI_TENANT", True):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_base=None,
default_model_name="gpt-4o-mini",
)

with pytest.raises(HTTPException) as exc_info:
@@ -262,14 +259,14 @@ class TestLLMProviderChanges:
users have full control over their deployment.
"""
try:
provider = _create_test_provider(db_session, provider_name)
_create_test_provider(db_session, provider_name)

with patch("onyx.server.manage.llm.api.MULTI_TENANT", False):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_base="https://custom.example.com/v1",
default_model_name="gpt-4o-mini",
)

result = put_llm_provider(
@@ -300,6 +297,7 @@ class TestLLMProviderChanges:
api_key="sk-new-key-00000000000000000000000000000000000",
api_key_changed=True,
api_base="https://custom.example.com/v1",
default_model_name="gpt-4o-mini",
)

result = put_llm_provider(
@@ -324,7 +322,7 @@ class TestLLMProviderChanges:
redirect LLM API requests).
"""
try:
provider = _create_test_provider(
_create_test_provider(
db_session,
provider_name,
custom_config={"SOME_CONFIG": "original_value"},
@@ -332,11 +330,11 @@ class TestLLMProviderChanges:

with patch("onyx.server.manage.llm.api.MULTI_TENANT", True):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
custom_config={"OPENAI_API_BASE": "https://attacker.example.com"},
custom_config_changed=True,
default_model_name="gpt-4o-mini",
)

with pytest.raises(HTTPException) as exc_info:
@@ -364,15 +362,15 @@ class TestLLMProviderChanges:
without changing the API key.
"""
try:
provider = _create_test_provider(db_session, provider_name)
_create_test_provider(db_session, provider_name)

with patch("onyx.server.manage.llm.api.MULTI_TENANT", True):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
custom_config={"OPENAI_API_BASE": "https://attacker.example.com"},
custom_config_changed=True,
default_model_name="gpt-4o-mini",
)

with pytest.raises(HTTPException) as exc_info:
@@ -401,7 +399,7 @@ class TestLLMProviderChanges:
new_config = {"AWS_REGION_NAME": "us-west-2"}

try:
provider = _create_test_provider(
_create_test_provider(
db_session,
provider_name,
custom_config={"AWS_REGION_NAME": "us-east-1"},
@@ -409,13 +407,13 @@ class TestLLMProviderChanges:

with patch("onyx.server.manage.llm.api.MULTI_TENANT", True):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_key="sk-new-key-00000000000000000000000000000000000",
api_key_changed=True,
custom_config_changed=True,
custom_config=new_config,
default_model_name="gpt-4o-mini",
)

result = put_llm_provider(
@@ -440,17 +438,17 @@ class TestLLMProviderChanges:
original_config = {"AWS_REGION_NAME": "us-east-1"}

try:
provider = _create_test_provider(
_create_test_provider(
db_session, provider_name, custom_config=original_config
)

with patch("onyx.server.manage.llm.api.MULTI_TENANT", True):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
custom_config=original_config,
custom_config_changed=True,
default_model_name="gpt-4o-mini",
)

result = put_llm_provider(
@@ -476,7 +474,7 @@ class TestLLMProviderChanges:
new_config = {"AWS_REGION_NAME": "eu-west-1"}

try:
provider = _create_test_provider(
_create_test_provider(
db_session,
provider_name,
custom_config={"AWS_REGION_NAME": "us-east-1"},
@@ -484,10 +482,10 @@ class TestLLMProviderChanges:

with patch("onyx.server.manage.llm.api.MULTI_TENANT", False):
update_request = LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
custom_config=new_config,
default_model_name="gpt-4o-mini",
custom_config_changed=True,
)

@@ -532,8 +530,14 @@ def test_upload_with_custom_config_then_change(
with patch("onyx.server.manage.llm.api.test_llm", side_effect=capture_test_llm):
run_llm_config_test(
LLMTestRequest(
name=name,
provider=provider_name,
model=default_model_name,
default_model_name=default_model_name,
model_configurations=[
ModelConfigurationUpsertRequest(
name=default_model_name, is_visible=True
)
],
api_key_changed=False,
custom_config_changed=True,
custom_config=custom_config,
@@ -542,10 +546,11 @@ def test_upload_with_custom_config_then_change(
db_session=db_session,
)

provider = put_llm_provider(
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
name=name,
provider=provider_name,
default_model_name=default_model_name,
custom_config=custom_config,
model_configurations=[
ModelConfigurationUpsertRequest(
@@ -564,9 +569,14 @@ def test_upload_with_custom_config_then_change(
# Turn auto mode off
run_llm_config_test(
LLMTestRequest(
id=provider.id,
name=name,
provider=provider_name,
model=default_model_name,
default_model_name=default_model_name,
model_configurations=[
ModelConfigurationUpsertRequest(
name=default_model_name, is_visible=True
)
],
api_key_changed=False,
custom_config_changed=False,
),
@@ -576,9 +586,9 @@ def test_upload_with_custom_config_then_change(

put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
id=provider.id,
name=name,
provider=provider_name,
default_model_name=default_model_name,
model_configurations=[
ModelConfigurationUpsertRequest(
name=default_model_name, is_visible=True
@@ -606,13 +616,13 @@ def test_upload_with_custom_config_then_change(
)

# Check inside the database and check that custom_config is the same as the original
db_provider = fetch_existing_llm_provider(name=name, db_session=db_session)
if not db_provider:
provider = fetch_existing_llm_provider(name=name, db_session=db_session)
if not provider:
assert False, "Provider not found in the database"

assert db_provider.custom_config == custom_config, (
assert provider.custom_config == custom_config, (
f"Expected custom_config {custom_config}, "
f"but got {db_provider.custom_config}"
f"but got {provider.custom_config}"
)
finally:
db_session.rollback()
@@ -632,10 +642,11 @@ def test_preserves_masked_sensitive_custom_config_on_provider_update(
}

try:
view = put_llm_provider(
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
name=name,
provider=provider,
default_model_name=default_model_name,
custom_config=original_custom_config,
model_configurations=[
ModelConfigurationUpsertRequest(
@@ -654,9 +665,9 @@ def test_preserves_masked_sensitive_custom_config_on_provider_update(
with patch("onyx.server.manage.llm.api.MULTI_TENANT", False):
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
id=view.id,
name=name,
provider=provider,
default_model_name=default_model_name,
custom_config={
"vertex_credentials": _mask_string(
original_custom_config["vertex_credentials"]
@@ -695,7 +706,7 @@ def test_preserves_masked_sensitive_custom_config_on_test_request(
) -> None:
"""LLM test should restore masked sensitive custom config values before invocation."""
name = f"test-provider-vertex-test-{uuid4().hex[:8]}"
provider_name = LlmProviderNames.VERTEX_AI.value
provider = LlmProviderNames.VERTEX_AI.value
default_model_name = "gemini-2.5-pro"
original_custom_config = {
"vertex_credentials": '{"type":"service_account","private_key":"REAL_PRIVATE_KEY"}',
@@ -708,10 +719,11 @@ def test_preserves_masked_sensitive_custom_config_on_test_request(
return ""

try:
provider = put_llm_provider(
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
name=name,
provider=provider_name,
provider=provider,
default_model_name=default_model_name,
custom_config=original_custom_config,
model_configurations=[
ModelConfigurationUpsertRequest(
@@ -730,9 +742,14 @@ def test_preserves_masked_sensitive_custom_config_on_test_request(
with patch("onyx.server.manage.llm.api.test_llm", side_effect=capture_test_llm):
run_llm_config_test(
LLMTestRequest(
id=provider.id,
provider=provider_name,
model=default_model_name,
name=name,
provider=provider,
default_model_name=default_model_name,
model_configurations=[
ModelConfigurationUpsertRequest(
name=default_model_name, is_visible=True
)
],
api_key_changed=False,
custom_config_changed=True,
custom_config={
@@ -15,11 +15,9 @@ import pytest
from sqlalchemy.orm import Session

from onyx.db.enums import LLMModelFlowType
from onyx.db.llm import fetch_auto_mode_providers
from onyx.db.llm import fetch_default_llm_model
from onyx.db.llm import fetch_existing_llm_provider
from onyx.db.llm import fetch_existing_llm_providers
from onyx.db.llm import fetch_llm_provider_view
from onyx.db.llm import remove_llm_provider
from onyx.db.llm import sync_auto_mode_models
from onyx.db.llm import update_default_provider
@@ -137,6 +135,7 @@ class TestAutoModeSyncFeature:
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
is_auto_mode=True,
default_model_name=expected_default_model,
model_configurations=[], # No model configs provided
),
is_creation=True,
@@ -164,8 +163,13 @@ class TestAutoModeSyncFeature:
if mc.name in all_expected_models:
assert mc.is_visible is True, f"Model '{mc.name}' should be visible"

# Verify the default model was set correctly
assert (
provider.default_model_name == expected_default_model
), f"Default model should be '{expected_default_model}'"

# Step 4: Set the provider as default
update_default_provider(provider.id, expected_default_model, db_session)
update_default_provider(provider.id, db_session)

# Step 5: Fetch the default provider and verify
default_model = fetch_default_llm_model(db_session)
@@ -234,6 +238,7 @@ class TestAutoModeSyncFeature:
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
is_auto_mode=True,
default_model_name="gpt-4o",
model_configurations=[],
),
is_creation=True,
@@ -312,6 +317,7 @@ class TestAutoModeSyncFeature:
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
is_auto_mode=False, # Not in auto mode initially
default_model_name="gpt-4",
model_configurations=initial_models,
),
is_creation=True,
@@ -320,13 +326,13 @@ class TestAutoModeSyncFeature:
)

# Verify initial state: all models are visible
initial_provider = fetch_existing_llm_provider(
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert initial_provider is not None
assert initial_provider.is_auto_mode is False
assert provider is not None
assert provider.is_auto_mode is False

for mc in initial_provider.model_configurations:
for mc in provider.model_configurations:
assert (
mc.is_visible is True
), f"Initial model '{mc.name}' should be visible"
@@ -338,12 +344,12 @@ class TestAutoModeSyncFeature:
):
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
id=initial_provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_key=None, # Not changing API key
api_key_changed=False,
is_auto_mode=True, # Now enabling auto mode
default_model_name=auto_mode_default,
model_configurations=[], # Auto mode will sync from config
),
is_creation=False, # This is an update
@@ -354,15 +360,15 @@ class TestAutoModeSyncFeature:
# Step 3: Verify model visibility after auto mode transition
# Expire session cache to force fresh fetch after sync_auto_mode_models committed
db_session.expire_all()
provider_view = fetch_llm_provider_view(
provider_name=provider_name, db_session=db_session
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider_view is not None
assert provider_view.is_auto_mode is True
assert provider is not None
assert provider.is_auto_mode is True

# Build a map of model name -> visibility
model_visibility = {
mc.name: mc.is_visible for mc in provider_view.model_configurations
mc.name: mc.is_visible for mc in provider.model_configurations
}

# Models in auto mode config should be visible
@@ -382,6 +388,9 @@ class TestAutoModeSyncFeature:
model_visibility[model_name] is False
), f"Model '{model_name}' not in auto config should NOT be visible"

# Verify the default model was updated
assert provider.default_model_name == auto_mode_default

finally:
db_session.rollback()
_cleanup_provider(db_session, provider_name)
@@ -423,12 +432,8 @@ class TestAutoModeSyncFeature:
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
is_auto_mode=True,
model_configurations=[
ModelConfigurationUpsertRequest(
name="gpt-4o",
is_visible=True,
)
],
default_model_name="gpt-4o",
model_configurations=[],
),
is_creation=True,
_=_create_mock_admin(),
@@ -530,6 +535,7 @@ class TestAutoModeSyncFeature:
api_key=provider_1_api_key,
api_key_changed=True,
is_auto_mode=True,
default_model_name=provider_1_default_model,
model_configurations=[],
),
is_creation=True,
@@ -543,7 +549,7 @@ class TestAutoModeSyncFeature:
name=provider_1_name, db_session=db_session
)
assert provider_1 is not None
update_default_provider(provider_1.id, provider_1_default_model, db_session)
update_default_provider(provider_1.id, db_session)

with patch(
"onyx.server.manage.llm.api.fetch_llm_recommendations_from_github",
@@ -557,6 +563,7 @@ class TestAutoModeSyncFeature:
api_key=provider_2_api_key,
api_key_changed=True,
is_auto_mode=True,
default_model_name=provider_2_default_model,
model_configurations=[],
),
is_creation=True,
@@ -577,7 +584,7 @@ class TestAutoModeSyncFeature:
name=provider_2_name, db_session=db_session
)
assert provider_2 is not None
update_default_provider(provider_2.id, provider_2_default_model, db_session)
update_default_provider(provider_2.id, db_session)

# Step 5: Verify provider 2 is now the default
db_session.expire_all()
@@ -637,6 +644,7 @@ class TestAutoModeMissingFlows:
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
is_auto_mode=True,
default_model_name="gpt-4o",
model_configurations=[],
),
is_creation=True,
@@ -693,364 +701,3 @@ class TestAutoModeMissingFlows:
finally:
db_session.rollback()
_cleanup_provider(db_session, provider_name)


class TestAutoModeTransitionsAndResync:
"""Tests for auto/manual transitions, config evolution, and sync idempotency."""

def test_auto_to_manual_mode_preserves_models_and_stops_syncing(
self,
db_session: Session,
provider_name: str,
) -> None:
"""Disabling auto mode should preserve the current model list and
prevent future syncs from altering visibility.

Steps:
1. Create provider in auto mode — models synced from config.
2. Update provider to manual mode (is_auto_mode=False).
3. Verify all models remain with unchanged visibility.
4. Call sync_auto_mode_models with a *different* config.
5. Verify fetch_auto_mode_providers excludes this provider, so the
periodic task would never call sync on it.
"""
initial_config = _create_mock_llm_recommendations(
provider=LlmProviderNames.OPENAI,
default_model_name="gpt-4o",
additional_models=["gpt-4o-mini"],
)

try:
# Step 1: Create in auto mode
with patch(
"onyx.server.manage.llm.api.fetch_llm_recommendations_from_github",
return_value=initial_config,
):
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
is_auto_mode=True,
model_configurations=[],
),
is_creation=True,
_=_create_mock_admin(),
db_session=db_session,
)

db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None
visibility_before = {
mc.name: mc.is_visible for mc in provider.model_configurations
}
assert visibility_before == {"gpt-4o": True, "gpt-4o-mini": True}

# Step 2: Switch to manual mode
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
id=provider.id,
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_key=None,
api_key_changed=False,
is_auto_mode=False,
model_configurations=[
ModelConfigurationUpsertRequest(name="gpt-4o", is_visible=True),
ModelConfigurationUpsertRequest(
name="gpt-4o-mini", is_visible=True
),
],
),
is_creation=False,
_=_create_mock_admin(),
db_session=db_session,
)

# Step 3: Models unchanged
db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None
assert provider.is_auto_mode is False
visibility_after = {
mc.name: mc.is_visible for mc in provider.model_configurations
}
assert visibility_after == visibility_before

# Step 4-5: Provider excluded from auto mode queries
auto_providers = fetch_auto_mode_providers(db_session)
auto_provider_ids = {p.id for p in auto_providers}
assert provider.id not in auto_provider_ids

finally:
db_session.rollback()
_cleanup_provider(db_session, provider_name)

def test_resync_adds_new_and_hides_removed_models(
self,
db_session: Session,
provider_name: str,
) -> None:
"""When the GitHub config changes between syncs, a subsequent sync
should add newly listed models and hide models that were removed.

Steps:
1. Create provider in auto mode with config v1: [gpt-4o, gpt-4o-mini].
2. Sync with config v2: [gpt-4o, gpt-4-turbo] (gpt-4o-mini removed,
gpt-4-turbo added).
3. Verify gpt-4o still visible, gpt-4o-mini hidden, gpt-4-turbo added
and visible.
"""
config_v1 = _create_mock_llm_recommendations(
provider=LlmProviderNames.OPENAI,
default_model_name="gpt-4o",
additional_models=["gpt-4o-mini"],
)
config_v2 = _create_mock_llm_recommendations(
provider=LlmProviderNames.OPENAI,
default_model_name="gpt-4o",
additional_models=["gpt-4-turbo"],
)

try:
# Step 1: Create with config v1
with patch(
"onyx.server.manage.llm.api.fetch_llm_recommendations_from_github",
return_value=config_v1,
):
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
is_auto_mode=True,
model_configurations=[],
),
is_creation=True,
_=_create_mock_admin(),
db_session=db_session,
)

# Step 2: Re-sync with config v2
db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None

changes = sync_auto_mode_models(
db_session=db_session,
provider=provider,
llm_recommendations=config_v2,
)
assert changes > 0

# Step 3: Verify
db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None

visibility = {
mc.name: mc.is_visible for mc in provider.model_configurations
}

# gpt-4o: still in config -> visible
assert visibility["gpt-4o"] is True
# gpt-4o-mini: removed from config -> hidden (not deleted)
assert "gpt-4o-mini" in visibility, "Removed model should still exist in DB"
assert visibility["gpt-4o-mini"] is False
# gpt-4-turbo: newly added -> visible
assert visibility["gpt-4-turbo"] is True

finally:
db_session.rollback()
_cleanup_provider(db_session, provider_name)

def test_sync_is_idempotent(
self,
db_session: Session,
provider_name: str,
) -> None:
"""Running sync twice with the same config should produce zero
changes on the second call."""
config = _create_mock_llm_recommendations(
provider=LlmProviderNames.OPENAI,
default_model_name="gpt-4o",
additional_models=["gpt-4o-mini", "gpt-4-turbo"],
)

try:
with patch(
"onyx.server.manage.llm.api.fetch_llm_recommendations_from_github",
return_value=config,
):
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
is_auto_mode=True,
model_configurations=[],
),
is_creation=True,
_=_create_mock_admin(),
db_session=db_session,
)

db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None

# First explicit sync (may report changes if creation already synced)
sync_auto_mode_models(
db_session=db_session,
provider=provider,
llm_recommendations=config,
)

# Snapshot state after first sync
db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None
snapshot = {
mc.name: (mc.is_visible, mc.display_name)
for mc in provider.model_configurations
}

# Second sync — should be a no-op
changes = sync_auto_mode_models(
db_session=db_session,
provider=provider,
llm_recommendations=config,
)
assert (
changes == 0
), f"Expected 0 changes on idempotent re-sync, got {changes}"

# State should be identical
db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None
current = {
mc.name: (mc.is_visible, mc.display_name)
for mc in provider.model_configurations
}
assert current == snapshot

finally:
db_session.rollback()
_cleanup_provider(db_session, provider_name)

def test_default_model_hidden_when_removed_from_config(
self,
db_session: Session,
provider_name: str,
) -> None:
"""When the current default model is removed from the config, sync
should hide it. The default model flow row should still exist (it
points at the ModelConfiguration), but the model is no longer visible.

Steps:
1. Create provider with config: default=gpt-4o, additional=[gpt-4o-mini].
2. Set gpt-4o as the global default.
3. Re-sync with config: default=gpt-4o-mini (gpt-4o removed entirely).
4. Verify gpt-4o is hidden, gpt-4o-mini is visible, and
fetch_default_llm_model still returns a result (the flow row persists).
"""
config_v1 = _create_mock_llm_recommendations(
provider=LlmProviderNames.OPENAI,
default_model_name="gpt-4o",
additional_models=["gpt-4o-mini"],
)
config_v2 = _create_mock_llm_recommendations(
provider=LlmProviderNames.OPENAI,
default_model_name="gpt-4o-mini",
additional_models=[],
)

try:
with patch(
"onyx.server.manage.llm.api.fetch_llm_recommendations_from_github",
return_value=config_v1,
):
put_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
name=provider_name,
provider=LlmProviderNames.OPENAI,
api_key="sk-test-key-00000000000000000000000000000000000",
api_key_changed=True,
is_auto_mode=True,
model_configurations=[],
),
is_creation=True,
_=_create_mock_admin(),
db_session=db_session,
)

# Step 2: Set gpt-4o as global default
db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None
update_default_provider(provider.id, "gpt-4o", db_session)

default_before = fetch_default_llm_model(db_session)
assert default_before is not None
assert default_before.name == "gpt-4o"

# Step 3: Re-sync with config v2 (gpt-4o removed)
db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None

changes = sync_auto_mode_models(
db_session=db_session,
provider=provider,
llm_recommendations=config_v2,
)
assert changes > 0

# Step 4: Verify visibility
db_session.expire_all()
provider = fetch_existing_llm_provider(
name=provider_name, db_session=db_session
)
assert provider is not None

visibility = {
mc.name: mc.is_visible for mc in provider.model_configurations
}
assert visibility["gpt-4o"] is False, "Removed default should be hidden"
assert visibility["gpt-4o-mini"] is True, "New default should be visible"

# The LLMModelFlow row for gpt-4o still exists (is_default=True),
# but the model is hidden. fetch_default_llm_model filters on
# is_visible=True, so it should NOT return gpt-4o.
db_session.expire_all()
default_after = fetch_default_llm_model(db_session)
assert (
default_after is None or default_after.name != "gpt-4o"
), "Hidden model should not be returned as the default"

finally:
db_session.rollback()
_cleanup_provider(db_session, provider_name)

@@ -64,6 +64,7 @@ def _create_provider(
name=name,
provider=provider,
api_key="sk-ant-api03-...",
default_model_name="claude-3-5-sonnet-20240620",
is_public=is_public,
model_configurations=[
ModelConfigurationUpsertRequest(
@@ -153,9 +154,7 @@ def test_user_sends_message_to_private_provider(
)
_create_provider(db_session, LlmProviderNames.GOOGLE, "private-provider", False)

update_default_provider(
public_provider_id, "claude-3-5-sonnet-20240620", db_session
)
update_default_provider(public_provider_id, db_session)

try:
# Create chat session

@@ -42,6 +42,7 @@ def _create_llm_provider_and_model(
name=provider_name,
provider="openai",
api_key="test-api-key",
default_model_name=model_name,
model_configurations=[
ModelConfigurationUpsertRequest(
name=model_name,

@@ -434,6 +434,7 @@ class TestSlackBotFederatedSearch:
name=f"test-llm-provider-{uuid4().hex[:8]}",
provider=LlmProviderNames.OPENAI,
api_key=api_key,
default_model_name="gpt-4o",
is_public=True,
model_configurations=[
ModelConfigurationUpsertRequest(
@@ -447,7 +448,7 @@ class TestSlackBotFederatedSearch:
db_session=db_session,
)

update_default_provider(provider_view.id, "gpt-4o", db_session)
update_default_provider(provider_view.id, db_session)

def _teardown_common_mocks(self, patches: list) -> None:
"""Stop all patches"""

@@ -4,12 +4,10 @@ from uuid import uuid4
import requests

from onyx.llm.constants import LlmProviderNames
from onyx.server.manage.llm.models import DefaultModel
from onyx.server.manage.llm.models import LLMProviderUpsertRequest
from onyx.server.manage.llm.models import LLMProviderView
from onyx.server.manage.llm.models import ModelConfigurationUpsertRequest
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.constants import GENERAL_HEADERS
from tests.integration.common_utils.test_models import DATestLLMProvider
from tests.integration.common_utils.test_models import DATestUser

@@ -34,6 +32,7 @@ class LLMProviderManager:
llm_provider = LLMProviderUpsertRequest(
name=name or f"test-provider-{uuid4()}",
provider=provider or LlmProviderNames.OPENAI,
default_model_name=default_model_name or "gpt-4o-mini",
api_key=api_key or os.environ["OPENAI_API_KEY"],
api_base=api_base,
api_version=api_version,
@@ -66,7 +65,7 @@ class LLMProviderManager:
name=response_data["name"],
provider=response_data["provider"],
api_key=response_data["api_key"],
default_model_name=default_model_name or "gpt-4o-mini",
default_model_name=response_data["default_model_name"],
is_public=response_data["is_public"],
is_auto_mode=response_data.get("is_auto_mode", False),
groups=response_data["groups"],
@@ -76,19 +75,9 @@ class LLMProviderManager:
)

if set_as_default:
if default_model_name is None:
default_model_name = "gpt-4o-mini"
set_default_response = requests.post(
f"{API_SERVER_URL}/admin/llm/default",
json={
"provider_id": response_data["id"],
"model_name": default_model_name,
},
headers=(
user_performing_action.headers
if user_performing_action
else GENERAL_HEADERS
),
f"{API_SERVER_URL}/admin/llm/provider/{llm_response.json()['id']}/default",
headers=user_performing_action.headers,
)
set_default_response.raise_for_status()

@@ -115,7 +104,7 @@ class LLMProviderManager:
headers=user_performing_action.headers,
)
response.raise_for_status()
return [LLMProviderView(**p) for p in response.json()["providers"]]
return [LLMProviderView(**ug) for ug in response.json()]

@staticmethod
def verify(
@@ -124,11 +113,7 @@ class LLMProviderManager:
verify_deleted: bool = False,
) -> None:
all_llm_providers = LLMProviderManager.get_all(user_performing_action)
default_model = LLMProviderManager.get_default_model(user_performing_action)
for fetched_llm_provider in all_llm_providers:
model_names = [
model.name for model in fetched_llm_provider.model_configurations
]
if llm_provider.id == fetched_llm_provider.id:
if verify_deleted:
raise ValueError(
@@ -141,30 +126,11 @@ class LLMProviderManager:
if (
fetched_llm_groups == llm_provider_groups
and llm_provider.provider == fetched_llm_provider.provider
and (
default_model is None or default_model.model_name in model_names
)
and llm_provider.default_model_name
== fetched_llm_provider.default_model_name
and llm_provider.is_public == fetched_llm_provider.is_public
and set(fetched_llm_provider.personas) == set(llm_provider.personas)
):
return
if not verify_deleted:
raise ValueError(f"LLM Provider {llm_provider.id} not found")

@staticmethod
def get_default_model(
user_performing_action: DATestUser | None = None,
) -> DefaultModel | None:
response = requests.get(
f"{API_SERVER_URL}/admin/llm/provider",
headers=(
user_performing_action.headers
if user_performing_action
else GENERAL_HEADERS
),
)
response.raise_for_status()
default_text = response.json().get("default_text")
if default_text is None:
return None
return DefaultModel(**default_text)

@@ -128,7 +128,7 @@ class DATestLLMProvider(BaseModel):
name: str
provider: str
api_key: str
default_model_name: str | None = None
default_model_name: str
is_public: bool
is_auto_mode: bool = False
groups: list[int]

@@ -42,10 +42,12 @@ def _create_provider_with_api(
llm_provider_data = {
"name": name,
"provider": provider_type,
"default_model_name": default_model,
"api_key": "test-api-key-for-auto-mode-testing",
"api_base": None,
"api_version": None,
"custom_config": None,
"fast_default_model_name": default_model,
"is_public": True,
"is_auto_mode": is_auto_mode,
"groups": [],
@@ -70,7 +72,7 @@ def _get_provider_by_id(admin_user: DATestUser, provider_id: int) -> dict:
headers=admin_user.headers,
)
response.raise_for_status()
for provider in response.json()["providers"]:
for provider in response.json():
if provider["id"] == provider_id:
return provider
raise ValueError(f"Provider with id {provider_id} not found")
@@ -217,6 +219,15 @@ def test_auto_mode_provider_gets_synced_from_github_config(
"is_visible"
], "Outdated model should not be visible after sync"

# Verify default model was set from GitHub config
expected_default = (
default_model["name"] if isinstance(default_model, dict) else default_model
)
assert synced_provider["default_model_name"] == expected_default, (
f"Default model should be {expected_default}, "
f"got {synced_provider['default_model_name']}"
)


def test_manual_mode_provider_not_affected_by_auto_sync(
reset: None, # noqa: ARG001
@@ -262,3 +273,7 @@ def test_manual_mode_provider_not_affected_by_auto_sync(
f"Manual mode provider models should not change. "
f"Initial: {initial_models}, Current: {current_models}"
)

assert (
updated_provider["default_model_name"] == custom_model
), f"Manual mode default model should remain {custom_model}"

File diff suppressed because it is too large
@@ -6,21 +6,20 @@ from sqlalchemy.orm import Session

from onyx.context.search.enums import RecencyBiasSetting
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.enums import LLMModelFlowType
from onyx.db.llm import can_user_access_llm_provider
from onyx.db.llm import fetch_user_group_ids
from onyx.db.llm import update_default_provider
from onyx.db.llm import upsert_llm_provider
from onyx.db.models import LLMModelFlow
from onyx.db.models import LLMProvider as LLMProviderModel
from onyx.db.models import LLMProvider__Persona
from onyx.db.models import LLMProvider__UserGroup
from onyx.db.models import ModelConfiguration
from onyx.db.models import Persona
from onyx.db.models import User
from onyx.db.models import User__UserGroup
from onyx.db.models import UserGroup
from onyx.llm.constants import LlmProviderNames
from onyx.llm.factory import get_llm_for_persona
from onyx.server.manage.llm.models import LLMProviderUpsertRequest
from onyx.server.manage.llm.models import ModelConfigurationUpsertRequest
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.managers.llm_provider import LLMProviderManager
from tests.integration.common_utils.managers.persona import PersonaManager
@@ -42,30 +41,24 @@ def _create_llm_provider(
is_public: bool,
is_default: bool,
) -> LLMProviderModel:
_provider = upsert_llm_provider(
llm_provider_upsert_request=LLMProviderUpsertRequest(
name=name,
provider=LlmProviderNames.OPENAI,
api_key=None,
api_base=None,
api_version=None,
custom_config=None,
is_public=is_public,
model_configurations=[
ModelConfigurationUpsertRequest(
name=default_model_name,
is_visible=True,
)
],
),
db_session=db_session,
provider = LLMProviderModel(
name=name,
provider=LlmProviderNames.OPENAI,
api_key=None,
api_base=None,
api_version=None,
custom_config=None,
default_model_name=default_model_name,
deployment_name=None,
is_public=is_public,
# Use None instead of False to avoid unique constraint violation
# The is_default_provider column has unique=True, so only one True and one False allowed
is_default_provider=is_default if is_default else None,
is_default_vision_provider=False,
default_vision_model=None,
)
if is_default:
update_default_provider(_provider.id, default_model_name, db_session)

provider = db_session.get(LLMProviderModel, _provider.id)
if not provider:
raise ValueError(f"Provider {name} not found")
db_session.add(provider)
db_session.flush()
return provider


@@ -277,6 +270,24 @@ def test_get_llm_for_persona_falls_back_when_access_denied(
provider_name=restricted_provider.name,
)

# Set up ModelConfiguration + LLMModelFlow so get_default_llm() can
# resolve the default provider when the fallback path is triggered.
default_model_config = ModelConfiguration(
llm_provider_id=default_provider.id,
name=default_provider.default_model_name,
is_visible=True,
)
db_session.add(default_model_config)
db_session.flush()
db_session.add(
LLMModelFlow(
model_configuration_id=default_model_config.id,
llm_model_flow_type=LLMModelFlowType.CHAT,
is_default=True,
)
)
db_session.flush()

access_group = UserGroup(name="persona-group")
db_session.add(access_group)
db_session.flush()
@@ -310,19 +321,13 @@ def test_get_llm_for_persona_falls_back_when_access_denied(
persona=persona,
user=admin_model,
)
assert (
allowed_llm.config.model_name
== restricted_provider.model_configurations[0].name
)
assert allowed_llm.config.model_name == restricted_provider.default_model_name

fallback_llm = get_llm_for_persona(
persona=persona,
user=basic_model,
)
assert (
fallback_llm.config.model_name
== default_provider.model_configurations[0].name
)
assert fallback_llm.config.model_name == default_provider.default_model_name


def test_list_llm_provider_basics_excludes_non_public_unrestricted(
@@ -341,7 +346,6 @@ def test_list_llm_provider_basics_excludes_non_public_unrestricted(
name="public-provider",
is_public=True,
set_as_default=True,
default_model_name="gpt-4o",
user_performing_action=admin_user,
)

@@ -361,7 +365,7 @@ def test_list_llm_provider_basics_excludes_non_public_unrestricted(
headers=basic_user.headers,
)
assert response.status_code == 200
providers = response.json()["providers"]
providers = response.json()
provider_names = [p["name"] for p in providers]

# Public provider should be visible
@@ -376,7 +380,7 @@ def test_list_llm_provider_basics_excludes_non_public_unrestricted(
headers=admin_user.headers,
)
assert admin_response.status_code == 200
admin_providers = admin_response.json()["providers"]
admin_providers = admin_response.json()
admin_provider_names = [p["name"] for p in admin_providers]

assert public_provider.name in admin_provider_names
@@ -392,7 +396,6 @@ def test_provider_delete_clears_persona_references(reset: None) -> None: # noqa
name="default-provider",
is_public=True,
set_as_default=True,
default_model_name="gpt-4o",
user_performing_action=admin_user,
)


@@ -107,7 +107,7 @@ def test_authorized_persona_access_returns_filtered_providers(

# Should succeed
assert response.status_code == 200
providers = response.json()["providers"]
providers = response.json()

# Should include the restricted provider since basic_user can access the persona
provider_names = [p["name"] for p in providers]
@@ -140,7 +140,7 @@ def test_persona_id_zero_applies_rbac(

# Should succeed (persona_id=0 refers to default persona, which is public)
assert response.status_code == 200
providers = response.json()["providers"]
providers = response.json()

# Should NOT include the restricted provider since basic_user is not in group2
provider_names = [p["name"] for p in providers]
@@ -182,7 +182,7 @@ def test_admin_can_query_any_persona(

# Should succeed - admins can access any persona
assert response.status_code == 200
providers = response.json()["providers"]
providers = response.json()

# Should include the restricted provider
provider_names = [p["name"] for p in providers]
@@ -223,7 +223,7 @@ def test_public_persona_accessible_to_all(

# Should succeed
assert response.status_code == 200
providers = response.json()["providers"]
providers = response.json()

# Should return the public provider
assert len(providers) > 0

@@ -72,9 +72,6 @@ def test_cold_startup_default_assistant() -> None:
assert (
"read_file" in tool_names
), "Default assistant should have FileReaderTool attached"
assert (
"python" in tool_names
), "Default assistant should have PythonTool attached"

# Also verify by display names for clarity
assert (
@@ -89,11 +86,8 @@ def test_cold_startup_default_assistant() -> None:
assert (
"File Reader" in tool_display_names
), "Default assistant should have File Reader tool"
assert (
"Code Interpreter" in tool_display_names
), "Default assistant should have Code Interpreter tool"

# Should have exactly 6 tools
# Should have exactly 5 tools
assert (
len(tool_associations) == 6
), f"Default assistant should have exactly 6 tools attached, got {len(tool_associations)}"
len(tool_associations) == 5
), f"Default assistant should have exactly 5 tools attached, got {len(tool_associations)}"

@@ -0,0 +1,160 @@
"""Integration test for the full user-file lifecycle in no-vector-DB mode.

Covers: upload → COMPLETED → unlink from project → delete → gone.

The entire lifecycle is handled by FastAPI BackgroundTasks (no Celery workers
needed). The conftest-level ``pytestmark`` ensures these tests are skipped
when the server is running with vector DB enabled.
"""

import time
from uuid import UUID

import requests

from onyx.db.enums import UserFileStatus
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.managers.project import ProjectManager
from tests.integration.common_utils.test_models import DATestLLMProvider
from tests.integration.common_utils.test_models import DATestUser

POLL_INTERVAL_SECONDS = 1
POLL_TIMEOUT_SECONDS = 30


def _poll_file_status(
    file_id: UUID,
    user: DATestUser,
    target_status: UserFileStatus,
    timeout: int = POLL_TIMEOUT_SECONDS,
) -> None:
    """Poll GET /user/projects/file/{file_id} until the file reaches *target_status*."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = requests.get(
            f"{API_SERVER_URL}/user/projects/file/{file_id}",
            headers=user.headers,
        )
        if resp.ok:
            status = resp.json().get("status")
            if status == target_status.value:
                return
        time.sleep(POLL_INTERVAL_SECONDS)
    raise TimeoutError(
        f"File {file_id} did not reach {target_status.value} within {timeout}s"
    )


def _file_is_gone(file_id: UUID, user: DATestUser, timeout: int = 15) -> None:
    """Poll until GET /user/projects/file/{file_id} returns 404."""
    deadline = time.time() + timeout
    while time.time() < deadline:
        resp = requests.get(
            f"{API_SERVER_URL}/user/projects/file/{file_id}",
            headers=user.headers,
        )
        if resp.status_code == 404:
            return
        time.sleep(POLL_INTERVAL_SECONDS)
    raise TimeoutError(
        f"File {file_id} still accessible after {timeout}s (expected 404)"
    )


def test_file_upload_process_delete_lifecycle(
    reset: None,  # noqa: ARG001
    admin_user: DATestUser,
    llm_provider: DATestLLMProvider,  # noqa: ARG001
) -> None:
    """Full lifecycle: upload → COMPLETED → unlink → delete → 404.

    Validates that the API server handles all background processing
    (via FastAPI BackgroundTasks) without any Celery workers running.
    """
    project = ProjectManager.create(
        name="lifecycle-test", user_performing_action=admin_user
    )

    file_content = b"Integration test file content for lifecycle verification."
    upload_result = ProjectManager.upload_files(
        project_id=project.id,
        files=[("lifecycle.txt", file_content)],
        user_performing_action=admin_user,
    )
    assert upload_result.user_files, "Expected at least one file in upload response"

    user_file = upload_result.user_files[0]
    file_id = user_file.id

    _poll_file_status(file_id, admin_user, UserFileStatus.COMPLETED)

    project_files = ProjectManager.get_project_files(project.id, admin_user)
    assert any(
        f.id == file_id for f in project_files
    ), "File should be listed in project files after processing"

    # Unlink the file from the project so the delete endpoint will proceed
    unlink_resp = requests.delete(
        f"{API_SERVER_URL}/user/projects/{project.id}/files/{file_id}",
        headers=admin_user.headers,
    )
    assert (
        unlink_resp.status_code == 204
    ), f"Expected 204 on unlink, got {unlink_resp.status_code}: {unlink_resp.text}"

    delete_resp = requests.delete(
        f"{API_SERVER_URL}/user/projects/file/{file_id}",
        headers=admin_user.headers,
    )
    assert (
        delete_resp.ok
    ), f"Delete request failed: {delete_resp.status_code} {delete_resp.text}"
    body = delete_resp.json()
    assert (
        body["has_associations"] is False
    ), f"File still has associations after unlink: {body}"

    _file_is_gone(file_id, admin_user)

    project_files_after = ProjectManager.get_project_files(project.id, admin_user)
    assert not any(
        f.id == file_id for f in project_files_after
    ), "Deleted file should not appear in project files"


def test_delete_blocked_while_associated(
    reset: None,  # noqa: ARG001
    admin_user: DATestUser,
    llm_provider: DATestLLMProvider,  # noqa: ARG001
) -> None:
    """Deleting a file that still belongs to a project should return
    has_associations=True without actually deleting the file."""
    project = ProjectManager.create(
        name="assoc-test", user_performing_action=admin_user
    )

    upload_result = ProjectManager.upload_files(
        project_id=project.id,
        files=[("assoc.txt", b"associated file content")],
        user_performing_action=admin_user,
    )
    file_id = upload_result.user_files[0].id

    _poll_file_status(file_id, admin_user, UserFileStatus.COMPLETED)

    # Attempt to delete while still linked
    delete_resp = requests.delete(
        f"{API_SERVER_URL}/user/projects/file/{file_id}",
        headers=admin_user.headers,
    )
    assert delete_resp.ok
    body = delete_resp.json()
    assert body["has_associations"] is True, "Should report existing associations"
    assert project.name in body["project_names"]

    # File should still be accessible
    get_resp = requests.get(
        f"{API_SERVER_URL}/user/projects/file/{file_id}",
        headers=admin_user.headers,
    )
    assert get_resp.status_code == 200, "File should still exist after blocked delete"
@@ -9,19 +9,6 @@ from redis.exceptions import RedisError
from onyx.server.settings.models import ApplicationStatus
from onyx.server.settings.models import Settings

# Fields we assert on across all tests
_ASSERT_FIELDS = {
    "application_status",
    "ee_features_enabled",
    "seat_count",
    "used_seats",
}


def _pick(settings: Settings) -> dict:
    """Extract only the fields under test from a Settings object."""
    return settings.model_dump(include=_ASSERT_FIELDS)


@pytest.fixture
def base_settings() -> Settings:
@@ -40,17 +27,17 @@ class TestApplyLicenseStatusToSettings:
    def test_enforcement_disabled_enables_ee_features(
        self, base_settings: Settings
    ) -> None:
        """When LICENSE_ENFORCEMENT_ENABLED=False, EE features are enabled."""
        """When LICENSE_ENFORCEMENT_ENABLED=False, EE features are enabled.

        If we're running the EE apply function, EE code was loaded via
        ENABLE_PAID_ENTERPRISE_EDITION_FEATURES, so features should be on.
        """
        from ee.onyx.server.settings.api import apply_license_status_to_settings

        assert base_settings.ee_features_enabled is False
        result = apply_license_status_to_settings(base_settings)
        assert _pick(result) == {
            "application_status": ApplicationStatus.ACTIVE,
            "ee_features_enabled": True,
            "seat_count": None,
            "used_seats": None,
        }
        assert result.application_status == ApplicationStatus.ACTIVE
        assert result.ee_features_enabled is True

    @patch("ee.onyx.server.settings.api.LICENSE_ENFORCEMENT_ENABLED", True)
    @patch("ee.onyx.server.settings.api.MULTI_TENANT", True)
@@ -59,60 +46,13 @@ class TestApplyLicenseStatusToSettings:
        from ee.onyx.server.settings.api import apply_license_status_to_settings

        result = apply_license_status_to_settings(base_settings)
        assert _pick(result) == {
            "application_status": ApplicationStatus.ACTIVE,
            "ee_features_enabled": True,
            "seat_count": None,
            "used_seats": None,
        }
        assert result.ee_features_enabled is True

    @pytest.mark.parametrize(
        "license_status,used_seats,seats,expected",
        "license_status,expected_app_status,expected_ee_enabled",
        [
            (
                ApplicationStatus.GATED_ACCESS,
                3,
                10,
                {
                    "application_status": ApplicationStatus.GATED_ACCESS,
                    "ee_features_enabled": False,
                    "seat_count": None,
                    "used_seats": None,
                },
            ),
            (
                ApplicationStatus.ACTIVE,
                3,
                10,
                {
                    "application_status": ApplicationStatus.ACTIVE,
                    "ee_features_enabled": True,
                    "seat_count": None,
                    "used_seats": None,
                },
            ),
            (
                ApplicationStatus.ACTIVE,
                10,
                10,
                {
                    "application_status": ApplicationStatus.ACTIVE,
                    "ee_features_enabled": True,
                    "seat_count": None,
                    "used_seats": None,
                },
            ),
            (
                ApplicationStatus.GRACE_PERIOD,
                3,
                10,
                {
                    "application_status": ApplicationStatus.ACTIVE,
                    "ee_features_enabled": True,
                    "seat_count": None,
                    "used_seats": None,
                },
            ),
            (ApplicationStatus.GATED_ACCESS, ApplicationStatus.GATED_ACCESS, False),
            (ApplicationStatus.ACTIVE, ApplicationStatus.ACTIVE, True),
        ],
    )
    @patch("ee.onyx.server.settings.api.LICENSE_ENFORCEMENT_ENABLED", True)
@@ -123,80 +63,25 @@ class TestApplyLicenseStatusToSettings:
        self,
        mock_get_metadata: MagicMock,
        mock_get_tenant: MagicMock,
        license_status: ApplicationStatus,
        used_seats: int,
        seats: int,
        expected: dict,
        license_status: ApplicationStatus | None,
        expected_app_status: ApplicationStatus,
        expected_ee_enabled: bool,
        base_settings: Settings,
    ) -> None:
        """Self-hosted: license status controls both application_status and ee_features_enabled."""
        from ee.onyx.server.settings.api import apply_license_status_to_settings

        mock_get_tenant.return_value = "test_tenant"
        mock_metadata = MagicMock()
        mock_metadata.status = license_status
        mock_metadata.used_seats = used_seats
        mock_metadata.seats = seats
        mock_get_metadata.return_value = mock_metadata
        if license_status is None:
            mock_get_metadata.return_value = None
        else:
            mock_metadata = MagicMock()
            mock_metadata.status = license_status
            mock_get_metadata.return_value = mock_metadata

        result = apply_license_status_to_settings(base_settings)
        assert _pick(result) == expected

    @patch("ee.onyx.server.settings.api.LICENSE_ENFORCEMENT_ENABLED", True)
    @patch("ee.onyx.server.settings.api.MULTI_TENANT", False)
    @patch("ee.onyx.server.settings.api.get_current_tenant_id")
    @patch("ee.onyx.server.settings.api.get_cached_license_metadata")
    def test_seat_limit_exceeded_sets_status_and_counts(
        self,
        mock_get_metadata: MagicMock,
        mock_get_tenant: MagicMock,
        base_settings: Settings,
    ) -> None:
        """Seat limit exceeded sets SEAT_LIMIT_EXCEEDED with counts, keeps EE enabled."""
        from ee.onyx.server.settings.api import apply_license_status_to_settings

        mock_get_tenant.return_value = "test_tenant"
        mock_metadata = MagicMock()
        mock_metadata.status = ApplicationStatus.ACTIVE
        mock_metadata.used_seats = 15
        mock_metadata.seats = 10
        mock_get_metadata.return_value = mock_metadata

        result = apply_license_status_to_settings(base_settings)
        assert _pick(result) == {
            "application_status": ApplicationStatus.SEAT_LIMIT_EXCEEDED,
            "ee_features_enabled": True,
            "seat_count": 10,
            "used_seats": 15,
        }

    @patch("ee.onyx.server.settings.api.LICENSE_ENFORCEMENT_ENABLED", True)
    @patch("ee.onyx.server.settings.api.MULTI_TENANT", False)
    @patch("ee.onyx.server.settings.api.get_current_tenant_id")
    @patch("ee.onyx.server.settings.api.get_cached_license_metadata")
    def test_expired_license_takes_precedence_over_seat_limit(
        self,
        mock_get_metadata: MagicMock,
        mock_get_tenant: MagicMock,
        base_settings: Settings,
    ) -> None:
        """Expired license (GATED_ACCESS) takes precedence over seat limit exceeded."""
        from ee.onyx.server.settings.api import apply_license_status_to_settings

        mock_get_tenant.return_value = "test_tenant"
        mock_metadata = MagicMock()
        mock_metadata.status = ApplicationStatus.GATED_ACCESS
        mock_metadata.used_seats = 15
        mock_metadata.seats = 10
        mock_get_metadata.return_value = mock_metadata

        result = apply_license_status_to_settings(base_settings)
        assert _pick(result) == {
            "application_status": ApplicationStatus.GATED_ACCESS,
            "ee_features_enabled": False,
            "seat_count": None,
            "used_seats": None,
        }
        assert result.application_status == expected_app_status
        assert result.ee_features_enabled is expected_ee_enabled

    @patch("ee.onyx.server.settings.api.ENTERPRISE_EDITION_ENABLED", True)
    @patch("ee.onyx.server.settings.api.LICENSE_ENFORCEMENT_ENABLED", True)
@@ -220,12 +105,8 @@ class TestApplyLicenseStatusToSettings:
        mock_get_metadata.return_value = None

        result = apply_license_status_to_settings(base_settings)
        assert _pick(result) == {
            "application_status": ApplicationStatus.GATED_ACCESS,
            "ee_features_enabled": False,
            "seat_count": None,
            "used_seats": None,
        }
        assert result.application_status == ApplicationStatus.GATED_ACCESS
        assert result.ee_features_enabled is False

    @patch("ee.onyx.server.settings.api.ENTERPRISE_EDITION_ENABLED", False)
    @patch("ee.onyx.server.settings.api.LICENSE_ENFORCEMENT_ENABLED", True)
@@ -249,12 +130,8 @@ class TestApplyLicenseStatusToSettings:
        mock_get_metadata.return_value = None

        result = apply_license_status_to_settings(base_settings)
        assert _pick(result) == {
            "application_status": ApplicationStatus.ACTIVE,
            "ee_features_enabled": False,
            "seat_count": None,
            "used_seats": None,
        }
        assert result.application_status == ApplicationStatus.ACTIVE
        assert result.ee_features_enabled is False

    @patch("ee.onyx.server.settings.api.LICENSE_ENFORCEMENT_ENABLED", True)
    @patch("ee.onyx.server.settings.api.MULTI_TENANT", False)
@@ -273,12 +150,8 @@ class TestApplyLicenseStatusToSettings:
        mock_get_metadata.side_effect = RedisError("Connection failed")

        result = apply_license_status_to_settings(base_settings)
        assert _pick(result) == {
            "application_status": ApplicationStatus.ACTIVE,
            "ee_features_enabled": False,
            "seat_count": None,
            "used_seats": None,
        }
        assert result.application_status == ApplicationStatus.ACTIVE
        assert result.ee_features_enabled is False


class TestSettingsDefaultEEDisabled:

@@ -0,0 +1,291 @@
"""Tests for the _impl functions' redis_locking parameter.

Verifies that:
- redis_locking=True acquires/releases Redis locks and clears queued keys
- redis_locking=False skips all Redis operations entirely
- Both paths execute the same business logic (DB lookup, status check)
"""

from unittest.mock import MagicMock
from unittest.mock import patch
from uuid import uuid4

from onyx.background.celery.tasks.user_file_processing.tasks import (
    _delete_user_file_impl,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
    _process_user_file_impl,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
    _project_sync_user_file_impl,
)

TASKS_MODULE = "onyx.background.celery.tasks.user_file_processing.tasks"


def _mock_session_returning_none() -> MagicMock:
    """Return a mock session whose .get() returns None (file not found)."""
    session = MagicMock()
    session.get.return_value = None
    session.execute.return_value.scalar_one_or_none.return_value = None
    return session


# ------------------------------------------------------------------
# _process_user_file_impl
# ------------------------------------------------------------------


class TestProcessUserFileImpl:
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_redis_locking_true_acquires_and_releases_lock(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        redis_client = MagicMock()
        lock = MagicMock()
        lock.acquire.return_value = True
        lock.owned.return_value = True
        redis_client.lock.return_value = lock
        mock_get_redis.return_value = redis_client

        session = _mock_session_returning_none()
        mock_get_session.return_value.__enter__.return_value = session

        user_file_id = str(uuid4())
        _process_user_file_impl(
            user_file_id=user_file_id,
            tenant_id="test-tenant",
            redis_locking=True,
        )

        mock_get_redis.assert_called_once_with(tenant_id="test-tenant")
        redis_client.delete.assert_called_once()
        lock.acquire.assert_called_once_with(blocking=False)
        lock.release.assert_called_once()

    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_redis_locking_true_skips_when_lock_held(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        redis_client = MagicMock()
        lock = MagicMock()
        lock.acquire.return_value = False
        redis_client.lock.return_value = lock
        mock_get_redis.return_value = redis_client

        _process_user_file_impl(
            user_file_id=str(uuid4()),
            tenant_id="test-tenant",
            redis_locking=True,
        )

        lock.acquire.assert_called_once()
        mock_get_session.assert_not_called()

    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_redis_locking_false_skips_redis_entirely(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        session = _mock_session_returning_none()
        mock_get_session.return_value.__enter__.return_value = session

        _process_user_file_impl(
            user_file_id=str(uuid4()),
            tenant_id="test-tenant",
            redis_locking=False,
        )

        mock_get_redis.assert_not_called()
        mock_get_session.assert_called_once()

    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_both_paths_call_db_get(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        """Both redis_locking=True and False should call db_session.get(UserFile, ...)."""
        redis_client = MagicMock()
        lock = MagicMock()
        lock.acquire.return_value = True
        lock.owned.return_value = True
        redis_client.lock.return_value = lock
        mock_get_redis.return_value = redis_client

        session = _mock_session_returning_none()
        mock_get_session.return_value.__enter__.return_value = session

        uid = str(uuid4())

        _process_user_file_impl(user_file_id=uid, tenant_id="t", redis_locking=True)
        call_count_true = session.get.call_count

        session.reset_mock()
        mock_get_session.reset_mock()
        mock_get_session.return_value.__enter__.return_value = session

        _process_user_file_impl(user_file_id=uid, tenant_id="t", redis_locking=False)
        call_count_false = session.get.call_count

        assert call_count_true == call_count_false == 1


# ------------------------------------------------------------------
# _delete_user_file_impl
# ------------------------------------------------------------------


class TestDeleteUserFileImpl:
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_redis_locking_true_acquires_and_releases_lock(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        redis_client = MagicMock()
        lock = MagicMock()
        lock.acquire.return_value = True
        lock.owned.return_value = True
        redis_client.lock.return_value = lock
        mock_get_redis.return_value = redis_client

        session = _mock_session_returning_none()
        mock_get_session.return_value.__enter__.return_value = session

        _delete_user_file_impl(
            user_file_id=str(uuid4()),
            tenant_id="test-tenant",
            redis_locking=True,
        )

        mock_get_redis.assert_called_once()
        lock.acquire.assert_called_once_with(blocking=False)
        lock.release.assert_called_once()

    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_redis_locking_true_skips_when_lock_held(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        redis_client = MagicMock()
        lock = MagicMock()
        lock.acquire.return_value = False
        redis_client.lock.return_value = lock
        mock_get_redis.return_value = redis_client

        _delete_user_file_impl(
            user_file_id=str(uuid4()),
            tenant_id="test-tenant",
            redis_locking=True,
        )

        lock.acquire.assert_called_once()
        mock_get_session.assert_not_called()

    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_redis_locking_false_skips_redis_entirely(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        session = _mock_session_returning_none()
        mock_get_session.return_value.__enter__.return_value = session

        _delete_user_file_impl(
            user_file_id=str(uuid4()),
            tenant_id="test-tenant",
            redis_locking=False,
        )

        mock_get_redis.assert_not_called()
        mock_get_session.assert_called_once()


# ------------------------------------------------------------------
# _project_sync_user_file_impl
# ------------------------------------------------------------------


class TestProjectSyncUserFileImpl:
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_redis_locking_true_acquires_and_releases_lock(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        redis_client = MagicMock()
        lock = MagicMock()
        lock.acquire.return_value = True
        lock.owned.return_value = True
        redis_client.lock.return_value = lock
        mock_get_redis.return_value = redis_client

        session = _mock_session_returning_none()
        mock_get_session.return_value.__enter__.return_value = session

        _project_sync_user_file_impl(
            user_file_id=str(uuid4()),
            tenant_id="test-tenant",
            redis_locking=True,
        )

        mock_get_redis.assert_called_once()
        redis_client.delete.assert_called_once()
        lock.acquire.assert_called_once_with(blocking=False)
        lock.release.assert_called_once()

    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_redis_locking_true_skips_when_lock_held(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        redis_client = MagicMock()
        lock = MagicMock()
        lock.acquire.return_value = False
        redis_client.lock.return_value = lock
        mock_get_redis.return_value = redis_client

        _project_sync_user_file_impl(
            user_file_id=str(uuid4()),
            tenant_id="test-tenant",
            redis_locking=True,
        )

        lock.acquire.assert_called_once()
        mock_get_session.assert_not_called()

    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    @patch(f"{TASKS_MODULE}.get_redis_client")
    def test_redis_locking_false_skips_redis_entirely(
        self,
        mock_get_redis: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        session = _mock_session_returning_none()
        mock_get_session.return_value.__enter__.return_value = session

        _project_sync_user_file_impl(
            user_file_id=str(uuid4()),
            tenant_id="test-tenant",
            redis_locking=False,
        )

        mock_get_redis.assert_not_called()
        mock_get_session.assert_called_once()
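
Read together, the mocks above pin down a guard contract for the _impl functions. The following is a minimal sketch of that contract, inferred only from the asserted call sequence; the Redis key names and the _do_work helper are illustrative assumptions, and get_redis_client / get_session_with_current_tenant stand for the helpers patched in these tests.

def _impl_sketch(user_file_id: str, tenant_id: str, redis_locking: bool) -> None:
    # Sketch only: key names and _do_work are assumptions, not the real code.
    if not redis_locking:
        # redis_locking=False must skip Redis entirely and go straight to the DB.
        with get_session_with_current_tenant() as db_session:
            _do_work(db_session, user_file_id)
        return

    redis_client = get_redis_client(tenant_id=tenant_id)
    redis_client.delete(f"userfile_queued:{user_file_id}")  # clear the queued marker
    lock = redis_client.lock(f"userfile_lock:{user_file_id}")
    if not lock.acquire(blocking=False):
        return  # another worker holds the lock; do not even open a DB session
    try:
        with get_session_with_current_tenant() as db_session:
            _do_work(db_session, user_file_id)
    finally:
        if lock.owned():
            lock.release()
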
@@ -0,0 +1,421 @@
"""Tests for no-vector-DB user file processing paths.

Verifies that when DISABLE_VECTOR_DB is True:
- _process_user_file_impl calls _process_user_file_without_vector_db (not indexing)
- _process_user_file_without_vector_db extracts text, counts tokens, stores plaintext,
  sets status=COMPLETED and chunk_count=0
- _delete_user_file_impl skips vector DB chunk deletion
- _project_sync_user_file_impl skips vector DB metadata update
"""

from unittest.mock import MagicMock
from unittest.mock import patch
from uuid import uuid4

from onyx.background.celery.tasks.user_file_processing.tasks import (
    _delete_user_file_impl,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
    _process_user_file_impl,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
    _process_user_file_without_vector_db,
)
from onyx.background.celery.tasks.user_file_processing.tasks import (
    _project_sync_user_file_impl,
)
from onyx.configs.constants import DocumentSource
from onyx.connectors.models import Document
from onyx.connectors.models import TextSection
from onyx.db.enums import UserFileStatus

TASKS_MODULE = "onyx.background.celery.tasks.user_file_processing.tasks"
LLM_FACTORY_MODULE = "onyx.llm.factory"


def _make_documents(texts: list[str]) -> list[Document]:
    """Build a list of Document objects with the given section texts."""
    return [
        Document(
            id=str(uuid4()),
            source=DocumentSource.USER_FILE,
            sections=[TextSection(text=t)],
            semantic_identifier=f"test-doc-{i}",
            metadata={},
        )
        for i, t in enumerate(texts)
    ]


def _make_user_file(
    *,
    status: UserFileStatus = UserFileStatus.PROCESSING,
    file_id: str = "test-file-id",
    name: str = "test.txt",
) -> MagicMock:
    """Return a MagicMock mimicking a UserFile ORM instance."""
    uf = MagicMock()
    uf.id = uuid4()
    uf.file_id = file_id
    uf.name = name
    uf.status = status
    uf.token_count = None
    uf.chunk_count = None
    uf.last_project_sync_at = None
    uf.projects = []
    uf.assistants = []
    uf.needs_project_sync = True
    uf.needs_persona_sync = True
    return uf


# ------------------------------------------------------------------
# _process_user_file_without_vector_db — direct tests
# ------------------------------------------------------------------


class TestProcessUserFileWithoutVectorDb:
    @patch(f"{TASKS_MODULE}.store_user_file_plaintext")
    @patch(f"{LLM_FACTORY_MODULE}.get_llm_tokenizer_encode_func")
    @patch(f"{LLM_FACTORY_MODULE}.get_default_llm")
    def test_extracts_and_combines_text(
        self,
        mock_get_llm: MagicMock,  # noqa: ARG002
        mock_get_encode: MagicMock,
        mock_store_plaintext: MagicMock,
    ) -> None:
        mock_encode = MagicMock(return_value=[1, 2, 3, 4, 5])
        mock_get_encode.return_value = mock_encode

        uf = _make_user_file()
        docs = _make_documents(["hello world", "foo bar"])
        db_session = MagicMock()

        _process_user_file_without_vector_db(uf, docs, db_session)

        stored_text = mock_store_plaintext.call_args.kwargs["plaintext_content"]
        assert "hello world" in stored_text
        assert "foo bar" in stored_text

    @patch(f"{TASKS_MODULE}.store_user_file_plaintext")
    @patch(f"{LLM_FACTORY_MODULE}.get_llm_tokenizer_encode_func")
    @patch(f"{LLM_FACTORY_MODULE}.get_default_llm")
    def test_computes_token_count(
        self,
        mock_get_llm: MagicMock,  # noqa: ARG002
        mock_get_encode: MagicMock,
        mock_store_plaintext: MagicMock,  # noqa: ARG002
    ) -> None:
        mock_encode = MagicMock(return_value=list(range(42)))
        mock_get_encode.return_value = mock_encode

        uf = _make_user_file()
        docs = _make_documents(["some text content"])
        db_session = MagicMock()

        _process_user_file_without_vector_db(uf, docs, db_session)

        assert uf.token_count == 42

    @patch(f"{TASKS_MODULE}.store_user_file_plaintext")
    @patch(f"{LLM_FACTORY_MODULE}.get_llm_tokenizer_encode_func")
    @patch(f"{LLM_FACTORY_MODULE}.get_default_llm")
    def test_token_count_falls_back_to_none_on_error(
        self,
        mock_get_llm: MagicMock,
        mock_get_encode: MagicMock,  # noqa: ARG002
        mock_store_plaintext: MagicMock,  # noqa: ARG002
    ) -> None:
        mock_get_llm.side_effect = RuntimeError("No LLM configured")

        uf = _make_user_file()
        docs = _make_documents(["text"])
        db_session = MagicMock()

        _process_user_file_without_vector_db(uf, docs, db_session)

        assert uf.token_count is None

    @patch(f"{TASKS_MODULE}.store_user_file_plaintext")
    @patch(f"{LLM_FACTORY_MODULE}.get_llm_tokenizer_encode_func")
    @patch(f"{LLM_FACTORY_MODULE}.get_default_llm")
    def test_stores_plaintext(
        self,
        mock_get_llm: MagicMock,  # noqa: ARG002
        mock_get_encode: MagicMock,
        mock_store_plaintext: MagicMock,
    ) -> None:
        mock_get_encode.return_value = MagicMock(return_value=[1])

        uf = _make_user_file()
        docs = _make_documents(["content to store"])
        db_session = MagicMock()

        _process_user_file_without_vector_db(uf, docs, db_session)

        mock_store_plaintext.assert_called_once_with(
            user_file_id=uf.id,
            plaintext_content="content to store",
        )

    @patch(f"{TASKS_MODULE}.store_user_file_plaintext")
    @patch(f"{LLM_FACTORY_MODULE}.get_llm_tokenizer_encode_func")
    @patch(f"{LLM_FACTORY_MODULE}.get_default_llm")
    def test_sets_completed_status_and_zero_chunk_count(
        self,
        mock_get_llm: MagicMock,  # noqa: ARG002
        mock_get_encode: MagicMock,
        mock_store_plaintext: MagicMock,  # noqa: ARG002
    ) -> None:
        mock_get_encode.return_value = MagicMock(return_value=[1])

        uf = _make_user_file()
        docs = _make_documents(["text"])
        db_session = MagicMock()

        _process_user_file_without_vector_db(uf, docs, db_session)

        assert uf.status == UserFileStatus.COMPLETED
        assert uf.chunk_count == 0
        assert uf.last_project_sync_at is not None
        db_session.add.assert_called_once_with(uf)
        db_session.commit.assert_called_once()

    @patch(f"{TASKS_MODULE}.store_user_file_plaintext")
    @patch(f"{LLM_FACTORY_MODULE}.get_llm_tokenizer_encode_func")
    @patch(f"{LLM_FACTORY_MODULE}.get_default_llm")
    def test_preserves_deleting_status(
        self,
        mock_get_llm: MagicMock,  # noqa: ARG002
        mock_get_encode: MagicMock,
        mock_store_plaintext: MagicMock,  # noqa: ARG002
    ) -> None:
        mock_get_encode.return_value = MagicMock(return_value=[1])

        uf = _make_user_file(status=UserFileStatus.DELETING)
        docs = _make_documents(["text"])
        db_session = MagicMock()

        _process_user_file_without_vector_db(uf, docs, db_session)

        assert uf.status == UserFileStatus.DELETING
        assert uf.chunk_count == 0


# ------------------------------------------------------------------
# _process_user_file_impl — branching on DISABLE_VECTOR_DB
# ------------------------------------------------------------------


class TestProcessImplBranching:
    @patch(f"{TASKS_MODULE}._process_user_file_without_vector_db")
    @patch(f"{TASKS_MODULE}._process_user_file_with_indexing")
    @patch(f"{TASKS_MODULE}.DISABLE_VECTOR_DB", True)
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    def test_calls_without_vector_db_when_disabled(
        self,
        mock_get_session: MagicMock,
        mock_with_indexing: MagicMock,
        mock_without_vdb: MagicMock,
    ) -> None:
        uf = _make_user_file()
        session = MagicMock()
        session.get.return_value = uf
        mock_get_session.return_value.__enter__.return_value = session

        connector_mock = MagicMock()
        connector_mock.load_from_state.return_value = [_make_documents(["hello"])]

        with patch(f"{TASKS_MODULE}.LocalFileConnector", return_value=connector_mock):
            _process_user_file_impl(
                user_file_id=str(uf.id),
                tenant_id="test-tenant",
                redis_locking=False,
            )

        mock_without_vdb.assert_called_once()
        mock_with_indexing.assert_not_called()

    @patch(f"{TASKS_MODULE}._process_user_file_without_vector_db")
    @patch(f"{TASKS_MODULE}._process_user_file_with_indexing")
    @patch(f"{TASKS_MODULE}.DISABLE_VECTOR_DB", False)
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    def test_calls_with_indexing_when_vector_db_enabled(
        self,
        mock_get_session: MagicMock,
        mock_with_indexing: MagicMock,
        mock_without_vdb: MagicMock,
    ) -> None:
        uf = _make_user_file()
        session = MagicMock()
        session.get.return_value = uf
        mock_get_session.return_value.__enter__.return_value = session

        connector_mock = MagicMock()
        connector_mock.load_from_state.return_value = [_make_documents(["hello"])]

        with patch(f"{TASKS_MODULE}.LocalFileConnector", return_value=connector_mock):
            _process_user_file_impl(
                user_file_id=str(uf.id),
                tenant_id="test-tenant",
                redis_locking=False,
            )

        mock_with_indexing.assert_called_once()
        mock_without_vdb.assert_not_called()

    @patch(f"{TASKS_MODULE}.run_indexing_pipeline")
    @patch(f"{TASKS_MODULE}.store_user_file_plaintext")
    @patch(f"{TASKS_MODULE}.DISABLE_VECTOR_DB", True)
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    def test_indexing_pipeline_not_called_when_disabled(
        self,
        mock_get_session: MagicMock,
        mock_store_plaintext: MagicMock,  # noqa: ARG002
        mock_run_pipeline: MagicMock,
    ) -> None:
        """End-to-end: verify run_indexing_pipeline is never invoked."""
        uf = _make_user_file()
        session = MagicMock()
        session.get.return_value = uf
        mock_get_session.return_value.__enter__.return_value = session

        connector_mock = MagicMock()
        connector_mock.load_from_state.return_value = [_make_documents(["content"])]

        with (
            patch(f"{TASKS_MODULE}.LocalFileConnector", return_value=connector_mock),
            patch(f"{LLM_FACTORY_MODULE}.get_default_llm"),
            patch(
                f"{LLM_FACTORY_MODULE}.get_llm_tokenizer_encode_func",
                return_value=MagicMock(return_value=[1, 2, 3]),
            ),
        ):
            _process_user_file_impl(
                user_file_id=str(uf.id),
                tenant_id="test-tenant",
                redis_locking=False,
            )

        mock_run_pipeline.assert_not_called()


# ------------------------------------------------------------------
# _delete_user_file_impl — vector DB skip
# ------------------------------------------------------------------


class TestDeleteImplNoVectorDb:
    @patch(f"{TASKS_MODULE}.DISABLE_VECTOR_DB", True)
    @patch(f"{TASKS_MODULE}.get_default_file_store")
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    def test_skips_vector_db_deletion(
        self,
        mock_get_session: MagicMock,
        mock_get_file_store: MagicMock,
    ) -> None:
        uf = _make_user_file(status=UserFileStatus.DELETING)
        session = MagicMock()
        session.get.return_value = uf
        mock_get_session.return_value.__enter__.return_value = session
        mock_get_file_store.return_value = MagicMock()

        with (
            patch(f"{TASKS_MODULE}.get_all_document_indices") as mock_get_indices,
            patch(f"{TASKS_MODULE}.get_active_search_settings") as mock_get_ss,
            patch(f"{TASKS_MODULE}.httpx_init_vespa_pool") as mock_vespa_pool,
        ):
            _delete_user_file_impl(
                user_file_id=str(uf.id),
                tenant_id="test-tenant",
                redis_locking=False,
            )

        mock_get_indices.assert_not_called()
        mock_get_ss.assert_not_called()
        mock_vespa_pool.assert_not_called()

        session.delete.assert_called_once_with(uf)
        session.commit.assert_called_once()

    @patch(f"{TASKS_MODULE}.DISABLE_VECTOR_DB", True)
    @patch(f"{TASKS_MODULE}.get_default_file_store")
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    def test_still_deletes_file_store_and_db_record(
        self,
        mock_get_session: MagicMock,
        mock_get_file_store: MagicMock,
    ) -> None:
        uf = _make_user_file(status=UserFileStatus.DELETING)
        session = MagicMock()
        session.get.return_value = uf
        mock_get_session.return_value.__enter__.return_value = session

        file_store = MagicMock()
        mock_get_file_store.return_value = file_store

        _delete_user_file_impl(
            user_file_id=str(uf.id),
            tenant_id="test-tenant",
            redis_locking=False,
        )

        assert file_store.delete_file.call_count == 2
        session.delete.assert_called_once_with(uf)
        session.commit.assert_called_once()


# ------------------------------------------------------------------
# _project_sync_user_file_impl — vector DB skip
# ------------------------------------------------------------------


class TestProjectSyncImplNoVectorDb:
    @patch(f"{TASKS_MODULE}.DISABLE_VECTOR_DB", True)
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    def test_skips_vector_db_update(
        self,
        mock_get_session: MagicMock,
    ) -> None:
        uf = _make_user_file(status=UserFileStatus.COMPLETED)
        session = MagicMock()
        session.execute.return_value.scalar_one_or_none.return_value = uf
        mock_get_session.return_value.__enter__.return_value = session

        with (
            patch(f"{TASKS_MODULE}.get_all_document_indices") as mock_get_indices,
            patch(f"{TASKS_MODULE}.get_active_search_settings") as mock_get_ss,
            patch(f"{TASKS_MODULE}.httpx_init_vespa_pool") as mock_vespa_pool,
        ):
            _project_sync_user_file_impl(
                user_file_id=str(uf.id),
                tenant_id="test-tenant",
                redis_locking=False,
            )

        mock_get_indices.assert_not_called()
        mock_get_ss.assert_not_called()
        mock_vespa_pool.assert_not_called()

    @patch(f"{TASKS_MODULE}.DISABLE_VECTOR_DB", True)
    @patch(f"{TASKS_MODULE}.get_session_with_current_tenant")
    def test_still_clears_sync_flags(
        self,
        mock_get_session: MagicMock,
    ) -> None:
        uf = _make_user_file(status=UserFileStatus.COMPLETED)
        session = MagicMock()
        session.execute.return_value.scalar_one_or_none.return_value = uf
        mock_get_session.return_value.__enter__.return_value = session

        _project_sync_user_file_impl(
            user_file_id=str(uf.id),
            tenant_id="test-tenant",
            redis_locking=False,
        )

        assert uf.needs_project_sync is False
        assert uf.needs_persona_sync is False
        assert uf.last_project_sync_at is not None
        session.add.assert_called_once_with(uf)
        session.commit.assert_called_once()
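
The assertions above amount to a compact spec for the no-vector-DB path. Below is a minimal sketch of a function satisfying them; the joining separator, the timestamp call, and how the tokenizer encode function is obtained are assumptions, while the patched names (get_default_llm, get_llm_tokenizer_encode_func, store_user_file_plaintext) are taken from the tests.

from datetime import datetime, timezone

def _sketch_process_without_vector_db(user_file, documents, db_session) -> None:
    # Flatten every section of every document into one plaintext blob.
    full_text = "\n\n".join(
        section.text for doc in documents for section in doc.sections
    )

    # Token counting is best-effort: any failure (e.g. no LLM configured)
    # leaves token_count as None rather than failing the file.
    try:
        llm = get_default_llm()
        encode = get_llm_tokenizer_encode_func(llm)  # assumed signature
        user_file.token_count = len(encode(full_text))
    except Exception:
        user_file.token_count = None

    # Persist the plaintext so tools like FileReaderTool can read it later.
    store_user_file_plaintext(user_file_id=user_file.id, plaintext_content=full_text)

    # No chunks exist without a vector DB; finish processing, but never
    # overwrite a concurrent DELETING status.
    user_file.chunk_count = 0
    if user_file.status != UserFileStatus.DELETING:
        user_file.status = UserFileStatus.COMPLETED
    user_file.last_project_sync_at = datetime.now(timezone.utc)
    db_session.add(user_file)
    db_session.commit()
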
@@ -44,6 +44,7 @@ def _build_provider_view(
        id=1,
        name="test-provider",
        provider=provider,
        default_model_name="test-model",
        model_configurations=[
            ModelConfigurationView(
                name="test-model",
@@ -61,6 +62,7 @@ def _build_provider_view(
        groups=[],
        personas=[],
        deployment_name=None,
        default_vision_model=None,
    )

backend/tests/unit/onyx/test_startup_validation.py (new file, 52 lines)
@@ -0,0 +1,52 @@
"""Tests for startup validation in no-vector-DB mode.

Verifies that DISABLE_VECTOR_DB raises RuntimeError when combined with
incompatible settings (MULTI_TENANT, ENABLE_CRAFT).
"""

from unittest.mock import patch

import pytest


class TestValidateNoVectorDbSettings:
    @patch("onyx.main.DISABLE_VECTOR_DB", False)
    def test_no_error_when_vector_db_enabled(self) -> None:
        from onyx.main import validate_no_vector_db_settings

        validate_no_vector_db_settings()

    @patch("onyx.main.DISABLE_VECTOR_DB", True)
    @patch("onyx.main.MULTI_TENANT", False)
    @patch("onyx.server.features.build.configs.ENABLE_CRAFT", False)
    def test_no_error_when_no_conflicts(self) -> None:
        from onyx.main import validate_no_vector_db_settings

        validate_no_vector_db_settings()

    @patch("onyx.main.DISABLE_VECTOR_DB", True)
    @patch("onyx.main.MULTI_TENANT", True)
    def test_raises_on_multi_tenant(self) -> None:
        from onyx.main import validate_no_vector_db_settings

        with pytest.raises(RuntimeError, match="MULTI_TENANT"):
            validate_no_vector_db_settings()

    @patch("onyx.main.DISABLE_VECTOR_DB", True)
    @patch("onyx.main.MULTI_TENANT", False)
    @patch("onyx.server.features.build.configs.ENABLE_CRAFT", True)
    def test_raises_on_enable_craft(self) -> None:
        from onyx.main import validate_no_vector_db_settings

        with pytest.raises(RuntimeError, match="ENABLE_CRAFT"):
            validate_no_vector_db_settings()

    @patch("onyx.main.DISABLE_VECTOR_DB", True)
    @patch("onyx.main.MULTI_TENANT", True)
    @patch("onyx.server.features.build.configs.ENABLE_CRAFT", True)
    def test_multi_tenant_checked_before_craft(self) -> None:
        """MULTI_TENANT is checked first, so it should be the error raised."""
        from onyx.main import validate_no_vector_db_settings

        with pytest.raises(RuntimeError, match="MULTI_TENANT"):
            validate_no_vector_db_settings()
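
For reference, the behavior these cases pin down fits in a few lines. In this sketch the error messages are paraphrases (the tests only match on the setting names), and the local import mirrors the fact that the tests patch ENABLE_CRAFT on its configs module rather than on onyx.main:

def validate_no_vector_db_settings() -> None:
    # Sketch inferred from the tests; exact wording is an assumption.
    if not DISABLE_VECTOR_DB:
        return
    if MULTI_TENANT:  # checked first, per test_multi_tenant_checked_before_craft
        raise RuntimeError("DISABLE_VECTOR_DB is incompatible with MULTI_TENANT")

    # Read at call time from the configs module, which is why the tests patch
    # onyx.server.features.build.configs.ENABLE_CRAFT.
    from onyx.server.features.build.configs import ENABLE_CRAFT

    if ENABLE_CRAFT:
        raise RuntimeError("DISABLE_VECTOR_DB is incompatible with ENABLE_CRAFT")
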
@@ -0,0 +1,196 @@
"""Tests for tool construction when DISABLE_VECTOR_DB is True.

Verifies that:
- SearchTool.is_available() returns False when vector DB is disabled
- OpenURLTool.is_available() returns False when vector DB is disabled
- The force-add SearchTool block is suppressed when DISABLE_VECTOR_DB is set
- FileReaderTool.is_available() returns True when vector DB is disabled
"""

from unittest.mock import MagicMock
from unittest.mock import patch

import pytest

from onyx.tools.tool_implementations.file_reader.file_reader_tool import FileReaderTool

APP_CONFIGS_MODULE = "onyx.configs.app_configs"
FILE_READER_MODULE = "onyx.tools.tool_implementations.file_reader.file_reader_tool"


# ------------------------------------------------------------------
# SearchTool.is_available()
# ------------------------------------------------------------------


class TestSearchToolAvailability:
    @patch(f"{APP_CONFIGS_MODULE}.DISABLE_VECTOR_DB", True)
    def test_unavailable_when_vector_db_disabled(self) -> None:
        from onyx.tools.tool_implementations.search.search_tool import SearchTool

        assert SearchTool.is_available(MagicMock()) is False

    @patch("onyx.db.connector.check_user_files_exist", return_value=True)
    @patch(
        "onyx.tools.tool_implementations.search.search_tool.check_federated_connectors_exist",
        return_value=False,
    )
    @patch(
        "onyx.tools.tool_implementations.search.search_tool.check_connectors_exist",
        return_value=False,
    )
    @patch(f"{APP_CONFIGS_MODULE}.DISABLE_VECTOR_DB", False)
    def test_available_when_vector_db_enabled_and_files_exist(
        self,
        mock_connectors: MagicMock,  # noqa: ARG002
        mock_federated: MagicMock,  # noqa: ARG002
        mock_user_files: MagicMock,  # noqa: ARG002
    ) -> None:
        from onyx.tools.tool_implementations.search.search_tool import SearchTool

        assert SearchTool.is_available(MagicMock()) is True


# ------------------------------------------------------------------
# OpenURLTool.is_available()
# ------------------------------------------------------------------


class TestOpenURLToolAvailability:
    @patch(f"{APP_CONFIGS_MODULE}.DISABLE_VECTOR_DB", True)
    def test_unavailable_when_vector_db_disabled(self) -> None:
        from onyx.tools.tool_implementations.open_url.open_url_tool import OpenURLTool

        assert OpenURLTool.is_available(MagicMock()) is False

    @patch(f"{APP_CONFIGS_MODULE}.DISABLE_VECTOR_DB", False)
    def test_available_when_vector_db_enabled(self) -> None:
        from onyx.tools.tool_implementations.open_url.open_url_tool import OpenURLTool

        assert OpenURLTool.is_available(MagicMock()) is True


# ------------------------------------------------------------------
# FileReaderTool.is_available()
# ------------------------------------------------------------------


class TestFileReaderToolAvailability:
    @patch(f"{FILE_READER_MODULE}.DISABLE_VECTOR_DB", True)
    def test_available_when_vector_db_disabled(self) -> None:
        assert FileReaderTool.is_available(MagicMock()) is True

    @patch(f"{FILE_READER_MODULE}.DISABLE_VECTOR_DB", False)
    def test_unavailable_when_vector_db_enabled(self) -> None:
        assert FileReaderTool.is_available(MagicMock()) is False


# ------------------------------------------------------------------
# Force-add SearchTool suppression
# ------------------------------------------------------------------


class TestForceAddSearchToolGuard:
    def test_force_add_block_checks_disable_vector_db(self) -> None:
        """The force-add SearchTool block in construct_tools should include
        `not DISABLE_VECTOR_DB` so that forced search is also suppressed
        without a vector DB."""
        import inspect

        from onyx.tools.tool_constructor import construct_tools

        source = inspect.getsource(construct_tools)
        assert "DISABLE_VECTOR_DB" in source, (
            "construct_tools should reference DISABLE_VECTOR_DB "
            "to suppress force-adding SearchTool"
        )


# ------------------------------------------------------------------
# Persona API — _validate_vector_db_knowledge
# ------------------------------------------------------------------


class TestValidateVectorDbKnowledge:
    @patch(
        "onyx.server.features.persona.api.DISABLE_VECTOR_DB",
        True,
    )
    def test_rejects_document_set_ids(self) -> None:
        from fastapi import HTTPException

        from onyx.server.features.persona.api import _validate_vector_db_knowledge

        request = MagicMock()
        request.document_set_ids = [1]
        request.hierarchy_node_ids = []
        request.document_ids = []

        with pytest.raises(HTTPException) as exc_info:
            _validate_vector_db_knowledge(request)
        assert exc_info.value.status_code == 400
        assert "document sets" in exc_info.value.detail

    @patch(
        "onyx.server.features.persona.api.DISABLE_VECTOR_DB",
        True,
    )
    def test_rejects_hierarchy_node_ids(self) -> None:
        from fastapi import HTTPException

        from onyx.server.features.persona.api import _validate_vector_db_knowledge

        request = MagicMock()
        request.document_set_ids = []
        request.hierarchy_node_ids = [1]
        request.document_ids = []

        with pytest.raises(HTTPException) as exc_info:
            _validate_vector_db_knowledge(request)
        assert exc_info.value.status_code == 400
        assert "hierarchy nodes" in exc_info.value.detail

    @patch(
        "onyx.server.features.persona.api.DISABLE_VECTOR_DB",
        True,
    )
    def test_rejects_document_ids(self) -> None:
        from fastapi import HTTPException

        from onyx.server.features.persona.api import _validate_vector_db_knowledge

        request = MagicMock()
        request.document_set_ids = []
        request.hierarchy_node_ids = []
        request.document_ids = ["doc-abc"]

        with pytest.raises(HTTPException) as exc_info:
            _validate_vector_db_knowledge(request)
        assert exc_info.value.status_code == 400
        assert "documents" in exc_info.value.detail

    @patch(
        "onyx.server.features.persona.api.DISABLE_VECTOR_DB",
        True,
    )
    def test_allows_user_files_only(self) -> None:
        from onyx.server.features.persona.api import _validate_vector_db_knowledge

        request = MagicMock()
        request.document_set_ids = []
        request.hierarchy_node_ids = []
        request.document_ids = []

        _validate_vector_db_knowledge(request)

    @patch(
        "onyx.server.features.persona.api.DISABLE_VECTOR_DB",
        False,
    )
    def test_allows_everything_when_vector_db_enabled(self) -> None:
        from onyx.server.features.persona.api import _validate_vector_db_knowledge

        request = MagicMock()
        request.document_set_ids = [1, 2]
        request.hierarchy_node_ids = [3]
        request.document_ids = ["doc-x"]

        _validate_vector_db_knowledge(request)
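
The five validation cases above describe the validator fully. A sketch consistent with them follows; the detail strings are assumptions beyond the substrings the tests actually check for:

from fastapi import HTTPException

def _validate_vector_db_knowledge_sketch(request) -> None:
    if not DISABLE_VECTOR_DB:
        return  # with a vector DB, all knowledge sources are allowed
    if request.document_set_ids:
        raise HTTPException(
            status_code=400,
            detail="Assistants cannot use document sets without a vector DB",
        )
    if request.hierarchy_node_ids:
        raise HTTPException(
            status_code=400,
            detail="Assistants cannot use hierarchy nodes without a vector DB",
        )
    if request.document_ids:
        raise HTTPException(
            status_code=400,
            detail="Assistants cannot use documents without a vector DB",
        )
    # User files carry their own plaintext, so they remain allowed.
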
backend/tests/unit/onyx/tools/test_file_reader_tool.py (new file, 237 lines)
@@ -0,0 +1,237 @@
"""Tests for the FileReaderTool.

Verifies:
- Tool definition schema is well-formed
- File ID validation (allowlist, UUID format)
- Character range extraction and clamping
- Error handling for missing parameters and non-text files
- is_available() reflects DISABLE_VECTOR_DB
"""

from unittest.mock import MagicMock
from unittest.mock import patch
from uuid import uuid4

import pytest

from onyx.file_store.models import ChatFileType
from onyx.file_store.models import InMemoryChatFile
from onyx.server.query_and_chat.placement import Placement
from onyx.tools.models import ToolCallException
from onyx.tools.tool_implementations.file_reader.file_reader_tool import FILE_ID_FIELD
from onyx.tools.tool_implementations.file_reader.file_reader_tool import FileReaderTool
from onyx.tools.tool_implementations.file_reader.file_reader_tool import MAX_NUM_CHARS
from onyx.tools.tool_implementations.file_reader.file_reader_tool import NUM_CHARS_FIELD
from onyx.tools.tool_implementations.file_reader.file_reader_tool import (
    START_CHAR_FIELD,
)

TOOL_MODULE = "onyx.tools.tool_implementations.file_reader.file_reader_tool"
_PLACEMENT = Placement(turn_index=0)


def _make_tool(
    user_file_ids: list | None = None,
    chat_file_ids: list | None = None,
) -> FileReaderTool:
    emitter = MagicMock()
    return FileReaderTool(
        tool_id=99,
        emitter=emitter,
        user_file_ids=user_file_ids or [],
        chat_file_ids=chat_file_ids or [],
    )


def _text_file(content: str, filename: str = "test.txt") -> InMemoryChatFile:
    return InMemoryChatFile(
        file_id="some-file-id",
        content=content.encode("utf-8"),
        file_type=ChatFileType.PLAIN_TEXT,
        filename=filename,
    )


# ------------------------------------------------------------------
# Tool metadata
# ------------------------------------------------------------------


class TestToolMetadata:
    def test_tool_name(self) -> None:
        tool = _make_tool()
        assert tool.name == "read_file"

    def test_tool_definition_schema(self) -> None:
        tool = _make_tool()
        defn = tool.tool_definition()
        assert defn["type"] == "function"
        func = defn["function"]
        assert func["name"] == "read_file"
        props = func["parameters"]["properties"]
        assert FILE_ID_FIELD in props
        assert START_CHAR_FIELD in props
        assert NUM_CHARS_FIELD in props
        assert func["parameters"]["required"] == [FILE_ID_FIELD]


# ------------------------------------------------------------------
# File ID validation
# ------------------------------------------------------------------


class TestFileIdValidation:
    def test_rejects_invalid_uuid(self) -> None:
        tool = _make_tool()
        with pytest.raises(ToolCallException, match="Invalid file_id"):
            tool._validate_file_id("not-a-uuid")

    def test_rejects_file_not_in_allowlist(self) -> None:
        tool = _make_tool(user_file_ids=[uuid4()])
        other_id = uuid4()
        with pytest.raises(ToolCallException, match="not in available files"):
            tool._validate_file_id(str(other_id))

    def test_accepts_user_file_id(self) -> None:
        uid = uuid4()
        tool = _make_tool(user_file_ids=[uid])
        assert tool._validate_file_id(str(uid)) == uid

    def test_accepts_chat_file_id(self) -> None:
        cid = uuid4()
        tool = _make_tool(chat_file_ids=[cid])
        assert tool._validate_file_id(str(cid)) == cid


# ------------------------------------------------------------------
# run() — character range extraction
# ------------------------------------------------------------------


class TestRun:
    @patch(f"{TOOL_MODULE}.get_session_with_current_tenant")
    @patch(f"{TOOL_MODULE}.load_user_file")
    def test_returns_full_content_by_default(
        self,
        mock_load_user_file: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        uid = uuid4()
        content = "Hello, world!"
        mock_load_user_file.return_value = _text_file(content)
        mock_get_session.return_value.__enter__.return_value = MagicMock()

        tool = _make_tool(user_file_ids=[uid])
        resp = tool.run(
            placement=_PLACEMENT,
            override_kwargs=MagicMock(),
            **{FILE_ID_FIELD: str(uid)},
        )
        assert content in resp.llm_facing_response

    @patch(f"{TOOL_MODULE}.get_session_with_current_tenant")
    @patch(f"{TOOL_MODULE}.load_user_file")
    def test_respects_start_char_and_num_chars(
        self,
        mock_load_user_file: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        uid = uuid4()
        content = "abcdefghijklmnop"
        mock_load_user_file.return_value = _text_file(content)
        mock_get_session.return_value.__enter__.return_value = MagicMock()

        tool = _make_tool(user_file_ids=[uid])
        resp = tool.run(
            placement=_PLACEMENT,
            override_kwargs=MagicMock(),
            **{FILE_ID_FIELD: str(uid), START_CHAR_FIELD: 4, NUM_CHARS_FIELD: 6},
        )
        assert "efghij" in resp.llm_facing_response

    @patch(f"{TOOL_MODULE}.get_session_with_current_tenant")
    @patch(f"{TOOL_MODULE}.load_user_file")
    def test_clamps_num_chars_to_max(
        self,
        mock_load_user_file: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        uid = uuid4()
        content = "x" * (MAX_NUM_CHARS + 500)
        mock_load_user_file.return_value = _text_file(content)
        mock_get_session.return_value.__enter__.return_value = MagicMock()

        tool = _make_tool(user_file_ids=[uid])
        resp = tool.run(
            placement=_PLACEMENT,
            override_kwargs=MagicMock(),
            **{FILE_ID_FIELD: str(uid), NUM_CHARS_FIELD: MAX_NUM_CHARS + 9999},
        )
        assert f"Characters 0-{MAX_NUM_CHARS}" in resp.llm_facing_response

    @patch(f"{TOOL_MODULE}.get_session_with_current_tenant")
    @patch(f"{TOOL_MODULE}.load_user_file")
    def test_includes_continuation_hint(
        self,
        mock_load_user_file: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        uid = uuid4()
        content = "x" * 100
        mock_load_user_file.return_value = _text_file(content)
        mock_get_session.return_value.__enter__.return_value = MagicMock()

        tool = _make_tool(user_file_ids=[uid])
        resp = tool.run(
            placement=_PLACEMENT,
            override_kwargs=MagicMock(),
            **{FILE_ID_FIELD: str(uid), NUM_CHARS_FIELD: 10},
        )
        assert "use start_char=10 to continue reading" in resp.llm_facing_response

    def test_raises_on_missing_file_id(self) -> None:
        tool = _make_tool()
        with pytest.raises(ToolCallException, match="Missing required"):
            tool.run(
                placement=_PLACEMENT,
                override_kwargs=MagicMock(),
            )

    @patch(f"{TOOL_MODULE}.get_session_with_current_tenant")
    @patch(f"{TOOL_MODULE}.load_user_file")
    def test_raises_on_non_text_file(
        self,
        mock_load_user_file: MagicMock,
        mock_get_session: MagicMock,
    ) -> None:
        uid = uuid4()
        mock_load_user_file.return_value = InMemoryChatFile(
            file_id="img",
            content=b"\x89PNG",
            file_type=ChatFileType.IMAGE,
            filename="photo.png",
        )
        mock_get_session.return_value.__enter__.return_value = MagicMock()

        tool = _make_tool(user_file_ids=[uid])
        with pytest.raises(ToolCallException, match="not a text file"):
            tool.run(
                placement=_PLACEMENT,
                override_kwargs=MagicMock(),
                **{FILE_ID_FIELD: str(uid)},
            )


# ------------------------------------------------------------------
# is_available()
# ------------------------------------------------------------------


class TestIsAvailable:
    @patch(f"{TOOL_MODULE}.DISABLE_VECTOR_DB", True)
    def test_available_when_vector_db_disabled(self) -> None:
        assert FileReaderTool.is_available(MagicMock()) is True

    @patch(f"{TOOL_MODULE}.DISABLE_VECTOR_DB", False)
    def test_unavailable_when_vector_db_enabled(self) -> None:
        assert FileReaderTool.is_available(MagicMock()) is False
@@ -163,16 +163,3 @@ Add clear comments:
- Any TODOs you add in the code must be accompanied by either the name/username
  of the owner of that TODO, or an issue number for an issue referencing that
  piece of work.
- Avoid module-level logic that runs on import, which leads to import-time side
  effects. Essentially every piece of meaningful logic should exist within some
  function that has to be explicitly invoked. Acceptable exceptions to this may
  include loading environment variables or setting up loggers.
- If you find yourself needing something like this, you may want that logic to
  exist in a file dedicated for manual execution (contains `if __name__ ==
  "__main__":`) which should not be imported by anything else.
- Related to the above, do not conflate Python scripts you intend to run from
  the command line (contains `if __name__ == "__main__":`) with modules you
  intend to import from elsewhere. If for some unlikely reason they have to be
  the same file, any logic specific to executing the file (including imports)
  should be contained in the `if __name__ == "__main__":` block.
- Generally these executable files exist in `backend/scripts/`.

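A minimal sketch of the pattern the bullets above describe, written as one hypothetical `backend/scripts/` file (the module name and cleanup task are invented for illustration, not taken from this changeset). Importing the module has no side effects; everything execution-specific, including its imports, stays inside the guard:

```python
# Hypothetical file: backend/scripts/example_cleanup.py
# Importing this module has no side effects: it only defines a function.


def cleanup_stale_rows(batch_size: int = 100) -> int:
    """Meaningful logic lives in a function that must be explicitly invoked."""
    # TODO(alice): wire this up to a real database session (hypothetical owner).
    return 0


if __name__ == "__main__":
    # Logic specific to executing the file, including its imports, stays here.
    import argparse

    parser = argparse.ArgumentParser(description="Run the stale-row cleanup.")
    parser.add_argument("--batch-size", type=int, default=100)
    args = parser.parse_args()
    print(f"Cleaned up {cleanup_stale_rows(args.batch_size)} rows")
```
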
@@ -468,7 +468,7 @@ services:
      - minio_data:/data
    command: server /data --console-address ":9001"
    healthcheck:
      test: ["CMD", "mc", "ready", "local"]
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 30s
      timeout: 20s
      retries: 3

@@ -16,12 +16,15 @@
# This overlay:
# - Moves Vespa (index), both model servers, and code-interpreter to profiles
#   so they do not start by default
# - Makes the depends_on references to those services optional
# - Sets DISABLE_VECTOR_DB=true on backend services
# - Moves the background worker to the "background" profile (the API server
#   handles all background work via FastAPI BackgroundTasks)
# - Makes the depends_on references to removed services optional
# - Sets DISABLE_VECTOR_DB=true on the api_server
#
# To selectively bring services back:
#   --profile vectordb          Vespa + indexing model server
#   --profile inference         Inference model server
#   --profile background        Background worker (Celery)
#   --profile code-interpreter  Code interpreter
# =============================================================================

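The overlay above notes that the API server absorbs background work via FastAPI BackgroundTasks. As a rough illustration of that mechanism only (the endpoint and task below are hypothetical and not part of this changeset), a handler can queue work that runs in-process after the response is sent, which is what makes the dedicated worker container optional:

```python
from fastapi import BackgroundTasks, FastAPI

app = FastAPI()


def process_upload(file_id: str) -> None:
    # Stand-in for work that would otherwise run on a Celery worker.
    print(f"processing {file_id}")


@app.post("/uploads/{file_id}")
def enqueue_upload(file_id: str, background_tasks: BackgroundTasks) -> dict[str, str]:
    # FastAPI executes the task in-process after the response is returned,
    # so no separate background worker deployment is needed for this path.
    background_tasks.add_task(process_upload, file_id)
    return {"status": "accepted"}
```
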
@@ -43,20 +46,10 @@ services:
      - DISABLE_VECTOR_DB=true
      - FILE_STORE_BACKEND=postgres

  # Move the background worker to a profile so it does not start by default.
  # The API server handles all background work in NO_VECTOR_DB mode.
  background:
    depends_on:
      index:
        condition: service_started
        required: false
      indexing_model_server:
        condition: service_started
        required: false
      inference_model_server:
        condition: service_started
        required: false
    environment:
      - DISABLE_VECTOR_DB=true
      - FILE_STORE_BACKEND=postgres
    profiles: ["background"]

  # Move Vespa and indexing model server to a profile so they do not start.
  index:

@@ -293,7 +293,7 @@ services:
      - minio_data:/data
    command: server /data --console-address ":9001"
    healthcheck:
      test: ["CMD", "mc", "ready", "local"]
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 30s
      timeout: 20s
      retries: 3

@@ -298,7 +298,7 @@ services:
      - minio_data:/data
    command: server /data --console-address ":9001"
    healthcheck:
      test: ["CMD", "mc", "ready", "local"]
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 30s
      timeout: 20s
      retries: 3

@@ -335,7 +335,7 @@ services:
      - minio_data:/data
    command: server /data --console-address ":9001"
    healthcheck:
      test: ["CMD", "mc", "ready", "local"]
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 30s
      timeout: 20s
      retries: 3

@@ -232,7 +232,7 @@ services:
      - minio_data:/data
    command: server /data --console-address ":9001"
    healthcheck:
      test: ["CMD", "mc", "ready", "local"]
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 30s
      timeout: 20s
      retries: 3

@@ -520,7 +520,7 @@ services:
      - minio_data:/data
    command: server /data --console-address ":9001"
    healthcheck:
      test: ["CMD", "mc", "ready", "local"]
      test: ["CMD", "curl", "-f", "http://localhost:9000/minio/health/live"]
      interval: 30s
      timeout: 20s
      retries: 3

@@ -1,4 +1,4 @@
{{- if gt (int .Values.celery_beat.replicaCount) 0 }}
{{- if and .Values.vectorDB.enabled (gt (int .Values.celery_beat.replicaCount) 0) }}
apiVersion: apps/v1
kind: Deployment
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_heavy.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_heavy.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_heavy.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_heavy.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: keda.sh/v1alpha1
kind: ScaledObject
metadata:

@@ -1,4 +1,4 @@
{{- if gt (int .Values.celery_worker_heavy.replicaCount) 0 }}
{{- if and .Values.vectorDB.enabled (gt (int .Values.celery_worker_heavy.replicaCount) 0) }}
apiVersion: apps/v1
kind: Deployment
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_light.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_light.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_light.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_light.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: keda.sh/v1alpha1
kind: ScaledObject
metadata:

@@ -1,4 +1,4 @@
{{- if gt (int .Values.celery_worker_light.replicaCount) 0 }}
{{- if and .Values.vectorDB.enabled (gt (int .Values.celery_worker_light.replicaCount) 0) }}
apiVersion: apps/v1
kind: Deployment
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_monitoring.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_monitoring.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_monitoring.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_monitoring.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: keda.sh/v1alpha1
kind: ScaledObject
metadata:

@@ -1,4 +1,4 @@
{{- if gt (int .Values.celery_worker_monitoring.replicaCount) 0 }}
{{- if and .Values.vectorDB.enabled (gt (int .Values.celery_worker_monitoring.replicaCount) 0) }}
apiVersion: apps/v1
kind: Deployment
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_primary.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_primary.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_primary.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_primary.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: keda.sh/v1alpha1
kind: ScaledObject
metadata:

@@ -1,4 +1,4 @@
{{- if gt (int .Values.celery_worker_primary.replicaCount) 0 }}
{{- if and .Values.vectorDB.enabled (gt (int .Values.celery_worker_primary.replicaCount) 0) }}
apiVersion: apps/v1
kind: Deployment
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_user_file_processing.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_user_file_processing.autoscaling.enabled) (ne (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: autoscaling/v2
kind: HorizontalPodAutoscaler
metadata:

@@ -1,4 +1,4 @@
{{- if and (.Values.celery_worker_user_file_processing.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
{{- if and .Values.vectorDB.enabled (.Values.celery_worker_user_file_processing.autoscaling.enabled) (eq (include "onyx.autoscaling.engine" .) "keda") }}
apiVersion: keda.sh/v1alpha1
kind: ScaledObject
metadata:

@@ -1,4 +1,4 @@
{{- if gt (int .Values.celery_worker_user_file_processing.replicaCount) 0 }}
{{- if and .Values.vectorDB.enabled (gt (int .Values.celery_worker_user_file_processing.replicaCount) 0) }}
apiVersion: apps/v1
kind: Deployment
metadata:

@@ -28,7 +28,9 @@ postgresql:
# -- Master toggle for vector database support. When false:
#    - Sets DISABLE_VECTOR_DB=true on all backend pods
#    - Skips the indexing model server deployment (embeddings not needed)
#    - Skips docprocessing and docfetching celery workers
#    - Skips ALL celery worker deployments (beat, primary, light, heavy,
#      monitoring, user-file-processing, docprocessing, docfetching) — the
#      API server handles background work via FastAPI BackgroundTasks
#    - You should also set vespa.enabled=false and opensearch.enabled=false
#      to prevent those subcharts from deploying
vectorDB:

@@ -10,7 +10,7 @@ requires-python = ">=3.11"
dependencies = [
    "aioboto3==15.1.0",
    "cohere==5.6.1",
    "fastapi==0.133.1",
    "fastapi==0.128.0",
    "google-cloud-aiplatform==1.121.0",
    "google-genai==1.52.0",
    "litellm==1.81.6",

@@ -51,7 +51,6 @@ func NewRootCommand() *cobra.Command {
	cmd.AddCommand(NewRunCICommand())
	cmd.AddCommand(NewScreenshotDiffCommand())
	cmd.AddCommand(NewWebCommand())
	cmd.AddCommand(NewWhoisCommand())

	return cmd
}

@@ -1,159 +0,0 @@
package cmd

import (
	"fmt"
	"os"
	"regexp"
	"strings"
	"text/tabwriter"

	log "github.com/sirupsen/logrus"
	"github.com/spf13/cobra"

	"github.com/onyx-dot-app/onyx/tools/ods/internal/kube"
)

var safeIdentifier = regexp.MustCompile(`^[a-zA-Z0-9_\-]+$`)

// NewWhoisCommand creates the whois command for looking up users/tenants.
func NewWhoisCommand() *cobra.Command {
	var ctx string

	cmd := &cobra.Command{
		Use:   "whois <email-fragment or tenant-id>",
		Short: "Look up users and admins by email or tenant ID",
		Long: `Look up tenant and user information from the data plane PostgreSQL database.

Requires: AWS SSO login, kubectl access to the EKS cluster.

Two modes (auto-detected):

  Email fragment:
    ods whois chris
    → Searches user_tenant_mapping for emails matching '%chris%'

  Tenant ID:
    ods whois tenant_abcd1234-...
    → Lists all admin emails in that tenant

Cluster connection is configured via KUBE_CTX_* environment variables.
Each variable is a space-separated tuple: "cluster region namespace"

  export KUBE_CTX_DATA_PLANE="<cluster> <region> <namespace>"
  export KUBE_CTX_CONTROL_PLANE="<cluster> <region> <namespace>"
  etc...

Use -c to select which context (default: data_plane).`,
		Args: cobra.ExactArgs(1),
		Run: func(cmd *cobra.Command, args []string) {
			runWhois(args[0], ctx)
		},
	}

	cmd.Flags().StringVarP(&ctx, "context", "c", "data_plane", "cluster context name (maps to KUBE_CTX_<NAME> env var)")

	return cmd
}

func clusterFromEnv(name string) *kube.Cluster {
	envKey := "KUBE_CTX_" + strings.ToUpper(name)
	val := os.Getenv(envKey)
	if val == "" {
		log.Fatalf("Environment variable %s is not set.\n\nSet it as a space-separated tuple:\n  export %s=\"<cluster> <region> <namespace>\"", envKey, envKey)
	}

	parts := strings.Fields(val)
	if len(parts) != 3 {
		log.Fatalf("%s must be a space-separated tuple of 3 values (cluster region namespace), got: %q", envKey, val)
	}

	return &kube.Cluster{Name: parts[0], Region: parts[1], Namespace: parts[2]}
}

// queryPod runs a SQL query via pginto on the given pod and returns cleaned output lines.
func queryPod(c *kube.Cluster, pod, sql string) []string {
	raw, err := c.ExecOnPod(pod, "pginto", "-A", "-t", "-F", "\t", "-c", sql)
	if err != nil {
		log.Fatalf("Query failed: %v", err)
	}

	var lines []string
	for _, line := range strings.Split(strings.TrimSpace(raw), "\n") {
		line = strings.TrimSpace(line)
		if line != "" && !strings.HasPrefix(line, "Connecting to ") {
			lines = append(lines, line)
		}
	}
	return lines
}

func runWhois(query string, ctx string) {
	c := clusterFromEnv(ctx)

	if err := c.EnsureContext(); err != nil {
		log.Fatalf("Failed to ensure cluster context: %v", err)
	}

	log.Info("Finding api-server pod...")
	pod, err := c.FindPod("api-server")
	if err != nil {
		log.Fatalf("Failed to find api-server pod: %v", err)
	}
	log.Debugf("Using pod: %s", pod)

	if strings.HasPrefix(query, "tenant_") {
		findAdminsByTenant(c, pod, query)
	} else {
		findByEmail(c, pod, query)
	}
}

func findByEmail(c *kube.Cluster, pod, fragment string) {
	fragment = strings.NewReplacer("'", "", `"`, "", `;`, "", `\`, `\\`, `%`, `\%`, `_`, `\_`).Replace(fragment)

	sql := fmt.Sprintf(
		`SELECT email, tenant_id, active FROM public.user_tenant_mapping WHERE email LIKE '%%%s%%' ORDER BY email;`,
		fragment,
	)

	log.Infof("Searching for emails matching '%%%s%%'...", fragment)
	lines := queryPod(c, pod, sql)
	if len(lines) == 0 {
		fmt.Println("No results found.")
		return
	}

	fmt.Println()
	w := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0)
	_, _ = fmt.Fprintln(w, "EMAIL\tTENANT ID\tACTIVE")
	_, _ = fmt.Fprintln(w, "-----\t---------\t------")
	for _, line := range lines {
		_, _ = fmt.Fprintln(w, line)
	}
	_ = w.Flush()
}

func findAdminsByTenant(c *kube.Cluster, pod, tenantID string) {
	if !safeIdentifier.MatchString(tenantID) {
		log.Fatalf("Invalid tenant ID: %q (must be alphanumeric, hyphens, underscores only)", tenantID)
	}

	sql := fmt.Sprintf(
		`SELECT email FROM "%s"."user" WHERE role = 'ADMIN' AND is_active = true AND email NOT LIKE 'api_key__%%' ORDER BY email;`,
		tenantID,
	)

	log.Infof("Fetching admin emails for %s...", tenantID)
	lines := queryPod(c, pod, sql)
	if len(lines) == 0 {
		fmt.Println("No admin users found for this tenant.")
		return
	}

	fmt.Println()
	fmt.Println("EMAIL")
	fmt.Println("-----")
	for _, line := range lines {
		fmt.Println(line)
	}
}

@@ -1,90 +0,0 @@
package kube

import (
	"bytes"
	"fmt"
	"os/exec"
	"strings"

	log "github.com/sirupsen/logrus"
)

// Cluster holds the connection info for a Kubernetes cluster.
type Cluster struct {
	Name      string
	Region    string
	Namespace string
}

// EnsureContext makes sure the cluster exists in kubeconfig, calling
// aws eks update-kubeconfig only if the context is missing.
func (c *Cluster) EnsureContext() error {
	// Check if context already exists in kubeconfig
	cmd := exec.Command("kubectl", "config", "get-contexts", c.Name, "--no-headers")
	if err := cmd.Run(); err == nil {
		log.Debugf("Context %s already exists, skipping aws eks update-kubeconfig", c.Name)
		return nil
	}

	log.Infof("Context %s not found, fetching kubeconfig from AWS...", c.Name)
	cmd = exec.Command("aws", "eks", "update-kubeconfig", "--region", c.Region, "--name", c.Name, "--alias", c.Name)
	if out, err := cmd.CombinedOutput(); err != nil {
		return fmt.Errorf("aws eks update-kubeconfig failed: %w\n%s", err, string(out))
	}

	return nil
}

// kubectlArgs returns common kubectl flags to target this cluster without mutating global context.
func (c *Cluster) kubectlArgs() []string {
	return []string{"--context", c.Name, "--namespace", c.Namespace}
}

// FindPod returns the name of the first Running/Ready pod matching the given substring.
func (c *Cluster) FindPod(substring string) (string, error) {
	args := append(c.kubectlArgs(), "get", "po",
		"--field-selector", "status.phase=Running",
		"--no-headers",
		"-o", "custom-columns=NAME:.metadata.name,READY:.status.conditions[?(@.type=='Ready')].status",
	)
	cmd := exec.Command("kubectl", args...)
	out, err := cmd.Output()
	if err != nil {
		if exitErr, ok := err.(*exec.ExitError); ok {
			return "", fmt.Errorf("kubectl get po failed: %w\n%s", err, string(exitErr.Stderr))
		}
		return "", fmt.Errorf("kubectl get po failed: %w", err)
	}

	for _, line := range strings.Split(strings.TrimSpace(string(out)), "\n") {
		fields := strings.Fields(line)
		if len(fields) < 2 {
			continue
		}
		name, ready := fields[0], fields[1]
		if strings.Contains(name, substring) && ready == "True" {
			log.Debugf("Found pod: %s", name)
			return name, nil
		}
	}

	return "", fmt.Errorf("no ready pod found matching %q", substring)
}

// ExecOnPod runs a command on a pod and returns its stdout.
func (c *Cluster) ExecOnPod(pod string, command ...string) (string, error) {
	args := append(c.kubectlArgs(), "exec", pod, "--")
	args = append(args, command...)
	log.Debugf("Running: kubectl %s", strings.Join(args, " "))

	cmd := exec.Command("kubectl", args...)
	var stdout, stderr bytes.Buffer
	cmd.Stdout = &stdout
	cmd.Stderr = &stderr

	if err := cmd.Run(); err != nil {
		return "", fmt.Errorf("kubectl exec failed: %w\n%s", err, stderr.String())
	}

	return stdout.String(), nil
}

uv.lock (generated file, 15 changed lines)
@@ -1688,18 +1688,17 @@ wheels = [

[[package]]
name = "fastapi"
version = "0.133.1"
version = "0.128.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "annotated-doc" },
    { name = "pydantic" },
    { name = "starlette" },
    { name = "typing-extensions" },
    { name = "typing-inspection" },
]
sdist = { url = "https://files.pythonhosted.org/packages/22/6f/0eafed8349eea1fa462238b54a624c8b408cd1ba2795c8e64aa6c34f8ab7/fastapi-0.133.1.tar.gz", hash = "sha256:ed152a45912f102592976fde6cbce7dae1a8a1053da94202e51dd35d184fadd6", size = 378741, upload-time = "2026-02-25T18:18:17.398Z" }
sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/d2/c9/a175a7779f3599dfa4adfc97a6ce0e157237b3d7941538604aadaf97bfb6/fastapi-0.133.1-py3-none-any.whl", hash = "sha256:658f34ba334605b1617a65adf2ea6461901bdb9af3a3080d63ff791ecf7dc2e2", size = 109029, upload-time = "2026-02-25T18:18:18.578Z" },
    { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" },
]

[[package]]

@@ -4613,7 +4612,7 @@ requires-dist = [
    { name = "einops", marker = "extra == 'model-server'", specifier = "==0.8.1" },
    { name = "exa-py", marker = "extra == 'backend'", specifier = "==1.15.4" },
    { name = "faker", marker = "extra == 'dev'", specifier = "==40.1.2" },
    { name = "fastapi", specifier = "==0.133.1" },
    { name = "fastapi", specifier = "==0.128.0" },
    { name = "fastapi-limiter", marker = "extra == 'backend'", specifier = "==0.1.6" },
    { name = "fastapi-users", marker = "extra == 'backend'", specifier = "==15.0.4" },
    { name = "fastapi-users-db-sqlalchemy", marker = "extra == 'backend'", specifier = "==7.0.0" },

@@ -8080,14 +8079,14 @@ wheels = [

[[package]]
name = "werkzeug"
version = "3.1.6"
version = "3.1.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "markupsafe" },
]
sdist = { url = "https://files.pythonhosted.org/packages/61/f1/ee81806690a87dab5f5653c1f146c92bc066d7f4cebc603ef88eb9e13957/werkzeug-3.1.6.tar.gz", hash = "sha256:210c6bede5a420a913956b4791a7f4d6843a43b6fcee4dfa08a65e93007d0d25", size = 864736, upload-time = "2026-02-19T15:17:18.884Z" }
sdist = { url = "https://files.pythonhosted.org/packages/5a/70/1469ef1d3542ae7c2c7b72bd5e3a4e6ee69d7978fa8a3af05a38eca5becf/werkzeug-3.1.5.tar.gz", hash = "sha256:6a548b0e88955dd07ccb25539d7d0cc97417ee9e179677d22c7041c8f078ce67", size = 864754, upload-time = "2026-01-08T17:49:23.247Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/4d/ec/d58832f89ede95652fd01f4f24236af7d32b70cab2196dfcc2d2fd13c5c2/werkzeug-3.1.6-py3-none-any.whl", hash = "sha256:7ddf3357bb9564e407607f988f683d72038551200c704012bb9a4c523d42f131", size = 225166, upload-time = "2026-02-19T15:17:17.475Z" },
    { url = "https://files.pythonhosted.org/packages/ad/e4/8d97cca767bcc1be76d16fb76951608305561c6e056811587f36cb1316a8/werkzeug-3.1.5-py3-none-any.whl", hash = "sha256:5111e36e91086ece91f93268bb39b4a35c1e6f1feac762c9c822ded0a4e322dc", size = 225025, upload-time = "2026-01-08T17:49:21.859Z" },
]

[[package]]

@@ -42,7 +42,7 @@ import SvgStar from "@opal/icons/star";

## Usage inside Content

Tag can be rendered as an accessory inside `Content`'s ContentMd via the `tag` prop:
Tag can be rendered as an accessory inside `Content`'s LabelLayout via the `tag` prop:

```tsx
import { Content } from "@opal/layouts";

@@ -1,7 +1,11 @@
import "@opal/components/buttons/Button/styles.css";
import "@opal/components/tooltip.css";
import { Interactive, type InteractiveBaseProps } from "@opal/core";
import type { SizeVariant, WidthVariant } from "@opal/shared";
import {
  Interactive,
  type InteractiveBaseProps,
  type InteractiveContainerWidthVariant,
} from "@opal/core";
import type { SizeVariant } from "@opal/shared";
import type { TooltipSide } from "@opal/components";
import type { IconFunctionComponent } from "@opal/types";
import * as TooltipPrimitive from "@radix-ui/react-tooltip";

@@ -87,7 +91,7 @@ type ButtonProps = InteractiveBaseProps &
  tooltip?: string;

  /** Width preset. `"auto"` shrink-wraps, `"full"` stretches to parent width. */
  width?: WidthVariant;
  width?: InteractiveContainerWidthVariant;

  /** Which side the tooltip appears on. */
  tooltipSide?: TooltipSide;

@@ -1,6 +1,6 @@
/* Hoverable — item transitions */
.hoverable-item {
  transition: opacity 150ms ease-in-out;
  transition: opacity 200ms ease-in-out;
}

.hoverable-item[data-hoverable-variant="opacity-on-hover"] {

@@ -12,5 +12,6 @@ export {
  type InteractiveBaseProps,
  type InteractiveBaseVariantProps,
  type InteractiveContainerProps,
  type InteractiveContainerWidthVariant,
  type InteractiveContainerRoundingVariant,
} from "@opal/core/interactive/components";

@@ -3,12 +3,7 @@ import React from "react";
import { Slot } from "@radix-ui/react-slot";
import { cn } from "@opal/utils";
import type { WithoutStyles } from "@opal/types";
import {
  sizeVariants,
  type SizeVariant,
  widthVariants,
  type WidthVariant,
} from "@opal/shared";
import { sizeVariants, type SizeVariant } from "@opal/shared";

// ---------------------------------------------------------------------------
// Types

@@ -44,6 +39,18 @@ type InteractiveBaseVariantProps =
      selected?: never;
    };

/**
 * Width presets for `Interactive.Container`.
 *
 * - `"auto"` — Shrink-wraps to content width (default)
 * - `"full"` — Stretches to fill the parent's width (`w-full`)
 */
type InteractiveContainerWidthVariant = "auto" | "full";
const interactiveContainerWidthVariants = {
  auto: "w-auto",
  full: "w-full",
} as const;

/**
 * Border-radius presets for `Interactive.Container`.
 *

@@ -338,7 +345,7 @@ interface InteractiveContainerProps
   *
   * @default "auto"
   */
  widthVariant?: WidthVariant;
  widthVariant?: InteractiveContainerWidthVariant;
}

/**

@@ -406,7 +413,7 @@ function InteractiveContainer({
          height,
          minWidth,
          padding,
          widthVariants[widthVariant],
          interactiveContainerWidthVariants[widthVariant],
          slotClassName
        ),
        "data-border": border ? ("true" as const) : undefined,

@@ -483,5 +490,6 @@ export {
  type InteractiveBaseVariantProps,
  type InteractiveBaseSelectVariantProps,
  type InteractiveContainerProps,
  type InteractiveContainerWidthVariant,
  type InteractiveContainerRoundingVariant,
};

@@ -1,200 +0,0 @@
"use client";

import { Button } from "@opal/components/buttons/Button/components";
import type { SizeVariant } from "@opal/shared";
import SvgEdit from "@opal/icons/edit";
import type { IconFunctionComponent } from "@opal/types";
import { cn } from "@opal/utils";
import { useRef, useState } from "react";

// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

type ContentLgSizePreset = "headline" | "section";

interface ContentLgPresetConfig {
  /** Icon width/height (CSS value). */
  iconSize: string;
  /** Tailwind padding class for the icon container. */
  iconContainerPadding: string;
  /** Gap between icon container and content (CSS value). */
  gap: string;
  /** Tailwind font class for the title. */
  titleFont: string;
  /** Title line-height — also used as icon container min-height (CSS value). */
  lineHeight: string;
  /** Button `size` prop for the edit button. Uses the shared `SizeVariant` scale. */
  editButtonSize: SizeVariant;
  /** Tailwind padding class for the edit button container. */
  editButtonPadding: string;
}

interface ContentLgProps {
  /** Optional icon component. */
  icon?: IconFunctionComponent;

  /** Main title text. */
  title: string;

  /** Optional description below the title. */
  description?: string;

  /** Enable inline editing of the title. */
  editable?: boolean;

  /** Called when the user commits an edit. */
  onTitleChange?: (newTitle: string) => void;

  /** Size preset. Default: `"headline"`. */
  sizePreset?: ContentLgSizePreset;
}

// ---------------------------------------------------------------------------
// Presets
// ---------------------------------------------------------------------------

const CONTENT_LG_PRESETS: Record<ContentLgSizePreset, ContentLgPresetConfig> = {
  headline: {
    iconSize: "2rem",
    iconContainerPadding: "p-0.5",
    gap: "0.25rem",
    titleFont: "font-heading-h2",
    lineHeight: "2.25rem",
    editButtonSize: "md",
    editButtonPadding: "p-1",
  },
  section: {
    iconSize: "1.25rem",
    iconContainerPadding: "p-1",
    gap: "0rem",
    titleFont: "font-heading-h3-muted",
    lineHeight: "1.75rem",
    editButtonSize: "sm",
    editButtonPadding: "p-0.5",
  },
};

// ---------------------------------------------------------------------------
// ContentLg
// ---------------------------------------------------------------------------

function ContentLg({
  sizePreset = "headline",
  icon: Icon,
  title,
  description,
  editable,
  onTitleChange,
}: ContentLgProps) {
  const [editing, setEditing] = useState(false);
  const [editValue, setEditValue] = useState(title);
  const inputRef = useRef<HTMLInputElement>(null);

  const config = CONTENT_LG_PRESETS[sizePreset];

  function startEditing() {
    setEditValue(title);
    setEditing(true);
  }

  function commit() {
    const value = editValue.trim();
    if (value && value !== title) onTitleChange?.(value);
    setEditing(false);
  }

  return (
    <div className="opal-content-lg" style={{ gap: config.gap }}>
      {Icon && (
        <div
          className={cn(
            "opal-content-lg-icon-container shrink-0",
            config.iconContainerPadding
          )}
          style={{ minHeight: config.lineHeight }}
        >
          <Icon
            className="opal-content-lg-icon"
            style={{ width: config.iconSize, height: config.iconSize }}
          />
        </div>
      )}

      <div className="opal-content-lg-body">
        <div className="opal-content-lg-title-row">
          {editing ? (
            <div className="opal-content-lg-input-sizer">
              <span
                className={cn("opal-content-lg-input-mirror", config.titleFont)}
              >
                {editValue || "\u00A0"}
              </span>
              <input
                ref={inputRef}
                className={cn(
                  "opal-content-lg-input",
                  config.titleFont,
                  "text-text-04"
                )}
                value={editValue}
                onChange={(e) => setEditValue(e.target.value)}
                size={1}
                autoFocus
                onFocus={(e) => e.currentTarget.select()}
                onBlur={commit}
                onKeyDown={(e) => {
                  if (e.key === "Enter") commit();
                  if (e.key === "Escape") {
                    setEditValue(title);
                    setEditing(false);
                  }
                }}
                style={{ height: config.lineHeight }}
              />
            </div>
          ) : (
            <span
              className={cn(
                "opal-content-lg-title",
                config.titleFont,
                "text-text-04",
                editable && "cursor-pointer"
              )}
              onClick={editable ? startEditing : undefined}
              style={{ height: config.lineHeight }}
            >
              {title}
            </span>
          )}

          {editable && !editing && (
            <div
              className={cn(
                "opal-content-lg-edit-button",
                config.editButtonPadding
              )}
            >
              <Button
                icon={SvgEdit}
                prominence="internal"
                size={config.editButtonSize}
                tooltip="Edit"
                tooltipSide="right"
                onClick={startEditing}
              />
            </div>
          )}
        </div>

        {description && (
          <div className="opal-content-lg-description font-secondary-body text-text-03">
            {description}
          </div>
        )}
      </div>
    </div>
  );
}

export { ContentLg, type ContentLgProps, type ContentLgSizePreset };

@@ -1,279 +0,0 @@
"use client";

import { Button } from "@opal/components/buttons/Button/components";
import { Tag, type TagProps } from "@opal/components/Tag/components";
import type { SizeVariant } from "@opal/shared";
import SvgAlertCircle from "@opal/icons/alert-circle";
import SvgAlertTriangle from "@opal/icons/alert-triangle";
import SvgEdit from "@opal/icons/edit";
import SvgXOctagon from "@opal/icons/x-octagon";
import type { IconFunctionComponent } from "@opal/types";
import { cn } from "@opal/utils";
import { useRef, useState } from "react";

// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

type ContentMdSizePreset = "main-content" | "main-ui" | "secondary";

type ContentMdAuxIcon = "info-gray" | "info-blue" | "warning" | "error";

interface ContentMdPresetConfig {
  iconSize: string;
  iconContainerPadding: string;
  iconColorClass: string;
  titleFont: string;
  lineHeight: string;
  gap: string;
  /** Button `size` prop for the edit button. Uses the shared `SizeVariant` scale. */
  editButtonSize: SizeVariant;
  editButtonPadding: string;
  optionalFont: string;
  /** Aux icon size = lineHeight − 2 × p-0.5. */
  auxIconSize: string;
}

interface ContentMdProps {
  /** Optional icon component. */
  icon?: IconFunctionComponent;

  /** Main title text. */
  title: string;

  /** Optional description text below the title. */
  description?: string;

  /** Enable inline editing of the title. */
  editable?: boolean;

  /** Called when the user commits an edit. */
  onTitleChange?: (newTitle: string) => void;

  /** When `true`, renders "(Optional)" beside the title. */
  optional?: boolean;

  /** Auxiliary status icon rendered beside the title. */
  auxIcon?: ContentMdAuxIcon;

  /** Tag rendered beside the title. */
  tag?: TagProps;

  /** Size preset. Default: `"main-ui"`. */
  sizePreset?: ContentMdSizePreset;
}

// ---------------------------------------------------------------------------
// Presets
// ---------------------------------------------------------------------------

const CONTENT_MD_PRESETS: Record<ContentMdSizePreset, ContentMdPresetConfig> = {
  "main-content": {
    iconSize: "1rem",
    iconContainerPadding: "p-1",
    iconColorClass: "text-text-04",
    titleFont: "font-main-content-emphasis",
    lineHeight: "1.5rem",
    gap: "0.125rem",
    editButtonSize: "sm",
    editButtonPadding: "p-0",
    optionalFont: "font-main-content-muted",
    auxIconSize: "1.25rem",
  },
  "main-ui": {
    iconSize: "1rem",
    iconContainerPadding: "p-0.5",
    iconColorClass: "text-text-03",
    titleFont: "font-main-ui-action",
    lineHeight: "1.25rem",
    gap: "0.25rem",
    editButtonSize: "xs",
    editButtonPadding: "p-0",
    optionalFont: "font-main-ui-muted",
    auxIconSize: "1rem",
  },
  secondary: {
    iconSize: "0.75rem",
    iconContainerPadding: "p-0.5",
    iconColorClass: "text-text-04",
    titleFont: "font-secondary-action",
    lineHeight: "1rem",
    gap: "0.125rem",
    editButtonSize: "2xs",
    editButtonPadding: "p-0",
    optionalFont: "font-secondary-action",
    auxIconSize: "0.75rem",
  },
};

// ---------------------------------------------------------------------------
// ContentMd
// ---------------------------------------------------------------------------

const AUX_ICON_CONFIG: Record<
  ContentMdAuxIcon,
  { icon: IconFunctionComponent; colorClass: string }
> = {
  "info-gray": { icon: SvgAlertCircle, colorClass: "text-text-02" },
  "info-blue": { icon: SvgAlertCircle, colorClass: "text-status-info-05" },
  warning: { icon: SvgAlertTriangle, colorClass: "text-status-warning-05" },
  error: { icon: SvgXOctagon, colorClass: "text-status-error-05" },
};

function ContentMd({
  icon: Icon,
  title,
  description,
  editable,
  onTitleChange,
  optional,
  auxIcon,
  tag,
  sizePreset = "main-ui",
}: ContentMdProps) {
  const [editing, setEditing] = useState(false);
  const [editValue, setEditValue] = useState(title);
  const inputRef = useRef<HTMLInputElement>(null);

  const config = CONTENT_MD_PRESETS[sizePreset];

  function startEditing() {
    setEditValue(title);
    setEditing(true);
  }

  function commit() {
    const value = editValue.trim();
    if (value && value !== title) onTitleChange?.(value);
    setEditing(false);
  }

  return (
    <div className="opal-content-md" style={{ gap: config.gap }}>
      {Icon && (
        <div
          className={cn(
            "opal-content-md-icon-container shrink-0",
            config.iconContainerPadding
          )}
          style={{ minHeight: config.lineHeight }}
        >
          <Icon
            className={cn("opal-content-md-icon", config.iconColorClass)}
            style={{ width: config.iconSize, height: config.iconSize }}
          />
        </div>
      )}

      <div className="opal-content-md-body">
        <div className="opal-content-md-title-row">
          {editing ? (
            <div className="opal-content-md-input-sizer">
              <span
                className={cn("opal-content-md-input-mirror", config.titleFont)}
              >
                {editValue || "\u00A0"}
              </span>
              <input
                ref={inputRef}
                className={cn(
                  "opal-content-md-input",
                  config.titleFont,
                  "text-text-04"
                )}
                value={editValue}
                onChange={(e) => setEditValue(e.target.value)}
                size={1}
                autoFocus
                onFocus={(e) => e.currentTarget.select()}
                onBlur={commit}
                onKeyDown={(e) => {
                  if (e.key === "Enter") commit();
                  if (e.key === "Escape") {
                    setEditValue(title);
                    setEditing(false);
                  }
                }}
                style={{ height: config.lineHeight }}
              />
            </div>
          ) : (
            <span
              className={cn(
                "opal-content-md-title",
                config.titleFont,
                "text-text-04",
                editable && "cursor-pointer"
              )}
              onClick={editable ? startEditing : undefined}
              style={{ height: config.lineHeight }}
            >
              {title}
            </span>
          )}

          {optional && (
            <span
              className={cn(config.optionalFont, "text-text-03 shrink-0")}
              style={{ height: config.lineHeight }}
            >
              (Optional)
            </span>
          )}

          {auxIcon &&
            (() => {
              const { icon: AuxIcon, colorClass } = AUX_ICON_CONFIG[auxIcon];
              return (
                <div
                  className="opal-content-md-aux-icon shrink-0 p-0.5"
                  style={{ height: config.lineHeight }}
                >
                  <AuxIcon
                    className={colorClass}
                    style={{
                      width: config.auxIconSize,
                      height: config.auxIconSize,
                    }}
                  />
                </div>
              );
            })()}

          {tag && <Tag {...tag} />}

          {editable && !editing && (
            <div
              className={cn(
                "opal-content-md-edit-button",
                config.editButtonPadding
              )}
            >
              <Button
                icon={SvgEdit}
                prominence="internal"
                size={config.editButtonSize}
                tooltip="Edit"
                tooltipSide="right"
                onClick={startEditing}
              />
            </div>
          )}
        </div>

        {description && (
          <div className="opal-content-md-description font-secondary-body text-text-03">
            {description}
          </div>
        )}
      </div>
    </div>
  );
}

export {
  ContentMd,
  type ContentMdProps,
  type ContentMdSizePreset,
  type ContentMdAuxIcon,
};

@@ -1,129 +0,0 @@
"use client";

import type { IconFunctionComponent } from "@opal/types";
import { cn } from "@opal/utils";

// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

type ContentSmSizePreset = "main-content" | "main-ui" | "secondary";
type ContentSmOrientation = "vertical" | "inline" | "reverse";
type ContentSmProminence = "default" | "muted";

interface ContentSmPresetConfig {
  /** Icon width/height (CSS value). */
  iconSize: string;
  /** Tailwind padding class for the icon container. */
  iconContainerPadding: string;
  /** Tailwind font class for the title. */
  titleFont: string;
  /** Title line-height — also used as icon container min-height (CSS value). */
  lineHeight: string;
  /** Gap between icon container and title (CSS value). */
  gap: string;
}

/** Props for {@link ContentSm}. Does not support editing or descriptions. */
interface ContentSmProps {
  /** Optional icon component. */
  icon?: IconFunctionComponent;

  /** Main title text (read-only — editing is not supported). */
  title: string;

  /** Size preset. Default: `"main-ui"`. */
  sizePreset?: ContentSmSizePreset;

  /** Layout orientation. Default: `"inline"`. */
  orientation?: ContentSmOrientation;

  /** Title prominence. Default: `"default"`. */
  prominence?: ContentSmProminence;
}

// ---------------------------------------------------------------------------
// Presets
// ---------------------------------------------------------------------------

const CONTENT_SM_PRESETS: Record<ContentSmSizePreset, ContentSmPresetConfig> = {
  "main-content": {
    iconSize: "1rem",
    iconContainerPadding: "p-1",
    titleFont: "font-main-content-body",
    lineHeight: "1.5rem",
    gap: "0.125rem",
  },
  "main-ui": {
    iconSize: "1rem",
    iconContainerPadding: "p-0.5",
    titleFont: "font-main-ui-action",
    lineHeight: "1.25rem",
    gap: "0.25rem",
  },
  secondary: {
    iconSize: "0.75rem",
    iconContainerPadding: "p-0.5",
    titleFont: "font-secondary-action",
    lineHeight: "1rem",
    gap: "0.125rem",
  },
};

// ---------------------------------------------------------------------------
// ContentSm
// ---------------------------------------------------------------------------

function ContentSm({
  icon: Icon,
  title,
  sizePreset = "main-ui",
  orientation = "inline",
  prominence = "default",
}: ContentSmProps) {
  const config = CONTENT_SM_PRESETS[sizePreset];
  const titleColorClass =
    prominence === "muted" ? "text-text-03" : "text-text-04";

  return (
    <div
      className="opal-content-sm"
      data-orientation={orientation}
      style={{ gap: config.gap }}
    >
      {Icon && (
        <div
          className={cn(
            "opal-content-sm-icon-container shrink-0",
            config.iconContainerPadding
          )}
          style={{ minHeight: config.lineHeight }}
        >
          <Icon
            className="opal-content-sm-icon text-text-03"
            style={{ width: config.iconSize, height: config.iconSize }}
          />
        </div>
      )}

      <span
        className={cn(
          "opal-content-sm-title",
          config.titleFont,
          titleColorClass
        )}
        style={{ height: config.lineHeight }}
      >
        {title}
      </span>
    </div>
  );
}

export {
  ContentSm,
  type ContentSmProps,
  type ContentSmSizePreset,
  type ContentSmOrientation,
  type ContentSmProminence,
};

@@ -1,258 +0,0 @@
"use client";

import { Button } from "@opal/components/buttons/Button/components";
import type { SizeVariant } from "@opal/shared";
import SvgEdit from "@opal/icons/edit";
import type { IconFunctionComponent } from "@opal/types";
import { cn } from "@opal/utils";
import { useRef, useState } from "react";

// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

type ContentXlSizePreset = "headline" | "section";

interface ContentXlPresetConfig {
  /** Icon width/height (CSS value). */
  iconSize: string;
  /** Tailwind padding class for the icon container. */
  iconContainerPadding: string;
  /** More-icon-1 width/height (CSS value). */
  moreIcon1Size: string;
  /** Tailwind padding class for the more-icon-1 container. */
  moreIcon1ContainerPadding: string;
  /** More-icon-2 width/height (CSS value). */
  moreIcon2Size: string;
  /** Tailwind padding class for the more-icon-2 container. */
  moreIcon2ContainerPadding: string;
  /** Tailwind font class for the title. */
  titleFont: string;
  /** Title line-height — also used as icon container min-height (CSS value). */
  lineHeight: string;
  /** Button `size` prop for the edit button. Uses the shared `SizeVariant` scale. */
  editButtonSize: SizeVariant;
  /** Tailwind padding class for the edit button container. */
  editButtonPadding: string;
}

interface ContentXlProps {
  /** Optional icon component. */
  icon?: IconFunctionComponent;

  /** Main title text. */
  title: string;

  /** Optional description below the title. */
  description?: string;

  /** Enable inline editing of the title. */
  editable?: boolean;

  /** Called when the user commits an edit. */
  onTitleChange?: (newTitle: string) => void;

  /** Size preset. Default: `"headline"`. */
  sizePreset?: ContentXlSizePreset;

  /** Optional secondary icon rendered in the icon row. */
  moreIcon1?: IconFunctionComponent;

  /** Optional tertiary icon rendered in the icon row. */
  moreIcon2?: IconFunctionComponent;
}

// ---------------------------------------------------------------------------
// Presets
// ---------------------------------------------------------------------------

const CONTENT_XL_PRESETS: Record<ContentXlSizePreset, ContentXlPresetConfig> = {
  headline: {
    iconSize: "2rem",
    iconContainerPadding: "p-0.5",
    moreIcon1Size: "1rem",
    moreIcon1ContainerPadding: "p-0.5",
    moreIcon2Size: "2rem",
    moreIcon2ContainerPadding: "p-0.5",
    titleFont: "font-heading-h2",
    lineHeight: "2.25rem",
    editButtonSize: "md",
    editButtonPadding: "p-1",
  },
  section: {
    iconSize: "1.5rem",
    iconContainerPadding: "p-0.5",
    moreIcon1Size: "0.75rem",
    moreIcon1ContainerPadding: "p-0.5",
    moreIcon2Size: "1.5rem",
    moreIcon2ContainerPadding: "p-0.5",
    titleFont: "font-heading-h3",
    lineHeight: "1.75rem",
    editButtonSize: "sm",
    editButtonPadding: "p-0.5",
  },
};

// ---------------------------------------------------------------------------
// ContentXl
// ---------------------------------------------------------------------------

function ContentXl({
  sizePreset = "headline",
  icon: Icon,
  title,
  description,
  editable,
  onTitleChange,
  moreIcon1: MoreIcon1,
  moreIcon2: MoreIcon2,
}: ContentXlProps) {
  const [editing, setEditing] = useState(false);
  const [editValue, setEditValue] = useState(title);
  const inputRef = useRef<HTMLInputElement>(null);

  const config = CONTENT_XL_PRESETS[sizePreset];

  function startEditing() {
    setEditValue(title);
    setEditing(true);
  }

  function commit() {
    const value = editValue.trim();
    if (value && value !== title) onTitleChange?.(value);
    setEditing(false);
  }

  return (
    <div className="opal-content-xl">
      {(Icon || MoreIcon1 || MoreIcon2) && (
        <div className="opal-content-xl-icon-row">
          {Icon && (
            <div
              className={cn(
                "opal-content-xl-icon-container shrink-0",
                config.iconContainerPadding
              )}
              style={{ minHeight: config.lineHeight }}
            >
              <Icon
                className="opal-content-xl-icon"
                style={{ width: config.iconSize, height: config.iconSize }}
              />
            </div>
          )}

          {MoreIcon1 && (
            <div
              className={cn(
                "opal-content-xl-more-icon-container shrink-0",
                config.moreIcon1ContainerPadding
              )}
            >
              <MoreIcon1
                className="opal-content-xl-icon"
                style={{
                  width: config.moreIcon1Size,
                  height: config.moreIcon1Size,
                }}
              />
            </div>
          )}

          {MoreIcon2 && (
            <div
              className={cn(
                "opal-content-xl-more-icon-container shrink-0",
                config.moreIcon2ContainerPadding
              )}
            >
              <MoreIcon2
                className="opal-content-xl-icon"
                style={{
                  width: config.moreIcon2Size,
                  height: config.moreIcon2Size,
                }}
              />
            </div>
          )}
        </div>
      )}

      <div className="opal-content-xl-body">
        <div className="opal-content-xl-title-row">
          {editing ? (
            <div className="opal-content-xl-input-sizer">
              <span
                className={cn("opal-content-xl-input-mirror", config.titleFont)}
              >
                {editValue || "\u00A0"}
              </span>
              <input
                ref={inputRef}
                className={cn(
                  "opal-content-xl-input",
                  config.titleFont,
                  "text-text-04"
                )}
                value={editValue}
                onChange={(e) => setEditValue(e.target.value)}
                size={1}
                autoFocus
                onFocus={(e) => e.currentTarget.select()}
                onBlur={commit}
                onKeyDown={(e) => {
                  if (e.key === "Enter") commit();
                  if (e.key === "Escape") {
                    setEditValue(title);
                    setEditing(false);
                  }
                }}
                style={{ height: config.lineHeight }}
              />
            </div>
          ) : (
            <span
              className={cn(
                "opal-content-xl-title",
                config.titleFont,
                "text-text-04",
                editable && "cursor-pointer"
              )}
              onClick={editable ? startEditing : undefined}
              style={{ height: config.lineHeight }}
            >
              {title}
            </span>
          )}

          {editable && !editing && (
            <div
              className={cn(
                "opal-content-xl-edit-button",
                config.editButtonPadding
              )}
            >
              <Button
                icon={SvgEdit}
                prominence="internal"
                size={config.editButtonSize}
                tooltip="Edit"
                tooltipSide="right"
                onClick={startEditing}
              />
            </div>
          )}
        </div>

        {description && (
          <div className="opal-content-xl-description font-secondary-body text-text-03">
            {description}
          </div>
        )}
      </div>
    </div>
  );
}

export { ContentXl, type ContentXlProps, type ContentXlSizePreset };

@@ -8,21 +8,14 @@ A two-axis layout component for displaying icon + title + description rows. Rout

### `sizePreset` — controls sizing (icon, padding, gap, font)

#### ContentXl presets (variant="heading")

| Preset | Icon | Icon padding | moreIcon1 | mI1 padding | moreIcon2 | mI2 padding | Title font | Line-height |
|---|---|---|---|---|---|---|---|---|
| `headline` | 2rem (32px) | `p-0.5` (2px) | 1rem (16px) | `p-0.5` (2px) | 2rem (32px) | `p-0.5` (2px) | `font-heading-h2` | 2.25rem (36px) |
| `section` | 1.5rem (24px) | `p-0.5` (2px) | 0.75rem (12px) | `p-0.5` (2px) | 1.5rem (24px) | `p-0.5` (2px) | `font-heading-h3` | 1.75rem (28px) |

#### ContentLg presets (variant="section")
#### HeadingLayout presets

| Preset | Icon | Icon padding | Gap | Title font | Line-height |
|---|---|---|---|---|---|
| `headline` | 2rem (32px) | `p-0.5` (2px) | 0.25rem (4px) | `font-heading-h2` | 2.25rem (36px) |
| `section` | 1.25rem (20px) | `p-1` (4px) | 0rem | `font-heading-h3-muted` | 1.75rem (28px) |
| `section` | 1.25rem (20px) | `p-1` (4px) | 0rem | `font-heading-h3` | 1.75rem (28px) |

#### ContentMd presets
#### LabelLayout presets

| Preset | Icon | Icon padding | Icon color | Gap | Title font | Line-height |
|---|---|---|---|---|---|---|

@@ -36,18 +29,18 @@ A two-axis layout component for displaying icon + title + description rows. Rout

| variant | Description |
|---|---|
| `heading` | Icon on **top** (flex-col) — ContentXl |
| `section` | Icon **inline** (flex-row) — ContentLg or ContentMd |
| `body` | Body text layout — ContentSm |
| `heading` | Icon on **top** (flex-col) — HeadingLayout |
| `section` | Icon **inline** (flex-row) — HeadingLayout or LabelLayout |
| `body` | Body text layout — BodyLayout (future) |

### Valid Combinations -> Internal Routing

| sizePreset | variant | Routes to |
|---|---|---|
| `headline` / `section` | `heading` | **ContentXl** (icon on top) |
| `headline` / `section` | `section` | **ContentLg** (icon inline) |
| `main-content` / `main-ui` / `secondary` | `section` | **ContentMd** |
| `main-content` / `main-ui` / `secondary` | `body` | **ContentSm** |
| `headline` / `section` | `heading` | **HeadingLayout** (icon on top) |
| `headline` / `section` | `section` | **HeadingLayout** (icon inline) |
| `main-content` / `main-ui` / `secondary` | `section` | **LabelLayout** |
| `main-content` / `main-ui` / `secondary` | `body` | BodyLayout (future) |

Invalid combinations (e.g. `sizePreset="headline" + variant="body"`) are excluded at the type level.
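
That exclusion is produced by the `ContentProps` discriminated union (shown later in this diff): each union member pins `variant` to the values its `sizePreset` group supports, so a cross-pairing matches no member and fails to type-check. A minimal sketch of the mechanism, with the types stripped down from the real ones (`ok` and `bad` are illustrative names):

```ts
// Stripped-down version of the ContentProps union; the real members also
// carry icon/title/description props.
type HeadingContent = {
  sizePreset?: "headline" | "section";
  variant?: "heading" | "section";
};
type BodyContent = {
  sizePreset: "main-content" | "main-ui" | "secondary";
  variant: "body";
};
type Props = HeadingContent | BodyContent;

export const ok: Props = { sizePreset: "headline", variant: "heading" };
// @ts-expect-error: "headline" never pairs with variant="body"
export const bad: Props = { sizePreset: "headline", variant: "body" };
```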

@@ -62,20 +55,14 @@ Invalid combinations (e.g. `sizePreset="headline" + variant="body"`) are exclude
| `description` | `string` | — | Optional description below the title |
| `editable` | `boolean` | `false` | Enable inline editing of the title |
| `onTitleChange` | `(newTitle: string) => void` | — | Called when user commits an edit |
| `moreIcon1` | `IconFunctionComponent` | — | Secondary icon in icon row (ContentXl only) |
| `moreIcon2` | `IconFunctionComponent` | — | Tertiary icon in icon row (ContentXl only) |

## Internal Layouts

### ContentXl
### HeadingLayout

For `headline` / `section` presets with `variant="heading"`. Icon row on top (flex-col), supports `moreIcon1` and `moreIcon2` in the icon row. Description is always `font-secondary-body text-text-03`.
For `headline` / `section` presets. Supports `variant="heading"` (icon on top) and `variant="section"` (icon inline). Description is always `font-secondary-body text-text-03`.

### ContentLg

For `headline` / `section` presets with `variant="section"`. Always inline (flex-row). Description is always `font-secondary-body text-text-03`.

### ContentMd
### LabelLayout

For `main-content` / `main-ui` / `secondary` presets. Always inline. Both `icon` and `description` are optional. Description is always `font-secondary-body text-text-03`.

@@ -85,7 +72,7 @@ For `main-content` / `main-ui` / `secondary` presets. Always inline. Both `icon`
import { Content } from "@opal/layouts";
import SvgSearch from "@opal/icons/search";

// ContentXl — headline, icon on top
// HeadingLayout — headline, icon on top
<Content
  icon={SvgSearch}
  sizePreset="headline"
@@ -94,17 +81,7 @@ import SvgSearch from "@opal/icons/search";
  description="Configure your agent's behavior"
/>

// ContentXl — with more icons
<Content
  icon={SvgSearch}
  sizePreset="headline"
  variant="heading"
  title="Agent Settings"
  moreIcon1={SvgStar}
  moreIcon2={SvgLock}
/>

// ContentLg — section, icon inline
// HeadingLayout — section, icon inline
<Content
  icon={SvgSearch}
  sizePreset="section"
@@ -113,7 +90,7 @@ import SvgSearch from "@opal/icons/search";
  description="Connected integrations"
/>

// ContentMd — with icon and description
// LabelLayout — with icon and description
<Content
  icon={SvgSearch}
  sizePreset="main-ui"
@@ -121,7 +98,7 @@ import SvgSearch from "@opal/icons/search";
  description="Agent system prompt"
/>

// ContentMd — title only (no icon, no description)
// LabelLayout — title only (no icon, no description)
<Content
  sizePreset="main-content"
  title="Featured Agent"

@@ -1,24 +1,19 @@
import "@opal/layouts/Content/styles.css";
import {
  ContentSm,
  type ContentSmOrientation,
  type ContentSmProminence,
} from "@opal/layouts/Content/ContentSm";
  BodyLayout,
  type BodyOrientation,
  type BodyProminence,
} from "@opal/layouts/Content/BodyLayout";
import {
  ContentXl,
  type ContentXlProps,
} from "@opal/layouts/Content/ContentXl";
  HeadingLayout,
  type HeadingLayoutProps,
} from "@opal/layouts/Content/HeadingLayout";
import {
  ContentLg,
  type ContentLgProps,
} from "@opal/layouts/Content/ContentLg";
import {
  ContentMd,
  type ContentMdProps,
} from "@opal/layouts/Content/ContentMd";
  LabelLayout,
  type LabelLayoutProps,
} from "@opal/layouts/Content/LabelLayout";
import type { TagProps } from "@opal/components/Tag/components";
import type { IconFunctionComponent } from "@opal/types";
import { widthVariants, type WidthVariant } from "@opal/shared";

// ---------------------------------------------------------------------------
// Shared types
@@ -48,42 +43,20 @@ interface ContentBaseProps {

  /** Called when the user commits an edit. */
  onTitleChange?: (newTitle: string) => void;

  /**
   * Width preset controlling the component's horizontal size.
   * Uses the shared `WidthVariant` scale from `@opal/shared`.
   *
   * - `"auto"` — Shrink-wraps to content width
   * - `"full"` — Stretches to fill the parent's width
   *
   * @default "auto"
   */
  widthVariant?: WidthVariant;
}

// ---------------------------------------------------------------------------
// Discriminated union: valid sizePreset × variant combinations
// ---------------------------------------------------------------------------

type XlContentProps = ContentBaseProps & {
type HeadingContentProps = ContentBaseProps & {
  /** Size preset. Default: `"headline"`. */
  sizePreset?: "headline" | "section";
  /** Variant. Default: `"heading"` for heading-eligible presets. */
  variant?: "heading";
  /** Optional secondary icon rendered in the icon row (ContentXl only). */
  moreIcon1?: IconFunctionComponent;
  /** Optional tertiary icon rendered in the icon row (ContentXl only). */
  moreIcon2?: IconFunctionComponent;
  variant?: "heading" | "section";
};

type LgContentProps = ContentBaseProps & {
  /** Size preset. Default: `"headline"`. */
  sizePreset?: "headline" | "section";
  /** Variant. */
  variant: "section";
};

type MdContentProps = ContentBaseProps & {
type LabelContentProps = ContentBaseProps & {
  sizePreset: "main-content" | "main-ui" | "secondary";
  variant?: "section";
  /** When `true`, renders "(Optional)" beside the title in the muted font variant. */
@@ -94,94 +67,63 @@ type MdContentProps = ContentBaseProps & {
  tag?: TagProps;
};

/** ContentSm does not support descriptions or inline editing. */
type SmContentProps = Omit<
/** BodyLayout does not support descriptions or inline editing. */
type BodyContentProps = Omit<
  ContentBaseProps,
  "description" | "editable" | "onTitleChange"
> & {
  sizePreset: "main-content" | "main-ui" | "secondary";
  variant: "body";
  /** Layout orientation. Default: `"inline"`. */
  orientation?: ContentSmOrientation;
  orientation?: BodyOrientation;
  /** Title prominence. Default: `"default"`. */
  prominence?: ContentSmProminence;
  prominence?: BodyProminence;
};

type ContentProps =
  | XlContentProps
  | LgContentProps
  | MdContentProps
  | SmContentProps;
type ContentProps = HeadingContentProps | LabelContentProps | BodyContentProps;

// ---------------------------------------------------------------------------
// Content — routes to the appropriate internal layout
// ---------------------------------------------------------------------------

function Content(props: ContentProps) {
  const {
    sizePreset = "headline",
    variant = "heading",
    widthVariant = "auto",
    ...rest
  } = props;
  const { sizePreset = "headline", variant = "heading", ...rest } = props;

  const widthClass = widthVariants[widthVariant];

  let layout: React.ReactNode = null;

  // ContentXl / ContentLg: headline/section presets
  // Heading layout: headline/section presets with heading/section variant
  if (sizePreset === "headline" || sizePreset === "section") {
    if (variant === "heading") {
      layout = (
        <ContentXl
          sizePreset={sizePreset}
          {...(rest as Omit<ContentXlProps, "sizePreset">)}
        />
      );
    } else {
      layout = (
        <ContentLg
          sizePreset={sizePreset}
          {...(rest as Omit<ContentLgProps, "sizePreset">)}
        />
      );
    }
  }

  // ContentMd: main-content/main-ui/secondary with section variant
  else if (variant === "section" || variant === "heading") {
    layout = (
      <ContentMd
    return (
      <HeadingLayout
        sizePreset={sizePreset}
        {...(rest as Omit<ContentMdProps, "sizePreset">)}
        variant={variant as HeadingLayoutProps["variant"]}
        {...rest}
      />
    );
  }

  // ContentSm: main-content/main-ui/secondary with body variant
  else if (variant === "body") {
    layout = (
      <ContentSm
  // Label layout: main-content/main-ui/secondary with section variant
  if (variant === "section" || variant === "heading") {
    return (
      <LabelLayout
        sizePreset={sizePreset}
        {...(rest as Omit<LabelLayoutProps, "sizePreset">)}
      />
    );
  }

  // Body layout: main-content/main-ui/secondary with body variant
  if (variant === "body") {
    return (
      <BodyLayout
        sizePreset={sizePreset}
        {...(rest as Omit<
          React.ComponentProps<typeof ContentSm>,
          React.ComponentProps<typeof BodyLayout>,
          "sizePreset"
        >)}
      />
    );
  }

  // This case should NEVER be hit.
  if (!layout)
    throw new Error(
      `Content: no layout matched for sizePreset="${sizePreset}" variant="${variant}"`
    );

  // "auto" → return layout directly (a block div with w-auto still
  // stretches to its parent, defeating shrink-to-content).
  if (widthVariant === "auto") return layout;

  return <div className={widthClass}>{layout}</div>;
  return null;
}

// ---------------------------------------------------------------------------
@@ -193,8 +135,7 @@ export {
  type ContentProps,
  type SizePreset,
  type ContentVariant,
  type XlContentProps,
  type LgContentProps,
  type MdContentProps,
  type SmContentProps,
  type HeadingContentProps,
  type LabelContentProps,
  type BodyContentProps,
};

@@ -1,145 +1,41 @@
/* ===========================================================================
   Content — ContentXl
/* ---------------------------------------------------------------------------
   Content — HeadingLayout

   Icon row on top (flex-col). Icon row contains main icon + optional
   moreIcon1 / moreIcon2 in a flex-row.
   Two icon placement modes (driven by variant):
     left (variant="section") : flex-row — icon beside content
     top (variant="heading") : flex-col — icon above content

   Sizing (icon size, gap, padding, font, line-height) is driven by the
   sizePreset prop via inline styles + Tailwind classes in the component.
   =========================================================================== */

/* ---------------------------------------------------------------------------
   Layout — flex-col (icon row above body)
   --------------------------------------------------------------------------- */

.opal-content-xl {
  @apply flex flex-col items-start;
}

/* ---------------------------------------------------------------------------
   Icon row — flex-row containing main icon + more icons
   Layout — icon placement
   --------------------------------------------------------------------------- */

.opal-content-xl-icon-row {
  @apply flex flex-row items-center;
.opal-content-heading {
  @apply flex items-start;
}

/* ---------------------------------------------------------------------------
   Icons
   --------------------------------------------------------------------------- */

.opal-content-xl-icon-container {
  display: flex;
  align-items: center;
  justify-content: center;
.opal-content-heading[data-icon-placement="left"] {
  @apply flex-row;
}

.opal-content-xl-more-icon-container {
  display: flex;
  align-items: center;
  justify-content: center;
}

.opal-content-xl-icon {
  color: var(--text-04);
}

/* ---------------------------------------------------------------------------
   Body column
   --------------------------------------------------------------------------- */

.opal-content-xl-body {
  @apply flex flex-1 flex-col items-start;
  min-width: 0.0625rem;
}

/* ---------------------------------------------------------------------------
   Title row — title (or input) + edit button
   --------------------------------------------------------------------------- */

.opal-content-xl-title-row {
  @apply flex items-center w-full;
  gap: 0.25rem;
}

.opal-content-xl-title {
  @apply text-left overflow-hidden;
  display: -webkit-box;
  -webkit-box-orient: vertical;
  -webkit-line-clamp: 1;
  padding: 0 0.125rem;
  min-width: 0.0625rem;
}

.opal-content-xl-input-sizer {
  display: inline-grid;
  align-items: stretch;
}

.opal-content-xl-input-sizer > * {
  grid-area: 1 / 1;
  padding: 0 0.125rem;
  min-width: 0.0625rem;
}

.opal-content-xl-input-mirror {
  visibility: hidden;
  white-space: pre;
}

.opal-content-xl-input {
  @apply bg-transparent outline-none border-none;
}

/* ---------------------------------------------------------------------------
   Edit button — visible only on hover of the outer container
   --------------------------------------------------------------------------- */

.opal-content-xl-edit-button {
  @apply opacity-0 transition-opacity shrink-0;
}

.opal-content-xl:hover .opal-content-xl-edit-button {
  @apply opacity-100;
}

/* ---------------------------------------------------------------------------
   Description
   --------------------------------------------------------------------------- */

.opal-content-xl-description {
  @apply text-left w-full;
  padding: 0 0.125rem;
}

/* ===========================================================================
   Content — ContentLg

   Always inline (flex-row) — icon beside content.

   Sizing (icon size, gap, padding, font, line-height) is driven by the
   sizePreset prop via inline styles + Tailwind classes in the component.
   =========================================================================== */

/* ---------------------------------------------------------------------------
   Layout
   --------------------------------------------------------------------------- */

.opal-content-lg {
  @apply flex flex-row items-start;
.opal-content-heading[data-icon-placement="top"] {
  @apply flex-col;
}
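
These placement selectors hang off a single `data-icon-placement` attribute rather than two root classes, so the same `opal-content-heading` block serves both variants. The attribute is presumably set from the `variant` prop inside HeadingLayout; a sketch of that wiring (the JSX is illustrative and not shown in this diff, only the left/top mapping is taken from the comment at the top of this file):

```tsx
import type { ReactNode } from "react";

// Illustrative only: the real HeadingLayout markup is not part of this diff.
// The mapping mirrors the CSS comment above:
//   variant="section" -> "left" (flex-row), variant="heading" -> "top" (flex-col).
function HeadingRoot(props: {
  variant: "heading" | "section";
  children: ReactNode;
}) {
  return (
    <div
      className="opal-content-heading"
      data-icon-placement={props.variant === "heading" ? "top" : "left"}
    >
      {props.children}
    </div>
  );
}
```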

/* ---------------------------------------------------------------------------
   Icon
   --------------------------------------------------------------------------- */

.opal-content-lg-icon-container {
.opal-content-heading-icon-container {
  display: flex;
  align-items: center;
  justify-content: center;
}

.opal-content-lg-icon {
.opal-content-heading-icon {
  color: var(--text-04);
}

@@ -147,7 +43,7 @@
   Body column
   --------------------------------------------------------------------------- */

.opal-content-lg-body {
.opal-content-heading-body {
  @apply flex flex-1 flex-col items-start;
  min-width: 0.0625rem;
}
@@ -156,12 +52,12 @@
   Title row — title (or input) + edit button
   --------------------------------------------------------------------------- */

.opal-content-lg-title-row {
.opal-content-heading-title-row {
  @apply flex items-center w-full;
  gap: 0.25rem;
}

.opal-content-lg-title {
.opal-content-heading-title {
  @apply text-left overflow-hidden;
  display: -webkit-box;
  -webkit-box-orient: vertical;
@@ -170,23 +66,23 @@
  min-width: 0.0625rem;
}

.opal-content-lg-input-sizer {
.opal-content-heading-input-sizer {
  display: inline-grid;
  align-items: stretch;
}

.opal-content-lg-input-sizer > * {
.opal-content-heading-input-sizer > * {
  grid-area: 1 / 1;
  padding: 0 0.125rem;
  min-width: 0.0625rem;
}

.opal-content-lg-input-mirror {
.opal-content-heading-input-mirror {
  visibility: hidden;
  white-space: pre;
}

.opal-content-lg-input {
.opal-content-heading-input {
  @apply bg-transparent outline-none border-none;
}

@@ -194,11 +90,11 @@
   Edit button — visible only on hover of the outer container
   --------------------------------------------------------------------------- */

.opal-content-lg-edit-button {
.opal-content-heading-edit-button {
  @apply opacity-0 transition-opacity shrink-0;
}

.opal-content-lg:hover .opal-content-lg-edit-button {
.opal-content-heading:hover .opal-content-heading-edit-button {
  @apply opacity-100;
}

@@ -206,13 +102,13 @@
   Description
   --------------------------------------------------------------------------- */

.opal-content-lg-description {
.opal-content-heading-description {
  @apply text-left w-full;
  padding: 0 0.125rem;
}

/* ===========================================================================
   Content — ContentMd
   Content — LabelLayout

   Always inline (flex-row). Icon color varies per sizePreset and is applied
   via Tailwind class from the component.
@@ -222,7 +118,7 @@
   Layout
   --------------------------------------------------------------------------- */

.opal-content-md {
.opal-content-label {
  @apply flex flex-row items-start;
}

@@ -230,7 +126,7 @@
   Icon
   --------------------------------------------------------------------------- */

.opal-content-md-icon-container {
.opal-content-label-icon-container {
  display: flex;
  align-items: center;
  justify-content: center;
@@ -240,7 +136,7 @@
   Body column
   --------------------------------------------------------------------------- */

.opal-content-md-body {
.opal-content-label-body {
  @apply flex flex-1 flex-col items-start;
  min-width: 0.0625rem;
}
@@ -249,12 +145,12 @@
   Title row — title (or input) + edit button
   --------------------------------------------------------------------------- */

.opal-content-md-title-row {
.opal-content-label-title-row {
  @apply flex items-center w-full;
  gap: 0.25rem;
}

.opal-content-md-title {
.opal-content-label-title {
  @apply text-left overflow-hidden;
  display: -webkit-box;
  -webkit-box-orient: vertical;
@@ -263,23 +159,23 @@
  min-width: 0.0625rem;
}

.opal-content-md-input-sizer {
.opal-content-label-input-sizer {
  display: inline-grid;
  align-items: stretch;
}

.opal-content-md-input-sizer > * {
.opal-content-label-input-sizer > * {
  grid-area: 1 / 1;
  padding: 0 0.125rem;
  min-width: 0.0625rem;
}

.opal-content-md-input-mirror {
.opal-content-label-input-mirror {
  visibility: hidden;
  white-space: pre;
}

.opal-content-md-input {
.opal-content-label-input {
  @apply bg-transparent outline-none border-none;
}

@@ -287,7 +183,7 @@
   Aux icon
   --------------------------------------------------------------------------- */

.opal-content-md-aux-icon {
.opal-content-label-aux-icon {
  display: flex;
  align-items: center;
  justify-content: center;
@@ -297,11 +193,11 @@
   Edit button — visible only on hover of the outer container
   --------------------------------------------------------------------------- */

.opal-content-md-edit-button {
.opal-content-label-edit-button {
  @apply opacity-0 transition-opacity shrink-0;
}

.opal-content-md:hover .opal-content-md-edit-button {
.opal-content-label:hover .opal-content-label-edit-button {
  @apply opacity-100;
}

@@ -309,13 +205,13 @@
   Description
   --------------------------------------------------------------------------- */

.opal-content-md-description {
.opal-content-label-description {
  @apply text-left w-full;
  padding: 0 0.125rem;
}

/* ===========================================================================
   Content — ContentSm
   Content — BodyLayout

   Three orientation modes (driven by orientation prop):
     inline : flex-row — icon left, title right
@@ -330,19 +226,19 @@
   Layout — orientation
   --------------------------------------------------------------------------- */

.opal-content-sm {
.opal-content-body {
  @apply flex items-start;
}

.opal-content-sm[data-orientation="inline"] {
.opal-content-body[data-orientation="inline"] {
  @apply flex-row;
}

.opal-content-sm[data-orientation="vertical"] {
.opal-content-body[data-orientation="vertical"] {
  @apply flex-col;
}

.opal-content-sm[data-orientation="reverse"] {
.opal-content-body[data-orientation="reverse"] {
  @apply flex-row-reverse;
}
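
The three orientation selectors above are driven from the public API: with `variant="body"`, `Content` accepts an `orientation` prop (see `BodyContentProps` earlier in this diff), which presumably lands on this `data-orientation` attribute. An illustrative usage sketch; the title text and icon choice are made up, the prop names come from the diff:

```tsx
import { Content } from "@opal/layouts";
import SvgSearch from "@opal/icons/search";

// "reverse" selects the flex-row-reverse rule above, so the icon renders
// to the right of the title. Values shown are illustrative only.
export const Example = () => (
  <Content
    sizePreset="secondary"
    variant="body"
    orientation="reverse"
    icon={SvgSearch}
    title="Updated 2 days ago"
  />
);
```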

@@ -350,7 +246,7 @@
   Icon
   --------------------------------------------------------------------------- */

.opal-content-sm-icon-container {
.opal-content-body-icon-container {
  display: flex;
  align-items: center;
  justify-content: center;
@@ -360,7 +256,7 @@
   Title
   --------------------------------------------------------------------------- */

.opal-content-sm-title {
.opal-content-body-title {
  @apply text-left overflow-hidden;
  display: -webkit-box;
  -webkit-box-orient: vertical;

@@ -58,7 +58,7 @@ function ContentAction({

  return (
    <div className="flex flex-row items-stretch w-full">
      <div className={cn("flex-1 min-w-0 self-center", padding)}>
      <div className={cn("flex-1 min-w-0", padding)}>
        <Content {...contentProps} />
      </div>
      {rightChildren && (

@@ -8,7 +8,7 @@ Layout primitives for composing icon + title + description rows. These component

| Component | Description | Docs |
|---|---|---|
| [`Content`](./Content/README.md) | Icon + title + description row. Routes to an internal layout (`ContentLg`, `ContentMd`, or `ContentSm`) based on `sizePreset` and `variant`. | [Content README](./Content/README.md) |
| [`Content`](./Content/README.md) | Icon + title + description row. Routes to an internal layout (`HeadingLayout`, `LabelLayout`, or `BodyLayout`) based on `sizePreset` and `variant`. | [Content README](./Content/README.md) |
| [`ContentAction`](./ContentAction/README.md) | Wraps `Content` in a flex-row with an optional `rightChildren` slot for action buttons. Adds padding alignment via the shared `SizeVariant` scale. | [ContentAction README](./ContentAction/README.md) |

## Quick Start
@@ -88,7 +88,6 @@ These are not exported — `Content` routes to them automatically:

| Layout | Used when | File |
|---|---|---|
| `ContentXl` | `sizePreset` is `headline` or `section` with `variant="heading"` | `Content/ContentXl.tsx` |
| `ContentLg` | `sizePreset` is `headline` or `section` with `variant="section"` | `Content/ContentLg.tsx` |
| `ContentMd` | `sizePreset` is `main-content`, `main-ui`, or `secondary` with `variant="section"` | `Content/ContentMd.tsx` |
| `ContentSm` | `variant="body"` | `Content/ContentSm.tsx` |
| `HeadingLayout` | `sizePreset` is `headline` or `section` | `Content/HeadingLayout.tsx` |
| `LabelLayout` | `sizePreset` is `main-content`, `main-ui`, or `secondary` with `variant="section"` | `Content/LabelLayout.tsx` |
| `BodyLayout` | `variant="body"` | `Content/BodyLayout.tsx` |

@@ -16,7 +16,7 @@
// - Interactive.Container (height + min-width + padding)
// - Button (icon sizing)
// - ContentAction (padding only)
// - Content (ContentLg / ContentMd) (edit-button size)
// - Content (HeadingLayout / LabelLayout) (edit-button size)
// ---------------------------------------------------------------------------

/**
@@ -50,31 +50,4 @@ const sizeVariants = {
/** Named size preset key. */
type SizeVariant = keyof typeof sizeVariants;

// ---------------------------------------------------------------------------
// Width Variants
//
// A named scale of width presets that map to Tailwind width utility classes.
//
// Consumers:
// - Interactive.Container (widthVariant)
// - Button (width)
// - Content (widthVariant)
// ---------------------------------------------------------------------------

/**
 * Width-variant scale.
 *
 * | Key    | Tailwind class |
 * |--------|----------------|
 * | `auto` | `w-auto`       |
 * | `full` | `w-full`       |
 */
const widthVariants = {
  auto: "w-auto",
  full: "w-full",
} as const;

/** Named width preset key. */
type WidthVariant = keyof typeof widthVariants;

export { sizeVariants, type SizeVariant, widthVariants, type WidthVariant };
export { sizeVariants, type SizeVariant };
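
The surviving `sizeVariants` scale (and the `widthVariants` scale deleted above) both rely on an `as const` object plus `keyof typeof`, which keeps the Tailwind class strings and the preset-key union in sync without a hand-maintained type. A small sketch of the pattern; `demoVariants`, `DemoVariant`, and `demoClass` are hypothetical names, not part of `@opal/shared`:

```ts
// Hypothetical illustration of the scale pattern used by sizeVariants
// (and by widthVariants before its removal in this diff).
const demoVariants = {
  auto: "w-auto",
  full: "w-full",
} as const;

// Derived key union ("auto" | "full"): adding a key updates it automatically.
type DemoVariant = keyof typeof demoVariants;

export function demoClass(variant: DemoVariant): string {
  return demoVariants[variant];
}
```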

web/package-lock.json (generated; 220 changed lines)
@@ -4127,9 +4127,9 @@
      }
    },
    "node_modules/@rollup/rollup-android-arm-eabi": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.59.0.tgz",
      "integrity": "sha512-upnNBkA6ZH2VKGcBj9Fyl9IGNPULcjXRlg0LLeaioQWueH30p6IXtJEbKAgvyv+mJaMxSm1l6xwDXYjpEMiLMg==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.5.tgz",
      "integrity": "sha512-8c1vW4ocv3UOMp9K+gToY5zL2XiiVw3k7f1ksf4yO1FlDFQ1C2u72iACFnSOceJFsWskc2WZNqeRhFRPzv+wtQ==",
      "cpu": [
        "arm"
      ],
@@ -4140,9 +4140,9 @@
      ]
    },
    "node_modules/@rollup/rollup-android-arm64": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.59.0.tgz",
      "integrity": "sha512-hZ+Zxj3SySm4A/DylsDKZAeVg0mvi++0PYVceVyX7hemkw7OreKdCvW2oQ3T1FMZvCaQXqOTHb8qmBShoqk69Q==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.5.tgz",
      "integrity": "sha512-mQGfsIEFcu21mvqkEKKu2dYmtuSZOBMmAl5CFlPGLY94Vlcm+zWApK7F/eocsNzp8tKmbeBP8yXyAbx0XHsFNA==",
      "cpu": [
        "arm64"
      ],
@@ -4153,9 +4153,7 @@
      ]
    },
    "node_modules/@rollup/rollup-darwin-arm64": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.59.0.tgz",
      "integrity": "sha512-W2Psnbh1J8ZJw0xKAd8zdNgF9HRLkdWwwdWqubSVk0pUuQkoHnv7rx4GiF9rT4t5DIZGAsConRE3AxCdJ4m8rg==",
      "version": "4.52.5",
      "cpu": [
        "arm64"
      ],
@@ -4166,9 +4164,9 @@
      ]
    },
    "node_modules/@rollup/rollup-darwin-x64": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.59.0.tgz",
      "integrity": "sha512-ZW2KkwlS4lwTv7ZVsYDiARfFCnSGhzYPdiOU4IM2fDbL+QGlyAbjgSFuqNRbSthybLbIJ915UtZBtmuLrQAT/w==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.5.tgz",
      "integrity": "sha512-W901Pla8Ya95WpxDn//VF9K9u2JbocwV/v75TE0YIHNTbhqUTv9w4VuQ9MaWlNOkkEfFwkdNhXgcLqPSmHy0fA==",
      "cpu": [
        "x64"
      ],
@@ -4179,9 +4177,9 @@
      ]
    },
    "node_modules/@rollup/rollup-freebsd-arm64": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.59.0.tgz",
      "integrity": "sha512-EsKaJ5ytAu9jI3lonzn3BgG8iRBjV4LxZexygcQbpiU0wU0ATxhNVEpXKfUa0pS05gTcSDMKpn3Sx+QB9RlTTA==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.5.tgz",
      "integrity": "sha512-QofO7i7JycsYOWxe0GFqhLmF6l1TqBswJMvICnRUjqCx8b47MTo46W8AoeQwiokAx3zVryVnxtBMcGcnX12LvA==",
      "cpu": [
        "arm64"
      ],
@@ -4192,9 +4190,9 @@
      ]
    },
    "node_modules/@rollup/rollup-freebsd-x64": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.59.0.tgz",
      "integrity": "sha512-d3DuZi2KzTMjImrxoHIAODUZYoUUMsuUiY4SRRcJy6NJoZ6iIqWnJu9IScV9jXysyGMVuW+KNzZvBLOcpdl3Vg==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.5.tgz",
      "integrity": "sha512-jr21b/99ew8ujZubPo9skbrItHEIE50WdV86cdSoRkKtmWa+DDr6fu2c/xyRT0F/WazZpam6kk7IHBerSL7LDQ==",
      "cpu": [
        "x64"
      ],
@@ -4205,9 +4203,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-arm-gnueabihf": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.59.0.tgz",
      "integrity": "sha512-t4ONHboXi/3E0rT6OZl1pKbl2Vgxf9vJfWgmUoCEVQVxhW6Cw/c8I6hbbu7DAvgp82RKiH7TpLwxnJeKv2pbsw==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.5.tgz",
      "integrity": "sha512-PsNAbcyv9CcecAUagQefwX8fQn9LQ4nZkpDboBOttmyffnInRy8R8dSg6hxxl2Re5QhHBf6FYIDhIj5v982ATQ==",
      "cpu": [
        "arm"
      ],
@@ -4218,9 +4216,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-arm-musleabihf": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.59.0.tgz",
      "integrity": "sha512-CikFT7aYPA2ufMD086cVORBYGHffBo4K8MQ4uPS/ZnY54GKj36i196u8U+aDVT2LX4eSMbyHtyOh7D7Zvk2VvA==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.5.tgz",
      "integrity": "sha512-Fw4tysRutyQc/wwkmcyoqFtJhh0u31K+Q6jYjeicsGJJ7bbEq8LwPWV/w0cnzOqR2m694/Af6hpFayLJZkG2VQ==",
      "cpu": [
        "arm"
      ],
@@ -4231,9 +4229,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-arm64-gnu": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.59.0.tgz",
      "integrity": "sha512-jYgUGk5aLd1nUb1CtQ8E+t5JhLc9x5WdBKew9ZgAXg7DBk0ZHErLHdXM24rfX+bKrFe+Xp5YuJo54I5HFjGDAA==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.5.tgz",
      "integrity": "sha512-a+3wVnAYdQClOTlyapKmyI6BLPAFYs0JM8HRpgYZQO02rMR09ZcV9LbQB+NL6sljzG38869YqThrRnfPMCDtZg==",
      "cpu": [
        "arm64"
      ],
@@ -4244,9 +4242,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-arm64-musl": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.59.0.tgz",
      "integrity": "sha512-peZRVEdnFWZ5Bh2KeumKG9ty7aCXzzEsHShOZEFiCQlDEepP1dpUl/SrUNXNg13UmZl+gzVDPsiCwnV1uI0RUA==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.5.tgz",
      "integrity": "sha512-AvttBOMwO9Pcuuf7m9PkC1PUIKsfaAJ4AYhy944qeTJgQOqJYJ9oVl2nYgY7Rk0mkbsuOpCAYSs6wLYB2Xiw0Q==",
      "cpu": [
        "arm64"
      ],
@@ -4257,22 +4255,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-loong64-gnu": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.59.0.tgz",
      "integrity": "sha512-gbUSW/97f7+r4gHy3Jlup8zDG190AuodsWnNiXErp9mT90iCy9NKKU0Xwx5k8VlRAIV2uU9CsMnEFg/xXaOfXg==",
      "cpu": [
        "loong64"
      ],
      "license": "MIT",
      "optional": true,
      "os": [
        "linux"
      ]
    },
    "node_modules/@rollup/rollup-linux-loong64-musl": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.59.0.tgz",
      "integrity": "sha512-yTRONe79E+o0FWFijasoTjtzG9EBedFXJMl888NBEDCDV9I2wGbFFfJQQe63OijbFCUZqxpHz1GzpbtSFikJ4Q==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.5.tgz",
      "integrity": "sha512-DkDk8pmXQV2wVrF6oq5tONK6UHLz/XcEVow4JTTerdeV1uqPeHxwcg7aFsfnSm9L+OO8WJsWotKM2JJPMWrQtA==",
      "cpu": [
        "loong64"
      ],
@@ -4283,22 +4268,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-ppc64-gnu": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.59.0.tgz",
      "integrity": "sha512-sw1o3tfyk12k3OEpRddF68a1unZ5VCN7zoTNtSn2KndUE+ea3m3ROOKRCZxEpmT9nsGnogpFP9x6mnLTCaoLkA==",
      "cpu": [
        "ppc64"
      ],
      "license": "MIT",
      "optional": true,
      "os": [
        "linux"
      ]
    },
    "node_modules/@rollup/rollup-linux-ppc64-musl": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.59.0.tgz",
      "integrity": "sha512-+2kLtQ4xT3AiIxkzFVFXfsmlZiG5FXYW7ZyIIvGA7Bdeuh9Z0aN4hVyXS/G1E9bTP/vqszNIN/pUKCk/BTHsKA==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.5.tgz",
      "integrity": "sha512-W/b9ZN/U9+hPQVvlGwjzi+Wy4xdoH2I8EjaCkMvzpI7wJUs8sWJ03Rq96jRnHkSrcHTpQe8h5Tg3ZzUPGauvAw==",
      "cpu": [
        "ppc64"
      ],
@@ -4309,9 +4281,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-riscv64-gnu": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.59.0.tgz",
      "integrity": "sha512-NDYMpsXYJJaj+I7UdwIuHHNxXZ/b/N2hR15NyH3m2qAtb/hHPA4g4SuuvrdxetTdndfj9b1WOmy73kcPRoERUg==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.5.tgz",
      "integrity": "sha512-sjQLr9BW7R/ZiXnQiWPkErNfLMkkWIoCz7YMn27HldKsADEKa5WYdobaa1hmN6slu9oWQbB6/jFpJ+P2IkVrmw==",
      "cpu": [
        "riscv64"
      ],
@@ -4322,9 +4294,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-riscv64-musl": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.59.0.tgz",
      "integrity": "sha512-nLckB8WOqHIf1bhymk+oHxvM9D3tyPndZH8i8+35p/1YiVoVswPid2yLzgX7ZJP0KQvnkhM4H6QZ5m0LzbyIAg==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.5.tgz",
      "integrity": "sha512-hq3jU/kGyjXWTvAh2awn8oHroCbrPm8JqM7RUpKjalIRWWXE01CQOf/tUNWNHjmbMHg/hmNCwc/Pz3k1T/j/Lg==",
      "cpu": [
        "riscv64"
      ],
@@ -4335,9 +4307,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-s390x-gnu": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.59.0.tgz",
      "integrity": "sha512-oF87Ie3uAIvORFBpwnCvUzdeYUqi2wY6jRFWJAy1qus/udHFYIkplYRW+wo+GRUP4sKzYdmE1Y3+rY5Gc4ZO+w==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.5.tgz",
      "integrity": "sha512-gn8kHOrku8D4NGHMK1Y7NA7INQTRdVOntt1OCYypZPRt6skGbddska44K8iocdpxHTMMNui5oH4elPH4QOLrFQ==",
      "cpu": [
        "s390x"
      ],
@@ -4348,9 +4320,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-x64-gnu": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.59.0.tgz",
      "integrity": "sha512-3AHmtQq/ppNuUspKAlvA8HtLybkDflkMuLK4DPo77DfthRb71V84/c4MlWJXixZz4uruIH4uaa07IqoAkG64fg==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.52.5.tgz",
      "integrity": "sha512-hXGLYpdhiNElzN770+H2nlx+jRog8TyynpTVzdlc6bndktjKWyZyiCsuDAlpd+j+W+WNqfcyAWz9HxxIGfZm1Q==",
      "cpu": [
        "x64"
      ],
@@ -4361,9 +4333,9 @@
      ]
    },
    "node_modules/@rollup/rollup-linux-x64-musl": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.59.0.tgz",
      "integrity": "sha512-2UdiwS/9cTAx7qIUZB/fWtToJwvt0Vbo0zmnYt7ED35KPg13Q0ym1g442THLC7VyI6JfYTP4PiSOWyoMdV2/xg==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.52.5.tgz",
      "integrity": "sha512-arCGIcuNKjBoKAXD+y7XomR9gY6Mw7HnFBv5Rw7wQRvwYLR7gBAgV7Mb2QTyjXfTveBNFAtPt46/36vV9STLNg==",
      "cpu": [
        "x64"
      ],
@@ -4373,23 +4345,10 @@
        "linux"
      ]
    },
    "node_modules/@rollup/rollup-openbsd-x64": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.59.0.tgz",
      "integrity": "sha512-M3bLRAVk6GOwFlPTIxVBSYKUaqfLrn8l0psKinkCFxl4lQvOSz8ZrKDz2gxcBwHFpci0B6rttydI4IpS4IS/jQ==",
      "cpu": [
        "x64"
      ],
      "license": "MIT",
      "optional": true,
      "os": [
        "openbsd"
      ]
    },
    "node_modules/@rollup/rollup-openharmony-arm64": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.59.0.tgz",
      "integrity": "sha512-tt9KBJqaqp5i5HUZzoafHZX8b5Q2Fe7UjYERADll83O4fGqJ49O1FsL6LpdzVFQcpwvnyd0i+K/VSwu/o/nWlA==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.5.tgz",
      "integrity": "sha512-QoFqB6+/9Rly/RiPjaomPLmR/13cgkIGfA40LHly9zcH1S0bN2HVFYk3a1eAyHQyjs3ZJYlXvIGtcCs5tko9Cw==",
      "cpu": [
        "arm64"
      ],
@@ -4400,9 +4359,9 @@
      ]
    },
    "node_modules/@rollup/rollup-win32-arm64-msvc": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.59.0.tgz",
      "integrity": "sha512-V5B6mG7OrGTwnxaNUzZTDTjDS7F75PO1ae6MJYdiMu60sq0CqN5CVeVsbhPxalupvTX8gXVSU9gq+Rx1/hvu6A==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.5.tgz",
      "integrity": "sha512-w0cDWVR6MlTstla1cIfOGyl8+qb93FlAVutcor14Gf5Md5ap5ySfQ7R9S/NjNaMLSFdUnKGEasmVnu3lCMqB7w==",
      "cpu": [
        "arm64"
      ],
@@ -4413,9 +4372,9 @@
      ]
    },
    "node_modules/@rollup/rollup-win32-ia32-msvc": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.59.0.tgz",
      "integrity": "sha512-UKFMHPuM9R0iBegwzKF4y0C4J9u8C6MEJgFuXTBerMk7EJ92GFVFYBfOZaSGLu6COf7FxpQNqhNS4c4icUPqxA==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.5.tgz",
      "integrity": "sha512-Aufdpzp7DpOTULJCuvzqcItSGDH73pF3ko/f+ckJhxQyHtp67rHw3HMNxoIdDMUITJESNE6a8uh4Lo4SLouOUg==",
      "cpu": [
        "ia32"
      ],
@@ -4426,9 +4385,9 @@
      ]
    },
    "node_modules/@rollup/rollup-win32-x64-gnu": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.59.0.tgz",
      "integrity": "sha512-laBkYlSS1n2L8fSo1thDNGrCTQMmxjYY5G0WFWjFFYZkKPjsMBsgJfGf4TLxXrF6RyhI60L8TMOjBMvXiTcxeA==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.5.tgz",
      "integrity": "sha512-UGBUGPFp1vkj6p8wCRraqNhqwX/4kNQPS57BCFc8wYh0g94iVIW33wJtQAx3G7vrjjNtRaxiMUylM0ktp/TRSQ==",
      "cpu": [
        "x64"
      ],
@@ -4439,9 +4398,9 @@
      ]
    },
    "node_modules/@rollup/rollup-win32-x64-msvc": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.59.0.tgz",
      "integrity": "sha512-2HRCml6OztYXyJXAvdDXPKcawukWY2GpR5/nxKp4iBgiO3wcoEGkAaqctIbZcNB6KlUQBIqt8VYkNSj2397EfA==",
      "version": "4.52.5",
      "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.5.tgz",
      "integrity": "sha512-TAcgQh2sSkykPRWLrdyy2AiceMckNf5loITqXxFI5VuQjS5tSuw3WlwdN8qv8vzjLAUTvYaH/mVjSFpbkFbpTg==",
      "cpu": [
        "x64"
      ],
@@ -14505,9 +14464,7 @@
      }
    },
    "node_modules/rollup": {
      "version": "4.59.0",
      "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.59.0.tgz",
      "integrity": "sha512-2oMpl67a3zCH9H79LeMcbDhXW/UmWG/y2zuqnF2jQq5uq9TbM9TVyXvA4+t+ne2IIkBdrLpAaRQAvo7YI/Yyeg==",
      "version": "4.52.5",
      "license": "MIT",
      "dependencies": {
        "@types/estree": "1.0.8"
@@ -14520,31 +14477,28 @@
        "npm": ">=8.0.0"
      },
      "optionalDependencies": {
        "@rollup/rollup-android-arm-eabi": "4.59.0",
        "@rollup/rollup-android-arm64": "4.59.0",
        "@rollup/rollup-darwin-arm64": "4.59.0",
        "@rollup/rollup-darwin-x64": "4.59.0",
        "@rollup/rollup-freebsd-arm64": "4.59.0",
        "@rollup/rollup-freebsd-x64": "4.59.0",
        "@rollup/rollup-linux-arm-gnueabihf": "4.59.0",
        "@rollup/rollup-linux-arm-musleabihf": "4.59.0",
        "@rollup/rollup-linux-arm64-gnu": "4.59.0",
        "@rollup/rollup-linux-arm64-musl": "4.59.0",
        "@rollup/rollup-linux-loong64-gnu": "4.59.0",
        "@rollup/rollup-linux-loong64-musl": "4.59.0",
        "@rollup/rollup-linux-ppc64-gnu": "4.59.0",
        "@rollup/rollup-linux-ppc64-musl": "4.59.0",
        "@rollup/rollup-linux-riscv64-gnu": "4.59.0",
        "@rollup/rollup-linux-riscv64-musl": "4.59.0",
        "@rollup/rollup-linux-s390x-gnu": "4.59.0",
        "@rollup/rollup-linux-x64-gnu": "4.59.0",
        "@rollup/rollup-linux-x64-musl": "4.59.0",
        "@rollup/rollup-openbsd-x64": "4.59.0",
        "@rollup/rollup-openharmony-arm64": "4.59.0",
        "@rollup/rollup-win32-arm64-msvc": "4.59.0",
        "@rollup/rollup-win32-ia32-msvc": "4.59.0",
        "@rollup/rollup-win32-x64-gnu": "4.59.0",
        "@rollup/rollup-win32-x64-msvc": "4.59.0",
        "@rollup/rollup-android-arm-eabi": "4.52.5",
        "@rollup/rollup-android-arm64": "4.52.5",
        "@rollup/rollup-darwin-arm64": "4.52.5",
        "@rollup/rollup-darwin-x64": "4.52.5",
        "@rollup/rollup-freebsd-arm64": "4.52.5",
        "@rollup/rollup-freebsd-x64": "4.52.5",
        "@rollup/rollup-linux-arm-gnueabihf": "4.52.5",
        "@rollup/rollup-linux-arm-musleabihf": "4.52.5",
        "@rollup/rollup-linux-arm64-gnu": "4.52.5",
        "@rollup/rollup-linux-arm64-musl": "4.52.5",
        "@rollup/rollup-linux-loong64-gnu": "4.52.5",
        "@rollup/rollup-linux-ppc64-gnu": "4.52.5",
        "@rollup/rollup-linux-riscv64-gnu": "4.52.5",
        "@rollup/rollup-linux-riscv64-musl": "4.52.5",
        "@rollup/rollup-linux-s390x-gnu": "4.52.5",
        "@rollup/rollup-linux-x64-gnu": "4.52.5",
        "@rollup/rollup-linux-x64-musl": "4.52.5",
        "@rollup/rollup-openharmony-arm64": "4.52.5",
        "@rollup/rollup-win32-arm64-msvc": "4.52.5",
        "@rollup/rollup-win32-ia32-msvc": "4.52.5",
        "@rollup/rollup-win32-x64-gnu": "4.52.5",
        "@rollup/rollup-win32-x64-msvc": "4.52.5",
        "fsevents": "~2.3.2"
      }
    },

Some files were not shown because too many files have changed in this diff.