Mirror of https://github.com/onyx-dot-app/onyx.git — synced 2026-03-06 16:15:46 +00:00
Compare commits: v3.0.0-clo ... nikg/std-e (23 commits)
| SHA1 |
|---|
| d1fa65ce0a |
| 7eabfa125c |
| ee18114739 |
| f7630f5648 |
| e0d91b9ea7 |
| 2c0a4a60a5 |
| 3a7d4dad56 |
| c5c236d098 |
| b18baff4d0 |
| eb3e15c195 |
| 47d9a9e1ac |
| aca466b35d |
| 5176fd7386 |
| 92538084e9 |
| 2d996e05a4 |
| b2956f795b |
| b272085543 |
| 8193aa4fd0 |
| 52db41a00b |
| f1cf3c4589 |
| 5322aeed90 |
| 5da8870fd2 |
| 57d3ab3b40 |
108 .github/workflows/pr-playwright-tests.yml (vendored)
@@ -268,10 +268,11 @@ jobs:
           persist-credentials: false
 
       - name: Setup node
+        # zizmor: ignore[cache-poisoning] ephemeral runners; no release artifacts
         uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # ratchet:actions/setup-node@v4
         with:
           node-version: 22
-          cache: "npm"
+          cache: "npm" # zizmor: ignore[cache-poisoning]
           cache-dependency-path: ./web/package-lock.json
 
       - name: Install node dependencies
@@ -279,6 +280,7 @@ jobs:
         run: npm ci
 
       - name: Cache playwright cache
+        # zizmor: ignore[cache-poisoning] ephemeral runners; no release artifacts
         uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
         with:
           path: ~/.cache/ms-playwright
@@ -590,6 +592,108 @@ jobs:
           name: docker-logs-${{ matrix.project }}-${{ github.run_id }}
           path: ${{ github.workspace }}/docker-compose.log
 
+  playwright-tests-lite:
+    needs: [build-web-image, build-backend-image]
+    name: Playwright Tests (lite)
+    runs-on:
+      - runs-on
+      - runner=4cpu-linux-arm64
+      - "run-id=${{ github.run_id }}-playwright-tests-lite"
+      - "extras=ecr-cache"
+    timeout-minutes: 30
+    steps:
+      - uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
+
+      - name: Checkout code
+        uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # ratchet:actions/checkout@v6
+        with:
+          persist-credentials: false
+
+      - name: Setup node
+        # zizmor: ignore[cache-poisoning] ephemeral runners; no release artifacts
+        uses: actions/setup-node@6044e13b5dc448c55e2357c09f80417699197238 # ratchet:actions/setup-node@v4
+        with:
+          node-version: 22
+          cache: "npm" # zizmor: ignore[cache-poisoning]
+          cache-dependency-path: ./web/package-lock.json
+
+      - name: Install node dependencies
+        working-directory: ./web
+        run: npm ci
+
+      - name: Cache playwright cache
+        # zizmor: ignore[cache-poisoning] ephemeral runners; no release artifacts
+        uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
+        with:
+          path: ~/.cache/ms-playwright
+          key: ${{ runner.os }}-playwright-npm-${{ hashFiles('web/package-lock.json') }}
+          restore-keys: |
+            ${{ runner.os }}-playwright-npm-
+
+      - name: Install playwright browsers
+        working-directory: ./web
+        run: npx playwright install --with-deps
+
+      - name: Create .env file for Docker Compose
+        env:
+          OPENAI_API_KEY_VALUE: ${{ env.OPENAI_API_KEY }}
+          ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
+          RUN_ID: ${{ github.run_id }}
+        run: |
+          cat <<EOF > deployment/docker_compose/.env
+          ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true
+          LICENSE_ENFORCEMENT_ENABLED=false
+          AUTH_TYPE=basic
+          INTEGRATION_TESTS_MODE=true
+          GEN_AI_API_KEY=${OPENAI_API_KEY_VALUE}
+          MOCK_LLM_RESPONSE=true
+          REQUIRE_EMAIL_VERIFICATION=false
+          DISABLE_TELEMETRY=true
+          ONYX_BACKEND_IMAGE=${ECR_CACHE}:playwright-test-backend-${RUN_ID}
+          ONYX_WEB_SERVER_IMAGE=${ECR_CACHE}:playwright-test-web-${RUN_ID}
+          EOF
+
+      # needed for pulling external images otherwise, we hit the "Unauthenticated users" limit
+      # https://docs.docker.com/docker-hub/usage/
+      - name: Login to Docker Hub
+        uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # ratchet:docker/login-action@v3
+        with:
+          username: ${{ secrets.DOCKER_USERNAME }}
+          password: ${{ secrets.DOCKER_TOKEN }}
+
+      - name: Start Docker containers (lite)
+        run: |
+          cd deployment/docker_compose
+          docker compose -f docker-compose.yml -f docker-compose.onyx-lite.yml -f docker-compose.dev.yml up -d
+        id: start_docker
+
+      - name: Run Playwright tests (lite)
+        working-directory: ./web
+        run: npx playwright test --project lite
+
+      - uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
+        if: always()
+        with:
+          name: playwright-test-results-lite-${{ github.run_id }}
+          path: ./web/output/playwright/
+          retention-days: 30
+
+      - name: Save Docker logs
+        if: success() || failure()
+        env:
+          WORKSPACE: ${{ github.workspace }}
+        run: |
+          cd deployment/docker_compose
+          docker compose logs > docker-compose.log
+          mv docker-compose.log ${WORKSPACE}/docker-compose.log
+
+      - name: Upload logs
+        if: success() || failure()
+        uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f
+        with:
+          name: docker-logs-lite-${{ github.run_id }}
+          path: ${{ github.workspace }}/docker-compose.log
+
   # Post a single combined visual regression comment after all matrix jobs finish
   visual-regression-comment:
     needs: [playwright-tests]
@@ -686,7 +790,7 @@ jobs:
     # NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
     runs-on: ubuntu-slim
     timeout-minutes: 45
-    needs: [playwright-tests]
+    needs: [playwright-tests, playwright-tests-lite]
     if: ${{ always() }}
     steps:
       - name: Check job status
15 .vscode/launch.json (vendored)
@@ -485,21 +485,6 @@
             "group": "3"
           }
         },
-        {
-            "name": "Clear and Restart OpenSearch Container",
-            // Generic debugger type, required arg but has no bearing on bash.
-            "type": "node",
-            "request": "launch",
-            "runtimeExecutable": "bash",
-            "runtimeArgs": [
-                "${workspaceFolder}/backend/scripts/restart_opensearch_container.sh"
-            ],
-            "cwd": "${workspaceFolder}",
-            "console": "integratedTerminal",
-            "presentation": {
-                "group": "3"
-            }
-        },
         {
             "name": "Eval CLI",
             "type": "debugpy",
@@ -15,6 +15,7 @@ from sqlalchemy.orm import Session
 from ee.onyx.server.user_group.models import SetCuratorRequest
 from ee.onyx.server.user_group.models import UserGroupCreate
 from ee.onyx.server.user_group.models import UserGroupUpdate
+from onyx.configs.app_configs import DISABLE_VECTOR_DB
 from onyx.db.connector_credential_pair import get_connector_credential_pair_from_id
 from onyx.db.enums import AccessType
 from onyx.db.enums import ConnectorCredentialPairStatus
@@ -471,7 +472,9 @@ def _add_user_group__cc_pair_relationships__no_commit(
 
 def insert_user_group(db_session: Session, user_group: UserGroupCreate) -> UserGroup:
     db_user_group = UserGroup(
-        name=user_group.name, time_last_modified_by_user=func.now()
+        name=user_group.name,
+        time_last_modified_by_user=func.now(),
+        is_up_to_date=DISABLE_VECTOR_DB,
     )
     db_session.add(db_user_group)
     db_session.flush()  # give the group an ID
@@ -774,8 +777,7 @@ def update_user_group(
         cc_pair_ids=user_group_update.cc_pair_ids,
     )
 
-    # only needs to sync with Vespa if the cc_pairs have been updated
-    if cc_pairs_updated:
+    if cc_pairs_updated and not DISABLE_VECTOR_DB:
         db_user_group.is_up_to_date = False
 
     removed_users = db_session.scalars(
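A plausible reading of the `is_up_to_date` changes above (the diff itself does not spell it out): with the vector DB disabled there is no sync worker that would ever flip the flag later, so groups must be created already up to date, and updates must not mark them stale. A minimal sketch with illustrative values:

```python
# Illustrative sketch only; DISABLE_VECTOR_DB is a local stand-in here, not
# the real onyx.configs.app_configs value.
DISABLE_VECTOR_DB = True

# insert_user_group: a new group is born "up to date" when nothing can sync it.
is_up_to_date_at_creation = DISABLE_VECTOR_DB

# update_user_group: only mark the group stale when a sync can actually run.
cc_pairs_updated = True
marked_stale = cc_pairs_updated and not DISABLE_VECTOR_DB

assert is_up_to_date_at_creation is True
assert marked_stale is False
```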
@@ -4,7 +4,6 @@ from contextlib import asynccontextmanager
 from fastapi import FastAPI
 from httpx_oauth.clients.google import GoogleOAuth2
 
-from ee.onyx.configs.app_configs import LICENSE_ENFORCEMENT_ENABLED
 from ee.onyx.server.analytics.api import router as analytics_router
 from ee.onyx.server.auth_check import check_ee_router_auth
 from ee.onyx.server.billing.api import router as billing_router
@@ -153,12 +152,9 @@ def get_application() -> FastAPI:
     # License management
     include_router_with_global_prefix_prepended(application, license_router)
 
-    # Unified billing API - available when license system is enabled
-    # Works for both self-hosted and cloud deployments
-    # TODO(ENG-3533): Once frontend migrates to /admin/billing/*, this becomes the
-    # primary billing API and /tenants/* billing endpoints can be removed
-    if LICENSE_ENFORCEMENT_ENABLED:
-        include_router_with_global_prefix_prepended(application, billing_router)
+    # Unified billing API - always registered in EE.
+    # Each endpoint is protected by the `current_admin_user` dependency (admin auth).
+    include_router_with_global_prefix_prepended(application, billing_router)
 
     if MULTI_TENANT:
         # Tenant management
@@ -223,6 +223,15 @@ def get_active_scim_token(
     token = dal.get_active_token()
     if not token:
         raise HTTPException(status_code=404, detail="No active SCIM token")
 
+    # Derive the IdP domain from the first synced user as a heuristic.
+    idp_domain: str | None = None
+    mappings, _total = dal.list_user_mappings(start_index=1, count=1)
+    if mappings:
+        user = dal.get_user(mappings[0].user_id)
+        if user and "@" in user.email:
+            idp_domain = user.email.rsplit("@", 1)[1]
+
     return ScimTokenResponse(
         id=token.id,
         name=token.name,
@@ -230,6 +239,7 @@ def get_active_scim_token(
         is_active=token.is_active,
         created_at=token.created_at,
         last_used_at=token.last_used_at,
+        idp_domain=idp_domain,
     )
@@ -365,6 +365,7 @@ class ScimTokenResponse(BaseModel):
     is_active: bool
     created_at: datetime
     last_used_at: datetime | None = None
+    idp_domain: str | None = None
 
 
 class ScimTokenCreatedResponse(ScimTokenResponse):
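The IdP-domain heuristic added above reduces to a single string operation; a small standalone check (with a made-up address):

```python
# Everything after the last "@" of the first synced user's email is treated as
# the IdP domain; rsplit("@", 1) splits once from the right, so local parts
# containing "@" are handled. The address below is illustrative only.
email = "jane.doe@corp.example.com"
idp_domain = email.rsplit("@", 1)[1] if "@" in email else None
assert idp_domain == "corp.example.com"
```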
@@ -2,12 +2,13 @@ from datetime import datetime
 from datetime import timedelta
 
 import jwt
-from fastapi import HTTPException
 from fastapi import Request
 
 from onyx.configs.app_configs import DATA_PLANE_SECRET
 from onyx.configs.app_configs import EXPECTED_API_KEY
 from onyx.configs.app_configs import JWT_ALGORITHM
+from onyx.error_handling.error_codes import OnyxErrorCode
+from onyx.error_handling.exceptions import OnyxError
 from onyx.utils.logger import setup_logger
 
 logger = setup_logger()
@@ -32,22 +33,24 @@ async def control_plane_dep(request: Request) -> None:
     api_key = request.headers.get("X-API-KEY")
     if api_key != EXPECTED_API_KEY:
         logger.warning("Invalid API key")
-        raise HTTPException(status_code=401, detail="Invalid API key")
+        raise OnyxError(OnyxErrorCode.UNAUTHENTICATED, "Invalid API key")
 
     auth_header = request.headers.get("Authorization")
     if not auth_header or not auth_header.startswith("Bearer "):
         logger.warning("Invalid authorization header")
-        raise HTTPException(status_code=401, detail="Invalid authorization header")
+        raise OnyxError(OnyxErrorCode.UNAUTHENTICATED, "Invalid authorization header")
 
     token = auth_header.split(" ")[1]
     try:
         payload = jwt.decode(token, DATA_PLANE_SECRET, algorithms=[JWT_ALGORITHM])
         if payload.get("scope") != "tenant:create":
             logger.warning("Insufficient permissions")
-            raise HTTPException(status_code=403, detail="Insufficient permissions")
+            raise OnyxError(
+                OnyxErrorCode.INSUFFICIENT_PERMISSIONS, "Insufficient permissions"
+            )
     except jwt.ExpiredSignatureError:
         logger.warning("Token has expired")
-        raise HTTPException(status_code=401, detail="Token has expired")
+        raise OnyxError(OnyxErrorCode.TOKEN_EXPIRED, "Token has expired")
     except jwt.InvalidTokenError:
         logger.warning("Invalid token")
-        raise HTTPException(status_code=401, detail="Invalid token")
+        raise OnyxError(OnyxErrorCode.INVALID_TOKEN, "Invalid token")
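This file (and several below) swap per-call-site `HTTPException(status_code=..., detail=...)` raises for `OnyxError(code, message)`. The real `OnyxError`/`OnyxErrorCode` definitions live in `onyx.error_handling` and are not part of this diff; the sketch below is a hypothetical reconstruction of the pattern, showing how a single FastAPI exception handler can map error codes back to HTTP statuses:

```python
# Hypothetical sketch; the class bodies and the code->status table are
# assumptions, not the actual onyx.error_handling implementation.
from enum import Enum

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse


class OnyxErrorCode(str, Enum):
    UNAUTHENTICATED = "unauthenticated"
    INSUFFICIENT_PERMISSIONS = "insufficient_permissions"
    TOKEN_EXPIRED = "token_expired"
    INVALID_TOKEN = "invalid_token"


class OnyxError(Exception):
    def __init__(self, code: OnyxErrorCode, message: str) -> None:
        super().__init__(message)
        self.code = code
        self.message = message


_STATUS_BY_CODE = {
    OnyxErrorCode.UNAUTHENTICATED: 401,
    OnyxErrorCode.INSUFFICIENT_PERMISSIONS: 403,
    OnyxErrorCode.TOKEN_EXPIRED: 401,
    OnyxErrorCode.INVALID_TOKEN: 401,
}

app = FastAPI()


@app.exception_handler(OnyxError)
async def onyx_error_handler(request: Request, exc: OnyxError) -> JSONResponse:
    # One handler decides the HTTP status; raise sites only pick a code.
    return JSONResponse(
        status_code=_STATUS_BY_CODE.get(exc.code, 500),
        content={"code": exc.code.value, "detail": exc.message},
    )
```

The payoff is that endpoint code no longer hardcodes transport-level status codes, and a code such as `BAD_GATEWAY` can still override the status explicitly (see the `status_code_override` kwarg used in the proxy hunks below).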
@@ -1,6 +1,5 @@
 from fastapi import APIRouter
 from fastapi import Depends
-from fastapi import HTTPException
 from fastapi import Response
 from fastapi_users import exceptions
 
@@ -12,6 +11,8 @@ from onyx.auth.users import get_redis_strategy
 from onyx.auth.users import User
 from onyx.db.engine.sql_engine import get_session_with_tenant
 from onyx.db.users import get_user_by_email
+from onyx.error_handling.error_codes import OnyxErrorCode
+from onyx.error_handling.exceptions import OnyxError
 from onyx.utils.logger import setup_logger
 
 logger = setup_logger()
@@ -30,7 +31,7 @@ async def impersonate_user(
     except exceptions.UserNotExists:
         detail = f"User has no tenant mapping: {impersonate_request.email=}"
         logger.warning(detail)
-        raise HTTPException(status_code=422, detail=detail)
+        raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, detail)
 
     with get_session_with_tenant(tenant_id=tenant_id) as tenant_session:
         user_to_impersonate = get_user_by_email(
@@ -41,7 +42,7 @@ async def impersonate_user(
             f"User not found in tenant: {impersonate_request.email=} {tenant_id=}"
         )
         logger.warning(detail)
-        raise HTTPException(status_code=422, detail=detail)
+        raise OnyxError(OnyxErrorCode.USER_NOT_FOUND, detail)
 
     token = await get_redis_strategy().write_token(user_to_impersonate)
@@ -1,6 +1,5 @@
 from fastapi import APIRouter
 from fastapi import Depends
-from fastapi import HTTPException
 from fastapi import Response
 from sqlalchemy.exc import IntegrityError
 
@@ -18,6 +17,8 @@ from onyx.auth.users import User
 from onyx.configs.constants import ANONYMOUS_USER_COOKIE_NAME
 from onyx.configs.constants import FASTAPI_USERS_AUTH_COOKIE_NAME
 from onyx.db.engine.sql_engine import get_session_with_shared_schema
+from onyx.error_handling.error_codes import OnyxErrorCode
+from onyx.error_handling.exceptions import OnyxError
 from onyx.utils.logger import setup_logger
 from shared_configs.contextvars import get_current_tenant_id
 
@@ -33,7 +34,7 @@ async def get_anonymous_user_path_api(
     tenant_id = get_current_tenant_id()
 
     if tenant_id is None:
-        raise HTTPException(status_code=404, detail="Tenant not found")
+        raise OnyxError(OnyxErrorCode.NOT_FOUND, "Tenant not found")
 
     with get_session_with_shared_schema() as db_session:
         current_path = get_anonymous_user_path(tenant_id, db_session)
@@ -50,21 +51,21 @@ async def set_anonymous_user_path_api(
     try:
         validate_anonymous_user_path(anonymous_user_path)
     except ValueError as e:
-        raise HTTPException(status_code=400, detail=str(e))
+        raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(e))
 
     with get_session_with_shared_schema() as db_session:
         try:
             modify_anonymous_user_path(tenant_id, anonymous_user_path, db_session)
         except IntegrityError:
-            raise HTTPException(
-                status_code=409,
-                detail="The anonymous user path is already in use. Please choose a different path.",
+            raise OnyxError(
+                OnyxErrorCode.CONFLICT,
+                "The anonymous user path is already in use. Please choose a different path.",
             )
         except Exception as e:
            logger.exception(f"Failed to modify anonymous user path: {str(e)}")
-            raise HTTPException(
-                status_code=500,
-                detail="An unexpected error occurred while modifying the anonymous user path",
+            raise OnyxError(
+                OnyxErrorCode.INTERNAL_ERROR,
+                "An unexpected error occurred while modifying the anonymous user path",
             )
@@ -77,10 +78,10 @@ async def login_as_anonymous_user(
         anonymous_user_path, db_session
     )
     if not tenant_id:
-        raise HTTPException(status_code=404, detail="Tenant not found")
+        raise OnyxError(OnyxErrorCode.NOT_FOUND, "Tenant not found")
 
     if not anonymous_user_enabled(tenant_id=tenant_id):
-        raise HTTPException(status_code=403, detail="Anonymous user is not enabled")
+        raise OnyxError(OnyxErrorCode.UNAUTHORIZED, "Anonymous user is not enabled")
 
     token = generate_anonymous_user_jwt_token(tenant_id)
@@ -4,7 +4,6 @@ import uuid
 import aiohttp  # Async HTTP client
 import httpx
 import requests
-from fastapi import HTTPException
 from fastapi import Request
 from sqlalchemy import select
 from sqlalchemy.orm import Session
@@ -41,6 +40,8 @@ from onyx.db.models import AvailableTenant
 from onyx.db.models import IndexModelStatus
 from onyx.db.models import SearchSettings
 from onyx.db.models import UserTenantMapping
+from onyx.error_handling.error_codes import OnyxErrorCode
+from onyx.error_handling.exceptions import OnyxError
 from onyx.llm.well_known_providers.auto_update_models import LLMRecommendations
 from onyx.llm.well_known_providers.constants import ANTHROPIC_PROVIDER_NAME
 from onyx.llm.well_known_providers.constants import OPENAI_PROVIDER_NAME
@@ -116,9 +117,9 @@ async def get_or_provision_tenant(
         # If we've encountered an error, log and raise an exception
         error_msg = "Failed to provision tenant"
         logger.error(error_msg, exc_info=e)
-        raise HTTPException(
-            status_code=500,
-            detail="Failed to provision tenant. Please try again later.",
+        raise OnyxError(
+            OnyxErrorCode.INTERNAL_ERROR,
+            "Failed to provision tenant. Please try again later.",
         )
@@ -144,18 +145,18 @@ async def create_tenant(
             await rollback_tenant_provisioning(tenant_id)
         except Exception:
             logger.exception(f"Failed to rollback tenant provisioning for {tenant_id}")
-        raise HTTPException(status_code=500, detail="Failed to provision tenant.")
+        raise OnyxError(OnyxErrorCode.INTERNAL_ERROR, "Failed to provision tenant.")
 
     return tenant_id
 
 
 async def provision_tenant(tenant_id: str, email: str) -> None:
     if not MULTI_TENANT:
-        raise HTTPException(status_code=403, detail="Multi-tenancy is not enabled")
+        raise OnyxError(OnyxErrorCode.UNAUTHORIZED, "Multi-tenancy is not enabled")
 
     if user_owns_a_tenant(email):
-        raise HTTPException(
-            status_code=409, detail="User already belongs to an organization"
+        raise OnyxError(
+            OnyxErrorCode.CONFLICT, "User already belongs to an organization"
         )
 
     logger.debug(f"Provisioning tenant {tenant_id} for user {email}")
@@ -175,8 +176,8 @@ async def provision_tenant(tenant_id: str, email: str) -> None:
 
     except Exception as e:
         logger.exception(f"Failed to create tenant {tenant_id}")
-        raise HTTPException(
-            status_code=500, detail=f"Failed to create tenant: {str(e)}"
+        raise OnyxError(
+            OnyxErrorCode.INTERNAL_ERROR, f"Failed to create tenant: {str(e)}"
         )
@@ -25,7 +25,6 @@ import httpx
 from fastapi import APIRouter
 from fastapi import Depends
 from fastapi import Header
-from fastapi import HTTPException
 from pydantic import BaseModel
 
 from ee.onyx.configs.app_configs import LICENSE_ENFORCEMENT_ENABLED
@@ -36,6 +35,8 @@ from ee.onyx.server.tenants.access import generate_data_plane_token
 from ee.onyx.utils.license import is_license_valid
 from ee.onyx.utils.license import verify_license_signature
 from onyx.configs.app_configs import CONTROL_PLANE_API_BASE_URL
+from onyx.error_handling.error_codes import OnyxErrorCode
+from onyx.error_handling.exceptions import OnyxError
 from onyx.utils.logger import setup_logger
 
 logger = setup_logger()
@@ -46,9 +47,9 @@ router = APIRouter(prefix="/proxy")
 def _check_license_enforcement_enabled() -> None:
     """Ensure LICENSE_ENFORCEMENT_ENABLED is true (proxy endpoints only work on cloud DP)."""
     if not LICENSE_ENFORCEMENT_ENABLED:
-        raise HTTPException(
-            status_code=501,
-            detail="Proxy endpoints are only available on cloud data plane",
+        raise OnyxError(
+            OnyxErrorCode.NOT_IMPLEMENTED,
+            "Proxy endpoints are only available on cloud data plane",
         )
@@ -81,8 +82,9 @@ def _extract_license_from_header(
     """
     if not authorization or not authorization.startswith("Bearer "):
         if required:
-            raise HTTPException(
-                status_code=401, detail="Missing or invalid authorization header"
+            raise OnyxError(
+                OnyxErrorCode.UNAUTHENTICATED,
+                "Missing or invalid authorization header",
             )
         return None
@@ -110,10 +112,10 @@ def verify_license_auth(
     try:
         payload = verify_license_signature(license_data)
     except ValueError as e:
-        raise HTTPException(status_code=401, detail=f"Invalid license: {e}")
+        raise OnyxError(OnyxErrorCode.UNAUTHENTICATED, f"Invalid license: {e}")
 
     if not allow_expired and not is_license_valid(payload):
-        raise HTTPException(status_code=401, detail="License has expired")
+        raise OnyxError(OnyxErrorCode.TOKEN_EXPIRED, "License has expired")
 
     return payload
@@ -197,12 +199,12 @@ async def forward_to_control_plane(
         except Exception:
             pass
         logger.error(f"Control plane returned {status_code}: {detail}")
-        raise HTTPException(status_code=status_code, detail=detail)
+        raise OnyxError(
+            OnyxErrorCode.BAD_GATEWAY, detail, status_code_override=status_code
+        )
     except httpx.RequestError:
         logger.exception("Failed to connect to control plane")
-        raise HTTPException(
-            status_code=502, detail="Failed to connect to control plane"
-        )
+        raise OnyxError(OnyxErrorCode.BAD_GATEWAY, "Failed to connect to control plane")
 
 
 # -----------------------------------------------------------------------------
@@ -294,9 +296,9 @@ async def proxy_claim_license(
 
     if not tenant_id or not license_data:
         logger.error(f"Control plane returned incomplete claim response: {result}")
-        raise HTTPException(
-            status_code=502,
-            detail="Control plane returned incomplete license data",
+        raise OnyxError(
+            OnyxErrorCode.BAD_GATEWAY,
+            "Control plane returned incomplete license data",
         )
 
     return ClaimLicenseResponse(
@@ -326,7 +328,7 @@ async def proxy_create_customer_portal_session(
     # tenant_id is a required field in LicensePayload (Pydantic validates this),
     # but we check explicitly for defense in depth
     if not license_payload.tenant_id:
-        raise HTTPException(status_code=401, detail="License missing tenant_id")
+        raise OnyxError(OnyxErrorCode.UNAUTHENTICATED, "License missing tenant_id")
 
     tenant_id = license_payload.tenant_id
@@ -367,7 +369,7 @@ async def proxy_billing_information(
     # tenant_id is a required field in LicensePayload (Pydantic validates this),
     # but we check explicitly for defense in depth
     if not license_payload.tenant_id:
-        raise HTTPException(status_code=401, detail="License missing tenant_id")
+        raise OnyxError(OnyxErrorCode.UNAUTHENTICATED, "License missing tenant_id")
 
     tenant_id = license_payload.tenant_id
@@ -398,12 +400,12 @@ async def proxy_license_fetch(
     # tenant_id is a required field in LicensePayload (Pydantic validates this),
     # but we check explicitly for defense in depth
     if not license_payload.tenant_id:
-        raise HTTPException(status_code=401, detail="License missing tenant_id")
+        raise OnyxError(OnyxErrorCode.UNAUTHENTICATED, "License missing tenant_id")
 
     if tenant_id != license_payload.tenant_id:
-        raise HTTPException(
-            status_code=403,
-            detail="Cannot fetch license for a different tenant",
+        raise OnyxError(
+            OnyxErrorCode.UNAUTHORIZED,
+            "Cannot fetch license for a different tenant",
         )
 
     result = await forward_to_control_plane("GET", f"/license/{tenant_id}")
@@ -411,9 +413,9 @@ async def proxy_license_fetch(
     license_data = result.get("license")
     if not license_data:
         logger.error(f"Control plane returned incomplete license response: {result}")
-        raise HTTPException(
-            status_code=502,
-            detail="Control plane returned incomplete license data",
+        raise OnyxError(
+            OnyxErrorCode.BAD_GATEWAY,
+            "Control plane returned incomplete license data",
        )
 
     # Return license to caller - self-hosted instance stores it via /api/license/claim
@@ -432,7 +434,7 @@ async def proxy_seat_update(
     Returns the regenerated license in the response for the caller to store.
     """
     if not license_payload.tenant_id:
-        raise HTTPException(status_code=401, detail="License missing tenant_id")
+        raise OnyxError(OnyxErrorCode.UNAUTHENTICATED, "License missing tenant_id")
 
     tenant_id = license_payload.tenant_id
@@ -1,6 +1,5 @@
 from fastapi import APIRouter
 from fastapi import Depends
-from fastapi import HTTPException
 from sqlalchemy.orm import Session
 
 from ee.onyx.server.tenants.provisioning import delete_user_from_control_plane
@@ -12,6 +11,8 @@ from onyx.db.auth import get_user_count
 from onyx.db.engine.sql_engine import get_session
 from onyx.db.users import delete_user_from_db
 from onyx.db.users import get_user_by_email
+from onyx.error_handling.error_codes import OnyxErrorCode
+from onyx.error_handling.exceptions import OnyxError
 from onyx.server.manage.models import UserByEmail
 from onyx.utils.logger import setup_logger
 from shared_configs.contextvars import get_current_tenant_id
@@ -30,13 +31,14 @@ async def leave_organization(
     tenant_id = get_current_tenant_id()
 
     if current_user.email != user_email.user_email:
-        raise HTTPException(
-            status_code=403, detail="You can only leave the organization as yourself"
+        raise OnyxError(
+            OnyxErrorCode.INSUFFICIENT_PERMISSIONS,
+            "You can only leave the organization as yourself",
         )
 
     user_to_delete = get_user_by_email(user_email.user_email, db_session)
     if user_to_delete is None:
-        raise HTTPException(status_code=404, detail="User not found")
+        raise OnyxError(OnyxErrorCode.USER_NOT_FOUND, "User not found")
 
     num_admin_users = await get_user_count(only_admin_users=True)
@@ -53,9 +55,9 @@ async def leave_organization(
         logger.exception(
             f"Failed to delete user from control plane for tenant {tenant_id}: {e}"
         )
-        raise HTTPException(
-            status_code=500,
-            detail=f"Failed to remove user from control plane: {str(e)}",
+        raise OnyxError(
+            OnyxErrorCode.INTERNAL_ERROR,
+            f"Failed to remove user from control plane: {str(e)}",
         )
 
     db_session.expunge(user_to_delete)
@@ -1,6 +1,5 @@
 from fastapi import APIRouter
 from fastapi import Depends
-from fastapi import HTTPException
 
 from ee.onyx.server.tenants.models import ApproveUserRequest
 from ee.onyx.server.tenants.models import PendingUserSnapshot
@@ -13,6 +12,8 @@ from onyx.auth.invited_users import get_pending_users
 from onyx.auth.users import current_admin_user
 from onyx.auth.users import current_user
 from onyx.auth.users import User
+from onyx.error_handling.error_codes import OnyxErrorCode
+from onyx.error_handling.exceptions import OnyxError
 from onyx.utils.logger import setup_logger
 from shared_configs.contextvars import get_current_tenant_id
 
@@ -32,7 +33,7 @@ async def request_invite(
         logger.exception(
             f"Failed to invite self to tenant {invite_request.tenant_id}: {e}"
         )
-        raise HTTPException(status_code=500, detail=str(e))
+        raise OnyxError(OnyxErrorCode.INTERNAL_ERROR, str(e))
 
 
 @router.get("/users/pending")
@@ -64,7 +65,7 @@ async def accept_invite(
         accept_user_invite(user.email, invite_request.tenant_id)
     except Exception as e:
         logger.exception(f"Failed to accept invite: {str(e)}")
-        raise HTTPException(status_code=500, detail="Failed to accept invitation")
+        raise OnyxError(OnyxErrorCode.INTERNAL_ERROR, "Failed to accept invitation")
 
 
 @router.post("/users/invite/deny")
@@ -79,4 +80,4 @@ async def deny_invite(
         deny_user_invite(user.email, invite_request.tenant_id)
     except Exception as e:
         logger.exception(f"Failed to deny invite: {str(e)}")
-        raise HTTPException(status_code=500, detail="Failed to deny invitation")
+        raise OnyxError(OnyxErrorCode.INTERNAL_ERROR, "Failed to deny invitation")
@@ -5,6 +5,8 @@ from sqlalchemy.exc import IntegrityError
 from sqlalchemy.orm import Session
 
 from ee.onyx.db.user_group import add_users_to_user_group
+from ee.onyx.db.user_group import delete_user_group as db_delete_user_group
+from ee.onyx.db.user_group import fetch_user_group
 from ee.onyx.db.user_group import fetch_user_groups
 from ee.onyx.db.user_group import fetch_user_groups_for_user
 from ee.onyx.db.user_group import insert_user_group
@@ -20,6 +22,7 @@ from ee.onyx.server.user_group.models import UserGroupUpdate
 from onyx.auth.users import current_admin_user
 from onyx.auth.users import current_curator_or_admin_user
 from onyx.auth.users import current_user
+from onyx.configs.app_configs import DISABLE_VECTOR_DB
 from onyx.configs.constants import PUBLIC_API_TAGS
 from onyx.db.engine.sql_engine import get_session
 from onyx.db.models import User
@@ -153,3 +156,8 @@ def delete_user_group(
         prepare_user_group_for_deletion(db_session, user_group_id)
     except ValueError as e:
         raise HTTPException(status_code=404, detail=str(e))
+
+    if DISABLE_VECTOR_DB:
+        user_group = fetch_user_group(db_session, user_group_id)
+        if user_group:
+            db_delete_user_group(db_session, user_group)
@@ -39,9 +39,13 @@ CT = TypeVar("CT", bound=ConnectorCheckpoint)
 
 
 class SlimConnectorExtractionResult(BaseModel):
-    """Result of extracting document IDs and hierarchy nodes from a connector."""
+    """Result of extracting document IDs and hierarchy nodes from a connector.
+
+    raw_id_to_parent maps document ID → parent_hierarchy_raw_node_id (or None).
+    Use raw_id_to_parent.keys() wherever the old set of IDs was needed.
+    """
 
-    doc_ids: set[str]
+    raw_id_to_parent: dict[str, str | None]
     hierarchy_nodes: list[HierarchyNode]
@@ -93,30 +97,37 @@ def _get_failure_id(failure: ConnectorFailure) -> str | None:
     return None
 
 
+class BatchResult(BaseModel):
+    raw_id_to_parent: dict[str, str | None]
+    hierarchy_nodes: list[HierarchyNode]
+
+
 def _extract_from_batch(
     doc_list: Sequence[Document | SlimDocument | HierarchyNode | ConnectorFailure],
-) -> tuple[set[str], list[HierarchyNode]]:
-    """Separate a batch into document IDs and hierarchy nodes.
+) -> BatchResult:
+    """Separate a batch into document IDs (with parent mapping) and hierarchy nodes.
 
     ConnectorFailure items have their failed document/entity IDs added to the
-    ID set so that failed-to-retrieve documents are not accidentally pruned.
+    ID dict so that failed-to-retrieve documents are not accidentally pruned.
     """
-    ids: set[str] = set()
+    ids: dict[str, str | None] = {}
     hierarchy_nodes: list[HierarchyNode] = []
     for item in doc_list:
         if isinstance(item, HierarchyNode):
             hierarchy_nodes.append(item)
-            ids.add(item.raw_node_id)
+            if item.raw_node_id not in ids:
+                ids[item.raw_node_id] = None
         elif isinstance(item, ConnectorFailure):
             failed_id = _get_failure_id(item)
             if failed_id:
-                ids.add(failed_id)
+                ids[failed_id] = None
                 logger.warning(
                     f"Failed to retrieve document {failed_id}: " f"{item.failure_message}"
                 )
         else:
-            ids.add(item.id)
-    return ids, hierarchy_nodes
+            parent_raw = getattr(item, "parent_hierarchy_raw_node_id", None)
+            ids[item.id] = parent_raw
+    return BatchResult(raw_id_to_parent=ids, hierarchy_nodes=hierarchy_nodes)
@@ -132,7 +143,7 @@ def extract_ids_from_runnable_connector(
 
     Optionally, a callback can be passed to handle the length of each document batch.
     """
-    all_connector_doc_ids: set[str] = set()
+    all_raw_id_to_parent: dict[str, str | None] = {}
     all_hierarchy_nodes: list[HierarchyNode] = []
 
     # Sequence (covariant) lets all the specific list[...] iterator types unify here
@@ -177,15 +188,20 @@ def extract_ids_from_runnable_connector(
                 "extract_ids_from_runnable_connector: Stop signal detected"
             )
 
-        batch_ids, batch_nodes = _extract_from_batch(doc_list)
-        all_connector_doc_ids.update(doc_batch_processing_func(batch_ids))
+        batch_result = _extract_from_batch(doc_list)
+        batch_ids = batch_result.raw_id_to_parent
+        batch_nodes = batch_result.hierarchy_nodes
+        doc_batch_processing_func(batch_ids)
+        for k, v in batch_ids.items():
+            if v is not None or k not in all_raw_id_to_parent:
+                all_raw_id_to_parent[k] = v
         all_hierarchy_nodes.extend(batch_nodes)
 
         if callback:
             callback.progress("extract_ids_from_runnable_connector", len(batch_ids))
 
     return SlimConnectorExtractionResult(
-        doc_ids=all_connector_doc_ids,
+        raw_id_to_parent=all_raw_id_to_parent,
         hierarchy_nodes=all_hierarchy_nodes,
     )
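The merge loop in `extract_ids_from_runnable_connector` has a subtle rule worth isolating: a later batch may overwrite an entry only when it carries a concrete parent, so a known parent is never clobbered by a later `None`. A self-contained check with made-up IDs:

```python
# Mirrors the `if v is not None or k not in all_raw_id_to_parent` merge above.
all_raw_id_to_parent: dict[str, str | None] = {}

batches = [
    {"doc-1": None, "doc-2": "folder-a"},
    {"doc-1": "folder-b", "doc-2": None},  # None must not erase "folder-a"
]

for batch_ids in batches:
    for k, v in batch_ids.items():
        if v is not None or k not in all_raw_id_to_parent:
            all_raw_id_to_parent[k] = v

assert all_raw_id_to_parent == {"doc-1": "folder-b", "doc-2": "folder-a"}
```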
@@ -29,6 +29,7 @@ from onyx.configs.constants import CELERY_GENERIC_BEAT_LOCK_TIMEOUT
 from onyx.configs.constants import CELERY_PRUNING_LOCK_TIMEOUT
 from onyx.configs.constants import CELERY_TASK_WAIT_FOR_FENCE_TIMEOUT
 from onyx.configs.constants import DANSWER_REDIS_FUNCTION_LOCK_PREFIX
+from onyx.configs.constants import DocumentSource
 from onyx.configs.constants import OnyxCeleryPriority
 from onyx.configs.constants import OnyxCeleryQueues
 from onyx.configs.constants import OnyxCeleryTask
@@ -47,6 +48,8 @@ from onyx.db.enums import AccessType
 from onyx.db.enums import ConnectorCredentialPairStatus
 from onyx.db.enums import SyncStatus
 from onyx.db.enums import SyncType
+from onyx.db.hierarchy import link_hierarchy_nodes_to_documents
+from onyx.db.hierarchy import update_document_parent_hierarchy_nodes
 from onyx.db.hierarchy import upsert_hierarchy_nodes_batch
 from onyx.db.models import ConnectorCredentialPair
 from onyx.db.sync_record import insert_sync_record
@@ -57,6 +60,8 @@ from onyx.redis.redis_connector_prune import RedisConnectorPrune
 from onyx.redis.redis_connector_prune import RedisConnectorPrunePayload
 from onyx.redis.redis_hierarchy import cache_hierarchy_nodes_batch
 from onyx.redis.redis_hierarchy import ensure_source_node_exists
+from onyx.redis.redis_hierarchy import get_node_id_from_raw_id
+from onyx.redis.redis_hierarchy import get_source_node_id_from_cache
 from onyx.redis.redis_hierarchy import HierarchyNodeCacheEntry
 from onyx.redis.redis_pool import get_redis_client
 from onyx.redis.redis_pool import get_redis_replica_client
@@ -113,6 +118,38 @@ class PruneCallback(IndexingCallbackBase):
         super().progress(tag, amount)
 
 
+def _resolve_and_update_document_parents(
+    db_session: Session,
+    redis_client: Redis,
+    source: DocumentSource,
+    raw_id_to_parent: dict[str, str | None],
+) -> None:
+    """Resolve parent_hierarchy_raw_node_id → parent_hierarchy_node_id for
+    each document and bulk-update the DB. Mirrors the resolution logic in
+    run_docfetching.py."""
+    source_node_id = get_source_node_id_from_cache(redis_client, db_session, source)
+
+    resolved: dict[str, int | None] = {}
+    for doc_id, raw_parent_id in raw_id_to_parent.items():
+        if raw_parent_id is None:
+            continue
+        node_id, found = get_node_id_from_raw_id(redis_client, source, raw_parent_id)
+        resolved[doc_id] = node_id if found else source_node_id
+
+    if not resolved:
+        return
+
+    update_document_parent_hierarchy_nodes(
+        db_session=db_session,
+        doc_parent_map=resolved,
+        commit=True,
+    )
+    task_logger.info(
+        f"Pruning: resolved and updated parent hierarchy for "
+        f"{len(resolved)} documents (source={source.value})"
+    )
+
+
 """Jobs / utils for kicking off pruning tasks."""
@@ -535,22 +572,22 @@ def connector_pruning_generator_task(
         extraction_result = extract_ids_from_runnable_connector(
             runnable_connector, callback
         )
-        all_connector_doc_ids = extraction_result.doc_ids
+        all_connector_doc_ids = extraction_result.raw_id_to_parent
 
         # Process hierarchy nodes (same as docfetching):
         # upsert to Postgres and cache in Redis
+        source = cc_pair.connector.source
+        redis_client = get_redis_client(tenant_id=tenant_id)
+
         if extraction_result.hierarchy_nodes:
             is_connector_public = cc_pair.access_type == AccessType.PUBLIC
 
-            redis_client = get_redis_client(tenant_id=tenant_id)
-            ensure_source_node_exists(
-                redis_client, db_session, cc_pair.connector.source
-            )
+            ensure_source_node_exists(redis_client, db_session, source)
 
             upserted_nodes = upsert_hierarchy_nodes_batch(
                 db_session=db_session,
                 nodes=extraction_result.hierarchy_nodes,
-                source=cc_pair.connector.source,
+                source=source,
                 commit=True,
                 is_connector_public=is_connector_public,
             )
@@ -561,7 +598,7 @@ def connector_pruning_generator_task(
             ]
             cache_hierarchy_nodes_batch(
                 redis_client=redis_client,
-                source=cc_pair.connector.source,
+                source=source,
                 entries=cache_entries,
             )
@@ -570,6 +607,26 @@ def connector_pruning_generator_task(
                 f"hierarchy nodes for cc_pair={cc_pair_id}"
             )
 
+        ensure_source_node_exists(redis_client, db_session, source)
+        # Resolve parent_hierarchy_raw_node_id → parent_hierarchy_node_id
+        # and bulk-update documents, mirroring the docfetching resolution
+        _resolve_and_update_document_parents(
+            db_session=db_session,
+            redis_client=redis_client,
+            source=source,
+            raw_id_to_parent=all_connector_doc_ids,
+        )
+
+        # Link hierarchy nodes to documents for sources where pages can be
+        # both hierarchy nodes AND documents (e.g. Notion, Confluence)
+        all_doc_id_list = list(all_connector_doc_ids.keys())
+        link_hierarchy_nodes_to_documents(
+            db_session=db_session,
+            document_ids=all_doc_id_list,
+            source=source,
+            commit=True,
+        )
+
         # a list of docs in our local index
         all_indexed_document_ids = {
             doc.id
@@ -581,7 +638,9 @@ def connector_pruning_generator_task(
         }
 
         # generate list of docs to remove (no longer in the source)
-        doc_ids_to_remove = list(all_indexed_document_ids - all_connector_doc_ids)
+        doc_ids_to_remove = list(
+            all_indexed_document_ids - all_connector_doc_ids.keys()
+        )
 
         task_logger.info(
             "Pruning set collected: "
|
||||
if include_permissions
|
||||
else None
|
||||
),
|
||||
parent_hierarchy_raw_node_id=self._get_parent_hierarchy_raw_id(
|
||||
page
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
@@ -992,6 +995,7 @@ class ConfluenceConnector(
|
||||
if include_permissions
|
||||
else None
|
||||
),
|
||||
parent_hierarchy_raw_node_id=page_id,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -781,4 +781,5 @@ def build_slim_document(
     return SlimDocument(
         id=onyx_document_id_from_drive_file(file),
         external_access=external_access,
+        parent_hierarchy_raw_node_id=(file.get("parents") or [None])[0],
     )
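The Drive hunk uses the `(x or [default])[0]` idiom so a missing `parents` key and an empty list are both handled by one expression:

```python
# Illustrative file dicts; real Drive API responses carry many more fields.
file_with_parent = {"parents": ["folder-123"]}
file_without_parent: dict[str, list[str]] = {}

assert (file_with_parent.get("parents") or [None])[0] == "folder-123"
assert (file_without_parent.get("parents") or [None])[0] is None
```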
@@ -902,6 +902,11 @@ class JiraConnector(
                     external_access=self._get_project_permissions(
                         project_key, add_prefix=False
                     ),
+                    parent_hierarchy_raw_node_id=(
+                        self._get_parent_hierarchy_raw_node_id(issue, project_key)
+                        if project_key
+                        else None
+                    ),
                 )
             )
             current_offset += 1
@@ -385,6 +385,7 @@ class IndexingDocument(Document):
 class SlimDocument(BaseModel):
     id: str
     external_access: ExternalAccess | None = None
+    parent_hierarchy_raw_node_id: str | None = None
 
 
 class HierarchyNode(BaseModel):
@@ -772,6 +772,7 @@ def _convert_driveitem_to_slim_document(
     drive_name: str,
     ctx: ClientContext,
     graph_client: GraphClient,
+    parent_hierarchy_raw_node_id: str | None = None,
 ) -> SlimDocument:
     if driveitem.id is None:
         raise ValueError("DriveItem ID is required")
@@ -787,11 +788,15 @@ def _convert_driveitem_to_slim_document(
     return SlimDocument(
         id=driveitem.id,
         external_access=external_access,
+        parent_hierarchy_raw_node_id=parent_hierarchy_raw_node_id,
     )
 
 
 def _convert_sitepage_to_slim_document(
-    site_page: dict[str, Any], ctx: ClientContext | None, graph_client: GraphClient
+    site_page: dict[str, Any],
+    ctx: ClientContext | None,
+    graph_client: GraphClient,
+    parent_hierarchy_raw_node_id: str | None = None,
 ) -> SlimDocument:
     """Convert a SharePoint site page to a SlimDocument object."""
     if site_page.get("id") is None:
@@ -808,6 +813,7 @@ def _convert_sitepage_to_slim_document(
     return SlimDocument(
         id=id,
         external_access=external_access,
+        parent_hierarchy_raw_node_id=parent_hierarchy_raw_node_id,
     )
@@ -1594,12 +1600,22 @@ class SharepointConnector(
                 )
             )
 
+            parent_hierarchy_url: str | None = None
+            if drive_web_url:
+                parent_hierarchy_url = self._get_parent_hierarchy_url(
+                    site_url, drive_web_url, drive_name, driveitem
+                )
+
             try:
                 logger.debug(f"Processing: {driveitem.web_url}")
                 ctx = self._create_rest_client_context(site_descriptor.url)
                 doc_batch.append(
                     _convert_driveitem_to_slim_document(
-                        driveitem, drive_name, ctx, self.graph_client
+                        driveitem,
+                        drive_name,
+                        ctx,
+                        self.graph_client,
+                        parent_hierarchy_raw_node_id=parent_hierarchy_url,
                     )
                 )
             except Exception as e:
@@ -1619,7 +1635,10 @@ class SharepointConnector(
                 ctx = self._create_rest_client_context(site_descriptor.url)
                 doc_batch.append(
                     _convert_sitepage_to_slim_document(
-                        site_page, ctx, self.graph_client
+                        site_page,
+                        ctx,
+                        self.graph_client,
+                        parent_hierarchy_raw_node_id=site_descriptor.url,
                     )
                 )
             if len(doc_batch) >= SLIM_BATCH_SIZE:
@@ -565,6 +565,7 @@ def _get_all_doc_ids(
                         channel_id=channel_id, thread_ts=message["ts"]
                     ),
                     external_access=external_access,
+                    parent_hierarchy_raw_node_id=channel_id,
                 )
             )
@@ -13,6 +13,7 @@ from sqlalchemy.orm import aliased
 from sqlalchemy.orm import selectinload
 from sqlalchemy.orm import Session
 
+from onyx.configs.app_configs import DISABLE_VECTOR_DB
 from onyx.db.connector_credential_pair import get_cc_pair_groups_for_ids
 from onyx.db.connector_credential_pair import get_connector_credential_pairs
 from onyx.db.enums import AccessType
@@ -246,6 +247,7 @@ def insert_document_set(
         description=document_set_creation_request.description,
         user_id=user_id,
         is_public=document_set_creation_request.is_public,
+        is_up_to_date=DISABLE_VECTOR_DB,
         time_last_modified_by_user=func.now(),
     )
     db_session.add(new_document_set_row)
@@ -336,7 +338,8 @@ def update_document_set(
     )
 
     document_set_row.description = document_set_update_request.description
-    document_set_row.is_up_to_date = False
+    if not DISABLE_VECTOR_DB:
+        document_set_row.is_up_to_date = False
     document_set_row.is_public = document_set_update_request.is_public
     document_set_row.time_last_modified_by_user = func.now()
     versioned_private_doc_set_fn = fetch_versioned_implementation(
@@ -1,5 +1,7 @@
 """CRUD operations for HierarchyNode."""
 
+from collections import defaultdict
+
 from sqlalchemy import select
 from sqlalchemy.orm import Session
 
@@ -525,6 +527,53 @@ def get_document_parent_hierarchy_node_ids(
     return {doc_id: parent_id for doc_id, parent_id in results}
 
 
+def update_document_parent_hierarchy_nodes(
+    db_session: Session,
+    doc_parent_map: dict[str, int | None],
+    commit: bool = True,
+) -> int:
+    """Bulk-update Document.parent_hierarchy_node_id for multiple documents.
+
+    Only updates rows whose current value differs from the desired value to
+    avoid unnecessary writes.
+
+    Args:
+        db_session: SQLAlchemy session
+        doc_parent_map: Mapping of document_id → desired parent_hierarchy_node_id
+        commit: Whether to commit the transaction
+
+    Returns:
+        Number of documents actually updated
+    """
+    if not doc_parent_map:
+        return 0
+
+    doc_ids = list(doc_parent_map.keys())
+    existing = get_document_parent_hierarchy_node_ids(db_session, doc_ids)
+
+    by_parent: dict[int | None, list[str]] = defaultdict(list)
+    for doc_id, desired_parent_id in doc_parent_map.items():
+        current = existing.get(doc_id)
+        if current == desired_parent_id or doc_id not in existing:
+            continue
+        by_parent[desired_parent_id].append(doc_id)
+
+    updated = 0
+    for desired_parent_id, ids in by_parent.items():
+        db_session.query(Document).filter(Document.id.in_(ids)).update(
+            {Document.parent_hierarchy_node_id: desired_parent_id},
+            synchronize_session=False,
+        )
+        updated += len(ids)
+
+    if commit:
+        db_session.commit()
+    elif updated:
+        db_session.flush()
+
+    return updated
+
+
 def update_hierarchy_node_permissions(
     db_session: Session,
     raw_node_id: str,
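The core of `update_document_parent_hierarchy_nodes` is the bucketing step: documents are grouped by desired parent so each distinct parent costs one `UPDATE ... WHERE id IN (...)` instead of one statement per document, and rows already at the desired value are skipped. The step in isolation, with made-up IDs:

```python
from collections import defaultdict

doc_parent_map: dict[str, int | None] = {"d1": 7, "d2": 7, "d3": None, "d4": 9}
existing: dict[str, int | None] = {"d1": 7, "d2": 3, "d3": 5, "d4": 2}

by_parent: dict[int | None, list[str]] = defaultdict(list)
for doc_id, desired in doc_parent_map.items():
    if existing.get(doc_id) == desired or doc_id not in existing:
        continue  # already correct, or an unknown document: no write needed
    by_parent[desired].append(doc_id)

# At most one UPDATE per distinct parent: 7 -> ["d2"], None -> ["d3"], 9 -> ["d4"]
assert dict(by_parent) == {7: ["d2"], None: ["d3"], 9: ["d4"]}
```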
@@ -129,7 +129,7 @@ def get_current_search_settings(db_session: Session) -> SearchSettings:
     latest_settings = result.scalars().first()
 
     if not latest_settings:
-        raise RuntimeError("No search settings specified, DB is not in a valid state")
+        raise RuntimeError("No search settings specified; DB is not in a valid state.")
     return latest_settings
@@ -32,9 +32,6 @@ def get_multipass_config(search_settings: SearchSettings) -> MultipassConfig:
     Determines whether to enable multipass and large chunks by examining
     the current search settings and the embedder configuration.
     """
-    if not search_settings:
-        return MultipassConfig(multipass_indexing=False, enable_large_chunks=False)
-
     multipass = should_use_multipass(search_settings)
     enable_large_chunks = SearchSettings.can_use_large_chunks(
         multipass, search_settings.model_name, search_settings.provider_type
@@ -26,11 +26,10 @@ def get_default_document_index(
     To be used for retrieval only. Indexing should be done through both indices
     until Vespa is deprecated.
 
     Pre-existing docstring for this function, although secondary indices are not
     currently supported:
     Primary index is the index that is used for querying/updating etc. Secondary
     index is for when both the currently used index and the upcoming index both
-    need to be updated, updates are applied to both indices.
+    need to be updated. Updates are applied to both indices.
     WARNING: In that case, get_all_document_indices should be used.
     """
     if DISABLE_VECTOR_DB:
         return DisabledDocumentIndex(
@@ -51,11 +50,26 @@ def get_default_document_index(
     opensearch_retrieval_enabled = get_opensearch_retrieval_state(db_session)
     if opensearch_retrieval_enabled:
         indexing_setting = IndexingSetting.from_db_model(search_settings)
+        secondary_indexing_setting = (
+            IndexingSetting.from_db_model(secondary_search_settings)
+            if secondary_search_settings
+            else None
+        )
         return OpenSearchOldDocumentIndex(
             index_name=search_settings.index_name,
             embedding_dim=indexing_setting.final_embedding_dim,
             embedding_precision=indexing_setting.embedding_precision,
             secondary_index_name=secondary_index_name,
+            secondary_embedding_dim=(
+                secondary_indexing_setting.final_embedding_dim
+                if secondary_indexing_setting
+                else None
+            ),
+            secondary_embedding_precision=(
+                secondary_indexing_setting.embedding_precision
+                if secondary_indexing_setting
+                else None
+            ),
             large_chunks_enabled=search_settings.large_chunks_enabled,
             secondary_large_chunks_enabled=secondary_large_chunks_enabled,
             multitenant=MULTI_TENANT,
@@ -86,8 +100,7 @@ def get_all_document_indices(
     Used for indexing only. Until Vespa is deprecated we will index into both
     document indices. Retrieval is done through only one index however.
 
-    Large chunks and secondary indices are not currently supported so we
-    hardcode appropriate values.
+    Large chunks are not currently supported so we hardcode appropriate values.
 
     NOTE: Make sure the Vespa index object is returned first. In the rare event
     that there is some conflict between indexing and the migration task, it is
@@ -123,13 +136,36 @@ def get_all_document_indices(
     opensearch_document_index: OpenSearchOldDocumentIndex | None = None
     if ENABLE_OPENSEARCH_INDEXING_FOR_ONYX:
         indexing_setting = IndexingSetting.from_db_model(search_settings)
+        secondary_indexing_setting = (
+            IndexingSetting.from_db_model(secondary_search_settings)
+            if secondary_search_settings
+            else None
+        )
         opensearch_document_index = OpenSearchOldDocumentIndex(
             index_name=search_settings.index_name,
             embedding_dim=indexing_setting.final_embedding_dim,
             embedding_precision=indexing_setting.embedding_precision,
-            secondary_index_name=None,
-            large_chunks_enabled=False,
-            secondary_large_chunks_enabled=None,
+            secondary_index_name=(
+                secondary_search_settings.index_name
+                if secondary_search_settings
+                else None
+            ),
+            secondary_embedding_dim=(
+                secondary_indexing_setting.final_embedding_dim
+                if secondary_indexing_setting
+                else None
+            ),
+            secondary_embedding_precision=(
+                secondary_indexing_setting.embedding_precision
+                if secondary_indexing_setting
+                else None
+            ),
+            large_chunks_enabled=search_settings.large_chunks_enabled,
+            secondary_large_chunks_enabled=(
+                secondary_search_settings.large_chunks_enabled
+                if secondary_search_settings
+                else None
+            ),
             multitenant=MULTI_TENANT,
             httpx_client=httpx_client,
         )
@@ -271,6 +271,9 @@ class OpenSearchOldDocumentIndex(OldDocumentIndex):
|
||||
embedding_dim: int,
|
||||
embedding_precision: EmbeddingPrecision,
|
||||
secondary_index_name: str | None,
|
||||
secondary_embedding_dim: int | None,
|
||||
secondary_embedding_precision: EmbeddingPrecision | None,
|
||||
# NOTE: We do not support large chunks right now.
|
||||
large_chunks_enabled: bool, # noqa: ARG002
|
||||
secondary_large_chunks_enabled: bool | None, # noqa: ARG002
|
||||
multitenant: bool = False,
|
||||
@@ -286,12 +289,25 @@ class OpenSearchOldDocumentIndex(OldDocumentIndex):
|
||||
f"Expected {MULTI_TENANT}, got {multitenant}."
|
||||
)
|
||||
tenant_id = get_current_tenant_id()
|
||||
tenant_state = TenantState(tenant_id=tenant_id, multitenant=multitenant)
|
||||
self._real_index = OpenSearchDocumentIndex(
|
||||
tenant_state=TenantState(tenant_id=tenant_id, multitenant=multitenant),
|
||||
tenant_state=tenant_state,
|
||||
index_name=index_name,
|
||||
embedding_dim=embedding_dim,
|
||||
embedding_precision=embedding_precision,
|
||||
)
|
||||
self._secondary_real_index: OpenSearchDocumentIndex | None = None
|
||||
if self.secondary_index_name:
|
||||
if secondary_embedding_dim is None or secondary_embedding_precision is None:
|
||||
raise ValueError(
|
||||
"Bug: Secondary index embedding dimension and precision are not set."
|
||||
)
|
||||
self._secondary_real_index = OpenSearchDocumentIndex(
|
||||
tenant_state=tenant_state,
|
||||
index_name=self.secondary_index_name,
|
||||
embedding_dim=secondary_embedding_dim,
|
||||
embedding_precision=secondary_embedding_precision,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def register_multitenant_indices(
|
||||
@@ -307,19 +323,38 @@ class OpenSearchOldDocumentIndex(OldDocumentIndex):
|
||||
self,
|
||||
primary_embedding_dim: int,
|
||||
primary_embedding_precision: EmbeddingPrecision,
|
||||
secondary_index_embedding_dim: int | None, # noqa: ARG002
|
||||
secondary_index_embedding_precision: EmbeddingPrecision | None, # noqa: ARG002
|
||||
secondary_index_embedding_dim: int | None,
|
||||
secondary_index_embedding_precision: EmbeddingPrecision | None,
|
||||
) -> None:
|
||||
# Only handle primary index for now, ignore secondary.
|
||||
return self._real_index.verify_and_create_index_if_necessary(
|
||||
self._real_index.verify_and_create_index_if_necessary(
|
||||
primary_embedding_dim, primary_embedding_precision
|
||||
)
|
||||
if self.secondary_index_name:
|
||||
if (
|
||||
secondary_index_embedding_dim is None
|
||||
or secondary_index_embedding_precision is None
|
||||
):
|
||||
raise ValueError(
|
||||
"Bug: Secondary index embedding dimension and precision are not set."
|
||||
)
|
||||
assert (
|
||||
self._secondary_real_index is not None
|
||||
), "Bug: Secondary index is not initialized."
|
||||
self._secondary_real_index.verify_and_create_index_if_necessary(
|
||||
secondary_index_embedding_dim, secondary_index_embedding_precision
|
||||
)
|
||||
|
||||
def index(
|
||||
self,
|
||||
chunks: list[DocMetadataAwareIndexChunk],
|
||||
index_batch_params: IndexBatchParams,
|
||||
) -> set[OldDocumentInsertionRecord]:
|
||||
"""
|
||||
NOTE: Do NOT consider the secondary index here. A separate indexing
|
||||
pipeline will be responsible for indexing to the secondary index. This
|
||||
design is not ideal and we should reconsider this when revamping index
|
||||
swapping.
|
||||
"""
|
||||
# Convert IndexBatchParams to IndexingMetadata.
|
||||
chunk_counts: dict[str, IndexingMetadata.ChunkCounts] = {}
|
||||
for doc_id in index_batch_params.doc_id_to_new_chunk_cnt:
|
||||
@@ -351,7 +386,20 @@ class OpenSearchOldDocumentIndex(OldDocumentIndex):
|
||||
tenant_id: str, # noqa: ARG002
|
||||
chunk_count: int | None,
|
||||
) -> int:
|
||||
return self._real_index.delete(doc_id, chunk_count)
|
||||
"""
|
||||
NOTE: Remember to handle the secondary index here. There is no separate
|
||||
pipeline for deleting chunks in the secondary index. This design is not
|
||||
ideal and we should reconsider this when revamping index swapping.
|
||||
"""
|
||||
total_chunks_deleted = self._real_index.delete(doc_id, chunk_count)
|
||||
if self.secondary_index_name:
|
||||
assert (
|
||||
self._secondary_real_index is not None
|
||||
), "Bug: Secondary index is not initialized."
|
||||
total_chunks_deleted += self._secondary_real_index.delete(
|
||||
doc_id, chunk_count
|
||||
)
|
||||
return total_chunks_deleted
|
||||
|
||||
def update_single(
|
||||
self,
|
||||
@@ -362,6 +410,11 @@ class OpenSearchOldDocumentIndex(OldDocumentIndex):
|
||||
fields: VespaDocumentFields | None,
|
||||
user_fields: VespaDocumentUserFields | None,
|
||||
) -> None:
|
||||
"""
|
||||
NOTE: Remember to handle the secondary index here. There is no separate
|
||||
pipeline for updating chunks in the secondary index. This design is not
|
||||
ideal and we should reconsider this when revamping index swapping.
|
||||
"""
|
||||
if fields is None and user_fields is None:
|
||||
logger.warning(
|
||||
f"Tried to update document {doc_id} with no updated fields or user fields."
|
||||
@@ -392,6 +445,11 @@ class OpenSearchOldDocumentIndex(OldDocumentIndex):
|
||||
|
||||
try:
|
||||
self._real_index.update([update_request])
|
||||
if self.secondary_index_name:
|
||||
assert (
|
||||
self._secondary_real_index is not None
|
||||
), "Bug: Secondary index is not initialized."
|
||||
self._secondary_real_index.update([update_request])
|
||||
except NotFoundError:
|
||||
logger.exception(
|
||||
f"Tried to update document {doc_id} but at least one of its chunks was not found in OpenSearch. "
|
||||
|
||||
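The hunks above pin down the swap-time contract for the OpenSearch wrapper: index() writes only to the primary index (a separate pipeline backfills the secondary), while update and delete must touch both so the in-progress secondary never keeps stale chunks. A minimal sketch of that asymmetry, using an Index stand-in invented here for illustration (the real classes are OpenSearchDocumentIndex and its wrapper):

class Index:
    """Illustrative stand-in for a single document index."""

    def __init__(self, name: str) -> None:
        self.name = name

    def delete(self, doc_id: str, chunk_count: int | None) -> int:
        return chunk_count or 0


class SwappingIndex:
    """Sketch of the primary/secondary fan-out used during index swaps."""

    def __init__(self, primary: Index, secondary: Index | None) -> None:
        self.primary = primary
        self.secondary = secondary

    def index(self, chunks: list) -> None:
        # Writes go ONLY to the primary; a separate indexing pipeline
        # is responsible for populating the secondary index.
        pass

    def delete(self, doc_id: str, chunk_count: int | None) -> int:
        # Deletes (and updates) must fan out, or the secondary index
        # would keep stale chunks after the swap completes.
        total = self.primary.delete(doc_id, chunk_count)
        if self.secondary is not None:
            total += self.secondary.delete(doc_id, chunk_count)
        return total
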
@@ -465,6 +465,12 @@ class VespaIndex(DocumentIndex):
chunks: list[DocMetadataAwareIndexChunk],
index_batch_params: IndexBatchParams,
) -> set[OldDocumentInsertionRecord]:
"""
NOTE: Do NOT consider the secondary index here. A separate indexing
pipeline will be responsible for indexing to the secondary index. This
design is not ideal and we should reconsider this when revamping index
swapping.
"""
if len(index_batch_params.doc_id_to_previous_chunk_cnt) != len(
index_batch_params.doc_id_to_new_chunk_cnt
):
@@ -659,6 +665,10 @@ class VespaIndex(DocumentIndex):
"""Note: if the document id does not exist, the update will be a no-op and the
function will complete with no errors or exceptions.
Handle other exceptions if you wish to implement retry behavior

NOTE: Remember to handle the secondary index here. There is no separate
pipeline for updating chunks in the secondary index. This design is not
ideal and we should reconsider this when revamping index swapping.
"""
if fields is None and user_fields is None:
logger.warning(
@@ -679,13 +689,6 @@ class VespaIndex(DocumentIndex):
f"Bug: Tenant ID mismatch. Expected {tenant_state.tenant_id}, got {tenant_id}."
)

vespa_document_index = VespaDocumentIndex(
index_name=self.index_name,
tenant_state=tenant_state,
large_chunks_enabled=self.large_chunks_enabled,
httpx_client=self.httpx_client,
)

project_ids: set[int] | None = None
if user_fields is not None and user_fields.user_projects is not None:
project_ids = set(user_fields.user_projects)
@@ -705,7 +708,20 @@ class VespaIndex(DocumentIndex):
persona_ids=persona_ids,
)

vespa_document_index.update([update_request])
indices = [self.index_name]
if self.secondary_index_name:
indices.append(self.secondary_index_name)

for index_name in indices:
vespa_document_index = VespaDocumentIndex(
index_name=index_name,
tenant_state=tenant_state,
large_chunks_enabled=self.index_to_large_chunks_enabled.get(
index_name, False
),
httpx_client=self.httpx_client,
)
vespa_document_index.update([update_request])

def delete_single(
self,
@@ -714,6 +730,11 @@ class VespaIndex(DocumentIndex):
tenant_id: str,
chunk_count: int | None,
) -> int:
"""
NOTE: Remember to handle the secondary index here. There is no separate
pipeline for deleting chunks in the secondary index. This design is not
ideal and we should reconsider this when revamping index swapping.
"""
tenant_state = TenantState(
tenant_id=get_current_tenant_id(),
multitenant=MULTI_TENANT,
@@ -726,13 +747,25 @@ class VespaIndex(DocumentIndex):
raise ValueError(
f"Bug: Tenant ID mismatch. Expected {tenant_state.tenant_id}, got {tenant_id}."
)
vespa_document_index = VespaDocumentIndex(
index_name=self.index_name,
tenant_state=tenant_state,
large_chunks_enabled=self.large_chunks_enabled,
httpx_client=self.httpx_client,
)
return vespa_document_index.delete(document_id=doc_id, chunk_count=chunk_count)
indices = [self.index_name]
if self.secondary_index_name:
indices.append(self.secondary_index_name)

total_chunks_deleted = 0
for index_name in indices:
vespa_document_index = VespaDocumentIndex(
index_name=index_name,
tenant_state=tenant_state,
large_chunks_enabled=self.index_to_large_chunks_enabled.get(
index_name, False
),
httpx_client=self.httpx_client,
)
total_chunks_deleted += vespa_document_index.delete(
document_id=doc_id, chunk_count=chunk_count
)

return total_chunks_deleted

def id_based_retrieval(
self,

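The Vespa path applies the same fan-out, but a fresh VespaDocumentIndex is built per index name, and per-index settings now come from a map rather than a single attribute. The lookup's default matters; a small illustration (the mapping keys and index names here are made up, the real map is VespaIndex.index_to_large_chunks_enabled):

# Sketch of the per-index settings lookup used in the loops above.
index_to_large_chunks_enabled = {"index_primary": True}

for index_name in ["index_primary", "index_secondary"]:
    # .get(..., False) keeps a missing secondary entry from inheriting
    # the primary index's large-chunk behavior.
    print(index_name, index_to_large_chunks_enabled.get(index_name, False))
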
@@ -7424,9 +7424,9 @@
}
},
"node_modules/hono": {
"version": "4.11.7",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.11.7.tgz",
"integrity": "sha512-l7qMiNee7t82bH3SeyUCt9UF15EVmaBvsppY2zQtrbIhl/yzBTny+YUxsVjSjQ6gaqaeVtZmGocom8TzBlA4Yw==",
"version": "4.12.5",
"resolved": "https://registry.npmjs.org/hono/-/hono-4.12.5.tgz",
"integrity": "sha512-3qq+FUBtlTHhtYxbxheZgY8NIFnkkC/MR8u5TTsr7YZ3wixryQ3cCwn3iZbg8p8B88iDBBAYSfZDS75t8MN7Vg==",
"license": "MIT",
"engines": {
"node": ">=16.9.0"

@@ -11,6 +11,7 @@ from onyx.configs.app_configs import DISABLE_VECTOR_DB
from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryTask
from onyx.db.document_set import check_document_sets_are_public
from onyx.db.document_set import delete_document_set as db_delete_document_set
from onyx.db.document_set import fetch_all_document_sets_for_user
from onyx.db.document_set import get_document_set_by_id
from onyx.db.document_set import insert_document_set
@@ -142,7 +143,10 @@ def delete_document_set(
except Exception as e:
raise HTTPException(status_code=400, detail=str(e))

if not DISABLE_VECTOR_DB:
if DISABLE_VECTOR_DB:
db_session.refresh(document_set)
db_delete_document_set(document_set, db_session)
else:
client_app.send_task(
OnyxCeleryTask.CHECK_FOR_VESPA_SYNC_TASK,
kwargs={"tenant_id": tenant_id},

@@ -1,6 +1,5 @@
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from sqlalchemy.orm import Session

from onyx.auth.users import current_admin_user
@@ -11,6 +10,8 @@ from onyx.db.llm import upsert_cloud_embedding_provider
from onyx.db.models import User
from onyx.db.search_settings import get_all_search_settings
from onyx.db.search_settings import get_current_db_embedding_provider
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.indexing.models import EmbeddingModelDetail
from onyx.natural_language_processing.search_nlp_models import EmbeddingModel
from onyx.server.manage.embedding.models import CloudEmbeddingProvider
@@ -59,7 +60,7 @@ def test_embedding_configuration(
except Exception as e:
error_msg = "An error occurred while testing your embedding model. Please check your configuration."
logger.error(f"{error_msg} Error message: {e}", exc_info=True)
raise HTTPException(status_code=400, detail=error_msg)
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, error_msg)


@admin_router.get("", response_model=list[EmbeddingModelDetail])
@@ -93,8 +94,9 @@ def delete_embedding_provider(
embedding_provider is not None
and provider_type == embedding_provider.provider_type
):
raise HTTPException(
status_code=400, detail="You can't delete a currently active model"
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"You can't delete a currently active model",
)

remove_embedding_provider(db_session, provider_type=provider_type)

@@ -11,7 +11,6 @@ from botocore.exceptions import ClientError
from botocore.exceptions import NoCredentialsError
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from fastapi import Query
from pydantic import ValidationError
from sqlalchemy.orm import Session
@@ -38,6 +37,8 @@ from onyx.db.llm import upsert_llm_provider
from onyx.db.llm import validate_persona_ids_exist
from onyx.db.models import User
from onyx.db.persona import user_can_access_persona
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.llm.factory import get_default_llm
from onyx.llm.factory import get_llm
from onyx.llm.factory import get_max_input_tokens_from_llm_provider
@@ -186,7 +187,7 @@ def _validate_llm_provider_change(
Only enforced in MULTI_TENANT mode.

Raises:
HTTPException: If api_base or custom_config changed without changing API key
OnyxError: If api_base or custom_config changed without changing API key
"""
if not MULTI_TENANT or api_key_changed:
return
@@ -200,9 +201,9 @@ def _validate_llm_provider_change(
)

if api_base_changed or custom_config_changed:
raise HTTPException(
status_code=400,
detail="API base and/or custom config cannot be changed without changing the API key",
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"API base and/or custom config cannot be changed without changing the API key",
)


@@ -222,7 +223,7 @@ def fetch_llm_provider_options(
for well_known_llm in well_known_llms:
if well_known_llm.name == provider_name:
return well_known_llm
raise HTTPException(status_code=404, detail=f"Provider {provider_name} not found")
raise OnyxError(OnyxErrorCode.NOT_FOUND, f"Provider {provider_name} not found")


@admin_router.post("/test")
@@ -281,7 +282,7 @@ def test_llm_configuration(
error_msg = test_llm(llm)

if error_msg:
raise HTTPException(status_code=400, detail=error_msg)
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, error_msg)


@admin_router.post("/test/default")
@@ -292,11 +293,11 @@ def test_default_provider(
llm = get_default_llm()
except ValueError:
logger.exception("Failed to fetch default LLM Provider")
raise HTTPException(status_code=400, detail="No LLM Provider setup")
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, "No LLM Provider setup")

error = test_llm(llm)
if error:
raise HTTPException(status_code=400, detail=str(error))
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(error))


@admin_router.get("/provider")
@@ -362,35 +363,31 @@ def put_llm_provider(
# Check name constraints
# TODO: Once port from name to id is complete, unique name will no longer be required
if existing_provider and llm_provider_upsert_request.name != existing_provider.name:
raise HTTPException(
status_code=400,
detail="Renaming providers is not currently supported",
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"Renaming providers is not currently supported",
)

found_provider = fetch_existing_llm_provider(
name=llm_provider_upsert_request.name, db_session=db_session
)
if found_provider is not None and found_provider is not existing_provider:
raise HTTPException(
status_code=400,
detail=f"Provider with name={llm_provider_upsert_request.name} already exists",
raise OnyxError(
OnyxErrorCode.DUPLICATE_RESOURCE,
f"Provider with name={llm_provider_upsert_request.name} already exists",
)

if existing_provider and is_creation:
raise HTTPException(
status_code=400,
detail=(
f"LLM Provider with name {llm_provider_upsert_request.name} and "
f"id={llm_provider_upsert_request.id} already exists"
),
raise OnyxError(
OnyxErrorCode.DUPLICATE_RESOURCE,
f"LLM Provider with name {llm_provider_upsert_request.name} and "
f"id={llm_provider_upsert_request.id} already exists",
)
elif not existing_provider and not is_creation:
raise HTTPException(
status_code=400,
detail=(
f"LLM Provider with name {llm_provider_upsert_request.name} and "
f"id={llm_provider_upsert_request.id} does not exist"
),
raise OnyxError(
OnyxErrorCode.NOT_FOUND,
f"LLM Provider with name {llm_provider_upsert_request.name} and "
f"id={llm_provider_upsert_request.id} does not exist",
)

# SSRF Protection: Validate api_base and custom_config match stored values
@@ -415,9 +412,9 @@ def put_llm_provider(
db_session, persona_ids
)
if missing_personas:
raise HTTPException(
status_code=400,
detail=f"Invalid persona IDs: {', '.join(map(str, missing_personas))}",
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
f"Invalid persona IDs: {', '.join(map(str, missing_personas))}",
)
# Remove duplicates while preserving order
seen: set[int] = set()
@@ -473,7 +470,7 @@ def put_llm_provider(
return result
except ValueError as e:
logger.exception("Failed to upsert LLM Provider")
raise HTTPException(status_code=400, detail=str(e))
raise OnyxError(OnyxErrorCode.VALIDATION_ERROR, str(e))


@admin_router.delete("/provider/{provider_id}")
@@ -483,19 +480,19 @@ def delete_llm_provider(
_: User = Depends(current_admin_user),
db_session: Session = Depends(get_session),
) -> None:
if not force:
model = fetch_default_llm_model(db_session)

if model and model.llm_provider_id == provider_id:
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"Cannot delete the default LLM provider",
)

try:
if not force:
model = fetch_default_llm_model(db_session)

if model and model.llm_provider_id == provider_id:
raise HTTPException(
status_code=400,
detail="Cannot delete the default LLM provider",
)

remove_llm_provider(db_session, provider_id)
except ValueError as e:
raise HTTPException(status_code=404, detail=str(e))
raise OnyxError(OnyxErrorCode.NOT_FOUND, str(e))


@admin_router.post("/default")
@@ -535,9 +532,9 @@ def get_auto_config(
"""
config = fetch_llm_recommendations_from_github()
if not config:
raise HTTPException(
status_code=502,
detail="Failed to fetch configuration from GitHub",
raise OnyxError(
OnyxErrorCode.BAD_GATEWAY,
"Failed to fetch configuration from GitHub",
)
return config.model_dump()

@@ -694,13 +691,13 @@ def list_llm_providers_for_persona(

persona = fetch_persona_with_groups(db_session, persona_id)
if not persona:
raise HTTPException(status_code=404, detail="Persona not found")
raise OnyxError(OnyxErrorCode.PERSONA_NOT_FOUND, "Persona not found")

# Verify user has access to this persona
if not user_can_access_persona(db_session, persona_id, user, get_editable=False):
raise HTTPException(
status_code=403,
detail="You don't have access to this assistant",
raise OnyxError(
OnyxErrorCode.INSUFFICIENT_PERMISSIONS,
"You don't have access to this assistant",
)

is_admin = user.role == UserRole.ADMIN
@@ -854,9 +851,9 @@ def get_bedrock_available_models(
try:
bedrock = session.client("bedrock")
except Exception as e:
raise HTTPException(
status_code=400,
detail=f"Failed to create Bedrock client: {e}. Check AWS credentials and region.",
raise OnyxError(
OnyxErrorCode.CREDENTIAL_INVALID,
f"Failed to create Bedrock client: {e}. Check AWS credentials and region.",
)

# Build model info dict from foundation models (modelId -> metadata)
@@ -975,14 +972,14 @@ def get_bedrock_available_models(
return results

except (ClientError, NoCredentialsError, BotoCoreError) as e:
raise HTTPException(
status_code=400,
detail=f"Failed to connect to AWS Bedrock: {e}",
raise OnyxError(
OnyxErrorCode.CREDENTIAL_INVALID,
f"Failed to connect to AWS Bedrock: {e}",
)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Unexpected error fetching Bedrock models: {e}",
raise OnyxError(
OnyxErrorCode.INTERNAL_ERROR,
f"Unexpected error fetching Bedrock models: {e}",
)


@@ -994,9 +991,9 @@ def _get_ollama_available_model_names(api_base: str) -> set[str]:
response.raise_for_status()
response_json = response.json()
except Exception as e:
raise HTTPException(
status_code=400,
detail=f"Failed to fetch Ollama models: {e}",
raise OnyxError(
OnyxErrorCode.BAD_GATEWAY,
f"Failed to fetch Ollama models: {e}",
)

models = response_json.get("models", [])
@@ -1013,9 +1010,9 @@ def get_ollama_available_models(

cleaned_api_base = request.api_base.strip().rstrip("/")
if not cleaned_api_base:
raise HTTPException(
status_code=400,
detail="API base URL is required to fetch Ollama models.",
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"API base URL is required to fetch Ollama models.",
)

# NOTE: most people run Ollama locally, so we don't disallow internal URLs
@@ -1024,9 +1021,9 @@ def get_ollama_available_models(
# with the same response format
model_names = _get_ollama_available_model_names(cleaned_api_base)
if not model_names:
raise HTTPException(
status_code=400,
detail="No models found from your Ollama server",
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"No models found from your Ollama server",
)

all_models_with_context_size_and_vision: list[OllamaFinalModelResponse] = []
@@ -1128,9 +1125,9 @@ def _get_openrouter_models_response(api_base: str, api_key: str) -> dict:
response.raise_for_status()
return response.json()
except Exception as e:
raise HTTPException(
status_code=400,
detail=f"Failed to fetch OpenRouter models: {e}",
raise OnyxError(
OnyxErrorCode.BAD_GATEWAY,
f"Failed to fetch OpenRouter models: {e}",
)


@@ -1151,9 +1148,9 @@ def get_openrouter_available_models(

data = response_json.get("data", [])
if not isinstance(data, list) or len(data) == 0:
raise HTTPException(
status_code=400,
detail="No models found from your OpenRouter endpoint",
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"No models found from your OpenRouter endpoint",
)

results: list[OpenRouterFinalModelResponse] = []
@@ -1188,8 +1185,9 @@ def get_openrouter_available_models(
)

if not results:
raise HTTPException(
status_code=400, detail="No compatible models found from OpenRouter"
raise OnyxError(
OnyxErrorCode.VALIDATION_ERROR,
"No compatible models found from OpenRouter",
)

sorted_results = sorted(results, key=lambda m: m.name.lower())

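Throughout these two admin files the migration is mechanical: each raise HTTPException(status_code=..., detail=...) becomes raise OnyxError(<OnyxErrorCode>, <message>). Preserving API behavior then depends on an exception handler that translates each error code back to an HTTP status. That handler is not part of this diff, so the following is only a sketch of its assumed shape, with a status mapping inferred from which codes replaced which status_code values above:

# Hypothetical sketch: OnyxError / OnyxErrorCode exist in the diff, but the
# handler and the status mapping below are assumptions for illustration only.
from enum import Enum

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse


class OnyxErrorCode(str, Enum):
    VALIDATION_ERROR = "validation_error"
    NOT_FOUND = "not_found"
    PERSONA_NOT_FOUND = "persona_not_found"
    DUPLICATE_RESOURCE = "duplicate_resource"
    INSUFFICIENT_PERMISSIONS = "insufficient_permissions"
    CREDENTIAL_INVALID = "credential_invalid"
    BAD_GATEWAY = "bad_gateway"
    INTERNAL_ERROR = "internal_error"


class OnyxError(Exception):
    def __init__(self, code: OnyxErrorCode, message: str) -> None:
        super().__init__(message)
        self.code = code
        self.message = message


# Assumed mapping, mirroring the statuses each raise replaced.
_STATUS = {
    OnyxErrorCode.VALIDATION_ERROR: 400,
    OnyxErrorCode.NOT_FOUND: 404,
    OnyxErrorCode.PERSONA_NOT_FOUND: 404,
    OnyxErrorCode.DUPLICATE_RESOURCE: 400,
    OnyxErrorCode.INSUFFICIENT_PERMISSIONS: 403,
    OnyxErrorCode.CREDENTIAL_INVALID: 400,
    OnyxErrorCode.BAD_GATEWAY: 502,
    OnyxErrorCode.INTERNAL_ERROR: 500,
}

app = FastAPI()


@app.exception_handler(OnyxError)
async def onyx_error_handler(_: Request, exc: OnyxError) -> JSONResponse:
    # Centralizes the code -> HTTP status translation that used to be
    # scattered across every endpoint.
    return JSONResponse(
        status_code=_STATUS.get(exc.code, 500),
        content={"code": exc.code.value, "detail": exc.message},
    )
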
@@ -6,8 +6,11 @@ from sqlalchemy.orm import Session

from onyx.auth.users import current_admin_user
from onyx.auth.users import current_user
from onyx.configs.app_configs import DISABLE_INDEX_UPDATE_ON_SWAP
from onyx.context.search.models import SavedSearchSettings
from onyx.context.search.models import SearchSettingsCreationRequest
from onyx.db.connector_credential_pair import get_connector_credential_pairs
from onyx.db.connector_credential_pair import resync_cc_pair
from onyx.db.engine.sql_engine import get_session
from onyx.db.index_attempt import expire_index_attempts
from onyx.db.llm import fetch_existing_llm_provider
@@ -15,20 +18,25 @@ from onyx.db.llm import update_default_contextual_model
from onyx.db.llm import update_no_default_contextual_rag_provider
from onyx.db.models import IndexModelStatus
from onyx.db.models import User
from onyx.db.search_settings import create_search_settings
from onyx.db.search_settings import delete_search_settings
from onyx.db.search_settings import get_current_search_settings
from onyx.db.search_settings import get_embedding_provider_from_provider_type
from onyx.db.search_settings import get_secondary_search_settings
from onyx.db.search_settings import update_current_search_settings
from onyx.db.search_settings import update_search_settings_status
from onyx.document_index.factory import get_all_document_indices
from onyx.document_index.factory import get_default_document_index
from onyx.file_processing.unstructured import delete_unstructured_api_key
from onyx.file_processing.unstructured import get_unstructured_api_key
from onyx.file_processing.unstructured import update_unstructured_api_key
from onyx.natural_language_processing.search_nlp_models import clean_model_name
from onyx.server.manage.embedding.models import SearchSettingsDeleteRequest
from onyx.server.manage.models import FullModelVersionResponse
from onyx.server.models import IdReturn
from onyx.server.utils_vector_db import require_vector_db
from onyx.utils.logger import setup_logger
from shared_configs.configs import ALT_INDEX_SUFFIX
from shared_configs.configs import MULTI_TENANT

router = APIRouter(prefix="/search-settings")
@@ -41,110 +49,99 @@ def set_new_search_settings(
_: User = Depends(current_admin_user),
db_session: Session = Depends(get_session), # noqa: ARG001
) -> IdReturn:
"""Creates a new EmbeddingModel row and cancels the previous secondary indexing if any
Gives an error if the same model name is used as the current or secondary index
"""
# TODO(andrei): Re-enable.
# NOTE Enable integration external dependency tests in test_search_settings.py
# when this is reenabled. They are currently skipped
logger.error("Setting new search settings is temporarily disabled.")
raise HTTPException(
status_code=status.HTTP_501_NOT_IMPLEMENTED,
detail="Setting new search settings is temporarily disabled.",
Creates a new SearchSettings row and cancels the previous secondary indexing
if any exists.
"""
if search_settings_new.index_name:
logger.warning("Index name was specified by request, this is not suggested")

# Disallow contextual RAG for cloud deployments.
if MULTI_TENANT and search_settings_new.enable_contextual_rag:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Contextual RAG disabled in Onyx Cloud",
)

# Validate cloud provider exists or create new LiteLLM provider.
if search_settings_new.provider_type is not None:
cloud_provider = get_embedding_provider_from_provider_type(
db_session, provider_type=search_settings_new.provider_type
)

if cloud_provider is None:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"No embedding provider exists for cloud embedding type {search_settings_new.provider_type}",
)

validate_contextual_rag_model(
provider_name=search_settings_new.contextual_rag_llm_provider,
model_name=search_settings_new.contextual_rag_llm_name,
db_session=db_session,
)
# if search_settings_new.index_name:
# logger.warning("Index name was specified by request, this is not suggested")

# # Disallow contextual RAG for cloud deployments
# if MULTI_TENANT and search_settings_new.enable_contextual_rag:
# raise HTTPException(
# status_code=status.HTTP_400_BAD_REQUEST,
# detail="Contextual RAG disabled in Onyx Cloud",
# )
search_settings = get_current_search_settings(db_session)

# # Validate cloud provider exists or create new LiteLLM provider
# if search_settings_new.provider_type is not None:
# cloud_provider = get_embedding_provider_from_provider_type(
# db_session, provider_type=search_settings_new.provider_type
# )
if search_settings_new.index_name is None:
# We define index name here.
index_name = f"danswer_chunk_{clean_model_name(search_settings_new.model_name)}"
if (
search_settings_new.model_name == search_settings.model_name
and not search_settings.index_name.endswith(ALT_INDEX_SUFFIX)
):
index_name += ALT_INDEX_SUFFIX
search_values = search_settings_new.model_dump()
search_values["index_name"] = index_name
new_search_settings_request = SavedSearchSettings(**search_values)
else:
new_search_settings_request = SavedSearchSettings(
**search_settings_new.model_dump()
)

# if cloud_provider is None:
# raise HTTPException(
# status_code=status.HTTP_400_BAD_REQUEST,
# detail=f"No embedding provider exists for cloud embedding type {search_settings_new.provider_type}",
# )
secondary_search_settings = get_secondary_search_settings(db_session)

# validate_contextual_rag_model(
# provider_name=search_settings_new.contextual_rag_llm_provider,
# model_name=search_settings_new.contextual_rag_llm_name,
# db_session=db_session,
# )
if secondary_search_settings:
# Cancel any background indexing jobs.
expire_index_attempts(
search_settings_id=secondary_search_settings.id, db_session=db_session
)

# search_settings = get_current_search_settings(db_session)
# Mark previous model as a past model directly.
update_search_settings_status(
search_settings=secondary_search_settings,
new_status=IndexModelStatus.PAST,
db_session=db_session,
)

# if search_settings_new.index_name is None:
# # We define index name here
# index_name = f"danswer_chunk_{clean_model_name(search_settings_new.model_name)}"
# if (
# search_settings_new.model_name == search_settings.model_name
# and not search_settings.index_name.endswith(ALT_INDEX_SUFFIX)
# ):
# index_name += ALT_INDEX_SUFFIX
# search_values = search_settings_new.model_dump()
# search_values["index_name"] = index_name
# new_search_settings_request = SavedSearchSettings(**search_values)
# else:
# new_search_settings_request = SavedSearchSettings(
# **search_settings_new.model_dump()
# )
new_search_settings = create_search_settings(
search_settings=new_search_settings_request, db_session=db_session
)

# secondary_search_settings = get_secondary_search_settings(db_session)
# Ensure the document indices have the new index immediately.
document_indices = get_all_document_indices(search_settings, new_search_settings)
for document_index in document_indices:
document_index.ensure_indices_exist(
primary_embedding_dim=search_settings.final_embedding_dim,
primary_embedding_precision=search_settings.embedding_precision,
secondary_index_embedding_dim=new_search_settings.final_embedding_dim,
secondary_index_embedding_precision=new_search_settings.embedding_precision,
)

# if secondary_search_settings:
# # Cancel any background indexing jobs
# expire_index_attempts(
# search_settings_id=secondary_search_settings.id, db_session=db_session
# )
# Pause index attempts for the currently in-use index to preserve resources.
if DISABLE_INDEX_UPDATE_ON_SWAP:
expire_index_attempts(
search_settings_id=search_settings.id, db_session=db_session
)
for cc_pair in get_connector_credential_pairs(db_session):
resync_cc_pair(
cc_pair=cc_pair,
search_settings_id=new_search_settings.id,
db_session=db_session,
)

# # Mark previous model as a past model directly
# update_search_settings_status(
# search_settings=secondary_search_settings,
# new_status=IndexModelStatus.PAST,
# db_session=db_session,
# )

# new_search_settings = create_search_settings(
# search_settings=new_search_settings_request, db_session=db_session
# )

# # Ensure Vespa has the new index immediately
# get_multipass_config(search_settings)
# get_multipass_config(new_search_settings)
# document_index = get_default_document_index(
# search_settings, new_search_settings, db_session
# )

# document_index.ensure_indices_exist(
# primary_embedding_dim=search_settings.final_embedding_dim,
# primary_embedding_precision=search_settings.embedding_precision,
# secondary_index_embedding_dim=new_search_settings.final_embedding_dim,
# secondary_index_embedding_precision=new_search_settings.embedding_precision,
# )

# # Pause index attempts for the currently in use index to preserve resources
# if DISABLE_INDEX_UPDATE_ON_SWAP:
# expire_index_attempts(
# search_settings_id=search_settings.id, db_session=db_session
# )
# for cc_pair in get_connector_credential_pairs(db_session):
# resync_cc_pair(
# cc_pair=cc_pair,
# search_settings_id=new_search_settings.id,
# db_session=db_session,
# )

# db_session.commit()
# return IdReturn(id=new_search_settings.id)
db_session.commit()
return IdReturn(id=new_search_settings.id)


@router.post("/cancel-new-embedding", dependencies=[Depends(require_vector_db)])

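The restored index-name logic deserves a worked example: names derive from the embedding model, and re-embedding with the same model alternates onto an ALT_INDEX_SUFFIX variant so the new index never collides with the live one. The sketch below uses stand-in implementations for clean_model_name and the suffix value; the real ones live behind the imports above:

ALT_INDEX_SUFFIX = "__danswer_alt_index"  # assumed value for illustration


def clean_model_name(model: str) -> str:
    # Stand-in for the real helper in onyx.natural_language_processing.
    return model.replace("/", "_").replace("-", "_").replace(".", "_")


def derive_index_name(new_model: str, current_model: str, current_index: str) -> str:
    index_name = f"danswer_chunk_{clean_model_name(new_model)}"
    # Re-indexing with the SAME model must not collide with the live
    # index, so alternate onto (or off of) the suffixed name.
    if new_model == current_model and not current_index.endswith(ALT_INDEX_SUFFIX):
        index_name += ALT_INDEX_SUFFIX
    return index_name


# Re-embedding with the same model flips to the suffixed variant:
assert derive_index_name("model-a", "model-a", "danswer_chunk_model_a").endswith(
    ALT_INDEX_SUFFIX
)
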
@@ -1,6 +1,5 @@
import datetime
import json
import os
from collections.abc import Generator
from datetime import timedelta
from uuid import UUID
@@ -61,7 +60,6 @@ from onyx.db.persona import get_persona_by_id
from onyx.db.usage import increment_usage
from onyx.db.usage import UsageType
from onyx.db.user_file import get_file_id_by_user_file_id
from onyx.file_processing.extract_file_text import docx_to_txt_filename
from onyx.file_store.file_store import get_default_file_store
from onyx.llm.constants import LlmProviderNames
from onyx.llm.factory import get_default_llm
@@ -812,18 +810,6 @@ def fetch_chat_file(
if not file_record:
raise HTTPException(status_code=404, detail="File not found")

original_file_name = file_record.display_name
if file_record.file_type.startswith(
"application/vnd.openxmlformats-officedocument.wordprocessingml.document"
):
# Check if a converted text file exists for .docx files
txt_file_name = docx_to_txt_filename(original_file_name)
txt_file_id = os.path.join(os.path.dirname(file_id), txt_file_name)
txt_file_record = file_store.read_file_record(txt_file_id)
if txt_file_record:
file_record = txt_file_record
file_id = txt_file_id

media_type = file_record.file_type
file_io = file_store.read_file(file_id, mode="b")

@@ -60,9 +60,11 @@ class Settings(BaseModel):
deep_research_enabled: bool | None = None
search_ui_enabled: bool | None = None

# Enterprise features flag - set by license enforcement at runtime
# When LICENSE_ENFORCEMENT_ENABLED=true, this reflects license status
# When LICENSE_ENFORCEMENT_ENABLED=false, defaults to False
# Whether EE features are unlocked for use.
# Depends on license status: True when the user has a valid license
# (ACTIVE, GRACE_PERIOD, PAYMENT_REMINDER), False when there's no license
# or the license is expired (GATED_ACCESS).
# This controls UI visibility of EE features (user groups, analytics, RBAC, etc.).
ee_features_enabled: bool = False

temperature_override_enabled: bool | None = False

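The rewritten comment names the license states that unlock EE features. A hypothetical helper making that rule concrete; the state names are taken from the comment, but the enum and function here are illustrations, not the project's actual license-enforcement code:

from enum import Enum


class LicenseStatus(str, Enum):
    ACTIVE = "active"
    GRACE_PERIOD = "grace_period"
    PAYMENT_REMINDER = "payment_reminder"
    GATED_ACCESS = "gated_access"


def compute_ee_features_enabled(status: LicenseStatus | None) -> bool:
    # No license at all, or an expired one (GATED_ACCESS), locks EE features.
    return status in (
        LicenseStatus.ACTIVE,
        LicenseStatus.GRACE_PERIOD,
        LicenseStatus.PAYMENT_REMINDER,
    )
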
@@ -1,3 +1,5 @@
from __future__ import annotations

import json
import time
from collections.abc import Generator
@@ -84,6 +86,19 @@ class CodeInterpreterClient:
raise ValueError("CODE_INTERPRETER_BASE_URL not configured")
self.base_url = base_url.rstrip("/")
self.session = requests.Session()
self._closed = False

def __enter__(self) -> CodeInterpreterClient:
return self

def __exit__(self, *args: object) -> None:
self.close()

def close(self) -> None:
if self._closed:
return
self.session.close()
self._closed = True

def _build_payload(
self,
@@ -177,8 +192,11 @@ class CodeInterpreterClient:
yield from self._batch_as_stream(code, stdin, timeout_ms, files)
return

response.raise_for_status()
yield from self._parse_sse(response)
try:
response.raise_for_status()
yield from self._parse_sse(response)
finally:
response.close()

def _parse_sse(
self, response: requests.Response

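The new __enter__/__exit__/close trio makes the client usable as a context manager, and the _closed flag makes close() idempotent, so an explicit close after a with block is harmless. A reduced, runnable stand-in showing just the lifecycle (only the method names mirror the hunk above; the rest of the class is trimmed away):

import requests


class Client:
    """Stand-in for CodeInterpreterClient, reduced to the lifecycle bits."""

    def __init__(self) -> None:
        self.session = requests.Session()
        self._closed = False

    def __enter__(self) -> "Client":
        return self

    def __exit__(self, *args: object) -> None:
        self.close()

    def close(self) -> None:
        # Idempotent: safe to call from __exit__ AND from explicit callers.
        if self._closed:
            return
        self.session.close()
        self._closed = True


with Client() as client:
    pass  # an exception raised here would still trigger close()
client.close()  # no-op thanks to the _closed guard
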
@@ -111,8 +111,8 @@ class PythonTool(Tool[PythonToolOverrideKwargs]):
if not server.server_enabled:
return False

client = CodeInterpreterClient()
return client.health(use_cache=True)
with CodeInterpreterClient() as client:
return client.health(use_cache=True)

def tool_definition(self) -> dict:
return {
@@ -176,196 +176,203 @@ class PythonTool(Tool[PythonToolOverrideKwargs]):
)
)

# Create Code Interpreter client
client = CodeInterpreterClient()
# Create Code Interpreter client — context manager ensures
# session.close() is called on every exit path.
with CodeInterpreterClient() as client:
# Stage chat files for execution
files_to_stage: list[FileInput] = []
for ind, chat_file in enumerate(chat_files):
file_name = chat_file.filename or f"file_{ind}"
try:
# Upload to Code Interpreter
ci_file_id = client.upload_file(chat_file.content, file_name)

# Stage for execution
files_to_stage.append({"path": file_name, "file_id": ci_file_id})

logger.info(f"Staged file for Python execution: {file_name}")

except Exception as e:
logger.warning(f"Failed to stage file {file_name}: {e}")

# Stage chat files for execution
files_to_stage: list[FileInput] = []
for ind, chat_file in enumerate(chat_files):
file_name = chat_file.filename or f"file_{ind}"
try:
# Upload to Code Interpreter
ci_file_id = client.upload_file(chat_file.content, file_name)
logger.debug(f"Executing code: {code}")

# Stage for execution
files_to_stage.append({"path": file_name, "file_id": ci_file_id})
# Execute code with streaming (falls back to batch if unavailable)
stdout_parts: list[str] = []
stderr_parts: list[str] = []
result_event: StreamResultEvent | None = None

logger.info(f"Staged file for Python execution: {file_name}")
for event in client.execute_streaming(
code=code,
timeout_ms=CODE_INTERPRETER_DEFAULT_TIMEOUT_MS,
files=files_to_stage or None,
):
if isinstance(event, StreamOutputEvent):
if event.stream == "stdout":
stdout_parts.append(event.data)
else:
stderr_parts.append(event.data)
# Emit incremental delta to frontend
self.emitter.emit(
Packet(
placement=placement,
obj=PythonToolDelta(
stdout=(
event.data if event.stream == "stdout" else ""
),
stderr=(
event.data if event.stream == "stderr" else ""
),
),
)
)
elif isinstance(event, StreamResultEvent):
result_event = event
elif isinstance(event, StreamErrorEvent):
raise RuntimeError(f"Code interpreter error: {event.message}")

except Exception as e:
logger.warning(f"Failed to stage file {file_name}: {e}")
if result_event is None:
raise RuntimeError(
"Code interpreter stream ended without a result event"
)

try:
logger.debug(f"Executing code: {code}")
full_stdout = "".join(stdout_parts)
full_stderr = "".join(stderr_parts)

# Execute code with streaming (falls back to batch if unavailable)
stdout_parts: list[str] = []
stderr_parts: list[str] = []
result_event: StreamResultEvent | None = None
# Truncate output for LLM consumption
truncated_stdout = _truncate_output(
full_stdout, CODE_INTERPRETER_MAX_OUTPUT_LENGTH, "stdout"
)
truncated_stderr = _truncate_output(
full_stderr, CODE_INTERPRETER_MAX_OUTPUT_LENGTH, "stderr"
)

for event in client.execute_streaming(
code=code,
timeout_ms=CODE_INTERPRETER_DEFAULT_TIMEOUT_MS,
files=files_to_stage or None,
):
if isinstance(event, StreamOutputEvent):
if event.stream == "stdout":
stdout_parts.append(event.data)
else:
stderr_parts.append(event.data)
# Emit incremental delta to frontend
# Handle generated files
generated_files: list[PythonExecutionFile] = []
generated_file_ids: list[str] = []
file_ids_to_cleanup: list[str] = []
file_store = get_default_file_store()

for workspace_file in result_event.files:
if workspace_file.kind != "file" or not workspace_file.file_id:
continue

try:
# Download file from Code Interpreter
file_content = client.download_file(workspace_file.file_id)

# Determine MIME type from file extension
filename = workspace_file.path.split("/")[-1]
mime_type, _ = mimetypes.guess_type(filename)
# Default to binary if we can't determine the type
mime_type = mime_type or "application/octet-stream"

# Save to Onyx file store
onyx_file_id = file_store.save_file(
content=BytesIO(file_content),
display_name=filename,
file_origin=FileOrigin.CHAT_UPLOAD,
file_type=mime_type,
)

generated_files.append(
PythonExecutionFile(
filename=filename,
file_link=build_full_frontend_file_url(onyx_file_id),
)
)
generated_file_ids.append(onyx_file_id)

# Mark for cleanup
file_ids_to_cleanup.append(workspace_file.file_id)

except Exception as e:
logger.error(
f"Failed to handle generated file "
f"{workspace_file.path}: {e}"
)

# Cleanup Code Interpreter files (generated files)
for ci_file_id in file_ids_to_cleanup:
try:
client.delete_file(ci_file_id)
except Exception as e:
logger.error(
f"Failed to delete Code Interpreter generated "
f"file {ci_file_id}: {e}"
)

# Cleanup staged input files
for file_mapping in files_to_stage:
try:
client.delete_file(file_mapping["file_id"])
except Exception as e:
logger.error(
f"Failed to delete Code Interpreter staged "
f"file {file_mapping['file_id']}: {e}"
)

# Emit file_ids once files are processed
if generated_file_ids:
self.emitter.emit(
Packet(
placement=placement,
obj=PythonToolDelta(
stdout=event.data if event.stream == "stdout" else "",
stderr=event.data if event.stream == "stderr" else "",
),
obj=PythonToolDelta(file_ids=generated_file_ids),
)
)
elif isinstance(event, StreamResultEvent):
result_event = event
elif isinstance(event, StreamErrorEvent):
raise RuntimeError(f"Code interpreter error: {event.message}")

if result_event is None:
raise RuntimeError(
"Code interpreter stream ended without a result event"
# Build result
result = LlmPythonExecutionResult(
stdout=truncated_stdout,
stderr=truncated_stderr,
exit_code=result_event.exit_code,
timed_out=result_event.timed_out,
generated_files=generated_files,
error=(None if result_event.exit_code == 0 else truncated_stderr),
)

full_stdout = "".join(stdout_parts)
full_stderr = "".join(stderr_parts)
# Serialize result for LLM
adapter = TypeAdapter(LlmPythonExecutionResult)
llm_response = adapter.dump_json(result).decode()

# Truncate output for LLM consumption
truncated_stdout = _truncate_output(
full_stdout, CODE_INTERPRETER_MAX_OUTPUT_LENGTH, "stdout"
)
truncated_stderr = _truncate_output(
full_stderr, CODE_INTERPRETER_MAX_OUTPUT_LENGTH, "stderr"
)
return ToolResponse(
rich_response=PythonToolRichResponse(
generated_files=generated_files,
),
llm_facing_response=llm_response,
)

# Handle generated files
generated_files: list[PythonExecutionFile] = []
generated_file_ids: list[str] = []
file_ids_to_cleanup: list[str] = []
file_store = get_default_file_store()
except Exception as e:
logger.error(f"Python execution failed: {e}")
error_msg = str(e)

for workspace_file in result_event.files:
if workspace_file.kind != "file" or not workspace_file.file_id:
continue

try:
# Download file from Code Interpreter
file_content = client.download_file(workspace_file.file_id)

# Determine MIME type from file extension
filename = workspace_file.path.split("/")[-1]
mime_type, _ = mimetypes.guess_type(filename)
# Default to binary if we can't determine the type
mime_type = mime_type or "application/octet-stream"

# Save to Onyx file store
onyx_file_id = file_store.save_file(
content=BytesIO(file_content),
display_name=filename,
file_origin=FileOrigin.CHAT_UPLOAD,
file_type=mime_type,
)

generated_files.append(
PythonExecutionFile(
filename=filename,
file_link=build_full_frontend_file_url(onyx_file_id),
)
)
generated_file_ids.append(onyx_file_id)

# Mark for cleanup
file_ids_to_cleanup.append(workspace_file.file_id)

except Exception as e:
logger.error(
f"Failed to handle generated file {workspace_file.path}: {e}"
)

# Cleanup Code Interpreter files (generated files)
for ci_file_id in file_ids_to_cleanup:
try:
client.delete_file(ci_file_id)
except Exception as e:
logger.error(
f"Failed to delete Code Interpreter generated file {ci_file_id}: {e}"
)

# Cleanup staged input files
for file_mapping in files_to_stage:
try:
client.delete_file(file_mapping["file_id"])
except Exception as e:
logger.error(
f"Failed to delete Code Interpreter staged file {file_mapping['file_id']}: {e}"
)

# Emit file_ids once files are processed
if generated_file_ids:
# Emit error delta
self.emitter.emit(
Packet(
placement=placement,
obj=PythonToolDelta(file_ids=generated_file_ids),
obj=PythonToolDelta(
stdout="",
stderr=error_msg,
file_ids=[],
),
)
)

# Build result
result = LlmPythonExecutionResult(
stdout=truncated_stdout,
stderr=truncated_stderr,
exit_code=result_event.exit_code,
timed_out=result_event.timed_out,
generated_files=generated_files,
error=None if result_event.exit_code == 0 else truncated_stderr,
)

# Serialize result for LLM
adapter = TypeAdapter(LlmPythonExecutionResult)
llm_response = adapter.dump_json(result).decode()

return ToolResponse(
rich_response=PythonToolRichResponse(
generated_files=generated_files,
),
llm_facing_response=llm_response,
)

except Exception as e:
logger.error(f"Python execution failed: {e}")
error_msg = str(e)

# Emit error delta
self.emitter.emit(
Packet(
placement=placement,
obj=PythonToolDelta(
stdout="",
stderr=error_msg,
file_ids=[],
),
# Return error result
result = LlmPythonExecutionResult(
stdout="",
stderr=error_msg,
exit_code=-1,
timed_out=False,
generated_files=[],
error=error_msg,
)
)

# Return error result
result = LlmPythonExecutionResult(
stdout="",
stderr=error_msg,
exit_code=-1,
timed_out=False,
generated_files=[],
error=error_msg,
)
adapter = TypeAdapter(LlmPythonExecutionResult)
llm_response = adapter.dump_json(result).decode()

adapter = TypeAdapter(LlmPythonExecutionResult)
llm_response = adapter.dump_json(result).decode()

return ToolResponse(
rich_response=None,
llm_facing_response=llm_response,
)
return ToolResponse(
rich_response=None,
llm_facing_response=llm_response,
)

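The restructured run loop consumes the execute_streaming events under a single contract: output events accumulate into stdout/stderr buffers, exactly one result event is expected, an error event aborts, and a stream that ends without a result is itself an error. A reduced sketch of that contract with stand-in event types (the real ones are StreamOutputEvent, StreamResultEvent, and StreamErrorEvent; only the control flow is the point here):

from dataclasses import dataclass


@dataclass
class Output:
    stream: str  # "stdout" or "stderr"
    data: str


@dataclass
class Result:
    exit_code: int


@dataclass
class Error:
    message: str


def consume(events: list[Output | Result | Error]) -> tuple[str, str, Result]:
    stdout_parts: list[str] = []
    stderr_parts: list[str] = []
    result: Result | None = None
    for event in events:
        if isinstance(event, Output):
            target = stdout_parts if event.stream == "stdout" else stderr_parts
            target.append(event.data)
        elif isinstance(event, Result):
            result = event
        elif isinstance(event, Error):
            raise RuntimeError(f"Code interpreter error: {event.message}")
    if result is None:
        # A stream that ends without a result is a protocol violation.
        raise RuntimeError("stream ended without a result event")
    return "".join(stdout_parts), "".join(stderr_parts), result
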
@@ -596,7 +596,7 @@ mypy-extensions==1.0.0
# typing-inspect
nest-asyncio==1.6.0
# via onyx
nltk==3.9.1
nltk==3.9.3
# via unstructured
numpy==2.4.1
# via

@@ -1,10 +1,20 @@
#!/bin/bash
set -e

cleanup() {
echo "Error occurred. Cleaning up..."
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
COMPOSE_FILE="$SCRIPT_DIR/../../deployment/docker_compose/docker-compose.yml"
COMPOSE_DEV_FILE="$SCRIPT_DIR/../../deployment/docker_compose/docker-compose.dev.yml"

stop_and_remove_containers() {
docker stop onyx_postgres onyx_vespa onyx_redis onyx_minio onyx_code_interpreter 2>/dev/null || true
docker rm onyx_postgres onyx_vespa onyx_redis onyx_minio onyx_code_interpreter 2>/dev/null || true
docker compose -f "$COMPOSE_FILE" -f "$COMPOSE_DEV_FILE" --profile opensearch-enabled stop opensearch 2>/dev/null || true
docker compose -f "$COMPOSE_FILE" -f "$COMPOSE_DEV_FILE" --profile opensearch-enabled rm -f opensearch 2>/dev/null || true
}

cleanup() {
echo "Error occurred. Cleaning up..."
stop_and_remove_containers
}

# Trap errors and output a message, then cleanup
@@ -12,16 +22,26 @@ trap 'echo "Error occurred on line $LINENO. Exiting script." >&2; cleanup' ERR

# Usage of the script with optional volume arguments
# ./restart_containers.sh [vespa_volume] [postgres_volume] [redis_volume]
# [minio_volume] [--keep-opensearch-data]

VESPA_VOLUME=${1:-""} # Default is empty if not provided
POSTGRES_VOLUME=${2:-""} # Default is empty if not provided
REDIS_VOLUME=${3:-""} # Default is empty if not provided
MINIO_VOLUME=${4:-""} # Default is empty if not provided
KEEP_OPENSEARCH_DATA=false
POSITIONAL_ARGS=()
for arg in "$@"; do
if [[ "$arg" == "--keep-opensearch-data" ]]; then
KEEP_OPENSEARCH_DATA=true
else
POSITIONAL_ARGS+=("$arg")
fi
done

VESPA_VOLUME=${POSITIONAL_ARGS[0]:-""}
POSTGRES_VOLUME=${POSITIONAL_ARGS[1]:-""}
REDIS_VOLUME=${POSITIONAL_ARGS[2]:-""}
MINIO_VOLUME=${POSITIONAL_ARGS[3]:-""}

# Stop and remove the existing containers
echo "Stopping and removing existing containers..."
docker stop onyx_postgres onyx_vespa onyx_redis onyx_minio onyx_code_interpreter 2>/dev/null || true
docker rm onyx_postgres onyx_vespa onyx_redis onyx_minio onyx_code_interpreter 2>/dev/null || true
stop_and_remove_containers

# Start the PostgreSQL container with optional volume
echo "Starting PostgreSQL container..."
@@ -39,6 +59,29 @@ else
docker run --detach --name onyx_vespa --hostname vespa-container --publish 8081:8081 --publish 19071:19071 vespaengine/vespa:8
fi

# If OPENSEARCH_ADMIN_PASSWORD is not already set, try loading it from
# .vscode/.env so existing dev setups that stored it there aren't silently
# broken.
VSCODE_ENV="$SCRIPT_DIR/../../.vscode/.env"
if [[ -z "${OPENSEARCH_ADMIN_PASSWORD:-}" && -f "$VSCODE_ENV" ]]; then
set -a
# shellcheck source=/dev/null
source "$VSCODE_ENV"
set +a
fi

# Start the OpenSearch container using the same service from docker-compose that
# our users use, setting OPENSEARCH_INITIAL_ADMIN_PASSWORD from the env's
# OPENSEARCH_ADMIN_PASSWORD if it exists, else defaulting to StrongPassword123!.
# Pass --keep-opensearch-data to preserve the opensearch-data volume across
# restarts, else the volume is deleted so the container starts fresh.
if [[ "$KEEP_OPENSEARCH_DATA" == "false" ]]; then
echo "Deleting opensearch-data volume..."
docker volume rm onyx_opensearch-data 2>/dev/null || true
fi
echo "Starting OpenSearch container..."
docker compose -f "$COMPOSE_FILE" -f "$COMPOSE_DEV_FILE" --profile opensearch-enabled up --force-recreate -d opensearch

# Start the Redis container with optional volume
echo "Starting Redis container..."
if [[ -n "$REDIS_VOLUME" ]]; then
@@ -60,7 +103,6 @@ echo "Starting Code Interpreter container..."
docker run --detach --name onyx_code_interpreter --publish 8000:8000 --user root -v /var/run/docker.sock:/var/run/docker.sock onyxdotapp/code-interpreter:latest bash ./entrypoint.sh code-interpreter-api

# Ensure alembic runs in the correct directory (backend/)
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
PARENT_DIR="$(dirname "$SCRIPT_DIR")"
cd "$PARENT_DIR"

@@ -1,10 +0,0 @@
#!/bin/bash

# We get OPENSEARCH_ADMIN_PASSWORD from the repo .env file.
source "$(dirname "$0")/../../.vscode/.env"

cd "$(dirname "$0")/../../deployment/docker_compose"

# Start OpenSearch.
echo "Forcefully starting fresh OpenSearch container..."
docker compose -f docker-compose.opensearch.yml up --force-recreate -d opensearch

@@ -5,6 +5,8 @@ Verifies that:
|
||||
1. extract_ids_from_runnable_connector correctly separates hierarchy nodes from doc IDs
|
||||
2. Extracted hierarchy nodes are correctly upserted to Postgres via upsert_hierarchy_nodes_batch
|
||||
3. Upserting is idempotent (running twice doesn't duplicate nodes)
|
||||
4. Document-to-hierarchy-node linkage is updated during pruning
|
||||
5. link_hierarchy_nodes_to_documents links nodes that are also documents
|
||||
|
||||
Uses a mock SlimConnectorWithPermSync that yields known hierarchy nodes and slim documents,
|
||||
combined with a real PostgreSQL database for verifying persistence.
|
||||
@@ -27,9 +29,13 @@ from onyx.db.enums import HierarchyNodeType
|
||||
from onyx.db.hierarchy import ensure_source_node_exists
|
||||
from onyx.db.hierarchy import get_all_hierarchy_nodes_for_source
|
||||
from onyx.db.hierarchy import get_hierarchy_node_by_raw_id
|
||||
from onyx.db.hierarchy import link_hierarchy_nodes_to_documents
|
||||
from onyx.db.hierarchy import update_document_parent_hierarchy_nodes
|
||||
from onyx.db.hierarchy import upsert_hierarchy_nodes_batch
|
||||
from onyx.db.models import Document as DbDocument
|
||||
from onyx.db.models import HierarchyNode as DBHierarchyNode
|
||||
from onyx.indexing.indexing_heartbeat import IndexingHeartbeatInterface
|
||||
from onyx.kg.models import KGStage
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Constants
|
||||
@@ -89,8 +95,18 @@ def _make_hierarchy_nodes() -> list[PydanticHierarchyNode]:
|
||||
]
|
||||
|
||||
|
||||
DOC_PARENT_MAP = {
|
||||
"msg-001": CHANNEL_A_ID,
|
||||
"msg-002": CHANNEL_A_ID,
|
||||
"msg-003": CHANNEL_B_ID,
|
||||
}
|
||||
|
||||
|
||||
def _make_slim_docs() -> list[SlimDocument | PydanticHierarchyNode]:
|
||||
return [SlimDocument(id=doc_id) for doc_id in SLIM_DOC_IDS]
|
||||
return [
|
||||
SlimDocument(id=doc_id, parent_hierarchy_raw_node_id=DOC_PARENT_MAP.get(doc_id))
|
||||
for doc_id in SLIM_DOC_IDS
|
||||
]


class MockSlimConnectorWithPermSync(SlimConnectorWithPermSync):
@@ -126,14 +142,31 @@ class MockSlimConnectorWithPermSync(SlimConnectorWithPermSync):
# ---------------------------------------------------------------------------


def _cleanup_test_hierarchy_nodes(db_session: Session) -> None:
    """Remove all hierarchy nodes for TEST_SOURCE to isolate tests."""
def _cleanup_test_data(db_session: Session) -> None:
    """Remove all test hierarchy nodes and documents to isolate tests."""
    for doc_id in SLIM_DOC_IDS:
        db_session.query(DbDocument).filter(DbDocument.id == doc_id).delete()
    db_session.query(DBHierarchyNode).filter(
        DBHierarchyNode.source == TEST_SOURCE
    ).delete()
    db_session.commit()


def _create_test_documents(db_session: Session) -> list[DbDocument]:
    """Insert minimal Document rows for our test doc IDs."""
    docs = []
    for doc_id in SLIM_DOC_IDS:
        doc = DbDocument(
            id=doc_id,
            semantic_id=doc_id,
            kg_stage=KGStage.NOT_STARTED,
        )
        db_session.add(doc)
        docs.append(doc)
    db_session.commit()
    return docs


# ---------------------------------------------------------------------------
# Tests
# ---------------------------------------------------------------------------
@@ -147,14 +180,14 @@ def test_pruning_extracts_hierarchy_nodes(db_session: Session) -> None:  # noqa:
    result = extract_ids_from_runnable_connector(connector, callback=None)

    # Doc IDs should include both slim doc IDs and hierarchy node raw_node_ids
    # (hierarchy node IDs are added to doc_ids so they aren't pruned)
    # (hierarchy node IDs are added to raw_id_to_parent so they aren't pruned)
    expected_ids = {
        CHANNEL_A_ID,
        CHANNEL_B_ID,
        CHANNEL_C_ID,
        *SLIM_DOC_IDS,
    }
    assert result.doc_ids == expected_ids
    assert result.raw_id_to_parent.keys() == expected_ids

    # Hierarchy nodes should be the 3 channels
    assert len(result.hierarchy_nodes) == 3
@@ -165,7 +198,7 @@ def test_pruning_extracts_hierarchy_nodes(db_session: Session) -> None:  # noqa:
def test_pruning_upserts_hierarchy_nodes_to_db(db_session: Session) -> None:
    """Full flow: extract hierarchy nodes from mock connector, upsert to Postgres,
    then verify the DB state (node count, parent relationships, permissions)."""
    _cleanup_test_hierarchy_nodes(db_session)
    _cleanup_test_data(db_session)

    # Step 1: ensure the SOURCE node exists (mirrors what the pruning task does)
    source_node = ensure_source_node_exists(db_session, TEST_SOURCE, commit=True)
@@ -230,7 +263,7 @@ def test_pruning_upserts_hierarchy_nodes_public_connector(
) -> None:
    """When the connector's access type is PUBLIC, all hierarchy nodes must be
    marked is_public=True regardless of their external_access settings."""
    _cleanup_test_hierarchy_nodes(db_session)
    _cleanup_test_data(db_session)

    ensure_source_node_exists(db_session, TEST_SOURCE, commit=True)

@@ -257,7 +290,7 @@
def test_pruning_hierarchy_node_upsert_idempotency(db_session: Session) -> None:
    """Upserting the same hierarchy nodes twice must not create duplicates.
    The second call should update existing rows in place."""
    _cleanup_test_hierarchy_nodes(db_session)
    _cleanup_test_data(db_session)

    ensure_source_node_exists(db_session, TEST_SOURCE, commit=True)

@@ -295,7 +328,7 @@ def test_pruning_hierarchy_node_upsert_idempotency(db_session: Session) -> None:

def test_pruning_hierarchy_node_upsert_updates_fields(db_session: Session) -> None:
    """Upserting a hierarchy node with changed fields should update the existing row."""
    _cleanup_test_hierarchy_nodes(db_session)
    _cleanup_test_data(db_session)

    ensure_source_node_exists(db_session, TEST_SOURCE, commit=True)

@@ -342,3 +375,193 @@ def test_pruning_hierarchy_node_upsert_updates_fields(db_session: Session) -> No
    assert db_node.is_public is True
    assert db_node.external_user_emails is not None
    assert set(db_node.external_user_emails) == {"new_user@example.com"}


# ---------------------------------------------------------------------------
# Document-to-hierarchy-node linkage tests
# ---------------------------------------------------------------------------


def test_extraction_preserves_parent_hierarchy_raw_node_id(
    db_session: Session,  # noqa: ARG001
) -> None:
    """extract_ids_from_runnable_connector should carry the
    parent_hierarchy_raw_node_id from SlimDocument into the raw_id_to_parent dict."""
    connector = MockSlimConnectorWithPermSync()
    result = extract_ids_from_runnable_connector(connector, callback=None)

    for doc_id, expected_parent in DOC_PARENT_MAP.items():
        assert (
            result.raw_id_to_parent[doc_id] == expected_parent
        ), f"raw_id_to_parent[{doc_id}] should be {expected_parent}"

    # Hierarchy node entries have None parent (they aren't documents)
    for channel_id in [CHANNEL_A_ID, CHANNEL_B_ID, CHANNEL_C_ID]:
        assert result.raw_id_to_parent[channel_id] is None


def test_update_document_parent_hierarchy_nodes(db_session: Session) -> None:
    """update_document_parent_hierarchy_nodes should set
    Document.parent_hierarchy_node_id for each document in the mapping."""
    _cleanup_test_data(db_session)

    source_node = ensure_source_node_exists(db_session, TEST_SOURCE, commit=True)
    upserted = upsert_hierarchy_nodes_batch(
        db_session=db_session,
        nodes=_make_hierarchy_nodes(),
        source=TEST_SOURCE,
        commit=True,
        is_connector_public=False,
    )
    node_id_by_raw = {n.raw_node_id: n.id for n in upserted}

    # Create documents with no parent set
    docs = _create_test_documents(db_session)
    for doc in docs:
        assert doc.parent_hierarchy_node_id is None

    # Build resolved map (same logic as _resolve_and_update_document_parents)
    resolved: dict[str, int | None] = {}
    for doc_id, raw_parent in DOC_PARENT_MAP.items():
        resolved[doc_id] = node_id_by_raw.get(raw_parent, source_node.id)

    updated = update_document_parent_hierarchy_nodes(
        db_session=db_session,
        doc_parent_map=resolved,
        commit=True,
    )
    assert updated == len(SLIM_DOC_IDS)

    # Verify each document now points to the correct hierarchy node
    db_session.expire_all()
    for doc_id, raw_parent in DOC_PARENT_MAP.items():
        tmp_doc = db_session.get(DbDocument, doc_id)
        assert tmp_doc is not None
        doc = tmp_doc
        expected_node_id = node_id_by_raw[raw_parent]
        assert (
            doc.parent_hierarchy_node_id == expected_node_id
        ), f"Document {doc_id} should point to node for {raw_parent}"


def test_update_document_parent_is_idempotent(db_session: Session) -> None:
    """Running update_document_parent_hierarchy_nodes a second time with the
    same mapping should update zero rows."""
    _cleanup_test_data(db_session)

    ensure_source_node_exists(db_session, TEST_SOURCE, commit=True)
    upserted = upsert_hierarchy_nodes_batch(
        db_session=db_session,
        nodes=_make_hierarchy_nodes(),
        source=TEST_SOURCE,
        commit=True,
        is_connector_public=False,
    )
    node_id_by_raw = {n.raw_node_id: n.id for n in upserted}
    _create_test_documents(db_session)

    resolved: dict[str, int | None] = {
        doc_id: node_id_by_raw[raw_parent]
        for doc_id, raw_parent in DOC_PARENT_MAP.items()
    }

    first_updated = update_document_parent_hierarchy_nodes(
        db_session=db_session,
        doc_parent_map=resolved,
        commit=True,
    )
    assert first_updated == len(SLIM_DOC_IDS)

    second_updated = update_document_parent_hierarchy_nodes(
        db_session=db_session,
        doc_parent_map=resolved,
        commit=True,
    )
    assert second_updated == 0


def test_link_hierarchy_nodes_to_documents_for_confluence(
    db_session: Session,
) -> None:
    """For sources in SOURCES_WITH_HIERARCHY_NODE_DOCUMENTS (e.g. Confluence),
    link_hierarchy_nodes_to_documents should set HierarchyNode.document_id
    when a hierarchy node's raw_node_id matches a document ID."""
    _cleanup_test_data(db_session)
    confluence_source = DocumentSource.CONFLUENCE

    # Clean up any existing Confluence hierarchy nodes
    db_session.query(DBHierarchyNode).filter(
        DBHierarchyNode.source == confluence_source
    ).delete()
    db_session.commit()

    ensure_source_node_exists(db_session, confluence_source, commit=True)

    # Create a hierarchy node whose raw_node_id matches a document ID
    page_node_id = "confluence-page-123"
    nodes = [
        PydanticHierarchyNode(
            raw_node_id=page_node_id,
            raw_parent_id=None,
            display_name="Test Page",
            link="https://wiki.example.com/page/123",
            node_type=HierarchyNodeType.PAGE,
        ),
    ]
    upsert_hierarchy_nodes_batch(
        db_session=db_session,
        nodes=nodes,
        source=confluence_source,
        commit=True,
        is_connector_public=False,
    )

    # Verify the node exists but has no document_id yet
    db_node = get_hierarchy_node_by_raw_id(db_session, page_node_id, confluence_source)
    assert db_node is not None
    assert db_node.document_id is None

    # Create a document with the same ID as the hierarchy node
    doc = DbDocument(
        id=page_node_id,
        semantic_id="Test Page",
        kg_stage=KGStage.NOT_STARTED,
    )
    db_session.add(doc)
    db_session.commit()

    # Link nodes to documents
    linked = link_hierarchy_nodes_to_documents(
        db_session=db_session,
        document_ids=[page_node_id],
        source=confluence_source,
        commit=True,
    )
    assert linked == 1

    # Verify the hierarchy node now has document_id set
    db_session.expire_all()
    db_node = get_hierarchy_node_by_raw_id(db_session, page_node_id, confluence_source)
    assert db_node is not None
    assert db_node.document_id == page_node_id

    # Cleanup
    db_session.query(DbDocument).filter(DbDocument.id == page_node_id).delete()
    db_session.query(DBHierarchyNode).filter(
        DBHierarchyNode.source == confluence_source
    ).delete()
    db_session.commit()


def test_link_hierarchy_nodes_skips_non_hierarchy_sources(
    db_session: Session,
) -> None:
    """link_hierarchy_nodes_to_documents should return 0 for sources that
    don't support hierarchy-node-as-document (e.g. Slack, Google Drive)."""
    linked = link_hierarchy_nodes_to_documents(
        db_session=db_session,
        document_ids=SLIM_DOC_IDS,
        source=TEST_SOURCE,  # Slack — not in SOURCES_WITH_HIERARCHY_NODE_DOCUMENTS
        commit=False,
    )
    assert linked == 0

@@ -11,7 +11,6 @@ from unittest.mock import patch
from uuid import uuid4

import pytest
from fastapi import HTTPException
from sqlalchemy.orm import Session

from onyx.db.enums import LLMModelFlowType
@@ -20,6 +19,8 @@ from onyx.db.llm import remove_llm_provider
from onyx.db.llm import update_default_provider
from onyx.db.llm import upsert_llm_provider
from onyx.db.models import UserRole
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.llm.constants import LlmProviderNames
from onyx.llm.interfaces import LLM
from onyx.server.manage.llm.api import (
@@ -122,16 +123,16 @@ class TestLLMConfigurationEndpoint:
        finally:
            db_session.rollback()

    def test_failed_llm_test_raises_http_exception(
    def test_failed_llm_test_raises_onyx_error(
        self,
        db_session: Session,
        provider_name: str,  # noqa: ARG002
    ) -> None:
        """
        Test that a failed LLM test raises an HTTPException with status 400.
        Test that a failed LLM test raises an OnyxError with VALIDATION_ERROR.

        When test_llm returns an error message, the endpoint should raise
        an HTTPException with the error details.
        an OnyxError with the error details.
        """
        error_message = "Invalid API key: Authentication failed"

@@ -143,7 +144,7 @@ class TestLLMConfigurationEndpoint:
        with patch(
            "onyx.server.manage.llm.api.test_llm", side_effect=mock_test_llm_failure
        ):
            with pytest.raises(HTTPException) as exc_info:
            with pytest.raises(OnyxError) as exc_info:
                run_test_llm_configuration(
                    test_llm_request=LLMTestRequest(
                        provider=LlmProviderNames.OPENAI,
@@ -156,9 +157,8 @@ class TestLLMConfigurationEndpoint:
                    db_session=db_session,
                )

            # Verify the exception details
            assert exc_info.value.status_code == 400
            assert exc_info.value.detail == error_message
            assert exc_info.value.error_code == OnyxErrorCode.VALIDATION_ERROR
            assert exc_info.value.message == error_message

        finally:
            db_session.rollback()
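
The same HTTPException-to-OnyxError swap repeats through the rest of these test files. As a reference, the before/after assertion pattern distilled from the hunks above and below looks roughly like this (a sketch, not additional code from the commit):

# Before: the endpoint raised FastAPI's HTTPException directly.
with pytest.raises(HTTPException) as exc_info:
    run_test_llm_configuration(...)
assert exc_info.value.status_code == 400
assert exc_info.value.detail == error_message

# After: a domain-level OnyxError carries a machine-readable code plus message.
with pytest.raises(OnyxError) as exc_info:
    run_test_llm_configuration(...)
assert exc_info.value.error_code == OnyxErrorCode.VALIDATION_ERROR
assert exc_info.value.message == error_message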
@@ -536,11 +536,11 @@ class TestDefaultProviderEndpoint:
            remove_llm_provider(db_session, provider.id)

            # Now run_test_default_provider should fail
            with pytest.raises(HTTPException) as exc_info:
            with pytest.raises(OnyxError) as exc_info:
                run_test_default_provider(_=_create_mock_admin())

            assert exc_info.value.status_code == 400
            assert "No LLM Provider setup" in exc_info.value.detail
            assert exc_info.value.error_code == OnyxErrorCode.VALIDATION_ERROR
            assert "No LLM Provider setup" in exc_info.value.message

        finally:
            db_session.rollback()
@@ -581,11 +581,11 @@ class TestDefaultProviderEndpoint:
        with patch(
            "onyx.server.manage.llm.api.test_llm", side_effect=mock_test_llm_failure
        ):
            with pytest.raises(HTTPException) as exc_info:
            with pytest.raises(OnyxError) as exc_info:
                run_test_default_provider(_=_create_mock_admin())

            assert exc_info.value.status_code == 400
            assert exc_info.value.detail == error_message
            assert exc_info.value.error_code == OnyxErrorCode.VALIDATION_ERROR
            assert exc_info.value.message == error_message

        finally:
            db_session.rollback()

@@ -16,13 +16,14 @@ from unittest.mock import patch
from uuid import uuid4

import pytest
from fastapi import HTTPException
from sqlalchemy.orm import Session

from onyx.db.llm import fetch_existing_llm_provider
from onyx.db.llm import remove_llm_provider
from onyx.db.llm import upsert_llm_provider
from onyx.db.models import UserRole
from onyx.error_handling.error_codes import OnyxErrorCode
from onyx.error_handling.exceptions import OnyxError
from onyx.llm.constants import LlmProviderNames
from onyx.server.manage.llm.api import _mask_string
from onyx.server.manage.llm.api import put_llm_provider
@@ -100,7 +101,7 @@ class TestLLMProviderChanges:
                api_base="https://attacker.example.com",
            )

            with pytest.raises(HTTPException) as exc_info:
            with pytest.raises(OnyxError) as exc_info:
                put_llm_provider(
                    llm_provider_upsert_request=update_request,
                    is_creation=False,
@@ -108,9 +109,9 @@ class TestLLMProviderChanges:
                    db_session=db_session,
                )

            assert exc_info.value.status_code == 400
            assert exc_info.value.error_code == OnyxErrorCode.VALIDATION_ERROR
            assert "cannot be changed without changing the API key" in str(
                exc_info.value.detail
                exc_info.value.message
            )
        finally:
            _cleanup_provider(db_session, provider_name)
@@ -236,7 +237,7 @@ class TestLLMProviderChanges:
                api_base=None,
            )

            with pytest.raises(HTTPException) as exc_info:
            with pytest.raises(OnyxError) as exc_info:
                put_llm_provider(
                    llm_provider_upsert_request=update_request,
                    is_creation=False,
@@ -244,9 +245,9 @@ class TestLLMProviderChanges:
                    db_session=db_session,
                )

            assert exc_info.value.status_code == 400
            assert exc_info.value.error_code == OnyxErrorCode.VALIDATION_ERROR
            assert "cannot be changed without changing the API key" in str(
                exc_info.value.detail
                exc_info.value.message
            )
        finally:
            _cleanup_provider(db_session, provider_name)
@@ -339,7 +340,7 @@ class TestLLMProviderChanges:
                custom_config_changed=True,
            )

            with pytest.raises(HTTPException) as exc_info:
            with pytest.raises(OnyxError) as exc_info:
                put_llm_provider(
                    llm_provider_upsert_request=update_request,
                    is_creation=False,
@@ -347,9 +348,9 @@ class TestLLMProviderChanges:
                    db_session=db_session,
                )

            assert exc_info.value.status_code == 400
            assert exc_info.value.error_code == OnyxErrorCode.VALIDATION_ERROR
            assert "cannot be changed without changing the API key" in str(
                exc_info.value.detail
                exc_info.value.message
            )
        finally:
            _cleanup_provider(db_session, provider_name)
@@ -375,7 +376,7 @@ class TestLLMProviderChanges:
                custom_config_changed=True,
            )

            with pytest.raises(HTTPException) as exc_info:
            with pytest.raises(OnyxError) as exc_info:
                put_llm_provider(
                    llm_provider_upsert_request=update_request,
                    is_creation=False,
@@ -383,9 +384,9 @@ class TestLLMProviderChanges:
                    db_session=db_session,
                )

            assert exc_info.value.status_code == 400
            assert exc_info.value.error_code == OnyxErrorCode.VALIDATION_ERROR
            assert "cannot be changed without changing the API key" in str(
                exc_info.value.detail
                exc_info.value.message
            )
        finally:
            _cleanup_provider(db_session, provider_name)

@@ -11,6 +11,7 @@ from onyx.context.search.models import SavedSearchSettings
from onyx.context.search.models import SearchSettingsCreationRequest
from onyx.db.enums import EmbeddingPrecision
from onyx.db.llm import fetch_default_contextual_rag_model
from onyx.db.llm import fetch_existing_llm_provider
from onyx.db.llm import update_default_contextual_model
from onyx.db.llm import upsert_llm_provider
from onyx.db.models import IndexModelStatus
@@ -37,6 +38,8 @@ def _create_llm_provider_and_model(
    model_name: str,
) -> None:
    """Insert an LLM provider with a single visible model configuration."""
    if fetch_existing_llm_provider(name=provider_name, db_session=db_session):
        return
    upsert_llm_provider(
        LLMProviderUpsertRequest(
            name=provider_name,
@@ -146,8 +149,8 @@ def baseline_search_settings(
    )


@pytest.mark.skip(reason="Set new search settings is temporarily disabled.")
@patch("onyx.db.swap_index.get_all_document_indices")
@patch("onyx.server.manage.search_settings.get_all_document_indices")
@patch("onyx.server.manage.search_settings.get_default_document_index")
@patch("onyx.indexing.indexing_pipeline.get_llm_for_contextual_rag")
@patch("onyx.indexing.indexing_pipeline.index_doc_batch_with_handler")
@@ -155,6 +158,7 @@ def test_indexing_pipeline_uses_contextual_rag_settings_from_create(
    mock_index_handler: MagicMock,
    mock_get_llm: MagicMock,
    mock_get_doc_index: MagicMock,  # noqa: ARG001
    mock_get_all_doc_indices_search_settings: MagicMock,  # noqa: ARG001
    mock_get_all_doc_indices: MagicMock,
    baseline_search_settings: None,  # noqa: ARG001
    db_session: Session,
@@ -196,8 +200,8 @@ def test_indexing_pipeline_uses_contextual_rag_settings_from_create(
    )


@pytest.mark.skip(reason="Set new search settings is temporarily disabled.")
@patch("onyx.db.swap_index.get_all_document_indices")
@patch("onyx.server.manage.search_settings.get_all_document_indices")
@patch("onyx.server.manage.search_settings.get_default_document_index")
@patch("onyx.indexing.indexing_pipeline.get_llm_for_contextual_rag")
@patch("onyx.indexing.indexing_pipeline.index_doc_batch_with_handler")
@@ -205,6 +209,7 @@ def test_indexing_pipeline_uses_updated_contextual_rag_settings(
    mock_index_handler: MagicMock,
    mock_get_llm: MagicMock,
    mock_get_doc_index: MagicMock,  # noqa: ARG001
    mock_get_all_doc_indices_search_settings: MagicMock,  # noqa: ARG001
    mock_get_all_doc_indices: MagicMock,
    baseline_search_settings: None,  # noqa: ARG001
    db_session: Session,
@@ -266,7 +271,7 @@ def test_indexing_pipeline_uses_updated_contextual_rag_settings(
    )


@pytest.mark.skip(reason="Set new search settings is temporarily disabled.")
@patch("onyx.server.manage.search_settings.get_all_document_indices")
@patch("onyx.server.manage.search_settings.get_default_document_index")
@patch("onyx.indexing.indexing_pipeline.get_llm_for_contextual_rag")
@patch("onyx.indexing.indexing_pipeline.index_doc_batch_with_handler")
@@ -274,6 +279,7 @@ def test_indexing_pipeline_skips_llm_when_contextual_rag_disabled(
    mock_index_handler: MagicMock,
    mock_get_llm: MagicMock,
    mock_get_doc_index: MagicMock,  # noqa: ARG001
    mock_get_all_doc_indices_search_settings: MagicMock,  # noqa: ARG001
    baseline_search_settings: None,  # noqa: ARG001
    db_session: Session,
) -> None:

@@ -427,7 +427,7 @@ def test_delete_default_llm_provider_rejected(reset: None) -> None:  # noqa: ARG
        headers=admin_user.headers,
    )
    assert delete_response.status_code == 400
    assert "Cannot delete the default LLM provider" in delete_response.json()["detail"]
    assert "Cannot delete the default LLM provider" in delete_response.json()["message"]

    # Verify provider still exists
    provider_data = _get_provider_by_id(admin_user, created_provider["id"])
@@ -673,8 +673,8 @@ def test_duplicate_provider_name_rejected(reset: None) -> None:  # noqa: ARG001
        headers=admin_user.headers,
        json=base_payload,
    )
    assert response.status_code == 400
    assert "already exists" in response.json()["detail"]
    assert response.status_code == 409
    assert "already exists" in response.json()["message"]


def test_rename_provider_rejected(reset: None) -> None:  # noqa: ARG001
@@ -711,7 +711,7 @@ def test_rename_provider_rejected(reset: None) -> None:  # noqa: ARG001
        json=update_payload,
    )
    assert response.status_code == 400
    assert "not currently supported" in response.json()["detail"]
    assert "not currently supported" in response.json()["message"]

    # Verify no duplicate was created — only the original provider should exist
    provider = _get_provider_by_id(admin_user, provider_id)

@@ -69,7 +69,7 @@ def test_unauthorized_persona_access_returns_403(

    # Should return 403 Forbidden
    assert response.status_code == 403
    assert "don't have access to this assistant" in response.json()["detail"]
    assert "don't have access to this assistant" in response.json()["message"]


def test_authorized_persona_access_returns_filtered_providers(
@@ -245,4 +245,4 @@ def test_nonexistent_persona_returns_404(

    # Should return 404
    assert response.status_code == 404
    assert "Persona not found" in response.json()["detail"]
    assert "Persona not found" in response.json()["message"]

@@ -42,6 +42,78 @@ class NightlyProviderConfig(BaseModel):
    strict: bool


def _stringify_custom_config_value(value: object) -> str:
    if isinstance(value, str):
        return value
    if isinstance(value, (dict, list)):
        return json.dumps(value)
    return str(value)


def _looks_like_vertex_credentials_payload(
    raw_custom_config: dict[object, object],
) -> bool:
    normalized_keys = {str(key).strip().lower() for key in raw_custom_config}
    provider_specific_keys = {
        "vertex_credentials",
        "credentials_file",
        "vertex_credentials_file",
        "google_application_credentials",
        "vertex_location",
        "location",
        "vertex_region",
        "region",
    }
    if normalized_keys & provider_specific_keys:
        return False

    normalized_type = str(raw_custom_config.get("type", "")).strip().lower()
    if normalized_type not in {"service_account", "external_account"}:
        return False

    # Service account JSON usually includes private_key/client_email, while external
    # account JSON includes credential_source. Either shape should be accepted.
    has_service_account_markers = any(
        key in normalized_keys for key in {"private_key", "client_email"}
    )
    has_external_account_markers = "credential_source" in normalized_keys
    return has_service_account_markers or has_external_account_markers


def _normalize_custom_config(
    provider: str, raw_custom_config: dict[object, object]
) -> dict[str, str]:
    if provider == "vertex_ai" and _looks_like_vertex_credentials_payload(
        raw_custom_config
    ):
        return {"vertex_credentials": json.dumps(raw_custom_config)}

    normalized: dict[str, str] = {}
    for raw_key, raw_value in raw_custom_config.items():
        key = str(raw_key).strip()
        key_lower = key.lower()

        if provider == "vertex_ai":
            if key_lower in {
                "vertex_credentials",
                "credentials_file",
                "vertex_credentials_file",
                "google_application_credentials",
            }:
                key = "vertex_credentials"
            elif key_lower in {
                "vertex_location",
                "location",
                "vertex_region",
                "region",
            }:
                key = "vertex_location"

        normalized[key] = _stringify_custom_config_value(raw_value)

    return normalized
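
To make the normalization concrete, here is a sketch of the two input shapes the helpers above are written to handle; the values are invented for illustration and are not part of the commit:

# Case 1 (assumed example): a raw service-account JSON pasted wholesale as the
# custom config. No vertex_* keys are present, so the whole dict is wrapped.
raw = {"type": "service_account", "private_key": "...", "client_email": "ci@example.com"}
_normalize_custom_config("vertex_ai", raw)
# -> {"vertex_credentials": '{"type": "service_account", ...}'}  (JSON-encoded)

# Case 2 (assumed example): an already-keyed config. Alias keys collapse onto
# the canonical vertex_credentials / vertex_location names.
raw = {"credentials_file": "{...}", "region": "us-central1"}
_normalize_custom_config("vertex_ai", raw)
# -> {"vertex_credentials": "{...}", "vertex_location": "us-central1"}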


def _env_true(env_var: str, default: bool = False) -> bool:
    value = os.environ.get(env_var)
    if value is None:
@@ -80,7 +152,9 @@ def _load_provider_config() -> NightlyProviderConfig:
        parsed = json.loads(custom_config_json)
        if not isinstance(parsed, dict):
            raise ValueError(f"{_ENV_CUSTOM_CONFIG_JSON} must be a JSON object")
        custom_config = {str(key): str(value) for key, value in parsed.items()}
        custom_config = _normalize_custom_config(
            provider=provider, raw_custom_config=parsed
        )

    if provider == "ollama_chat" and api_key and not custom_config:
        custom_config = {"OLLAMA_API_KEY": api_key}
@@ -148,6 +222,23 @@ def _validate_provider_config(config: NightlyProviderConfig) -> None:
            ),
        )

    if config.provider == "vertex_ai":
        has_vertex_credentials = bool(
            config.custom_config and config.custom_config.get("vertex_credentials")
        )
        if not has_vertex_credentials:
            configured_keys = (
                sorted(config.custom_config.keys()) if config.custom_config else []
            )
            _skip_or_fail(
                strict=config.strict,
                message=(
                    f"{_ENV_CUSTOM_CONFIG_JSON} must include 'vertex_credentials' "
                    f"for provider '{config.provider}'. "
                    f"Found keys: {configured_keys}"
                ),
            )


def _assert_integration_mode_enabled() -> None:
    assert (
@@ -193,6 +284,7 @@ def _create_provider_payload(
    return {
        "name": provider_name,
        "provider": provider,
        "model": model_name,
        "api_key": api_key,
        "api_base": api_base,
        "api_version": api_version,
@@ -208,24 +300,23 @@
    }


def _ensure_provider_is_default(provider_id: int, admin_user: DATestUser) -> None:
def _ensure_provider_is_default(
    provider_id: int, model_name: str, admin_user: DATestUser
) -> None:
    list_response = requests.get(
        f"{API_SERVER_URL}/admin/llm/provider",
        headers=admin_user.headers,
    )
    list_response.raise_for_status()
    providers = list_response.json()

    current_default = next(
        (provider for provider in providers if provider.get("is_default_provider")),
        None,
    default_text = list_response.json().get("default_text")
    assert default_text is not None, "Expected a default provider after setting default"
    assert default_text.get("provider_id") == provider_id, (
        f"Expected provider {provider_id} to be default, "
        f"found {default_text.get('provider_id')}"
    )
    assert (
        current_default is not None
    ), "Expected a default provider after setting provider as default"
    assert (
        current_default["id"] == provider_id
    ), f"Expected provider {provider_id} to be default, found {current_default['id']}"
        default_text.get("model_name") == model_name
    ), f"Expected default model {model_name}, found {default_text.get('model_name')}"


def _run_chat_assertions(
@@ -326,8 +417,9 @@ def _create_and_test_provider_for_model(

    try:
        set_default_response = requests.post(
            f"{API_SERVER_URL}/admin/llm/provider/{provider_id}/default",
            f"{API_SERVER_URL}/admin/llm/default",
            headers=admin_user.headers,
            json={"provider_id": provider_id, "model_name": model_name},
        )
        assert set_default_response.status_code == 200, (
            f"Setting default provider failed for provider={config.provider} "
@@ -335,7 +427,9 @@ def _create_and_test_provider_for_model(
            f"{set_default_response.text}"
        )

        _ensure_provider_is_default(provider_id=provider_id, admin_user=admin_user)
        _ensure_provider_is_default(
            provider_id=provider_id, model_name=model_name, admin_user=admin_user
        )
        _run_chat_assertions(
            admin_user=admin_user,
            search_tool_id=search_tool_id,

@@ -1,4 +1,3 @@
import pytest
import requests

from tests.integration.common_utils.constants import API_SERVER_URL
@@ -365,7 +364,6 @@ def test_update_contextual_rag_missing_model_name(
    assert "Provider name and model name are required" in response.json()["detail"]


@pytest.mark.skip(reason="Set new search settings is temporarily disabled.")
def test_set_new_search_settings_with_contextual_rag(
    reset: None,  # noqa: ARG001
    admin_user: DATestUser,
@@ -394,7 +392,6 @@ def test_set_new_search_settings_with_contextual_rag(
    _cancel_new_embedding(admin_user)


@pytest.mark.skip(reason="Set new search settings is temporarily disabled.")
def test_set_new_search_settings_without_contextual_rag(
    reset: None,  # noqa: ARG001
    admin_user: DATestUser,
@@ -419,7 +416,6 @@ def test_set_new_search_settings_without_contextual_rag(
    _cancel_new_embedding(admin_user)


@pytest.mark.skip(reason="Set new search settings is temporarily disabled.")
def test_set_new_then_update_inference_settings(
    reset: None,  # noqa: ARG001
    admin_user: DATestUser,
@@ -457,7 +453,6 @@ def test_set_new_then_update_inference_settings(
    _cancel_new_embedding(admin_user)


@pytest.mark.skip(reason="Set new search settings is temporarily disabled.")
def test_set_new_search_settings_replaces_previous_secondary(
    reset: None,  # noqa: ARG001
    admin_user: DATestUser,

@@ -281,9 +281,10 @@ class TestApplyLicenseStatusToSettings:
        }


class TestSettingsDefaultEEDisabled:
    """Verify the Settings model defaults ee_features_enabled to False."""
class TestSettingsDefaults:
    """Verify Settings model defaults for CE deployments."""

    def test_default_ee_features_disabled(self) -> None:
        """CE default: ee_features_enabled is False."""
        settings = Settings()
        assert settings.ee_features_enabled is False

@@ -87,7 +87,8 @@ def test_python_tool_available_when_health_check_passes(

    mock_client = MagicMock()
    mock_client.health.return_value = True
    mock_client_cls.return_value = mock_client
    mock_client_cls.return_value.__enter__ = MagicMock(return_value=mock_client)
    mock_client_cls.return_value.__exit__ = MagicMock(return_value=False)

    db_session = MagicMock(spec=Session)
    assert PythonTool.is_available(db_session) is True
@@ -109,7 +110,8 @@ def test_python_tool_unavailable_when_health_check_fails(

    mock_client = MagicMock()
    mock_client.health.return_value = False
    mock_client_cls.return_value = mock_client
    mock_client_cls.return_value.__enter__ = MagicMock(return_value=mock_client)
    mock_client_cls.return_value.__exit__ = MagicMock(return_value=False)

    db_session = MagicMock(spec=Session)
    assert PythonTool.is_available(db_session) is False
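
The added __enter__/__exit__ lines exist because the client is evidently now used as a context manager, which a bare MagicMock return value does not emulate on its own. A self-contained sketch of the pattern, with generic names rather than code from the commit:

from unittest.mock import MagicMock

client_cls = MagicMock()
health_client = MagicMock()
health_client.health.return_value = True
# `with client_cls() as c:` invokes __enter__/__exit__ on the instance the
# mock returns, so both dunders must be stubbed for the with-block to work.
client_cls.return_value.__enter__ = MagicMock(return_value=health_client)
client_cls.return_value.__exit__ = MagicMock(return_value=False)

with client_cls() as c:
    assert c.health() is True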

6
uv.lock
generated
@@ -4106,7 +4106,7 @@ wheels = [

[[package]]
name = "nltk"
version = "3.9.1"
version = "3.9.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
    { name = "click" },
@@ -4114,9 +4114,9 @@ dependencies = [
    { name = "regex" },
    { name = "tqdm" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3c/87/db8be88ad32c2d042420b6fd9ffd4a149f9a0d7f0e86b3f543be2eeeedd2/nltk-3.9.1.tar.gz", hash = "sha256:87d127bd3de4bd89a4f81265e5fa59cb1b199b27440175370f7417d2bc7ae868", size = 2904691, upload-time = "2024-08-18T19:48:37.769Z" }
sdist = { url = "https://files.pythonhosted.org/packages/e1/8f/915e1c12df07c70ed779d18ab83d065718a926e70d3ea33eb0cd66ffb7c0/nltk-3.9.3.tar.gz", hash = "sha256:cb5945d6424a98d694c2b9a0264519fab4363711065a46aa0ae7a2195b92e71f", size = 2923673, upload-time = "2026-02-24T12:05:53.833Z" }
wheels = [
    { url = "https://files.pythonhosted.org/packages/4d/66/7d9e26593edda06e8cb531874633f7c2372279c3b0f46235539fe546df8b/nltk-3.9.1-py3-none-any.whl", hash = "sha256:4fa26829c5b00715afe3061398a8989dc643b92ce7dd93fb4585a70930d168a1", size = 1505442, upload-time = "2024-08-18T19:48:21.909Z" },
    { url = "https://files.pythonhosted.org/packages/c2/7e/9af5a710a1236e4772de8dfcc6af942a561327bb9f42b5b4a24d0cf100fd/nltk-3.9.3-py3-none-any.whl", hash = "sha256:60b3db6e9995b3dd976b1f0fa7dec22069b2677e759c28eb69b62ddd44870522", size = 1525385, upload-time = "2026-02-24T12:05:46.54Z" },
]

[[package]]

22
web/lib/opal/src/icons/column.tsx
Normal file
@@ -0,0 +1,22 @@
import type { IconProps } from "@opal/types";

const SvgColumn = ({ size, ...props }: IconProps) => (
  <svg
    width={size}
    height={size}
    viewBox="0 0 16 16"
    fill="none"
    xmlns="http://www.w3.org/2000/svg"
    stroke="currentColor"
    {...props}
  >
    <path
      d="M6 14H3.33333C2.59695 14 2 13.403 2 12.6667V3.33333C2 2.59695 2.59695 2 3.33333 2H6M6 14V2M6 14H10M6 2H10M10 2H12.6667C13.403 2 14 2.59695 14 3.33333V12.6667C14 13.403 13.403 14 12.6667 14H10M10 2V14"
      strokeWidth={1.5}
      strokeLinecap="round"
      strokeLinejoin="round"
    />
  </svg>
);

export default SvgColumn;

20
web/lib/opal/src/icons/handle.tsx
Normal file
@@ -0,0 +1,20 @@
import type { IconProps } from "@opal/types";

const SvgHandle = ({ size = 16, ...props }: IconProps) => (
  <svg
    width={Math.round((size * 3) / 17)}
    height={size}
    viewBox="0 0 3 17"
    fill="none"
    xmlns="http://www.w3.org/2000/svg"
    {...props}
  >
    <path
      d="M0.5 0.5V16.5M2.5 0.5V16.5"
      stroke="currentColor"
      strokeLinecap="round"
    />
  </svg>
);

export default SvgHandle;

@@ -49,6 +49,7 @@ export { default as SvgClock } from "@opal/icons/clock";
export { default as SvgClockHandsSmall } from "@opal/icons/clock-hands-small";
export { default as SvgCloud } from "@opal/icons/cloud";
export { default as SvgCode } from "@opal/icons/code";
export { default as SvgColumn } from "@opal/icons/column";
export { default as SvgCopy } from "@opal/icons/copy";
export { default as SvgCornerRightUpDot } from "@opal/icons/corner-right-up-dot";
export { default as SvgCpu } from "@opal/icons/cpu";
@@ -79,6 +80,7 @@ export { default as SvgFolderPartialOpen } from "@opal/icons/folder-partial-open
export { default as SvgFolderPlus } from "@opal/icons/folder-plus";
export { default as SvgGemini } from "@opal/icons/gemini";
export { default as SvgGlobe } from "@opal/icons/globe";
export { default as SvgHandle } from "@opal/icons/handle";
export { default as SvgHardDrive } from "@opal/icons/hard-drive";
export { default as SvgHashSmall } from "@opal/icons/hash-small";
export { default as SvgHash } from "@opal/icons/hash";
@@ -146,6 +148,8 @@ export { default as SvgSlack } from "@opal/icons/slack";
export { default as SvgSlash } from "@opal/icons/slash";
export { default as SvgSliders } from "@opal/icons/sliders";
export { default as SvgSlidersSmall } from "@opal/icons/sliders-small";
export { default as SvgSort } from "@opal/icons/sort";
export { default as SvgSortOrder } from "@opal/icons/sort-order";
export { default as SvgSparkle } from "@opal/icons/sparkle";
export { default as SvgStar } from "@opal/icons/star";
export { default as SvgStep1 } from "@opal/icons/step1";
@@ -169,6 +173,7 @@ export { default as SvgUploadCloud } from "@opal/icons/upload-cloud";
export { default as SvgUser } from "@opal/icons/user";
export { default as SvgUserManage } from "@opal/icons/user-manage";
export { default as SvgUserPlus } from "@opal/icons/user-plus";
export { default as SvgUserSync } from "@opal/icons/user-sync";
export { default as SvgUsers } from "@opal/icons/users";
export { default as SvgWallet } from "@opal/icons/wallet";
export { default as SvgWorkflow } from "@opal/icons/workflow";

21
web/lib/opal/src/icons/sort-order.tsx
Normal file
@@ -0,0 +1,21 @@
import type { IconProps } from "@opal/types";

const SvgSortOrder = ({ size, ...props }: IconProps) => (
  <svg
    width={size}
    height={size}
    viewBox="0 0 16 16"
    fill="none"
    xmlns="http://www.w3.org/2000/svg"
    stroke="currentColor"
    {...props}
  >
    <path
      d="M2.66675 12L7.67009 12.0001M2.66675 8H10.5001M2.66675 4H13.3334"
      strokeWidth={1.5}
      strokeLinecap="round"
      strokeLinejoin="round"
    />
  </svg>
);
export default SvgSortOrder;

27
web/lib/opal/src/icons/sort.tsx
Normal file
@@ -0,0 +1,27 @@
import type { IconProps } from "@opal/types";

const SvgSort = ({ size, ...props }: IconProps) => (
  <svg
    width={size}
    height={size}
    viewBox="0 0 16 16"
    fill="none"
    xmlns="http://www.w3.org/2000/svg"
    stroke="currentColor"
    {...props}
  >
    <path
      d="M2 4.5H10M2 8H7M2 11.5H5"
      strokeWidth={1.5}
      strokeLinecap="round"
      strokeLinejoin="round"
    />
    <path
      d="M12 5V12M12 12L14 10M12 12L10 10"
      strokeWidth={1.5}
      strokeLinecap="round"
      strokeLinejoin="round"
    />
  </svg>
);
export default SvgSort;

22
web/lib/opal/src/icons/user-sync.tsx
Normal file
@@ -0,0 +1,22 @@
import type { IconProps } from "@opal/types";

const SvgUserSync = ({ size, ...props }: IconProps) => (
  <svg
    width={size}
    height={size}
    viewBox="0 0 16 16"
    fill="none"
    xmlns="http://www.w3.org/2000/svg"
    stroke="currentColor"
    {...props}
  >
    <path
      d="M1 14C1 13.6667 1 13.3333 1 13C1 11.3431 2.34316 10 4.00002 10H7M11 8.5L9.5 10L14.5 9.99985M13 14L14.5 12.5L9.5 12.5M8.75 4.75C8.75 6.26878 7.51878 7.5 6 7.5C4.48122 7.5 3.25 6.26878 3.25 4.75C3.25 3.23122 4.48122 2 6 2C7.51878 2 8.75 3.23122 8.75 4.75Z"
      strokeWidth={1.5}
      strokeLinecap="round"
      strokeLinejoin="round"
    />
  </svg>
);

export default SvgUserSync;

@@ -41,7 +41,7 @@ export default defineConfig({
      viewport: { width: 1280, height: 720 },
      storageState: "admin_auth.json",
    },
    grepInvert: /@exclusive/,
    grepInvert: [/@exclusive/, /@lite/],
  },
  {
    // this suite runs independently and serially + slower
@@ -55,5 +55,15 @@ export default defineConfig({
    grep: /@exclusive/,
    workers: 1,
  },
  {
    // runs against the Onyx Lite stack (DISABLE_VECTOR_DB=true, no Vespa/Redis)
    name: "lite",
    use: {
      ...devices["Desktop Chrome"],
      viewport: { width: 1280, height: 720 },
      storageState: "admin_auth.json",
    },
    grep: /@lite/,
  },
  ],
});

@@ -2,7 +2,7 @@
import * as SettingsLayouts from "@/layouts/settings-layouts";
import { SourceCategory, SourceMetadata } from "@/lib/search/interfaces";
import { listSourceMetadata } from "@/lib/sources";
import Button from "@/refresh-components/buttons/Button";
import { Button } from "@opal/components";
import {
  useCallback,
  useContext,
@@ -254,9 +254,7 @@ export default function Page() {
          icon={route.icon}
          title={route.title}
          rightChildren={
            <Button href="/admin/indexing/status" primary>
              See Connectors
            </Button>
            <Button href="/admin/indexing/status">See Connectors</Button>
          }
          separator
        />

@@ -16,9 +16,8 @@ import {
} from "./lib";
import { FiEdit2 } from "react-icons/fi";
import { useUser } from "@/providers/UserProvider";
import { Button as OpalButton } from "@opal/components";
import { Button } from "@opal/components";
import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationModalLayout";
import Button from "@/refresh-components/buttons/Button";
import { SvgAlertCircle, SvgTrash } from "@opal/icons";
import type { Route } from "next";

@@ -300,7 +299,7 @@ export function PersonasTable({
    <div key="edit" className="flex">
      <div className="mr-auto my-auto">
        {!persona.builtin_persona && isEditable ? (
          <OpalButton
          <Button
            icon={SvgTrash}
            prominence="tertiary"
            onClick={() => openDeleteModal(persona)}

@@ -2,7 +2,7 @@ import { Form, Formik } from "formik";
import { toast } from "@/hooks/useToast";
import { createApiKey, updateApiKey } from "./lib";
import Modal from "@/refresh-components/Modal";
import Button from "@/refresh-components/buttons/Button";
import { Button } from "@opal/components";
import Text from "@/refresh-components/texts/Text";
import InputTypeIn from "@/refresh-components/inputs/InputTypeIn";
import InputComboBox from "@/refresh-components/inputs/InputComboBox";

@@ -27,7 +27,7 @@ import {
  DISCORD_SERVICE_API_KEY_NAME,
} from "@/app/admin/api-key/types";
import CreateButton from "@/refresh-components/buttons/CreateButton";
import Button from "@/refresh-components/buttons/Button";
import { Button } from "@opal/components";
import CopyIconButton from "@/refresh-components/buttons/CopyIconButton";
import Text from "@/refresh-components/texts/Text";
import { SvgEdit, SvgKey, SvgRefreshCw } from "@opal/icons";
@@ -161,11 +161,11 @@ function Main() {
  <TableRow key={apiKey.api_key_id}>
    <TableCell>
      <Button
        internal
        prominence="internal"
        onClick={() => handleEdit(apiKey)}
        leftIcon={SvgEdit}
        icon={SvgEdit}
      >
        {apiKey.api_key_name || <i>null</i>}
        {apiKey.api_key_name || "null"}
      </Button>
    </TableCell>
    <TableCell className="max-w-64">
@@ -176,8 +176,8 @@ function Main() {
    </TableCell>
    <TableCell>
      <Button
        internal
        leftIcon={SvgRefreshCw}
        prominence="internal"
        icon={SvgRefreshCw}
        onClick={async () => {
          setKeyIsGenerating(true);
          const response = await regenerateApiKey(apiKey);

@@ -7,6 +7,7 @@ import { Content } from "@opal/layouts";
import * as InputLayouts from "@/layouts/input-layouts";
import Card from "@/refresh-components/cards/Card";
import Button from "@/refresh-components/buttons/Button";
import { Button as OpalButton } from "@opal/components";
import Text from "@/refresh-components/texts/Text";
import Message from "@/refresh-components/messages/Message";
import InfoBlock from "@/refresh-components/messages/InfoBlock";
@@ -244,25 +245,20 @@ function SubscriptionCard({
          to make changes.
        </Text>
      ) : disabled ? (
        <Button
          main
          secondary
        <OpalButton
          prominence="secondary"
          onClick={handleReconnect}
          rightIcon={SvgArrowRight}
          disabled={isReconnecting}
        >
          {isReconnecting ? "Connecting..." : "Connect to Stripe"}
        </Button>
        </OpalButton>
      ) : (
        <Button
          main
          primary
          onClick={handleManagePlan}
          rightIcon={SvgExternalLink}
        >
        <OpalButton onClick={handleManagePlan} rightIcon={SvgExternalLink}>
          Manage Plan
        </Button>
        </OpalButton>
      )}
      {/* TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved */}
      <Button tertiary onClick={onViewPlans} className="billing-text-link">
        <Text secondaryBody text03>
          View Plan Details
@@ -379,9 +375,13 @@ function SeatsCard({
          sizePreset="main-content"
          variant="section"
        />
        <Button main secondary onClick={handleCancel} disabled={isSubmitting}>
        <OpalButton
          prominence="secondary"
          onClick={handleCancel}
          disabled={isSubmitting}
        >
          Cancel
        </Button>
        </OpalButton>
      </Section>
<div className="billing-content-area">
|
||||
@@ -463,16 +463,14 @@ function SeatsCard({
|
||||
No changes to your billing.
|
||||
</Text>
|
||||
)}
|
||||
<Button
|
||||
main
|
||||
primary
|
||||
<OpalButton
|
||||
onClick={handleConfirm}
|
||||
disabled={
|
||||
isSubmitting || newSeatCount === totalSeats || isBelowMinimum
|
||||
}
|
||||
>
|
||||
{isSubmitting ? "Saving..." : "Confirm Change"}
|
||||
</Button>
|
||||
</OpalButton>
|
||||
</Section>
|
||||
</Card>
|
||||
);
|
||||
@@ -502,19 +500,22 @@ function SeatsCard({
|
||||
height="auto"
|
||||
width="auto"
|
||||
>
|
||||
<Button main tertiary href="/admin/users" leftIcon={SvgExternalLink}>
|
||||
<OpalButton
|
||||
prominence="tertiary"
|
||||
href="/admin/users"
|
||||
icon={SvgExternalLink}
|
||||
>
|
||||
View Users
|
||||
</Button>
|
||||
</OpalButton>
|
||||
{!hideUpdateSeats && (
|
||||
<Button
|
||||
main
|
||||
secondary
|
||||
<OpalButton
|
||||
prominence="secondary"
|
||||
onClick={handleStartEdit}
|
||||
leftIcon={SvgPlus}
|
||||
icon={SvgPlus}
|
||||
disabled={isLoadingUsers || disabled || !billing}
|
||||
>
|
||||
Update Seats
|
||||
</Button>
|
||||
</OpalButton>
|
||||
)}
|
||||
</Section>
|
||||
</Section>
|
||||
@@ -566,14 +567,13 @@ function PaymentSection({ billing }: { billing: BillingInformation }) {
|
||||
title="Visa ending in 1234"
|
||||
description="Payment method"
|
||||
/>
|
||||
<Button
|
||||
main
|
||||
tertiary
|
||||
<OpalButton
|
||||
prominence="tertiary"
|
||||
onClick={handleOpenPortal}
|
||||
rightIcon={SvgExternalLink}
|
||||
>
|
||||
Update
|
||||
</Button>
|
||||
</OpalButton>
|
||||
</Section>
|
||||
</Card>
|
||||
{lastPaymentDate && (
|
||||
@@ -589,14 +589,13 @@ function PaymentSection({ billing }: { billing: BillingInformation }) {
|
||||
title={lastPaymentDate}
|
||||
description="Last payment"
|
||||
/>
|
||||
<Button
|
||||
main
|
||||
tertiary
|
||||
<OpalButton
|
||||
prominence="tertiary"
|
||||
onClick={handleOpenPortal}
|
||||
rightIcon={SvgExternalLink}
|
||||
>
|
||||
View Invoice
|
||||
</Button>
|
||||
</OpalButton>
|
||||
</Section>
|
||||
</Card>
|
||||
)}
|
||||
|
||||

@@ -3,7 +3,7 @@
import { useState, useMemo, useEffect } from "react";
import { Section } from "@/layouts/general-layouts";
import * as InputLayouts from "@/layouts/input-layouts";
import Button from "@/refresh-components/buttons/Button";
import { Button } from "@opal/components";
import Text from "@/refresh-components/texts/Text";
import Card from "@/refresh-components/cards/Card";
import Separator from "@/refresh-components/Separator";
@@ -176,7 +176,7 @@ export default function CheckoutView({ onAdjustPlan }: CheckoutViewProps) {
          Business
        </Text>
      </Section>
      <Button secondary onClick={onAdjustPlan}>
      <Button prominence="secondary" onClick={onAdjustPlan}>
        Adjust Plan
      </Button>
    </Section>
@@ -262,7 +262,7 @@ export default function CheckoutView({ onAdjustPlan }: CheckoutViewProps) {
        // Empty div to maintain space-between alignment
        <div></div>
      )}
      <Button main primary onClick={handleSubmit} disabled={isSubmitting}>
      <Button onClick={handleSubmit} disabled={isSubmitting}>
        {isSubmitting ? "Loading..." : "Continue to Payment"}
      </Button>
    </Section>

@@ -2,7 +2,7 @@

import { useState } from "react";
import Card from "@/refresh-components/cards/Card";
import Button from "@/refresh-components/buttons/Button";
import { Button } from "@opal/components";
import Text from "@/refresh-components/texts/Text";
import InputFile from "@/refresh-components/inputs/InputFile";
import { Section } from "@/layouts/general-layouts";
@@ -119,11 +119,11 @@ export default function LicenseActivationCard({
        </Text>
      </Section>
      <Section flexDirection="row" gap={0.5} height="auto" width="auto">
        <Button main secondary onClick={() => setShowInput(true)}>
        <Button prominence="secondary" onClick={() => setShowInput(true)}>
          Update Key
        </Button>
        {!hideClose && (
          <Button main tertiary onClick={handleClose}>
          <Button prominence="tertiary" onClick={handleClose}>
            Close
          </Button>
        )}
@@ -146,7 +146,11 @@ export default function LicenseActivationCard({
        <Text headingH3>
          {hasLicense ? "Update License Key" : "Activate License Key"}
        </Text>
        <Button secondary onClick={handleClose} disabled={isActivating}>
        <Button
          prominence="secondary"
          onClick={handleClose}
          disabled={isActivating}
        >
          Cancel
        </Button>
      </Section>
@@ -219,8 +223,6 @@ export default function LicenseActivationCard({
      {/* Footer */}
      <Section flexDirection="row" justifyContent="end" padding={1}>
        <Button
          main
          primary
          onClick={handleActivate}
          disabled={isActivating || !licenseKey.trim() || success}
        >

@@ -21,6 +21,7 @@ import "@/app/admin/billing/billing.css";
import type { IconProps } from "@opal/types";
import Card from "@/refresh-components/cards/Card";
import Button from "@/refresh-components/buttons/Button";
import { Button as OpalButton } from "@opal/components";
import Text from "@/refresh-components/texts/Text";
import { Section } from "@/layouts/general-layouts";

@@ -146,26 +147,27 @@ function PlanCard({
      {/* Button */}
      <div className="plan-card-button">
        {isCurrentPlan ? (
          // TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved
          <Button tertiary transient className="pointer-events-none">
            <Text mainUiAction text03>
              Your Current Plan
            </Text>
          </Button>
        ) : href ? (
          <Button
            main
            secondary
          <OpalButton
            prominence="secondary"
            href={href}
            target="_blank"
            rel="noopener noreferrer"
          >
            {buttonLabel}
          </Button>
          </OpalButton>
        ) : onClick ? (
          <Button main primary onClick={onClick} leftIcon={ButtonIcon}>
          <OpalButton onClick={onClick} icon={ButtonIcon}>
            {buttonLabel}
          </Button>
          </OpalButton>
        ) : (
          // TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved
          <Button tertiary transient className="pointer-events-none">
            <Text mainUiAction text03>
              Included in your plan
@@ -66,6 +66,7 @@ function FooterLinks({
      <Text secondaryBody text03>
        Have a license key?
      </Text>
      {/* TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved */}
      <Button action tertiary onClick={onActivateLicense}>
        <Text secondaryBody text05 className="underline">
          {licenseText}
@@ -73,6 +74,7 @@ function FooterLinks({
      </Button>
    </>
  )}
  {/* TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved */}
  <Button
    action
    tertiary

@@ -10,7 +10,7 @@ import { SourceIcon } from "@/components/SourceIcon";
import { EditableStringFieldDisplay } from "@/components/EditableStringFieldDisplay";
import { deleteSlackBot } from "./new/lib";
import GenericConfirmModal from "@/components/modals/GenericConfirmModal";
import Button from "@/refresh-components/buttons/Button";
import { Button } from "@opal/components";
import { cn } from "@/lib/utils";
import { SvgChevronDownSmall, SvgTrash } from "@opal/icons";

@@ -106,20 +106,20 @@ export const ExistingSlackBotForm = ({
    <div className="flex flex-col" ref={dropdownRef}>
      <div className="flex items-center gap-4">
        <Button
          leftIcon={({ className }) => (
          prominence="secondary"
          icon={({ className }) => (
            <SvgChevronDownSmall
              className={cn(className, !isExpanded && "-rotate-90")}
            />
          )}
          onClick={() => setIsExpanded(!isExpanded)}
          secondary
        >
          Update Tokens
        </Button>
        <Button
          danger
          variant="danger"
          onClick={() => setShowDeleteModal(true)}
          leftIcon={SvgTrash}
          icon={SvgTrash}
        >
          Delete
        </Button>

@@ -4,7 +4,7 @@ import { TextFormField } from "@/components/Field";
import { Form, Formik } from "formik";
import * as Yup from "yup";
import { createSlackBot, updateSlackBot } from "./new/lib";
import Button from "@/refresh-components/buttons/Button";
import { Button } from "@opal/components";
import Separator from "@/refresh-components/Separator";
import { useEffect } from "react";
import { DOCS_ADMINS_PATH } from "@/lib/constants";

@@ -17,9 +17,8 @@ import type { Route } from "next";
import { useState } from "react";
import { deleteSlackChannelConfig, isPersonaASlackBotPersona } from "./lib";
import { Card } from "@/components/ui/card";
import Button from "@/refresh-components/buttons/Button";
import CreateButton from "@/refresh-components/buttons/CreateButton";
import { Button as OpalButton } from "@opal/components";
import { Button } from "@opal/components";
import { SvgSettings, SvgTrash } from "@opal/icons";
const numToDisplay = 50;

@@ -45,11 +44,11 @@ export default function SlackChannelConfigsTable({
  <div className="space-y-8">
    <div className="flex justify-between items-center mb-6">
      <Button
        prominence="secondary"
        onClick={() => {
          window.location.href = `/admin/bots/${slackBotId}/channels/${defaultConfig?.id}`;
        }}
        secondary
        leftIcon={SvgSettings}
        icon={SvgSettings}
      >
        Edit Default Configuration
      </Button>
@@ -121,7 +120,7 @@ export default function SlackChannelConfigsTable({
      </div>
    </TableCell>
    <TableCell onClick={(e) => e.stopPropagation()}>
      <OpalButton
      <Button
        onClick={async (e) => {
          e.stopPropagation();
          const response = await deleteSlackChannelConfig(
@@ -11,7 +11,7 @@ import {
   TextArrayField,
   TextFormField,
 } from "@/components/Field";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { MinimalPersonaSnapshot } from "@/app/admin/agents/interfaces";
 import DocumentSetCard from "@/sections/cards/DocumentSetCard";
 import CollapsibleSection from "@/app/admin/agents/CollapsibleSection";

@@ -598,7 +598,7 @@ export function SlackChannelConfigFormFields({
         </TooltipProvider>
       )}
       <Button type="submit">{isUpdate ? "Update" : "Create"}</Button>
-      <Button secondary onClick={() => router.back()}>
+      <Button prominence="secondary" onClick={() => router.back()}>
         Cancel
       </Button>
     </div>
@@ -2,7 +2,7 @@

 import { useState } from "react";
 import CardSection from "@/components/admin/CardSection";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import InputTypeIn from "@/refresh-components/inputs/InputTypeIn";
 import useSWR from "swr";
 import { ThreeDotsLoader } from "@/components/Loading";

@@ -129,7 +129,7 @@ function Main() {
       <div className="flex flex-col gap-2 desktop:flex-row desktop:items-center desktop:gap-2">
         {isApiKeySet ? (
           <>
-            <Button onClick={handleDelete} danger>
+            <Button variant="danger" onClick={handleDelete}>
               Delete API Key
             </Button>
             <Text as="p" mainContentBody text04 className="desktop:mt-0">

@@ -137,7 +137,7 @@ function Main() {
           </Text>
         </>
       ) : (
-        <Button onClick={handleSave} action>
+        <Button variant="action" onClick={handleSave}>
           Save API Key
         </Button>
       )}
@@ -10,6 +10,7 @@ import {
 import Text from "@/components/ui/text";
 import Title from "@/components/ui/title";
 import Button from "@/refresh-components/buttons/Button";
+import { Button as OpalButton } from "@opal/components";
 import { useMemo, useState } from "react";
 import useSWR, { mutate } from "swr";
 import { ReindexingProgressTable } from "../../../../components/embedding/ReindexingProgressTable";

@@ -23,6 +24,7 @@ import { FailedReIndexAttempts } from "@/components/embedding/FailedReIndexAttem
 import { useConnectorIndexingStatusWithPagination } from "@/lib/hooks";
 import { SvgX } from "@opal/icons";
 import { ConnectorCredentialPairStatus } from "@/app/admin/connector/[ccPairId]/types";
+import { useVectorDbEnabled } from "@/providers/SettingsProvider";

 export default function UpgradingPage({
   futureEmbeddingModel,

@@ -30,11 +32,12 @@ export default function UpgradingPage({
   futureEmbeddingModel: CloudEmbeddingModel | HostedEmbeddingModel;
 }) {
   const [isCancelling, setIsCancelling] = useState<boolean>(false);
+  const vectorDbEnabled = useVectorDbEnabled();

   const { data: connectors, isLoading: isLoadingConnectors } = useSWR<
     Connector<any>[]
-  >("/api/manage/connector", errorHandlingFetcher, {
-    refreshInterval: 5000, // 5 seconds
+  >(vectorDbEnabled ? "/api/manage/connector" : null, errorHandlingFetcher, {
+    refreshInterval: 5000,
   });

   const {

@@ -42,7 +45,8 @@ export default function UpgradingPage({
     isLoading: isLoadingOngoingReIndexingStatus,
   } = useConnectorIndexingStatusWithPagination(
     { secondary_index: true, get_all_connectors: true },
-    5000
+    5000,
+    vectorDbEnabled
   ) as {
     data: ConnectorIndexingStatusLiteResponse[];
     isLoading: boolean;

@@ -51,9 +55,11 @@ export default function UpgradingPage({
   const { data: failedIndexingStatus } = useSWR<
     FailedConnectorIndexingStatus[]
   >(
-    "/api/manage/admin/connector/failed-indexing-status?secondary_index=true",
+    vectorDbEnabled
+      ? "/api/manage/admin/connector/failed-indexing-status?secondary_index=true"
+      : null,
     errorHandlingFetcher,
-    { refreshInterval: 5000 } // 5 seconds
+    { refreshInterval: 5000 }
   );

   const onCancel = async () => {

@@ -140,10 +146,13 @@ export default function UpgradingPage({
               </div>
             </Modal.Body>
             <Modal.Footer>
-              <Button onClick={onCancel}>Confirm</Button>
-              <Button onClick={() => setIsCancelling(false)} secondary>
+              <OpalButton onClick={onCancel}>Confirm</OpalButton>
+              <OpalButton
+                prominence="secondary"
+                onClick={() => setIsCancelling(false)}
+              >
                 Cancel
-              </Button>
+              </OpalButton>
             </Modal.Footer>
           </Modal.Content>
         </Modal>

@@ -158,6 +167,7 @@ export default function UpgradingPage({
             {futureEmbeddingModel.model_name}
           </div>

+          {/* TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved */}
           <Button
             danger
             className="mt-4"
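The vectorDbEnabled wiring above relies on SWR's conditional-fetching convention: a null key skips the request entirely while the hook itself still runs, so React's hook-order rules are respected and data simply stays undefined. A minimal sketch of the pattern, reusing the endpoint and fetcher from the hunk above (the useConnectors wrapper name is illustrative, not from the source):

import useSWR from "swr";
import { errorHandlingFetcher } from "@/lib/fetcher";

// Hypothetical wrapper to show the gating pattern in isolation.
function useConnectors(vectorDbEnabled: boolean) {
  // Passing null as the key tells SWR not to fetch at all;
  // data remains undefined until the flag flips to true.
  return useSWR(
    vectorDbEnabled ? "/api/manage/connector" : null,
    errorHandlingFetcher,
    { refreshInterval: 5000 }
  );
}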
@@ -5,7 +5,7 @@ import { errorHandlingFetcher } from "@/lib/fetcher";
 import * as SettingsLayouts from "@/layouts/settings-layouts";
 import Text from "@/components/ui/text";
 import Title from "@/components/ui/title";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import useSWR from "swr";
 import { ModelPreview } from "@/components/embedding/ModelSelector";
 import {

@@ -130,7 +130,7 @@ function Main() {
       </CardSection>

       <div className="mt-4">
-        <Button action href="/admin/embeddings">
+        <Button variant="action" href="/admin/embeddings">
           Update Search Settings
         </Button>
       </div>
@@ -6,7 +6,7 @@ import { FormField } from "@/refresh-components/form/FormField";
 import InputTypeIn from "@/refresh-components/inputs/InputTypeIn";
 import PasswordInputTypeIn from "@/refresh-components/inputs/PasswordInputTypeIn";
 import Modal from "@/refresh-components/Modal";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";

 import { SvgArrowExchange, SvgOnyxLogo } from "@opal/icons";
 import type { IconProps } from "@opal/types";

@@ -244,13 +244,11 @@ export const WebProviderSetupModal = memo(
         )}
       </Modal.Body>
       <Modal.Footer>
-        <Button type="button" main secondary onClick={onClose}>
+        <Button prominence="secondary" type="button" onClick={onClose}>
           Cancel
         </Button>
         <Button
           type="button"
-          main
-          primary
           disabled={!canConnect || isProcessing}
           onClick={onConnect}
         >
@@ -91,6 +91,7 @@ function HoverIconButton({
 }: HoverIconButtonProps) {
   return (
     <div onMouseEnter={onMouseEnter} onMouseLeave={onMouseLeave}>
+      {/* TODO(@raunakab): migrate to opal Button once HoverIconButtonProps typing is resolved */}
       <Button {...buttonProps} rightIcon={isHovered ? SvgX : SvgCheckSquare}>
         {children}
       </Button>

@@ -1010,9 +1011,8 @@ export default function Page() {
                     {buttonState.label}
                   </HoverIconButton>
                 ) : (
-                  <Button
-                    action={false}
-                    tertiary
+                  <OpalButton
+                    prominence="tertiary"
                     disabled={
                       buttonState.disabled || !buttonState.onClick
                     }

@@ -1029,7 +1029,7 @@ export default function Page() {
                     }
                   >
                     {buttonState.label}
-                  </Button>
+                  </OpalButton>
                 )}
               </div>
             </div>

@@ -1202,9 +1202,8 @@ export default function Page() {
                     {buttonState.label}
                   </HoverIconButton>
                 ) : (
-                  <Button
-                    action={false}
-                    tertiary
+                  <OpalButton
+                    prominence="tertiary"
                     disabled={
                       buttonState.disabled || !buttonState.onClick
                     }

@@ -1221,7 +1220,7 @@ export default function Page() {
                     }
                   >
                     {buttonState.label}
-                  </Button>
+                  </OpalButton>
                 )}
               </div>
             </div>
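Files that still need className or iconClassName keep both components during the transition: the legacy default-export Button stays at the flagged call sites, and the opal component is imported under an OpalButton alias for the migrated ones. A sketch of the coexistence, mirroring the imports shown above (label and disabled are placeholder values):

import Button from "@/refresh-components/buttons/Button"; // legacy: still supports className
import { Button as OpalButton } from "@opal/components"; // new call sites

// Migrated call site uses the opal API.
<OpalButton prominence="tertiary" disabled={disabled}>
  {label}
</OpalButton>;

// TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved
<Button danger className="mt-4">
  Cancel
</Button>;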
@@ -5,8 +5,7 @@ import { useState } from "react";
 import { ValidSources } from "@/lib/types";
 import { Section } from "@/layouts/general-layouts";
 import Text from "@/refresh-components/texts/Text";
-import IconButton from "@/refresh-components/buttons/IconButton";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import Separator from "@/refresh-components/Separator";
 import { SvgChevronUp, SvgChevronDown, SvgEdit } from "@opal/icons";
 import Truncated from "@/refresh-components/texts/Truncated";

@@ -119,16 +118,21 @@ function ConfigItem({ label, value, onEdit }: ConfigItemProps) {

       {isExpandable && (
         <Button
-          tertiary
+          prominence="tertiary"
           size="md"
-          leftIcon={isExpanded ? SvgChevronUp : SvgChevronDown}
+          icon={isExpanded ? SvgChevronUp : SvgChevronDown}
           onClick={() => setIsExpanded(!isExpanded)}
         >
           {isExpanded ? "Show less" : `Show all (${value.length} items)`}
         </Button>
       )}
       {onEdit && (
-        <IconButton icon={SvgEdit} tertiary onClick={onEdit} tooltip="Edit" />
+        <Button
+          prominence="tertiary"
+          icon={SvgEdit}
+          onClick={onEdit}
+          tooltip="Edit"
+        />
       )}
     </Section>
   </Section>
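The ConfigItem hunk also retires the separate IconButton: an icon-only action becomes a self-closing opal Button that passes icon without children and keeps the tooltip prop. A minimal sketch using only the props from that hunk:

import { Button } from "@opal/components";
import { SvgEdit } from "@opal/icons";

// Icon-only usage: no children, tooltip carried over from the old IconButton.
<Button prominence="tertiary" icon={SvgEdit} onClick={onEdit} tooltip="Edit" />;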
@@ -229,6 +229,7 @@ export default function IndexAttemptErrorsModal({
       <div className="flex w-full">
         <div className="flex gap-2 ml-auto">
           {hasUnresolvedErrors && !isResolvingErrors && (
+            // TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved
             <Button
               onClick={onResolveAll}
               className="ml-4 whitespace-nowrap"
@@ -1,8 +1,7 @@
 "use client";

 import { useState, useRef } from "react";
-import Button from "@/refresh-components/buttons/Button";
-import { Button as OpalButton } from "@opal/components";
+import { Button } from "@opal/components";
 import {
   Table,
   TableBody,

@@ -176,26 +175,25 @@ export default function InlineFileManagement({
         <div className="flex gap-2">
           {!isEditing ? (
             <Button
+              prominence="secondary"
               onClick={() => setIsEditing(true)}
-              secondary
-              leftIcon={SvgEdit}
+              icon={SvgEdit}
             >
               Edit
             </Button>
           ) : (
             <>
               <Button
+                prominence="secondary"
                 onClick={handleCancel}
-                secondary
-                leftIcon={SvgX}
+                icon={SvgX}
                 disabled={isSaving}
               >
                 Cancel
               </Button>
               <Button
                 onClick={handleSaveClick}
-                primary
-                leftIcon={SvgCheck}
+                icon={SvgCheck}
                 disabled={
                   isSaving ||
                   (selectedFilesToRemove.size === 0 && filesToAdd.length === 0)

@@ -295,7 +293,7 @@ export default function InlineFileManagement({
                 >
                   {isEditing && (
                     <TableCell>
-                      <OpalButton
+                      <Button
                         icon={SvgX}
                         variant="danger"
                         prominence="tertiary"

@@ -335,9 +333,9 @@ export default function InlineFileManagement({
               id={`file-upload-${connectorId}`}
             />
             <Button
+              prominence="secondary"
               onClick={() => fileInputRef.current?.click()}
-              secondary
-              leftIcon={SvgPlusCircle}
+              icon={SvgPlusCircle}
               disabled={isSaving}
             >
               Add Files

@@ -398,8 +396,8 @@ export default function InlineFileManagement({

       <Modal.Footer>
         <Button
+          prominence="secondary"
           onClick={() => setShowSaveConfirm(false)}
-          secondary
           disabled={isSaving}
         >
           Cancel
@@ -1,6 +1,6 @@
 "use client";

-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { useState } from "react";
 import { toast } from "@/hooks/useToast";
 import { triggerIndexing } from "@/app/admin/connector/[ccPairId]/lib";
@@ -62,7 +62,7 @@ import { DropdownMenuItemWithTooltip } from "@/components/ui/dropdown-menu-with-
 import { timeAgo } from "@/lib/time";
 import { useStatusChange } from "./useStatusChange";
 import { useReIndexModal } from "./ReIndexModal";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { SvgSettings } from "@opal/icons";
 import { UserRole } from "@/lib/types";
 import { useUser } from "@/providers/UserProvider";

@@ -456,7 +456,7 @@ function Main({ ccPairId }: { ccPairId: number }) {
       {ccPair.is_editable_for_current_user && (
         <DropdownMenu>
           <DropdownMenuTrigger asChild>
-            <Button leftIcon={SvgSettings} secondary>
+            <Button prominence="secondary" icon={SvgSettings}>
               Manage
             </Button>
           </DropdownMenuTrigger>
@@ -55,7 +55,7 @@ import {
 } from "@/lib/connectors/oauth";
 import { CreateStdOAuthCredential } from "@/components/credentials/actions/CreateStdOAuthCredential";
 import { Spinner } from "@/components/Spinner";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { deleteConnector } from "@/lib/connector";
 import ConnectorDocsLink from "@/components/admin/connectors/ConnectorDocsLink";
 import Text from "@/refresh-components/texts/Text";

@@ -580,7 +580,7 @@ export default function AddConnector({
       {oauthSupportedSources.includes(connector) &&
         (NEXT_PUBLIC_CLOUD_ENABLED || NEXT_PUBLIC_TEST_ENV) && (
           <Button
-            action
+            variant="action"
             onClick={handleAuthorize}
             disabled={isAuthorizing}
             hidden={!isAuthorizeVisible}
@@ -48,6 +48,7 @@ export default function ConnectorWrapper({
       <HeaderTitle>
         <p>‘{connector}’ is not a valid Connector Type!</p>
       </HeaderTitle>
+      {/* TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved */}
       <Button
         onClick={() => window.open("/admin/indexing/status", "_self")}
         className="mr-auto"
@@ -1,5 +1,5 @@
 import { useFormContext } from "@/components/context/FormContext";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { SvgArrowLeft, SvgArrowRight, SvgPlusCircle } from "@opal/icons";

 const NavigationRow = ({

@@ -22,7 +22,11 @@ const NavigationRow = ({
       <div>
         {((formStep > 0 && !noCredentials) ||
           (formStep > 1 && !noAdvanced)) && (
-          <Button secondary onClick={prevFormStep} leftIcon={SvgArrowLeft}>
+          <Button
+            prominence="secondary"
+            onClick={prevFormStep}
+            icon={SvgArrowLeft}
+          >
             Previous
           </Button>
         )}

@@ -41,7 +45,7 @@ const NavigationRow = ({
       <div className="flex justify-end">
         {formStep === 0 && (
           <Button
-            action
+            variant="action"
             disabled={!activatedCredential}
             rightIcon={SvgArrowRight}
             onClick={() => nextFormStep()}

@@ -51,7 +55,7 @@ const NavigationRow = ({
         )}
         {!noAdvanced && formStep === 1 && (
           <Button
-            secondary
+            prominence="secondary"
             disabled={!isValid}
             rightIcon={SvgArrowRight}
             onClick={() => nextFormStep()}
@@ -3,7 +3,7 @@
 import { useEffect, useState } from "react";
 import { usePathname, useRouter, useSearchParams } from "next/navigation";
 import { AdminPageTitle } from "@/components/admin/Title";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { getSourceMetadata, isValidSource } from "@/lib/sources";
 import { ConfluenceAccessibleResource, ValidSources } from "@/lib/types";
 import CardSection from "@/components/admin/CardSection";
@@ -1,7 +1,7 @@
 import React from "react";
 import NumberInput from "./ConnectorInput/NumberInput";
 import { TextFormField } from "@/components/Field";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { SvgTrash } from "@opal/icons";
 export default function AdvancedFormPage() {
   return (

@@ -35,7 +35,7 @@ export default function AdvancedFormPage() {
         name="indexingStart"
       />
       <div className="mt-4 flex w-full mx-auto max-w-2xl justify-start">
-        <Button leftIcon={SvgTrash} danger type="submit">
+        <Button variant="danger" icon={SvgTrash} type="submit">
           Reset
         </Button>
       </div>
@@ -10,7 +10,7 @@ import { DOCS_ADMINS_PATH } from "@/lib/constants";
 import { TextFormField, SectionHeader } from "@/components/Field";
 import { Form, Formik } from "formik";
 import { User } from "@/lib/types";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import {
   Credential,
   GoogleDriveCredentialJson,

@@ -314,7 +314,7 @@ export const DriveJsonUploadSection = ({
       {isAdmin && !existingAuthCredential && (
         <div className="mt-2">
           <Button
-            danger
+            variant="danger"
             onClick={async () => {
               const endpoint =
                 localServiceAccountData?.service_account_email

@@ -467,7 +467,7 @@ export const DriveAuthSection = ({
         </div>
       </div>
       <Button
-        danger
+        variant="danger"
         onClick={async () => {
           handleRevokeAccess(
             connectorAssociated,
@@ -1,4 +1,4 @@
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { toast } from "@/hooks/useToast";
 import React, { useState, useEffect } from "react";
 import { useSWRConfig } from "swr";

@@ -314,7 +314,7 @@ export const GmailJsonUploadSection = ({
       {isAdmin && !existingAuthCredential && (
         <div className="mt-2">
           <Button
-            danger
+            variant="danger"
             onClick={async () => {
               const endpoint =
                 localServiceAccountData?.service_account_email

@@ -470,7 +470,7 @@ export const GmailAuthSection = ({
       </div>
       <Section flexDirection="row" justifyContent="between" height="fit">
         <Button
-          danger
+          variant="danger"
           onClick={async () => {
             handleRevokeAccess(
               connectorExists,

@@ -482,10 +482,7 @@ export const GmailAuthSection = ({
           Revoke Access
         </Button>
         {buildMode && onCredentialCreated && (
-          <Button
-            primary
-            onClick={() => onCredentialCreated(existingCredential)}
-          >
+          <Button onClick={() => onCredentialCreated(existingCredential)}>
             Continue
           </Button>
         )}
@@ -11,7 +11,7 @@ import {
   TableHeader,
   TableRow,
 } from "@/components/ui/table";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { Card } from "@/components/ui/card";
 import Text from "@/components/ui/text";
 import { Spinner } from "@/components/Spinner";

@@ -99,9 +99,9 @@ function Main() {
         <TableCell className="font-medium">{category}</TableCell>
         <TableCell>
           <Button
+            prominence="secondary"
             onClick={() => handleDownload(category)}
-            secondary
-            leftIcon={SvgDownloadCloud}
+            icon={SvgDownloadCloud}
           >
             Download Logs
           </Button>
@@ -4,7 +4,7 @@ import { useState } from "react";
 import { Section } from "@/layouts/general-layouts";
 import Text from "@/refresh-components/texts/Text";
 import Card from "@/refresh-components/cards/Card";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { Badge } from "@/components/ui/badge";
 import PasswordInputTypeIn from "@/refresh-components/inputs/PasswordInputTypeIn";
 import { ThreeDotsLoader } from "@/components/Loading";

@@ -126,9 +126,9 @@ export function BotConfigCard() {
           disabled={!hasServerConfigs}
         >
         <Button
+          variant="danger"
           onClick={() => setShowDeleteConfirm(true)}
           disabled={isSubmitting || hasServerConfigs}
-          danger
         >
           Delete Discord Token
         </Button>
@@ -12,7 +12,7 @@ import {
 } from "@/components/ui/table";
 import { Badge } from "@/components/ui/badge";
 import { DeleteButton } from "@/components/DeleteButton";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import Switch from "@/refresh-components/inputs/Switch";
 import { SvgEdit, SvgServer } from "@opal/icons";
 import EmptyMessage from "@/refresh-components/EmptyMessage";

@@ -116,10 +116,10 @@ export function DiscordGuildsTable({ guilds, onRefresh }: Props) {
       <TableRow key={guild.id}>
         <TableCell>
           <Button
-            internal
+            prominence="internal"
             disabled={!guild.guild_id}
             onClick={() => router.push(`/admin/discord-bot/${guild.id}`)}
-            leftIcon={SvgEdit}
+            icon={SvgEdit}
           >
             {guild.guild_name || `Server #${guild.id}`}
           </Button>
@@ -12,7 +12,7 @@ import Text from "@/refresh-components/texts/Text";
 import Card from "@/refresh-components/cards/Card";
 import { Callout } from "@/components/ui/callout";
 import Message from "@/refresh-components/messages/Message";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { SvgServer } from "@opal/icons";
 import InputSelect from "@/refresh-components/inputs/InputSelect";
 import {

@@ -104,13 +104,17 @@ function GuildDetailContent({
         width="fit"
         gap={0.5}
       >
-        <Button onClick={handleEnableAll} disabled={disabled} secondary>
+        <Button
+          prominence="secondary"
+          onClick={handleEnableAll}
+          disabled={disabled}
+        >
           Enable All
         </Button>
         <Button
+          prominence="secondary"
           onClick={handleDisableAll}
           disabled={disabled}
-          secondary
         >
           Disable All
         </Button>
@@ -200,6 +200,7 @@ function RetrievalSourceSection() {
       </InputSelect>

       {hasChanges && (
+        // TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved
         <Button
           className="self-center"
           onClick={handleUpdate}
@@ -257,6 +257,7 @@ export const DocumentSetCreationForm = ({
       </div>

       <div className="flex mt-6 pt-4 border-t border-border-02">
+        {/* TODO(@raunakab): migrate to opal Button once className/iconClassName is resolved */}
         <Button
           type="submit"
           disabled={props.isSubmitting}
@@ -10,9 +10,11 @@ import { ADMIN_ROUTE_CONFIG, ADMIN_PATHS } from "@/lib/admin-routes";
 import CardSection from "@/components/admin/CardSection";
 import { DocumentSetCreationForm } from "../DocumentSetCreationForm";
 import { useRouter } from "next/navigation";
+import { useVectorDbEnabled } from "@/providers/SettingsProvider";

 function Main({ documentSetId }: { documentSetId: number }) {
   const router = useRouter();
+  const vectorDbEnabled = useVectorDbEnabled();

   const {
     data: documentSets,

@@ -24,12 +26,16 @@ function Main({ documentSetId }: { documentSetId: number }) {
     data: ccPairs,
     isLoading: isCCPairsLoading,
     error: ccPairsError,
-  } = useConnectorStatus();
+  } = useConnectorStatus(30000, vectorDbEnabled);

   // EE only
   const { data: userGroups, isLoading: userGroupsIsLoading } = useUserGroups();

-  if (isDocumentSetsLoading || isCCPairsLoading || userGroupsIsLoading) {
+  if (
+    isDocumentSetsLoading ||
+    (vectorDbEnabled && isCCPairsLoading) ||
+    userGroupsIsLoading
+  ) {
     return (
       <div className="flex justify-center items-center min-h-[400px]">
         <ThreeDotsLoader />

@@ -46,7 +52,7 @@ function Main({ documentSetId }: { documentSetId: number }) {
     );
   }

-  if (ccPairsError || !ccPairs) {
+  if (vectorDbEnabled && (ccPairsError || !ccPairs)) {
     return (
       <ErrorCallout
         errorTitle="Failed to fetch Connectors"

@@ -70,7 +76,7 @@ function Main({ documentSetId }: { documentSetId: number }) {
   return (
     <CardSection>
       <DocumentSetCreationForm
-        ccPairs={ccPairs}
+        ccPairs={ccPairs ?? []}
         userGroups={userGroups}
         onClose={() => {
           refreshDocumentSets();
@@ -9,20 +9,22 @@ import { ErrorCallout } from "@/components/ErrorCallout";
 import { useRouter } from "next/navigation";
 import { refreshDocumentSets } from "../hooks";
 import CardSection from "@/components/admin/CardSection";
+import { useVectorDbEnabled } from "@/providers/SettingsProvider";

 function Main() {
   const router = useRouter();
+  const vectorDbEnabled = useVectorDbEnabled();

   const {
     data: ccPairs,
     isLoading: isCCPairsLoading,
     error: ccPairsError,
-  } = useConnectorStatus();
+  } = useConnectorStatus(30000, vectorDbEnabled);

   // EE only
   const { data: userGroups, isLoading: userGroupsIsLoading } = useUserGroups();

-  if (isCCPairsLoading || userGroupsIsLoading) {
+  if ((vectorDbEnabled && isCCPairsLoading) || userGroupsIsLoading) {
     return (
       <div className="flex justify-center items-center min-h-[400px]">
         <ThreeDotsLoader />

@@ -30,7 +32,7 @@ function Main() {
     );
   }

-  if (ccPairsError || !ccPairs) {
+  if (vectorDbEnabled && (ccPairsError || !ccPairs)) {
     return (
       <ErrorCallout
         errorTitle="Failed to fetch Connectors"

@@ -43,7 +45,7 @@ function Main() {
       <>
         <CardSection>
           <DocumentSetCreationForm
-            ccPairs={ccPairs}
+            ccPairs={ccPairs ?? []}
             userGroups={userGroups}
             onClose={() => {
               refreshDocumentSets();
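Both document-set pages follow the same three-step gating once the connector fetch can be disabled: pass the flag into useConnectorStatus, gate the loading and error branches on it, and default ccPairs to an empty list, since a skipped fetch leaves the data undefined. A condensed sketch of the control flow, with the component internals elided:

const vectorDbEnabled = useVectorDbEnabled();
const { data: ccPairs, isLoading, error } = useConnectorStatus(30000, vectorDbEnabled);

// Only block rendering on connector state when the vector DB is in use.
if ((vectorDbEnabled && isLoading) || userGroupsIsLoading) {
  return <ThreeDotsLoader />;
}
if (vectorDbEnabled && (error || !ccPairs)) {
  return <ErrorCallout errorTitle="Failed to fetch Connectors" />;
}

// With fetching disabled, ccPairs is undefined; hand the form an empty list.
return <DocumentSetCreationForm ccPairs={ccPairs ?? []} userGroups={userGroups} />;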
@@ -27,7 +27,7 @@ import {
   EMBEDDING_PROVIDERS_ADMIN_URL,
 } from "@/lib/llmConfig/constants";
 import { AdvancedSearchConfiguration } from "@/app/admin/embeddings/interfaces";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";

 export interface EmbeddingDetails {
   api_key?: string;

@@ -279,7 +279,7 @@ export default function EmbeddingModelSelection({
       {currentEmbeddingModel?.provider_type && (
         <div className="mt-2">
           <Button
-            secondary
+            prominence="secondary"
             onClick={() => {
               const allProviders = [
                 ...AVAILABLE_CLOUD_PROVIDERS,
@@ -21,7 +21,7 @@ import {
   MixedBreadIcon,
 } from "@/components/icons/icons";
 import Modal from "@/refresh-components/Modal";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { TextFormField } from "@/components/Field";
 import { SettingsContext } from "@/providers/SettingsProvider";
 import { SvgAlertTriangle, SvgKey } from "@opal/icons";
@@ -1,5 +1,5 @@
 import Modal from "@/refresh-components/Modal";
-import Button from "@/refresh-components/buttons/Button";
+import { Button } from "@opal/components";
 import { CloudEmbeddingModel } from "../../../../components/embedding/interfaces";
 import { SvgCheck } from "@opal/icons";

Some files were not shown because too many files have changed in this diff.