Mirror of https://github.com/onyx-dot-app/onyx.git (synced 2026-02-17 07:45:47 +00:00)

Compare commits: as_2...cloud_debu (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 09e6bd3c9c | |
| | c1803cdd56 | |
| | a5b9c76012 | |

.github/pull_request_template.md (vendored): 18 changes
@@ -6,6 +6,24 @@
[Describe the tests you ran to verify your changes]

## Accepted Risk (provide if relevant)
N/A

## Related Issue(s) (provide if relevant)
N/A

## Mental Checklist:
- All of the automated tests pass
- All PR comments are addressed and marked resolved
- If there are migrations, they have been rebased to latest main
- If there are new dependencies, they are added to the requirements
- If there are new environment variables, they are added to all of the deployment methods
- If there are new APIs that don't require auth, they are added to PUBLIC_ENDPOINT_SPECS
- Docker images build and basic functionalities work
- Author has done a final read through of the PR right before merge

## Backporting (check the box to trigger backport action)
Note: You have to check that the action passes, otherwise resolve the conflicts manually and tag the patches.
- [ ] This PR should be backported (make sure to check that the backport attempt succeeds)
@@ -66,7 +66,6 @@ jobs:
NEXT_PUBLIC_POSTHOG_HOST=${{ secrets.POSTHOG_HOST }}
NEXT_PUBLIC_SENTRY_DSN=${{ secrets.SENTRY_DSN }}
NEXT_PUBLIC_GTM_ENABLED=true
NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=true
# needed due to weird interactions with the builds for different platforms
no-cache: true
labels: ${{ steps.meta.outputs.labels }}
README.md: 16 changes
@@ -3,7 +3,7 @@
<a name="readme-top"></a>

<h2 align="center">
<a href="https://www.onyx.app/"> <img width="50%" src="https://github.com/onyx-dot-app/onyx/blob/logo/OnyxLogoCropped.jpg?raw=true)" /></a>
<a href="https://www.onyx.app/"> <img width="50%" src="https://github.com/onyx-dot-app/onyx/blob/logo/LogoOnyx.png?raw=true)" /></a>
</h2>

<p align="center">

@@ -24,7 +24,7 @@
</a>
</p>

<strong>[Onyx](https://www.onyx.app/)</strong> (formerly Danswer) is the AI Assistant connected to your company's docs, apps, and people.
<strong>[Onyx](https://www.onyx.app/)</strong> (Formerly Danswer) is the AI Assistant connected to your company's docs, apps, and people.
Onyx provides a Chat interface and plugs into any LLM of your choice. Onyx can be deployed anywhere and for any
scale - on a laptop, on-premise, or to cloud. Since you own the deployment, your user data and chats are fully in your
own control. Onyx is dual Licensed with most of it under MIT license and designed to be modular and easily extensible. The system also comes fully ready

@@ -133,3 +133,15 @@ Looking to contribute? Please check out the [Contribution Guide](CONTRIBUTING.md)
## ⭐Star History

[](https://star-history.com/#onyx-dot-app/onyx&Date)

## ✨Contributors

<a href="https://github.com/onyx-dot-app/onyx/graphs/contributors">
<img alt="contributors" src="https://contrib.rocks/image?repo=onyx-dot-app/onyx"/>
</a>

<p align="right" style="font-size: 14px; color: #555; margin-top: 20px;">
<a href="#readme-top" style="text-decoration: none; color: #007bff; font-weight: bold;">
↑ Back to Top ↑
</a>
</p>
@@ -122,7 +122,7 @@ def _cleanup_document_set__user_group_relationships__no_commit(
|
||||
)
|
||||
|
||||
|
||||
def validate_object_creation_for_user(
|
||||
def validate_user_creation_permissions(
|
||||
db_session: Session,
|
||||
user: User | None,
|
||||
target_group_ids: list[int] | None = None,
|
||||
@@ -440,108 +440,32 @@ def remove_curator_status__no_commit(db_session: Session, user: User) -> None:
|
||||
_validate_curator_status__no_commit(db_session, [user])
|
||||
|
||||
|
||||
def _validate_curator_relationship_update_requester(
|
||||
db_session: Session,
|
||||
user_group_id: int,
|
||||
user_making_change: User | None = None,
|
||||
) -> None:
|
||||
"""
|
||||
This function validates that the user making the change has the necessary permissions
|
||||
to update the curator relationship for the target user in the given user group.
|
||||
"""
|
||||
|
||||
if user_making_change is None or user_making_change.role == UserRole.ADMIN:
|
||||
return
|
||||
|
||||
# check if the user making the change is a curator in the group they are changing the curator relationship for
|
||||
user_making_change_curator_groups = fetch_user_groups_for_user(
|
||||
db_session=db_session,
|
||||
user_id=user_making_change.id,
|
||||
# only check if the user making the change is a curator if they are a curator
|
||||
# otherwise, they are a global_curator and can update the curator relationship
|
||||
# for any group they are a member of
|
||||
only_curator_groups=user_making_change.role == UserRole.CURATOR,
|
||||
)
|
||||
requestor_curator_group_ids = [
|
||||
group.id for group in user_making_change_curator_groups
|
||||
]
|
||||
if user_group_id not in requestor_curator_group_ids:
|
||||
raise ValueError(
|
||||
f"user making change {user_making_change.email} is not a curator,"
|
||||
f" admin, or global_curator for group '{user_group_id}'"
|
||||
)
|
||||
|
||||
|
||||
def _validate_curator_relationship_update_request(
|
||||
db_session: Session,
|
||||
user_group_id: int,
|
||||
target_user: User,
|
||||
) -> None:
|
||||
"""
|
||||
This function validates that the curator_relationship_update request itself is valid.
|
||||
"""
|
||||
if target_user.role == UserRole.ADMIN:
|
||||
raise ValueError(
|
||||
f"User '{target_user.email}' is an admin and therefore has all permissions "
|
||||
"of a curator. If you'd like this user to only have curator permissions, "
|
||||
"you must update their role to BASIC then assign them to be CURATOR in the "
|
||||
"appropriate groups."
|
||||
)
|
||||
elif target_user.role == UserRole.GLOBAL_CURATOR:
|
||||
raise ValueError(
|
||||
f"User '{target_user.email}' is a global_curator and therefore has all "
|
||||
"permissions of a curator for all groups. If you'd like this user to only "
|
||||
"have curator permissions for a specific group, you must update their role "
|
||||
"to BASIC then assign them to be CURATOR in the appropriate groups."
|
||||
)
|
||||
elif target_user.role not in [UserRole.CURATOR, UserRole.BASIC]:
|
||||
raise ValueError(
|
||||
f"This endpoint can only be used to update the curator relationship for "
|
||||
"users with the CURATOR or BASIC role. \n"
|
||||
f"Target user: {target_user.email} \n"
|
||||
f"Target user role: {target_user.role} \n"
|
||||
)
|
||||
|
||||
# check if the target user is in the group they are changing the curator relationship for
|
||||
requested_user_groups = fetch_user_groups_for_user(
|
||||
db_session=db_session,
|
||||
user_id=target_user.id,
|
||||
only_curator_groups=False,
|
||||
)
|
||||
group_ids = [group.id for group in requested_user_groups]
|
||||
if user_group_id not in group_ids:
|
||||
raise ValueError(
|
||||
f"target user {target_user.email} is not in group '{user_group_id}'"
|
||||
)
|
||||
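For reference, a hypothetical test-style sketch of the role check above (the stand-in user object and the None session are illustrative only; for an ADMIN target the function raises before the session is ever used):

```python
from types import SimpleNamespace

import pytest

# Hypothetical check: an ADMIN target user is rejected up front.
admin_stub = SimpleNamespace(role=UserRole.ADMIN, email="admin@example.com")

with pytest.raises(ValueError):
    _validate_curator_relationship_update_request(
        db_session=None,  # never touched for an ADMIN target in this sketch
        user_group_id=1,
        target_user=admin_stub,
    )
```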
|
||||
|
||||
def update_user_curator_relationship(
|
||||
db_session: Session,
|
||||
user_group_id: int,
|
||||
set_curator_request: SetCuratorRequest,
|
||||
user_making_change: User | None = None,
|
||||
) -> None:
|
||||
target_user = fetch_user_by_id(db_session, set_curator_request.user_id)
|
||||
if not target_user:
|
||||
user = fetch_user_by_id(db_session, set_curator_request.user_id)
|
||||
if not user:
|
||||
raise ValueError(f"User with id '{set_curator_request.user_id}' not found")
|
||||
|
||||
_validate_curator_relationship_update_request(
|
||||
if user.role == UserRole.ADMIN:
|
||||
raise ValueError(
|
||||
f"User '{user.email}' is an admin and therefore has all permissions "
|
||||
"of a curator. If you'd like this user to only have curator permissions, "
|
||||
"you must update their role to BASIC then assign them to be CURATOR in the "
|
||||
"appropriate groups."
|
||||
)
|
||||
|
||||
requested_user_groups = fetch_user_groups_for_user(
|
||||
db_session=db_session,
|
||||
user_group_id=user_group_id,
|
||||
target_user=target_user,
|
||||
user_id=set_curator_request.user_id,
|
||||
only_curator_groups=False,
|
||||
)
|
||||
|
||||
_validate_curator_relationship_update_requester(
|
||||
db_session=db_session,
|
||||
user_group_id=user_group_id,
|
||||
user_making_change=user_making_change,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"user_making_change={user_making_change.email if user_making_change else 'None'} is "
|
||||
f"updating the curator relationship for user={target_user.email} "
|
||||
f"in group={user_group_id} to is_curator={set_curator_request.is_curator}"
|
||||
)
|
||||
group_ids = [group.id for group in requested_user_groups]
|
||||
if user_group_id not in group_ids:
|
||||
raise ValueError(f"user is not in group '{user_group_id}'")
|
||||
|
||||
relationship_to_update = (
|
||||
db_session.query(User__UserGroup)
|
||||
@@ -562,7 +486,7 @@ def update_user_curator_relationship(
|
||||
)
|
||||
db_session.add(relationship_to_update)
|
||||
|
||||
_validate_curator_status__no_commit(db_session, [target_user])
|
||||
_validate_curator_status__no_commit(db_session, [user])
|
||||
db_session.commit()
|
||||
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@ from fastapi import APIRouter
|
||||
from fastapi import Depends
|
||||
from fastapi import HTTPException
|
||||
from fastapi import Response
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ee.onyx.auth.users import current_cloud_superuser
|
||||
from ee.onyx.configs.app_configs import STRIPE_SECRET_KEY
|
||||
@@ -13,23 +12,15 @@ from ee.onyx.server.tenants.billing import fetch_tenant_stripe_information
|
||||
from ee.onyx.server.tenants.models import BillingInformation
|
||||
from ee.onyx.server.tenants.models import ImpersonateRequest
|
||||
from ee.onyx.server.tenants.models import ProductGatingRequest
|
||||
from ee.onyx.server.tenants.provisioning import delete_user_from_control_plane
|
||||
from ee.onyx.server.tenants.user_mapping import get_tenant_id_for_email
|
||||
from ee.onyx.server.tenants.user_mapping import remove_all_users_from_tenant
|
||||
from ee.onyx.server.tenants.user_mapping import remove_users_from_tenant
|
||||
from onyx.auth.users import auth_backend
|
||||
from onyx.auth.users import current_admin_user
|
||||
from onyx.auth.users import get_jwt_strategy
|
||||
from onyx.auth.users import User
|
||||
from onyx.configs.app_configs import WEB_DOMAIN
|
||||
from onyx.db.auth import get_user_count
|
||||
from onyx.db.engine import get_current_tenant_id
|
||||
from onyx.db.engine import get_session
|
||||
from onyx.db.engine import get_session_with_tenant
|
||||
from onyx.db.notification import create_notification
|
||||
from onyx.db.users import delete_user_from_db
|
||||
from onyx.db.users import get_user_by_email
|
||||
from onyx.server.manage.models import UserByEmail
|
||||
from onyx.server.settings.store import load_settings
|
||||
from onyx.server.settings.store import store_settings
|
||||
from onyx.utils.logger import setup_logger
|
||||
@@ -123,48 +114,3 @@ async def impersonate_user(
|
||||
samesite="lax",
|
||||
)
|
||||
return response
|
||||
|
||||
|
||||
@router.post("/leave-organization")
|
||||
async def leave_organization(
|
||||
user_email: UserByEmail,
|
||||
current_user: User | None = Depends(current_admin_user),
|
||||
db_session: Session = Depends(get_session),
|
||||
tenant_id: str = Depends(get_current_tenant_id),
|
||||
) -> None:
|
||||
if current_user is None or current_user.email != user_email.user_email:
|
||||
raise HTTPException(
|
||||
status_code=403, detail="You can only leave the organization as yourself"
|
||||
)
|
||||
|
||||
user_to_delete = get_user_by_email(user_email.user_email, db_session)
|
||||
if user_to_delete is None:
|
||||
raise HTTPException(status_code=404, detail="User not found")
|
||||
|
||||
num_admin_users = await get_user_count(only_admin_users=True)
|
||||
|
||||
should_delete_tenant = num_admin_users == 1
|
||||
|
||||
if should_delete_tenant:
|
||||
logger.info(
|
||||
"Last admin user is leaving the organization. Deleting tenant from control plane."
|
||||
)
|
||||
try:
|
||||
await delete_user_from_control_plane(tenant_id, user_to_delete.email)
|
||||
logger.debug("User deleted from control plane")
|
||||
except Exception as e:
|
||||
logger.exception(
|
||||
f"Failed to delete user from control plane for tenant {tenant_id}: {e}"
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to remove user from control plane: {str(e)}",
|
||||
)
|
||||
|
||||
db_session.expunge(user_to_delete)
|
||||
delete_user_from_db(user_to_delete, db_session)
|
||||
|
||||
if should_delete_tenant:
|
||||
remove_all_users_from_tenant(tenant_id)
|
||||
else:
|
||||
remove_users_from_tenant([user_to_delete.email], tenant_id)
|
||||
|
||||
@@ -39,8 +39,3 @@ class TenantCreationPayload(BaseModel):
    tenant_id: str
    email: str
    referral_source: str | None = None


class TenantDeletionPayload(BaseModel):
    tenant_id: str
    email: str
@@ -15,7 +15,6 @@ from ee.onyx.configs.app_configs import HUBSPOT_TRACKING_URL
|
||||
from ee.onyx.configs.app_configs import OPENAI_DEFAULT_API_KEY
|
||||
from ee.onyx.server.tenants.access import generate_data_plane_token
|
||||
from ee.onyx.server.tenants.models import TenantCreationPayload
|
||||
from ee.onyx.server.tenants.models import TenantDeletionPayload
|
||||
from ee.onyx.server.tenants.schema_management import create_schema_if_not_exists
|
||||
from ee.onyx.server.tenants.schema_management import drop_schema
|
||||
from ee.onyx.server.tenants.schema_management import run_alembic_migrations
|
||||
@@ -186,7 +185,6 @@ async def rollback_tenant_provisioning(tenant_id: str) -> None:
|
||||
try:
|
||||
# Drop the tenant's schema to rollback provisioning
|
||||
drop_schema(tenant_id)
|
||||
|
||||
# Remove tenant mapping
|
||||
with Session(get_sqlalchemy_engine()) as db_session:
|
||||
db_session.query(UserTenantMapping).filter(
|
||||
@@ -322,26 +320,3 @@ async def submit_to_hubspot(
|
||||
|
||||
if response.status_code != 200:
|
||||
logger.error(f"Failed to submit to HubSpot: {response.text}")
|
||||
|
||||
|
||||
async def delete_user_from_control_plane(tenant_id: str, email: str) -> None:
|
||||
token = generate_data_plane_token()
|
||||
headers = {
|
||||
"Authorization": f"Bearer {token}",
|
||||
"Content-Type": "application/json",
|
||||
}
|
||||
payload = TenantDeletionPayload(tenant_id=tenant_id, email=email)
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.delete(
|
||||
f"{CONTROL_PLANE_API_BASE_URL}/tenants/delete",
|
||||
headers=headers,
|
||||
json=payload.model_dump(),
|
||||
) as response:
|
||||
print(response)
|
||||
if response.status != 200:
|
||||
error_text = await response.text()
|
||||
logger.error(f"Control plane tenant creation failed: {error_text}")
|
||||
raise Exception(
|
||||
f"Failed to delete tenant on control plane: {error_text}"
|
||||
)
|
||||
|
||||
@@ -68,11 +68,3 @@ def remove_users_from_tenant(emails: list[str], tenant_id: str) -> None:
                f"Failed to remove users from tenant {tenant_id}: {str(e)}"
            )
            db_session.rollback()


def remove_all_users_from_tenant(tenant_id: str) -> None:
    with get_session_with_tenant(POSTGRES_DEFAULT_SCHEMA) as db_session:
        db_session.query(UserTenantMapping).filter(
            UserTenantMapping.tenant_id == tenant_id
        ).delete()
        db_session.commit()
@@ -83,7 +83,7 @@ def patch_user_group(
def set_user_curator(
    user_group_id: int,
    set_curator_request: SetCuratorRequest,
    user: User | None = Depends(current_curator_or_admin_user),
    _: User | None = Depends(current_admin_user),
    db_session: Session = Depends(get_session),
) -> None:
    try:
@@ -91,7 +91,6 @@ def set_user_curator(
            db_session=db_session,
            user_group_id=user_group_id,
            set_curator_request=set_curator_request,
            user_making_change=user,
        )
    except ValueError as e:
        logger.error(f"Error setting user curator: {e}")
@@ -10,7 +10,6 @@ logger = setup_logger()


def posthog_on_error(error: Any, items: Any) -> None:
    """Log any PostHog delivery errors."""
    logger.error(f"PostHog error: {error}, items: {items}")


@@ -25,10 +24,15 @@ posthog = Posthog(
def event_telemetry(
    distinct_id: str, event: str, properties: dict | None = None
) -> None:
    """Capture and send an event to PostHog, flushing immediately."""
    logger.info(f"Capturing PostHog event: {distinct_id} {event} {properties}")
    logger.info(f"Capturing Posthog event: {distinct_id} {event} {properties}")
    print("API KEY", POSTHOG_API_KEY)
    print("HOST", POSTHOG_HOST)
    try:
        posthog.capture(distinct_id, event, properties)
        print(type(distinct_id))
        print(type(event))
        print(type(properties))
        response = posthog.capture(distinct_id, event, properties)
        posthog.flush()
        print(response)
    except Exception as e:
        logger.error(f"Error capturing PostHog event: {e}")
        logger.error(f"Error capturing Posthog event: {e}")
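For reference, a hypothetical call site for event_telemetry (the distinct id, event name, and properties below are made-up values, not taken from this diff):

```python
# Hypothetical usage of event_telemetry; values are illustrative only.
event_telemetry(
    distinct_id="tenant_1234",
    event="user_signed_up",
    properties={"source": "cloud", "plan": "free"},
)
```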
@@ -1,80 +0,0 @@
|
||||
import smtplib
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from textwrap import dedent
|
||||
|
||||
from onyx.configs.app_configs import EMAIL_CONFIGURED
|
||||
from onyx.configs.app_configs import EMAIL_FROM
|
||||
from onyx.configs.app_configs import SMTP_PASS
|
||||
from onyx.configs.app_configs import SMTP_PORT
|
||||
from onyx.configs.app_configs import SMTP_SERVER
|
||||
from onyx.configs.app_configs import SMTP_USER
|
||||
from onyx.configs.app_configs import WEB_DOMAIN
|
||||
from onyx.db.models import User
|
||||
|
||||
|
||||
def send_email(
|
||||
user_email: str,
|
||||
subject: str,
|
||||
body: str,
|
||||
mail_from: str = EMAIL_FROM,
|
||||
) -> None:
|
||||
if not EMAIL_CONFIGURED:
|
||||
raise ValueError("Email is not configured.")
|
||||
|
||||
msg = MIMEMultipart()
|
||||
msg["Subject"] = subject
|
||||
msg["To"] = user_email
|
||||
if mail_from:
|
||||
msg["From"] = mail_from
|
||||
|
||||
msg.attach(MIMEText(body))
|
||||
|
||||
try:
|
||||
with smtplib.SMTP(SMTP_SERVER, SMTP_PORT) as s:
|
||||
s.starttls()
|
||||
s.login(SMTP_USER, SMTP_PASS)
|
||||
s.send_message(msg)
|
||||
except Exception as e:
|
||||
raise e
|
||||
|
||||
|
||||
def send_user_email_invite(user_email: str, current_user: User) -> None:
|
||||
subject = "Invitation to Join Onyx Workspace"
|
||||
body = dedent(
|
||||
f"""\
|
||||
Hello,
|
||||
|
||||
You have been invited to join a workspace on Onyx.
|
||||
|
||||
To join the workspace, please visit the following link:
|
||||
|
||||
{WEB_DOMAIN}/auth/login
|
||||
|
||||
Best regards,
|
||||
The Onyx Team
|
||||
"""
|
||||
)
|
||||
send_email(user_email, subject, body, current_user.email)
|
||||
|
||||
|
||||
def send_forgot_password_email(
|
||||
user_email: str,
|
||||
token: str,
|
||||
mail_from: str = EMAIL_FROM,
|
||||
) -> None:
|
||||
subject = "Onyx Forgot Password"
|
||||
link = f"{WEB_DOMAIN}/auth/reset-password?token={token}"
|
||||
body = f"Click the following link to reset your password: {link}"
|
||||
send_email(user_email, subject, body, mail_from)
|
||||
|
||||
|
||||
def send_user_verification_email(
|
||||
user_email: str,
|
||||
token: str,
|
||||
mail_from: str = EMAIL_FROM,
|
||||
) -> None:
|
||||
subject = "Onyx Email Verification"
|
||||
link = f"{WEB_DOMAIN}/auth/verify-email?token={token}"
|
||||
body = f"Click the following link to verify your email address: {link}"
|
||||
send_email(user_email, subject, body, mail_from)
|
||||
@@ -1,7 +1,10 @@
|
||||
import smtplib
|
||||
import uuid
|
||||
from collections.abc import AsyncGenerator
|
||||
from datetime import datetime
|
||||
from datetime import timezone
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from typing import cast
|
||||
from typing import Dict
|
||||
from typing import List
|
||||
@@ -50,17 +53,19 @@ from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from onyx.auth.api_key import get_hashed_api_key_from_request
|
||||
from onyx.auth.email_utils import send_forgot_password_email
|
||||
from onyx.auth.email_utils import send_user_verification_email
|
||||
from onyx.auth.invited_users import get_invited_users
|
||||
from onyx.auth.schemas import UserCreate
|
||||
from onyx.auth.schemas import UserRole
|
||||
from onyx.auth.schemas import UserUpdate
|
||||
from onyx.configs.app_configs import AUTH_TYPE
|
||||
from onyx.configs.app_configs import DISABLE_AUTH
|
||||
from onyx.configs.app_configs import EMAIL_CONFIGURED
|
||||
from onyx.configs.app_configs import EMAIL_FROM
|
||||
from onyx.configs.app_configs import REQUIRE_EMAIL_VERIFICATION
|
||||
from onyx.configs.app_configs import SESSION_EXPIRE_TIME_SECONDS
|
||||
from onyx.configs.app_configs import SMTP_PASS
|
||||
from onyx.configs.app_configs import SMTP_PORT
|
||||
from onyx.configs.app_configs import SMTP_SERVER
|
||||
from onyx.configs.app_configs import SMTP_USER
|
||||
from onyx.configs.app_configs import TRACK_EXTERNAL_IDP_EXPIRY
|
||||
from onyx.configs.app_configs import USER_AUTH_SECRET
|
||||
from onyx.configs.app_configs import VALID_EMAIL_DOMAINS
|
||||
@@ -188,6 +193,30 @@ def verify_email_domain(email: str) -> None:
|
||||
)
|
||||
|
||||
|
||||
def send_user_verification_email(
|
||||
user_email: str,
|
||||
token: str,
|
||||
mail_from: str = EMAIL_FROM,
|
||||
) -> None:
|
||||
msg = MIMEMultipart()
|
||||
msg["Subject"] = "Onyx Email Verification"
|
||||
msg["To"] = user_email
|
||||
if mail_from:
|
||||
msg["From"] = mail_from
|
||||
|
||||
link = f"{WEB_DOMAIN}/auth/verify-email?token={token}"
|
||||
|
||||
body = MIMEText(f"Click the following link to verify your email address: {link}")
|
||||
msg.attach(body)
|
||||
|
||||
with smtplib.SMTP(SMTP_SERVER, SMTP_PORT) as s:
|
||||
s.starttls()
|
||||
# If credentials fails with gmail, check (You need an app password, not just the basic email password)
|
||||
# https://support.google.com/accounts/answer/185833?sjid=8512343437447396151-NA
|
||||
s.login(SMTP_USER, SMTP_PASS)
|
||||
s.send_message(msg)
|
||||
|
||||
|
||||
class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
|
||||
reset_password_token_secret = USER_AUTH_SECRET
|
||||
verification_token_secret = USER_AUTH_SECRET
|
||||
@@ -477,15 +506,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
|
||||
async def on_after_forgot_password(
|
||||
self, user: User, token: str, request: Optional[Request] = None
|
||||
) -> None:
|
||||
if not EMAIL_CONFIGURED:
|
||||
logger.error(
|
||||
"Email is not configured. Please configure email in the admin panel"
|
||||
)
|
||||
raise HTTPException(
|
||||
status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
"Your admin has not enbaled this feature.",
|
||||
)
|
||||
send_forgot_password_email(user.email, token)
|
||||
logger.notice(f"User {user.id} has forgot their password. Reset token: {token}")
|
||||
|
||||
async def on_after_request_verify(
|
||||
self, user: User, token: str, request: Optional[Request] = None
|
||||
@@ -603,7 +624,9 @@ def get_database_strategy(
|
||||
|
||||
|
||||
auth_backend = AuthenticationBackend(
|
||||
name="jwt", transport=cookie_transport, get_strategy=get_jwt_strategy
|
||||
name="jwt" if MULTI_TENANT else "database",
|
||||
transport=cookie_transport,
|
||||
get_strategy=get_jwt_strategy if MULTI_TENANT else get_database_strategy, # type: ignore
|
||||
) # type: ignore
|
||||
|
||||
|
||||
|
||||
@@ -3,54 +3,12 @@ import json
from typing import Any
from typing import cast

from celery import Celery
from redis import Redis

from onyx.background.celery.configs.base import CELERY_SEPARATOR
from onyx.configs.constants import OnyxCeleryPriority


def celery_get_unacked_length(r: Redis) -> int:
    """Checking the unacked queue is useful because a non-zero length tells us there
    may be prefetched tasks.

    There can be other tasks in here besides indexing tasks, so this is mostly useful
    just to see if the task count is non zero.

    ref: https://blog.hikaru.run/2022/08/29/get-waiting-tasks-count-in-celery.html
    """
    length = cast(int, r.hlen("unacked"))
    return length


def celery_get_unacked_task_ids(queue: str, r: Redis) -> set[str]:
    """Gets the set of task id's matching the given queue in the unacked hash.

    Unacked entries belonging to the indexing queue are "prefetched", so this gives
    us crucial visibility as to what tasks are in that state.
    """
    tasks: set[str] = set()

    for _, v in r.hscan_iter("unacked"):
        v_bytes = cast(bytes, v)
        v_str = v_bytes.decode("utf-8")
        task = json.loads(v_str)

        task_description = task[0]
        task_queue = task[2]

        if task_queue != queue:
            continue

        task_id = task_description.get("headers", {}).get("id")
        if not task_id:
            continue

        # if the queue matches and we see the task_id, add it
        tasks.add(task_id)
    return tasks


def celery_get_queue_length(queue: str, r: Redis) -> int:
    """This is a redis specific way to get the length of a celery queue.
    It is priority aware and knows how to count across the multiple redis lists
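The body of this function is elided by the hunk; as a rough illustration of what priority-aware counting can look like with the Redis broker, here is a sketch only, assuming Celery's convention of one Redis list per priority level and reusing the imported CELERY_SEPARATOR and OnyxCeleryPriority names (the exact key construction in Onyx may differ):

```python
# Sketch: with the Redis broker, Celery keeps one list per priority level,
# named by appending a separator and the priority to the base queue name.
def sketch_queue_length(queue: str, r: Redis) -> int:
    total = 0
    for priority in range(len(OnyxCeleryPriority)):  # assumed set of priority levels
        key = queue if priority == 0 else f"{queue}{CELERY_SEPARATOR}{priority}"
        total += int(r.llen(key))
    return total
```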
@@ -89,74 +47,3 @@ def celery_find_task(task_id: str, queue: str, r: Redis) -> int:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def celery_inspect_get_workers(name_filter: str | None, app: Celery) -> list[str]:
|
||||
"""Returns a list of current workers containing name_filter, or all workers if
|
||||
name_filter is None.
|
||||
|
||||
We've empirically discovered that the celery inspect API is potentially unstable
|
||||
and may hang or return empty results when celery is under load. Suggest using this
|
||||
more to debug and troubleshoot than in production code.
|
||||
"""
|
||||
worker_names: list[str] = []
|
||||
|
||||
# filter for and create an indexing specific inspect object
|
||||
inspect = app.control.inspect()
|
||||
workers: dict[str, Any] = inspect.ping() # type: ignore
|
||||
if workers:
|
||||
for worker_name in list(workers.keys()):
|
||||
# if the name filter is not set, return all worker names
|
||||
if not name_filter:
|
||||
worker_names.append(worker_name)
|
||||
continue
|
||||
|
||||
# if the name filter is set, return only worker names that contain the name filter
|
||||
if name_filter not in worker_name:
|
||||
continue
|
||||
|
||||
worker_names.append(worker_name)
|
||||
|
||||
return worker_names
|
||||
|
||||
|
||||
def celery_inspect_get_reserved(worker_names: list[str], app: Celery) -> set[str]:
|
||||
"""Returns a list of reserved tasks on the specified workers.
|
||||
|
||||
We've empirically discovered that the celery inspect API is potentially unstable
|
||||
and may hang or return empty results when celery is under load. Suggest using this
|
||||
more to debug and troubleshoot than in production code.
|
||||
"""
|
||||
reserved_task_ids: set[str] = set()
|
||||
|
||||
inspect = app.control.inspect(destination=worker_names)
|
||||
|
||||
# get the list of reserved tasks
|
||||
reserved_tasks: dict[str, list] | None = inspect.reserved() # type: ignore
|
||||
if reserved_tasks:
|
||||
for _, task_list in reserved_tasks.items():
|
||||
for task in task_list:
|
||||
reserved_task_ids.add(task["id"])
|
||||
|
||||
return reserved_task_ids
|
||||
|
||||
|
||||
def celery_inspect_get_active(worker_names: list[str], app: Celery) -> set[str]:
|
||||
"""Returns a list of active tasks on the specified workers.
|
||||
|
||||
We've empirically discovered that the celery inspect API is potentially unstable
|
||||
and may hang or return empty results when celery is under load. Suggest using this
|
||||
more to debug and troubleshoot than in production code.
|
||||
"""
|
||||
active_task_ids: set[str] = set()
|
||||
|
||||
inspect = app.control.inspect(destination=worker_names)
|
||||
|
||||
# get the list of reserved tasks
|
||||
active_tasks: dict[str, list] | None = inspect.active() # type: ignore
|
||||
if active_tasks:
|
||||
for _, task_list in active_tasks.items():
|
||||
for task in task_list:
|
||||
active_task_ids.add(task["id"])
|
||||
|
||||
return active_task_ids
|
||||
|
||||
@@ -16,11 +16,6 @@ result_expires = shared_config.result_expires  # 86400 seconds is the default
task_default_priority = shared_config.task_default_priority
task_acks_late = shared_config.task_acks_late

# Indexing worker specific ... this lets us track the transition to STARTED in redis
# We don't currently rely on this but it has the potential to be useful and
# indexing tasks are not high volume
task_track_started = True

worker_concurrency = CELERY_WORKER_INDEXING_CONCURRENCY
worker_pool = "threads"
worker_prefetch_multiplier = 1
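With task_track_started enabled, a task's state becomes observable as STARTED once a worker picks it up. A minimal sketch of how that state could be polled, assuming a celery_app instance is in scope (the task id is a placeholder):

```python
from celery.result import AsyncResult

# Sketch: with task_track_started = True the state transitions
# PENDING -> STARTED -> SUCCESS/FAILURE, so STARTED is visible to observers.
result = AsyncResult("some-task-id", app=celery_app)  # celery_app assumed to exist
if result.state == "STARTED":
    print("task has been picked up by a worker")
```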
@@ -3,7 +3,7 @@ from datetime import datetime
|
||||
from datetime import timezone
|
||||
from http import HTTPStatus
|
||||
from time import sleep
|
||||
from typing import cast
|
||||
from typing import Any
|
||||
|
||||
import redis
|
||||
import sentry_sdk
|
||||
@@ -18,7 +18,6 @@ from sqlalchemy.orm import Session
|
||||
|
||||
from onyx.background.celery.apps.app_base import task_logger
|
||||
from onyx.background.celery.celery_redis import celery_find_task
|
||||
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
|
||||
from onyx.background.indexing.job_client import SimpleJobClient
|
||||
from onyx.background.indexing.run_indexing import run_indexing_entrypoint
|
||||
from onyx.configs.app_configs import DISABLE_INDEX_UPDATE_ON_SWAP
|
||||
@@ -30,7 +29,6 @@ from onyx.configs.constants import OnyxCeleryPriority
|
||||
from onyx.configs.constants import OnyxCeleryQueues
|
||||
from onyx.configs.constants import OnyxCeleryTask
|
||||
from onyx.configs.constants import OnyxRedisLocks
|
||||
from onyx.configs.constants import OnyxRedisSignals
|
||||
from onyx.db.connector import mark_ccpair_with_indexing_trigger
|
||||
from onyx.db.connector_credential_pair import fetch_connector_credential_pairs
|
||||
from onyx.db.connector_credential_pair import get_connector_credential_pair_from_id
|
||||
@@ -101,37 +99,13 @@ class IndexingCallback(IndexingHeartbeatInterface):
|
||||
self.last_lock_reacquire = datetime.now(timezone.utc)
|
||||
except LockError:
|
||||
logger.exception(
|
||||
f"IndexingCallback - lock.reacquire exceptioned: "
|
||||
f"IndexingCallback - lock.reacquire exceptioned. "
|
||||
f"lock_timeout={self.redis_lock.timeout} "
|
||||
f"start={self.started} "
|
||||
f"last_tag={self.last_tag} "
|
||||
f"last_reacquired={self.last_lock_reacquire} "
|
||||
f"now={datetime.now(timezone.utc)}"
|
||||
)
|
||||
|
||||
# diagnostic logging for lock errors
|
||||
name = self.redis_lock.name
|
||||
ttl = self.redis_client.ttl(name)
|
||||
locked = self.redis_lock.locked()
|
||||
owned = self.redis_lock.owned()
|
||||
local_token: str | None = self.redis_lock.local.token # type: ignore
|
||||
|
||||
remote_token_raw = self.redis_client.get(self.redis_lock.name)
|
||||
if remote_token_raw:
|
||||
remote_token_bytes = cast(bytes, remote_token_raw)
|
||||
remote_token = remote_token_bytes.decode("utf-8")
|
||||
else:
|
||||
remote_token = None
|
||||
|
||||
logger.warning(
|
||||
f"IndexingCallback - lock diagnostics: "
|
||||
f"name={name} "
|
||||
f"locked={locked} "
|
||||
f"owned={owned} "
|
||||
f"local_token={local_token} "
|
||||
f"remote_token={remote_token} "
|
||||
f"ttl={ttl}"
|
||||
)
|
||||
raise
|
||||
|
||||
self.redis_client.incrby(self.generator_progress_key, amount)
|
||||
@@ -201,7 +175,7 @@ def check_for_indexing(self: Task, *, tenant_id: str | None) -> int | None:
|
||||
|
||||
# we need to use celery's redis client to access its redis data
|
||||
# (which lives on a different db number)
|
||||
redis_client_celery: Redis = self.app.broker_connection().channel().client # type: ignore
|
||||
# redis_client_celery: Redis = self.app.broker_connection().channel().client # type: ignore
|
||||
|
||||
lock_beat: RedisLock = redis_client.lock(
|
||||
OnyxRedisLocks.CHECK_INDEXING_BEAT_LOCK,
|
||||
@@ -344,19 +318,23 @@ def check_for_indexing(self: Task, *, tenant_id: str | None) -> int | None:
|
||||
attempt.id, db_session, failure_reason=failure_reason
|
||||
)
|
||||
|
||||
# we want to run this less frequently than the overall task
|
||||
if not redis_client.exists(OnyxRedisSignals.VALIDATE_INDEXING_FENCES):
|
||||
# clear any indexing fences that don't have associated celery tasks in progress
|
||||
# tasks can be in the queue in redis, in reserved tasks (prefetched by the worker),
|
||||
# or be currently executing
|
||||
try:
|
||||
validate_indexing_fences(
|
||||
tenant_id, self.app, redis_client, redis_client_celery, lock_beat
|
||||
)
|
||||
except Exception:
|
||||
task_logger.exception("Exception while validating indexing fences")
|
||||
# rkuo: The following code logically appears to work, but the celery inspect code may be unstable
|
||||
# turning off for the moment to see if it helps cloud stability
|
||||
|
||||
redis_client.set(OnyxRedisSignals.VALIDATE_INDEXING_FENCES, 1, ex=60)
|
||||
# we want to run this less frequently than the overall task
|
||||
# if not redis_client.exists(OnyxRedisSignals.VALIDATE_INDEXING_FENCES):
|
||||
# # clear any indexing fences that don't have associated celery tasks in progress
|
||||
# # tasks can be in the queue in redis, in reserved tasks (prefetched by the worker),
|
||||
# # or be currently executing
|
||||
# try:
|
||||
# task_logger.info("Validating indexing fences...")
|
||||
# validate_indexing_fences(
|
||||
# tenant_id, self.app, redis_client, redis_client_celery, lock_beat
|
||||
# )
|
||||
# except Exception:
|
||||
# task_logger.exception("Exception while validating indexing fences")
|
||||
|
||||
# redis_client.set(OnyxRedisSignals.VALIDATE_INDEXING_FENCES, 1, ex=60)
|
||||
|
||||
except SoftTimeLimitExceeded:
|
||||
task_logger.info(
|
||||
@@ -375,7 +353,7 @@ def check_for_indexing(self: Task, *, tenant_id: str | None) -> int | None:
|
||||
)
|
||||
|
||||
time_elapsed = time.monotonic() - time_start
|
||||
task_logger.debug(f"check_for_indexing finished: elapsed={time_elapsed:.2f}")
|
||||
task_logger.info(f"check_for_indexing finished: elapsed={time_elapsed:.2f}")
|
||||
return tasks_created
|
||||
|
||||
|
||||
@@ -386,9 +364,46 @@ def validate_indexing_fences(
|
||||
r_celery: Redis,
|
||||
lock_beat: RedisLock,
|
||||
) -> None:
|
||||
reserved_indexing_tasks = celery_get_unacked_task_ids(
|
||||
OnyxCeleryQueues.CONNECTOR_INDEXING, r_celery
|
||||
)
|
||||
reserved_indexing_tasks: set[str] = set()
|
||||
active_indexing_tasks: set[str] = set()
|
||||
indexing_worker_names: list[str] = []
|
||||
|
||||
# filter for and create an indexing specific inspect object
|
||||
inspect = celery_app.control.inspect()
|
||||
workers: dict[str, Any] = inspect.ping() # type: ignore
|
||||
if not workers:
|
||||
raise ValueError("No workers found!")
|
||||
|
||||
for worker_name in list(workers.keys()):
|
||||
if "indexing" in worker_name:
|
||||
indexing_worker_names.append(worker_name)
|
||||
|
||||
if len(indexing_worker_names) == 0:
|
||||
raise ValueError("No indexing workers found!")
|
||||
|
||||
inspect_indexing = celery_app.control.inspect(destination=indexing_worker_names)
|
||||
|
||||
# NOTE: each dict entry is a map of worker name to a list of tasks
|
||||
# we want sets for reserved task and active task id's to optimize
|
||||
# subsequent validation lookups
|
||||
|
||||
# get the list of reserved tasks
|
||||
reserved_tasks: dict[str, list] | None = inspect_indexing.reserved() # type: ignore
|
||||
if reserved_tasks is None:
|
||||
raise ValueError("inspect_indexing.reserved() returned None!")
|
||||
|
||||
for _, task_list in reserved_tasks.items():
|
||||
for task in task_list:
|
||||
reserved_indexing_tasks.add(task["id"])
|
||||
|
||||
# get the list of active tasks
|
||||
active_tasks: dict[str, list] | None = inspect_indexing.active() # type: ignore
|
||||
if active_tasks is None:
|
||||
raise ValueError("inspect_indexing.active() returned None!")
|
||||
|
||||
for _, task_list in active_tasks.items():
|
||||
for task in task_list:
|
||||
active_indexing_tasks.add(task["id"])
|
||||
|
||||
# validate all existing indexing jobs
|
||||
for key_bytes in r.scan_iter(RedisConnectorIndex.FENCE_PREFIX + "*"):
|
||||
@@ -398,6 +413,7 @@ def validate_indexing_fences(
|
||||
tenant_id,
|
||||
key_bytes,
|
||||
reserved_indexing_tasks,
|
||||
active_indexing_tasks,
|
||||
r_celery,
|
||||
db_session,
|
||||
)
|
||||
@@ -408,6 +424,7 @@ def validate_indexing_fence(
|
||||
tenant_id: str | None,
|
||||
key_bytes: bytes,
|
||||
reserved_tasks: set[str],
|
||||
active_tasks: set[str],
|
||||
r_celery: Redis,
|
||||
db_session: Session,
|
||||
) -> None:
|
||||
@@ -417,15 +434,11 @@ def validate_indexing_fence(
|
||||
gives the help.
|
||||
|
||||
How this works:
|
||||
1. This function renews the active signal with a 5 minute TTL under the following conditions
|
||||
1. Active signal is renewed with a 5 minute TTL
|
||||
1.1 When the fence is created
|
||||
1.2. When the task is seen in the redis queue
|
||||
1.3. When the task is seen in the reserved / prefetched list
|
||||
|
||||
2. Externally, the active signal is renewed when:
|
||||
2.1. The fence is created
|
||||
2.2. The indexing watchdog checks the spawned task.
|
||||
|
||||
3. The TTL allows us to get through the transitions on fence startup
|
||||
1.3. When the task is seen in the reserved or active list for a worker
|
||||
2. The TTL allows us to get through the transitions on fence startup
|
||||
and when the task starts executing.
|
||||
|
||||
More TTL clarification: it is seemingly impossible to exactly query Celery for
|
||||
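The active signal described in this docstring is, in essence, a Redis key with a short TTL that is refreshed at each of the listed checkpoints. A minimal sketch of the idea follows; the key name and helper names are assumptions, not the exact values used by RedisConnectorIndex, and only the 5 minute TTL is taken from the text above:

```python
import redis

r = redis.Redis()
ACTIVE_TTL_SECONDS = 5 * 60  # 5 minute TTL, per the docstring above


def set_active(fence_key: str) -> None:
    # refresh (or create) the active signal whenever the task is observed
    r.set(f"{fence_key}:active", 1, ex=ACTIVE_TTL_SECONDS)


def is_active(fence_key: str) -> bool:
    # if the signal has expired, no checkpoint has seen the task recently
    return bool(r.exists(f"{fence_key}:active"))
```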
@@ -453,8 +466,6 @@ def validate_indexing_fence(
|
||||
|
||||
redis_connector = RedisConnector(tenant_id, cc_pair_id)
|
||||
redis_connector_index = redis_connector.new_index(search_settings_id)
|
||||
|
||||
# check to see if the fence/payload exists
|
||||
if not redis_connector_index.fenced:
|
||||
return
|
||||
|
||||
@@ -490,24 +501,24 @@ def validate_indexing_fence(
|
||||
redis_connector_index.set_active()
|
||||
return
|
||||
|
||||
if payload.celery_task_id in active_tasks:
|
||||
# the celery task is active (aka currently executing)
|
||||
redis_connector_index.set_active()
|
||||
return
|
||||
|
||||
# we may want to enable this check if using the active task list somehow isn't good enough
|
||||
# if redis_connector_index.generator_locked():
|
||||
# logger.info(f"{payload.celery_task_id} is currently executing.")
|
||||
|
||||
# if we get here, we didn't find any direct indication that the associated celery tasks exist,
|
||||
# but they still might be there due to gaps in our ability to check states during transitions
|
||||
# Checking the active signal safeguards us against these transition periods
|
||||
# (which has a duration that allows us to bridge those gaps)
|
||||
# we didn't find any direct indication that associated celery tasks exist, but they still might be there
|
||||
# due to gaps in our ability to check states during transitions
|
||||
# Rely on the active signal (which has a duration that allows us to bridge those gaps)
|
||||
if redis_connector_index.active():
|
||||
return
|
||||
|
||||
# celery tasks don't exist and the active signal has expired, possibly due to a crash. Clean it up.
|
||||
logger.warning(
|
||||
f"validate_indexing_fence - Resetting fence because no associated celery tasks were found: "
|
||||
f"index_attempt={payload.index_attempt_id} "
|
||||
f"cc_pair={cc_pair_id} "
|
||||
f"search_settings={search_settings_id} "
|
||||
f"fence={fence_key}"
|
||||
f"validate_indexing_fence - Resetting fence because no associated celery tasks were found: fence={fence_key}"
|
||||
)
|
||||
if payload.index_attempt_id:
|
||||
try:
|
||||
@@ -784,52 +795,6 @@ def connector_indexing_proxy_task(
|
||||
while True:
|
||||
sleep(5)
|
||||
|
||||
# renew active signal
|
||||
redis_connector_index.set_active()
|
||||
|
||||
# if the job is done, clean up and break
|
||||
if job.done():
|
||||
if job.status == "error":
|
||||
ignore_exitcode = False
|
||||
|
||||
exit_code: int | None = None
|
||||
if job.process:
|
||||
exit_code = job.process.exitcode
|
||||
|
||||
# seeing odd behavior where spawned tasks usually return exit code 1 in the cloud,
|
||||
# even though logging clearly indicates that they completed successfully
|
||||
# to work around this, we ignore the job error state if the completion signal is OK
|
||||
status_int = redis_connector_index.get_completion()
|
||||
if status_int:
|
||||
status_enum = HTTPStatus(status_int)
|
||||
if status_enum == HTTPStatus.OK:
|
||||
ignore_exitcode = True
|
||||
|
||||
if ignore_exitcode:
|
||||
task_logger.warning(
|
||||
"Indexing watchdog - spawned task has non-zero exit code "
|
||||
"but completion signal is OK. Continuing...: "
|
||||
f"attempt={index_attempt_id} "
|
||||
f"tenant={tenant_id} "
|
||||
f"cc_pair={cc_pair_id} "
|
||||
f"search_settings={search_settings_id} "
|
||||
f"exit_code={exit_code}"
|
||||
)
|
||||
else:
|
||||
task_logger.error(
|
||||
"Indexing watchdog - spawned task exceptioned: "
|
||||
f"attempt={index_attempt_id} "
|
||||
f"tenant={tenant_id} "
|
||||
f"cc_pair={cc_pair_id} "
|
||||
f"search_settings={search_settings_id} "
|
||||
f"exit_code={exit_code} "
|
||||
f"error={job.exception()}"
|
||||
)
|
||||
|
||||
job.release()
|
||||
break
|
||||
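The completion signal consulted through redis_connector_index.get_completion() is, conceptually, an HTTP-style status code written to Redis by the spawned task when it finishes, which is what lets the watchdog ignore a bogus non-zero exit code. A sketch of both sides of that signal; the key layout and the write side are assumptions, since only get_completion() appears in this diff:

```python
from http import HTTPStatus

import redis

r = redis.Redis()
completion_key = "connectorindexing_completion:1:2"  # made-up key layout

# write side (inside the spawned indexing task, on success)
r.set(completion_key, int(HTTPStatus.OK))

# read side (inside the watchdog, mirroring the logic above)
raw = r.get(completion_key)
ignore_exitcode = raw is not None and HTTPStatus(int(raw)) == HTTPStatus.OK
```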
|
||||
# if a termination signal is detected, clean up and break
|
||||
if self.request.id and redis_connector_index.terminating(self.request.id):
|
||||
task_logger.warning(
|
||||
"Indexing watchdog - termination signal detected: "
|
||||
@@ -856,33 +821,75 @@ def connector_indexing_proxy_task(
|
||||
f"search_settings={search_settings_id}"
|
||||
)
|
||||
|
||||
job.cancel()
|
||||
job.cancel()
|
||||
|
||||
break
|
||||
|
||||
# if the spawned task is still running, restart the check once again
|
||||
# if the index attempt is not in a finished status
|
||||
try:
|
||||
with get_session_with_tenant(tenant_id) as db_session:
|
||||
index_attempt = get_index_attempt(
|
||||
db_session=db_session, index_attempt_id=index_attempt_id
|
||||
if not job.done():
|
||||
# if the spawned task is still running, restart the check once again
|
||||
# if the index attempt is not in a finished status
|
||||
try:
|
||||
with get_session_with_tenant(tenant_id) as db_session:
|
||||
index_attempt = get_index_attempt(
|
||||
db_session=db_session, index_attempt_id=index_attempt_id
|
||||
)
|
||||
|
||||
if not index_attempt:
|
||||
continue
|
||||
|
||||
if not index_attempt.is_finished():
|
||||
continue
|
||||
except Exception:
|
||||
# if the DB exceptioned, just restart the check.
|
||||
# polling the index attempt status doesn't need to be strongly consistent
|
||||
logger.exception(
|
||||
"Indexing watchdog - transient exception looking up index attempt: "
|
||||
f"attempt={index_attempt_id} "
|
||||
f"tenant={tenant_id} "
|
||||
f"cc_pair={cc_pair_id} "
|
||||
f"search_settings={search_settings_id}"
|
||||
)
|
||||
continue
|
||||
|
||||
if job.status == "error":
|
||||
ignore_exitcode = False
|
||||
|
||||
exit_code: int | None = None
|
||||
if job.process:
|
||||
exit_code = job.process.exitcode
|
||||
|
||||
# seeing odd behavior where spawned tasks usually return exit code 1 in the cloud,
|
||||
# even though logging clearly indicates that they completed successfully
|
||||
# to work around this, we ignore the job error state if the completion signal is OK
|
||||
status_int = redis_connector_index.get_completion()
|
||||
if status_int:
|
||||
status_enum = HTTPStatus(status_int)
|
||||
if status_enum == HTTPStatus.OK:
|
||||
ignore_exitcode = True
|
||||
|
||||
if ignore_exitcode:
|
||||
task_logger.warning(
|
||||
"Indexing watchdog - spawned task has non-zero exit code "
|
||||
"but completion signal is OK. Continuing...: "
|
||||
f"attempt={index_attempt_id} "
|
||||
f"tenant={tenant_id} "
|
||||
f"cc_pair={cc_pair_id} "
|
||||
f"search_settings={search_settings_id} "
|
||||
f"exit_code={exit_code}"
|
||||
)
|
||||
else:
|
||||
task_logger.error(
|
||||
"Indexing watchdog - spawned task exceptioned: "
|
||||
f"attempt={index_attempt_id} "
|
||||
f"tenant={tenant_id} "
|
||||
f"cc_pair={cc_pair_id} "
|
||||
f"search_settings={search_settings_id} "
|
||||
f"exit_code={exit_code} "
|
||||
f"error={job.exception()}"
|
||||
)
|
||||
|
||||
if not index_attempt:
|
||||
continue
|
||||
|
||||
if not index_attempt.is_finished():
|
||||
continue
|
||||
except Exception:
|
||||
# if the DB exceptioned, just restart the check.
|
||||
# polling the index attempt status doesn't need to be strongly consistent
|
||||
logger.exception(
|
||||
"Indexing watchdog - transient exception looking up index attempt: "
|
||||
f"attempt={index_attempt_id} "
|
||||
f"tenant={tenant_id} "
|
||||
f"cc_pair={cc_pair_id} "
|
||||
f"search_settings={search_settings_id}"
|
||||
)
|
||||
continue
|
||||
job.release()
|
||||
break
|
||||
|
||||
task_logger.info(
|
||||
f"Indexing watchdog - finished: attempt={index_attempt_id} "
|
||||
|
||||
@@ -20,7 +20,6 @@ from tenacity import RetryError
|
||||
from onyx.access.access import get_access_for_document
|
||||
from onyx.background.celery.apps.app_base import task_logger
|
||||
from onyx.background.celery.celery_redis import celery_get_queue_length
|
||||
from onyx.background.celery.celery_redis import celery_get_unacked_task_ids
|
||||
from onyx.background.celery.tasks.shared.RetryDocumentIndex import RetryDocumentIndex
|
||||
from onyx.background.celery.tasks.shared.tasks import LIGHT_SOFT_TIME_LIMIT
|
||||
from onyx.background.celery.tasks.shared.tasks import LIGHT_TIME_LIMIT
|
||||
@@ -165,7 +164,7 @@ def check_for_vespa_sync_task(self: Task, *, tenant_id: str | None) -> None:
|
||||
lock_beat.release()
|
||||
|
||||
time_elapsed = time.monotonic() - time_start
|
||||
task_logger.debug(f"check_for_vespa_sync_task finished: elapsed={time_elapsed:.2f}")
|
||||
task_logger.info(f"check_for_vespa_sync_task finished: elapsed={time_elapsed:.2f}")
|
||||
return
|
||||
|
||||
|
||||
@@ -637,23 +636,15 @@ def monitor_ccpair_indexing_taskset(
|
||||
if not payload:
|
||||
return
|
||||
|
||||
elapsed_started_str = None
|
||||
if payload.started:
|
||||
elapsed_started = datetime.now(timezone.utc) - payload.started
|
||||
elapsed_started_str = f"{elapsed_started.total_seconds():.2f}"
|
||||
|
||||
elapsed_submitted = datetime.now(timezone.utc) - payload.submitted
|
||||
|
||||
progress = redis_connector_index.get_progress()
|
||||
if progress is not None:
|
||||
task_logger.info(
|
||||
f"Connector indexing progress: "
|
||||
f"attempt={payload.index_attempt_id} "
|
||||
f"cc_pair={cc_pair_id} "
|
||||
f"Connector indexing progress: cc_pair={cc_pair_id} "
|
||||
f"search_settings={search_settings_id} "
|
||||
f"progress={progress} "
|
||||
f"elapsed_submitted={elapsed_submitted.total_seconds():.2f} "
|
||||
f"elapsed_started={elapsed_started_str}"
|
||||
f"elapsed_submitted={elapsed_submitted.total_seconds():.2f}"
|
||||
)
|
||||
|
||||
if payload.index_attempt_id is None or payload.celery_task_id is None:
|
||||
@@ -724,14 +715,11 @@ def monitor_ccpair_indexing_taskset(
|
||||
status_enum = HTTPStatus(status_int)
|
||||
|
||||
task_logger.info(
|
||||
f"Connector indexing finished: "
|
||||
f"attempt={payload.index_attempt_id} "
|
||||
f"cc_pair={cc_pair_id} "
|
||||
f"Connector indexing finished: cc_pair={cc_pair_id} "
|
||||
f"search_settings={search_settings_id} "
|
||||
f"progress={progress} "
|
||||
f"status={status_enum.name} "
|
||||
f"elapsed_submitted={elapsed_submitted.total_seconds():.2f} "
|
||||
f"elapsed_started={elapsed_started_str}"
|
||||
f"elapsed_submitted={elapsed_submitted.total_seconds():.2f}"
|
||||
)
|
||||
|
||||
redis_connector_index.reset()
|
||||
@@ -778,34 +766,31 @@ def monitor_vespa_sync(self: Task, tenant_id: str | None) -> bool:
|
||||
OnyxCeleryQueues.CONNECTOR_DOC_PERMISSIONS_SYNC, r_celery
|
||||
)
|
||||
|
||||
prefetched = celery_get_unacked_task_ids(
|
||||
OnyxCeleryQueues.CONNECTOR_INDEXING, r_celery
|
||||
)
|
||||
|
||||
task_logger.info(
|
||||
f"Queue lengths: celery={n_celery} "
|
||||
f"indexing={n_indexing} "
|
||||
f"indexing_prefetched={len(prefetched)} "
|
||||
f"sync={n_sync} "
|
||||
f"deletion={n_deletion} "
|
||||
f"pruning={n_pruning} "
|
||||
f"permissions_sync={n_permissions_sync} "
|
||||
)
|
||||
|
||||
# scan and monitor activity to completion
|
||||
lock_beat.reacquire()
|
||||
if r.exists(RedisConnectorCredentialPair.get_fence_key()):
|
||||
monitor_connector_taskset(r)
|
||||
|
||||
lock_beat.reacquire()
|
||||
for key_bytes in r.scan_iter(RedisConnectorDelete.FENCE_PREFIX + "*"):
|
||||
lock_beat.reacquire()
|
||||
monitor_connector_deletion_taskset(tenant_id, key_bytes, r)
|
||||
|
||||
lock_beat.reacquire()
|
||||
for key_bytes in r.scan_iter(RedisDocumentSet.FENCE_PREFIX + "*"):
|
||||
lock_beat.reacquire()
|
||||
with get_session_with_tenant(tenant_id) as db_session:
|
||||
monitor_document_set_taskset(tenant_id, key_bytes, r, db_session)
|
||||
|
||||
lock_beat.reacquire()
|
||||
for key_bytes in r.scan_iter(RedisUserGroup.FENCE_PREFIX + "*"):
|
||||
lock_beat.reacquire()
|
||||
monitor_usergroup_taskset = fetch_versioned_implementation_with_fallback(
|
||||
@@ -816,21 +801,28 @@ def monitor_vespa_sync(self: Task, tenant_id: str | None) -> bool:
|
||||
with get_session_with_tenant(tenant_id) as db_session:
|
||||
monitor_usergroup_taskset(tenant_id, key_bytes, r, db_session)
|
||||
|
||||
lock_beat.reacquire()
|
||||
for key_bytes in r.scan_iter(RedisConnectorPrune.FENCE_PREFIX + "*"):
|
||||
lock_beat.reacquire()
|
||||
with get_session_with_tenant(tenant_id) as db_session:
|
||||
monitor_ccpair_pruning_taskset(tenant_id, key_bytes, r, db_session)
|
||||
|
||||
lock_beat.reacquire()
|
||||
for key_bytes in r.scan_iter(RedisConnectorIndex.FENCE_PREFIX + "*"):
|
||||
lock_beat.reacquire()
|
||||
with get_session_with_tenant(tenant_id) as db_session:
|
||||
monitor_ccpair_indexing_taskset(tenant_id, key_bytes, r, db_session)
|
||||
|
||||
lock_beat.reacquire()
|
||||
for key_bytes in r.scan_iter(RedisConnectorPermissionSync.FENCE_PREFIX + "*"):
|
||||
lock_beat.reacquire()
|
||||
with get_session_with_tenant(tenant_id) as db_session:
|
||||
monitor_ccpair_permissions_taskset(tenant_id, key_bytes, r, db_session)
|
||||
|
||||
# uncomment for debugging if needed
|
||||
# r_celery = celery_app.broker_connection().channel().client
|
||||
# length = celery_get_queue_length(OnyxCeleryQueues.VESPA_METADATA_SYNC, r_celery)
|
||||
# task_logger.warning(f"queue={OnyxCeleryQueues.VESPA_METADATA_SYNC} length={length}")
|
||||
except SoftTimeLimitExceeded:
|
||||
task_logger.info(
|
||||
"Soft time limit exceeded, task is being terminated gracefully."
|
||||
@@ -840,7 +832,7 @@ def monitor_vespa_sync(self: Task, tenant_id: str | None) -> bool:
|
||||
lock_beat.release()
|
||||
|
||||
time_elapsed = time.monotonic() - time_start
|
||||
task_logger.debug(f"monitor_vespa_sync finished: elapsed={time_elapsed:.2f}")
|
||||
task_logger.info(f"monitor_vespa_sync finished: elapsed={time_elapsed:.2f}")
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -92,7 +92,6 @@ SMTP_SERVER = os.environ.get("SMTP_SERVER") or "smtp.gmail.com"
SMTP_PORT = int(os.environ.get("SMTP_PORT") or "587")
SMTP_USER = os.environ.get("SMTP_USER", "your-email@gmail.com")
SMTP_PASS = os.environ.get("SMTP_PASS", "your-gmail-password")
EMAIL_CONFIGURED = all([SMTP_SERVER, SMTP_USER, SMTP_PASS])
EMAIL_FROM = os.environ.get("EMAIL_FROM") or SMTP_USER

# If set, Onyx will listen to the `expires_at` returned by the identity
@@ -36,8 +36,6 @@ DISABLED_GEN_AI_MSG = (

DEFAULT_PERSONA_ID = 0

DEFAULT_CC_PAIR_ID = 1

# Postgres connection constants for application_name
POSTGRES_WEB_APP_NAME = "web"
POSTGRES_INDEXER_APP_NAME = "indexer"
@@ -56,23 +56,6 @@ _RESTRICTIONS_EXPANSION_FIELDS = [
|
||||
|
||||
_SLIM_DOC_BATCH_SIZE = 5000
|
||||
|
||||
_ATTACHMENT_EXTENSIONS_TO_FILTER_OUT = [
|
||||
"png",
|
||||
"jpg",
|
||||
"jpeg",
|
||||
"gif",
|
||||
"mp4",
|
||||
"mov",
|
||||
"mp3",
|
||||
"wav",
|
||||
]
|
||||
_FULL_EXTENSION_FILTER_STRING = "".join(
|
||||
[
|
||||
f" and title!~'*.{extension}'"
|
||||
for extension in _ATTACHMENT_EXTENSIONS_TO_FILTER_OUT
|
||||
]
|
||||
)
|
||||
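For reference, the join above expands to the following literal, derived directly from the extension list shown (line breaks added here only for readability):

```python
_FULL_EXTENSION_FILTER_STRING == (
    " and title!~'*.png' and title!~'*.jpg' and title!~'*.jpeg'"
    " and title!~'*.gif' and title!~'*.mp4' and title!~'*.mov'"
    " and title!~'*.mp3' and title!~'*.wav'"
)
```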
|
||||
|
||||
class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
|
||||
def __init__(
|
||||
@@ -81,7 +64,7 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
|
||||
is_cloud: bool,
|
||||
space: str = "",
|
||||
page_id: str = "",
|
||||
index_recursively: bool = False,
|
||||
index_recursively: bool = True,
|
||||
cql_query: str | None = None,
|
||||
batch_size: int = INDEX_BATCH_SIZE,
|
||||
continue_on_failure: bool = CONTINUE_ON_CONNECTOR_FAILURE,
|
||||
@@ -99,25 +82,23 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
|
||||
# Remove trailing slash from wiki_base if present
|
||||
self.wiki_base = wiki_base.rstrip("/")
|
||||
|
||||
"""
|
||||
If nothing is provided, we default to fetching all pages
|
||||
Only one or none of the following options should be specified so
|
||||
the order shouldn't matter
|
||||
However, we use elif to ensure that only one of the following is enforced
|
||||
"""
|
||||
base_cql_page_query = "type=page"
|
||||
# if nothing is provided, we will fetch all pages
|
||||
cql_page_query = "type=page"
|
||||
if cql_query:
|
||||
base_cql_page_query = cql_query
|
||||
# if a cql_query is provided, we will use it to fetch the pages
|
||||
cql_page_query = cql_query
|
||||
elif page_id:
|
||||
# if a cql_query is not provided, we will use the page_id to fetch the page
|
||||
if index_recursively:
|
||||
base_cql_page_query += f" and (ancestor='{page_id}' or id='{page_id}')"
|
||||
cql_page_query += f" and ancestor='{page_id}'"
|
||||
else:
|
||||
base_cql_page_query += f" and id='{page_id}'"
|
||||
cql_page_query += f" and id='{page_id}'"
|
||||
elif space:
|
||||
uri_safe_space = quote(space)
|
||||
base_cql_page_query += f" and space='{uri_safe_space}'"
|
||||
# if no cql_query or page_id is provided, we will use the space to fetch the pages
|
||||
cql_page_query += f" and space='{quote(space)}'"
|
||||
|
||||
self.base_cql_page_query = base_cql_page_query
|
||||
self.cql_page_query = cql_page_query
|
||||
self.cql_time_filter = ""
|
||||
|
||||
self.cql_label_filter = ""
|
||||
if labels_to_skip:
|
||||
@@ -145,33 +126,6 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
|
||||
)
|
||||
return None
|
||||
|
||||
    def _construct_page_query(
        self,
        start: SecondsSinceUnixEpoch | None = None,
        end: SecondsSinceUnixEpoch | None = None,
    ) -> str:
        page_query = self.base_cql_page_query + self.cql_label_filter

        # Add time filters
        if start:
            formatted_start_time = datetime.fromtimestamp(
                start, tz=self.timezone
            ).strftime("%Y-%m-%d %H:%M")
            page_query += f" and lastmodified >= '{formatted_start_time}'"
        if end:
            formatted_end_time = datetime.fromtimestamp(end, tz=self.timezone).strftime(
                "%Y-%m-%d %H:%M"
            )
            page_query += f" and lastmodified <= '{formatted_end_time}'"

        return page_query

    def _construct_attachment_query(self, confluence_page_id: str) -> str:
        attachment_query = f"type=attachment and container='{confluence_page_id}'"
        attachment_query += self.cql_label_filter
        attachment_query += _FULL_EXTENSION_FILTER_STRING
        return attachment_query

    def _get_comment_string_for_page_id(self, page_id: str) -> str:
        comment_string = ""

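As a rough illustration of the CQL strings these helpers produce (hypothetical space key and page id, not values taken from this diff):

    from urllib.parse import quote

    space = "ENG"        # hypothetical space key
    page_id = "12345"    # hypothetical page id

    # space-scoped query, mirroring the `elif space:` branch in __init__
    space_query = "type=page" + f" and space='{quote(space)}'"

    # recursive page-scoped query, mirroring the index_recursively branch
    recursive_query = "type=page" + f" and (ancestor='{page_id}' or id='{page_id}')"

    print(space_query)      # type=page and space='ENG'
    print(recursive_query)  # type=page and (ancestor='12345' or id='12345')
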
@@ -251,15 +205,11 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
            metadata=doc_metadata,
        )

    def _fetch_document_batches(
        self,
        start: SecondsSinceUnixEpoch | None = None,
        end: SecondsSinceUnixEpoch | None = None,
    ) -> GenerateDocumentsOutput:
    def _fetch_document_batches(self) -> GenerateDocumentsOutput:
        doc_batch: list[Document] = []
        confluence_page_ids: list[str] = []

        page_query = self._construct_page_query(start, end)
        page_query = self.cql_page_query + self.cql_label_filter + self.cql_time_filter
        logger.debug(f"page_query: {page_query}")
        # Fetch pages as Documents
        for page in self.confluence_client.paginated_cql_retrieval(

@@ -278,10 +228,11 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):

        # Fetch attachments as Documents
        for confluence_page_id in confluence_page_ids:
            attachment_query = self._construct_attachment_query(confluence_page_id)
            attachment_cql = f"type=attachment and container='{confluence_page_id}'"
            attachment_cql += self.cql_label_filter
            # TODO: maybe should add time filter as well?
            for attachment in self.confluence_client.paginated_cql_retrieval(
                cql=attachment_query,
                cql=attachment_cql,
                expand=",".join(_ATTACHMENT_EXPANSION_FIELDS),
            ):
                doc = self._convert_object_to_document(attachment)

@@ -297,12 +248,17 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
    def load_from_state(self) -> GenerateDocumentsOutput:
        return self._fetch_document_batches()

    def poll_source(
        self,
        start: SecondsSinceUnixEpoch | None = None,
        end: SecondsSinceUnixEpoch | None = None,
    ) -> GenerateDocumentsOutput:
        return self._fetch_document_batches(start, end)
    def poll_source(self, start: float, end: float) -> GenerateDocumentsOutput:
        # Add time filters
        formatted_start_time = datetime.fromtimestamp(start, tz=self.timezone).strftime(
            "%Y-%m-%d %H:%M"
        )
        formatted_end_time = datetime.fromtimestamp(end, tz=self.timezone).strftime(
            "%Y-%m-%d %H:%M"
        )
        self.cql_time_filter = f" and lastmodified >= '{formatted_start_time}'"
        self.cql_time_filter += f" and lastmodified <= '{formatted_end_time}'"
        return self._fetch_document_batches()

    def retrieve_all_slim_documents(
        self,

@@ -313,7 +269,7 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):

        restrictions_expand = ",".join(_RESTRICTIONS_EXPANSION_FIELDS)

        page_query = self.base_cql_page_query + self.cql_label_filter
        page_query = self.cql_page_query + self.cql_label_filter
        for page in self.confluence_client.cql_paginate_all_expansions(
            cql=page_query,
            expand=restrictions_expand,

@@ -338,9 +294,10 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
                    perm_sync_data=page_perm_sync_data,
                )
            )
            attachment_query = self._construct_attachment_query(page["id"])
            attachment_cql = f"type=attachment and container='{page['id']}'"
            attachment_cql += self.cql_label_filter
            for attachment in self.confluence_client.cql_paginate_all_expansions(
                cql=attachment_query,
                cql=attachment_cql,
                expand=restrictions_expand,
                limit=_SLIM_DOC_BATCH_SIZE,
            ):

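For reference, a minimal standalone sketch of how the start/end bounds used in poll_source above become a CQL time fragment (epoch values and timezone are made up for illustration):

    from datetime import datetime, timezone

    start, end = 1704067200.0, 1704153600.0  # hypothetical epoch seconds
    tz = timezone.utc                        # stand-in for self.timezone

    formatted_start = datetime.fromtimestamp(start, tz=tz).strftime("%Y-%m-%d %H:%M")
    formatted_end = datetime.fromtimestamp(end, tz=tz).strftime("%Y-%m-%d %H:%M")

    time_filter = (
        f" and lastmodified >= '{formatted_start}'"
        f" and lastmodified <= '{formatted_end}'"
    )
    print(time_filter)  # " and lastmodified >= '2024-01-01 00:00' and lastmodified <= '2024-01-02 00:00'"
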
@@ -190,7 +190,7 @@ class DiscourseConnector(PollConnector):
        start: datetime,
        end: datetime,
    ) -> GenerateDocumentsOutput:
        page = 0
        page = 1
        while topic_ids := self._get_latest_topics(start, end, page):
            doc_batch: list[Document] = []
            for topic_id in topic_ids:

@@ -4,7 +4,6 @@ from typing import Dict

from google.oauth2.credentials import Credentials as OAuthCredentials  # type: ignore
from google.oauth2.service_account import Credentials as ServiceAccountCredentials  # type: ignore
from googleapiclient.errors import HttpError  # type: ignore

from onyx.configs.app_configs import INDEX_BATCH_SIZE
from onyx.configs.constants import DocumentSource

@@ -250,36 +249,17 @@ class GmailConnector(LoadConnector, PollConnector, SlimConnector):
        return new_creds_dict

    def _get_all_user_emails(self) -> list[str]:
        """
        List all user emails if we are on a Google Workspace domain.
        If the domain is gmail.com, or if we attempt to call the Admin SDK and
        get a 404, fall back to using the single user.
        """

        try:
            admin_service = get_admin_service(self.creds, self.primary_admin_email)
            emails = []
            for user in execute_paginated_retrieval(
                retrieval_function=admin_service.users().list,
                list_key="users",
                fields=USER_FIELDS,
                domain=self.google_domain,
            ):
                if email := user.get("primaryEmail"):
                    emails.append(email)
            return emails

        except HttpError as e:
            if e.resp.status == 404:
                logger.warning(
                    "Received 404 from Admin SDK; this may indicate a personal Gmail account "
                    "with no Workspace domain. Falling back to single user."
                )
                return [self.primary_admin_email]
            raise

        except Exception:
            raise
        admin_service = get_admin_service(self.creds, self.primary_admin_email)
        emails = []
        for user in execute_paginated_retrieval(
            retrieval_function=admin_service.users().list,
            list_key="users",
            fields=USER_FIELDS,
            domain=self.google_domain,
        ):
            if email := user.get("primaryEmail"):
                emails.append(email)
        return emails

    def _fetch_threads(
        self,

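A minimal sketch of the Admin SDK 404 fallback shown in the removed branch above, assuming an already-built Directory API service object (the real connector goes through get_admin_service and execute_paginated_retrieval):

    from googleapiclient.errors import HttpError  # type: ignore

    def list_workspace_emails(admin_service, primary_admin_email: str, domain: str) -> list[str]:
        """Return all Workspace user emails, or fall back to the admin on a 404."""
        try:
            resp = admin_service.users().list(domain=domain).execute()
            return [u["primaryEmail"] for u in resp.get("users", []) if "primaryEmail" in u]
        except HttpError as e:
            if e.resp.status == 404:
                # Personal Gmail account with no Workspace domain behind it
                return [primary_admin_email]
            raise
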
@@ -54,11 +54,9 @@ def get_total_users_count(db_session: Session) -> int:
    return user_count + invited_users


async def get_user_count(only_admin_users: bool = False) -> int:
async def get_user_count() -> int:
    async with get_async_session_with_tenant() as session:
        stmt = select(func.count(User.id))
        if only_admin_users:
            stmt = stmt.where(User.role == UserRole.ADMIN)
        result = await session.execute(stmt)
        user_count = result.scalar()
        if user_count is None:

@@ -310,9 +310,6 @@ def associate_default_cc_pair(db_session: Session) -> None:
|
||||
if existing_association is not None:
|
||||
return
|
||||
|
||||
# DefaultCCPair has id 1 since it is the first CC pair created
|
||||
# It is DEFAULT_CC_PAIR_ID, but can't set it explicitly because it messed with the
|
||||
# auto-incrementing id
|
||||
association = ConnectorCredentialPair(
|
||||
connector_id=0,
|
||||
credential_id=0,
|
||||
@@ -353,12 +350,7 @@ def add_credential_to_connector(
|
||||
last_successful_index_time: datetime | None = None,
|
||||
) -> StatusResponse:
|
||||
connector = fetch_connector_by_id(connector_id, db_session)
|
||||
credential = fetch_credential_by_id(
|
||||
credential_id,
|
||||
user,
|
||||
db_session,
|
||||
get_editable=False,
|
||||
)
|
||||
credential = fetch_credential_by_id(credential_id, user, db_session)
|
||||
|
||||
if connector is None:
|
||||
raise HTTPException(status_code=404, detail="Connector does not exist")
|
||||
@@ -435,12 +427,7 @@ def remove_credential_from_connector(
|
||||
db_session: Session,
|
||||
) -> StatusResponse[int]:
|
||||
connector = fetch_connector_by_id(connector_id, db_session)
|
||||
credential = fetch_credential_by_id(
|
||||
credential_id,
|
||||
user,
|
||||
db_session,
|
||||
get_editable=False,
|
||||
)
|
||||
credential = fetch_credential_by_id(credential_id, user, db_session)
|
||||
|
||||
if connector is None:
|
||||
raise HTTPException(status_code=404, detail="Connector does not exist")
|
||||
|
||||
@@ -86,7 +86,7 @@ def _add_user_filters(
|
||||
"""
|
||||
Filter Credentials by:
|
||||
- if the user is in the user_group that owns the Credential
|
||||
- if the user is a curator, they must also have a curator relationship
|
||||
- if the user is not a global_curator, they must also have a curator relationship
|
||||
to the user_group
|
||||
- if editing is being done, we also filter out Credentials that are owned by groups
|
||||
that the user isn't a curator for
|
||||
@@ -97,7 +97,6 @@ def _add_user_filters(
|
||||
where_clause = User__UserGroup.user_id == user.id
|
||||
if user.role == UserRole.CURATOR:
|
||||
where_clause &= User__UserGroup.is_curator == True # noqa: E712
|
||||
|
||||
if get_editable:
|
||||
user_groups = select(User__UserGroup.user_group_id).where(
|
||||
User__UserGroup.user_id == user.id
|
||||
@@ -153,16 +152,10 @@ def fetch_credential_by_id(
|
||||
user: User | None,
|
||||
db_session: Session,
|
||||
assume_admin: bool = False,
|
||||
get_editable: bool = True,
|
||||
) -> Credential | None:
|
||||
stmt = select(Credential).distinct()
|
||||
stmt = stmt.where(Credential.id == credential_id)
|
||||
stmt = _add_user_filters(
|
||||
stmt=stmt,
|
||||
user=user,
|
||||
assume_admin=assume_admin,
|
||||
get_editable=get_editable,
|
||||
)
|
||||
stmt = _add_user_filters(stmt, user, assume_admin=assume_admin)
|
||||
result = db_session.execute(stmt)
|
||||
credential = result.scalar_one_or_none()
|
||||
return credential
|
||||
|
||||
@@ -7,15 +7,8 @@ from sqlalchemy import func
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from onyx.auth.invited_users import get_invited_users
|
||||
from onyx.auth.invited_users import write_invited_users
|
||||
from onyx.auth.schemas import UserRole
|
||||
from onyx.db.models import DocumentSet__User
|
||||
from onyx.db.models import Persona__User
|
||||
from onyx.db.models import SamlAccount
|
||||
from onyx.db.models import User
|
||||
from onyx.db.models import User__UserGroup
|
||||
from onyx.utils.variable_functionality import fetch_ee_implementation_or_noop
|
||||
|
||||
|
||||
def validate_user_role_update(requested_role: UserRole, current_role: UserRole) -> None:
|
||||
@@ -192,43 +185,3 @@ def batch_add_ext_perm_user_if_not_exists(
|
||||
db_session.commit()
|
||||
|
||||
return found_users + new_users
|
||||
|
||||
|
||||
def delete_user_from_db(
|
||||
user_to_delete: User,
|
||||
db_session: Session,
|
||||
) -> None:
|
||||
for oauth_account in user_to_delete.oauth_accounts:
|
||||
db_session.delete(oauth_account)
|
||||
|
||||
fetch_ee_implementation_or_noop(
|
||||
"onyx.db.external_perm",
|
||||
"delete_user__ext_group_for_user__no_commit",
|
||||
)(
|
||||
db_session=db_session,
|
||||
user_id=user_to_delete.id,
|
||||
)
|
||||
db_session.query(SamlAccount).filter(
|
||||
SamlAccount.user_id == user_to_delete.id
|
||||
).delete()
|
||||
db_session.query(DocumentSet__User).filter(
|
||||
DocumentSet__User.user_id == user_to_delete.id
|
||||
).delete()
|
||||
db_session.query(Persona__User).filter(
|
||||
Persona__User.user_id == user_to_delete.id
|
||||
).delete()
|
||||
db_session.query(User__UserGroup).filter(
|
||||
User__UserGroup.user_id == user_to_delete.id
|
||||
).delete()
|
||||
db_session.delete(user_to_delete)
|
||||
db_session.commit()
|
||||
|
||||
# NOTE: edge case may exist with race conditions
|
||||
# with this `invited user` scheme generally.
|
||||
user_emails = get_invited_users()
|
||||
remaining_users = [
|
||||
remaining_user_email
|
||||
for remaining_user_email in user_emails
|
||||
if remaining_user_email != user_to_delete.email
|
||||
]
|
||||
write_invited_users(remaining_users)
|
||||
|
||||
@@ -55,7 +55,9 @@ def remove_invalid_unicode_chars(text: str) -> str:
    return _illegal_xml_chars_RE.sub("", text)


def get_vespa_http_client(no_timeout: bool = False, http2: bool = True) -> httpx.Client:
def get_vespa_http_client(
    no_timeout: bool = False, http2: bool = False
) -> httpx.Client:
    """
    Configure and return an HTTP client for communicating with Vespa,
    including authentication if needed.

@@ -260,21 +260,6 @@ def index_doc_batch_prepare(
def filter_documents(document_batch: list[Document]) -> list[Document]:
    documents: list[Document] = []
    for document in document_batch:
        # Remove any NUL characters from title/semantic_id
        # This is a known issue with the Zendesk connector
        # Postgres cannot handle NUL characters in text fields
        if document.title:
            document.title = document.title.replace("\x00", "")
        if document.semantic_identifier:
            document.semantic_identifier = document.semantic_identifier.replace(
                "\x00", ""
            )

        # Remove NUL characters from all sections
        for section in document.sections:
            if section.text is not None:
                section.text = section.text.replace("\x00", "")

        empty_contents = not any(section.text.strip() for section in document.sections)
        if (
            (not document.title or not document.title.strip())

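The NUL stripping above exists because Postgres text columns reject \x00; a tiny standalone example with an arbitrary sample string:

    title = "Zendesk ticket\x00 title"      # sample value with an embedded NUL
    clean_title = title.replace("\x00", "")
    assert "\x00" not in clean_title
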
@@ -266,27 +266,18 @@ class DefaultMultiLLM(LLM):
        # )
        self._custom_config = custom_config

        # Create a dictionary for model-specific arguments if it's None
        model_kwargs = model_kwargs or {}

        # NOTE: have to set these as environment variables for Litellm since
        # not all are able to passed in but they always support them set as env
        # variables. We'll also try passing them in, since litellm just ignores
        # addtional kwargs (and some kwargs MUST be passed in rather than set as
        # env variables)
        if custom_config:
            # Specifically pass in "vertex_credentials" as a model_kwarg to the
            # completion call for vertex AI. More details here:
            # https://docs.litellm.ai/docs/providers/vertex
            vertex_credentials_key = "vertex_credentials"
            vertex_credentials = custom_config.get(vertex_credentials_key)
            if vertex_credentials and model_provider == "vertex_ai":
                model_kwargs[vertex_credentials_key] = vertex_credentials
            else:
                # standard case
                for k, v in custom_config.items():
                    os.environ[k] = v
            for k, v in custom_config.items():
                os.environ[k] = v

        model_kwargs = model_kwargs or {}
        if custom_config:
            model_kwargs.update(custom_config)
        if extra_headers:
            model_kwargs.update({"extra_headers": extra_headers})
        if extra_body:

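A rough sketch of the vertex_credentials handling described in the comments above, assuming litellm accepts it as a per-call keyword as the linked docs describe; the config dict and provider name are illustrative:

    import os

    def build_model_kwargs(custom_config: dict | None, model_provider: str) -> dict:
        model_kwargs: dict = {}
        if custom_config:
            creds = custom_config.get("vertex_credentials")
            if creds and model_provider == "vertex_ai":
                # forwarded directly to the completion call instead of the environment
                model_kwargs["vertex_credentials"] = creds
            else:
                # everything else is exported so litellm can pick it up
                for k, v in custom_config.items():
                    os.environ[k] = v
        return model_kwargs
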
@@ -118,7 +118,7 @@ class RedisConnectorIndex:

        The slack in timing is needed to avoid race conditions where simply checking
        the celery queue and task status could result in race conditions."""
        self.redis.set(self.active_key, 0, ex=3600)
        self.redis.set(self.active_key, 0, ex=300)

    def active(self) -> bool:
        if self.redis.exists(self.active_key):

@@ -172,9 +172,6 @@ class RedisConnectorIndex:
    @staticmethod
    def reset_all(r: redis.Redis) -> None:
        """Deletes all redis values for all connectors"""
        for key in r.scan_iter(RedisConnectorIndex.ACTIVE_PREFIX + "*"):
            r.delete(key)

        for key in r.scan_iter(RedisConnectorIndex.GENERATOR_LOCK_PREFIX + "*"):
            r.delete(key)

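For context, a minimal sketch of the active-key TTL pattern whose expiry drops from 3600s to 300s in the hunk above (local Redis and key name assumed for illustration):

    import redis

    r = redis.Redis(host="localhost", port=6379)       # assumed local instance
    active_key = "connectorindexing_active:example"    # hypothetical key name

    r.set(active_key, 0, ex=300)        # mark the attempt active for 5 minutes
    print(bool(r.exists(active_key)))   # True until the TTL lapses
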
@@ -510,7 +510,7 @@ def associate_credential_to_connector(
|
||||
db_session: Session = Depends(get_session),
|
||||
) -> StatusResponse[int]:
|
||||
fetch_ee_implementation_or_noop(
|
||||
"onyx.db.user_group", "validate_object_creation_for_user", None
|
||||
"onyx.db.user_group", "validate_user_creation_permissions", None
|
||||
)(
|
||||
db_session=db_session,
|
||||
user=user,
|
||||
@@ -532,8 +532,7 @@ def associate_credential_to_connector(
|
||||
)
|
||||
|
||||
return response
|
||||
except IntegrityError as e:
|
||||
logger.error(f"IntegrityError: {e}")
|
||||
except IntegrityError:
|
||||
raise HTTPException(status_code=400, detail="Name must be unique")
|
||||
|
||||
|
||||
|
||||
@@ -680,7 +680,7 @@ def create_connector_from_model(
|
||||
_validate_connector_allowed(connector_data.source)
|
||||
|
||||
fetch_ee_implementation_or_noop(
|
||||
"onyx.db.user_group", "validate_object_creation_for_user", None
|
||||
"onyx.db.user_group", "validate_user_creation_permissions", None
|
||||
)(
|
||||
db_session=db_session,
|
||||
user=user,
|
||||
@@ -716,7 +716,7 @@ def create_connector_with_mock_credential(
|
||||
tenant_id: str = Depends(get_current_tenant_id),
|
||||
) -> StatusResponse:
|
||||
fetch_ee_implementation_or_noop(
|
||||
"onyx.db.user_group", "validate_object_creation_for_user", None
|
||||
"onyx.db.user_group", "validate_user_creation_permissions", None
|
||||
)(
|
||||
db_session=db_session,
|
||||
user=user,
|
||||
@@ -776,7 +776,7 @@ def update_connector_from_model(
|
||||
try:
|
||||
_validate_connector_allowed(connector_data.source)
|
||||
fetch_ee_implementation_or_noop(
|
||||
"onyx.db.user_group", "validate_object_creation_for_user", None
|
||||
"onyx.db.user_group", "validate_user_creation_permissions", None
|
||||
)(
|
||||
db_session=db_session,
|
||||
user=user,
|
||||
|
||||
@@ -122,7 +122,7 @@ def create_credential_from_model(
|
||||
) -> ObjectCreationIdResponse:
|
||||
if not _ignore_credential_permissions(credential_info.source):
|
||||
fetch_ee_implementation_or_noop(
|
||||
"onyx.db.user_group", "validate_object_creation_for_user", None
|
||||
"onyx.db.user_group", "validate_user_creation_permissions", None
|
||||
)(
|
||||
db_session=db_session,
|
||||
user=user,
|
||||
@@ -164,12 +164,7 @@ def get_credential_by_id(
|
||||
user: User = Depends(current_user),
|
||||
db_session: Session = Depends(get_session),
|
||||
) -> CredentialSnapshot | StatusResponse[int]:
|
||||
credential = fetch_credential_by_id(
|
||||
credential_id,
|
||||
user,
|
||||
db_session,
|
||||
get_editable=False,
|
||||
)
|
||||
credential = fetch_credential_by_id(credential_id, user, db_session)
|
||||
if credential is None:
|
||||
raise HTTPException(
|
||||
status_code=401,
|
||||
|
||||
@@ -31,7 +31,7 @@ def create_document_set(
|
||||
db_session: Session = Depends(get_session),
|
||||
) -> int:
|
||||
fetch_ee_implementation_or_noop(
|
||||
"onyx.db.user_group", "validate_object_creation_for_user", None
|
||||
"onyx.db.user_group", "validate_user_creation_permissions", None
|
||||
)(
|
||||
db_session=db_session,
|
||||
user=user,
|
||||
@@ -56,7 +56,7 @@ def patch_document_set(
|
||||
db_session: Session = Depends(get_session),
|
||||
) -> None:
|
||||
fetch_ee_implementation_or_noop(
|
||||
"onyx.db.user_group", "validate_object_creation_for_user", None
|
||||
"onyx.db.user_group", "validate_user_creation_permissions", None
|
||||
)(
|
||||
db_session=db_session,
|
||||
user=user,
|
||||
|
||||
@@ -57,6 +57,7 @@ def test_llm_configuration(
|
||||
)
|
||||
|
||||
functions_with_args: list[tuple[Callable, tuple]] = [(test_llm, (llm,))]
|
||||
|
||||
if (
|
||||
test_llm_request.fast_default_model_name
|
||||
and test_llm_request.fast_default_model_name
|
||||
|
||||
@@ -21,7 +21,6 @@ from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from ee.onyx.configs.app_configs import SUPER_USERS
|
||||
from onyx.auth.email_utils import send_user_email_invite
|
||||
from onyx.auth.invited_users import get_invited_users
|
||||
from onyx.auth.invited_users import write_invited_users
|
||||
from onyx.auth.noauth_user import fetch_no_auth_user
|
||||
@@ -42,8 +41,11 @@ from onyx.db.auth import get_total_users_count
|
||||
from onyx.db.engine import CURRENT_TENANT_ID_CONTEXTVAR
|
||||
from onyx.db.engine import get_session
|
||||
from onyx.db.models import AccessToken
|
||||
from onyx.db.models import DocumentSet__User
|
||||
from onyx.db.models import Persona__User
|
||||
from onyx.db.models import SamlAccount
|
||||
from onyx.db.models import User
|
||||
from onyx.db.users import delete_user_from_db
|
||||
from onyx.db.models import User__UserGroup
|
||||
from onyx.db.users import get_user_by_email
|
||||
from onyx.db.users import list_users
|
||||
from onyx.db.users import validate_user_role_update
|
||||
@@ -59,6 +61,7 @@ from onyx.server.models import FullUserSnapshot
|
||||
from onyx.server.models import InvitedUserSnapshot
|
||||
from onyx.server.models import MinimalUserSnapshot
|
||||
from onyx.server.utils import BasicAuthenticationError
|
||||
from onyx.server.utils import send_user_email_invite
|
||||
from onyx.utils.logger import setup_logger
|
||||
from onyx.utils.variable_functionality import fetch_ee_implementation_or_noop
|
||||
from shared_configs.configs import MULTI_TENANT
|
||||
@@ -367,10 +370,45 @@ async def delete_user(
|
||||
db_session.expunge(user_to_delete)
|
||||
|
||||
try:
|
||||
delete_user_from_db(user_to_delete, db_session)
|
||||
logger.info(f"Deleted user {user_to_delete.email}")
|
||||
for oauth_account in user_to_delete.oauth_accounts:
|
||||
db_session.delete(oauth_account)
|
||||
|
||||
fetch_ee_implementation_or_noop(
|
||||
"onyx.db.external_perm",
|
||||
"delete_user__ext_group_for_user__no_commit",
|
||||
)(
|
||||
db_session=db_session,
|
||||
user_id=user_to_delete.id,
|
||||
)
|
||||
db_session.query(SamlAccount).filter(
|
||||
SamlAccount.user_id == user_to_delete.id
|
||||
).delete()
|
||||
db_session.query(DocumentSet__User).filter(
|
||||
DocumentSet__User.user_id == user_to_delete.id
|
||||
).delete()
|
||||
db_session.query(Persona__User).filter(
|
||||
Persona__User.user_id == user_to_delete.id
|
||||
).delete()
|
||||
db_session.query(User__UserGroup).filter(
|
||||
User__UserGroup.user_id == user_to_delete.id
|
||||
).delete()
|
||||
db_session.delete(user_to_delete)
|
||||
db_session.commit()
|
||||
|
||||
# NOTE: edge case may exist with race conditions
|
||||
# with this `invited user` scheme generally.
|
||||
user_emails = get_invited_users()
|
||||
remaining_users = [
|
||||
user for user in user_emails if user != user_email.user_email
|
||||
]
|
||||
write_invited_users(remaining_users)
|
||||
|
||||
logger.info(f"Deleted user {user_to_delete.email}")
|
||||
except Exception as e:
|
||||
import traceback
|
||||
|
||||
full_traceback = traceback.format_exc()
|
||||
logger.error(f"Full stack trace:\n{full_traceback}")
|
||||
db_session.rollback()
|
||||
logger.error(f"Error deleting user {user_to_delete.email}: {str(e)}")
|
||||
raise HTTPException(status_code=500, detail="Error deleting user")
|
||||
|
||||
@@ -4,7 +4,6 @@ from fastapi import HTTPException
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from onyx.auth.users import api_key_dep
|
||||
from onyx.configs.constants import DEFAULT_CC_PAIR_ID
|
||||
from onyx.configs.constants import DocumentSource
|
||||
from onyx.connectors.models import Document
|
||||
from onyx.connectors.models import IndexAttemptMetadata
|
||||
@@ -80,7 +79,7 @@ def upsert_ingestion_doc(
|
||||
document.source = DocumentSource.FILE
|
||||
|
||||
cc_pair = get_connector_credential_pair_from_id(
|
||||
cc_pair_id=doc_info.cc_pair_id or DEFAULT_CC_PAIR_ID, db_session=db_session
|
||||
cc_pair_id=doc_info.cc_pair_id or 0, db_session=db_session
|
||||
)
|
||||
if cc_pair is None:
|
||||
raise HTTPException(
|
||||
|
||||
@@ -182,15 +182,12 @@ def get_chat_session(
|
||||
description=chat_session.description,
|
||||
persona_id=chat_session.persona_id,
|
||||
persona_name=chat_session.persona.name if chat_session.persona else None,
|
||||
persona_icon_color=chat_session.persona.icon_color
|
||||
if chat_session.persona
|
||||
else None,
|
||||
persona_icon_shape=chat_session.persona.icon_shape
|
||||
if chat_session.persona
|
||||
else None,
|
||||
current_alternate_model=chat_session.current_alternate_model,
|
||||
messages=[
|
||||
translate_db_message_to_chat_message_detail(msg) for msg in session_messages
|
||||
translate_db_message_to_chat_message_detail(
|
||||
msg, remove_doc_content=is_shared # if shared, don't leak doc content
|
||||
)
|
||||
for msg in session_messages
|
||||
],
|
||||
time_created=chat_session.time_created,
|
||||
shared_status=chat_session.shared_status,
|
||||
@@ -200,7 +197,7 @@ def get_chat_session(
|
||||
@router.post("/create-chat-session")
|
||||
def create_new_chat_session(
|
||||
chat_session_creation_request: ChatSessionCreationRequest,
|
||||
user: User | None = Depends(current_limited_user),
|
||||
user: User | None = Depends(current_user),
|
||||
db_session: Session = Depends(get_session),
|
||||
) -> CreateChatSessionID:
|
||||
user_id = user.id if user is not None else None
|
||||
|
||||
@@ -225,8 +225,6 @@ class ChatSessionDetailResponse(BaseModel):
|
||||
description: str | None
|
||||
persona_id: int | None = None
|
||||
persona_name: str | None
|
||||
persona_icon_color: str | None
|
||||
persona_icon_shape: int | None
|
||||
messages: list[ChatMessageDetail]
|
||||
time_created: datetime
|
||||
shared_status: ChatSessionSharedStatus
|
||||
|
||||
@@ -1,10 +1,21 @@
|
||||
import json
|
||||
import smtplib
|
||||
from datetime import datetime
|
||||
from email.mime.multipart import MIMEMultipart
|
||||
from email.mime.text import MIMEText
|
||||
from textwrap import dedent
|
||||
from typing import Any
|
||||
|
||||
from fastapi import HTTPException
|
||||
from fastapi import status
|
||||
|
||||
from onyx.configs.app_configs import SMTP_PASS
|
||||
from onyx.configs.app_configs import SMTP_PORT
|
||||
from onyx.configs.app_configs import SMTP_SERVER
|
||||
from onyx.configs.app_configs import SMTP_USER
|
||||
from onyx.configs.app_configs import WEB_DOMAIN
|
||||
from onyx.db.models import User
|
||||
|
||||
|
||||
class BasicAuthenticationError(HTTPException):
|
||||
def __init__(self, detail: str):
|
||||
@@ -51,3 +62,31 @@ def mask_credential_dict(credential_dict: dict[str, Any]) -> dict[str, str]:
|
||||
|
||||
masked_creds[key] = mask_string(val)
|
||||
return masked_creds
|
||||
|
||||
|
||||
def send_user_email_invite(user_email: str, current_user: User) -> None:
|
||||
msg = MIMEMultipart()
|
||||
msg["Subject"] = "Invitation to Join Onyx Workspace"
|
||||
msg["From"] = current_user.email
|
||||
msg["To"] = user_email
|
||||
|
||||
email_body = dedent(
|
||||
f"""\
|
||||
Hello,
|
||||
|
||||
You have been invited to join a workspace on Onyx.
|
||||
|
||||
To join the workspace, please visit the following link:
|
||||
|
||||
{WEB_DOMAIN}/auth/login
|
||||
|
||||
Best regards,
|
||||
The Onyx Team
|
||||
"""
|
||||
)
|
||||
|
||||
msg.attach(MIMEText(email_body, "plain"))
|
||||
with smtplib.SMTP(SMTP_SERVER, SMTP_PORT) as smtp_server:
|
||||
smtp_server.starttls()
|
||||
smtp_server.login(SMTP_USER, SMTP_PASS)
|
||||
smtp_server.send_message(msg)
|
||||
|
||||
@@ -22,6 +22,7 @@ from onyx.utils.variable_functionality import (
from onyx.utils.variable_functionality import noop_fallback
from shared_configs.configs import MULTI_TENANT


_DANSWER_TELEMETRY_ENDPOINT = "https://telemetry.onyx.app/anonymous_telemetry"
_CACHED_UUID: str | None = None
_CACHED_INSTANCE_DOMAIN: str | None = None

@@ -117,9 +118,12 @@ def mt_cloud_telemetry(
    event: MilestoneRecordType,
    properties: dict | None = None,
) -> None:
    print(f"mt_cloud_telemetry {distinct_id} {event} {properties}")
    if not MULTI_TENANT:
        print("mt_cloud_telemetry not MULTI_TENANT")
        return

    print("mt_cloud_telemetry MULTI_TENANT")
    # MIT version should not need to include any Posthog code
    # This is only for Onyx MT Cloud, this code should also never be hit, no reason for any orgs to
    # be running the Multi Tenant version of Onyx.

@@ -137,8 +141,11 @@ def create_milestone_and_report(
    properties: dict | None,
    db_session: Session,
) -> None:
    print(f"create_milestone_and_report {user} {event_type} {db_session}")
    _, is_new = create_milestone_if_not_exists(user, event_type, db_session)
    print(f"create_milestone_and_report {is_new}")
    if is_new:
        print("create_milestone_and_report is_new")
        mt_cloud_telemetry(
            distinct_id=distinct_id,
            event=event_type,

|
||||
- NEXT_PUBLIC_NEGATIVE_PREDEFINED_FEEDBACK_OPTIONS=${NEXT_PUBLIC_NEGATIVE_PREDEFINED_FEEDBACK_OPTIONS:-}
|
||||
- NEXT_PUBLIC_DISABLE_LOGOUT=${NEXT_PUBLIC_DISABLE_LOGOUT:-}
|
||||
- NEXT_PUBLIC_DEFAULT_SIDEBAR_OPEN=${NEXT_PUBLIC_DEFAULT_SIDEBAR_OPEN:-}
|
||||
- NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=${NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED:-}
|
||||
|
||||
# Enterprise Edition only
|
||||
- NEXT_PUBLIC_THEME=${NEXT_PUBLIC_THEME:-}
|
||||
# DO NOT TURN ON unless you have EXPLICIT PERMISSION from Onyx.
|
||||
|
||||
@@ -72,7 +72,6 @@ services:
|
||||
- NEXT_PUBLIC_NEGATIVE_PREDEFINED_FEEDBACK_OPTIONS=${NEXT_PUBLIC_NEGATIVE_PREDEFINED_FEEDBACK_OPTIONS:-}
|
||||
- NEXT_PUBLIC_DISABLE_LOGOUT=${NEXT_PUBLIC_DISABLE_LOGOUT:-}
|
||||
- NEXT_PUBLIC_THEME=${NEXT_PUBLIC_THEME:-}
|
||||
- NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=${NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED:-}
|
||||
depends_on:
|
||||
- api_server
|
||||
restart: always
|
||||
|
||||
@@ -99,7 +99,6 @@ services:
|
||||
- NEXT_PUBLIC_NEGATIVE_PREDEFINED_FEEDBACK_OPTIONS=${NEXT_PUBLIC_NEGATIVE_PREDEFINED_FEEDBACK_OPTIONS:-}
|
||||
- NEXT_PUBLIC_DISABLE_LOGOUT=${NEXT_PUBLIC_DISABLE_LOGOUT:-}
|
||||
- NEXT_PUBLIC_THEME=${NEXT_PUBLIC_THEME:-}
|
||||
- NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=${NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED:-}
|
||||
depends_on:
|
||||
- api_server
|
||||
restart: always
|
||||
@@ -238,7 +237,7 @@ services:
|
||||
volumes:
|
||||
- ../data/certbot/conf:/etc/letsencrypt
|
||||
- ../data/certbot/www:/var/www/certbot
|
||||
logging:
|
||||
logging::wq
|
||||
driver: json-file
|
||||
options:
|
||||
max-size: "50m"
|
||||
@@ -260,3 +259,6 @@ volumes:
|
||||
# Created by the container itself
|
||||
model_cache_huggingface:
|
||||
indexing_huggingface_model_cache:
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -75,9 +75,6 @@ ENV NEXT_PUBLIC_SENTRY_DSN=${NEXT_PUBLIC_SENTRY_DSN}
|
||||
ARG NEXT_PUBLIC_GTM_ENABLED
|
||||
ENV NEXT_PUBLIC_GTM_ENABLED=${NEXT_PUBLIC_GTM_ENABLED}
|
||||
|
||||
ARG NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED
|
||||
ENV NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=${NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED}
|
||||
|
||||
RUN npx next build
|
||||
|
||||
# Step 2. Production image, copy all the files and run next
|
||||
@@ -153,9 +150,6 @@ ENV NEXT_PUBLIC_SENTRY_DSN=${NEXT_PUBLIC_SENTRY_DSN}
|
||||
ARG NEXT_PUBLIC_GTM_ENABLED
|
||||
ENV NEXT_PUBLIC_GTM_ENABLED=${NEXT_PUBLIC_GTM_ENABLED}
|
||||
|
||||
ARG NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED
|
||||
ENV NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=${NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED}
|
||||
|
||||
# Note: Don't expose ports here, Compose will handle that for us if necessary.
|
||||
# If you want to run this without compose, specify the ports to
|
||||
# expose via cli
|
||||
|
||||
214
web/package-lock.json
generated
214
web/package-lock.json
generated
@@ -40,7 +40,6 @@
|
||||
"favicon-fetch": "^1.0.0",
|
||||
"formik": "^2.2.9",
|
||||
"js-cookie": "^3.0.5",
|
||||
"katex": "^0.16.17",
|
||||
"lodash": "^4.17.21",
|
||||
"lucide-react": "^0.454.0",
|
||||
"mdast-util-find-and-replace": "^3.0.1",
|
||||
@@ -58,10 +57,8 @@
|
||||
"react-markdown": "^9.0.1",
|
||||
"react-select": "^5.8.0",
|
||||
"recharts": "^2.13.1",
|
||||
"rehype-katex": "^7.0.1",
|
||||
"rehype-prism-plus": "^2.0.0",
|
||||
"remark-gfm": "^4.0.0",
|
||||
"remark-math": "^6.0.0",
|
||||
"semver": "^7.5.4",
|
||||
"sharp": "^0.33.5",
|
||||
"stripe": "^17.0.0",
|
||||
@@ -4778,12 +4775,6 @@
|
||||
"integrity": "sha512-dRLjCWHYg4oaA77cxO64oO+7JwCwnIzkZPdrrC71jQmQtlhM556pwKo5bUzqvZndkVbeFLIIi+9TC40JNF5hNQ==",
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/@types/katex": {
|
||||
"version": "0.16.7",
|
||||
"resolved": "https://registry.npmjs.org/@types/katex/-/katex-0.16.7.tgz",
|
||||
"integrity": "sha512-HMwFiRujE5PjrgwHQ25+bsLJgowjGjm5Z8FVSf0N6PwgJrwxH0QxzHYDcKsTfV3wva0vzrpqMTJS2jXPr5BMEQ==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/@types/lodash": {
|
||||
"version": "4.17.4",
|
||||
"resolved": "https://registry.npmjs.org/@types/lodash/-/lodash-4.17.4.tgz",
|
||||
@@ -8125,51 +8116,6 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-from-dom": {
|
||||
"version": "5.0.1",
|
||||
"resolved": "https://registry.npmjs.org/hast-util-from-dom/-/hast-util-from-dom-5.0.1.tgz",
|
||||
"integrity": "sha512-N+LqofjR2zuzTjCPzyDUdSshy4Ma6li7p/c3pA78uTwzFgENbgbUrm2ugwsOdcjI1muO+o6Dgzp9p8WHtn/39Q==",
|
||||
"license": "ISC",
|
||||
"dependencies": {
|
||||
"@types/hast": "^3.0.0",
|
||||
"hastscript": "^9.0.0",
|
||||
"web-namespaces": "^2.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-from-dom/node_modules/hast-util-parse-selector": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-4.0.0.tgz",
|
||||
"integrity": "sha512-wkQCkSYoOGCRKERFWcxMVMOcYE2K1AaNLU8DXS9arxnLOUEWbOXKXiJUNzEpqZ3JOKpnha3jkFrumEjVliDe7A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/hast": "^3.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-from-dom/node_modules/hastscript": {
|
||||
"version": "9.0.0",
|
||||
"resolved": "https://registry.npmjs.org/hastscript/-/hastscript-9.0.0.tgz",
|
||||
"integrity": "sha512-jzaLBGavEDKHrc5EfFImKN7nZKKBdSLIdGvCwDZ9TfzbF2ffXiov8CKE445L2Z1Ek2t/m4SKQ2j6Ipv7NyUolw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/hast": "^3.0.0",
|
||||
"comma-separated-tokens": "^2.0.0",
|
||||
"hast-util-parse-selector": "^4.0.0",
|
||||
"property-information": "^6.0.0",
|
||||
"space-separated-tokens": "^2.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-from-html": {
|
||||
"version": "2.0.1",
|
||||
"resolved": "https://registry.npmjs.org/hast-util-from-html/-/hast-util-from-html-2.0.1.tgz",
|
||||
@@ -8187,22 +8133,6 @@
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-from-html-isomorphic": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/hast-util-from-html-isomorphic/-/hast-util-from-html-isomorphic-2.0.0.tgz",
|
||||
"integrity": "sha512-zJfpXq44yff2hmE0XmwEOzdWin5xwH+QIhMLOScpX91e/NSGPsAzNCvLQDIEPyO2TXi+lBmU6hjLIhV8MwP2kw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/hast": "^3.0.0",
|
||||
"hast-util-from-dom": "^5.0.0",
|
||||
"hast-util-from-html": "^2.0.0",
|
||||
"unist-util-remove-position": "^5.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-from-parse5": {
|
||||
"version": "8.0.1",
|
||||
"resolved": "https://registry.npmjs.org/hast-util-from-parse5/-/hast-util-from-parse5-8.0.1.tgz",
|
||||
@@ -8250,19 +8180,6 @@
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-is-element": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/hast-util-is-element/-/hast-util-is-element-3.0.0.tgz",
|
||||
"integrity": "sha512-Val9mnv2IWpLbNPqc/pUem+a7Ipj2aHacCwgNfTiK0vJKl0LF+4Ba4+v1oPHFpf3bLYmreq0/l3Gud9S5OH42g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/hast": "^3.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-parse-selector": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/hast-util-parse-selector/-/hast-util-parse-selector-3.1.1.tgz",
|
||||
@@ -8326,22 +8243,6 @@
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-to-text": {
|
||||
"version": "4.0.2",
|
||||
"resolved": "https://registry.npmjs.org/hast-util-to-text/-/hast-util-to-text-4.0.2.tgz",
|
||||
"integrity": "sha512-KK6y/BN8lbaq654j7JgBydev7wuNMcID54lkRav1P0CaE1e47P72AWWPiGKXTJU271ooYzcvTAn/Zt0REnvc7A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/hast": "^3.0.0",
|
||||
"@types/unist": "^3.0.0",
|
||||
"hast-util-is-element": "^3.0.0",
|
||||
"unist-util-find-after": "^5.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/hast-util-whitespace": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/hast-util-whitespace/-/hast-util-whitespace-3.0.0.tgz",
|
||||
@@ -9325,31 +9226,6 @@
|
||||
"node": ">=4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/katex": {
|
||||
"version": "0.16.17",
|
||||
"resolved": "https://registry.npmjs.org/katex/-/katex-0.16.17.tgz",
|
||||
"integrity": "sha512-OyzSrXBllz+Jdc9Auiw0kt21gbZ4hkz8Q5srVAb2U9INcYIfGKbxe+bvNvEz1bQ/NrDeRRho5eLCyk/L03maAw==",
|
||||
"funding": [
|
||||
"https://opencollective.com/katex",
|
||||
"https://github.com/sponsors/katex"
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"commander": "^8.3.0"
|
||||
},
|
||||
"bin": {
|
||||
"katex": "cli.js"
|
||||
}
|
||||
},
|
||||
"node_modules/katex/node_modules/commander": {
|
||||
"version": "8.3.0",
|
||||
"resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz",
|
||||
"integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">= 12"
|
||||
}
|
||||
},
|
||||
"node_modules/keyv": {
|
||||
"version": "4.5.4",
|
||||
"resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz",
|
||||
@@ -9683,25 +9559,6 @@
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/mdast-util-math": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/mdast-util-math/-/mdast-util-math-3.0.0.tgz",
|
||||
"integrity": "sha512-Tl9GBNeG/AhJnQM221bJR2HPvLOSnLE/T9cJI9tlc6zwQk2nPk/4f0cHkOdEixQPC/j8UtKDdITswvLAy1OZ1w==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/hast": "^3.0.0",
|
||||
"@types/mdast": "^4.0.0",
|
||||
"devlop": "^1.0.0",
|
||||
"longest-streak": "^3.0.0",
|
||||
"mdast-util-from-markdown": "^2.0.0",
|
||||
"mdast-util-to-markdown": "^2.1.0",
|
||||
"unist-util-remove-position": "^5.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/mdast-util-mdx-expression": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/mdast-util-mdx-expression/-/mdast-util-mdx-expression-2.0.0.tgz",
|
||||
@@ -10046,25 +9903,6 @@
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/micromark-extension-math": {
|
||||
"version": "3.1.0",
|
||||
"resolved": "https://registry.npmjs.org/micromark-extension-math/-/micromark-extension-math-3.1.0.tgz",
|
||||
"integrity": "sha512-lvEqd+fHjATVs+2v/8kg9i5Q0AP2k85H0WUOwpIVvUML8BapsMvh1XAogmQjOCsLpoKRCVQqEkQBB3NhVBcsOg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/katex": "^0.16.0",
|
||||
"devlop": "^1.0.0",
|
||||
"katex": "^0.16.0",
|
||||
"micromark-factory-space": "^2.0.0",
|
||||
"micromark-util-character": "^2.0.0",
|
||||
"micromark-util-symbol": "^2.0.0",
|
||||
"micromark-util-types": "^2.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/micromark-factory-destination": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-2.0.0.tgz",
|
||||
@@ -14149,7 +13987,6 @@
|
||||
"version": "9.0.1",
|
||||
"resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-9.0.1.tgz",
|
||||
"integrity": "sha512-186Gw/vF1uRkydbsOIkcGXw7aHq0sZOCRFFjGrr7b9+nVZg4UfA4enXCaxm4fUzecU38sWfrNDitGhshuU7rdg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/hast": "^3.0.0",
|
||||
"devlop": "^1.0.0",
|
||||
@@ -14498,25 +14335,6 @@
|
||||
"url": "https://github.com/sponsors/ljharb"
|
||||
}
|
||||
},
|
||||
"node_modules/rehype-katex": {
|
||||
"version": "7.0.1",
|
||||
"resolved": "https://registry.npmjs.org/rehype-katex/-/rehype-katex-7.0.1.tgz",
|
||||
"integrity": "sha512-OiM2wrZ/wuhKkigASodFoo8wimG3H12LWQaH8qSPVJn9apWKFSH3YOCtbKpBorTVw/eI7cuT21XBbvwEswbIOA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/hast": "^3.0.0",
|
||||
"@types/katex": "^0.16.0",
|
||||
"hast-util-from-html-isomorphic": "^2.0.0",
|
||||
"hast-util-to-text": "^4.0.0",
|
||||
"katex": "^0.16.0",
|
||||
"unist-util-visit-parents": "^6.0.0",
|
||||
"vfile": "^6.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/rehype-parse": {
|
||||
"version": "9.0.0",
|
||||
"resolved": "https://registry.npmjs.org/rehype-parse/-/rehype-parse-9.0.0.tgz",
|
||||
@@ -14535,7 +14353,6 @@
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/rehype-prism-plus/-/rehype-prism-plus-2.0.0.tgz",
|
||||
"integrity": "sha512-FeM/9V2N7EvDZVdR2dqhAzlw5YI49m9Tgn7ZrYJeYHIahM6gcXpH0K1y2gNnKanZCydOMluJvX2cB9z3lhY8XQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"hast-util-to-string": "^3.0.0",
|
||||
"parse-numeric-range": "^1.3.0",
|
||||
@@ -14559,7 +14376,6 @@
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/remark-gfm/-/remark-gfm-4.0.0.tgz",
|
||||
"integrity": "sha512-U92vJgBPkbw4Zfu/IiW2oTZLSL3Zpv+uI7My2eq8JxKgqraFdU8YUGicEJCEgSbeaG+QDFqIcwwfMTOEelPxuA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/mdast": "^4.0.0",
|
||||
"mdast-util-gfm": "^3.0.0",
|
||||
@@ -14573,22 +14389,6 @@
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/remark-math": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/remark-math/-/remark-math-6.0.0.tgz",
|
||||
"integrity": "sha512-MMqgnP74Igy+S3WwnhQ7kqGlEerTETXMvJhrUzDikVZ2/uogJCb+WHUg97hK9/jcfc0dkD73s3LN8zU49cTEtA==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/mdast": "^4.0.0",
|
||||
"mdast-util-math": "^3.0.0",
|
||||
"micromark-extension-math": "^3.0.0",
|
||||
"unified": "^11.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/remark-parse": {
|
||||
"version": "11.0.0",
|
||||
"resolved": "https://registry.npmjs.org/remark-parse/-/remark-parse-11.0.0.tgz",
|
||||
@@ -15882,20 +15682,6 @@
|
||||
"unist-util-visit-parents": "^6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/unist-util-find-after": {
|
||||
"version": "5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/unist-util-find-after/-/unist-util-find-after-5.0.0.tgz",
|
||||
"integrity": "sha512-amQa0Ep2m6hE2g72AugUItjbuM8X8cGQnFoHk0pGfrFeT9GZhzN5SW8nRsiGKK7Aif4CrACPENkA6P/Lw6fHGQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@types/unist": "^3.0.0",
|
||||
"unist-util-is": "^6.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"type": "opencollective",
|
||||
"url": "https://opencollective.com/unified"
|
||||
}
|
||||
},
|
||||
"node_modules/unist-util-is": {
|
||||
"version": "6.0.0",
|
||||
"resolved": "https://registry.npmjs.org/unist-util-is/-/unist-util-is-6.0.0.tgz",
|
||||
|
||||
@@ -42,7 +42,6 @@
|
||||
"favicon-fetch": "^1.0.0",
|
||||
"formik": "^2.2.9",
|
||||
"js-cookie": "^3.0.5",
|
||||
"katex": "^0.16.17",
|
||||
"lodash": "^4.17.21",
|
||||
"lucide-react": "^0.454.0",
|
||||
"mdast-util-find-and-replace": "^3.0.1",
|
||||
@@ -60,10 +59,8 @@
|
||||
"react-markdown": "^9.0.1",
|
||||
"react-select": "^5.8.0",
|
||||
"recharts": "^2.13.1",
|
||||
"rehype-katex": "^7.0.1",
|
||||
"rehype-prism-plus": "^2.0.0",
|
||||
"remark-gfm": "^4.0.0",
|
||||
"remark-math": "^6.0.0",
|
||||
"semver": "^7.5.4",
|
||||
"sharp": "^0.33.5",
|
||||
"stripe": "^17.0.0",
|
||||
|
||||
@@ -50,7 +50,7 @@ export function SlackChannelConfigsTable({
|
||||
<TableHeader>
|
||||
<TableRow>
|
||||
<TableHead>Channel</TableHead>
|
||||
<TableHead>Assistant</TableHead>
|
||||
<TableHead>Persona</TableHead>
|
||||
<TableHead>Document Sets</TableHead>
|
||||
<TableHead>Delete</TableHead>
|
||||
</TableRow>
|
||||
@@ -84,6 +84,7 @@ export function SlackChannelConfigsTable({
|
||||
href={`/admin/assistants/${slackChannelConfig.persona.id}`}
|
||||
className="text-blue-500 flex hover:underline"
|
||||
>
|
||||
<FiArrowUpRight className="my-auto mr-1" />
|
||||
{slackChannelConfig.persona.name}
|
||||
</Link>
|
||||
) : (
|
||||
|
||||
@@ -81,13 +81,13 @@ export const getProviderIcon = (providerName: string, modelName?: string) => {
|
||||
}
|
||||
if (modelName?.toLowerCase().includes("phi")) {
|
||||
return MicrosoftIconSVG;
|
||||
}
|
||||
}
|
||||
if (modelName?.toLowerCase().includes("mistral")) {
|
||||
return MistralIcon;
|
||||
}
|
||||
}
|
||||
if (modelName?.toLowerCase().includes("llama")) {
|
||||
return MetaIcon;
|
||||
}
|
||||
}
|
||||
if (modelName?.toLowerCase().includes("gemini")) {
|
||||
return GeminiIcon;
|
||||
}
|
||||
|
||||
@@ -1,100 +0,0 @@
|
||||
"use client";
|
||||
import React, { useState } from "react";
|
||||
import { forgotPassword } from "./utils";
|
||||
import AuthFlowContainer from "@/components/auth/AuthFlowContainer";
|
||||
import CardSection from "@/components/admin/CardSection";
|
||||
import Title from "@/components/ui/title";
|
||||
import Text from "@/components/ui/text";
|
||||
import Link from "next/link";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Form, Formik } from "formik";
|
||||
import * as Yup from "yup";
|
||||
import { TextFormField } from "@/components/admin/connectors/Field";
|
||||
import { usePopup } from "@/components/admin/connectors/Popup";
|
||||
import { Spinner } from "@/components/Spinner";
|
||||
import { redirect } from "next/navigation";
|
||||
import { NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED } from "@/lib/constants";
|
||||
|
||||
const ForgotPasswordPage: React.FC = () => {
|
||||
const { popup, setPopup } = usePopup();
|
||||
const [isWorking, setIsWorking] = useState(false);
|
||||
|
||||
if (!NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED) {
|
||||
redirect("/auth/login");
|
||||
}
|
||||
|
||||
return (
|
||||
<AuthFlowContainer>
|
||||
<div className="flex flex-col w-full justify-center">
|
||||
<CardSection className="mt-4 w-full">
|
||||
{" "}
|
||||
<div className="flex">
|
||||
<Title className="mb-2 mx-auto font-bold">Forgot Password</Title>
|
||||
</div>
|
||||
{isWorking && <Spinner />}
|
||||
{popup}
|
||||
<Formik
|
||||
initialValues={{
|
||||
email: "",
|
||||
}}
|
||||
validationSchema={Yup.object().shape({
|
||||
email: Yup.string().email().required(),
|
||||
})}
|
||||
onSubmit={async (values) => {
|
||||
setIsWorking(true);
|
||||
try {
|
||||
await forgotPassword(values.email);
|
||||
setPopup({
|
||||
type: "success",
|
||||
message:
|
||||
"Password reset email sent. Please check your inbox.",
|
||||
});
|
||||
} catch (error) {
|
||||
const errorMessage =
|
||||
error instanceof Error
|
||||
? error.message
|
||||
: "An error occurred. Please try again.";
|
||||
setPopup({
|
||||
type: "error",
|
||||
message: errorMessage,
|
||||
});
|
||||
} finally {
|
||||
setIsWorking(false);
|
||||
}
|
||||
}}
|
||||
>
|
||||
{({ isSubmitting }) => (
|
||||
<Form className="w-full flex flex-col items-stretch mt-2">
|
||||
<TextFormField
|
||||
name="email"
|
||||
label="Email"
|
||||
type="email"
|
||||
placeholder="email@yourcompany.com"
|
||||
/>
|
||||
|
||||
<div className="flex">
|
||||
<Button
|
||||
type="submit"
|
||||
disabled={isSubmitting}
|
||||
className="mx-auto w-full"
|
||||
>
|
||||
Reset Password
|
||||
</Button>
|
||||
</div>
|
||||
</Form>
|
||||
)}
|
||||
</Formik>
|
||||
<div className="flex">
|
||||
<Text className="mt-4 mx-auto">
|
||||
<Link href="/auth/login" className="text-link font-medium">
|
||||
Back to Login
|
||||
</Link>
|
||||
</Text>
|
||||
</div>
|
||||
</CardSection>
|
||||
</div>
|
||||
</AuthFlowContainer>
|
||||
);
|
||||
};
|
||||
|
||||
export default ForgotPasswordPage;
|
||||
@@ -1,33 +0,0 @@
|
||||
export const forgotPassword = async (email: string): Promise<void> => {
|
||||
const response = await fetch(`/api/auth/forgot-password`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ email }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const error = await response.json();
|
||||
const errorMessage =
|
||||
error?.detail || "An error occurred during password reset.";
|
||||
throw new Error(errorMessage);
|
||||
}
|
||||
};
|
||||
|
||||
export const resetPassword = async (
|
||||
token: string,
|
||||
password: string
|
||||
): Promise<void> => {
|
||||
const response = await fetch(`/api/auth/reset-password`, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({ token, password }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error("Failed to reset password");
|
||||
}
|
||||
};
|
||||
@@ -10,8 +10,6 @@ import { requestEmailVerification } from "../lib";
|
||||
import { useState } from "react";
|
||||
import { Spinner } from "@/components/Spinner";
|
||||
import { set } from "lodash";
|
||||
import { NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED } from "@/lib/constants";
|
||||
import Link from "next/link";
|
||||
|
||||
export function EmailPasswordForm({
|
||||
isSignup = false,
|
||||
@@ -112,21 +110,15 @@ export function EmailPasswordForm({
|
||||
placeholder="**************"
|
||||
/>
|
||||
|
||||
{NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED && !isSignup && (
|
||||
<Link
|
||||
href="/auth/forgot-password"
|
||||
className="text-sm text-link font-medium whitespace-nowrap"
|
||||
<div className="flex">
|
||||
<Button
|
||||
type="submit"
|
||||
disabled={isSubmitting}
|
||||
className="mx-auto w-full"
|
||||
>
|
||||
Forgot Password?
|
||||
</Link>
|
||||
)}
|
||||
<Button
|
||||
type="submit"
|
||||
disabled={isSubmitting}
|
||||
className="mx-auto w-full"
|
||||
>
|
||||
{isSignup ? "Sign Up" : "Log In"}
|
||||
</Button>
|
||||
{isSignup ? "Sign Up" : "Log In"}
|
||||
</Button>
|
||||
</div>
|
||||
</Form>
|
||||
)}
|
||||
</Formik>
|
||||
|
||||
@@ -16,7 +16,6 @@ import { LoginText } from "./LoginText";
|
||||
import { getSecondsUntilExpiration } from "@/lib/time";
|
||||
import AuthFlowContainer from "@/components/auth/AuthFlowContainer";
|
||||
import CardSection from "@/components/admin/CardSection";
|
||||
import { NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED } from "@/lib/constants";
|
||||
|
||||
const Page = async (props: {
|
||||
searchParams?: Promise<{ [key: string]: string | string[] | undefined }>;
|
||||
@@ -102,24 +101,16 @@ const Page = async (props: {
|
||||
</div>
|
||||
<EmailPasswordForm shouldVerify={true} nextUrl={nextUrl} />
|
||||
|
||||
<div className="flex mt-4 justify-between">
|
||||
<Link
|
||||
href={`/auth/signup${
|
||||
searchParams?.next ? `?next=${searchParams.next}` : ""
|
||||
}`}
|
||||
className="text-link font-medium"
|
||||
>
|
||||
Create an account
|
||||
</Link>
|
||||
|
||||
{NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED && (
|
||||
<div className="flex">
|
||||
<Text className="mt-4 mx-auto">
|
||||
Don't have an account?{" "}
|
||||
<Link
|
||||
href="/auth/forgot-password"
|
||||
href={`/auth/signup${searchParams?.next ? `?next=${searchParams.next}` : ""}`}
|
||||
className="text-link font-medium"
|
||||
>
|
||||
Reset Password
|
||||
Create an account
|
||||
</Link>
|
||||
)}
|
||||
</Text>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
@@ -132,13 +123,11 @@ const Page = async (props: {
|
||||
</Title>
|
||||
</div>
|
||||
<EmailPasswordForm nextUrl={nextUrl} />
|
||||
<div className="flex flex-col gap-y-2 items-center">
|
||||
<Text className="mt-4 ">
|
||||
<div className="flex">
|
||||
<Text className="mt-4 mx-auto">
|
||||
Don't have an account?{" "}
|
||||
<Link
|
||||
href={`/auth/signup${
|
||||
searchParams?.next ? `?next=${searchParams.next}` : ""
|
||||
}`}
|
||||
href={`/auth/signup${searchParams?.next ? `?next=${searchParams.next}` : ""}`}
|
||||
className="text-link font-medium"
|
||||
>
|
||||
Create an account
|
||||
|
||||
@@ -1,117 +0,0 @@
|
||||
"use client";
|
||||
import React, { useState } from "react";
|
||||
import { resetPassword } from "../forgot-password/utils";
|
||||
import AuthFlowContainer from "@/components/auth/AuthFlowContainer";
|
||||
import CardSection from "@/components/admin/CardSection";
|
||||
import Title from "@/components/ui/title";
|
||||
import Text from "@/components/ui/text";
|
||||
import Link from "next/link";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Form, Formik } from "formik";
|
||||
import * as Yup from "yup";
|
||||
import { TextFormField } from "@/components/admin/connectors/Field";
|
||||
import { usePopup } from "@/components/admin/connectors/Popup";
|
||||
import { Spinner } from "@/components/Spinner";
|
||||
import { redirect, useSearchParams } from "next/navigation";
|
||||
import { NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED } from "@/lib/constants";
|
||||
|
||||
const ResetPasswordPage: React.FC = () => {
|
||||
const { popup, setPopup } = usePopup();
|
||||
const [isWorking, setIsWorking] = useState(false);
|
||||
const searchParams = useSearchParams();
|
||||
const token = searchParams.get("token");
|
||||
|
||||
if (!NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED) {
|
||||
redirect("/auth/login");
|
||||
}
|
||||
|
||||
return (
|
||||
<AuthFlowContainer>
|
||||
<div className="flex flex-col w-full justify-center">
|
||||
<CardSection className="mt-4 w-full">
|
||||
<div className="flex">
|
||||
<Title className="mb-2 mx-auto font-bold">Reset Password</Title>
|
||||
</div>
|
||||
{isWorking && <Spinner />}
|
||||
{popup}
|
||||
<Formik
|
||||
initialValues={{
|
||||
password: "",
|
||||
confirmPassword: "",
|
||||
}}
|
||||
validationSchema={Yup.object().shape({
|
||||
password: Yup.string().required("Password is required"),
|
||||
confirmPassword: Yup.string()
|
||||
.oneOf([Yup.ref("password"), undefined], "Passwords must match")
|
||||
.required("Confirm Password is required"),
|
||||
})}
|
||||
onSubmit={async (values) => {
|
||||
if (!token) {
|
||||
setPopup({
|
||||
type: "error",
|
||||
message: "Invalid or missing reset token.",
|
||||
});
|
||||
return;
|
||||
}
|
||||
setIsWorking(true);
|
||||
try {
|
||||
await resetPassword(token, values.password);
|
||||
setPopup({
|
||||
type: "success",
|
||||
message:
|
||||
"Password reset successfully. Redirecting to login...",
|
||||
});
|
||||
setTimeout(() => {
|
||||
redirect("/auth/login");
|
||||
}, 1000);
|
||||
} catch (error) {
|
||||
setPopup({
|
||||
type: "error",
|
||||
message: "An error occurred. Please try again.",
|
||||
});
|
||||
} finally {
|
||||
setIsWorking(false);
|
||||
}
|
||||
}}
|
||||
>
|
||||
{({ isSubmitting }) => (
|
||||
<Form className="w-full flex flex-col items-stretch mt-2">
|
||||
<TextFormField
|
||||
name="password"
|
||||
label="New Password"
|
||||
type="password"
|
||||
placeholder="Enter your new password"
|
||||
/>
|
||||
<TextFormField
|
||||
name="confirmPassword"
|
||||
label="Confirm New Password"
|
||||
type="password"
|
||||
placeholder="Confirm your new password"
|
||||
/>
|
||||
|
||||
<div className="flex">
|
||||
<Button
|
||||
type="submit"
|
||||
disabled={isSubmitting}
|
||||
className="mx-auto w-full"
|
||||
>
|
||||
Reset Password
|
||||
</Button>
|
||||
</div>
|
||||
</Form>
|
||||
)}
|
||||
</Formik>
|
||||
<div className="flex">
|
||||
<Text className="mt-4 mx-auto">
|
||||
<Link href="/auth/login" className="text-link font-medium">
|
||||
Back to Login
|
||||
</Link>
|
||||
</Text>
|
||||
</div>
|
||||
</CardSection>
|
||||
</div>
|
||||
</AuthFlowContainer>
|
||||
);
|
||||
};
|
||||
|
||||
export default ResetPasswordPage;
|
||||
@@ -2105,7 +2105,6 @@ export function ChatPage({
|
||||
}
|
||||
/>
|
||||
)}
|
||||
|
||||
{sharingModalVisible && chatSessionIdRef.current !== null && (
|
||||
<ShareChatSessionModal
|
||||
message={message}
|
||||
|
||||
@@ -7,7 +7,7 @@ import { DocumentUpdatedAtBadge from "@/components/search/DocumentUpdatedAtBad
import { MetadataBadge } from "@/components/MetadataBadge";
import { WebResultIcon } from "@/components/WebResultIcon";
import { Dispatch, SetStateAction } from "react";
import { openDocument } from "@/lib/search/utils";
import { ValidSources } from "@/lib/types";

interface DocumentDisplayProps {
  closeSidebar: () => void;
@@ -16,7 +16,6 @@ interface DocumentDisplayProps {
  isSelected: boolean;
  handleSelect: (documentId: string) => void;
  tokenLimitReached: boolean;
  hideSelection?: boolean;
  setPresentingDocument: Dispatch<SetStateAction<OnyxDocument | null>>;
}

@@ -63,7 +62,6 @@ export function ChatDocumentDisplay({
  closeSidebar,
  document,
  modal,
  hideSelection,
  isSelected,
  handleSelect,
  tokenLimitReached,
@@ -75,19 +73,25 @@ export function ChatDocumentDisplay({
    return null;
  }

  const handleViewFile = async () => {
    if (document.source_type == ValidSources.File && setPresentingDocument) {
      setPresentingDocument(document);
    } else if (document.link) {
      window.open(document.link, "_blank");
    }
  };

  const hasMetadata =
    document.updated_at || Object.keys(document.metadata).length > 0;
  return (
    <div
      className={`max-w-[400px] opacity-100 ${modal ? "w-[90vw]" : "w-full"}`}
    >
    <div className={`opacity-100 ${modal ? "w-[90vw]" : "w-full"}`}>
      <div
        className={`flex relative flex-col gap-0.5 rounded-xl mx-2 my-1 ${
          isSelected ? "bg-gray-200" : "hover:bg-background-125"
        }`}
      >
        <button
          onClick={() => openDocument(document, setPresentingDocument)}
          onClick={handleViewFile}
          className="cursor-pointer text-left flex flex-col px-2 py-1.5"
        >
          <div className="line-clamp-1 mb-1 flex h-6 items-center gap-2 text-xs">
@@ -119,7 +123,7 @@ export function ChatDocumentDisplay({
          )}
        </div>
        <div className="absolute top-2 right-2">
          {!isInternet && !hideSelection && (
          {!isInternet && (
            <DocumentSelector
              isSelected={isSelected}
              handleSelect={() => handleSelect(document.document_id)}
@@ -17,7 +17,7 @@ import { SourceSelector } from "../shared_chat_search/SearchFilters";
|
||||
import { XIcon } from "@/components/icons/icons";
|
||||
|
||||
interface ChatFiltersProps {
|
||||
filterManager?: FilterManager;
|
||||
filterManager: FilterManager;
|
||||
closeSidebar: () => void;
|
||||
selectedMessage: Message | null;
|
||||
selectedDocuments: OnyxDocument[] | null;
|
||||
@@ -27,7 +27,6 @@ interface ChatFiltersProps {
|
||||
maxTokens: number;
|
||||
initialWidth: number;
|
||||
isOpen: boolean;
|
||||
isSharedChat?: boolean;
|
||||
modal: boolean;
|
||||
ccPairs: CCPairBasicInfo[];
|
||||
tags: Tag[];
|
||||
@@ -49,7 +48,6 @@ export const ChatFilters = forwardRef<HTMLDivElement, ChatFiltersProps>(
|
||||
selectedDocumentTokens,
|
||||
maxTokens,
|
||||
initialWidth,
|
||||
isSharedChat,
|
||||
isOpen,
|
||||
ccPairs,
|
||||
tags,
|
||||
@@ -81,14 +79,13 @@ export const ChatFilters = forwardRef<HTMLDivElement, ChatFiltersProps>(
|
||||
const dedupedDocuments = removeDuplicateDocs(currentDocuments || []);
|
||||
|
||||
const tokenLimitReached = selectedDocumentTokens > maxTokens - 75;
|
||||
console.log("SELECTED MESSAGE is", selectedMessage);
|
||||
|
||||
const hasSelectedDocuments = selectedDocumentIds.length > 0;
|
||||
|
||||
return (
|
||||
<div
|
||||
id="onyx-chat-sidebar"
|
||||
className={`relative bg-background max-w-full ${
|
||||
className={`relative max-w-full ${
|
||||
!modal ? "border-l h-full border-sidebar-border" : ""
|
||||
}`}
|
||||
onClick={(e) => {
|
||||
@@ -125,10 +122,10 @@ export const ChatFilters = forwardRef<HTMLDivElement, ChatFiltersProps>(
|
||||
<div className="overflow-y-auto -mx-1 sm:mx-0 flex-grow gap-y-0 default-scrollbar dark-scrollbar flex flex-col">
|
||||
{showFilters ? (
|
||||
<SourceSelector
|
||||
{...filterManager!}
|
||||
modal={modal}
|
||||
tagsOnLeft={true}
|
||||
filtersUntoggled={false}
|
||||
{...filterManager}
|
||||
availableDocumentSets={documentSets}
|
||||
existingSources={ccPairs.map((ccPair) => ccPair.source)}
|
||||
availableTags={tags}
|
||||
@@ -160,7 +157,6 @@ export const ChatFilters = forwardRef<HTMLDivElement, ChatFiltersProps>(
|
||||
)!
|
||||
);
|
||||
}}
|
||||
hideSelection={isSharedChat}
|
||||
tokenLimitReached={tokenLimitReached}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -102,8 +102,6 @@ export interface BackendChatSession {
|
||||
description: string;
|
||||
persona_id: number;
|
||||
persona_name: string;
|
||||
persona_icon_color: string | null;
|
||||
persona_icon_shape: number | null;
|
||||
messages: BackendMessage[];
|
||||
time_created: string;
|
||||
shared_status: ChatSessionSharedStatus;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Citation } from "@/components/search/results/Citation";
|
||||
import { WebResultIcon } from "@/components/WebResultIcon";
|
||||
import { LoadedOnyxDocument, OnyxDocument } from "@/lib/search/interfaces";
|
||||
import { LoadedOnyxDocument } from "@/lib/search/interfaces";
|
||||
import { getSourceMetadata, SOURCE_METADATA_MAP } from "@/lib/sources";
|
||||
import { ValidSources } from "@/lib/types";
|
||||
import React, { memo } from "react";
|
||||
@@ -9,15 +9,7 @@ import { SlackIcon } from "@/components/icons/icons";
|
||||
import { SourceIcon } from "@/components/SourceIcon";
|
||||
|
||||
export const MemoizedAnchor = memo(
|
||||
({
|
||||
docs,
|
||||
updatePresentingDocument,
|
||||
children,
|
||||
}: {
|
||||
docs?: OnyxDocument[] | null;
|
||||
updatePresentingDocument: (doc: OnyxDocument) => void;
|
||||
children: React.ReactNode;
|
||||
}) => {
|
||||
({ docs, updatePresentingDocument, children }: any) => {
|
||||
const value = children?.toString();
|
||||
if (value?.startsWith("[") && value?.endsWith("]")) {
|
||||
const match = value.match(/\[(\d+)\]/);
|
||||
@@ -29,11 +21,9 @@ export const MemoizedAnchor = memo(
|
||||
? new URL(associatedDoc.link).origin + "/favicon.ico"
|
||||
: "";
|
||||
|
||||
const icon =
|
||||
(associatedDoc && (
|
||||
<SourceIcon sourceType={associatedDoc?.source_type} iconSize={18} />
|
||||
)) ||
|
||||
null;
|
||||
const icon = (
|
||||
<SourceIcon sourceType={associatedDoc?.source_type} iconSize={18} />
|
||||
);
|
||||
|
||||
return (
|
||||
<MemoizedLink
|
||||
|
||||
@@ -66,17 +66,13 @@ import RegenerateOption from "../RegenerateOption";
|
||||
import { LlmOverride } from "@/lib/hooks";
|
||||
import { ContinueGenerating } from "./ContinueMessage";
|
||||
import { MemoizedAnchor, MemoizedParagraph } from "./MemoizedTextComponents";
|
||||
import { extractCodeText, preprocessLaTeX } from "./codeUtils";
|
||||
import { extractCodeText } from "./codeUtils";
|
||||
import ToolResult from "../../../components/tools/ToolResult";
|
||||
import CsvContent from "../../../components/tools/CSVContent";
|
||||
import SourceCard, {
|
||||
SeeMoreBlock,
|
||||
} from "@/components/chat_search/sources/SourceCard";
|
||||
|
||||
import remarkMath from "remark-math";
|
||||
import rehypeKatex from "rehype-katex";
|
||||
import "katex/dist/katex.min.css";
|
||||
|
||||
const TOOLS_WITH_CUSTOM_HANDLING = [
|
||||
SEARCH_TOOL_NAME,
|
||||
INTERNET_SEARCH_TOOL_NAME,
|
||||
@@ -228,7 +224,6 @@ export const AIMessage = ({
|
||||
setPresentingDocument?: (document: OnyxDocument) => void;
|
||||
}) => {
|
||||
const toolCallGenerating = toolCall && !toolCall.tool_result;
|
||||
|
||||
const processContent = (content: string | JSX.Element) => {
|
||||
if (typeof content !== "string") {
|
||||
return content;
|
||||
@@ -247,16 +242,12 @@ export const AIMessage = ({
|
||||
|
||||
const lastMatch = matches[matches.length - 1];
|
||||
if (!lastMatch.endsWith("```")) {
|
||||
return preprocessLaTeX(content);
|
||||
return content;
|
||||
}
|
||||
}
|
||||
|
||||
return (
|
||||
preprocessLaTeX(content) +
|
||||
(!isComplete && !toolCallGenerating ? " [*]() " : "")
|
||||
);
|
||||
return content + (!isComplete && !toolCallGenerating ? " [*]() " : "");
|
||||
};
|
||||
|
||||
const finalContent = processContent(content as string);
|
||||
|
||||
const [isRegenerateHovered, setIsRegenerateHovered] = useState(false);
|
||||
@@ -322,7 +313,7 @@ export const AIMessage = ({
|
||||
const anchorCallback = useCallback(
|
||||
(props: any) => (
|
||||
<MemoizedAnchor
|
||||
updatePresentingDocument={setPresentingDocument!}
|
||||
updatePresentingDocument={setPresentingDocument}
|
||||
docs={docs}
|
||||
>
|
||||
{props.children}
|
||||
@@ -365,8 +356,8 @@ export const AIMessage = ({
|
||||
<ReactMarkdown
|
||||
className="prose max-w-full text-base"
|
||||
components={markdownComponents}
|
||||
remarkPlugins={[remarkGfm, remarkMath]}
|
||||
rehypePlugins={[[rehypePrism, { ignoreMissing: true }], rehypeKatex]}
|
||||
remarkPlugins={[remarkGfm]}
|
||||
rehypePlugins={[[rehypePrism, { ignoreMissing: true }]]}
|
||||
>
|
||||
{finalContent as string}
|
||||
</ReactMarkdown>
|
||||
@@ -378,7 +369,6 @@ export const AIMessage = ({
|
||||
onMessageSelection &&
|
||||
otherMessagesCanSwitchTo &&
|
||||
otherMessagesCanSwitchTo.length > 1;
|
||||
|
||||
return (
|
||||
<div
|
||||
id="onyx-ai-message"
|
||||
@@ -403,16 +393,21 @@ export const AIMessage = ({
|
||||
<div className="max-w-message-max break-words">
|
||||
{!toolCall || toolCall.tool_name === SEARCH_TOOL_NAME ? (
|
||||
<>
|
||||
{query !== undefined && !retrievalDisabled && (
|
||||
<div className="mb-1">
|
||||
<SearchSummary
|
||||
index={index || 0}
|
||||
query={query}
|
||||
finished={toolCall?.tool_result != undefined}
|
||||
handleSearchQueryEdit={handleSearchQueryEdit}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{query !== undefined &&
|
||||
handleShowRetrieved !== undefined &&
|
||||
!retrievalDisabled && (
|
||||
<div className="mb-1">
|
||||
<SearchSummary
|
||||
index={index || 0}
|
||||
query={query}
|
||||
finished={toolCall?.tool_result != undefined}
|
||||
hasDocs={hasDocs || false}
|
||||
messageId={messageId}
|
||||
handleShowRetrieved={handleShowRetrieved}
|
||||
handleSearchQueryEdit={handleSearchQueryEdit}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{handleForceSearch &&
|
||||
content &&
|
||||
query === undefined &&
|
||||
|
||||
@@ -43,12 +43,18 @@ export function ShowHideDocsButton({
|
||||
export function SearchSummary({
|
||||
index,
|
||||
query,
|
||||
hasDocs,
|
||||
finished,
|
||||
messageId,
|
||||
handleShowRetrieved,
|
||||
handleSearchQueryEdit,
|
||||
}: {
|
||||
index: number;
|
||||
finished: boolean;
|
||||
query: string;
|
||||
hasDocs: boolean;
|
||||
messageId: number | null;
|
||||
handleShowRetrieved: (messageId: number | null) => void;
|
||||
handleSearchQueryEdit?: (query: string) => void;
|
||||
}) {
|
||||
const [isEditing, setIsEditing] = useState(false);
|
||||
|
||||
@@ -59,18 +59,3 @@ export function extractCodeText(

  return codeText || "";
}

// This is a temporary solution to preprocess LaTeX in LLM output
export const preprocessLaTeX = (content: string) => {
  // Replace block-level LaTeX delimiters \[ \] with $$ $$
  const blockProcessedContent = content.replace(
    /\\\[([\s\S]*?)\\\]/g,
    (_, equation) => `$$${equation}$$`
  );
  // Replace inline LaTeX delimiters \( \) with $ $
  const inlineProcessedContent = blockProcessedContent.replace(
    /\\\(([\s\S]*?)\\\)/g,
    (_, equation) => `$${equation}$`
  );
  return inlineProcessedContent;
};
@@ -34,7 +34,7 @@ async function generateShareLink(chatSessionId: string) {
|
||||
return null;
|
||||
}
|
||||
|
||||
async function generateSeedLink(
|
||||
async function generateCloneLink(
|
||||
message?: string,
|
||||
assistantId?: number,
|
||||
modelOverride?: LlmOverride
|
||||
@@ -115,7 +115,7 @@ export function ShareChatSessionModal({
|
||||
{shareLink ? (
|
||||
<div>
|
||||
<Text>
|
||||
This chat session is currently shared. Anyone in your
|
||||
This chat session is currently shared. Anyone at your
|
||||
organization can view the message history using the following
|
||||
link:
|
||||
</Text>
|
||||
@@ -157,8 +157,10 @@ export function ShareChatSessionModal({
|
||||
) : (
|
||||
<div>
|
||||
<Callout type="warning" title="Warning" className="mb-4">
|
||||
Please make sure that all content in this chat is safe to
|
||||
share with the whole organization.
|
||||
Ensure that all content in the chat is safe to share with the
|
||||
whole organization. The content of the retrieved documents
|
||||
will not be visible, but the names of cited documents as well
|
||||
as the AI and human messages will be visible.
|
||||
</Callout>
|
||||
<div className="flex w-full justify-between">
|
||||
<Button
|
||||
@@ -192,9 +194,10 @@ export function ShareChatSessionModal({
|
||||
|
||||
<Separator className="my-4" />
|
||||
<div className="mb-4">
|
||||
<Callout type="notice" title="Seed New Chat">
|
||||
Generate a link to a new chat session with the same settings as
|
||||
this chat (including the assistant and model).
|
||||
<Callout type="notice" title="Clone Chat">
|
||||
Generate a link to clone this chat session with the current query.
|
||||
This allows others to start a new chat with the same initial
|
||||
message and settings.
|
||||
</Callout>
|
||||
</div>
|
||||
<div className="flex w-full justify-between">
|
||||
@@ -204,18 +207,18 @@ export function ShareChatSessionModal({
                // NOTE: for "insecure" non-https setup, the `navigator.clipboard.writeText` may fail
                // as the browser may not allow the clipboard to be accessed.
                try {
                  const seedLink = await generateSeedLink(
                  const cloneLink = await generateCloneLink(
                    message,
                    assistantId,
                    modelOverride
                  );
                  if (!seedLink) {
                  if (!cloneLink) {
                    setPopup({
                      message: "Failed to generate seed link",
                      message: "Failed to generate clone link",
                      type: "error",
                    });
                  } else {
                    navigator.clipboard.writeText(seedLink);
                    navigator.clipboard.writeText(cloneLink);
                    setPopup({
                      message: "Link copied to clipboard!",
                      type: "success",
@@ -229,7 +232,7 @@ export function ShareChatSessionModal({
                size="sm"
                variant="secondary"
              >
                Generate and Copy Seed Link
                Generate and Copy Clone Link
              </Button>
            </div>
          </>
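The NOTE comment in this hunk flags that `navigator.clipboard.writeText` can be blocked on insecure (non-HTTPS) origins. A hedged sketch of a copy helper with a legacy fallback; the temporary textarea plus `document.execCommand("copy")` path is a common workaround and is not part of this compare:

// Hypothetical helper; not part of this PR.
export async function copyTextToClipboard(text: string): Promise<boolean> {
  // Prefer the async Clipboard API when the page is a secure context.
  if (navigator.clipboard && window.isSecureContext) {
    try {
      await navigator.clipboard.writeText(text);
      return true;
    } catch {
      // Fall through to the legacy path below.
    }
  }
  // Legacy fallback: select the text in an off-screen textarea and copy it.
  const textarea = document.createElement("textarea");
  textarea.value = text;
  textarea.style.position = "fixed";
  textarea.style.opacity = "0";
  document.body.appendChild(textarea);
  textarea.select();
  const copied = document.execCommand("copy");
  document.body.removeChild(textarea);
  return copied;
}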
@@ -6,7 +6,6 @@ import { BackendChatSession } from "../../interfaces";
|
||||
import {
|
||||
buildLatestMessageChain,
|
||||
getCitedDocumentsFromMessage,
|
||||
getHumanAndAIMessageFromMessageNumber,
|
||||
processRawChatHistory,
|
||||
} from "../../lib";
|
||||
import { AIMessage, HumanMessage } from "../../message/Messages";
|
||||
@@ -20,17 +19,8 @@ import { Persona } from "@/app/admin/assistants/interfaces";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { OnyxDocument } from "@/lib/search/interfaces";
|
||||
import TextView from "@/components/chat_search/TextView";
|
||||
import { ChatFilters } from "../../documentSidebar/ChatFilters";
|
||||
import { Modal } from "@/components/Modal";
|
||||
import FunctionalHeader from "@/components/chat_search/Header";
|
||||
import FixedLogo from "../../shared_chat_search/FixedLogo";
|
||||
import { useDocumentSelection } from "../../useDocumentSelection";
|
||||
|
||||
function BackToOnyxButton({
|
||||
documentSidebarToggled,
|
||||
}: {
|
||||
documentSidebarToggled: boolean;
|
||||
}) {
|
||||
function BackToOnyxButton() {
|
||||
const router = useRouter();
|
||||
const enterpriseSettings = useContext(SettingsContext)?.enterpriseSettings;
|
||||
|
||||
@@ -41,17 +31,6 @@ function BackToOnyxButton({
|
||||
Back to {enterpriseSettings?.application_name || "Onyx Chat"}
|
||||
</Button>
|
||||
</div>
|
||||
<div
|
||||
style={{ transition: "width 0.30s ease-out" }}
|
||||
className={`
|
||||
flex-none
|
||||
overflow-y-hidden
|
||||
transition-all
|
||||
duration-300
|
||||
ease-in-out
|
||||
${documentSidebarToggled ? "w-[400px]" : "w-[0px]"}
|
||||
`}
|
||||
></div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -63,18 +42,10 @@ export function SharedChatDisplay({
|
||||
chatSession: BackendChatSession | null;
|
||||
persona: Persona;
|
||||
}) {
|
||||
const settings = useContext(SettingsContext);
|
||||
const [documentSidebarToggled, setDocumentSidebarToggled] = useState(false);
|
||||
const [selectedMessageForDocDisplay, setSelectedMessageForDocDisplay] =
|
||||
useState<number | null>(null);
|
||||
const [isReady, setIsReady] = useState(false);
|
||||
const [presentingDocument, setPresentingDocument] =
|
||||
useState<OnyxDocument | null>(null);
|
||||
|
||||
const toggleDocumentSidebar = () => {
|
||||
setDocumentSidebarToggled(!documentSidebarToggled);
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
Prism.highlightAll();
|
||||
setIsReady(true);
|
||||
@@ -87,7 +58,7 @@ export function SharedChatDisplay({
|
||||
Did not find a shared chat with the specified ID.
|
||||
</Callout>
|
||||
</div>
|
||||
<BackToOnyxButton documentSidebarToggled={documentSidebarToggled} />
|
||||
<BackToOnyxButton />
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -104,215 +75,62 @@ export function SharedChatDisplay({
|
||||
onClose={() => setPresentingDocument(null)}
|
||||
/>
|
||||
)}
|
||||
{documentSidebarToggled && settings?.isMobile && (
|
||||
<div className="md:hidden">
|
||||
<Modal noPadding noScroll>
|
||||
<ChatFilters
|
||||
isSharedChat={true}
|
||||
selectedMessage={
|
||||
selectedMessageForDocDisplay
|
||||
? messages.find(
|
||||
(message) =>
|
||||
message.messageId === selectedMessageForDocDisplay
|
||||
) || null
|
||||
: null
|
||||
}
|
||||
toggleDocumentSelection={() => {
|
||||
setDocumentSidebarToggled(true);
|
||||
}}
|
||||
selectedDocuments={[]}
|
||||
clearSelectedDocuments={() => {}}
|
||||
selectedDocumentTokens={0}
|
||||
maxTokens={0}
|
||||
initialWidth={400}
|
||||
isOpen={true}
|
||||
setPresentingDocument={setPresentingDocument}
|
||||
modal={true}
|
||||
ccPairs={[]}
|
||||
tags={[]}
|
||||
documentSets={[]}
|
||||
showFilters={false}
|
||||
closeSidebar={() => {
|
||||
setDocumentSidebarToggled(false);
|
||||
}}
|
||||
/>
|
||||
</Modal>
|
||||
</div>
|
||||
)}
|
||||
<div className="w-full h-[100dvh] overflow-hidden">
|
||||
<div className="flex max-h-full overflow-hidden pb-[72px]">
|
||||
<div className="flex w-full overflow-hidden overflow-y-scroll">
|
||||
<div className="w-full h-full flex-col flex max-w-message-max mx-auto">
|
||||
<div className="px-5 pt-8">
|
||||
<h1 className="text-3xl text-strong font-bold">
|
||||
{chatSession.description ||
|
||||
`Chat ${chatSession.chat_session_id}`}
|
||||
</h1>
|
||||
<p className="text-emphasis">
|
||||
{humanReadableFormat(chatSession.time_created)}
|
||||
</p>
|
||||
|
||||
<div className="fixed inset-0 flex flex-col text-default">
|
||||
<div className="h-[100dvh] px-2 overflow-y-hidden">
|
||||
<div className="w-full h-[100dvh] flex flex-col overflow-hidden">
|
||||
{!settings?.isMobile && (
|
||||
<div
|
||||
style={{ transition: "width 0.30s ease-out" }}
|
||||
className={`
|
||||
flex-none
|
||||
fixed
|
||||
right-0
|
||||
z-[1000]
|
||||
bg-background
|
||||
h-screen
|
||||
transition-all
|
||||
bg-opacity-80
|
||||
duration-300
|
||||
ease-in-out
|
||||
bg-transparent
|
||||
transition-all
|
||||
bg-opacity-80
|
||||
duration-300
|
||||
ease-in-out
|
||||
h-full
|
||||
${documentSidebarToggled ? "w-[400px]" : "w-[0px]"}
|
||||
`}
|
||||
>
|
||||
<ChatFilters
|
||||
modal={false}
|
||||
isSharedChat={true}
|
||||
selectedMessage={
|
||||
selectedMessageForDocDisplay
|
||||
? messages.find(
|
||||
(message) =>
|
||||
message.messageId === selectedMessageForDocDisplay
|
||||
) || null
|
||||
: null
|
||||
}
|
||||
toggleDocumentSelection={() => {
|
||||
setDocumentSidebarToggled(true);
|
||||
}}
|
||||
clearSelectedDocuments={() => {}}
|
||||
selectedDocumentTokens={0}
|
||||
maxTokens={0}
|
||||
initialWidth={400}
|
||||
isOpen={true}
|
||||
setPresentingDocument={setPresentingDocument}
|
||||
ccPairs={[]}
|
||||
tags={[]}
|
||||
documentSets={[]}
|
||||
showFilters={false}
|
||||
closeSidebar={() => {
|
||||
setDocumentSidebarToggled(false);
|
||||
}}
|
||||
selectedDocuments={[]}
|
||||
/>
|
||||
<Separator />
|
||||
</div>
|
||||
)}
|
||||
<div className="flex mobile:hidden max-h-full overflow-hidden ">
|
||||
<FunctionalHeader
|
||||
documentSidebarToggled={documentSidebarToggled}
|
||||
sidebarToggled={false}
|
||||
toggleSidebar={() => {}}
|
||||
page="chat"
|
||||
reset={() => {}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="flex w-full overflow-hidden overflow-y-scroll">
|
||||
<div className="w-full h-full flex-col flex max-w-message-max mx-auto">
|
||||
<div className="fixed z-10 w-full ">
|
||||
<div className="bg-background relative px-5 pt-4 w-full">
|
||||
<h1 className="text-3xl text-strong font-bold">
|
||||
{chatSession.description ||
|
||||
`Chat ${chatSession.chat_session_id}`}
|
||||
</h1>
|
||||
<p className=" text-emphasis">
|
||||
{humanReadableFormat(chatSession.time_created)}
|
||||
</p>
|
||||
<div
|
||||
className={`
|
||||
h-full absolute top-0 z-10 w-full sm:w-[90%] lg:w-[70%]
|
||||
bg-gradient-to-b via-50% z-[-1] from-background via-background to-background/10 flex
|
||||
transition-all duration-300 ease-in-out
|
||||
${
|
||||
documentSidebarToggled
|
||||
? "left-[200px] transform -translate-x-[calc(50%+100px)]"
|
||||
: "left-1/2 transform -translate-x-1/2"
|
||||
}
|
||||
`}
|
||||
/>
|
||||
{isReady ? (
|
||||
<div className="w-full pb-16">
|
||||
{messages.map((message) => {
|
||||
if (message.type === "user") {
|
||||
return (
|
||||
<HumanMessage
|
||||
shared
|
||||
key={message.messageId}
|
||||
content={message.message}
|
||||
files={message.files}
|
||||
/>
|
||||
);
|
||||
} else {
|
||||
return (
|
||||
<AIMessage
|
||||
shared
|
||||
setPresentingDocument={setPresentingDocument}
|
||||
currentPersona={persona}
|
||||
key={message.messageId}
|
||||
messageId={message.messageId}
|
||||
content={message.message}
|
||||
files={message.files || []}
|
||||
citedDocuments={getCitedDocumentsFromMessage(message)}
|
||||
isComplete
|
||||
/>
|
||||
);
|
||||
}
|
||||
})}
|
||||
</div>
|
||||
) : (
|
||||
<div className="grow flex-0 h-screen w-full flex items-center justify-center">
|
||||
<div className="mb-[33vh]">
|
||||
<OnyxInitializingLoader />
|
||||
</div>
|
||||
</div>
|
||||
{isReady ? (
|
||||
<div className="w-full pt-24 pb-16">
|
||||
{messages.map((message) => {
|
||||
if (message.type === "user") {
|
||||
return (
|
||||
<HumanMessage
|
||||
shared
|
||||
key={message.messageId}
|
||||
content={message.message}
|
||||
files={message.files}
|
||||
/>
|
||||
);
|
||||
} else {
|
||||
return (
|
||||
<AIMessage
|
||||
shared
|
||||
query={message.query || undefined}
|
||||
hasDocs={
|
||||
(message.documents &&
|
||||
message.documents.length > 0) === true
|
||||
}
|
||||
toolCall={message.toolCall}
|
||||
docs={message.documents}
|
||||
setPresentingDocument={setPresentingDocument}
|
||||
currentPersona={persona}
|
||||
key={message.messageId}
|
||||
messageId={message.messageId}
|
||||
content={message.message}
|
||||
files={message.files || []}
|
||||
citedDocuments={getCitedDocumentsFromMessage(
|
||||
message
|
||||
)}
|
||||
// toggleDocumentSelection={() => {
|
||||
// setDocumentSidebarToggled(true);
|
||||
// }}
|
||||
toggleDocumentSelection={() => {
|
||||
if (
|
||||
!documentSidebarToggled ||
|
||||
(documentSidebarToggled &&
|
||||
selectedMessageForDocDisplay ===
|
||||
message.messageId)
|
||||
) {
|
||||
toggleDocumentSidebar();
|
||||
}
|
||||
setSelectedMessageForDocDisplay(
|
||||
message.messageId
|
||||
);
|
||||
}}
|
||||
isComplete
|
||||
/>
|
||||
);
|
||||
}
|
||||
})}
|
||||
</div>
|
||||
) : (
|
||||
<div className="grow flex-0 h-screen w-full flex items-center justify-center">
|
||||
<div className="mb-[33vh]">
|
||||
<OnyxInitializingLoader />
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{!settings?.isMobile && (
|
||||
<div
|
||||
style={{ transition: "width 0.30s ease-out" }}
|
||||
className={`
|
||||
flex-none
|
||||
overflow-y-hidden
|
||||
transition-all
|
||||
duration-300
|
||||
ease-in-out
|
||||
${documentSidebarToggled ? "w-[400px]" : "w-[0px]"}
|
||||
`}
|
||||
></div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<FixedLogo backgroundToggled={false} />
|
||||
<BackToOnyxButton documentSidebarToggled={documentSidebarToggled} />
|
||||
</div>
|
||||
|
||||
<BackToOnyxButton />
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
|
||||
@@ -13,8 +13,8 @@ import {
|
||||
FetchAssistantsResponse,
|
||||
fetchAssistantsSS,
|
||||
} from "@/lib/assistants/fetchAssistantsSS";
|
||||
import FunctionalHeader from "@/components/chat_search/Header";
|
||||
import { defaultPersona } from "@/app/admin/assistants/lib";
|
||||
import { constructMiniFiedPersona } from "@/lib/assistantIconUtils";
|
||||
|
||||
async function getSharedChat(chatId: string) {
|
||||
const response = await fetchSS(
|
||||
@@ -34,6 +34,7 @@ export default async function Page(props: {
|
||||
getAuthTypeMetadataSS(),
|
||||
getCurrentUserSS(),
|
||||
getSharedChat(params.chatId),
|
||||
fetchAssistantsSS(),
|
||||
];
|
||||
|
||||
// catch cases where the backend is completely unreachable here
|
||||
@@ -49,6 +50,8 @@ export default async function Page(props: {
|
||||
const authTypeMetadata = results[0] as AuthTypeMetadata | null;
|
||||
const user = results[1] as User | null;
|
||||
const chatSession = results[2] as BackendChatSession | null;
|
||||
const assistantsResponse = results[3] as FetchAssistantsResponse | null;
|
||||
const [availableAssistants, error] = assistantsResponse ?? [[], null];
|
||||
|
||||
const authDisabled = authTypeMetadata?.authType === "disabled";
|
||||
if (!authDisabled && !user) {
|
||||
@@ -58,13 +61,22 @@ export default async function Page(props: {
|
||||
if (user && !user.is_verified && authTypeMetadata?.requiresVerification) {
|
||||
return redirect("/auth/waiting-on-verification");
|
||||
}
|
||||
// prettier-ignore
|
||||
const persona: Persona =
|
||||
chatSession?.persona_id && availableAssistants?.length
|
||||
? (availableAssistants.find((p) => p.id === chatSession.persona_id) ??
|
||||
defaultPersona)
|
||||
: (availableAssistants?.[0] ?? defaultPersona);
|
||||
|
||||
const persona: Persona = constructMiniFiedPersona(
|
||||
chatSession?.persona_icon_color ?? null,
|
||||
chatSession?.persona_icon_shape ?? null,
|
||||
chatSession?.persona_name ?? "",
|
||||
chatSession?.persona_id ?? 0
|
||||
return (
|
||||
<div>
|
||||
<div className="absolute top-0 z-40 w-full">
|
||||
<FunctionalHeader page="shared" />
|
||||
</div>
|
||||
|
||||
<div className="flex relative bg-background text-default overflow-hidden pt-16 h-screen">
|
||||
<SharedChatDisplay chatSession={chatSession} persona={persona} />
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
|
||||
return <SharedChatDisplay chatSession={chatSession} persona={persona} />;
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"use client";
|
||||
|
||||
import React, { memo } from "react";
|
||||
import { HeaderTitle } from "@/components/header/HeaderTitle";
|
||||
import { Logo } from "@/components/logo/Logo";
|
||||
import { SettingsContext } from "@/components/settings/SettingsProvider";
|
||||
@@ -12,7 +11,7 @@ import { LogoType } from "@/components/logo/Logo";
|
||||
import { EnterpriseSettings } from "@/app/admin/settings/interfaces";
|
||||
import { useRouter } from "next/navigation";
|
||||
|
||||
export const LogoComponent = memo(function LogoComponent({
|
||||
export function LogoComponent({
|
||||
enterpriseSettings,
|
||||
backgroundToggled,
|
||||
show,
|
||||
@@ -24,13 +23,12 @@ export const LogoComponent = memo(function LogoComponent({
|
||||
isAdmin?: boolean;
|
||||
}) {
|
||||
const router = useRouter();
|
||||
|
||||
return (
|
||||
<button
|
||||
onClick={isAdmin ? () => router.push("/chat") : () => {}}
|
||||
className={`max-w-[200px]
|
||||
${!show && "mobile:hidden"}
|
||||
flex items-center gap-x-1`}
|
||||
className={`max-w-[200px] ${
|
||||
!show && "mobile:hidden"
|
||||
} flex items-center gap-x-1`}
|
||||
>
|
||||
{enterpriseSettings && enterpriseSettings.application_name ? (
|
||||
<>
|
||||
@@ -42,9 +40,7 @@ export const LogoComponent = memo(function LogoComponent({
|
||||
{enterpriseSettings.application_name}
|
||||
</HeaderTitle>
|
||||
{!NEXT_PUBLIC_DO_NOT_USE_TOGGLE_OFF_DANSWER_POWERED && (
|
||||
<p className="text-xs text-left text-subtle whitespace-nowrap overflow-hidden text-ellipsis">
|
||||
Powered by Onyx
|
||||
</p>
|
||||
<p className="text-xs text-left text-subtle">Powered by Onyx</p>
|
||||
)}
|
||||
</div>
|
||||
</>
|
||||
@@ -53,7 +49,7 @@ export const LogoComponent = memo(function LogoComponent({
|
||||
)}
|
||||
</button>
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
export default function FixedLogo({
|
||||
backgroundToggled,
|
||||
|
||||
@@ -38,7 +38,6 @@ import { BookmarkIcon, RobotIcon } from "@/components/icons/icons";
|
||||
import { AddTokenRateLimitForm } from "./AddTokenRateLimitForm";
|
||||
import { GenericTokenRateLimitTable } from "@/app/admin/token-rate-limits/TokenRateLimitTables";
|
||||
import { useUser } from "@/components/user/UserProvider";
|
||||
import { GenericConfirmModal } from "@/components/modals/GenericConfirmModal";
|
||||
|
||||
interface GroupDisplayProps {
|
||||
users: User[];
|
||||
@@ -69,13 +68,8 @@ const UserRoleDropdown = ({
|
||||
return user.role;
|
||||
});
|
||||
const [isSettingRole, setIsSettingRole] = useState(false);
|
||||
const [showDemoteConfirm, setShowDemoteConfirm] = useState(false);
|
||||
const [pendingRoleChange, setPendingRoleChange] = useState<string | null>(
|
||||
null
|
||||
);
|
||||
const { user: currentUser } = useUser();
|
||||
|
||||
const applyRoleChange = async (value: string) => {
|
||||
const handleChange = async (value: string) => {
|
||||
if (value === localRole) return;
|
||||
if (value === UserRole.BASIC || value === UserRole.CURATOR) {
|
||||
setIsSettingRole(true);
|
||||
@@ -101,61 +95,31 @@ const UserRoleDropdown = ({
|
||||
}
|
||||
};
|
||||
|
||||
const handleChange = (value: string) => {
|
||||
if (value === UserRole.BASIC && user.id === currentUser?.id) {
|
||||
setPendingRoleChange(value);
|
||||
setShowDemoteConfirm(true);
|
||||
} else {
|
||||
applyRoleChange(value);
|
||||
}
|
||||
};
|
||||
|
||||
const isEditable =
|
||||
user.role === UserRole.BASIC || user.role === UserRole.CURATOR;
|
||||
(user.role === UserRole.BASIC || user.role === UserRole.CURATOR) && isAdmin;
|
||||
|
||||
return (
|
||||
<>
|
||||
{/* Confirmation modal - only shown when users try to demote themselves */}
|
||||
{showDemoteConfirm && pendingRoleChange && (
|
||||
<GenericConfirmModal
|
||||
title="Remove Yourself as a Curator for this Group?"
|
||||
message="Are you sure you want to change your role to Basic? This will remove your ability to curate this group."
|
||||
confirmText="Yes, set me to Basic"
|
||||
onClose={() => {
|
||||
// Cancel the role change if user dismisses modal
|
||||
setShowDemoteConfirm(false);
|
||||
setPendingRoleChange(null);
|
||||
}}
|
||||
onConfirm={() => {
|
||||
// Apply the role change if user confirms
|
||||
setShowDemoteConfirm(false);
|
||||
applyRoleChange(pendingRoleChange);
|
||||
setPendingRoleChange(null);
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
{isEditable ? (
|
||||
<div className="w-40">
|
||||
<Select
|
||||
value={localRole}
|
||||
onValueChange={handleChange}
|
||||
disabled={isSettingRole}
|
||||
>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder="Select role" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value={UserRole.BASIC}>Basic</SelectItem>
|
||||
<SelectItem value={UserRole.CURATOR}>Curator</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
) : (
|
||||
<div>{USER_ROLE_LABELS[localRole]}</div>
|
||||
)}
|
||||
</>
|
||||
);
|
||||
if (isEditable) {
|
||||
return (
|
||||
<div className="w-40">
|
||||
Select group
|
||||
<Select
|
||||
value={localRole}
|
||||
onValueChange={handleChange}
|
||||
disabled={isSettingRole}
|
||||
>
|
||||
<SelectTrigger>
|
||||
<SelectValue placeholder="Select role" />
|
||||
</SelectTrigger>
|
||||
<SelectContent>
|
||||
<SelectItem value={UserRole.BASIC}>Basic</SelectItem>
|
||||
<SelectItem value={UserRole.CURATOR}>Curator</SelectItem>
|
||||
</SelectContent>
|
||||
</Select>
|
||||
</div>
|
||||
);
|
||||
} else {
|
||||
return <div>{USER_ROLE_LABELS[localRole]}</div>;
|
||||
}
|
||||
};
|
||||
|
||||
export const GroupDisplay = ({
|
||||
|
||||
@@ -28,7 +28,7 @@ export function MetadataBadge({
        className: flexNone ? "flex-none" : "mr-0.5 my-auto",
      })}
      <p className="max-w-[6rem] text-ellipsis overflow-hidden truncate whitespace-nowrap">
        {value}
        {value}lllaasfasdf
      </p>
    </div>
  );
@@ -35,7 +35,7 @@ const DropdownOption: React.FC<DropdownOptionProps> = ({
|
||||
openInNewTab,
|
||||
}) => {
|
||||
const content = (
|
||||
<div className="flex py-3 px-4 cursor-pointer rounded hover:bg-hover">
|
||||
<div className="flex py-3 px-4 cursor-pointer rounded hover:bg-hover-light">
|
||||
{icon}
|
||||
{label}
|
||||
</div>
|
||||
|
||||
@@ -211,11 +211,7 @@ export function TextFormField({
|
||||
|
||||
return (
|
||||
<div className={`w-full ${width}`}>
|
||||
<div
|
||||
className={`flex ${
|
||||
vertical ? "flex-col" : "flex-row"
|
||||
} gap-x-2 items-start`}
|
||||
>
|
||||
<div className={`flex ${vertical ? "flex-col" : "flex-row"} items-start`}>
|
||||
<div className="flex gap-x-2 items-center">
|
||||
{!removeLabel && (
|
||||
<Label className={sizeClass.label} small={small}>
|
||||
|
||||
@@ -13,9 +13,6 @@ import { TableHeader } from "@/components/ui/table";
|
||||
import { UserRoleDropdown } from "./buttons/UserRoleDropdown";
|
||||
import { DeleteUserButton } from "./buttons/DeleteUserButton";
|
||||
import { DeactivaterButton } from "./buttons/DeactivaterButton";
|
||||
import { useUser } from "@/components/user/UserProvider";
|
||||
import { LeaveOrganizationButton } from "./buttons/LeaveOrganizationButton";
|
||||
import { NEXT_PUBLIC_CLOUD_ENABLED } from "@/lib/constants";
|
||||
|
||||
interface Props {
|
||||
users: Array<User>;
|
||||
@@ -31,8 +28,6 @@ const SignedUpUserTable = ({
|
||||
onPageChange,
|
||||
mutate,
|
||||
}: Props & PageSelectorProps) => {
|
||||
const { user: currentUser } = useUser();
|
||||
|
||||
if (!users.length) return null;
|
||||
|
||||
const handlePopup = (message: string, type: "success" | "error") => {
|
||||
@@ -86,30 +81,18 @@ const SignedUpUserTable = ({
|
||||
</TableCell>
|
||||
<TableCell>
|
||||
<div className="flex justify-end gap-x-2">
|
||||
{NEXT_PUBLIC_CLOUD_ENABLED &&
|
||||
user.id === currentUser?.id ? (
|
||||
<LeaveOrganizationButton
|
||||
<DeactivaterButton
|
||||
user={user}
|
||||
deactivate={user.status === UserStatus.live}
|
||||
setPopup={setPopup}
|
||||
mutate={mutate}
|
||||
/>
|
||||
{user.status == UserStatus.deactivated && (
|
||||
<DeleteUserButton
|
||||
user={user}
|
||||
setPopup={setPopup}
|
||||
mutate={mutate}
|
||||
/>
|
||||
) : (
|
||||
<>
|
||||
<DeactivaterButton
|
||||
user={user}
|
||||
deactivate={user.status === UserStatus.live}
|
||||
setPopup={setPopup}
|
||||
mutate={mutate}
|
||||
/>
|
||||
|
||||
{user.status == UserStatus.deactivated && (
|
||||
<DeleteUserButton
|
||||
user={user}
|
||||
setPopup={setPopup}
|
||||
mutate={mutate}
|
||||
/>
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</TableCell>
|
||||
|
||||
@@ -1,8 +1,36 @@
|
||||
import { type User } from "@/lib/types";
|
||||
import {
|
||||
type User,
|
||||
UserStatus,
|
||||
UserRole,
|
||||
USER_ROLE_LABELS,
|
||||
INVALID_ROLE_HOVER_TEXT,
|
||||
} from "@/lib/types";
|
||||
import { type PageSelectorProps } from "@/components/PageSelector";
|
||||
import { HidableSection } from "@/app/admin/assistants/HidableSection";
|
||||
import { PopupSpec } from "@/components/admin/connectors/Popup";
|
||||
import userMutationFetcher from "@/lib/admin/users/userMutationFetcher";
|
||||
import useSWRMutation from "swr/mutation";
|
||||
import {
|
||||
Table,
|
||||
TableHead,
|
||||
TableRow,
|
||||
TableBody,
|
||||
TableCell,
|
||||
} from "@/components/ui/table";
|
||||
|
||||
import {
|
||||
Select,
|
||||
SelectContent,
|
||||
SelectItem,
|
||||
SelectTrigger,
|
||||
SelectValue,
|
||||
} from "@/components/ui/select";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { GenericConfirmModal } from "@/components/modals/GenericConfirmModal";
|
||||
import { useState } from "react";
|
||||
import { usePaidEnterpriseFeaturesEnabled } from "@/components/settings/usePaidEnterpriseFeaturesEnabled";
|
||||
import { DeleteEntityModal } from "@/components/modals/DeleteEntityModal";
|
||||
import { TableHeader } from "@/components/ui/table";
|
||||
|
||||
export const DeactivaterButton = ({
|
||||
user,
|
||||
|
||||
@@ -1,70 +0,0 @@
|
||||
import { type User } from "@/lib/types";
|
||||
import { PopupSpec } from "@/components/admin/connectors/Popup";
|
||||
import userMutationFetcher from "@/lib/admin/users/userMutationFetcher";
|
||||
import useSWRMutation from "swr/mutation";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { useState } from "react";
|
||||
import { DeleteEntityModal } from "@/components/modals/DeleteEntityModal";
|
||||
import { useRouter } from "next/navigation";
|
||||
|
||||
export const LeaveOrganizationButton = ({
|
||||
user,
|
||||
setPopup,
|
||||
mutate,
|
||||
}: {
|
||||
user: User;
|
||||
setPopup: (spec: PopupSpec) => void;
|
||||
mutate: () => void;
|
||||
}) => {
|
||||
const router = useRouter();
|
||||
const { trigger, isMutating } = useSWRMutation(
|
||||
"/api/tenants/leave-organization",
|
||||
userMutationFetcher,
|
||||
{
|
||||
onSuccess: () => {
|
||||
mutate();
|
||||
setPopup({
|
||||
message: "Successfully left the organization!",
|
||||
type: "success",
|
||||
});
|
||||
},
|
||||
onError: (errorMsg) =>
|
||||
setPopup({
|
||||
message: `Unable to leave organization - ${errorMsg}`,
|
||||
type: "error",
|
||||
}),
|
||||
}
|
||||
);
|
||||
|
||||
const [showLeaveModal, setShowLeaveModal] = useState(false);
|
||||
|
||||
const handleLeaveOrganization = async () => {
|
||||
await trigger({ user_email: user.email, method: "POST" });
|
||||
router.push("/");
|
||||
};
|
||||
|
||||
return (
|
||||
<>
|
||||
{showLeaveModal && (
|
||||
<DeleteEntityModal
|
||||
deleteButtonText="Leave"
|
||||
entityType="organization"
|
||||
entityName="your organization"
|
||||
onClose={() => setShowLeaveModal(false)}
|
||||
onSubmit={handleLeaveOrganization}
|
||||
additionalDetails="You will lose access to all organization data and resources."
|
||||
/>
|
||||
)}
|
||||
|
||||
<Button
|
||||
className="w-min"
|
||||
onClick={() => setShowLeaveModal(true)}
|
||||
disabled={isMutating}
|
||||
size="sm"
|
||||
variant="destructive"
|
||||
>
|
||||
Leave Organization
|
||||
</Button>
|
||||
</>
|
||||
);
|
||||
};
|
||||
@@ -33,7 +33,7 @@ export function AssistantIcon({
|
||||
|
||||
return (
|
||||
<CustomTooltip
|
||||
disabled={disableToolip || !assistant.description}
|
||||
disabled={disableToolip}
|
||||
showTick
|
||||
line
|
||||
wrap
|
||||
|
||||
@@ -130,7 +130,7 @@ export default function FunctionalHeader({
|
||||
: "")
|
||||
}
|
||||
>
|
||||
<div className="cursor-pointer ml-2 mr-4 flex-none text-text-700 hover:text-text-600 transition-colors duration-300">
|
||||
<div className="cursor-pointer mr-4 flex-none text-text-700 hover:text-text-600 transition-colors duration-300">
|
||||
<NewChatIcon size={20} />
|
||||
</div>
|
||||
</Link>
|
||||
|
||||
@@ -2,7 +2,9 @@ import { WebResultIcon } from "@/components/WebResultIcon";
|
||||
import { SourceIcon } from "@/components/SourceIcon";
|
||||
import { OnyxDocument } from "@/lib/search/interfaces";
|
||||
import { truncateString } from "@/lib/utils";
|
||||
import { openDocument } from "@/lib/search/utils";
|
||||
import { SetStateAction } from "react";
|
||||
import { Dispatch } from "react";
|
||||
import { ValidSources } from "@/lib/types";
|
||||
|
||||
export default function SourceCard({
|
||||
doc,
|
||||
@@ -14,7 +16,13 @@ export default function SourceCard({
|
||||
return (
|
||||
<div
|
||||
key={doc.document_id}
|
||||
onClick={() => openDocument(doc, setPresentingDocument)}
|
||||
onClick={() => {
|
||||
if (doc.source_type == ValidSources.File && setPresentingDocument) {
|
||||
setPresentingDocument(doc);
|
||||
} else if (doc.link) {
|
||||
window.open(doc.link, "_blank");
|
||||
}
|
||||
}}
|
||||
className="cursor-pointer text-left overflow-hidden flex flex-col gap-0.5 rounded-sm px-3 py-2.5 hover:bg-background-125 bg-background-100 w-[200px]"
|
||||
>
|
||||
<div className="line-clamp-1 font-semibold text-ellipsis text-text-900 flex h-6 items-center gap-2 text-sm">
|
||||
|
||||
@@ -11,9 +11,7 @@ export function HeaderTitle({
|
||||
}) {
|
||||
const isString = typeof children === "string";
|
||||
const textSize =
|
||||
isString && children.length > 10
|
||||
? "text-lg pb-[4px] "
|
||||
: "pb-[2px] text-2xl";
|
||||
isString && children.length > 10 ? "text-lg mb-[4px] " : "text-2xl";
|
||||
|
||||
return (
|
||||
<h1
|
||||
|
||||
@@ -40,7 +40,7 @@ export default function LogoWithText({
|
||||
<div
|
||||
className={`${
|
||||
hideOnMobile && "mobile:hidden"
|
||||
} z-[100] ml-2 mt-1 h-8 mb-auto shrink-0 flex gap-x-0 items-center text-xl`}
|
||||
} z-[100] ml-2 mt-1 h-8 mb-auto shrink-0 flex gap-x-0 items-center text-xl`}
|
||||
>
|
||||
{toggleSidebar && page == "chat" ? (
|
||||
<button
|
||||
@@ -59,29 +59,27 @@ export default function LogoWithText({
|
||||
|
||||
<FiSidebar
|
||||
size={20}
|
||||
className={`text-text-mobile-sidebar desktop:hidden ${
|
||||
toggled && "mobile:hidden"
|
||||
}`}
|
||||
className={`text-text-mobile-sidebar ${toggled && "mobile:hidden"}`}
|
||||
/>
|
||||
</button>
|
||||
) : (
|
||||
<div className="mr-1 invisible mb-auto h-6 w-6">
|
||||
<Logo height={24} width={24} />
|
||||
lll
|
||||
</div>
|
||||
)}
|
||||
|
||||
{!toggled && (
|
||||
<div
|
||||
className={`${
|
||||
showArrow ? "desktop:hidden" : "invisible"
|
||||
} break-words inline-block w-fit text-text-700 text-xl`}
|
||||
>
|
||||
<LogoComponent
|
||||
enterpriseSettings={enterpriseSettings!}
|
||||
backgroundToggled={toggled}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
<div
|
||||
className={`${
|
||||
showArrow ? "desktop:invisible" : "invisible"
|
||||
} break-words inline-block w-fit text-text-700 text-xl`}
|
||||
>
|
||||
<LogoComponent
|
||||
enterpriseSettings={enterpriseSettings!}
|
||||
backgroundToggled={toggled}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{page == "chat" && !showArrow && (
|
||||
<TooltipProvider delayDuration={1000}>
|
||||
<Tooltip>
|
||||
@@ -113,7 +111,6 @@ export default function LogoWithText({
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
)}
|
||||
|
||||
{showArrow && toggleSidebar && (
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
|
||||
@@ -103,14 +103,12 @@ export const LogoIcon = ({
|
||||
className = defaultTailwindCSS,
|
||||
src,
|
||||
}: LogoIconProps) => (
|
||||
<Image
|
||||
<div
|
||||
style={{ width: `${size}px`, height: `${size}px` }}
|
||||
className={`w-[${size}px] h-[${size}px] ` + className}
|
||||
src={src}
|
||||
alt="Logo"
|
||||
width="96"
|
||||
height="96"
|
||||
/>
|
||||
>
|
||||
<Image src={src} alt="Logo" width="96" height="96" />
|
||||
</div>
|
||||
);
|
||||
|
||||
export const AssistantsIconSkeleton = ({
|
||||
@@ -1123,16 +1121,12 @@ export const MetaIcon = ({
|
||||
export const MicrosoftIconSVG = ({
|
||||
size = 16,
|
||||
className = defaultTailwindCSS,
|
||||
}: IconProps) => (
|
||||
<LogoIcon size={size} className={className} src={microsoftSVG} />
|
||||
);
|
||||
}: IconProps) => <LogoIcon size={size} className={className} src={microsoftSVG} />;
|
||||
|
||||
export const MistralIcon = ({
|
||||
size = 16,
|
||||
className = defaultTailwindCSS,
|
||||
}: IconProps) => (
|
||||
<LogoIcon size={size} className={className} src={mistralSVG} />
|
||||
);
|
||||
}: IconProps) => <LogoIcon size={size} className={className} src={mistralSVG} />;
|
||||
|
||||
export const VoyageIcon = ({
|
||||
size = 16,
|
||||
|
||||
@@ -48,15 +48,12 @@ export function Logo({
|
||||
export function LogoType() {
|
||||
return (
|
||||
<Image
|
||||
priority
|
||||
className="max-h-8 w-full mr-auto "
|
||||
src="/logotype.png"
|
||||
alt="Logo"
|
||||
width={2640}
|
||||
height={733}
|
||||
style={{ objectFit: "contain", width: "100%", height: "100%" }}
|
||||
loading="eager"
|
||||
unoptimized={true}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
@@ -8,26 +8,21 @@ export const DeleteEntityModal = ({
|
||||
entityType,
|
||||
entityName,
|
||||
additionalDetails,
|
||||
deleteButtonText,
|
||||
}: {
|
||||
entityType: string;
|
||||
entityName: string;
|
||||
onClose: () => void;
|
||||
onSubmit: () => void;
|
||||
additionalDetails?: string;
|
||||
deleteButtonText?: string;
|
||||
}) => {
|
||||
return (
|
||||
<Modal onOutsideClick={onClose}>
|
||||
<>
|
||||
<div className="flex mb-4">
|
||||
<h2 className="my-auto text-2xl font-bold">
|
||||
{deleteButtonText || `Delete`} {entityType}
|
||||
</h2>
|
||||
<h2 className="my-auto text-2xl font-bold">Delete {entityType}?</h2>
|
||||
</div>
|
||||
<p className="mb-4">
|
||||
Click below to confirm that you want to {deleteButtonText || "delete"}{" "}
|
||||
<b>{entityName}</b>
|
||||
Click below to confirm that you want to delete <b>{entityName}</b>
|
||||
</p>
|
||||
{additionalDetails && <p className="mb-4">{additionalDetails}</p>}
|
||||
<div className="flex">
|
||||
@@ -35,7 +30,7 @@ export const DeleteEntityModal = ({
|
||||
<BasicClickable onClick={onSubmit}>
|
||||
<div className="flex mx-2">
|
||||
<FiTrash className="my-auto mr-2" />
|
||||
{deleteButtonText || "Delete"}
|
||||
Delete
|
||||
</div>
|
||||
</BasicClickable>
|
||||
</div>
|
||||
|
||||
@@ -22,7 +22,6 @@ import { WarningCircle } from "@phosphor-icons/react";
|
||||
import TextView from "../chat_search/TextView";
|
||||
import { SearchResultIcon } from "../SearchResultIcon";
|
||||
import { ValidSources } from "@/lib/types";
|
||||
import { openDocument } from "@/lib/search/utils";
|
||||
|
||||
export const buildDocumentSummaryDisplay = (
|
||||
matchHighlights: string[],
|
||||
@@ -429,15 +428,19 @@ export function CompactDocumentCard({
|
||||
url,
|
||||
updatePresentingDocument,
|
||||
}: {
|
||||
document: OnyxDocument;
|
||||
document: LoadedOnyxDocument;
|
||||
icon?: React.ReactNode;
|
||||
url?: string;
|
||||
updatePresentingDocument: (document: OnyxDocument) => void;
|
||||
updatePresentingDocument: (documentIndex: LoadedOnyxDocument) => void;
|
||||
}) {
|
||||
return (
|
||||
<div
|
||||
onClick={() => {
|
||||
openDocument(document, updatePresentingDocument);
|
||||
if (document.source_type === ValidSources.File) {
|
||||
updatePresentingDocument(document);
|
||||
} else if (document.link) {
|
||||
window.open(document.link, "_blank");
|
||||
}
|
||||
}}
|
||||
className="max-w-[250px] cursor-pointer pb-0 pt-0 mt-0 flex gap-y-0 flex-col content-start items-start gap-0 "
|
||||
>
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { ReactNode } from "react";
|
||||
import { CompactDocumentCard } from "../DocumentDisplay";
|
||||
import { LoadedOnyxDocument, OnyxDocument } from "@/lib/search/interfaces";
|
||||
import { LoadedOnyxDocument } from "@/lib/search/interfaces";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
@@ -8,7 +8,6 @@ import {
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
import { ValidSources } from "@/lib/types";
|
||||
import { openDocument } from "@/lib/search/utils";
|
||||
|
||||
export function Citation({
|
||||
children,
|
||||
@@ -22,7 +21,7 @@ export function Citation({
|
||||
link?: string;
|
||||
children?: JSX.Element | string | null | ReactNode;
|
||||
index?: number;
|
||||
updatePresentingDocument: (document: OnyxDocument) => void;
|
||||
updatePresentingDocument: (documentIndex: LoadedOnyxDocument) => void;
|
||||
document: LoadedOnyxDocument;
|
||||
icon?: React.ReactNode;
|
||||
url?: string;
|
||||
@@ -31,12 +30,20 @@ export function Citation({
|
||||
? children?.toString().split("[")[1].split("]")[0]
|
||||
: index;
|
||||
|
||||
const onClick = () => {
|
||||
if (document.source_type == ValidSources.File) {
|
||||
updatePresentingDocument(document);
|
||||
} else {
|
||||
window.open(link || document.link, "_blank");
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<TooltipProvider delayDuration={0}>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div
|
||||
onClick={() => openDocument(document, updatePresentingDocument)}
|
||||
onMouseDown={onClick}
|
||||
className="inline-flex items-center cursor-pointer transition-all duration-200 ease-in-out"
|
||||
>
|
||||
<span className="flex items-center justify-center w-6 h-6 text-[11px] font-medium text-gray-700 bg-gray-100 rounded-full border border-gray-300 hover:bg-gray-200 hover:text-gray-900 shadow-sm">
|
||||
|
||||
@@ -2,15 +2,34 @@ import * as React from "react";
|
||||
|
||||
import { cn } from "@/lib/utils";
|
||||
|
||||
const Input = React.forwardRef<HTMLInputElement, React.ComponentProps<"input">>(
|
||||
({ className, type, ...props }, ref) => {
|
||||
export interface InputProps
|
||||
extends React.InputHTMLAttributes<HTMLInputElement> {
|
||||
isEditing?: boolean;
|
||||
}
|
||||
|
||||
const Input = React.forwardRef<HTMLInputElement, InputProps>(
|
||||
({ className, type, isEditing = true, style, ...props }, ref) => {
|
||||
const textClassName = "text-2xl text-strong dark:text-neutral-50";
|
||||
if (!isEditing) {
|
||||
return (
|
||||
<span className={cn(textClassName, className)}>
|
||||
{props.value || props.defaultValue}
|
||||
</span>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<input
|
||||
type={type}
|
||||
className={cn(
|
||||
"flex h-10 w-full rounded-md border border-input bg-background px-3 py-2 text-base ring-offset-background file:border-0 file:bg-transparent file:text-sm file:font-medium file:text-foreground placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50 md:text-sm",
|
||||
textClassName,
|
||||
"w-[1ch] min-w-[1ch] box-content pr-1",
|
||||
className
|
||||
)}
|
||||
style={{
|
||||
width: `${Math.max(1, String(props.value || props.defaultValue || "").length)}ch`,
|
||||
...style,
|
||||
}}
|
||||
ref={ref}
|
||||
{...props}
|
||||
/>
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import { Persona } from "@/app/admin/assistants/interfaces";
|
||||
|
||||
export interface GridShape {
|
||||
encodedGrid: number;
|
||||
filledSquares: number;
|
||||
@@ -47,9 +45,7 @@ export function generateRandomIconShape(): GridShape {
|
||||
if (grid[row][col]) {
|
||||
const x = col * 12;
|
||||
const y = row * 12;
|
||||
path += `M ${x} ${y} L ${x + 12} ${y} L ${x + 12} ${y + 12} L ${x} ${
|
||||
y + 12
|
||||
} Z `;
|
||||
path += `M ${x} ${y} L ${x + 12} ${y} L ${x + 12} ${y + 12} L ${x} ${y + 12} Z `;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -98,9 +94,7 @@ export function createSVG(
|
||||
if (grid[row][col]) {
|
||||
const x = col * 12;
|
||||
const y = row * 12;
|
||||
path += `M ${x} ${y} L ${x + 12} ${y} L ${x + 12} ${y + 12} L ${x} ${
|
||||
y + 12
|
||||
} Z `;
|
||||
path += `M ${x} ${y} L ${x + 12} ${y} L ${x + 12} ${y + 12} L ${x} ${y + 12} Z `;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -138,32 +132,3 @@ function shuffleArray(array: any[]) {
|
||||
[array[i], array[j]] = [array[j], array[i]];
|
||||
}
|
||||
}
|
||||
|
||||
// This is used for rendering a persona in the shared chat display
|
||||
export const constructMiniFiedPersona = (
|
||||
assistant_icon_color: string | null,
|
||||
assistant_icon_shape: number | null,
|
||||
name: string,
|
||||
id: number
|
||||
): Persona => {
|
||||
return {
|
||||
id,
|
||||
name,
|
||||
icon_color: assistant_icon_color ?? undefined,
|
||||
icon_shape: assistant_icon_shape ?? undefined,
|
||||
is_visible: true,
|
||||
is_public: true,
|
||||
display_priority: 0,
|
||||
description: "",
|
||||
document_sets: [],
|
||||
prompts: [],
|
||||
tools: [],
|
||||
search_start_date: null,
|
||||
owner: null,
|
||||
starter_messages: null,
|
||||
builtin_persona: false,
|
||||
is_default_persona: false,
|
||||
users: [],
|
||||
groups: [],
|
||||
};
|
||||
};
|
||||
|
||||
@@ -38,9 +38,8 @@ export const LOGOUT_DISABLED =

// Default sidebar open is true if the environment variable is not set
export const NEXT_PUBLIC_DEFAULT_SIDEBAR_OPEN =
  process.env.NEXT_PUBLIC_DEFAULT_SIDEBAR_OPEN?.toLowerCase() === "false"
    ? false
    : true;
  process.env.NEXT_PUBLIC_DEFAULT_SIDEBAR_OPEN?.toLowerCase() === "true" ??
  true;

export const TOGGLED_CONNECTORS_COOKIE_NAME = "toggled_connectors";
@@ -75,12 +74,6 @@ export const NEXT_PUBLIC_CLOUD_ENABLED =
|
||||
export const REGISTRATION_URL =
|
||||
process.env.INTERNAL_URL || "http://127.0.0.1:3001";
|
||||
|
||||
export const SERVER_SIDE_ONLY__CLOUD_ENABLED =
|
||||
process.env.NEXT_PUBLIC_CLOUD_ENABLED?.toLowerCase() === "true";
|
||||
|
||||
export const NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED =
|
||||
process.env.NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED?.toLowerCase() === "true";
|
||||
|
||||
export const TEST_ENV = process.env.TEST_ENV?.toLowerCase() === "true";
|
||||
|
||||
export const NEXT_PUBLIC_DELETE_ALL_CHATS_ENABLED =
|
||||
|
||||
@@ -101,7 +101,7 @@ const MODEL_NAMES_SUPPORTING_IMAGE_INPUT = [
|
||||
"amazon.nova-pro@v1",
|
||||
// meta models
|
||||
"llama-3.2-90b-vision-instruct",
|
||||
"llama-3.2-11b-vision-instruct",
|
||||
"llama-3.2-11b-vision-instruct"
|
||||
];
|
||||
|
||||
export function checkLLMSupportsImageInput(model: string) {
|
||||
|
||||
@@ -1,10 +1,5 @@
import { Tag, ValidSources } from "../types";
import {
  Filters,
  LoadedOnyxDocument,
  OnyxDocument,
  SourceMetadata,
} from "./interfaces";
import { Tag } from "../types";
import { Filters, SourceMetadata } from "./interfaces";
import { DateRangePickerValue } from "@/app/ee/admin/performance/DateRangeSelector";

export const buildFilters = (
@@ -27,16 +22,3 @@ export const buildFilters = (
export function endsWithLetterOrNumber(str: string) {
  return /[a-zA-Z0-9]$/.test(str);
}

// If we have a link, open it in a new tab (including if it's a file)
// If above fails and we have a file, update the presenting document
export const openDocument = (
  document: OnyxDocument,
  updatePresentingDocument?: (document: OnyxDocument) => void
) => {
  if (document.link) {
    window.open(document.link, "_blank");
  } else if (document.source_type === ValidSources.File) {
    updatePresentingDocument?.(document);
  }
};
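For context, a minimal usage sketch of the `openDocument` helper shown in the hunk above; the `CitedDocumentLink` component here is illustrative only and does not exist in this compare:

import { useState } from "react";
import { OnyxDocument } from "@/lib/search/interfaces";
import { openDocument } from "@/lib/search/utils";

function CitedDocumentLink({ doc }: { doc: OnyxDocument }) {
  // Documents without a link (e.g. uploaded files) are routed to the in-app viewer.
  const [presentingDocument, setPresentingDocument] =
    useState<OnyxDocument | null>(null);

  return (
    <>
      <button onClick={() => openDocument(doc, setPresentingDocument)}>
        {doc.document_id}
      </button>
      {presentingDocument && <span>Viewing {presentingDocument.document_id}</span>}
    </>
  );
}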
@@ -115,7 +115,6 @@ export const getAuthUrlSS = async (
      return await getGoogleOAuthUrlSS(nextUrl);
    }
    case "cloud": {
      console.log("LLpp");
      return await getGoogleOAuthUrlSS(nextUrl);
    }
    case "saml": {