Compare commits


1 Commit

Author: pablonyx | SHA1: a49b4dd531 | Message: add user files | Date: 2025-03-31 13:23:28 -07:00
57 changed files with 8326 additions and 5073 deletions

View File

@@ -1,117 +0,0 @@
"""duplicated no-harm user file migration
Revision ID: 6a804aeb4830
Revises: 8e1ac4f39a9f
Create Date: 2025-04-01 07:26:10.539362
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import inspect
import datetime
# revision identifiers, used by Alembic.
revision = "6a804aeb4830"
down_revision = "8e1ac4f39a9f"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Check if user_file table already exists
conn = op.get_bind()
inspector = inspect(conn)
if not inspector.has_table("user_file"):
# Create user_folder table without parent_id
op.create_table(
"user_folder",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column("user_id", sa.UUID(), sa.ForeignKey("user.id"), nullable=True),
sa.Column("name", sa.String(length=255), nullable=True),
sa.Column("description", sa.String(length=255), nullable=True),
sa.Column("display_priority", sa.Integer(), nullable=True, default=0),
sa.Column(
"created_at", sa.DateTime(timezone=True), server_default=sa.func.now()
),
)
# Create user_file table with folder_id instead of parent_folder_id
op.create_table(
"user_file",
sa.Column("id", sa.Integer(), primary_key=True, autoincrement=True),
sa.Column("user_id", sa.UUID(), sa.ForeignKey("user.id"), nullable=True),
sa.Column(
"folder_id",
sa.Integer(),
sa.ForeignKey("user_folder.id"),
nullable=True,
),
sa.Column("link_url", sa.String(), nullable=True),
sa.Column("token_count", sa.Integer(), nullable=True),
sa.Column("file_type", sa.String(), nullable=True),
sa.Column("file_id", sa.String(length=255), nullable=False),
sa.Column("document_id", sa.String(length=255), nullable=False),
sa.Column("name", sa.String(length=255), nullable=False),
sa.Column(
"created_at",
sa.DateTime(),
default=datetime.datetime.utcnow,
),
sa.Column(
"cc_pair_id",
sa.Integer(),
sa.ForeignKey("connector_credential_pair.id"),
nullable=True,
unique=True,
),
)
# Create persona__user_file table
op.create_table(
"persona__user_file",
sa.Column(
"persona_id",
sa.Integer(),
sa.ForeignKey("persona.id"),
primary_key=True,
),
sa.Column(
"user_file_id",
sa.Integer(),
sa.ForeignKey("user_file.id"),
primary_key=True,
),
)
# Create persona__user_folder table
op.create_table(
"persona__user_folder",
sa.Column(
"persona_id",
sa.Integer(),
sa.ForeignKey("persona.id"),
primary_key=True,
),
sa.Column(
"user_folder_id",
sa.Integer(),
sa.ForeignKey("user_folder.id"),
primary_key=True,
),
)
op.add_column(
"connector_credential_pair",
sa.Column("is_user_file", sa.Boolean(), nullable=True, default=False),
)
# Update existing records to have is_user_file=False instead of NULL
op.execute(
"UPDATE connector_credential_pair SET is_user_file = FALSE WHERE is_user_file IS NULL"
)
def downgrade() -> None:
pass
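
The downgrade above is intentionally left as a no-op. For orientation only, a symmetric downgrade might look like the sketch below; this is an illustrative assumption rather than part of the commit, and presumes the objects were created by this revision.

# Hypothetical sketch; the shipped downgrade() is a deliberate no-op.
from alembic import op

def downgrade_sketch() -> None:
    # Drop association tables first, then the tables they reference.
    op.drop_table("persona__user_file")
    op.drop_table("persona__user_folder")
    op.drop_table("user_file")
    op.drop_table("user_folder")
    # Remove the flag added to connector_credential_pair.
    op.drop_column("connector_credential_pair", "is_user_file")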

View File

@@ -5,9 +5,9 @@ Revises: 3781a5eb12cb
Create Date: 2025-01-26 16:08:21.551022
"""
from alembic import op
import sqlalchemy as sa
import datetime
from alembic import op
# revision identifiers, used by Alembic.

View File

@@ -58,7 +58,6 @@ def _get_objects_access_for_user_email_from_salesforce(
f"Time taken to get Salesforce user ID: {end_time - start_time} seconds"
)
if user_id is None:
logger.warning(f"User '{user_email}' not found in Salesforce")
return None
# This is the only query that is not cached in the function
@@ -66,7 +65,6 @@ def _get_objects_access_for_user_email_from_salesforce(
object_id_to_access = get_objects_access_for_user_id(
salesforce_client, user_id, list(object_ids)
)
logger.debug(f"Object ID to access: {object_id_to_access}")
return object_id_to_access

View File

@@ -36,6 +36,9 @@ from onyx.utils.logger import setup_logger
logger = setup_logger()
router = APIRouter(prefix="/auth/saml")
# Define non-authenticated user roles that should be re-created during SAML login
NON_AUTHENTICATED_ROLES = {UserRole.SLACK_USER, UserRole.EXT_PERM_USER}
async def upsert_saml_user(email: str) -> User:
logger.debug(f"Attempting to upsert SAML user with email: {email}")
@@ -51,7 +54,7 @@ async def upsert_saml_user(email: str) -> User:
try:
user = await user_manager.get_by_email(email)
# If user has a non-authenticated role, treat as non-existent
if not user.role.is_web_login():
if user.role in NON_AUTHENTICATED_ROLES:
raise exceptions.UserNotExists()
return user
except exceptions.UserNotExists:

View File

@@ -23,7 +23,6 @@ from onyx.utils.url import add_url_params
from onyx.utils.variable_functionality import fetch_versioned_implementation
from shared_configs.configs import MULTI_TENANT
HTML_EMAIL_TEMPLATE = """\
<!DOCTYPE html>
<html lang="en">

View File

@@ -56,7 +56,6 @@ from httpx_oauth.oauth2 import OAuth2Token
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncSession
from ee.onyx.configs.app_configs import ANONYMOUS_USER_COOKIE_NAME
from onyx.auth.api_key import get_hashed_api_key_from_request
from onyx.auth.email_utils import send_forgot_password_email
from onyx.auth.email_utils import send_user_verification_email
@@ -514,25 +513,6 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
return user
async def on_after_login(
self,
user: User,
request: Optional[Request] = None,
response: Optional[Response] = None,
) -> None:
try:
if response and request and ANONYMOUS_USER_COOKIE_NAME in request.cookies:
response.delete_cookie(
ANONYMOUS_USER_COOKIE_NAME,
# Ensure cookie deletion doesn't override other cookies by setting the same path/domain
path="/",
domain=None,
secure=WEB_DOMAIN.startswith("https"),
)
logger.debug(f"Deleted anonymous user cookie for user {user.email}")
except Exception:
logger.exception("Error deleting anonymous user cookie")
async def on_after_register(
self, user: User, request: Optional[Request] = None
) -> None:
@@ -1322,7 +1302,6 @@ def get_oauth_router(
# Login user
response = await backend.login(strategy, user)
await user_manager.on_after_login(user, request, response)
# Prepare redirect response
if tenant_id is None:
# Use URL utility to add parameters
@@ -1332,14 +1311,9 @@ def get_oauth_router(
# No parameters to add
redirect_response = RedirectResponse(next_url, status_code=302)
# Copy headers from auth response to redirect response, with special handling for Set-Cookie
# Copy headers and other attributes from 'response' to 'redirect_response'
for header_name, header_value in response.headers.items():
# FastAPI can have multiple Set-Cookie headers as a list
if header_name.lower() == "set-cookie" and isinstance(header_value, list):
for cookie_value in header_value:
redirect_response.headers.append(header_name, cookie_value)
else:
redirect_response.headers[header_name] = header_value
redirect_response.headers[header_name] = header_value
if hasattr(response, "body"):
redirect_response.body = response.body

View File

@@ -886,8 +886,11 @@ def monitor_ccpair_permissions_taskset(
record_type=RecordType.PERMISSION_SYNC_PROGRESS,
data={
"cc_pair_id": cc_pair_id,
"total_docs_synced": initial if initial is not None else 0,
"remaining_docs_to_sync": remaining,
"id": payload.id if payload else None,
"total_docs": initial if initial is not None else 0,
"remaining_docs": remaining,
"synced_docs": (initial - remaining) if initial is not None else 0,
"is_complete": remaining == 0,
},
tenant_id=tenant_id,
)
@@ -903,13 +906,6 @@ def monitor_ccpair_permissions_taskset(
f"num_synced={initial}"
)
# Add telemetry for permission syncing complete
optional_telemetry(
record_type=RecordType.PERMISSION_SYNC_COMPLETE,
data={"cc_pair_id": cc_pair_id},
tenant_id=tenant_id,
)
update_sync_record_status(
db_session=db_session,
entity_id=cc_pair_id,

View File

@@ -80,8 +80,7 @@ def check_for_vespa_sync_task(self: Task, *, tenant_id: str) -> bool | None:
"""Runs periodically to check if any document needs syncing.
Generates sets of tasks for Celery if syncing is needed."""
# Useful for debugging timing issues with reacquisitions.
# TODO: remove once more generalized logging is in place
# Useful for debugging timing issues with reacquisitions. TODO: remove once more generalized logging is in place
task_logger.info("check_for_vespa_sync_task started")
time_start = time.monotonic()

View File

@@ -56,6 +56,7 @@ from onyx.indexing.indexing_pipeline import build_indexing_pipeline
from onyx.natural_language_processing.search_nlp_models import (
InformationContentClassificationModel,
)
from onyx.redis.redis_connector import RedisConnector
from onyx.utils.logger import setup_logger
from onyx.utils.logger import TaskAttemptSingleton
from onyx.utils.telemetry import create_milestone_and_report
@@ -577,8 +578,11 @@ def _run_indexing(
data={
"index_attempt_id": index_attempt_id,
"cc_pair_id": ctx.cc_pair_id,
"current_docs_indexed": document_count,
"current_chunks_indexed": chunk_count,
"connector_id": ctx.connector_id,
"credential_id": ctx.credential_id,
"total_docs_indexed": document_count,
"total_chunks": chunk_count,
"batch_num": batch_num,
"source": ctx.source.value,
},
tenant_id=tenant_id,
@@ -599,15 +603,26 @@ def _run_indexing(
checkpoint=checkpoint,
)
# Add telemetry for completed indexing
redis_connector = RedisConnector(tenant_id, ctx.cc_pair_id)
redis_connector_index = redis_connector.new_index(
index_attempt_start.search_settings_id
)
final_progress = redis_connector_index.get_progress() or 0
optional_telemetry(
record_type=RecordType.INDEXING_COMPLETE,
data={
"index_attempt_id": index_attempt_id,
"cc_pair_id": ctx.cc_pair_id,
"connector_id": ctx.connector_id,
"credential_id": ctx.credential_id,
"total_docs_indexed": document_count,
"total_chunks": chunk_count,
"batch_count": batch_num,
"time_elapsed_seconds": time.monotonic() - start_time,
"source": ctx.source.value,
"redis_progress": final_progress,
},
tenant_id=tenant_id,
)

View File

@@ -437,7 +437,7 @@ LINEAR_CLIENT_ID = os.getenv("LINEAR_CLIENT_ID")
LINEAR_CLIENT_SECRET = os.getenv("LINEAR_CLIENT_SECRET")
# Slack specific configs
SLACK_NUM_THREADS = int(os.getenv("SLACK_NUM_THREADS") or 8)
SLACK_NUM_THREADS = int(os.getenv("SLACK_NUM_THREADS") or 2)
DASK_JOB_CLIENT_ENABLED = (
os.environ.get("DASK_JOB_CLIENT_ENABLED", "").lower() == "true"
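
A side note on the SLACK_NUM_THREADS lines above: they use "or" rather than a getenv default so that an empty environment variable also falls back to the hard-coded value. A minimal illustration of that Python behavior (the values below are placeholders):

import os

# An empty string is falsy, so the "or" fallback still applies.
os.environ["SLACK_NUM_THREADS"] = ""
assert int(os.environ.get("SLACK_NUM_THREADS") or 8) == 8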

View File

@@ -45,8 +45,6 @@ _FIREFLIES_API_QUERY = """
}
"""
ONE_MINUTE = 60
def _create_doc_from_transcript(transcript: dict) -> Document | None:
sections: List[TextSection] = []
@@ -108,8 +106,6 @@ def _create_doc_from_transcript(transcript: dict) -> Document | None:
)
# If not all transcripts are being indexed, try using a more-recently-generated
# API key.
class FirefliesConnector(PollConnector, LoadConnector):
def __init__(self, batch_size: int = INDEX_BATCH_SIZE) -> None:
self.batch_size = batch_size
@@ -195,9 +191,6 @@ class FirefliesConnector(PollConnector, LoadConnector):
def poll_source(
self, start: SecondsSinceUnixEpoch, end: SecondsSinceUnixEpoch
) -> GenerateDocumentsOutput:
# add some leeway to account for any timezone funkiness and/or bad handling
# of start time on the Fireflies side
start = max(0, start - ONE_MINUTE)
start_datetime = datetime.fromtimestamp(start, tz=timezone.utc).strftime(
"%Y-%m-%dT%H:%M:%S.000Z"
)

View File

@@ -25,8 +25,6 @@ class SalesforceObject:
)
# te
# This defines the base path for all data files relative to this file
# AKA BE CAREFUL WHEN MOVING THIS FILE
BASE_DATA_PATH = os.path.join(os.path.dirname(__file__), "data")

View File

@@ -14,8 +14,6 @@ from typing import cast
from pydantic import BaseModel
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
from slack_sdk.http_retry import ConnectionErrorRetryHandler
from slack_sdk.http_retry import RetryHandler
from typing_extensions import override
from onyx.configs.app_configs import ENABLE_EXPENSIVE_EXPERT_CALLS
@@ -28,8 +26,6 @@ from onyx.connectors.exceptions import InsufficientPermissionsError
from onyx.connectors.exceptions import UnexpectedValidationError
from onyx.connectors.interfaces import CheckpointConnector
from onyx.connectors.interfaces import CheckpointOutput
from onyx.connectors.interfaces import CredentialsConnector
from onyx.connectors.interfaces import CredentialsProviderInterface
from onyx.connectors.interfaces import GenerateSlimDocumentOutput
from onyx.connectors.interfaces import SecondsSinceUnixEpoch
from onyx.connectors.interfaces import SlimConnector
@@ -42,16 +38,15 @@ from onyx.connectors.models import DocumentFailure
from onyx.connectors.models import EntityFailure
from onyx.connectors.models import SlimDocument
from onyx.connectors.models import TextSection
from onyx.connectors.slack.onyx_retry_handler import OnyxRedisSlackRetryHandler
from onyx.connectors.slack.utils import expert_info_from_slack_id
from onyx.connectors.slack.utils import get_message_link
from onyx.connectors.slack.utils import make_paginated_slack_api_call_w_retries
from onyx.connectors.slack.utils import make_slack_api_call_w_retries
from onyx.connectors.slack.utils import SlackTextCleaner
from onyx.indexing.indexing_heartbeat import IndexingHeartbeatInterface
from onyx.redis.redis_pool import get_redis_client
from onyx.utils.logger import setup_logger
logger = setup_logger()
_SLACK_LIMIT = 900
@@ -498,13 +493,9 @@ def _process_message(
)
class SlackConnector(
SlimConnector, CredentialsConnector, CheckpointConnector[SlackCheckpoint]
):
class SlackConnector(SlimConnector, CheckpointConnector[SlackCheckpoint]):
FAST_TIMEOUT = 1
MAX_RETRIES = 7 # arbitrarily selected
def __init__(
self,
channels: list[str] | None = None,
@@ -523,49 +514,16 @@ class SlackConnector(
# just used for efficiency
self.text_cleaner: SlackTextCleaner | None = None
self.user_cache: dict[str, BasicExpertInfo | None] = {}
self.credentials_provider: CredentialsProviderInterface | None = None
self.credential_prefix: str | None = None
self.delay_lock: str | None = None # the redis key for the shared lock
self.delay_key: str | None = None # the redis key for the shared delay
def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None:
raise NotImplementedError("Use set_credentials_provider with this connector.")
def set_credentials_provider(
self, credentials_provider: CredentialsProviderInterface
) -> None:
credentials = credentials_provider.get_credentials()
tenant_id = credentials_provider.get_tenant_id()
self.redis = get_redis_client(tenant_id=tenant_id)
self.credential_prefix = (
f"connector:slack:credential_{credentials_provider.get_provider_key()}"
)
self.delay_lock = f"{self.credential_prefix}:delay_lock"
self.delay_key = f"{self.credential_prefix}:delay"
# NOTE: slack has a built in RateLimitErrorRetryHandler, but it isn't designed
# for concurrent workers. We've extended it with OnyxRedisSlackRetryHandler.
connection_error_retry_handler = ConnectionErrorRetryHandler()
onyx_rate_limit_error_retry_handler = OnyxRedisSlackRetryHandler(
max_retry_count=self.MAX_RETRIES,
delay_lock=self.delay_lock,
delay_key=self.delay_key,
r=self.redis,
)
custom_retry_handlers: list[RetryHandler] = [
connection_error_retry_handler,
onyx_rate_limit_error_retry_handler,
]
bot_token = credentials["slack_bot_token"]
self.client = WebClient(token=bot_token, retry_handlers=custom_retry_handlers)
self.client = WebClient(token=bot_token)
# use for requests that must return quickly (e.g. realtime flows where user is waiting)
self.fast_client = WebClient(
token=bot_token, timeout=SlackConnector.FAST_TIMEOUT
)
self.text_cleaner = SlackTextCleaner(client=self.client)
self.credentials_provider = credentials_provider
return None
def retrieve_all_slim_documents(
self,

View File

@@ -1,159 +0,0 @@
import math
import random
import time
from typing import cast
from typing import Optional
from redis import Redis
from redis.lock import Lock as RedisLock
from slack_sdk.http_retry.handler import RetryHandler
from slack_sdk.http_retry.request import HttpRequest
from slack_sdk.http_retry.response import HttpResponse
from slack_sdk.http_retry.state import RetryState
from onyx.utils.logger import setup_logger
logger = setup_logger()
class OnyxRedisSlackRetryHandler(RetryHandler):
"""
This class uses Redis to share a rate limit among multiple threads.
Threads that encounter a rate limit will observe the shared delay, increment the
shared delay with the retry value, and use the new shared value as a wait interval.
This has the effect of serializing calls when a rate limit is hit, which is what
needs to happen if the server punishes us with additional limiting when we make
a call too early. We believe this is what Slack is doing based on empirical
observation, meaning we see indefinite hangs if we're too aggressive.
Another way to do this is just to do exponential backoff. Might be easier?
Adapted from slack's RateLimitErrorRetryHandler.
"""
LOCK_TTL = 60 # used to serialize access to the retry TTL
LOCK_BLOCKING_TIMEOUT = 60 # how long to wait for the lock
"""RetryHandler that does retries for rate limited errors."""
def __init__(
self,
max_retry_count: int,
delay_lock: str,
delay_key: str,
r: Redis,
):
"""
delay_lock: the redis key to use with RedisLock (to synchronize access to delay_key)
delay_key: the redis key containing a shared TTL
"""
super().__init__(max_retry_count=max_retry_count)
self._redis: Redis = r
self._delay_lock = delay_lock
self._delay_key = delay_key
def _can_retry(
self,
*,
state: RetryState,
request: HttpRequest,
response: Optional[HttpResponse] = None,
error: Optional[Exception] = None,
) -> bool:
return response is not None and response.status_code == 429
def prepare_for_next_attempt(
self,
*,
state: RetryState,
request: HttpRequest,
response: Optional[HttpResponse] = None,
error: Optional[Exception] = None,
) -> None:
"""It seems this function is responsible for the wait to retry ... aka we
actually sleep in this function."""
retry_after_value: list[str] | None = None
retry_after_header_name: Optional[str] = None
duration_s: float = 1.0 # seconds
if response is None:
# NOTE(rkuo): this logic comes from RateLimitErrorRetryHandler.
# This reads oddly, as if the caller itself could raise the exception.
# We don't have the luxury of changing this.
if error:
raise error
return
state.next_attempt_requested = True # this signals the caller to retry
# calculate wait duration based on retry-after + some jitter
for k in response.headers.keys():
if k.lower() == "retry-after":
retry_after_header_name = k
break
try:
if retry_after_header_name is None:
# This situation usually does not arise. Just in case.
raise ValueError(
"OnyxRedisSlackRetryHandler.prepare_for_next_attempt: retry-after header name is None"
)
retry_after_value = response.headers.get(retry_after_header_name)
if not retry_after_value:
raise ValueError(
"OnyxRedisSlackRetryHandler.prepare_for_next_attempt: retry-after header value is None"
)
retry_after_value_int = int(
retry_after_value[0]
) # will raise ValueError if somehow we can't convert to int
jitter = retry_after_value_int * 0.25 * random.random()
duration_s = math.ceil(retry_after_value_int + jitter)
except ValueError:
duration_s += random.random()
# lock and extend the ttl
lock: RedisLock = self._redis.lock(
self._delay_lock,
timeout=OnyxRedisSlackRetryHandler.LOCK_TTL,
thread_local=False,
)
acquired = lock.acquire(
blocking_timeout=OnyxRedisSlackRetryHandler.LOCK_BLOCKING_TIMEOUT / 2
)
ttl_ms: int | None = None
try:
if acquired:
# if we can get the lock, then read and extend the ttl
ttl_ms = cast(int, self._redis.pttl(self._delay_key))
if ttl_ms < 0: # negative values are error status codes ... see docs
ttl_ms = 0
ttl_ms_new = ttl_ms + int(duration_s * 1000.0)
self._redis.set(self._delay_key, "1", px=ttl_ms_new)
else:
# if we can't get the lock, just go ahead.
# TODO: if we know our actual parallelism, multiplying by that
# would be a pretty good idea
ttl_ms_new = int(duration_s * 1000.0)
finally:
if acquired:
lock.release()
logger.warning(
f"OnyxRedisSlackRetryHandler.prepare_for_next_attempt wait: "
f"retry-after={retry_after_value} "
f"shared_delay_ms={ttl_ms} new_shared_delay_ms={ttl_ms_new}"
)
# TODO: would be good to take an event var and sleep in short increments to
# allow for a clean exit / exception
time.sleep(ttl_ms_new / 1000.0)
state.increment_current_attempt()
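
Condensed, the shared-delay behavior described in the docstring above (read the shared TTL, extend it by this worker's retry-after, then sleep for the combined value) reduces to the sketch below. It is a simplified illustration with no locking, jitter, or error handling, not a drop-in replacement for prepare_for_next_attempt.

import time

from redis import Redis

def shared_backoff_sketch(r: Redis, delay_key: str, retry_after_s: float) -> None:
    # Read the remaining shared delay; pttl returns milliseconds and is negative if the key is missing.
    ttl_ms = r.pttl(delay_key)
    if ttl_ms < 0:
        ttl_ms = 0
    # Extend the shared delay by this worker's retry-after and write it back as the key's TTL.
    new_ttl_ms = ttl_ms + int(retry_after_s * 1000)
    r.set(delay_key, "1", px=new_ttl_ms)
    # Sleep for the combined delay, which serializes callers once a rate limit is hit.
    time.sleep(new_ttl_ms / 1000.0)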

View File

@@ -1,4 +1,5 @@
import re
import time
from collections.abc import Callable
from collections.abc import Generator
from functools import lru_cache
@@ -63,72 +64,71 @@ def _make_slack_api_call_paginated(
return paginated_call
# NOTE(rkuo): we may not need this any more if the integrated retry handlers work as
# expected. Do we want to keep this around?
def make_slack_api_rate_limited(
call: Callable[..., SlackResponse], max_retries: int = 7
) -> Callable[..., SlackResponse]:
"""Wraps calls to slack API so that they automatically handle rate limiting"""
# def make_slack_api_rate_limited(
# call: Callable[..., SlackResponse], max_retries: int = 7
# ) -> Callable[..., SlackResponse]:
# """Wraps calls to slack API so that they automatically handle rate limiting"""
@wraps(call)
def rate_limited_call(**kwargs: Any) -> SlackResponse:
last_exception = None
# @wraps(call)
# def rate_limited_call(**kwargs: Any) -> SlackResponse:
# last_exception = None
for _ in range(max_retries):
try:
# Make the API call
response = call(**kwargs)
# for _ in range(max_retries):
# try:
# # Make the API call
# response = call(**kwargs)
# Check for errors in the response, will raise `SlackApiError`
# if anything went wrong
response.validate()
return response
# # Check for errors in the response, will raise `SlackApiError`
# # if anything went wrong
# response.validate()
# return response
except SlackApiError as e:
last_exception = e
try:
error = e.response["error"]
except KeyError:
error = "unknown error"
# except SlackApiError as e:
# last_exception = e
# try:
# error = e.response["error"]
# except KeyError:
# error = "unknown error"
if error == "ratelimited":
# Handle rate limiting: get the 'Retry-After' header value and sleep for that duration
retry_after = int(e.response.headers.get("Retry-After", 1))
logger.info(
f"Slack call rate limited, retrying after {retry_after} seconds. Exception: {e}"
)
time.sleep(retry_after)
elif error in ["already_reacted", "no_reaction", "internal_error"]:
# Log internal_error and return the response instead of failing
logger.warning(
f"Slack call encountered '{error}', skipping and continuing..."
)
return e.response
else:
# Raise the error for non-transient errors
raise
# if error == "ratelimited":
# # Handle rate limiting: get the 'Retry-After' header value and sleep for that duration
# retry_after = int(e.response.headers.get("Retry-After", 1))
# logger.info(
# f"Slack call rate limited, retrying after {retry_after} seconds. Exception: {e}"
# )
# time.sleep(retry_after)
# elif error in ["already_reacted", "no_reaction", "internal_error"]:
# # Log internal_error and return the response instead of failing
# logger.warning(
# f"Slack call encountered '{error}', skipping and continuing..."
# )
# return e.response
# else:
# # Raise the error for non-transient errors
# raise
# If the code reaches this point, all retries have been exhausted
msg = f"Max retries ({max_retries}) exceeded"
if last_exception:
raise Exception(msg) from last_exception
else:
raise Exception(msg)
# # If the code reaches this point, all retries have been exhausted
# msg = f"Max retries ({max_retries}) exceeded"
# if last_exception:
# raise Exception(msg) from last_exception
# else:
# raise Exception(msg)
# return rate_limited_call
return rate_limited_call
def make_slack_api_call_w_retries(
call: Callable[..., SlackResponse], **kwargs: Any
) -> SlackResponse:
return basic_retry_wrapper(call)(**kwargs)
return basic_retry_wrapper(make_slack_api_rate_limited(call))(**kwargs)
def make_paginated_slack_api_call_w_retries(
call: Callable[..., SlackResponse], **kwargs: Any
) -> Generator[dict[str, Any], None, None]:
return _make_slack_api_call_paginated(basic_retry_wrapper(call))(**kwargs)
return _make_slack_api_call_paginated(
basic_retry_wrapper(make_slack_api_rate_limited(call))
)(**kwargs)
def expert_info_from_slack_id(
@@ -142,7 +142,7 @@ def expert_info_from_slack_id(
if user_id in user_cache:
return user_cache[user_id]
response = client.users_info(user=user_id)
response = make_slack_api_rate_limited(client.users_info)(user=user_id)
if not response["ok"]:
user_cache[user_id] = None
@@ -175,7 +175,9 @@ class SlackTextCleaner:
def _get_slack_name(self, user_id: str) -> str:
if user_id not in self._id_to_name_map:
try:
response = self._client.users_info(user=user_id)
response = make_slack_api_rate_limited(self._client.users_info)(
user=user_id
)
# prefer display name if set, since that is what is shown in Slack
self._id_to_name_map[user_id] = (
response["user"]["profile"]["display_name"]

View File

@@ -217,6 +217,7 @@ def mark_attempt_in_progress(
"index_attempt_id": index_attempt.id,
"status": IndexingStatus.IN_PROGRESS.value,
"cc_pair_id": index_attempt.connector_credential_pair_id,
"search_settings_id": index_attempt.search_settings_id,
},
)
except Exception:
@@ -245,6 +246,9 @@ def mark_attempt_succeeded(
"index_attempt_id": index_attempt_id,
"status": IndexingStatus.SUCCESS.value,
"cc_pair_id": attempt.connector_credential_pair_id,
"search_settings_id": attempt.search_settings_id,
"total_docs_indexed": attempt.total_docs_indexed,
"new_docs_indexed": attempt.new_docs_indexed,
},
)
except Exception:
@@ -273,6 +277,9 @@ def mark_attempt_partially_succeeded(
"index_attempt_id": index_attempt_id,
"status": IndexingStatus.COMPLETED_WITH_ERRORS.value,
"cc_pair_id": attempt.connector_credential_pair_id,
"search_settings_id": attempt.search_settings_id,
"total_docs_indexed": attempt.total_docs_indexed,
"new_docs_indexed": attempt.new_docs_indexed,
},
)
except Exception:
@@ -305,6 +312,10 @@ def mark_attempt_canceled(
"index_attempt_id": index_attempt_id,
"status": IndexingStatus.CANCELED.value,
"cc_pair_id": attempt.connector_credential_pair_id,
"search_settings_id": attempt.search_settings_id,
"reason": reason,
"total_docs_indexed": attempt.total_docs_indexed,
"new_docs_indexed": attempt.new_docs_indexed,
},
)
except Exception:
@@ -339,6 +350,10 @@ def mark_attempt_failed(
"index_attempt_id": index_attempt_id,
"status": IndexingStatus.FAILED.value,
"cc_pair_id": attempt.connector_credential_pair_id,
"search_settings_id": attempt.search_settings_id,
"reason": failure_reason,
"total_docs_indexed": attempt.total_docs_indexed,
"new_docs_indexed": attempt.new_docs_indexed,
},
)
except Exception:

View File

@@ -5,7 +5,6 @@ from datetime import timezone
from onyx.configs.constants import INDEX_SEPARATOR
from onyx.context.search.models import IndexFilters
from onyx.document_index.interfaces import VespaChunkRequest
from onyx.document_index.vespa_constants import ACCESS_CONTROL_LIST
from onyx.document_index.vespa_constants import CHUNK_ID
from onyx.document_index.vespa_constants import DOC_UPDATED_AT
from onyx.document_index.vespa_constants import DOCUMENT_ID
@@ -75,10 +74,8 @@ def build_vespa_filters(
filter_str += f'({TENANT_ID} contains "{filters.tenant_id}") and '
# ACL filters
if filters.access_control_list is not None:
filter_str += _build_or_filters(
ACCESS_CONTROL_LIST, filters.access_control_list
)
# if filters.access_control_list is not None:
# filter_str += _build_or_filters(ACCESS_CONTROL_LIST, filters.access_control_list)
# Source type filters
source_strs = (

View File

@@ -42,7 +42,6 @@ from onyx.context.search.retrieval.search_runner import (
from onyx.db.engine import get_all_tenant_ids
from onyx.db.engine import get_session_with_current_tenant
from onyx.db.engine import get_session_with_tenant
from onyx.db.engine import SqlEngine
from onyx.db.models import SlackBot
from onyx.db.search_settings import get_current_search_settings
from onyx.db.slack_bot import fetch_slack_bot
@@ -973,9 +972,6 @@ def _get_socket_client(
if __name__ == "__main__":
# Initialize the SqlEngine
SqlEngine.init_engine(pool_size=20, max_overflow=5)
# Initialize the tenant handler which will manage tenant connections
logger.info("Starting SlackbotHandler")
tenant_handler = SlackbotHandler()

View File

@@ -195,7 +195,7 @@ class RedisConnectorPermissionSync:
),
queue=OnyxCeleryQueues.DOC_PERMISSIONS_UPSERT,
task_id=custom_task_id,
priority=OnyxCeleryPriority.MEDIUM,
priority=OnyxCeleryPriority.HIGH,
ignore_result=True,
)
async_results.append(result)

View File

@@ -125,7 +125,6 @@ class TenantRedis(redis.Redis):
"hset",
"hdel",
"ttl",
"pttl",
] # Regular methods that need simple prefixing
if item == "scan_iter" or item == "sscan_iter":

View File

@@ -39,7 +39,6 @@ class RecordType(str, Enum):
INDEXING_PROGRESS = "indexing_progress"
INDEXING_COMPLETE = "indexing_complete"
PERMISSION_SYNC_PROGRESS = "permission_sync_progress"
PERMISSION_SYNC_COMPLETE = "permission_sync_complete"
INDEX_ATTEMPT_STATUS = "index_attempt_status"

View File

@@ -887,7 +887,6 @@ def main() -> None:
type=int,
help="Maximum number of documents to delete (for delete-all-documents)",
)
parser.add_argument("--link", help="Document link (for get_acls filter)")
args = parser.parse_args()
vespa_debug = VespaDebugging(args.tenant_id)
@@ -925,11 +924,7 @@ def main() -> None:
elif args.action == "get_acls":
if args.cc_pair_id is None:
parser.error("--cc-pair-id is required for get_acls action")
if args.link is None:
vespa_debug.acls(args.cc_pair_id, args.n)
else:
vespa_debug.acls_by_link(args.cc_pair_id, args.link)
vespa_debug.acls(args.cc_pair_id, args.n)
if __name__ == "__main__":

View File

@@ -2,8 +2,6 @@ import json
import os
import time
from pathlib import Path
from unittest.mock import MagicMock
from unittest.mock import patch
import pytest
@@ -42,13 +40,10 @@ def highspot_connector() -> HighspotConnector:
return connector
@patch(
"onyx.file_processing.extract_file_text.get_unstructured_api_key",
return_value=None,
@pytest.mark.xfail(
reason="Accessing postgres that isn't available in connector only tests",
)
def test_highspot_connector_basic(
mock_get_api_key: MagicMock, highspot_connector: HighspotConnector
) -> None:
def test_highspot_connector_basic(highspot_connector: HighspotConnector) -> None:
"""Test basic functionality of the Highspot connector."""
all_docs: list[Document] = []
test_data = load_test_data()
@@ -81,13 +76,10 @@ def test_highspot_connector_basic(
assert len(section.text) > 0
@patch(
"onyx.file_processing.extract_file_text.get_unstructured_api_key",
return_value=None,
@pytest.mark.xfail(
reason="Possibly accessing postgres that isn't available in connector only tests",
)
def test_highspot_connector_slim(
mock_get_api_key: MagicMock, highspot_connector: HighspotConnector
) -> None:
def test_highspot_connector_slim(highspot_connector: HighspotConnector) -> None:
"""Test slim document retrieval."""
# Get all doc IDs from the full connector
all_full_doc_ids = set()

View File

@@ -313,29 +313,3 @@ class UserManager:
)
response.raise_for_status()
return UserInfo(**response.json())
@staticmethod
def invite_users(
user_performing_action: DATestUser,
emails: list[str],
) -> int:
response = requests.put(
url=f"{API_SERVER_URL}/manage/admin/users",
json={"emails": emails},
headers=user_performing_action.headers,
)
response.raise_for_status()
return response.json()
@staticmethod
def remove_invited_user(
user_performing_action: DATestUser,
user_email: str,
) -> int:
response = requests.patch(
url=f"{API_SERVER_URL}/manage/admin/remove-invited-user",
json={"user_email": user_email},
headers=user_performing_action.headers,
)
response.raise_for_status()
return response.json()

View File

@@ -22,7 +22,6 @@ from onyx.document_index.document_index_utils import get_multipass_config
from onyx.document_index.vespa.index import DOCUMENT_ID_ENDPOINT
from onyx.document_index.vespa.index import VespaIndex
from onyx.indexing.models import IndexingSetting
from onyx.redis.redis_pool import get_redis_client
from onyx.setup import setup_postgres
from onyx.setup import setup_vespa
from onyx.utils.logger import setup_logger
@@ -238,12 +237,6 @@ def reset_vespa() -> None:
time.sleep(5)
def reset_redis() -> None:
"""Reset the Redis database."""
redis_client = get_redis_client()
redis_client.flushall()
def reset_postgres_multitenant() -> None:
"""Reset the Postgres database for all tenants in a multitenant setup."""
@@ -348,8 +341,6 @@ def reset_all() -> None:
reset_postgres()
logger.info("Resetting Vespa...")
reset_vespa()
logger.info("Resetting Redis...")
reset_redis()
def reset_all_multitenant() -> None:

View File

@@ -1,38 +0,0 @@
import pytest
from requests import HTTPError
from onyx.auth.schemas import UserRole
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.test_models import DATestUser
def test_inviting_users_flow(reset: None) -> None:
"""
Test that verifies the functionality around inviting users:
1. Creating an admin user
2. Admin inviting a new user
3. Invited user successfully signing in
4. Non-invited user attempting to sign in (should result in an error)
"""
# 1) Create an admin user (the first user created is automatically admin)
admin_user: DATestUser = UserManager.create(name="admin_user")
assert admin_user is not None
assert UserManager.is_role(admin_user, UserRole.ADMIN)
# 2) Admin invites a new user
invited_email = "invited_user@test.com"
invite_response = UserManager.invite_users(admin_user, [invited_email])
assert invite_response == 1
# 3) The invited user successfully registers/logs in
invited_user: DATestUser = UserManager.create(
name="invited_user", email=invited_email
)
assert invited_user is not None
assert invited_user.email == invited_email
assert UserManager.is_role(invited_user, UserRole.BASIC)
# 4) A non-invited user attempts to sign in/register (should fail)
with pytest.raises(HTTPError):
UserManager.create(name="uninvited_user", email="uninvited_user@test.com")

View File

@@ -42,6 +42,7 @@ ENV NEXT_PUBLIC_DISABLE_STREAMING=${NEXT_PUBLIC_DISABLE_STREAMING}
ARG NEXT_PUBLIC_NEW_CHAT_DIRECTS_TO_SAME_PERSONA
ENV NEXT_PUBLIC_NEW_CHAT_DIRECTS_TO_SAME_PERSONA=${NEXT_PUBLIC_NEW_CHAT_DIRECTS_TO_SAME_PERSONA}
# allow user to specify custom feedback options
ARG NEXT_PUBLIC_POSITIVE_PREDEFINED_FEEDBACK_OPTIONS
ENV NEXT_PUBLIC_POSITIVE_PREDEFINED_FEEDBACK_OPTIONS=${NEXT_PUBLIC_POSITIVE_PREDEFINED_FEEDBACK_OPTIONS}

web/package-lock.json (generated): 12200 changed lines; file diff suppressed because it is too large.

View File

@@ -93,12 +93,11 @@
},
"devDependencies": {
"@chromatic-com/playwright": "^0.10.2",
"@playwright/test": "^1.39.0",
"@tailwindcss/typography": "^0.5.10",
"@types/chrome": "^0.0.287",
"@types/jest": "^29.5.14",
"chromatic": "^11.25.2",
"eslint": "^8.57.1",
"eslint": "^8.48.0",
"eslint-config-next": "^14.1.0",
"jest": "^29.7.0",
"prettier": "2.8.8",

View File

@@ -17,7 +17,7 @@ export default function PostHogPageView(): null {
// Track pageviews
if (pathname) {
let url = window.origin + pathname;
if (searchParams?.toString()) {
if (searchParams.toString()) {
url = url + `?${searchParams.toString()}`;
}
posthog.capture("$pageview", {

View File

@@ -149,7 +149,7 @@ export function AssistantEditor({
const { refreshAssistants, isImageGenerationAvailable } = useAssistants();
const router = useRouter();
const searchParams = useSearchParams();
const isAdminPage = searchParams?.get("admin") === "true";
const isAdminPage = searchParams.get("admin") === "true";
const { popup, setPopup } = usePopup();
const { labels, refreshLabels, createLabel, updateLabel, deleteLabel } =
@@ -469,14 +469,8 @@ export function AssistantEditor({
description: Yup.string().required(
"Must provide a description for the Assistant"
),
system_prompt: Yup.string().max(
8000,
"Instructions must be less than 8000 characters"
),
task_prompt: Yup.string().max(
8000,
"Reminders must be less than 8000 characters"
),
system_prompt: Yup.string(),
task_prompt: Yup.string(),
is_public: Yup.boolean().required(),
document_set_ids: Yup.array().of(Yup.number()),
num_chunks: Yup.number().nullable(),

View File

@@ -302,17 +302,11 @@ export default function AddConnector({
...connector_specific_config
} = values;
// Apply special transforms according to application logic
// Apply transforms from connectors.ts configuration
const transformedConnectorSpecificConfig = Object.entries(
connector_specific_config
).reduce(
(acc, [key, value]) => {
// Filter out empty strings from arrays
if (Array.isArray(value)) {
value = (value as any[]).filter(
(item) => typeof item !== "string" || item.trim() !== ""
);
}
const matchingConfigValue = configuration.values.find(
(configValue) => configValue.name === key
);

View File

@@ -26,8 +26,8 @@ export default function OAuthCallbackPage() {
);
// Extract query parameters
const code = searchParams?.get("code");
const state = searchParams?.get("state");
const code = searchParams.get("code");
const state = searchParams.get("state");
const pathname = usePathname();
const connector = pathname?.split("/")[3];

View File

@@ -4,6 +4,7 @@ import { useEffect, useState } from "react";
import { usePathname, useRouter, useSearchParams } from "next/navigation";
import { AdminPageTitle } from "@/components/admin/Title";
import { Button } from "@/components/ui/button";
import Title from "@/components/ui/title";
import { KeyIcon } from "@/components/icons/icons";
import { getSourceMetadata, isValidSource } from "@/lib/sources";
import { ConfluenceAccessibleResource, ValidSources } from "@/lib/types";
@@ -73,7 +74,7 @@ export default function OAuthFinalizePage() {
>([]);
// Extract query parameters
const credentialParam = searchParams?.get("credential");
const credentialParam = searchParams.get("credential");
const credential = credentialParam ? parseInt(credentialParam, 10) : NaN;
const pathname = usePathname();
const connector = pathname?.split("/")[3];
@@ -84,7 +85,7 @@ export default function OAuthFinalizePage() {
// connector (url segment)= "google-drive"
// sourceType (for looking up metadata) = "google_drive"
if (isNaN(credential) || !connector) {
if (isNaN(credential)) {
setStatusMessage("Improperly formed OAuth finalization request.");
setStatusDetails("Invalid or missing credential id.");
setIsError(true);

View File

@@ -76,7 +76,7 @@ const RerankingDetailsForm = forwardRef<
function (value) {
const { rerank_provider_type } = this.parent;
return (
rerank_provider_type === RerankerProvider.LITELLM ||
rerank_provider_type !== RerankerProvider.COHERE ||
(value !== null && value !== "")
);
}
@@ -457,7 +457,7 @@ const RerankingDetailsForm = forwardRef<
...values,
rerank_api_key: value,
});
setFieldValue("rerank_api_key", value);
setFieldValue("api_key", value);
}}
type="password"
label={

View File

@@ -23,8 +23,8 @@ const ResetPasswordPage: React.FC = () => {
const { popup, setPopup } = usePopup();
const [isWorking, setIsWorking] = useState(false);
const searchParams = useSearchParams();
const token = searchParams?.get("token");
const tenantId = searchParams?.get(TENANT_ID_COOKIE_NAME);
const token = searchParams.get("token");
const tenantId = searchParams.get(TENANT_ID_COOKIE_NAME);
// Keep search param same name as cookie for simplicity
useEffect(() => {

View File

@@ -15,9 +15,9 @@ export function Verify({ user }: { user: User | null }) {
const [error, setError] = useState("");
const verify = useCallback(async () => {
const token = searchParams?.get("token");
const token = searchParams.get("token");
const firstUser =
searchParams?.get("first_user") && NEXT_PUBLIC_CLOUD_ENABLED;
searchParams.get("first_user") && NEXT_PUBLIC_CLOUD_ENABLED;
if (!token) {
setError(
"Missing verification token. Try requesting a new verification email."

View File

@@ -196,9 +196,7 @@ export function ChatPage({
setCurrentMessageFiles,
} = useDocumentsContext();
const defaultAssistantIdRaw = searchParams?.get(
SEARCH_PARAM_NAMES.PERSONA_ID
);
const defaultAssistantIdRaw = searchParams.get(SEARCH_PARAM_NAMES.PERSONA_ID);
const defaultAssistantId = defaultAssistantIdRaw
? parseInt(defaultAssistantIdRaw)
: undefined;
@@ -254,8 +252,8 @@ export function ChatPage({
);
const { user, isAdmin } = useUser();
const slackChatId = searchParams?.get("slackChatId");
const existingChatIdRaw = searchParams?.get("chatId");
const slackChatId = searchParams.get("slackChatId");
const existingChatIdRaw = searchParams.get("chatId");
const [showHistorySidebar, setShowHistorySidebar] = useState(false);
@@ -277,7 +275,7 @@ export function ChatPage({
const processSearchParamsAndSubmitMessage = (searchParamsString: string) => {
const newSearchParams = new URLSearchParams(searchParamsString);
const message = newSearchParams?.get("user-prompt");
const message = newSearchParams.get("user-prompt");
filterManager.buildFiltersFromQueryString(
newSearchParams.toString(),
@@ -286,7 +284,7 @@ export function ChatPage({
tags
);
const fileDescriptorString = newSearchParams?.get(SEARCH_PARAM_NAMES.FILES);
const fileDescriptorString = newSearchParams.get(SEARCH_PARAM_NAMES.FILES);
const overrideFileDescriptors: FileDescriptor[] = fileDescriptorString
? JSON.parse(decodeURIComponent(fileDescriptorString))
: [];
@@ -326,7 +324,7 @@ export function ChatPage({
: undefined
);
// Gather default temperature settings
const search_param_temperature = searchParams?.get(
const search_param_temperature = searchParams.get(
SEARCH_PARAM_NAMES.TEMPERATURE
);
@@ -553,7 +551,7 @@ export function ChatPage({
if (
newMessageHistory.length === 1 &&
!submitOnLoadPerformed.current &&
searchParams?.get(SEARCH_PARAM_NAMES.SEEDED) === "true"
searchParams.get(SEARCH_PARAM_NAMES.SEEDED) === "true"
) {
submitOnLoadPerformed.current = true;
const seededMessage = newMessageHistory[0].message;
@@ -574,11 +572,11 @@ export function ChatPage({
initialSessionFetch();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [existingChatSessionId, searchParams?.get(SEARCH_PARAM_NAMES.PERSONA_ID)]);
}, [existingChatSessionId, searchParams.get(SEARCH_PARAM_NAMES.PERSONA_ID)]);
useEffect(() => {
const userFolderId = searchParams?.get(SEARCH_PARAM_NAMES.USER_FOLDER_ID);
const allMyDocuments = searchParams?.get(
const userFolderId = searchParams.get(SEARCH_PARAM_NAMES.USER_FOLDER_ID);
const allMyDocuments = searchParams.get(
SEARCH_PARAM_NAMES.ALL_MY_DOCUMENTS
);
@@ -601,14 +599,14 @@ export function ChatPage({
}
}, [
userFolders,
searchParams?.get(SEARCH_PARAM_NAMES.USER_FOLDER_ID),
searchParams?.get(SEARCH_PARAM_NAMES.ALL_MY_DOCUMENTS),
searchParams.get(SEARCH_PARAM_NAMES.USER_FOLDER_ID),
searchParams.get(SEARCH_PARAM_NAMES.ALL_MY_DOCUMENTS),
addSelectedFolder,
clearSelectedItems,
]);
const [message, setMessage] = useState(
searchParams?.get(SEARCH_PARAM_NAMES.USER_PROMPT) || ""
searchParams.get(SEARCH_PARAM_NAMES.USER_PROMPT) || ""
);
const [completeMessageDetail, setCompleteMessageDetail] = useState<
@@ -1050,7 +1048,7 @@ export function ChatPage({
// Equivalent to `loadNewPageLogic`
useEffect(() => {
if (searchParams?.get(SEARCH_PARAM_NAMES.SEND_ON_LOAD)) {
if (searchParams.get(SEARCH_PARAM_NAMES.SEND_ON_LOAD)) {
processSearchParamsAndSubmitMessage(searchParams.toString());
}
}, [searchParams, router]);
@@ -1233,7 +1231,7 @@ export function ChatPage({
const isNewSession = chatSessionIdRef.current === null;
const searchParamBasedChatSessionName =
searchParams?.get(SEARCH_PARAM_NAMES.TITLE) || null;
searchParams.get(SEARCH_PARAM_NAMES.TITLE) || null;
if (isNewSession) {
currChatSessionId = await createChatSession(
@@ -1411,11 +1409,11 @@ export function ChatPage({
modelVersion:
modelOverride?.modelName ||
llmManager.currentLlm.modelName ||
searchParams?.get(SEARCH_PARAM_NAMES.MODEL_VERSION) ||
searchParams.get(SEARCH_PARAM_NAMES.MODEL_VERSION) ||
undefined,
temperature: llmManager.temperature || undefined,
systemPromptOverride:
searchParams?.get(SEARCH_PARAM_NAMES.SYSTEM_PROMPT) || undefined,
searchParams.get(SEARCH_PARAM_NAMES.SYSTEM_PROMPT) || undefined,
useExistingUserMessage: isSeededChat,
useLanggraph:
settings?.settings.pro_search_enabled &&

View File

@@ -4,62 +4,177 @@ import {
LlmDescriptor,
useLlmManager,
} from "@/lib/hooks";
import { StringOrNumberOption } from "@/components/Dropdown";
import { Persona } from "@/app/admin/assistants/interfaces";
import { destructureValue } from "@/lib/llm/utils";
import { destructureValue, getFinalLLM, structureValue } from "@/lib/llm/utils";
import { useState } from "react";
import { Hoverable } from "@/components/Hoverable";
import { Popover } from "@/components/popover/Popover";
import { IconType } from "react-icons";
import { FiRefreshCw } from "react-icons/fi";
import LLMPopover from "./input/LLMPopover";
import { FiRefreshCw, FiCheck } from "react-icons/fi";
export default function RegenerateOption({
selectedAssistant,
regenerate,
overriddenModel,
export function RegenerateDropdown({
options,
selected,
onSelect,
side,
maxHeight,
alternate,
onDropdownVisibleChange,
}: {
selectedAssistant: Persona;
regenerate: (modelOverRide: LlmDescriptor) => Promise<void>;
overriddenModel?: string;
alternate?: string;
options: StringOrNumberOption[];
selected: string | null;
onSelect: (value: string | number | null) => void;
includeDefault?: boolean;
side?: "top" | "right" | "bottom" | "left";
maxHeight?: string;
onDropdownVisibleChange: (isVisible: boolean) => void;
}) {
const { llmProviders } = useChatContext();
const llmManager = useLlmManager(llmProviders);
const [isOpen, setIsOpen] = useState(false);
const toggleDropdownVisible = (isVisible: boolean) => {
setIsOpen(isVisible);
onDropdownVisibleChange(isVisible);
};
const Dropdown = (
<div className="overflow-y-auto border border-neutral-800 py-2 min-w-fit bg-neutral-50 dark:bg-neutral-900 rounded-md shadow-lg">
<div className="mb-1 flex items-center justify-between px-4 pt-2">
<span className="text-sm text-neutral-600 dark:text-neutral-400">
Regenerate with
</span>
</div>
{options.map((option) => (
<div
key={option.value}
role="menuitem"
className={`flex items-center m-1.5 p-1.5 text-sm cursor-pointer focus-visible:outline-0 group relative hover:bg-neutral-200 dark:hover:bg-neutral-800 rounded-md my-0 px-3 mx-2 gap-2.5 py-3 !pr-3 ${
option.value === selected
? "bg-neutral-200 dark:bg-neutral-800"
: ""
}`}
onClick={() => onSelect(option.value)}
>
<div className="flex grow items-center justify-between gap-2">
<div>
<div className="flex items-center gap-3">
<div>{getDisplayNameForModel(option.name)}</div>
</div>
</div>
</div>
{option.value === selected && (
<FiCheck className="text-neutral-700 dark:text-neutral-300" />
)}
</div>
))}
</div>
);
return (
<LLMPopover
llmManager={llmManager}
llmProviders={llmProviders}
requiresImageGeneration={false}
currentAssistant={selectedAssistant}
currentModelName={overriddenModel}
trigger={
<Popover
open={isOpen}
onOpenChange={toggleDropdownVisible}
content={
<div onClick={() => toggleDropdownVisible(!isOpen)}>
{!overriddenModel ? (
{!alternate ? (
<Hoverable size={16} icon={FiRefreshCw as IconType} />
) : (
<Hoverable
size={16}
icon={FiRefreshCw as IconType}
hoverText={getDisplayNameForModel(overriddenModel)}
hoverText={getDisplayNameForModel(alternate)}
/>
)}
</div>
}
onSelect={(value) => {
const { name, provider, modelName } = destructureValue(value as string);
regenerate({
name: name,
provider: provider,
modelName: modelName,
});
}}
popover={Dropdown}
align="start"
side={side}
sideOffset={5}
triggerMaxWidth
/>
);
}
export default function RegenerateOption({
selectedAssistant,
regenerate,
overriddenModel,
onHoverChange,
onDropdownVisibleChange,
}: {
selectedAssistant: Persona;
regenerate: (modelOverRide: LlmDescriptor) => Promise<void>;
overriddenModel?: string;
onHoverChange: (isHovered: boolean) => void;
onDropdownVisibleChange: (isVisible: boolean) => void;
}) {
const { llmProviders } = useChatContext();
const llmManager = useLlmManager(llmProviders);
const [_, llmName] = getFinalLLM(llmProviders, selectedAssistant, null);
const llmOptionsByProvider: {
[provider: string]: { name: string; value: string }[];
} = {};
const uniqueModelNames = new Set<string>();
llmProviders.forEach((llmProvider) => {
if (!llmOptionsByProvider[llmProvider.provider]) {
llmOptionsByProvider[llmProvider.provider] = [];
}
(llmProvider.display_model_names || llmProvider.model_names).forEach(
(modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
});
}
}
);
});
const llmOptions = Object.entries(llmOptionsByProvider).flatMap(
([provider, options]) => [...options]
);
const currentModelName =
llmManager?.currentLlm.modelName ||
(selectedAssistant
? selectedAssistant.llm_model_version_override || llmName
: llmName);
return (
<div
className="group flex items-center relative"
onMouseEnter={() => onHoverChange(true)}
onMouseLeave={() => onHoverChange(false)}
>
<RegenerateDropdown
onDropdownVisibleChange={onDropdownVisibleChange}
alternate={overriddenModel}
options={llmOptions}
selected={currentModelName}
onSelect={(value) => {
const { name, provider, modelName } = destructureValue(
value as string
);
regenerate({
name: name,
provider: provider,
modelName: modelName,
});
}}
/>
</div>
);
}

View File

@@ -6,7 +6,7 @@ import { Persona } from "@/app/admin/assistants/interfaces";
import LLMPopover from "./LLMPopover";
import { InputPrompt } from "@/app/chat/interfaces";
import { FilterManager, getDisplayNameForModel, LlmManager } from "@/lib/hooks";
import { FilterManager, LlmManager } from "@/lib/hooks";
import { useChatContext } from "@/components/context/ChatContext";
import { ChatFileType, FileDescriptor } from "../interfaces";
import {
@@ -38,7 +38,6 @@ import { useUser } from "@/components/user/UserProvider";
import { useDocumentSelection } from "../useDocumentSelection";
import { AgenticToggle } from "./AgenticToggle";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { getProviderIcon } from "@/app/admin/configuration/llm/interfaces";
import { LoadingIndicator } from "react-select/dist/declarations/src/components/indicators";
import { FidgetSpinner } from "react-loader-spinner";
import { LoadingAnimation } from "@/components/Loading";
@@ -800,27 +799,6 @@ export function ChatInputBar({
llmManager={llmManager}
requiresImageGeneration={false}
currentAssistant={selectedAssistant}
trigger={
<button
className="dark:text-white text-black focus:outline-none"
data-testid="llm-popover-trigger"
>
<ChatInputOption
minimize
toggle
flexPriority="stiff"
name={getDisplayNameForModel(
llmManager?.currentLlm.modelName || "Models"
)}
Icon={getProviderIcon(
llmManager?.currentLlm.provider || "anthropic",
llmManager?.currentLlm.modelName ||
"claude-3-5-sonnet-20240620"
)}
tooltipContent="Switch models"
/>
</button>
}
/>
{retrievalEnabled && (

View File

@@ -1,9 +1,16 @@
import React, { useState, useEffect, useCallback, useMemo } from "react";
import React, {
useState,
useEffect,
useCallback,
useLayoutEffect,
useMemo,
} from "react";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import { ChatInputOption } from "./ChatInputOption";
import { getDisplayNameForModel } from "@/lib/hooks";
import {
checkLLMSupportsImageInput,
@@ -28,16 +35,12 @@ import { FiAlertTriangle } from "react-icons/fi";
import { Slider } from "@/components/ui/slider";
import { useUser } from "@/components/user/UserProvider";
import { TruncatedText } from "@/components/ui/truncatedText";
import { ChatInputOption } from "./ChatInputOption";
interface LLMPopoverProps {
llmProviders: LLMProviderDescriptor[];
llmManager: LlmManager;
requiresImageGeneration?: boolean;
currentAssistant?: Persona;
trigger?: React.ReactElement;
onSelect?: (value: string) => void;
currentModelName?: string;
}
export default function LLMPopover({
@@ -45,69 +48,70 @@ export default function LLMPopover({
llmManager,
requiresImageGeneration,
currentAssistant,
trigger,
onSelect,
currentModelName,
}: LLMPopoverProps) {
const [isOpen, setIsOpen] = useState(false);
const { user } = useUser();
// Memoize the options to prevent unnecessary recalculations
const { llmOptions, defaultProvider, defaultModelDisplayName } =
useMemo(() => {
const llmOptionsByProvider: {
[provider: string]: {
name: string;
value: string;
icon: React.FC<{ size?: number; className?: string }>;
}[];
} = {};
const {
llmOptionsByProvider,
llmOptions,
defaultProvider,
defaultModelDisplayName,
} = useMemo(() => {
const llmOptionsByProvider: {
[provider: string]: {
name: string;
value: string;
icon: React.FC<{ size?: number; className?: string }>;
}[];
} = {};
const uniqueModelNames = new Set<string>();
const uniqueModelNames = new Set<string>();
llmProviders.forEach((llmProvider) => {
if (!llmOptionsByProvider[llmProvider.provider]) {
llmOptionsByProvider[llmProvider.provider] = [];
}
llmProviders.forEach((llmProvider) => {
if (!llmOptionsByProvider[llmProvider.provider]) {
llmOptionsByProvider[llmProvider.provider] = [];
}
(llmProvider.display_model_names || llmProvider.model_names).forEach(
(modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
icon: getProviderIcon(llmProvider.provider, modelName),
});
}
(llmProvider.display_model_names || llmProvider.model_names).forEach(
(modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
icon: getProviderIcon(llmProvider.provider, modelName),
});
}
);
});
const llmOptions = Object.entries(llmOptionsByProvider).flatMap(
([provider, options]) => [...options]
}
);
});
const defaultProvider = llmProviders.find(
(llmProvider) => llmProvider.is_default_provider
);
const llmOptions = Object.entries(llmOptionsByProvider).flatMap(
([provider, options]) => [...options]
);
const defaultModelName = defaultProvider?.default_model_name;
const defaultModelDisplayName = defaultModelName
? getDisplayNameForModel(defaultModelName)
: null;
const defaultProvider = llmProviders.find(
(llmProvider) => llmProvider.is_default_provider
);
return {
llmOptionsByProvider,
llmOptions,
defaultProvider,
defaultModelDisplayName,
};
}, [llmProviders]);
const defaultModelName = defaultProvider?.default_model_name;
const defaultModelDisplayName = defaultModelName
? getDisplayNameForModel(defaultModelName)
: null;
return {
llmOptionsByProvider,
llmOptions,
defaultProvider,
defaultModelDisplayName,
};
}, [llmProviders]);
const [localTemperature, setLocalTemperature] = useState(
llmManager.temperature ?? 0.5
@@ -131,34 +135,32 @@ export default function LLMPopover({
// Memoize trigger content to prevent rerendering
const triggerContent = useMemo(
trigger
? () => trigger
: () => (
<button
className="dark:text-[#fff] text-[#000] focus:outline-none"
data-testid="llm-popover-trigger"
>
<ChatInputOption
minimize
toggle
flexPriority="stiff"
name={getDisplayNameForModel(
llmManager?.currentLlm.modelName ||
defaultModelDisplayName ||
"Models"
)}
Icon={getProviderIcon(
llmManager?.currentLlm.provider ||
defaultProvider?.provider ||
"anthropic",
llmManager?.currentLlm.modelName ||
defaultProvider?.default_model_name ||
"claude-3-5-sonnet-20240620"
)}
tooltipContent="Switch models"
/>
</button>
),
() => (
<button
className="dark:text-[#fff] text-[#000] focus:outline-none"
data-testid="llm-popover-trigger"
>
<ChatInputOption
minimize
toggle
flexPriority="stiff"
name={getDisplayNameForModel(
llmManager?.currentLlm.modelName ||
defaultModelDisplayName ||
"Models"
)}
Icon={getProviderIcon(
llmManager?.currentLlm.provider ||
defaultProvider?.provider ||
"anthropic",
llmManager?.currentLlm.modelName ||
defaultProvider?.default_model_name ||
"claude-3-5-sonnet-20240620"
)}
tooltipContent="Switch models"
/>
</button>
),
[defaultModelDisplayName, defaultProvider, llmManager?.currentLlm]
);
@@ -176,14 +178,12 @@ export default function LLMPopover({
<button
key={index}
className={`w-full flex items-center gap-x-2 px-3 py-2 text-sm text-left hover:bg-background-100 dark:hover:bg-neutral-800 transition-colors duration-150 ${
(currentModelName || llmManager.currentLlm.modelName) ===
name
llmManager.currentLlm.modelName === name
? "bg-background-100 dark:bg-neutral-900 text-text"
: "text-text-darker"
}`}
onClick={() => {
llmManager.updateCurrentLlm(destructureValue(value));
onSelect?.(value);
setIsOpen(false);
}}
>

View File

@@ -668,7 +668,7 @@ const PARAMS_TO_SKIP = [
];
export function buildChatUrl(
existingSearchParams: ReadonlyURLSearchParams | null,
existingSearchParams: ReadonlyURLSearchParams,
chatSessionId: string | null,
personaId: number | null,
search?: boolean
@@ -685,7 +685,7 @@ export function buildChatUrl(
finalSearchParams.push(`${SEARCH_PARAM_NAMES.PERSONA_ID}=${personaId}`);
}
existingSearchParams?.forEach((value, key) => {
existingSearchParams.forEach((value, key) => {
if (!PARAMS_TO_SKIP.includes(key)) {
finalSearchParams.push(`${key}=${value}`);
}
@@ -719,7 +719,7 @@ export async function uploadFilesForChat(
return [responseJson.files as FileDescriptor[], null];
}
export function useScrollonStream({
export async function useScrollonStream({
chatState,
scrollableDivRef,
scrollDist,
@@ -817,5 +817,5 @@ export function useScrollonStream({
});
}
}
}, [chatState, distance, scrollDist, scrollableDivRef, enableAutoScroll]);
}, [chatState, distance, scrollDist, scrollableDivRef]);
}
View File
@@ -178,6 +178,7 @@ export const AgenticMessage = ({
const [isViewingInitialAnswer, setIsViewingInitialAnswer] = useState(true);
const [canShowResponse, setCanShowResponse] = useState(isComplete);
const [isRegenerateHovered, setIsRegenerateHovered] = useState(false);
const [isRegenerateDropdownVisible, setIsRegenerateDropdownVisible] =
useState(false);
@@ -596,6 +597,7 @@ export const AgenticMessage = ({
onDropdownVisibleChange={
setIsRegenerateDropdownVisible
}
onHoverChange={setIsRegenerateHovered}
selectedAssistant={currentPersona!}
regenerate={regenerate}
overriddenModel={overriddenModel}
@@ -611,10 +613,16 @@ export const AgenticMessage = ({
absolute -bottom-5
z-10
invisible ${
(isHovering || settings?.isMobile) && "!visible"
(isHovering ||
isRegenerateHovered ||
settings?.isMobile) &&
"!visible"
}
opacity-0 ${
(isHovering || settings?.isMobile) && "!opacity-100"
(isHovering ||
isRegenerateHovered ||
settings?.isMobile) &&
"!opacity-100"
}
translate-y-2 ${
(isHovering || settings?.isMobile) &&
@@ -689,6 +697,7 @@ export const AgenticMessage = ({
}
regenerate={regenerate}
overriddenModel={overriddenModel}
onHoverChange={setIsRegenerateHovered}
/>
</CustomTooltip>
)}
View File
@@ -301,6 +301,7 @@ export const AIMessage = ({
const finalContent = processContent(content as string);
const [isRegenerateHovered, setIsRegenerateHovered] = useState(false);
const [isRegenerateDropdownVisible, setIsRegenerateDropdownVisible] =
useState(false);
const { isHovering, trackedElementRef, hoverElementRef } = useMouseTracking();
@@ -727,6 +728,7 @@ export const AIMessage = ({
onDropdownVisibleChange={
setIsRegenerateDropdownVisible
}
onHoverChange={setIsRegenerateHovered}
selectedAssistant={currentPersona!}
regenerate={regenerate}
overriddenModel={overriddenModel}
@@ -742,10 +744,16 @@ export const AIMessage = ({
absolute -bottom-5
z-10
invisible ${
(isHovering || settings?.isMobile) && "!visible"
(isHovering ||
isRegenerateHovered ||
settings?.isMobile) &&
"!visible"
}
opacity-0 ${
(isHovering || settings?.isMobile) && "!opacity-100"
(isHovering ||
isRegenerateHovered ||
settings?.isMobile) &&
"!opacity-100"
}
flex md:flex-row gap-x-0.5 bg-background-125/40 -mx-1.5 p-1.5 rounded-lg
`}
@@ -810,6 +818,7 @@ export const AIMessage = ({
}
regenerate={regenerate}
overriddenModel={overriddenModel}
onHoverChange={setIsRegenerateHovered}
/>
</CustomTooltip>
)}
View File
@@ -23,10 +23,8 @@ export const SEARCH_PARAM_NAMES = {
SEND_ON_LOAD: "send-on-load",
};
export function shouldSubmitOnLoad(
searchParams: ReadonlyURLSearchParams | null
) {
const rawSubmitOnLoad = searchParams?.get(SEARCH_PARAM_NAMES.SUBMIT_ON_LOAD);
export function shouldSubmitOnLoad(searchParams: ReadonlyURLSearchParams) {
const rawSubmitOnLoad = searchParams.get(SEARCH_PARAM_NAMES.SUBMIT_ON_LOAD);
if (rawSubmitOnLoad === "true" || rawSubmitOnLoad === "1") {
return true;
}
View File
@@ -104,7 +104,7 @@ export function UserDropdown({
// Construct the current URL
const currentUrl = `${pathname}${
searchParams?.toString() ? `?${searchParams.toString()}` : ""
searchParams.toString() ? `?${searchParams.toString()}` : ""
}`;
// Encode the current URL to use as a redirect parameter

View File

@@ -59,8 +59,8 @@ export function ClientLayout({
const { llmProviders } = useChatContext();
const { popup, setPopup } = usePopup();
if (
(pathname && pathname.startsWith("/admin/connectors")) ||
(pathname && pathname.startsWith("/admin/embeddings"))
pathname.startsWith("/admin/connectors") ||
pathname.startsWith("/admin/embeddings")
) {
return <>{children}</>;
}
View File
@@ -76,7 +76,7 @@ export const ChatProvider: React.FC<{
const { sessions } = await response.json();
setChatSessions(sessions);
const currentSessionId = searchParams?.get("chatId");
const currentSessionId = searchParams.get("chatId");
if (
currentSessionId &&
!sessions.some(
View File
@@ -34,7 +34,7 @@ export const EmbeddingFormProvider: React.FC<{
const pathname = usePathname();
// Initialize formStep based on the URL parameter
const initialStep = parseInt(searchParams?.get("step") || "0", 10);
const initialStep = parseInt(searchParams.get("step") || "0", 10);
const [formStep, setFormStep] = useState(initialStep);
const [formValues, setFormValues] = useState<Record<string, any>>({});
@@ -56,10 +56,8 @@ export const EmbeddingFormProvider: React.FC<{
useEffect(() => {
// Update URL when formStep changes
const updatedSearchParams = new URLSearchParams(
searchParams?.toString() || ""
);
const existingStep = updatedSearchParams?.get("step");
const updatedSearchParams = new URLSearchParams(searchParams.toString());
const existingStep = updatedSearchParams.get("step");
updatedSearchParams.set("step", formStep.toString());
const newUrl = `${pathname}?${updatedSearchParams.toString()}`;
@@ -72,7 +70,7 @@ export const EmbeddingFormProvider: React.FC<{
// Update formStep when URL changes
useEffect(() => {
const stepFromUrl = parseInt(searchParams?.get("step") || "0", 10);
const stepFromUrl = parseInt(searchParams.get("step") || "0", 10);
if (stepFromUrl !== formStep) {
setFormStep(stepFromUrl);
}
View File
@@ -34,7 +34,7 @@ export const FormProvider: React.FC<{
const pathname = usePathname();
// Initialize formStep based on the URL parameter
const initialStep = parseInt(searchParams?.get("step") || "0", 10);
const initialStep = parseInt(searchParams.get("step") || "0", 10);
const [formStep, setFormStep] = useState(initialStep);
const [formValues, setFormValues] = useState<Record<string, any>>({});
@@ -56,10 +56,8 @@ export const FormProvider: React.FC<{
useEffect(() => {
// Update URL when formStep changes
const updatedSearchParams = new URLSearchParams(
searchParams?.toString() || ""
);
const existingStep = updatedSearchParams?.get("step");
const updatedSearchParams = new URLSearchParams(searchParams.toString());
const existingStep = updatedSearchParams.get("step");
updatedSearchParams.set("step", formStep.toString());
const newUrl = `${pathname}?${updatedSearchParams.toString()}`;
@@ -71,7 +69,7 @@ export const FormProvider: React.FC<{
}, [formStep, router, pathname, searchParams]);
useEffect(() => {
const stepFromUrl = parseInt(searchParams?.get("step") || "0", 10);
const stepFromUrl = parseInt(searchParams.get("step") || "0", 10);
if (stepFromUrl !== formStep) {
setFormStep(stepFromUrl);
}
View File
@@ -35,7 +35,7 @@ export const HealthCheckBanner = () => {
useEffect(() => {
if (userError && userError.status === 403) {
logout().then(() => {
if (!pathname?.includes("/auth")) {
if (!pathname.includes("/auth")) {
setShowLoggedOutModal(true);
}
});
@@ -61,7 +61,7 @@ export const HealthCheckBanner = () => {
expirationTimeoutRef.current = setTimeout(() => {
setExpired(true);
if (!pathname?.includes("/auth")) {
if (!pathname.includes("/auth")) {
setShowLoggedOutModal(true);
}
}, timeUntilExpire);
@@ -205,7 +205,7 @@ export const HealthCheckBanner = () => {
}
if (error instanceof RedirectError || expired) {
if (!pathname?.includes("/auth")) {
if (!pathname.includes("/auth")) {
setShowLoggedOutModal(true);
}
return null;
View File
@@ -19,12 +19,12 @@ function setWelcomeFlowComplete() {
Cookies.set(COMPLETED_WELCOME_FLOW_COOKIE, "true", { expires: 365 });
}
export function CompletedWelcomeFlowDummyComponent() {
export function _CompletedWelcomeFlowDummyComponent() {
setWelcomeFlowComplete();
return null;
}
export function WelcomeModal({ user }: { user: User | null }) {
export function _WelcomeModal({ user }: { user: User | null }) {
const router = useRouter();
const [providerOptions, setProviderOptions] = useState<
View File
@@ -1,6 +1,6 @@
import {
CompletedWelcomeFlowDummyComponent,
WelcomeModal as WelcomeModalComponent,
_CompletedWelcomeFlowDummyComponent,
_WelcomeModal,
} from "./WelcomeModal";
import { COMPLETED_WELCOME_FLOW_COOKIE } from "./constants";
import { User } from "@/lib/types";
@@ -24,8 +24,8 @@ export function WelcomeModal({
}) {
const hasCompletedWelcomeFlow = hasCompletedWelcomeFlowSS(requestCookies);
if (hasCompletedWelcomeFlow) {
return <CompletedWelcomeFlowDummyComponent />;
return <_CompletedWelcomeFlowDummyComponent />;
}
return <WelcomeModalComponent user={user} />;
return <_WelcomeModal user={user} />;
}
View File
@@ -31,13 +31,13 @@ export function NewTeamModal() {
const { setPopup } = usePopup();
useEffect(() => {
const hasNewTeamParam = searchParams?.has("new_team");
const hasNewTeamParam = searchParams.has("new_team");
if (hasNewTeamParam) {
setShowNewTeamModal(true);
fetchTenantInfo();
// Remove the new_team parameter from the URL without page reload
const newParams = new URLSearchParams(searchParams?.toString() || "");
const newParams = new URLSearchParams(searchParams.toString());
newParams.delete("new_team");
const newUrl =
window.location.pathname +
View File
@@ -16,7 +16,7 @@ export const usePopupFromQuery = (messages: PopupMessages) => {
const searchParams = new URLSearchParams(window.location.search);
// Get the value for search param with key "message"
const messageValue = searchParams?.get("message");
const messageValue = searchParams.get("message");
// Check if any key from messages object is present in search params
if (messageValue && messageValue in messages) {
View File
@@ -148,7 +148,7 @@ function usePaginatedFetch<T extends PaginatedType>({
// Updates the URL with the current page number
const updatePageUrl = useCallback(
(page: number) => {
if (currentPath && searchParams) {
if (currentPath) {
const params = new URLSearchParams(searchParams);
params.set("page", page.toString());
router.replace(`${currentPath}?${params.toString()}`, {
View File
@@ -1333,10 +1333,10 @@ export function createConnectorValidationSchema(
): Yup.ObjectSchema<Record<string, any>> {
const configuration = connectorConfigs[connector];
const object = Yup.object().shape({
return Yup.object().shape({
access_type: Yup.string().required("Access Type is required"),
name: Yup.string().required("Connector Name is required"),
...[...configuration.values, ...configuration.advanced_values].reduce(
...configuration.values.reduce(
(acc, field) => {
let schema: any =
field.type === "select"
@@ -1363,8 +1363,6 @@ export function createConnectorValidationSchema(
pruneFreq: Yup.number().min(0, "Prune frequency must be non-negative"),
refreshFreq: Yup.number().min(0, "Refresh frequency must be non-negative"),
});
return object;
}
export const defaultPruneFreqDays = 30; // 30 days