Compare commits

..

13 Commits

Author SHA1 Message Date
pablodanswer 2f350ac209 minutes -> seconds 2025-01-25 17:57:10 -08:00
pablodanswer 76b97b0e06 update 2025-01-25 17:56:25 -08:00
pablodanswer de98775b43 improvements to monitoring 2025-01-25 17:44:21 -08:00
pablodanswer 95b89863e4 functioning 2025-01-25 14:47:43 -08:00
pablodanswer 25c1f16e5b quick nit 2025-01-25 14:31:47 -08:00
pablodanswer 8822b37dad update values 2025-01-25 14:29:17 -08:00
pablodanswer ee5752f3d5 fix typing 2025-01-25 14:04:09 -08:00
pablodanswer ebf06ee528 quick nit 2025-01-25 14:02:27 -08:00
pablodanswer d56a089370 connector deletion 2025-01-25 13:50:49 -08:00
pablodanswer b6fab0687a quick updates to monitoring 2025-01-25 13:47:26 -08:00
pablodanswer 3ada740f3c typing 2025-01-24 13:36:46 -08:00
pablodanswer d8e9e56526 additional comment for clarity 2025-01-24 13:35:13 -08:00
pablodanswer 9439628890 minor improvments / clarity 2025-01-24 13:33:53 -08:00
26 changed files with 623 additions and 583 deletions

View File

@@ -9,9 +9,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Check PR body for Linear link or override
env:
PR_BODY: ${{ github.event.pull_request.body }}
run: |
PR_BODY="${{ github.event.pull_request.body }}"
# Looking for "https://linear.app" in the body
if echo "$PR_BODY" | grep -qE "https://linear\.app"; then
echo "Found a Linear link. Check passed."

View File

@@ -9,10 +9,8 @@ founders@onyx.app for more information. Please visit https://github.com/onyx-dot
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.8-dev
# DO_NOT_TRACK is used to disable telemetry for Unstructured
ENV ONYX_VERSION=${ONYX_VERSION} \
DANSWER_RUNNING_IN_DOCKER="true" \
DO_NOT_TRACK="true"
DANSWER_RUNNING_IN_DOCKER="true"
RUN echo "ONYX_VERSION: ${ONYX_VERSION}"

View File

@@ -221,7 +221,7 @@ if not MULTI_TENANT:
{
"name": "monitor-background-processes",
"task": OnyxCeleryTask.MONITOR_BACKGROUND_PROCESSES,
"schedule": timedelta(minutes=5),
"schedule": timedelta(minutes=15),
"options": {
"priority": OnyxCeleryPriority.LOW,
"expires": BEAT_EXPIRES_DEFAULT,

View File

@@ -139,13 +139,6 @@ def try_generate_document_cc_pair_cleanup_tasks(
submitted=datetime.now(timezone.utc),
)
# create before setting fence to avoid race condition where the monitoring
# task updates the sync record before it is created
insert_sync_record(
db_session=db_session,
entity_id=cc_pair_id,
sync_type=SyncType.CONNECTOR_DELETION,
)
redis_connector.delete.set_fence(fence_payload)
try:
@@ -184,6 +177,13 @@ def try_generate_document_cc_pair_cleanup_tasks(
)
if tasks_generated is None:
raise ValueError("RedisConnectorDeletion.generate_tasks returned None")
insert_sync_record(
db_session=db_session,
entity_id=cc_pair_id,
sync_type=SyncType.CONNECTOR_DELETION,
)
except TaskDependencyError:
redis_connector.delete.set_fence(None)
raise
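The two hunks above move sync-record creation from before the fence is set to after task generation succeeds. A condensed, illustrative sketch of the resulting order (arguments elided, not the literal function body):

    redis_connector.delete.set_fence(fence_payload)
    try:
        tasks_generated = redis_connector.delete.generate_tasks(...)  # args elided
        if tasks_generated is None:
            raise ValueError("RedisConnectorDeletion.generate_tasks returned None")
        # The sync record is now created only once tasks were generated successfully
        insert_sync_record(
            db_session=db_session,
            entity_id=cc_pair_id,
            sync_type=SyncType.CONNECTOR_DELETION,
        )
    except TaskDependencyError:
        redis_connector.delete.set_fence(None)
        raise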

View File

@@ -4,6 +4,7 @@ from collections.abc import Callable
from datetime import timedelta
from itertools import islice
from typing import Any
from typing import Literal
from celery import shared_task
from celery import Task
@@ -26,6 +27,7 @@ from onyx.db.engine import get_all_tenant_ids
from onyx.db.engine import get_db_current_time
from onyx.db.engine import get_session_with_tenant
from onyx.db.enums import IndexingStatus
from onyx.db.enums import SyncStatus
from onyx.db.enums import SyncType
from onyx.db.models import ConnectorCredentialPair
from onyx.db.models import DocumentSet
@@ -38,6 +40,7 @@ from onyx.redis.redis_pool import redis_lock_dump
from onyx.utils.telemetry import optional_telemetry
from onyx.utils.telemetry import RecordType
_MONITORING_SOFT_TIME_LIMIT = 60 * 5 # 5 minutes
_MONITORING_TIME_LIMIT = _MONITORING_SOFT_TIME_LIMIT + 60 # 6 minutes
@@ -49,6 +52,12 @@ _CONNECTOR_INDEX_ATTEMPT_RUN_SUCCESS_KEY_FMT = (
"monitoring_connector_index_attempt_run_success:{cc_pair_id}:{index_attempt_id}"
)
_FINAL_METRIC_KEY_FMT = "sync_final_metrics:{sync_type}:{entity_id}:{sync_record_id}"
_SYNC_START_LATENCY_KEY_FMT = (
"sync_start_latency:{sync_type}:{entity_id}:{sync_record_id}"
)
def _mark_metric_as_emitted(redis_std: Redis, key: str) -> None:
"""Mark a metric as having been emitted by setting a Redis key with expiration"""
@@ -111,6 +120,7 @@ class Metric(BaseModel):
}.items()
if v is not None
}
task_logger.info(f"Emitting metric: {data}")
optional_telemetry(
record_type=RecordType.METRIC,
data=data,
@@ -189,237 +199,368 @@ def _build_connector_start_latency_metric(
f"Start latency for index attempt {recent_attempt.id}: {start_latency:.2f}s "
f"(desired: {desired_start_time}, actual: {recent_attempt.time_started})"
)
job_id = build_job_id("connector", str(cc_pair.id), str(recent_attempt.id))
return Metric(
key=metric_key,
name="connector_start_latency",
value=start_latency,
tags={},
tags={
"job_id": job_id,
"connector_id": str(cc_pair.connector.id),
"source": str(cc_pair.connector.source),
},
)
def _build_run_success_metrics(
def _build_connector_final_metrics(
cc_pair: ConnectorCredentialPair,
recent_attempts: list[IndexAttempt],
redis_std: Redis,
) -> list[Metric]:
"""
Final metrics for connector index attempts:
- Boolean success/fail metric
- If success, emit:
* duration (seconds)
* doc_count
"""
metrics = []
for attempt in recent_attempts:
metric_key = _CONNECTOR_INDEX_ATTEMPT_RUN_SUCCESS_KEY_FMT.format(
cc_pair_id=cc_pair.id,
index_attempt_id=attempt.id,
)
if _has_metric_been_emitted(redis_std, metric_key):
task_logger.info(
f"Skipping metric for connector {cc_pair.connector.id} "
f"index attempt {attempt.id} because it has already been "
"emitted"
f"Skipping final metrics for connector {cc_pair.connector.id} "
f"index attempt {attempt.id}, already emitted."
)
continue
if attempt.status in [
# We only emit final metrics if the attempt is in a terminal state
if attempt.status not in [
IndexingStatus.SUCCESS,
IndexingStatus.FAILED,
IndexingStatus.CANCELED,
]:
task_logger.info(
f"Adding run success metric for index attempt {attempt.id} with status {attempt.status}"
# Not finished; skip
continue
job_id = build_job_id("connector", str(cc_pair.id), str(attempt.id))
success = attempt.status == IndexingStatus.SUCCESS
metrics.append(
Metric(
key=metric_key, # We'll mark the same key for any final metrics
name="connector_run_succeeded",
value=success,
tags={
"job_id": job_id,
"connector_id": str(cc_pair.connector.id),
"source": str(cc_pair.connector.source),
"status": attempt.status.value,
},
)
)
if success:
# Make sure we have valid time_started
if attempt.time_started and attempt.time_updated:
duration_seconds = (
attempt.time_updated - attempt.time_started
).total_seconds()
metrics.append(
Metric(
key=None, # No need for a new key, or you can reuse the same if you prefer
name="connector_index_duration_seconds",
value=duration_seconds,
tags={
"job_id": job_id,
"connector_id": str(cc_pair.connector.id),
"source": str(cc_pair.connector.source),
},
)
)
else:
task_logger.error(
f"Index attempt {attempt.id} succeeded but has missing time "
f"(time_started={attempt.time_started}, time_updated={attempt.time_updated})."
)
# For doc counts, choose whichever field is more relevant
doc_count = attempt.total_docs_indexed or 0
metrics.append(
Metric(
key=metric_key,
name="connector_run_succeeded",
value=attempt.status == IndexingStatus.SUCCESS,
tags={"source": str(cc_pair.connector.source)},
key=None,
name="connector_index_doc_count",
value=doc_count,
tags={
"job_id": job_id,
"connector_id": str(cc_pair.connector.id),
"source": str(cc_pair.connector.source),
},
)
)
_mark_metric_as_emitted(redis_std, metric_key)
return metrics
def _collect_connector_metrics(db_session: Session, redis_std: Redis) -> list[Metric]:
"""Collect metrics about connector runs from the past hour"""
# NOTE: use get_db_current_time since the IndexAttempt times are set based on DB time
one_hour_ago = get_db_current_time(db_session) - timedelta(hours=1)
# Get all connector credential pairs
cc_pairs = db_session.scalars(select(ConnectorCredentialPair)).all()
# Might be more than one search setting, or just one
active_search_settings = get_active_search_settings(db_session)
metrics = []
for cc_pair, search_settings in zip(cc_pairs, active_search_settings):
recent_attempts = (
db_session.query(IndexAttempt)
.filter(
IndexAttempt.connector_credential_pair_id == cc_pair.id,
IndexAttempt.search_settings_id == search_settings.id,
# If you want to process each cc_pair against each search setting:
for cc_pair in cc_pairs:
for search_settings in active_search_settings:
recent_attempts = (
db_session.query(IndexAttempt)
.filter(
IndexAttempt.connector_credential_pair_id == cc_pair.id,
IndexAttempt.search_settings_id == search_settings.id,
)
.order_by(IndexAttempt.time_created.desc())
.limit(2)
.all()
)
.order_by(IndexAttempt.time_created.desc())
.limit(2)
.all()
)
if not recent_attempts:
continue
most_recent_attempt = recent_attempts[0]
second_most_recent_attempt = (
recent_attempts[1] if len(recent_attempts) > 1 else None
)
if not recent_attempts:
continue
if one_hour_ago > most_recent_attempt.time_created:
continue
most_recent_attempt = recent_attempts[0]
second_most_recent_attempt = (
recent_attempts[1] if len(recent_attempts) > 1 else None
)
# Connector start latency
start_latency_metric = _build_connector_start_latency_metric(
cc_pair, most_recent_attempt, second_most_recent_attempt, redis_std
)
if start_latency_metric:
metrics.append(start_latency_metric)
if one_hour_ago > most_recent_attempt.time_created:
continue
# Connector run success/failure
run_success_metrics = _build_run_success_metrics(
cc_pair, recent_attempts, redis_std
)
metrics.extend(run_success_metrics)
# Connector start latency
start_latency_metric = _build_connector_start_latency_metric(
cc_pair, most_recent_attempt, second_most_recent_attempt, redis_std
)
if start_latency_metric:
metrics.append(start_latency_metric)
# Connector run success/failure
final_metrics = _build_connector_final_metrics(
cc_pair, recent_attempts, redis_std
)
metrics.extend(final_metrics)
return metrics
def _collect_sync_metrics(db_session: Session, redis_std: Redis) -> list[Metric]:
"""Collect metrics about document set and group syncing speed"""
# NOTE: use get_db_current_time since the SyncRecord times are set based on DB time
"""
Collect metrics for document set and group syncing:
- Success/failure status
- Start latency (always)
- Duration & doc count (only if success)
- Throughput (docs/min) (only if success)
"""
one_hour_ago = get_db_current_time(db_session) - timedelta(hours=1)
# Get all sync records from the last hour
# Get all sync records that ended in the last hour
recent_sync_records = db_session.scalars(
select(SyncRecord)
.where(SyncRecord.sync_start_time >= one_hour_ago)
.order_by(SyncRecord.sync_start_time.desc())
.where(SyncRecord.sync_end_time.isnot(None))
.where(SyncRecord.sync_end_time >= one_hour_ago)
.order_by(SyncRecord.sync_end_time.desc())
).all()
task_logger.info(
f"Collecting sync metrics for {len(recent_sync_records)} sync records"
)
metrics = []
for sync_record in recent_sync_records:
# Skip if no end time (sync still in progress)
if not sync_record.sync_end_time:
continue
# Build a job_id for correlation
job_id = build_job_id("sync_record", str(sync_record.id))
# Check if we already emitted a metric for this sync record
metric_key = (
f"sync_speed:{sync_record.sync_type}:"
f"{sync_record.entity_id}:{sync_record.id}"
# Emit a SUCCESS/FAIL boolean metric
# Use a single Redis key to avoid re-emitting final metrics
final_metric_key = _FINAL_METRIC_KEY_FMT.format(
sync_type=sync_record.sync_type,
entity_id=sync_record.entity_id,
sync_record_id=sync_record.id,
)
if _has_metric_been_emitted(redis_std, metric_key):
task_logger.info(
f"Skipping metric for sync record {sync_record.id} "
"because it has already been emitted"
if not _has_metric_been_emitted(redis_std, final_metric_key):
# Evaluate success
sync_succeeded = sync_record.sync_status == SyncStatus.SUCCESS
metrics.append(
Metric(
key=final_metric_key,
name="sync_run_succeeded",
value=sync_succeeded,
tags={
"job_id": job_id,
"sync_type": str(sync_record.sync_type),
"status": str(sync_record.sync_status),
},
)
)
continue
# Calculate sync duration in minutes
sync_duration_mins = (
sync_record.sync_end_time - sync_record.sync_start_time
).total_seconds() / 60.0
# If successful, emit additional metrics
if sync_succeeded:
if sync_record.sync_end_time and sync_record.sync_start_time:
duration_seconds = (
sync_record.sync_end_time - sync_record.sync_start_time
).total_seconds()
else:
task_logger.error(
f"Invalid times for sync record {sync_record.id}: "
f"start={sync_record.sync_start_time}, end={sync_record.sync_end_time}"
)
duration_seconds = None
# Calculate sync speed (docs/min) - avoid division by zero
sync_speed = (
sync_record.num_docs_synced / sync_duration_mins
if sync_duration_mins > 0
else None
doc_count = sync_record.num_docs_synced or 0
sync_speed = None
if duration_seconds and duration_seconds > 0:
duration_mins = duration_seconds / 60.0
sync_speed = (
doc_count / duration_mins if duration_mins > 0 else None
)
# Emit duration, doc count, speed
if duration_seconds is not None:
metrics.append(
Metric(
key=None,
name="sync_duration_seconds",
value=duration_seconds,
tags={
"job_id": job_id,
"sync_type": str(sync_record.sync_type),
},
)
)
else:
task_logger.error(
f"Invalid sync record {sync_record.id} with no duration"
)
metrics.append(
Metric(
key=None,
name="sync_doc_count",
value=doc_count,
tags={
"job_id": job_id,
"sync_type": str(sync_record.sync_type),
},
)
)
if sync_speed is not None:
metrics.append(
Metric(
key=None,
name="sync_speed_docs_per_min",
value=sync_speed,
tags={
"job_id": job_id,
"sync_type": str(sync_record.sync_type),
},
)
)
else:
task_logger.error(
f"Invalid sync record {sync_record.id} with no duration"
)
# Mark final metrics as emitted so we don't re-emit
_mark_metric_as_emitted(redis_std, final_metric_key)
# Emit start latency
start_latency_key = _SYNC_START_LATENCY_KEY_FMT.format(
sync_type=sync_record.sync_type,
entity_id=sync_record.entity_id,
sync_record_id=sync_record.id,
)
if not _has_metric_been_emitted(redis_std, start_latency_key):
# Get the entity's last update time based on sync type
entity: DocumentSet | UserGroup | None = None
if sync_record.sync_type == SyncType.DOCUMENT_SET:
entity = db_session.scalar(
select(DocumentSet).where(DocumentSet.id == sync_record.entity_id)
)
elif sync_record.sync_type == SyncType.USER_GROUP:
entity = db_session.scalar(
select(UserGroup).where(UserGroup.id == sync_record.entity_id)
)
else:
task_logger.info(
f"Skipping sync record {sync_record.id} of type {sync_record.sync_type}."
)
continue
if sync_speed is None:
task_logger.error(
f"Something went wrong with sync speed calculation. "
f"Sync record: {sync_record.id}, duration: {sync_duration_mins}, "
f"docs synced: {sync_record.num_docs_synced}"
)
continue
if entity is None:
task_logger.error(
f"Could not find entity for sync record {sync_record.id} "
f"(type={sync_record.sync_type}, id={sync_record.entity_id})."
)
continue
task_logger.info(
f"Calculated sync speed for record {sync_record.id}: {sync_speed} docs/min"
)
metrics.append(
Metric(
key=metric_key,
name="sync_speed_docs_per_min",
value=sync_speed,
tags={
"sync_type": str(sync_record.sync_type),
"status": str(sync_record.sync_status),
},
)
)
# Calculate start latency in seconds:
# (actual sync start) - (last modified time)
if entity.time_last_modified_by_user and sync_record.sync_start_time:
start_latency = (
sync_record.sync_start_time - entity.time_last_modified_by_user
).total_seconds()
# Add sync start latency metric
start_latency_key = (
f"sync_start_latency:{sync_record.sync_type}"
f":{sync_record.entity_id}:{sync_record.id}"
)
if _has_metric_been_emitted(redis_std, start_latency_key):
task_logger.info(
f"Skipping start latency metric for sync record {sync_record.id} "
"because it has already been emitted"
)
continue
if start_latency < 0:
task_logger.error(
f"Negative start latency for sync record {sync_record.id} "
f"(start={sync_record.sync_start_time}, entity_modified={entity.time_last_modified_by_user})"
)
continue
# Get the entity's last update time based on sync type
entity: DocumentSet | UserGroup | None = None
if sync_record.sync_type == SyncType.DOCUMENT_SET:
entity = db_session.scalar(
select(DocumentSet).where(DocumentSet.id == sync_record.entity_id)
)
elif sync_record.sync_type == SyncType.USER_GROUP:
entity = db_session.scalar(
select(UserGroup).where(UserGroup.id == sync_record.entity_id)
)
else:
# Skip other sync types
task_logger.info(
f"Skipping sync record {sync_record.id} "
f"with type {sync_record.sync_type} "
f"and id {sync_record.entity_id} "
"because it is not a document set or user group"
)
continue
metrics.append(
Metric(
key=start_latency_key,
name="sync_start_latency_seconds",
value=start_latency,
tags={
"job_id": job_id,
"sync_type": str(sync_record.sync_type),
},
)
)
if entity is None:
task_logger.error(
f"Could not find entity for sync record {sync_record.id} "
f"with type {sync_record.sync_type} and id {sync_record.entity_id}"
)
continue
# Calculate start latency in seconds
start_latency = (
sync_record.sync_start_time - entity.time_last_modified_by_user
).total_seconds()
task_logger.info(
f"Calculated start latency for sync record {sync_record.id}: {start_latency} seconds"
)
if start_latency < 0:
task_logger.error(
f"Start latency is negative for sync record {sync_record.id} "
f"with type {sync_record.sync_type} and id {sync_record.entity_id}. "
f"Sync start time: {sync_record.sync_start_time}, "
f"Entity last modified: {entity.time_last_modified_by_user}"
)
continue
metrics.append(
Metric(
key=start_latency_key,
name="sync_start_latency_seconds",
value=start_latency,
tags={
"sync_type": str(sync_record.sync_type),
},
)
)
_mark_metric_as_emitted(redis_std, start_latency_key)
return metrics
def build_job_id(
job_type: Literal["connector", "sync_record"],
primary_id: str,
secondary_id: str | None = None,
) -> str:
if job_type == "connector":
if secondary_id is None:
raise ValueError(
"secondary_id (attempt_id) is required for connector job_type"
)
return f"connector:{primary_id}:attempt:{secondary_id}"
elif job_type == "sync_record":
return f"sync_record:{primary_id}"
@shared_task(
name=OnyxCeleryTask.MONITOR_BACKGROUND_PROCESSES,
soft_time_limit=_MONITORING_SOFT_TIME_LIMIT,
@@ -459,6 +600,7 @@ def monitor_background_processes(self: Task, *, tenant_id: str | None) -> None:
lambda: _collect_connector_metrics(db_session, redis_std),
lambda: _collect_sync_metrics(db_session, redis_std),
]
# Collect and log each metric
with get_session_with_tenant(tenant_id) as db_session:
for metric_fn in metric_functions:
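The job_id tags attached to the connector and sync metrics come from the build_job_id helper shown above. A small illustrative example of the identifiers it produces (the numeric ids are made up):

    build_job_id("connector", "42", "7")   # -> "connector:42:attempt:7"
    build_job_id("sync_record", "123")     # -> "sync_record:123"
    build_job_id("connector", "42")        # raises ValueError: attempt id is required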

View File

@@ -8,20 +8,64 @@ from sqlalchemy.orm import Session
from onyx.db.enums import SyncStatus
from onyx.db.enums import SyncType
from onyx.db.models import SyncRecord
from onyx.setup import setup_logger
logger = setup_logger()
def insert_sync_record(
db_session: Session,
entity_id: int | None,
entity_id: int,
sync_type: SyncType,
) -> SyncRecord:
"""Insert a new sync record into the database.
"""Insert a new sync record into the database, cancelling any existing in-progress records.
Args:
db_session: The database session to use
entity_id: The ID of the entity being synced (document set ID, user group ID, etc.)
sync_type: The type of sync operation
"""
# If an existing in-progress sync record exists, mark as cancelled
existing_in_progress_sync_record = fetch_latest_sync_record(
db_session, entity_id, sync_type, sync_status=SyncStatus.IN_PROGRESS
)
if existing_in_progress_sync_record is not None:
logger.info(
f"Cancelling existing in-progress sync record {existing_in_progress_sync_record.id} "
f"for entity_id={entity_id} sync_type={sync_type}"
)
mark_sync_records_as_cancelled(db_session, entity_id, sync_type)
return _create_sync_record(db_session, entity_id, sync_type)
def mark_sync_records_as_cancelled(
db_session: Session,
entity_id: int | None,
sync_type: SyncType,
) -> None:
stmt = (
update(SyncRecord)
.where(
and_(
SyncRecord.entity_id == entity_id,
SyncRecord.sync_type == sync_type,
SyncRecord.sync_status == SyncStatus.IN_PROGRESS,
)
)
.values(sync_status=SyncStatus.CANCELED)
)
db_session.execute(stmt)
db_session.commit()
def _create_sync_record(
db_session: Session,
entity_id: int | None,
sync_type: SyncType,
) -> SyncRecord:
"""Create and insert a new sync record into the database."""
sync_record = SyncRecord(
entity_id=entity_id,
sync_type=sync_type,
@@ -39,6 +83,7 @@ def fetch_latest_sync_record(
db_session: Session,
entity_id: int,
sync_type: SyncType,
sync_status: SyncStatus | None = None,
) -> SyncRecord | None:
"""Fetch the most recent sync record for a given entity ID and status.
@@ -59,6 +104,9 @@ def fetch_latest_sync_record(
.limit(1)
)
if sync_status is not None:
stmt = stmt.where(SyncRecord.sync_status == sync_status)
result = db_session.execute(stmt)
return result.scalar_one_or_none()
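Taken together, insert_sync_record now cancels any lingering in-progress record before creating a new one. A minimal usage sketch (the entity id and session are placeholders, behaviour as described in the docstring above):

    # Starting a document-set sync twice for the same entity
    first = insert_sync_record(db_session, entity_id=5, sync_type=SyncType.DOCUMENT_SET)
    # ... the first sync never reaches a terminal state ...
    second = insert_sync_record(db_session, entity_id=5, sync_type=SyncType.DOCUMENT_SET)
    # `first` has now been marked SyncStatus.CANCELED, so at most one
    # in-progress record exists per (entity_id, sync_type) pair.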

View File

@@ -1,8 +1,5 @@
import mimetypes
import os
import uuid
import zipfile
from io import BytesIO
from typing import cast
from fastapi import APIRouter
@@ -389,43 +386,10 @@ def upload_files(
for file in files:
if not file.filename:
raise HTTPException(status_code=400, detail="File name cannot be empty")
# Skip directories and known macOS metadata entries
def should_process_file(file_path: str) -> bool:
normalized_path = os.path.normpath(file_path)
return not any(part.startswith(".") for part in normalized_path.split(os.sep))
try:
file_store = get_default_file_store(db_session)
deduped_file_paths = []
for file in files:
if file.content_type and file.content_type.startswith("application/zip"):
with zipfile.ZipFile(file.file, "r") as zf:
for file_info in zf.namelist():
if zf.getinfo(file_info).is_dir():
continue
if not should_process_file(file_info):
continue
sub_file_bytes = zf.read(file_info)
sub_file_name = os.path.join(str(uuid.uuid4()), file_info)
deduped_file_paths.append(sub_file_name)
mime_type, __ = mimetypes.guess_type(file_info)
if mime_type is None:
mime_type = "application/octet-stream"
file_store.save_file(
file_name=sub_file_name,
content=BytesIO(sub_file_bytes),
display_name=os.path.basename(file_info),
file_origin=FileOrigin.CONNECTOR,
file_type=mime_type,
)
continue
file_path = os.path.join(str(uuid.uuid4()), cast(str, file.filename))
deduped_file_paths.append(file_path)
file_store.save_file(

View File

@@ -123,6 +123,7 @@ def optional_telemetry(
headers={"Content-Type": "application/json"},
json=payload,
)
except Exception:
# This way it silences all thread level logging as well
pass

View File

@@ -78,7 +78,7 @@ ENV NEXT_PUBLIC_GTM_ENABLED=${NEXT_PUBLIC_GTM_ENABLED}
ARG NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED
ENV NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=${NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED}
RUN NODE_OPTIONS="--max-old-space-size=8192" npx next build
RUN npx next build
# Step 2. Production image, copy all the files and run next
FROM base AS runner

View File

@@ -890,20 +890,47 @@ export function AssistantEditor({
{imageGenerationTool && (
<>
<div className="flex items-center content-start mb-2">
<BooleanFormField
name={`enabled_tools_map.${imageGenerationTool.id}`}
label={imageGenerationTool.display_name}
subtext="Generate and manipulate images using AI-powered tools"
disabled={
!currentLLMSupportsImageOutput ||
!isImageGenerationAvailable
}
disabledTooltip={
!currentLLMSupportsImageOutput
? "To use Image Generation, select GPT-4 or another image compatible model as the default model for this Assistant."
: "Image Generation requires an OpenAI or Azure Dall-E configuration."
}
/>
<TooltipProvider>
<Tooltip>
<TooltipTrigger>
<CheckboxField
size="sm"
id={`enabled_tools_map.${imageGenerationTool.id}`}
name={`enabled_tools_map.${imageGenerationTool.id}`}
onCheckedChange={() => {
if (isImageGenerationAvailable) {
toggleToolInValues(
imageGenerationTool.id
);
}
}}
className={
!isImageGenerationAvailable
? "opacity-50 cursor-not-allowed"
: ""
}
/>
</TooltipTrigger>
{!isImageGenerationAvailable && (
<TooltipContent side="top" align="center">
<p className="bg-background-900 max-w-[200px] mb-1 text-sm rounded-lg p-1.5 text-white">
{!currentLLMSupportsImageOutput
? "To use Image Generation, select GPT-4 or another image compatible model as the default model for this Assistant."
: "Image Generation requires an OpenAI or Azure Dalle configuration."}
</p>
</TooltipContent>
)}
</Tooltip>
</TooltipProvider>
<div className="flex flex-col ml-2">
<span className="text-sm">
{imageGenerationTool.display_name}
</span>
<span className="text-xs text-subtle">
Generate and manipulate images using AI-powered
tools
</span>
</div>
</div>
</>
)}
@@ -937,12 +964,23 @@ export function AssistantEditor({
{customTools.length > 0 &&
customTools.map((tool) => (
<BooleanFormField
key={tool.id}
name={`enabled_tools_map.${tool.id}`}
label={tool.display_name}
subtext={tool.description}
/>
<React.Fragment key={tool.id}>
<div className="flex items-center content-start mb-2">
<Checkbox
size="sm"
id={`enabled_tools_map.${tool.id}`}
checked={values.enabled_tools_map[tool.id]}
onCheckedChange={() => {
toggleToolInValues(tool.id);
}}
/>
<div className="ml-2">
<span className="text-sm">
{tool.display_name}
</span>
</div>
</div>
</React.Fragment>
))}
</div>
</div>
@@ -1295,6 +1333,7 @@ export function AssistantEditor({
<BooleanFormField
small
removeIndent
alignTop
name="llm_relevance_filter"
label="AI Relevance Filter"
subtext="If enabled, the LLM will filter out documents that are not useful for answering the user query prior to generating a response. This typically improves the quality of the response but incurs slightly higher cost."
@@ -1303,6 +1342,7 @@ export function AssistantEditor({
<BooleanFormField
small
removeIndent
alignTop
name="include_citations"
label="Citations"
subtext="Response will include citations ([1], [2], etc.) for documents referenced by the LLM. In general, we recommend to leave this enabled in order to increase trust in the LLM answer."
@@ -1315,6 +1355,7 @@ export function AssistantEditor({
<BooleanFormField
small
removeIndent
alignTop
name="datetime_aware"
label="Date and Time Aware"
subtext='Toggle this option to let the assistant know the current date and time (formatted like: "Thursday Jan 1, 1970 00:01"). To inject it in a specific place in the prompt, use the pattern [[CURRENT_DATETIME]]'

View File

@@ -50,7 +50,7 @@ export const rerankingModels: RerankingModel[] = [
cloud: true,
displayName: "LiteLLM",
description: "Host your own reranker or router with LiteLLM proxy",
link: "https://docs.litellm.ai/docs/simple_proxy",
link: "https://docs.litellm.ai/docs/proxy",
},
{
rerank_provider_type: null,
@@ -82,7 +82,7 @@ export const rerankingModels: RerankingModel[] = [
modelName: "rerank-english-v3.0",
displayName: "Cohere English",
description: "High-performance English-focused reranking model.",
link: "https://docs.cohere.com/v2/reference/rerank",
link: "https://docs.cohere.com/docs/rerank",
},
{
cloud: true,
@@ -90,7 +90,7 @@ export const rerankingModels: RerankingModel[] = [
modelName: "rerank-multilingual-v3.0",
displayName: "Cohere Multilingual",
description: "Powerful multilingual reranking model.",
link: "https://docs.cohere.com/v2/reference/rerank",
link: "https://docs.cohere.com/docs/rerank",
},
];

View File

@@ -1,13 +1,14 @@
"use client";
import React, { useMemo, useState } from "react";
import { Persona } from "@/app/admin/assistants/interfaces";
import { useRouter } from "next/navigation";
import { Modal } from "@/components/Modal";
import AssistantCard from "./AssistantCard";
import { useAssistants } from "@/components/context/AssistantsContext";
import { useUser } from "@/components/user/UserProvider";
import { FilterIcon } from "lucide-react";
import { checkUserOwnsAssistant } from "@/lib/assistants/checkOwnership";
import { Dialog, DialogContent } from "@/components/ui/dialog";
export const AssistantBadgeSelector = ({
text,
@@ -20,12 +21,11 @@ export const AssistantBadgeSelector = ({
}) => {
return (
<div
className={`
select-none ${
selected
? "bg-neutral-900 text-white"
: "bg-transparent text-neutral-900"
} w-12 h-5 text-center px-1 py-0.5 rounded-lg cursor-pointer text-[12px] font-normal leading-[10px] border border-black justify-center items-center gap-1 inline-flex`}
className={`${
selected
? "bg-neutral-900 text-white"
: "bg-transparent text-neutral-900"
} w-12 h-5 text-center px-1 py-0.5 rounded-lg cursor-pointer text-[12px] font-normal leading-[10px] border border-black justify-center items-center gap-1 inline-flex`}
onClick={toggleFilter}
>
{text}
@@ -60,15 +60,11 @@ const useAssistantFilter = () => {
return { assistantFilters, toggleAssistantFilter, setAssistantFilters };
};
interface AssistantModalProps {
hideModal: () => void;
modalHeight?: string;
}
export function AssistantModal({
export default function AssistantModal({
hideModal,
modalHeight,
}: AssistantModalProps) {
}: {
hideModal: () => void;
}) {
const { assistants, pinnedAssistants } = useAssistants();
const { assistantFilters, toggleAssistantFilter } = useAssistantFilter();
const router = useRouter();
@@ -90,11 +86,11 @@ export function AssistantModal({
!assistantFilters[AssistantFilter.Private] || !assistant.is_public;
const pinnedFilter =
!assistantFilters[AssistantFilter.Pinned] ||
(pinnedAssistants.map((a) => a.id).includes(assistant.id) ?? false);
(user?.preferences?.pinned_assistants?.includes(assistant.id) ?? false);
const mineFilter =
!assistantFilters[AssistantFilter.Mine] ||
checkUserOwnsAssistant(user, assistant);
assistants.map((a: Persona) => checkUserOwnsAssistant(user, a));
return (
(nameMatches || labelMatches) &&
@@ -115,145 +111,142 @@ export function AssistantModal({
(assistant) => !assistant.builtin_persona && !assistant.is_default_persona
);
const maxHeight = 900;
const calculatedHeight = Math.min(
Math.ceil(assistants.length / 2) * 170 + 200,
window.innerHeight * 0.8
);
const height = Math.min(calculatedHeight, maxHeight);
return (
<Dialog open={true} onOpenChange={(open) => !open && hideModal()}>
<DialogContent
className="p-0 overflow-hidden max-h-[80vh] max-w-none w-[95%] bg-background rounded-sm shadow-2xl transform transition-all duration-300 ease-in-out relative w-11/12 max-w-4xl pt-10 pb-10 px-10 overflow-hidden flex flex-col max-w-4xl"
style={{
position: "fixed",
top: "10vh",
left: "50%",
transform: "translateX(-50%)",
margin: 0,
}}
>
<div className="flex overflow-hidden flex-col h-full">
<div className="flex flex-col sticky top-0 z-10">
<div className="flex px-2 justify-between items-center gap-x-2 mb-0">
<div className="h-12 w-full rounded-lg flex-col justify-center items-start gap-2.5 inline-flex">
<div className="h-12 rounded-md w-full shadow-[0px_0px_2px_0px_rgba(0,0,0,0.25)] border border-[#dcdad4] flex items-center px-3">
{!isSearchFocused && (
<svg
xmlns="http://www.w3.org/2000/svg"
className="h-5 w-5 text-gray-400"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z"
/>
</svg>
)}
<input
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
onFocus={() => setIsSearchFocused(true)}
onBlur={() => setIsSearchFocused(false)}
type="text"
className="w-full h-full bg-transparent outline-none text-black"
<Modal
heightOverride={`${height}px`}
onOutsideClick={hideModal}
removeBottomPadding
className={`max-w-4xl max-h-[90vh] ${height} w-[95%] overflow-hidden`}
>
<div className="flex flex-col h-full">
<div className="flex bg-background flex-col sticky top-0 z-10">
<div className="flex px-2 justify-between items-center gap-x-2 mb-0">
<div className="h-12 w-full rounded-lg flex-col justify-center items-start gap-2.5 inline-flex">
<div className="h-12 rounded-md w-full shadow-[0px_0px_2px_0px_rgba(0,0,0,0.25)] border border-[#dcdad4] flex items-center px-3">
{!isSearchFocused && (
<svg
xmlns="http://www.w3.org/2000/svg"
className="h-5 w-5 text-gray-400"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
strokeLinecap="round"
strokeLinejoin="round"
strokeWidth={2}
d="M21 21l-6-6m2-5a7 7 0 11-14 0 7 7 0 0114 0z"
/>
</svg>
)}
<input
value={searchQuery}
onChange={(e) => setSearchQuery(e.target.value)}
onFocus={() => setIsSearchFocused(true)}
onBlur={() => setIsSearchFocused(false)}
type="text"
className="w-full h-full bg-transparent outline-none text-black"
/>
</div>
</div>
<button
onClick={() => router.push("/assistants/new")}
className="h-10 cursor-pointer px-6 py-3 bg-black rounded-md border border-black justify-center items-center gap-2.5 inline-flex"
>
<div className="text-[#fffcf4] text-lg font-normal leading-normal">
Create
</div>
</button>
</div>
<div className="px-2 flex py-4 items-center gap-x-2 flex-wrap">
<FilterIcon size={16} />
<AssistantBadgeSelector
text="Pinned"
selected={assistantFilters[AssistantFilter.Pinned]}
toggleFilter={() => toggleAssistantFilter(AssistantFilter.Pinned)}
/>
<AssistantBadgeSelector
text="Mine"
selected={assistantFilters[AssistantFilter.Mine]}
toggleFilter={() => toggleAssistantFilter(AssistantFilter.Mine)}
/>
<AssistantBadgeSelector
text="Private"
selected={assistantFilters[AssistantFilter.Private]}
toggleFilter={() =>
toggleAssistantFilter(AssistantFilter.Private)
}
/>
<AssistantBadgeSelector
text="Public"
selected={assistantFilters[AssistantFilter.Public]}
toggleFilter={() => toggleAssistantFilter(AssistantFilter.Public)}
/>
</div>
<div className="w-full border-t border-neutral-200" />
</div>
<div className="flex-grow overflow-y-auto">
<h2 className="text-2xl font-semibold text-gray-800 mb-2 px-4 py-2">
Featured Assistants
</h2>
<div className="w-full px-2 pb-2 grid grid-cols-1 md:grid-cols-2 gap-x-6 gap-y-6">
{featuredAssistants.length > 0 ? (
featuredAssistants.map((assistant, index) => (
<div key={index}>
<AssistantCard
pinned={pinnedAssistants
.map((a) => a.id)
.includes(assistant.id)}
persona={assistant}
closeModal={hideModal}
/>
</div>
))
) : (
<div className="col-span-2 text-center text-gray-500">
No featured assistants match filters
</div>
<button
onClick={() => router.push("/assistants/new")}
className="h-10 cursor-pointer px-6 py-3 bg-black rounded-md border border-black justify-center items-center gap-2.5 inline-flex"
>
<div className="text-[#fffcf4] text-lg font-normal leading-normal">
Create
</div>
</button>
</div>
<div className="px-2 flex py-4 items-center gap-x-2 flex-wrap">
<FilterIcon size={16} />
<AssistantBadgeSelector
text="Pinned"
selected={assistantFilters[AssistantFilter.Pinned]}
toggleFilter={() =>
toggleAssistantFilter(AssistantFilter.Pinned)
}
/>
<AssistantBadgeSelector
text="Mine"
selected={assistantFilters[AssistantFilter.Mine]}
toggleFilter={() => toggleAssistantFilter(AssistantFilter.Mine)}
/>
<AssistantBadgeSelector
text="Private"
selected={assistantFilters[AssistantFilter.Private]}
toggleFilter={() =>
toggleAssistantFilter(AssistantFilter.Private)
}
/>
<AssistantBadgeSelector
text="Public"
selected={assistantFilters[AssistantFilter.Public]}
toggleFilter={() =>
toggleAssistantFilter(AssistantFilter.Public)
}
/>
</div>
<div className="w-full border-t border-neutral-200" />
</div>
<div className="flex-grow overflow-y-auto">
<h2 className="text-2xl font-semibold text-gray-800 mb-2 px-4 py-2">
Featured Assistants
</h2>
<div className="w-full px-2 pb-2 grid grid-cols-1 md:grid-cols-2 gap-x-6 gap-y-6">
{featuredAssistants.length > 0 ? (
featuredAssistants.map((assistant, index) => (
<div key={index}>
<AssistantCard
pinned={pinnedAssistants
.map((a) => a.id)
.includes(assistant.id)}
persona={assistant}
closeModal={hideModal}
/>
</div>
))
) : (
<div className="col-span-2 text-center text-gray-500">
No featured assistants match filters
</div>
)}
</div>
{allAssistants && allAssistants.length > 0 && (
<>
<h2 className="text-2xl font-semibold text-gray-800 mt-4 mb-2 px-4 py-2">
All Assistants
</h2>
<div className="w-full mt-2 px-2 pb-2 grid grid-cols-1 md:grid-cols-2 gap-x-6 gap-y-6">
{allAssistants
.sort((a, b) => b.id - a.id)
.map((assistant, index) => (
<div key={index}>
<AssistantCard
pinned={
user?.preferences?.pinned_assistants?.includes(
assistant.id
) ?? false
}
persona={assistant}
closeModal={hideModal}
/>
</div>
))}
</div>
</>
)}
</div>
{allAssistants && allAssistants.length > 0 && (
<>
<h2 className="text-2xl font-semibold text-gray-800 mt-4 mb-2 px-4 py-2">
All Assistants
</h2>
<div className="w-full mt-2 px-2 pb-2 grid grid-cols-1 md:grid-cols-2 gap-x-6 gap-y-6">
{allAssistants
.sort((a, b) => b.id - a.id)
.map((assistant, index) => (
<div key={index}>
<AssistantCard
pinned={
user?.preferences?.pinned_assistants?.includes(
assistant.id
) ?? false
}
persona={assistant}
closeModal={hideModal}
/>
</div>
))}
</div>
</>
)}
</div>
</DialogContent>
</Dialog>
</div>
</Modal>
);
}
export default AssistantModal;

View File

@@ -694,7 +694,6 @@ export function ChatInputBar({
flexPriority="stiff"
name="Filters"
Icon={FiFilter}
toggle
tooltipContent="Filter your search"
/>
}

View File

@@ -22,7 +22,7 @@ export const MemoizedAnchor = memo(
const index = parseInt(match[1], 10) - 1;
const associatedDoc = docs?.[index];
if (!associatedDoc) {
return <a href={children as string}>{children}</a>;
return <>{children}</>;
}
let icon: React.ReactNode = null;
@@ -77,24 +77,9 @@ export const MemoizedLink = memo((props: any) => {
);
}
const handleMouseDown = () => {
let url = rest.href || rest.children?.toString();
if (url && !url.startsWith("http://") && !url.startsWith("https://")) {
// Try to construct a valid URL
const httpsUrl = `https://${url}`;
try {
new URL(httpsUrl);
url = httpsUrl;
} catch {
// If not a valid URL, don't modify original url
}
}
window.open(url, "_blank");
};
return (
<a
onMouseDown={handleMouseDown}
onMouseDown={() => rest.href && window.open(rest.href, "_blank")}
className="cursor-pointer text-link hover:text-link-hover"
>
{rest.children}

View File

@@ -375,11 +375,7 @@ export const AIMessage = ({
return (
<>
<div
style={{
position: "absolute",
left: "-9999px",
display: "none",
}}
style={{ position: "absolute", left: "-9999px" }}
dangerouslySetInnerHTML={{ __html: htmlContent }}
/>
<ReactMarkdown

View File

@@ -6,17 +6,8 @@ import React, {
useContext,
useState,
useCallback,
useLayoutEffect,
useRef,
} from "react";
import Link from "next/link";
import {
Tooltip,
TooltipTrigger,
TooltipContent,
TooltipProvider,
} from "@/components/ui/tooltip";
import { useRouter, useSearchParams } from "next/navigation";
import { ChatSession } from "../interfaces";
import { NEXT_PUBLIC_NEW_CHAT_DIRECTS_TO_SAME_PERSONA } from "@/lib/constants";
@@ -53,7 +44,6 @@ import {
import { useSortable } from "@dnd-kit/sortable";
import { CSS } from "@dnd-kit/utilities";
import { CircleX } from "lucide-react";
import { restrictToVerticalAxis } from "@dnd-kit/modifiers";
interface HistorySidebarProps {
page: pageType;
@@ -100,24 +90,6 @@ const SortableAssistant: React.FC<SortableAssistantProps> = ({
...(isDragging ? { zIndex: 1000, position: "relative" as const } : {}),
};
const nameRef = useRef<HTMLParagraphElement>(null);
const hiddenNameRef = useRef<HTMLSpanElement>(null);
const [isNameTruncated, setIsNameTruncated] = useState(false);
useLayoutEffect(() => {
const checkTruncation = () => {
if (nameRef.current && hiddenNameRef.current) {
const visibleWidth = nameRef.current.offsetWidth;
const fullTextWidth = hiddenNameRef.current.offsetWidth;
setIsNameTruncated(fullTextWidth > visibleWidth);
}
};
checkTruncation();
window.addEventListener("resize", checkTruncation);
return () => window.removeEventListener("resize", checkTruncation);
}, [assistant.name]);
return (
<div
ref={setNodeRef}
@@ -143,28 +115,10 @@ const SortableAssistant: React.FC<SortableAssistantProps> = ({
: ""
} relative flex items-center gap-x-2 py-1 px-2 rounded-md`}
>
<AssistantIcon assistant={assistant} size={20} className="flex-none" />
<TooltipProvider>
<Tooltip>
<TooltipTrigger asChild>
<p
ref={nameRef}
className="text-base text-left w-fit line-clamp-1 text-ellipsis text-black"
>
{assistant.name}
</p>
</TooltipTrigger>
{isNameTruncated && (
<TooltipContent>{assistant.name}</TooltipContent>
)}
</Tooltip>
</TooltipProvider>
<span
ref={hiddenNameRef}
className="absolute left-[-9999px] whitespace-nowrap"
>
<AssistantIcon assistant={assistant} size={16} className="flex-none" />
<p className="text-base text-left w-fit line-clamp-1 text-ellipsis text-black">
{assistant.name}
</span>
</p>
<button
onClick={(e) => {
e.stopPropagation();
@@ -341,7 +295,7 @@ export const HistorySidebar = forwardRef<HTMLDivElement, HistorySidebarProps>(
</div>
)}
<div className="h-full relative overflow-x-hidden overflow-y-auto">
<div className="h-full relative overflow-y-auto">
<div className="flex px-4 font-normal text-sm gap-x-2 leading-normal text-[#6c6c6c]/80 items-center font-normal leading-normal">
Assistants
</div>
@@ -349,7 +303,6 @@ export const HistorySidebar = forwardRef<HTMLDivElement, HistorySidebarProps>(
sensors={sensors}
collisionDetection={closestCenter}
onDragEnd={handleDragEnd}
modifiers={[restrictToVerticalAxis]}
>
<SortableContext
items={pinnedAssistants.map((a) =>

View File

@@ -13,7 +13,7 @@ import { FiPlus, FiTrash2, FiCheck, FiX } from "react-icons/fi";
import { NEXT_PUBLIC_DELETE_ALL_CHATS_ENABLED } from "@/lib/constants";
import { FolderDropdown } from "../folders/FolderDropdown";
import { ChatSessionDisplay } from "./ChatSessionDisplay";
import { useState, useCallback, useRef, useContext, useEffect } from "react";
import { useState, useCallback, useRef, useContext } from "react";
import { Caret } from "@/components/icons/icons";
import { groupSessionsByDateRange } from "../lib";
import React from "react";
@@ -36,7 +36,6 @@ import { useSortable } from "@dnd-kit/sortable";
import { CSS } from "@dnd-kit/utilities";
import { useChatContext } from "@/components/context/ChatContext";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { restrictToVerticalAxis } from "@dnd-kit/modifiers";
interface SortableFolderProps {
folder: Folder;
@@ -54,41 +53,34 @@ interface SortableFolderProps {
const SortableFolder: React.FC<SortableFolderProps> = (props) => {
const settings = useContext(SettingsContext);
const mobile = settings?.isMobile;
const [isDragging, setIsDragging] = useState(false);
const {
attributes,
listeners,
setNodeRef,
transform,
transition,
isDragging: isDraggingDndKit,
} = useSortable({
id: props.folder.folder_id?.toString() ?? "",
disabled: mobile,
});
const { attributes, listeners, setNodeRef, transform, transition } =
useSortable({
id: props.folder.folder_id?.toString() ?? "",
data: {
activationConstraint: {
distance: 8,
},
},
disabled: mobile,
});
const ref = useRef<HTMLDivElement>(null);
const style: React.CSSProperties = {
const style = {
transform: CSS.Transform.toString(transform),
transition,
zIndex: isDragging ? 1000 : "auto",
position: isDragging ? "relative" : "static",
opacity: isDragging ? 0.6 : 1,
};
useEffect(() => {
setIsDragging(isDraggingDndKit);
}, [isDraggingDndKit]);
return (
<div
ref={setNodeRef}
className="pr-3 ml-4 overflow-visible flex items-start"
style={style}
{...attributes}
{...listeners}
>
<FolderDropdown ref={ref} {...props} />
<FolderDropdown
ref={ref}
{...props}
{...(mobile ? {} : attributes)}
{...(mobile ? {} : listeners)}
/>
</div>
);
};
@@ -367,7 +359,6 @@ export function PagesTab({
{folders && folders.length > 0 && (
<DndContext
modifiers={[restrictToVerticalAxis]}
sensors={sensors}
collisionDetection={closestCenter}
onDragEnd={handleDragEnd}

View File

@@ -287,53 +287,11 @@
overflow-x: hidden;
}
.scrollbar {
width: 100%;
height: 100%;
}
/* Styling for textarea scrollbar */
textarea::-webkit-scrollbar {
width: 8px;
}
textarea::-webkit-scrollbar-track {
background: var(--scrollbar-track);
border-radius: 4px;
}
textarea::-webkit-scrollbar-thumb {
background: var(--scrollbar-thumb);
border-radius: 4px;
}
textarea::-webkit-scrollbar-thumb:hover {
background: var(--scrollbar-thumb-hover);
}
/* Styling for textarea resize handle */
textarea {
resize: vertical;
}
/* For Firefox */
textarea {
scrollbar-width: thin;
scrollbar-color: var(--scrollbar-thumb) var(--scrollbar-track);
}
.inputscroll::-webkit-scrollbar-track {
background: #e5e7eb;
scrollbar-width: none;
}
::-webkit-scrollbar {
width: 0px;
/* Vertical scrollbar width */
height: 8px;
/* Horizontal scrollbar height */
}
::-webkit-scrollbar-track {
background: transparent;
/* background: theme("colors.scrollbar.track"); */

View File

@@ -23,7 +23,6 @@ import PostHogPageView from "./PostHogPageView";
import Script from "next/script";
import { LogoType } from "@/components/logo/Logo";
import { Hanken_Grotesk } from "next/font/google";
import { WebVitals } from "./web-vitals";
const inter = Inter({
subsets: ["latin"],
@@ -205,9 +204,8 @@ export default async function RootLayout({
>
<Suspense fallback={null}>
<PostHogPageView />
{process.env.NEXT_PUBLIC_POSTHOG_KEY && <WebVitals />}
{children}
</Suspense>
{children}
</AppProvider>
);
}

View File

@@ -1,12 +0,0 @@
"use client";
import { useReportWebVitals } from "next/web-vitals";
import { usePostHog } from "posthog-js/react";
export function WebVitals() {
const posthog = usePostHog();
useReportWebVitals((metric) => {
posthog.capture(metric.name, metric);
});
return <></>;
}

View File

@@ -63,7 +63,7 @@ export function Modal({
<div
onMouseDown={handleMouseDown}
className={cn(
`fixed inset-0 bg-black border boder-border bg-opacity-10 backdrop-blur-sm h-full
`fixed inset-0 bg-black bg-opacity-25 backdrop-blur-sm h-full
flex items-center justify-center z-[9999] transition-opacity duration-300 ease-in-out`
)}
>

View File

@@ -25,13 +25,11 @@ import {
} from "@/components/ui/tooltip";
import ReactMarkdown from "react-markdown";
import { FaMarkdown } from "react-icons/fa";
import { useRef, useState, useCallback } from "react";
import { useRef, useState } from "react";
import remarkGfm from "remark-gfm";
import { EditIcon } from "@/components/icons/icons";
import { Button } from "@/components/ui/button";
import Link from "next/link";
import { CheckboxField } from "@/components/ui/checkbox";
import { CheckedState } from "@radix-ui/react-checkbox";
export function SectionHeader({
children,
@@ -53,7 +51,7 @@ export function Label({
return (
<div
className={`block font-medium base ${className} ${
small ? "text-xs" : "text-sm"
small ? "text-sm" : "text-base"
}`}
>
{children}
@@ -77,7 +75,7 @@ export function LabelWithTooltip({
}
export function SubLabel({ children }: { children: string | JSX.Element }) {
return <div className="text-xs text-subtle">{children}</div>;
return <div className="text-sm text-subtle mb-2">{children}</div>;
}
export function ManualErrorMessage({ children }: { children: string }) {
@@ -441,62 +439,53 @@ interface BooleanFormFieldProps {
name: string;
label: string;
subtext?: string | JSX.Element;
onChange?: (e: React.ChangeEvent<HTMLInputElement>) => void;
removeIndent?: boolean;
small?: boolean;
alignTop?: boolean;
noLabel?: boolean;
disabled?: boolean;
checked?: boolean;
optional?: boolean;
tooltip?: string;
disabledTooltip?: string;
}
export const BooleanFormField = ({
name,
label,
subtext,
onChange,
removeIndent,
noLabel,
optional,
small,
disabled,
alignTop,
checked,
tooltip,
disabledTooltip,
}: BooleanFormFieldProps) => {
const { setFieldValue } = useFormikContext<any>();
const [field, meta, helpers] = useField<boolean>(name);
const { setValue } = helpers;
const handleChange = useCallback(
(checked: CheckedState) => {
if (!disabled) {
setFieldValue(name, checked);
}
},
[disabled, name, setFieldValue]
);
const handleChange = (e: React.ChangeEvent<HTMLInputElement>) => {
setValue(e.target.checked);
if (onChange) {
onChange(e);
}
};
return (
<div>
<label className="flex items-center text-sm cursor-pointer">
<TooltipProvider>
<Tooltip>
<TooltipTrigger>
<CheckboxField
name={name}
size="sm"
className={`
${disabled ? "opacity-50" : ""}
${removeIndent ? "mr-2" : "mx-3"}`}
onCheckedChange={handleChange}
/>
</TooltipTrigger>
{disabled && disabledTooltip && (
<TooltipContent side="top" align="center">
<p className="bg-background-900 max-w-[200px] mb-1 text-sm rounded-lg p-1.5 text-white">
{disabledTooltip}
</p>
</TooltipContent>
)}
</Tooltip>
</TooltipProvider>
<label className="flex text-sm">
<Field
type="checkbox"
{...field}
checked={checked !== undefined ? checked : field.value}
disabled={disabled}
onChange={handleChange}
className={`${removeIndent ? "mr-2" : "mx-3"}
px-5 w-3.5 h-3.5 ${alignTop ? "mt-1" : "my-auto"}`}
/>
{!noLabel && (
<div>
<div className="flex items-center gap-x-2">

View File

@@ -139,7 +139,7 @@ export function AssistantIcon({
alt={assistant.name}
src={buildImgUrl(assistant.uploaded_image_id)}
loading="lazy"
className={`h-[${dimension}px] w-[${dimension}px] rounded-full object-cover object-center transition-opacity duration-300 ${wrapperClass}`}
className={`h-[${dimension}px] w-[${dimension}px] object-cover object-center rounded-sm transition-opacity duration-300 ${wrapperClass}`}
style={style}
/>
) : (

View File

@@ -36,7 +36,6 @@ export default function TextView({
"text/plain",
"text/x-rst",
"text/x-org",
"txt",
];
return markdownFormats.some((format) => mimeType.startsWith(format));
};
@@ -118,10 +117,7 @@ export default function TextView({
return (
<Dialog open onOpenChange={onClose}>
<DialogContent
hideCloseIcon
className="max-w-5xl w-[90vw] flex flex-col justify-between gap-y-0 h-full max-h-[80vh] p-0"
>
<DialogContent className="max-w-5xl w-[90vw] flex flex-col justify-between gap-y-0 h-full max-h-[80vh] p-0">
<DialogHeader className="px-4 mb-0 pt-2 pb-3 flex flex-row items-center justify-between border-b">
<DialogTitle className="text-lg font-medium truncate">
{fileName}

View File

@@ -21,7 +21,7 @@ const DialogOverlay = React.forwardRef<
<DialogPrimitive.Overlay
ref={ref}
className={cn(
"fixed inset-0 z-50 bg-black/50 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0",
"fixed inset-0 z-50 bg-black/80 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0",
className
)}
{...props}