Compare commits

...

8 Commits

Author SHA1 Message Date
SubashMohan
5c4f44d258 fix: sharepoint large files issue (#5065)
* add SharePoint file size threshold check

* Implement retry logic for SharePoint queries to handle rate limiting and server errors

* mypy fix

* add content none check

* remove unreachable code from retry logic in sharepoint connector
2025-07-24 14:26:01 +00:00
Evan Lohn
19652ad60e attempt fix for broken excel files (#5071) 2025-07-24 01:21:13 +00:00
Evan Lohn
70c96b6ab3 fix: remove locks from indexing callback (#5070) 2025-07-23 23:05:35 +00:00
Raunak Bhagat
65076b916f refactor: Update location of sidebar (#5067)
* Use props instead of inline type def

* Add new AppProvider

* Remove unused component file

* Move `sessionSidebar` to be inside of `components` instead of `app/chat`

* Change name of `sessionSidebar` to `sidebar`

* Remove `AppModeProvider`

* Fix bug in how the cookies were set
2025-07-23 21:59:34 +00:00
PaulHLiatrio
06bc0e51db fix: adjust template variable from .Chart.AppVersion to .Values.global.version to match versioning pattern. (#5069) 2025-07-23 14:54:32 -07:00
Devin
508b456b40 fix: explicit api_server dependency on minio in docker compose files (#5066) 2025-07-23 13:37:42 -07:00
Evan Lohn
bf1e2a2661 feat: avoid full rerun (#5063)
* fix: remove extra group sync

* second extra task

* minor improvement for non-checkpointed connectors
2025-07-23 18:01:23 +00:00
Evan Lohn
991d5e4203 fix: regen api key (#5064) 2025-07-23 03:36:51 +00:00
31 changed files with 146 additions and 158 deletions

View File

@@ -9,7 +9,6 @@ import sentry_sdk
from celery import Celery
from celery import shared_task
from celery import Task
from redis.lock import Lock as RedisLock
from onyx.background.celery.apps.app_base import task_logger
from onyx.background.celery.memory_monitoring import emit_process_memory
@@ -24,7 +23,6 @@ from onyx.background.indexing.job_client import SimpleJob
from onyx.background.indexing.job_client import SimpleJobClient
from onyx.background.indexing.job_client import SimpleJobException
from onyx.background.indexing.run_docfetching import run_indexing_entrypoint
from onyx.configs.constants import CELERY_INDEXING_LOCK_TIMEOUT
from onyx.configs.constants import CELERY_INDEXING_WATCHDOG_CONNECTOR_TIMEOUT
from onyx.configs.constants import OnyxCeleryTask
from onyx.connectors.exceptions import ConnectorValidationError
@@ -37,7 +35,6 @@ from onyx.db.index_attempt import mark_attempt_failed
from onyx.db.indexing_coordination import IndexingCoordination
from onyx.redis.redis_connector import RedisConnector
from onyx.redis.redis_connector_index import RedisConnectorIndex
from onyx.redis.redis_pool import get_redis_client
from onyx.utils.logger import setup_logger
from onyx.utils.variable_functionality import global_version
from shared_configs.configs import SENTRY_DSN
@@ -159,7 +156,7 @@ def _docfetching_task(
)
redis_connector = RedisConnector(tenant_id, cc_pair_id)
redis_connector_index = redis_connector.new_index(search_settings_id)
redis_connector.new_index(search_settings_id)
# TODO: remove all fences, cause all signals to be set in postgres
if redis_connector.delete.fenced:
@@ -184,34 +181,6 @@ def _docfetching_task(
# This replaces the Redis fence payload waiting
_verify_indexing_attempt(index_attempt_id, cc_pair_id, search_settings_id)
# We still need a basic Redis lock to prevent duplicate task execution
# but this is much simpler than the full fencing mechanism
r = get_redis_client()
# set thread_local=False since we don't control what thread the indexing/pruning
# might run our callback with
lock: RedisLock = r.lock(
redis_connector_index.generator_lock_key,
timeout=CELERY_INDEXING_LOCK_TIMEOUT,
thread_local=False,
)
acquired = lock.acquire(blocking=False)
if not acquired:
logger.warning(
f"Docfetching task already running, exiting...: "
f"index_attempt={index_attempt_id} "
f"cc_pair={cc_pair_id} "
f"search_settings={search_settings_id}"
)
raise SimpleJobException(
f"Docfetching task already running, exiting...: "
f"index_attempt={index_attempt_id} "
f"cc_pair={cc_pair_id} "
f"search_settings={search_settings_id}",
code=IndexingWatchdogTerminalStatus.TASK_ALREADY_RUNNING.code,
)
try:
with get_session_with_current_tenant() as db_session:
attempt = get_index_attempt(db_session, index_attempt_id)
@@ -234,10 +203,7 @@ def _docfetching_task(
# define a callback class
callback = IndexingCallback(
os.getppid(),
redis_connector,
lock,
r,
)
logger.info(
@@ -284,10 +250,6 @@ def _docfetching_task(
except Exception:
raise e
finally:
if lock.owned():
lock.release()
logger.info(
f"Indexing spawned task finished: attempt={index_attempt_id} "
f"cc_pair={cc_pair_id} "

View File

@@ -1,4 +1,3 @@
import os
import time
import traceback
from collections import defaultdict
@@ -7,7 +6,6 @@ from datetime import timedelta
from datetime import timezone
from http import HTTPStatus
from typing import Any
from typing import cast
from celery import shared_task
from celery import Task
@@ -29,12 +27,10 @@ from onyx.background.celery.tasks.docprocessing.utils import (
try_creating_docfetching_task,
)
from onyx.background.celery.tasks.models import DocProcessingContext
from onyx.background.celery.tasks.models import IndexingWatchdogTerminalStatus
from onyx.background.indexing.checkpointing_utils import cleanup_checkpoint
from onyx.background.indexing.checkpointing_utils import (
get_index_attempts_with_old_checkpoints,
)
from onyx.background.indexing.job_client import SimpleJobException
from onyx.configs.app_configs import MANAGED_VESPA
from onyx.configs.app_configs import VESPA_CLOUD_CERT_PATH
from onyx.configs.app_configs import VESPA_CLOUD_KEY_PATH
@@ -1098,41 +1094,9 @@ def _docprocessing_task(
timeout=CELERY_INDEXING_LOCK_TIMEOUT,
thread_local=False,
)
# set thread_local=False since we don't control what thread the indexing/pruning
# might run our callback with
per_batch_lock = cast(
RedisLock,
r.lock(
redis_connector_index.lock_key_by_batch(batch_num),
timeout=CELERY_INDEXING_LOCK_TIMEOUT,
thread_local=False,
),
)
acquired = per_batch_lock.acquire(blocking=False)
if not acquired:
logger.warning(
f"Indexing batch task already running, exiting...: "
f"index_attempt={index_attempt_id} "
f"cc_pair={cc_pair_id} "
f"search_settings={index_attempt.search_settings.id} "
f"batch_num={batch_num}"
)
raise SimpleJobException(
f"Indexing batch task already running, exiting...: "
f"index_attempt={index_attempt_id} "
f"cc_pair={cc_pair_id} "
f"search_settings={index_attempt.search_settings.id} "
f"batch_num={batch_num}",
code=IndexingWatchdogTerminalStatus.TASK_ALREADY_RUNNING.code,
)
callback = IndexingCallback(
os.getppid(),
redis_connector,
per_batch_lock,
r,
)
# TODO: right now this is the only thing the callback is used for,
# probably there is a simpler way to handle pausing
@@ -1170,8 +1134,6 @@ def _docprocessing_task(
f"Processing {len(documents)} documents through indexing pipeline"
)
per_batch_lock.reacquire()
# real work happens here!
index_pipeline_result = run_indexing_pipeline(
embedder=embedding_model,
@@ -1183,7 +1145,6 @@ def _docprocessing_task(
document_batch=documents,
index_attempt_metadata=index_attempt_metadata,
)
per_batch_lock.reacquire()
# Update batch completion and document counts atomically using database coordination

View File

@@ -110,18 +110,23 @@ class IndexingCallbackBase(IndexingHeartbeatInterface):
raise
class IndexingCallback(IndexingCallbackBase):
# NOTE: we're in the process of removing all fences from indexing; this will
# eventually no longer be used. For now, it is used only for connector pausing.
class IndexingCallback(IndexingHeartbeatInterface):
def __init__(
self,
parent_pid: int,
redis_connector: RedisConnector,
redis_lock: RedisLock,
redis_client: Redis,
):
super().__init__(parent_pid, redis_connector, redis_lock, redis_client)
self.redis_connector = redis_connector
def should_stop(self) -> bool:
# Check if the associated indexing attempt has been cancelled
# TODO: Pass index_attempt_id to the callback and check cancellation using the db
return bool(self.redis_connector.stop.fenced)
# included to satisfy old interface
def progress(self, tag: str, amount: int) -> None:
super().progress(tag, amount)
pass
# NOTE: The validate_indexing_fence and validate_indexing_fences functions have been removed

View File

@@ -1060,9 +1060,14 @@ def connector_document_extraction(
connector=connector_runner.connector,
)
# checkpoint resumption OR the connector already finished.
if (
isinstance(connector_runner.connector, CheckpointedConnector)
and resuming_from_checkpoint
) or (
most_recent_attempt
and most_recent_attempt.total_batches is not None
and not checkpoint.has_more
):
reissued_batch_count, completed_batches = reissue_old_batches(
batch_storage,
@@ -1108,15 +1113,8 @@ def connector_document_extraction(
# index being built. We want to populate it even for paused connectors
# Often paused connectors are sources that aren't updated frequently but the
# contents still need to be initially pulled.
if callback:
if callback.should_stop():
raise ConnectorStopSignal("Connector stop signal detected")
# NOTE: this progress callback runs on every loop. We've seen cases
# where we loop many times with no new documents and eventually time
# out, so only doing the callback after indexing isn't sufficient.
# TODO: change to doc extraction if it doesnt break things
callback.progress("_run_indexing", 0)
if callback and callback.should_stop():
raise ConnectorStopSignal("Connector stop signal detected")
# will exception if the connector/index attempt is marked as paused/failed
with get_session_with_current_tenant() as db_session_tmp:

View File

@@ -468,6 +468,11 @@ GOOGLE_DRIVE_CONNECTOR_SIZE_THRESHOLD = int(
os.environ.get("GOOGLE_DRIVE_CONNECTOR_SIZE_THRESHOLD", 10 * 1024 * 1024)
)
# Maximum size (in bytes) of a SharePoint file the connector will download;
# larger files are skipped. Defaults to 20MB and can be overridden via the
# SHAREPOINT_CONNECTOR_SIZE_THRESHOLD environment variable.
SHAREPOINT_CONNECTOR_SIZE_THRESHOLD = int(
    os.environ.get("SHAREPOINT_CONNECTOR_SIZE_THRESHOLD", 20 * 1024 * 1024)
)
JIRA_CONNECTOR_LABELS_TO_SKIP = [
ignored_tag
for ignored_tag in os.environ.get("JIRA_CONNECTOR_LABELS_TO_SKIP", "").split(",")

View File

@@ -1,5 +1,6 @@
import io
import os
import time
from collections.abc import Generator
from datetime import datetime
from datetime import timezone
@@ -11,9 +12,11 @@ from office365.graph_client import GraphClient # type: ignore
from office365.onedrive.driveitems.driveItem import DriveItem # type: ignore
from office365.onedrive.sites.site import Site # type: ignore
from office365.onedrive.sites.sites_with_root import SitesWithRoot # type: ignore
from office365.runtime.client_request import ClientRequestException # type: ignore
from pydantic import BaseModel
from onyx.configs.app_configs import INDEX_BATCH_SIZE
from onyx.configs.app_configs import SHAREPOINT_CONNECTOR_SIZE_THRESHOLD
from onyx.configs.constants import DocumentSource
from onyx.connectors.interfaces import GenerateDocumentsOutput
from onyx.connectors.interfaces import LoadConnector
@@ -46,12 +49,72 @@ class SiteDescriptor(BaseModel):
folder_path: str | None
def _sleep_and_retry(query_obj: Any, method_name: str, max_retries: int = 3) -> Any:
"""
Execute a SharePoint query with retry logic for rate limiting.
"""
for attempt in range(max_retries + 1):
try:
return query_obj.execute_query()
except ClientRequestException as e:
if (
e.response
and e.response.status_code in [429, 503]
and attempt < max_retries
):
logger.warning(
f"Rate limit exceeded on {method_name}, attempt {attempt + 1}/{max_retries + 1}, sleeping and retrying"
)
retry_after = e.response.headers.get("Retry-After")
if retry_after:
sleep_time = int(retry_after)
else:
# Exponential backoff: 2^attempt * 5 seconds
sleep_time = min(30, (2**attempt) * 5)
logger.info(f"Sleeping for {sleep_time} seconds before retry")
time.sleep(sleep_time)
else:
# Either not a rate limit error, or we've exhausted retries
if e.response and e.response.status_code == 429:
logger.error(
f"Rate limit retry exhausted for {method_name} after {max_retries} attempts"
)
raise e
def _convert_driveitem_to_document(
driveitem: DriveItem,
drive_name: str,
) -> Document:
) -> Document | None:
# Check file size before downloading
try:
size_value = getattr(driveitem, "size", None)
if size_value is not None:
file_size = int(size_value)
if file_size > SHAREPOINT_CONNECTOR_SIZE_THRESHOLD:
logger.warning(
f"File '{driveitem.name}' exceeds size threshold of {SHAREPOINT_CONNECTOR_SIZE_THRESHOLD} bytes. "
f"File size: {file_size} bytes. Skipping."
)
return None
else:
logger.warning(
f"Could not access file size for '{driveitem.name}' Proceeding with download."
)
except (ValueError, TypeError, AttributeError) as e:
logger.info(
f"Could not access file size for '{driveitem.name}': {e}. Proceeding with download."
)
# Proceed with download if size is acceptable or not available
content = _sleep_and_retry(driveitem.get_content(), "get_content")
if content is None:
logger.warning(f"Could not access content for '{driveitem.name}'")
return None
file_text = extract_file_text(
file=io.BytesIO(driveitem.get_content().execute_query().value),
file=io.BytesIO(content.value),
file_name=driveitem.name,
break_on_unprocessable=False,
)
@@ -275,7 +338,11 @@ class SharepointConnector(LoadConnector, PollConnector):
driveitems = self._fetch_driveitems(site_descriptor, start=start, end=end)
for driveitem, drive_name in driveitems:
logger.debug(f"Processing: {driveitem.web_url}")
doc_batch.append(_convert_driveitem_to_document(driveitem, drive_name))
# Convert driveitem to document with size checking
doc = _convert_driveitem_to_document(driveitem, drive_name)
if doc is not None:
doc_batch.append(doc)
if len(doc_batch) >= self.batch_size:
yield doc_batch

View File

@@ -148,7 +148,10 @@ def regenerate_api_key(db_session: Session, api_key_id: int) -> ApiKeyDescriptor
if api_key_user is None:
raise RuntimeError("API Key does not have associated user.")
new_api_key = generate_api_key()
# Get tenant_id from context var (will be default schema for single tenant)
tenant_id = get_current_tenant_id()
new_api_key = generate_api_key(tenant_id)
existing_api_key.hashed_api_key = hash_api_key(new_api_key)
existing_api_key.api_key_display = build_displayable_api_key(new_api_key)
db_session.commit()

View File

@@ -83,6 +83,11 @@ IMAGE_MEDIA_TYPES = [
"image/webp",
]
# Substrings of exception messages produced by known openpyxl bugs when
# parsing malformed .xlsx files. xlsx_to_text matches caught exception text
# against these so such failures are logged instead of propagated.
KNOWN_OPENPYXL_BUGS = [
    "Value must be either numerical or a string containing a wildcard",
    "File contains no valid workbook part",
]
class OnyxExtensionType(IntFlag):
Plain = auto()
@@ -374,7 +379,7 @@ def xlsx_to_text(file: IO[Any], file_name: str = "") -> str:
logger.warning(error_str)
return ""
except Exception as e:
if "File contains no valid workbook part" in str(e):
if any(s in str(e) for s in KNOWN_OPENPYXL_BUGS):
logger.error(
f"Failed to extract text from {file_name or 'xlsx file'}. This happens due to a bug in openpyxl. {e}"
)

View File

@@ -258,8 +258,6 @@ class EmbeddingModel:
try:
result = future.result()
batch_results.append(result)
if self.callback:
self.callback.progress("_batch_encode_texts", 1)
except Exception as e:
logger.exception("Embedding model failed to process batch")
raise e
@@ -279,8 +277,6 @@ class EmbeddingModel:
request_id=request_id,
)
embeddings.extend(batch_embeddings)
if self.callback:
self.callback.progress("_batch_encode_texts", 1)
return embeddings

View File

@@ -79,9 +79,6 @@ class RedisConnectorIndex:
f"{self.TERMINATE_PREFIX}_{cc_pair_id}/{search_settings_id}"
)
def lock_key_by_batch(self, batch_n: int) -> str:
return f"{self.per_worker_lock_key}/{batch_n}"
def set_generator_complete(self, payload: int | None) -> None:
if not payload:
self.redis.delete(self.generator_complete_key)

View File

@@ -49,7 +49,7 @@ nltk==3.9.1
Office365-REST-Python-Client==2.5.9
oauthlib==3.2.2
openai==1.75.0
openpyxl==3.1.2
openpyxl==3.0.10
passlib==1.7.4
playwright==1.41.2
psutil==5.9.5

View File

@@ -13,6 +13,7 @@ services:
- index
- cache
- inference_model_server
- minio
restart: unless-stopped
ports:
- "8080:8080"

View File

@@ -13,6 +13,7 @@ services:
- index
- cache
- inference_model_server
- minio
restart: unless-stopped
ports:
- "8080:8080"

View File

@@ -14,6 +14,7 @@ services:
- index
- cache
- inference_model_server
- minio
restart: unless-stopped
ports:
- "8080:8080"

View File

@@ -13,6 +13,7 @@ services:
- index
- cache
- inference_model_server
- minio
restart: unless-stopped
env_file:
- .env

View File

@@ -13,6 +13,7 @@ services:
- index
- cache
- inference_model_server
- minio
restart: unless-stopped
env_file:
- .env

View File

@@ -13,6 +13,7 @@ services:
- relational_db
- index
- cache
- minio
- inference_model_server
restart: unless-stopped
env_file:

View File

@@ -12,6 +12,7 @@ services:
- relational_db
- index
- cache
- minio
restart: unless-stopped
ports:
- "8080"

View File

@@ -37,7 +37,7 @@ spec:
- name: celery-worker-docfetching
securityContext:
{{- toYaml .Values.celery_worker_docfetching.securityContext | nindent 12 }}
image: "{{ .Values.celery_shared.image.repository }}:{{ .Values.celery_shared.image.tag | default .Chart.AppVersion }}"
image: "{{ .Values.celery_shared.image.repository }}:{{ .Values.celery_shared.image.tag | default .Values.global.version }}"
imagePullPolicy: {{ .Values.global.pullPolicy }}
command:
[

View File

@@ -9,7 +9,7 @@ import { useRouter } from "next/navigation";
import FixedLogo from "../../components/logo/FixedLogo";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { useChatContext } from "@/components/context/ChatContext";
import { HistorySidebar } from "../chat/sessionSidebar/HistorySidebar";
import { HistorySidebar } from "@/components/sidebar/HistorySidebar";
import { useAssistants } from "@/components/context/AssistantsContext";
import AssistantModal from "./mine/AssistantModal";
import { useSidebarShortcut } from "@/lib/browserUtilities";
@@ -35,11 +35,9 @@ export default function SidebarWrapper<T extends object>({
const toggleSidebar = useCallback(() => {
Cookies.set(
SIDEBAR_TOGGLED_COOKIE_NAME,
String(!sidebarVisible).toLocaleLowerCase()
),
{
path: "/",
};
String(!sidebarVisible).toLocaleLowerCase(),
{ path: "/" }
);
setSidebarVisible((sidebarVisible) => !sidebarVisible);
}, [sidebarVisible]);

View File

@@ -28,7 +28,7 @@ import {
import Prism from "prismjs";
import Cookies from "js-cookie";
import { HistorySidebar } from "./sessionSidebar/HistorySidebar";
import { HistorySidebar } from "@/components/sidebar/HistorySidebar";
import { MinimalPersonaSnapshot } from "../admin/assistants/interfaces";
import { HealthCheckBanner } from "@/components/health/healthcheck";
import {
@@ -150,6 +150,15 @@ export enum UploadIntent {
ADD_TO_DOCUMENTS, // For files uploaded via FilePickerModal or similar (just add to repo)
}
type ChatPageProps = {
toggle: (toggled?: boolean) => void;
documentSidebarInitialWidth?: number;
sidebarVisible: boolean;
firstMessage?: string;
initialFolders?: any;
initialFiles?: any;
};
// ---
// File Attachment Behavior in ChatPage
//
@@ -171,14 +180,7 @@ export function ChatPage({
firstMessage,
initialFolders,
initialFiles,
}: {
toggle: (toggled?: boolean) => void;
documentSidebarInitialWidth?: number;
sidebarVisible: boolean;
firstMessage?: string;
initialFolders?: any;
initialFiles?: any;
}) {
}: ChatPageProps) {
const router = useRouter();
const searchParams = useSearchParams();

View File

@@ -2,7 +2,7 @@
import React, { useState, useEffect, useRef } from "react";
import { Folder } from "./interfaces";
import { ChatSessionDisplay } from "../sessionSidebar/ChatSessionDisplay"; // Ensure this is correctly imported
import { ChatSessionDisplay } from "@/components/sidebar/ChatSessionDisplay"; // Ensure this is correctly imported
import {
FiChevronDown,
FiChevronRight,

View File

@@ -1,20 +0,0 @@
import { useRouter } from "next/router";
import { ChatSession } from "../interfaces";
export const ChatGroup = ({
groupName,
toggled,
chatSessions,
}: {
groupName: string;
toggled: boolean;
chatSessions: ChatSession[];
}) => {
const router = useRouter();
return toggled ? (
<div>
<p>{groupName}</p>
</div>
) : null;
};

View File

@@ -7,7 +7,9 @@ const isPostHogEnabled = !!(
process.env.NEXT_PUBLIC_POSTHOG_KEY && process.env.NEXT_PUBLIC_POSTHOG_HOST
);
export function PHProvider({ children }: { children: React.ReactNode }) {
type PHProviderProps = { children: React.ReactNode };
export function PHProvider({ children }: PHProviderProps) {
useEffect(() => {
if (isPostHogEnabled) {
posthog.init(process.env.NEXT_PUBLIC_POSTHOG_KEY!, {

View File

@@ -10,7 +10,7 @@ import { Popover } from "./popover/Popover";
import { LOGOUT_DISABLED } from "@/lib/constants";
import { SettingsContext } from "./settings/SettingsProvider";
import { BellIcon, LightSettingsIcon, UserIcon } from "./icons/icons";
import { pageType } from "@/app/chat/sessionSidebar/types";
import { pageType } from "@/components/sidebar/types";
import { NavigationItem, Notification } from "@/app/admin/settings/interfaces";
import DynamicFaIcon, { preloadIcons } from "./icons/DynamicFaIcon";
import { useUser } from "./user/UserProvider";

View File

@@ -4,7 +4,7 @@ import { FiShare2 } from "react-icons/fi";
import { SetStateAction, useContext, useEffect } from "react";
import { ChatSession } from "@/app/chat/interfaces";
import Link from "next/link";
import { pageType } from "@/app/chat/sessionSidebar/types";
import { pageType } from "@/components/sidebar/types";
import { useRouter } from "next/navigation";
import { ChatBanner } from "@/app/chat/ChatBanner";
import LogoWithText from "../header/LogoWithText";

View File

@@ -9,7 +9,7 @@ import {
TooltipProvider,
TooltipTrigger,
} from "@/components/ui/tooltip";
import { pageType } from "@/app/chat/sessionSidebar/types";
import { pageType } from "@/components/sidebar/types";
import { Logo } from "../logo/Logo";
import Link from "next/link";
import { LogoComponent } from "@/components/logo/FixedLogo";

View File

@@ -1,13 +1,13 @@
"use client";
import { useRouter } from "next/navigation";
import { ChatSession } from "../interfaces";
import { ChatSession } from "@/app/chat/interfaces";
import { useState, useEffect, useContext, useRef, useCallback } from "react";
import {
deleteChatSession,
getChatRetentionInfo,
renameChatSession,
} from "../lib";
} from "@/app/chat/lib";
import { BasicSelectable } from "@/components/BasicClickable";
import Link from "next/link";
import {
@@ -20,7 +20,7 @@ import {
} from "react-icons/fi";
import { DefaultDropdownElement } from "@/components/Dropdown";
import { Popover } from "@/components/popover/Popover";
import { ShareChatSessionModal } from "../modal/ShareChatSessionModal";
import { ShareChatSessionModal } from "@/app/chat/modal/ShareChatSessionModal";
import { CHAT_SESSION_ID_KEY, FOLDER_ID_KEY } from "@/lib/drag/constants";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { DragHandle } from "@/components/table/DragHandle";

View File

@@ -15,8 +15,8 @@ import {
} from "@/components/ui/tooltip";
import { useRouter, useSearchParams } from "next/navigation";
import { ChatSession } from "../interfaces";
import { Folder } from "../folders/interfaces";
import { ChatSession } from "@/app/chat/interfaces";
import { Folder } from "@/app/chat/folders/interfaces";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import {
@@ -31,7 +31,7 @@ import { MinimalPersonaSnapshot } from "@/app/admin/assistants/interfaces";
import { DragEndEvent } from "@dnd-kit/core";
import { useAssistants } from "@/components/context/AssistantsContext";
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
import { buildChatUrl } from "../lib";
import { buildChatUrl } from "@/app/chat/lib";
import { reorderPinnedAssistants } from "@/lib/assistants/updateAssistantPreferences";
import { useUser } from "@/components/user/UserProvider";
import { DragHandle } from "@/components/table/DragHandle";

View File

@@ -1,20 +1,20 @@
import { ChatSession } from "../interfaces";
import { ChatSession } from "@/app/chat/interfaces";
import {
createFolder,
updateFolderName,
deleteFolder,
addChatToFolder,
updateFolderDisplayPriorities,
} from "../folders/FolderManagement";
import { Folder } from "../folders/interfaces";
} from "@/app/chat/folders/FolderManagement";
import { Folder } from "@/app/chat/folders/interfaces";
import { usePopup } from "@/components/admin/connectors/Popup";
import { useRouter } from "next/navigation";
import { FiPlus, FiCheck, FiX } from "react-icons/fi";
import { FolderDropdown } from "../folders/FolderDropdown";
import { FolderDropdown } from "@/app/chat/folders/FolderDropdown";
import { ChatSessionDisplay } from "./ChatSessionDisplay";
import { useState, useCallback, useRef, useContext, useEffect } from "react";
import { Caret } from "@/components/icons/icons";
import { groupSessionsByDateRange } from "../lib";
import { groupSessionsByDateRange } from "@/app/chat/lib";
import React from "react";
import {
Tooltip,