Compare commits

14 Commits

Author        SHA1        Date                         Message
pablonyx      411f912b9f  2025-04-01 12:32:19 -07:00   k
pablonyx      7de576ce55  2025-04-01 12:31:40 -07:00   k
pablonyx      b48ebeacab  2025-04-01 12:30:38 -07:00   update
pablonyx      d7a2329580  2025-04-01 12:04:05 -07:00   k
pablonyx      4db048797c  2025-04-01 12:03:52 -07:00   k
pablonyx      70197e8329  2025-04-01 12:03:34 -07:00   k
pablonyx      ca0738ed65  2025-04-01 12:03:34 -07:00   remove logs
pablonyx      6a0e8a1f8c  2025-04-01 12:03:34 -07:00   validate advanced fields + proper yup assurances for lists
pablonyx      f055cbcbad  2025-04-01 11:46:49 -07:00   Very minor auth standardization (#4400)
pablonyx      2f3020a4d3  2025-04-01 09:10:24 -07:00   Update migration (#4410)
SubashMohan   4bae1318bb  2025-04-01 02:39:05 +00:00   refactor tests for Highspot connector to use mocking for API key retrieval (#4346)
Weves         11c3f44c76  2025-03-31 17:04:20 -07:00   Init engine in slackbot
rkuo-danswer  cb38ac8a97  2025-03-31 14:59:31 -07:00   also set permission upsert to medium priority (#4405) (Co-authored-by: Richard Kuo (Onyx) <rkuo@onyx.app>)
pablonyx      b2120b9f39  2025-03-31 21:06:59 +00:00   add user files (#4152)
21 changed files with 302 additions and 388 deletions

View File

@@ -1,77 +0,0 @@
"""updated constraints for ccpairs
Revision ID: f7505c5b0284
Revises: 6a804aeb4830
Create Date: 2025-04-01 17:50:42.504818
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "f7505c5b0284"
down_revision = "6a804aeb4830"
branch_labels = None
depends_on = None
def upgrade() -> None:
# 1) Drop the old foreign-key constraints
op.drop_constraint(
"document_by_connector_credential_pair_connector_id_fkey",
"document_by_connector_credential_pair",
type_="foreignkey",
)
op.drop_constraint(
"document_by_connector_credential_pair_credential_id_fkey",
"document_by_connector_credential_pair",
type_="foreignkey",
)
# 2) Re-add them with ondelete='CASCADE'
op.create_foreign_key(
"document_by_connector_credential_pair_connector_id_fkey",
source_table="document_by_connector_credential_pair",
referent_table="connector",
local_cols=["connector_id"],
remote_cols=["id"],
ondelete="CASCADE",
)
op.create_foreign_key(
"document_by_connector_credential_pair_credential_id_fkey",
source_table="document_by_connector_credential_pair",
referent_table="credential",
local_cols=["credential_id"],
remote_cols=["id"],
ondelete="CASCADE",
)
def downgrade() -> None:
# Reverse the changes for rollback
op.drop_constraint(
"document_by_connector_credential_pair_connector_id_fkey",
"document_by_connector_credential_pair",
type_="foreignkey",
)
op.drop_constraint(
"document_by_connector_credential_pair_credential_id_fkey",
"document_by_connector_credential_pair",
type_="foreignkey",
)
# Recreate without CASCADE
op.create_foreign_key(
"document_by_connector_credential_pair_connector_id_fkey",
"document_by_connector_credential_pair",
"connector",
["connector_id"],
["id"],
)
op.create_foreign_key(
"document_by_connector_credential_pair_credential_id_fkey",
"document_by_connector_credential_pair",
"credential",
["credential_id"],
["id"],
)
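
For readers skimming the revert: ondelete="CASCADE" on these foreign keys makes the database itself remove dependent document_by_connector_credential_pair rows when the parent connector or credential row is deleted; without it, deleting the parent raises a foreign-key violation unless the application removes the children first. A minimal sketch of that difference, using throwaway SQLAlchemy models and SQLite purely for illustration (not the app's real models):

# Illustration only (not the app's models): what ON DELETE CASCADE changes
# when a parent row is deleted.
from sqlalchemy import Column, ForeignKey, Integer, String, create_engine, event
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()

class Connector(Base):
    __tablename__ = "connector"
    id = Column(Integer, primary_key=True)

class DocByCCPair(Base):
    __tablename__ = "document_by_connector_credential_pair"
    doc_id = Column(String, primary_key=True)
    # With ondelete="CASCADE" the database removes this row when its connector
    # is deleted; without it, the parent DELETE fails with an integrity error
    # unless the child rows are deleted first.
    connector_id = Column(
        Integer, ForeignKey("connector.id", ondelete="CASCADE"), primary_key=True
    )

engine = create_engine("sqlite://")
# SQLite only enforces foreign keys when this pragma is on.
event.listen(engine, "connect", lambda conn, _: conn.execute("PRAGMA foreign_keys=ON"))
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Connector(id=1))
    session.flush()
    session.add(DocByCCPair(doc_id="doc-1", connector_id=1))
    session.commit()
    # Plain SQL delete of the parent: the child row disappears via the cascade.
    session.execute(Connector.__table__.delete().where(Connector.id == 1))
    session.commit()
    assert session.query(DocByCCPair).count() == 0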

View File

@@ -58,7 +58,6 @@ def _get_objects_access_for_user_email_from_salesforce(
f"Time taken to get Salesforce user ID: {end_time - start_time} seconds"
)
if user_id is None:
logger.warning(f"User '{user_email}' not found in Salesforce")
return None
# This is the only query that is not cached in the function
@@ -66,7 +65,6 @@ def _get_objects_access_for_user_email_from_salesforce(
object_id_to_access = get_objects_access_for_user_id(
salesforce_client, user_id, list(object_ids)
)
logger.debug(f"Object ID to access: {object_id_to_access}")
return object_id_to_access

View File

@@ -23,7 +23,6 @@ from onyx.utils.url import add_url_params
from onyx.utils.variable_functionality import fetch_versioned_implementation
from shared_configs.configs import MULTI_TENANT
HTML_EMAIL_TEMPLATE = """\
<!DOCTYPE html>
<html lang="en">

View File

@@ -56,7 +56,6 @@ from httpx_oauth.oauth2 import OAuth2Token
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncSession
from ee.onyx.configs.app_configs import ANONYMOUS_USER_COOKIE_NAME
from onyx.auth.api_key import get_hashed_api_key_from_request
from onyx.auth.email_utils import send_forgot_password_email
from onyx.auth.email_utils import send_user_verification_email
@@ -514,25 +513,6 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
return user
async def on_after_login(
self,
user: User,
request: Optional[Request] = None,
response: Optional[Response] = None,
) -> None:
try:
if response and request and ANONYMOUS_USER_COOKIE_NAME in request.cookies:
response.delete_cookie(
ANONYMOUS_USER_COOKIE_NAME,
# Ensure cookie deletion doesn't override other cookies by setting the same path/domain
path="/",
domain=None,
secure=WEB_DOMAIN.startswith("https"),
)
logger.debug(f"Deleted anonymous user cookie for user {user.email}")
except Exception:
logger.exception("Error deleting anonymous user cookie")
async def on_after_register(
self, user: User, request: Optional[Request] = None
) -> None:
@@ -1322,7 +1302,6 @@ def get_oauth_router(
# Login user
response = await backend.login(strategy, user)
await user_manager.on_after_login(user, request, response)
# Prepare redirect response
if tenant_id is None:
# Use URL utility to add parameters
@@ -1332,14 +1311,9 @@ def get_oauth_router(
# No parameters to add
redirect_response = RedirectResponse(next_url, status_code=302)
# Copy headers from auth response to redirect response, with special handling for Set-Cookie
# Copy headers and other attributes from 'response' to 'redirect_response'
for header_name, header_value in response.headers.items():
# FastAPI can have multiple Set-Cookie headers as a list
if header_name.lower() == "set-cookie" and isinstance(header_value, list):
for cookie_value in header_value:
redirect_response.headers.append(header_name, cookie_value)
else:
redirect_response.headers[header_name] = header_value
redirect_response.headers[header_name] = header_value
if hasattr(response, "body"):
redirect_response.body = response.body
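
The branch deleted above was there (per its own comment) to keep every Set-Cookie header when copying from the auth response, since plain item assignment on the headers mapping retains only one value per header name. A minimal sketch of that copy pattern, assuming Starlette's MutableHeaders API (the helper name is illustrative, not the actual handler):

# Sketch only: copying headers between Starlette responses while keeping every
# Set-Cookie entry. headers.append() adds another raw header; item assignment
# overwrites any previous value for the same name.
from starlette.responses import RedirectResponse, Response

def copy_headers(source: Response, target: RedirectResponse) -> None:
    for name, value in source.headers.items():
        if name.lower() == "set-cookie":
            target.headers.append(name, value)
        else:
            target.headers[name] = value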

View File

@@ -886,8 +886,11 @@ def monitor_ccpair_permissions_taskset(
record_type=RecordType.PERMISSION_SYNC_PROGRESS,
data={
"cc_pair_id": cc_pair_id,
"total_docs_synced": initial if initial is not None else 0,
"remaining_docs_to_sync": remaining,
"id": payload.id if payload else None,
"total_docs": initial if initial is not None else 0,
"remaining_docs": remaining,
"synced_docs": (initial - remaining) if initial is not None else 0,
"is_complete": remaining == 0,
},
tenant_id=tenant_id,
)
@@ -903,13 +906,6 @@ def monitor_ccpair_permissions_taskset(
f"num_synced={initial}"
)
# Add telemetry for permission syncing complete
optional_telemetry(
record_type=RecordType.PERMISSION_SYNC_COMPLETE,
data={"cc_pair_id": cc_pair_id},
tenant_id=tenant_id,
)
update_sync_record_status(
db_session=db_session,
entity_id=cc_pair_id,

View File

@@ -56,6 +56,7 @@ from onyx.indexing.indexing_pipeline import build_indexing_pipeline
from onyx.natural_language_processing.search_nlp_models import (
InformationContentClassificationModel,
)
from onyx.redis.redis_connector import RedisConnector
from onyx.utils.logger import setup_logger
from onyx.utils.logger import TaskAttemptSingleton
from onyx.utils.telemetry import create_milestone_and_report
@@ -577,8 +578,11 @@ def _run_indexing(
data={
"index_attempt_id": index_attempt_id,
"cc_pair_id": ctx.cc_pair_id,
"current_docs_indexed": document_count,
"current_chunks_indexed": chunk_count,
"connector_id": ctx.connector_id,
"credential_id": ctx.credential_id,
"total_docs_indexed": document_count,
"total_chunks": chunk_count,
"batch_num": batch_num,
"source": ctx.source.value,
},
tenant_id=tenant_id,
@@ -599,15 +603,26 @@ def _run_indexing(
checkpoint=checkpoint,
)
# Add telemetry for completed indexing
redis_connector = RedisConnector(tenant_id, ctx.cc_pair_id)
redis_connector_index = redis_connector.new_index(
index_attempt_start.search_settings_id
)
final_progress = redis_connector_index.get_progress() or 0
optional_telemetry(
record_type=RecordType.INDEXING_COMPLETE,
data={
"index_attempt_id": index_attempt_id,
"cc_pair_id": ctx.cc_pair_id,
"connector_id": ctx.connector_id,
"credential_id": ctx.credential_id,
"total_docs_indexed": document_count,
"total_chunks": chunk_count,
"batch_count": batch_num,
"time_elapsed_seconds": time.monotonic() - start_time,
"source": ctx.source.value,
"redis_progress": final_progress,
},
tenant_id=tenant_id,
)

View File

@@ -5,13 +5,11 @@ from typing import cast
from sqlalchemy.orm import Session
from onyx.chat.models import ContextualPruningConfig
from onyx.chat.models import PromptConfig
from onyx.chat.models import SectionRelevancePiece
from onyx.chat.prune_and_merge import _merge_sections
from onyx.chat.prune_and_merge import ChunkRange
from onyx.chat.prune_and_merge import merge_chunk_intervals
from onyx.chat.prune_and_merge import prune_and_merge_sections
from onyx.configs.chat_configs import DISABLE_LLM_DOC_RELEVANCE
from onyx.context.search.enums import LLMEvaluationType
from onyx.context.search.enums import QueryFlow
@@ -63,7 +61,6 @@ class SearchPipeline:
| None = None,
rerank_metrics_callback: Callable[[RerankMetricsContainer], None] | None = None,
prompt_config: PromptConfig | None = None,
contextual_pruning_config: ContextualPruningConfig | None = None,
):
# NOTE: The Search Request contains a lot of fields that are overrides, many of them can be None
# and typically are None. The preprocessing will fetch default values to replace these empty overrides.
@@ -80,9 +77,6 @@ class SearchPipeline:
self.search_settings = get_current_search_settings(db_session)
self.document_index = get_default_document_index(self.search_settings, None)
self.prompt_config: PromptConfig | None = prompt_config
self.contextual_pruning_config: ContextualPruningConfig | None = (
contextual_pruning_config
)
# Preprocessing steps generate this
self._search_query: SearchQuery | None = None
@@ -426,26 +420,7 @@ class SearchPipeline:
if self._final_context_sections is not None:
return self._final_context_sections
if (
self.contextual_pruning_config is not None
and self.prompt_config is not None
):
self._final_context_sections = prune_and_merge_sections(
sections=self.reranked_sections,
section_relevance_list=None,
prompt_config=self.prompt_config,
llm_config=self.llm.config,
question=self.search_query.query,
contextual_pruning_config=self.contextual_pruning_config,
)
else:
logger.error(
"Contextual pruning or prompt config not set, using default merge"
)
self._final_context_sections = _merge_sections(
sections=self.reranked_sections
)
self._final_context_sections = _merge_sections(sections=self.reranked_sections)
return self._final_context_sections
@property

View File

@@ -217,6 +217,7 @@ def mark_attempt_in_progress(
"index_attempt_id": index_attempt.id,
"status": IndexingStatus.IN_PROGRESS.value,
"cc_pair_id": index_attempt.connector_credential_pair_id,
"search_settings_id": index_attempt.search_settings_id,
},
)
except Exception:
@@ -245,6 +246,9 @@ def mark_attempt_succeeded(
"index_attempt_id": index_attempt_id,
"status": IndexingStatus.SUCCESS.value,
"cc_pair_id": attempt.connector_credential_pair_id,
"search_settings_id": attempt.search_settings_id,
"total_docs_indexed": attempt.total_docs_indexed,
"new_docs_indexed": attempt.new_docs_indexed,
},
)
except Exception:
@@ -273,6 +277,9 @@ def mark_attempt_partially_succeeded(
"index_attempt_id": index_attempt_id,
"status": IndexingStatus.COMPLETED_WITH_ERRORS.value,
"cc_pair_id": attempt.connector_credential_pair_id,
"search_settings_id": attempt.search_settings_id,
"total_docs_indexed": attempt.total_docs_indexed,
"new_docs_indexed": attempt.new_docs_indexed,
},
)
except Exception:
@@ -305,6 +312,10 @@ def mark_attempt_canceled(
"index_attempt_id": index_attempt_id,
"status": IndexingStatus.CANCELED.value,
"cc_pair_id": attempt.connector_credential_pair_id,
"search_settings_id": attempt.search_settings_id,
"reason": reason,
"total_docs_indexed": attempt.total_docs_indexed,
"new_docs_indexed": attempt.new_docs_indexed,
},
)
except Exception:
@@ -339,6 +350,10 @@ def mark_attempt_failed(
"index_attempt_id": index_attempt_id,
"status": IndexingStatus.FAILED.value,
"cc_pair_id": attempt.connector_credential_pair_id,
"search_settings_id": attempt.search_settings_id,
"reason": failure_reason,
"total_docs_indexed": attempt.total_docs_indexed,
"new_docs_indexed": attempt.new_docs_indexed,
},
)
except Exception:

View File

@@ -703,11 +703,7 @@ class Connector(Base):
)
documents_by_connector: Mapped[
list["DocumentByConnectorCredentialPair"]
] = relationship(
"DocumentByConnectorCredentialPair",
back_populates="connector",
passive_deletes=True,
)
] = relationship("DocumentByConnectorCredentialPair", back_populates="connector")
# synchronize this validation logic with RefreshFrequencySchema etc on front end
# until we have a centralized validation schema
@@ -761,11 +757,7 @@ class Credential(Base):
)
documents_by_credential: Mapped[
list["DocumentByConnectorCredentialPair"]
] = relationship(
"DocumentByConnectorCredentialPair",
back_populates="credential",
passive_deletes=True,
)
] = relationship("DocumentByConnectorCredentialPair", back_populates="credential")
user: Mapped[User | None] = relationship("User", back_populates="credentials")
@@ -1118,10 +1110,10 @@ class DocumentByConnectorCredentialPair(Base):
id: Mapped[str] = mapped_column(ForeignKey("document.id"), primary_key=True)
# TODO: transition this to use the ConnectorCredentialPair id directly
connector_id: Mapped[int] = mapped_column(
ForeignKey("connector.id", ondelete="CASCADE"), primary_key=True
ForeignKey("connector.id"), primary_key=True
)
credential_id: Mapped[int] = mapped_column(
ForeignKey("credential.id", ondelete="CASCADE"), primary_key=True
ForeignKey("credential.id"), primary_key=True
)
# used to better keep track of document counts at a connector level
@@ -1131,10 +1123,10 @@ class DocumentByConnectorCredentialPair(Base):
has_been_indexed: Mapped[bool] = mapped_column(Boolean)
connector: Mapped[Connector] = relationship(
"Connector", back_populates="documents_by_connector", passive_deletes=True
"Connector", back_populates="documents_by_connector"
)
credential: Mapped[Credential] = relationship(
"Credential", back_populates="documents_by_credential", passive_deletes=True
"Credential", back_populates="documents_by_credential"
)
__table_args__ = (
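
Worth spelling out for the flags reverted above: in SQLAlchemy, passive_deletes=True asks the ORM not to load and individually delete the child DocumentByConnectorCredentialPair rows when a parent Connector or Credential is deleted, deferring instead to the database's ON DELETE CASCADE; removing the flag (together with the ondelete="CASCADE" foreign keys) means child cleanup no longer relies on that database-level cascade. A trimmed sketch of the reverted declarations (illustrative, not the complete models):

# Sketch of the reverted declarations (trimmed; not the full models).
from sqlalchemy import ForeignKey
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column, relationship

class Base(DeclarativeBase):
    pass

class Connector(Base):
    __tablename__ = "connector"
    id: Mapped[int] = mapped_column(primary_key=True)
    # The removed passive_deletes=True would have told the ORM to skip emitting
    # per-child DELETEs here and trust the database-level cascade instead.
    documents_by_connector: Mapped[list["DocumentByConnectorCredentialPair"]] = relationship(
        "DocumentByConnectorCredentialPair", back_populates="connector"
    )

class DocumentByConnectorCredentialPair(Base):
    __tablename__ = "document_by_connector_credential_pair"
    id: Mapped[str] = mapped_column(primary_key=True)
    # Reverted form: plain FK again, no ondelete="CASCADE".
    connector_id: Mapped[int] = mapped_column(ForeignKey("connector.id"), primary_key=True)
    connector: Mapped["Connector"] = relationship(
        "Connector", back_populates="documents_by_connector"
    )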

View File

@@ -5,7 +5,6 @@ from datetime import timezone
from onyx.configs.constants import INDEX_SEPARATOR
from onyx.context.search.models import IndexFilters
from onyx.document_index.interfaces import VespaChunkRequest
from onyx.document_index.vespa_constants import ACCESS_CONTROL_LIST
from onyx.document_index.vespa_constants import CHUNK_ID
from onyx.document_index.vespa_constants import DOC_UPDATED_AT
from onyx.document_index.vespa_constants import DOCUMENT_ID
@@ -75,10 +74,8 @@ def build_vespa_filters(
filter_str += f'({TENANT_ID} contains "{filters.tenant_id}") and '
# ACL filters
if filters.access_control_list is not None:
filter_str += _build_or_filters(
ACCESS_CONTROL_LIST, filters.access_control_list
)
# if filters.access_control_list is not None:
# filter_str += _build_or_filters(ACCESS_CONTROL_LIST, filters.access_control_list)
# Source type filters
source_strs = (

View File

@@ -376,7 +376,6 @@ class SearchTool(Tool[SearchToolOverrideKwargs]):
db_session=alternate_db_session or self.db_session,
prompt_config=self.prompt_config,
retrieved_sections_callback=retrieved_sections_callback,
contextual_pruning_config=self.contextual_pruning_config,
)
search_query_info = SearchQueryInfo(
@@ -448,7 +447,6 @@ class SearchTool(Tool[SearchToolOverrideKwargs]):
db_session=self.db_session,
bypass_acl=self.bypass_acl,
prompt_config=self.prompt_config,
contextual_pruning_config=self.contextual_pruning_config,
)
# Log what we're doing

View File

@@ -39,7 +39,6 @@ class RecordType(str, Enum):
INDEXING_PROGRESS = "indexing_progress"
INDEXING_COMPLETE = "indexing_complete"
PERMISSION_SYNC_PROGRESS = "permission_sync_progress"
PERMISSION_SYNC_COMPLETE = "permission_sync_complete"
INDEX_ATTEMPT_STATUS = "index_attempt_status"

View File

@@ -887,7 +887,6 @@ def main() -> None:
type=int,
help="Maximum number of documents to delete (for delete-all-documents)",
)
parser.add_argument("--link", help="Document link (for get_acls filter)")
args = parser.parse_args()
vespa_debug = VespaDebugging(args.tenant_id)
@@ -925,11 +924,7 @@ def main() -> None:
elif args.action == "get_acls":
if args.cc_pair_id is None:
parser.error("--cc-pair-id is required for get_acls action")
if args.link is None:
vespa_debug.acls(args.cc_pair_id, args.n)
else:
vespa_debug.acls_by_link(args.cc_pair_id, args.link)
vespa_debug.acls(args.cc_pair_id, args.n)
if __name__ == "__main__":

View File

@@ -313,29 +313,3 @@ class UserManager:
        )
        response.raise_for_status()
        return UserInfo(**response.json())

    @staticmethod
    def invite_users(
        user_performing_action: DATestUser,
        emails: list[str],
    ) -> int:
        response = requests.put(
            url=f"{API_SERVER_URL}/manage/admin/users",
            json={"emails": emails},
            headers=user_performing_action.headers,
        )
        response.raise_for_status()
        return response.json()

    @staticmethod
    def remove_invited_user(
        user_performing_action: DATestUser,
        user_email: str,
    ) -> int:
        response = requests.patch(
            url=f"{API_SERVER_URL}/manage/admin/remove-invited-user",
            json={"user_email": user_email},
            headers=user_performing_action.headers,
        )
        response.raise_for_status()
        return response.json()

View File

@@ -22,7 +22,6 @@ from onyx.document_index.document_index_utils import get_multipass_config
from onyx.document_index.vespa.index import DOCUMENT_ID_ENDPOINT
from onyx.document_index.vespa.index import VespaIndex
from onyx.indexing.models import IndexingSetting
from onyx.redis.redis_pool import get_redis_client
from onyx.setup import setup_postgres
from onyx.setup import setup_vespa
from onyx.utils.logger import setup_logger
@@ -238,12 +237,6 @@ def reset_vespa() -> None:
time.sleep(5)
def reset_redis() -> None:
"""Reset the Redis database."""
redis_client = get_redis_client()
redis_client.flushall()
def reset_postgres_multitenant() -> None:
"""Reset the Postgres database for all tenants in a multitenant setup."""
@@ -348,8 +341,6 @@ def reset_all() -> None:
reset_postgres()
logger.info("Resetting Vespa...")
reset_vespa()
logger.info("Resetting Redis...")
reset_redis()
def reset_all_multitenant() -> None:

View File

@@ -1,38 +0,0 @@
import pytest
from requests import HTTPError

from onyx.auth.schemas import UserRole
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.test_models import DATestUser


def test_inviting_users_flow(reset: None) -> None:
    """
    Test that verifies the functionality around inviting users:
    1. Creating an admin user
    2. Admin inviting a new user
    3. Invited user successfully signing in
    4. Non-invited user attempting to sign in (should result in an error)
    """
    # 1) Create an admin user (the first user created is automatically admin)
    admin_user: DATestUser = UserManager.create(name="admin_user")
    assert admin_user is not None
    assert UserManager.is_role(admin_user, UserRole.ADMIN)

    # 2) Admin invites a new user
    invited_email = "invited_user@test.com"
    invite_response = UserManager.invite_users(admin_user, [invited_email])
    assert invite_response == 1

    # 3) The invited user successfully registers/logs in
    invited_user: DATestUser = UserManager.create(
        name="invited_user", email=invited_email
    )
    assert invited_user is not None
    assert invited_user.email == invited_email
    assert UserManager.is_role(invited_user, UserRole.BASIC)

    # 4) A non-invited user attempts to sign in/register (should fail)
    with pytest.raises(HTTPError):
        UserManager.create(name="uninvited_user", email="uninvited_user@test.com")

View File

@@ -4,62 +4,177 @@ import {
LlmDescriptor,
useLlmManager,
} from "@/lib/hooks";
import { StringOrNumberOption } from "@/components/Dropdown";
import { Persona } from "@/app/admin/assistants/interfaces";
import { destructureValue } from "@/lib/llm/utils";
import { destructureValue, getFinalLLM, structureValue } from "@/lib/llm/utils";
import { useState } from "react";
import { Hoverable } from "@/components/Hoverable";
import { Popover } from "@/components/popover/Popover";
import { IconType } from "react-icons";
import { FiRefreshCw } from "react-icons/fi";
import LLMPopover from "./input/LLMPopover";
import { FiRefreshCw, FiCheck } from "react-icons/fi";
export default function RegenerateOption({
selectedAssistant,
regenerate,
overriddenModel,
export function RegenerateDropdown({
options,
selected,
onSelect,
side,
maxHeight,
alternate,
onDropdownVisibleChange,
}: {
selectedAssistant: Persona;
regenerate: (modelOverRide: LlmDescriptor) => Promise<void>;
overriddenModel?: string;
alternate?: string;
options: StringOrNumberOption[];
selected: string | null;
onSelect: (value: string | number | null) => void;
includeDefault?: boolean;
side?: "top" | "right" | "bottom" | "left";
maxHeight?: string;
onDropdownVisibleChange: (isVisible: boolean) => void;
}) {
const { llmProviders } = useChatContext();
const llmManager = useLlmManager(llmProviders);
const [isOpen, setIsOpen] = useState(false);
const toggleDropdownVisible = (isVisible: boolean) => {
setIsOpen(isVisible);
onDropdownVisibleChange(isVisible);
};
const Dropdown = (
<div className="overflow-y-auto border border-neutral-800 py-2 min-w-fit bg-neutral-50 dark:bg-neutral-900 rounded-md shadow-lg">
<div className="mb-1 flex items-center justify-between px-4 pt-2">
<span className="text-sm text-neutral-600 dark:text-neutral-400">
Regenerate with
</span>
</div>
{options.map((option) => (
<div
key={option.value}
role="menuitem"
className={`flex items-center m-1.5 p-1.5 text-sm cursor-pointer focus-visible:outline-0 group relative hover:bg-neutral-200 dark:hover:bg-neutral-800 rounded-md my-0 px-3 mx-2 gap-2.5 py-3 !pr-3 ${
option.value === selected
? "bg-neutral-200 dark:bg-neutral-800"
: ""
}`}
onClick={() => onSelect(option.value)}
>
<div className="flex grow items-center justify-between gap-2">
<div>
<div className="flex items-center gap-3">
<div>{getDisplayNameForModel(option.name)}</div>
</div>
</div>
</div>
{option.value === selected && (
<FiCheck className="text-neutral-700 dark:text-neutral-300" />
)}
</div>
))}
</div>
);
return (
<LLMPopover
llmManager={llmManager}
llmProviders={llmProviders}
requiresImageGeneration={false}
currentAssistant={selectedAssistant}
currentModelName={overriddenModel}
trigger={
<Popover
open={isOpen}
onOpenChange={toggleDropdownVisible}
content={
<div onClick={() => toggleDropdownVisible(!isOpen)}>
{!overriddenModel ? (
{!alternate ? (
<Hoverable size={16} icon={FiRefreshCw as IconType} />
) : (
<Hoverable
size={16}
icon={FiRefreshCw as IconType}
hoverText={getDisplayNameForModel(overriddenModel)}
hoverText={getDisplayNameForModel(alternate)}
/>
)}
</div>
}
onSelect={(value) => {
const { name, provider, modelName } = destructureValue(value as string);
regenerate({
name: name,
provider: provider,
modelName: modelName,
});
}}
popover={Dropdown}
align="start"
side={side}
sideOffset={5}
triggerMaxWidth
/>
);
}
export default function RegenerateOption({
selectedAssistant,
regenerate,
overriddenModel,
onHoverChange,
onDropdownVisibleChange,
}: {
selectedAssistant: Persona;
regenerate: (modelOverRide: LlmDescriptor) => Promise<void>;
overriddenModel?: string;
onHoverChange: (isHovered: boolean) => void;
onDropdownVisibleChange: (isVisible: boolean) => void;
}) {
const { llmProviders } = useChatContext();
const llmManager = useLlmManager(llmProviders);
const [_, llmName] = getFinalLLM(llmProviders, selectedAssistant, null);
const llmOptionsByProvider: {
[provider: string]: { name: string; value: string }[];
} = {};
const uniqueModelNames = new Set<string>();
llmProviders.forEach((llmProvider) => {
if (!llmOptionsByProvider[llmProvider.provider]) {
llmOptionsByProvider[llmProvider.provider] = [];
}
(llmProvider.display_model_names || llmProvider.model_names).forEach(
(modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
});
}
}
);
});
const llmOptions = Object.entries(llmOptionsByProvider).flatMap(
([provider, options]) => [...options]
);
const currentModelName =
llmManager?.currentLlm.modelName ||
(selectedAssistant
? selectedAssistant.llm_model_version_override || llmName
: llmName);
return (
<div
className="group flex items-center relative"
onMouseEnter={() => onHoverChange(true)}
onMouseLeave={() => onHoverChange(false)}
>
<RegenerateDropdown
onDropdownVisibleChange={onDropdownVisibleChange}
alternate={overriddenModel}
options={llmOptions}
selected={currentModelName}
onSelect={(value) => {
const { name, provider, modelName } = destructureValue(
value as string
);
regenerate({
name: name,
provider: provider,
modelName: modelName,
});
}}
/>
</div>
);
}

View File

@@ -6,7 +6,7 @@ import { Persona } from "@/app/admin/assistants/interfaces";
import LLMPopover from "./LLMPopover";
import { InputPrompt } from "@/app/chat/interfaces";
import { FilterManager, getDisplayNameForModel, LlmManager } from "@/lib/hooks";
import { FilterManager, LlmManager } from "@/lib/hooks";
import { useChatContext } from "@/components/context/ChatContext";
import { ChatFileType, FileDescriptor } from "../interfaces";
import {
@@ -38,7 +38,6 @@ import { useUser } from "@/components/user/UserProvider";
import { useDocumentSelection } from "../useDocumentSelection";
import { AgenticToggle } from "./AgenticToggle";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { getProviderIcon } from "@/app/admin/configuration/llm/interfaces";
import { LoadingIndicator } from "react-select/dist/declarations/src/components/indicators";
import { FidgetSpinner } from "react-loader-spinner";
import { LoadingAnimation } from "@/components/Loading";
@@ -800,27 +799,6 @@ export function ChatInputBar({
llmManager={llmManager}
requiresImageGeneration={false}
currentAssistant={selectedAssistant}
trigger={
<button
className="dark:text-white text-black focus:outline-none"
data-testid="llm-popover-trigger"
>
<ChatInputOption
minimize
toggle
flexPriority="stiff"
name={getDisplayNameForModel(
llmManager?.currentLlm.modelName || "Models"
)}
Icon={getProviderIcon(
llmManager?.currentLlm.provider || "anthropic",
llmManager?.currentLlm.modelName ||
"claude-3-5-sonnet-20240620"
)}
tooltipContent="Switch models"
/>
</button>
}
/>
{retrievalEnabled && (

View File

@@ -1,9 +1,16 @@
import React, { useState, useEffect, useCallback, useMemo } from "react";
import React, {
useState,
useEffect,
useCallback,
useLayoutEffect,
useMemo,
} from "react";
import {
Popover,
PopoverContent,
PopoverTrigger,
} from "@/components/ui/popover";
import { ChatInputOption } from "./ChatInputOption";
import { getDisplayNameForModel } from "@/lib/hooks";
import {
checkLLMSupportsImageInput,
@@ -28,16 +35,12 @@ import { FiAlertTriangle } from "react-icons/fi";
import { Slider } from "@/components/ui/slider";
import { useUser } from "@/components/user/UserProvider";
import { TruncatedText } from "@/components/ui/truncatedText";
import { ChatInputOption } from "./ChatInputOption";
interface LLMPopoverProps {
llmProviders: LLMProviderDescriptor[];
llmManager: LlmManager;
requiresImageGeneration?: boolean;
currentAssistant?: Persona;
trigger?: React.ReactElement;
onSelect?: (value: string) => void;
currentModelName?: string;
}
export default function LLMPopover({
@@ -45,69 +48,70 @@ export default function LLMPopover({
llmManager,
requiresImageGeneration,
currentAssistant,
trigger,
onSelect,
currentModelName,
}: LLMPopoverProps) {
const [isOpen, setIsOpen] = useState(false);
const { user } = useUser();
// Memoize the options to prevent unnecessary recalculations
const { llmOptions, defaultProvider, defaultModelDisplayName } =
useMemo(() => {
const llmOptionsByProvider: {
[provider: string]: {
name: string;
value: string;
icon: React.FC<{ size?: number; className?: string }>;
}[];
} = {};
const {
llmOptionsByProvider,
llmOptions,
defaultProvider,
defaultModelDisplayName,
} = useMemo(() => {
const llmOptionsByProvider: {
[provider: string]: {
name: string;
value: string;
icon: React.FC<{ size?: number; className?: string }>;
}[];
} = {};
const uniqueModelNames = new Set<string>();
const uniqueModelNames = new Set<string>();
llmProviders.forEach((llmProvider) => {
if (!llmOptionsByProvider[llmProvider.provider]) {
llmOptionsByProvider[llmProvider.provider] = [];
}
llmProviders.forEach((llmProvider) => {
if (!llmOptionsByProvider[llmProvider.provider]) {
llmOptionsByProvider[llmProvider.provider] = [];
}
(llmProvider.display_model_names || llmProvider.model_names).forEach(
(modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
icon: getProviderIcon(llmProvider.provider, modelName),
});
}
(llmProvider.display_model_names || llmProvider.model_names).forEach(
(modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
icon: getProviderIcon(llmProvider.provider, modelName),
});
}
);
});
const llmOptions = Object.entries(llmOptionsByProvider).flatMap(
([provider, options]) => [...options]
}
);
});
const defaultProvider = llmProviders.find(
(llmProvider) => llmProvider.is_default_provider
);
const llmOptions = Object.entries(llmOptionsByProvider).flatMap(
([provider, options]) => [...options]
);
const defaultModelName = defaultProvider?.default_model_name;
const defaultModelDisplayName = defaultModelName
? getDisplayNameForModel(defaultModelName)
: null;
const defaultProvider = llmProviders.find(
(llmProvider) => llmProvider.is_default_provider
);
return {
llmOptionsByProvider,
llmOptions,
defaultProvider,
defaultModelDisplayName,
};
}, [llmProviders]);
const defaultModelName = defaultProvider?.default_model_name;
const defaultModelDisplayName = defaultModelName
? getDisplayNameForModel(defaultModelName)
: null;
return {
llmOptionsByProvider,
llmOptions,
defaultProvider,
defaultModelDisplayName,
};
}, [llmProviders]);
const [localTemperature, setLocalTemperature] = useState(
llmManager.temperature ?? 0.5
@@ -131,34 +135,32 @@ export default function LLMPopover({
// Memoize trigger content to prevent rerendering
const triggerContent = useMemo(
trigger
? () => trigger
: () => (
<button
className="dark:text-[#fff] text-[#000] focus:outline-none"
data-testid="llm-popover-trigger"
>
<ChatInputOption
minimize
toggle
flexPriority="stiff"
name={getDisplayNameForModel(
llmManager?.currentLlm.modelName ||
defaultModelDisplayName ||
"Models"
)}
Icon={getProviderIcon(
llmManager?.currentLlm.provider ||
defaultProvider?.provider ||
"anthropic",
llmManager?.currentLlm.modelName ||
defaultProvider?.default_model_name ||
"claude-3-5-sonnet-20240620"
)}
tooltipContent="Switch models"
/>
</button>
),
() => (
<button
className="dark:text-[#fff] text-[#000] focus:outline-none"
data-testid="llm-popover-trigger"
>
<ChatInputOption
minimize
toggle
flexPriority="stiff"
name={getDisplayNameForModel(
llmManager?.currentLlm.modelName ||
defaultModelDisplayName ||
"Models"
)}
Icon={getProviderIcon(
llmManager?.currentLlm.provider ||
defaultProvider?.provider ||
"anthropic",
llmManager?.currentLlm.modelName ||
defaultProvider?.default_model_name ||
"claude-3-5-sonnet-20240620"
)}
tooltipContent="Switch models"
/>
</button>
),
[defaultModelDisplayName, defaultProvider, llmManager?.currentLlm]
);
@@ -176,14 +178,12 @@ export default function LLMPopover({
<button
key={index}
className={`w-full flex items-center gap-x-2 px-3 py-2 text-sm text-left hover:bg-background-100 dark:hover:bg-neutral-800 transition-colors duration-150 ${
(currentModelName || llmManager.currentLlm.modelName) ===
name
llmManager.currentLlm.modelName === name
? "bg-background-100 dark:bg-neutral-900 text-text"
: "text-text-darker"
}`}
onClick={() => {
llmManager.updateCurrentLlm(destructureValue(value));
onSelect?.(value);
setIsOpen(false);
}}
>

View File

@@ -178,6 +178,7 @@ export const AgenticMessage = ({
const [isViewingInitialAnswer, setIsViewingInitialAnswer] = useState(true);
const [canShowResponse, setCanShowResponse] = useState(isComplete);
const [isRegenerateHovered, setIsRegenerateHovered] = useState(false);
const [isRegenerateDropdownVisible, setIsRegenerateDropdownVisible] =
useState(false);
@@ -596,6 +597,7 @@ export const AgenticMessage = ({
onDropdownVisibleChange={
setIsRegenerateDropdownVisible
}
onHoverChange={setIsRegenerateHovered}
selectedAssistant={currentPersona!}
regenerate={regenerate}
overriddenModel={overriddenModel}
@@ -611,10 +613,16 @@ export const AgenticMessage = ({
absolute -bottom-5
z-10
invisible ${
(isHovering || settings?.isMobile) && "!visible"
(isHovering ||
isRegenerateHovered ||
settings?.isMobile) &&
"!visible"
}
opacity-0 ${
(isHovering || settings?.isMobile) && "!opacity-100"
(isHovering ||
isRegenerateHovered ||
settings?.isMobile) &&
"!opacity-100"
}
translate-y-2 ${
(isHovering || settings?.isMobile) &&
@@ -689,6 +697,7 @@ export const AgenticMessage = ({
}
regenerate={regenerate}
overriddenModel={overriddenModel}
onHoverChange={setIsRegenerateHovered}
/>
</CustomTooltip>
)}

View File

@@ -301,6 +301,7 @@ export const AIMessage = ({
const finalContent = processContent(content as string);
const [isRegenerateHovered, setIsRegenerateHovered] = useState(false);
const [isRegenerateDropdownVisible, setIsRegenerateDropdownVisible] =
useState(false);
const { isHovering, trackedElementRef, hoverElementRef } = useMouseTracking();
@@ -727,6 +728,7 @@ export const AIMessage = ({
onDropdownVisibleChange={
setIsRegenerateDropdownVisible
}
onHoverChange={setIsRegenerateHovered}
selectedAssistant={currentPersona!}
regenerate={regenerate}
overriddenModel={overriddenModel}
@@ -742,10 +744,16 @@ export const AIMessage = ({
absolute -bottom-5
z-10
invisible ${
(isHovering || settings?.isMobile) && "!visible"
(isHovering ||
isRegenerateHovered ||
settings?.isMobile) &&
"!visible"
}
opacity-0 ${
(isHovering || settings?.isMobile) && "!opacity-100"
(isHovering ||
isRegenerateHovered ||
settings?.isMobile) &&
"!opacity-100"
}
flex md:flex-row gap-x-0.5 bg-background-125/40 -mx-1.5 p-1.5 rounded-lg
`}
@@ -810,6 +818,7 @@ export const AIMessage = ({
}
regenerate={regenerate}
overriddenModel={overriddenModel}
onHoverChange={setIsRegenerateHovered}
/>
</CustomTooltip>
)}