Compare commits

..

5 Commits

Author SHA1 Message Date
pablodanswer
45d1eba580 update 2025-02-04 20:20:57 -08:00
pablodanswer
3193f4720d finalize 2025-02-04 20:20:48 -08:00
pablodanswer
74a497bbd0 improvement 2025-02-04 19:02:52 -08:00
pablodanswer
d871376144 various minor improvements 2025-02-04 17:58:52 -08:00
pablodanswer
828a2aef13 pop 2025-02-04 17:39:18 -08:00
145 changed files with 1299 additions and 2591 deletions

View File

@@ -101,8 +101,7 @@ COPY ./alembic_tenants /app/alembic_tenants
COPY ./alembic.ini /app/alembic.ini
COPY supervisord.conf /usr/etc/supervisord.conf
# Escape hatch scripts
COPY ./scripts/debugging /app/scripts/debugging
# Escape hatch
COPY ./scripts/force_delete_connector_by_id.py /app/scripts/force_delete_connector_by_id.py
# Put logo in assets

View File

@@ -1,80 +0,0 @@
"""add default slack channel config
Revision ID: eaa3b5593925
Revises: 98a5008d8711
Create Date: 2025-02-03 18:07:56.552526
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "eaa3b5593925"
down_revision = "98a5008d8711"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add is_default column
op.add_column(
"slack_channel_config",
sa.Column("is_default", sa.Boolean(), nullable=False, server_default="false"),
)
op.create_index(
"ix_slack_channel_config_slack_bot_id_default",
"slack_channel_config",
["slack_bot_id", "is_default"],
unique=True,
postgresql_where=sa.text("is_default IS TRUE"),
)
# Create default channel configs for existing slack bots without one
conn = op.get_bind()
slack_bots = conn.execute(sa.text("SELECT id FROM slack_bot")).fetchall()
for slack_bot in slack_bots:
slack_bot_id = slack_bot[0]
existing_default = conn.execute(
sa.text(
"SELECT id FROM slack_channel_config WHERE slack_bot_id = :bot_id AND is_default = TRUE"
),
{"bot_id": slack_bot_id},
).fetchone()
if not existing_default:
conn.execute(
sa.text(
"""
INSERT INTO slack_channel_config (
slack_bot_id, persona_id, channel_config, enable_auto_filters, is_default
) VALUES (
:bot_id, NULL,
'{"channel_name": null, '
'"respond_member_group_list": [], '
'"answer_filters": [], '
'"follow_up_tags": [], '
'"respond_tag_only": true}',
FALSE, TRUE
)
"""
),
{"bot_id": slack_bot_id},
)
def downgrade() -> None:
# Delete default slack channel configs
conn = op.get_bind()
conn.execute(sa.text("DELETE FROM slack_channel_config WHERE is_default = TRUE"))
# Remove index
op.drop_index(
"ix_slack_channel_config_slack_bot_id_default",
table_name="slack_channel_config",
)
# Remove is_default column
op.drop_column("slack_channel_config", "is_default")

View File

@@ -1,53 +0,0 @@
"""delete non-search assistants
Revision ID: f5437cc136c5
Revises: eaa3b5593925
Create Date: 2025-02-04 16:17:15.677256
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "f5437cc136c5"
down_revision = "eaa3b5593925"
branch_labels = None
depends_on = None
def upgrade() -> None:
pass
def downgrade() -> None:
# Fix: split the statements into multiple op.execute() calls
op.execute(
"""
WITH personas_without_search AS (
SELECT p.id
FROM persona p
LEFT JOIN persona__tool pt ON p.id = pt.persona_id
LEFT JOIN tool t ON pt.tool_id = t.id
GROUP BY p.id
HAVING COUNT(CASE WHEN t.in_code_tool_id = 'run_search' THEN 1 END) = 0
)
UPDATE slack_channel_config
SET persona_id = NULL
WHERE is_default = TRUE AND persona_id IN (SELECT id FROM personas_without_search)
"""
)
op.execute(
"""
WITH personas_without_search AS (
SELECT p.id
FROM persona p
LEFT JOIN persona__tool pt ON p.id = pt.persona_id
LEFT JOIN tool t ON pt.tool_id = t.id
GROUP BY p.id
HAVING COUNT(CASE WHEN t.in_code_tool_id = 'run_search' THEN 1 END) = 0
)
DELETE FROM slack_channel_config
WHERE is_default = FALSE AND persona_id IN (SELECT id FROM personas_without_search)
"""
)

View File

@@ -2,11 +2,8 @@ from uuid import UUID
from sqlalchemy.orm import Session
from onyx.configs.constants import NotificationType
from onyx.db.models import Persona__User
from onyx.db.models import Persona__UserGroup
from onyx.db.notification import create_notification
from onyx.server.features.persona.models import PersonaSharedNotificationData
def make_persona_private(
@@ -26,14 +23,6 @@ def make_persona_private(
for user_uuid in user_ids:
db_session.add(Persona__User(persona_id=persona_id, user_id=user_uuid))
create_notification(
user_id=user_uuid,
notif_type=NotificationType.PERSONA_SHARED,
db_session=db_session,
additional_data=PersonaSharedNotificationData(
persona_id=persona_id,
).model_dump(),
)
if group_ids:
for group_id in group_ids:
db_session.add(

View File

@@ -218,14 +218,14 @@ def fetch_user_groups_for_user(
return db_session.scalars(stmt).all()
def construct_document_id_select_by_usergroup(
def construct_document_select_by_usergroup(
user_group_id: int,
) -> Select:
"""This returns a statement that should be executed using
.yield_per() to minimize overhead. The primary consumers of this function
are background processing task generators."""
stmt = (
select(Document.id)
select(Document)
.join(
DocumentByConnectorCredentialPair,
Document.id == DocumentByConnectorCredentialPair.id,

View File

@@ -80,7 +80,7 @@ def oneoff_standard_answers(
def _handle_standard_answers(
message_info: SlackMessageInfo,
receiver_ids: list[str] | None,
slack_channel_config: SlackChannelConfig,
slack_channel_config: SlackChannelConfig | None,
prompt: Prompt | None,
logger: OnyxLoggingAdapter,
client: WebClient,
@@ -94,10 +94,13 @@ def _handle_standard_answers(
Returns True if standard answers are found to match the user's message and therefore,
we still need to respond to the users.
"""
# if no channel config, then no standard answers are configured
if not slack_channel_config:
return False
slack_thread_id = message_info.thread_to_respond
configured_standard_answer_categories = (
slack_channel_config.standard_answer_categories
slack_channel_config.standard_answer_categories if slack_channel_config else []
)
configured_standard_answers = set(
[

View File

@@ -10,7 +10,6 @@ from fastapi import Response
from ee.onyx.auth.users import decode_anonymous_user_jwt_token
from ee.onyx.configs.app_configs import ANONYMOUS_USER_COOKIE_NAME
from onyx.auth.api_key import extract_tenant_from_api_key_header
from onyx.configs.constants import TENANT_ID_COOKIE_NAME
from onyx.db.engine import is_valid_schema_name
from onyx.redis.redis_pool import retrieve_auth_token_data_from_redis
from shared_configs.configs import MULTI_TENANT
@@ -44,7 +43,6 @@ async def _get_tenant_id_from_request(
Attempt to extract tenant_id from:
1) The API key header
2) The Redis-based token (stored in Cookie: fastapiusersauth)
3) Reset token cookie
Fallback: POSTGRES_DEFAULT_SCHEMA
"""
# Check for API key
@@ -87,18 +85,8 @@ async def _get_tenant_id_from_request(
if not is_valid_schema_name(tenant_id):
raise HTTPException(status_code=400, detail="Invalid tenant ID format")
return tenant_id
except Exception as e:
logger.error(f"Unexpected error in _get_tenant_id_from_request: {str(e)}")
raise HTTPException(status_code=500, detail="Internal server error")
finally:
if tenant_id:
return tenant_id
# As a final step, check for explicit tenant_id cookie
tenant_id_cookie = request.cookies.get(TENANT_ID_COOKIE_NAME)
if tenant_id_cookie and is_valid_schema_name(tenant_id_cookie):
return tenant_id_cookie
# If we've reached this point, return the default schema
return POSTGRES_DEFAULT_SCHEMA

View File

@@ -34,7 +34,6 @@ from onyx.auth.users import get_redis_strategy
from onyx.auth.users import optional_user
from onyx.auth.users import User
from onyx.configs.app_configs import WEB_DOMAIN
from onyx.configs.constants import FASTAPI_USERS_AUTH_COOKIE_NAME
from onyx.db.auth import get_user_count
from onyx.db.engine import get_current_tenant_id
from onyx.db.engine import get_session
@@ -112,7 +111,7 @@ async def login_as_anonymous_user(
token = generate_anonymous_user_jwt_token(tenant_id)
response = Response()
response.delete_cookie(FASTAPI_USERS_AUTH_COOKIE_NAME)
response.delete_cookie("fastapiusersauth")
response.set_cookie(
key=ANONYMOUS_USER_COOKIE_NAME,
value=token,

View File

@@ -7,7 +7,6 @@ from langgraph.types import StreamWriter
from onyx.agents.agent_search.shared_graph_utils.utils import write_custom_event
from onyx.chat.models import LlmDoc
from onyx.chat.models import OnyxContext
from onyx.chat.stream_processing.answer_response_handler import AnswerResponseHandler
from onyx.chat.stream_processing.answer_response_handler import CitationResponseHandler
from onyx.chat.stream_processing.answer_response_handler import (
@@ -24,7 +23,7 @@ def process_llm_stream(
should_stream_answer: bool,
writer: StreamWriter,
final_search_results: list[LlmDoc] | None = None,
displayed_search_results: list[OnyxContext] | list[LlmDoc] | None = None,
displayed_search_results: list[LlmDoc] | None = None,
) -> AIMessageChunk:
tool_call_chunk = AIMessageChunk(content="")

View File

@@ -12,9 +12,8 @@ from onyx.agents.agent_search.deep_search.initial.generate_initial_answer.states
from onyx.agents.agent_search.deep_search.main.models import (
AgentRefinedMetrics,
)
from onyx.agents.agent_search.deep_search.main.operations import dispatch_subquestion
from onyx.agents.agent_search.deep_search.main.operations import (
dispatch_subquestion_sep,
dispatch_subquestion,
)
from onyx.agents.agent_search.deep_search.main.states import (
InitialQuestionDecompositionUpdate,
@@ -110,12 +109,9 @@ def decompose_orig_question(
),
writer,
)
# dispatches custom events for subquestion tokens, adding in subquestion ids.
streamed_tokens = dispatch_separated(
model.stream(msg),
dispatch_subquestion(0, writer),
sep_callback=dispatch_subquestion_sep(0, writer),
model.stream(msg), dispatch_subquestion(0, writer)
)
stop_event = StreamStopInfo(

View File

@@ -9,9 +9,8 @@ from langgraph.types import StreamWriter
from onyx.agents.agent_search.deep_search.main.models import (
RefinementSubQuestion,
)
from onyx.agents.agent_search.deep_search.main.operations import dispatch_subquestion
from onyx.agents.agent_search.deep_search.main.operations import (
dispatch_subquestion_sep,
dispatch_subquestion,
)
from onyx.agents.agent_search.deep_search.main.states import MainState
from onyx.agents.agent_search.deep_search.main.states import (
@@ -97,9 +96,7 @@ def create_refined_sub_questions(
model = graph_config.tooling.fast_llm
streamed_tokens = dispatch_separated(
model.stream(msg),
dispatch_subquestion(1, writer),
sep_callback=dispatch_subquestion_sep(1, writer),
model.stream(msg), dispatch_subquestion(1, writer)
)
response = merge_content(*streamed_tokens)

View File

@@ -9,9 +9,6 @@ from onyx.agents.agent_search.shared_graph_utils.models import (
SubQuestionAnswerResults,
)
from onyx.agents.agent_search.shared_graph_utils.utils import write_custom_event
from onyx.chat.models import StreamStopInfo
from onyx.chat.models import StreamStopReason
from onyx.chat.models import StreamType
from onyx.chat.models import SubQuestionPiece
from onyx.context.search.models import IndexFilters
from onyx.tools.models import SearchQueryInfo
@@ -37,22 +34,6 @@ def dispatch_subquestion(
return _helper
def dispatch_subquestion_sep(level: int, writer: StreamWriter) -> Callable[[int], None]:
def _helper(sep_num: int) -> None:
write_custom_event(
"stream_finished",
StreamStopInfo(
stop_reason=StreamStopReason.FINISHED,
stream_type=StreamType.SUB_QUESTIONS,
level=level,
level_question_num=sep_num,
),
writer,
)
return _helper
def calculate_initial_agent_stats(
decomp_answer_results: list[SubQuestionAnswerResults],
original_question_stats: AgentChunkRetrievalStats,

View File

@@ -9,7 +9,6 @@ from onyx.agents.agent_search.basic.states import BasicState
from onyx.agents.agent_search.basic.utils import process_llm_stream
from onyx.agents.agent_search.models import GraphConfig
from onyx.chat.models import LlmDoc
from onyx.chat.models import OnyxContexts
from onyx.tools.tool_implementations.search.search_tool import (
SEARCH_DOC_CONTENT_ID,
)
@@ -51,11 +50,13 @@ def basic_use_tool_response(
if yield_item.id == FINAL_CONTEXT_DOCUMENTS_ID:
final_search_results = cast(list[LlmDoc], yield_item.response)
elif yield_item.id == SEARCH_DOC_CONTENT_ID:
search_contexts = cast(OnyxContexts, yield_item.response).contexts
search_contexts = yield_item.response.contexts
for doc in search_contexts:
if doc.document_id not in initial_search_results:
initial_search_results.append(doc)
initial_search_results = cast(list[LlmDoc], initial_search_results)
new_tool_call_chunk = AIMessageChunk(content="")
if not agent_config.behavior.skip_gen_ai_answer_generation:
stream = llm.stream(
@@ -69,9 +70,7 @@ def basic_use_tool_response(
True,
writer,
final_search_results=final_search_results,
# when the search tool is called with specific doc ids, initial search
# results are not output. But, we still want i.e. citations to be processed.
displayed_search_results=initial_search_results or final_search_results,
displayed_search_results=initial_search_results,
)
return BasicOutput(tool_call_chunk=new_tool_call_chunk)

View File

@@ -295,7 +295,6 @@ def _dispatch_nonempty(
def dispatch_separated(
tokens: Iterator[BaseMessage],
dispatch_event: Callable[[str, int], None],
sep_callback: Callable[[int], None] | None = None,
sep: str = DISPATCH_SEP_CHAR,
) -> list[BaseMessage_Content]:
num = 1
@@ -305,10 +304,6 @@ def dispatch_separated(
if sep in content:
sub_question_parts = content.split(sep)
_dispatch_nonempty(sub_question_parts[0], dispatch_event, num)
if sep_callback:
sep_callback(num)
num += 1
_dispatch_nonempty(
"".join(sub_question_parts[1:]).strip(), dispatch_event, num
@@ -317,9 +312,6 @@ def dispatch_separated(
_dispatch_nonempty(content, dispatch_event, num)
streamed_tokens.append(content)
if sep_callback:
sep_callback(num)
return streamed_tokens

View File

@@ -10,7 +10,6 @@ from onyx.configs.app_configs import SMTP_PORT
from onyx.configs.app_configs import SMTP_SERVER
from onyx.configs.app_configs import SMTP_USER
from onyx.configs.app_configs import WEB_DOMAIN
from onyx.configs.constants import TENANT_ID_COOKIE_NAME
from onyx.db.models import User
@@ -66,13 +65,9 @@ def send_forgot_password_email(
user_email: str,
token: str,
mail_from: str = EMAIL_FROM,
tenant_id: str | None = None,
) -> None:
subject = "Onyx Forgot Password"
link = f"{WEB_DOMAIN}/auth/reset-password?token={token}"
if tenant_id:
link += f"&{TENANT_ID_COOKIE_NAME}={tenant_id}"
# Keep search param same name as cookie for simplicity
body = f"Click the following link to reset your password: {link}"
send_email(user_email, subject, body, mail_from)

View File

@@ -73,7 +73,6 @@ from onyx.configs.app_configs import WEB_DOMAIN
from onyx.configs.constants import AuthType
from onyx.configs.constants import DANSWER_API_KEY_DUMMY_EMAIL_DOMAIN
from onyx.configs.constants import DANSWER_API_KEY_PREFIX
from onyx.configs.constants import FASTAPI_USERS_AUTH_COOKIE_NAME
from onyx.configs.constants import MilestoneRecordType
from onyx.configs.constants import OnyxRedisLocks
from onyx.configs.constants import PASSWORD_SPECIAL_CHARS
@@ -219,24 +218,6 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
verification_token_lifetime_seconds = AUTH_COOKIE_EXPIRE_TIME_SECONDS
user_db: SQLAlchemyUserDatabase[User, uuid.UUID]
async def get_by_email(self, user_email: str) -> User:
tenant_id = fetch_ee_implementation_or_noop(
"onyx.server.tenants.user_mapping", "get_tenant_id_for_email", None
)(user_email)
async with get_async_session_with_tenant(tenant_id) as db_session:
if MULTI_TENANT:
tenant_user_db = SQLAlchemyUserAdminDB[User, uuid.UUID](
db_session, User, OAuthAccount
)
user = await tenant_user_db.get_by_email(user_email)
else:
user = await self.user_db.get_by_email(user_email)
if not user:
raise exceptions.UserNotExists()
return user
async def create(
self,
user_create: schemas.UC | UserCreate,
@@ -523,15 +504,9 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
)
raise HTTPException(
status.HTTP_500_INTERNAL_SERVER_ERROR,
"Your admin has not enabled this feature.",
"Your admin has not enbaled this feature.",
)
tenant_id = await fetch_ee_implementation_or_noop(
"onyx.server.tenants.provisioning",
"get_or_provision_tenant",
async_return_default_schema,
)(email=user.email)
send_forgot_password_email(user.email, token, tenant_id=tenant_id)
send_forgot_password_email(user.email, token)
async def on_after_request_verify(
self, user: User, token: str, request: Optional[Request] = None
@@ -605,7 +580,6 @@ async def get_user_manager(
cookie_transport = CookieTransport(
cookie_max_age=SESSION_EXPIRE_TIME_SECONDS,
cookie_secure=WEB_DOMAIN.startswith("https"),
cookie_name=FASTAPI_USERS_AUTH_COOKIE_NAME,
)

View File

@@ -179,14 +179,11 @@ def try_generate_document_cc_pair_cleanup_tasks(
if tasks_generated is None:
raise ValueError("RedisConnectorDeletion.generate_tasks returned None")
try:
insert_sync_record(
db_session=db_session,
entity_id=cc_pair_id,
sync_type=SyncType.CONNECTOR_DELETION,
)
except Exception:
pass
insert_sync_record(
db_session=db_session,
entity_id=cc_pair_id,
sync_type=SyncType.CONNECTOR_DELETION,
)
except TaskDependencyError:
redis_connector.delete.set_fence(None)

View File

@@ -659,8 +659,7 @@ def validate_permission_sync_fence(
f"tasks_scanned={tasks_scanned} tasks_not_in_celery={tasks_not_in_celery}"
)
# we're only active if tasks_scanned > 0 and tasks_not_in_celery == 0
if tasks_scanned > 0 and tasks_not_in_celery == 0:
if tasks_not_in_celery == 0:
redis_connector.permissions.set_active()
return

View File

@@ -728,10 +728,6 @@ def cloud_check_alembic() -> bool | None:
TODO: have the cloud migration script set an activity signal that this check
uses to know it doesn't make sense to run a check at the present time.
"""
# Used as a placeholder if the alembic revision cannot be retrieved
ALEMBIC_NULL_REVISION = "000000000000"
time_start = time.monotonic()
redis_client = get_redis_client(tenant_id=ONYX_CLOUD_TENANT_ID)
@@ -747,14 +743,14 @@ def cloud_check_alembic() -> bool | None:
last_lock_time = time.monotonic()
tenant_to_revision: dict[str, str] = {}
tenant_to_revision: dict[str, str | None] = {}
revision_counts: dict[str, int] = {}
out_of_date_tenants: dict[str, str] = {}
out_of_date_tenants: dict[str, str | None] = {}
top_revision: str = ""
tenant_ids: list[str] | list[None] = []
try:
# map tenant_id to revision (or ALEMBIC_NULL_REVISION if the query fails)
# map each tenant_id to its revision
tenant_ids = get_all_tenant_ids()
for tenant_id in tenant_ids:
current_time = time.monotonic()
@@ -766,28 +762,19 @@ def cloud_check_alembic() -> bool | None:
continue
with get_session_with_tenant(tenant_id=None) as session:
try:
result = session.execute(
text(f'SELECT * FROM "{tenant_id}".alembic_version LIMIT 1')
)
result = session.execute(
text(f'SELECT * FROM "{tenant_id}".alembic_version LIMIT 1')
)
result_scalar: str | None = result.scalar_one_or_none()
if result_scalar is None:
raise ValueError("Alembic version should not be None.")
tenant_to_revision[tenant_id] = result_scalar
except Exception:
task_logger.warning(f"Tenant {tenant_id} has no revision!")
tenant_to_revision[tenant_id] = ALEMBIC_NULL_REVISION
result_scalar: str | None = result.scalar_one_or_none()
tenant_to_revision[tenant_id] = result_scalar
# get the total count of each revision
for k, v in tenant_to_revision.items():
revision_counts[v] = revision_counts.get(v, 0) + 1
if v is None:
continue
# error if any null revision tenants are found
if ALEMBIC_NULL_REVISION in revision_counts:
num_null_revisions = revision_counts[ALEMBIC_NULL_REVISION]
raise ValueError(f"No revision was found for {num_null_revisions} tenants!")
revision_counts[v] = revision_counts.get(v, 0) + 1
# get the revision with the most counts
sorted_revision_counts = sorted(
@@ -795,24 +782,23 @@ def cloud_check_alembic() -> bool | None:
)
if len(sorted_revision_counts) == 0:
raise ValueError(
task_logger.error(
f"cloud_check_alembic - No revisions found for {len(tenant_ids)} tenant ids!"
)
else:
top_revision, _ = sorted_revision_counts[0]
top_revision, _ = sorted_revision_counts[0]
# build a list of out of date tenants
for k, v in tenant_to_revision.items():
if v == top_revision:
continue
# build a list of out of date tenants
for k, v in tenant_to_revision.items():
if v == top_revision:
continue
out_of_date_tenants[k] = v
out_of_date_tenants[k] = v
except SoftTimeLimitExceeded:
task_logger.info(
"Soft time limit exceeded, task is being terminated gracefully."
)
raise
except Exception:
task_logger.exception("Unexpected exception during cloud alembic check")
raise
@@ -830,11 +816,6 @@ def cloud_check_alembic() -> bool | None:
f"num_tenants={len(tenant_ids)} "
f"revision={top_revision}"
)
num_to_log = min(5, len(out_of_date_tenants))
task_logger.info(
f"Logging {num_to_log}/{len(out_of_date_tenants)} out of date tenants."
)
for k, v in islice(out_of_date_tenants.items(), 5):
task_logger.info(f"Out of date tenant: tenant={k} revision={v}")
else:

View File

@@ -11,7 +11,6 @@ from onyx.background.indexing.checkpointing import get_time_windows_for_index_at
from onyx.background.indexing.tracer import OnyxTracer
from onyx.configs.app_configs import INDEXING_SIZE_WARNING_THRESHOLD
from onyx.configs.app_configs import INDEXING_TRACER_INTERVAL
from onyx.configs.app_configs import LEAVE_CONNECTOR_ACTIVE_ON_INITIALIZATION_FAILURE
from onyx.configs.app_configs import POLL_CONNECTOR_OFFSET
from onyx.configs.constants import DocumentSource
from onyx.configs.constants import MilestoneRecordType
@@ -56,7 +55,6 @@ def _get_connector_runner(
start_time: datetime,
end_time: datetime,
tenant_id: str | None,
leave_connector_active: bool = LEAVE_CONNECTOR_ACTIVE_ON_INITIALIZATION_FAILURE,
) -> ConnectorRunner:
"""
NOTE: `start_time` and `end_time` are only used for poll connectors
@@ -78,25 +76,20 @@ def _get_connector_runner(
)
except Exception as e:
logger.exception(f"Unable to instantiate connector due to {e}")
# since we failed to even instantiate the connector, we pause the CCPair since
# it will never succeed. Sometimes there are cases where the connector will
# intermittently fail to initialize in which case we should pass in
# leave_connector_active=True to allow it to continue.
# For example, if there is nightly maintenance on a Confluence Server instance,
# the connector will fail to initialize every night.
if not leave_connector_active:
cc_pair = get_connector_credential_pair_from_id(
# it will never succeed
cc_pair = get_connector_credential_pair_from_id(
db_session=db_session,
cc_pair_id=attempt.connector_credential_pair.id,
)
if cc_pair and cc_pair.status == ConnectorCredentialPairStatus.ACTIVE:
update_connector_credential_pair(
db_session=db_session,
cc_pair_id=attempt.connector_credential_pair.id,
connector_id=attempt.connector_credential_pair.connector.id,
credential_id=attempt.connector_credential_pair.credential.id,
status=ConnectorCredentialPairStatus.PAUSED,
)
if cc_pair and cc_pair.status == ConnectorCredentialPairStatus.ACTIVE:
update_connector_credential_pair(
db_session=db_session,
connector_id=attempt.connector_credential_pair.connector.id,
credential_id=attempt.connector_credential_pair.credential.id,
status=ConnectorCredentialPairStatus.PAUSED,
)
raise e
return ConnectorRunner(

View File

@@ -183,7 +183,6 @@ class Answer:
citations_by_subquestion: dict[
SubQuestionKey, list[CitationInfo]
] = defaultdict(list)
basic_subq_key = SubQuestionKey(level=BASIC_KEY[0], question_num=BASIC_KEY[1])
for packet in self.processed_streamed_output:
if isinstance(packet, CitationInfo):
if packet.level_question_num is not None and packet.level is not None:
@@ -193,7 +192,7 @@ class Answer:
)
].append(packet)
elif packet.level is None:
citations_by_subquestion[basic_subq_key].append(packet)
citations_by_subquestion[BASIC_SQ_KEY].append(packet)
return citations_by_subquestion
def is_cancelled(self) -> bool:

View File

@@ -3,7 +3,6 @@ from collections.abc import Sequence
from pydantic import BaseModel
from onyx.chat.models import LlmDoc
from onyx.chat.models import OnyxContext
from onyx.context.search.models import InferenceChunk
@@ -12,7 +11,7 @@ class DocumentIdOrderMapping(BaseModel):
def map_document_id_order(
chunks: Sequence[InferenceChunk | LlmDoc | OnyxContext], one_indexed: bool = True
chunks: Sequence[InferenceChunk | LlmDoc], one_indexed: bool = True
) -> DocumentIdOrderMapping:
order_mapping = {}
current = 1 if one_indexed else 0

View File

@@ -409,11 +409,6 @@ EXPERIMENTAL_CHECKPOINTING_ENABLED = (
os.environ.get("EXPERIMENTAL_CHECKPOINTING_ENABLED", "").lower() == "true"
)
LEAVE_CONNECTOR_ACTIVE_ON_INITIALIZATION_FAILURE = (
os.environ.get("LEAVE_CONNECTOR_ACTIVE_ON_INITIALIZATION_FAILURE", "").lower()
== "true"
)
PRUNING_DISABLED = -1
DEFAULT_PRUNING_FREQ = 60 * 60 * 24 # Once a day

View File

@@ -15,12 +15,6 @@ ID_SEPARATOR = ":;:"
DEFAULT_BOOST = 0
SESSION_KEY = "session"
# Cookies
FASTAPI_USERS_AUTH_COOKIE_NAME = (
"fastapiusersauth" # Currently a constant, but logic allows for configuration
)
TENANT_ID_COOKIE_NAME = "onyx_tid" # tenant id - for workaround cases
NO_AUTH_USER_ID = "__no_auth_user__"
NO_AUTH_USER_EMAIL = "anonymous@onyx.app"

View File

@@ -1,2 +1 @@
SLACK_BOT_PERSONA_PREFIX = "__slack_bot_persona__"
DEFAULT_PERSONA_SLACK_CHANNEL_NAME = "DEFAULT_SLACK_CHANNEL"

View File

@@ -105,32 +105,6 @@ def construct_document_select_for_connector_credential_pair_by_needs_sync(
return stmt
def construct_document_id_select_for_connector_credential_pair_by_needs_sync(
connector_id: int, credential_id: int
) -> Select:
initial_doc_ids_stmt = select(DocumentByConnectorCredentialPair.id).where(
and_(
DocumentByConnectorCredentialPair.connector_id == connector_id,
DocumentByConnectorCredentialPair.credential_id == credential_id,
)
)
stmt = (
select(DbDocument.id)
.where(
DbDocument.id.in_(initial_doc_ids_stmt),
or_(
DbDocument.last_modified
> DbDocument.last_synced, # last_modified is newer than last_synced
DbDocument.last_synced.is_(None), # never synced
),
)
.distinct()
)
return stmt
def get_all_documents_needing_vespa_sync_for_cc_pair(
db_session: Session, cc_pair_id: int
) -> list[DbDocument]:

View File

@@ -198,7 +198,7 @@ def _check_if_cc_pairs_are_owned_by_groups(
ids=missing_cc_pair_ids,
)
for cc_pair in cc_pairs:
if cc_pair.access_type == AccessType.PRIVATE:
if cc_pair.access_type != AccessType.PUBLIC:
raise ValueError(
f"Connector Credential Pair with ID: '{cc_pair.id}'"
" is not owned by the specified groups"
@@ -545,7 +545,7 @@ def fetch_documents_for_document_set_paginated(
return documents, documents[-1].id if documents else None
def construct_document_id_select_by_docset(
def construct_document_select_by_docset(
document_set_id: int,
current_only: bool = True,
) -> Select:
@@ -554,7 +554,7 @@ def construct_document_id_select_by_docset(
are background processing task generators."""
stmt = (
select(Document.id)
select(Document)
.join(
DocumentByConnectorCredentialPair,
DocumentByConnectorCredentialPair.id == Document.id,

View File

@@ -1716,7 +1716,7 @@ class ChannelConfig(TypedDict):
"""NOTE: is a `TypedDict` so it can be used as a type hint for a JSONB column
in Postgres"""
channel_name: str | None # None for default channel config
channel_name: str
respond_tag_only: NotRequired[bool] # defaults to False
respond_to_bots: NotRequired[bool] # defaults to False
respond_member_group_list: NotRequired[list[str]]
@@ -1737,6 +1737,7 @@ class SlackChannelConfig(Base):
persona_id: Mapped[int | None] = mapped_column(
ForeignKey("persona.id"), nullable=True
)
# JSON for flexibility. Contains things like: channel name, team members, etc.
channel_config: Mapped[ChannelConfig] = mapped_column(
postgresql.JSONB(), nullable=False
)
@@ -1745,8 +1746,6 @@ class SlackChannelConfig(Base):
Boolean, nullable=False, default=False
)
is_default: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
persona: Mapped[Persona | None] = relationship("Persona")
slack_bot: Mapped["SlackBot"] = relationship(
"SlackBot",
@@ -1758,21 +1757,6 @@ class SlackChannelConfig(Base):
back_populates="slack_channel_configs",
)
__table_args__ = (
UniqueConstraint(
"slack_bot_id",
"is_default",
name="uq_slack_channel_config_slack_bot_id_default",
),
Index(
"ix_slack_channel_config_slack_bot_id_default",
"slack_bot_id",
"is_default",
unique=True,
postgresql_where=(is_default is True), # type: ignore
),
)
class SlackBot(Base):
__tablename__ = "slack_bot"

View File

@@ -11,7 +11,6 @@ from sqlalchemy import Select
from sqlalchemy import select
from sqlalchemy import update
from sqlalchemy.orm import aliased
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import selectinload
from sqlalchemy.orm import Session
@@ -20,7 +19,6 @@ from onyx.configs.app_configs import DISABLE_AUTH
from onyx.configs.chat_configs import BING_API_KEY
from onyx.configs.chat_configs import CONTEXT_CHUNKS_ABOVE
from onyx.configs.chat_configs import CONTEXT_CHUNKS_BELOW
from onyx.configs.constants import NotificationType
from onyx.context.search.enums import RecencyBiasSetting
from onyx.db.constants import SLACK_BOT_PERSONA_PREFIX
from onyx.db.models import DocumentSet
@@ -34,8 +32,6 @@ from onyx.db.models import Tool
from onyx.db.models import User
from onyx.db.models import User__UserGroup
from onyx.db.models import UserGroup
from onyx.db.notification import create_notification
from onyx.server.features.persona.models import PersonaSharedNotificationData
from onyx.server.features.persona.models import PersonaSnapshot
from onyx.server.features.persona.models import PersonaUpsertRequest
from onyx.utils.logger import setup_logger
@@ -173,15 +169,6 @@ def make_persona_private(
for user_uuid in user_ids:
db_session.add(Persona__User(persona_id=persona_id, user_id=user_uuid))
create_notification(
user_id=user_uuid,
notif_type=NotificationType.PERSONA_SHARED,
db_session=db_session,
additional_data=PersonaSharedNotificationData(
persona_id=persona_id,
).model_dump(),
)
db_session.commit()
# May cause error if someone switches down to MIT from EE
@@ -721,15 +708,3 @@ def update_persona_label(
def delete_persona_label(label_id: int, db_session: Session) -> None:
db_session.query(PersonaLabel).filter(PersonaLabel.id == label_id).delete()
db_session.commit()
def persona_has_search_tool(persona_id: int, db_session: Session) -> bool:
persona = (
db_session.query(Persona)
.options(joinedload(Persona.tools))
.filter(Persona.id == persona_id)
.one_or_none()
)
if persona is None:
raise ValueError(f"Persona with ID {persona_id} does not exist")
return any(tool.in_code_tool_id == "run_search" for tool in persona.tools)

View File

@@ -74,15 +74,3 @@ def remove_slack_bot(
def fetch_slack_bots(db_session: Session) -> Sequence[SlackBot]:
return db_session.scalars(select(SlackBot)).all()
def fetch_slack_bot_tokens(
db_session: Session, slack_bot_id: int
) -> dict[str, str] | None:
slack_bot = db_session.scalar(select(SlackBot).where(SlackBot.id == slack_bot_id))
if not slack_bot:
return None
return {
"app_token": slack_bot.app_token,
"bot_token": slack_bot.bot_token,
}

View File

@@ -6,7 +6,6 @@ from sqlalchemy.orm import Session
from onyx.configs.chat_configs import MAX_CHUNKS_FED_TO_CHAT
from onyx.context.search.enums import RecencyBiasSetting
from onyx.db.constants import DEFAULT_PERSONA_SLACK_CHANNEL_NAME
from onyx.db.constants import SLACK_BOT_PERSONA_PREFIX
from onyx.db.models import ChannelConfig
from onyx.db.models import Persona
@@ -23,8 +22,8 @@ from onyx.utils.variable_functionality import (
)
def _build_persona_name(channel_name: str | None) -> str:
return f"{SLACK_BOT_PERSONA_PREFIX}{channel_name if channel_name else DEFAULT_PERSONA_SLACK_CHANNEL_NAME}"
def _build_persona_name(channel_name: str) -> str:
return f"{SLACK_BOT_PERSONA_PREFIX}{channel_name}"
def _cleanup_relationships(db_session: Session, persona_id: int) -> None:
@@ -41,7 +40,7 @@ def _cleanup_relationships(db_session: Session, persona_id: int) -> None:
def create_slack_channel_persona(
db_session: Session,
channel_name: str | None,
channel_name: str,
document_set_ids: list[int],
existing_persona_id: int | None = None,
num_chunks: float = MAX_CHUNKS_FED_TO_CHAT,
@@ -91,7 +90,6 @@ def insert_slack_channel_config(
channel_config: ChannelConfig,
standard_answer_category_ids: list[int],
enable_auto_filters: bool,
is_default: bool = False,
) -> SlackChannelConfig:
versioned_fetch_standard_answer_categories_by_ids = (
fetch_versioned_implementation_with_fallback(
@@ -117,26 +115,12 @@ def insert_slack_channel_config(
f"Some or all categories with ids {standard_answer_category_ids} do not exist"
)
if is_default:
existing_default = db_session.scalar(
select(SlackChannelConfig).where(
SlackChannelConfig.slack_bot_id == slack_bot_id,
SlackChannelConfig.is_default is True, # type: ignore
)
)
if existing_default:
raise ValueError("A default config already exists for this Slack bot.")
else:
if "channel_name" not in channel_config:
raise ValueError("Channel name is required for non-default configs.")
slack_channel_config = SlackChannelConfig(
slack_bot_id=slack_bot_id,
persona_id=persona_id,
channel_config=channel_config,
standard_answer_categories=existing_standard_answer_categories,
enable_auto_filters=enable_auto_filters,
is_default=is_default,
)
db_session.add(slack_channel_config)
db_session.commit()
@@ -180,7 +164,12 @@ def update_slack_channel_config(
f"Some or all categories with ids {standard_answer_category_ids} do not exist"
)
# get the existing persona id before updating the object
existing_persona_id = slack_channel_config.persona_id
# update the config
# NOTE: need to do this before cleaning up the old persona or else we
# will encounter `violates foreign key constraint` errors
slack_channel_config.persona_id = persona_id
slack_channel_config.channel_config = channel_config
slack_channel_config.standard_answer_categories = list(
@@ -188,6 +177,20 @@ def update_slack_channel_config(
)
slack_channel_config.enable_auto_filters = enable_auto_filters
# if the persona has changed, then clean up the old persona
if persona_id != existing_persona_id and existing_persona_id:
existing_persona = db_session.scalar(
select(Persona).where(Persona.id == existing_persona_id)
)
# if the existing persona was one created just for use with this Slack channel,
# then clean it up
if existing_persona and existing_persona.name.startswith(
SLACK_BOT_PERSONA_PREFIX
):
_cleanup_relationships(
db_session=db_session, persona_id=existing_persona_id
)
db_session.commit()
return slack_channel_config
@@ -250,32 +253,3 @@ def fetch_slack_channel_config(
SlackChannelConfig.id == slack_channel_config_id
)
)
def fetch_slack_channel_config_for_channel_or_default(
    db_session: Session, slack_bot_id: int, channel_name: str | None
) -> SlackChannelConfig | None:
    """Fetch the config for a specific channel, falling back to the bot default.

    Returns the channel-specific config when one exists for ``channel_name``,
    otherwise the bot's default config (or None if neither exists).
    """
    channel_specific: SlackChannelConfig | None = None
    if channel_name is not None:
        channel_specific = db_session.scalar(
            select(SlackChannelConfig).where(
                SlackChannelConfig.slack_bot_id == slack_bot_id,
                SlackChannelConfig.channel_config["channel_name"].astext
                == channel_name,
            )
        )
    if channel_specific:
        return channel_specific

    # no channel-specific config -> fall back to the bot-wide default, if any
    # NOTE: `== True` (not `is True`) is required for SQLAlchemy column comparison
    return db_session.scalar(
        select(SlackChannelConfig).where(
            SlackChannelConfig.slack_bot_id == slack_bot_id,
            SlackChannelConfig.is_default == True,  # noqa: E712
        )
    )

View File

@@ -142,8 +142,6 @@ def get_uuid_from_chunk_info(
tenant_id: str | None,
large_chunk_id: int | None = None,
) -> UUID:
"""NOTE: be VERY carefuly about changing this function. If changed without a migration,
this can cause deletion/update/insertion to function incorrectly."""
doc_str = document_id
# Web parsing URL duplicate catching

View File

@@ -346,14 +346,6 @@ class VespaIndex(DocumentIndex):
# IMPORTANT: This must be done one index at a time, do not use secondary index here
cleaned_chunks = [clean_chunk_id_copy(chunk) for chunk in chunks]
# needed so the final DocumentInsertionRecord returned can have the original document ID
new_document_id_to_original_document_id: dict[str, str] = {}
for ind, chunk in enumerate(cleaned_chunks):
old_chunk = chunks[ind]
new_document_id_to_original_document_id[
chunk.source_document.id
] = old_chunk.source_document.id
existing_docs: set[str] = set()
# NOTE: using `httpx` here since `requests` doesn't support HTTP2. This is beneficial for
@@ -409,14 +401,14 @@ class VespaIndex(DocumentIndex):
executor=executor,
)
all_cleaned_doc_ids = {chunk.source_document.id for chunk in cleaned_chunks}
all_doc_ids = {chunk.source_document.id for chunk in cleaned_chunks}
return {
DocumentInsertionRecord(
document_id=new_document_id_to_original_document_id[cleaned_doc_id],
already_existed=cleaned_doc_id in existing_docs,
document_id=doc_id,
already_existed=doc_id in existing_docs,
)
for cleaned_doc_id in all_cleaned_doc_ids
for doc_id in all_doc_ids
}
@classmethod
@@ -549,7 +541,7 @@ class VespaIndex(DocumentIndex):
time.monotonic() - update_start,
)
def _update_single_chunk(
def update_single_chunk(
self,
doc_chunk_id: UUID,
index_name: str,
@@ -613,8 +605,6 @@ class VespaIndex(DocumentIndex):
"""
doc_chunk_count = 0
doc_id = replace_invalid_doc_id_characters(doc_id)
with self.httpx_client_context as httpx_client:
for (
index_name,
@@ -637,7 +627,7 @@ class VespaIndex(DocumentIndex):
doc_chunk_count += len(doc_chunk_ids)
for doc_chunk_id in doc_chunk_ids:
self._update_single_chunk(
self.update_single_chunk(
doc_chunk_id, index_name, fields, doc_id, httpx_client
)
@@ -699,18 +689,6 @@ class VespaIndex(DocumentIndex):
batch_retrieval: bool = False,
get_large_chunks: bool = False,
) -> list[InferenceChunkUncleaned]:
# make sure to use the vespa-afied document IDs
chunk_requests = [
VespaChunkRequest(
document_id=replace_invalid_doc_id_characters(
chunk_request.document_id
),
min_chunk_ind=chunk_request.min_chunk_ind,
max_chunk_ind=chunk_request.max_chunk_ind,
)
for chunk_request in chunk_requests
]
if batch_retrieval:
return batch_search_api_retrieval(
index_name=self.index_name,

View File

@@ -242,9 +242,9 @@ def batch_index_vespa_chunks(
def clean_chunk_id_copy(
chunk: DocMetadataAwareIndexChunk,
) -> DocMetadataAwareIndexChunk:
clean_chunk = chunk.model_copy(
clean_chunk = chunk.copy(
update={
"source_document": chunk.source_document.model_copy(
"source_document": chunk.source_document.copy(
update={
"id": replace_invalid_doc_id_characters(chunk.source_document.id)
}

View File

@@ -45,9 +45,7 @@ def is_text_character(codepoint: int) -> bool:
def replace_invalid_doc_id_characters(text: str) -> str:
"""Replaces invalid document ID characters in text.
NOTE: this must be called at the start of every vespa-related operation or else we
risk discrepancies -> silent failures on deletion/update/insertion."""
"""Replaces invalid document ID characters in text."""
# There may be a more complete set of replacements that need to be made but Vespa docs are unclear
# and users only seem to be running into this error with single quotes
return text.replace("'", "_")

View File

@@ -409,11 +409,7 @@ class DefaultMultiLLM(LLM):
# For now, we don't support parallel tool calls
# NOTE: we can't pass this in if tools are not specified
# or else OpenAI throws an error
**(
{"parallel_tool_calls": False}
if tools and self.config.model_name != "o3-mini"
else {}
), # TODO: remove once LITELLM has patched
**({"parallel_tool_calls": False} if tools else {}),
**(
{"response_format": structured_response_format}
if structured_response_format
@@ -473,7 +469,9 @@ class DefaultMultiLLM(LLM):
if LOG_DANSWER_MODEL_INTERACTIONS:
self.log_model_configs()
if DISABLE_LITELLM_STREAMING:
if (
DISABLE_LITELLM_STREAMING or self.config.model_name == "o1-2024-12-17"
): # TODO: remove once litellm supports streaming
yield self.invoke(prompt, tools, tool_choice, structured_response_format)
return

View File

@@ -27,7 +27,6 @@ class WellKnownLLMProviderDescriptor(BaseModel):
OPENAI_PROVIDER_NAME = "openai"
OPEN_AI_MODEL_NAMES = [
"o3-mini",
"o1-mini",
"o1-preview",
"o1-2024-12-17",
@@ -92,7 +91,7 @@ def fetch_available_well_known_llms() -> list[WellKnownLLMProviderDescriptor]:
api_version_required=False,
custom_config_keys=[],
llm_names=fetch_models_for_provider(OPENAI_PROVIDER_NAME),
default_model="gpt-4o",
default_model="gpt-4",
default_fast_model="gpt-4o-mini",
),
WellKnownLLMProviderDescriptor(

View File

@@ -1,5 +1,4 @@
from datetime import datetime
from typing import cast
import pytz
import timeago # type: ignore
@@ -339,23 +338,6 @@ def _build_citations_blocks(
return citations_block
def _build_answer_blocks(
    answer: ChatOnyxBotResponse, fallback_answer: str
) -> list[SectionBlock]:
    """Render the answer text as Slack section blocks.

    Falls back to a single block containing ``fallback_answer`` when the
    response carries no answer text.
    """
    if not answer.answer:
        return [SectionBlock(text=fallback_answer)]

    # convert markdown links into Slack-format links before splitting
    slack_formatted = format_slack_message(answer.answer)
    answer_processed = decode_escapes(
        remove_slack_text_interactions(slack_formatted)
    )
    return [SectionBlock(text=chunk) for chunk in _split_text(answer_processed)]
def _build_qa_response_blocks(
answer: ChatOnyxBotResponse,
) -> list[Block]:
@@ -394,10 +376,21 @@ def _build_qa_response_blocks(
filter_block = SectionBlock(text=f"_{filter_text}_")
answer_blocks = _build_answer_blocks(
answer=answer,
fallback_answer="Sorry, I was unable to find an answer, but I did find some potentially relevant docs 🤓",
)
if not answer.answer:
answer_blocks = [
SectionBlock(
text="Sorry, I was unable to find an answer, but I did find some potentially relevant docs 🤓"
)
]
else:
# replaces markdown links with slack format links
formatted_answer = format_slack_message(answer.answer)
answer_processed = decode_escapes(
remove_slack_text_interactions(formatted_answer)
)
answer_blocks = [
SectionBlock(text=text) for text in _split_text(answer_processed)
]
response_blocks: list[Block] = []
@@ -488,7 +481,6 @@ def build_slack_response_blocks(
use_citations: bool,
feedback_reminder_id: str | None,
skip_ai_feedback: bool = False,
expecting_search_result: bool = False,
) -> list[Block]:
"""
This function is a top level function that builds all the blocks for the Slack response.
@@ -499,19 +491,9 @@ def build_slack_response_blocks(
message_info.thread_messages[-1].message, message_info.is_bot_msg
)
if expecting_search_result:
answer_blocks = _build_qa_response_blocks(
answer=answer,
)
else:
answer_blocks = cast(
list[Block],
_build_answer_blocks(
answer=answer,
fallback_answer="Sorry, I was unable to generate an answer.",
),
)
answer_blocks = _build_qa_response_blocks(
answer=answer,
)
web_follow_up_block = []
if channel_conf and channel_conf.get("show_continue_in_web_ui"):

View File

@@ -3,11 +3,9 @@ import os
from sqlalchemy.orm import Session
from onyx.db.models import SlackChannelConfig
from onyx.db.slack_channel_config import (
fetch_slack_channel_config_for_channel_or_default,
)
from onyx.db.slack_channel_config import fetch_slack_channel_configs
VALID_SLACK_FILTERS = [
"answerable_prefilter",
"well_answered_postfilter",
@@ -19,16 +17,18 @@ def get_slack_channel_config_for_bot_and_channel(
db_session: Session,
slack_bot_id: int,
channel_name: str | None,
) -> SlackChannelConfig:
slack_bot_config = fetch_slack_channel_config_for_channel_or_default(
db_session=db_session, slack_bot_id=slack_bot_id, channel_name=channel_name
)
if not slack_bot_config:
raise ValueError(
"No default configuration has been set for this Slack bot. This should not be possible."
)
) -> SlackChannelConfig | None:
if not channel_name:
return None
return slack_bot_config
slack_bot_configs = fetch_slack_channel_configs(
db_session=db_session, slack_bot_id=slack_bot_id
)
for config in slack_bot_configs:
if channel_name in config.channel_config["channel_name"]:
return config
return None
def validate_channel_name(

View File

@@ -106,7 +106,7 @@ def remove_scheduled_feedback_reminder(
def handle_message(
message_info: SlackMessageInfo,
slack_channel_config: SlackChannelConfig,
slack_channel_config: SlackChannelConfig | None,
client: WebClient,
feedback_reminder_id: str | None,
tenant_id: str | None,

View File

@@ -27,7 +27,6 @@ from onyx.db.engine import get_session_with_tenant
from onyx.db.models import SlackChannelConfig
from onyx.db.models import User
from onyx.db.persona import get_persona_by_id
from onyx.db.persona import persona_has_search_tool
from onyx.db.users import get_user_by_email
from onyx.onyxbot.slack.blocks import build_slack_response_blocks
from onyx.onyxbot.slack.handlers.utils import send_team_member_message
@@ -65,7 +64,7 @@ def rate_limits(
def handle_regular_answer(
message_info: SlackMessageInfo,
slack_channel_config: SlackChannelConfig,
slack_channel_config: SlackChannelConfig | None,
receiver_ids: list[str] | None,
client: WebClient,
channel: str,
@@ -77,7 +76,7 @@ def handle_regular_answer(
should_respond_with_error_msgs: bool = DANSWER_BOT_DISPLAY_ERROR_MSGS,
disable_docs_only_answer: bool = DANSWER_BOT_DISABLE_DOCS_ONLY_ANSWER,
) -> bool:
channel_conf = slack_channel_config.channel_config
channel_conf = slack_channel_config.channel_config if slack_channel_config else None
messages = message_info.thread_messages
@@ -93,7 +92,7 @@ def handle_regular_answer(
prompt = None
# If no persona is specified, use the default search based persona
# This way slack flow always has a persona
persona = slack_channel_config.persona
persona = slack_channel_config.persona if slack_channel_config else None
if not persona:
with get_session_with_tenant(tenant_id) as db_session:
persona = get_persona_by_id(DEFAULT_PERSONA_ID, user, db_session)
@@ -107,8 +106,7 @@ def handle_regular_answer(
]
prompt = persona.prompts[0] if persona.prompts else None
with get_session_with_tenant(tenant_id) as db_session:
expecting_search_result = persona_has_search_tool(persona.id, db_session)
should_respond_even_with_no_docs = persona.num_chunks == 0 if persona else False
# TODO: Add in support for Slack to truncate messages based on max LLM context
# llm, _ = get_llms_for_persona(persona)
@@ -136,7 +134,11 @@ def handle_regular_answer(
single_message_history = slackify_message_thread(history_messages) or None
bypass_acl = False
if slack_channel_config.persona and slack_channel_config.persona.document_sets:
if (
slack_channel_config
and slack_channel_config.persona
and slack_channel_config.persona.document_sets
):
# For Slack channels, use the full document set, admin will be warned when configuring it
# with non-public document sets
bypass_acl = True
@@ -188,7 +190,11 @@ def handle_regular_answer(
# auto_detect_filters = (
# persona.llm_filter_extraction if persona is not None else True
# )
auto_detect_filters = slack_channel_config.enable_auto_filters
auto_detect_filters = (
slack_channel_config.enable_auto_filters
if slack_channel_config is not None
else False
)
retrieval_details = RetrievalDetails(
run_search=OptionalSearchSetting.ALWAYS,
real_time=False,
@@ -305,12 +311,12 @@ def handle_regular_answer(
return True
retrieval_info = answer.docs
if not retrieval_info and expecting_search_result:
if not retrieval_info:
# This should not happen, even with no docs retrieved, there is still info returned
raise RuntimeError("Failed to retrieve docs, cannot answer question.")
top_docs = retrieval_info.top_documents if retrieval_info else []
if not top_docs and expecting_search_result:
top_docs = retrieval_info.top_documents
if not top_docs and not should_respond_even_with_no_docs:
logger.error(
f"Unable to answer question: '{user_message}' - no documents found"
)
@@ -339,8 +345,7 @@ def handle_regular_answer(
)
if (
expecting_search_result
and only_respond_if_citations
only_respond_if_citations
and not answer.citations
and not message_info.bypass_filters
):
@@ -366,7 +371,6 @@ def handle_regular_answer(
channel_conf=channel_conf,
use_citations=True, # No longer supporting quotes
feedback_reminder_id=feedback_reminder_id,
expecting_search_result=expecting_search_result,
)
try:

View File

@@ -14,7 +14,7 @@ logger = setup_logger()
def handle_standard_answers(
message_info: SlackMessageInfo,
receiver_ids: list[str] | None,
slack_channel_config: SlackChannelConfig,
slack_channel_config: SlackChannelConfig | None,
prompt: Prompt | None,
logger: OnyxLoggingAdapter,
client: WebClient,
@@ -40,7 +40,7 @@ def handle_standard_answers(
def _handle_standard_answers(
message_info: SlackMessageInfo,
receiver_ids: list[str] | None,
slack_channel_config: SlackChannelConfig,
slack_channel_config: SlackChannelConfig | None,
prompt: Prompt | None,
logger: OnyxLoggingAdapter,
client: WebClient,

View File

@@ -410,27 +410,13 @@ class SlackbotHandler:
def start_socket_client(
self, slack_bot_id: int, tenant_id: str | None, slack_bot_tokens: SlackBotTokens
) -> None:
logger.info(
f"Starting socket client for tenant: {tenant_id}, app: {slack_bot_id}"
)
socket_client: TenantSocketModeClient = _get_socket_client(
slack_bot_tokens, tenant_id, slack_bot_id
)
try:
bot_info = socket_client.web_client.auth_test()
if bot_info["ok"]:
bot_user_id = bot_info["user_id"]
user_info = socket_client.web_client.users_info(user=bot_user_id)
if user_info["ok"]:
bot_name = (
user_info["user"]["real_name"] or user_info["user"]["name"]
)
logger.info(
f"Started socket client for Slackbot with name '{bot_name}' (tenant: {tenant_id}, app: {slack_bot_id})"
)
except Exception as e:
logger.warning(
f"Could not fetch bot name: {e} for tenant: {tenant_id}, app: {slack_bot_id}"
)
# Append the event handler
process_slack_event = create_process_slack_event()
socket_client.socket_mode_request_listeners.append(process_slack_event) # type: ignore
@@ -801,8 +787,22 @@ def process_message(
channel_name=channel_name,
)
# Be careful about this default, don't want to accidentally spam every channel
# Users should be able to DM slack bot in their private channels though
if (
slack_channel_config is None
and not respond_every_channel
# Can't have configs for DMs so don't toss them out
and not is_dm
# If /OnyxBot (is_bot_msg) or @OnyxBot (bypass_filters)
# always respond with the default configs
and not (details.is_bot_msg or details.bypass_filters)
):
return
follow_up = bool(
slack_channel_config.channel_config
slack_channel_config
and slack_channel_config.channel_config
and slack_channel_config.channel_config.get("follow_up_tags")
is not None
)

View File

@@ -16,8 +16,9 @@ from onyx.configs.constants import OnyxCeleryTask
from onyx.configs.constants import OnyxRedisConstants
from onyx.db.connector_credential_pair import get_connector_credential_pair_from_id
from onyx.db.document import (
construct_document_id_select_for_connector_credential_pair_by_needs_sync,
construct_document_select_for_connector_credential_pair_by_needs_sync,
)
from onyx.db.models import Document
from onyx.redis.redis_object_helper import RedisObjectHelper
@@ -71,8 +72,7 @@ class RedisConnectorCredentialPair(RedisObjectHelper):
last_lock_time = time.monotonic()
num_tasks_sent = 0
async_results = []
cc_pair = get_connector_credential_pair_from_id(
db_session=db_session,
cc_pair_id=int(self._id),
@@ -80,14 +80,14 @@ class RedisConnectorCredentialPair(RedisObjectHelper):
if not cc_pair:
return None
stmt = construct_document_id_select_for_connector_credential_pair_by_needs_sync(
stmt = construct_document_select_for_connector_credential_pair_by_needs_sync(
cc_pair.connector_id, cc_pair.credential_id
)
num_docs = 0
for doc_id in db_session.scalars(stmt).yield_per(DB_YIELD_PER_DEFAULT):
doc_id = cast(str, doc_id)
for doc in db_session.scalars(stmt).yield_per(DB_YIELD_PER_DEFAULT):
doc = cast(Document, doc)
current_time = time.monotonic()
if current_time - last_lock_time >= (
CELERY_VESPA_SYNC_BEAT_LOCK_TIMEOUT / 4
@@ -98,7 +98,7 @@ class RedisConnectorCredentialPair(RedisObjectHelper):
num_docs += 1
# check if we should skip the document (typically because it's already syncing)
if doc_id in self.skip_docs:
if doc.id in self.skip_docs:
continue
# celery's default task id format is "dd32ded3-00aa-4884-8b21-42f8332e7fac"
@@ -114,21 +114,21 @@ class RedisConnectorCredentialPair(RedisObjectHelper):
)
# Priority on sync's triggered by new indexing should be medium
celery_app.send_task(
result = celery_app.send_task(
OnyxCeleryTask.VESPA_METADATA_SYNC_TASK,
kwargs=dict(document_id=doc_id, tenant_id=tenant_id),
kwargs=dict(document_id=doc.id, tenant_id=tenant_id),
queue=OnyxCeleryQueues.VESPA_METADATA_SYNC,
task_id=custom_task_id,
priority=OnyxCeleryPriority.MEDIUM,
)
num_tasks_sent += 1
self.skip_docs.add(doc_id)
async_results.append(result)
self.skip_docs.add(doc.id)
if num_tasks_sent >= max_tasks:
if len(async_results) >= max_tasks:
break
return num_tasks_sent, num_docs
return len(async_results), num_docs
class RedisGlobalConnectorCredentialPair:

View File

@@ -14,7 +14,8 @@ from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryQueues
from onyx.configs.constants import OnyxCeleryTask
from onyx.configs.constants import OnyxRedisConstants
from onyx.db.document_set import construct_document_id_select_by_docset
from onyx.db.document_set import construct_document_select_by_docset
from onyx.db.models import Document
from onyx.redis.redis_object_helper import RedisObjectHelper
@@ -65,11 +66,10 @@ class RedisDocumentSet(RedisObjectHelper):
"""
last_lock_time = time.monotonic()
num_tasks_sent = 0
stmt = construct_document_id_select_by_docset(int(self._id), current_only=False)
for doc_id in db_session.scalars(stmt).yield_per(DB_YIELD_PER_DEFAULT):
doc_id = cast(str, doc_id)
async_results = []
stmt = construct_document_select_by_docset(int(self._id), current_only=False)
for doc in db_session.scalars(stmt).yield_per(DB_YIELD_PER_DEFAULT):
doc = cast(Document, doc)
current_time = time.monotonic()
if current_time - last_lock_time >= (
CELERY_VESPA_SYNC_BEAT_LOCK_TIMEOUT / 4
@@ -86,17 +86,17 @@ class RedisDocumentSet(RedisObjectHelper):
# add to the set BEFORE creating the task.
redis_client.sadd(self.taskset_key, custom_task_id)
celery_app.send_task(
result = celery_app.send_task(
OnyxCeleryTask.VESPA_METADATA_SYNC_TASK,
kwargs=dict(document_id=doc_id, tenant_id=tenant_id),
kwargs=dict(document_id=doc.id, tenant_id=tenant_id),
queue=OnyxCeleryQueues.VESPA_METADATA_SYNC,
task_id=custom_task_id,
priority=OnyxCeleryPriority.LOW,
)
num_tasks_sent += 1
async_results.append(result)
return num_tasks_sent, num_tasks_sent
return len(async_results), len(async_results)
def reset(self) -> None:
self.redis.srem(OnyxRedisConstants.ACTIVE_FENCES, self.fence_key)

View File

@@ -25,7 +25,6 @@ from onyx.configs.app_configs import REDIS_REPLICA_HOST
from onyx.configs.app_configs import REDIS_SSL
from onyx.configs.app_configs import REDIS_SSL_CA_CERTS
from onyx.configs.app_configs import REDIS_SSL_CERT_REQS
from onyx.configs.constants import FASTAPI_USERS_AUTH_COOKIE_NAME
from onyx.configs.constants import REDIS_SOCKET_KEEPALIVE_OPTIONS
from onyx.utils.logger import setup_logger
@@ -288,7 +287,7 @@ async def get_async_redis_connection() -> aioredis.Redis:
async def retrieve_auth_token_data_from_redis(request: Request) -> dict | None:
token = request.cookies.get(FASTAPI_USERS_AUTH_COOKIE_NAME)
token = request.cookies.get("fastapiusersauth")
if not token:
logger.debug("No auth token cookie found")
return None

View File

@@ -14,6 +14,7 @@ from onyx.configs.constants import OnyxCeleryPriority
from onyx.configs.constants import OnyxCeleryQueues
from onyx.configs.constants import OnyxCeleryTask
from onyx.configs.constants import OnyxRedisConstants
from onyx.db.models import Document
from onyx.redis.redis_object_helper import RedisObjectHelper
from onyx.utils.variable_functionality import fetch_versioned_implementation
from onyx.utils.variable_functionality import global_version
@@ -65,22 +66,23 @@ class RedisUserGroup(RedisObjectHelper):
user group up to date over multiple batches.
"""
last_lock_time = time.monotonic()
num_tasks_sent = 0
async_results = []
if not global_version.is_ee_version():
return 0, 0
try:
construct_document_id_select_by_usergroup = fetch_versioned_implementation(
construct_document_select_by_usergroup = fetch_versioned_implementation(
"onyx.db.user_group",
"construct_document_id_select_by_usergroup",
"construct_document_select_by_usergroup",
)
except ModuleNotFoundError:
return 0, 0
stmt = construct_document_id_select_by_usergroup(int(self._id))
for doc_id in db_session.scalars(stmt).yield_per(DB_YIELD_PER_DEFAULT):
doc_id = cast(str, doc_id)
stmt = construct_document_select_by_usergroup(int(self._id))
for doc in db_session.scalars(stmt).yield_per(DB_YIELD_PER_DEFAULT):
doc = cast(Document, doc)
current_time = time.monotonic()
if current_time - last_lock_time >= (
CELERY_VESPA_SYNC_BEAT_LOCK_TIMEOUT / 4
@@ -97,17 +99,17 @@ class RedisUserGroup(RedisObjectHelper):
# add to the set BEFORE creating the task.
redis_client.sadd(self.taskset_key, custom_task_id)
celery_app.send_task(
result = celery_app.send_task(
OnyxCeleryTask.VESPA_METADATA_SYNC_TASK,
kwargs=dict(document_id=doc_id, tenant_id=tenant_id),
kwargs=dict(document_id=doc.id, tenant_id=tenant_id),
queue=OnyxCeleryQueues.VESPA_METADATA_SYNC,
task_id=custom_task_id,
priority=OnyxCeleryPriority.LOW,
)
num_tasks_sent += 1
async_results.append(result)
return num_tasks_sent, num_tasks_sent
return len(async_results), len(async_results)
def reset(self) -> None:
self.redis.srem(OnyxRedisConstants.ACTIVE_FENCES, self.fence_key)

View File

@@ -215,7 +215,6 @@ class SlackChannelConfig(BaseModel):
# XXX this is going away soon
standard_answer_categories: list[StandardAnswerCategory]
enable_auto_filters: bool
is_default: bool
@classmethod
def from_model(
@@ -238,7 +237,6 @@ class SlackChannelConfig(BaseModel):
for standard_answer_category_model in slack_channel_config_model.standard_answer_categories
],
enable_auto_filters=slack_channel_config_model.enable_auto_filters,
is_default=slack_channel_config_model.is_default,
)
@@ -281,8 +279,3 @@ class AllUsersResponse(BaseModel):
accepted_pages: int
invited_pages: int
slack_users_pages: int
class SlackChannel(BaseModel):
id: str
name: str

View File

@@ -1,10 +1,6 @@
from typing import Any
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
from sqlalchemy.orm import Session
from onyx.auth.users import current_admin_user
@@ -16,7 +12,6 @@ from onyx.db.models import ChannelConfig
from onyx.db.models import User
from onyx.db.persona import get_persona_by_id
from onyx.db.slack_bot import fetch_slack_bot
from onyx.db.slack_bot import fetch_slack_bot_tokens
from onyx.db.slack_bot import fetch_slack_bots
from onyx.db.slack_bot import insert_slack_bot
from onyx.db.slack_bot import remove_slack_bot
@@ -30,7 +25,6 @@ from onyx.db.slack_channel_config import update_slack_channel_config
from onyx.onyxbot.slack.config import validate_channel_name
from onyx.server.manage.models import SlackBot
from onyx.server.manage.models import SlackBotCreationRequest
from onyx.server.manage.models import SlackChannel
from onyx.server.manage.models import SlackChannelConfig
from onyx.server.manage.models import SlackChannelConfigCreationRequest
from onyx.server.manage.validate_tokens import validate_app_token
@@ -54,6 +48,12 @@ def _form_channel_config(
answer_filters = slack_channel_config_creation_request.answer_filters
follow_up_tags = slack_channel_config_creation_request.follow_up_tags
if not raw_channel_name:
raise HTTPException(
status_code=400,
detail="Must provide at least one channel name",
)
try:
cleaned_channel_name = validate_channel_name(
db_session=db_session,
@@ -108,12 +108,6 @@ def create_slack_channel_config(
current_slack_channel_config_id=None,
)
if channel_config["channel_name"] is None:
raise HTTPException(
status_code=400,
detail="Channel name is required",
)
persona_id = None
if slack_channel_config_creation_request.persona_id is not None:
persona_id = slack_channel_config_creation_request.persona_id
@@ -126,11 +120,11 @@ def create_slack_channel_config(
).id
slack_channel_config_model = insert_slack_channel_config(
db_session=db_session,
slack_bot_id=slack_channel_config_creation_request.slack_bot_id,
persona_id=persona_id,
channel_config=channel_config,
standard_answer_category_ids=slack_channel_config_creation_request.standard_answer_categories,
db_session=db_session,
enable_auto_filters=slack_channel_config_creation_request.enable_auto_filters,
)
return SlackChannelConfig.from_model(slack_channel_config_model)
@@ -241,24 +235,6 @@ def create_bot(
app_token=slack_bot_creation_request.app_token,
)
# Create a default Slack channel config
default_channel_config = ChannelConfig(
channel_name=None,
respond_member_group_list=[],
answer_filters=[],
follow_up_tags=[],
respond_tag_only=True,
)
insert_slack_channel_config(
db_session=db_session,
slack_bot_id=slack_bot_model.id,
persona_id=None,
channel_config=default_channel_config,
standard_answer_category_ids=[],
enable_auto_filters=False,
is_default=True,
)
create_milestone_and_report(
user=None,
distinct_id=tenant_id or "N/A",
@@ -339,48 +315,3 @@ def list_bot_configs(
SlackChannelConfig.from_model(slack_bot_config_model)
for slack_bot_config_model in slack_bot_config_models
]
@router.get(
"/admin/slack-app/bots/{bot_id}/channels",
)
def get_all_channels_from_slack_api(
bot_id: int,
db_session: Session = Depends(get_session),
_: User | None = Depends(current_admin_user),
) -> list[SlackChannel]:
tokens = fetch_slack_bot_tokens(db_session, bot_id)
if not tokens or "bot_token" not in tokens:
raise HTTPException(
status_code=404, detail="Bot token not found for the given bot ID"
)
bot_token = tokens["bot_token"]
client = WebClient(token=bot_token)
try:
channels = []
cursor = None
while True:
response = client.conversations_list(
types="public_channel,private_channel",
exclude_archived=True,
limit=1000,
cursor=cursor,
)
for channel in response["channels"]:
channels.append(SlackChannel(id=channel["id"], name=channel["name"]))
response_metadata: dict[str, Any] = response.get("response_metadata", {})
if isinstance(response_metadata, dict):
cursor = response_metadata.get("next_cursor")
if not cursor:
break
else:
break
return channels
except SlackApiError as e:
raise HTTPException(
status_code=500, detail=f"Error fetching channels from Slack API: {str(e)}"
)

View File

@@ -38,7 +38,6 @@ from onyx.configs.app_configs import ENABLE_EMAIL_INVITES
from onyx.configs.app_configs import SESSION_EXPIRE_TIME_SECONDS
from onyx.configs.app_configs import VALID_EMAIL_DOMAINS
from onyx.configs.constants import AuthType
from onyx.configs.constants import FASTAPI_USERS_AUTH_COOKIE_NAME
from onyx.db.api_key import is_api_key_email_address
from onyx.db.auth import get_total_users_count
from onyx.db.engine import CURRENT_TENANT_ID_CONTEXTVAR
@@ -480,7 +479,7 @@ def get_current_token_expiration_jwt(
try:
# Get the JWT from the cookie
jwt_token = request.cookies.get(FASTAPI_USERS_AUTH_COOKIE_NAME)
jwt_token = request.cookies.get("fastapiusersauth")
if not jwt_token:
logger.error("No JWT token found in cookies")
return None

View File

@@ -719,12 +719,15 @@ def upload_files_for_chat(
file_content = file.file.read() # Read the file content
# NOTE: Image conversion to JPEG used to be enforced here.
# This was removed to:
# 1. Preserve original file content for downloads
# 2. Maintain transparency in formats like PNG
# 3. Ameliorate issue with file conversion
file_content_io = io.BytesIO(file_content)
if file_type == ChatFileType.IMAGE:
file_content_io = file.file
# NOTE: Image conversion to JPEG used to be enforced here.
# This was removed to:
# 1. Preserve original file content for downloads
# 2. Maintain transparency in formats like PNG
# 3. Ameliorate issue with file conversion
else:
file_content_io = io.BytesIO(file_content)
new_content_type = file.content_type
@@ -742,11 +745,10 @@ def upload_files_for_chat(
# to re-extract it every time we send a message
if file_type == ChatFileType.DOC:
extracted_text = extract_file_text(
file=file_content_io, # use the bytes we already read
file=io.BytesIO(file_content), # use the bytes we already read
file_name=file.filename or "",
)
text_file_id = str(uuid.uuid4())
file_store.save_file(
file_name=text_file_id,
content=io.BytesIO(extracted_text.encode()),

View File

@@ -7,6 +7,7 @@ from typing import cast
from sqlalchemy.orm import Session
from onyx.chat.chat_utils import llm_doc_from_inference_section
from onyx.chat.llm_response_handler import LLMCall
from onyx.chat.models import AnswerStyleConfig
from onyx.chat.models import ContextualPruningConfig
from onyx.chat.models import DocumentPruningConfig
@@ -370,6 +371,41 @@ class SearchTool(Tool):
prompt_config=self.prompt_config,
)
"""Other utility functions"""
@classmethod
def get_search_result(
cls, llm_call: LLMCall
) -> tuple[list[LlmDoc], list[LlmDoc]] | None:
"""
Returns the final search results and a map of docs to their original search rank (which is what is displayed to user)
"""
if not llm_call.tool_call_info:
return None
final_search_results = []
initial_search_results = []
for yield_item in llm_call.tool_call_info:
if (
isinstance(yield_item, ToolResponse)
and yield_item.id == FINAL_CONTEXT_DOCUMENTS_ID
):
final_search_results = cast(list[LlmDoc], yield_item.response)
elif (
isinstance(yield_item, ToolResponse)
and yield_item.id == SEARCH_DOC_CONTENT_ID
):
search_contexts = yield_item.response.contexts
# original_doc_search_rank = 1
for doc in search_contexts:
if doc.document_id not in initial_search_results:
initial_search_results.append(doc)
initial_search_results = cast(list[LlmDoc], initial_search_results)
return final_search_results, initial_search_results
# Allows yielding the same responses as a SearchTool without being a SearchTool.
# SearchTool passed in to allow for access to SearchTool properties.

View File

@@ -37,7 +37,7 @@ langchainhub==0.1.21
langgraph==0.2.59
langgraph-checkpoint==2.0.5
langgraph-sdk==0.1.44
litellm==1.60.2
litellm==1.55.4
lxml==5.3.0
lxml_html_clean==0.2.2
llama-index==0.9.45
@@ -46,7 +46,7 @@ msal==1.28.0
nltk==3.8.1
Office365-REST-Python-Client==2.5.9
oauthlib==3.2.2
openai==1.61.0
openai==1.55.3
openpyxl==3.1.2
playwright==1.41.2
psutil==5.9.5

View File

@@ -3,7 +3,7 @@ cohere==5.6.1
fastapi==0.109.2
google-cloud-aiplatform==1.58.0
numpy==1.26.4
openai==1.61.0
openai==1.55.3
pydantic==2.8.2
retry==0.9.2
safetensors==0.4.2
@@ -12,5 +12,5 @@ torch==2.2.0
transformers==4.39.2
uvicorn==0.21.1
voyageai==0.2.3
litellm==1.60.2
litellm==1.55.4
sentry-sdk[fastapi,celery,starlette]==2.14.0

View File

@@ -10,8 +10,6 @@ from uuid import UUID
from redis import Redis
from ee.onyx.server.tenants.user_mapping import get_tenant_id_for_email
from onyx.auth.invited_users import get_invited_users
from onyx.auth.invited_users import write_invited_users
from onyx.configs.app_configs import REDIS_AUTH_KEY_PREFIX
from onyx.configs.app_configs import REDIS_DB_NUMBER
from onyx.configs.app_configs import REDIS_HOST
@@ -23,7 +21,6 @@ from onyx.db.users import get_user_by_email
from onyx.redis.redis_pool import RedisPool
from shared_configs.configs import MULTI_TENANT
from shared_configs.configs import POSTGRES_DEFAULT_SCHEMA
from shared_configs.contextvars import CURRENT_TENANT_ID_CONTEXTVAR
# Tool to run helpful operations on Redis in production
# This is targeted for internal usage and may not have all the necessary parameters
@@ -313,13 +310,6 @@ if __name__ == "__main__":
required=False,
)
parser.add_argument(
"--tenant-id",
type=str,
help="Tenant ID for get, delete user token, or add to invited users",
required=False,
)
parser.add_argument(
"--batch",
type=int,
@@ -338,32 +328,11 @@ if __name__ == "__main__":
parser.add_argument(
"--user-email",
type=str,
help="User email for get, delete user token, or add to invited users",
help="User email for get or delete user token",
required=False,
)
args = parser.parse_args()
if args.tenant_id:
CURRENT_TENANT_ID_CONTEXTVAR.set(args.tenant_id)
if args.command == "add_invited_user":
if not args.user_email:
print("Error: --user-email is required for add_invited_user command")
sys.exit(1)
current_invited_users = get_invited_users()
if args.user_email not in current_invited_users:
current_invited_users.append(args.user_email)
if args.dry_run:
print(f"(DRY-RUN) Would add {args.user_email} to invited users")
else:
write_invited_users(current_invited_users)
print(f"Added {args.user_email} to invited users")
else:
print(f"{args.user_email} is already in the invited users list")
sys.exit(0)
exitcode = onyx_redis(
command=args.command,
batch=args.batch,

View File

@@ -255,24 +255,6 @@ def get_documents_for_tenant_connector(
print_documents(documents)
def search_for_document(
index_name: str, document_id: str, max_hits: int | None = 10
) -> List[Dict[str, Any]]:
yql_query = (
f'select * from sources {index_name} where document_id contains "{document_id}"'
)
params: dict[str, Any] = {"yql": yql_query}
if max_hits is not None:
params["hits"] = max_hits
with get_vespa_http_client() as client:
response = client.get(f"{SEARCH_ENDPOINT}/search/", params=params)
response.raise_for_status()
result = response.json()
documents = result.get("root", {}).get("children", [])
logger.info(f"Found {len(documents)} documents from query.")
return documents
def search_documents(
tenant_id: str, connector_id: int, query: str, n: int = 10
) -> None:
@@ -458,98 +440,10 @@ def get_document_acls(
print("-" * 80)
def get_current_chunk_count(
document_id: str, index_name: str, tenant_id: str
) -> int | None:
with get_session_with_tenant(tenant_id=tenant_id) as session:
return (
session.query(Document.chunk_count)
.filter(Document.id == document_id)
.scalar()
)
def get_number_of_chunks_we_think_exist(
document_id: str, index_name: str, tenant_id: str
) -> int:
current_chunk_count = get_current_chunk_count(document_id, index_name, tenant_id)
print(f"Current chunk count: {current_chunk_count}")
doc_info = VespaIndex.enrich_basic_chunk_info(
index_name=index_name,
http_client=get_vespa_http_client(),
document_id=document_id,
previous_chunk_count=current_chunk_count,
new_chunk_count=0,
)
chunk_ids = get_document_chunk_ids(
enriched_document_info_list=[doc_info],
tenant_id=tenant_id,
large_chunks_enabled=False,
)
return len(chunk_ids)
class VespaDebugging:
# Class for managing Vespa debugging actions.
def __init__(self, tenant_id: str | None = None):
self.tenant_id = POSTGRES_DEFAULT_SCHEMA if not tenant_id else tenant_id
self.index_name = get_index_name(self.tenant_id)
def sample_document_counts(self) -> None:
# Sample random documents and compare chunk counts
mismatches = []
no_chunks = []
with get_session_with_tenant(tenant_id=self.tenant_id) as session:
# Get a sample of random documents
from sqlalchemy import func
sample_docs = (
session.query(Document.id, Document.link, Document.semantic_id)
.order_by(func.random())
.limit(1000)
.all()
)
for doc in sample_docs:
document_id, link, semantic_id = doc
(
number_of_chunks_in_vespa,
number_of_chunks_we_think_exist,
) = self.compare_chunk_count(document_id)
if number_of_chunks_in_vespa != number_of_chunks_we_think_exist:
mismatches.append(
(
document_id,
link,
semantic_id,
number_of_chunks_in_vespa,
number_of_chunks_we_think_exist,
)
)
elif number_of_chunks_in_vespa == 0:
no_chunks.append((document_id, link, semantic_id))
# Print results
print("\nDocuments with mismatched chunk counts:")
for doc_id, link, semantic_id, vespa_count, expected_count in mismatches:
print(f"Document ID: {doc_id}")
print(f"Link: {link}")
print(f"Semantic ID: {semantic_id}")
print(f"Chunks in Vespa: {vespa_count}")
print(f"Expected chunks: {expected_count}")
print("-" * 80)
print("\nDocuments with no chunks in Vespa:")
for doc_id, link, semantic_id in no_chunks:
print(f"Document ID: {doc_id}")
print(f"Link: {link}")
print(f"Semantic ID: {semantic_id}")
print("-" * 80)
print(f"\nTotal mismatches: {len(mismatches)}")
print(f"Total documents with no chunks: {len(no_chunks)}")
def print_config(self) -> None:
# Print Vespa config.
@@ -563,16 +457,6 @@ class VespaDebugging:
# List documents for a tenant.
list_documents(n, self.tenant_id)
def compare_chunk_count(self, document_id: str) -> tuple[int, int]:
docs = search_for_document(self.index_name, document_id, max_hits=None)
number_of_chunks_we_think_exist = get_number_of_chunks_we_think_exist(
document_id, self.index_name, self.tenant_id
)
print(
f"Number of chunks in Vespa: {len(docs)}, Number of chunks we think exist: {number_of_chunks_we_think_exist}"
)
return len(docs), number_of_chunks_we_think_exist
def search_documents(self, connector_id: int, query: str, n: int = 10) -> None:
# Search documents for a tenant and connector.
search_documents(self.tenant_id, connector_id, query, n)
@@ -580,11 +464,9 @@ class VespaDebugging:
def update_document(
self, connector_id: int, doc_id: str, fields: Dict[str, Any]
) -> None:
# Update a document.
update_document(self.tenant_id, connector_id, doc_id, fields)
def search_for_document(self, document_id: str) -> List[Dict[str, Any]]:
return search_for_document(self.index_name, document_id)
def delete_document(self, connector_id: int, doc_id: str) -> None:
# Delete a document.
delete_document(self.tenant_id, connector_id, doc_id)
@@ -601,6 +483,7 @@ class VespaDebugging:
def main() -> None:
# Main CLI entry point.
parser = argparse.ArgumentParser(description="Vespa debugging tool")
parser.add_argument(
"--action",

View File

@@ -11,8 +11,6 @@ from typing import Optional
import requests
from onyx.configs.constants import FASTAPI_USERS_AUTH_COOKIE_NAME
parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(parent_dir)
@@ -376,11 +374,7 @@ class SelectionAnalysis:
Returns:
dict: The Onyx API response content
"""
cookies = (
{FASTAPI_USERS_AUTH_COOKIE_NAME: self._auth_cookie}
if self._auth_cookie
else {}
)
cookies = {"fastapiusersauth": self._auth_cookie} if self._auth_cookie else {}
endpoint = f"http://127.0.0.1:{self._web_port}/api/direct-qa"
query_json = {

View File

@@ -71,7 +71,6 @@ COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
COPY ./onyx /app/onyx
COPY ./shared_configs /app/shared_configs
COPY ./alembic /app/alembic
COPY ./alembic_tenants /app/alembic_tenants
COPY ./alembic.ini /app/alembic.ini
COPY ./pytest.ini /app/pytest.ini
COPY supervisord.conf /usr/etc/supervisord.conf

View File

@@ -7,7 +7,6 @@ import requests
from requests import HTTPError
from onyx.auth.schemas import UserRole
from onyx.configs.constants import FASTAPI_USERS_AUTH_COOKIE_NAME
from onyx.server.documents.models import PaginatedReturn
from onyx.server.models import FullUserSnapshot
from tests.integration.common_utils.constants import API_SERVER_URL
@@ -83,7 +82,7 @@ class UserManager:
response.raise_for_status()
cookies = response.cookies.get_dict()
session_cookie = cookies.get(FASTAPI_USERS_AUTH_COOKIE_NAME)
session_cookie = cookies.get("fastapiusersauth")
if not session_cookie:
raise Exception("Failed to login")

View File

@@ -1,5 +1,6 @@
import json
import pytest
import requests
from onyx.configs.constants import MessageType
@@ -65,6 +66,9 @@ def test_send_message_simple_with_history(reset: None) -> None:
assert found_doc["metadata"]["document_id"] == doc.id
@pytest.mark.xfail(
reason="agent search broke this",
)
def test_using_reference_docs_with_simple_with_history_api_flow(reset: None) -> None:
# Creating an admin user (first user created is automatically an admin)
admin_user: DATestUser = UserManager.create(name="admin_user")

View File

@@ -4,12 +4,12 @@ dependencies:
version: 14.3.1
- name: vespa
repository: https://onyx-dot-app.github.io/vespa-helm-charts
version: 0.2.20
version: 0.2.18
- name: nginx
repository: oci://registry-1.docker.io/bitnamicharts
version: 15.14.0
- name: redis
repository: https://charts.bitnami.com/bitnami
version: 20.1.0
digest: sha256:4615c033064a987e3f66a48f4744d2e88bd1cc932c79453c4928455695a72778
generated: "2025-02-04T11:45:05.39228-08:00"
digest: sha256:5c9eb3d55d5f8e3beb64f26d26f686c8d62755daa10e2e6d87530bdf2fbbf957
generated: "2024-12-10T10:47:35.812483-08:00"

View File

@@ -23,7 +23,7 @@ dependencies:
repository: https://charts.bitnami.com/bitnami
condition: postgresql.enabled
- name: vespa
version: 0.2.20
version: 0.2.18
repository: https://onyx-dot-app.github.io/vespa-helm-charts
condition: vespa.enabled
- name: nginx

View File

@@ -7,10 +7,10 @@ metadata:
data:
INTERNAL_URL: "http://{{ include "onyx-stack.fullname" . }}-api-service:{{ .Values.api.service.port | default 8080 }}"
POSTGRES_HOST: {{ .Release.Name }}-postgresql
VESPA_HOST: {{ .Values.vespa.name }}.{{ .Values.vespa.service.name }}.{{ .Release.Namespace }}.svc.cluster.local
VESPA_HOST: da-vespa-0.vespa-service
REDIS_HOST: {{ .Release.Name }}-redis-master
MODEL_SERVER_HOST: "{{ include "onyx-stack.fullname" . }}-inference-model-service"
INDEXING_MODEL_SERVER_HOST: "{{ include "onyx-stack.fullname" . }}-indexing-model-service"
{{- range $key, $value := .Values.configMap }}
{{ $key }}: "{{ $value }}"
{{- end }}
{{- end }}

View File

@@ -5,7 +5,6 @@
postgresql:
primary:
persistence:
storageClass: ""
size: 5Gi
enabled: true
auth:
@@ -13,52 +12,13 @@ postgresql:
secretKeys:
# overwriting as postgres typically expects 'postgres-password'
adminPasswordKey: postgres_password
vespa:
name: da-vespa-0
service:
name: vespa-service
volumeClaimTemplates:
- metadata:
name: vespa-storage
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 1Gi
storageClassName: ""
enabled: true
replicaCount: 1
image:
repository: vespa
pullPolicy: IfNotPresent
tag: "8.277.17"
podAnnotations: {}
podLabels:
app: vespa
app.kubernetes.io/instance: onyx
app.kubernetes.io/name: vespa
securityContext:
privileged: true
runAsUser: 0
resources:
# The Vespa Helm chart specifies default resources, which are quite modest. We override
# them here to increase chances of the chart running successfully.
requests:
cpu: 1500m
memory: 4000Mi
limits:
cpu: 1500m
memory: 4000Mi
persistent:
storageClassName: ""
imagePullSecrets: []
nameOverride: ""
fullnameOverride: ""
persistent:
storageClassName: ""
inferenceCapability:
service:
portName: modelserver
@@ -312,9 +272,15 @@ background:
podSecurityContext:
{}
# fsGroup: 2000
securityContext:
privileged: true
runAsUser: 0
{}
# capabilities:
# drop:
# - ALL
# readOnlyRootFilesystem: true
# runAsNonRoot: true
# runAsUser: 1000
enableMiniChunk: "true"
resources: {}
# We usually recommend not to specify default resources and to leave this as a conscious
@@ -350,6 +316,50 @@ background:
nodeSelector: {}
tolerations: []
vespa:
volumeClaimTemplates:
- metadata:
name: vespa-storage
spec:
accessModes:
- ReadWriteOnce
storageClassName: ""
resources:
requests:
storage: 1Gi
enabled: true
replicaCount: 1
image:
repository: vespa
pullPolicy: IfNotPresent
tag: "8.277.17"
podAnnotations: {}
podLabels:
app: vespa
app.kubernetes.io/instance: onyx
app.kubernetes.io/name: vespa
podSecurityContext:
{}
# fsGroup: 2000
securityContext:
privileged: true
runAsUser: 0
resources:
# The Vespa Helm chart specifies default resources, which are quite modest. We override
# them here to increase chances of the chart running successfully.
requests:
cpu: 1500m
memory: 4000Mi
limits:
cpu: 1500m
memory: 4000Mi
nodeSelector: {}
tolerations: []
affinity: {}
redis:

4
web/.gitignore vendored
View File

@@ -35,8 +35,6 @@ yarn-error.log*
*.tsbuildinfo
next-env.d.ts
# playwright testing temp files
/admin_auth.json
/user_auth.json
/build-archive.log
/test-results

View File

@@ -21,42 +21,3 @@ Open [http://localhost:3000](http://localhost:3000) with your browser to see the
_Note:_ if you are having problems accessing the ^, try setting the `WEB_DOMAIN` env variable to
`http://127.0.0.1:3000` and accessing it there.
## Testing
This testing process will reset your application into a clean state.
Don't run these tests if you don't want to do this!
Bring up the entire application.
1. Reset the instance
```cd backend
export PYTEST_IGNORE_SKIP=true
pytest -s tests/integration/tests/playwright/test_playwright.py
```
2. Run playwright
```
cd web
npx playwright test
```
3. Inspect results
By default, playwright.config.ts is configured to output the results to:
```
web/test-results
```
4. Upload results to Chromatic (Optional)
This step would normally not be run by third party developers, but first party devs
may use this for local troubleshooting and testing.
```
cd web
npx chromatic --playwright --project-token={your token here}
```

151
web/package-lock.json generated
View File

@@ -15,7 +15,6 @@
"@headlessui/react": "^2.2.0",
"@headlessui/tailwindcss": "^0.2.1",
"@phosphor-icons/react": "^2.0.8",
"@radix-ui/react-accordion": "^1.2.2",
"@radix-ui/react-checkbox": "^1.1.2",
"@radix-ui/react-collapsible": "^1.1.2",
"@radix-ui/react-dialog": "^1.1.2",
@@ -84,11 +83,11 @@
"yup": "^1.4.0"
},
"devDependencies": {
"@chromatic-com/playwright": "^0.10.2",
"@chromatic-com/playwright": "^0.10.0",
"@tailwindcss/typography": "^0.5.10",
"@types/chrome": "^0.0.287",
"@types/jest": "^29.5.14",
"chromatic": "^11.25.2",
"chromatic": "^11.18.1",
"eslint": "^8.48.0",
"eslint-config-next": "^14.1.0",
"jest": "^29.7.0",
@@ -757,9 +756,9 @@
"license": "MIT"
},
"node_modules/@chromatic-com/playwright": {
"version": "0.10.2",
"resolved": "https://registry.npmjs.org/@chromatic-com/playwright/-/playwright-0.10.2.tgz",
"integrity": "sha512-SfP4I0rWPeSNW5VtV7eiuNSsZYK9IdVPTBT1SnUFJd3lACS1YJJd5s8pTisJvgh5Q8u9VNGWXfeuV3ddGJyRtw==",
"version": "0.10.0",
"resolved": "https://registry.npmjs.org/@chromatic-com/playwright/-/playwright-0.10.0.tgz",
"integrity": "sha512-QjKnOfuIcq9Y97QwA3MMVzOceXn1ikelUeC8gy60d2PbsQ2NNxH2n/PrAJ8Sllr225mXD1ts9xBH+Hq3+Blo5A==",
"dev": true,
"license": "MIT",
"dependencies": {
@@ -3443,140 +3442,6 @@
"integrity": "sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==",
"license": "MIT"
},
"node_modules/@radix-ui/react-accordion": {
"version": "1.2.2",
"resolved": "https://registry.npmjs.org/@radix-ui/react-accordion/-/react-accordion-1.2.2.tgz",
"integrity": "sha512-b1oh54x4DMCdGsB4/7ahiSrViXxaBwRPotiZNnYXjLha9vfuURSAZErki6qjDoSIV0eXx5v57XnTGVtGwnfp2g==",
"license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.1",
"@radix-ui/react-collapsible": "1.1.2",
"@radix-ui/react-collection": "1.1.1",
"@radix-ui/react-compose-refs": "1.1.1",
"@radix-ui/react-context": "1.1.1",
"@radix-ui/react-direction": "1.1.0",
"@radix-ui/react-id": "1.1.0",
"@radix-ui/react-primitive": "2.0.1",
"@radix-ui/react-use-controllable-state": "1.1.0"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/primitive": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.1.tgz",
"integrity": "sha512-SJ31y+Q/zAyShtXJc8x83i9TYdbAfHZ++tUZnvjJJqFjzsdUnKsxPL6IEtBlxKkU7yzer//GQtZSV4GbldL3YA==",
"license": "MIT"
},
"node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-collection": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.1.tgz",
"integrity": "sha512-LwT3pSho9Dljg+wY2KN2mrrh6y3qELfftINERIzBUO9e0N+t0oMTyn3k9iv+ZqgrwGkRnLpNJrsMv9BZlt2yuA==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.1",
"@radix-ui/react-context": "1.1.1",
"@radix-ui/react-primitive": "2.0.1",
"@radix-ui/react-slot": "1.1.1"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-compose-refs": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/react-compose-refs/-/react-compose-refs-1.1.1.tgz",
"integrity": "sha512-Y9VzoRDSJtgFMUCoiZBDVo084VQ5hfpXxVE+NgkdNsjiDBByiImMZKKhxMwCbdHvhlENG6a833CbFkOQvTricw==",
"license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-context": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.1.tgz",
"integrity": "sha512-UASk9zi+crv9WteK/NU4PLvOoL3OuE6BWVKNF6hPRBtYBDXQ2u5iu3O59zUlJiTVvkyuycnqrztsHVJwcK9K+Q==",
"license": "MIT",
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-primitive": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/@radix-ui/react-primitive/-/react-primitive-2.0.1.tgz",
"integrity": "sha512-sHCWTtxwNn3L3fH8qAfnF3WbUZycW93SM1j3NFDzXBiz8D6F5UTTy8G1+WFEaiCdvCVRJWj6N2R4Xq6HdiHmDg==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-slot": "1.1.1"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-accordion/node_modules/@radix-ui/react-slot": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.1.1.tgz",
"integrity": "sha512-RApLLOcINYJA+dMVbOju7MYv1Mb2EBp2nH4HdDzXTSyaR5optlm6Otrz1euW3HbdOR8UmmFK06TD+A9frYWv+g==",
"license": "MIT",
"dependencies": {
"@radix-ui/react-compose-refs": "1.1.1"
},
"peerDependencies": {
"@types/react": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-arrow": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/@radix-ui/react-arrow/-/react-arrow-1.1.0.tgz",
@@ -8577,9 +8442,9 @@
}
},
"node_modules/chromatic": {
"version": "11.25.2",
"resolved": "https://registry.npmjs.org/chromatic/-/chromatic-11.25.2.tgz",
"integrity": "sha512-/9eQWn6BU1iFsop86t8Au21IksTRxwXAl7if8YHD05L2AbuMjClLWZo5cZojqrJHGKDhTqfrC2X2xE4uSm0iKw==",
"version": "11.18.1",
"resolved": "https://registry.npmjs.org/chromatic/-/chromatic-11.18.1.tgz",
"integrity": "sha512-hkNT9vA6K9+PnE/khhZYBnRCOm8NonaQDs7RZ8YHFo7/lh1b/x/uFMkTjWjaj/mkM6QOR/evu5VcZMtcaauSlw==",
"dev": true,
"license": "MIT",
"bin": {

View File

@@ -18,7 +18,6 @@
"@headlessui/react": "^2.2.0",
"@headlessui/tailwindcss": "^0.2.1",
"@phosphor-icons/react": "^2.0.8",
"@radix-ui/react-accordion": "^1.2.2",
"@radix-ui/react-checkbox": "^1.1.2",
"@radix-ui/react-collapsible": "^1.1.2",
"@radix-ui/react-dialog": "^1.1.2",
@@ -87,11 +86,11 @@
"yup": "^1.4.0"
},
"devDependencies": {
"@chromatic-com/playwright": "^0.10.2",
"@chromatic-com/playwright": "^0.10.0",
"@tailwindcss/typography": "^0.5.10",
"@types/chrome": "^0.0.287",
"@types/jest": "^29.5.14",
"chromatic": "^11.25.2",
"chromatic": "^11.18.1",
"eslint": "^8.48.0",
"eslint-config-next": "^14.1.0",
"jest": "^29.7.0",

View File

@@ -2,19 +2,7 @@ import { defineConfig, devices } from "@playwright/test";
export default defineConfig({
globalSetup: require.resolve("./tests/e2e/global-setup"),
timeout: 60000, // 60 seconds timeout
reporter: [
["list"],
// Warning: uncommenting the html reporter may cause the chromatic-archives
// directory to be deleted after the test run, which will break CI.
// [
// 'html',
// {
// outputFolder: 'test-results', // or whatever directory you want
// open: 'never', // can be 'always' | 'on-failure' | 'never'
// },
// ],
],
timeout: 30000, // 30 seconds timeout
projects: [
{
name: "admin",

View File

@@ -1,9 +1,10 @@
"use client";
import { PageSelector } from "@/components/PageSelector";
import { SlackBot } from "@/lib/types";
import { useRouter } from "next/navigation";
import { useEffect, useState } from "react";
import { FiEdit } from "react-icons/fi";
import { FiCheck, FiEdit, FiXCircle } from "react-icons/fi";
import {
Table,
TableBody,
@@ -12,8 +13,6 @@ import {
TableHeader,
TableRow,
} from "@/components/ui/table";
import { Badge } from "@/components/ui/badge";
import { SlackBot } from "@/lib/types";
const NUM_IN_PAGE = 20;
@@ -43,7 +42,7 @@ function ClickableTableRow({
);
}
export const SlackBotTable = ({ slackBots }: { slackBots: SlackBot[] }) => {
export function SlackBotTable({ slackBots }: { slackBots: SlackBot[] }) {
const [page, setPage] = useState(1);
// sort by id for consistent ordering
@@ -68,9 +67,8 @@ export const SlackBotTable = ({ slackBots }: { slackBots: SlackBot[] }) => {
<TableHeader>
<TableRow>
<TableHead>Name</TableHead>
<TableHead>Status</TableHead>
<TableHead>Default Config</TableHead>
<TableHead>Channel Count</TableHead>
<TableHead>Enabled</TableHead>
</TableRow>
</TableHeader>
<TableBody>
@@ -87,19 +85,13 @@ export const SlackBotTable = ({ slackBots }: { slackBots: SlackBot[] }) => {
{slackBot.name}
</div>
</TableCell>
<TableCell>
{slackBot.enabled ? (
<Badge variant="success">Enabled</Badge>
) : (
<Badge variant="destructive">Disabled</Badge>
)}
</TableCell>
<TableCell>
<Badge variant="secondary">Default Set</Badge>
</TableCell>
<TableCell>{slackBot.configs_count}</TableCell>
<TableCell>
{/* Add any action buttons here if needed */}
{slackBot.enabled ? (
<FiCheck className="text-emerald-600" size="18" />
) : (
<FiXCircle className="text-red-600" size="18" />
)}
</TableCell>
</ClickableTableRow>
);
@@ -107,7 +99,7 @@ export const SlackBotTable = ({ slackBots }: { slackBots: SlackBot[] }) => {
{slackBots.length === 0 && (
<TableRow>
<TableCell
colSpan={5}
colSpan={4}
className="text-center text-muted-foreground"
>
Please add a New Slack Bot to begin chatting with Danswer!
@@ -136,4 +128,4 @@ export const SlackBotTable = ({ slackBots }: { slackBots: SlackBot[] }) => {
)}
</div>
);
};
}

View File

@@ -7,7 +7,6 @@ import { createSlackBot, updateSlackBot } from "./new/lib";
import { Button } from "@/components/ui/button";
import { Separator } from "@/components/ui/separator";
import { useEffect } from "react";
import { Switch } from "@/components/ui/switch";
export const SlackTokensForm = ({
isUpdate,
@@ -34,9 +33,7 @@ export const SlackTokensForm = ({
return (
<Formik
initialValues={{
...initialValues,
}}
initialValues={initialValues}
validationSchema={Yup.object().shape({
bot_token: Yup.string().required(),
app_token: Yup.string().required(),

View File

@@ -14,10 +14,8 @@ import {
} from "@/components/ui/table";
import Link from "next/link";
import { useState } from "react";
import { FiArrowUpRight } from "react-icons/fi";
import { deleteSlackChannelConfig, isPersonaASlackBotPersona } from "./lib";
import { Card } from "@/components/ui/card";
import { Button } from "@/components/ui/button";
import { FiPlusSquare, FiSettings } from "react-icons/fi";
const numToDisplay = 50;
@@ -34,147 +32,128 @@ export function SlackChannelConfigsTable({
}) {
const [page, setPage] = useState(1);
const defaultConfig = slackChannelConfigs.find((config) => config.is_default);
const channelConfigs = slackChannelConfigs.filter(
(config) => !config.is_default
);
// sort by name for consistent ordering
slackChannelConfigs.sort((a, b) => {
if (a.id < b.id) {
return -1;
} else if (a.id > b.id) {
return 1;
} else {
return 0;
}
});
return (
<div className="space-y-8">
<div className="flex justify-between items-center mb-6">
<Button
variant="outline"
onClick={() => {
window.location.href = `/admin/bots/${slackBotId}/channels/${defaultConfig?.id}`;
}}
>
<FiSettings />
Edit Default Configuration
</Button>
<Link href={`/admin/bots/${slackBotId}/channels/new`}>
<Button variant="outline">
<FiPlusSquare />
New Channel Configuration
</Button>
</Link>
<div>
<div className="rounded-md border">
<Table>
<TableHeader>
<TableRow>
<TableHead>Channel</TableHead>
<TableHead>Assistant</TableHead>
<TableHead>Document Sets</TableHead>
<TableHead>Delete</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{slackChannelConfigs
.slice(numToDisplay * (page - 1), numToDisplay * page)
.map((slackChannelConfig) => {
return (
<TableRow
key={slackChannelConfig.id}
className="cursor-pointer hover:bg-gray-100 transition-colors"
onClick={() => {
window.location.href = `/admin/bots/${slackBotId}/channels/${slackChannelConfig.id}`;
}}
>
<TableCell>
<div className="flex gap-x-2">
<div className="my-auto">
<EditIcon />
</div>
<div className="my-auto">
{"#" + slackChannelConfig.channel_config.channel_name}
</div>
</div>
</TableCell>
<TableCell onClick={(e) => e.stopPropagation()}>
{slackChannelConfig.persona &&
!isPersonaASlackBotPersona(slackChannelConfig.persona) ? (
<Link
href={`/admin/assistants/${slackChannelConfig.persona.id}`}
className="text-blue-500 flex hover:underline"
>
{slackChannelConfig.persona.name}
</Link>
) : (
"-"
)}
</TableCell>
<TableCell>
<div>
{slackChannelConfig.persona &&
slackChannelConfig.persona.document_sets.length > 0
? slackChannelConfig.persona.document_sets
.map((documentSet) => documentSet.name)
.join(", ")
: "-"}
</div>
</TableCell>
<TableCell onClick={(e) => e.stopPropagation()}>
<div
className="cursor-pointer hover:text-destructive"
onClick={async (e) => {
e.stopPropagation();
const response = await deleteSlackChannelConfig(
slackChannelConfig.id
);
if (response.ok) {
setPopup({
message: `Slack bot config "${slackChannelConfig.id}" deleted`,
type: "success",
});
} else {
const errorMsg = await response.text();
setPopup({
message: `Failed to delete Slack bot config - ${errorMsg}`,
type: "error",
});
}
refresh();
}}
>
<TrashIcon />
</div>
</TableCell>
</TableRow>
);
})}
{/* Empty row with message when table has no data */}
{slackChannelConfigs.length === 0 && (
<TableRow>
<TableCell
colSpan={4}
className="text-center text-muted-foreground"
>
Please add a New Slack Bot Configuration to begin chatting
with Onyx!
</TableCell>
</TableRow>
)}
</TableBody>
</Table>
</div>
<div>
<h2 className="text-2xl font- mb-4">Channel-Specific Configurations</h2>
<Card>
<Table>
<TableHeader>
<TableRow>
<TableHead>Channel</TableHead>
<TableHead>Assistant</TableHead>
<TableHead>Document Sets</TableHead>
<TableHead>Actions</TableHead>
</TableRow>
</TableHeader>
<TableBody>
{channelConfigs
.slice(numToDisplay * (page - 1), numToDisplay * page)
.map((slackChannelConfig) => {
return (
<TableRow
key={slackChannelConfig.id}
className="cursor-pointer transition-colors"
onClick={() => {
window.location.href = `/admin/bots/${slackBotId}/channels/${slackChannelConfig.id}`;
}}
>
<TableCell>
<div className="flex gap-x-2">
<div className="my-auto">
<EditIcon className="text-muted-foreground" />
</div>
<div className="my-auto">
{"#" +
slackChannelConfig.channel_config.channel_name}
</div>
</div>
</TableCell>
<TableCell onClick={(e) => e.stopPropagation()}>
{slackChannelConfig.persona &&
!isPersonaASlackBotPersona(
slackChannelConfig.persona
) ? (
<Link
href={`/admin/assistants/${slackChannelConfig.persona.id}`}
className="text-primary hover:underline"
>
{slackChannelConfig.persona.name}
</Link>
) : (
"-"
)}
</TableCell>
<TableCell>
<div>
{slackChannelConfig.persona &&
slackChannelConfig.persona.document_sets.length > 0
? slackChannelConfig.persona.document_sets
.map((documentSet) => documentSet.name)
.join(", ")
: "-"}
</div>
</TableCell>
<TableCell onClick={(e) => e.stopPropagation()}>
<Button
variant="ghost"
size="sm"
className="hover:text-destructive"
onClick={async (e) => {
e.stopPropagation();
const response = await deleteSlackChannelConfig(
slackChannelConfig.id
);
if (response.ok) {
setPopup({
message: `Slack bot config "${slackChannelConfig.id}" deleted`,
type: "success",
});
} else {
const errorMsg = await response.text();
setPopup({
message: `Failed to delete Slack bot config - ${errorMsg}`,
type: "error",
});
}
refresh();
}}
>
<TrashIcon />
</Button>
</TableCell>
</TableRow>
);
})}
{channelConfigs.length === 0 && (
<TableRow>
<TableCell
colSpan={4}
className="text-center text-muted-foreground"
>
No channel-specific configurations. Add a new configuration
to customize behavior for specific channels.
</TableCell>
</TableRow>
)}
</TableBody>
</Table>
</Card>
{channelConfigs.length > numToDisplay && (
<div className="mt-4 flex justify-center">
<PageSelector
totalPages={Math.ceil(channelConfigs.length / numToDisplay)}
currentPage={page}
onPageChange={(newPage) => setPage(newPage)}
/>
</div>
)}
<div className="mt-3 flex">
<div className="mx-auto">
<PageSelector
totalPages={Math.ceil(slackChannelConfigs.length / numToDisplay)}
currentPage={page}
onPageChange={(newPage) => setPage(newPage)}
/>
</div>
</div>
</div>
);

View File

@@ -1,29 +1,21 @@
"use client";
import React, { useMemo, useState, useEffect } from "react";
import { Formik, Form, Field } from "formik";
import React, { useMemo } from "react";
import { Formik } from "formik";
import * as Yup from "yup";
import { usePopup } from "@/components/admin/connectors/Popup";
import {
DocumentSet,
SlackChannelConfig,
SlackBotResponseType,
} from "@/lib/types";
import { DocumentSet, SlackChannelConfig } from "@/lib/types";
import {
createSlackChannelConfig,
isPersonaASlackBotPersona,
updateSlackChannelConfig,
fetchSlackChannels,
} from "../lib";
import CardSection from "@/components/admin/CardSection";
import { useRouter } from "next/navigation";
import { Persona } from "@/app/admin/assistants/interfaces";
import { StandardAnswerCategoryResponse } from "@/components/standardAnswers/getStandardAnswerCategoriesIfEE";
import { SEARCH_TOOL_ID, SEARCH_TOOL_NAME } from "@/app/chat/tools/constants";
import {
SlackChannelConfigFormFields,
SlackChannelConfigFormFieldsProps,
} from "./SlackChannelConfigFormFields";
import { SlackChannelConfigFormFields } from "./SlackChannelConfigFormFields";
export const SlackChannelConfigCreationForm = ({
slack_bot_id,
@@ -41,43 +33,26 @@ export const SlackChannelConfigCreationForm = ({
const { popup, setPopup } = usePopup();
const router = useRouter();
const isUpdate = Boolean(existingSlackChannelConfig);
const isDefault = existingSlackChannelConfig?.is_default || false;
const existingSlackBotUsesPersona = existingSlackChannelConfig?.persona
? !isPersonaASlackBotPersona(existingSlackChannelConfig.persona)
: false;
const existingPersonaHasSearchTool = existingSlackChannelConfig?.persona
? existingSlackChannelConfig.persona.tools.some(
(tool) => tool.in_code_tool_id === SEARCH_TOOL_ID
)
: false;
const [searchEnabledAssistants, nonSearchAssistants] = useMemo(() => {
return personas.reduce(
(acc, persona) => {
if (
persona.tools.some((tool) => tool.in_code_tool_id === SEARCH_TOOL_ID)
) {
acc[0].push(persona);
} else {
acc[1].push(persona);
}
return acc;
},
[[], []] as [Persona[], Persona[]]
);
const searchEnabledAssistants = useMemo(() => {
return personas.filter((persona) => {
return persona.tools.some(
(tool) => tool.in_code_tool_id == SEARCH_TOOL_ID
);
});
}, [personas]);
return (
<CardSection className="!px-12 max-w-4xl">
<CardSection className="max-w-4xl">
{popup}
<Formik
initialValues={{
slack_bot_id: slack_bot_id,
channel_name: isDefault
? ""
: existingSlackChannelConfig?.channel_config.channel_name || "",
response_type: "citations" as SlackBotResponseType,
channel_name:
existingSlackChannelConfig?.channel_config.channel_name || "",
answer_validity_check_enabled: (
existingSlackChannelConfig?.channel_config?.answer_filters || []
).includes("well_answered_postfilter"),
@@ -115,24 +90,22 @@ export const SlackChannelConfigCreationForm = ({
!isPersonaASlackBotPersona(existingSlackChannelConfig.persona)
? existingSlackChannelConfig.persona.id
: null,
response_type:
existingSlackChannelConfig?.response_type || "citations",
standard_answer_categories:
existingSlackChannelConfig?.standard_answer_categories || [],
knowledge_source: existingSlackBotUsesPersona
? existingPersonaHasSearchTool
? "assistant"
: "non_search_assistant"
? "assistant"
: existingSlackChannelConfig?.persona
? "document_sets"
: "all_public",
}}
validationSchema={Yup.object().shape({
slack_bot_id: Yup.number().required(),
channel_name: isDefault
? Yup.string()
: Yup.string().required("Channel Name is required"),
response_type: Yup.mixed<SlackBotResponseType>()
channel_name: Yup.string().required("Channel Name is required"),
response_type: Yup.string()
.oneOf(["quotes", "citations"])
.required(),
.required("Response type is required"),
answer_validity_check_enabled: Yup.boolean().required(),
questionmark_prefilter_enabled: Yup.boolean().required(),
respond_tag_only: Yup.boolean().required(),
@@ -163,12 +136,7 @@ export const SlackChannelConfigCreationForm = ({
}),
standard_answer_categories: Yup.array(),
knowledge_source: Yup.string()
.oneOf([
"all_public",
"document_sets",
"assistant",
"non_search_assistant",
])
.oneOf(["all_public", "document_sets", "assistant"])
.required(),
})}
onSubmit={async (values, formikHelpers) => {
@@ -179,22 +147,18 @@ export const SlackChannelConfigCreationForm = ({
slack_bot_id,
channel_name: values.channel_name,
respond_member_group_list: values.respond_member_group_list,
usePersona:
values.knowledge_source === "assistant" ||
values.knowledge_source === "non_search_assistant",
usePersona: values.knowledge_source === "assistant",
document_sets:
values.knowledge_source === "document_sets"
? values.document_sets
: [],
persona_id:
values.knowledge_source === "assistant" ||
values.knowledge_source === "non_search_assistant"
values.knowledge_source === "assistant"
? values.persona_id
: null,
standard_answer_categories: values.standard_answer_categories.map(
(category: any) => category.id
),
response_type: values.response_type as SlackBotResponseType,
};
if (!cleanedValues.still_need_help_enabled) {
@@ -227,24 +191,13 @@ export const SlackChannelConfigCreationForm = ({
}
}}
>
{({ isSubmitting, values, setFieldValue, ...formikProps }) => (
<Form>
<div className="pb-6 w-full">
<SlackChannelConfigFormFields
{...values}
isUpdate={isUpdate}
isDefault={isDefault}
documentSets={documentSets}
searchEnabledAssistants={searchEnabledAssistants}
nonSearchAssistants={nonSearchAssistants}
standardAnswerCategoryResponse={standardAnswerCategoryResponse}
setPopup={setPopup}
slack_bot_id={slack_bot_id}
formikProps={formikProps}
/>
</div>
</Form>
)}
<SlackChannelConfigFormFields
isUpdate={isUpdate}
documentSets={documentSets}
searchEnabledAssistants={searchEnabledAssistants}
standardAnswerCategoryResponse={standardAnswerCategoryResponse}
setPopup={setPopup}
/>
</Formik>
</CardSection>
);

View File

@@ -1,15 +1,10 @@
"use client";
import React, { useState, useEffect, useMemo } from "react";
import {
FieldArray,
Form,
useFormikContext,
ErrorMessage,
Field,
} from "formik";
import { FieldArray, Form, useFormikContext, ErrorMessage } from "formik";
import { CCPairDescriptor, DocumentSet } from "@/lib/types";
import {
BooleanFormField,
Label,
SelectorFormField,
SubLabel,
@@ -36,49 +31,28 @@ import { TooltipProvider } from "@radix-ui/react-tooltip";
import { SourceIcon } from "@/components/SourceIcon";
import Link from "next/link";
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
import { SearchMultiSelectDropdown } from "@/components/Dropdown";
import { fetchSlackChannels } from "../lib";
import { Badge } from "@/components/ui/badge";
import useSWR from "swr";
import { ThreeDotsLoader } from "@/components/Loading";
import {
Accordion,
AccordionContent,
AccordionItem,
AccordionTrigger,
} from "@/components/ui/accordion";
import { Separator } from "@/components/ui/separator";
import { CheckFormField } from "@/components/ui/CheckField";
export interface SlackChannelConfigFormFieldsProps {
interface SlackChannelConfigFormFieldsProps {
isUpdate: boolean;
isDefault: boolean;
documentSets: DocumentSet[];
searchEnabledAssistants: Persona[];
nonSearchAssistants: Persona[];
standardAnswerCategoryResponse: StandardAnswerCategoryResponse;
setPopup: (popup: {
message: string;
type: "error" | "success" | "warning";
}) => void;
slack_bot_id: number;
formikProps: any;
}
export function SlackChannelConfigFormFields({
isUpdate,
isDefault,
documentSets,
searchEnabledAssistants,
nonSearchAssistants,
standardAnswerCategoryResponse,
setPopup,
slack_bot_id,
formikProps,
}: SlackChannelConfigFormFieldsProps) {
const router = useRouter();
const { values, setFieldValue } = useFormikContext<any>();
const [showAdvancedOptions, setShowAdvancedOptions] = useState(false);
const [viewUnselectableSets, setViewUnselectableSets] = useState(false);
const [viewSyncEnabledAssistants, setViewSyncEnabledAssistants] =
useState(false);
@@ -178,53 +152,11 @@ export function SlackChannelConfigFormFields({
);
}, [documentSets]);
const { data: channelOptions, isLoading } = useSWR(
`/api/manage/admin/slack-app/bots/${slack_bot_id}/channels`,
async (url: string) => {
const channels = await fetchSlackChannels(slack_bot_id);
return channels.map((channel: any) => ({
name: channel.name,
value: channel.id,
}));
}
);
if (isLoading) {
return <ThreeDotsLoader />;
}
return (
<>
<div className="w-full">
{isDefault && (
<Badge variant="agent" className="bg-blue-100 text-blue-800">
Default Configuration
</Badge>
)}
{!isDefault && (
<>
<label
htmlFor="channel_name"
className="block text-text font-medium text-base mb-2"
>
Select A Slack Channel:
</label>{" "}
<Field name="channel_name">
{({ field, form }: { field: any; form: any }) => (
<SearchMultiSelectDropdown
options={channelOptions || []}
onSelect={(selected) => {
form.setFieldValue("channel_name", selected.name);
}}
initialSearchTerm={field.value}
onSearchTermChange={(term) => {
form.setFieldValue("channel_name", term);
}}
/>
)}
</Field>
</>
)}
<Form className="px-6 max-w-4xl">
<div className="pt-4 w-full">
<TextFormField name="channel_name" label="Slack Channel Name:" />
<div className="space-y-2 mt-4">
<Label>Knowledge Source</Label>
<RadioGroup
@@ -238,7 +170,7 @@ export function SlackChannelConfigFormFields({
value="all_public"
id="all_public"
label="All Public Knowledge"
sublabel="Let OnyxBot respond based on information from all public connectors"
sublabel="Let OnyxBot respond based on information from all public connectors "
/>
{selectableSets.length + unselectableSets.length > 0 && (
<RadioGroupItemField
@@ -251,17 +183,12 @@ export function SlackChannelConfigFormFields({
<RadioGroupItemField
value="assistant"
id="assistant"
label="Search Assistant"
label="Specific Assistant"
sublabel="Control both the documents and the prompt to use for answering questions"
/>
<RadioGroupItemField
value="non_search_assistant"
id="non_search_assistant"
label="Non-Search Assistant"
sublabel="Chat with an assistant that does not use documents"
/>
</RadioGroup>
</div>
{values.knowledge_source === "document_sets" &&
documentSets.length > 0 && (
<div className="mt-4">
@@ -354,6 +281,7 @@ export function SlackChannelConfigFormFields({
/>
</div>
)}
{values.knowledge_source === "assistant" && (
<div className="mt-4">
<SubLabel>
@@ -423,143 +351,100 @@ export function SlackChannelConfigFormFields({
)}
</div>
)}
{values.knowledge_source === "non_search_assistant" && (
<div className="mt-4">
<SubLabel>
<>
Select the non-search assistant OnyxBot will use while answering
questions in Slack.
{syncEnabledAssistants.length > 0 && (
<>
<br />
<span className="text-sm text-text-dark/80">
Note: Some of your assistants have auto-synced connectors
in their document sets. You cannot select these assistants
as they will not be able to answer questions in Slack.{" "}
<button
type="button"
onClick={() =>
setViewSyncEnabledAssistants(
(viewSyncEnabledAssistants) =>
!viewSyncEnabledAssistants
)
}
className="text-sm text-link"
>
{viewSyncEnabledAssistants
? "Hide un-selectable "
: "View all "}
assistants
</button>
</span>
</>
)}
</>
</SubLabel>
</div>
<div className="mt-2">
<AdvancedOptionsToggle
showAdvancedOptions={showAdvancedOptions}
setShowAdvancedOptions={setShowAdvancedOptions}
/>
</div>
{showAdvancedOptions && (
<div className="mt-4">
<div className="w-64 mb-4">
<SelectorFormField
name="persona_id"
options={nonSearchAssistants.map((persona) => ({
name: persona.name,
value: persona.id,
}))}
name="response_type"
label="Answer Type"
tooltip="Controls the format of OnyxBot's responses."
options={[
{ name: "Standard", value: "citations" },
{ name: "Detailed", value: "quotes" },
]}
/>
</div>
)}
</div>
<Separator className="my-4" />
<Accordion type="multiple" className=" gap-y-2 w-full">
{values.knowledge_source !== "non_search_assistant" && (
<AccordionItem value="search-options">
<AccordionTrigger className="text-text">
Search Configuration
</AccordionTrigger>
<AccordionContent>
<div className="space-y-4">
<div className="w-64">
<SelectorFormField
name="response_type"
label="Answer Type"
tooltip="Controls the format of OnyxBot's responses."
options={[
{ name: "Standard", value: "citations" },
{ name: "Detailed", value: "quotes" },
]}
/>
</div>
<CheckFormField
name="enable_auto_filters"
label="Enable LLM Autofiltering"
tooltip="If set, the LLM will generate source and time filters based on the user's query"
/>
<CheckFormField
name="answer_validity_check_enabled"
label="Only respond if citations found"
tooltip="If set, will only answer questions where the model successfully produces citations"
/>
</div>
</AccordionContent>
</AccordionItem>
)}
<BooleanFormField
name="show_continue_in_web_ui"
removeIndent
label="Show Continue in Web UI button"
tooltip="If set, will show a button at the bottom of the response that allows the user to continue the conversation in the Onyx Web UI"
/>
<AccordionItem className="mt-4" value="general-options">
<AccordionTrigger>General Configuration</AccordionTrigger>
<AccordionContent>
<div className="space-y-4">
<CheckFormField
name="show_continue_in_web_ui"
label="Show Continue in Web UI button"
tooltip="If set, will show a button at the bottom of the response that allows the user to continue the conversation in the Onyx Web UI"
/>
<CheckFormField
name="still_need_help_enabled"
onChange={(checked: boolean) => {
setFieldValue("still_need_help_enabled", checked);
if (!checked) {
setFieldValue("follow_up_tags", []);
<div className="flex flex-col space-y-3 mt-2">
<BooleanFormField
name="still_need_help_enabled"
removeIndent
onChange={(checked: boolean) => {
setFieldValue("still_need_help_enabled", checked);
if (!checked) {
setFieldValue("follow_up_tags", []);
}
}}
label={'Give a "Still need help?" button'}
tooltip={`OnyxBot's response will include a button at the bottom
of the response that asks the user if they still need help.`}
/>
{values.still_need_help_enabled && (
<CollapsibleSection prompt="Configure Still Need Help Button">
<TextArrayField
name="follow_up_tags"
label="(Optional) Users / Groups to Tag"
values={values}
subtext={
<div>
The Slack users / groups we should tag if the user clicks
the &quot;Still need help?&quot; button. If no emails are
provided, we will not tag anyone and will just react with
a 🆘 emoji to the original message.
</div>
}
}}
label={'Give a "Still need help?" button'}
tooltip={`OnyxBot's response will include a button at the bottom
of the response that asks the user if they still need help.`}
/>
{values.still_need_help_enabled && (
<CollapsibleSection prompt="Configure Still Need Help Button">
<TextArrayField
name="follow_up_tags"
label="(Optional) Users / Groups to Tag"
values={values}
subtext={
<div>
The Slack users / groups we should tag if the user
clicks the &quot;Still need help?&quot; button. If no
emails are provided, we will not tag anyone and will
just react with a 🆘 emoji to the original message.
</div>
}
placeholder="User email or user group name..."
/>
</CollapsibleSection>
)}
placeholder="User email or user group name..."
/>
</CollapsibleSection>
)}
<CheckFormField
name="questionmark_prefilter_enabled"
label="Only respond to questions"
tooltip="If set, OnyxBot will only respond to messages that contain a question mark"
/>
<CheckFormField
name="respond_tag_only"
label="Respond to @OnyxBot Only"
tooltip="If set, OnyxBot will only respond when directly tagged"
/>
<CheckFormField
name="respond_to_bots"
label="Respond to Bot messages"
tooltip="If not set, OnyxBot will always ignore messages from Bots"
/>
<BooleanFormField
name="answer_validity_check_enabled"
removeIndent
label="Only respond if citations found"
tooltip="If set, will only answer questions where the model successfully produces citations"
/>
<BooleanFormField
name="questionmark_prefilter_enabled"
removeIndent
label="Only respond to questions"
tooltip="If set, OnyxBot will only respond to messages that contain a question mark"
/>
<BooleanFormField
name="respond_tag_only"
removeIndent
label="Respond to @OnyxBot Only"
tooltip="If set, OnyxBot will only respond when directly tagged"
/>
<BooleanFormField
name="respond_to_bots"
removeIndent
label="Respond to Bot messages"
tooltip="If not set, OnyxBot will always ignore messages from Bots"
/>
<BooleanFormField
name="enable_auto_filters"
removeIndent
label="Enable LLM Autofiltering"
tooltip="If set, the LLM will generate source and time filters based on the user's query"
/>
<div className="mt-12">
<TextArrayField
name="respond_member_group_list"
label="(Optional) Respond to Certain Users / Groups"
@@ -570,20 +455,20 @@ export function SlackChannelConfigFormFields({
values={values}
placeholder="User email or user group name..."
/>
<StandardAnswerCategoryDropdownField
standardAnswerCategoryResponse={standardAnswerCategoryResponse}
categories={values.standard_answer_categories}
setCategories={(categories: any) =>
setFieldValue("standard_answer_categories", categories)
}
/>
</div>
</AccordionContent>
</AccordionItem>
</Accordion>
</div>
<div className="flex mt-8 gap-x-2 w-full justify-end">
<StandardAnswerCategoryDropdownField
standardAnswerCategoryResponse={standardAnswerCategoryResponse}
categories={values.standard_answer_categories}
setCategories={(categories: any) =>
setFieldValue("standard_answer_categories", categories)
}
/>
</div>
)}
<div className="flex mt-2 gap-x-2 w-full justify-end flex">
{shouldShowPrivacyAlert && (
<TooltipProvider>
<Tooltip>
@@ -633,11 +518,13 @@ export function SlackChannelConfigFormFields({
</Tooltip>
</TooltipProvider>
)}
<Button type="submit">{isUpdate ? "Update" : "Create"}</Button>
<Button onClick={() => {}} type="submit">
{isUpdate ? "Update" : "Create"}
</Button>
<Button type="button" variant="outline" onClick={() => router.back()}>
Cancel
</Button>
</div>
</>
</Form>
);
}

View File

@@ -94,17 +94,3 @@ export const deleteSlackChannelConfig = async (id: number) => {
export function isPersonaASlackBotPersona(persona: Persona) {
return persona.name.startsWith("__slack_bot_persona__");
}
export const fetchSlackChannels = async (botId: number) => {
return fetch(`/api/manage/admin/slack-app/bots/${botId}/channels`, {
method: "GET",
headers: {
"Content-Type": "application/json",
},
}).then((response) => {
if (!response.ok) {
throw new Error("Failed to fetch Slack channels");
}
return response.json();
});
};

View File

@@ -78,6 +78,30 @@ function SlackBotEditPage({
/>
<Separator />
<div className="my-8" />
<Link
className="
flex
py-2
px-4
mt-2
border
border-border
h-fit
cursor-pointer
hover:bg-hover
text-sm
w-80
"
href={`/admin/bots/${unwrappedParams["bot-id"]}/channels/new`}
>
<div className="mx-auto flex">
<FiPlusSquare className="my-auto mr-2" />
New Slack Channel Configuration
</div>
</Link>
<div className="mt-8">
<SlackChannelConfigsTable
slackBotId={slackBot.id}

View File

@@ -11,7 +11,6 @@ import {
GeminiIcon,
OpenSourceIcon,
AnthropicSVG,
IconProps,
} from "@/components/icons/icons";
import { FaRobot } from "react-icons/fa";
@@ -75,36 +74,29 @@ export interface LLMProviderDescriptor {
}
export const getProviderIcon = (providerName: string, modelName?: string) => {
const modelNameToIcon = (
modelName: string,
fallbackIcon: ({ size, className }: IconProps) => JSX.Element
): (({ size, className }: IconProps) => JSX.Element) => {
if (modelName?.toLowerCase().includes("amazon")) {
return AmazonIcon;
}
if (modelName?.toLowerCase().includes("phi")) {
return MicrosoftIconSVG;
}
if (modelName?.toLowerCase().includes("mistral")) {
return MistralIcon;
}
if (modelName?.toLowerCase().includes("llama")) {
return MetaIcon;
}
if (modelName?.toLowerCase().includes("gemini")) {
return GeminiIcon;
}
if (modelName?.toLowerCase().includes("claude")) {
return AnthropicIcon;
} else {
return fallbackIcon;
}
};
switch (providerName) {
case "openai":
// Special cases for openai based on modelName
return modelNameToIcon(modelName || "", OpenAIIcon);
if (modelName?.toLowerCase().includes("amazon")) {
return AmazonIcon;
}
if (modelName?.toLowerCase().includes("phi")) {
return MicrosoftIconSVG;
}
if (modelName?.toLowerCase().includes("mistral")) {
return MistralIcon;
}
if (modelName?.toLowerCase().includes("llama")) {
return MetaIcon;
}
if (modelName?.toLowerCase().includes("gemini")) {
return GeminiIcon;
}
if (modelName?.toLowerCase().includes("claude")) {
return AnthropicIcon;
}
return OpenAIIcon; // Default for openai
case "anthropic":
return AnthropicSVG;
case "bedrock":
@@ -112,7 +104,7 @@ export const getProviderIcon = (providerName: string, modelName?: string) => {
case "azure":
return AzureIcon;
default:
return modelNameToIcon(modelName || "", CPUIcon);
return CPUIcon;
}
};

View File

@@ -33,6 +33,11 @@ import EditPropertyModal from "@/components/modals/EditPropertyModal";
import * as Yup from "yup";
// since the uploaded files are cleaned up after some period of time
// re-indexing will not work for the file connector. Also, it would not
// make sense to re-index, since the files will not have changed.
const CONNECTOR_TYPES_THAT_CANT_REINDEX: ValidSources[] = [ValidSources.File];
// synchronize these validations with the SQLAlchemy connector class until we have a
// centralized schema for both frontend and backend
const RefreshFrequencySchema = Yup.object().shape({
@@ -263,18 +268,21 @@ function Main({ ccPairId }: { ccPairId: number }) {
{ccPair.is_editable_for_current_user && (
<div className="ml-auto flex gap-x-2">
<ReIndexButton
ccPairId={ccPair.id}
connectorId={ccPair.connector.id}
credentialId={ccPair.credential.id}
isDisabled={
ccPair.indexing ||
ccPair.status === ConnectorCredentialPairStatus.PAUSED
}
isIndexing={ccPair.indexing}
isDeleting={isDeleting}
/>
{!CONNECTOR_TYPES_THAT_CANT_REINDEX.includes(
ccPair.connector.source
) && (
<ReIndexButton
ccPairId={ccPair.id}
connectorId={ccPair.connector.id}
credentialId={ccPair.credential.id}
isDisabled={
ccPair.indexing ||
ccPair.status === ConnectorCredentialPairStatus.PAUSED
}
isIndexing={ccPair.indexing}
isDeleting={isDeleting}
/>
)}
{!isDeleting && <ModifyStatusButtonCluster ccPair={ccPair} />}
</div>
)}

View File

@@ -17,7 +17,7 @@ import { DocumentUpdatedAtBadge } from "@/components/search/DocumentUpdatedAtBad
import { DocumentSet } from "@/lib/types";
import { SourceIcon } from "@/components/SourceIcon";
import { Connector } from "@/lib/connectors/connectors";
import { HorizontalFilters } from "@/components/filters/SourceSelector";
import { HorizontalFilters } from "@/app/chat/shared_chat_search/Filters";
const DocumentDisplay = ({
document,

View File

@@ -231,7 +231,7 @@ export function SettingsForm() {
<Checkbox
label="Pro Search Disabled"
sublabel="If set, users will not be able to use Pro Search."
checked={settings.pro_search_disabled ?? false}
checked={settings.pro_search_disabled}
onChange={(e) =>
handleToggleSettingsField("pro_search_disabled", e.target.checked)
}

View File

@@ -10,7 +10,7 @@ export interface Settings {
notifications: Notification[];
needs_reindexing: boolean;
gpu_enabled: boolean;
pro_search_disabled: boolean | null;
pro_search_disabled: boolean;
product_gating: GatingType;
auto_scroll: boolean;
}

View File

@@ -10,17 +10,16 @@ import {
useRef,
useState,
} from "react";
import { useSidebarVisibility } from "@/components/chat/hooks";
import FunctionalHeader from "@/components/chat/Header";
import { useSidebarVisibility } from "@/components/chat_search/hooks";
import FunctionalHeader from "@/components/chat_search/Header";
import { useRouter } from "next/navigation";
import { pageType } from "../chat/sessionSidebar/types";
import FixedLogo from "../../components/logo/FixedLogo";
import FixedLogo from "../chat/shared_chat_search/FixedLogo";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { useChatContext } from "@/components/context/ChatContext";
import { HistorySidebar } from "../chat/sessionSidebar/HistorySidebar";
import { useAssistants } from "@/components/context/AssistantsContext";
import AssistantModal from "./mine/AssistantModal";
import { useSidebarShortcut } from "@/lib/browserUtilities";
interface SidebarWrapperProps<T extends object> {
initiallyToggled: boolean;
@@ -72,8 +71,23 @@ export default function SidebarWrapper<T extends object>({
const [showAssistantsModal, setShowAssistantsModal] = useState(false);
const router = useRouter();
useEffect(() => {
const handleKeyDown = (event: KeyboardEvent) => {
if (event.metaKey || event.ctrlKey) {
switch (event.key.toLowerCase()) {
case "e":
event.preventDefault();
toggleSidebar();
break;
}
}
};
useSidebarShortcut(router, toggleSidebar);
window.addEventListener("keydown", handleKeyDown);
return () => {
window.removeEventListener("keydown", handleKeyDown);
};
}, [router]);
return (
<div className="flex relative overflow-x-hidden overscroll-contain flex-col w-full h-screen">

View File

@@ -28,12 +28,6 @@ export const resetPassword = async (
});
if (!response.ok) {
const error = await response.json();
if (error?.detail?.code === "RESET_PASSWORD_INVALID_PASSWORD") {
throw new Error(error.detail.reason || "Invalid password");
}
const errorMessage =
error?.detail || "An error occurred during password reset.";
throw new Error(errorMessage);
throw new Error("Failed to reset password");
}
};

View File

@@ -1,5 +1,5 @@
"use client";
import React, { useState, useEffect } from "react";
import React, { useState } from "react";
import { resetPassword } from "../forgot-password/utils";
import AuthFlowContainer from "@/components/auth/AuthFlowContainer";
import CardSection from "@/components/admin/CardSection";
@@ -13,28 +13,13 @@ import { TextFormField } from "@/components/admin/connectors/Field";
import { usePopup } from "@/components/admin/connectors/Popup";
import { Spinner } from "@/components/Spinner";
import { redirect, useSearchParams } from "next/navigation";
import {
NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED,
TENANT_ID_COOKIE_NAME,
} from "@/lib/constants";
import Cookies from "js-cookie";
import { NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED } from "@/lib/constants";
const ResetPasswordPage: React.FC = () => {
const { popup, setPopup } = usePopup();
const [isWorking, setIsWorking] = useState(false);
const searchParams = useSearchParams();
const token = searchParams.get("token");
const tenantId = searchParams.get(TENANT_ID_COOKIE_NAME);
// Keep search param same name as cookie for simplicity
useEffect(() => {
if (tenantId) {
Cookies.set(TENANT_ID_COOKIE_NAME, tenantId, {
path: "/",
expires: 1 / 24,
}); // Expires in 1 hour
}
}, [tenantId]);
if (!NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED) {
redirect("/auth/login");
@@ -78,18 +63,10 @@ const ResetPasswordPage: React.FC = () => {
redirect("/auth/login");
}, 1000);
} catch (error) {
if (error instanceof Error) {
setPopup({
type: "error",
message:
error.message || "An error occurred during password reset.",
});
} else {
setPopup({
type: "error",
message: "An unexpected error occurred. Please try again.",
});
}
setPopup({
type: "error",
message: "An error occurred. Please try again.",
});
} finally {
setIsWorking(false);
}

View File

@@ -3,7 +3,7 @@
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { useContext, useState, useRef, useLayoutEffect } from "react";
import { ChevronDownIcon } from "@/components/icons/icons";
import { MinimalMarkdown } from "@/components/chat/MinimalMarkdown";
import { MinimalMarkdown } from "@/components/chat_search/MinimalMarkdown";
export function ChatBanner() {
const settings = useContext(SettingsContext);

View File

@@ -89,16 +89,16 @@ import { useChatContext } from "@/components/context/ChatContext";
import { v4 as uuidv4 } from "uuid";
import { ChatPopup } from "./ChatPopup";
import FunctionalHeader from "@/components/chat/Header";
import { useSidebarVisibility } from "@/components/chat/hooks";
import FunctionalHeader from "@/components/chat_search/Header";
import { useSidebarVisibility } from "@/components/chat_search/hooks";
import {
PRO_SEARCH_TOGGLED_COOKIE_NAME,
SIDEBAR_TOGGLED_COOKIE_NAME,
} from "@/components/resizable/constants";
import FixedLogo from "../../components/logo/FixedLogo";
import FixedLogo from "./shared_chat_search/FixedLogo";
import { DeleteEntityModal } from "../../components/modals/DeleteEntityModal";
import { MinimalMarkdown } from "@/components/chat/MinimalMarkdown";
import { MinimalMarkdown } from "@/components/chat_search/MinimalMarkdown";
import ExceptionTraceModal from "@/components/modals/ExceptionTraceModal";
import {
@@ -108,10 +108,10 @@ import {
} from "./tools/constants";
import { useUser } from "@/components/user/UserProvider";
import { ApiKeyModal } from "@/components/llm/ApiKeyModal";
import BlurBackground from "../../components/chat/BlurBackground";
import BlurBackground from "./shared_chat_search/BlurBackground";
import { NoAssistantModal } from "@/components/modals/NoAssistantModal";
import { useAssistants } from "@/components/context/AssistantsContext";
import TextView from "@/components/chat/TextView";
import TextView from "@/components/chat_search/TextView";
import { Modal } from "@/components/Modal";
import { useSendMessageToParent } from "@/lib/extension/utils";
import {
@@ -124,11 +124,6 @@ import { UserSettingsModal } from "./modal/UserSettingsModal";
import { AlignStartVertical } from "lucide-react";
import { AgenticMessage } from "./message/AgenticMessage";
import AssistantModal from "../assistants/mine/AssistantModal";
import {
OperatingSystem,
useOperatingSystem,
useSidebarShortcut,
} from "@/lib/browserUtilities";
const TEMP_USER_MESSAGE_ID = -1;
const TEMP_ASSISTANT_MESSAGE_ID = -2;
@@ -471,6 +466,9 @@ export function ChatPage({
}
return;
}
const shouldScrollToBottom =
visibleRange.get(existingChatSessionId) === undefined ||
visibleRange.get(existingChatSessionId)?.end == 0;
clearSelectedDocuments();
setIsFetchingChatMessages(true);
@@ -508,13 +506,16 @@ export function ChatPage({
// go to bottom. If initial load, then do a scroll,
// otherwise just appear at the bottom
if (shouldScrollToBottom) {
scrollInitialized.current = false;
}
scrollInitialized.current = false;
if (!hasPerformedInitialScroll) {
clientScrollToBottom();
} else if (isChatSessionSwitch) {
clientScrollToBottom(true);
if (shouldScrollToBottom) {
if (!hasPerformedInitialScroll && autoScrollEnabled) {
clientScrollToBottom();
} else if (isChatSessionSwitch && autoScrollEnabled) {
clientScrollToBottom(true);
}
}
setIsFetchingChatMessages(false);
@@ -949,15 +950,15 @@ export function ChatPage({
// Check if all messages are currently rendered
if (currentVisibleRange.end < messageHistory.length) {
// Update visible range to include the last messages
// updateCurrentVisibleRange({
// start: Math.max(
// 0,
// messageHistory.length -
// (currentVisibleRange.end - currentVisibleRange.start)
// ),
// end: messageHistory.length,
// mostVisibleMessageId: currentVisibleRange.mostVisibleMessageId,
// });
updateCurrentVisibleRange({
start: Math.max(
0,
messageHistory.length -
(currentVisibleRange.end - currentVisibleRange.start)
),
end: messageHistory.length,
mostVisibleMessageId: currentVisibleRange.mostVisibleMessageId,
});
// Wait for the state update and re-render before scrolling
setTimeout(() => {
@@ -1028,7 +1029,6 @@ export function ChatPage({
) {
setDocumentSidebarToggled(false);
}
clientScrollToBottom();
}, [chatSessionIdRef.current]);
const loadNewPageLogic = (event: MessageEvent) => {
@@ -1063,6 +1063,7 @@ export function ChatPage({
if (!documentSidebarInitialWidth && maxDocumentSidebarWidth) {
documentSidebarInitialWidth = Math.min(700, maxDocumentSidebarWidth);
}
class CurrentMessageFIFO {
private stack: PacketType[] = [];
isComplete: boolean = false;
@@ -1326,9 +1327,7 @@ export function ChatPage({
searchParams.get(SEARCH_PARAM_NAMES.SYSTEM_PROMPT) || undefined,
useExistingUserMessage: isSeededChat,
useLanggraph:
!settings?.settings.pro_search_disabled &&
proSearchEnabled &&
retrievalEnabled,
!settings?.settings.pro_search_disabled && proSearchEnabled,
});
const delay = (ms: number) => {
@@ -1436,22 +1435,21 @@ export function ChatPage({
}
}
// // Continuously refine the sub_questions based on the packets that we receive
// Continuously refine the sub_questions based on the packets that we receive
if (
Object.hasOwn(packet, "stop_reason") &&
Object.hasOwn(packet, "level_question_num")
) {
sub_questions = constructSubQuestions(
sub_questions,
packet as StreamStopInfo
);
// sub_questions = constructSubQuestions(
// sub_questions,
// packet as StreamStopInfo
// );
} else if (Object.hasOwn(packet, "sub_question")) {
is_generating = true;
sub_questions = constructSubQuestions(
sub_questions,
packet as SubQuestionPiece
);
setAgenticGenerating(true);
} else if (Object.hasOwn(packet, "sub_query")) {
sub_questions = constructSubQuestions(
sub_questions,
@@ -1660,7 +1658,6 @@ export function ChatPage({
completeMessageMapOverride: currentMessageMap(completeMessageDetail),
});
}
setAgenticGenerating(false);
resetRegenerationState(currentSessionId());
updateChatState("input");
@@ -1788,7 +1785,6 @@ export function ChatPage({
// Used to maintain a "time out" for history sidebar so our existing refs can have time to process change
const [untoggled, setUntoggled] = useState(false);
const [loadingError, setLoadingError] = useState<string | null>(null);
const [agenticGenerating, setAgenticGenerating] = useState(false);
const explicitlyUntoggle = () => {
setShowHistorySidebar(false);
@@ -1833,17 +1829,17 @@ export function ChatPage({
const autoScrollEnabled =
user?.preferences?.auto_scroll == null
? settings?.enterpriseSettings?.auto_scroll || false
: user?.preferences?.auto_scroll! && !agenticGenerating;
: user?.preferences?.auto_scroll!;
useScrollonStream({
chatState: currentSessionChatState,
scrollableDivRef,
scrollDist,
endDivRef,
debounceNumber,
mobile: settings?.isMobile,
enableAutoScroll: autoScrollEnabled,
});
// useScrollonStream({
// chatState: currentSessionChatState,
// scrollableDivRef,
// scrollDist,
// endDivRef,
// debounceNumber,
// mobile: settings?.isMobile,
// enableAutoScroll: autoScrollEnabled,
// });
// Virtualization + Scrolling related effects and functions
const scrollInitialized = useRef(false);
@@ -1869,7 +1865,6 @@ export function ChatPage({
newRange: VisibleRange,
forceUpdate?: boolean
) => {
console.log("updateCurrentVisibleRange", newRange);
if (
scrollInitialized.current &&
visibleRange.get(loadedIdSessionRef.current) == undefined &&
@@ -1908,54 +1903,26 @@ export function ChatPage({
scrollInitialized.current = true;
}
};
const setVisibleRangeForCurrentSessionId = (newRange: VisibleRange) => {
console.log("setVisibleRangeForCurrentSessionId", newRange);
setVisibleRange((prevState) => {
const newState = new Map(prevState);
newState.set(currentSessionId(), newRange);
return newState;
});
};
function updateVisibleRangeBasedOnScroll() {
const updateVisibleRangeBasedOnScroll = () => {
if (!scrollInitialized.current) return;
const scrollableDiv = scrollableDivRef.current;
if (!scrollableDiv) return;
const distanceFromBottom =
scrollableDiv.scrollHeight -
scrollableDiv.scrollTop -
scrollableDiv.clientHeight;
const isNearBottom = distanceFromBottom < 200;
// If user is near bottom, we treat the last message as "most visible"
if (isNearBottom) {
const startIndex = Math.max(0, messageHistory.length - BUFFER_COUNT);
const endIndex = messageHistory.length;
setVisibleRangeForCurrentSessionId({
start: startIndex,
end: endIndex,
mostVisibleMessageId:
messageHistory.length > 0
? messageHistory[messageHistory.length - 1].messageId
: null,
});
return;
}
// otherwise do the bounding rect logic:
let mostVisibleMessageIndex = -1;
const viewportHeight = scrollableDiv.clientHeight;
let mostVisibleMessageIndex = -1;
messageHistory.forEach((message, index) => {
const elem = document.getElementById(`message-${message.messageId}`);
if (elem) {
const rect = elem.getBoundingClientRect();
const isVisible = rect.top < viewportHeight && rect.bottom >= 0;
const messageElement = document.getElementById(
`message-${message.messageId}`
);
if (messageElement) {
const rect = messageElement.getBoundingClientRect();
const isVisible = rect.bottom <= viewportHeight && rect.bottom > 0;
if (isVisible && index > mostVisibleMessageIndex) {
mostVisibleMessageIndex = index;
}
}
// clientScrollToBottom;
});
if (mostVisibleMessageIndex !== -1) {
@@ -1964,50 +1931,34 @@ export function ChatPage({
messageHistory.length,
mostVisibleMessageIndex + BUFFER_COUNT + 1
);
setVisibleRangeForCurrentSessionId({
updateCurrentVisibleRange({
start: startIndex,
end: endIndex,
mostVisibleMessageId: messageHistory[mostVisibleMessageIndex].messageId,
});
}
}
};
useEffect(() => {
initializeVisibleRange();
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [router, messageHistory]);
useEffect(() => {
console.log("useEffect has been called");
const scrollEl = scrollableDivRef.current;
if (!scrollEl) {
console.log("no scrollEl");
return;
}
useLayoutEffect(() => {
const scrollableDiv = scrollableDivRef.current;
const handleScroll = () => {
requestAnimationFrame(() => {
updateVisibleRangeBasedOnScroll();
});
updateVisibleRangeBasedOnScroll();
};
const attachScrollListener = () => {
if (scrollEl) {
scrollEl.addEventListener("scroll", handleScroll);
} else {
console.log("scrollEl not available, retrying in 100ms");
setTimeout(attachScrollListener, 100);
}
};
attachScrollListener();
scrollableDiv?.addEventListener("scroll", handleScroll);
return () => {
if (scrollEl) {
scrollEl.removeEventListener("scroll", handleScroll);
}
scrollableDiv?.removeEventListener("scroll", handleScroll);
};
}, [scrollableDivRef, messageHistory, currentSessionId()]);
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [messageHistory]);
const imageFileInMessageHistory = useMemo(() => {
return messageHistory
@@ -2101,7 +2052,24 @@ export function ChatPage({
llmOverrideManager.updateImageFilesPresent(imageFileInMessageHistory);
}, [imageFileInMessageHistory]);
useSidebarShortcut(router, toggleSidebar);
useEffect(() => {
const handleKeyDown = (event: KeyboardEvent) => {
if (event.metaKey || event.ctrlKey) {
switch (event.key.toLowerCase()) {
case "e":
event.preventDefault();
toggleSidebar();
break;
}
}
};
window.addEventListener("keydown", handleKeyDown);
return () => {
window.removeEventListener("keydown", handleKeyDown);
};
// eslint-disable-next-line react-hooks/exhaustive-deps
}, [router]);
const [sharedChatSession, setSharedChatSession] =
useState<ChatSession | null>();
@@ -2538,7 +2506,7 @@ export function ChatPage({
? messageHistory
: messageHistory.slice(
currentVisibleRange.start,
currentVisibleRange.end + 2
currentVisibleRange.end
)
).map((message, fauxIndex) => {
const i =
@@ -3102,19 +3070,20 @@ export function ChatPage({
</div>
<div
ref={inputRef}
className="absolute pointer-events-none bottom-0 z-10 w-full"
className="absolute bottom-0 z-10 w-full"
>
{aboveHorizon && (
<div className="mx-auto w-fit !pointer-events-none flex sticky justify-center">
<button
onClick={() => clientScrollToBottom()}
className="p-1 pointer-events-auto rounded-2xl bg-background-strong border border-border mx-auto "
>
<FiArrowDown size={18} />
</button>
</div>
)}
<div className="pointer-events-auto w-[95%] mx-auto relative mb-8">
<div className="w-[95%] mx-auto relative mb-8">
{aboveHorizon && (
<div className="pointer-events-none w-full bg-transparent flex sticky justify-center">
<button
onClick={() => clientScrollToBottom()}
className="p-1 pointer-events-auto rounded-2xl bg-background-strong border border-border mb-2 mx-auto "
>
<FiArrowDown size={18} />
</button>
</div>
)}
<ChatInputBar
proSearchEnabled={proSearchEnabled}
setProSearchEnabled={() => toggleProSearch()}

View File

@@ -1,7 +1,7 @@
"use client";
import { useChatContext } from "@/components/context/ChatContext";
import { ChatPage } from "./ChatPage";
import FunctionalWrapper from "../../components/chat/FunctionalWrapper";
import FunctionalWrapper from "./shared_chat_search/FunctionalWrapper";
export default function WrappedChat({
firstMessage,

View File

@@ -39,6 +39,7 @@ export function DocumentMetadataBlock({
{metadataEntries.length > 0 && (
<>
<div className="mx-1 h-4 border-l border-border" />
<div className="flex items-center overflow-hidden">
{metadataEntries
.slice(0, MAX_METADATA_ITEMS)

View File

@@ -1,6 +1,5 @@
import React, { useContext, useEffect, useRef, useState } from "react";
import { FiPlusCircle, FiPlus, FiInfo, FiX, FiFilter } from "react-icons/fi";
import { FiLoader } from "react-icons/fi";
import { ChatInputOption } from "./ChatInputOption";
import { Persona } from "@/app/admin/assistants/interfaces";
import LLMPopover from "./LLMPopover";
@@ -25,7 +24,7 @@ import {
} from "@/components/ui/tooltip";
import { Hoverable } from "@/components/Hoverable";
import { ChatState } from "../types";
import UnconfiguredProviderText from "@/components/chat/UnconfiguredProviderText";
import UnconfiguredProviderText from "@/components/chat_search/UnconfiguredProviderText";
import { useAssistants } from "@/components/context/AssistantsContext";
import { CalendarIcon, TagIcon, XIcon } from "lucide-react";
import { FilterPopup } from "@/components/search/filtering/FilterPopup";
@@ -37,9 +36,6 @@ import { buildImgUrl } from "../files/images/utils";
import { useUser } from "@/components/user/UserProvider";
import { AgenticToggle } from "./AgenticToggle";
import { SettingsContext } from "@/components/settings/SettingsProvider";
import { LoadingIndicator } from "react-select/dist/declarations/src/components/indicators";
import { FidgetSpinner } from "react-loader-spinner";
import { LoadingAnimation } from "@/components/Loading";
const MAX_INPUT_HEIGHT = 200;
export const SourceChip2 = ({
@@ -713,16 +709,12 @@ export function ChatInputBar({
<SourceChip
key={`file-${index}`}
icon={
file.isUploading ? (
<FiLoader className="animate-spin" />
) : (
<img
className="h-full py-.5 object-cover rounded-lg bg-background cursor-pointer"
src={buildImgUrl(file.id)}
/>
)
<img
className="h-full py-.5 object-cover rounded-lg bg-background cursor-pointer"
src={buildImgUrl(file.id)}
/>
}
title={file.name || "File" + file.id}
title={file.name || "File"}
onRemove={() => {
setFiles(
files.filter(

View File

@@ -5,7 +5,7 @@ import {
PopoverTrigger,
} from "@/components/ui/popover";
import { ChatInputOption } from "./ChatInputOption";
import { defaultModelsByProvider, getDisplayNameForModel } from "@/lib/hooks";
import { getDisplayNameForModel } from "@/lib/hooks";
import {
checkLLMSupportsImageInput,
destructureValue,
@@ -61,23 +61,22 @@ export default function LLMPopover({
llmOptionsByProvider[llmProvider.provider] = [];
}
(
llmProvider.display_model_names ||
defaultModelsByProvider[llmProvider.provider]
).forEach((modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
icon: getProviderIcon(llmProvider.provider, modelName),
});
(llmProvider.display_model_names || llmProvider.model_names).forEach(
(modelName) => {
if (!uniqueModelNames.has(modelName)) {
uniqueModelNames.add(modelName);
llmOptionsByProvider[llmProvider.provider].push({
name: modelName,
value: structureValue(
llmProvider.name,
llmProvider.provider,
modelName
),
icon: getProviderIcon(llmProvider.provider, modelName),
});
}
}
});
);
});
const llmOptions = Object.entries(llmOptionsByProvider).flatMap(

View File

@@ -218,7 +218,6 @@ export interface SubQuestionDetail extends BaseQuestionIdentifier {
sub_queries?: SubQueryDetail[] | null;
context_docs?: { top_documents: OnyxDocument[] } | null;
is_complete?: boolean;
is_stopped?: boolean;
}
export interface SubQueryDetail {
@@ -250,13 +249,14 @@ export const constructSubQuestions = (
// );
if ("stop_reason" in newDetail) {
console.log("STOP REASON");
console.log(newDetail);
const { level, level_question_num } = newDetail;
let subQuestion = updatedSubQuestions.find(
(sq) => sq.level === level && sq.level_question_num === level_question_num
);
if (subQuestion) {
subQuestion.is_complete = true;
subQuestion.is_stopped = true;
// subQuestion.is_complete = true;
}
} else if ("top_documents" in newDetail) {
const { level, level_question_num, top_documents } = newDetail;

View File

@@ -64,7 +64,9 @@ import { MemoizedAnchor, MemoizedParagraph } from "./MemoizedTextComponents";
import { extractCodeText, preprocessLaTeX } from "./codeUtils";
import ToolResult from "../../../components/tools/ToolResult";
import CsvContent from "../../../components/tools/CSVContent";
import SourceCard, { SeeMoreBlock } from "@/components/chat/sources/SourceCard";
import SourceCard, {
SeeMoreBlock,
} from "@/components/chat_search/sources/SourceCard";
import remarkMath from "remark-math";
import rehypeKatex from "rehype-katex";
import "katex/dist/katex.min.css";
@@ -322,6 +324,10 @@ export const AIMessage = ({
? otherMessagesCanSwitchTo?.indexOf(messageId)
: undefined;
const uniqueSources: ValidSources[] = Array.from(
new Set((docs || []).map((doc) => doc.source_type))
).slice(0, 3);
const webSourceDomains: string[] = Array.from(
new Set(
docs
@@ -502,7 +508,7 @@ export const AIMessage = ({
<SeeMoreBlock
toggled={toggledDocumentSidebar!}
toggleDocumentSelection={toggleDocumentSelection!}
docs={docs}
uniqueSources={uniqueSources}
webSourceDomains={webSourceDomains}
/>
</div>

View File

@@ -1,6 +1,9 @@
import React, { useState, useEffect } from "react";
import { OnyxDocument } from "@/lib/search/interfaces";
import { ResultIcon, SeeMoreBlock } from "@/components/chat/sources/SourceCard";
import {
ResultIcon,
SeeMoreBlock,
} from "@/components/chat_search/sources/SourceCard";
import { openDocument } from "@/lib/search/utils";
import { buildDocumentSummaryDisplay } from "@/components/search/DocumentDisplay";
import { ValidSources } from "@/lib/types";
@@ -53,7 +56,7 @@ const SourceCard: React.FC<{
</div>
<div className="flex items-center gap-1 mt-1">
<ResultIcon doc={document} size={18} />
<ResultIcon doc={document} size={14} />
<div className="text-[#4a4a4a] text-xs leading-tight truncate flex-1 min-w-0">
{truncatedIdentifier}
</div>
@@ -105,10 +108,13 @@ export const SourcesDisplay: React.FC<SourcesDisplayProps> = ({
{hasMoreDocuments && (
<SeeMoreBlock
fullWidth
toggled={docSidebarToggled}
toggleDocumentSelection={toggleDocumentSelection}
docs={documents}
uniqueSources={
Array.from(
new Set(documents.map((doc) => doc.source_type))
) as ValidSources[]
}
webSourceDomains={documents.map((doc) => doc.link)}
/>
)}

View File

@@ -55,8 +55,7 @@ const DOC_DELAY_MS = 100;
export const useStreamingMessages = (
subQuestions: SubQuestionDetail[],
allowStreaming: () => void,
onComplete: () => void
allowStreaming: () => void
) => {
const [dynamicSubQuestions, setDynamicSubQuestions] = useState<
SubQuestionDetail[]
@@ -118,39 +117,24 @@ export const useStreamingMessages = (
return;
}
// Stream high-level questions sequentially
// 1) Stream high-level questions in parallel
let didStreamQuestion = false;
let allQuestionsComplete = true;
for (let i = 0; i < actualSubQs.length; i++) {
const sq = actualSubQs[i];
const p = progressRef.current[i];
const dynSQ = dynamicSubQuestionsRef.current[i];
// Always stream the first subquestion (index 0)
// For others, only stream if the previous question is complete
if (i === 0 || (i > 0 && progressRef.current[i - 1].questionDone)) {
if (sq.question) {
const nextIndex = p.questionCharIndex + 1;
if (nextIndex <= sq.question.length) {
dynSQ.question = sq.question.slice(0, nextIndex);
p.questionCharIndex = nextIndex;
if (nextIndex >= sq.question.length && sq.is_stopped) {
p.questionDone = true;
}
didStreamQuestion = true;
// Break after streaming one question to ensure sequential behavior
break;
if (sq.question) {
const nextIndex = p.questionCharIndex + 1;
if (nextIndex <= sq.question.length) {
dynSQ.question = sq.question.slice(0, nextIndex);
p.questionCharIndex = nextIndex;
if (nextIndex >= sq.question.length) {
p.questionDone = true;
}
didStreamQuestion = true;
}
}
if (!p.questionDone) {
allQuestionsComplete = false;
}
}
if (allQuestionsComplete && !didStreamQuestion) {
onComplete();
}
if (didStreamQuestion) {

View File

@@ -9,7 +9,7 @@ import { FiSearch } from "react-icons/fi";
import { OnyxDocument } from "@/lib/search/interfaces";
import { BaseQuestionIdentifier, SubQuestionDetail } from "../interfaces";
import { SourceChip2 } from "../input/ChatInputBar";
import { ResultIcon } from "@/components/chat/sources/SourceCard";
import { ResultIcon } from "@/components/chat_search/sources/SourceCard";
import { openDocument } from "@/lib/search/utils";
import { SourcesDisplay } from "./SourcesDisplay";
import ReactMarkdown from "react-markdown";
@@ -317,7 +317,7 @@ const SubQuestionDisplay: React.FC<{
<div
className={`absolute left-[5px] ${
isFirst ? "top-[15px]" : "top-0"
} bottom-0 w-[2px] bg-neutral-200
} bottom-0 w-[2px] bg-neutral-200
${isLast && !toggled ? "h-4" : "h-full"}`}
/>
@@ -331,7 +331,7 @@ const SubQuestionDisplay: React.FC<{
</div>
<div className="ml-8 w-full">
<div
className="flex -mx-2 rounded-md px-2 hover:bg-[#F5F3ED] items-start py-1.5 my-.5 cursor-pointer"
className="flex -mx-2 rounded-md px-2 hover:bg-[#F5F3ED] items-start py-1.5 my-.5 cursor-pointer"
onClick={() => setToggled(!toggled)}
>
<div className="text-black text-base font-medium leading-normal flex-grow pr-2">
@@ -344,108 +344,102 @@ const SubQuestionDisplay: React.FC<{
size={20}
/>
</div>
<div
className={`transition-all duration-300 ease-in-out ${
toggled ? "max-h-[1000px]" : "max-h-0"
}`}
>
{isVisible && subQuestion && (
<div
className={`transform transition-all duration-300 ease-in-out origin-top ${
toggled ? "scale-y-100 opacity-100" : "scale-y-95 opacity-0"
}`}
>
<div className="pl-0 pb-2">
<div className="mb-4 flex flex-col gap-2">
<div className="text-[#4a4a4a] text-xs font-medium leading-normal">
Searching
</div>
<div className="flex flex-wrap gap-2">
{subQuestion?.sub_queries?.map((query, queryIndex) => (
<SourceChip2
key={queryIndex}
icon={<FiSearch size={10} />}
title={query.query}
includeTooltip
/>
))}
</div>
</div>
{!temporaryDisplay && (
<div
className={`transition-all duration-300 ease-in-out ${
toggled ? "max-h-[1000px]" : "max-h-0"
}`}
>
{isVisible && subQuestion && (
<div
className={`transform transition-all duration-300 ease-in-out origin-top ${
toggled ? "scale-y-100 opacity-100" : "scale-y-95 opacity-0"
}`}
>
<div className="pl-0 pb-2">
{(subQuestion?.is_complete || memoizedDocs?.length > 0) && (
<div className="mb-4 flex flex-col gap-2">
<div className="text-[#4a4a4a] text-xs font-medium leading-normal">
Searching
Reading
</div>
<div className="flex flex-wrap gap-2">
{subQuestion?.sub_queries?.map((query, queryIndex) => (
<SourceChip2
key={queryIndex}
icon={<FiSearch size={10} />}
title={query.query}
includeTooltip
/>
))}
</div>
</div>
{(subQuestion?.is_complete || memoizedDocs?.length > 0) && (
<div className="mb-4 flex flex-col gap-2">
<div className="text-[#4a4a4a] text-xs font-medium leading-normal">
Reading
</div>
<div className="flex flex-wrap gap-2">
{memoizedDocs.length > 0 ? (
memoizedDocs.slice(0, 10).map((doc, docIndex) => {
const truncatedIdentifier =
doc.semantic_identifier?.slice(0, 20) || "";
return (
<SourceChip2
includeAnimation
onClick={() =>
openDocument(doc, setPresentingDocument)
}
key={docIndex}
icon={<ResultIcon doc={doc} size={10} />}
title={`${truncatedIdentifier}${
truncatedIdentifier.length === 20
? "..."
: ""
}`}
/>
);
})
) : (
<div className="text-black text-sm font-medium">
No sources found
</div>
)}
</div>
</div>
)}
{(subQuestion?.is_complete ||
subQuestion?.answer?.length > 0) && (
<div className="flex flex-col gap-2">
<div
className="text-[#4a4a4a] cursor-pointer items-center text-xs flex gap-x-1 font-medium leading-normal"
onClick={() => setAnalysisToggled(!analysisToggled)}
>
Analyzing
<ChevronDown
className={`transition-transform duration-200 ${
analysisToggled ? "" : "-rotate-90"
}`}
size={8}
/>
</div>
{analysisToggled && (
<div className="flex flex-wrap gap-2">
{renderedMarkdown}
{memoizedDocs.length > 0 ? (
memoizedDocs.slice(0, 10).map((doc, docIndex) => {
const truncatedIdentifier =
doc.semantic_identifier?.slice(0, 20) || "";
return (
<SourceChip2
includeAnimation
onClick={() =>
openDocument(doc, setPresentingDocument)
}
key={docIndex}
icon={<ResultIcon doc={doc} size={10} />}
title={`${truncatedIdentifier}${
truncatedIdentifier.length === 20 ? "..." : ""
}`}
/>
);
})
) : (
<div className="text-black text-sm font-medium">
No sources found
</div>
)}
</div>
)}
</div>
</div>
)}
{(subQuestion?.is_complete ||
subQuestion?.answer?.length > 0) && (
<div className="flex flex-col gap-2">
<div
className="text-[#4a4a4a] cursor-pointer items-center text-xs flex gap-x-1 font-medium leading-normal"
onClick={() => setAnalysisToggled(!analysisToggled)}
>
Analyzing
<ChevronDown
className={`transition-transform duration-200 ${
analysisToggled ? "" : "-rotate-90"
}`}
size={8}
/>
</div>
{analysisToggled && (
<div className="flex flex-wrap gap-2">
{renderedMarkdown}
</div>
)}
</div>
)}
</div>
)}
</div>
)}
</div>
)}
</div>
{temporaryDisplay &&
((status === ToggleState.InProgress &&
forcedStatus !== ToggleState.Done) ||
toggled) && (
(status === ToggleState.InProgress || toggled) && (
<div
className={`transform ease-in-out origin-top ${
toggled ? "scale-y-100 opacity-100" : "scale-y-100 opacity-0"
className={`transform transition-all duration-100 ease-in-out origin-top ${
toggled ? "scale-y-100 opacity-100" : "scale-y-95 opacity-0"
}`}
>
<div className="pl-0">
<div className="bg-blaack pl-0">
<div className="flex flex-col gap-2">
<div className="leading-none text-[#4a4a4a] text-xs font-medium">
{temporaryDisplay?.tinyQuestion}
@@ -474,22 +468,9 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
overallAnswerGenerating,
allowDocuments,
}) => {
const [showSummarizing, setShowSummarizing] = useState(
finishedGenerating && !overallAnswerGenerating
);
const { dynamicSubQuestions } = useStreamingMessages(
subQuestions,
() => {},
() => {
setShowSummarizing(true);
}
);
const { dynamicSubQuestions } = useStreamingMessages(subQuestions, () => {});
const { dynamicSubQuestions: dynamicSecondLevelQuestions } =
useStreamingMessages(
secondLevelQuestions || [],
() => {},
() => {}
);
useStreamingMessages(secondLevelQuestions || [], () => {});
const memoizedSubQuestions = useMemo(() => {
return finishedGenerating ? subQuestions : dynamicSubQuestions;
}, [finishedGenerating, dynamicSubQuestions, subQuestions]);
@@ -516,7 +497,10 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
).length == memoizedSubQuestions.length;
const [streamedText, setStreamedText] = useState(
finishedGenerating ? "Summarize findings" : ""
!overallAnswerGenerating ? "Summarize findings" : ""
);
const [showSummarizing, setShowSummarizing] = useState(
finishedGenerating && !overallAnswerGenerating
);
const [canShowSummarizing, setCanShowSummarizing] =
useState(finishedGenerating);
@@ -536,7 +520,7 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
memoizedSubQuestions.length > 0 &&
memoizedSubQuestions.filter(
(subQuestion) => subQuestion?.answer.length > 2
).length == subQuestions.length
).length == memoizedSubQuestions.length
) {
setTimeout(() => {
setCanShowSummarizing(true);
@@ -547,6 +531,20 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
}
}, [memoizedSubQuestions]);
useEffect(() => {
const allSubQuestionsAnswered =
memoizedSubQuestions.length > 0 &&
memoizedSubQuestions.every(
(subQuestion) => subQuestion?.question.length > 5
);
if (allSubQuestionsAnswered) {
setTimeout(() => {
setShowSummarizing(true);
}, PHASE_MIN_MS * 0.75);
}
}, [memoizedSubQuestions, finishedGenerating]);
useEffect(() => {
if (showSummarizing && streamedText !== "Summarize findings") {
const fullText = "Summarize findings";
@@ -562,7 +560,7 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
} else {
clearInterval(streamInterval);
}
}, 10);
}, 8);
}
}, [showSummarizing]);
@@ -706,6 +704,12 @@ const SubQuestionsDisplay: React.FC<SubQuestionsDisplayProps> = ({
(subQuestion?.sub_queries?.length > 0 &&
(subQuestion.answer == undefined ||
subQuestion.answer.length > 3))
// subQuestion == undefined &&
// subQuestion.answer != undefined &&
// !(
// dynamicSubQuestions[index + 1] != undefined ||
// dynamicSubQuestions[index + 1]?.sub_queries?.length! > 0
// )
}
/>
))}

View File

@@ -17,11 +17,11 @@ import { OnyxInitializingLoader } from "@/components/OnyxInitializingLoader";
import { Persona } from "@/app/admin/assistants/interfaces";
import { Button } from "@/components/ui/button";
import { OnyxDocument } from "@/lib/search/interfaces";
import TextView from "@/components/chat/TextView";
import TextView from "@/components/chat_search/TextView";
import { DocumentResults } from "../../documentSidebar/DocumentResults";
import { Modal } from "@/components/Modal";
import FunctionalHeader from "@/components/chat/Header";
import FixedLogo from "../../../../components/logo/FixedLogo";
import FunctionalHeader from "@/components/chat_search/Header";
import FixedLogo from "../../shared_chat_search/FixedLogo";
import { useRouter } from "next/navigation";
function BackToOnyxButton({

View File

@@ -1,7 +1,14 @@
import React from "react";
import React, { useState } from "react";
import { DocumentSet, Tag, ValidSources } from "@/lib/types";
import { SourceMetadata } from "@/lib/search/interfaces";
import { InfoIcon, defaultTailwindCSS } from "@/components/icons/icons";
import {
GearIcon,
InfoIcon,
MinusIcon,
PlusCircleIcon,
PlusIcon,
defaultTailwindCSS,
} from "@/components/icons/icons";
import { HoverPopup } from "@/components/HoverPopup";
import {
FiBook,
@@ -20,7 +27,7 @@ import { Calendar } from "@/components/ui/calendar";
import { Popover, PopoverTrigger } from "@/components/ui/popover";
import { PopoverContent } from "@radix-ui/react-popover";
import { CalendarIcon } from "lucide-react";
import { getTimeAgoString } from "@/lib/dateUtils";
import { buildDateString, getTimeAgoString } from "@/lib/dateUtils";
import { Separator } from "@/components/ui/separator";
import { FilterDropdown } from "@/components/search/filtering/FilterDropdown";

View File

@@ -0,0 +1,65 @@
"use client";
import React, { ReactNode, useEffect, useState } from "react";
import { useRouter } from "next/navigation";
/**
 * Client-side wrapper that owns the sidebar toggle state and registers
 * global navigation shortcuts: Cmd/Ctrl+D routes to /chat and Cmd/Ctrl+S
 * routes to /search; holding Shift opens the destination in a new tab.
 * The `content` render prop receives the current toggle state and a
 * function to change it.
 */
export default function FunctionalWrapper({
  initiallyToggled,
  content,
}: {
  content: (
    toggledSidebar: boolean,
    toggle: (toggled?: boolean) => void
  ) => ReactNode;
  initiallyToggled: boolean;
}) {
  const router = useRouter();
  const [toggledSidebar, setToggledSidebar] = useState(initiallyToggled);

  useEffect(() => {
    const onKeyDown = (event: KeyboardEvent) => {
      // Only react to Cmd (macOS) / Ctrl (elsewhere) chords.
      if (!(event.metaKey || event.ctrlKey)) {
        return;
      }
      const openInNewTab = event.shiftKey;
      const key = event.key.toLowerCase();

      // Map each shortcut key to its destination route; anything else
      // falls through untouched so native browser shortcuts still work.
      const destination = key === "d" ? "/chat" : key === "s" ? "/search" : null;
      if (destination === null) {
        return;
      }

      event.preventDefault();
      if (openInNewTab) {
        window.open(destination, "_blank");
      } else {
        router.push(destination);
      }
    };

    window.addEventListener("keydown", onKeyDown);
    return () => {
      window.removeEventListener("keydown", onKeyDown);
    };
  }, [router]);

  // Flip the sidebar, or force it to an explicit value when one is given.
  const toggle = (value?: boolean) => {
    setToggledSidebar((previous) =>
      value !== undefined ? value : !previous
    );
  };

  return (
    <>
      {" "}
      <div className="overscroll-y-contain overflow-y-scroll overscroll-contain left-0 top-0 w-full h-svh">
        {content(toggledSidebar, toggle)}
      </div>
    </>
  );
}

Some files were not shown because too many files have changed in this diff Show More