Mirror of https://github.com/onyx-dot-app/onyx.git (synced 2026-02-16 23:35:46 +00:00)

Compare commits: sharepoint...refactor-m (25 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | ccde845e47 |  |
|  | cad3517f85 |  |
|  | 191577fa19 |  |
|  | a7d140cb5d |  |
|  | 4ef7e44c95 |  |
|  | e7bd58cc85 |  |
|  | dd18291d51 |  |
|  | 9a5ea03cd1 |  |
|  | eee3054b45 |  |
|  | 5eea47cb1c |  |
|  | c830364c15 |  |
|  | 04f3ba1f3d |  |
|  | 84f76fbee7 |  |
|  | 00aeb3b280 |  |
|  | 8c30085a9e |  |
|  | 419e82f9f4 |  |
|  | 8330e5d8f4 |  |
|  | e06c60a1a3 |  |
|  | e7eef67893 |  |
|  | b5209edffa |  |
|  | 07ad4dc022 |  |
|  | 06e1a2c1a5 |  |
|  | 083c152878 |  |
|  | 06f11a0a06 |  |
|  | fabfcddadb |  |
@@ -210,23 +210,6 @@ class Answer:
        return citations

    def citations_by_subquestion(self) -> dict[SubQuestionKey, list[CitationInfo]]:
        citations_by_subquestion: dict[SubQuestionKey, list[CitationInfo]] = (
            defaultdict(list)
        )
        basic_subq_key = SubQuestionKey(level=BASIC_KEY[0], question_num=BASIC_KEY[1])
        for packet in self.processed_streamed_output:
            if isinstance(packet, CitationInfo):
                if packet.level_question_num is not None and packet.level is not None:
                    citations_by_subquestion[
                        SubQuestionKey(
                            level=packet.level, question_num=packet.level_question_num
                        )
                    ].append(packet)
                elif packet.level is None:
                    citations_by_subquestion[basic_subq_key].append(packet)
        return citations_by_subquestion

    def is_cancelled(self) -> bool:
        if self._is_cancelled:
            return True
@@ -407,7 +407,7 @@ AnswerStream = Iterator[AnswerPacket]
class AnswerPostInfo(BaseModel):
    ai_message_files: list[FileDescriptor]
    qa_docs_response: QADocsResponse | None = None
    rephrased_query: str | None = None
    reference_db_search_docs: list[DbSearchDoc] | None = None
    dropped_indices: list[int] | None = None
    tool_result: ToolCallFinalResult | None = None
backend/onyx/chat/packet_proccessing/process_streamed_packets.py (new file, 392 lines)
@@ -0,0 +1,392 @@
from collections import defaultdict
from collections.abc import Generator
from typing import cast
from typing import DefaultDict
from typing import Union

from sqlalchemy.orm import Session

from onyx.chat.models import AgenticMessageResponseIDInfo
from onyx.chat.models import AgentSearchPacket
from onyx.chat.models import AllCitations
from onyx.chat.models import AnswerPostInfo
from onyx.chat.models import AnswerStream
from onyx.chat.models import CitationInfo
from onyx.chat.models import CustomToolResponse
from onyx.chat.models import FileChatDisplay
from onyx.chat.models import FinalUsedContextDocsResponse
from onyx.chat.models import LLMRelevanceFilterResponse
from onyx.chat.models import MessageResponseIDInfo
from onyx.chat.models import MessageSpecificCitations
from onyx.chat.models import OnyxAnswerPiece
from onyx.chat.models import QADocsResponse
from onyx.chat.models import StreamingError
from onyx.chat.models import StreamStopInfo
from onyx.chat.models import StreamStopReason
from onyx.chat.models import SubQuestionKey
from onyx.chat.models import UserKnowledgeFilePacket
from onyx.chat.packet_proccessing.tool_processing import (
    handle_image_generation_tool_response,
)
from onyx.chat.packet_proccessing.tool_processing import (
    handle_internet_search_tool_response,
)
from onyx.chat.packet_proccessing.tool_processing import (
    handle_search_tool_response_summary,
)
from onyx.configs.constants import BASIC_KEY
from onyx.context.search.models import RetrievalDetails
from onyx.db.models import SearchDoc as DbSearchDoc
from onyx.file_store.models import ChatFileType
from onyx.server.query_and_chat.models import ChatMessageDetail
from onyx.server.query_and_chat.streaming_models import CitationDelta
from onyx.server.query_and_chat.streaming_models import CitationStart
from onyx.server.query_and_chat.streaming_models import CustomToolDelta
from onyx.server.query_and_chat.streaming_models import CustomToolStart
from onyx.server.query_and_chat.streaming_models import ImageGenerationToolStart
from onyx.server.query_and_chat.streaming_models import MessageDelta
from onyx.server.query_and_chat.streaming_models import MessageStart
from onyx.server.query_and_chat.streaming_models import OverallStop
from onyx.server.query_and_chat.streaming_models import Packet
from onyx.server.query_and_chat.streaming_models import SearchToolDelta
from onyx.server.query_and_chat.streaming_models import SearchToolStart
from onyx.server.query_and_chat.streaming_models import SectionEnd
from onyx.tools.models import ToolCallKickoff
from onyx.tools.models import ToolResponse
from onyx.tools.tool_implementations.custom.custom_tool import CUSTOM_TOOL_RESPONSE_ID
from onyx.tools.tool_implementations.custom.custom_tool import CustomToolCallSummary
from onyx.tools.tool_implementations.custom.custom_tool import (
    CustomToolUserFileSnapshot,
)
from onyx.tools.tool_implementations.images.image_generation_tool import (
    IMAGE_GENERATION_RESPONSE_ID,
)
from onyx.tools.tool_implementations.images.image_generation_tool import (
    ImageGenerationResponse,
)
from onyx.tools.tool_implementations.internet_search.internet_search_tool import (
    INTERNET_QUERY_FIELD,
)
from onyx.tools.tool_implementations.internet_search.internet_search_tool import (
    INTERNET_SEARCH_RESPONSE_SUMMARY_ID,
)
from onyx.tools.tool_implementations.internet_search.internet_search_tool import (
    InternetSearchResponseSummary,
)
from onyx.tools.tool_implementations.search.search_tool import QUERY_FIELD
from onyx.tools.tool_implementations.search.search_tool import (
    SEARCH_RESPONSE_SUMMARY_ID,
)
from onyx.tools.tool_implementations.search.search_tool import SearchResponseSummary
from onyx.tools.tool_runner import ToolCallFinalResult
from onyx.utils.logger import setup_logger

logger = setup_logger()

COMMON_TOOL_RESPONSE_TYPES = {
    "image": ChatFileType.IMAGE,
    "csv": ChatFileType.CSV,
}

# Type definitions for packet processing
ChatPacket = Union[
    StreamingError,
    QADocsResponse,
    LLMRelevanceFilterResponse,
    FinalUsedContextDocsResponse,
    ChatMessageDetail,
    AllCitations,
    CitationInfo,
    FileChatDisplay,
    CustomToolResponse,
    MessageResponseIDInfo,
    MessageSpecificCitations,
    AgenticMessageResponseIDInfo,
    StreamStopInfo,
    AgentSearchPacket,
    UserKnowledgeFilePacket,
    Packet,
]


def process_streamed_packets(
    answer_processed_output: AnswerStream,
    reserved_message_id: int,
    selected_db_search_docs: list[DbSearchDoc] | None,
    retrieval_options: RetrievalDetails | None,
    db_session: Session,
) -> Generator[ChatPacket, None, dict[SubQuestionKey, AnswerPostInfo]]:
    """Process the streamed output from the answer and yield chat packets."""
    has_transmitted_answer_piece = False
    packet_index = 0
    current_message_index: int | None = None
    current_tool_index: int | None = None
    current_citation_index: int | None = None

    # Track ongoing tool operations to prevent concurrent operations of the same type
    ongoing_search = False
    ongoing_image_generation = False
    ongoing_internet_search = False

    # Track citations
    citations_emitted = False
    collected_citations: list[CitationInfo] = []

    # Initialize info_by_subq mapping and temp citations storage
    info_by_subq: dict[SubQuestionKey, AnswerPostInfo] = defaultdict(
        lambda: AnswerPostInfo(ai_message_files=[])
    )
    citations_by_key: DefaultDict[SubQuestionKey, list[CitationInfo]] = defaultdict(
        list
    )

    for packet in answer_processed_output:
        # Determine the sub-question key context when applicable
        level = getattr(packet, "level", None)
        level_question_num = getattr(packet, "level_question_num", None)
        key = SubQuestionKey(
            level=level if level is not None else BASIC_KEY[0],
            question_num=(
                level_question_num if level_question_num is not None else BASIC_KEY[1]
            ),
        )

        if isinstance(packet, ToolCallFinalResult):
            info_by_subq[key].tool_result = packet

        # Original packet processing logic continues
        if isinstance(packet, ToolCallKickoff) and not isinstance(
            packet, ToolCallFinalResult
        ):
            # Allocate a new index for this tool call
            current_tool_index = packet_index
            packet_index += 1

            # Handle image generation tool start
            if (
                packet.tool_name == "run_image_generation"
                and not ongoing_image_generation
            ):
                ongoing_image_generation = True
                yield Packet(
                    ind=current_tool_index,
                    obj=ImageGenerationToolStart(),
                )

            if packet.tool_name == "run_search" and not ongoing_search:
                ongoing_search = True
                yield Packet(
                    ind=current_tool_index,
                    obj=SearchToolStart(),
                )

                yield Packet(
                    ind=current_tool_index,
                    obj=SearchToolDelta(
                        queries=[packet.tool_args[QUERY_FIELD]],
                    ),
                )

            if (
                packet.tool_name == "run_internet_search"
                and not ongoing_internet_search
            ):
                ongoing_internet_search = True
                yield Packet(
                    ind=current_tool_index,
                    obj=SearchToolStart(
                        is_internet_search=True,
                    ),
                )

                yield Packet(
                    ind=current_tool_index,
                    obj=SearchToolDelta(
                        queries=[packet.tool_args[INTERNET_QUERY_FIELD]],
                    ),
                )

            # Fallback: treat unknown tool kickoffs as custom tool start
            elif packet.tool_name not in {
                "run_search",
                "run_internet_search",
                "run_image_generation",
            }:
                yield Packet(
                    ind=current_tool_index,
                    obj=CustomToolStart(tool_name=packet.tool_name),
                )

        elif isinstance(packet, ToolResponse):
            # Ensure we have a tool index; fallback to current packet_index if needed
            if current_tool_index is None:
                current_tool_index = packet_index
                packet_index += 1

            if packet.id == SEARCH_RESPONSE_SUMMARY_ID:
                search_response = cast(SearchResponseSummary, packet.response)
                saved_search_docs, dropped_inds = (
                    yield from handle_search_tool_response_summary(
                        current_ind=current_tool_index,
                        search_response=search_response,
                        selected_search_docs=selected_db_search_docs,
                        is_extended=False,
                        dedupe_docs=bool(
                            retrieval_options and retrieval_options.dedupe_docs
                        ),
                    )
                )
                info_by_subq[key].reference_db_search_docs = saved_search_docs
                info_by_subq[key].dropped_indices = dropped_inds
                ongoing_search = False  # Reset search state when tool ends

            elif packet.id == INTERNET_SEARCH_RESPONSE_SUMMARY_ID:
                internet_response = cast(InternetSearchResponseSummary, packet.response)
                saved_internet_docs = yield from handle_internet_search_tool_response(
                    current_tool_index, internet_response
                )
                info_by_subq[key].reference_db_search_docs = saved_internet_docs
                ongoing_internet_search = False

            elif packet.id == IMAGE_GENERATION_RESPONSE_ID:
                img_generation_response = cast(
                    list[ImageGenerationResponse], packet.response
                )
                yield from handle_image_generation_tool_response(
                    current_tool_index, img_generation_response
                )
                ongoing_image_generation = (
                    False  # Reset image generation state when tool ends
                )

            elif packet.id == CUSTOM_TOOL_RESPONSE_ID:
                summary = cast(CustomToolCallSummary, packet.response)
                # Emit start if not already started for this index
                # We emit start once per custom tool index
                yield Packet(
                    ind=current_tool_index,
                    obj=CustomToolStart(tool_name=summary.tool_name),
                )

                # Decide whether we have file outputs or data
                file_ids: list[str] | None = None
                data: dict | list | str | int | float | bool | None = None
                if summary.response_type in ("image", "csv"):
                    try:
                        snapshot = cast(CustomToolUserFileSnapshot, summary.tool_result)
                        file_ids = snapshot.file_ids
                    except Exception:
                        file_ids = None
                else:
                    data = summary.tool_result  # type: ignore[assignment]

                yield Packet(
                    ind=current_tool_index,
                    obj=CustomToolDelta(
                        tool_name=summary.tool_name,
                        response_type=summary.response_type,
                        data=data,
                        file_ids=file_ids,
                    ),
                )

                # End this tool section
                yield Packet(
                    ind=current_tool_index,
                    obj=SectionEnd(),
                )

        elif isinstance(packet, StreamStopInfo):
            if packet.stop_reason == StreamStopReason.FINISHED:
                yield packet
        elif isinstance(packet, OnyxAnswerPiece):
            if has_transmitted_answer_piece:
                if packet.answer_piece is None:
                    # Message is ending, use current message index
                    if current_message_index is not None:
                        yield Packet(
                            ind=current_message_index,
                            obj=SectionEnd(),
                        )
                    # Reset for next message
                    current_message_index = None
                    has_transmitted_answer_piece = False
                else:
                    # Continue with same index for message delta
                    if current_message_index is not None:
                        yield Packet(
                            ind=current_message_index,
                            obj=MessageDelta(
                                content=packet.answer_piece or "",
                            ),
                        )

            elif packet.answer_piece:
                # New message starting, allocate new index
                current_message_index = packet_index
                packet_index += 1
                yield Packet(
                    ind=current_message_index,
                    obj=MessageStart(
                        id=str(reserved_message_id),
                        content=packet.answer_piece,
                    ),
                )
                has_transmitted_answer_piece = True
        elif isinstance(packet, CitationInfo):
            # Collect citations for batch processing
            if not citations_emitted:
                # First citation - allocate index but don't emit yet
                if current_citation_index is None:
                    current_citation_index = packet_index
                    packet_index += 1

                # Collect citation info
                collected_citations.append(
                    CitationInfo(
                        citation_num=packet.citation_num,
                        document_id=packet.document_id,
                    )
                )

        yield cast(ChatPacket, packet)

    if current_message_index is not None:
        yield Packet(ind=current_message_index, obj=SectionEnd())

    # Emit collected citations if any
    if collected_citations and current_citation_index is not None:
        yield Packet(ind=current_citation_index, obj=CitationStart())
        yield Packet(
            ind=current_citation_index, obj=CitationDelta(citations=collected_citations)
        )
        yield Packet(
            ind=current_citation_index,
            obj=SectionEnd(),
        )

    # Yield STOP packet to indicate streaming is complete
    yield Packet(ind=packet_index, obj=OverallStop())

    # Build citation maps per sub-question key using available docs
    for key, citation_list in citations_by_key.items():
        info = info_by_subq[key]
        if not citation_list:
            continue

        doc_id_to_saved_db_id = {
            doc.document_id: doc.id for doc in info.reference_db_search_docs or []
        }

        citation_map: dict[int, int] = {}
        for c in citation_list:
            mapped_db_id = doc_id_to_saved_db_id.get(c.document_id)
            if mapped_db_id is not None and c.citation_num not in citation_map:
                citation_map[c.citation_num] = mapped_db_id

        if citation_map:
            info.message_specific_citations = MessageSpecificCitations(
                citation_map=citation_map
            )

    return info_by_subq
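The new module leans on the generator-return pattern: yielded packets stream out to the client while the per-sub-question AnswerPostInfo mapping travels back as the generator's return value (the caller in stream_chat_message_objects captures it with `yield from`, as shown further down in this diff). A minimal, self-contained sketch of that pattern, using hypothetical names that are not part of the diff:

# Illustrative sketch only: how a delegating generator re-yields items and
# still receives the inner generator's return value.
from collections.abc import Generator


def _produce() -> Generator[str, None, dict[str, int]]:
    yield "packet-1"          # streamed to the consumer
    yield "packet-2"
    return {"post_info": 2}   # available to the delegating caller


def _consume() -> Generator[str, None, None]:
    post_info = yield from _produce()   # re-yields packets, captures the return value
    yield f"done: {post_info['post_info']} packets"


if __name__ == "__main__":
    for item in _consume():
        print(item)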
backend/onyx/chat/packet_proccessing/tool_processing.py (new file, 164 lines)
@@ -0,0 +1,164 @@
from collections.abc import Generator

from onyx.context.search.utils import chunks_or_sections_to_search_docs
from onyx.context.search.utils import dedupe_documents
from onyx.db.chat import create_db_search_doc
from onyx.db.chat import create_search_doc_from_user_file
from onyx.db.chat import translate_db_search_doc_to_server_search_doc
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.models import SearchDoc as DbSearchDoc
from onyx.db.models import UserFile
from onyx.file_store.models import InMemoryChatFile
from onyx.file_store.utils import save_files
from onyx.server.query_and_chat.streaming_models import ImageGenerationToolDelta
from onyx.server.query_and_chat.streaming_models import Packet
from onyx.server.query_and_chat.streaming_models import SearchToolDelta
from onyx.server.query_and_chat.streaming_models import SectionEnd
from onyx.tools.tool_implementations.images.image_generation_tool import (
    ImageGenerationResponse,
)
from onyx.tools.tool_implementations.internet_search.models import (
    InternetSearchResponseSummary,
)
from onyx.tools.tool_implementations.internet_search.utils import (
    internet_search_response_to_search_docs,
)
from onyx.tools.tool_implementations.search.search_tool import SearchResponseSummary


def handle_search_tool_response_summary(
    current_ind: int,
    search_response: SearchResponseSummary,
    selected_search_docs: list[DbSearchDoc] | None,
    is_extended: bool,
    dedupe_docs: bool = False,
    user_files: list[UserFile] | None = None,
    loaded_user_files: list[InMemoryChatFile] | None = None,
) -> Generator[Packet, None, tuple[list[DbSearchDoc], list[int] | None]]:
    dropped_inds = None

    if not selected_search_docs:
        top_docs = chunks_or_sections_to_search_docs(search_response.top_sections)

        deduped_docs = top_docs
        if (
            dedupe_docs and not is_extended
        ):  # Extended tool responses are already deduped
            deduped_docs, dropped_inds = dedupe_documents(top_docs)

        with get_session_with_current_tenant() as db_session:
            reference_db_search_docs = [
                create_db_search_doc(server_search_doc=doc, db_session=db_session)
                for doc in deduped_docs
            ]

    else:
        reference_db_search_docs = selected_search_docs

    doc_ids = {doc.id for doc in reference_db_search_docs}
    if user_files is not None and loaded_user_files is not None:
        for user_file in user_files:
            if user_file.id in doc_ids:
                continue

            associated_chat_file = next(
                (
                    file
                    for file in loaded_user_files
                    if file.file_id == str(user_file.file_id)
                ),
                None,
            )
            # Use create_search_doc_from_user_file to properly add the document to the database
            if associated_chat_file is not None:
                with get_session_with_current_tenant() as db_session:
                    db_doc = create_search_doc_from_user_file(
                        user_file, associated_chat_file, db_session
                    )
                    reference_db_search_docs.append(db_doc)

    response_docs = [
        translate_db_search_doc_to_server_search_doc(db_search_doc)
        for db_search_doc in reference_db_search_docs
    ]

    yield Packet(
        ind=current_ind,
        obj=SearchToolDelta(
            documents=response_docs,
        ),
    )

    yield Packet(
        ind=current_ind,
        obj=SectionEnd(),
    )

    return reference_db_search_docs, dropped_inds


def handle_internet_search_tool_response(
    current_ind: int,
    internet_search_response: InternetSearchResponseSummary,
) -> Generator[Packet, None, list[DbSearchDoc]]:
    server_search_docs = internet_search_response_to_search_docs(
        internet_search_response
    )

    with get_session_with_current_tenant() as db_session:
        reference_db_search_docs = [
            create_db_search_doc(server_search_doc=doc, db_session=db_session)
            for doc in server_search_docs
        ]
        response_docs = [
            translate_db_search_doc_to_server_search_doc(db_search_doc)
            for db_search_doc in reference_db_search_docs
        ]

    yield Packet(
        ind=current_ind,
        obj=SearchToolDelta(
            documents=response_docs,
        ),
    )

    yield Packet(
        ind=current_ind,
        obj=SectionEnd(),
    )

    return reference_db_search_docs


def handle_image_generation_tool_response(
    current_ind: int,
    img_generation_responses: list[ImageGenerationResponse],
) -> Generator[Packet, None, None]:

    # Save files and get file IDs
    file_ids = save_files(
        urls=[img.url for img in img_generation_responses if img.url],
        base64_files=[
            img.image_data for img in img_generation_responses if img.image_data
        ],
    )

    yield Packet(
        ind=current_ind,
        obj=ImageGenerationToolDelta(
            images=[
                {
                    "id": str(file_id),
                    "url": "",  # URL will be constructed by frontend
                    "prompt": img.revised_prompt,
                }
                for file_id, img in zip(file_ids, img_generation_responses)
            ]
        ),
    )

    # Emit ImageToolEnd packet with file information
    yield Packet(
        ind=current_ind,
        obj=SectionEnd(),
    )
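Taken together with the kickoff handling in process_streamed_packets.py above, one internal-search tool call produces roughly the following packet sequence on the wire. This is an illustrative sketch only (not part of the diff); the index 3 and the query string are made-up example values.

# Illustrative only: approximate packet sequence for a single search tool call.
from onyx.server.query_and_chat.streaming_models import (
    Packet,
    SearchToolDelta,
    SearchToolStart,
    SectionEnd,
)

example_sequence = [
    Packet(ind=3, obj=SearchToolStart()),                                  # tool kickoff
    Packet(ind=3, obj=SearchToolDelta(queries=["rephrased user query"])),  # query announced
    Packet(ind=3, obj=SearchToolDelta(documents=[])),                      # saved search docs (empty here)
    Packet(ind=3, obj=SectionEnd()),                                       # tool section closed
]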
@@ -1,6 +1,5 @@
import time
import traceback
from collections import defaultdict
from collections.abc import Callable
from collections.abc import Generator
from collections.abc import Iterator
@@ -17,30 +16,26 @@ from onyx.chat.chat_utils import create_temporary_persona
from onyx.chat.chat_utils import process_kg_commands
from onyx.chat.models import AgenticMessageResponseIDInfo
from onyx.chat.models import AgentMessageIDInfo
from onyx.chat.models import AgentSearchPacket
from onyx.chat.models import AllCitations
from onyx.chat.models import AnswerPostInfo
from onyx.chat.models import AnswerStyleConfig
from onyx.chat.models import ChatOnyxBotResponse
from onyx.chat.models import CitationConfig
from onyx.chat.models import CitationInfo
from onyx.chat.models import CustomToolResponse
from onyx.chat.models import DocumentPruningConfig
from onyx.chat.models import ExtendedToolResponse
from onyx.chat.models import FileChatDisplay
from onyx.chat.models import FinalUsedContextDocsResponse
from onyx.chat.models import LLMRelevanceFilterResponse
from onyx.chat.models import MessageResponseIDInfo
from onyx.chat.models import MessageSpecificCitations
from onyx.chat.models import OnyxAnswerPiece
from onyx.chat.models import PromptConfig
from onyx.chat.models import QADocsResponse
from onyx.chat.models import RefinedAnswerImprovement
from onyx.chat.models import StreamingError
from onyx.chat.models import StreamStopInfo
from onyx.chat.models import StreamStopReason
from onyx.chat.models import SubQuestionKey
from onyx.chat.models import UserKnowledgeFilePacket
from onyx.chat.packet_proccessing.process_streamed_packets import ChatPacket
from onyx.chat.packet_proccessing.process_streamed_packets import (
    process_streamed_packets,
)
from onyx.chat.prompt_builder.answer_prompt_builder import AnswerPromptBuilder
from onyx.chat.prompt_builder.answer_prompt_builder import default_build_system_message
from onyx.chat.prompt_builder.answer_prompt_builder import default_build_user_message
@@ -55,21 +50,13 @@ from onyx.configs.constants import MessageType
from onyx.configs.constants import MilestoneRecordType
from onyx.configs.constants import NO_AUTH_USER_ID
from onyx.context.search.enums import OptionalSearchSetting
from onyx.context.search.enums import QueryFlow
from onyx.context.search.enums import SearchType
from onyx.context.search.models import InferenceSection
from onyx.context.search.models import RetrievalDetails
from onyx.context.search.retrieval.search_runner import (
    inference_sections_from_ids,
)
from onyx.context.search.utils import chunks_or_sections_to_search_docs
from onyx.context.search.utils import dedupe_documents
from onyx.context.search.utils import drop_llm_indices
from onyx.context.search.utils import relevant_sections_to_indices
from onyx.db.chat import attach_files_to_chat_message
from onyx.db.chat import create_db_search_doc
from onyx.db.chat import create_new_chat_message
from onyx.db.chat import create_search_doc_from_user_file
from onyx.db.chat import get_chat_message
from onyx.db.chat import get_chat_session_by_id
from onyx.db.chat import get_db_search_doc_by_id
@@ -77,7 +64,6 @@ from onyx.db.chat import get_doc_query_identifiers_from_model
from onyx.db.chat import get_or_create_root_message
from onyx.db.chat import reserve_message_id
from onyx.db.chat import translate_db_message_to_chat_message_detail
from onyx.db.chat import translate_db_search_doc_to_server_search_doc
from onyx.db.chat import update_chat_session_updated_at_timestamp
from onyx.db.engine.sql_engine import get_session_with_current_tenant
from onyx.db.milestone import check_multi_assistant_milestone
@@ -88,15 +74,12 @@ from onyx.db.models import Persona
from onyx.db.models import SearchDoc as DbSearchDoc
from onyx.db.models import ToolCall
from onyx.db.models import User
from onyx.db.models import UserFile
from onyx.db.persona import get_persona_by_id
from onyx.db.search_settings import get_current_search_settings
from onyx.document_index.factory import get_default_document_index
from onyx.file_store.models import ChatFileType
from onyx.file_store.models import FileDescriptor
from onyx.file_store.models import InMemoryChatFile
from onyx.file_store.utils import load_all_chat_files
from onyx.file_store.utils import save_files
from onyx.kg.models import KGException
from onyx.llm.exceptions import GenAIDisabledException
from onyx.llm.factory import get_llms_for_persona
@@ -110,47 +93,16 @@ from onyx.server.query_and_chat.models import CreateChatMessageRequest
from onyx.server.utils import get_json_line
from onyx.tools.force import ForceUseTool
from onyx.tools.models import SearchToolOverrideKwargs
from onyx.tools.models import ToolResponse
from onyx.tools.tool import Tool
from onyx.tools.tool_constructor import construct_tools
from onyx.tools.tool_constructor import CustomToolConfig
from onyx.tools.tool_constructor import ImageGenerationToolConfig
from onyx.tools.tool_constructor import InternetSearchToolConfig
from onyx.tools.tool_constructor import SearchToolConfig
from onyx.tools.tool_implementations.custom.custom_tool import (
    CUSTOM_TOOL_RESPONSE_ID,
)
from onyx.tools.tool_implementations.custom.custom_tool import CustomToolCallSummary
from onyx.tools.tool_implementations.images.image_generation_tool import (
    IMAGE_GENERATION_RESPONSE_ID,
)
from onyx.tools.tool_implementations.images.image_generation_tool import (
    ImageGenerationResponse,
)
from onyx.tools.tool_implementations.internet_search.internet_search_tool import (
    INTERNET_SEARCH_RESPONSE_SUMMARY_ID,
)
from onyx.tools.tool_implementations.internet_search.internet_search_tool import (
    InternetSearchTool,
)
from onyx.tools.tool_implementations.internet_search.models import (
    InternetSearchResponseSummary,
)
from onyx.tools.tool_implementations.internet_search.utils import (
    internet_search_response_to_search_docs,
)
from onyx.tools.tool_implementations.search.search_tool import (
    FINAL_CONTEXT_DOCUMENTS_ID,
)
from onyx.tools.tool_implementations.search.search_tool import (
    SEARCH_RESPONSE_SUMMARY_ID,
)
from onyx.tools.tool_implementations.search.search_tool import SearchResponseSummary
from onyx.tools.tool_implementations.search.search_tool import SearchTool
from onyx.tools.tool_implementations.search.search_tool import (
    SECTION_RELEVANCE_LIST_ID,
)
from onyx.tools.tool_runner import ToolCallFinalResult
from onyx.utils.logger import setup_logger
from onyx.utils.long_term_log import LongTermLogger
from onyx.utils.telemetry import mt_cloud_telemetry
@@ -201,113 +153,6 @@ def _translate_citations(
    return MessageSpecificCitations(citation_map=citation_to_saved_doc_id_map)


def _handle_search_tool_response_summary(
    packet: ToolResponse,
    db_session: Session,
    selected_search_docs: list[DbSearchDoc] | None,
    dedupe_docs: bool = False,
    user_files: list[UserFile] | None = None,
    loaded_user_files: list[InMemoryChatFile] | None = None,
) -> tuple[QADocsResponse, list[DbSearchDoc], list[int] | None]:
    response_summary = cast(SearchResponseSummary, packet.response)

    is_extended = isinstance(packet, ExtendedToolResponse)
    dropped_inds = None

    if not selected_search_docs:
        top_docs = chunks_or_sections_to_search_docs(response_summary.top_sections)

        deduped_docs = top_docs
        if (
            dedupe_docs and not is_extended
        ):  # Extended tool responses are already deduped
            deduped_docs, dropped_inds = dedupe_documents(top_docs)

        reference_db_search_docs = [
            create_db_search_doc(server_search_doc=doc, db_session=db_session)
            for doc in deduped_docs
        ]

    else:
        reference_db_search_docs = selected_search_docs

    doc_ids = {doc.id for doc in reference_db_search_docs}
    if user_files is not None and loaded_user_files is not None:
        for user_file in user_files:
            if user_file.id in doc_ids:
                continue

            associated_chat_file = next(
                (
                    file
                    for file in loaded_user_files
                    if file.file_id == str(user_file.file_id)
                ),
                None,
            )
            # Use create_search_doc_from_user_file to properly add the document to the database
            if associated_chat_file is not None:
                db_doc = create_search_doc_from_user_file(
                    user_file, associated_chat_file, db_session
                )
                reference_db_search_docs.append(db_doc)

    response_docs = [
        translate_db_search_doc_to_server_search_doc(db_search_doc)
        for db_search_doc in reference_db_search_docs
    ]

    level, question_num = None, None
    if isinstance(packet, ExtendedToolResponse):
        level, question_num = packet.level, packet.level_question_num
    return (
        QADocsResponse(
            rephrased_query=response_summary.rephrased_query,
            top_documents=response_docs,
            predicted_flow=response_summary.predicted_flow,
            predicted_search=response_summary.predicted_search,
            applied_source_filters=response_summary.final_filters.source_type,
            applied_time_cutoff=response_summary.final_filters.time_cutoff,
            recency_bias_multiplier=response_summary.recency_bias_multiplier,
            level=level,
            level_question_num=question_num,
        ),
        reference_db_search_docs,
        dropped_inds,
    )


def _handle_internet_search_tool_response_summary(
    packet: ToolResponse,
    db_session: Session,
) -> tuple[QADocsResponse, list[DbSearchDoc]]:
    internet_search_response = cast(InternetSearchResponseSummary, packet.response)
    server_search_docs = internet_search_response_to_search_docs(
        internet_search_response
    )

    reference_db_search_docs = [
        create_db_search_doc(server_search_doc=doc, db_session=db_session)
        for doc in server_search_docs
    ]
    response_docs = [
        translate_db_search_doc_to_server_search_doc(db_search_doc)
        for db_search_doc in reference_db_search_docs
    ]
    return (
        QADocsResponse(
            rephrased_query=internet_search_response.query,
            top_documents=response_docs,
            predicted_flow=QueryFlow.QUESTION_ANSWER,
            predicted_search=SearchType.INTERNET,
            applied_source_filters=[],
            applied_time_cutoff=None,
            recency_bias_multiplier=1.0,
        ),
        reference_db_search_docs,
    )


def _get_force_search_settings(
    new_msg_req: CreateChatMessageRequest,
    tools: list[Tool],
@@ -392,136 +237,9 @@ def _get_persona_for_chat_session(
    return persona


ChatPacket = (
    StreamingError
    | QADocsResponse
    | LLMRelevanceFilterResponse
    | FinalUsedContextDocsResponse
    | ChatMessageDetail
    | OnyxAnswerPiece
    | AllCitations
    | CitationInfo
    | FileChatDisplay
    | CustomToolResponse
    | MessageSpecificCitations
    | MessageResponseIDInfo
    | AgenticMessageResponseIDInfo
    | StreamStopInfo
    | AgentSearchPacket
    | UserKnowledgeFilePacket
)
ChatPacketStream = Iterator[ChatPacket]


def _process_tool_response(
    packet: ToolResponse,
    db_session: Session,
    selected_db_search_docs: list[DbSearchDoc] | None,
    info_by_subq: dict[SubQuestionKey, AnswerPostInfo],
    retrieval_options: RetrievalDetails | None,
    user_file_files: list[UserFile] | None,
    user_files: list[InMemoryChatFile] | None,
) -> Generator[ChatPacket, None, dict[SubQuestionKey, AnswerPostInfo]]:
    level, level_question_num = (
        (packet.level, packet.level_question_num)
        if isinstance(packet, ExtendedToolResponse)
        else BASIC_KEY
    )

    assert level is not None
    assert level_question_num is not None
    info = info_by_subq[SubQuestionKey(level=level, question_num=level_question_num)]

    # TODO: don't need to dedupe here when we do it in agent flow
    if packet.id == SEARCH_RESPONSE_SUMMARY_ID:
        (
            info.qa_docs_response,
            info.reference_db_search_docs,
            info.dropped_indices,
        ) = _handle_search_tool_response_summary(
            packet=packet,
            db_session=db_session,
            selected_search_docs=selected_db_search_docs,
            # Deduping happens at the last step to avoid harming quality by dropping content early on
            dedupe_docs=bool(retrieval_options and retrieval_options.dedupe_docs),
            user_files=[],
            loaded_user_files=[],
        )

        yield info.qa_docs_response
    elif packet.id == SECTION_RELEVANCE_LIST_ID:
        relevance_sections = packet.response

        if info.reference_db_search_docs is None:
            logger.warning("No reference docs found for relevance filtering")
            return info_by_subq

        llm_indices = relevant_sections_to_indices(
            relevance_sections=relevance_sections,
            items=[
                translate_db_search_doc_to_server_search_doc(doc)
                for doc in info.reference_db_search_docs
            ],
        )

        if info.dropped_indices:
            llm_indices = drop_llm_indices(
                llm_indices=llm_indices,
                search_docs=info.reference_db_search_docs,
                dropped_indices=info.dropped_indices,
            )

        yield LLMRelevanceFilterResponse(llm_selected_doc_indices=llm_indices)
    elif packet.id == FINAL_CONTEXT_DOCUMENTS_ID:
        yield FinalUsedContextDocsResponse(final_context_docs=packet.response)

    elif packet.id == IMAGE_GENERATION_RESPONSE_ID:
        img_generation_response = cast(list[ImageGenerationResponse], packet.response)

        file_ids = save_files(
            urls=[img.url for img in img_generation_response if img.url],
            base64_files=[
                img.image_data for img in img_generation_response if img.image_data
            ],
        )
        info.ai_message_files.extend(
            [
                FileDescriptor(id=str(file_id), type=ChatFileType.IMAGE)
                for file_id in file_ids
            ]
        )
        yield FileChatDisplay(file_ids=[str(file_id) for file_id in file_ids])
    elif packet.id == INTERNET_SEARCH_RESPONSE_SUMMARY_ID:
        (
            info.qa_docs_response,
            info.reference_db_search_docs,
        ) = _handle_internet_search_tool_response_summary(
            packet=packet,
            db_session=db_session,
        )
        yield info.qa_docs_response
    elif packet.id == CUSTOM_TOOL_RESPONSE_ID:
        custom_tool_response = cast(CustomToolCallSummary, packet.response)
        response_type = custom_tool_response.response_type
        if response_type in COMMON_TOOL_RESPONSE_TYPES:
            file_ids = custom_tool_response.tool_result.file_ids
            file_type = COMMON_TOOL_RESPONSE_TYPES[response_type]
            info.ai_message_files.extend(
                [
                    FileDescriptor(id=str(file_id), type=file_type)
                    for file_id in file_ids
                ]
            )
            yield FileChatDisplay(file_ids=[str(file_id) for file_id in file_ids])
        else:
            yield CustomToolResponse(
                response=custom_tool_response.tool_result,
                tool_name=custom_tool_response.tool_name,
            )

    return info_by_subq


def stream_chat_message_objects(
    new_msg_req: CreateChatMessageRequest,
    user: User | None,
@@ -561,6 +279,7 @@ def stream_chat_message_objects(
        new_msg_req.chunks_below = 0

    llm: LLM
    answer: Answer

    try:
        # Move these variables inside the try block
@@ -983,7 +702,6 @@ def stream_chat_message_objects(
        )

        # LLM prompt building, response capturing, etc.

        answer = Answer(
            prompt_builder=prompt_builder,
            is_connected=is_connected,
@@ -1013,41 +731,14 @@ def stream_chat_message_objects(
            skip_gen_ai_answer_generation=new_msg_req.skip_gen_ai_answer_generation,
        )

        info_by_subq: dict[SubQuestionKey, AnswerPostInfo] = defaultdict(
            lambda: AnswerPostInfo(ai_message_files=[])
        # Process streamed packets using the new packet processing module
        info_by_subq = yield from process_streamed_packets(
            answer_processed_output=answer.processed_streamed_output,
            reserved_message_id=reserved_message_id,
            selected_db_search_docs=selected_db_search_docs,
            retrieval_options=retrieval_options,
            db_session=db_session,
        )
        refined_answer_improvement = True
        for packet in answer.processed_streamed_output:
            if isinstance(packet, ToolResponse):
                info_by_subq = yield from _process_tool_response(
                    packet=packet,
                    db_session=db_session,
                    selected_db_search_docs=selected_db_search_docs,
                    info_by_subq=info_by_subq,
                    retrieval_options=retrieval_options,
                    user_file_files=user_file_models,
                    user_files=in_memory_user_files,
                )

            elif isinstance(packet, StreamStopInfo):
                if packet.stop_reason == StreamStopReason.FINISHED:
                    yield packet
            elif isinstance(packet, RefinedAnswerImprovement):
                refined_answer_improvement = packet.refined_answer_improvement
                yield packet
            else:
                if isinstance(packet, ToolCallFinalResult):
                    level, level_question_num = (
                        (packet.level, packet.level_question_num)
                        if packet.level is not None
                        and packet.level_question_num is not None
                        else BASIC_KEY
                    )
                    info = info_by_subq[
                        SubQuestionKey(level=level, question_num=level_question_num)
                    ]
                    info.tool_result = packet
                yield cast(ChatPacket, packet)

    except ValueError as e:
        logger.exception("Failed to process chat message.")
@@ -1091,7 +782,6 @@ def stream_chat_message_objects(
        llm_tokenizer_encode_func=llm_tokenizer_encode_func,
        db_session=db_session,
        chat_session_id=chat_session_id,
        refined_answer_improvement=refined_answer_improvement,
    )
@@ -1103,7 +793,6 @@ def _post_llm_answer_processing(
    llm_tokenizer_encode_func: Callable[[str], list[int]],
    db_session: Session,
    chat_session_id: UUID,
    refined_answer_improvement: bool | None,
) -> Generator[ChatPacket, None, None]:
    """
    Stores messages in the db and yields some final packets to the frontend
@@ -1115,20 +804,6 @@ def _post_llm_answer_processing(
    for tool in tool_list:
        tool_name_to_tool_id[tool.name] = tool_id

    subq_citations = answer.citations_by_subquestion()
    for subq_key in subq_citations:
        info = info_by_subq[subq_key]
        logger.debug("Post-LLM answer processing")
        if info.reference_db_search_docs:
            info.message_specific_citations = _translate_citations(
                citations_list=subq_citations[subq_key],
                db_docs=info.reference_db_search_docs,
            )

        # TODO: AllCitations should contain subq info?
        if not answer.is_cancelled():
            yield AllCitations(citations=subq_citations[subq_key])

    # Saving Gen AI answer and responding with message info

    basic_key = SubQuestionKey(level=BASIC_KEY[0], question_num=BASIC_KEY[1])
@@ -1144,9 +819,7 @@ def _post_llm_answer_processing(
    )
    gen_ai_response_message = partial_response(
        message=answer.llm_answer,
        rephrased_query=(
            info.qa_docs_response.rephrased_query if info.qa_docs_response else None
        ),
        rephrased_query=info.rephrased_query,
        reference_docs=info.reference_db_search_docs,
        files=info.ai_message_files,
        token_count=len(llm_tokenizer_encode_func(answer.llm_answer)),
@@ -1205,7 +878,6 @@ def _post_llm_answer_processing(
                else None
            ),
            error=ERROR_TYPE_CANCELLED if answer.is_cancelled() else None,
            refined_answer_improvement=refined_answer_improvement,
            is_agentic=True,
        )
        agentic_message_ids.append(
@@ -645,7 +645,6 @@ def create_new_chat_message(
    commit: bool = True,
    reserved_message_id: int | None = None,
    overridden_model: str | None = None,
    refined_answer_improvement: bool | None = None,
    is_agentic: bool = False,
) -> ChatMessage:
    if reserved_message_id is not None:
@@ -667,7 +666,6 @@ def create_new_chat_message(
        existing_message.error = error
        existing_message.alternate_assistant_id = alternate_assistant_id
        existing_message.overridden_model = overridden_model
        existing_message.refined_answer_improvement = refined_answer_improvement
        existing_message.is_agentic = is_agentic
        new_chat_message = existing_message
    else:
@@ -687,7 +685,6 @@ def create_new_chat_message(
            error=error,
            alternate_assistant_id=alternate_assistant_id,
            overridden_model=overridden_model,
            refined_answer_improvement=refined_answer_improvement,
            is_agentic=is_agentic,
        )
        db_session.add(new_chat_message)
@@ -1061,11 +1058,6 @@ def translate_db_message_to_chat_message_detail(
        ),
        alternate_assistant_id=chat_message.alternate_assistant_id,
        overridden_model=chat_message.overridden_model,
        sub_questions=translate_db_sub_questions_to_server_objects(
            chat_message.sub_questions
        ),
        refined_answer_improvement=chat_message.refined_answer_improvement,
        is_agentic=chat_message.is_agentic,
        error=chat_message.error,
    )
@@ -96,6 +96,9 @@ from onyx.server.query_and_chat.models import RenameChatSessionResponse
from onyx.server.query_and_chat.models import SearchFeedbackRequest
from onyx.server.query_and_chat.models import UpdateChatSessionTemperatureRequest
from onyx.server.query_and_chat.models import UpdateChatSessionThreadRequest
from onyx.server.query_and_chat.streaming_utils import (
    create_simplified_packets_for_session,
)
from onyx.server.query_and_chat.token_limit import check_token_rate_limits
from onyx.utils.file_types import UploadMimeTypes
from onyx.utils.headers import get_custom_tool_additional_request_headers
@@ -237,6 +240,14 @@ def get_chat_session(
        prefetch_tool_calls=True,
    )

    # Convert messages to ChatMessageDetail format
    chat_message_details = [
        translate_db_message_to_chat_message_detail(msg) for msg in session_messages
    ]

    # Create simplified packets for the session
    simplified_packets = create_simplified_packets_for_session(chat_message_details)

    return ChatSessionDetailResponse(
        chat_session_id=session_id,
        description=chat_session.description,
@@ -249,13 +260,13 @@ def get_chat_session(
            chat_session.persona.icon_shape if chat_session.persona else None
        ),
        current_alternate_model=chat_session.current_alternate_model,
        messages=[
            translate_db_message_to_chat_message_detail(msg) for msg in session_messages
        ],
        messages=chat_message_details,
        time_created=chat_session.time_created,
        shared_status=chat_session.shared_status,
        current_temperature_override=chat_session.temperature_override,
        deleted=chat_session.deleted,
        # specifically for the Onyx Chat UI
        packets=simplified_packets,
    )
@@ -22,6 +22,7 @@ from onyx.db.enums import ChatSessionSharedStatus
from onyx.file_store.models import FileDescriptor
from onyx.llm.override_models import LLMOverride
from onyx.llm.override_models import PromptOverride
from onyx.server.query_and_chat.streaming_models import Packet
from onyx.tools.models import ToolCallFinalResult
@@ -240,11 +241,8 @@ class ChatMessageDetail(BaseModel):
    chat_session_id: UUID | None = None
    # Dict mapping citation number to db_doc_id
    citations: dict[int, int] | None = None
    sub_questions: list[SubQuestionDetail] | None = None
    files: list[FileDescriptor]
    tool_call: ToolCallFinalResult | None
    refined_answer_improvement: bool | None = None
    is_agentic: bool | None = None
    error: str | None = None

    def model_dump(self, *args: list, **kwargs: dict[str, Any]) -> dict[str, Any]:  # type: ignore
@@ -274,6 +272,8 @@ class ChatSessionDetailResponse(BaseModel):
    current_temperature_override: float | None
    deleted: bool = False

    packets: list[list[Packet]]


# This one is not used anymore
class QueryValidationResponse(BaseModel):
backend/onyx/server/query_and_chat/streaming_models.py (new file, 136 lines)
@@ -0,0 +1,136 @@
from typing import Annotated
from typing import Literal
from typing import Union

from pydantic import BaseModel
from pydantic import Field

from onyx.chat.models import CitationInfo
from onyx.context.search.models import SavedSearchDoc


class BaseObj(BaseModel):
    type: str = ""


"""Basic Message Packets"""


class MessageStart(BaseObj):
    id: str
    type: Literal["message_start"] = "message_start"
    content: str


class MessageDelta(BaseObj):
    content: str
    type: Literal["message_delta"] = "message_delta"


"""Control Packets"""


class OverallStop(BaseObj):
    type: Literal["stop"] = "stop"


class SectionEnd(BaseObj):
    type: Literal["section_end"] = "section_end"


"""Tool Packets"""


class SearchToolStart(BaseObj):
    type: Literal["internal_search_tool_start"] = "internal_search_tool_start"

    is_internet_search: bool = False


class SearchToolDelta(BaseObj):
    type: Literal["internal_search_tool_delta"] = "internal_search_tool_delta"

    queries: list[str] | None = None
    documents: list[SavedSearchDoc] | None = None


class ImageGenerationToolStart(BaseObj):
    type: Literal["image_generation_tool_start"] = "image_generation_tool_start"


class ImageGenerationToolDelta(BaseObj):
    type: Literal["image_generation_tool_delta"] = "image_generation_tool_delta"

    images: list[dict[str, str]] | None = None


class CustomToolStart(BaseObj):
    type: Literal["custom_tool_start"] = "custom_tool_start"

    tool_name: str


class CustomToolDelta(BaseObj):
    type: Literal["custom_tool_delta"] = "custom_tool_delta"

    tool_name: str
    response_type: str
    # For non-file responses
    data: dict | list | str | int | float | bool | None = None
    # For file-based responses like image/csv
    file_ids: list[str] | None = None


"""Reasoning Packets"""


class ReasoningStart(BaseObj):
    type: Literal["reasoning_start"] = "reasoning_start"


class ReasoningDelta(BaseObj):
    type: Literal["reasoning_delta"] = "reasoning_delta"

    reasoning: str


"""Citation Packets"""


class CitationStart(BaseObj):
    type: Literal["citation_start"] = "citation_start"


class CitationDelta(BaseObj):
    type: Literal["citation_delta"] = "citation_delta"

    citations: list[CitationInfo] | None = None


"""Packet"""

# Discriminated union of all possible packet object types
PacketObj = Annotated[
    Union[
        MessageStart,
        MessageDelta,
        OverallStop,
        SectionEnd,
        SearchToolStart,
        SearchToolDelta,
        ImageGenerationToolStart,
        ImageGenerationToolDelta,
        CustomToolStart,
        CustomToolDelta,
        ReasoningStart,
        ReasoningDelta,
        CitationStart,
        CitationDelta,
    ],
    Field(discriminator="type"),
]


class Packet(BaseModel):
    ind: int
    obj: PacketObj
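Because PacketObj is a pydantic discriminated union keyed on the `type` field, a Packet can be round-tripped from plain JSON and the correct concrete class is picked automatically. A small sketch of that behavior, assuming pydantic v2 (which the surrounding models already use); it is not part of the diff:

# Illustrative only: discriminated-union parsing of a streamed packet.
from onyx.server.query_and_chat.streaming_models import MessageStart, Packet

raw = {"ind": 0, "obj": {"type": "message_start", "id": "42", "content": "Hello"}}
packet = Packet.model_validate(raw)       # "type" selects the concrete obj class
assert isinstance(packet.obj, MessageStart)
print(packet.model_dump())                # serializes back to the same shape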
backend/onyx/server/query_and_chat/streaming_utils.py (new file, 313 lines)
@@ -0,0 +1,313 @@
|
||||
from onyx.chat.models import CitationInfo
|
||||
from onyx.configs.constants import MessageType
|
||||
from onyx.file_store.models import ChatFileType
|
||||
from onyx.server.query_and_chat.models import ChatMessageDetail
|
||||
from onyx.server.query_and_chat.streaming_models import CitationDelta
|
||||
from onyx.server.query_and_chat.streaming_models import CitationStart
|
||||
from onyx.server.query_and_chat.streaming_models import CustomToolDelta
|
||||
from onyx.server.query_and_chat.streaming_models import CustomToolStart
|
||||
from onyx.server.query_and_chat.streaming_models import ImageGenerationToolDelta
|
||||
from onyx.server.query_and_chat.streaming_models import ImageGenerationToolStart
|
||||
from onyx.server.query_and_chat.streaming_models import MessageDelta
|
||||
from onyx.server.query_and_chat.streaming_models import MessageStart
|
||||
from onyx.server.query_and_chat.streaming_models import OverallStop
|
||||
from onyx.server.query_and_chat.streaming_models import Packet
|
||||
from onyx.server.query_and_chat.streaming_models import SearchToolDelta
|
||||
from onyx.server.query_and_chat.streaming_models import SearchToolStart
|
||||
from onyx.server.query_and_chat.streaming_models import SectionEnd
|
||||
|
||||
|
||||
def create_simplified_packets_for_message(
|
||||
message: ChatMessageDetail, packet_index_start: int = 0
|
||||
) -> list[Packet]:
|
||||
"""
|
||||
Convert a ChatMessageDetail into simplified streaming packets that represent
|
||||
what would have been sent during the original streaming response.
|
||||
|
||||
Args:
|
||||
message: The chat message to convert to packets
|
||||
packet_index_start: Starting index for packet numbering
|
||||
|
||||
Returns:
|
||||
List of simplified packets representing the message
|
||||
"""
|
||||
packets: list[Packet] = []
|
||||
current_index = packet_index_start
|
||||
|
||||
# Only create packets for assistant messages
|
||||
if message.message_type != MessageType.ASSISTANT:
|
||||
return packets
|
||||
|
||||
# Handle all tool-related packets in one unified block
|
||||
# Check for tool calls first, then fall back to inferred tools from context/files
|
||||
if message.tool_call:
|
||||
tool_call = message.tool_call
|
||||
|
||||
# Handle different tool types based on tool name
|
||||
if tool_call.tool_name == "run_search":
|
||||
# Handle search tools - create search tool packets
|
||||
# Use context docs if available, otherwise use tool result
|
||||
if message.context_docs and message.context_docs.top_documents:
|
||||
search_docs = message.context_docs.top_documents
|
||||
|
||||
# Start search tool
|
||||
packets.append(
|
||||
Packet(
|
||||
ind=current_index,
|
||||
obj=SearchToolStart(),
|
||||
)
|
||||
)
|
||||
|
||||
# Include queries and documents in the delta
|
||||
if message.rephrased_query and message.rephrased_query.strip():
|
||||
queries = [str(message.rephrased_query)]
|
||||
else:
|
||||
queries = [message.message]
|
||||
|
||||
packets.append(
|
||||
Packet(
|
||||
ind=current_index,
|
||||
obj=SearchToolDelta(
|
||||
queries=queries,
|
||||
documents=search_docs,
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
# End search tool
|
||||
packets.append(
|
||||
Packet(
|
||||
ind=current_index,
|
||||
obj=SectionEnd(),
|
||||
)
|
||||
)
|
||||
current_index += 1

        elif tool_call.tool_name == "run_image_generation":
            # Handle image generation tools - create image generation packets
            # Use files if available, otherwise create from tool result
            if message.files:
                image_files = [
                    f for f in message.files if f["type"] == ChatFileType.IMAGE
                ]
                if image_files:
                    # Start image tool
                    image_tool_start = ImageGenerationToolStart()
                    packets.append(Packet(ind=current_index, obj=image_tool_start))

                    # Send images via tool delta
                    images = []
                    for file in image_files:
                        images.append(
                            {
                                "id": file["id"],
                                "url": "",  # URL will be constructed by frontend
                                "prompt": file.get("name") or "Generated image",
                            }
                        )

                    image_tool_delta = ImageGenerationToolDelta(images=images)
                    packets.append(Packet(ind=current_index, obj=image_tool_delta))

                    # End image tool
                    image_tool_end = SectionEnd()
                    packets.append(Packet(ind=current_index, obj=image_tool_end))
                    current_index += 1

        elif tool_call.tool_name == "run_internet_search":
            # Internet search tools return document data, but should be treated as custom tools
            # for packet purposes since they have a different data structure
            # Start custom tool
            custom_tool_start = CustomToolStart(tool_name=tool_call.tool_name)
            packets.append(Packet(ind=current_index, obj=custom_tool_start))

            # Send internet search results as custom tool data
            custom_tool_delta = CustomToolDelta(
                tool_name=tool_call.tool_name,
                response_type="json",
                data=tool_call.tool_result,
                file_ids=None,
            )
            packets.append(Packet(ind=current_index, obj=custom_tool_delta))

            # End custom tool
            custom_tool_end = SectionEnd()
            packets.append(Packet(ind=current_index, obj=custom_tool_end))
            current_index += 1

        else:
            # Handle custom tools and any other tool types
            # Start custom tool
            custom_tool_start = CustomToolStart(tool_name=tool_call.tool_name)
            packets.append(Packet(ind=current_index, obj=custom_tool_start))

            # Determine response type and data from tool result
            response_type = "json"  # default
            data = None
            file_ids = None

            if tool_call.tool_result:
                # Check if it's a custom tool call summary (most common case)
                if isinstance(tool_call.tool_result, dict):
                    # Try to extract response_type if it's structured like CustomToolCallSummary
                    if "response_type" in tool_call.tool_result:
                        response_type = tool_call.tool_result["response_type"]
                        tool_result = tool_call.tool_result.get("tool_result")

                        # Handle file-based responses
                        if isinstance(tool_result, dict) and "file_ids" in tool_result:
                            file_ids = tool_result["file_ids"]
                        else:
                            data = tool_result
                    else:
                        # Plain dict response
                        data = tool_call.tool_result
                else:
                    # Non-dict response (string, number, etc.)
                    data = tool_call.tool_result

            # Send tool response via tool delta
            custom_tool_delta = CustomToolDelta(
                tool_name=tool_call.tool_name,
                response_type=response_type,
                data=data,
                file_ids=file_ids,
            )
            packets.append(Packet(ind=current_index, obj=custom_tool_delta))

            # End custom tool
            custom_tool_end = SectionEnd()
            packets.append(Packet(ind=current_index, obj=custom_tool_end))
            current_index += 1

    # Fallback handling for when there's no explicit tool_call but we have tool-related data
    elif message.context_docs and message.context_docs.top_documents:
        # Handle search results without explicit tool call (legacy support)
        search_docs = message.context_docs.top_documents

        # Start search tool
        packets.append(
            Packet(
                ind=current_index,
                obj=SearchToolStart(),
            )
        )

        # Include queries and documents in the delta
        if message.rephrased_query and message.rephrased_query.strip():
            queries = [str(message.rephrased_query)]
        else:
            queries = [message.message]
        packets.append(
            Packet(
                ind=current_index,
                obj=SearchToolDelta(
                    queries=queries,
                    documents=search_docs,
                ),
            )
        )

        # End search tool
        packets.append(
            Packet(
                ind=current_index,
                obj=SectionEnd(),
            )
        )
        current_index += 1

    # Handle image files without explicit tool call (legacy support)
    if message.files:
        image_files = [f for f in message.files if f["type"] == ChatFileType.IMAGE]
        if image_files and not message.tool_call:
            # Only create image packets if there's no tool call that might have handled them
            # Start image tool
            image_tool_start = ImageGenerationToolStart()
            packets.append(Packet(ind=current_index, obj=image_tool_start))

            # Send images via tool delta
            images = []
            for file in image_files:
                images.append(
                    {
                        "id": file["id"],
                        "url": "",  # URL will be constructed by frontend
                        "prompt": file.get("name") or "Generated image",
                    }
                )

            image_tool_delta = ImageGenerationToolDelta(images=images)
            packets.append(Packet(ind=current_index, obj=image_tool_delta))

            # End image tool
            image_tool_end = SectionEnd()
            packets.append(Packet(ind=current_index, obj=image_tool_end))
            current_index += 1

    # Create Citation packets if there are citations
    if message.citations:
        # Start citation flow
        citation_start = CitationStart()
        packets.append(Packet(ind=current_index, obj=citation_start))

        # Create citation data
        # Convert dict[int, int] to list[CitationInfo] format
        citations_list: list[CitationInfo] = []
        for citation_num, doc_id in message.citations.items():
            citation = CitationInfo(citation_num=citation_num, document_id=str(doc_id))
            citations_list.append(citation)

        # Send citations via citation delta
        citation_delta = CitationDelta(citations=citations_list)
        packets.append(Packet(ind=current_index, obj=citation_delta))

        # End citation flow
        citation_end = SectionEnd()
        packets.append(Packet(ind=current_index, obj=citation_end))
        current_index += 1

    # Create MESSAGE_START packet
    message_start = MessageStart(id=str(message.message_id), content="")
    packets.append(Packet(ind=current_index, obj=message_start))

    # Create MESSAGE_DELTA packet with the full message content
    # In a real streaming scenario, this would be broken into multiple deltas
    if message.message:
        message_delta = MessageDelta(content=message.message)
        packets.append(Packet(ind=current_index, obj=message_delta))

    # Create MESSAGE_END packet
    message_end = SectionEnd()
    packets.append(Packet(ind=current_index, obj=message_end))
    current_index += 1

    # Create STOP packet
    stop = OverallStop()
    packets.append(Packet(ind=current_index, obj=stop))

    return packets


def create_simplified_packets_for_session(
    messages: list[ChatMessageDetail],
) -> list[list[Packet]]:
    """
    Convert a list of chat messages into simplified streaming packets organized by message.
    Each inner list contains packets for a single assistant message.

    Args:
        messages: List of chat messages from the session

    Returns:
        List of lists of simplified packets, where each inner list represents one assistant message
    """
    packets_by_message: list[list[Packet]] = []

    for message in messages:
        if message.message_type == MessageType.ASSISTANT:
            message_packets = create_simplified_packets_for_message(message, 0)
            if message_packets:  # Only add if there are actual packets
                packets_by_message.append(message_packets)

    return packets_by_message
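
A minimal sketch of how these helpers might be driven for debugging (illustrative only; the replay function below is not part of the change, and it assumes `messages` has already been loaded as a list[ChatMessageDetail] for one session):

# Illustrative sketch only; not part of the diff above.
def debug_replay(messages: list[ChatMessageDetail]) -> None:
    for message_packets in create_simplified_packets_for_session(messages):
        for packet in message_packets:
            # Each packet carries a section index plus a typed payload such as
            # SearchToolStart, MessageDelta, SectionEnd, or OverallStop.
            print(packet.ind, type(packet.obj).__name__)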

@@ -373,16 +373,16 @@ celery_worker_heavy:
  tolerations: []
  affinity: {}

celery_worker_docprocessing:
celery_worker_indexing:
  replicaCount: 1
  autoscaling:
    enabled: false
  podAnnotations: {}
  podLabels:
    scope: onyx-backend-celery
    app: celery-worker-docprocessing
    app: celery-worker-indexing
  deploymentLabels:
    app: celery-worker-docprocessing
    app: celery-worker-indexing
  podSecurityContext:
    {}
  securityContext:
@@ -390,11 +390,11 @@ celery_worker_docprocessing:
    runAsUser: 0
  resources:
    requests:
      cpu: 500m
      memory: 4Gi
      cpu: 2000m
      memory: 8Gi
    limits:
      cpu: 1000m
      memory: 12Gi
      cpu: 2000m
      memory: 16Gi
  volumes: [] # Additional volumes on the output Deployment definition.
  volumeMounts: [] # Additional volumeMounts on the output Deployment definition.
  nodeSelector: {}
@@ -536,33 +536,6 @@ slackbot:
    limits:
      cpu: "1000m"
      memory: "2000Mi"
celery_worker_docfetching:
  replicaCount: 1
  autoscaling:
    enabled: false
  podAnnotations: {}
  podLabels:
    scope: onyx-backend-celery
    app: celery-worker-docfetching
  deploymentLabels:
    app: celery-worker-docfetching
  podSecurityContext:
    {}
  securityContext:
    privileged: true
    runAsUser: 0
  resources:
    requests:
      cpu: 500m
      memory: 8Gi
    limits:
      cpu: 2000m
      memory: 16Gi
  volumes: [] # Additional volumes on the output Deployment definition.
  volumeMounts: [] # Additional volumeMounts on the output Deployment definition.
  nodeSelector: {}
  tolerations: []
  affinity: {}

######################################################################
#

web/package-lock.json (generated; 31 lines changed)
@@ -87,7 +87,8 @@
"typescript": "5.0.3",
"uuid": "^9.0.1",
"vaul": "^1.1.1",
"yup": "^1.4.0"
"yup": "^1.4.0",
"zustand": "^5.0.7"
},
"devDependencies": {
"@chromatic-com/playwright": "^0.10.2",
@@ -18619,6 +18620,34 @@
"type-fest": "^2.19.0"
}
},
"node_modules/zustand": {
"version": "5.0.7",
"resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.7.tgz",
"integrity": "sha512-Ot6uqHDW/O2VdYsKLLU8GQu8sCOM1LcoE8RwvLv9uuRT9s6SOHCKs0ZEOhxg+I1Ld+A1Q5lwx+UlKXXUoCZITg==",
"engines": {
"node": ">=12.20.0"
},
"peerDependencies": {
"@types/react": ">=18.0.0",
"immer": ">=9.0.6",
"react": ">=18.0.0",
"use-sync-external-store": ">=1.2.0"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"immer": {
"optional": true
},
"react": {
"optional": true
},
"use-sync-external-store": {
"optional": true
}
}
},
"node_modules/zwitch": {
"version": "2.0.4",
"license": "MIT",

@@ -93,7 +93,8 @@
"typescript": "5.0.3",
"uuid": "^9.0.1",
"vaul": "^1.1.1",
"yup": "^1.4.0"
"yup": "^1.4.0",
"zustand": "^5.0.7"
},
"devDependencies": {
"@chromatic-com/playwright": "^0.10.2",
|
||||
|
||||
@@ -53,8 +53,8 @@ import {
|
||||
SwapIcon,
|
||||
TrashIcon,
|
||||
} from "@/components/icons/icons";
|
||||
import { buildImgUrl } from "@/app/chat/files/images/utils";
|
||||
import { useAssistants } from "@/components/context/AssistantsContext";
|
||||
import { buildImgUrl } from "@/app/chat/components/files/images/utils";
|
||||
import { useAssistantsContext } from "@/components/context/AssistantsContext";
|
||||
import { debounce } from "lodash";
|
||||
import { LLMProviderView } from "../configuration/llm/interfaces";
|
||||
import StarterMessagesList from "./StarterMessageList";
|
||||
@@ -69,7 +69,7 @@ import {
|
||||
SearchMultiSelectDropdown,
|
||||
Option as DropdownOption,
|
||||
} from "@/components/Dropdown";
|
||||
import { SourceChip } from "@/app/chat/input/ChatInputBar";
|
||||
import { SourceChip } from "@/app/chat/components/input/ChatInputBar";
|
||||
import {
|
||||
TagIcon,
|
||||
UserIcon,
|
||||
@@ -86,7 +86,7 @@ import { ConfirmEntityModal } from "@/components/modals/ConfirmEntityModal";
|
||||
import { FilePickerModal } from "@/app/chat/my-documents/components/FilePicker";
|
||||
import { useDocumentsContext } from "@/app/chat/my-documents/DocumentsContext";
|
||||
|
||||
import { SEARCH_TOOL_ID } from "@/app/chat/tools/constants";
|
||||
import { SEARCH_TOOL_ID } from "@/app/chat/components/tools/constants";
|
||||
import TextView from "@/components/chat/TextView";
|
||||
import { MinimalOnyxDocument } from "@/lib/search/interfaces";
|
||||
import { MAX_CHARACTERS_PERSONA_DESCRIPTION } from "@/lib/constants";
|
||||
@@ -133,7 +133,8 @@ export function AssistantEditor({
|
||||
tools: ToolSnapshot[];
|
||||
shouldAddAssistantToUserPreferences?: boolean;
|
||||
}) {
|
||||
const { refreshAssistants, isImageGenerationAvailable } = useAssistants();
|
||||
const { refreshAssistants, isImageGenerationAvailable } =
|
||||
useAssistantsContext();
|
||||
|
||||
const router = useRouter();
|
||||
const searchParams = useSearchParams();
|
||||
|
||||
@@ -18,7 +18,7 @@ import CardSection from "@/components/admin/CardSection";
|
||||
import { useRouter } from "next/navigation";
|
||||
import { MinimalPersonaSnapshot } from "@/app/admin/assistants/interfaces";
|
||||
import { StandardAnswerCategoryResponse } from "@/components/standardAnswers/getStandardAnswerCategoriesIfEE";
|
||||
import { SEARCH_TOOL_ID } from "@/app/chat/tools/constants";
|
||||
import { SEARCH_TOOL_ID } from "@/app/chat/components/tools/constants";
|
||||
import { SlackChannelConfigFormFields } from "./SlackChannelConfigFormFields";
|
||||
|
||||
export const SlackChannelConfigCreationForm = ({
|
||||
|
||||
@@ -10,10 +10,9 @@ import FixedLogo from "../../components/logo/FixedLogo";
|
||||
import { SettingsContext } from "@/components/settings/SettingsProvider";
|
||||
import { useChatContext } from "@/components/context/ChatContext";
|
||||
import { HistorySidebar } from "@/components/sidebar/HistorySidebar";
|
||||
import { useAssistants } from "@/components/context/AssistantsContext";
|
||||
import AssistantModal from "./mine/AssistantModal";
|
||||
import { useSidebarShortcut } from "@/lib/browserUtilities";
|
||||
import { UserSettingsModal } from "../chat/modal/UserSettingsModal";
|
||||
import { UserSettingsModal } from "@/app/chat/components/modal/UserSettingsModal";
|
||||
import { usePopup } from "@/components/admin/connectors/Popup";
|
||||
import { useUser } from "@/components/user/UserProvider";
|
||||
|
||||
@@ -43,7 +42,6 @@ export default function SidebarWrapper<T extends object>({
|
||||
|
||||
const sidebarElementRef = useRef<HTMLDivElement>(null);
|
||||
const { folders, openedFolders, chatSessions } = useChatContext();
|
||||
const { assistants } = useAssistants();
|
||||
const explicitlyUntoggle = () => {
|
||||
setShowDocSidebar(false);
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { FiImage, FiSearch } from "react-icons/fi";
|
||||
import { Persona } from "../admin/assistants/interfaces";
|
||||
import { SEARCH_TOOL_ID } from "../chat/tools/constants";
|
||||
import { SEARCH_TOOL_ID } from "../chat/components/tools/constants";
|
||||
|
||||
export function AssistantTools({
|
||||
assistant,
|
||||
|
||||
@@ -17,7 +17,7 @@ import {
|
||||
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
|
||||
import { MinimalPersonaSnapshot } from "@/app/admin/assistants/interfaces";
|
||||
import { useUser } from "@/components/user/UserProvider";
|
||||
import { useAssistants } from "@/components/context/AssistantsContext";
|
||||
import { useAssistantsContext } from "@/components/context/AssistantsContext";
|
||||
import { checkUserOwnsAssistant } from "@/lib/assistants/utils";
|
||||
import {
|
||||
Tooltip,
|
||||
@@ -60,7 +60,7 @@ const AssistantCard: React.FC<{
|
||||
}> = ({ persona, pinned, closeModal }) => {
|
||||
const { user, toggleAssistantPinnedStatus } = useUser();
|
||||
const router = useRouter();
|
||||
const { refreshAssistants, pinnedAssistants } = useAssistants();
|
||||
const { refreshAssistants, pinnedAssistants } = useAssistantsContext();
|
||||
const { popup, setPopup } = usePopup();
|
||||
|
||||
const isOwnedByUser = checkUserOwnsAssistant(user, persona);
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
import React, { useMemo, useState } from "react";
|
||||
import { useRouter } from "next/navigation";
|
||||
import AssistantCard from "./AssistantCard";
|
||||
import { useAssistants } from "@/components/context/AssistantsContext";
|
||||
import { useAssistantsContext } from "@/components/context/AssistantsContext";
|
||||
import { useUser } from "@/components/user/UserProvider";
|
||||
import { FilterIcon, XIcon } from "lucide-react";
|
||||
import { checkUserOwnsAssistant } from "@/lib/assistants/checkOwnership";
|
||||
@@ -64,7 +64,7 @@ interface AssistantModalProps {
|
||||
}
|
||||
|
||||
export function AssistantModal({ hideModal }: AssistantModalProps) {
|
||||
const { assistants, pinnedAssistants } = useAssistants();
|
||||
const { assistants, pinnedAssistants } = useAssistantsContext();
|
||||
const { assistantFilters, toggleAssistantFilter } = useAssistantFilter();
|
||||
const router = useRouter();
|
||||
const { user } = useUser();
|
||||
|
||||
@@ -15,7 +15,7 @@ import { usePopup } from "@/components/admin/connectors/Popup";
|
||||
import { Bubble } from "@/components/Bubble";
|
||||
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
|
||||
import { Spinner } from "@/components/Spinner";
|
||||
import { useAssistants } from "@/components/context/AssistantsContext";
|
||||
import { useAssistantsContext } from "@/components/context/AssistantsContext";
|
||||
|
||||
interface AssistantSharingModalProps {
|
||||
assistant: Persona;
|
||||
@@ -32,7 +32,7 @@ export function AssistantSharingModal({
|
||||
show,
|
||||
onClose,
|
||||
}: AssistantSharingModalProps) {
|
||||
const { refreshAssistants } = useAssistants();
|
||||
const { refreshAssistants } = useAssistantsContext();
|
||||
const { popup, setPopup } = usePopup();
|
||||
const [isUpdating, setIsUpdating] = useState(false);
|
||||
const [selectedUsers, setSelectedUsers] = useState<MinimalUserSnapshot[]>([]);
|
||||
|
||||
@@ -14,7 +14,7 @@ import { usePopup } from "@/components/admin/connectors/Popup";
|
||||
import { Bubble } from "@/components/Bubble";
|
||||
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
|
||||
import { Spinner } from "@/components/Spinner";
|
||||
import { useAssistants } from "@/components/context/AssistantsContext";
|
||||
import { useAssistantsContext } from "@/components/context/AssistantsContext";
|
||||
|
||||
interface AssistantSharingPopoverProps {
|
||||
assistant: Persona;
|
||||
@@ -29,7 +29,7 @@ export function AssistantSharingPopover({
|
||||
allUsers,
|
||||
onClose,
|
||||
}: AssistantSharingPopoverProps) {
|
||||
const { refreshAssistants } = useAssistants();
|
||||
const { refreshAssistants } = useAssistantsContext();
|
||||
const { popup, setPopup } = usePopup();
|
||||
const [isUpdating, setIsUpdating] = useState(false);
|
||||
const [selectedUsers, setSelectedUsers] = useState<MinimalUserSnapshot[]>([]);
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,148 +0,0 @@
|
||||
import { Persona } from "@/app/admin/assistants/interfaces";
|
||||
import { FiCheck, FiChevronDown, FiPlusSquare, FiEdit2 } from "react-icons/fi";
|
||||
import { CustomDropdown, DefaultDropdownElement } from "@/components/Dropdown";
|
||||
import { useRouter } from "next/navigation";
|
||||
import Link from "next/link";
|
||||
import { checkUserIdOwnsAssistant } from "@/lib/assistants/checkOwnership";
|
||||
|
||||
function PersonaItem({
|
||||
id,
|
||||
name,
|
||||
onSelect,
|
||||
isSelected,
|
||||
isOwner,
|
||||
}: {
|
||||
id: number;
|
||||
name: string;
|
||||
onSelect: (personaId: number) => void;
|
||||
isSelected: boolean;
|
||||
isOwner: boolean;
|
||||
}) {
|
||||
return (
|
||||
<div className="flex w-full">
|
||||
<div
|
||||
key={id}
|
||||
className={`
|
||||
flex
|
||||
flex-grow
|
||||
px-3
|
||||
text-sm
|
||||
py-2
|
||||
my-0.5
|
||||
rounded
|
||||
mx-1
|
||||
select-none
|
||||
cursor-pointer
|
||||
text-text-darker
|
||||
bg-background
|
||||
hover:bg-accent-background
|
||||
${
|
||||
isSelected
|
||||
? "bg-accent-background-hovered text-selected-emphasis"
|
||||
: ""
|
||||
}
|
||||
`}
|
||||
onClick={() => {
|
||||
onSelect(id);
|
||||
}}
|
||||
>
|
||||
{name}
|
||||
{isSelected && (
|
||||
<div className="ml-auto mr-1 my-auto">
|
||||
<FiCheck />
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
{isOwner && (
|
||||
<Link href={`/assistants/edit/${id}`} className="mx-2 my-auto">
|
||||
<FiEdit2
|
||||
className="hover:bg-accent-background-hovered p-0.5 my-auto"
|
||||
size={20}
|
||||
/>
|
||||
</Link>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export function ChatPersonaSelector({
|
||||
personas,
|
||||
selectedPersonaId,
|
||||
onPersonaChange,
|
||||
userId,
|
||||
}: {
|
||||
personas: Persona[];
|
||||
selectedPersonaId: number | null;
|
||||
onPersonaChange: (persona: Persona | null) => void;
|
||||
userId: string | undefined;
|
||||
}) {
|
||||
const router = useRouter();
|
||||
|
||||
const currentlySelectedPersona = personas.find(
|
||||
(persona) => persona.id === selectedPersonaId
|
||||
);
|
||||
|
||||
return (
|
||||
<CustomDropdown
|
||||
dropdown={
|
||||
<div
|
||||
className={`
|
||||
border
|
||||
border-border
|
||||
bg-background
|
||||
rounded-lg
|
||||
shadow-lg
|
||||
flex
|
||||
flex-col
|
||||
w-64
|
||||
max-h-96
|
||||
overflow-y-auto
|
||||
p-1
|
||||
overscroll-contain`}
|
||||
>
|
||||
{personas.map((persona) => {
|
||||
const isSelected = persona.id === selectedPersonaId;
|
||||
const isOwner = checkUserIdOwnsAssistant(userId, persona);
|
||||
return (
|
||||
<PersonaItem
|
||||
key={persona.id}
|
||||
id={persona.id}
|
||||
name={persona.name}
|
||||
onSelect={(clickedPersonaId) => {
|
||||
const clickedPersona = personas.find(
|
||||
(persona) => persona.id === clickedPersonaId
|
||||
);
|
||||
if (clickedPersona) {
|
||||
onPersonaChange(clickedPersona);
|
||||
}
|
||||
}}
|
||||
isSelected={isSelected}
|
||||
isOwner={isOwner}
|
||||
/>
|
||||
);
|
||||
})}
|
||||
|
||||
<div className="border-t border-border pt-2">
|
||||
<DefaultDropdownElement
|
||||
name={
|
||||
<div className="flex items-center">
|
||||
<FiPlusSquare className="mr-2" />
|
||||
New Assistant
|
||||
</div>
|
||||
}
|
||||
onSelect={() => router.push("/assistants/new")}
|
||||
isSelected={false}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
>
|
||||
<div className="select-none text-xl text-strong font-bold flex px-2 rounded cursor-pointer hover:bg-accent-background">
|
||||
<div className="mt-auto">
|
||||
{currentlySelectedPersona?.name || "Default"}
|
||||
</div>
|
||||
<FiChevronDown className="my-auto ml-1" />
|
||||
</div>
|
||||
</CustomDropdown>
|
||||
);
|
||||
}
|
||||
@@ -1,6 +1,6 @@
"use client";
import { useChatContext } from "@/components/context/ChatContext";
import { ChatPage } from "./ChatPage";
import { ChatPage } from "./components/ChatPage";
import FunctionalWrapper from "../../components/chat/FunctionalWrapper";

export default function WrappedChat({

@@ -1,5 +1,5 @@
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
import { MinimalPersonaSnapshot } from "../admin/assistants/interfaces";
import { MinimalPersonaSnapshot } from "../../admin/assistants/interfaces";

export function ChatIntro({
selectedPersona,
|
||||
web/src/app/chat/components/ChatPage.tsx: 1359 lines (diff suppressed because it is too large)
@@ -8,9 +8,9 @@ import { MetadataBadge } from "@/components/MetadataBadge";
import { WebResultIcon } from "@/components/WebResultIcon";
import { Dispatch, SetStateAction } from "react";
import { openDocument } from "@/lib/search/utils";
import { ValidSources } from "@/lib/types";

interface DocumentDisplayProps {
agenticMessage: boolean;
closeSidebar: () => void;
document: OnyxDocument;
modal?: boolean;
@@ -60,7 +60,6 @@ export function DocumentMetadataBlock({
}

export function ChatDocumentDisplay({
agenticMessage,
closeSidebar,
document,
modal,
@@ -93,7 +92,8 @@ export function ChatDocumentDisplay({
className="cursor-pointer text-left flex flex-col"
>
<div className="line-clamp-1 mb-1 flex h-6 items-center gap-2 text-xs">
{document.is_internet || document.source_type === "web" ? (
{document.is_internet ||
document.source_type === ValidSources.Web ? (
<WebResultIcon url={document.link} />
) : (
<SourceIcon sourceType={document.source_type} iconSize={18} />
@@ -115,12 +115,10 @@ export function ChatDocumentDisplay({
hasMetadata ? "mt-2" : ""
}`}
>
{!agenticMessage
? buildDocumentSummaryDisplay(
document.match_highlights,
document.blurb
)
: document.blurb}
{buildDocumentSummaryDisplay(
document.match_highlights,
document.blurb
)}
</div>
<div className="absolute top-2 right-2">
{!isInternet && !hideSelection && (
|
||||
web/src/app/chat/components/documentSidebar/DocumentResults.tsx (new file, 226 lines)
@@ -0,0 +1,226 @@
|
||||
import { MinimalOnyxDocument, OnyxDocument } from "@/lib/search/interfaces";
|
||||
import { ChatDocumentDisplay } from "./ChatDocumentDisplay";
|
||||
import { removeDuplicateDocs } from "@/lib/documentUtils";
|
||||
import { ChatFileType, Message } from "@/app/chat/interfaces";
|
||||
import { Dispatch, ForwardedRef, forwardRef, SetStateAction } from "react";
|
||||
import { XIcon } from "@/components/icons/icons";
|
||||
import { FileSourceCardInResults } from "@/app/chat/message/SourcesDisplay";
|
||||
import { useDocumentsContext } from "@/app/chat/my-documents/DocumentsContext";
|
||||
import { CitationMap } from "@/app/chat/interfaces";
|
||||
|
||||
interface DocumentResultsProps {
|
||||
humanMessage: Message | null;
|
||||
closeSidebar: () => void;
|
||||
selectedMessage: Message | null;
|
||||
selectedDocuments: OnyxDocument[] | null;
|
||||
toggleDocumentSelection: (document: OnyxDocument) => void;
|
||||
clearSelectedDocuments: () => void;
|
||||
selectedDocumentTokens: number;
|
||||
maxTokens: number;
|
||||
initialWidth: number;
|
||||
isOpen: boolean;
|
||||
isSharedChat?: boolean;
|
||||
modal: boolean;
|
||||
setPresentingDocument: Dispatch<SetStateAction<MinimalOnyxDocument | null>>;
|
||||
}
|
||||
|
||||
export const DocumentResults = forwardRef<HTMLDivElement, DocumentResultsProps>(
|
||||
(
|
||||
{
|
||||
humanMessage,
|
||||
closeSidebar,
|
||||
modal,
|
||||
selectedMessage,
|
||||
selectedDocuments,
|
||||
toggleDocumentSelection,
|
||||
clearSelectedDocuments,
|
||||
selectedDocumentTokens,
|
||||
maxTokens,
|
||||
initialWidth,
|
||||
isSharedChat,
|
||||
isOpen,
|
||||
setPresentingDocument,
|
||||
},
|
||||
ref: ForwardedRef<HTMLDivElement>
|
||||
) => {
|
||||
const { files: allUserFiles } = useDocumentsContext();
|
||||
|
||||
const humanFileDescriptors = humanMessage?.files.filter(
|
||||
(file) => file.type == ChatFileType.USER_KNOWLEDGE
|
||||
);
|
||||
const userFiles = allUserFiles?.filter((file) =>
|
||||
humanFileDescriptors?.some((descriptor) => descriptor.id === file.file_id)
|
||||
);
|
||||
const selectedDocumentIds =
|
||||
selectedDocuments?.map((document) => document.document_id) || [];
|
||||
|
||||
const currentDocuments = selectedMessage?.documents || null;
|
||||
const dedupedDocuments = removeDuplicateDocs(currentDocuments || []);
|
||||
|
||||
const tokenLimitReached = selectedDocumentTokens > maxTokens - 75;
|
||||
|
||||
// Separate cited documents from other documents
|
||||
const citedDocumentIds = new Set<string>();
|
||||
const citations = selectedMessage?.citations || null;
|
||||
if (citations) {
|
||||
Object.keys(citations).forEach((docId) => {
|
||||
citedDocumentIds.add(docId);
|
||||
});
|
||||
}
|
||||
|
||||
const citedDocuments = dedupedDocuments.filter(
|
||||
(doc) =>
|
||||
doc.document_id !== null &&
|
||||
doc.document_id !== undefined &&
|
||||
citedDocumentIds.has(doc.document_id)
|
||||
);
|
||||
const otherDocuments = dedupedDocuments.filter(
|
||||
(doc) =>
|
||||
doc.document_id === null ||
|
||||
doc.document_id === undefined ||
|
||||
!citedDocumentIds.has(doc.document_id)
|
||||
);
|
||||
|
||||
return (
|
||||
<>
|
||||
<div
|
||||
id="onyx-chat-sidebar"
|
||||
className={`relative -mb-8 bg-background max-w-full ${
|
||||
!modal
|
||||
? "border-l border-t h-[105vh] border-sidebar-border dark:border-neutral-700"
|
||||
: ""
|
||||
}`}
|
||||
onClick={(e) => {
|
||||
if (e.target === e.currentTarget) {
|
||||
closeSidebar();
|
||||
}
|
||||
}}
|
||||
>
|
||||
<div
|
||||
className={`ml-auto h-full relative sidebar transition-transform ease-in-out duration-300
|
||||
${isOpen ? " translate-x-0" : " translate-x-[10%]"}`}
|
||||
style={{
|
||||
width: modal ? undefined : initialWidth,
|
||||
}}
|
||||
>
|
||||
<div className="flex flex-col h-full">
|
||||
<div className="overflow-y-auto h-fit mb-8 pb-8 sm:mx-0 flex-grow gap-y-0 default-scrollbar dark-scrollbar flex flex-col">
|
||||
{userFiles && userFiles.length > 0 ? (
|
||||
<div className=" gap-y-2 flex flex-col pt-2 mx-3">
|
||||
{userFiles?.map((file, index) => (
|
||||
<FileSourceCardInResults
|
||||
key={index}
|
||||
relevantDocument={dedupedDocuments.find(
|
||||
(doc) =>
|
||||
doc.document_id ===
|
||||
`FILE_CONNECTOR__${file.file_id}`
|
||||
)}
|
||||
document={file}
|
||||
setPresentingDocument={() =>
|
||||
setPresentingDocument({
|
||||
document_id: file.document_id,
|
||||
semantic_identifier: file.file_id || null,
|
||||
})
|
||||
}
|
||||
/>
|
||||
))}
|
||||
</div>
|
||||
) : dedupedDocuments.length > 0 ? (
|
||||
<>
|
||||
{/* Cited Documents Section */}
|
||||
{citedDocuments.length > 0 && (
|
||||
<div className="mt-4">
|
||||
<div className="px-4 pb-3 pt-2 flex justify-between border-b border-border">
|
||||
<h3 className="text-base font-semibold text-text-700">
|
||||
Cited Sources
|
||||
</h3>
|
||||
|
||||
<button
|
||||
aria-label="Close sidebar"
|
||||
title="Close"
|
||||
className="my-auto p-1 rounded transition-colors hover:bg-neutral-200 dark:hover:bg-neutral-700"
|
||||
onClick={closeSidebar}
|
||||
>
|
||||
<XIcon size={16} />
|
||||
</button>
|
||||
</div>
|
||||
{citedDocuments.map((document, ind) => (
|
||||
<div
|
||||
key={document.document_id}
|
||||
className={`desktop:px-2 w-full`}
|
||||
>
|
||||
<ChatDocumentDisplay
|
||||
setPresentingDocument={setPresentingDocument}
|
||||
closeSidebar={closeSidebar}
|
||||
modal={modal}
|
||||
document={document}
|
||||
isSelected={selectedDocumentIds.includes(
|
||||
document.document_id
|
||||
)}
|
||||
handleSelect={(documentId) => {
|
||||
toggleDocumentSelection(
|
||||
dedupedDocuments.find(
|
||||
(doc) => doc.document_id === documentId
|
||||
)!
|
||||
);
|
||||
}}
|
||||
hideSelection={isSharedChat}
|
||||
tokenLimitReached={tokenLimitReached}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Other Documents Section */}
|
||||
{otherDocuments.length > 0 && (
|
||||
<div className="mt-4">
|
||||
<>
|
||||
<div className="px-4 pb-3 pt-2 border-b border-border">
|
||||
<h3 className="text-base font-semibold text-text-700">
|
||||
{citedDocuments.length > 0
|
||||
? "More"
|
||||
: "Found Sources"}
|
||||
</h3>
|
||||
</div>
|
||||
</>
|
||||
|
||||
{otherDocuments.map((document, ind) => (
|
||||
<div
|
||||
key={document.document_id}
|
||||
className={`desktop:px-2 w-full mb-2`}
|
||||
>
|
||||
<ChatDocumentDisplay
|
||||
setPresentingDocument={setPresentingDocument}
|
||||
closeSidebar={closeSidebar}
|
||||
modal={modal}
|
||||
document={document}
|
||||
isSelected={selectedDocumentIds.includes(
|
||||
document.document_id
|
||||
)}
|
||||
handleSelect={(documentId) => {
|
||||
toggleDocumentSelection(
|
||||
dedupedDocuments.find(
|
||||
(doc) => doc.document_id === documentId
|
||||
)!
|
||||
);
|
||||
}}
|
||||
hideSelection={isSharedChat}
|
||||
tokenLimitReached={tokenLimitReached}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
) : null}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
DocumentResults.displayName = "DocumentResults";
|
||||
@@ -1,5 +1,5 @@
|
||||
import { useEffect, useRef, useState } from "react";
|
||||
import { FileDescriptor } from "../interfaces";
|
||||
import { FileDescriptor } from "@/app/chat/interfaces";
|
||||
|
||||
import { FiX, FiLoader, FiFileText } from "react-icons/fi";
|
||||
import { InputBarPreviewImage } from "./images/InputBarPreviewImage";
|
||||
@@ -7,7 +7,7 @@ import React, {
|
||||
forwardRef,
|
||||
} from "react";
|
||||
import { Folder } from "./interfaces";
|
||||
import { ChatSession } from "../interfaces";
|
||||
import { ChatSession } from "@/app/chat/interfaces";
|
||||
import { FiTrash2, FiCheck, FiX } from "react-icons/fi";
|
||||
import { Caret } from "@/components/icons/icons";
|
||||
import { deleteFolder } from "./FolderManagement";
|
||||
@@ -23,7 +23,7 @@ import { useRouter } from "next/navigation";
|
||||
import { CHAT_SESSION_ID_KEY } from "@/lib/drag/constants";
|
||||
import Cookies from "js-cookie";
|
||||
import { Popover } from "@/components/popover/Popover";
|
||||
import { ChatSession } from "../interfaces";
|
||||
import { ChatSession } from "@/app/chat/interfaces";
|
||||
import { useChatContext } from "@/components/context/ChatContext";
|
||||
|
||||
const FolderItem = ({
|
||||
@@ -1,4 +1,4 @@
|
||||
import { ChatSession } from "../interfaces";
|
||||
import { ChatSession } from "@/app/chat/interfaces";
|
||||
|
||||
export interface Folder {
|
||||
folder_id?: number;
|
||||
@@ -1,14 +1,14 @@
|
||||
import React, { useContext, useEffect, useMemo, useRef, useState } from "react";
|
||||
import { FiPlusCircle, FiPlus, FiX, FiFilter } from "react-icons/fi";
|
||||
import { FiPlusCircle, FiPlus, FiFilter } from "react-icons/fi";
|
||||
import { FiLoader } from "react-icons/fi";
|
||||
import { ChatInputOption } from "./ChatInputOption";
|
||||
import { MinimalPersonaSnapshot } from "@/app/admin/assistants/interfaces";
|
||||
import LLMPopover from "./LLMPopover";
|
||||
import { InputPrompt } from "@/app/chat/interfaces";
|
||||
|
||||
import { FilterManager, getDisplayNameForModel, LlmManager } from "@/lib/hooks";
|
||||
import { FilterManager, LlmManager } from "@/lib/hooks";
|
||||
import { useChatContext } from "@/components/context/ChatContext";
|
||||
import { ChatFileType, FileDescriptor } from "../interfaces";
|
||||
import { ChatFileType, FileDescriptor } from "../../interfaces";
|
||||
import {
|
||||
DocumentIcon2,
|
||||
FileIcon,
|
||||
@@ -16,29 +16,26 @@ import {
|
||||
StopGeneratingIcon,
|
||||
} from "@/components/icons/icons";
|
||||
import { OnyxDocument, SourceMetadata } from "@/lib/search/interfaces";
|
||||
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
import { Hoverable } from "@/components/Hoverable";
|
||||
import { ChatState } from "../types";
|
||||
import { UnconfiguredLlmProviderText } from "@/components/chat/UnconfiguredLlmProviderText";
|
||||
import { useAssistants } from "@/components/context/AssistantsContext";
|
||||
import { ChatState } from "@/app/chat/interfaces";
|
||||
import { useAssistantsContext } from "@/components/context/AssistantsContext";
|
||||
import { CalendarIcon, TagIcon, XIcon, FolderIcon } from "lucide-react";
|
||||
import { FilterPopup } from "@/components/search/filtering/FilterPopup";
|
||||
import { DocumentSetSummary, Tag } from "@/lib/types";
|
||||
import { SourceIcon } from "@/components/SourceIcon";
|
||||
import { getFormattedDateRangeString } from "@/lib/dateUtils";
|
||||
import { truncateString } from "@/lib/utils";
|
||||
import { buildImgUrl } from "../files/images/utils";
|
||||
import { buildImgUrl } from "@/app/chat/components/files/images/utils";
|
||||
import { useUser } from "@/components/user/UserProvider";
|
||||
import { AgenticToggle } from "./AgenticToggle";
|
||||
import { SettingsContext } from "@/components/settings/SettingsProvider";
|
||||
import { getProviderIcon } from "@/app/admin/configuration/llm/utils";
|
||||
import { useDocumentsContext } from "../my-documents/DocumentsContext";
|
||||
import { useDocumentsContext } from "@/app/chat/my-documents/DocumentsContext";
|
||||
import { UnconfiguredLlmProviderText } from "@/components/chat/UnconfiguredLlmProviderText";
|
||||
|
||||
const MAX_INPUT_HEIGHT = 200;
|
||||
export const SourceChip2 = ({
|
||||
@@ -50,7 +47,7 @@ export const SourceChip2 = ({
|
||||
includeAnimation,
|
||||
truncateTitle = true,
|
||||
}: {
|
||||
icon: React.ReactNode;
|
||||
icon?: React.ReactNode;
|
||||
title: string;
|
||||
onRemove?: () => void;
|
||||
onClick?: () => void;
|
||||
@@ -91,9 +88,11 @@ export const SourceChip2 = ({
|
||||
${onClick ? "cursor-pointer" : ""}
|
||||
`}
|
||||
>
|
||||
<div className="w-[17px] h-4 p-[3px] flex-col justify-center items-center gap-2.5 inline-flex">
|
||||
<div className="h-2.5 relative">{icon}</div>
|
||||
</div>
|
||||
{icon && (
|
||||
<div className="w-[17px] h-4 p-[3px] flex-col justify-center items-center gap-2.5 inline-flex">
|
||||
<div className="h-2.5 relative">{icon}</div>
|
||||
</div>
|
||||
)}
|
||||
<div className="text-text-800 text-xs font-medium leading-normal">
|
||||
{truncateTitle ? truncateString(title, 50) : title}
|
||||
</div>
|
||||
@@ -181,12 +180,10 @@ interface ChatInputBarProps {
|
||||
onSubmit: () => void;
|
||||
llmManager: LlmManager;
|
||||
chatState: ChatState;
|
||||
alternativeAssistant: MinimalPersonaSnapshot | null;
|
||||
|
||||
// assistants
|
||||
selectedAssistant: MinimalPersonaSnapshot;
|
||||
setAlternativeAssistant: (
|
||||
alternativeAssistant: MinimalPersonaSnapshot | null
|
||||
) => void;
|
||||
|
||||
toggleDocumentSidebar: () => void;
|
||||
setFiles: (files: FileDescriptor[]) => void;
|
||||
handleFileUpload: (files: File[]) => void;
|
||||
@@ -216,12 +213,10 @@ export function ChatInputBar({
|
||||
|
||||
// assistants
|
||||
selectedAssistant,
|
||||
setAlternativeAssistant,
|
||||
|
||||
setFiles,
|
||||
handleFileUpload,
|
||||
textAreaRef,
|
||||
alternativeAssistant,
|
||||
availableSources,
|
||||
availableDocumentSets,
|
||||
availableTags,
|
||||
@@ -276,7 +271,7 @@ export function ChatInputBar({
|
||||
}
|
||||
};
|
||||
|
||||
const { finalAssistants: assistantOptions } = useAssistants();
|
||||
const { finalAssistants: assistantOptions } = useAssistantsContext();
|
||||
|
||||
const { llmProviders, inputPrompts } = useChatContext();
|
||||
|
||||
@@ -307,14 +302,6 @@ export function ChatInputBar({
|
||||
};
|
||||
}, []);
|
||||
|
||||
const updatedTaggedAssistant = (assistant: MinimalPersonaSnapshot) => {
|
||||
setAlternativeAssistant(
|
||||
assistant.id == selectedAssistant.id ? null : assistant
|
||||
);
|
||||
hideSuggestions();
|
||||
setMessage("");
|
||||
};
|
||||
|
||||
const handleAssistantInput = (text: string) => {
|
||||
if (!text.startsWith("@")) {
|
||||
hideSuggestions();
|
||||
@@ -372,10 +359,6 @@ export function ChatInputBar({
|
||||
}
|
||||
}
|
||||
|
||||
const assistantTagOptions = assistantOptions.filter((assistant) =>
|
||||
assistant.name.toLowerCase().startsWith(startFilterAt)
|
||||
);
|
||||
|
||||
let startFilterSlash = "";
|
||||
if (message !== undefined) {
|
||||
const message_segments = message
|
||||
@@ -395,19 +378,14 @@ export function ChatInputBar({
|
||||
|
||||
const handleKeyDown = (e: React.KeyboardEvent<HTMLTextAreaElement>) => {
|
||||
if (
|
||||
((showSuggestions && assistantTagOptions.length > 0) || showPrompts) &&
|
||||
(showSuggestions || showPrompts) &&
|
||||
(e.key === "Tab" || e.key == "Enter")
|
||||
) {
|
||||
e.preventDefault();
|
||||
|
||||
if (
|
||||
(tabbingIconIndex == assistantTagOptions.length && showSuggestions) ||
|
||||
(tabbingIconIndex == filteredPrompts.length && showPrompts)
|
||||
) {
|
||||
if (tabbingIconIndex == filteredPrompts.length && showPrompts) {
|
||||
if (showPrompts) {
|
||||
window.open("/chat/input-prompts", "_self");
|
||||
} else {
|
||||
window.open("/assistants/new", "_self");
|
||||
}
|
||||
} else {
|
||||
if (showPrompts) {
|
||||
@@ -416,12 +394,6 @@ export function ChatInputBar({
|
||||
if (selectedPrompt) {
|
||||
updateInputPrompt(selectedPrompt);
|
||||
}
|
||||
} else {
|
||||
const option =
|
||||
assistantTagOptions[tabbingIconIndex >= 0 ? tabbingIconIndex : 0];
|
||||
if (option) {
|
||||
updatedTaggedAssistant(option);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -432,10 +404,7 @@ export function ChatInputBar({
|
||||
if (e.key === "ArrowDown") {
|
||||
e.preventDefault();
|
||||
setTabbingIconIndex((tabbingIconIndex) =>
|
||||
Math.min(
|
||||
tabbingIconIndex + 1,
|
||||
showPrompts ? filteredPrompts.length : assistantTagOptions.length
|
||||
)
|
||||
Math.min(tabbingIconIndex + 1, showPrompts ? filteredPrompts.length : 0)
|
||||
);
|
||||
} else if (e.key === "ArrowUp") {
|
||||
e.preventDefault();
|
||||
@@ -496,51 +465,6 @@ export function ChatInputBar({
|
||||
mx-auto
|
||||
"
|
||||
>
|
||||
{showSuggestions && assistantTagOptions.length > 0 && (
|
||||
<div
|
||||
ref={suggestionsRef}
|
||||
className="text-sm absolute w-[calc(100%-2rem)] top-0 transform -translate-y-full"
|
||||
>
|
||||
<div className="rounded-lg py-1 overflow-y-auto max-h-[200px] sm-1.5 bg-input-background border border-border dark:border-none shadow-lg px-1.5 mt-2 z-10">
|
||||
{assistantTagOptions.map((currentAssistant, index) => (
|
||||
<button
|
||||
key={index}
|
||||
className={`px-2 ${
|
||||
tabbingIconIndex == index &&
|
||||
"bg-neutral-200 dark:bg-neutral-800"
|
||||
} rounded items-center rounded-lg content-start flex gap-x-1 py-2 w-full hover:bg-neutral-200/90 dark:hover:bg-neutral-800/90 cursor-pointer`}
|
||||
onClick={() => {
|
||||
updatedTaggedAssistant(currentAssistant);
|
||||
}}
|
||||
>
|
||||
<AssistantIcon size={16} assistant={currentAssistant} />
|
||||
<p className="text-text-darker font-semibold">
|
||||
{currentAssistant.name}
|
||||
</p>
|
||||
<p className="text-text-dark font-light line-clamp-1">
|
||||
{currentAssistant.id == selectedAssistant.id &&
|
||||
"(default) "}
|
||||
{currentAssistant.description}
|
||||
</p>
|
||||
</button>
|
||||
))}
|
||||
|
||||
<a
|
||||
key={assistantTagOptions.length}
|
||||
target="_self"
|
||||
className={`${
|
||||
tabbingIconIndex == assistantTagOptions.length &&
|
||||
"bg-neutral-200 dark:bg-neutral-800"
|
||||
} rounded rounded-lg px-3 flex gap-x-1 py-2 w-full items-center hover:bg-neutral-200/90 dark:hover:bg-neutral-800/90 cursor-pointer`}
|
||||
href="/assistants/new"
|
||||
>
|
||||
<FiPlus size={17} />
|
||||
<p>Create a new assistant</p>
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{showPrompts && user?.preferences?.shortcut_enabled && (
|
||||
<div
|
||||
ref={suggestionsRef}
|
||||
@@ -606,26 +530,6 @@ export function ChatInputBar({
|
||||
[&:has(textarea:focus)]::ring-black
|
||||
"
|
||||
>
|
||||
{alternativeAssistant && (
|
||||
<div className="flex bg-background flex-wrap gap-x-2 px-2 pt-1.5 w-full">
|
||||
<div
|
||||
ref={interactionsRef}
|
||||
className="p-2 rounded-t-lg items-center flex w-full"
|
||||
>
|
||||
<AssistantIcon assistant={alternativeAssistant} />
|
||||
<p className="ml-3 text-strong my-auto">
|
||||
{alternativeAssistant.name}
|
||||
</p>
|
||||
<div className="flex gap-x-1 ml-auto">
|
||||
<Hoverable
|
||||
icon={FiX}
|
||||
onClick={() => setAlternativeAssistant(null)}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<textarea
|
||||
onPaste={handlePaste}
|
||||
onKeyDownCapture={handleKeyDown}
|
||||
@@ -29,13 +29,13 @@ export const ChatInputOption: React.FC<ChatInputOptionProps> = ({
|
||||
onClick,
|
||||
minimize,
|
||||
}) => {
|
||||
const componentRef = useRef<HTMLButtonElement>(null);
|
||||
const componentRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
return (
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<button
|
||||
<div
|
||||
ref={componentRef}
|
||||
className={`
|
||||
relative
|
||||
@@ -76,7 +76,7 @@ export const ChatInputOption: React.FC<ChatInputOptionProps> = ({
|
||||
<ChevronDownIcon className="flex-none ml-1" size={size - 4} />
|
||||
)}
|
||||
</div>
|
||||
</button>
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>{tooltipContent}</TooltipContent>
|
||||
</Tooltip>
|
||||
@@ -5,7 +5,7 @@ import {
|
||||
PopoverTrigger,
|
||||
} from "@/components/ui/popover";
|
||||
import { getDisplayNameForModel, LlmDescriptor } from "@/lib/hooks";
|
||||
import { modelSupportsImageInput } from "@/lib/llm/utils";
|
||||
import { modelSupportsImageInput, structureValue } from "@/lib/llm/utils";
|
||||
import { LLMProviderDescriptor } from "@/app/admin/configuration/llm/interfaces";
|
||||
import { getProviderIcon } from "@/app/admin/configuration/llm/utils";
|
||||
import { MinimalPersonaSnapshot } from "@/app/admin/assistants/interfaces";
|
||||
@@ -148,7 +148,7 @@ export default function LLMPopover({
|
||||
provider,
|
||||
name,
|
||||
} as LlmDescriptor);
|
||||
onSelect?.(modelName);
|
||||
onSelect?.(structureValue(name, provider, modelName));
|
||||
setIsOpen(false);
|
||||
}}
|
||||
>
|
||||
@@ -2,11 +2,11 @@ import React, { useEffect } from "react";
|
||||
import { FiPlusCircle } from "react-icons/fi";
|
||||
import { ChatInputOption } from "./ChatInputOption";
|
||||
import { FilterManager } from "@/lib/hooks";
|
||||
import { ChatFileType, FileDescriptor } from "../interfaces";
|
||||
import { ChatFileType, FileDescriptor } from "@/app/chat/interfaces";
|
||||
import {
|
||||
InputBarPreview,
|
||||
InputBarPreviewImageProvider,
|
||||
} from "../files/InputBarPreview";
|
||||
} from "@/app/chat/components/files/InputBarPreview";
|
||||
import { SendIcon } from "@/components/icons/icons";
|
||||
import { HorizontalSourceSelector } from "@/components/search/filtering/HorizontalSourceSelector";
|
||||
import { Tag } from "@/lib/types";
|
||||
@@ -1,9 +1,10 @@
|
||||
"use client";
|
||||
|
||||
import { useState } from "react";
|
||||
import { FeedbackType } from "../types";
|
||||
import { FeedbackType } from "@/app/chat/interfaces";
|
||||
import { Modal } from "@/components/Modal";
|
||||
import { FilledLikeIcon } from "@/components/icons/icons";
|
||||
import { handleChatFeedback } from "../../services/lib";
|
||||
|
||||
const predefinedPositiveFeedbackOptions = process.env
|
||||
.NEXT_PUBLIC_POSITIVE_PREDEFINED_FEEDBACK_OPTIONS
|
||||
@@ -21,30 +22,60 @@ const predefinedNegativeFeedbackOptions = process.env
|
||||
|
||||
interface FeedbackModalProps {
|
||||
feedbackType: FeedbackType;
|
||||
messageId: number;
|
||||
onClose: () => void;
|
||||
onSubmit: (feedbackDetails: {
|
||||
message: string;
|
||||
predefinedFeedback?: string;
|
||||
}) => void;
|
||||
setPopup: (popup: { message: string; type: "success" | "error" }) => void;
|
||||
}
|
||||
|
||||
export const FeedbackModal = ({
|
||||
feedbackType,
|
||||
messageId,
|
||||
onClose,
|
||||
onSubmit,
|
||||
setPopup,
|
||||
}: FeedbackModalProps) => {
|
||||
const [message, setMessage] = useState("");
|
||||
const [predefinedFeedback, setPredefinedFeedback] = useState<
|
||||
string | undefined
|
||||
>();
|
||||
const [isSubmitting, setIsSubmitting] = useState(false);
|
||||
|
||||
const handlePredefinedFeedback = (feedback: string) => {
|
||||
setPredefinedFeedback(feedback);
|
||||
};
|
||||
|
||||
const handleSubmit = () => {
|
||||
onSubmit({ message, predefinedFeedback });
|
||||
onClose();
|
||||
const handleSubmit = async () => {
|
||||
setIsSubmitting(true);
|
||||
|
||||
try {
|
||||
const response = await handleChatFeedback(
|
||||
messageId,
|
||||
feedbackType,
|
||||
message,
|
||||
predefinedFeedback
|
||||
);
|
||||
|
||||
if (response.ok) {
|
||||
setPopup({
|
||||
message: "Thanks for your feedback!",
|
||||
type: "success",
|
||||
});
|
||||
} else {
|
||||
const responseJson = await response.json();
|
||||
const errorMsg = responseJson.detail || responseJson.message;
|
||||
setPopup({
|
||||
message: `Failed to submit feedback - ${errorMsg}`,
|
||||
type: "error",
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
setPopup({
|
||||
message: "Failed to submit feedback - network error",
|
||||
type: "error",
|
||||
});
|
||||
} finally {
|
||||
setIsSubmitting(false);
|
||||
onClose();
|
||||
}
|
||||
};
|
||||
|
||||
const predefinedFeedbackOptions =
|
||||
@@ -76,8 +107,19 @@ export const FeedbackModal = ({
|
||||
{predefinedFeedbackOptions.map((feedback, index) => (
|
||||
<button
|
||||
key={index}
|
||||
className={`bg-background-dark hover:bg-accent-background-hovered text-default py-2 px-4 rounded m-1
|
||||
${predefinedFeedback === feedback && "ring-2 ring-accent/20"}`}
|
||||
disabled={isSubmitting}
|
||||
className={`
|
||||
bg-background-dark
|
||||
hover:bg-accent-background-hovered
|
||||
text-default
|
||||
py-2
|
||||
px-4
|
||||
rounded
|
||||
m-1
|
||||
disabled:opacity-50
|
||||
disabled:cursor-not-allowed
|
||||
${predefinedFeedback === feedback && "ring-2 ring-accent/20"}
|
||||
`}
|
||||
onClick={() => handlePredefinedFeedback(feedback)}
|
||||
>
|
||||
{feedback}
|
||||
@@ -87,14 +129,27 @@ export const FeedbackModal = ({
|
||||
|
||||
<textarea
|
||||
autoFocus
|
||||
disabled={isSubmitting}
|
||||
className={`
|
||||
w-full flex-grow
|
||||
border border-border-strong rounded
|
||||
outline-none placeholder-subtle
|
||||
pl-4 pr-4 py-4 bg-background
|
||||
overflow-hidden h-28
|
||||
whitespace-normal resize-none
|
||||
break-all overscroll-contain
|
||||
w-full
|
||||
flex-grow
|
||||
border
|
||||
border-border-strong
|
||||
rounded
|
||||
outline-none
|
||||
placeholder-subtle
|
||||
pl-4
|
||||
pr-4
|
||||
py-4
|
||||
bg-background
|
||||
overflow-hidden
|
||||
h-28
|
||||
whitespace-normal
|
||||
resize-none
|
||||
break-all
|
||||
overscroll-contain
|
||||
disabled:opacity-50
|
||||
disabled:cursor-not-allowed
|
||||
`}
|
||||
role="textarea"
|
||||
aria-multiline
|
||||
@@ -109,10 +164,22 @@ export const FeedbackModal = ({
|
||||
|
||||
<div className="flex mt-2">
|
||||
<button
|
||||
className="bg-agent text-white py-2 px-4 rounded hover:bg-agent/50 focus:outline-none mx-auto"
|
||||
disabled={isSubmitting}
|
||||
className={`
|
||||
bg-agent
|
||||
text-white
|
||||
py-2
|
||||
px-4
|
||||
rounded
|
||||
hover:bg-agent/50
|
||||
focus:outline-none
|
||||
mx-auto
|
||||
disabled:opacity-50
|
||||
disabled:cursor-not-allowed
|
||||
`}
|
||||
onClick={handleSubmit}
|
||||
>
|
||||
Submit feedback
|
||||
{isSubmitting ? "Submitting..." : "Submit feedback"}
|
||||
</button>
|
||||
</div>
|
||||
</>
|
||||
@@ -5,10 +5,10 @@ import { Callout } from "@/components/ui/callout";
|
||||
|
||||
import Text from "@/components/ui/text";
|
||||
|
||||
import { ChatSessionSharedStatus } from "../interfaces";
|
||||
import { ChatSessionSharedStatus } from "@/app/chat/interfaces";
|
||||
import { FiCopy } from "react-icons/fi";
|
||||
import { CopyButton } from "@/components/CopyButton";
|
||||
import { SEARCH_PARAM_NAMES } from "../searchParams";
|
||||
import { SEARCH_PARAM_NAMES } from "@/app/chat/services/searchParams";
|
||||
import { usePopup } from "@/components/admin/connectors/Popup";
|
||||
import { structureValue } from "@/lib/llm/utils";
|
||||
import { LlmDescriptor } from "@/lib/hooks";
|
||||
@@ -25,7 +25,7 @@ import { useTheme } from "next-themes";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { Input } from "@/components/ui/input";
|
||||
import { FiTrash2, FiExternalLink } from "react-icons/fi";
|
||||
import { deleteAllChatSessions } from "../lib";
|
||||
import { deleteAllChatSessions } from "../../services/lib";
|
||||
import { useChatContext } from "@/components/context/ChatContext";
|
||||
import { FederatedConnectorOAuthStatus } from "@/components/chat/FederatedOAuthModal";
|
||||
import { SourceIcon } from "@/components/SourceIcon";
|
||||
@@ -19,7 +19,7 @@ import { getFinalLLM } from "@/lib/llm/utils";
|
||||
import React, { useEffect, useState } from "react";
|
||||
import { updateUserAssistantList } from "@/lib/assistants/updateAssistantPreferences";
|
||||
import { DraggableAssistantCard } from "@/components/assistants/AssistantCards";
|
||||
import { useAssistants } from "@/components/context/AssistantsContext";
|
||||
import { useAssistantsContext } from "@/components/context/AssistantsContext";
|
||||
import { useUser } from "@/components/user/UserProvider";
|
||||
|
||||
export function AssistantsTab({
|
||||
@@ -33,7 +33,7 @@ export function AssistantsTab({
|
||||
}) {
|
||||
const { refreshUser } = useUser();
|
||||
const [_, llmName] = getFinalLLM(llmProviders, null, null);
|
||||
const { finalAssistants, refreshAssistants } = useAssistants();
|
||||
const { finalAssistants, refreshAssistants } = useAssistantsContext();
|
||||
const [assistants, setAssistants] = useState(finalAssistants);
|
||||
|
||||
useEffect(() => {
|
||||
@@ -1,182 +0,0 @@
|
||||
import { MinimalOnyxDocument, OnyxDocument } from "@/lib/search/interfaces";
|
||||
import { ChatDocumentDisplay } from "./ChatDocumentDisplay";
|
||||
import { removeDuplicateDocs } from "@/lib/documentUtils";
|
||||
import { ChatFileType, Message } from "../interfaces";
|
||||
import {
|
||||
  Dispatch,
  ForwardedRef,
  forwardRef,
  SetStateAction,
  useEffect,
  useState,
} from "react";
import { XIcon } from "@/components/icons/icons";
import { FileSourceCardInResults } from "../message/SourcesDisplay";
import { useDocumentsContext } from "../my-documents/DocumentsContext";

interface DocumentResultsProps {
  agenticMessage: boolean;
  humanMessage: Message | null;
  closeSidebar: () => void;
  selectedMessage: Message | null;
  selectedDocuments: OnyxDocument[] | null;
  toggleDocumentSelection: (document: OnyxDocument) => void;
  clearSelectedDocuments: () => void;
  selectedDocumentTokens: number;
  maxTokens: number;
  initialWidth: number;
  isOpen: boolean;
  isSharedChat?: boolean;
  modal: boolean;
  setPresentingDocument: Dispatch<SetStateAction<MinimalOnyxDocument | null>>;
  removeHeader?: boolean;
}

export const DocumentResults = forwardRef<HTMLDivElement, DocumentResultsProps>(
  (
    {
      agenticMessage,
      humanMessage,
      closeSidebar,
      modal,
      selectedMessage,
      selectedDocuments,
      toggleDocumentSelection,
      clearSelectedDocuments,
      selectedDocumentTokens,
      maxTokens,
      initialWidth,
      isSharedChat,
      isOpen,
      setPresentingDocument,
      removeHeader,
    },
    ref: ForwardedRef<HTMLDivElement>
  ) => {
    const [delayedSelectedDocumentCount, setDelayedSelectedDocumentCount] =
      useState(0);

    useEffect(() => {
      const timer = setTimeout(
        () => {
          setDelayedSelectedDocumentCount(selectedDocuments?.length || 0);
        },
        selectedDocuments?.length == 0 ? 1000 : 0
      );

      return () => clearTimeout(timer);
    }, [selectedDocuments]);
    const { files: allUserFiles } = useDocumentsContext();

    const humanFileDescriptors = humanMessage?.files.filter(
      (file) => file.type == ChatFileType.USER_KNOWLEDGE
    );
    const userFiles = allUserFiles?.filter((file) =>
      humanFileDescriptors?.some((descriptor) => descriptor.id === file.file_id)
    );
    const selectedDocumentIds =
      selectedDocuments?.map((document) => document.document_id) || [];

    const currentDocuments = selectedMessage?.documents || null;
    const dedupedDocuments = removeDuplicateDocs(currentDocuments || []);

    const tokenLimitReached = selectedDocumentTokens > maxTokens - 75;

    const hasSelectedDocuments = selectedDocumentIds.length > 0;
    return (
      <>
        <div
          id="onyx-chat-sidebar"
          className={`relative -mb-8 bg-background max-w-full ${
            !modal
              ? "border-l border-t h-[105vh] border-sidebar-border dark:border-neutral-700"
              : ""
          }`}
          onClick={(e) => {
            if (e.target === e.currentTarget) {
              closeSidebar();
            }
          }}
        >
          <div
            className={`ml-auto h-full relative sidebar transition-transform ease-in-out duration-300
            ${isOpen ? " translate-x-0" : " translate-x-[10%]"}`}
            style={{
              width: modal ? undefined : initialWidth,
            }}
          >
            <div className="flex flex-col h-full">
              {!removeHeader && (
                <>
                  <div className="p-4 flex items-center justify-between gap-x-2">
                    <div className="flex items-center gap-x-2">
                      <h2 className="text-xl font-bold text-text-900">
                        Sources
                      </h2>
                    </div>
                    <button className="my-auto" onClick={closeSidebar}>
                      <XIcon size={16} />
                    </button>
                  </div>
                  <div className="border-b border-divider-history-sidebar-bar mx-3" />
                </>
              )}

              <div className="overflow-y-auto h-fit mb-8 pb-8 sm:mx-0 flex-grow gap-y-0 default-scrollbar dark-scrollbar flex flex-col">
                {userFiles && userFiles.length > 0 ? (
                  <div className=" gap-y-2 flex flex-col pt-2 mx-3">
                    {userFiles?.map((file, index) => (
                      <FileSourceCardInResults
                        key={index}
                        relevantDocument={dedupedDocuments.find(
                          (doc) =>
                            doc.document_id ===
                            `FILE_CONNECTOR__${file.file_id}`
                        )}
                        document={file}
                        setPresentingDocument={() =>
                          setPresentingDocument({
                            document_id: file.document_id,
                            semantic_identifier: file.file_id || null,
                          })
                        }
                      />
                    ))}
                  </div>
                ) : dedupedDocuments.length > 0 ? (
                  dedupedDocuments.map((document, ind) => (
                    <div
                      key={document.document_id}
                      className={`desktop:px-2 w-full`}
                    >
                      <ChatDocumentDisplay
                        agenticMessage={agenticMessage}
                        setPresentingDocument={setPresentingDocument}
                        closeSidebar={closeSidebar}
                        modal={modal}
                        document={document}
                        isSelected={selectedDocumentIds.includes(
                          document.document_id
                        )}
                        handleSelect={(documentId) => {
                          toggleDocumentSelection(
                            dedupedDocuments.find(
                              (doc) => doc.document_id === documentId
                            )!
                          );
                        }}
                        hideSelection={isSharedChat}
                        tokenLimitReached={tokenLimitReached}
                      />
                    </div>
                  ))
                ) : null}
              </div>
            </div>
          </div>
        </div>
      </>
    );
  }
);

DocumentResults.displayName = "DocumentResults";
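The delayedSelectedDocumentCount effect above is a small debounce-on-clear: the count updates immediately while documents are selected, but waits roughly a second before dropping to zero so the selection UI does not flash when everything is deselected. A minimal standalone sketch of that pattern (the hook name and delay are illustrative, not part of this change):

import { useEffect, useState } from "react";

// Illustrative sketch only: delay the transition to an empty state.
export function useDelayedCount(count: number, clearDelayMs = 1000) {
  const [delayedCount, setDelayedCount] = useState(count);

  useEffect(() => {
    // Non-zero counts apply immediately; a drop to zero is deferred.
    const timer = setTimeout(
      () => setDelayedCount(count),
      count === 0 ? clearDelayMs : 0
    );
    return () => clearTimeout(timer);
  }, [count, clearDelayMs]);

  return delayedCount;
}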
82
web/src/app/chat/hooks/useAssistantController.ts
Normal file
@@ -0,0 +1,82 @@
import { MinimalPersonaSnapshot } from "@/app/admin/assistants/interfaces";
import { useMemo, useState } from "react";
import { ChatSession } from "../interfaces";
import { useAssistantsContext } from "@/components/context/AssistantsContext";
import { useSearchParams } from "next/navigation";
import { SEARCH_PARAM_NAMES } from "../services/searchParams";

export function useAssistantController({
  selectedChatSession,
}: {
  selectedChatSession: ChatSession | null | undefined;
}) {
  const searchParams = useSearchParams();
  const { assistants: availableAssistants, pinnedAssistants } =
    useAssistantsContext();

  const defaultAssistantIdRaw = searchParams?.get(
    SEARCH_PARAM_NAMES.PERSONA_ID
  );
  const defaultAssistantId = defaultAssistantIdRaw
    ? parseInt(defaultAssistantIdRaw)
    : undefined;

  const existingChatSessionAssistantId = selectedChatSession?.persona_id;
  const [selectedAssistant, setSelectedAssistant] = useState<
    MinimalPersonaSnapshot | undefined
  >(
    // NOTE: look through available assistants here, so that even if the user
    // has hidden this assistant it still shows the correct assistant when
    // going back to an old chat session
    existingChatSessionAssistantId !== undefined
      ? availableAssistants.find(
          (assistant) => assistant.id === existingChatSessionAssistantId
        )
      : defaultAssistantId !== undefined
        ? availableAssistants.find(
            (assistant) => assistant.id === defaultAssistantId
          )
        : undefined
  );

  // Current assistant is decided based on this ordering
  // 1. Alternative assistant (assistant selected explicitly by user)
  // 2. Selected assistant (assistant default in this chat session)
  // 3. First pinned assistant (ordered list of pinned assistants)
  // 4. First available assistant (ordered list of available assistants)
  // Relevant test: `live_assistant.spec.ts`
  const liveAssistant: MinimalPersonaSnapshot | undefined = useMemo(
    () => selectedAssistant || pinnedAssistants[0] || availableAssistants[0],
    [selectedAssistant, pinnedAssistants, availableAssistants]
  );

  const setSelectedAssistantFromId = (
    assistantId: number | null | undefined
  ) => {
    // NOTE: also intentionally look through available assistants here, so that
    // even if the user has hidden an assistant they can still go back to it
    // for old chats
    let newAssistant =
      assistantId !== null
        ? availableAssistants.find((assistant) => assistant.id === assistantId)
        : undefined;

    // if no assistant was passed in / found, use the default assistant
    if (!newAssistant && defaultAssistantId) {
      newAssistant = availableAssistants.find(
        (assistant) => assistant.id === defaultAssistantId
      );
    }

    setSelectedAssistant(newAssistant);
  };

  return {
    // main assistant selection
    selectedAssistant,
    setSelectedAssistantFromId,

    // final computed assistant
    liveAssistant,
  };
}
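A hook like useAssistantController would normally be consumed from the chat page; the snippet below is a hypothetical usage sketch (the surrounding chatSession variable and the assistant id are assumptions, not code from this commit):

// Hypothetical caller; chatSession comes from whatever component owns the session.
const { selectedAssistant, liveAssistant, setSelectedAssistantFromId } =
  useAssistantController({ selectedChatSession: chatSession });

// Explicitly pick an assistant by id (falls back to the default assistant if not found).
setSelectedAssistantFromId(42);

// Clear the explicit choice so liveAssistant falls back to the pinned/available ordering.
setSelectedAssistantFromId(null);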
1027
web/src/app/chat/hooks/useChatController.ts
Normal file
File diff suppressed because it is too large
327
web/src/app/chat/hooks/useChatSessionController.ts
Normal file
@@ -0,0 +1,327 @@
|
||||
"use client";
|
||||
|
||||
import { useEffect, useRef } from "react";
|
||||
import { ReadonlyURLSearchParams, useRouter } from "next/navigation";
|
||||
import {
|
||||
nameChatSession,
|
||||
processRawChatHistory,
|
||||
patchMessageToBeLatest,
|
||||
} from "../services/lib";
|
||||
import {
|
||||
getLatestMessageChain,
|
||||
setMessageAsLatest,
|
||||
} from "../services/messageTree";
|
||||
import { BackendChatSession, ChatSessionSharedStatus } from "../interfaces";
|
||||
import {
|
||||
SEARCH_PARAM_NAMES,
|
||||
shouldSubmitOnLoad,
|
||||
} from "../services/searchParams";
|
||||
import { FilterManager } from "@/lib/hooks";
|
||||
import { OnyxDocument } from "@/lib/search/interfaces";
|
||||
import { FileDescriptor } from "../interfaces";
|
||||
import { FileResponse, FolderResponse } from "../my-documents/DocumentsContext";
|
||||
import {
|
||||
useChatSessionStore,
|
||||
useCurrentMessageHistory,
|
||||
} from "../stores/useChatSessionStore";
|
||||
|
||||
interface UseChatSessionControllerProps {
|
||||
existingChatSessionId: string | null;
|
||||
searchParams: ReadonlyURLSearchParams;
|
||||
filterManager: FilterManager;
|
||||
firstMessage?: string;
|
||||
|
||||
// UI state setters
|
||||
setSelectedAssistantFromId: (assistantId: number | null) => void;
|
||||
setSelectedDocuments: (documents: OnyxDocument[]) => void;
|
||||
setCurrentMessageFiles: (
|
||||
files: FileDescriptor[] | ((prev: FileDescriptor[]) => FileDescriptor[])
|
||||
) => void;
|
||||
|
||||
// Refs
|
||||
chatSessionIdRef: React.MutableRefObject<string | null>;
|
||||
loadedIdSessionRef: React.MutableRefObject<string | null>;
|
||||
textAreaRef: React.RefObject<HTMLTextAreaElement>;
|
||||
scrollInitialized: React.MutableRefObject<boolean>;
|
||||
isInitialLoad: React.MutableRefObject<boolean>;
|
||||
submitOnLoadPerformed: React.MutableRefObject<boolean>;
|
||||
|
||||
// State
|
||||
hasPerformedInitialScroll: boolean;
|
||||
|
||||
// Actions
|
||||
clientScrollToBottom: (fast?: boolean) => void;
|
||||
clearSelectedItems: () => void;
|
||||
refreshChatSessions: () => void;
|
||||
onSubmit: (params: {
|
||||
message: string;
|
||||
selectedFiles: FileResponse[];
|
||||
selectedFolders: FolderResponse[];
|
||||
currentMessageFiles: FileDescriptor[];
|
||||
useAgentSearch: boolean;
|
||||
isSeededChat?: boolean;
|
||||
}) => Promise<void>;
|
||||
}
|
||||
|
||||
export function useChatSessionController({
|
||||
existingChatSessionId,
|
||||
searchParams,
|
||||
filterManager,
|
||||
firstMessage,
|
||||
setSelectedAssistantFromId,
|
||||
setSelectedDocuments,
|
||||
setCurrentMessageFiles,
|
||||
chatSessionIdRef,
|
||||
loadedIdSessionRef,
|
||||
textAreaRef,
|
||||
scrollInitialized,
|
||||
isInitialLoad,
|
||||
submitOnLoadPerformed,
|
||||
hasPerformedInitialScroll,
|
||||
clientScrollToBottom,
|
||||
clearSelectedItems,
|
||||
refreshChatSessions,
|
||||
onSubmit,
|
||||
}: UseChatSessionControllerProps) {
|
||||
// Store actions
|
||||
const updateSessionAndMessageTree = useChatSessionStore(
|
||||
(state) => state.updateSessionAndMessageTree
|
||||
);
|
||||
const updateSessionMessageTree = useChatSessionStore(
|
||||
(state) => state.updateSessionMessageTree
|
||||
);
|
||||
const setIsFetchingChatMessages = useChatSessionStore(
|
||||
(state) => state.setIsFetchingChatMessages
|
||||
);
|
||||
const setCurrentSession = useChatSessionStore(
|
||||
(state) => state.setCurrentSession
|
||||
);
|
||||
const updateHasPerformedInitialScroll = useChatSessionStore(
|
||||
(state) => state.updateHasPerformedInitialScroll
|
||||
);
|
||||
const updateCurrentChatSessionSharedStatus = useChatSessionStore(
|
||||
(state) => state.updateCurrentChatSessionSharedStatus
|
||||
);
|
||||
const updateCurrentSelectedMessageForDocDisplay = useChatSessionStore(
|
||||
(state) => state.updateCurrentSelectedMessageForDocDisplay
|
||||
);
|
||||
const currentChatState = useChatSessionStore(
|
||||
(state) =>
|
||||
state.sessions.get(state.currentSessionId || "")?.chatState || "input"
|
||||
);
|
||||
const currentChatHistory = useCurrentMessageHistory();
|
||||
|
||||
// Fetch chat messages for the chat session
|
||||
useEffect(() => {
|
||||
const priorChatSessionId = chatSessionIdRef.current;
|
||||
const loadedSessionId = loadedIdSessionRef.current;
|
||||
chatSessionIdRef.current = existingChatSessionId;
|
||||
loadedIdSessionRef.current = existingChatSessionId;
|
||||
|
||||
textAreaRef.current?.focus();
|
||||
|
||||
// Only clear things if we're going from one chat session to another
|
||||
const isChatSessionSwitch = existingChatSessionId !== priorChatSessionId;
|
||||
if (isChatSessionSwitch) {
|
||||
// De-select documents
|
||||
// Reset all filters
|
||||
filterManager.setSelectedDocumentSets([]);
|
||||
filterManager.setSelectedSources([]);
|
||||
filterManager.setSelectedTags([]);
|
||||
filterManager.setTimeRange(null);
|
||||
|
||||
// Remove uploaded files
|
||||
setCurrentMessageFiles([]);
|
||||
|
||||
// If switching from one chat to another, then need to scroll again
|
||||
// If we're creating a brand new chat, then don't need to scroll
|
||||
if (priorChatSessionId !== null) {
|
||||
setSelectedDocuments([]);
|
||||
clearSelectedItems();
|
||||
if (existingChatSessionId) {
|
||||
updateHasPerformedInitialScroll(existingChatSessionId, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async function initialSessionFetch() {
|
||||
if (existingChatSessionId === null) {
|
||||
// Clear the current session in the store to show intro messages
|
||||
setCurrentSession(null);
|
||||
|
||||
// Reset the selected assistant back to default
|
||||
setSelectedAssistantFromId(null);
|
||||
updateCurrentChatSessionSharedStatus(ChatSessionSharedStatus.Private);
|
||||
|
||||
// If we're supposed to submit on initial load, then do that here
|
||||
if (
|
||||
shouldSubmitOnLoad(searchParams) &&
|
||||
!submitOnLoadPerformed.current
|
||||
) {
|
||||
submitOnLoadPerformed.current = true;
|
||||
await onSubmit({
|
||||
message: firstMessage || "",
|
||||
selectedFiles: [],
|
||||
selectedFolders: [],
|
||||
currentMessageFiles: [],
|
||||
useAgentSearch: false,
|
||||
});
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Set the current session first, then set fetching state to prevent intro flash
|
||||
setCurrentSession(existingChatSessionId);
|
||||
setIsFetchingChatMessages(existingChatSessionId, true);
|
||||
|
||||
const response = await fetch(
|
||||
`/api/chat/get-chat-session/${existingChatSessionId}`
|
||||
);
|
||||
|
||||
const session = await response.json();
|
||||
const chatSession = session as BackendChatSession;
|
||||
setSelectedAssistantFromId(chatSession.persona_id);
|
||||
|
||||
// Ensure the current session is set to the actual session ID from the response
|
||||
setCurrentSession(chatSession.chat_session_id);
|
||||
|
||||
const newMessageMap = processRawChatHistory(
|
||||
chatSession.messages,
|
||||
chatSession.packets
|
||||
);
|
||||
const newMessageHistory = getLatestMessageChain(newMessageMap);
|
||||
|
||||
// Update message history except for the edge case where
|
||||
// last message is an error and we're on a new chat.
|
||||
// This corresponds to a "renaming" of chat, which occurs after first message
|
||||
// stream
|
||||
if (
|
||||
(newMessageHistory[newMessageHistory.length - 1]?.type !== "error" ||
|
||||
loadedSessionId != null) &&
|
||||
!(
|
||||
currentChatState == "toolBuilding" ||
|
||||
currentChatState == "streaming" ||
|
||||
currentChatState == "loading"
|
||||
)
|
||||
) {
|
||||
const latestMessageId =
|
||||
newMessageHistory[newMessageHistory.length - 1]?.messageId;
|
||||
|
||||
updateCurrentSelectedMessageForDocDisplay(
|
||||
latestMessageId !== undefined && latestMessageId !== null
|
||||
? latestMessageId
|
||||
: null
|
||||
);
|
||||
|
||||
updateSessionAndMessageTree(chatSession.chat_session_id, newMessageMap);
|
||||
chatSessionIdRef.current = chatSession.chat_session_id;
|
||||
}
|
||||
|
||||
// Go to bottom. If initial load, then do a scroll,
|
||||
// otherwise just appear at the bottom
|
||||
scrollInitialized.current = false;
|
||||
|
||||
if (!hasPerformedInitialScroll) {
|
||||
if (isInitialLoad.current) {
|
||||
if (chatSession.chat_session_id) {
|
||||
updateHasPerformedInitialScroll(chatSession.chat_session_id, true);
|
||||
}
|
||||
isInitialLoad.current = false;
|
||||
}
|
||||
clientScrollToBottom();
|
||||
|
||||
setTimeout(() => {
|
||||
if (chatSession.chat_session_id) {
|
||||
updateHasPerformedInitialScroll(chatSession.chat_session_id, true);
|
||||
}
|
||||
}, 100);
|
||||
} else if (isChatSessionSwitch) {
|
||||
if (chatSession.chat_session_id) {
|
||||
updateHasPerformedInitialScroll(chatSession.chat_session_id, true);
|
||||
}
|
||||
clientScrollToBottom(true);
|
||||
}
|
||||
|
||||
setIsFetchingChatMessages(chatSession.chat_session_id, false);
|
||||
|
||||
// If this is a seeded chat, then kick off the AI message generation
|
||||
if (
|
||||
newMessageHistory.length === 1 &&
|
||||
!submitOnLoadPerformed.current &&
|
||||
searchParams?.get(SEARCH_PARAM_NAMES.SEEDED) === "true"
|
||||
) {
|
||||
submitOnLoadPerformed.current = true;
|
||||
|
||||
const seededMessage = newMessageHistory[0]?.message;
|
||||
if (!seededMessage) {
|
||||
return;
|
||||
}
|
||||
|
||||
await onSubmit({
|
||||
message: seededMessage,
|
||||
isSeededChat: true,
|
||||
selectedFiles: [],
|
||||
selectedFolders: [],
|
||||
currentMessageFiles: [],
|
||||
useAgentSearch: false,
|
||||
});
|
||||
// Force re-name if the chat session doesn't have one
|
||||
if (!chatSession.description) {
|
||||
await nameChatSession(existingChatSessionId);
|
||||
refreshChatSessions();
|
||||
}
|
||||
} else if (newMessageHistory.length === 2 && !chatSession.description) {
|
||||
await nameChatSession(existingChatSessionId);
|
||||
refreshChatSessions();
|
||||
}
|
||||
}
|
||||
|
||||
// SKIP_RELOAD is used after completing the first message in a new session.
|
||||
// We don't need to re-fetch at that point, we have everything we need.
|
||||
// For safety, we should always re-fetch if there are no messages in the chat history.
|
||||
if (
|
||||
!searchParams?.get(SEARCH_PARAM_NAMES.SKIP_RELOAD) ||
|
||||
currentChatHistory.length === 0
|
||||
) {
|
||||
initialSessionFetch();
|
||||
} else {
|
||||
// Remove SKIP_RELOAD param without triggering a page reload
|
||||
const currentSearchParams = new URLSearchParams(searchParams?.toString());
|
||||
if (currentSearchParams.has(SEARCH_PARAM_NAMES.SKIP_RELOAD)) {
|
||||
currentSearchParams.delete(SEARCH_PARAM_NAMES.SKIP_RELOAD);
|
||||
const newUrl = `${window.location.pathname}${currentSearchParams.toString() ? "?" + currentSearchParams.toString() : ""}`;
|
||||
window.history.replaceState({}, "", newUrl);
|
||||
}
|
||||
}
|
||||
}, [
|
||||
existingChatSessionId,
|
||||
searchParams?.get(SEARCH_PARAM_NAMES.PERSONA_ID),
|
||||
// Note: We're intentionally not including all dependencies to avoid infinite loops
|
||||
// This effect should only run when existingChatSessionId or persona ID changes
|
||||
]);
|
||||
|
||||
const onMessageSelection = (messageId: number) => {
|
||||
updateCurrentSelectedMessageForDocDisplay(messageId);
|
||||
const currentMessageTree = useChatSessionStore
|
||||
.getState()
|
||||
.sessions.get(
|
||||
useChatSessionStore.getState().currentSessionId || ""
|
||||
)?.messageTree;
|
||||
|
||||
if (currentMessageTree) {
|
||||
const newMessageTree = setMessageAsLatest(currentMessageTree, messageId);
|
||||
const currentSessionId = useChatSessionStore.getState().currentSessionId;
|
||||
if (currentSessionId) {
|
||||
updateSessionMessageTree(currentSessionId, newMessageTree);
|
||||
}
|
||||
}
|
||||
|
||||
// Makes actual API call to set message as latest in the DB so we can
|
||||
// edit this message and so it sticks around on page reload
|
||||
patchMessageToBeLatest(messageId);
|
||||
};
|
||||
|
||||
return {
|
||||
onMessageSelection,
|
||||
};
|
||||
}
|
||||
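One detail worth calling out from the effect above is the SKIP_RELOAD cleanup: the param is stripped with history.replaceState so the URL is tidied without triggering a Next.js navigation. A generic version of that pattern (the function and param names are illustrative, not part of this change):

// Illustrative sketch: remove a single search param without causing a client navigation.
function stripSearchParam(param: string) {
  const params = new URLSearchParams(window.location.search);
  if (!params.has(param)) return;
  params.delete(param);
  const query = params.toString();
  window.history.replaceState(
    {},
    "",
    `${window.location.pathname}${query ? `?${query}` : ""}`
  );
}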
@@ -1,6 +1,6 @@
|
||||
import { OnyxDocument } from "@/lib/search/interfaces";
|
||||
import { useState } from "react";
|
||||
import { FileResponse } from "./my-documents/DocumentsContext";
|
||||
import { FileResponse } from "../my-documents/DocumentsContext";
|
||||
|
||||
interface DocumentInfo {
|
||||
num_chunks: number;
|
||||
@@ -20,7 +20,7 @@ import {
|
||||
DropdownMenuItem,
|
||||
DropdownMenuTrigger,
|
||||
} from "@/components/ui/dropdown-menu";
|
||||
import { SourceChip } from "../input/ChatInputBar";
|
||||
import { SourceChip } from "../components/input/ChatInputBar";
|
||||
|
||||
export default function InputPrompts() {
|
||||
const [inputPrompts, setInputPrompts] = useState<InputPrompt[]>([]);
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { SourceChip } from "../input/ChatInputBar";
|
||||
import { SourceChip } from "../components/input/ChatInputBar";
|
||||
|
||||
import { useEffect } from "react";
|
||||
import { useState } from "react";
|
||||
|
||||
@@ -3,12 +3,20 @@ import {
|
||||
Filters,
|
||||
SearchOnyxDocument,
|
||||
StreamStopReason,
|
||||
SubQuestionPiece,
|
||||
SubQueryPiece,
|
||||
AgentAnswerPiece,
|
||||
SubQuestionSearchDoc,
|
||||
StreamStopInfo,
|
||||
} from "@/lib/search/interfaces";
|
||||
import { Packet } from "./services/streamingModels";
|
||||
|
||||
export type FeedbackType = "like" | "dislike";
|
||||
export type ChatState =
|
||||
| "input"
|
||||
| "loading"
|
||||
| "streaming"
|
||||
| "toolBuilding"
|
||||
| "uploading";
|
||||
export interface RegenerationState {
|
||||
regenerating: boolean;
|
||||
finalMessageIndex: number;
|
||||
}
|
||||
|
||||
export enum RetrievalType {
|
||||
None = "none",
|
||||
@@ -31,7 +39,7 @@ export interface RetrievalDetails {
|
||||
enable_auto_detect_filters?: boolean | null;
|
||||
}
|
||||
|
||||
type CitationMap = { [key: string]: number };
|
||||
export type CitationMap = { [key: string]: number };
|
||||
|
||||
export enum ChatFileType {
|
||||
IMAGE = "image",
|
||||
@@ -96,8 +104,6 @@ export interface Message {
|
||||
type: "user" | "assistant" | "system" | "error";
|
||||
retrievalType?: RetrievalType;
|
||||
query?: string | null;
|
||||
documents?: OnyxDocument[] | null;
|
||||
citations?: CitationMap;
|
||||
files: FileDescriptor[];
|
||||
toolCall: ToolCallMetadata | null;
|
||||
// for rebuilding the message tree
|
||||
@@ -108,16 +114,13 @@ export interface Message {
|
||||
stackTrace?: string | null;
|
||||
overridden_model?: string;
|
||||
stopReason?: StreamStopReason | null;
|
||||
sub_questions?: SubQuestionDetail[] | null;
|
||||
is_agentic?: boolean | null;
|
||||
|
||||
// Streaming only
|
||||
second_level_generating?: boolean;
|
||||
agentic_docs?: OnyxDocument[] | null;
|
||||
second_level_message?: string;
|
||||
second_level_subquestions?: SubQuestionDetail[] | null;
|
||||
isImprovement?: boolean | null;
|
||||
isStreamingQuestions?: boolean;
|
||||
// new gen
|
||||
packets: Packet[];
|
||||
|
||||
// cached values for easy access
|
||||
documents?: OnyxDocument[] | null;
|
||||
citations?: CitationMap;
|
||||
}
|
||||
|
||||
export interface BackendChatSession {
|
||||
@@ -133,6 +136,8 @@ export interface BackendChatSession {
|
||||
shared_status: ChatSessionSharedStatus;
|
||||
current_temperature_override: number | null;
|
||||
current_alternate_model?: string;
|
||||
|
||||
packets: Packet[][];
|
||||
}
|
||||
|
||||
export interface BackendMessage {
|
||||
@@ -250,139 +255,3 @@ export interface SubQueryDetail {
|
||||
query_id: number;
|
||||
doc_ids?: number[] | null;
|
||||
}
|
||||
|
||||
export const constructSubQuestions = (
|
||||
subQuestions: SubQuestionDetail[],
|
||||
newDetail:
|
||||
| SubQuestionPiece
|
||||
| SubQueryPiece
|
||||
| AgentAnswerPiece
|
||||
| SubQuestionSearchDoc
|
||||
| DocumentsResponse
|
||||
| StreamStopInfo
|
||||
): SubQuestionDetail[] => {
|
||||
if (!newDetail) {
|
||||
return subQuestions;
|
||||
}
|
||||
if (newDetail.level_question_num == 0) {
|
||||
return subQuestions;
|
||||
}
|
||||
|
||||
const updatedSubQuestions = [...subQuestions];
|
||||
|
||||
if ("stop_reason" in newDetail) {
|
||||
const { level, level_question_num } = newDetail;
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
if (subQuestion) {
|
||||
if (newDetail.stream_type == "sub_answer") {
|
||||
subQuestion.answer_streaming = false;
|
||||
} else {
|
||||
subQuestion.is_complete = true;
|
||||
subQuestion.is_stopped = true;
|
||||
}
|
||||
}
|
||||
} else if ("top_documents" in newDetail) {
|
||||
const { level, level_question_num, top_documents } = newDetail;
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
if (!subQuestion) {
|
||||
subQuestion = {
|
||||
level: level ?? 0,
|
||||
level_question_num: level_question_num ?? 0,
|
||||
question: "",
|
||||
answer: "",
|
||||
sub_queries: [],
|
||||
context_docs: { top_documents },
|
||||
is_complete: false,
|
||||
};
|
||||
} else {
|
||||
subQuestion.context_docs = { top_documents };
|
||||
}
|
||||
} else if ("answer_piece" in newDetail) {
|
||||
// Handle AgentAnswerPiece
|
||||
const { level, level_question_num, answer_piece } = newDetail;
|
||||
// Find or create the relevant SubQuestionDetail
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
|
||||
if (!subQuestion) {
|
||||
subQuestion = {
|
||||
level,
|
||||
level_question_num,
|
||||
question: "",
|
||||
answer: "",
|
||||
sub_queries: [],
|
||||
context_docs: undefined,
|
||||
is_complete: false,
|
||||
};
|
||||
updatedSubQuestions.push(subQuestion);
|
||||
}
|
||||
|
||||
// Append to the answer
|
||||
subQuestion.answer += answer_piece;
|
||||
} else if ("sub_question" in newDetail) {
|
||||
// Handle SubQuestionPiece
|
||||
const { level, level_question_num, sub_question } = newDetail;
|
||||
|
||||
// Find or create the relevant SubQuestionDetail
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
|
||||
if (!subQuestion) {
|
||||
subQuestion = {
|
||||
level,
|
||||
level_question_num,
|
||||
question: "",
|
||||
answer: "",
|
||||
sub_queries: [],
|
||||
context_docs: undefined,
|
||||
is_complete: false,
|
||||
};
|
||||
updatedSubQuestions.push(subQuestion);
|
||||
}
|
||||
|
||||
// Append to the question
|
||||
subQuestion.question += sub_question;
|
||||
} else if ("sub_query" in newDetail) {
|
||||
// Handle SubQueryPiece
|
||||
const { level, level_question_num, query_id, sub_query } = newDetail;
|
||||
|
||||
// Find the relevant SubQuestionDetail
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
|
||||
if (!subQuestion) {
|
||||
// If we receive a sub_query before its parent question, create a placeholder
|
||||
subQuestion = {
|
||||
level,
|
||||
level_question_num: level_question_num,
|
||||
question: "",
|
||||
answer: "",
|
||||
sub_queries: [],
|
||||
context_docs: undefined,
|
||||
};
|
||||
updatedSubQuestions.push(subQuestion);
|
||||
}
|
||||
|
||||
// Find or create the relevant SubQueryDetail
|
||||
let subQuery = subQuestion.sub_queries?.find(
|
||||
(sq) => sq.query_id === query_id
|
||||
);
|
||||
|
||||
if (!subQuery) {
|
||||
subQuery = { query: "", query_id };
|
||||
subQuestion.sub_queries = [...(subQuestion.sub_queries || []), subQuery];
|
||||
}
|
||||
|
||||
// Append to the query
|
||||
subQuery.query += sub_query;
|
||||
}
|
||||
|
||||
return updatedSubQuestions;
|
||||
};
|
||||
|
||||
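The removed constructSubQuestions helper above is a pure fold: each streamed packet (question piece, sub-query piece, answer piece, documents, or stop info) produces a new SubQuestionDetail[] snapshot. A hedged sketch of how such a reducer was driven, assuming an async iterable of packets (the stream source and render call are hypothetical):

// Illustrative only: fold streamed agent packets into sub-question state.
let subQuestions: SubQuestionDetail[] = [];
for await (const packet of packetStream) {
  subQuestions = constructSubQuestions(subQuestions, packet);
  renderSubQuestions(subQuestions); // hypothetical UI update per packet
}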
@@ -1,713 +0,0 @@
|
||||
"use client";
|
||||
|
||||
import { FiChevronRight, FiChevronLeft } from "react-icons/fi";
|
||||
import { FeedbackType } from "../types";
|
||||
import React, {
|
||||
useCallback,
|
||||
useContext,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from "react";
|
||||
import ReactMarkdown from "react-markdown";
|
||||
import { OnyxDocument } from "@/lib/search/interfaces";
|
||||
import remarkGfm from "remark-gfm";
|
||||
import { CopyButton } from "@/components/CopyButton";
|
||||
import {
|
||||
BaseQuestionIdentifier,
|
||||
FileDescriptor,
|
||||
SubQuestionDetail,
|
||||
ToolCallMetadata,
|
||||
} from "../interfaces";
|
||||
import { SEARCH_TOOL_NAME } from "../tools/constants";
|
||||
import { Hoverable, HoverableIcon } from "@/components/Hoverable";
|
||||
import { CodeBlock } from "./CodeBlock";
|
||||
import rehypePrism from "rehype-prism-plus";
|
||||
|
||||
import "prismjs/themes/prism-tomorrow.css";
|
||||
import "./custom-code-styles.css";
|
||||
import { MinimalPersonaSnapshot } from "@/app/admin/assistants/interfaces";
|
||||
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
|
||||
|
||||
import { LikeFeedback, DislikeFeedback } from "@/components/icons/icons";
|
||||
import {
|
||||
CustomTooltip,
|
||||
TooltipGroup,
|
||||
} from "@/components/tooltip/CustomTooltip";
|
||||
import { useMouseTracking } from "./hooks";
|
||||
import { SettingsContext } from "@/components/settings/SettingsProvider";
|
||||
import RegenerateOption from "../RegenerateOption";
|
||||
import { LlmDescriptor } from "@/lib/hooks";
|
||||
import { ContinueGenerating } from "./ContinueMessage";
|
||||
import { MemoizedAnchor, MemoizedParagraph } from "./MemoizedTextComponents";
|
||||
import { extractCodeText, preprocessLaTeX } from "./codeUtils";
|
||||
import { ThinkingBox } from "./thinkingBox/ThinkingBox";
|
||||
import {
|
||||
hasCompletedThinkingTokens,
|
||||
hasPartialThinkingTokens,
|
||||
extractThinkingContent,
|
||||
isThinkingComplete,
|
||||
removeThinkingTokens,
|
||||
} from "../utils/thinkingTokens";
|
||||
|
||||
import remarkMath from "remark-math";
|
||||
import rehypeKatex from "rehype-katex";
|
||||
import "katex/dist/katex.min.css";
|
||||
import SubQuestionsDisplay from "./SubQuestionsDisplay";
|
||||
import { copyAll, handleCopy } from "./copyingUtils";
|
||||
import { ErrorBanner } from "./Resubmit";
|
||||
import { transformLinkUri } from "@/lib/utils";
|
||||
|
||||
export const AgenticMessage = ({
|
||||
isStreamingQuestions,
|
||||
isGenerating,
|
||||
docSidebarToggled,
|
||||
secondLevelAssistantMessage,
|
||||
secondLevelGenerating,
|
||||
regenerate,
|
||||
overriddenModel,
|
||||
continueGenerating,
|
||||
shared,
|
||||
isActive,
|
||||
toggleDocumentSelection,
|
||||
alternativeAssistant,
|
||||
docs,
|
||||
messageId,
|
||||
content,
|
||||
files,
|
||||
query,
|
||||
citedDocuments,
|
||||
toolCall,
|
||||
isComplete,
|
||||
handleFeedback,
|
||||
currentPersona,
|
||||
otherMessagesCanSwitchTo,
|
||||
onMessageSelection,
|
||||
setPresentingDocument,
|
||||
subQuestions,
|
||||
agenticDocs,
|
||||
secondLevelSubquestions,
|
||||
error,
|
||||
resubmit,
|
||||
}: {
|
||||
resubmit?: () => void;
|
||||
isStreamingQuestions: boolean;
|
||||
isGenerating: boolean;
|
||||
docSidebarToggled?: boolean;
|
||||
secondLevelSubquestions?: SubQuestionDetail[] | null;
|
||||
agenticDocs?: OnyxDocument[] | null;
|
||||
secondLevelGenerating?: boolean;
|
||||
secondLevelAssistantMessage?: string;
|
||||
subQuestions: SubQuestionDetail[] | null;
|
||||
shared?: boolean;
|
||||
isActive?: boolean;
|
||||
continueGenerating?: () => void;
|
||||
otherMessagesCanSwitchTo?: number[];
|
||||
onMessageSelection?: (messageId: number) => void;
|
||||
toggleDocumentSelection?: (second: boolean) => void;
|
||||
docs?: OnyxDocument[] | null;
|
||||
alternativeAssistant?: MinimalPersonaSnapshot | null;
|
||||
currentPersona: MinimalPersonaSnapshot;
|
||||
messageId: number | null;
|
||||
content: string | JSX.Element;
|
||||
files?: FileDescriptor[];
|
||||
query?: string;
|
||||
citedDocuments?: [string, OnyxDocument][] | null;
|
||||
toolCall?: ToolCallMetadata | null;
|
||||
isComplete?: boolean;
|
||||
handleFeedback?: (feedbackType: FeedbackType) => void;
|
||||
overriddenModel?: string;
|
||||
regenerate?: (modelOverRide: LlmDescriptor) => Promise<void>;
|
||||
setPresentingDocument?: (document: OnyxDocument) => void;
|
||||
error?: string | null;
|
||||
}) => {
|
||||
const [lastKnownContentLength, setLastKnownContentLength] = useState(0);
|
||||
|
||||
const [allowStreaming, setAllowStreaming] = useState(isComplete);
|
||||
const [allowDocuments, setAllowDocuments] = useState(isComplete);
|
||||
|
||||
const alternativeContent = secondLevelAssistantMessage || "";
|
||||
|
||||
const processContent = (incoming: string | JSX.Element) => {
|
||||
if (typeof incoming !== "string") return incoming;
|
||||
|
||||
let processed = incoming;
|
||||
|
||||
// Apply thinking tokens processing first
|
||||
if (
|
||||
hasCompletedThinkingTokens(processed) ||
|
||||
hasPartialThinkingTokens(processed)
|
||||
) {
|
||||
processed = removeThinkingTokens(processed) as string;
|
||||
}
|
||||
|
||||
const codeBlockRegex = /```(\w*)\n[\s\S]*?```|```[\s\S]*?$/g;
|
||||
const matches = processed.match(codeBlockRegex);
|
||||
if (matches) {
|
||||
processed = matches.reduce((acc, match) => {
|
||||
if (!match.match(/```\w+/)) {
|
||||
return acc.replace(match, match.replace("```", "```plaintext"));
|
||||
}
|
||||
return acc;
|
||||
}, processed);
|
||||
|
||||
const lastMatch = matches[matches.length - 1];
|
||||
if (lastMatch && !lastMatch.endsWith("```")) {
|
||||
processed = preprocessLaTeX(processed);
|
||||
}
|
||||
}
|
||||
|
||||
processed = processed.replace(/\[([QD])(\d+)\]/g, (match, type, number) => {
|
||||
const citationNumber = parseInt(number, 10);
|
||||
return `[[${type}${citationNumber}]]()`;
|
||||
});
|
||||
|
||||
processed = processed.replace(/\{\{(\d+)\}\}/g, (match, p1) => {
|
||||
const citationNumber = parseInt(p1, 10);
|
||||
return `[[${citationNumber}]]()`;
|
||||
});
|
||||
|
||||
processed = processed.replace(/\]\](?!\()/g, "]]()");
|
||||
|
||||
return preprocessLaTeX(processed);
|
||||
};
|
||||
|
||||
const [streamedContent, setStreamedContent] = useState(
|
||||
processContent(content) as string
|
||||
);
|
||||
const finalContent = processContent(content) as string;
|
||||
const finalAlternativeContent = processContent(alternativeContent) as string;
|
||||
|
||||
// Check if content contains thinking tokens
|
||||
const hasThinkingTokens = useMemo(() => {
|
||||
return (
|
||||
hasCompletedThinkingTokens(content) || hasPartialThinkingTokens(content)
|
||||
);
|
||||
}, [content]);
|
||||
|
||||
// Extract thinking content
|
||||
const thinkingContent = useMemo(() => {
|
||||
if (!hasThinkingTokens) return "";
|
||||
return extractThinkingContent(content);
|
||||
}, [content, hasThinkingTokens]);
|
||||
|
||||
// Track if thinking is complete
|
||||
const isThinkingTokenComplete = useMemo(() => {
|
||||
return isThinkingComplete(thinkingContent);
|
||||
}, [thinkingContent]);
|
||||
|
||||
// Enable streaming when thinking tokens are detected
|
||||
useEffect(() => {
|
||||
if (hasThinkingTokens) {
|
||||
setAllowStreaming(true);
|
||||
}
|
||||
}, [hasThinkingTokens]);
|
||||
|
||||
const isViewingInitialAnswer = true;
|
||||
|
||||
const [isRegenerateDropdownVisible, setIsRegenerateDropdownVisible] =
|
||||
useState(false);
|
||||
|
||||
const { isHovering, trackedElementRef, hoverElementRef } = useMouseTracking();
|
||||
|
||||
const settings = useContext(SettingsContext);
|
||||
|
||||
const citedDocumentIds: string[] = [];
|
||||
|
||||
citedDocuments?.forEach((doc) => {
|
||||
citedDocumentIds.push(doc[1].document_id);
|
||||
});
|
||||
|
||||
if (!isComplete) {
|
||||
const trimIncompleteCodeSection = (
|
||||
content: string | JSX.Element
|
||||
): string | JSX.Element => {
|
||||
if (typeof content === "string") {
|
||||
const pattern = /```[a-zA-Z]+[^\s]*$/;
|
||||
const match = content.match(pattern);
|
||||
if (match && match.index && match.index > 3) {
|
||||
const newContent = content.slice(0, match.index - 3);
|
||||
return newContent;
|
||||
}
|
||||
return content;
|
||||
}
|
||||
return content;
|
||||
};
|
||||
content = trimIncompleteCodeSection(content);
|
||||
}
|
||||
|
||||
const paragraphCallback = useCallback(
|
||||
(props: any, fontSize: "sm" | "base" = "base") => (
|
||||
<MemoizedParagraph fontSize={fontSize}>
|
||||
{props.children}
|
||||
</MemoizedParagraph>
|
||||
),
|
||||
[]
|
||||
);
|
||||
const [currentlyOpenQuestion, setCurrentlyOpenQuestion] =
|
||||
useState<BaseQuestionIdentifier | null>(null);
|
||||
const [finishedGenerating, setFinishedGenerating] = useState(!isGenerating);
|
||||
|
||||
useEffect(() => {
|
||||
if (streamedContent.length == finalContent.length && !isGenerating) {
|
||||
setFinishedGenerating(true);
|
||||
}
|
||||
}, [streamedContent, finalContent, isGenerating]);
|
||||
|
||||
const openQuestion = useCallback(
|
||||
(question: SubQuestionDetail) => {
|
||||
setCurrentlyOpenQuestion({
|
||||
level: question.level,
|
||||
level_question_num: question.level_question_num,
|
||||
});
|
||||
setTimeout(() => {
|
||||
setCurrentlyOpenQuestion(null);
|
||||
}, 1000);
|
||||
},
|
||||
[currentlyOpenQuestion]
|
||||
);
|
||||
|
||||
const anchorCallback = useCallback(
|
||||
(props: any) => (
|
||||
<MemoizedAnchor
|
||||
updatePresentingDocument={setPresentingDocument!}
|
||||
docs={
|
||||
isViewingInitialAnswer
|
||||
? docs && docs.length > 0
|
||||
? docs
|
||||
: agenticDocs
|
||||
: agenticDocs && agenticDocs.length > 0
|
||||
? agenticDocs
|
||||
: docs
|
||||
}
|
||||
subQuestions={[
|
||||
...(subQuestions || []),
|
||||
...(secondLevelSubquestions || []),
|
||||
]}
|
||||
openQuestion={openQuestion}
|
||||
href={props.href}
|
||||
>
|
||||
{props.children}
|
||||
</MemoizedAnchor>
|
||||
),
|
||||
[docs, agenticDocs, isViewingInitialAnswer]
|
||||
);
|
||||
|
||||
const currentMessageInd = messageId
|
||||
? otherMessagesCanSwitchTo?.indexOf(messageId)
|
||||
: undefined;
|
||||
|
||||
const markdownComponents = useMemo(
|
||||
() => ({
|
||||
a: anchorCallback,
|
||||
p: paragraphCallback,
|
||||
code: ({ node, className, children }: any) => {
|
||||
const codeText = extractCodeText(node, streamedContent, children);
|
||||
return (
|
||||
<CodeBlock className={className} codeText={codeText}>
|
||||
{children}
|
||||
</CodeBlock>
|
||||
);
|
||||
},
|
||||
}),
|
||||
[anchorCallback, paragraphCallback, streamedContent]
|
||||
);
|
||||
|
||||
const markdownRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
const renderedAlternativeMarkdown = useMemo(() => {
|
||||
return (
|
||||
<ReactMarkdown
|
||||
className="prose dark:prose-invert max-w-full text-base"
|
||||
components={{
|
||||
...markdownComponents,
|
||||
code: ({ node, className, children }: any) => {
|
||||
const altCode = extractCodeText(
|
||||
node,
|
||||
finalAlternativeContent,
|
||||
children
|
||||
);
|
||||
return (
|
||||
<CodeBlock className={className} codeText={altCode}>
|
||||
{children}
|
||||
</CodeBlock>
|
||||
);
|
||||
},
|
||||
}}
|
||||
remarkPlugins={[remarkGfm, remarkMath]}
|
||||
rehypePlugins={[[rehypePrism, { ignoreMissing: true }], rehypeKatex]}
|
||||
urlTransform={transformLinkUri}
|
||||
>
|
||||
{finalAlternativeContent}
|
||||
</ReactMarkdown>
|
||||
);
|
||||
}, [markdownComponents, finalAlternativeContent]);
|
||||
|
||||
const renderedMarkdown = useMemo(() => {
|
||||
return (
|
||||
<ReactMarkdown
|
||||
className="prose dark:prose-invert max-w-full text-base"
|
||||
components={markdownComponents}
|
||||
remarkPlugins={[remarkGfm, remarkMath]}
|
||||
rehypePlugins={[[rehypePrism, { ignoreMissing: true }], rehypeKatex]}
|
||||
urlTransform={transformLinkUri}
|
||||
>
|
||||
{streamedContent +
|
||||
(!isComplete && !secondLevelGenerating ? " [*]() " : "")}
|
||||
</ReactMarkdown>
|
||||
);
|
||||
}, [streamedContent, markdownComponents, isComplete]);
|
||||
|
||||
const includeMessageSwitcher =
|
||||
currentMessageInd !== undefined &&
|
||||
onMessageSelection &&
|
||||
otherMessagesCanSwitchTo &&
|
||||
otherMessagesCanSwitchTo.length > 1;
|
||||
|
||||
let otherMessage: number | undefined = undefined;
|
||||
if (currentMessageInd && otherMessagesCanSwitchTo) {
|
||||
otherMessage = otherMessagesCanSwitchTo[currentMessageInd - 1];
|
||||
}
|
||||
|
||||
useEffect(() => {
|
||||
if (!allowStreaming) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (typeof finalContent !== "string") return;
|
||||
|
||||
let intervalId: NodeJS.Timeout | null = null;
|
||||
|
||||
intervalId = setInterval(() => {
|
||||
setStreamedContent((prev) => {
|
||||
if (prev.length < finalContent.length) {
|
||||
const nextLength = Math.min(prev.length + 5, finalContent.length);
|
||||
return finalContent.slice(0, nextLength);
|
||||
} else {
|
||||
if (intervalId) clearInterval(intervalId);
|
||||
return finalContent;
|
||||
}
|
||||
});
|
||||
}, 10);
|
||||
|
||||
return () => {
|
||||
if (intervalId) clearInterval(intervalId);
|
||||
setLastKnownContentLength(finalContent.length);
|
||||
};
|
||||
}, [
|
||||
allowStreaming,
|
||||
finalContent,
|
||||
streamedContent,
|
||||
content,
|
||||
lastKnownContentLength,
|
||||
]);
|
||||
|
||||
return (
|
||||
<div
|
||||
id="onyx-ai-message"
|
||||
ref={trackedElementRef}
|
||||
className={`py-5 ml-4 lg:px-5 relative flex flex-col`}
|
||||
>
|
||||
<div
|
||||
className={`mx-auto ${shared ? "w-full" : "w-[90%]"} max-w-message-max`}
|
||||
>
|
||||
<div className={`lg:mr-12 ${!shared && "mobile:ml-0 md:ml-8"}`}>
|
||||
<div className="flex items-start">
|
||||
<AssistantIcon
|
||||
className="mobile:hidden"
|
||||
size={24}
|
||||
assistant={alternativeAssistant || currentPersona}
|
||||
/>
|
||||
|
||||
<div className="w-full">
|
||||
<div className="max-w-message-max break-words">
|
||||
<div className="w-full desktop:ml-4">
|
||||
{subQuestions && subQuestions.length > 0 && (
|
||||
<SubQuestionsDisplay
|
||||
isStreamingQuestions={isStreamingQuestions}
|
||||
allowDocuments={() => setAllowDocuments(true)}
|
||||
docSidebarToggled={docSidebarToggled || false}
|
||||
finishedGenerating={finishedGenerating}
|
||||
overallAnswerGenerating={
|
||||
!!(
|
||||
secondLevelSubquestions &&
|
||||
secondLevelSubquestions.length > 0 &&
|
||||
finalContent.length < 8
|
||||
)
|
||||
}
|
||||
showSecondLevel={!isViewingInitialAnswer}
|
||||
currentlyOpenQuestion={currentlyOpenQuestion}
|
||||
allowStreaming={() => setAllowStreaming(true)}
|
||||
subQuestions={subQuestions}
|
||||
secondLevelQuestions={secondLevelSubquestions || []}
|
||||
documents={
|
||||
!allowDocuments
|
||||
? []
|
||||
: isViewingInitialAnswer
|
||||
? docs!
|
||||
: agenticDocs!
|
||||
}
|
||||
toggleDocumentSelection={() => {
|
||||
toggleDocumentSelection!(!isViewingInitialAnswer);
|
||||
}}
|
||||
setPresentingDocument={setPresentingDocument!}
|
||||
unToggle={false}
|
||||
/>
|
||||
)}
|
||||
{/* Render thinking box if thinking tokens exist */}
|
||||
{hasThinkingTokens && thinkingContent && (
|
||||
<div className="mb-2 mt-1">
|
||||
<ThinkingBox
|
||||
content={thinkingContent}
|
||||
isComplete={isComplete || false}
|
||||
isStreaming={!isThinkingTokenComplete || !isComplete}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
{/* For debugging purposes */}
|
||||
{/* <SubQuestionProgress subQuestions={subQuestions || []} /> */}
|
||||
{/* */}
|
||||
{(allowStreaming &&
|
||||
finalContent &&
|
||||
finalContent.length > 8) ||
|
||||
(files && files.length > 0) ? (
|
||||
<>
|
||||
<div className="w-full py-4 flex flex-col gap-4">
|
||||
<div className="flex items-center gap-x-2 px-4">
|
||||
<div className="text-black text-lg font-medium">
|
||||
Answer
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="px-4">
|
||||
{typeof content === "string" ? (
|
||||
<div
|
||||
onCopy={(e) => handleCopy(e, markdownRef)}
|
||||
ref={markdownRef}
|
||||
className="overflow-x-visible !text-sm max-w-content-max"
|
||||
>
|
||||
{isViewingInitialAnswer
|
||||
? renderedMarkdown
|
||||
: renderedAlternativeMarkdown}
|
||||
</div>
|
||||
) : (
|
||||
content
|
||||
)}
|
||||
{error && (
|
||||
<ErrorBanner error={error} resubmit={resubmit} />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</>
|
||||
) : isComplete ? (
|
||||
error && (
|
||||
<p className="mt-2 mx-4 text-red-700 text-sm my-auto">
|
||||
<ErrorBanner error={error} resubmit={resubmit} />
|
||||
</p>
|
||||
)
|
||||
) : (
|
||||
<>
|
||||
{error && (
|
||||
<ErrorBanner error={error} resubmit={resubmit} />
|
||||
)}
|
||||
</>
|
||||
)}
|
||||
{handleFeedback &&
|
||||
(isActive ? (
|
||||
<div
|
||||
className={`
|
||||
flex md:flex-row gap-x-0.5 mt-1
|
||||
transition-transform duration-300 ease-in-out
|
||||
transform opacity-100 translate-y-0"
|
||||
`}
|
||||
>
|
||||
<TooltipGroup>
|
||||
<div className="flex justify-start w-full gap-x-0.5">
|
||||
{includeMessageSwitcher &&
|
||||
otherMessage !== undefined && (
|
||||
<div className="-mx-1 mr-auto">
|
||||
<MessageSwitcher
|
||||
currentPage={currentMessageInd + 1}
|
||||
totalPages={otherMessagesCanSwitchTo.length}
|
||||
handlePrevious={() => {
|
||||
onMessageSelection(otherMessage!);
|
||||
}}
|
||||
handleNext={() => {
|
||||
onMessageSelection(otherMessage!);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<CustomTooltip showTick line content="Copy">
|
||||
<CopyButton
|
||||
copyAllFn={() =>
|
||||
copyAll(
|
||||
(isViewingInitialAnswer
|
||||
? finalContent
|
||||
: finalAlternativeContent) as string,
|
||||
markdownRef
|
||||
)
|
||||
}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
<CustomTooltip showTick line content="Good response">
|
||||
<HoverableIcon
|
||||
icon={<LikeFeedback />}
|
||||
onClick={() => handleFeedback("like")}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
<CustomTooltip showTick line content="Bad response">
|
||||
<HoverableIcon
|
||||
icon={<DislikeFeedback size={16} />}
|
||||
onClick={() => handleFeedback("dislike")}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
{regenerate && (
|
||||
<CustomTooltip
|
||||
disabled={isRegenerateDropdownVisible}
|
||||
showTick
|
||||
line
|
||||
content="Regenerate"
|
||||
>
|
||||
<RegenerateOption
|
||||
onDropdownVisibleChange={
|
||||
setIsRegenerateDropdownVisible
|
||||
}
|
||||
selectedAssistant={currentPersona!}
|
||||
regenerate={regenerate}
|
||||
overriddenModel={overriddenModel}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
)}
|
||||
</TooltipGroup>
|
||||
</div>
|
||||
) : (
|
||||
<div
|
||||
ref={hoverElementRef}
|
||||
className={`
|
||||
absolute -bottom-5
|
||||
z-10
|
||||
invisible ${
|
||||
(isHovering || settings?.isMobile) && "!visible"
|
||||
}
|
||||
opacity-0 ${
|
||||
(isHovering || settings?.isMobile) && "!opacity-100"
|
||||
}
|
||||
translate-y-2 ${
|
||||
(isHovering || settings?.isMobile) &&
|
||||
"!translate-y-0"
|
||||
}
|
||||
transition-transform duration-300 ease-in-out
|
||||
flex md:flex-row gap-x-0.5 bg-background-125/40 -mx-1.5 p-1.5 rounded-lg
|
||||
`}
|
||||
>
|
||||
<TooltipGroup>
|
||||
<div className="flex justify-start w-full gap-x-0.5">
|
||||
{includeMessageSwitcher &&
|
||||
otherMessage !== undefined && (
|
||||
<div className="-mx-1 mr-auto">
|
||||
<MessageSwitcher
|
||||
currentPage={currentMessageInd + 1}
|
||||
totalPages={otherMessagesCanSwitchTo.length}
|
||||
handlePrevious={() => {
|
||||
onMessageSelection(otherMessage!);
|
||||
}}
|
||||
handleNext={() => {
|
||||
onMessageSelection(otherMessage!);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<CustomTooltip showTick line content="Copy">
|
||||
<CopyButton
|
||||
copyAllFn={() =>
|
||||
copyAll(
|
||||
(isViewingInitialAnswer
|
||||
? finalContent
|
||||
: finalAlternativeContent) as string,
|
||||
markdownRef
|
||||
)
|
||||
}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
|
||||
<CustomTooltip showTick line content="Good response">
|
||||
<HoverableIcon
|
||||
icon={<LikeFeedback />}
|
||||
onClick={() => handleFeedback("like")}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
|
||||
<CustomTooltip showTick line content="Bad response">
|
||||
<HoverableIcon
|
||||
icon={<DislikeFeedback size={16} />}
|
||||
onClick={() => handleFeedback("dislike")}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
{regenerate && (
|
||||
<CustomTooltip
|
||||
disabled={isRegenerateDropdownVisible}
|
||||
showTick
|
||||
line
|
||||
content="Regenerate"
|
||||
>
|
||||
<RegenerateOption
|
||||
selectedAssistant={currentPersona!}
|
||||
onDropdownVisibleChange={
|
||||
setIsRegenerateDropdownVisible
|
||||
}
|
||||
regenerate={regenerate}
|
||||
overriddenModel={overriddenModel}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
)}
|
||||
</TooltipGroup>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{(!toolCall || toolCall.tool_name === SEARCH_TOOL_NAME) &&
|
||||
!query &&
|
||||
continueGenerating && (
|
||||
<ContinueGenerating handleContinueGenerating={continueGenerating} />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
function MessageSwitcher({
|
||||
currentPage,
|
||||
totalPages,
|
||||
handlePrevious,
|
||||
handleNext,
|
||||
}: {
|
||||
currentPage: number;
|
||||
totalPages: number;
|
||||
handlePrevious: () => void;
|
||||
handleNext: () => void;
|
||||
}) {
|
||||
return (
|
||||
<div className="flex items-center text-sm space-x-0.5">
|
||||
<Hoverable
|
||||
icon={FiChevronLeft}
|
||||
onClick={currentPage === 1 ? undefined : handlePrevious}
|
||||
/>
|
||||
|
||||
<span className="text-text-darker select-none">
|
||||
{currentPage} / {totalPages}
|
||||
</span>
|
||||
|
||||
<Hoverable
|
||||
icon={FiChevronRight}
|
||||
onClick={currentPage === totalPages ? undefined : handleNext}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
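The deleted AgenticMessage component above reveals finalContent a few characters at a time via setInterval once allowStreaming flips on. A minimal standalone sketch of that typewriter-style reveal (the hook name and tick parameters are illustrative; unlike the original, this version simply keeps returning the full text once it has caught up):

import { useEffect, useState } from "react";

// Illustrative sketch: progressively reveal text in fixed-size steps.
export function useTypewriter(
  finalText: string,
  enabled: boolean,
  step = 5,
  tickMs = 10
) {
  const [shown, setShown] = useState("");

  useEffect(() => {
    if (!enabled) return;
    const id = setInterval(() => {
      setShown((prev) =>
        prev.length < finalText.length
          ? finalText.slice(0, Math.min(prev.length + step, finalText.length))
          : finalText
      );
    }, tickMs);
    return () => clearInterval(id);
  }, [finalText, enabled, step, tickMs]);

  return shown;
}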
9
web/src/app/chat/message/BlinkingDot.tsx
Normal file
@@ -0,0 +1,9 @@
import React from "react";

export function BlinkingDot() {
  return (
    <span className="inline-flex items-center">
      <span className="animate-pulse text-text-600 dark:text-text-400">●</span>
    </span>
  );
}
80
web/src/app/chat/message/MessageSwitcher.tsx
Normal file
@@ -0,0 +1,80 @@
import { Hoverable } from "@/components/Hoverable";
import {
  Tooltip,
  TooltipContent,
  TooltipProvider,
  TooltipTrigger,
} from "@/components/ui/tooltip";
import { FiChevronLeft, FiChevronRight } from "react-icons/fi";

interface MessageSwitcherProps {
  currentPage: number;
  totalPages: number;
  handlePrevious: () => void;
  handleNext: () => void;
  disableForStreaming?: boolean;
}

export function MessageSwitcher({
  currentPage,
  totalPages,
  handlePrevious,
  handleNext,
  disableForStreaming,
}: MessageSwitcherProps) {
  return (
    <div className="flex items-center text-sm space-x-0.5">
      <TooltipProvider>
        <Tooltip>
          <TooltipTrigger asChild>
            <div>
              <Hoverable
                icon={FiChevronLeft}
                onClick={
                  disableForStreaming
                    ? () => null
                    : currentPage === 1
                      ? undefined
                      : handlePrevious
                }
              />
            </div>
          </TooltipTrigger>
          <TooltipContent>
            {disableForStreaming
              ? "Wait for agent message to complete"
              : "Previous"}
          </TooltipContent>
        </Tooltip>
      </TooltipProvider>

      <span className="text-text-darker select-none">
        {currentPage} / {totalPages}
      </span>

      <TooltipProvider>
        <Tooltip>
          <TooltipTrigger>
            <div>
              <Hoverable
                icon={FiChevronRight}
                onClick={
                  disableForStreaming
                    ? () => null
                    : currentPage === totalPages
                      ? undefined
                      : handleNext
                }
              />
            </div>
          </TooltipTrigger>
          <TooltipContent>
            {disableForStreaming
              ? "Wait for agent message to complete"
              : "Next"}
          </TooltipContent>
        </Tooltip>
      </TooltipProvider>
    </div>
  );
}
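For reference, a hypothetical consumer of the extracted MessageSwitcher; the sibling-message ids, index, and streaming flag below are assumptions rather than code from this change:

// Illustrative wrapper showing how the props are typically wired.
function MessageSwitcherExample({
  ids,
  index,
  isStreaming,
  onMessageSelection,
}: {
  ids: number[];
  index: number;
  isStreaming: boolean;
  onMessageSelection: (id: number) => void;
}) {
  return (
    <MessageSwitcher
      currentPage={index + 1}
      totalPages={ids.length}
      handlePrevious={() => onMessageSelection(ids[index - 1])}
      handleNext={() => onMessageSelection(ids[index + 1])}
      disableForStreaming={isStreaming}
    />
  );
}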
@@ -1,93 +1,24 @@
|
||||
"use client";
|
||||
|
||||
import {
|
||||
FiEdit2,
|
||||
FiChevronRight,
|
||||
FiChevronLeft,
|
||||
FiTool,
|
||||
FiGlobe,
|
||||
} from "react-icons/fi";
|
||||
import { FeedbackType } from "../types";
|
||||
import React, {
|
||||
useCallback,
|
||||
useContext,
|
||||
useEffect,
|
||||
useMemo,
|
||||
useRef,
|
||||
useState,
|
||||
} from "react";
|
||||
import ReactMarkdown from "react-markdown";
|
||||
import {
|
||||
OnyxDocument,
|
||||
FilteredOnyxDocument,
|
||||
MinimalOnyxDocument,
|
||||
} from "@/lib/search/interfaces";
|
||||
import { SearchSummary, UserKnowledgeFiles } from "./SearchSummary";
|
||||
import { SkippedSearch } from "./SkippedSearch";
|
||||
import remarkGfm from "remark-gfm";
|
||||
import { CopyButton } from "@/components/CopyButton";
|
||||
import { ChatFileType, FileDescriptor, ToolCallMetadata } from "../interfaces";
|
||||
import {
|
||||
IMAGE_GENERATION_TOOL_NAME,
|
||||
SEARCH_TOOL_NAME,
|
||||
INTERNET_SEARCH_TOOL_NAME,
|
||||
} from "../tools/constants";
|
||||
import { ToolRunDisplay } from "../tools/ToolRunningAnimation";
|
||||
import { FiEdit2 } from "react-icons/fi";
|
||||
import React, { useEffect, useRef, useState } from "react";
|
||||
import { MinimalOnyxDocument } from "@/lib/search/interfaces";
|
||||
import { ChatFileType, FileDescriptor } from "@/app/chat/interfaces";
|
||||
import { Hoverable, HoverableIcon } from "@/components/Hoverable";
|
||||
import { DocumentPreview } from "../files/documents/DocumentPreview";
|
||||
import { InMessageImage } from "../files/images/InMessageImage";
|
||||
import { CodeBlock } from "./CodeBlock";
|
||||
import rehypePrism from "rehype-prism-plus";
|
||||
import { DocumentPreview } from "../components/files/documents/DocumentPreview";
|
||||
import { InMessageImage } from "../components/files/images/InMessageImage";
|
||||
import "prismjs/themes/prism-tomorrow.css";
|
||||
import "./custom-code-styles.css";
|
||||
import { MinimalPersonaSnapshot } from "@/app/admin/assistants/interfaces";
|
||||
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
|
||||
import { LikeFeedback, DislikeFeedback } from "@/components/icons/icons";
|
||||
import {
|
||||
CustomTooltip,
|
||||
TooltipGroup,
|
||||
} from "@/components/tooltip/CustomTooltip";
|
||||
import {
|
||||
Tooltip,
|
||||
TooltipContent,
|
||||
TooltipProvider,
|
||||
TooltipTrigger,
|
||||
} from "@/components/ui/tooltip";
|
||||
import { useMouseTracking } from "./hooks";
|
||||
import { SettingsContext } from "@/components/settings/SettingsProvider";
|
||||
import GeneratingImageDisplay from "../tools/GeneratingImageDisplay";
|
||||
import RegenerateOption from "../RegenerateOption";
|
||||
import { LlmDescriptor } from "@/lib/hooks";
|
||||
import { ContinueGenerating } from "./ContinueMessage";
|
||||
import { MemoizedAnchor, MemoizedParagraph } from "./MemoizedTextComponents";
|
||||
import { extractCodeText, preprocessLaTeX } from "./codeUtils";
|
||||
import ToolResult from "../../../components/tools/ToolResult";
|
||||
import CsvContent from "../../../components/tools/CSVContent";
|
||||
import {
|
||||
FilesSeeMoreBlock,
|
||||
SeeMoreBlock,
|
||||
} from "@/components/chat/sources/SourceCard";
|
||||
import { FileSourceCard, SourceCard } from "./SourcesDisplay";
|
||||
import remarkMath from "remark-math";
|
||||
import rehypeKatex from "rehype-katex";
|
||||
import "katex/dist/katex.min.css";
|
||||
import { copyAll, handleCopy } from "./copyingUtils";
|
||||
import { transformLinkUri } from "@/lib/utils";
|
||||
import { ThinkingBox } from "./thinkingBox/ThinkingBox";
|
||||
import {
|
||||
hasCompletedThinkingTokens,
|
||||
hasPartialThinkingTokens,
|
||||
extractThinkingContent,
|
||||
isThinkingComplete,
|
||||
removeThinkingTokens,
|
||||
} from "../utils/thinkingTokens";
|
||||
import { FileResponse } from "../my-documents/DocumentsContext";
|
||||
|
||||
const TOOLS_WITH_CUSTOM_HANDLING = [
|
||||
SEARCH_TOOL_NAME,
|
||||
INTERNET_SEARCH_TOOL_NAME,
|
||||
IMAGE_GENERATION_TOOL_NAME,
|
||||
];
|
||||
import { MessageSwitcher } from "./MessageSwitcher";
|
||||
|
||||
function FileDisplay({
|
||||
files,
|
||||
@@ -99,7 +30,6 @@ function FileDisplay({
|
||||
setPresentingDocument: (document: MinimalOnyxDocument) => void;
|
||||
}) {
|
||||
const [close, setClose] = useState(true);
|
||||
const [expandedKnowledge, setExpandedKnowledge] = useState(false);
|
||||
const imageFiles = files.filter((file) => file.type === ChatFileType.IMAGE);
|
||||
const textFiles = files.filter(
|
||||
(file) => file.type == ChatFileType.PLAIN_TEXT
|
||||
@@ -173,790 +103,6 @@ function FileDisplay({
|
||||
);
|
||||
}
|
||||
|
||||
function FileResponseDisplay({
|
||||
files,
|
||||
alignBubble,
|
||||
setPresentingDocument,
|
||||
}: {
|
||||
files: FileResponse[];
|
||||
alignBubble?: boolean;
|
||||
setPresentingDocument: (document: MinimalOnyxDocument) => void;
|
||||
}) {
|
||||
if (!files || files.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
id="onyx-file-response"
|
||||
className={`${alignBubble && "ml-auto"} mt-2 auto mb-4`}
|
||||
>
|
||||
<div className="flex flex-col gap-2">
|
||||
{files.map((file) => {
|
||||
return (
|
||||
<div key={file.id} className="w-fit">
|
||||
<DocumentPreview
|
||||
fileName={file.name || file.document_id}
|
||||
alignBubble={alignBubble}
|
||||
open={() =>
|
||||
setPresentingDocument({
|
||||
document_id: file.document_id,
|
||||
semantic_identifier: file.name || file.document_id,
|
||||
})
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export const AIMessage = ({
|
||||
userKnowledgeFiles = [],
|
||||
regenerate,
|
||||
overriddenModel,
|
||||
continueGenerating,
|
||||
shared,
|
||||
isActive,
|
||||
toggleDocumentSelection,
|
||||
alternativeAssistant,
|
||||
docs,
|
||||
messageId,
|
||||
content,
|
||||
files,
|
||||
selectedDocuments,
|
||||
query,
|
||||
citedDocuments,
|
||||
toolCall,
|
||||
isComplete,
|
||||
hasDocs,
|
||||
handleFeedback,
|
||||
handleSearchQueryEdit,
|
||||
handleForceSearch,
|
||||
retrievalDisabled,
|
||||
currentPersona,
|
||||
otherMessagesCanSwitchTo,
|
||||
onMessageSelection,
|
||||
setPresentingDocument,
|
||||
index,
|
||||
documentSidebarVisible,
|
||||
removePadding,
|
||||
}: {
|
||||
userKnowledgeFiles?: FileResponse[];
|
||||
index?: number;
|
||||
shared?: boolean;
|
||||
isActive?: boolean;
|
||||
continueGenerating?: () => void;
|
||||
otherMessagesCanSwitchTo?: number[];
|
||||
onMessageSelection?: (messageId: number) => void;
|
||||
selectedDocuments?: OnyxDocument[] | null;
|
||||
toggleDocumentSelection?: () => void;
|
||||
docs?: OnyxDocument[] | null;
|
||||
alternativeAssistant?: MinimalPersonaSnapshot | null;
|
||||
currentPersona: MinimalPersonaSnapshot;
|
||||
messageId: number | null;
|
||||
content: string | JSX.Element;
|
||||
files?: FileDescriptor[];
|
||||
query?: string;
|
||||
citedDocuments?: [string, OnyxDocument][] | null;
|
||||
toolCall?: ToolCallMetadata | null;
|
||||
isComplete?: boolean;
|
||||
documentSidebarVisible?: boolean;
|
||||
hasDocs?: boolean;
|
||||
handleFeedback?: (feedbackType: FeedbackType) => void;
|
||||
handleSearchQueryEdit?: (query: string) => void;
|
||||
handleForceSearch?: () => void;
|
||||
retrievalDisabled?: boolean;
|
||||
overriddenModel?: string;
|
||||
regenerate?: (modelOverRide: LlmDescriptor) => Promise<void>;
|
||||
setPresentingDocument: (document: MinimalOnyxDocument) => void;
|
||||
removePadding?: boolean;
|
||||
}) => {
|
||||
const toolCallGenerating = toolCall && !toolCall.tool_result;
|
||||
|
||||
// Check if content contains thinking tokens (complete or partial)
|
||||
const hasThinkingTokens = useMemo(() => {
|
||||
return (
|
||||
hasCompletedThinkingTokens(content) || hasPartialThinkingTokens(content)
|
||||
);
|
||||
}, [content]);
|
||||
|
||||
// Extract thinking content
|
||||
const thinkingContent = useMemo(() => {
|
||||
if (!hasThinkingTokens) return "";
|
||||
return extractThinkingContent(content);
|
||||
}, [content, hasThinkingTokens]);
|
||||
|
||||
// Track if thinking is complete
|
||||
const isThinkingTokenComplete = useMemo(() => {
|
||||
return isThinkingComplete(thinkingContent);
|
||||
}, [thinkingContent]);
|
||||
|
||||
// Extract final content (remove thinking tokens)
|
||||
const finalContent = useMemo(() => {
|
||||
if (!hasThinkingTokens) return content;
|
||||
return removeThinkingTokens(content);
|
||||
}, [content, hasThinkingTokens]);
|
||||
|
||||
// Only show the message content when we've completed the thinking section
|
||||
// or there are no thinking tokens to begin with
|
||||
const shouldShowContent = useMemo(() => {
|
||||
if (!hasThinkingTokens) return true;
|
||||
|
||||
// If the message is complete, we always show the content
|
||||
if (isComplete) return true;
|
||||
|
||||
// If thinking is not complete, we don't show the content yet
|
||||
if (!isThinkingTokenComplete) return false;
|
||||
|
||||
// If thinking is complete but we're not done with the message yet,
|
||||
// only show the content if there's actually something to show
|
||||
const cleanedContent =
|
||||
typeof finalContent === "string" ? finalContent.trim() : finalContent;
|
||||
|
||||
return !!cleanedContent && cleanedContent !== "";
|
||||
}, [hasThinkingTokens, isComplete, isThinkingTokenComplete, finalContent]);
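// Summary of the gate above (descriptive comment only):
//   no thinking tokens                 -> show content
//   message already complete           -> show content
//   thinking still streaming           -> hide content
//   thinking done, message still going -> show only once the cleaned content is non-empty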
|
||||
|
||||
const processContent = (content: string | JSX.Element) => {
|
||||
if (typeof content !== "string") {
|
||||
return content;
|
||||
}
|
||||
|
||||
const codeBlockRegex = /```(\w*)\n[\s\S]*?```|```[\s\S]*?$/g;
|
||||
const matches = content.match(codeBlockRegex);
|
||||
|
||||
if (matches) {
|
||||
content = matches.reduce((acc, match) => {
|
||||
if (!match.match(/```\w+/)) {
|
||||
return acc.replace(match, match.replace("```", "```plaintext"));
|
||||
}
|
||||
return acc;
|
||||
}, content);
|
||||
|
||||
const lastMatch = matches[matches.length - 1];
|
||||
if (lastMatch && !lastMatch.endsWith("```")) {
|
||||
return preprocessLaTeX(content);
|
||||
}
|
||||
}
|
||||
const processed = preprocessLaTeX(content);
|
||||
|
||||
return processed + (!isComplete && !toolCallGenerating ? " [*]() " : "");
|
||||
};
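// Illustrative standalone sketch of the fence normalization above (the helper
// name `normalizeBareFences` is ours, not part of this file): fences opened
// with a bare ``` are tagged as ```plaintext so streamed markdown still
// renders them as code blocks.
const normalizeBareFences = (markdown: string): string => {
  const fenceRegex = /```(\w*)\n[\s\S]*?```|```[\s\S]*?$/g;
  return (markdown.match(fenceRegex) ?? []).reduce(
    (acc, block) =>
      block.match(/```\w+/)
        ? acc
        : acc.replace(block, block.replace("```", "```plaintext")),
    markdown
  );
};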
|
||||
|
||||
const finalContentProcessed = processContent(finalContent as string);
|
||||
|
||||
const [isRegenerateDropdownVisible, setIsRegenerateDropdownVisible] =
|
||||
useState(false);
|
||||
const { isHovering, trackedElementRef, hoverElementRef } = useMouseTracking();
|
||||
|
||||
const settings = useContext(SettingsContext);
|
||||
// this is needed to give Prism a chance to load
|
||||
|
||||
const selectedDocumentIds =
|
||||
selectedDocuments?.map((document) => document.document_id) || [];
|
||||
const citedDocumentIds: string[] = [];
|
||||
|
||||
citedDocuments?.forEach((doc) => {
|
||||
citedDocumentIds.push(doc[1].document_id);
|
||||
});
|
||||
|
||||
if (!isComplete) {
|
||||
const trimIncompleteCodeSection = (
|
||||
content: string | JSX.Element
|
||||
): string | JSX.Element => {
|
||||
if (typeof content === "string") {
|
||||
const pattern = /```[a-zA-Z]+[^\s]*$/;
|
||||
const match = content.match(pattern);
|
||||
if (match && match.index && match.index > 3) {
|
||||
const newContent = content.slice(0, match.index - 3);
|
||||
return newContent;
|
||||
}
|
||||
return content;
|
||||
}
|
||||
return content;
|
||||
};
|
||||
content = trimIncompleteCodeSection(content);
|
||||
}
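// Sketch of the trim above: a fence opener that is still streaming in, e.g.
//   "Here is the fix:\n```python"
// matches /```[a-zA-Z]+[^\s]*$/, and everything from three characters before
// the fence onward is cut, so a half-open code block never flashes on screen
// while the rest of it streams in.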
|
||||
|
||||
let filteredDocs: FilteredOnyxDocument[] = [];
|
||||
|
||||
if (docs) {
|
||||
filteredDocs = docs
|
||||
.filter(
|
||||
(doc, index, self) =>
|
||||
doc.document_id &&
|
||||
doc.document_id !== "" &&
|
||||
index === self.findIndex((d) => d.document_id === doc.document_id)
|
||||
)
|
||||
.filter((doc) => {
|
||||
return citedDocumentIds.includes(doc.document_id);
|
||||
})
|
||||
.map((doc: OnyxDocument, ind: number) => {
|
||||
return {
|
||||
...doc,
|
||||
included: selectedDocumentIds.includes(doc.document_id),
|
||||
};
|
||||
});
|
||||
}
|
||||
|
||||
const paragraphCallback = useCallback(
|
||||
(props: any) => <MemoizedParagraph>{props.children}</MemoizedParagraph>,
|
||||
[]
|
||||
);
|
||||
|
||||
const anchorCallback = useCallback(
|
||||
(props: any) => (
|
||||
<MemoizedAnchor
|
||||
updatePresentingDocument={setPresentingDocument!}
|
||||
docs={docs}
|
||||
userFiles={userKnowledgeFiles}
|
||||
href={props.href}
|
||||
>
|
||||
{props.children}
|
||||
</MemoizedAnchor>
|
||||
),
|
||||
[docs]
|
||||
);
|
||||
|
||||
const currentMessageInd = messageId
|
||||
? otherMessagesCanSwitchTo?.indexOf(messageId)
|
||||
: undefined;
|
||||
|
||||
const webSourceDomains: string[] = Array.from(
|
||||
new Set(
|
||||
docs
|
||||
?.filter((doc) => doc.source_type === "web")
|
||||
.map((doc) => {
|
||||
try {
|
||||
const url = new URL(doc.link);
|
||||
return `https://${url.hostname}`;
|
||||
} catch {
|
||||
return doc.link; // fallback to full link if parsing fails
|
||||
}
|
||||
}) || []
|
||||
)
|
||||
);
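// Standalone sketch of the same origin de-duplication (function name ours):
const dedupeWebOrigins = (links: string[]): string[] =>
  Array.from(
    new Set(
      links.map((link) => {
        try {
          return `https://${new URL(link).hostname}`;
        } catch {
          return link; // keep the raw link when it is not a parseable URL
        }
      })
    )
  );
// dedupeWebOrigins(["https://a.com/x", "https://a.com/y", "not-a-url"])
//   -> ["https://a.com", "not-a-url"]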
|
||||
|
||||
const markdownComponents = useMemo(
|
||||
() => ({
|
||||
a: anchorCallback,
|
||||
p: paragraphCallback,
|
||||
b: ({ node, className, children }: any) => {
|
||||
return <span className={className}>{children}</span>;
|
||||
},
|
||||
code: ({ node, className, children }: any) => {
|
||||
const codeText = extractCodeText(
|
||||
node,
|
||||
finalContentProcessed as string,
|
||||
children
|
||||
);
|
||||
|
||||
return (
|
||||
<CodeBlock className={className} codeText={codeText}>
|
||||
{children}
|
||||
</CodeBlock>
|
||||
);
|
||||
},
|
||||
}),
|
||||
[anchorCallback, paragraphCallback, finalContentProcessed]
|
||||
);
|
||||
const markdownRef = useRef<HTMLDivElement>(null);
|
||||
|
||||
// Process selection copying with HTML formatting
|
||||
|
||||
const renderedMarkdown = useMemo(() => {
|
||||
if (typeof finalContentProcessed !== "string") {
|
||||
return finalContentProcessed;
|
||||
}
|
||||
|
||||
return (
|
||||
<ReactMarkdown
|
||||
className="prose dark:prose-invert max-w-full text-base"
|
||||
components={markdownComponents}
|
||||
remarkPlugins={[remarkGfm, remarkMath]}
|
||||
rehypePlugins={[[rehypePrism, { ignoreMissing: true }], rehypeKatex]}
|
||||
urlTransform={transformLinkUri}
|
||||
>
|
||||
{finalContentProcessed}
|
||||
</ReactMarkdown>
|
||||
);
|
||||
}, [finalContentProcessed, markdownComponents]);
|
||||
|
||||
const includeMessageSwitcher =
|
||||
currentMessageInd !== undefined &&
|
||||
onMessageSelection &&
|
||||
otherMessagesCanSwitchTo &&
|
||||
otherMessagesCanSwitchTo.length > 1;
|
||||
|
||||
let otherMessage: number | undefined = undefined;
|
||||
if (currentMessageInd && otherMessagesCanSwitchTo) {
|
||||
otherMessage = otherMessagesCanSwitchTo[currentMessageInd - 1];
|
||||
}
|
||||
|
||||
return (
|
||||
<div
|
||||
id={isComplete ? "onyx-ai-message" : undefined}
|
||||
ref={trackedElementRef}
|
||||
className={`py-5 ml-4 lg:px-5 relative flex
|
||||
|
||||
${removePadding && "!pl-24 -mt-12"}`}
|
||||
>
|
||||
<div
|
||||
className={`mx-auto ${
|
||||
shared ? "w-full" : "w-[90%]"
|
||||
} max-w-message-max`}
|
||||
>
|
||||
<div className={`lg:mr-12 ${!shared && "mobile:ml-0 md:ml-8"}`}>
|
||||
<div className="flex items-start">
|
||||
{!removePadding && (
|
||||
<AssistantIcon
|
||||
className="mobile:hidden"
|
||||
size={24}
|
||||
assistant={alternativeAssistant || currentPersona}
|
||||
/>
|
||||
)}
|
||||
|
||||
<div className="w-full">
|
||||
<div className="max-w-message-max break-words">
|
||||
<div className="w-full desktop:ml-4">
|
||||
<div className="max-w-message-max break-words">
|
||||
{userKnowledgeFiles.length == 0 &&
|
||||
(!toolCall || toolCall.tool_name === SEARCH_TOOL_NAME ? (
|
||||
<>
|
||||
{query !== undefined && (
|
||||
<div className="mb-1">
|
||||
<SearchSummary
|
||||
index={index || 0}
|
||||
query={query}
|
||||
finished={toolCall?.tool_result != undefined}
|
||||
handleSearchQueryEdit={handleSearchQueryEdit}
|
||||
docs={docs || []}
|
||||
toggleDocumentSelection={
|
||||
toggleDocumentSelection!
|
||||
}
|
||||
userFileSearch={retrievalDisabled ?? false}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{handleForceSearch &&
|
||||
content &&
|
||||
query === undefined &&
|
||||
!hasDocs &&
|
||||
!retrievalDisabled && (
|
||||
<div className="mb-1">
|
||||
<SkippedSearch
|
||||
handleForceSearch={handleForceSearch}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</>
|
||||
) : null)}
|
||||
{userKnowledgeFiles && (
|
||||
<UserKnowledgeFiles
|
||||
userKnowledgeFiles={userKnowledgeFiles}
|
||||
/>
|
||||
)}
|
||||
|
||||
{!userKnowledgeFiles &&
|
||||
toolCall &&
|
||||
!TOOLS_WITH_CUSTOM_HANDLING.includes(
|
||||
toolCall.tool_name
|
||||
) && (
|
||||
<ToolRunDisplay
|
||||
toolName={
|
||||
toolCall.tool_result && content
|
||||
? `Used "${toolCall.tool_name}"`
|
||||
: `Using "${toolCall.tool_name}"`
|
||||
}
|
||||
toolLogo={
|
||||
<FiTool size={15} className="my-auto mr-1" />
|
||||
}
|
||||
isRunning={!toolCall.tool_result || !content}
|
||||
/>
|
||||
)}
|
||||
{toolCall &&
|
||||
(!files || files.length == 0) &&
|
||||
toolCall.tool_name === IMAGE_GENERATION_TOOL_NAME &&
|
||||
!toolCall.tool_result && <GeneratingImageDisplay />}
|
||||
{toolCall &&
|
||||
toolCall.tool_name === INTERNET_SEARCH_TOOL_NAME && (
|
||||
<ToolRunDisplay
|
||||
toolName={
|
||||
toolCall.tool_result
|
||||
? `Searched the internet`
|
||||
: `Searching the internet`
|
||||
}
|
||||
toolLogo={
|
||||
<FiGlobe size={15} className="my-auto mr-1" />
|
||||
}
|
||||
isRunning={!toolCall.tool_result}
|
||||
/>
|
||||
)}
|
||||
{userKnowledgeFiles.length == 0 &&
|
||||
docs &&
|
||||
docs.length > 0 && (
|
||||
<div
|
||||
className={`mobile:hidden ${
|
||||
(query ||
|
||||
toolCall?.tool_name ===
|
||||
INTERNET_SEARCH_TOOL_NAME) &&
|
||||
"mt-2"
|
||||
} -mx-8 w-full mb-4 flex relative`}
|
||||
>
|
||||
<div className="w-full">
|
||||
<div className="px-8 flex gap-x-2">
|
||||
{!settings?.isMobile &&
|
||||
docs.length > 0 &&
|
||||
docs
|
||||
.slice(0, 2)
|
||||
.map((doc: OnyxDocument, ind: number) => (
|
||||
<SourceCard
|
||||
document={doc}
|
||||
key={ind}
|
||||
setPresentingDocument={() =>
|
||||
setPresentingDocument({
|
||||
document_id: doc.document_id,
|
||||
semantic_identifier: doc.document_id,
|
||||
})
|
||||
}
|
||||
/>
|
||||
))}
|
||||
<SeeMoreBlock
|
||||
toggled={documentSidebarVisible!}
|
||||
toggleDocumentSelection={
|
||||
toggleDocumentSelection!
|
||||
}
|
||||
docs={docs}
|
||||
webSourceDomains={webSourceDomains}
|
||||
/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{userKnowledgeFiles && userKnowledgeFiles.length > 0 && (
|
||||
<div
|
||||
key={10}
|
||||
className={`mobile:hidden ${
|
||||
(query ||
|
||||
toolCall?.tool_name ===
|
||||
INTERNET_SEARCH_TOOL_NAME) &&
|
||||
"mt-2"
|
||||
} -mx-8 w-full mb-4 flex relative`}
|
||||
>
|
||||
<div className="w-full">
|
||||
<div className="px-8 flex gap-x-2">
|
||||
{!settings?.isMobile &&
|
||||
userKnowledgeFiles.length > 0 &&
|
||||
userKnowledgeFiles
|
||||
.slice(0, 2)
|
||||
.map((file: FileResponse, ind: number) => (
|
||||
<FileSourceCard
|
||||
relevantDocument={docs?.find(
|
||||
(doc) =>
|
||||
doc.document_id ===
|
||||
`FILE_CONNECTOR__${file.file_id}` ||
|
||||
doc.document_id ===
|
||||
`USER_FILE_CONNECTOR__${file.file_id}`
|
||||
)}
|
||||
key={ind}
|
||||
document={file}
|
||||
setPresentingDocument={() =>
|
||||
setPresentingDocument({
|
||||
document_id: file.document_id,
|
||||
semantic_identifier: file.name,
|
||||
})
|
||||
}
|
||||
/>
|
||||
))}
|
||||
|
||||
{userKnowledgeFiles.length > 2 && (
|
||||
<FilesSeeMoreBlock
|
||||
key={10}
|
||||
toggled={documentSidebarVisible!}
|
||||
toggleDocumentSelection={
|
||||
toggleDocumentSelection!
|
||||
}
|
||||
files={userKnowledgeFiles}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Render thinking box if thinking tokens exist */}
|
||||
{hasThinkingTokens && thinkingContent && (
|
||||
<div className="mb-2">
|
||||
<ThinkingBox
|
||||
content={thinkingContent}
|
||||
isComplete={isComplete || false}
|
||||
isStreaming={!isThinkingTokenComplete || !isComplete}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Only show the message content once thinking is complete or if there's no thinking */}
|
||||
{shouldShowContent && (content || files) ? (
|
||||
<>
|
||||
<FileDisplay
|
||||
setPresentingDocument={setPresentingDocument}
|
||||
files={files || []}
|
||||
/>
|
||||
{typeof content === "string" ? (
|
||||
<div className="overflow-x-visible max-w-content-max">
|
||||
<div
|
||||
ref={markdownRef}
|
||||
className="focus:outline-none cursor-text select-text"
|
||||
onCopy={(e) => handleCopy(e, markdownRef)}
|
||||
>
|
||||
{renderedMarkdown}
|
||||
</div>
|
||||
</div>
|
||||
) : (
|
||||
content
|
||||
)}
|
||||
</>
|
||||
) : isComplete ? null : (
|
||||
<></>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{!removePadding &&
|
||||
handleFeedback &&
|
||||
(isActive ? (
|
||||
<div
|
||||
className={`
|
||||
flex md:flex-row gap-x-0.5 mt-1
|
||||
transition-transform duration-300 ease-in-out
|
||||
transform opacity-100 "
|
||||
`}
|
||||
>
|
||||
<TooltipGroup>
|
||||
<div className="flex justify-start w-full gap-x-0.5">
|
||||
{includeMessageSwitcher &&
|
||||
otherMessage !== undefined && (
|
||||
<div className="-mx-1 mr-auto">
|
||||
<MessageSwitcher
|
||||
currentPage={currentMessageInd + 1}
|
||||
totalPages={otherMessagesCanSwitchTo.length}
|
||||
handlePrevious={() => {
|
||||
onMessageSelection(otherMessage!);
|
||||
}}
|
||||
handleNext={() => {
|
||||
onMessageSelection(otherMessage!);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<CustomTooltip showTick line content="Copy">
|
||||
<CopyButton
|
||||
copyAllFn={() =>
|
||||
copyAll(
|
||||
finalContentProcessed as string,
|
||||
markdownRef
|
||||
)
|
||||
}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
<CustomTooltip showTick line content="Good response">
|
||||
<HoverableIcon
|
||||
icon={<LikeFeedback />}
|
||||
onClick={() => handleFeedback("like")}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
<CustomTooltip showTick line content="Bad response">
|
||||
<HoverableIcon
|
||||
icon={<DislikeFeedback size={16} />}
|
||||
onClick={() => handleFeedback("dislike")}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
|
||||
{regenerate && (
|
||||
<CustomTooltip
|
||||
disabled={isRegenerateDropdownVisible}
|
||||
showTick
|
||||
line
|
||||
content="Regenerate"
|
||||
>
|
||||
<RegenerateOption
|
||||
onDropdownVisibleChange={
|
||||
setIsRegenerateDropdownVisible
|
||||
}
|
||||
selectedAssistant={currentPersona!}
|
||||
regenerate={regenerate}
|
||||
overriddenModel={overriddenModel}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
)}
|
||||
</TooltipGroup>
|
||||
</div>
|
||||
) : (
|
||||
<div
|
||||
ref={hoverElementRef}
|
||||
className={`
|
||||
absolute -bottom-5
|
||||
z-10
|
||||
invisible ${
|
||||
(isHovering || settings?.isMobile) && "!visible"
|
||||
}
|
||||
opacity-0 ${
|
||||
(isHovering || settings?.isMobile) && "!opacity-100"
|
||||
}
|
||||
flex md:flex-row gap-x-0.5 bg-background-125/40 -mx-1.5 p-1.5 rounded-lg
|
||||
`}
|
||||
>
|
||||
<TooltipGroup>
|
||||
<div className="flex justify-start w-full gap-x-0.5">
|
||||
{includeMessageSwitcher &&
|
||||
otherMessage !== undefined && (
|
||||
<div className="-mx-1 mr-auto">
|
||||
<MessageSwitcher
|
||||
currentPage={currentMessageInd + 1}
|
||||
totalPages={otherMessagesCanSwitchTo.length}
|
||||
handlePrevious={() => {
|
||||
onMessageSelection(otherMessage!);
|
||||
}}
|
||||
handleNext={() => {
|
||||
onMessageSelection(otherMessage!);
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
<CustomTooltip showTick line content="Copy">
|
||||
<CopyButton
|
||||
copyAllFn={() =>
|
||||
copyAll(
|
||||
finalContentProcessed as string,
|
||||
markdownRef
|
||||
)
|
||||
}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
|
||||
<CustomTooltip showTick line content="Good response">
|
||||
<HoverableIcon
|
||||
icon={<LikeFeedback />}
|
||||
onClick={() => handleFeedback("like")}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
|
||||
<CustomTooltip showTick line content="Bad response">
|
||||
<HoverableIcon
|
||||
icon={<DislikeFeedback size={16} />}
|
||||
onClick={() => handleFeedback("dislike")}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
{regenerate && (
|
||||
<CustomTooltip
|
||||
disabled={isRegenerateDropdownVisible}
|
||||
showTick
|
||||
line
|
||||
content="Regenerate"
|
||||
>
|
||||
<RegenerateOption
|
||||
selectedAssistant={currentPersona!}
|
||||
onDropdownVisibleChange={
|
||||
setIsRegenerateDropdownVisible
|
||||
}
|
||||
regenerate={regenerate}
|
||||
overriddenModel={overriddenModel}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
)}
|
||||
</TooltipGroup>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{(!toolCall || toolCall.tool_name === SEARCH_TOOL_NAME) &&
|
||||
!query &&
|
||||
continueGenerating && (
|
||||
<ContinueGenerating handleContinueGenerating={continueGenerating} />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
function MessageSwitcher({
|
||||
currentPage,
|
||||
totalPages,
|
||||
handlePrevious,
|
||||
handleNext,
|
||||
disableForStreaming,
|
||||
}: {
|
||||
currentPage: number;
|
||||
totalPages: number;
|
||||
handlePrevious: () => void;
|
||||
handleNext: () => void;
|
||||
disableForStreaming?: boolean;
|
||||
}) {
|
||||
return (
|
||||
<div className="flex items-center text-sm space-x-0.5">
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger asChild>
|
||||
<div>
|
||||
<Hoverable
|
||||
icon={FiChevronLeft}
|
||||
onClick={
|
||||
disableForStreaming
|
||||
? () => null
|
||||
: currentPage === 1
|
||||
? undefined
|
||||
: handlePrevious
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{disableForStreaming
|
||||
? "Wait for agent message to complete"
|
||||
: "Previous"}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
|
||||
<span className="text-text-darker select-none">
|
||||
{currentPage} / {totalPages}
|
||||
</span>
|
||||
|
||||
<TooltipProvider>
|
||||
<Tooltip>
|
||||
<TooltipTrigger>
|
||||
<div>
|
||||
<Hoverable
|
||||
icon={FiChevronRight}
|
||||
onClick={
|
||||
disableForStreaming
|
||||
? () => null
|
||||
: currentPage === totalPages
|
||||
? undefined
|
||||
: handleNext
|
||||
}
|
||||
/>
|
||||
</div>
|
||||
</TooltipTrigger>
|
||||
<TooltipContent>
|
||||
{disableForStreaming
|
||||
? "Wait for agent message to complete"
|
||||
: "Next"}
|
||||
</TooltipContent>
|
||||
</Tooltip>
|
||||
</TooltipProvider>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export const HumanMessage = ({
|
||||
content,
|
||||
files,
|
||||
@@ -1012,10 +158,27 @@ export const HumanMessage = ({
|
||||
? otherMessagesCanSwitchTo?.indexOf(messageId)
|
||||
: undefined;
|
||||
|
||||
let otherMessage: number | undefined = undefined;
|
||||
if (currentMessageInd && otherMessagesCanSwitchTo) {
|
||||
otherMessage = otherMessagesCanSwitchTo[currentMessageInd - 1];
|
||||
}
|
||||
const getPreviousMessage = () => {
|
||||
if (
|
||||
currentMessageInd !== undefined &&
|
||||
currentMessageInd > 0 &&
|
||||
otherMessagesCanSwitchTo
|
||||
) {
|
||||
return otherMessagesCanSwitchTo[currentMessageInd - 1];
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
const getNextMessage = () => {
|
||||
if (
|
||||
currentMessageInd !== undefined &&
|
||||
currentMessageInd < (otherMessagesCanSwitchTo?.length || 0) - 1 &&
|
||||
otherMessagesCanSwitchTo
|
||||
) {
|
||||
return otherMessagesCanSwitchTo[currentMessageInd + 1];
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
@@ -1212,7 +375,6 @@ export const HumanMessage = ({
|
||||
|
||||
<div className="flex flex-col md:flex-row gap-x-0.5 mt-1">
|
||||
{currentMessageInd !== undefined &&
|
||||
otherMessage !== undefined &&
|
||||
onMessageSelection &&
|
||||
otherMessagesCanSwitchTo &&
|
||||
otherMessagesCanSwitchTo.length > 1 && (
|
||||
@@ -1223,11 +385,17 @@ export const HumanMessage = ({
|
||||
totalPages={otherMessagesCanSwitchTo.length}
|
||||
handlePrevious={() => {
|
||||
stopGenerating();
|
||||
onMessageSelection(otherMessage!);
|
||||
const prevMessage = getPreviousMessage();
|
||||
if (prevMessage !== undefined) {
|
||||
onMessageSelection(prevMessage);
|
||||
}
|
||||
}}
|
||||
handleNext={() => {
|
||||
stopGenerating();
|
||||
onMessageSelection(otherMessage!);
|
||||
const nextMessage = getNextMessage();
|
||||
if (nextMessage !== undefined) {
|
||||
onMessageSelection(nextMessage);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
|
||||
@@ -8,7 +8,7 @@ import React, {
import { FiSearch } from "react-icons/fi";
import { OnyxDocument } from "@/lib/search/interfaces";
import { BaseQuestionIdentifier, SubQuestionDetail } from "../interfaces";
import { SourceChip2 } from "../input/ChatInputBar";
import { SourceChip2 } from "../components/input/ChatInputBar";
import { ResultIcon } from "@/components/chat/sources/SourceCard";
import { openDocument } from "@/lib/search/utils";
import { SourcesDisplay } from "./SourcesDisplay";

web/src/app/chat/message/messageComponents/AIMessage.tsx
@@ -0,0 +1,397 @@
|
||||
import {
|
||||
Packet,
|
||||
PacketType,
|
||||
CitationDelta,
|
||||
SearchToolDelta,
|
||||
ImageGenerationToolDelta,
|
||||
StreamingCitation,
|
||||
} from "../../services/streamingModels";
|
||||
import { FullChatState } from "./interfaces";
|
||||
import { AssistantIcon } from "@/components/assistants/AssistantIcon";
|
||||
import { CopyButton } from "@/components/CopyButton";
|
||||
import { LikeFeedback, DislikeFeedback } from "@/components/icons/icons";
|
||||
import { HoverableIcon } from "@/components/Hoverable";
|
||||
import { OnyxDocument } from "@/lib/search/interfaces";
|
||||
import { CitedSourcesToggle } from "./CitedSourcesToggle";
|
||||
import {
|
||||
CustomTooltip,
|
||||
TooltipGroup,
|
||||
} from "@/components/tooltip/CustomTooltip";
|
||||
import { useMemo, useRef, useState, useEffect } from "react";
|
||||
import {
|
||||
useChatSessionStore,
|
||||
useDocumentSidebarVisible,
|
||||
useSelectedMessageForDocDisplay,
|
||||
} from "../../stores/useChatSessionStore";
|
||||
import { copyAll, handleCopy } from "../copyingUtils";
|
||||
import RegenerateOption from "../../components/RegenerateOption";
|
||||
import { MessageSwitcher } from "../MessageSwitcher";
|
||||
import { BlinkingDot } from "../BlinkingDot";
|
||||
import {
|
||||
getTextContent,
|
||||
isFinalAnswerComing,
|
||||
isStreamingComplete,
|
||||
isToolPacket,
|
||||
} from "./packetUtils";
|
||||
import { useMessageSwitching } from "./hooks/useMessageSwitching";
|
||||
import MultiToolRenderer, { RendererComponent } from "./MultiToolRenderer";
|
||||
|
||||
export function AIMessage({
|
||||
rawPackets,
|
||||
chatState,
|
||||
messageId,
|
||||
otherMessagesCanSwitchTo,
|
||||
onMessageSelection,
|
||||
}: {
|
||||
rawPackets: Packet[];
|
||||
chatState: FullChatState;
|
||||
messageId?: number | null;
|
||||
otherMessagesCanSwitchTo?: number[];
|
||||
onMessageSelection?: (messageId: number) => void;
|
||||
}) {
|
||||
const markdownRef = useRef<HTMLDivElement>(null);
|
||||
const [isRegenerateDropdownVisible, setIsRegenerateDropdownVisible] =
|
||||
useState(false);
|
||||
|
||||
const [allToolsFullyDisplayed, setAllToolsFullyDisplayed] = useState(
|
||||
isFinalAnswerComing(rawPackets) || isStreamingComplete(rawPackets)
|
||||
);
|
||||
|
||||
const [displayComplete, setDisplayComplete] = useState(
|
||||
isStreamingComplete(rawPackets)
|
||||
);
|
||||
|
||||
// Incremental packet processing state
|
||||
const lastProcessedIndexRef = useRef<number>(0);
|
||||
const citationsRef = useRef<StreamingCitation[]>([]);
|
||||
const seenCitationDocIdsRef = useRef<Set<string>>(new Set());
|
||||
const documentMapRef = useRef<Map<string, OnyxDocument>>(new Map());
|
||||
const groupedPacketsMapRef = useRef<Map<number, Packet[]>>(new Map());
|
||||
const groupedPacketsRef = useRef<{ ind: number; packets: Packet[] }[]>([]);
|
||||
|
||||
// Reset incremental state when switching messages or when stream resets
|
||||
useEffect(() => {
|
||||
lastProcessedIndexRef.current = 0;
|
||||
citationsRef.current = [];
|
||||
seenCitationDocIdsRef.current = new Set();
|
||||
documentMapRef.current = new Map();
|
||||
groupedPacketsMapRef.current = new Map();
|
||||
groupedPacketsRef.current = [];
|
||||
}, [messageId]);
|
||||
|
||||
// If the upstream replaces packets with a shorter list (reset), clear state
|
||||
if (lastProcessedIndexRef.current > rawPackets.length) {
|
||||
lastProcessedIndexRef.current = 0;
|
||||
citationsRef.current = [];
|
||||
seenCitationDocIdsRef.current = new Set();
|
||||
documentMapRef.current = new Map();
|
||||
groupedPacketsMapRef.current = new Map();
|
||||
groupedPacketsRef.current = [];
|
||||
}
|
||||
|
||||
// Process only the new packets synchronously for this render
|
||||
if (rawPackets.length > lastProcessedIndexRef.current) {
|
||||
for (let i = lastProcessedIndexRef.current; i < rawPackets.length; i++) {
|
||||
const packet = rawPackets[i];
|
||||
if (!packet) continue;
|
||||
|
||||
// Grouping by ind
|
||||
const existingGroup = groupedPacketsMapRef.current.get(packet.ind);
|
||||
if (existingGroup) {
|
||||
existingGroup.push(packet);
|
||||
} else {
|
||||
groupedPacketsMapRef.current.set(packet.ind, [packet]);
|
||||
}
|
||||
|
||||
// Citations
|
||||
if (packet.obj.type === PacketType.CITATION_DELTA) {
|
||||
const citationDelta = packet.obj as CitationDelta;
|
||||
if (citationDelta.citations) {
|
||||
for (const citation of citationDelta.citations) {
|
||||
if (!seenCitationDocIdsRef.current.has(citation.document_id)) {
|
||||
seenCitationDocIdsRef.current.add(citation.document_id);
|
||||
citationsRef.current.push(citation);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Documents from tool deltas
|
||||
if (
|
||||
packet.obj.type === PacketType.SEARCH_TOOL_DELTA ||
|
||||
packet.obj.type === PacketType.IMAGE_GENERATION_TOOL_DELTA
|
||||
) {
|
||||
const toolDelta = packet.obj as
|
||||
| SearchToolDelta
|
||||
| ImageGenerationToolDelta;
|
||||
if ("documents" in toolDelta && toolDelta.documents) {
|
||||
for (const doc of toolDelta.documents) {
|
||||
if (doc.document_id) {
|
||||
documentMapRef.current.set(doc.document_id, doc);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Rebuild the grouped packets array sorted by ind
|
||||
// Clone packet arrays to ensure referential changes so downstream memo hooks update
|
||||
groupedPacketsRef.current = Array.from(
|
||||
groupedPacketsMapRef.current.entries()
|
||||
)
|
||||
.map(([ind, packets]) => ({ ind, packets: [...packets] }))
|
||||
.sort((a, b) => a.ind - b.ind);
|
||||
|
||||
lastProcessedIndexRef.current = rawPackets.length;
|
||||
}
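// Standalone sketch of the citation de-duplication above (shape reduced to the
// fields used here; real citations arrive in CitationDelta packets):
type MiniCitation = { document_id: string; citation_num: number };
const dedupeCitations = (batches: MiniCitation[][]): MiniCitation[] => {
  const seen = new Set<string>();
  const kept: MiniCitation[] = [];
  for (const batch of batches) {
    for (const citation of batch) {
      if (!seen.has(citation.document_id)) {
        seen.add(citation.document_id);
        kept.push(citation);
      }
    }
  }
  return kept;
};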
|
||||
|
||||
const citations = citationsRef.current;
|
||||
const documentMap = documentMapRef.current;
|
||||
|
||||
// Use store for document sidebar
|
||||
const documentSidebarVisible = useDocumentSidebarVisible();
|
||||
const selectedMessageForDocDisplay = useSelectedMessageForDocDisplay();
|
||||
const updateCurrentDocumentSidebarVisible = useChatSessionStore(
|
||||
(state) => state.updateCurrentDocumentSidebarVisible
|
||||
);
|
||||
const updateCurrentSelectedMessageForDocDisplay = useChatSessionStore(
|
||||
(state) => state.updateCurrentSelectedMessageForDocDisplay
|
||||
);
|
||||
|
||||
// Calculate unique source count
|
||||
const uniqueSourceCount = useMemo(() => {
|
||||
const uniqueDocIds = new Set<string>();
|
||||
for (const citation of citations) {
|
||||
if (citation.document_id) {
|
||||
uniqueDocIds.add(citation.document_id);
|
||||
}
|
||||
}
|
||||
documentMap.forEach((_, docId) => {
|
||||
uniqueDocIds.add(docId);
|
||||
});
|
||||
return uniqueDocIds.size;
|
||||
}, [citations.length, documentMap.size]);
|
||||
|
||||
// Message switching logic
|
||||
const {
|
||||
currentMessageInd,
|
||||
includeMessageSwitcher,
|
||||
getPreviousMessage,
|
||||
getNextMessage,
|
||||
} = useMessageSwitching({
|
||||
messageId,
|
||||
otherMessagesCanSwitchTo,
|
||||
onMessageSelection,
|
||||
});
|
||||
|
||||
const groupedPackets = groupedPacketsRef.current;
|
||||
|
||||
console.log("displayComplete", displayComplete);
|
||||
|
||||
// Return a list of rendered message components, one for each ind
|
||||
return (
|
||||
<div className="py-5 ml-4 lg:px-5 relative flex">
|
||||
<div className="mx-auto w-[90%] max-w-message-max">
|
||||
<div className="lg:mr-12 mobile:ml-0 md:ml-8">
|
||||
<div className="flex items-start">
|
||||
<AssistantIcon
|
||||
className="mobile:hidden"
|
||||
size={24}
|
||||
assistant={chatState.assistant}
|
||||
/>
|
||||
<div className="w-full">
|
||||
<div className="max-w-message-max break-words">
|
||||
<div className="w-full desktop:ml-4">
|
||||
<div className="max-w-message-max break-words">
|
||||
<div
|
||||
ref={markdownRef}
|
||||
className="overflow-x-visible max-w-content-max focus:outline-none cursor-text select-text"
|
||||
onCopy={(e) => handleCopy(e, markdownRef)}
|
||||
>
|
||||
{groupedPackets.length === 0 ? (
|
||||
// Show blinking dot when no content yet but message is generating
|
||||
<BlinkingDot />
|
||||
) : (
|
||||
(() => {
|
||||
// Separate tool groups from final answer groups
|
||||
const toolGroups = groupedPackets.filter(
|
||||
(group) =>
|
||||
group.packets[0] && isToolPacket(group.packets[0])
|
||||
) as { ind: number; packets: Packet[] }[];
|
||||
// display final answer only if all tools are fully displayed
|
||||
// OR if there are no tools at all (in which case show immediately)
|
||||
const finalAnswerGroups =
|
||||
allToolsFullyDisplayed || toolGroups.length === 0
|
||||
? groupedPackets.filter(
|
||||
(group) =>
|
||||
group.packets[0] &&
|
||||
!isToolPacket(group.packets[0])
|
||||
)
|
||||
: [];
|
||||
|
||||
return (
|
||||
<>
|
||||
{/* Render all tool groups together using MultiToolRenderer */}
|
||||
{toolGroups.length > 0 && (
|
||||
<MultiToolRenderer
|
||||
packetGroups={toolGroups}
|
||||
chatState={chatState}
|
||||
isComplete={allToolsFullyDisplayed}
|
||||
onAllToolsDisplayed={() =>
|
||||
setAllToolsFullyDisplayed(true)
|
||||
}
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Render final answer groups directly using renderMessageComponent */}
|
||||
{finalAnswerGroups.map((group) => (
|
||||
<RendererComponent
|
||||
key={group.ind}
|
||||
packets={group.packets}
|
||||
chatState={chatState}
|
||||
onComplete={() => {
|
||||
// Final answer completed
|
||||
setDisplayComplete(true);
|
||||
}}
|
||||
animate={!displayComplete}
|
||||
>
|
||||
{({ content }) => <div>{content}</div>}
|
||||
</RendererComponent>
|
||||
))}
|
||||
</>
|
||||
);
|
||||
})()
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Feedback buttons - only show when streaming is complete */}
|
||||
{chatState.handleFeedback && displayComplete && (
|
||||
<div className="flex md:flex-row justify-between items-center w-full mt-1 transition-transform duration-300 ease-in-out transform opacity-100">
|
||||
<TooltipGroup>
|
||||
<div className="flex items-center gap-x-0.5">
|
||||
{includeMessageSwitcher && (
|
||||
<div className="-mx-1">
|
||||
<MessageSwitcher
|
||||
currentPage={(currentMessageInd ?? 0) + 1}
|
||||
totalPages={
|
||||
otherMessagesCanSwitchTo?.length || 0
|
||||
}
|
||||
handlePrevious={() => {
|
||||
const prevMessage = getPreviousMessage();
|
||||
if (
|
||||
prevMessage !== undefined &&
|
||||
onMessageSelection
|
||||
) {
|
||||
onMessageSelection(prevMessage);
|
||||
}
|
||||
}}
|
||||
handleNext={() => {
|
||||
const nextMessage = getNextMessage();
|
||||
if (
|
||||
nextMessage !== undefined &&
|
||||
onMessageSelection
|
||||
) {
|
||||
onMessageSelection(nextMessage);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<CustomTooltip showTick line content="Copy">
|
||||
<CopyButton
|
||||
copyAllFn={() =>
|
||||
copyAll(getTextContent(rawPackets), markdownRef)
|
||||
}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
|
||||
<CustomTooltip showTick line content="Good response">
|
||||
<HoverableIcon
|
||||
icon={<LikeFeedback size={16} />}
|
||||
onClick={() => chatState.handleFeedback("like")}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
|
||||
<CustomTooltip showTick line content="Bad response">
|
||||
<HoverableIcon
|
||||
icon={<DislikeFeedback size={16} />}
|
||||
onClick={() =>
|
||||
chatState.handleFeedback("dislike")
|
||||
}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
|
||||
{chatState.regenerate && (
|
||||
<CustomTooltip
|
||||
disabled={isRegenerateDropdownVisible}
|
||||
showTick
|
||||
line
|
||||
content="Regenerate"
|
||||
>
|
||||
<RegenerateOption
|
||||
onDropdownVisibleChange={
|
||||
setIsRegenerateDropdownVisible
|
||||
}
|
||||
selectedAssistant={chatState.assistant}
|
||||
regenerate={chatState.regenerate}
|
||||
overriddenModel={chatState.overriddenModel}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
)}
|
||||
|
||||
{messageId &&
|
||||
(citations.length > 0 || documentMap.size > 0) && (
|
||||
<>
|
||||
{chatState.regenerate && (
|
||||
<div className="h-4 w-px bg-border mx-2" />
|
||||
)}
|
||||
<CustomTooltip
|
||||
showTick
|
||||
line
|
||||
content={`${uniqueSourceCount} Sources`}
|
||||
>
|
||||
<CitedSourcesToggle
|
||||
citations={citations}
|
||||
documentMap={documentMap}
|
||||
messageId={messageId}
|
||||
onToggle={(messageId) => {
|
||||
// Toggle sidebar if clicking on the same message
|
||||
if (
|
||||
selectedMessageForDocDisplay ===
|
||||
messageId &&
|
||||
documentSidebarVisible
|
||||
) {
|
||||
updateCurrentDocumentSidebarVisible(
|
||||
false
|
||||
);
|
||||
updateCurrentSelectedMessageForDocDisplay(
|
||||
null
|
||||
);
|
||||
} else {
|
||||
updateCurrentSelectedMessageForDocDisplay(
|
||||
messageId
|
||||
);
|
||||
updateCurrentDocumentSidebarVisible(
|
||||
true
|
||||
);
|
||||
}
|
||||
}}
|
||||
/>
|
||||
</CustomTooltip>
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</TooltipGroup>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
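// Hypothetical usage sketch (prop values invented for illustration; FullChatState
// carries more optional fields than shown here):
// <AIMessage
//   rawPackets={message.packets}
//   chatState={{ assistant, handleFeedback, regenerate, setPresentingDocument }}
//   messageId={message.messageId}
//   otherMessagesCanSwitchTo={siblingMessageIds}
//   onMessageSelection={(id) => switchToMessage(id)}
// />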
|
||||
@@ -0,0 +1,148 @@
|
||||
import React from "react";
|
||||
import { FiFileText } from "react-icons/fi";
|
||||
import { SourceIcon } from "@/components/SourceIcon";
|
||||
import { WebResultIcon } from "@/components/WebResultIcon";
|
||||
import { OnyxDocument } from "@/lib/search/interfaces";
|
||||
import { ValidSources } from "@/lib/types";
|
||||
|
||||
interface SourcesToggleProps {
|
||||
citations: Array<{
|
||||
citation_num: number;
|
||||
document_id: string;
|
||||
}>;
|
||||
documentMap: Map<string, OnyxDocument>;
|
||||
messageId: number;
|
||||
onToggle: (messageId: number) => void;
|
||||
}
|
||||
|
||||
export const CitedSourcesToggle = ({
|
||||
citations,
|
||||
documentMap,
|
||||
messageId,
|
||||
onToggle,
|
||||
}: SourcesToggleProps) => {
|
||||
// If no citations but we have documents, use the first 2 documents as fallback
|
||||
const hasContent = citations.length > 0 || documentMap.size > 0;
|
||||
if (!hasContent) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Helper function to create icon for a document
|
||||
const createDocumentIcon = (doc: OnyxDocument, documentId: string) => {
|
||||
let sourceKey: string;
|
||||
let iconElement: React.ReactNode;
|
||||
|
||||
if (doc.is_internet || doc.source_type === ValidSources.Web) {
|
||||
// For web sources, use the hostname as the unique key
|
||||
try {
|
||||
const hostname = new URL(doc.link).hostname;
|
||||
sourceKey = `web_${hostname}`;
|
||||
} catch {
|
||||
sourceKey = `web_${doc.link}`;
|
||||
}
|
||||
iconElement = <WebResultIcon key={documentId} url={doc.link} size={16} />;
|
||||
} else {
|
||||
sourceKey = `source_${doc.source_type}`;
|
||||
iconElement = (
|
||||
<SourceIcon
|
||||
key={documentId}
|
||||
sourceType={doc.source_type}
|
||||
iconSize={16}
|
||||
/>
|
||||
);
|
||||
}
|
||||
|
||||
return { sourceKey, iconElement };
|
||||
};
|
||||
|
||||
// Get unique icons by creating a unique identifier for each source
|
||||
const getUniqueIcons = () => {
|
||||
const seenSources = new Set<string>();
|
||||
const uniqueIcons: Array<{
|
||||
id: string;
|
||||
element: React.ReactNode;
|
||||
}> = [];
|
||||
|
||||
// Get documents to process - either from citations or fallback to all documents
|
||||
const documentsToProcess =
|
||||
citations.length > 0
|
||||
? citations.map((citation) => ({
|
||||
documentId: citation.document_id,
|
||||
doc: documentMap.get(citation.document_id),
|
||||
}))
|
||||
: Array.from(documentMap.entries()).map(([documentId, doc]) => ({
|
||||
documentId,
|
||||
doc,
|
||||
}));
|
||||
|
||||
for (const { documentId, doc } of documentsToProcess) {
|
||||
if (uniqueIcons.length >= 2) break;
|
||||
|
||||
let sourceKey: string;
|
||||
let iconElement: React.ReactNode;
|
||||
|
||||
if (doc) {
|
||||
const iconData = createDocumentIcon(doc, documentId);
|
||||
sourceKey = iconData.sourceKey;
|
||||
iconElement = iconData.iconElement;
|
||||
} else {
|
||||
// Fallback for missing document (only possible with citations)
|
||||
sourceKey = `file_${documentId}`;
|
||||
iconElement = <FiFileText key={documentId} size={16} />;
|
||||
}
|
||||
|
||||
if (!seenSources.has(sourceKey)) {
|
||||
seenSources.add(sourceKey);
|
||||
uniqueIcons.push({
|
||||
id: sourceKey,
|
||||
element: iconElement,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return uniqueIcons;
|
||||
};
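// Key scheme used above: web results collapse to one icon per hostname
// ("web_<hostname>"), other documents to one icon per source type
// ("source_<source_type>"), and a citation with no matching document falls back
// to "file_<document_id>". At most two icons are rendered; the rest are
// summarized as "+N" further down.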
|
||||
|
||||
const uniqueIcons = getUniqueIcons();
|
||||
|
||||
return (
|
||||
<div
|
||||
className="
|
||||
hover:bg-background-chat-hover
|
||||
text-text-600
|
||||
p-1.5
|
||||
rounded
|
||||
h-fit
|
||||
cursor-pointer
|
||||
flex
|
||||
items-center
|
||||
gap-1
|
||||
"
|
||||
onClick={() => onToggle(messageId)}
|
||||
>
|
||||
<div className="flex items-center">
|
||||
{uniqueIcons.map((icon, index) => (
|
||||
<div
|
||||
key={icon.id}
|
||||
className={index > 0 ? "-ml-1" : ""}
|
||||
style={{ zIndex: uniqueIcons.length - index }}
|
||||
>
|
||||
{icon.element}
|
||||
</div>
|
||||
))}
|
||||
{/* Show count for remaining items */}
|
||||
{(() => {
|
||||
const totalCount =
|
||||
citations.length > 0 ? citations.length : documentMap.size;
|
||||
const remainingCount = totalCount - uniqueIcons.length;
|
||||
return remainingCount > 0 ? (
|
||||
<span className="text-xs text-text-500 ml-1">
|
||||
+{remainingCount}
|
||||
</span>
|
||||
) : null;
|
||||
})()}
|
||||
</div>
|
||||
<span className="text-sm text-text-700">Sources</span>
|
||||
</div>
|
||||
);
|
||||
};
|
||||
web/src/app/chat/message/messageComponents/MultiToolRenderer.tsx
@@ -0,0 +1,300 @@
|
||||
import { useState, useMemo, useEffect } from "react";
|
||||
import { FiChevronDown, FiChevronRight } from "react-icons/fi";
|
||||
import { Packet } from "@/app/chat/services/streamingModels";
|
||||
import { FullChatState, RendererResult } from "./interfaces";
|
||||
import { renderMessageComponent } from "./renderMessageComponent";
|
||||
import { isToolPacket } from "./packetUtils";
|
||||
import { useToolDisplayTiming } from "./hooks/useToolDisplayTiming";
|
||||
import { STANDARD_TEXT_COLOR } from "./constants";
|
||||
|
||||
// React component wrapper to avoid hook count issues in map loops
|
||||
export function RendererComponent({
|
||||
packets,
|
||||
chatState,
|
||||
onComplete,
|
||||
animate,
|
||||
useShortRenderer = false,
|
||||
children,
|
||||
}: {
|
||||
packets: Packet[];
|
||||
chatState: FullChatState;
|
||||
onComplete: () => void;
|
||||
animate: boolean;
|
||||
useShortRenderer?: boolean;
|
||||
children: (result: RendererResult) => JSX.Element;
|
||||
}) {
|
||||
const result = renderMessageComponent(
|
||||
{ packets },
|
||||
chatState,
|
||||
onComplete,
|
||||
animate,
|
||||
useShortRenderer
|
||||
);
|
||||
|
||||
return children(result);
|
||||
}
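// Why this wrapper exists (descriptive note): renderMessageComponent calls hooks
// internally, so invoking it directly inside a .map() would change the number of
// hooks between renders as packet groups appear or disappear. Wrapping it in a
// component gives each group its own stable hook scope.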
|
||||
|
||||
// Multi-tool renderer component for grouped tools
|
||||
function MultiToolRenderer({
|
||||
packetGroups,
|
||||
chatState,
|
||||
isComplete,
|
||||
onAllToolsDisplayed,
|
||||
}: {
|
||||
packetGroups: { ind: number; packets: Packet[] }[];
|
||||
chatState: FullChatState;
|
||||
isComplete: boolean;
|
||||
onAllToolsDisplayed?: () => void;
|
||||
}) {
|
||||
const [isExpanded, setIsExpanded] = useState(false);
|
||||
|
||||
const toolGroups = useMemo(() => {
|
||||
return packetGroups.filter(
|
||||
(group) => group.packets[0] && isToolPacket(group.packets[0])
|
||||
);
|
||||
}, [packetGroups]);
|
||||
|
||||
// Use the custom hook to manage tool display timing
|
||||
const { visibleTools, allToolsDisplayed, handleToolComplete } =
|
||||
useToolDisplayTiming(toolGroups, isComplete);
|
||||
|
||||
// Notify parent when all tools are displayed
|
||||
useEffect(() => {
|
||||
if (allToolsDisplayed && onAllToolsDisplayed) {
|
||||
onAllToolsDisplayed();
|
||||
}
|
||||
}, [allToolsDisplayed, onAllToolsDisplayed]);
|
||||
|
||||
// If still processing, show tools progressively with timing
|
||||
if (!isComplete) {
|
||||
// Get the tools to display based on visibleTools
|
||||
const toolsToDisplay = toolGroups.filter((group) =>
|
||||
visibleTools.has(group.ind)
|
||||
);
|
||||
|
||||
if (toolsToDisplay.length === 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Show only the latest tool visually, but render all for completion tracking
|
||||
const shouldShowOnlyLatest = !isExpanded && toolsToDisplay.length > 1;
|
||||
const latestToolIndex = toolsToDisplay.length - 1;
|
||||
|
||||
return (
|
||||
<div className="mb-4 relative border border-border-sidebar-border rounded-lg p-4">
|
||||
<div className="relative">
|
||||
<div>
|
||||
{toolsToDisplay.map((toolGroup, index) => {
|
||||
if (!toolGroup) return null;
|
||||
|
||||
// Hide all but the latest tool when shouldShowOnlyLatest is true
|
||||
const isVisible =
|
||||
!shouldShowOnlyLatest || index === latestToolIndex;
|
||||
const isLastItem = index === toolsToDisplay.length - 1;
|
||||
|
||||
return (
|
||||
<div
|
||||
key={index}
|
||||
style={{ display: isVisible ? "block" : "none" }}
|
||||
>
|
||||
<RendererComponent
|
||||
packets={toolGroup.packets}
|
||||
chatState={chatState}
|
||||
onComplete={() => {
|
||||
// When a tool completes rendering, track it in the hook
|
||||
const toolInd = toolGroup.ind;
|
||||
if (toolInd !== undefined) {
|
||||
handleToolComplete(toolInd);
|
||||
}
|
||||
}}
|
||||
animate
|
||||
useShortRenderer={true}
|
||||
>
|
||||
{({ icon, content, status }) => {
|
||||
return (
|
||||
<div className="relative">
|
||||
{/* Connector line for non-last items */}
|
||||
{!isLastItem && isVisible && (
|
||||
<div
|
||||
className="absolute w-px bg-gray-300 dark:bg-gray-600 z-0"
|
||||
style={{
|
||||
left: "10px",
|
||||
top: "24px",
|
||||
bottom: "-12px",
|
||||
}}
|
||||
/>
|
||||
)}
|
||||
|
||||
<div className="text-sm flex items-center gap-1 loading-text">
|
||||
{icon ? icon({ size: 14 }) : null}
|
||||
{status}
|
||||
</div>
|
||||
|
||||
<div
|
||||
className={`${STANDARD_TEXT_COLOR} relative z-10 mt-1 ${
|
||||
!isLastItem ? "mb-3" : ""
|
||||
}`}
|
||||
>
|
||||
{content}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}}
|
||||
</RendererComponent>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
// If complete, show summary with toggle
|
||||
return (
|
||||
<div className="relative pb-1">
|
||||
{/* Summary header - clickable */}
|
||||
<div
|
||||
className="cursor-pointer transition-colors rounded-md p-1 -m-1"
|
||||
onClick={() => setIsExpanded(!isExpanded)}
|
||||
>
|
||||
<div className="flex items-center">
|
||||
<div className="flex items-center gap-2">
|
||||
<span className="text-sm text-gray-600 dark:text-gray-400">
|
||||
Thought for 1m 21s
|
||||
</span>
|
||||
</div>
|
||||
<div className="text-gray-400 transition-transform duration-300 ease-in-out">
|
||||
{isExpanded ? (
|
||||
<FiChevronDown size={16} />
|
||||
) : (
|
||||
<FiChevronRight size={16} />
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Expanded content */}
|
||||
<div
|
||||
className={`transition-all duration-300 ease-in-out overflow-hidden ${
|
||||
isExpanded ? "max-h-[2000px] opacity-100" : "max-h-0 opacity-0"
|
||||
}`}
|
||||
>
|
||||
<div
|
||||
className={`p-4 transition-transform duration-300 ease-in-out ${
|
||||
isExpanded ? "transform translate-y-0" : "transform -translate-y-2"
|
||||
}`}
|
||||
>
|
||||
<div>
|
||||
{toolGroups.map((toolGroup, index) => {
|
||||
const isLastItem = index === toolGroups.length - 1;
|
||||
|
||||
return (
|
||||
<RendererComponent
|
||||
key={index}
|
||||
packets={toolGroup.packets}
|
||||
chatState={chatState}
|
||||
onComplete={() => {
|
||||
// When a tool completes rendering, track it in the hook
|
||||
const toolInd = toolGroup.ind;
|
||||
if (toolInd !== undefined) {
|
||||
handleToolComplete(toolInd);
|
||||
}
|
||||
}}
|
||||
animate
|
||||
useShortRenderer={false}
|
||||
>
|
||||
{({ icon, content, status }) => {
|
||||
const finalIcon = icon ? icon({ size: 14 }) : null;
|
||||
|
||||
return (
|
||||
<div className="relative">
|
||||
{/* Connector line drawn BEFORE content so it's behind everything */}
|
||||
{/* Now all tools get a connector line since we have a Done node at the end */}
|
||||
<div
|
||||
className="absolute w-px bg-gray-300 dark:bg-gray-600 z-0"
|
||||
style={{
|
||||
left: "10px", // Half of icon width (20px / 2)
|
||||
top: "20px", // Below icon (h-5 = 20px)
|
||||
bottom: "0", // Stop at the bottom of this container, not beyond
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Main row with icon and content */}
|
||||
<div
|
||||
className={`flex items-start gap-2 ${STANDARD_TEXT_COLOR} relative z-10`}
|
||||
>
|
||||
{/* Icon column */}
|
||||
<div className="flex flex-col items-center w-5">
|
||||
{/* Icon with background to cover the line */}
|
||||
<div className="flex-shrink-0 flex items-center justify-center w-5 h-5 bg-background rounded-full">
|
||||
{finalIcon}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Content with padding */}
|
||||
<div
|
||||
className={`flex-1 ${!isLastItem ? "pb-3" : ""}`}
|
||||
>
|
||||
{
|
||||
<div className="flex mt-0.5 mb-1">
|
||||
<div className="text-xs text-gray-600 dark:text-gray-400">
|
||||
{status}
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
|
||||
{content}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}}
|
||||
</RendererComponent>
|
||||
);
|
||||
})}
|
||||
|
||||
{/* Done node at the bottom - only show after all tools are displayed */}
|
||||
{allToolsDisplayed && (
|
||||
<div className="relative">
|
||||
{/* Connector line from previous tool */}
|
||||
<div
|
||||
className="absolute w-px bg-gray-300 dark:bg-gray-600 z-0"
|
||||
style={{
|
||||
left: "10px",
|
||||
top: "0",
|
||||
height: "20px",
|
||||
}}
|
||||
/>
|
||||
|
||||
{/* Main row with icon and content */}
|
||||
<div
|
||||
className={`flex items-start gap-2 ${STANDARD_TEXT_COLOR} relative z-10 pb-3`}
|
||||
>
|
||||
{/* Icon column */}
|
||||
<div className="flex flex-col items-center w-5">
|
||||
{/* Dot with background to cover the line */}
|
||||
<div className="flex-shrink-0 flex items-center justify-center w-5 h-5 bg-background rounded-full">
|
||||
<div className="w-2 h-2 bg-gray-300 dark:bg-gray-700 rounded-full" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Content with padding */}
|
||||
<div className="flex-1">
|
||||
<div className="flex mt-0.5 mb-1">
|
||||
<div className="text-xs text-gray-600 dark:text-gray-400">
|
||||
Done
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default MultiToolRenderer;
|
||||
web/src/app/chat/message/messageComponents/constants.ts
@@ -0,0 +1 @@
|
||||
export const STANDARD_TEXT_COLOR = "text-text-600";
|
||||
@@ -0,0 +1,58 @@
|
||||
interface UseMessageSwitchingProps {
|
||||
messageId?: number | null;
|
||||
otherMessagesCanSwitchTo?: number[];
|
||||
onMessageSelection?: (messageId: number) => void;
|
||||
}
|
||||
|
||||
interface UseMessageSwitchingReturn {
|
||||
currentMessageInd: number | undefined;
|
||||
includeMessageSwitcher: boolean;
|
||||
getPreviousMessage: () => number | undefined;
|
||||
getNextMessage: () => number | undefined;
|
||||
}
|
||||
|
||||
export function useMessageSwitching({
|
||||
messageId,
|
||||
otherMessagesCanSwitchTo,
|
||||
onMessageSelection,
|
||||
}: UseMessageSwitchingProps): UseMessageSwitchingReturn {
|
||||
// Calculate message switching state
|
||||
const currentMessageInd = messageId
|
||||
? otherMessagesCanSwitchTo?.indexOf(messageId)
|
||||
: undefined;
|
||||
|
||||
const includeMessageSwitcher =
|
||||
currentMessageInd !== undefined &&
|
||||
onMessageSelection !== undefined &&
|
||||
otherMessagesCanSwitchTo !== undefined &&
|
||||
otherMessagesCanSwitchTo.length > 1;
|
||||
|
||||
const getPreviousMessage = () => {
|
||||
if (
|
||||
currentMessageInd !== undefined &&
|
||||
currentMessageInd > 0 &&
|
||||
otherMessagesCanSwitchTo
|
||||
) {
|
||||
return otherMessagesCanSwitchTo[currentMessageInd - 1];
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
const getNextMessage = () => {
|
||||
if (
|
||||
currentMessageInd !== undefined &&
|
||||
currentMessageInd < (otherMessagesCanSwitchTo?.length || 0) - 1 &&
|
||||
otherMessagesCanSwitchTo
|
||||
) {
|
||||
return otherMessagesCanSwitchTo[currentMessageInd + 1];
|
||||
}
|
||||
return undefined;
|
||||
};
|
||||
|
||||
return {
|
||||
currentMessageInd,
|
||||
includeMessageSwitcher,
|
||||
getPreviousMessage,
|
||||
getNextMessage,
|
||||
};
|
||||
}
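// Usage sketch (ids invented for illustration):
// const { currentMessageInd, getPreviousMessage, getNextMessage } =
//   useMessageSwitching({
//     messageId: 42,
//     otherMessagesCanSwitchTo: [40, 42, 57],
//     onMessageSelection,
//   });
// // currentMessageInd === 1, getPreviousMessage() === 40, getNextMessage() === 57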
|
||||
@@ -0,0 +1,144 @@
|
||||
import { Packet } from "@/app/chat/services/streamingModels";
|
||||
import { useMemo, useState, useCallback, useEffect } from "react";
|
||||
import { useRef } from "react";
|
||||
|
||||
function getInitialTools(
|
||||
toolGroups: { ind: number; packets: Packet[] }[],
|
||||
isComplete: boolean
|
||||
): Set<number> {
|
||||
if (isComplete) {
|
||||
return new Set(toolGroups.map((group) => group.ind));
|
||||
}
|
||||
return new Set();
|
||||
}
|
||||
|
||||
export function useToolDisplayTiming(
|
||||
toolGroups: { ind: number; packets: Packet[] }[],
|
||||
isComplete: boolean
|
||||
) {
|
||||
/* Adds a "minimum display time" for each tool and makes sure that we
|
||||
display tools one after another (e.g. only after the rendering of a tool is complete,
|
||||
do we start showing the next tool). */
|
||||
const MINIMUM_DISPLAY_TIME_MS = 1500; // 1.5 seconds minimum display time
|
||||
const [visibleTools, setVisibleTools] = useState<Set<number>>(() =>
|
||||
getInitialTools(toolGroups, isComplete)
|
||||
);
|
||||
const [completedToolInds, setCompletedToolInds] = useState<Set<number>>(() =>
|
||||
getInitialTools(toolGroups, isComplete)
|
||||
);
|
||||
|
||||
// Track when each tool starts displaying
|
||||
const toolStartTimesRef = useRef<Map<number, number>>(new Map());
|
||||
|
||||
// Track pending completions that are waiting for minimum display time
|
||||
const pendingOrFullCompletionsRef = useRef<
|
||||
Map<number, NodeJS.Timeout | null>
|
||||
>(new Map());
|
||||
|
||||
// Effect to manage which tools are visible based on completed tools
|
||||
useEffect(() => {
|
||||
if (toolGroups.length === 0) return;
|
||||
|
||||
// First tool is always visible
|
||||
if (visibleTools.size === 0 && toolGroups[0]) {
|
||||
setVisibleTools(new Set([toolGroups[0].ind]));
|
||||
toolStartTimesRef.current.set(toolGroups[0].ind, Date.now());
|
||||
return;
|
||||
}
|
||||
|
||||
// Find the next tool to show
|
||||
const visibleToolsArray = Array.from(visibleTools);
|
||||
const lastVisibleToolIndex = toolGroups.findIndex(
|
||||
(group) => group.ind === visibleToolsArray[visibleToolsArray.length - 1]
|
||||
);
|
||||
|
||||
// Check if the last visible tool is completed
|
||||
const lastVisibleToolInd = toolGroups[lastVisibleToolIndex]?.ind;
|
||||
if (
|
||||
lastVisibleToolInd !== undefined &&
|
||||
completedToolInds.has(lastVisibleToolInd) &&
|
||||
lastVisibleToolIndex < toolGroups.length - 1
|
||||
) {
|
||||
// Show the next tool
|
||||
const nextTool = toolGroups[lastVisibleToolIndex + 1];
|
||||
if (nextTool) {
|
||||
setVisibleTools((prev) => new Set(prev).add(nextTool.ind));
|
||||
toolStartTimesRef.current.set(nextTool.ind, Date.now());
|
||||
}
|
||||
}
|
||||
}, [toolGroups, completedToolInds, visibleTools.size]);
|
||||
|
||||
// Callback to handle when a tool completes
|
||||
const handleToolComplete = useCallback((toolInd: number) => {
|
||||
if (
|
||||
completedToolInds.has(toolInd) ||
|
||||
pendingOrFullCompletionsRef.current.has(toolInd)
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const now = Date.now();
|
||||
const startTime = toolStartTimesRef.current.get(toolInd);
|
||||
|
||||
// If we don't have a start time, record it now (tool just started)
|
||||
if (!startTime) {
|
||||
toolStartTimesRef.current.set(toolInd, now);
|
||||
}
|
||||
|
||||
const actualStartTime = toolStartTimesRef.current.get(toolInd) || now;
|
||||
const elapsedTime = now - actualStartTime;
|
||||
|
||||
if (elapsedTime >= MINIMUM_DISPLAY_TIME_MS) {
|
||||
// Enough time has passed, mark as complete immediately
|
||||
setCompletedToolInds((prev) => new Set(prev).add(toolInd));
|
||||
pendingOrFullCompletionsRef.current.set(toolInd, null);
|
||||
} else {
|
||||
// Not enough time has passed, delay the completion
|
||||
const remainingTime = MINIMUM_DISPLAY_TIME_MS - elapsedTime;
|
||||
|
||||
// Clear any existing timeout for this tool
|
||||
const existingTimeout = pendingOrFullCompletionsRef.current.get(toolInd);
|
||||
if (existingTimeout) {
|
||||
clearTimeout(existingTimeout);
|
||||
}
|
||||
|
||||
// Set a timeout to mark as complete after the remaining time
|
||||
const timeoutId = setTimeout(() => {
|
||||
setCompletedToolInds((prev) => new Set(prev).add(toolInd));
|
||||
pendingOrFullCompletionsRef.current.set(toolInd, null);
|
||||
}, remainingTime);
|
||||
|
||||
pendingOrFullCompletionsRef.current.set(toolInd, timeoutId);
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Clean up timeouts on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
pendingOrFullCompletionsRef.current.forEach((timeout) => {
|
||||
if (timeout) {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
});
|
||||
};
|
||||
}, []);
|
||||
|
||||
// Check if all tools are displayed
|
||||
const allToolsDisplayed = useMemo(() => {
|
||||
if (toolGroups.length === 0) return true;
|
||||
|
||||
// All tools are displayed if they're all visible and completed
|
||||
const allVisible = toolGroups.every((group) => visibleTools.has(group.ind));
|
||||
const allCompleted = toolGroups.every((group) =>
|
||||
completedToolInds.has(group.ind)
|
||||
);
|
||||
|
||||
return allVisible && allCompleted;
|
||||
}, [toolGroups, visibleTools, completedToolInds]);
|
||||
|
||||
return {
|
||||
visibleTools,
|
||||
handleToolComplete,
|
||||
allToolsDisplayed,
|
||||
};
|
||||
}
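// Standalone sketch of the timing rule enforced by handleToolComplete above
// (helper name ours): a tool that finishes `elapsed` ms after it first appeared
// stays visible for max(0, 1500 - elapsed) more milliseconds before the next
// tool may be revealed.
function remainingDisplayMs(
  startedAtMs: number,
  nowMs: number,
  minimumDisplayMs = 1500 // mirrors MINIMUM_DISPLAY_TIME_MS inside the hook
): number {
  return Math.max(0, minimumDisplayMs - (nowMs - startedAtMs));
}
// remainingDisplayMs(0, 400) === 1100; remainingDisplayMs(0, 2000) === 0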
|
||||
web/src/app/chat/message/messageComponents/interfaces.ts
@@ -0,0 +1,48 @@
import { MinimalPersonaSnapshot } from "@/app/admin/assistants/interfaces";
import { FeedbackType } from "../../interfaces";
import { Packet } from "../../services/streamingModels";
import { OnyxDocument, MinimalOnyxDocument } from "@/lib/search/interfaces";
import { FileResponse } from "../../my-documents/DocumentsContext";
import { LlmDescriptor } from "@/lib/hooks";
import { IconType } from "react-icons";

export enum RenderType {
  HIGHLIGHT = "highlight",
  FULL = "full",
}

export interface FullChatState {
  handleFeedback: (feedback: FeedbackType) => void;
  assistant: MinimalPersonaSnapshot;
  // Document-related context for citations
  docs?: OnyxDocument[] | null;
  userFiles?: FileResponse[];
  citations?: { [key: string]: number };
  setPresentingDocument?: (document: MinimalOnyxDocument) => void;
  // Regenerate functionality
  regenerate?: (modelOverRide: LlmDescriptor) => Promise<void>;
  overriddenModel?: string;
}

export interface RendererResult {
  icon: IconType | null;
  status: string | null;
  content: JSX.Element;
}

export type MessageRenderer<
  T extends Packet,
  S extends Partial<FullChatState>,
> = ({
  packets,
  state,
  onComplete,
  renderType,
  animate,
}: {
  packets: T[];
  state: S;
  onComplete: () => void;
  renderType: RenderType;
  animate: boolean;
}) => RendererResult;
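For orientation, a renderer only needs to fold its packet group into a RendererResult and signal completion; a minimal hypothetical example (not part of the diff), assuming it lives in a .tsx file next to interfaces.ts:

import React, { useEffect } from "react";
import { MessageRenderer, RenderType } from "./interfaces";
import { Packet } from "../../services/streamingModels";

// Trivial renderer: reports how many packets it has received so far.
export const NoopRenderer: MessageRenderer<Packet, {}> = ({
  packets,
  onComplete,
  renderType,
}) => {
  // Signal completion once at least one packet has arrived.
  useEffect(() => {
    if (packets.length > 0) {
      onComplete();
    }
  }, [packets.length, onComplete]);

  return {
    icon: null,
    status: renderType === RenderType.HIGHLIGHT ? "noop" : null,
    content: <span>{packets.length} packet(s)</span>,
  };
};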
web/src/app/chat/message/messageComponents/packetUtils.ts (new file, 84 lines)
@@ -0,0 +1,84 @@
import {
  MessageDelta,
  MessageStart,
  PacketType,
} from "../../services/streamingModels";
import { Packet } from "@/app/chat/services/streamingModels";

export function isToolPacket(packet: Packet) {
  return (
    packet.obj.type === PacketType.SEARCH_TOOL_START ||
    packet.obj.type === PacketType.SEARCH_TOOL_DELTA ||
    packet.obj.type === PacketType.IMAGE_GENERATION_TOOL_START ||
    packet.obj.type === PacketType.IMAGE_GENERATION_TOOL_DELTA ||
    packet.obj.type === PacketType.CUSTOM_TOOL_START ||
    packet.obj.type === PacketType.CUSTOM_TOOL_DELTA ||
    packet.obj.type === PacketType.REASONING_START ||
    packet.obj.type === PacketType.REASONING_DELTA ||
    packet.obj.type === PacketType.SECTION_END
  );
}

export function isStreamingComplete(packets: Packet[]) {
  return packets.some((packet) => packet.obj.type === PacketType.STOP);
}

export function isFinalAnswerComing(packets: Packet[]) {
  return packets.some((packet) => packet.obj.type === PacketType.MESSAGE_START);
}

export function isFinalAnswerComplete(packets: Packet[]) {
  // Find the first MESSAGE_START packet and get its index
  const messageStartPacket = packets.find(
    (packet) => packet.obj.type === PacketType.MESSAGE_START
  );

  if (!messageStartPacket) {
    return false;
  }

  // Check if there's a corresponding SECTION_END with the same index
  return packets.some(
    (packet) =>
      packet.obj.type === PacketType.SECTION_END &&
      packet.ind === messageStartPacket.ind
  );
}

export function groupPacketsByInd(
  packets: Packet[]
): { ind: number; packets: Packet[] }[] {
  /*
  Group packets by ind. Ordered from lowest ind to highest ind.
  */
  const groups = packets.reduce((acc: Map<number, Packet[]>, packet) => {
    const ind = packet.ind;
    if (!acc.has(ind)) {
      acc.set(ind, []);
    }
    acc.get(ind)!.push(packet);
    return acc;
  }, new Map());

  // Convert to array and sort by ind (lowest to highest)
  return Array.from(groups.entries())
    .map(([ind, packets]) => ({
      ind,
      packets,
    }))
    .sort((a, b) => a.ind - b.ind);
}

export function getTextContent(packets: Packet[]) {
  return packets
    .map((packet) => {
      if (
        packet.obj.type === PacketType.MESSAGE_START ||
        packet.obj.type === PacketType.MESSAGE_DELTA
      ) {
        return (packet.obj as MessageStart | MessageDelta).content || "";
      }
      return "";
    })
    .join("");
}
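A rough illustration of how these helpers compose; `stream` stands in for the flat packet list of a single assistant message:

import {
  groupPacketsByInd,
  getTextContent,
  isFinalAnswerComplete,
} from "./packetUtils";
import { Packet } from "@/app/chat/services/streamingModels";

function summarize(stream: Packet[]) {
  // One group per `ind`, i.e. per section of the response (tool call, reasoning, answer).
  for (const group of groupPacketsByInd(stream)) {
    console.log(
      `section ${group.ind}: ${group.packets.length} packets, ` +
        `text so far: "${getTextContent(group.packets)}"`
    );
  }
  console.log("final answer complete:", isFinalAnswerComplete(stream));
}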
@@ -0,0 +1,100 @@
|
||||
import {
|
||||
ChatPacket,
|
||||
Packet,
|
||||
PacketType,
|
||||
ReasoningPacket,
|
||||
} from "../../services/streamingModels";
|
||||
import {
|
||||
FullChatState,
|
||||
MessageRenderer,
|
||||
RenderType,
|
||||
RendererResult,
|
||||
} from "./interfaces";
|
||||
import { MessageTextRenderer } from "./renderers/MessageTextRenderer";
|
||||
import { SearchToolRenderer } from "./renderers/SearchToolRenderer";
|
||||
import { ImageToolRenderer } from "./renderers/ImageToolRenderer";
|
||||
import { ReasoningRenderer } from "./renderers/ReasoningRenderer";
|
||||
import CustomToolRenderer from "./renderers/CustomToolRenderer";
|
||||
|
||||
// Different types of chat packets using discriminated unions
|
||||
export interface GroupedPackets {
|
||||
packets: Packet[];
|
||||
}
|
||||
|
||||
function isChatPacket(packet: Packet): packet is ChatPacket {
|
||||
return (
|
||||
packet.obj.type === PacketType.MESSAGE_START ||
|
||||
packet.obj.type === PacketType.MESSAGE_DELTA ||
|
||||
packet.obj.type === PacketType.MESSAGE_END
|
||||
);
|
||||
}
|
||||
|
||||
function isSearchToolPacket(packet: Packet) {
|
||||
return packet.obj.type === PacketType.SEARCH_TOOL_START;
|
||||
}
|
||||
|
||||
function isImageToolPacket(packet: Packet) {
|
||||
return packet.obj.type === PacketType.IMAGE_GENERATION_TOOL_START;
|
||||
}
|
||||
|
||||
function isCustomToolPacket(packet: Packet) {
|
||||
return packet.obj.type === PacketType.CUSTOM_TOOL_START;
|
||||
}
|
||||
|
||||
function isReasoningPacket(packet: Packet): packet is ReasoningPacket {
|
||||
return (
|
||||
packet.obj.type === PacketType.REASONING_START ||
|
||||
packet.obj.type === PacketType.REASONING_DELTA ||
|
||||
packet.obj.type === PacketType.SECTION_END
|
||||
);
|
||||
}
|
||||
|
||||
export function findRenderer(
|
||||
groupedPackets: GroupedPackets
|
||||
): MessageRenderer<any, any> | null {
|
||||
if (groupedPackets.packets.some((packet) => isChatPacket(packet))) {
|
||||
return MessageTextRenderer;
|
||||
}
|
||||
if (groupedPackets.packets.some((packet) => isSearchToolPacket(packet))) {
|
||||
return SearchToolRenderer;
|
||||
}
|
||||
if (groupedPackets.packets.some((packet) => isImageToolPacket(packet))) {
|
||||
return ImageToolRenderer;
|
||||
}
|
||||
if (groupedPackets.packets.some((packet) => isCustomToolPacket(packet))) {
|
||||
return CustomToolRenderer;
|
||||
}
|
||||
if (groupedPackets.packets.some((packet) => isReasoningPacket(packet))) {
|
||||
return ReasoningRenderer;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
export function renderMessageComponent(
|
||||
groupedPackets: GroupedPackets,
|
||||
fullChatState: FullChatState,
|
||||
onComplete: () => void,
|
||||
animate: boolean,
|
||||
useShortRenderer: boolean = false
|
||||
): RendererResult {
|
||||
if (!groupedPackets.packets || !groupedPackets.packets[0]) {
|
||||
return { icon: null, status: null, content: <></> };
|
||||
}
|
||||
|
||||
const Renderer = findRenderer(groupedPackets);
|
||||
if (Renderer) {
|
||||
const renderType = useShortRenderer
|
||||
? RenderType.HIGHLIGHT
|
||||
: RenderType.FULL;
|
||||
|
||||
return Renderer({
|
||||
packets: groupedPackets.packets,
|
||||
state: fullChatState,
|
||||
onComplete,
|
||||
renderType,
|
||||
animate,
|
||||
});
|
||||
}
|
||||
|
||||
return { icon: null, status: null, content: <></> };
|
||||
}
|
||||
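Putting the pieces together, a caller groups a message's packets and dispatches each group to its renderer. A sketch of that call pattern; import paths are omitted because this module's location is not shown in the hunk, and `chatState` is assumed to be a fully populated FullChatState:

// const chatState: FullChatState = ...;
const groups = groupPacketsByInd(message.packets);
const rendered = groups.map((group) =>
  renderMessageComponent(
    { packets: group.packets },
    chatState,
    () => console.debug(`section ${group.ind} finished rendering`),
    /* animate */ true,
    /* useShortRenderer */ false
  )
);
// Each entry is a RendererResult ({ icon, status, content }) ready to place in the message UI.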
@@ -0,0 +1,130 @@
|
||||
import React, { useEffect, useMemo } from "react";
|
||||
import { FiExternalLink, FiDownload, FiTool } from "react-icons/fi";
|
||||
import {
|
||||
PacketType,
|
||||
CustomToolPacket,
|
||||
CustomToolStart,
|
||||
CustomToolDelta,
|
||||
SectionEnd,
|
||||
} from "../../../services/streamingModels";
|
||||
import { MessageRenderer, RenderType } from "../interfaces";
|
||||
import { buildImgUrl } from "../../../components/files/images/utils";
|
||||
|
||||
function constructCustomToolState(packets: CustomToolPacket[]) {
|
||||
const toolStart = packets.find(
|
||||
(p) => p.obj.type === PacketType.CUSTOM_TOOL_START
|
||||
)?.obj as CustomToolStart | null;
|
||||
const toolDeltas = packets
|
||||
.filter((p) => p.obj.type === PacketType.CUSTOM_TOOL_DELTA)
|
||||
.map((p) => p.obj as CustomToolDelta);
|
||||
const toolEnd = packets.find((p) => p.obj.type === PacketType.SECTION_END)
|
||||
?.obj as SectionEnd | null;
|
||||
|
||||
const toolName = toolStart?.tool_name || toolDeltas[0]?.tool_name || "Tool";
|
||||
const latestDelta = toolDeltas[toolDeltas.length - 1] || null;
|
||||
const responseType = latestDelta?.response_type || null;
|
||||
const data = latestDelta?.data;
|
||||
const fileIds = latestDelta?.file_ids || null;
|
||||
|
||||
const isRunning = Boolean(toolStart && !toolEnd);
|
||||
const isComplete = Boolean(toolStart && toolEnd);
|
||||
|
||||
return {
|
||||
toolName,
|
||||
responseType,
|
||||
data,
|
||||
fileIds,
|
||||
isRunning,
|
||||
isComplete,
|
||||
};
|
||||
}
|
||||
|
||||
export const CustomToolRenderer: MessageRenderer<CustomToolPacket, {}> = ({
|
||||
packets,
|
||||
onComplete,
|
||||
renderType,
|
||||
}) => {
|
||||
const { toolName, responseType, data, fileIds, isRunning, isComplete } =
|
||||
constructCustomToolState(packets);
|
||||
|
||||
useEffect(() => {
|
||||
if (isComplete) {
|
||||
onComplete();
|
||||
}
|
||||
}, [isComplete, onComplete]);
|
||||
|
||||
const status = useMemo(() => {
|
||||
if (isComplete) {
|
||||
if (responseType === "image") return `${toolName} returned images`;
|
||||
if (responseType === "csv") return `${toolName} returned a file`;
|
||||
return `${toolName} completed`;
|
||||
}
|
||||
if (isRunning) return `${toolName} running...`;
|
||||
return null;
|
||||
}, [toolName, responseType, isComplete, isRunning]);
|
||||
|
||||
const icon = FiTool;
|
||||
|
||||
if (renderType === RenderType.HIGHLIGHT) {
|
||||
return {
|
||||
icon,
|
||||
status: status,
|
||||
content: (
|
||||
<div className="text-sm text-muted-foreground">
|
||||
{isRunning && `${toolName} running...`}
|
||||
{isComplete && `${toolName} completed`}
|
||||
</div>
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
icon,
|
||||
status,
|
||||
content: (
|
||||
<div className="flex flex-col gap-3">
|
||||
{/* File responses */}
|
||||
{fileIds && fileIds.length > 0 && (
|
||||
<div className="text-sm text-muted-foreground flex flex-col gap-2">
|
||||
{fileIds.map((fid, idx) => (
|
||||
<div key={fid} className="flex items-center gap-2 flex-wrap">
|
||||
<span className="whitespace-nowrap">File {idx + 1}</span>
|
||||
<a
|
||||
href={buildImgUrl(fid)}
|
||||
target="_blank"
|
||||
rel="noreferrer"
|
||||
className="inline-flex items-center gap-1 text-xs text-blue-600 hover:underline whitespace-nowrap"
|
||||
>
|
||||
<FiExternalLink className="w-3 h-3" /> Open
|
||||
</a>
|
||||
<a
|
||||
href={buildImgUrl(fid)}
|
||||
download
|
||||
className="inline-flex items-center gap-1 text-xs text-blue-600 hover:underline whitespace-nowrap"
|
||||
>
|
||||
<FiDownload className="w-3 h-3" /> Download
|
||||
</a>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* JSON/Text responses */}
|
||||
{data !== undefined && data !== null && (
|
||||
<div className="text-xs bg-gray-50 dark:bg-gray-800 p-3 rounded border max-h-96 overflow-y-auto font-mono whitespace-pre-wrap break-all">
|
||||
{typeof data === "string" ? data : JSON.stringify(data, null, 2)}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Show placeholder if no response data yet */}
|
||||
{!fileIds && (data === undefined || data === null) && isRunning && (
|
||||
<div className="text-xs text-gray-500 italic">
|
||||
Waiting for response...
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
};
|
||||
};
|
||||
|
||||
export default CustomToolRenderer;
|
||||
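To make the state reduction above concrete, here is roughly what one custom-tool packet group reduces to (the literals are simplified stand-ins for the real streaming models):

// Input group (ind 2):
//   { ind: 2, obj: { type: CUSTOM_TOOL_START, tool_name: "Ticket Lookup" } }
//   { ind: 2, obj: { type: CUSTOM_TOOL_DELTA, tool_name: "Ticket Lookup", response_type: "json", data: { ... } } }
//   { ind: 2, obj: { type: SECTION_END } }
// constructCustomToolState(...) then yields:
//   { toolName: "Ticket Lookup", responseType: "json", data: { ... },
//     fileIds: null, isRunning: false, isComplete: true }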
@@ -0,0 +1,255 @@
|
||||
import React, { useEffect, useMemo } from "react";
|
||||
import { FiImage, FiDownload, FiEye } from "react-icons/fi";
|
||||
import {
|
||||
PacketType,
|
||||
ImageGenerationToolPacket,
|
||||
ImageGenerationToolStart,
|
||||
ImageGenerationToolDelta,
|
||||
SectionEnd,
|
||||
Packet,
|
||||
} from "../../../services/streamingModels";
|
||||
import { MessageRenderer, RenderType } from "../interfaces";
|
||||
import { buildImgUrl } from "../../../components/files/images/utils";
|
||||
|
||||
// Helper function to construct current image state
|
||||
function constructCurrentImageState(packets: ImageGenerationToolPacket[]) {
|
||||
const imageStart = packets.find(
|
||||
(packet) => packet.obj.type === PacketType.IMAGE_GENERATION_TOOL_START
|
||||
)?.obj as ImageGenerationToolStart | null;
|
||||
const imageDeltas = packets
|
||||
.filter(
|
||||
(packet) => packet.obj.type === PacketType.IMAGE_GENERATION_TOOL_DELTA
|
||||
)
|
||||
.map((packet) => packet.obj as ImageGenerationToolDelta);
|
||||
const imageEnd = packets.find(
|
||||
(packet) => packet.obj.type === PacketType.SECTION_END
|
||||
)?.obj as SectionEnd | null;
|
||||
|
||||
const prompt = ""; // Image generation tools don't have a main description
|
||||
const images = imageDeltas.flatMap((delta) => delta?.images || []);
|
||||
const isGenerating = imageStart && !imageEnd;
|
||||
const isComplete = imageStart && imageEnd;
|
||||
|
||||
const imageUrls = images
|
||||
.filter((image) => image.id)
|
||||
.map((image) => buildImgUrl(image.id!));
|
||||
|
||||
return {
|
||||
prompt,
|
||||
images,
|
||||
imageUrls,
|
||||
isGenerating,
|
||||
isComplete,
|
||||
error: false, // For now, we don't have error state in the packets
|
||||
};
|
||||
}
|
||||
|
||||
export const ImageToolRenderer: MessageRenderer<
|
||||
ImageGenerationToolPacket,
|
||||
{}
|
||||
> = ({ packets, onComplete, renderType }) => {
|
||||
const { prompt, images, imageUrls, isGenerating, isComplete, error } =
|
||||
constructCurrentImageState(packets);
|
||||
|
||||
useEffect(() => {
|
||||
if (isComplete) {
|
||||
onComplete();
|
||||
}
|
||||
}, [isComplete]);
|
||||
|
||||
const status = useMemo(() => {
|
||||
if (isComplete) {
|
||||
return `Generated ${imageUrls.length} image${
|
||||
imageUrls.length > 1 ? "s" : ""
|
||||
}`;
|
||||
}
|
||||
if (isGenerating) {
|
||||
return "Generating image...";
|
||||
}
|
||||
return null;
|
||||
}, [isComplete, isGenerating, imageUrls.length]);
|
||||
|
||||
// Render based on renderType
|
||||
if (renderType === RenderType.FULL) {
|
||||
// Full rendering with title header and content below
|
||||
// Loading state - when generating
|
||||
if (isGenerating) {
|
||||
return {
|
||||
icon: FiImage,
|
||||
status: "Generating images...",
|
||||
content: (
|
||||
<div className="flex flex-col">
|
||||
{/* Title header with icon */}
|
||||
<div className="flex items-center gap-3 mb-3">
|
||||
<div className="flex-shrink-0 w-4 h-4 flex items-center justify-center">
|
||||
<FiImage className="w-4 h-4 text-gray-600 dark:text-gray-400" />
|
||||
</div>
|
||||
<span className="text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||
Generating images{prompt && ` "${prompt}"`}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Content below - loading indicator */}
|
||||
<div className="flex items-center gap-2 ml-7">
|
||||
<div className="flex gap-0.5">
|
||||
<div className="w-1 h-1 bg-gray-500 rounded-full animate-bounce"></div>
|
||||
<div
|
||||
className="w-1 h-1 bg-gray-500 rounded-full animate-bounce"
|
||||
style={{ animationDelay: "0.1s" }}
|
||||
></div>
|
||||
<div
|
||||
className="w-1 h-1 bg-gray-500 rounded-full animate-bounce"
|
||||
style={{ animationDelay: "0.2s" }}
|
||||
></div>
|
||||
</div>
|
||||
<span className="text-xs text-gray-500 dark:text-gray-400">
|
||||
Please wait...
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
// Complete state - show images
|
||||
if (isComplete) {
|
||||
return {
|
||||
icon: FiImage,
|
||||
status: `Generated ${images.length} image${
|
||||
images.length !== 1 ? "s" : ""
|
||||
}`,
|
||||
content: (
|
||||
<div className="flex flex-col">
|
||||
{/* Title header with icon */}
|
||||
<div className="flex items-center gap-3 mb-3">
|
||||
<div className="flex-shrink-0 w-4 h-4 flex items-center justify-center">
|
||||
<FiImage className="w-4 h-4 text-gray-600 dark:text-gray-400" />
|
||||
</div>
|
||||
<span className="text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||
{prompt && `"${prompt}"`} • {images.length} image
|
||||
{images.length !== 1 ? "s" : ""} generated
|
||||
</span>
|
||||
</div>
|
||||
|
||||
{/* Content below - images */}
|
||||
{images.length > 0 ? (
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4 ml-7">
|
||||
{images.map(
|
||||
(image: { [key: string]: string }, index: number) => (
|
||||
<div
|
||||
key={image.id || index}
|
||||
className="bg-gray-50 dark:bg-gray-800 p-4 rounded-lg hover:bg-gray-100 dark:hover:bg-gray-700 transition-all group"
|
||||
>
|
||||
{/* Image */}
|
||||
{image.id && (
|
||||
<div className="relative mb-3">
|
||||
<img
|
||||
src={buildImgUrl(image.id)}
|
||||
alt={image.prompt || "Generated image"}
|
||||
className="w-full h-48 object-cover rounded-lg"
|
||||
loading="lazy"
|
||||
/>
|
||||
<div className="absolute top-2 right-2 opacity-0 group-hover:opacity-100 transition-opacity flex gap-1">
|
||||
<button
|
||||
onClick={() =>
|
||||
window.open(buildImgUrl(image.id!), "_blank")
|
||||
}
|
||||
className="bg-black bg-opacity-50 text-white p-1.5 rounded hover:bg-opacity-70 transition-colors"
|
||||
title="View full size"
|
||||
>
|
||||
<FiEye className="w-3 h-3" />
|
||||
</button>
|
||||
<button
|
||||
onClick={() => {
|
||||
const link = document.createElement("a");
|
||||
link.href = buildImgUrl(image.id!);
|
||||
link.download = `generated-image-${
|
||||
index + 1
|
||||
}.png`;
|
||||
link.click();
|
||||
}}
|
||||
className="bg-black bg-opacity-50 text-white p-1.5 rounded hover:bg-opacity-70 transition-colors"
|
||||
title="Download"
|
||||
>
|
||||
<FiDownload className="w-3 h-3" />
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Prompt */}
|
||||
{image.prompt && (
|
||||
<div className="text-sm text-gray-600 dark:text-gray-400 line-clamp-2 leading-relaxed">
|
||||
<span className="font-medium">Prompt:</span>{" "}
|
||||
{image.prompt}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
)
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<div className="py-4 text-center text-gray-500 dark:text-gray-400 ml-7">
|
||||
<FiImage className="w-6 h-6 mx-auto mb-2 opacity-50" />
|
||||
<p className="text-sm">No images generated</p>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
// Fallback (shouldn't happen in normal flow)
|
||||
return {
|
||||
icon: FiImage,
|
||||
status: null,
|
||||
content: <div></div>,
|
||||
};
|
||||
}
|
||||
|
||||
// Highlight/Short rendering
|
||||
if (isGenerating) {
|
||||
return {
|
||||
icon: FiImage,
|
||||
status: "Generating image...",
|
||||
content: (
|
||||
<div className="text-sm text-muted-foreground">Generating image...</div>
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return {
|
||||
icon: FiImage,
|
||||
status: "Image generation failed",
|
||||
content: (
|
||||
<div className="text-sm text-red-600 dark:text-red-400">
|
||||
Image generation failed
|
||||
</div>
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
if (isComplete && imageUrls.length > 0) {
|
||||
return {
|
||||
icon: FiImage,
|
||||
status: `Generated ${imageUrls.length} image${
|
||||
imageUrls.length > 1 ? "s" : ""
|
||||
}`,
|
||||
content: (
|
||||
<div className="text-sm text-muted-foreground">
|
||||
Generated {imageUrls.length} image
|
||||
{imageUrls.length > 1 ? "s" : ""}
|
||||
</div>
|
||||
),
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
icon: FiImage,
|
||||
status: "Image generation",
|
||||
content: (
|
||||
<div className="text-sm text-muted-foreground">Image generation</div>
|
||||
),
|
||||
};
|
||||
};
|
||||
@@ -0,0 +1,194 @@
|
||||
import React, { useCallback, useEffect, useMemo, useState } from "react";
|
||||
import ReactMarkdown from "react-markdown";
|
||||
import remarkGfm from "remark-gfm";
|
||||
import remarkMath from "remark-math";
|
||||
import rehypePrism from "rehype-prism-plus";
|
||||
import rehypeKatex from "rehype-katex";
|
||||
import "prismjs/themes/prism-tomorrow.css";
|
||||
import "katex/dist/katex.min.css";
|
||||
import "../../custom-code-styles.css";
|
||||
|
||||
import { ChatPacket, PacketType } from "../../../services/streamingModels";
|
||||
import { MessageRenderer, FullChatState } from "../interfaces";
|
||||
import {
|
||||
MemoizedAnchor,
|
||||
MemoizedParagraph,
|
||||
} from "../../MemoizedTextComponents";
|
||||
import { extractCodeText, preprocessLaTeX } from "../../codeUtils";
|
||||
import { CodeBlock } from "../../CodeBlock";
|
||||
import { transformLinkUri } from "@/lib/utils";
|
||||
import { isFinalAnswerComplete } from "../packetUtils";
|
||||
|
||||
// Control the rate of packet streaming (packets per second)
|
||||
const PACKET_DELAY_MS = 10;
|
||||
|
||||
export const MessageTextRenderer: MessageRenderer<
|
||||
ChatPacket,
|
||||
FullChatState
|
||||
> = ({ packets, state, onComplete, renderType, animate }) => {
|
||||
  // When animating, reveal content gradually: start from a single packet (or none if
  // nothing has arrived yet); otherwise show everything at once
  const initialPacketCount = animate
    ? packets.length > 0
      ? 1 // Start with 1 packet
      : 0
    : -1; // Show all if not animating
|
||||
|
||||
const [displayedPacketCount, setDisplayedPacketCount] =
|
||||
useState(initialPacketCount);
|
||||
|
||||
// Get the full content from all packets
|
||||
const fullContent = packets
|
||||
.map((packet) => {
|
||||
if (
|
||||
packet.obj.type === PacketType.MESSAGE_DELTA ||
|
||||
packet.obj.type === PacketType.MESSAGE_START
|
||||
) {
|
||||
return packet.obj.content;
|
||||
}
|
||||
return "";
|
||||
})
|
||||
.join("");
|
||||
|
||||
// Animation effect - gradually increase displayed packets at controlled rate
|
||||
useEffect(() => {
|
||||
if (!animate) {
|
||||
setDisplayedPacketCount(-1); // Show all packets
|
||||
return;
|
||||
}
|
||||
|
||||
if (displayedPacketCount >= 0 && displayedPacketCount < packets.length) {
|
||||
const timer = setTimeout(() => {
|
||||
setDisplayedPacketCount((prev) => Math.min(prev + 1, packets.length));
|
||||
}, PACKET_DELAY_MS);
|
||||
|
||||
return () => clearTimeout(timer);
|
||||
}
|
||||
}, [animate, displayedPacketCount, packets.length]);
|
||||
|
||||
// Reset displayed count when packet array changes significantly (e.g., new message)
|
||||
useEffect(() => {
|
||||
if (animate && packets.length < displayedPacketCount) {
|
||||
const resetCount = isFinalAnswerComplete(packets)
|
||||
? Math.min(10, packets.length)
|
||||
: packets.length > 0
|
||||
? 1
|
||||
: 0;
|
||||
setDisplayedPacketCount(resetCount);
|
||||
}
|
||||
}, [animate, packets.length, displayedPacketCount]);
|
||||
|
||||
// Only mark as complete when all packets are received AND displayed
|
||||
useEffect(() => {
|
||||
if (isFinalAnswerComplete(packets)) {
|
||||
// If animating, wait until all packets are displayed
|
||||
if (
|
||||
animate &&
|
||||
displayedPacketCount >= 0 &&
|
||||
displayedPacketCount < packets.length
|
||||
) {
|
||||
return;
|
||||
}
|
||||
onComplete();
|
||||
}
|
||||
}, [packets, onComplete, animate, displayedPacketCount]);
|
||||
|
||||
// Get content based on displayed packet count
|
||||
const content = useMemo(() => {
|
||||
if (!animate || displayedPacketCount === -1) {
|
||||
return fullContent; // Show all content
|
||||
}
|
||||
|
||||
// Only show content from packets up to displayedPacketCount
|
||||
return packets
|
||||
.slice(0, displayedPacketCount)
|
||||
.map((packet) => {
|
||||
if (
|
||||
packet.obj.type === PacketType.MESSAGE_DELTA ||
|
||||
packet.obj.type === PacketType.MESSAGE_START
|
||||
) {
|
||||
return packet.obj.content;
|
||||
}
|
||||
return "";
|
||||
})
|
||||
.join("");
|
||||
}, [animate, displayedPacketCount, fullContent, packets]);
|
||||
|
||||
const processContent = (content: string) => {
|
||||
const codeBlockRegex = /```(\w*)\n[\s\S]*?```|```[\s\S]*?$/g;
|
||||
const matches = content.match(codeBlockRegex);
|
||||
|
||||
if (matches) {
|
||||
content = matches.reduce((acc, match) => {
|
||||
if (!match.match(/```\w+/)) {
|
||||
return acc.replace(match, match.replace("```", "```plaintext"));
|
||||
}
|
||||
return acc;
|
||||
}, content);
|
||||
|
||||
const lastMatch = matches[matches.length - 1];
|
||||
if (lastMatch && !lastMatch.endsWith("```")) {
|
||||
return preprocessLaTeX(content);
|
||||
}
|
||||
}
|
||||
|
||||
const processed = preprocessLaTeX(content);
|
||||
return processed;
|
||||
};
|
||||
|
||||
const processedContent = processContent(content);
|
||||
|
||||
const paragraphCallback = useCallback(
|
||||
(props: any) => <MemoizedParagraph>{props.children}</MemoizedParagraph>,
|
||||
[]
|
||||
);
|
||||
|
||||
const anchorCallback = useCallback(
|
||||
(props: any) => (
|
||||
<MemoizedAnchor
|
||||
updatePresentingDocument={state.setPresentingDocument || (() => {})}
|
||||
docs={state.docs || []}
|
||||
userFiles={state.userFiles || []}
|
||||
href={props.href}
|
||||
>
|
||||
{props.children}
|
||||
</MemoizedAnchor>
|
||||
),
|
||||
[state.docs, state.userFiles, state.setPresentingDocument]
|
||||
);
|
||||
|
||||
const markdownComponents = useMemo(
|
||||
() => ({
|
||||
a: anchorCallback,
|
||||
p: paragraphCallback,
|
||||
b: ({ node, className, children }: any) => {
|
||||
return <span className={className}>{children}</span>;
|
||||
},
|
||||
code: ({ node, className, children }: any) => {
|
||||
const codeText = extractCodeText(node, processedContent, children);
|
||||
|
||||
return (
|
||||
<CodeBlock className={className} codeText={codeText}>
|
||||
{children}
|
||||
</CodeBlock>
|
||||
);
|
||||
},
|
||||
}),
|
||||
[anchorCallback, paragraphCallback, processedContent]
|
||||
);
|
||||
|
||||
return {
|
||||
icon: null,
|
||||
status: null,
|
||||
content: (
|
||||
<ReactMarkdown
|
||||
className="prose dark:prose-invert max-w-full text-base"
|
||||
components={markdownComponents}
|
||||
remarkPlugins={[remarkGfm, remarkMath]}
|
||||
rehypePlugins={[[rehypePrism, { ignoreMissing: true }], rehypeKatex]}
|
||||
urlTransform={transformLinkUri}
|
||||
>
|
||||
{processedContent}
|
||||
</ReactMarkdown>
|
||||
),
|
||||
};
|
||||
};
|
||||
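The processContent step above mainly guards against partially streamed code fences: an opening fence without a language tag is rewritten to ```plaintext so the highlighter treats the still-open block as plain text. Roughly (illustrative input only):

const streamed = "Here is the patch so far:\n```\n- old line\n+ new line";
// processContent(streamed) rewrites the bare opening fence, giving approximately:
// "Here is the patch so far:\n```plaintext\n- old line\n+ new line"
// before the LaTeX preprocessing runs.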
@@ -0,0 +1,105 @@
|
||||
import React, { useEffect, useMemo, useRef, useState } from "react";
|
||||
import {
|
||||
PacketType,
|
||||
ReasoningDelta,
|
||||
ReasoningPacket,
|
||||
} from "../../../services/streamingModels";
|
||||
import { MessageRenderer } from "../interfaces";
|
||||
import { FiClock } from "react-icons/fi";
|
||||
|
||||
const THINKING_MIN_DURATION_MS = 1000; // 1 second minimum for "Thinking" state
|
||||
|
||||
function constructCurrentReasoningState(packets: ReasoningPacket[]) {
|
||||
const hasStart = packets.some(
|
||||
(p) => p.obj.type === PacketType.REASONING_START
|
||||
);
|
||||
const hasEnd = packets.some(
|
||||
(p) =>
|
||||
p.obj.type === PacketType.SECTION_END ||
|
||||
// Support either convention for reasoning completion
|
||||
(p.obj as any).type === PacketType.REASONING_END
|
||||
);
|
||||
const deltas = packets
|
||||
.filter((p) => p.obj.type === PacketType.REASONING_DELTA)
|
||||
.map((p) => p.obj as ReasoningDelta);
|
||||
|
||||
const content = deltas.map((d) => d.reasoning).join("");
|
||||
|
||||
return {
|
||||
hasStart,
|
||||
hasEnd,
|
||||
content,
|
||||
};
|
||||
}
|
||||
|
||||
export const ReasoningRenderer: MessageRenderer<ReasoningPacket, {}> = ({
|
||||
packets,
|
||||
onComplete,
|
||||
animate,
|
||||
}) => {
|
||||
const { hasStart, hasEnd, content } = useMemo(
|
||||
() => constructCurrentReasoningState(packets),
|
||||
[packets]
|
||||
);
|
||||
|
||||
// Track reasoning timing for minimum display duration
|
||||
const [reasoningStartTime, setReasoningStartTime] = useState<number | null>(
|
||||
null
|
||||
);
|
||||
const timeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const completionHandledRef = useRef(false);
|
||||
|
||||
// Track when reasoning starts
|
||||
useEffect(() => {
|
||||
if ((hasStart || hasEnd) && reasoningStartTime === null) {
|
||||
setReasoningStartTime(Date.now());
|
||||
}
|
||||
}, [hasStart, hasEnd, reasoningStartTime]);
|
||||
|
||||
// Handle reasoning completion with minimum duration
|
||||
useEffect(() => {
|
||||
if (
|
||||
hasEnd &&
|
||||
reasoningStartTime !== null &&
|
||||
!completionHandledRef.current
|
||||
) {
|
||||
completionHandledRef.current = true;
|
||||
const elapsedTime = Date.now() - reasoningStartTime;
|
||||
const minimumThinkingDuration = animate ? THINKING_MIN_DURATION_MS : 0;
|
||||
|
||||
if (elapsedTime >= minimumThinkingDuration) {
|
||||
// Enough time has passed, complete immediately
|
||||
onComplete();
|
||||
} else {
|
||||
// Not enough time has passed, delay completion
|
||||
const remainingTime = minimumThinkingDuration - elapsedTime;
|
||||
timeoutRef.current = setTimeout(() => {
|
||||
onComplete();
|
||||
}, remainingTime);
|
||||
}
|
||||
}
|
||||
}, [hasEnd, reasoningStartTime, animate, onComplete]);
|
||||
|
||||
// Cleanup timeout on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (timeoutRef.current) {
|
||||
clearTimeout(timeoutRef.current);
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
if (!hasStart && !hasEnd && content.length === 0) {
|
||||
return { icon: null, status: null, content: <></> };
|
||||
}
|
||||
|
||||
const status = hasEnd ? "Thinking complete" : "Thinking...";
|
||||
|
||||
return {
|
||||
icon: FiClock,
|
||||
status,
|
||||
content: <div className="text-sm">{content}</div>,
|
||||
};
|
||||
};
|
||||
|
||||
export default ReasoningRenderer;
|
||||
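As with the other renderers, the reasoning state is a pure fold over the packet group; roughly (simplified packet literals):

// Input group (ind 1):
//   { ind: 1, obj: { type: REASONING_START } }
//   { ind: 1, obj: { type: REASONING_DELTA, reasoning: "Comparing the two " } }
//   { ind: 1, obj: { type: REASONING_DELTA, reasoning: "retrieval strategies." } }
//   { ind: 1, obj: { type: SECTION_END } }
// constructCurrentReasoningState(...) then yields:
//   { hasStart: true, hasEnd: true, content: "Comparing the two retrieval strategies." }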
@@ -0,0 +1,225 @@
|
||||
import React, { useEffect, useState, useRef, useMemo } from "react";
|
||||
import { FiSearch } from "react-icons/fi";
|
||||
import {
|
||||
PacketType,
|
||||
SearchToolPacket,
|
||||
SearchToolStart,
|
||||
SearchToolDelta,
|
||||
SectionEnd,
|
||||
} from "../../../services/streamingModels";
|
||||
import { MessageRenderer } from "../interfaces";
|
||||
import { SourceChip2 } from "../../../components/input/ChatInputBar";
|
||||
import { ResultIcon } from "@/components/chat/sources/SourceCard";
|
||||
import { truncateString } from "@/lib/utils";
|
||||
import { OnyxDocument } from "@/lib/search/interfaces";
|
||||
|
||||
const MAX_RESULTS_TO_SHOW = 3;
|
||||
const MAX_TITLE_LENGTH = 25;
|
||||
|
||||
const SEARCHING_MIN_DURATION_MS = 1000; // 1 second minimum for "Searching" state
|
||||
const SEARCHED_MIN_DURATION_MS = 1000; // 1 second minimum for "Searched" state
|
||||
|
||||
const constructCurrentSearchState = (
|
||||
packets: SearchToolPacket[]
|
||||
): {
|
||||
queries: string[];
|
||||
results: OnyxDocument[];
|
||||
isSearching: boolean;
|
||||
isComplete: boolean;
|
||||
} => {
|
||||
// Check for new specific search tool packets first
|
||||
const searchStart = packets.find(
|
||||
(packet) => packet.obj.type === PacketType.SEARCH_TOOL_START
|
||||
)?.obj as SearchToolStart | null;
|
||||
const searchDeltas = packets
|
||||
.filter((packet) => packet.obj.type === PacketType.SEARCH_TOOL_DELTA)
|
||||
.map((packet) => packet.obj as SearchToolDelta);
|
||||
const searchEnd = packets.find(
|
||||
(packet) => packet.obj.type === PacketType.SECTION_END
|
||||
)?.obj as SectionEnd | null;
|
||||
|
||||
// Extract queries from ToolDelta packets
|
||||
const queries = searchDeltas
|
||||
.flatMap((delta) => delta?.queries || [])
|
||||
.filter((query, index, arr) => arr.indexOf(query) === index); // Remove duplicates
|
||||
|
||||
const seenDocIds = new Set<string>();
|
||||
const results = searchDeltas
|
||||
.flatMap((delta) => delta?.documents || [])
|
||||
.filter((doc) => {
|
||||
if (!doc || !doc.document_id) return false;
|
||||
if (seenDocIds.has(doc.document_id)) return false;
|
||||
seenDocIds.add(doc.document_id);
|
||||
return true;
|
||||
});
|
||||
|
||||
const isSearching = Boolean(searchStart && !searchEnd);
|
||||
const isComplete = Boolean(searchStart && searchEnd);
|
||||
|
||||
return { queries, results, isSearching, isComplete };
|
||||
};
|
||||
|
||||
export const SearchToolRenderer: MessageRenderer<SearchToolPacket, {}> = ({
|
||||
packets,
|
||||
onComplete,
|
||||
renderType,
|
||||
animate,
|
||||
}) => {
|
||||
const { queries, results, isSearching, isComplete } =
|
||||
constructCurrentSearchState(packets);
|
||||
|
||||
// Track search timing for minimum display duration
|
||||
const [searchStartTime, setSearchStartTime] = useState<number | null>(null);
|
||||
const [shouldShowAsSearching, setShouldShowAsSearching] = useState(false);
|
||||
|
||||
const [shouldShowAsSearched, setShouldShowAsSearched] = useState(false);
|
||||
const timeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const searchedTimeoutRef = useRef<NodeJS.Timeout | null>(null);
|
||||
const completionHandledRef = useRef(false);
|
||||
|
||||
// Track when search starts (even if the search completes instantly)
|
||||
useEffect(() => {
|
||||
if ((isSearching || isComplete) && searchStartTime === null) {
|
||||
setSearchStartTime(Date.now());
|
||||
setShouldShowAsSearching(true);
|
||||
}
|
||||
}, [isSearching, isComplete, searchStartTime]);
|
||||
|
||||
// Handle search completion with minimum duration
|
||||
useEffect(() => {
|
||||
if (
|
||||
isComplete &&
|
||||
searchStartTime !== null &&
|
||||
!completionHandledRef.current
|
||||
) {
|
||||
completionHandledRef.current = true;
|
||||
const elapsedTime = Date.now() - searchStartTime;
|
||||
const minimumSearchingDuration = animate ? SEARCHING_MIN_DURATION_MS : 0;
|
||||
const minimumSearchedDuration = animate ? SEARCHED_MIN_DURATION_MS : 0;
|
||||
|
||||
const handleSearchingToSearched = () => {
|
||||
setShouldShowAsSearching(false);
|
||||
setShouldShowAsSearched(true);
|
||||
|
||||
searchedTimeoutRef.current = setTimeout(() => {
|
||||
setShouldShowAsSearched(false);
|
||||
onComplete();
|
||||
}, minimumSearchedDuration);
|
||||
};
|
||||
|
||||
if (elapsedTime >= minimumSearchingDuration) {
|
||||
// Enough time has passed for searching, transition to searched immediately
|
||||
handleSearchingToSearched();
|
||||
} else {
|
||||
// Not enough time has passed for searching, delay the transition
|
||||
const remainingTime = minimumSearchingDuration - elapsedTime;
|
||||
timeoutRef.current = setTimeout(
|
||||
handleSearchingToSearched,
|
||||
remainingTime
|
||||
);
|
||||
}
|
||||
}
|
||||
}, [isComplete, searchStartTime, animate, queries, onComplete]);
|
||||
|
||||
// Cleanup timeouts on unmount
|
||||
useEffect(() => {
|
||||
return () => {
|
||||
if (timeoutRef.current) {
|
||||
clearTimeout(timeoutRef.current);
|
||||
}
|
||||
if (searchedTimeoutRef.current) {
|
||||
clearTimeout(searchedTimeoutRef.current);
|
||||
}
|
||||
};
|
||||
}, []);
|
||||
|
||||
const status = useMemo(() => {
|
||||
// If we have documents to show and we're in the searched state, show "Searched"
|
||||
if (results.length > 0) {
|
||||
// If we're still showing as searching (before transition), show "Searching"
|
||||
if (shouldShowAsSearching) {
|
||||
return "Searching internal documents";
|
||||
}
|
||||
// Otherwise show "Searched"
|
||||
return "Searched internal documents";
|
||||
}
|
||||
|
||||
// Handle states based on timing
|
||||
if (shouldShowAsSearched) {
|
||||
return "Searched internal documents";
|
||||
}
|
||||
if (isSearching || isComplete || shouldShowAsSearching) {
|
||||
return "Searching internal documents";
|
||||
}
|
||||
return null;
|
||||
}, [
|
||||
isSearching,
|
||||
isComplete,
|
||||
shouldShowAsSearching,
|
||||
shouldShowAsSearched,
|
||||
results.length,
|
||||
]);
|
||||
|
||||
// Don't render anything if search hasn't started
|
||||
if (queries.length === 0) {
|
||||
return {
|
||||
icon: FiSearch,
|
||||
status: null,
|
||||
content: <div></div>,
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
icon: FiSearch,
|
||||
status,
|
||||
content: (
|
||||
<div className="flex flex-col">
|
||||
<div className="flex flex-col">
|
||||
<div className="flex flex-wrap gap-2 ml-1 mt-1">
|
||||
{queries.map((query, index) => (
|
||||
<div key={index} className={`text-xs text-gray-600 mb-2`}>
|
||||
<SourceChip2
|
||||
icon={<FiSearch size={10} />}
|
||||
title={truncateString(query, MAX_TITLE_LENGTH)}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
<div className="flex flex-wrap gap-2 ml-1">
|
||||
{results.slice(0, MAX_RESULTS_TO_SHOW).map((result, index) => (
|
||||
<div
|
||||
key={result.document_id}
|
||||
className="animate-in fade-in slide-in-from-bottom-1 duration-300"
|
||||
style={{ animationDelay: `${index * 100}ms` }}
|
||||
>
|
||||
<SourceChip2
|
||||
icon={<ResultIcon doc={result} size={10} />}
|
||||
title={truncateString(
|
||||
result.semantic_identifier || "",
|
||||
MAX_TITLE_LENGTH
|
||||
)}
|
||||
onClick={() => {
|
||||
window.open(result.link, "_blank");
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
))}
|
||||
{/* Show a blurb if there are more results than we are displaying */}
|
||||
{results.length > MAX_RESULTS_TO_SHOW && (
|
||||
<div
|
||||
className="animate-in fade-in slide-in-from-bottom-1 duration-300"
|
||||
style={{
|
||||
animationDelay: `${MAX_RESULTS_TO_SHOW * 100}ms`,
|
||||
}}
|
||||
>
|
||||
<SourceChip2
|
||||
title={`${results.length - MAX_RESULTS_TO_SHOW} more...`}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
),
|
||||
};
|
||||
};
|
||||
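The query and document lists above are deduplicated as deltas stream in. The same idiom in isolation, as a generic helper that is not part of the diff:

// Keep the first occurrence of each item, preserving stream order.
function dedupeBy<T, K>(items: T[], key: (item: T) => K): T[] {
  const seen = new Set<K>();
  return items.filter((item) => {
    const k = key(item);
    if (seen.has(k)) {
      return false;
    }
    seen.add(k);
    return true;
  });
}

// e.g. dedupeBy(allDocs, (doc) => doc.document_id) mirrors the seenDocIds logic above.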
@@ -14,7 +14,7 @@ import {
|
||||
cleanThinkingContent,
|
||||
hasPartialThinkingTokens,
|
||||
isThinkingComplete,
|
||||
} from "../../utils/thinkingTokens";
|
||||
} from "../../services/thinkingTokens";
|
||||
import "./ThinkingBox.css";
|
||||
|
||||
interface ThinkingBoxProps {
|
||||
|
||||
@@ -1,71 +0,0 @@
|
||||
import { BasicClickable } from "@/components/BasicClickable";
|
||||
import { ControlledPopup, DefaultDropdownElement } from "@/components/Dropdown";
|
||||
import { useState } from "react";
|
||||
import { FiCpu, FiSearch } from "react-icons/fi";
|
||||
|
||||
export const QA = "Question Answering";
|
||||
export const SEARCH = "Search Only";
|
||||
|
||||
function SearchTypeSelectorContent({
|
||||
selectedSearchType,
|
||||
setSelectedSearchType,
|
||||
}: {
|
||||
selectedSearchType: string;
|
||||
setSelectedSearchType: React.Dispatch<React.SetStateAction<string>>;
|
||||
}) {
|
||||
return (
|
||||
<div className="w-56">
|
||||
<DefaultDropdownElement
|
||||
key={QA}
|
||||
name={QA}
|
||||
icon={FiCpu}
|
||||
onSelect={() => setSelectedSearchType(QA)}
|
||||
isSelected={selectedSearchType === QA}
|
||||
/>
|
||||
<DefaultDropdownElement
|
||||
key={SEARCH}
|
||||
name={SEARCH}
|
||||
icon={FiSearch}
|
||||
onSelect={() => setSelectedSearchType(SEARCH)}
|
||||
isSelected={selectedSearchType === SEARCH}
|
||||
/>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export function SearchTypeSelector({
|
||||
selectedSearchType,
|
||||
setSelectedSearchType,
|
||||
}: {
|
||||
selectedSearchType: string;
|
||||
setSelectedSearchType: React.Dispatch<React.SetStateAction<string>>;
|
||||
}) {
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
|
||||
return (
|
||||
<ControlledPopup
|
||||
isOpen={isOpen}
|
||||
setIsOpen={setIsOpen}
|
||||
popupContent={
|
||||
<SearchTypeSelectorContent
|
||||
selectedSearchType={selectedSearchType}
|
||||
setSelectedSearchType={setSelectedSearchType}
|
||||
/>
|
||||
}
|
||||
>
|
||||
<BasicClickable onClick={() => setIsOpen(!isOpen)}>
|
||||
<div className="flex text-xs">
|
||||
{selectedSearchType === QA ? (
|
||||
<>
|
||||
<FiCpu className="my-auto mr-1" /> QA
|
||||
</>
|
||||
) : (
|
||||
<>
|
||||
<FiSearch className="my-auto mr-1" /> Search
|
||||
</>
|
||||
)}
|
||||
</div>
|
||||
</BasicClickable>
|
||||
</ControlledPopup>
|
||||
);
|
||||
}
|
||||
@@ -12,7 +12,7 @@ import {
|
||||
} from "@/components/ui/dialog";
|
||||
import { v4 as uuidv4 } from "uuid";
|
||||
import { Button } from "@/components/ui/button";
|
||||
import { SimplifiedChatInputBar } from "../input/SimplifiedChatInputBar";
|
||||
import { SimplifiedChatInputBar } from "../components/input/SimplifiedChatInputBar";
|
||||
import { Menu } from "lucide-react";
|
||||
import { Shortcut } from "./interfaces";
|
||||
import {
|
||||
@@ -22,7 +22,7 @@ import {
|
||||
import { Modal } from "@/components/Modal";
|
||||
import { useNightTime } from "@/lib/dateUtils";
|
||||
import { useFilters } from "@/lib/hooks";
|
||||
import { uploadFilesForChat } from "../lib";
|
||||
import { uploadFilesForChat } from "../services/lib";
|
||||
import { ChatFileType, FileDescriptor } from "../interfaces";
|
||||
import { useChatContext } from "@/components/context/ChatContext";
|
||||
import Dropzone from "react-dropzone";
|
||||
|
||||
web/src/app/chat/services/constructSubQuestions.ts (new file, 146 lines)
@@ -0,0 +1,146 @@
|
||||
import {
|
||||
AgentAnswerPiece,
|
||||
SubQuestionPiece,
|
||||
SubQuestionSearchDoc,
|
||||
} from "@/lib/search/interfaces";
|
||||
import { StreamStopInfo } from "@/lib/search/interfaces";
|
||||
import { SubQueryPiece } from "@/lib/search/interfaces";
|
||||
import { SubQuestionDetail } from "../interfaces";
|
||||
|
||||
import { DocumentsResponse } from "../interfaces";
|
||||
|
||||
export const constructSubQuestions = (
|
||||
subQuestions: SubQuestionDetail[],
|
||||
newDetail:
|
||||
| SubQuestionPiece
|
||||
| SubQueryPiece
|
||||
| AgentAnswerPiece
|
||||
| SubQuestionSearchDoc
|
||||
| DocumentsResponse
|
||||
| StreamStopInfo
|
||||
): SubQuestionDetail[] => {
|
||||
if (!newDetail) {
|
||||
return subQuestions;
|
||||
}
|
||||
if (newDetail.level_question_num == 0) {
|
||||
return subQuestions;
|
||||
}
|
||||
|
||||
const updatedSubQuestions = [...subQuestions];
|
||||
|
||||
if ("stop_reason" in newDetail) {
|
||||
const { level, level_question_num } = newDetail;
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
if (subQuestion) {
|
||||
if (newDetail.stream_type == "sub_answer") {
|
||||
subQuestion.answer_streaming = false;
|
||||
} else {
|
||||
subQuestion.is_complete = true;
|
||||
subQuestion.is_stopped = true;
|
||||
}
|
||||
}
|
||||
} else if ("top_documents" in newDetail) {
|
||||
const { level, level_question_num, top_documents } = newDetail;
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
if (!subQuestion) {
|
||||
subQuestion = {
|
||||
level: level ?? 0,
|
||||
level_question_num: level_question_num ?? 0,
|
||||
question: "",
|
||||
answer: "",
|
||||
sub_queries: [],
|
||||
context_docs: { top_documents },
|
||||
is_complete: false,
|
||||
};
|
||||
} else {
|
||||
subQuestion.context_docs = { top_documents };
|
||||
}
|
||||
} else if ("answer_piece" in newDetail) {
|
||||
// Handle AgentAnswerPiece
|
||||
const { level, level_question_num, answer_piece } = newDetail;
|
||||
// Find or create the relevant SubQuestionDetail
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
|
||||
if (!subQuestion) {
|
||||
subQuestion = {
|
||||
level,
|
||||
level_question_num,
|
||||
question: "",
|
||||
answer: "",
|
||||
sub_queries: [],
|
||||
context_docs: undefined,
|
||||
is_complete: false,
|
||||
};
|
||||
updatedSubQuestions.push(subQuestion);
|
||||
}
|
||||
|
||||
// Append to the answer
|
||||
subQuestion.answer += answer_piece;
|
||||
} else if ("sub_question" in newDetail) {
|
||||
// Handle SubQuestionPiece
|
||||
const { level, level_question_num, sub_question } = newDetail;
|
||||
|
||||
// Find or create the relevant SubQuestionDetail
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
|
||||
if (!subQuestion) {
|
||||
subQuestion = {
|
||||
level,
|
||||
level_question_num,
|
||||
question: "",
|
||||
answer: "",
|
||||
sub_queries: [],
|
||||
context_docs: undefined,
|
||||
is_complete: false,
|
||||
};
|
||||
updatedSubQuestions.push(subQuestion);
|
||||
}
|
||||
|
||||
// Append to the question
|
||||
subQuestion.question += sub_question;
|
||||
} else if ("sub_query" in newDetail) {
|
||||
// Handle SubQueryPiece
|
||||
const { level, level_question_num, query_id, sub_query } = newDetail;
|
||||
|
||||
// Find the relevant SubQuestionDetail
|
||||
let subQuestion = updatedSubQuestions.find(
|
||||
(sq) => sq.level === level && sq.level_question_num === level_question_num
|
||||
);
|
||||
|
||||
if (!subQuestion) {
|
||||
// If we receive a sub_query before its parent question, create a placeholder
|
||||
subQuestion = {
|
||||
level,
|
||||
level_question_num: level_question_num,
|
||||
question: "",
|
||||
answer: "",
|
||||
sub_queries: [],
|
||||
context_docs: undefined,
|
||||
};
|
||||
updatedSubQuestions.push(subQuestion);
|
||||
}
|
||||
|
||||
// Find or create the relevant SubQueryDetail
|
||||
let subQuery = subQuestion.sub_queries?.find(
|
||||
(sq) => sq.query_id === query_id
|
||||
);
|
||||
|
||||
if (!subQuery) {
|
||||
subQuery = { query: "", query_id };
|
||||
subQuestion.sub_queries = [...(subQuestion.sub_queries || []), subQuery];
|
||||
}
|
||||
|
||||
// Append to the query
|
||||
subQuery.query += sub_query;
|
||||
}
|
||||
|
||||
return updatedSubQuestions;
|
||||
};
|
||||
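constructSubQuestions is written as a pure reducer, so a streaming loop can simply reassign its accumulator on every piece it receives; a sketch of that call pattern (the handler name is illustrative):

import { constructSubQuestions } from "./constructSubQuestions";
import { SubQuestionDetail } from "../interfaces";

let subQuestions: SubQuestionDetail[] = [];

// Called for each agent-search piece pulled off the stream.
function onAgentPiece(piece: Parameters<typeof constructSubQuestions>[1]) {
  subQuestions = constructSubQuestions(subQuestions, piece);
}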
web/src/app/chat/services/currentMessageFIFO.ts (new file, 45 lines)
@@ -0,0 +1,45 @@
import { PacketType, sendMessage, SendMessageParams } from "./lib";

export class CurrentMessageFIFO {
  private stack: PacketType[] = [];
  isComplete: boolean = false;
  error: string | null = null;

  push(packetBunch: PacketType) {
    this.stack.push(packetBunch);
  }

  nextPacket(): PacketType | undefined {
    return this.stack.shift();
  }

  isEmpty(): boolean {
    return this.stack.length === 0;
  }
}

export async function updateCurrentMessageFIFO(
  stack: CurrentMessageFIFO,
  params: SendMessageParams
) {
  try {
    for await (const packet of sendMessage(params)) {
      if (params.signal?.aborted) {
        throw new Error("AbortError");
      }
      stack.push(packet);
    }
  } catch (error: unknown) {
    if (error instanceof Error) {
      if (error.name === "AbortError") {
        console.debug("Stream aborted");
      } else {
        stack.error = error.message;
      }
    } else {
      stack.error = String(error);
    }
  } finally {
    stack.isComplete = true;
  }
}
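The FIFO decouples the network stream (producer) from the UI update loop (consumer). A rough sketch of the intended wiring; the polling interval and handlePacket are illustrative, not part of the diff:

import { CurrentMessageFIFO, updateCurrentMessageFIFO } from "./currentMessageFIFO";
import { PacketType, SendMessageParams } from "./lib";

// Hypothetical handler; in the app this would update message state.
const handlePacket = (packet: PacketType) => console.debug("packet", packet);

async function streamIntoUI(params: SendMessageParams) {
  const fifo = new CurrentMessageFIFO();
  // Producer: fills the FIFO as packets arrive (intentionally not awaited here).
  void updateCurrentMessageFIFO(fifo, params);

  // Consumer: drain whatever has arrived, then yield back to the event loop.
  while (!fifo.isComplete || !fifo.isEmpty()) {
    for (let packet = fifo.nextPacket(); packet; packet = fifo.nextPacket()) {
      handlePacket(packet);
    }
    await new Promise((resolve) => setTimeout(resolve, 10));
  }
  if (fifo.error) {
    throw new Error(fifo.error);
  }
}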
@@ -12,7 +12,7 @@ import {
|
||||
RefinedAnswerImprovement,
|
||||
} from "@/lib/search/interfaces";
|
||||
import { handleSSEStream } from "@/lib/search/streamingUtils";
|
||||
import { ChatState, FeedbackType } from "./types";
|
||||
import { ChatState, FeedbackType } from "@/app/chat/interfaces";
|
||||
import { MutableRefObject, RefObject, useEffect, useRef } from "react";
|
||||
import {
|
||||
BackendMessage,
|
||||
@@ -27,14 +27,17 @@ import {
|
||||
ToolCallMetadata,
|
||||
AgenticMessageResponseIDInfo,
|
||||
UserKnowledgeFilePacket,
|
||||
} from "./interfaces";
|
||||
import { MinimalPersonaSnapshot } from "../admin/assistants/interfaces";
|
||||
} from "../interfaces";
|
||||
import { MinimalPersonaSnapshot } from "../../admin/assistants/interfaces";
|
||||
import { ReadonlyURLSearchParams } from "next/navigation";
|
||||
import { SEARCH_PARAM_NAMES } from "./searchParams";
|
||||
import { Settings } from "../admin/settings/interfaces";
|
||||
import { INTERNET_SEARCH_TOOL_ID } from "./tools/constants";
|
||||
import { SEARCH_TOOL_ID } from "./tools/constants";
|
||||
import { IMAGE_GENERATION_TOOL_ID } from "./tools/constants";
|
||||
import { Settings } from "../../admin/settings/interfaces";
|
||||
import {
|
||||
IMAGE_GENERATION_TOOL_ID,
|
||||
INTERNET_SEARCH_TOOL_ID,
|
||||
} from "@/app/chat/components/tools/constants";
|
||||
import { SEARCH_TOOL_ID } from "@/app/chat/components/tools/constants";
|
||||
import { Packet } from "./streamingModels";
|
||||
|
||||
// Date range group constants
|
||||
export const DATE_RANGE_GROUPS = {
|
||||
@@ -152,7 +155,6 @@ export const isPacketType = (data: any): data is PacketType => {
|
||||
export type PacketType =
|
||||
| ToolCallMetadata
|
||||
| BackendMessage
|
||||
| AnswerPiecePacket
|
||||
| DocumentInfoPacket
|
||||
| DocumentsResponse
|
||||
| FileChatDisplay
|
||||
@@ -166,7 +168,8 @@ export type PacketType =
|
||||
| ExtendedToolResponse
|
||||
| RefinedAnswerImprovement
|
||||
| AgenticMessageResponseIDInfo
|
||||
| UserKnowledgeFilePacket;
|
||||
| UserKnowledgeFilePacket
|
||||
| Packet;
|
||||
|
||||
export interface SendMessageParams {
|
||||
regenerate: boolean;
|
||||
@@ -187,7 +190,7 @@ export interface SendMessageParams {
|
||||
signal?: AbortSignal;
|
||||
userFileIds?: number[];
|
||||
userFolderIds?: number[];
|
||||
useLanggraph?: boolean;
|
||||
useAgentSearch?: boolean;
|
||||
}
|
||||
|
||||
export async function* sendMessage({
|
||||
@@ -209,7 +212,7 @@ export async function* sendMessage({
|
||||
useExistingUserMessage,
|
||||
alternateAssistantId,
|
||||
signal,
|
||||
useLanggraph,
|
||||
useAgentSearch,
|
||||
}: SendMessageParams): AsyncGenerator<PacketType, void, unknown> {
|
||||
const documentsAreSelected =
|
||||
selectedDocumentIds && selectedDocumentIds.length > 0;
|
||||
@@ -249,7 +252,7 @@ export async function* sendMessage({
|
||||
}
|
||||
: null,
|
||||
use_existing_user_message: useExistingUserMessage,
|
||||
use_agentic_search: useLanggraph ?? false,
|
||||
use_agentic_search: useAgentSearch ?? false,
|
||||
});
|
||||
|
||||
const response = await fetch(`/api/chat/send-message`, {
|
||||
@@ -282,7 +285,7 @@ export async function nameChatSession(chatSessionId: string) {
|
||||
return response;
|
||||
}
|
||||
|
||||
export async function setMessageAsLatest(messageId: number) {
|
||||
export async function patchMessageToBeLatest(messageId: number) {
|
||||
const response = await fetch("/api/chat/set-message-as-latest", {
|
||||
method: "PUT",
|
||||
headers: {
|
||||
@@ -467,25 +470,21 @@ export function groupSessionsByDateRange(chatSessions: ChatSession[]) {
|
||||
return groups;
|
||||
}
|
||||
|
||||
export function getLastSuccessfulMessageId(messageHistory: Message[]) {
|
||||
const lastSuccessfulMessage = messageHistory
|
||||
.slice()
|
||||
.reverse()
|
||||
.find(
|
||||
(message) =>
|
||||
(message.type === "assistant" || message.type === "system") &&
|
||||
message.messageId !== -1 &&
|
||||
message.messageId !== null
|
||||
);
|
||||
return lastSuccessfulMessage ? lastSuccessfulMessage?.messageId : null;
|
||||
}
|
||||
export function processRawChatHistory(
|
||||
rawMessages: BackendMessage[]
|
||||
rawMessages: BackendMessage[],
|
||||
packets: Packet[][]
|
||||
): Map<number, Message> {
|
||||
const messages: Map<number, Message> = new Map();
|
||||
const parentMessageChildrenMap: Map<number, number[]> = new Map();
|
||||
|
||||
rawMessages.forEach((messageInfo) => {
|
||||
let assistantMessageInd = 0;
|
||||
|
||||
rawMessages.forEach((messageInfo, ind) => {
|
||||
const packetsForMessage = packets[assistantMessageInd];
|
||||
if (messageInfo.message_type === "assistant") {
|
||||
assistantMessageInd++;
|
||||
}
|
||||
|
||||
const hasContextDocs =
|
||||
(messageInfo?.context_docs?.top_documents || []).length > 0;
|
||||
let retrievalType;
|
||||
@@ -498,10 +497,8 @@ export function processRawChatHistory(
|
||||
} else {
|
||||
retrievalType = RetrievalType.None;
|
||||
}
|
||||
const subQuestions = messageInfo.sub_questions?.map((q) => ({
|
||||
...q,
|
||||
is_complete: true,
|
||||
}));
|
||||
|
||||
console.log("messageInfo", messageInfo);
|
||||
|
||||
const message: Message = {
|
||||
messageId: messageInfo.message_id,
|
||||
@@ -527,10 +524,7 @@ export function processRawChatHistory(
|
||||
childrenMessageIds: [],
|
||||
latestChildMessageId: messageInfo.latest_child_message,
|
||||
overridden_model: messageInfo.overridden_model,
|
||||
sub_questions: subQuestions,
|
||||
isImprovement:
|
||||
(messageInfo.refined_answer_improvement as unknown as boolean) || false,
|
||||
is_agentic: messageInfo.is_agentic,
|
||||
packets: packetsForMessage || [],
|
||||
};
|
||||
|
||||
messages.set(messageInfo.message_id, message);
|
||||
@@ -557,86 +551,6 @@ export function processRawChatHistory(
|
||||
return messages;
|
||||
}
|
||||
|
||||
export function buildLatestMessageChain(
|
||||
messageMap: Map<number, Message>,
|
||||
additionalMessagesOnMainline: Message[] = []
|
||||
): Message[] {
|
||||
const rootMessage = Array.from(messageMap.values()).find(
|
||||
(message) => message.parentMessageId === null
|
||||
);
|
||||
|
||||
let finalMessageList: Message[] = [];
|
||||
if (rootMessage) {
|
||||
let currMessage: Message | null = rootMessage;
|
||||
while (currMessage) {
|
||||
finalMessageList.push(currMessage);
|
||||
const childMessageNumber = currMessage.latestChildMessageId;
|
||||
if (childMessageNumber && messageMap.has(childMessageNumber)) {
|
||||
currMessage = messageMap.get(childMessageNumber) as Message;
|
||||
} else {
|
||||
currMessage = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//
|
||||
// remove system message
|
||||
if (
|
||||
finalMessageList.length > 0 &&
|
||||
finalMessageList[0] &&
|
||||
finalMessageList[0].type === "system"
|
||||
) {
|
||||
finalMessageList = finalMessageList.slice(1);
|
||||
}
|
||||
return finalMessageList.concat(additionalMessagesOnMainline);
|
||||
}
|
||||
|
||||
export function updateParentChildren(
|
||||
message: Message,
|
||||
completeMessageMap: Map<number, Message>,
|
||||
setAsLatestChild: boolean = false
|
||||
) {
|
||||
// NOTE: updates the `completeMessageMap` in place
|
||||
const parentMessage = message.parentMessageId
|
||||
? completeMessageMap.get(message.parentMessageId)
|
||||
: null;
|
||||
if (parentMessage) {
|
||||
if (setAsLatestChild) {
|
||||
parentMessage.latestChildMessageId = message.messageId;
|
||||
}
|
||||
|
||||
const parentChildMessages = parentMessage.childrenMessageIds || [];
|
||||
if (!parentChildMessages.includes(message.messageId)) {
|
||||
parentChildMessages.push(message.messageId);
|
||||
}
|
||||
parentMessage.childrenMessageIds = parentChildMessages;
|
||||
}
|
||||
}
|
||||
|
||||
export function removeMessage(
|
||||
messageId: number,
|
||||
completeMessageMap: Map<number, Message>
|
||||
) {
|
||||
const messageToRemove = completeMessageMap.get(messageId);
|
||||
if (!messageToRemove) {
|
||||
return;
|
||||
}
|
||||
|
||||
const parentMessage = messageToRemove.parentMessageId
|
||||
? completeMessageMap.get(messageToRemove.parentMessageId)
|
||||
: null;
|
||||
if (parentMessage) {
|
||||
if (parentMessage.latestChildMessageId === messageId) {
|
||||
parentMessage.latestChildMessageId = null;
|
||||
}
|
||||
const currChildMessage = parentMessage.childrenMessageIds || [];
|
||||
const newChildMessage = currChildMessage.filter((id) => id !== messageId);
|
||||
parentMessage.childrenMessageIds = newChildMessage;
|
||||
}
|
||||
|
||||
completeMessageMap.delete(messageId);
|
||||
}
|
||||
|
||||
export function checkAnyAssistantHasSearch(
|
||||
messageHistory: Message[],
|
||||
availableAssistants: MinimalPersonaSnapshot[],
|
||||
@@ -691,7 +605,8 @@ export function buildChatUrl(
|
||||
existingSearchParams: ReadonlyURLSearchParams | null,
|
||||
chatSessionId: string | null,
|
||||
personaId: number | null,
|
||||
search?: boolean
|
||||
search?: boolean,
|
||||
skipReload?: boolean
|
||||
) {
|
||||
const finalSearchParams: string[] = [];
|
||||
if (chatSessionId) {
|
||||
@@ -710,6 +625,11 @@ export function buildChatUrl(
|
||||
finalSearchParams.push(`${key}=${value}`);
|
||||
}
|
||||
});
|
||||
|
||||
if (skipReload) {
|
||||
finalSearchParams.push(`${SEARCH_PARAM_NAMES.SKIP_RELOAD}=true`);
|
||||
}
|
||||
|
||||
const finalSearchParamsString = finalSearchParams.join("&");
|
||||
|
||||
if (finalSearchParamsString) {
|
||||
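With the new skipReload flag, a caller can build a URL for a session that is already rendered without forcing the page to re-fetch it; approximately (the exact query-parameter names come from SEARCH_PARAM_NAMES, so the literal below is only indicative):

// buildChatUrl(searchParams, existingSessionId, null, /* search */ false, /* skipReload */ true)
// appends the SEARCH_PARAM_NAMES.SKIP_RELOAD parameter (set to "true") to the generated chat URL.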
web/src/app/chat/services/messageTree.ts (new file, 392 lines)
@@ -0,0 +1,392 @@
import { Message } from "../interfaces";

export const SYSTEM_MESSAGE_ID = -3;

export type MessageTreeState = Map<number, Message>;

export function createInitialMessageTreeState(
  initialMessages?: Map<number, Message> | Message[]
): MessageTreeState {
  if (!initialMessages) {
    return new Map();
  }
  if (initialMessages instanceof Map) {
    return new Map(initialMessages); // Shallow copy
  }
  return new Map(initialMessages.map((msg) => [msg.messageId, msg]));
}

export function getMessage(
  messages: MessageTreeState,
  messageId: number
): Message | undefined {
  return messages.get(messageId);
}

function updateParentInMap(
  map: Map<number, Message>,
  parentId: number,
  childId: number,
  makeLatest: boolean
): void {
  const parent = map.get(parentId);
  if (parent) {
    const parentChildren = parent.childrenMessageIds || [];
    const childrenSet = new Set(parentChildren);
    let updatedChildren = parentChildren;

    if (!childrenSet.has(childId)) {
      updatedChildren = [...parentChildren, childId];
    }

    const updatedParent = {
      ...parent,
      childrenMessageIds: updatedChildren,
      // Update latestChild only if explicitly requested or if it's the only child,
      // or if the child was newly added
      latestChildMessageId:
        makeLatest || updatedChildren.length === 1 || !childrenSet.has(childId)
          ? childId
          : parent.latestChildMessageId,
    };
    if (makeLatest && parent.latestChildMessageId !== childId) {
      updatedParent.latestChildMessageId = childId;
    }

    map.set(parentId, updatedParent);
  } else {
    console.warn(
      `Parent message with ID ${parentId} not found when updating for child ${childId}`
    );
  }
}

export function upsertMessages(
  currentMessages: MessageTreeState,
  messagesToAdd: Message[],
  makeLatestChildMessage: boolean = false
): MessageTreeState {
  let newMessages = new Map(currentMessages);
  let messagesToAddClones = messagesToAdd.map((msg) => ({ ...msg })); // Clone all incoming messages

  if (newMessages.size === 0 && messagesToAddClones.length > 0) {
    const firstMessage = messagesToAddClones[0];
    if (!firstMessage) {
      throw new Error("No first message found in the message tree.");
    }
    const systemMessageId =
      firstMessage.parentMessageId !== null
        ? firstMessage.parentMessageId
        : SYSTEM_MESSAGE_ID;
    const firstMessageId = firstMessage.messageId;

    // Check if system message needs to be added or already exists (e.g., from parentMessageId)
    if (!newMessages.has(systemMessageId)) {
      const dummySystemMessage: Message = {
        messageId: systemMessageId,
        message: "",
        type: "system",
        files: [],
        toolCall: null,
        parentMessageId: null,
        childrenMessageIds: [firstMessageId],
        latestChildMessageId: firstMessageId,
        packets: [],
      };
      newMessages.set(dummySystemMessage.messageId, dummySystemMessage);
    }
    // Ensure the first message points to the system message if its parent was null
    if (!firstMessage) {
      console.error("No first message found in the message tree.");
      return newMessages;
    }
    if (firstMessage.parentMessageId === null) {
      firstMessage.parentMessageId = systemMessageId;
    }
  }

  messagesToAddClones.forEach((message) => {
    // Add/update the message itself
    newMessages.set(message.messageId, message);

    // Update parent's children if the message has a parent
    if (message.parentMessageId !== null) {
      // When adding multiple messages, only make the *first* one added potentially the latest,
      // unless `makeLatestChildMessage` is true for all.
      // Let's stick to the original logic: update parent, potentially making this message latest
      // based on makeLatestChildMessage flag OR if it's a new child being added.
      updateParentInMap(
        newMessages,
        message.parentMessageId,
        message.messageId,
        makeLatestChildMessage
      );
    }
  });

  // Explicitly set the last message of the batch as the latest if requested,
  // overriding previous updates within the loop if necessary.
  if (makeLatestChildMessage && messagesToAddClones.length > 0) {
    const lastMessage = messagesToAddClones[messagesToAddClones.length - 1];
    if (!lastMessage) {
      console.error("No last message found in the message tree.");
      return newMessages;
    }
    if (lastMessage.parentMessageId !== null) {
      const parent = newMessages.get(lastMessage.parentMessageId);
      if (parent && parent.latestChildMessageId !== lastMessage.messageId) {
        const updatedParent = {
          ...parent,
          latestChildMessageId: lastMessage.messageId,
        };
        newMessages.set(parent.messageId, updatedParent);
      }
    }
  }

  return newMessages;
}
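
For illustration, a rough usage sketch of the helpers above. The ids and field values are made up, and the full Message shape (from "../interfaces") likely has more fields than shown, so treat this as a sketch rather than part of the diff:

// Hypothetical usage of createInitialMessageTreeState / upsertMessages.
import { Message } from "../interfaces";
import {
  createInitialMessageTreeState,
  upsertMessages,
  SYSTEM_MESSAGE_ID,
} from "./messageTree";

const userMsg = {
  messageId: 1,
  message: "Hello",
  type: "user",
  files: [],
  toolCall: null,
  parentMessageId: null, // gets re-pointed at the dummy system root
  childrenMessageIds: [],
  latestChildMessageId: null,
  packets: [],
} as unknown as Message; // cast because the real Message type has more fields

// Starting from an empty tree, upsertMessages creates a dummy system root
// (SYSTEM_MESSAGE_ID) and re-parents the first message under it.
let tree = createInitialMessageTreeState();
tree = upsertMessages(tree, [userMsg], true);
console.log(tree.get(SYSTEM_MESSAGE_ID)?.latestChildMessageId); // 1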

export function removeMessage(
  currentMessages: MessageTreeState,
  messageIdToRemove: number
): MessageTreeState {
  if (!currentMessages.has(messageIdToRemove)) {
    return currentMessages; // Return original if message doesn't exist
  }

  const newMessages = new Map(currentMessages);
  const messageToRemove = newMessages.get(messageIdToRemove)!;

  // Collect all descendant IDs to remove
  const idsToRemove = new Set<number>();
  const queue: number[] = [messageIdToRemove];

  while (queue.length > 0) {
    const currentId = queue.shift()!;
    if (!newMessages.has(currentId) || idsToRemove.has(currentId)) continue;
    idsToRemove.add(currentId);

    const currentMsg = newMessages.get(currentId);
    if (currentMsg?.childrenMessageIds) {
      currentMsg.childrenMessageIds.forEach((childId) => queue.push(childId));
    }
  }

  // Remove all descendants
  idsToRemove.forEach((id) => newMessages.delete(id));

  // Update the parent
  if (messageToRemove.parentMessageId !== null) {
    const parent = newMessages.get(messageToRemove.parentMessageId);
    if (parent) {
      const updatedChildren = (parent.childrenMessageIds || []).filter(
        (id) => id !== messageIdToRemove
      );
      const updatedParent = {
        ...parent,
        childrenMessageIds: updatedChildren,
        // If the removed message was the latest, find the new latest (last in the updated children list)
        latestChildMessageId:
          parent.latestChildMessageId === messageIdToRemove
            ? updatedChildren.length > 0
              ? updatedChildren[updatedChildren.length - 1]
              : null
            : parent.latestChildMessageId,
      };
      newMessages.set(parent.messageId, updatedParent);
    }
  }

  return newMessages;
}

export function setMessageAsLatest(
  currentMessages: MessageTreeState,
  messageId: number
): MessageTreeState {
  const message = currentMessages.get(messageId);
  if (!message || message.parentMessageId === null) {
    return currentMessages; // Cannot set root or non-existent message as latest
  }

  const parent = currentMessages.get(message.parentMessageId);
  if (!parent || !(parent.childrenMessageIds || []).includes(messageId)) {
    console.warn(
      `Cannot set message ${messageId} as latest, parent ${message.parentMessageId} or child link missing.`
    );
    return currentMessages; // Parent doesn't exist or doesn't list this message as a child
  }

  if (parent.latestChildMessageId === messageId) {
    return currentMessages; // Already the latest
  }

  const newMessages = new Map(currentMessages);
  const updatedParent = {
    ...parent,
    latestChildMessageId: messageId,
  };
  newMessages.set(parent.messageId, updatedParent);

  return newMessages;
}

export function getLatestMessageChain(messages: MessageTreeState): Message[] {
  const chain: Message[] = [];
  if (messages.size === 0) {
    return chain;
  }

  // Find the root message
  let root: Message | undefined;
  if (messages.has(SYSTEM_MESSAGE_ID)) {
    root = messages.get(SYSTEM_MESSAGE_ID);
  } else {
    // Use Array.from to fix linter error
    const potentialRoots = Array.from(messages.values()).filter(
      (message) =>
        message.parentMessageId === null ||
        !messages.has(message.parentMessageId!)
    );
    if (potentialRoots.length > 0) {
      // Prefer non-system message if multiple roots found somehow
      root =
        potentialRoots.find((m) => m.type !== "system") || potentialRoots[0];
    }
  }

  if (!root) {
    console.error("Could not determine the root message.");
    // Fallback: return flat list sorted by ID perhaps? Or empty?
    return Array.from(messages.values()).sort(
      (a, b) => a.messageId - b.messageId
    );
  }

  let currentMessage: Message | undefined = root;
  // The root itself (like SYSTEM_MESSAGE) might not be part of the visible chain
  if (root.messageId !== SYSTEM_MESSAGE_ID && root.type !== "system") {
    // Need to clone message for safety? If MessageTreeState guarantees immutability maybe not.
    // Let's assume Message objects within the map are treated as immutable.
    chain.push(root);
  }

  while (
    currentMessage?.latestChildMessageId !== null &&
    currentMessage?.latestChildMessageId !== undefined
  ) {
    const nextMessageId = currentMessage.latestChildMessageId;
    const nextMessage = messages.get(nextMessageId);
    if (nextMessage) {
      chain.push(nextMessage);
      currentMessage = nextMessage;
    } else {
      console.warn(`Chain broken: Message ${nextMessageId} not found.`);
      break;
    }
  }

  return chain;
}
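
Continuing the same hypothetical sketch, branching and branch selection with the helpers above (again, the ids are made up and not part of the diff):

// Two assistant replies branch off user message 1; setMessageAsLatest decides
// which branch getLatestMessageChain walks. The system root is skipped.
import { getLatestMessageChain, setMessageAsLatest } from "./messageTree";

const replyA = {
  ...userMsg,
  messageId: 2,
  type: "assistant",
  parentMessageId: 1,
} as unknown as Message;
const replyB = {
  ...userMsg,
  messageId: 3,
  type: "assistant",
  parentMessageId: 1,
} as unknown as Message;
tree = upsertMessages(tree, [replyA, replyB], true); // replyB (id 3) ends up as the latest child of 1

const chain = getLatestMessageChain(setMessageAsLatest(tree, 2));
console.log(chain.map((m) => m.messageId)); // [1, 2]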

export function getHumanAndAIMessageFromMessageNumber(
  messages: MessageTreeState,
  messageNumber: number
): { humanMessage: Message | null; aiMessage: Message | null } {
  const latestChain = getLatestMessageChain(messages);
  const messageIndex = latestChain.findIndex(
    (msg) => msg.messageId === messageNumber
  );

  if (messageIndex === -1) {
    // Maybe the message exists but isn't in the latest chain? Search the whole map.
    const message = messages.get(messageNumber);
    if (!message) return { humanMessage: null, aiMessage: null };

    if (message.type === "user") {
      // Find its latest child that is an assistant
      const potentialAiMessage =
        message.latestChildMessageId !== null &&
        message.latestChildMessageId !== undefined
          ? messages.get(message.latestChildMessageId)
          : undefined;
      const aiMessage =
        potentialAiMessage?.type === "assistant" ? potentialAiMessage : null;
      return { humanMessage: message, aiMessage };
    } else if (message.type === "assistant" || message.type === "error") {
      const humanMessage =
        message.parentMessageId !== null
          ? messages.get(message.parentMessageId)
          : null;
      return {
        humanMessage: humanMessage?.type === "user" ? humanMessage : null,
        aiMessage: message,
      };
    }
    return { humanMessage: null, aiMessage: null };
  }

  // Message is in the latest chain
  const message = latestChain[messageIndex];
  if (!message) {
    console.error(`Message ${messageNumber} not found in the latest chain.`);
    return { humanMessage: null, aiMessage: null };
  }

  if (message.type === "user") {
    const potentialAiMessage = latestChain[messageIndex + 1];
    const aiMessage =
      potentialAiMessage?.type === "assistant" &&
      potentialAiMessage.parentMessageId === message.messageId
        ? potentialAiMessage
        : null;
    return { humanMessage: message, aiMessage };
  } else if (message.type === "assistant" || message.type === "error") {
    const potentialHumanMessage = latestChain[messageIndex - 1];
    const humanMessage =
      potentialHumanMessage?.type === "user" &&
      message.parentMessageId === potentialHumanMessage.messageId
        ? potentialHumanMessage
        : null;
    return { humanMessage, aiMessage: message };
  }

  return { humanMessage: null, aiMessage: null };
}

export function getLastSuccessfulMessageId(
  messages: MessageTreeState,
  chain?: Message[]
): number | null {
  const messageChain = chain || getLatestMessageChain(messages);
  for (let i = messageChain.length - 1; i >= 0; i--) {
    const message = messageChain[i];
    if (!message) {
      console.error(`Message ${i} not found in the message chain.`);
      continue;
    }
    if (message.type !== "error") {
      return message.messageId;
    }
  }

  // If the chain starts with an error or is empty, check for system message
  const systemMessage = messages.get(SYSTEM_MESSAGE_ID);
  if (systemMessage) {
    // Check if the system message itself is considered "successful" (it usually is)
    // Or if it has a successful child
    const childId = systemMessage.latestChildMessageId;
    if (childId !== null && childId !== undefined) {
      const firstRealMessage = messages.get(childId);
      if (firstRealMessage && firstRealMessage.type !== "error") {
        return firstRealMessage.messageId;
      }
    }
    // If no successful child, return the system message ID itself as the root?
    // This matches the class behavior implicitly returning the root ID if nothing else works.
    return systemMessage.messageId;
  }

  return null; // No successful message found
}
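
And one last piece of the sketch — picking the parent to attach a retry or regeneration to (illustrative only):

// getLastSuccessfulMessageId returns the newest non-error message in the chain,
// falling back to the system root; with the state built up above that is message 3.
import { getLastSuccessfulMessageId } from "./messageTree";

console.log(getLastSuccessfulMessageId(tree)); // 3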
@@ -21,6 +21,10 @@ export const SEARCH_PARAM_NAMES = {
  // for seeding chats
  SEEDED: "seeded",
  SEND_ON_LOAD: "send-on-load",

  // when sending a message for the first time, we don't want to reload the page
  // and cause a re-render
  SKIP_RELOAD: "skip-reload",
};
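
Relatedly, a hedged sketch of how the new flag is presumably consumed by buildChatUrl (shown earlier in this diff); the import path and the exact query-param names are assumptions:

// Hypothetical call — buildChatUrl(existingSearchParams, chatSessionId, personaId, search?, skipReload?)
import { buildChatUrl } from "../lib"; // path assumed
const url = buildChatUrl(null, "abc-123", null, false, true);
// Expected to end in "...&skip-reload=true", telling the chat page not to do a
// full reload (and re-render) when the first message of a session is sent.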

export function shouldSubmitOnLoad(
194  web/src/app/chat/services/streamingModels.ts  Normal file
@@ -0,0 +1,194 @@
import { OnyxDocument } from "@/lib/search/interfaces";

// Base interface for all streaming objects
interface BaseObj {
  type: string;
}

export enum PacketType {
  MESSAGE_START = "message_start",
  MESSAGE_DELTA = "message_delta",
  MESSAGE_END = "message_end",

  STOP = "stop",
  SECTION_END = "section_end",

  // Specific tool packets
  SEARCH_TOOL_START = "internal_search_tool_start",
  SEARCH_TOOL_DELTA = "internal_search_tool_delta",
  IMAGE_GENERATION_TOOL_START = "image_generation_tool_start",
  IMAGE_GENERATION_TOOL_DELTA = "image_generation_tool_delta",

  // Custom tool packets
  CUSTOM_TOOL_START = "custom_tool_start",
  CUSTOM_TOOL_DELTA = "custom_tool_delta",

  // Reasoning packets
  REASONING_START = "reasoning_start",
  REASONING_DELTA = "reasoning_delta",
  REASONING_END = "reasoning_end",

  CITATION_START = "citation_start",
  CITATION_DELTA = "citation_delta",
  CITATION_END = "citation_end",
}

// Basic Message Packets
export interface MessageStart extends BaseObj {
  id: string;
  type: "message_start";
  content: string;
}

export interface MessageDelta extends BaseObj {
  content: string;
  type: "message_delta";
}

export interface MessageEnd extends BaseObj {
  type: "message_end";
}

// Control Packets
export interface Stop extends BaseObj {
  type: "stop";
}

export interface SectionEnd extends BaseObj {
  type: "section_end";
}

// Specific tool packets
export interface SearchToolStart extends BaseObj {
  type: "internal_search_tool_start";
  is_internet_search?: boolean;
}

export interface SearchToolDelta extends BaseObj {
  type: "internal_search_tool_delta";
  queries: string[] | null;
  documents: OnyxDocument[] | null;
}

export interface ImageGenerationToolStart extends BaseObj {
  type: "image_generation_tool_start";
}

export interface ImageGenerationToolDelta extends BaseObj {
  type: "image_generation_tool_delta";
  images: Array<{ [key: string]: string }> | null;
}

// Custom Tool Packets
export interface CustomToolStart extends BaseObj {
  type: "custom_tool_start";
  tool_name: string;
}

export interface CustomToolDelta extends BaseObj {
  type: "custom_tool_delta";
  tool_name: string;
  response_type: string;
  data?: any;
  file_ids?: string[] | null;
}

// Reasoning Packets
export interface ReasoningStart extends BaseObj {
  type: "reasoning_start";
}

export interface ReasoningDelta extends BaseObj {
  type: "reasoning_delta";
  reasoning: string;
}

// Citation Packets
export interface StreamingCitation {
  citation_num: number;
  document_id: string;
}

export interface CitationStart extends BaseObj {
  type: "citation_start";
}

export interface CitationDelta extends BaseObj {
  type: "citation_delta";
  citations: StreamingCitation[];
}

export type ChatObj = MessageStart | MessageDelta | MessageEnd;

export type StopObj = Stop;

export type SectionEndObj = SectionEnd;

// Specific tool objects
export type SearchToolObj = SearchToolStart | SearchToolDelta | SectionEnd;
export type ImageGenerationToolObj =
  | ImageGenerationToolStart
  | ImageGenerationToolDelta
  | SectionEnd;
export type CustomToolObj = CustomToolStart | CustomToolDelta | SectionEnd;
export type NewToolObj = SearchToolObj | ImageGenerationToolObj | CustomToolObj;

export type ReasoningObj = ReasoningStart | ReasoningDelta | SectionEnd;

export type CitationObj = CitationStart | CitationDelta | SectionEnd;

// Union type for all possible streaming objects
export type ObjTypes =
  | ChatObj
  | NewToolObj
  | ReasoningObj
  | StopObj
  | SectionEndObj
  | CitationObj;

// Packet wrapper for streaming objects
export interface Packet {
  ind: number;
  obj: ObjTypes;
}

export interface ChatPacket {
  ind: number;
  obj: ChatObj;
}

export interface StopPacket {
  ind: number;
  obj: StopObj;
}

export interface CitationPacket {
  ind: number;
  obj: CitationObj;
}

// New specific tool packet types
export interface SearchToolPacket {
  ind: number;
  obj: SearchToolObj;
}

export interface ImageGenerationToolPacket {
  ind: number;
  obj: ImageGenerationToolObj;
}

export interface CustomToolPacket {
  ind: number;
  obj: CustomToolObj;
}

export interface ReasoningPacket {
  ind: number;
  obj: ReasoningObj;
}

export interface SectionEndPacket {
  ind: number;
  obj: SectionEndObj;
}
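
To make the packet union above concrete, a small hedged sketch of how a consumer might fold streamed packets into an answer string (not part of this diff; the real handling lives in the chat UI code):

// Assumes packets arrive in `ind` order; switching on obj.type narrows the union.
import { Packet } from "./streamingModels";

function applyPacket(answer: string, packet: Packet): string {
  switch (packet.obj.type) {
    case "message_start":
      return packet.obj.content; // first chunk of the assistant message
    case "message_delta":
      return answer + packet.obj.content; // append streamed text
    case "citation_delta":
      packet.obj.citations.forEach((c) =>
        console.log(`citation ${c.citation_num} -> ${c.document_id}`)
      );
      return answer;
    default:
      return answer; // tool / reasoning / stop packets handled elsewhere
  }
}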
Some files were not shown because too many files have changed in this diff.