Mirror of https://github.com/onyx-dot-app/onyx.git (synced 2026-02-18 08:15:48 +00:00)

Compare commits: bookstack_...stash_open (12 commits)
Commits: 39106e57b2, 7e478f0e90, 599b7705c2, 4958a5355d, c4b8519381, 8b4413694a, 57cf7d9fac, ad4efb5f20, e304ec4ab6, 1690dc45ba, 7582ba1640, 99fc546943
@@ -0,0 +1,27 @@
"""Add composite index for last_modified and last_synced to document

Revision ID: f13db29f3101
Revises: b388730a2899
Create Date: 2025-02-18 22:48:11.511389

"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "f13db29f3101"
down_revision = "acaab4ef4507"
branch_labels: str | None = None
depends_on: str | None = None


def upgrade() -> None:
    op.create_index(
        "ix_document_sync_status",
        "document",
        ["last_modified", "last_synced"],
        unique=False,
    )


def downgrade() -> None:
    op.drop_index("ix_document_sync_status", table_name="document")
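As a quick sanity check after running this migration, the new composite index can be inspected with SQLAlchemy. This is a sketch rather than part of the commits; it assumes a synchronous engine pointed at the same Postgres database, and the connection URL is a placeholder:

from sqlalchemy import create_engine, inspect

# Placeholder DSN; substitute the real database settings for your deployment.
engine = create_engine("postgresql://onyx:password@localhost:5432/onyx")

indexes = inspect(engine).get_indexes("document")
assert any(
    ix["name"] == "ix_document_sync_status"
    and ix["column_names"] == ["last_modified", "last_synced"]
    for ix in indexes
), "composite index not found on the document table"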
@@ -10,6 +10,7 @@ from onyx.configs.app_configs import SMTP_PORT
from onyx.configs.app_configs import SMTP_SERVER
from onyx.configs.app_configs import SMTP_USER
from onyx.configs.app_configs import WEB_DOMAIN
+from onyx.configs.constants import AuthType
from onyx.configs.constants import TENANT_ID_COOKIE_NAME
from onyx.db.models import User

@@ -187,23 +188,51 @@ def send_subscription_cancellation_email(user_email: str) -> None:
    send_email(user_email, subject, html_content, text_content)


-def send_user_email_invite(user_email: str, current_user: User) -> None:
+def send_user_email_invite(
+    user_email: str, current_user: User, auth_type: AuthType
+) -> None:
    subject = "Invitation to Join Onyx Organization"
    heading = "You've Been Invited!"
-    message = (
-        f"<p>You have been invited by {current_user.email} to join an organization on Onyx.</p>"
-        "<p>To join the organization, please click the button below to set a password "
-        "or login with Google and complete your registration.</p>"
-    )
+
+    # the exact action taken by the user, and thus the message, depends on the auth type
+    message = f"<p>You have been invited by {current_user.email} to join an organization on Onyx.</p>"
+    if auth_type == AuthType.CLOUD:
+        message += (
+            "<p>To join the organization, please click the button below to set a password "
+            "or login with Google and complete your registration.</p>"
+        )
+    elif auth_type == AuthType.BASIC:
+        message += (
+            "<p>To join the organization, please click the button below to set a password "
+            "and complete your registration.</p>"
+        )
+    elif auth_type == AuthType.GOOGLE_OAUTH:
+        message += (
+            "<p>To join the organization, please click the button below to login with Google "
+            "and complete your registration.</p>"
+        )
+    elif auth_type == AuthType.OIDC or auth_type == AuthType.SAML:
+        message += (
+            "<p>To join the organization, please click the button below to"
+            " complete your registration.</p>"
+        )
+    else:
+        raise ValueError(f"Invalid auth type: {auth_type}")
+
    cta_text = "Join Organization"
    cta_link = f"{WEB_DOMAIN}/auth/signup?email={user_email}"
    html_content = build_html_email(heading, message, cta_text, cta_link)
+
+    # text content is the fallback for clients that don't support HTML
+    # not as critical, so not having special cases for each auth type
    text_content = (
        f"You have been invited by {current_user.email} to join an organization on Onyx.\n"
        "To join the organization, please visit the following link:\n"
        f"{WEB_DOMAIN}/auth/signup?email={user_email}\n"
-        "You'll be asked to set a password or login with Google to complete your registration."
    )
+    if auth_type == AuthType.CLOUD:
+        text_content += "You'll be asked to set a password or login with Google to complete your registration."

    send_email(user_email, subject, html_content, text_content)
@@ -140,7 +140,7 @@ def on_task_postrun(
        f"{f'for tenant_id={tenant_id}' if tenant_id else ''}"
    )

-    r = get_redis_client()
+    r = get_redis_client(tenant_id=tenant_id)

    if task_id.startswith(RedisConnectorCredentialPair.PREFIX):
        r.srem(RedisConnectorCredentialPair.get_taskset_key(), task_id)
@@ -190,7 +190,8 @@ def create_chat_chain(
            and previous_message.message_type == MessageType.ASSISTANT
            and mainline_messages
        ):
-            mainline_messages[-1] = current_message
+            if current_message.refined_answer_improvement:
+                mainline_messages[-1] = current_message
        else:
            mainline_messages.append(current_message)
@@ -142,6 +142,15 @@ class MessageResponseIDInfo(BaseModel):
    reserved_assistant_message_id: int


+class AgentMessageIDInfo(BaseModel):
+    level: int
+    message_id: int
+
+
+class AgenticMessageResponseIDInfo(BaseModel):
+    agentic_message_ids: list[AgentMessageIDInfo]
+
+
class StreamingError(BaseModel):
    error: str
    stack_trace: str | None = None
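A sketch (not from the commits) of what the new packet looks like once serialized, assuming Pydantic v2's model_dump_json; the frontend detects this packet by the presence of the agentic_message_ids field:

from pydantic import BaseModel


class AgentMessageIDInfo(BaseModel):
    level: int
    message_id: int


class AgenticMessageResponseIDInfo(BaseModel):
    agentic_message_ids: list[AgentMessageIDInfo]


packet = AgenticMessageResponseIDInfo(
    agentic_message_ids=[AgentMessageIDInfo(level=1, message_id=123)]
)
# prints: {"agentic_message_ids":[{"level":1,"message_id":123}]}
print(packet.model_dump_json())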
@@ -11,6 +11,8 @@ from onyx.agents.agent_search.orchestration.nodes.call_tool import ToolCallExcep
from onyx.chat.answer import Answer
from onyx.chat.chat_utils import create_chat_chain
from onyx.chat.chat_utils import create_temporary_persona
+from onyx.chat.models import AgenticMessageResponseIDInfo
+from onyx.chat.models import AgentMessageIDInfo
from onyx.chat.models import AgentSearchPacket
from onyx.chat.models import AllCitations
from onyx.chat.models import AnswerPostInfo
@@ -308,6 +310,7 @@ ChatPacket = (
    | CustomToolResponse
    | MessageSpecificCitations
    | MessageResponseIDInfo
+    | AgenticMessageResponseIDInfo
    | StreamStopInfo
    | AgentSearchPacket
)
@@ -1035,6 +1038,7 @@ def stream_chat_message_objects(
        next_level = 1
        prev_message = gen_ai_response_message
        agent_answers = answer.llm_answer_by_level()
+        agentic_message_ids = []
        while next_level in agent_answers:
            next_answer = agent_answers[next_level]
            info = info_by_subq[
@@ -1059,17 +1063,18 @@ def stream_chat_message_objects(
                refined_answer_improvement=refined_answer_improvement,
                is_agentic=True,
            )
+            agentic_message_ids.append(
+                AgentMessageIDInfo(level=next_level, message_id=next_answer_message.id)
+            )
            next_level += 1
            prev_message = next_answer_message

        logger.debug("Committing messages")
        db_session.commit()  # actually save user / assistant message

-        msg_detail_response = translate_db_message_to_chat_message_detail(
-            gen_ai_response_message
-        )
+        yield AgenticMessageResponseIDInfo(agentic_message_ids=agentic_message_ids)

-        yield msg_detail_response
+        yield translate_db_message_to_chat_message_detail(gen_ai_response_message)
    except Exception as e:
        error_msg = str(e)
        logger.exception(error_msg)
@@ -195,11 +195,16 @@ def validate_ccpair_for_user(
        db_session,
        get_editable=False,
    )
+    if not credential:
+        raise ValueError("Credential not found")
+
+    if not connector:
+        raise ValueError("Connector not found")
+
    if connector.source == DocumentSource.INGESTION_API:
        return

-    if not credential:
-        raise ValueError("Credential not found")
-
    try:
        runnable_connector = instantiate_connector(
            db_session=db_session,
@@ -229,16 +229,20 @@ class GitbookConnector(LoadConnector, PollConnector):

        try:
            content = self.client.get(f"/spaces/{self.space_id}/content")
-            pages = content.get("pages", [])
+            pages: list[dict[str, Any]] = content.get("pages", [])
            current_batch: list[Document] = []
-            for page in pages:
-                updated_at = datetime.fromisoformat(page["updatedAt"])
+            while pages:
+                page = pages.pop(0)
+
+                updated_at_raw = page.get("updatedAt")
+                if updated_at_raw is None:
+                    # if updatedAt is not present, that means the page has never been edited
+                    continue
+
+                updated_at = datetime.fromisoformat(updated_at_raw)
                if start and updated_at < start:
                    if current_batch:
                        yield current_batch
-                    return
+                    continue
                if end and updated_at > end:
                    continue

@@ -250,6 +254,8 @@ class GitbookConnector(LoadConnector, PollConnector):
                    yield current_batch
                    current_batch = []

+                pages.extend(page.get("pages", []))
+
            if current_batch:
                yield current_batch
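The GitBook change above replaces a flat loop over top-level pages with a queue-based walk that also visits nested child pages and skips never-edited ones. A standalone sketch of the same traversal pattern, using a made-up page tree rather than real GitBook API output:

from typing import Any

root_pages: list[dict[str, Any]] = [
    {"id": "a", "updatedAt": "2025-01-01T00:00:00+00:00", "pages": [
        {"id": "a1", "updatedAt": "2025-01-02T00:00:00+00:00", "pages": []},
    ]},
    {"id": "b", "pages": []},  # never edited: no updatedAt key
]

queue = list(root_pages)
visited: list[str] = []
while queue:
    page = queue.pop(0)  # FIFO queue, mirroring pages.pop(0) in the connector
    if page.get("updatedAt") is None:
        continue  # page has never been edited, skip it (and its children)
    visited.append(page["id"])
    queue.extend(page.get("pages", []))  # enqueue nested pages

print(visited)  # ['a', 'a1']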
@@ -568,6 +568,7 @@ class GoogleDriveConnector(LoadConnector, PollConnector, SlimConnector):
        callback: IndexingHeartbeatInterface | None = None,
    ) -> GenerateSlimDocumentOutput:
        slim_batch = []
+        # ERROR here.
        for file in self._fetch_drive_items(
            is_slim=True,
            start=start,
@@ -60,9 +60,8 @@ def count_documents_by_needs_sync(session: Session) -> int:
    This function executes the query and returns the count of
    documents matching the criteria."""

-    count = (
-        session.query(func.count(DbDocument.id.distinct()))
-        .select_from(DbDocument)
+    return (
+        session.query(DbDocument.id)
        .join(
            DocumentByConnectorCredentialPair,
            DbDocument.id == DocumentByConnectorCredentialPair.id,
@@ -73,63 +72,53 @@ def count_documents_by_needs_sync(session: Session) -> int:
                DbDocument.last_synced.is_(None),
            )
        )
-        .scalar()
+        .count()
    )

-    return count


def construct_document_select_for_connector_credential_pair_by_needs_sync(
    connector_id: int, credential_id: int
) -> Select:
-    initial_doc_ids_stmt = select(DocumentByConnectorCredentialPair.id).where(
-        and_(
-            DocumentByConnectorCredentialPair.connector_id == connector_id,
-            DocumentByConnectorCredentialPair.credential_id == credential_id,
-        )
-    )
-
-    stmt = (
+    return (
        select(DbDocument)
-        .where(
-            DbDocument.id.in_(initial_doc_ids_stmt),
-            or_(
-                DbDocument.last_modified
-                > DbDocument.last_synced,  # last_modified is newer than last_synced
-                DbDocument.last_synced.is_(None),  # never synced
-            ),
+        .join(
+            DocumentByConnectorCredentialPair,
+            DbDocument.id == DocumentByConnectorCredentialPair.id,
+        )
+        .where(
+            and_(
+                DocumentByConnectorCredentialPair.connector_id == connector_id,
+                DocumentByConnectorCredentialPair.credential_id == credential_id,
+                or_(
+                    DbDocument.last_modified > DbDocument.last_synced,
+                    DbDocument.last_synced.is_(None),
+                ),
            )
        )
        .distinct()
    )

-    return stmt


def construct_document_id_select_for_connector_credential_pair_by_needs_sync(
    connector_id: int, credential_id: int
) -> Select:
-    initial_doc_ids_stmt = select(DocumentByConnectorCredentialPair.id).where(
-        and_(
-            DocumentByConnectorCredentialPair.connector_id == connector_id,
-            DocumentByConnectorCredentialPair.credential_id == credential_id,
-        )
-    )
-
-    stmt = (
+    return (
        select(DbDocument.id)
-        .where(
-            DbDocument.id.in_(initial_doc_ids_stmt),
-            or_(
-                DbDocument.last_modified
-                > DbDocument.last_synced,  # last_modified is newer than last_synced
-                DbDocument.last_synced.is_(None),  # never synced
-            ),
+        .join(
+            DocumentByConnectorCredentialPair,
+            DbDocument.id == DocumentByConnectorCredentialPair.id,
+        )
+        .where(
+            and_(
+                DocumentByConnectorCredentialPair.connector_id == connector_id,
+                DocumentByConnectorCredentialPair.credential_id == credential_id,
+                or_(
+                    DbDocument.last_modified > DbDocument.last_synced,
+                    DbDocument.last_synced.is_(None),
+                ),
            )
        )
        .distinct()
    )

-    return stmt


def get_all_documents_needing_vespa_sync_for_cc_pair(
    db_session: Session, cc_pair_id: int
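A sketch (not from the commits) for inspecting the SQL that the rewritten join-based helper emits, which is the query shape the new ix_document_sync_status index is meant to serve. It assumes the helper is importable from onyx.db.document; the IDs are placeholders:

from sqlalchemy.dialects import postgresql

# Assumed import path for the helper shown in the hunks above.
from onyx.db.document import (
    construct_document_select_for_connector_credential_pair_by_needs_sync,
)

stmt = construct_document_select_for_connector_credential_pair_by_needs_sync(
    connector_id=1, credential_id=1  # placeholder IDs
)
print(stmt.compile(dialect=postgresql.dialect(), compile_kwargs={"literal_binds": True}))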
@@ -570,6 +570,14 @@ class Document(Base):
        back_populates="documents",
    )

+    __table_args__ = (
+        Index(
+            "ix_document_sync_status",
+            last_modified,
+            last_synced,
+        ),
+    )
+

class Tag(Base):
    __tablename__ = "tag"
@@ -23,6 +23,7 @@ class PreviousMessage(BaseModel):
    message_type: MessageType
    files: list[InMemoryChatFile]
    tool_call: ToolCallFinalResult | None
+    refined_answer_improvement: bool | None

    @classmethod
    def from_chat_message(
@@ -47,6 +48,7 @@ class PreviousMessage(BaseModel):
            )
            if chat_message.tool_call
            else None,
+            refined_answer_improvement=chat_message.refined_answer_improvement,
        )

    def to_langchain_msg(self) -> BaseMessage:
@@ -14,6 +14,7 @@ from fastapi import Request
from fastapi import status
from fastapi.exceptions import RequestValidationError
from fastapi.middleware.cors import CORSMiddleware
+from fastapi.openapi.utils import get_openapi
from fastapi.responses import JSONResponse
from httpx_oauth.clients.google import GoogleOAuth2
from sentry_sdk.integrations.fastapi import FastApiIntegration
@@ -28,8 +29,6 @@ from onyx.auth.users import auth_backend
from onyx.auth.users import create_onyx_oauth_router
from onyx.auth.users import fastapi_users
from onyx.configs.app_configs import APP_API_PREFIX
from onyx.configs.app_configs import APP_HOST
from onyx.configs.app_configs import APP_PORT
from onyx.configs.app_configs import AUTH_RATE_LIMITING_ENABLED
from onyx.configs.app_configs import AUTH_TYPE
from onyx.configs.app_configs import DISABLE_GENERATIVE_AI
@@ -105,7 +104,6 @@ from onyx.utils.telemetry import get_or_generate_uuid
from onyx.utils.telemetry import optional_telemetry
from onyx.utils.telemetry import RecordType
from onyx.utils.variable_functionality import fetch_versioned_implementation
from onyx.utils.variable_functionality import global_version
from onyx.utils.variable_functionality import set_is_ee_based_on_env_variable
from shared_configs.configs import CORS_ALLOWED_ORIGIN
from shared_configs.configs import MULTI_TENANT
@@ -263,7 +261,12 @@ def log_http_error(request: Request, exc: Exception) -> JSONResponse:


def get_application() -> FastAPI:
-    application = FastAPI(title="Onyx Backend", version=__version__, lifespan=lifespan)
+    application = FastAPI(
+        title="Onyx Backend",
+        version=__version__,
+        openapi_url="/openapi.json",
+        lifespan=lifespan,
+    )
    if SENTRY_DSN:
        sentry_sdk.init(
            dsn=SENTRY_DSN,
@@ -397,24 +400,48 @@ def get_application() -> FastAPI:
    if LOG_ENDPOINT_LATENCY:
        add_latency_logging_middleware(application, logger)

-    # Ensure all routes have auth enabled or are explicitly marked as public
+    # # Ensure all routes have auth enabled or are explicitly marked as public
    check_router_auth(application)

+    # @application.get("/")
+    # async def root():
+    #     return {"message": "Hello World"}
+
+    def custom_openapi():
+        if application.openapi_schema:
+            return application.openapi_schema
+        openapi_schema = get_openapi(
+            title=application.title,
+            version=application.version,
+            description="Your API description here",
+            routes=application.routes,
+        )
+        # Force OpenAPI version 2.0 instead of 3.1.0
+        application.openapi_schema = openapi_schema
+        return application.openapi_schema
+
+    application.openapi = custom_openapi
+
    return application


# NOTE: needs to be outside of the `if __name__ == "__main__"` block so that the
# app is exportable
set_is_ee_based_on_env_variable()
+# app = get_application()
app = fetch_versioned_implementation(module="onyx.main", attribute="get_application")

+# from fastapi import FastAPI
+
+# app = FastAPI()
+
+# @app.get("/")
+# async def root():
+#     return {"message": "Hello World"}
+
+# @app.get("/hi")
+# async def docs():
+#     return {"message": "Hello World"}
+
if __name__ == "__main__":
    logger.notice(
        f"Starting Onyx Backend version {__version__} on http://{APP_HOST}:{str(APP_PORT)}/"
    )

    if global_version.is_ee_version():
        logger.notice("Running Enterprise Edition")

-    uvicorn.run(app, host=APP_HOST, port=APP_PORT)
+    uvicorn.run(app, host="0.0.0.0", port=8000)
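A sketch (not from the commits) of exercising the schema endpoint that the openapi_url setting and custom_openapi override expose, assuming the backend above is running locally on the hardcoded host and port:

import requests

# Assumes the backend from this diff is up at 0.0.0.0:8000 (the hardcoded uvicorn settings above).
schema = requests.get("http://localhost:8000/openapi.json", timeout=5).json()
print(schema["info"]["title"])              # expected: "Onyx Backend"
print(len(schema.get("paths", {})), "routes documented")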
@@ -1,61 +1,47 @@
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from fastapi_users.exceptions import InvalidPasswordException
from sqlalchemy.orm import Session

from onyx.auth.users import current_admin_user
from onyx.auth.users import current_user
from onyx.auth.users import get_user_manager
from onyx.auth.users import User
from onyx.auth.users import UserManager
from onyx.db.engine import get_session
from onyx.db.users import get_user_by_email
from onyx.server.features.password.models import ChangePasswordRequest
from onyx.server.features.password.models import UserResetRequest
from onyx.server.features.password.models import UserResetResponse

router = APIRouter(prefix="/password")


-@router.post("/change-password")
-async def change_my_password(
-    form_data: ChangePasswordRequest,
-    user_manager: UserManager = Depends(get_user_manager),
-    current_user: User = Depends(current_user),
-) -> None:
-    """
-    Change the password for the current user.
-    """
-    try:
-        await user_manager.change_password_if_old_matches(
-            user=current_user,
-            old_password=form_data.old_password,
-            new_password=form_data.new_password,
-        )
-    except InvalidPasswordException as e:
-        raise HTTPException(status_code=400, detail=str(e.reason))
-    except Exception as e:
-        raise HTTPException(
-            status_code=500, detail=f"An unexpected error occurred: {str(e)}"
-        )
+# @router.post("/change-password")
+# async def change_my_password(
+#     form_data: ChangePasswordRequest,
+#     user_manager: UserManager = Depends(get_user_manager),
+#     current_user: User = Depends(current_user),
+# ) -> None:
+#     """
+#     Change the password for the current user.
+#     """
+#     try:
+#         await user_manager.change_password_if_old_matches(
+#             user=current_user,
+#             old_password=form_data.old_password,
+#             new_password=form_data.new_password,
+#         )
+#     except InvalidPasswordException as e:
+#         raise HTTPException(status_code=400, detail=str(e.reason))
+#     except Exception as e:
+#         raise HTTPException(
+#             status_code=500, detail=f"An unexpected error occurred: {str(e)}"
+#         )


-@router.post("/reset_password")
-async def admin_reset_user_password(
-    user_reset_request: UserResetRequest,
-    user_manager: UserManager = Depends(get_user_manager),
-    db_session: Session = Depends(get_session),
-    _: User = Depends(current_admin_user),
-) -> UserResetResponse:
-    """
-    Reset the password for a user (admin only).
-    """
-    user = get_user_by_email(user_reset_request.user_email, db_session)
-    if not user:
-        raise HTTPException(status_code=404, detail="User not found")
-    new_password = await user_manager.reset_password_as_admin(user.id)
-    return UserResetResponse(
-        user_id=str(user.id),
-        new_password=new_password,
-    )
+# @router.post("/reset_password")
+# async def admin_reset_user_password(
+#     user_reset_request: UserResetRequest,
+#     user_manager: UserManager = Depends(get_user_manager),
+#     db_session: Session = Depends(get_session),
+#     _: User = Depends(current_admin_user),
+# ) -> UserResetResponse:
+#     """
+#     Reset the password for a user (admin only).
+#     """
+#     user = get_user_by_email(user_reset_request.user_email, db_session)
+#     if not user:
+#         raise HTTPException(status_code=404, detail="User not found")
+#     new_password = await user_manager.reset_password_as_admin(user.id)
+#     return UserResetResponse(
+#         user_id=str(user.id),
+#         new_password=new_password,
+#     )
@@ -311,19 +311,23 @@ def bulk_invite_users(
    all_emails = list(set(new_invited_emails) | set(initial_invited_users))
    number_of_invited_users = write_invited_users(all_emails)

+    # send out email invitations if enabled
+    if ENABLE_EMAIL_INVITES:
+        try:
+            for email in new_invited_emails:
+                send_user_email_invite(email, current_user, AUTH_TYPE)
+        except Exception as e:
+            logger.error(f"Error sending email invite to invited users: {e}")
+
    if not MULTI_TENANT:
        return number_of_invited_users

    # for billing purposes, write to the control plane about the number of new users
    try:
        logger.info("Registering tenant users")
        fetch_ee_implementation_or_noop(
            "onyx.server.tenants.billing", "register_tenant_users", None
        )(tenant_id, get_total_users_count(db_session))
-        if ENABLE_EMAIL_INVITES:
-            try:
-                for email in new_invited_emails:
-                    send_user_email_invite(email, current_user)
-            except Exception as e:
-                logger.error(f"Error sending email invite to invited users: {e}")
-
        return number_of_invited_users
    except Exception as e:
@@ -45,7 +45,7 @@ class Settings(BaseModel):
    gpu_enabled: bool | None = None
    application_status: ApplicationStatus = ApplicationStatus.ACTIVE
    anonymous_user_enabled: bool | None = None
-    pro_search_disabled: bool | None = None
+    pro_search_enabled: bool | None = None

    temperature_override_enabled: bool = False
    auto_scroll: bool = False
@@ -1,3 +1,4 @@
cohere==5.6.1
+posthog==3.7.4
python3-saml==1.15.0
xmlsec==1.3.14
@@ -4,6 +4,24 @@ log_format custom_main '$remote_addr - $remote_user [$time_local] "$request" '
                       '"$http_user_agent" "$http_x_forwarded_for" '
                       'rt=$request_time';

+# Map X-Forwarded-Proto or fallback to $scheme
+map $http_x_forwarded_proto $forwarded_proto {
+    default $http_x_forwarded_proto;
+    "" $scheme;
+}
+
+# Map X-Forwarded-Host or fallback to $host
+map $http_x_forwarded_host $forwarded_host {
+    default $http_x_forwarded_host;
+    "" $host;
+}
+
+# Map X-Forwarded-Port or fallback to server port
+map $http_x_forwarded_port $forwarded_port {
+    default $http_x_forwarded_port;
+    "" $server_port;
+}
+
upstream api_server {
    # fail_timeout=0 means we always retry an upstream even if it failed
    # to return a good HTTP response
@@ -21,8 +39,7 @@ upstream web_server {
}

server {
-    listen 80;
-    server_name ${DOMAIN};
+    listen 80 default_server;

    client_max_body_size 5G;  # Maximum upload size

@@ -36,8 +53,9 @@ server {
        # misc headers
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Proto $scheme;
-        proxy_set_header X-Forwarded-Host $host;
+        proxy_set_header X-Forwarded-Proto $forwarded_proto;
+        proxy_set_header X-Forwarded-Host $forwarded_host;
+        proxy_set_header X-Forwarded-Port $forwarded_port;
        proxy_set_header Host $host;

        # need to use 1.1 to support chunked transfers
@@ -54,8 +72,9 @@ server {
        # misc headers
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Proto $scheme;
-        proxy_set_header X-Forwarded-Host $host;
+        proxy_set_header X-Forwarded-Proto $forwarded_proto;
+        proxy_set_header X-Forwarded-Host $forwarded_host;
+        proxy_set_header X-Forwarded-Port $forwarded_port;
        proxy_set_header Host $host;

        proxy_http_version 1.1;
@@ -72,14 +91,25 @@ server {
}

server {
-    listen 443 ssl;
-    server_name ${DOMAIN};
+    listen 443 ssl default_server;

    client_max_body_size 5G;  # Maximum upload size

    location / {
        # misc headers
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        # don't use forwarded schema, host, or port here - this is the entry point
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-Host $host;
        proxy_set_header X-Forwarded-Port $server_port;
        proxy_set_header Host $host;

        proxy_http_version 1.1;
        proxy_buffering off;
        # we don't want nginx trying to do something clever with
        # redirects, we set the Host: header above already.
        proxy_redirect off;
        proxy_pass http://localhost:80;
    }
@@ -21,8 +21,7 @@ upstream web_server {
}

server {
-    listen 80;
-    server_name ${DOMAIN};
+    listen 80 default_server;

    client_max_body_size 5G;  # Maximum upload size

@@ -37,7 +36,8 @@ server {
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-Host $host;
+        proxy_set_header X-Forwarded-Port $server_port;
        proxy_set_header Host $host;

        # need to use 1.1 to support chunked transfers
@@ -55,7 +55,8 @@ server {
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-Host $host;
+        proxy_set_header X-Forwarded-Port $server_port;
        proxy_set_header Host $host;

        proxy_http_version 1.1;
@@ -4,6 +4,24 @@ log_format custom_main '$remote_addr - $remote_user [$time_local] "$request" '
                       '"$http_user_agent" "$http_x_forwarded_for" '
                       'rt=$request_time';

+# Map X-Forwarded-Proto or fallback to $scheme
+map $http_x_forwarded_proto $forwarded_proto {
+    default $http_x_forwarded_proto;
+    "" $scheme;
+}
+
+# Map X-Forwarded-Host or fallback to $host
+map $http_x_forwarded_host $forwarded_host {
+    default $http_x_forwarded_host;
+    "" $host;
+}
+
+# Map X-Forwarded-Port or fallback to server port
+map $http_x_forwarded_port $forwarded_port {
+    default $http_x_forwarded_port;
+    "" $server_port;
+}
+
upstream api_server {
    # fail_timeout=0 means we always retry an upstream even if it failed
    # to return a good HTTP response
@@ -21,8 +39,7 @@ upstream web_server {
}

server {
-    listen 80;
-    server_name ${DOMAIN};
+    listen 80 default_server;

    client_max_body_size 5G;  # Maximum upload size

@@ -36,8 +53,9 @@ server {
        # misc headers
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Proto $scheme;
-        proxy_set_header X-Forwarded-Host $host;
+        proxy_set_header X-Forwarded-Proto $forwarded_proto;
+        proxy_set_header X-Forwarded-Host $forwarded_host;
+        proxy_set_header X-Forwarded-Port $forwarded_port;
        proxy_set_header Host $host;

        # need to use 1.1 to support chunked transfers
@@ -54,8 +72,9 @@ server {
        # misc headers
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
-        proxy_set_header X-Forwarded-Proto $scheme;
-        proxy_set_header X-Forwarded-Host $host;
+        proxy_set_header X-Forwarded-Proto $forwarded_proto;
+        proxy_set_header X-Forwarded-Host $forwarded_host;
+        proxy_set_header X-Forwarded-Port $forwarded_port;
        proxy_set_header Host $host;

        proxy_http_version 1.1;
@@ -68,14 +87,25 @@ server {
}

server {
-    listen 443 ssl;
-    server_name ${DOMAIN};
+    listen 443 ssl default_server;

    client_max_body_size 5G;  # Maximum upload size

    location / {
        # misc headers
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
+        # don't use forwarded schema, host, or port here - this is the entry point
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-Host $host;
        proxy_set_header X-Forwarded-Port $server_port;
        proxy_set_header Host $host;

        proxy_http_version 1.1;
        proxy_buffering off;
        # we don't want nginx trying to do something clever with
        # redirects, we set the Host: header above already.
        proxy_redirect off;
        proxy_pass http://localhost:80;
    }
@@ -1,5 +1,5 @@
# fill in the template
-envsubst '$DOMAIN $SSL_CERT_FILE_NAME $SSL_CERT_KEY_FILE_NAME' < "/etc/nginx/conf.d/$1" > /etc/nginx/conf.d/app.conf
+envsubst '$SSL_CERT_FILE_NAME $SSL_CERT_KEY_FILE_NAME' < "/etc/nginx/conf.d/$1" > /etc/nginx/conf.d/app.conf

# wait for the api_server to be ready
echo "Waiting for API server to boot up; this may take a minute or two..."
@@ -68,6 +68,22 @@ const nextConfig = {
      },
    ];
  },
+  async rewrites() {
+    return [
+      {
+        source: "/api/docs/:path*", // catch /api/docs and /api/docs/...
+        destination: "http://localhost:8000/docs/:path*",
+      },
+      {
+        source: "/api/docs", // if you also need the exact /api/docs
+        destination: "http://localhost:8000/docs",
+      },
+      {
+        source: "/openapi.json",
+        destination: "http://localhost:8000/openapi.json",
+      },
+    ];
+  },
};

// Sentry configuration for error monitoring:
web/package-lock.json (generated, 17 changed lines)
@@ -75,6 +75,7 @@
      "semver": "^7.5.4",
      "sharp": "^0.33.5",
      "stripe": "^17.0.0",
+     "swagger-ui-dist": "^5.19.0",
      "swr": "^2.1.5",
      "tailwind-merge": "^2.5.4",
      "tailwindcss": "^3.3.1",
@@ -6180,6 +6181,13 @@
      "integrity": "sha512-qC/xYId4NMebE6w/V33Fh9gWxLgURiNYgVNObbJl2LZv0GUUItCcCqC5axQSwRaAgaxl2mELq1rMzlswaQ0Zxg==",
      "dev": true
    },
+   "node_modules/@scarf/scarf": {
+     "version": "1.4.0",
+     "resolved": "https://registry.npmjs.org/@scarf/scarf/-/scarf-1.4.0.tgz",
+     "integrity": "sha512-xxeapPiUXdZAE3che6f3xogoJPeZgig6omHEy1rIY5WVsB3H2BHNnZH+gHG6x91SCWyQCzWGsuL2Hh3ClO5/qQ==",
+     "hasInstallScript": true,
+     "license": "Apache-2.0"
+   },
    "node_modules/@segment/analytics-core": {
      "version": "1.4.1",
      "resolved": "https://registry.npmjs.org/@segment/analytics-core/-/analytics-core-1.4.1.tgz",
@@ -20122,6 +20130,15 @@
        "url": "https://github.com/sponsors/ljharb"
      }
    },
+   "node_modules/swagger-ui-dist": {
+     "version": "5.19.0",
+     "resolved": "https://registry.npmjs.org/swagger-ui-dist/-/swagger-ui-dist-5.19.0.tgz",
+     "integrity": "sha512-bSVZeYaqanMFeW5ZY3+EejFbsjkjazYxm1I7Lz3xayYz5XU3m2aUzvuPC0jI95WCQdduszHYV3ER4buQoy8DXA==",
+     "license": "Apache-2.0",
+     "dependencies": {
+       "@scarf/scarf": "=1.4.0"
+     }
+   },
    "node_modules/swr": {
      "version": "2.2.5",
      "resolved": "https://registry.npmjs.org/swr/-/swr-2.2.5.tgz",
@@ -78,6 +78,7 @@
|
||||
"semver": "^7.5.4",
|
||||
"sharp": "^0.33.5",
|
||||
"stripe": "^17.0.0",
|
||||
"swagger-ui-dist": "^5.19.0",
|
||||
"swr": "^2.1.5",
|
||||
"tailwind-merge": "^2.5.4",
|
||||
"tailwindcss": "^3.3.1",
|
||||
|
||||
@@ -240,11 +240,11 @@ export function SettingsForm() {
        />

        <Checkbox
-         label="Pro Search Disabled"
-         sublabel="If set, users will not be able to use Pro Search."
-         checked={settings.pro_search_disabled ?? false}
+         label="Agent Search"
+         sublabel="If set, users will be able to use Agent Search."
+         checked={settings.pro_search_enabled ?? true}
          onChange={(e) =>
-           handleToggleSettingsField("pro_search_disabled", e.target.checked)
+           handleToggleSettingsField("pro_search_enabled", e.target.checked)
          }
        />
@@ -10,7 +10,7 @@ export interface Settings {
  notifications: Notification[];
  needs_reindexing: boolean;
  gpu_enabled: boolean;
-  pro_search_disabled: boolean | null;
+  pro_search_enabled: boolean | null;
  application_status: ApplicationStatus;
  auto_scroll: boolean;
  temperature_override_enabled: boolean;
@@ -61,20 +61,20 @@ export async function OPTIONS(
}

async function handleRequest(request: NextRequest, path: string[]) {
-  if (
-    process.env.NODE_ENV !== "development" &&
-    // NOTE: Set this environment variable to 'true' for preview environments
-    // Where you want finer-grained control over API access
-    process.env.OVERRIDE_API_PRODUCTION !== "true"
-  ) {
-    return NextResponse.json(
-      {
-        message:
-          "This API is only available in development mode. In production, something else (e.g. nginx) should handle this.",
-      },
-      { status: 404 }
-    );
-  }
+  // if (
+  //   process.env.NODE_ENV !== "development" &&
+  //   // NOTE: Set this environment variable to 'true' for preview environments
+  //   // Where you want finer-grained control over API access
+  //   process.env.OVERRIDE_API_PRODUCTION !== "true"
+  // ) {
+  //   return NextResponse.json(
+  //     {
+  //       message:
+  //         "This API is only available in development mode. In production, something else (e.g. nginx) should handle this.",
+  //     },
+  //     { status: 404 }
+  //   );
+  // }

  try {
    const backendUrl = new URL(`${INTERNAL_URL}/${path.join("/")}`);
@@ -87,14 +87,17 @@ async function handleRequest(request: NextRequest, path: string[]) {
      backendUrl.searchParams.append(key, value);
    });

-    const response = await fetch(backendUrl, {
+    const fetchDetails = {
      method: request.method,
      headers: request.headers,
      body: request.body,
      signal: request.signal,
      // @ts-ignore
      duplex: "half",
-    });
+    };
+    console.log(fetchDetails);
+    console.log("BACKEND URL: ", backendUrl);
+    const response = await fetch(backendUrl, fetchDetails);

    // Check if the response is a stream
    if (
@@ -23,6 +23,7 @@ import {
  SubQuestionDetail,
  constructSubQuestions,
  DocumentsResponse,
+ AgenticMessageResponseIDInfo,
} from "./interfaces";

import Prism from "prismjs";
@@ -1280,6 +1281,8 @@ export function ChatPage({
    let toolCall: ToolCallMetadata | null = null;
    let isImprovement: boolean | undefined = undefined;
    let isStreamingQuestions = true;
+   let includeAgentic = false;
+   let secondLevelMessageId: number | null = null;

    let initialFetchDetails: null | {
      user_message_id: number;
@@ -1336,7 +1339,7 @@
          searchParams.get(SEARCH_PARAM_NAMES.SYSTEM_PROMPT) || undefined,
        useExistingUserMessage: isSeededChat,
        useLanggraph:
-         !settings?.settings.pro_search_disabled &&
+         settings?.settings.pro_search_enabled &&
          proSearchEnabled &&
          retrievalEnabled,
      });
@@ -1417,6 +1420,17 @@
          resetRegenerationState();
        } else {
          const { user_message_id, frozenMessageMap } = initialFetchDetails;
+         if (Object.hasOwn(packet, "agentic_message_ids")) {
+           const agenticMessageIds = (packet as AgenticMessageResponseIDInfo)
+             .agentic_message_ids;
+           const level1MessageId = agenticMessageIds.find(
+             (item) => item.level === 1
+           )?.message_id;
+           if (level1MessageId) {
+             secondLevelMessageId = level1MessageId;
+             includeAgentic = true;
+           }
+         }

          setChatState((prevState) => {
            if (prevState.get(chatSessionIdRef.current!) === "loading") {
@@ -1568,7 +1582,10 @@
              };
            }
          );
-       } else if (Object.hasOwn(packet, "error")) {
+       } else if (
+         Object.hasOwn(packet, "error") &&
+         (packet as any).error != null
+       ) {
          if (
            sub_questions.length > 0 &&
            sub_questions
@@ -1580,8 +1597,8 @@
            setAgenticGenerating(false);
            setAlternativeGeneratingAssistant(null);
            setSubmittedMessage("");
-           return;
-           // throw new Error((packet as StreamingError).error);
+
+           throw new Error((packet as StreamingError).error);
          } else {
            error = (packet as StreamingError).error;
            stackTrace = (packet as StreamingError).stack_trace;
@@ -1664,6 +1681,19 @@
                  second_level_generating: second_level_generating,
                  agentic_docs: agenticDocs,
                },
+               ...(includeAgentic
+                 ? [
+                     {
+                       messageId: secondLevelMessageId!,
+                       message: second_level_answer,
+                       type: "assistant" as const,
+                       files: [],
+                       toolCall: null,
+                       parentMessageId:
+                         initialFetchDetails.assistant_message_id!,
+                     },
+                   ]
+                 : []),
              ]);
            }
          }
@@ -2692,6 +2722,11 @@
                ? messageHistory[i + 1]?.documents
                : undefined;

+             const nextMessage =
+               messageHistory[i + 1]?.type === "assistant"
+                 ? messageHistory[i + 1]
+                 : undefined;
+
              return (
                <div
                  className="text-text"
@@ -2720,7 +2755,10 @@
                      selectedMessageForDocDisplay ==
                        secondLevelMessage?.messageId)
                  }
-                 isImprovement={message.isImprovement}
+                 isImprovement={
+                   message.isImprovement ||
+                   nextMessage?.isImprovement
+                 }
                  secondLevelGenerating={
                    (message.second_level_generating &&
                      currentSessionChatState !==
@@ -805,13 +805,12 @@
            )}
          </div>
          <div className="flex items-center my-auto">
-           {retrievalEnabled &&
-             !settings?.settings.pro_search_disabled && (
-               <AgenticToggle
-                 proSearchEnabled={proSearchEnabled}
-                 setProSearchEnabled={setProSearchEnabled}
-               />
-             )}
+           {retrievalEnabled && settings?.settings.pro_search_enabled && (
+             <AgenticToggle
+               proSearchEnabled={proSearchEnabled}
+               setProSearchEnabled={setProSearchEnabled}
+             />
+           )}
            <button
              id="onyx-chat-input-send-button"
              className={`cursor-pointer ${
@@ -155,6 +155,15 @@ export interface MessageResponseIDInfo {
  reserved_assistant_message_id: number;
}

+export interface AgentMessageIDInfo {
+  level: number;
+  message_id: number;
+}
+
+export interface AgenticMessageResponseIDInfo {
+  agentic_message_ids: AgentMessageIDInfo[];
+}
+
export interface DocumentsResponse {
  top_documents: OnyxDocument[];
  rephrased_query: string | null;
@@ -25,6 +25,7 @@ import {
  RetrievalType,
  StreamingError,
  ToolCallMetadata,
+ AgenticMessageResponseIDInfo,
} from "./interfaces";
import { Persona } from "../admin/assistants/interfaces";
import { ReadonlyURLSearchParams } from "next/navigation";
@@ -154,7 +155,8 @@ export type PacketType =
  | AgentAnswerPiece
  | SubQuestionPiece
  | ExtendedToolResponse
- | RefinedAnswerImprovement;
+ | RefinedAnswerImprovement
+ | AgenticMessageResponseIDInfo;

export async function* sendMessage({
  regenerate,
@@ -21,11 +21,9 @@ import { fetchAssistantData } from "@/lib/chat/fetchAssistantdata";
import { AppProvider } from "@/components/context/AppProvider";
import { PHProvider } from "./providers";
import { getCurrentUserSS } from "@/lib/userSS";
import CardSection from "@/components/admin/CardSection";
import { Suspense } from "react";
import PostHogPageView from "./PostHogPageView";
import Script from "next/script";
import { LogoType } from "@/components/logo/Logo";
import { Hanken_Grotesk } from "next/font/google";
import { WebVitals } from "./web-vitals";
import { ThemeProvider } from "next-themes";
@@ -51,7 +51,7 @@ export async function fetchSettingsSS(): Promise<CombinedSettings | null> {
      notifications: [],
      needs_reindexing: false,
      anonymous_user_enabled: false,
-     pro_search_disabled: false,
+     pro_search_enabled: true,
      temperature_override_enabled: true,
    };
  } else {
@@ -95,8 +95,8 @@ export async function fetchSettingsSS(): Promise<CombinedSettings | null> {
    }
  }

-  if (enterpriseSettings && settings.pro_search_disabled == null) {
-    settings.pro_search_disabled = true;
+  if (settings.pro_search_enabled == null) {
+    settings.pro_search_enabled = true;
  }

  const webVersion = getWebVersion();