Compare commits

..

1 Commit

Author SHA1 Message Date
pablonyx
687122911d k 2025-03-05 15:27:14 -08:00
12 changed files with 390 additions and 286 deletions

View File

@@ -6,7 +6,8 @@ Create Date: 2025-02-26 13:07:56.217791
"""
from alembic import op
import time
from sqlalchemy import text
# revision identifiers, used by Alembic.
revision = "3bd4c84fe72f"
@@ -27,45 +28,357 @@ depends_on = None
# 4. Adds indexes to both chat_message and chat_session tables for comprehensive search
def upgrade() -> None:
# Create a GIN index for full-text search on chat_message.message
def upgrade():
# --- PART 1: chat_message table ---
# Step 1: Add nullable column (quick, minimal locking)
# op.execute("ALTER TABLE chat_message DROP COLUMN IF EXISTS message_tsv")
# op.execute("DROP TRIGGER IF EXISTS chat_message_tsv_trigger ON chat_message")
# op.execute("DROP FUNCTION IF EXISTS update_chat_message_tsv()")
# op.execute("ALTER TABLE chat_message DROP COLUMN IF EXISTS message_tsv")
# # Drop chat_session tsv trigger if it exists
# op.execute("DROP TRIGGER IF EXISTS chat_session_tsv_trigger ON chat_session")
# op.execute("DROP FUNCTION IF EXISTS update_chat_session_tsv()")
# op.execute("ALTER TABLE chat_session DROP COLUMN IF EXISTS title_tsv")
# raise Exception("Stop here")
time.time()
op.execute("ALTER TABLE chat_message ADD COLUMN IF NOT EXISTS message_tsv tsvector")
# Step 2: Create function and trigger for new/updated rows
op.execute(
"""
ALTER TABLE chat_message
ADD COLUMN message_tsv tsvector
GENERATED ALWAYS AS (to_tsvector('english', message)) STORED;
"""
CREATE OR REPLACE FUNCTION update_chat_message_tsv()
RETURNS TRIGGER AS $$
BEGIN
NEW.message_tsv = to_tsvector('english', NEW.message);
RETURN NEW;
END;
$$ LANGUAGE plpgsql
"""
)
# Commit the current transaction before creating concurrent indexes
op.execute("COMMIT")
# Create trigger in a separate execute call
op.execute(
"""
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_chat_message_tsv
ON chat_message
USING GIN (message_tsv)
"""
CREATE TRIGGER chat_message_tsv_trigger
BEFORE INSERT OR UPDATE ON chat_message
FOR EACH ROW EXECUTE FUNCTION update_chat_message_tsv()
"""
)
# Also add a stored tsvector column for chat_session.description
op.execute(
"""
ALTER TABLE chat_session
ADD COLUMN description_tsv tsvector
GENERATED ALWAYS AS (to_tsvector('english', coalesce(description, ''))) STORED;
"""
# Step 3: Update existing rows in batches using Python
time.time()
# Get connection and count total rows
connection = op.get_bind()
total_count_result = connection.execute(
text("SELECT COUNT(*) FROM chat_message")
).scalar()
total_count = total_count_result if total_count_result is not None else 0
batch_size = 5000
batches = 0
# Calculate total batches needed
total_batches = (
(total_count + batch_size - 1) // batch_size if total_count > 0 else 0
)
# Commit again before creating the second concurrent index
op.execute("COMMIT")
# Process in batches - properly handling UUIDs by using OFFSET/LIMIT approach
for batch_num in range(total_batches):
offset = batch_num * batch_size
op.execute(
"""
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_chat_session_desc_tsv
ON chat_session
USING GIN (description_tsv)
"""
# Execute update for this batch using OFFSET/LIMIT which works with UUIDs
connection.execute(
text(
"""
UPDATE chat_message
SET message_tsv = to_tsvector('english', message)
WHERE id IN (
SELECT id FROM chat_message
WHERE message_tsv IS NULL
ORDER BY id
LIMIT :batch_size OFFSET :offset
)
"""
).bindparams(batch_size=batch_size, offset=offset)
)
# Commit each batch
connection.execute(text("COMMIT"))
# Start a new transaction
connection.execute(text("BEGIN"))
batches += 1
# Final check for any remaining NULL values
connection.execute(
text(
"""
UPDATE chat_message SET message_tsv = to_tsvector('english', message)
WHERE message_tsv IS NULL
"""
)
)
# Create GIN index concurrently
connection.execute(text("COMMIT"))
time.time()
connection.execute(
text(
"""
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_chat_message_tsv
ON chat_message USING GIN (message_tsv)
"""
)
)
# First drop the trigger as it won't be needed anymore
connection.execute(
text(
"""
DROP TRIGGER IF EXISTS chat_message_tsv_trigger ON chat_message;
"""
)
)
connection.execute(
text(
"""
DROP FUNCTION IF EXISTS update_chat_message_tsv();
"""
)
)
# Add new generated column
time.time()
connection.execute(
text(
"""
ALTER TABLE chat_message
ADD COLUMN message_tsv_gen tsvector
GENERATED ALWAYS AS (to_tsvector('english', message)) STORED;
"""
)
)
connection.execute(text("COMMIT"))
time.time()
connection.execute(
text(
"""
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_chat_message_tsv_gen
ON chat_message USING GIN (message_tsv_gen)
"""
)
)
# Drop old index and column
connection.execute(text("COMMIT"))
connection.execute(
text(
"""
DROP INDEX CONCURRENTLY IF EXISTS idx_chat_message_tsv;
"""
)
)
connection.execute(text("COMMIT"))
connection.execute(
text(
"""
ALTER TABLE chat_message DROP COLUMN message_tsv;
"""
)
)
# Rename new column to old name
connection.execute(
text(
"""
ALTER TABLE chat_message RENAME COLUMN message_tsv_gen TO message_tsv;
"""
)
)
# --- PART 2: chat_session table ---
# Step 1: Add nullable column (quick, minimal locking)
time.time()
connection.execute(
text(
"ALTER TABLE chat_session ADD COLUMN IF NOT EXISTS description_tsv tsvector"
)
)
# Step 2: Create function and trigger for new/updated rows - SPLIT INTO SEPARATE CALLS
connection.execute(
text(
"""
CREATE OR REPLACE FUNCTION update_chat_session_tsv()
RETURNS TRIGGER AS $$
BEGIN
NEW.description_tsv = to_tsvector('english', COALESCE(NEW.description, ''));
RETURN NEW;
END;
$$ LANGUAGE plpgsql
"""
)
)
# Create trigger in a separate execute call
connection.execute(
text(
"""
CREATE TRIGGER chat_session_tsv_trigger
BEFORE INSERT OR UPDATE ON chat_session
FOR EACH ROW EXECUTE FUNCTION update_chat_session_tsv()
"""
)
)
# Step 3: Update existing rows in batches using Python
time.time()
# Get the maximum ID to determine batch count
# Cast id to text for MAX function since it's a UUID
max_id_result = connection.execute(
text("SELECT COALESCE(MAX(id::text), '0') FROM chat_session")
).scalar()
max_id_result if max_id_result is not None else "0"
batch_size = 5000
batches = 0
# Get all IDs ordered to process in batches
rows = connection.execute(
text("SELECT id FROM chat_session ORDER BY id")
).fetchall()
total_rows = len(rows)
# Process in batches
for batch_num, batch_start in enumerate(range(0, total_rows, batch_size)):
batch_end = min(batch_start + batch_size, total_rows)
batch_ids = [row[0] for row in rows[batch_start:batch_end]]
if not batch_ids:
continue
# Use IN clause instead of BETWEEN for UUIDs
placeholders = ", ".join([f":id{i}" for i in range(len(batch_ids))])
params = {f"id{i}": id_val for i, id_val in enumerate(batch_ids)}
# Execute update for this batch
connection.execute(
text(
f"""
UPDATE chat_session
SET description_tsv = to_tsvector('english', COALESCE(description, ''))
WHERE id IN ({placeholders})
AND description_tsv IS NULL
"""
).bindparams(**params)
)
# Commit each batch
connection.execute(text("COMMIT"))
# Start a new transaction
connection.execute(text("BEGIN"))
batches += 1
# Final check for any remaining NULL values
connection.execute(
text(
"""
UPDATE chat_session SET description_tsv = to_tsvector('english', COALESCE(description, ''))
WHERE description_tsv IS NULL
"""
)
)
# Create GIN index concurrently
connection.execute(text("COMMIT"))
time.time()
connection.execute(
text(
"""
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_chat_session_desc_tsv
ON chat_session USING GIN (description_tsv)
"""
)
)
# After Final check for chat_session
# First drop the trigger as it won't be needed anymore
connection.execute(
text(
"""
DROP TRIGGER IF EXISTS chat_session_tsv_trigger ON chat_session;
"""
)
)
connection.execute(
text(
"""
DROP FUNCTION IF EXISTS update_chat_session_tsv();
"""
)
)
# Add new generated column
time.time()
connection.execute(
text(
"""
ALTER TABLE chat_session
ADD COLUMN description_tsv_gen tsvector
GENERATED ALWAYS AS (to_tsvector('english', COALESCE(description, ''))) STORED;
"""
)
)
# Create new index on generated column
connection.execute(text("COMMIT"))
time.time()
connection.execute(
text(
"""
CREATE INDEX CONCURRENTLY IF NOT EXISTS idx_chat_session_desc_tsv_gen
ON chat_session USING GIN (description_tsv_gen)
"""
)
)
# Drop old index and column
connection.execute(text("COMMIT"))
connection.execute(
text(
"""
DROP INDEX CONCURRENTLY IF EXISTS idx_chat_session_desc_tsv;
"""
)
)
connection.execute(text("COMMIT"))
connection.execute(
text(
"""
ALTER TABLE chat_session DROP COLUMN description_tsv;
"""
)
)
# Rename new column to old name
connection.execute(
text(
"""
ALTER TABLE chat_session RENAME COLUMN description_tsv_gen TO description_tsv;
"""
)
)

View File

@@ -54,26 +54,20 @@ logger = logging.getLogger(__name__)
async def get_or_provision_tenant(
email: str, referral_source: str | None = None, request: Request | None = None
) -> tuple[str, bool]:
"""Get existing tenant ID for an email or create a new tenant if none exists.
Returns:
tuple: (tenant_id, is_newly_created) - The tenant ID and a boolean indicating if it was newly created
"""
) -> str:
"""Get existing tenant ID for an email or create a new tenant if none exists."""
if not MULTI_TENANT:
return POSTGRES_DEFAULT_SCHEMA, False
return POSTGRES_DEFAULT_SCHEMA
if referral_source and request:
await submit_to_hubspot(email, referral_source, request)
is_newly_created = False
try:
tenant_id = get_tenant_id_for_email(email)
except exceptions.UserNotExists:
# If tenant does not exist and in Multi tenant mode, provision a new tenant
try:
tenant_id = await create_tenant(email, referral_source)
is_newly_created = True
except Exception as e:
logger.error(f"Tenant provisioning failed: {e}")
raise HTTPException(status_code=500, detail="Failed to provision tenant.")
@@ -83,7 +77,7 @@ async def get_or_provision_tenant(
status_code=401, detail="User does not belong to an organization"
)
return tenant_id, is_newly_created
return tenant_id
async def create_tenant(email: str, referral_source: str | None = None) -> str:
@@ -121,12 +115,36 @@ async def provision_tenant(tenant_id: str, email: str) -> None:
token = CURRENT_TENANT_ID_CONTEXTVAR.set(tenant_id)
# Await the Alembic migrations up to the specified revision
await asyncio.to_thread(run_alembic_migrations, tenant_id, "465f78d9b7f9")
# Await the Alembic migrations
await asyncio.to_thread(run_alembic_migrations, tenant_id)
with get_session_with_tenant(tenant_id=tenant_id) as db_session:
configure_default_api_keys(db_session)
current_search_settings = (
db_session.query(SearchSettings)
.filter_by(status=IndexModelStatus.FUTURE)
.first()
)
cohere_enabled = (
current_search_settings is not None
and current_search_settings.provider_type == EmbeddingProvider.COHERE
)
setup_onyx(db_session, tenant_id, cohere_enabled=cohere_enabled)
# Add users to tenant - this is needed for authentication
add_users_to_tenant([email], tenant_id)
with get_session_with_tenant(tenant_id=tenant_id) as db_session:
create_milestone_and_report(
user=None,
distinct_id=tenant_id,
event_type=MilestoneRecordType.TENANT_CREATED,
properties={
"email": email,
},
db_session=db_session,
)
except Exception as e:
logger.exception(f"Failed to create tenant {tenant_id}")
raise HTTPException(
@@ -331,62 +349,3 @@ async def delete_user_from_control_plane(tenant_id: str, email: str) -> None:
raise Exception(
f"Failed to delete tenant on control plane: {error_text}"
)
async def complete_tenant_setup(tenant_id: str, email: str) -> None:
    """Finish provisioning a tenant after its user record exists.

    Performs the deferred half of tenant setup:
      1. runs the remaining Alembic migrations for the tenant schema,
      2. configures default API keys,
      3. runs Onyx setup (Cohere-aware if the FUTURE search settings use it),
      4. records a TENANT_CREATED milestone.

    Raises:
        HTTPException: 403 when multi-tenancy is disabled, 500 on any setup failure.
    """
    if not MULTI_TENANT:
        raise HTTPException(status_code=403, detail="Multi-tenancy is not enabled")

    logger.debug(f"Completing setup for tenant {tenant_id}")

    ctx_token = None
    try:
        # Scope all downstream DB access to this tenant's schema.
        ctx_token = CURRENT_TENANT_ID_CONTEXTVAR.set(tenant_id)

        # Migrations are blocking; run them off the event loop.
        await asyncio.to_thread(run_alembic_migrations, tenant_id)

        with get_session_with_tenant(tenant_id=tenant_id) as session:
            configure_default_api_keys(session)

            future_settings = (
                session.query(SearchSettings)
                .filter_by(status=IndexModelStatus.FUTURE)
                .first()
            )
            use_cohere = (
                future_settings is not None
                and future_settings.provider_type == EmbeddingProvider.COHERE
            )
            setup_onyx(session, tenant_id, cohere_enabled=use_cohere)

        # Milestone reporting uses its own session so setup work is already flushed.
        with get_session_with_tenant(tenant_id=tenant_id) as session:
            create_milestone_and_report(
                user=None,
                distinct_id=tenant_id,
                event_type=MilestoneRecordType.TENANT_CREATED,
                properties={
                    "email": email,
                },
                db_session=session,
            )

        logger.info(f"Tenant setup completed for {tenant_id}")
    except Exception as exc:
        logger.exception(f"Failed to complete tenant setup for {tenant_id}")
        raise HTTPException(
            status_code=500, detail=f"Failed to complete tenant setup: {str(exc)}"
        )
    finally:
        # Always restore the previous tenant context, even on failure.
        if ctx_token is not None:
            CURRENT_TENANT_ID_CONTEXTVAR.reset(ctx_token)

View File

@@ -1,48 +0,0 @@
import logging
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from pydantic import BaseModel
from ee.onyx.server.tenants.provisioning import complete_tenant_setup
from ee.onyx.server.tenants.user_mapping import get_tenant_id_for_email
from onyx.auth.users import current_user
from onyx.auth.users import exceptions
from onyx.db.models import User
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/tenants", tags=["tenants"])
class CompleteTenantSetupRequest(BaseModel):
    """Request body for POST /tenants/complete-setup."""

    # Email of the user whose tenant setup should be completed.
    email: str
@router.post("/complete-setup")
async def api_complete_tenant_setup(
    request: CompleteTenantSetupRequest,
    user: User = Depends(current_user),
) -> dict:
    """Finish provisioning (migrations, seeding, etc.) for a user's tenant.

    Called by the frontend after user creation. Admins may complete setup for
    any email; other callers only for their own.
    """
    allowed = user.is_admin or user.email == request.email
    if not allowed:
        raise HTTPException(
            status_code=403, detail="You can only complete setup for your own tenant"
        )

    try:
        tenant_id = get_tenant_id_for_email(request.email)
    except exceptions.UserNotExists:
        # No mapping row means the user (and therefore the tenant) is unknown.
        raise HTTPException(status_code=404, detail="User or tenant not found")

    try:
        await complete_tenant_setup(tenant_id, request.email)
    except Exception as e:
        logger.error(f"Failed to complete tenant setup: {e}")
        raise HTTPException(status_code=500, detail="Failed to complete tenant setup")
    return {"status": "success"}

View File

@@ -14,10 +14,8 @@ from onyx.db.engine import get_sqlalchemy_engine
logger = logging.getLogger(__name__)
def run_alembic_migrations(schema_name: str, target_revision: str = "head") -> None:
logger.info(
f"Starting Alembic migrations for schema: {schema_name} to target: {target_revision}"
)
def run_alembic_migrations(schema_name: str) -> None:
logger.info(f"Starting Alembic migrations for schema: {schema_name}")
try:
current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -39,10 +37,11 @@ def run_alembic_migrations(schema_name: str, target_revision: str = "head") -> N
alembic_cfg.cmd_opts.x = [f"schema={schema_name}"] # type: ignore
# Run migrations programmatically
command.upgrade(alembic_cfg, target_revision)
command.upgrade(alembic_cfg, "head")
# Run migrations programmatically
logger.info(
f"Alembic migrations completed successfully for schema: {schema_name} to target: {target_revision}"
f"Alembic migrations completed successfully for schema: {schema_name}"
)
except Exception as e:

View File

@@ -90,7 +90,6 @@ from onyx.db.engine import get_async_session
from onyx.db.engine import get_async_session_with_tenant
from onyx.db.engine import get_session_with_tenant
from onyx.db.models import AccessToken
from onyx.db.models import MinimalUser
from onyx.db.models import OAuthAccount
from onyx.db.models import User
from onyx.db.users import get_user_by_email
@@ -187,7 +186,6 @@ def anonymous_user_enabled(*, tenant_id: str | None = None) -> bool:
def verify_email_is_invited(email: str) -> None:
return None
whitelist = get_invited_users()
if not whitelist:
return
@@ -217,7 +215,6 @@ def verify_email_is_invited(email: str) -> None:
def verify_email_in_whitelist(email: str, tenant_id: str) -> None:
return None
with get_session_with_tenant(tenant_id=tenant_id) as db_session:
if not get_user_by_email(email, db_session):
verify_email_is_invited(email)
@@ -238,13 +235,6 @@ def verify_email_domain(email: str) -> None:
)
class SimpleUserManager(UUIDIDMixin, BaseUserManager[MinimalUser, uuid.UUID]):
reset_password_token_secret = USER_AUTH_SECRET
verification_token_secret = USER_AUTH_SECRET
verification_token_lifetime_seconds = AUTH_COOKIE_EXPIRE_TIME_SECONDS
user_db: SQLAlchemyUserDatabase[MinimalUser, uuid.UUID]
class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
reset_password_token_secret = USER_AUTH_SECRET
verification_token_secret = USER_AUTH_SECRET
@@ -257,8 +247,8 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
)(user_email)
async with get_async_session_with_tenant(tenant_id) as db_session:
if MULTI_TENANT:
tenant_user_db = SQLAlchemyUserAdminDB[MinimalUser, uuid.UUID](
db_session, MinimalUser, OAuthAccount
tenant_user_db = SQLAlchemyUserAdminDB[User, uuid.UUID](
db_session, User, OAuthAccount
)
user = await tenant_user_db.get_by_email(user_email)
else:
@@ -287,7 +277,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
else None
)
tenant_id, is_newly_created = await fetch_ee_implementation_or_noop(
tenant_id = await fetch_ee_implementation_or_noop(
"onyx.server.tenants.provisioning",
"get_or_provision_tenant",
async_return_default_schema,
@@ -319,12 +309,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
else:
user_create.role = UserRole.BASIC
try:
simple_tennat_user_db = SQLAlchemyUserAdminDB[MinimalUser, uuid.UUID](
db_session, MinimalUser, OAuthAccount
)
user = await SimpleUserManager(simple_tennat_user_db).create(
user_create, safe=safe, request=request
) # type: ignore
user = await super().create(user_create, safe=safe, request=request) # type: ignore
except exceptions.UserAlreadyExists:
user = await self.get_by_email(user_create.email)
# Handle case where user has used product outside of web and is now creating an account through web
@@ -389,7 +374,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
getattr(request.state, "referral_source", None) if request else None
)
tenant_id, is_newly_created = await fetch_ee_implementation_or_noop(
tenant_id = await fetch_ee_implementation_or_noop(
"onyx.server.tenants.provisioning",
"get_or_provision_tenant",
async_return_default_schema,
@@ -526,7 +511,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
async def on_after_register(
self, user: User, request: Optional[Request] = None
) -> None:
tenant_id, is_newly_created = await fetch_ee_implementation_or_noop(
tenant_id = await fetch_ee_implementation_or_noop(
"onyx.server.tenants.provisioning",
"get_or_provision_tenant",
async_return_default_schema,
@@ -578,7 +563,7 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
status.HTTP_500_INTERNAL_SERVER_ERROR,
"Your admin has not enabled this feature.",
)
tenant_id, is_newly_created = await fetch_ee_implementation_or_noop(
tenant_id = await fetch_ee_implementation_or_noop(
"onyx.server.tenants.provisioning",
"get_or_provision_tenant",
async_return_default_schema,
@@ -602,8 +587,8 @@ class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]):
) -> Optional[User]:
email = credentials.username
# Get tenant_id from mapping
tenant_id, is_newly_created = await fetch_ee_implementation_or_noop(
# Get tenant_id from mapping table
tenant_id = await fetch_ee_implementation_or_noop(
"onyx.server.tenants.provisioning",
"get_or_provision_tenant",
async_return_default_schema,
@@ -724,7 +709,7 @@ class TenantAwareRedisStrategy(RedisStrategy[User, uuid.UUID]):
async def write_token(self, user: User) -> str:
redis = await get_async_redis_connection()
tenant_id, is_newly_created = await fetch_ee_implementation_or_noop(
tenant_id = await fetch_ee_implementation_or_noop(
"onyx.server.tenants.provisioning",
"get_or_provision_tenant",
async_return_default_schema,

View File

@@ -180,7 +180,6 @@ SCHEMA_NAME_REGEX = re.compile(r"^[a-zA-Z0-9_-]+$")
def is_valid_schema_name(name: str) -> bool:
    """Return True if ``name`` is a safe Postgres schema name.

    Valid names match SCHEMA_NAME_REGEX (letters, digits, underscore, hyphen
    only), which prevents SQL injection via schema identifiers.
    """
    # Removed leftover debug print(); this runs on every tenant-scoped request
    # and was writing every checked schema name to stdout.
    return SCHEMA_NAME_REGEX.match(name) is not None

View File

@@ -2318,16 +2318,3 @@ class TenantAnonymousUserPath(Base):
anonymous_user_path: Mapped[str] = mapped_column(
String, nullable=False, unique=True
)
class AdditionalBase(DeclarativeBase):
    # Separate declarative base so MinimalUser lives in its own registry,
    # independent of the module's main Base.
    __abstract__ = True


class MinimalUser(SQLAlchemyBaseUserTableUUID, AdditionalBase):
    """Slimmed-down user mapping (fastapi-users UUID user table) with only a role column."""

    # oauth_accounts: Mapped[list[OAuthAccount]] = relationship(
    #     "OAuthAccount", lazy="joined", cascade="all, delete-orphan"
    # )
    # NOTE(review): `default=UserRole.BASIC` is passed to the Enum *type*, not the
    # column — column defaults normally go on mapped_column(..., default=...).
    # Confirm this actually applies a default.
    role: Mapped[UserRole] = mapped_column(
        Enum(UserRole, native_enum=False, default=UserRole.BASIC)
    )

View File

@@ -43,7 +43,6 @@ def _get_or_generate_customer_id_mt(tenant_id: str) -> str:
def get_or_generate_uuid() -> str:
    # NOTE(review): hard-coded debug stub — every caller receives the literal
    # string "hi" instead of a real instance UUID, and the rest of this function
    # (below, outside this view) is unreachable. Confirm intent and remove
    # before release.
    return "hi"
    # TODO: split out the whole "instance UUID" generation logic into a separate
    # utility function. Telemetry should not be aware at all of how the UUID is
    # generated/stored.

View File

@@ -8,6 +8,8 @@ import {
} from "@/lib/userSS";
import { redirect } from "next/navigation";
import { EmailPasswordForm } from "../login/EmailPasswordForm";
import Text from "@/components/ui/text";
import Link from "next/link";
import { SignInButton } from "../login/SignInButton";
import AuthFlowContainer from "@/components/auth/AuthFlowContainer";
import ReferralSourceSelector from "./ReferralSourceSelector";

View File

@@ -1,72 +0,0 @@
import {
AuthTypeMetadata,
getAuthTypeMetadataSS,
getCurrentUserSS,
} from "@/lib/userSS";
import { redirect } from "next/navigation";
import { HealthCheckBanner } from "@/components/health/healthcheck";
import { User } from "@/lib/types";
import Text from "@/components/ui/text";
import { Logo } from "@/components/logo/Logo";
import { completeTenantSetup } from "@/lib/tenant";
/**
 * Server-rendered "waiting on setup" page.
 *
 * Fetches auth metadata and the current user; redirects unauthenticated
 * visitors to login (or /chat when auth is disabled). For verified users it
 * completes tenant setup and redirects to /chat; otherwise it renders a
 * holding screen while provisioning finishes.
 */
export default async function Page() {
  // catch cases where the backend is completely unreachable here
  // without try / catch, will just raise an exception and the page
  // will not render
  let authTypeMetadata: AuthTypeMetadata | null = null;
  let currentUser: User | null = null;
  try {
    [authTypeMetadata, currentUser] = await Promise.all([
      getAuthTypeMetadataSS(),
      getCurrentUserSS(),
    ]);
  } catch (e) {
    console.log(`Some fetch failed for the waiting-on-setup page - ${e}`);
  }
  if (!currentUser) {
    // Auth disabled means there is no login flow — go straight to chat.
    if (authTypeMetadata?.authType === "disabled") {
      return redirect("/chat");
    }
    return redirect("/auth/login");
  }
  // If the user is already verified, redirect to chat
  if (!authTypeMetadata?.requiresVerification || currentUser.is_verified) {
    // Complete tenant setup before redirecting (awaited here, not actually
    // backgrounded); failures are logged and the redirect proceeds anyway.
    if (currentUser.email) {
      try {
        await completeTenantSetup(currentUser.email);
      } catch (e) {
        console.error("Failed to complete tenant setup:", e);
      }
    }
    return redirect("/chat");
  }
  return (
    <main>
      {/* NOTE(review): "top-10x" looks like a typo — Tailwind has "top-10"; confirm. */}
      <div className="absolute top-10x w-full">
        <HealthCheckBanner />
      </div>
      <div className="min-h-screen flex items-center justify-center py-12 px-4 sm:px-6 lg:px-8">
        <div>
          <Logo height={64} width={64} className="mx-auto w-fit" />
          <div className="flex">
            <Text className="text-center font-medium text-lg mt-6 w-108">
              Hey <i>{currentUser.email}</i> - we're setting up your account.
              <br />
              This may take a few moments. You'll be redirected automatically
              when it's ready.
              <br />
              <br />
              If you're not redirected within a minute, please refresh the page.
            </Text>
          </div>
        </div>
      </div>
    </main>
  );
}

View File

@@ -1,19 +0,0 @@
/**
* Completes the tenant setup process for a user
* @param email The email of the user
* @returns A promise that resolves when the setup is complete
*/
/**
 * Asks the backend to finish provisioning the tenant that owns `email`.
 *
 * Resolves on HTTP success; rejects with an Error carrying the response body
 * when the backend reports a failure.
 */
export async function completeTenantSetup(email: string): Promise<void> {
  const res = await fetch(`/api/tenants/complete-setup`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ email }),
  });
  if (res.ok) {
    return;
  }
  const errorText = await res.text();
  throw new Error(`Failed to complete tenant setup: ${errorText}`);
}