Compare commits

...

1 Commits

Author SHA1 Message Date
Jamison Lahman
6f8b6024ea fix(platform): better default API Base URLs when running in docker 2026-04-09 22:27:27 +00:00
11 changed files with 52 additions and 15 deletions

View File

@@ -12,7 +12,7 @@ founders@onyx.app for more information. Please visit https://github.com/onyx-dot
ARG ENABLE_CRAFT=false
# DO_NOT_TRACK is used to disable telemetry for Unstructured
ENV DANSWER_RUNNING_IN_DOCKER="true" \
ENV ONYX_RUNNING_IN_DOCKER="true" \
DO_NOT_TRACK="true" \
PLAYWRIGHT_BROWSERS_PATH="/app/.cache/ms-playwright"

View File

@@ -1,7 +1,7 @@
# Base stage with dependencies
FROM python:3.11.7-slim-bookworm AS base
ENV DANSWER_RUNNING_IN_DOCKER="true" \
ENV ONYX_RUNNING_IN_DOCKER="true" \
HF_HOME=/app/.cache/huggingface
COPY --from=ghcr.io/astral-sh/uv:0.9.9 /uv /uvx /bin/

View File

@@ -5,7 +5,7 @@ from logging.handlers import RotatingFileHandler
import psutil
from onyx.utils.logger import is_running_in_container
from onyx.utils.platform import is_running_in_container
from onyx.utils.logger import setup_logger
# Regular application logger

View File

@@ -42,7 +42,7 @@ from onyx.db.models import UserGroup
from onyx.db.search_settings import get_active_search_settings_list
from onyx.redis.redis_pool import get_redis_client
from onyx.redis.redis_pool import redis_lock_dump
from onyx.utils.logger import is_running_in_container
from onyx.utils.platform import is_running_in_container
from onyx.utils.telemetry import optional_telemetry
from onyx.utils.telemetry import RecordType
from shared_configs.configs import MULTI_TENANT

View File

@@ -6,6 +6,7 @@ from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy.orm import Session
from onyx import __version__ as onyx_version
from onyx.utils.platform import is_running_in_container
from onyx.auth.permissions import require_permission
from onyx.auth.users import is_user_admin
from onyx.configs.app_configs import DEFAULT_USER_FILE_MAX_UPLOAD_SIZE_MB
@@ -111,6 +112,7 @@ def fetch_settings(
if DISABLE_VECTOR_DB
else DEFAULT_FILE_TOKEN_COUNT_THRESHOLD_K_VECTOR_DB
),
is_containerized=is_running_in_container(),
)

View File

@@ -131,3 +131,7 @@ class UserSettings(Settings):
else DEFAULT_FILE_TOKEN_COUNT_THRESHOLD_K_VECTOR_DB
)
)
# True when the backend is running inside a container (Docker/Podman).
# The frontend uses this to default local-service URLs (e.g. Ollama,
# LM Studio) to host.docker.internal instead of localhost.
is_containerized: bool = False

View File

@@ -169,11 +169,7 @@ def get_standard_formatter() -> ColoredFormatter:
)
DANSWER_DOCKER_ENV_STR = "DANSWER_RUNNING_IN_DOCKER"
def is_running_in_container() -> bool:
return os.getenv(DANSWER_DOCKER_ENV_STR) == "true"
from onyx.utils.platform import is_running_in_container # noqa: F401
def setup_logger(

View File

@@ -0,0 +1,25 @@
import logging
import os

logger = logging.getLogger(__name__)

# Canonical env var set by the project's Dockerfiles.
_ONYX_DOCKER_ENV_STR = "ONYX_RUNNING_IN_DOCKER"
# Legacy (Danswer-era) env var; still honored, but deprecated.
_DANSWER_DOCKER_ENV_STR = "DANSWER_RUNNING_IN_DOCKER"


def is_running_in_container() -> bool:
    """Return True when the backend is running inside a container.

    Checks the canonical ONYX env var first; if it is unset, falls back to
    the deprecated DANSWER env var (emitting a deprecation warning). Any
    value other than the exact string "true" counts as not containerized.
    """
    for env_name in (_ONYX_DOCKER_ENV_STR, _DANSWER_DOCKER_ENV_STR):
        value = os.getenv(env_name)
        if value is None:
            continue
        if env_name == _DANSWER_DOCKER_ENV_STR:
            # Only warn when the legacy var is actually consulted, i.e.
            # the canonical var is unset.
            logger.warning(
                "%s is deprecated and will be ignored in a future release. "
                "Use %s instead.",
                _DANSWER_DOCKER_ENV_STR,
                _ONYX_DOCKER_ENV_STR,
            )
        return value == "true"
    # Neither env var is set: assume a non-containerized environment.
    return False

View File

@@ -75,6 +75,10 @@ export interface Settings {
// Factory defaults for the restore button.
default_user_file_max_upload_size_mb?: number;
default_file_token_count_threshold_k?: number;
// True when the backend runs inside a container (Docker/Podman).
// Used to default local-service URLs to host.docker.internal.
is_containerized?: boolean;
}
export enum NotificationType {

View File

@@ -26,8 +26,7 @@ import {
import { fetchModels } from "@/lib/llmConfig/svc";
import { toast } from "@/hooks/useToast";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
const DEFAULT_API_BASE = "http://localhost:1234";
import { useSettingsContext } from "@/providers/SettingsProvider";
interface LMStudioModalValues extends BaseLLMModalValues {
api_base: string;
@@ -109,6 +108,10 @@ export default function LMStudioModal({
}: LLMProviderFormProps) {
const isOnboarding = variant === "onboarding";
const { mutate } = useSWRConfig();
const { settings } = useSettingsContext();
const defaultApiBase = settings.is_containerized
? "http://host.docker.internal:1234"
: "http://localhost:1234";
const onClose = () => onOpenChange?.(false);
@@ -118,7 +121,7 @@ export default function LMStudioModal({
LLMProviderName.LM_STUDIO,
existingLlmProvider
),
api_base: existingLlmProvider?.api_base ?? DEFAULT_API_BASE,
api_base: existingLlmProvider?.api_base ?? defaultApiBase,
custom_config: {
LM_STUDIO_API_KEY: existingLlmProvider?.custom_config?.LM_STUDIO_API_KEY,
},

View File

@@ -30,8 +30,7 @@ import { Card } from "@opal/components";
import { toast } from "@/hooks/useToast";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
import InputTypeInField from "@/refresh-components/form/InputTypeInField";
const DEFAULT_API_BASE = "http://127.0.0.1:11434";
import { useSettingsContext } from "@/providers/SettingsProvider";
const CLOUD_API_BASE = "https://ollama.com";
enum Tab {
@@ -156,6 +155,10 @@ export default function OllamaModal({
}: LLMProviderFormProps) {
const isOnboarding = variant === "onboarding";
const { mutate } = useSWRConfig();
const { settings } = useSettingsContext();
const defaultApiBase = settings.is_containerized
? "http://host.docker.internal:11434"
: "http://127.0.0.1:11434";
const apiKey = existingLlmProvider?.custom_config?.OLLAMA_API_KEY;
const defaultTab =
existingLlmProvider && !!apiKey ? Tab.TAB_CLOUD : Tab.TAB_SELF_HOSTED;
@@ -169,7 +172,7 @@ export default function OllamaModal({
LLMProviderName.OLLAMA_CHAT,
existingLlmProvider
),
api_base: existingLlmProvider?.api_base ?? DEFAULT_API_BASE,
api_base: existingLlmProvider?.api_base ?? defaultApiBase,
custom_config: {
OLLAMA_API_KEY: apiKey,
},