Compare commits

...

1 Commit

Author SHA1 Message Date
Raunak Bhagat
36a9f8d986 make default model global instead of per-provider
Remove the "Default" tag from provider cards and the sort that puts
the default provider first. Replace per-provider default_model_name
tracking in ModelsField with a global default comparison. For existing
providers, "Set as default" calls the API immediately and shows a toast.
For new providers, the choice is deferred until after form submission.
2026-04-06 18:07:06 -07:00
17 changed files with 407 additions and 138 deletions

View File

@@ -124,8 +124,8 @@ export interface LLMProviderFormProps {
shouldMarkAsDefault?: boolean;
onOpenChange?: (open: boolean) => void;
/** The current default model name for this provider (from the global default). */
defaultModelName?: string;
/** The current global default model (provider_id + model_name). */
globalDefault?: DefaultModel | null;
// Onboarding-specific (only when variant === "onboarding")
onboardingState?: OnboardingState;

View File

@@ -31,6 +31,7 @@ import ConfirmationModalLayout from "@/refresh-components/layouts/ConfirmationMo
import { useCreateModal } from "@/refresh-components/contexts/ModalContext";
import Separator from "@/refresh-components/Separator";
import {
DefaultModel,
LLMProviderView,
WellKnownLLMProviderDescriptor,
} from "@/interfaces/llm";
@@ -114,16 +115,14 @@ const PROVIDER_MODAL_MAP: Record<
interface ExistingProviderCardProps {
provider: LLMProviderView;
isDefault: boolean;
isLastProvider: boolean;
defaultModelName?: string;
globalDefault?: DefaultModel | null;
}
function ExistingProviderCard({
provider,
isDefault,
isLastProvider,
defaultModelName,
globalDefault,
}: ExistingProviderCardProps) {
const { mutate } = useSWRConfig();
const [isOpen, setIsOpen] = useState(false);
@@ -182,7 +181,6 @@ function ExistingProviderCard({
description={getProviderDisplayName(provider.provider)}
sizePreset="main-ui"
variant="section"
tag={isDefault ? { title: "Default", color: "blue" } : undefined}
rightChildren={
<div className="flex flex-row">
<Hoverable.Item
@@ -212,7 +210,7 @@ function ExistingProviderCard({
}
/>
{isOpen &&
getModalForExistingProvider(provider, setIsOpen, defaultModelName)}
getModalForExistingProvider(provider, setIsOpen, globalDefault)}
</SelectCard>
</Hoverable.Root>
</>
@@ -336,15 +334,6 @@ export default function LLMConfigurationPage() {
const hasProviders = existingLlmProviders.length > 0;
const isFirstProvider = !hasProviders;
// Pre-sort providers so the default appears first
const sortedProviders = [...existingLlmProviders].sort((a, b) => {
const aIsDefault = defaultText?.provider_id === a.id;
const bIsDefault = defaultText?.provider_id === b.id;
if (aIsDefault && !bIsDefault) return -1;
if (!aIsDefault && bIsDefault) return 1;
return 0;
});
// Pre-filter to providers that have at least one visible model
const providersWithVisibleModels = existingLlmProviders
.map((provider) => ({
@@ -438,17 +427,12 @@ export default function LLMConfigurationPage() {
/>
<div className="flex flex-col gap-2">
{sortedProviders.map((provider) => (
{existingLlmProviders.map((provider) => (
<ExistingProviderCard
key={provider.id}
provider={provider}
isDefault={defaultText?.provider_id === provider.id}
isLastProvider={sortedProviders.length === 1}
defaultModelName={
defaultText?.provider_id === provider.id
? defaultText.model_name
: undefined
}
isLastProvider={existingLlmProviders.length === 1}
globalDefault={defaultText}
/>
))}
</div>

View File

@@ -25,6 +25,9 @@ import {
SingleDefaultModelField,
LLMConfigurationModalWrapper,
} from "@/sections/modals/llmConfig/shared";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
import { toast } from "@/hooks/useToast";
const ANTHROPIC_PROVIDER_NAME = "anthropic";
const DEFAULT_DEFAULT_MODEL_NAME = "claude-sonnet-4-5";
@@ -34,13 +37,14 @@ export default function AnthropicModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const isOnboarding = variant === "onboarding";
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } = useWellKnownLLMProvider(
ANTHROPIC_PROVIDER_NAME
@@ -62,18 +66,11 @@ export default function AnthropicModal({
default_model_name: DEFAULT_DEFAULT_MODEL_NAME,
}
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
api_key: existingLlmProvider?.api_key ?? "",
api_base: existingLlmProvider?.api_base ?? undefined,
default_model_name:
(defaultModelName &&
modelConfigurations.some((m) => m.name === defaultModelName)
? defaultModelName
: undefined) ??
existingLlmProvider?.model_configurations?.[0]?.name ??
wellKnownLLMProvider?.recommended_default_model?.name ??
DEFAULT_DEFAULT_MODEL_NAME,
is_auto_mode: existingLlmProvider?.is_auto_mode ?? true,
@@ -119,7 +116,9 @@ export default function AnthropicModal({
initialValues,
modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault ? values.default_model_name : undefined),
setIsTesting,
mutate,
onClose,
@@ -158,6 +157,26 @@ export default function AnthropicModal({
wellKnownLLMProvider?.recommended_default_model ?? null
}
shouldShowAutoUpdateToggle={true}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(
existingLlmProvider.id,
modelName
);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
/>
)}

View File

@@ -38,6 +38,8 @@ import {
parseAzureTargetUri,
} from "@/lib/azureTargetUri";
import { toast } from "@/hooks/useToast";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
const AZURE_PROVIDER_NAME = "azure";
@@ -84,13 +86,14 @@ export default function AzureModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const isOnboarding = variant === "onboarding";
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } = useWellKnownLLMProvider(AZURE_PROVIDER_NAME);
@@ -123,11 +126,7 @@ export default function AzureModal({
default_model_name: "",
} as AzureModalValues)
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
api_key: existingLlmProvider?.api_key ?? "",
target_uri: buildTargetUri(existingLlmProvider),
};
@@ -186,7 +185,11 @@ export default function AzureModal({
initialValues,
modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault
? processedValues.default_model_name
: undefined),
setIsTesting,
mutate,
onClose,
@@ -237,6 +240,26 @@ export default function AzureModal({
formikProps={formikProps}
recommendedDefaultModel={null}
shouldShowAutoUpdateToggle={false}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(
existingLlmProvider.id,
modelName
);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
onAddModel={(modelName) => {
const newModel: ModelConfiguration = {
name: modelName,

View File

@@ -40,6 +40,8 @@ import { Card } from "@opal/components";
import { Section } from "@/layouts/general-layouts";
import { SvgAlertCircle } from "@opal/icons";
import { Content } from "@opal/layouts";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
import { toast } from "@/hooks/useToast";
import useOnMount from "@/hooks/useOnMount";
@@ -87,6 +89,9 @@ interface BedrockModalInternalsProps {
isTesting: boolean;
onClose: () => void;
isOnboarding: boolean;
onSetGlobalDefault?: (modelName: string) => void;
globalDefault?: { provider_id: number; model_name: string } | null;
providerId?: number;
}
function BedrockModalInternals({
@@ -98,6 +103,9 @@ function BedrockModalInternals({
isTesting,
onClose,
isOnboarding,
onSetGlobalDefault,
globalDefault,
providerId,
}: BedrockModalInternalsProps) {
const authMethod = formikProps.values.custom_config?.BEDROCK_AUTH_METHOD;
@@ -296,6 +304,9 @@ function BedrockModalInternals({
recommendedDefaultModel={null}
shouldShowAutoUpdateToggle={false}
onRefetch={isFetchDisabled ? undefined : handleFetchModels}
globalDefault={globalDefault}
providerId={providerId}
onSetGlobalDefault={onSetGlobalDefault}
/>
)}
@@ -314,13 +325,14 @@ export default function BedrockModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const [fetchedModels, setFetchedModels] = useState<ModelConfiguration[]>([]);
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const isOnboarding = variant === "onboarding";
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } = useWellKnownLLMProvider(
@@ -349,11 +361,7 @@ export default function BedrockModal({
},
} as BedrockModalValues)
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
custom_config: {
AWS_REGION_NAME:
(existingLlmProvider?.custom_config?.AWS_REGION_NAME as string) ??
@@ -428,7 +436,11 @@ export default function BedrockModal({
modelConfigurations:
fetchedModels.length > 0 ? fetchedModels : modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault
? submitValues.default_model_name
: undefined),
setIsTesting,
mutate,
onClose,
@@ -447,6 +459,23 @@ export default function BedrockModal({
isTesting={isTesting}
onClose={onClose}
isOnboarding={isOnboarding}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(existingLlmProvider.id, modelName);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
/>
)}
</Formik>

View File

@@ -37,6 +37,8 @@ import {
LLMConfigurationModalWrapper,
} from "@/sections/modals/llmConfig/shared";
import { toast } from "@/hooks/useToast";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
const BIFROST_PROVIDER_NAME = LLMProviderName.BIFROST;
const DEFAULT_API_BASE = "";
@@ -55,6 +57,9 @@ interface BifrostModalInternalsProps {
isTesting: boolean;
onClose: () => void;
isOnboarding: boolean;
onSetGlobalDefault?: (modelName: string) => void;
globalDefault?: { provider_id: number; model_name: string } | null;
providerId?: number;
}
function BifrostModalInternals({
@@ -66,6 +71,9 @@ function BifrostModalInternals({
isTesting,
onClose,
isOnboarding,
onSetGlobalDefault,
globalDefault,
providerId,
}: BifrostModalInternalsProps) {
const currentModels =
fetchedModels.length > 0
@@ -153,6 +161,9 @@ function BifrostModalInternals({
recommendedDefaultModel={null}
shouldShowAutoUpdateToggle={false}
onRefetch={isFetchDisabled ? undefined : handleFetchModels}
globalDefault={globalDefault}
providerId={providerId}
onSetGlobalDefault={onSetGlobalDefault}
/>
)}
@@ -171,13 +182,14 @@ export default function BifrostModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const [fetchedModels, setFetchedModels] = useState<ModelConfiguration[]>([]);
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const isOnboarding = variant === "onboarding";
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } = useWellKnownLLMProvider(
@@ -201,11 +213,7 @@ export default function BifrostModal({
default_model_name: "",
} as BifrostModalValues)
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
api_key: existingLlmProvider?.api_key ?? "",
api_base: existingLlmProvider?.api_base ?? DEFAULT_API_BASE,
};
@@ -249,7 +257,9 @@ export default function BifrostModal({
modelConfigurations:
fetchedModels.length > 0 ? fetchedModels : modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault ? values.default_model_name : undefined),
setIsTesting,
mutate,
onClose,
@@ -268,6 +278,23 @@ export default function BifrostModal({
isTesting={isTesting}
onClose={onClose}
isOnboarding={isOnboarding}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(existingLlmProvider.id, modelName);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
/>
)}
</Formik>

View File

@@ -211,22 +211,19 @@ export default function CustomModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
}: LLMProviderFormProps) {
const isOnboarding = variant === "onboarding";
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const { mutate } = useSWRConfig();
const onClose = () => onOpenChange?.(false);
const initialValues = {
...buildDefaultInitialValues(
existingLlmProvider,
undefined,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, undefined),
...(isOnboarding ? buildOnboardingInitialValues() : {}),
provider: existingLlmProvider?.provider ?? "",
model_configurations: existingLlmProvider?.model_configurations.map(
@@ -346,7 +343,11 @@ export default function CustomModal({
},
modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault
? values.default_model_name || modelConfigurations[0]?.name
: undefined),
setIsTesting,
mutate,
onClose,

View File

@@ -37,6 +37,8 @@ import {
import { fetchModels } from "@/app/admin/configuration/llm/utils";
import debounce from "lodash/debounce";
import { toast } from "@/hooks/useToast";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
const DEFAULT_API_BASE = "http://localhost:1234";
@@ -55,6 +57,9 @@ interface LMStudioFormInternalsProps {
isTesting: boolean;
onClose: () => void;
isOnboarding: boolean;
onSetGlobalDefault?: (modelName: string) => void;
globalDefault?: { provider_id: number; model_name: string } | null;
providerId?: number;
}
function LMStudioFormInternals({
@@ -65,6 +70,9 @@ function LMStudioFormInternals({
isTesting,
onClose,
isOnboarding,
onSetGlobalDefault,
globalDefault,
providerId,
}: LMStudioFormInternalsProps) {
const initialApiKey =
(existingLlmProvider?.custom_config?.LM_STUDIO_API_KEY as string) ?? "";
@@ -173,6 +181,9 @@ function LMStudioFormInternals({
formikProps={formikProps}
recommendedDefaultModel={null}
shouldShowAutoUpdateToggle={false}
globalDefault={globalDefault}
providerId={providerId}
onSetGlobalDefault={onSetGlobalDefault}
/>
)}
@@ -191,13 +202,14 @@ export default function LMStudioForm({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const [fetchedModels, setFetchedModels] = useState<ModelConfiguration[]>([]);
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const isOnboarding = variant === "onboarding";
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } = useWellKnownLLMProvider(
@@ -223,11 +235,7 @@ export default function LMStudioForm({
},
} as LMStudioFormValues)
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
api_base: existingLlmProvider?.api_base ?? DEFAULT_API_BASE,
custom_config: {
LM_STUDIO_API_KEY:
@@ -287,7 +295,11 @@ export default function LMStudioForm({
modelConfigurations:
fetchedModels.length > 0 ? fetchedModels : modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault
? submitValues.default_model_name
: undefined),
setIsTesting,
mutate,
onClose,
@@ -305,6 +317,23 @@ export default function LMStudioForm({
isTesting={isTesting}
onClose={onClose}
isOnboarding={isOnboarding}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(existingLlmProvider.id, modelName);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
/>
)}
</Formik>

View File

@@ -36,6 +36,8 @@ import {
LLMConfigurationModalWrapper,
} from "@/sections/modals/llmConfig/shared";
import { toast } from "@/hooks/useToast";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
const DEFAULT_API_BASE = "http://localhost:4000";
@@ -53,6 +55,9 @@ interface LiteLLMProxyModalInternalsProps {
isTesting: boolean;
onClose: () => void;
isOnboarding: boolean;
onSetGlobalDefault?: (modelName: string) => void;
globalDefault?: { provider_id: number; model_name: string } | null;
providerId?: number;
}
function LiteLLMProxyModalInternals({
@@ -64,6 +69,9 @@ function LiteLLMProxyModalInternals({
isTesting,
onClose,
isOnboarding,
onSetGlobalDefault,
globalDefault,
providerId,
}: LiteLLMProxyModalInternalsProps) {
const currentModels =
fetchedModels.length > 0
@@ -140,6 +148,9 @@ function LiteLLMProxyModalInternals({
recommendedDefaultModel={null}
shouldShowAutoUpdateToggle={false}
onRefetch={isFetchDisabled ? undefined : handleFetchModels}
globalDefault={globalDefault}
providerId={providerId}
onSetGlobalDefault={onSetGlobalDefault}
/>
)}
@@ -158,13 +169,14 @@ export default function LiteLLMProxyModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const [fetchedModels, setFetchedModels] = useState<ModelConfiguration[]>([]);
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const isOnboarding = variant === "onboarding";
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } = useWellKnownLLMProvider(
@@ -188,11 +200,7 @@ export default function LiteLLMProxyModal({
default_model_name: "",
} as LiteLLMProxyModalValues)
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
api_key: existingLlmProvider?.api_key ?? "",
api_base: existingLlmProvider?.api_base ?? DEFAULT_API_BASE,
};
@@ -238,7 +246,9 @@ export default function LiteLLMProxyModal({
modelConfigurations:
fetchedModels.length > 0 ? fetchedModels : modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault ? values.default_model_name : undefined),
setIsTesting,
mutate,
onClose,
@@ -257,6 +267,23 @@ export default function LiteLLMProxyModal({
isTesting={isTesting}
onClose={onClose}
isOnboarding={isOnboarding}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(existingLlmProvider.id, modelName);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
/>
)}
</Formik>

View File

@@ -37,6 +37,8 @@ import debounce from "lodash/debounce";
import Tabs from "@/refresh-components/Tabs";
import { Card } from "@opal/components";
import { toast } from "@/hooks/useToast";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
const OLLAMA_PROVIDER_NAME = "ollama_chat";
const DEFAULT_API_BASE = "http://127.0.0.1:11434";
@@ -58,6 +60,9 @@ interface OllamaModalInternalsProps {
isTesting: boolean;
onClose: () => void;
isOnboarding: boolean;
onSetGlobalDefault?: (modelName: string) => void;
globalDefault?: { provider_id: number; model_name: string } | null;
providerId?: number;
}
function OllamaModalInternals({
@@ -68,6 +73,9 @@ function OllamaModalInternals({
isTesting,
onClose,
isOnboarding,
onSetGlobalDefault,
globalDefault,
providerId,
}: OllamaModalInternalsProps) {
const isInitialMount = useRef(true);
@@ -193,6 +201,9 @@ function OllamaModalInternals({
formikProps={formikProps}
recommendedDefaultModel={null}
shouldShowAutoUpdateToggle={false}
globalDefault={globalDefault}
providerId={providerId}
onSetGlobalDefault={onSetGlobalDefault}
/>
)}
@@ -211,13 +222,14 @@ export default function OllamaModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const [fetchedModels, setFetchedModels] = useState<ModelConfiguration[]>([]);
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const isOnboarding = variant === "onboarding";
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } =
@@ -242,11 +254,7 @@ export default function OllamaModal({
},
} as OllamaModalValues)
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
api_base: existingLlmProvider?.api_base ?? DEFAULT_API_BASE,
custom_config: {
OLLAMA_API_KEY:
@@ -306,7 +314,11 @@ export default function OllamaModal({
modelConfigurations:
fetchedModels.length > 0 ? fetchedModels : modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault
? submitValues.default_model_name
: undefined),
setIsTesting,
mutate,
onClose,
@@ -324,6 +336,23 @@ export default function OllamaModal({
isTesting={isTesting}
onClose={onClose}
isOnboarding={isOnboarding}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(existingLlmProvider.id, modelName);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
/>
)}
</Formik>

View File

@@ -25,6 +25,9 @@ import {
SingleDefaultModelField,
LLMConfigurationModalWrapper,
} from "@/sections/modals/llmConfig/shared";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
import { toast } from "@/hooks/useToast";
const OPENAI_PROVIDER_NAME = "openai";
const DEFAULT_DEFAULT_MODEL_NAME = "gpt-5.2";
@@ -34,13 +37,14 @@ export default function OpenAIModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const isOnboarding = variant === "onboarding";
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } =
useWellKnownLLMProvider(OPENAI_PROVIDER_NAME);
@@ -61,17 +65,10 @@ export default function OpenAIModal({
default_model_name: DEFAULT_DEFAULT_MODEL_NAME,
}
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
api_key: existingLlmProvider?.api_key ?? "",
default_model_name:
(defaultModelName &&
modelConfigurations.some((m) => m.name === defaultModelName)
? defaultModelName
: undefined) ??
existingLlmProvider?.model_configurations?.[0]?.name ??
wellKnownLLMProvider?.recommended_default_model?.name ??
DEFAULT_DEFAULT_MODEL_NAME,
is_auto_mode: existingLlmProvider?.is_auto_mode ?? true,
@@ -117,7 +114,9 @@ export default function OpenAIModal({
initialValues,
modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault ? values.default_model_name : undefined),
setIsTesting,
mutate,
onClose,
@@ -156,6 +155,26 @@ export default function OpenAIModal({
wellKnownLLMProvider?.recommended_default_model ?? null
}
shouldShowAutoUpdateToggle={true}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(
existingLlmProvider.id,
modelName
);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
/>
)}

View File

@@ -35,6 +35,8 @@ import {
LLMConfigurationModalWrapper,
} from "@/sections/modals/llmConfig/shared";
import { toast } from "@/hooks/useToast";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
const OPENROUTER_PROVIDER_NAME = "openrouter";
const DEFAULT_API_BASE = "https://openrouter.ai/api/v1";
@@ -52,6 +54,9 @@ interface OpenRouterModalInternalsProps {
isTesting: boolean;
onClose: () => void;
isOnboarding: boolean;
onSetGlobalDefault?: (modelName: string) => void;
globalDefault?: { provider_id: number; model_name: string } | null;
providerId?: number;
}
function OpenRouterModalInternals({
@@ -63,6 +68,9 @@ function OpenRouterModalInternals({
isTesting,
onClose,
isOnboarding,
onSetGlobalDefault,
globalDefault,
providerId,
}: OpenRouterModalInternalsProps) {
const currentModels =
fetchedModels.length > 0
@@ -139,6 +147,9 @@ function OpenRouterModalInternals({
recommendedDefaultModel={null}
shouldShowAutoUpdateToggle={false}
onRefetch={isFetchDisabled ? undefined : handleFetchModels}
globalDefault={globalDefault}
providerId={providerId}
onSetGlobalDefault={onSetGlobalDefault}
/>
)}
@@ -157,13 +168,14 @@ export default function OpenRouterModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const [fetchedModels, setFetchedModels] = useState<ModelConfiguration[]>([]);
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const isOnboarding = variant === "onboarding";
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } = useWellKnownLLMProvider(
@@ -187,11 +199,7 @@ export default function OpenRouterModal({
default_model_name: "",
} as OpenRouterModalValues)
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
api_key: existingLlmProvider?.api_key ?? "",
api_base: existingLlmProvider?.api_base ?? DEFAULT_API_BASE,
};
@@ -237,7 +245,9 @@ export default function OpenRouterModal({
modelConfigurations:
fetchedModels.length > 0 ? fetchedModels : modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault ? values.default_model_name : undefined),
setIsTesting,
mutate,
onClose,
@@ -256,6 +266,23 @@ export default function OpenRouterModal({
isTesting={isTesting}
onClose={onClose}
isOnboarding={isOnboarding}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(existingLlmProvider.id, modelName);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
/>
)}
</Formik>

View File

@@ -29,6 +29,9 @@ import {
SingleDefaultModelField,
LLMConfigurationModalWrapper,
} from "@/sections/modals/llmConfig/shared";
import { setDefaultLlmModel } from "@/lib/llmConfig/svc";
import { refreshLlmProviderCaches } from "@/lib/llmConfig/cache";
import { toast } from "@/hooks/useToast";
const VERTEXAI_PROVIDER_NAME = "vertex_ai";
const VERTEXAI_DISPLAY_NAME = "Google Cloud Vertex AI";
@@ -47,13 +50,14 @@ export default function VertexAIModal({
existingLlmProvider,
shouldMarkAsDefault,
onOpenChange,
defaultModelName,
globalDefault,
onboardingState,
onboardingActions,
llmDescriptor,
}: LLMProviderFormProps) {
const isOnboarding = variant === "onboarding";
const [isTesting, setIsTesting] = useState(false);
const [pendingDefault, setPendingDefault] = useState<string | null>(null);
const { mutate } = useSWRConfig();
const { wellKnownLLMProvider } = useWellKnownLLMProvider(
VERTEXAI_PROVIDER_NAME
@@ -78,16 +82,9 @@ export default function VertexAIModal({
},
} as VertexAIModalValues)
: {
...buildDefaultInitialValues(
existingLlmProvider,
modelConfigurations,
defaultModelName
),
...buildDefaultInitialValues(existingLlmProvider, modelConfigurations),
default_model_name:
(defaultModelName &&
modelConfigurations.some((m) => m.name === defaultModelName)
? defaultModelName
: undefined) ??
existingLlmProvider?.model_configurations?.[0]?.name ??
wellKnownLLMProvider?.recommended_default_model?.name ??
VERTEXAI_DEFAULT_MODEL,
is_auto_mode: existingLlmProvider?.is_auto_mode ?? true,
@@ -165,7 +162,11 @@ export default function VertexAIModal({
initialValues,
modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName:
pendingDefault ??
(shouldMarkAsDefault
? submitValues.default_model_name
: undefined),
setIsTesting,
mutate,
onClose,
@@ -229,6 +230,26 @@ export default function VertexAIModal({
wellKnownLLMProvider?.recommended_default_model ?? null
}
shouldShowAutoUpdateToggle={true}
globalDefault={globalDefault}
providerId={existingLlmProvider?.id}
onSetGlobalDefault={
existingLlmProvider
? async (modelName) => {
try {
await setDefaultLlmModel(
existingLlmProvider.id,
modelName
);
await refreshLlmProviderCaches(mutate);
toast.success("Default model updated successfully!");
} catch (e) {
const msg =
e instanceof Error ? e.message : "Unknown error";
toast.error(`Failed to set default model: ${msg}`);
}
}
: (modelName) => setPendingDefault(modelName)
}
/>
)}

View File

@@ -1,4 +1,8 @@
import { LLMProviderName, LLMProviderView } from "@/interfaces/llm";
import {
DefaultModel,
LLMProviderName,
LLMProviderView,
} from "@/interfaces/llm";
import AnthropicModal from "@/sections/modals/llmConfig/AnthropicModal";
import OpenAIModal from "@/sections/modals/llmConfig/OpenAIModal";
import OllamaModal from "@/sections/modals/llmConfig/OllamaModal";
@@ -23,12 +27,12 @@ function detectIfRealOpenAIProvider(provider: LLMProviderView) {
export function getModalForExistingProvider(
provider: LLMProviderView,
onOpenChange?: (open: boolean) => void,
defaultModelName?: string
globalDefault?: DefaultModel | null
) {
const props = {
existingLlmProvider: provider,
onOpenChange,
defaultModelName,
globalDefault,
};
switch (provider.provider) {

View File

@@ -5,7 +5,11 @@ import { Form, FormikProps } from "formik";
import { usePaidEnterpriseFeaturesEnabled } from "@/components/settings/usePaidEnterpriseFeaturesEnabled";
import { useAgents } from "@/hooks/useAgents";
import { useUserGroups } from "@/lib/hooks";
import { ModelConfiguration, SimpleKnownModel } from "@/interfaces/llm";
import {
DefaultModel,
ModelConfiguration,
SimpleKnownModel,
} from "@/interfaces/llm";
import * as InputLayouts from "@/layouts/input-layouts";
import Checkbox from "@/refresh-components/inputs/Checkbox";
import InputTypeInField from "@/refresh-components/form/InputTypeInField";
@@ -375,6 +379,12 @@ export interface ModelsFieldProps<T> {
modelConfigurations: ModelConfiguration[];
recommendedDefaultModel: SimpleKnownModel | null;
shouldShowAutoUpdateToggle: boolean;
/** The current global default model. */
globalDefault?: DefaultModel | null;
/** The provider ID for this modal (set for existing providers). */
providerId?: number;
/** Called when the user clicks "Set as default" on a model. */
onSetGlobalDefault?: (modelName: string) => void;
/** Called when the user clicks the refresh button to re-fetch models. */
onRefetch?: () => Promise<void> | void;
/** Called when the user adds a custom model by name. Enables the "Add Model" input. */
@@ -386,13 +396,23 @@ export function ModelsField<T extends BaseLLMFormValues>({
modelConfigurations,
recommendedDefaultModel,
shouldShowAutoUpdateToggle,
globalDefault,
providerId,
onSetGlobalDefault,
onRefetch,
onAddModel,
}: ModelsFieldProps<T>) {
const [newModelName, setNewModelName] = useState("");
const isAutoMode = formikProps.values.is_auto_mode;
const selectedModels = formikProps.values.selected_model_names ?? [];
const defaultModel = formikProps.values.default_model_name;
// A model is the global default if it belongs to this provider and matches
// the global default model name.
const isGlobalDefault = (modelName: string) =>
globalDefault != null &&
providerId != null &&
globalDefault.provider_id === providerId &&
globalDefault.model_name === modelName;
function handleCheckboxChange(modelName: string, checked: boolean) {
// Read current values inside the handler to avoid stale closure issues
@@ -511,10 +531,27 @@ export function ModelsField<T extends BaseLLMFormValues>({
icon={() => <Checkbox checked />}
title={model.display_name || model.name}
rightChildren={
model.name === defaultModel ? (
isGlobalDefault(model.name) ? (
<Section>
<Tag title="Default Model" color="blue" />
</Section>
) : onSetGlobalDefault ? (
<Hoverable.Item
group="LLMConfigurationButton"
variant="opacity-on-hover"
>
<Button
size="sm"
prominence="internal"
onClick={(e) => {
e.stopPropagation();
onSetGlobalDefault(model.name);
}}
type="button"
>
Set as default
</Button>
</Hoverable.Item>
) : undefined
}
/>
@@ -525,7 +562,6 @@ export function ModelsField<T extends BaseLLMFormValues>({
const isSelected = selectedModels.includes(
modelConfiguration.name
);
const isDefault = defaultModel === modelConfiguration.name;
return (
<Hoverable.Root
@@ -548,11 +584,11 @@ export function ModelsField<T extends BaseLLMFormValues>({
}
rightChildren={
isSelected ? (
isDefault ? (
isGlobalDefault(modelConfiguration.name) ? (
<Section>
<Tag color="blue" title="Default Model" />
</Section>
) : (
) : onSetGlobalDefault ? (
<Hoverable.Item
group="LLMConfigurationButton"
variant="opacity-on-hover"
@@ -562,14 +598,14 @@ export function ModelsField<T extends BaseLLMFormValues>({
prominence="internal"
onClick={(e) => {
e.stopPropagation();
handleSetDefault(modelConfiguration.name);
onSetGlobalDefault(modelConfiguration.name);
}}
type="button"
>
Set as default
</Button>
</Hoverable.Item>
)
) : undefined
) : undefined
}
/>

View File

@@ -56,7 +56,7 @@ export const submitLLMProvider = async <T extends BaseLLMFormValues>({
initialValues,
modelConfigurations,
existingLlmProvider,
shouldMarkAsDefault,
pendingDefaultModelName,
hideSuccess,
setIsTesting,
mutate,
@@ -166,7 +166,7 @@ export const submitLLMProvider = async <T extends BaseLLMFormValues>({
return;
}
if (shouldMarkAsDefault) {
if (pendingDefaultModelName) {
const newLlmProvider = (await response.json()) as LLMProviderView;
const setDefaultResponse = await fetch(`${LLM_ADMIN_URL}/default`, {
method: "POST",
@@ -175,12 +175,12 @@ export const submitLLMProvider = async <T extends BaseLLMFormValues>({
},
body: JSON.stringify({
provider_id: newLlmProvider.id,
model_name: finalDefaultModelName,
model_name: pendingDefaultModelName,
}),
});
if (!setDefaultResponse.ok) {
const errorMsg = (await setDefaultResponse.json()).detail;
toast.error(`Failed to set provider as default: ${errorMsg}`);
toast.error(`Failed to set default model: ${errorMsg}`);
return;
}
}

View File

@@ -12,16 +12,9 @@ export const LLM_FORM_CLASS_NAME = "flex flex-col gap-y-4 items-stretch mt-6";
export const buildDefaultInitialValues = (
existingLlmProvider?: LLMProviderView,
modelConfigurations?: ModelConfiguration[],
currentDefaultModelName?: string
modelConfigurations?: ModelConfiguration[]
) => {
const defaultModelName =
(currentDefaultModelName &&
existingLlmProvider?.model_configurations?.some(
(m) => m.name === currentDefaultModelName
)
? currentDefaultModelName
: undefined) ??
existingLlmProvider?.model_configurations?.[0]?.name ??
modelConfigurations?.[0]?.name ??
"";
@@ -104,7 +97,8 @@ export interface SubmitLLMProviderParams<
initialValues: T;
modelConfigurations: ModelConfiguration[];
existingLlmProvider?: LLMProviderView;
shouldMarkAsDefault?: boolean;
/** When set, the given model will be marked as the global default after provider creation. */
pendingDefaultModelName?: string;
hideSuccess?: boolean;
setIsTesting: (testing: boolean) => void;
mutate: ScopedMutator;