Compare commits

..

4 Commits

Author SHA1 Message Date
Weves
5f82de7c45 Debug test 2024-09-23 11:05:27 -07:00
pablodanswer
45f67368a2 Add support for o1 (#2538)
* add o1 support + bump litellm/openai

* ports

* update exception message for testing
2024-09-22 23:16:28 +00:00
pablodanswer
014ba9e220 Begin distinguishing upsert operations for clarity (#2535)
* additional clarity for llm provider creation / updates

* update provider APIs

* update typing (minor)
2024-09-21 22:36:22 +00:00
pablodanswer
ba64543dd7 Updated modals for clarity (#2529)
* updated modals for clarity

* fix build
2024-09-21 19:55:54 +00:00
20 changed files with 102 additions and 474 deletions

View File

@@ -135,7 +135,7 @@ POSTGRES_PASSWORD = urllib.parse.quote_plus(
os.environ.get("POSTGRES_PASSWORD") or "password"
)
POSTGRES_HOST = os.environ.get("POSTGRES_HOST") or "localhost"
POSTGRES_PORT = os.environ.get("POSTGRES_PORT") or "5433"
POSTGRES_PORT = os.environ.get("POSTGRES_PORT") or "5432"
POSTGRES_DB = os.environ.get("POSTGRES_DB") or "postgres"
# defaults to False
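A small aside on the os.environ.get(...) or "default" pattern used in this config file: unlike a plain default argument to os.environ.get, it also falls back when the variable is set but empty, which matters for compose files that pass blank values through. A minimal, self-contained illustration (not part of the diff):

import os

os.environ["POSTGRES_PORT"] = ""                      # set but blank, e.g. via docker-compose
print(os.environ.get("POSTGRES_PORT", "5432"))        # -> "" (the plain default is skipped)
print(os.environ.get("POSTGRES_PORT") or "5432")      # -> "5432" (the or-fallback still applies)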

View File

@@ -137,8 +137,8 @@ def get_sqlalchemy_engine() -> Engine:
)
_SYNC_ENGINE = create_engine(
connection_string,
pool_size=40,
max_overflow=10,
pool_size=5,
max_overflow=0,
pool_pre_ping=POSTGRES_POOL_PRE_PING,
pool_recycle=POSTGRES_POOL_RECYCLE,
)
@@ -156,8 +156,8 @@ def get_sqlalchemy_async_engine() -> AsyncEngine:
connect_args={
"server_settings": {"application_name": POSTGRES_APP_NAME + "_async"}
},
pool_size=40,
max_overflow=10,
pool_size=5,
max_overflow=0,
pool_pre_ping=POSTGRES_POOL_PRE_PING,
pool_recycle=POSTGRES_POOL_RECYCLE,
)
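For context on the scale of this change: each API process builds both a sync and an async engine with these settings (the two hunks above), so the per-process connection ceiling drops sharply. A back-of-the-envelope sketch, with nothing measured and the two-engine count taken from the hunks themselves:

# Upper bound on Postgres connections a single process can hold open,
# assuming one sync engine and one async engine as configured above.
engines = 2
old_cap = engines * (40 + 10)   # pool_size + max_overflow -> 100 connections
new_cap = engines * (5 + 0)     # -> 10 connections
print(old_cap, new_cap)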

View File

@@ -62,13 +62,21 @@ def upsert_cloud_embedding_provider(
def upsert_llm_provider(
llm_provider: LLMProviderUpsertRequest, db_session: Session
llm_provider: LLMProviderUpsertRequest,
db_session: Session,
is_creation: bool = True,
) -> FullLLMProvider:
existing_llm_provider = db_session.scalar(
select(LLMProviderModel).where(LLMProviderModel.name == llm_provider.name)
)
if existing_llm_provider and is_creation:
raise ValueError(f"LLM Provider with name {llm_provider.name} already exists")
if not existing_llm_provider:
if not is_creation:
raise ValueError(
f"LLM Provider with name {llm_provider.name} does not exist"
)
existing_llm_provider = LLMProviderModel(name=llm_provider.name)
db_session.add(existing_llm_provider)
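The net effect is that the old blind upsert now has two explicit modes, each of which fails fast instead of silently creating or overwriting a provider. A standalone sketch of the new decision logic, with the ORM model and session replaced by a plain dictionary for illustration (the helper name and values here are made up, not from the diff):

def resolve_provider(existing: dict | None, name: str, is_creation: bool) -> dict:
    # Mirrors the branching added above, minus the database plumbing.
    if existing is not None and is_creation:
        raise ValueError(f"LLM Provider with name {name} already exists")
    if existing is None:
        if not is_creation:
            raise ValueError(f"LLM Provider with name {name} does not exist")
        existing = {"name": name}   # stands in for LLMProviderModel(name=name)
    return existing

resolve_provider(None, "openai", is_creation=True)                  # creates
resolve_provider({"name": "openai"}, "openai", is_creation=False)   # updates
# resolve_provider({"name": "openai"}, "openai", is_creation=True)  # would raise "already exists"
# resolve_provider(None, "openai", is_creation=False)               # would raise "does not exist"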

View File

@@ -24,6 +24,8 @@ class WellKnownLLMProviderDescriptor(BaseModel):
OPENAI_PROVIDER_NAME = "openai"
OPEN_AI_MODEL_NAMES = [
"o1-mini",
"o1-preview",
"gpt-4",
"gpt-4o",
"gpt-4o-mini",

View File

@@ -47,7 +47,9 @@ if TYPE_CHECKING:
logger = setup_logger()
def litellm_exception_to_error_msg(e: Exception, llm: LLM) -> str:
def litellm_exception_to_error_msg(
e: Exception, llm: LLM, fallback_to_error_msg: bool = False
) -> str:
error_msg = str(e)
if isinstance(e, BadRequestError):
@@ -94,7 +96,7 @@ def litellm_exception_to_error_msg(e: Exception, llm: LLM) -> str:
error_msg = "Request timed out: The operation took too long to complete. Please try again."
elif isinstance(e, APIError):
error_msg = f"API error: An error occurred while communicating with the API. Details: {str(e)}"
else:
elif not fallback_to_error_msg:
error_msg = "An unexpected error occurred while processing your request. Please try again later."
return error_msg
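The new flag only changes the final branch: when none of the known litellm exception types match, the raw error text is returned instead of the generic message. A simplified, self-contained sketch of that control flow (this is not the real function; only two branches are kept, and the sample error text is made up):

def exception_to_client_msg(e: Exception, fallback_to_error_msg: bool = False) -> str:
    error_msg = str(e)
    if isinstance(e, TimeoutError):
        error_msg = "Request timed out: The operation took too long to complete. Please try again."
    elif not fallback_to_error_msg:
        # previous behavior for every unrecognized exception type
        error_msg = "An unexpected error occurred while processing your request. Please try again later."
    return error_msg

err = RuntimeError("invalid request: o1-mini does not support system messages")
print(exception_to_client_msg(err))                              # generic message
print(exception_to_client_msg(err, fallback_to_error_msg=True))  # raw error, as the admin test endpoint now returns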

View File

@@ -51,7 +51,6 @@ from danswer.db.credentials import create_initial_public_credential
from danswer.db.document import check_docs_exist
from danswer.db.engine import get_sqlalchemy_engine
from danswer.db.engine import init_sqlalchemy_engine
from danswer.db.engine import warm_up_connections
from danswer.db.index_attempt import cancel_indexing_attempts_past_model
from danswer.db.index_attempt import expire_index_attempts
from danswer.db.llm import fetch_default_provider
@@ -369,7 +368,7 @@ async def lifespan(app: FastAPI) -> AsyncGenerator:
logger.notice("Generative AI Q&A disabled")
# fill up Postgres connection pools
await warm_up_connections()
# await warm_up_connections()
# We cache this at the beginning so there is no delay in the first telemetry
get_or_generate_uuid()

View File

@@ -3,6 +3,7 @@ from collections.abc import Callable
from fastapi import APIRouter
from fastapi import Depends
from fastapi import HTTPException
from fastapi import Query
from sqlalchemy.orm import Session
from danswer.auth.users import current_admin_user
@@ -17,6 +18,7 @@ from danswer.llm.factory import get_default_llms
from danswer.llm.factory import get_llm
from danswer.llm.llm_provider_options import fetch_available_well_known_llms
from danswer.llm.llm_provider_options import WellKnownLLMProviderDescriptor
from danswer.llm.utils import litellm_exception_to_error_msg
from danswer.llm.utils import test_llm
from danswer.server.manage.llm.models import FullLLMProvider
from danswer.server.manage.llm.models import LLMProviderDescriptor
@@ -77,7 +79,10 @@ def test_llm_configuration(
)
if error:
raise HTTPException(status_code=400, detail=error)
client_error_msg = litellm_exception_to_error_msg(
error, llm, fallback_to_error_msg=True
)
raise HTTPException(status_code=400, detail=client_error_msg)
@admin_router.post("/test/default")
@@ -118,10 +123,22 @@ def list_llm_providers(
@admin_router.put("/provider")
def put_llm_provider(
llm_provider: LLMProviderUpsertRequest,
is_creation: bool = Query(
False,
description="True if creating a new provider, False if updating an existing one",
),
_: User | None = Depends(current_admin_user),
db_session: Session = Depends(get_session),
) -> FullLLMProvider:
return upsert_llm_provider(llm_provider=llm_provider, db_session=db_session)
try:
return upsert_llm_provider(
llm_provider=llm_provider,
db_session=db_session,
is_creation=is_creation,
)
except ValueError as e:
logger.exception("Failed to upsert LLM Provider")
raise HTTPException(status_code=400, detail=str(e))
@admin_router.delete("/provider/{provider_id}")
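From a client's perspective, the PUT now distinguishes create from update via the query parameter and surfaces the upsert's ValueError as a 400. A hedged sketch of calling it with requests; the base URL, payload fields, and any auth are assumptions for illustration, not taken from this diff:

import requests

BASE_URL = "http://localhost:8080/api/admin/llm"   # assumed prefix for admin_router
payload = {"name": "openai", "provider": "openai", "api_key": "sk-..."}  # illustrative fields

# Explicitly creating a provider (mirrors the frontend's "?is_creation=true")
resp = requests.put(f"{BASE_URL}/provider", params={"is_creation": "true"}, json=payload)

# Updating an existing provider: omit the flag (or send "false")
resp = requests.put(f"{BASE_URL}/provider", params={"is_creation": "false"}, json=payload)

if resp.status_code == 400:
    print(resp.json()["detail"])   # e.g. "LLM Provider with name openai does not exist"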

View File

@@ -204,7 +204,6 @@ def build_custom_tools_from_openapi_schema_and_headers(
url = openapi_to_url(openapi_schema)
method_specs = openapi_to_method_specs(openapi_schema)
return [
CustomTool(method_spec, url, custom_headers) for method_spec in method_specs
]

View File

@@ -16,7 +16,6 @@ class MethodSpec(BaseModel):
summary: str
path: str
method: str
body_schema: dict[str, Any] = {}
spec: dict[str, Any]
def get_request_body_schema(self) -> dict[str, Any]:
@@ -88,8 +87,6 @@ class MethodSpec(BaseModel):
tool_definition["function"]["parameters"]["properties"].update(
{param["name"]: param["schema"] for param in path_param_schemas}
)
print(tool_definition)
print("")
return tool_definition
def validate_spec(self) -> None:

View File

@@ -28,14 +28,14 @@ jsonref==1.1.0
langchain==0.1.17
langchain-core==0.1.50
langchain-text-splitters==0.0.1
litellm==1.43.18
litellm==1.47.1
llama-index==0.9.45
Mako==1.2.4
msal==1.28.0
nltk==3.8.1
Office365-REST-Python-Client==2.5.9
oauthlib==3.2.2
openai==1.41.1
openai==1.47.0
openpyxl==3.1.2
playwright==1.41.2
psutil==5.9.5

View File

@@ -3,7 +3,7 @@ einops==0.8.0
fastapi==0.109.2
google-cloud-aiplatform==1.58.0
numpy==1.26.4
openai==1.41.1
openai==1.47.0
pydantic==2.8.2
retry==0.9.2
safetensors==0.4.2

View File

@@ -1,219 +0,0 @@
import unittest
from unittest.mock import patch
import pytest
from danswer.tools.custom.custom_tool import (
build_custom_tools_from_openapi_schema_and_headers,
)
from danswer.tools.custom.custom_tool import CUSTOM_TOOL_RESPONSE_ID
from danswer.tools.custom.custom_tool import CustomToolCallSummary
from danswer.tools.custom.custom_tool import validate_openapi_schema
from danswer.tools.models import DynamicSchemaInfo
from danswer.tools.tool import ToolResponse
class TestCustomTool(unittest.TestCase):
"""
Test suite for CustomTool functionality.
This class tests the creation, running, and result handling of custom tools
based on OpenAPI schemas.
"""
def setUp(self):
"""
Set up the test environment before each test method.
Initializes an OpenAPI schema and DynamicSchemaInfo for testing.
"""
self.openapi_schema = {
"openapi": "3.0.0",
"info": {
"version": "1.0.0",
"title": "Assistants API",
"description": "An API for managing assistants",
},
"servers": [
{"url": "http://localhost:8080/CHAT_SESSION_ID/test/MESSAGE_ID"},
],
"paths": {
"/assistant/{assistant_id}": {
"GET": {
"summary": "Get a specific Assistant",
"operationId": "getAssistant",
"parameters": [
{
"name": "assistant_id",
"in": "path",
"required": True,
"schema": {"type": "string"},
}
],
},
"POST": {
"summary": "Create a new Assistant",
"operationId": "createAssistant",
"parameters": [
{
"name": "assistant_id",
"in": "path",
"required": True,
"schema": {"type": "string"},
}
],
"requestBody": {
"required": True,
"content": {
"application/json": {"schema": {"type": "object"}}
},
},
},
}
},
}
validate_openapi_schema(self.openapi_schema)
self.dynamic_schema_info = DynamicSchemaInfo(chat_session_id=10, message_id=20)
@patch("danswer.tools.custom.custom_tool.requests.request")
def test_custom_tool_run_get(self, mock_request):
"""
Test the GET method of a custom tool.
Verifies that the tool correctly constructs the URL and makes the GET request.
"""
tools = build_custom_tools_from_openapi_schema_and_headers(
self.openapi_schema, dynamic_schema_info=self.dynamic_schema_info
)
result = list(tools[0].run(assistant_id="123"))
expected_url = f"http://localhost:8080/{self.dynamic_schema_info.chat_session_id}/test/{self.dynamic_schema_info.message_id}/assistant/123"
mock_request.assert_called_once_with("GET", expected_url, json=None, headers={})
self.assertEqual(
len(result), 1, "Expected exactly one result from the tool run"
)
self.assertEqual(
result[0].id,
CUSTOM_TOOL_RESPONSE_ID,
"Tool response ID does not match expected value",
)
self.assertEqual(
result[0].response.tool_name,
"getAssistant",
"Tool name in response does not match expected value",
)
@patch("danswer.tools.custom.custom_tool.requests.request")
def test_custom_tool_run_post(self, mock_request):
"""
Test the POST method of a custom tool.
Verifies that the tool correctly constructs the URL and makes the POST request with the given body.
"""
tools = build_custom_tools_from_openapi_schema_and_headers(
self.openapi_schema, dynamic_schema_info=self.dynamic_schema_info
)
result = list(tools[1].run(assistant_id="456"))
expected_url = f"http://localhost:8080/{self.dynamic_schema_info.chat_session_id}/test/{self.dynamic_schema_info.message_id}/assistant/456"
mock_request.assert_called_once_with(
"POST", expected_url, json=None, headers={}
)
self.assertEqual(
len(result), 1, "Expected exactly one result from the tool run"
)
self.assertEqual(
result[0].id,
CUSTOM_TOOL_RESPONSE_ID,
"Tool response ID does not match expected value",
)
self.assertEqual(
result[0].response.tool_name,
"createAssistant",
"Tool name in response does not match expected value",
)
@patch("danswer.tools.custom.custom_tool.requests.request")
def test_custom_tool_with_headers(self, mock_request):
"""
Test the custom tool with custom headers.
Verifies that the tool correctly includes the custom headers in the request.
"""
custom_headers = [
{"key": "Authorization", "value": "Bearer token123"},
{"key": "Custom-Header", "value": "CustomValue"},
]
tools = build_custom_tools_from_openapi_schema_and_headers(
self.openapi_schema,
custom_headers=custom_headers,
dynamic_schema_info=self.dynamic_schema_info,
)
list(tools[0].run(assistant_id="123"))
expected_url = f"http://localhost:8080/{self.dynamic_schema_info.chat_session_id}/test/{self.dynamic_schema_info.message_id}/assistant/123"
expected_headers = {
"Authorization": "Bearer token123",
"Custom-Header": "CustomValue",
}
mock_request.assert_called_once_with(
"GET", expected_url, json=None, headers=expected_headers
)
@patch("danswer.tools.custom.custom_tool.requests.request")
def test_custom_tool_with_empty_headers(self, mock_request):
"""
Test the custom tool with an empty list of custom headers.
Verifies that the tool correctly handles an empty list of headers.
"""
custom_headers = []
tools = build_custom_tools_from_openapi_schema_and_headers(
self.openapi_schema,
custom_headers=custom_headers,
dynamic_schema_info=self.dynamic_schema_info,
)
list(tools[0].run(assistant_id="123"))
expected_url = f"http://localhost:8080/{self.dynamic_schema_info.chat_session_id}/test/{self.dynamic_schema_info.message_id}/assistant/123"
mock_request.assert_called_once_with("GET", expected_url, json=None, headers={})
def test_invalid_openapi_schema(self):
"""
Test that an invalid OpenAPI schema raises a ValueError.
"""
invalid_schema = {
"openapi": "3.0.0",
"info": {
"version": "1.0.0",
"title": "Invalid API",
},
# Missing required 'paths' key
}
with self.assertRaises(ValueError) as _:
validate_openapi_schema(invalid_schema)
def test_custom_tool_final_result(self):
"""
Test the final_result method of a custom tool.
Verifies that the method correctly extracts and returns the tool result.
"""
tools = build_custom_tools_from_openapi_schema_and_headers(
self.openapi_schema, dynamic_schema_info=self.dynamic_schema_info
)
mock_response = ToolResponse(
id=CUSTOM_TOOL_RESPONSE_ID,
response=CustomToolCallSummary(
tool_name="getAssistant",
tool_result={"id": "789", "name": "Final Assistant"},
),
)
final_result = tools[0].final_result(mock_response)
self.assertEqual(
final_result,
{"id": "789", "name": "Final Assistant"},
"Final result does not match expected output",
)
if __name__ == "__main__":
pytest.main([__file__])

View File

@@ -293,7 +293,7 @@ services:
- POSTGRES_USER=${POSTGRES_USER:-postgres}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-password}
ports:
- "5433:5432"
- "5432:5432"
volumes:
- db_volume:/var/lib/postgresql/data

View File

@@ -303,7 +303,7 @@ services:
- POSTGRES_USER=${POSTGRES_USER:-postgres}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-password}
ports:
- "5433:5432"
- "5432:5432"
volumes:
- db_volume:/var/lib/postgresql/data

View File

@@ -154,7 +154,7 @@ services:
- POSTGRES_USER=${POSTGRES_USER:-postgres}
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD:-password}
ports:
- "5433"
- "5432"
volumes:
- db_volume:/var/lib/postgresql/data

View File

@@ -150,18 +150,21 @@ export function LLMProviderUpdateForm({
}
}
const response = await fetch(LLM_PROVIDERS_ADMIN_URL, {
method: "PUT",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
provider: llmProviderDescriptor.name,
...values,
fast_default_model_name:
values.fast_default_model_name || values.default_model_name,
}),
});
const response = await fetch(
`${LLM_PROVIDERS_ADMIN_URL}${existingLlmProvider ? "" : "?is_creation=true"}`,
{
method: "PUT",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
provider: llmProviderDescriptor.name,
...values,
fast_default_model_name:
values.fast_default_model_name || values.default_model_name,
}),
}
);
if (!response.ok) {
const errorMsg = (await response.json()).detail;

View File

@@ -24,50 +24,21 @@ export function _CompletedWelcomeFlowDummyComponent() {
return null;
}
function UsageTypeSection({
title,
description,
callToAction,
onClick,
}: {
title: string;
description: string | JSX.Element;
callToAction: string;
onClick: () => void;
}) {
return (
<div>
<Text className="font-bold">{title}</Text>
<div className="text-base mt-1 mb-3">{description}</div>
<div
onClick={(e) => {
e.preventDefault();
onClick();
}}
>
<div className="text-link font-medium cursor-pointer select-none">
{callToAction}
</div>
</div>
</div>
);
}
export function _WelcomeModal({ user }: { user: User | null }) {
const router = useRouter();
const [selectedFlow, setSelectedFlow] = useState<null | "search" | "chat">(
null
);
const [isHidden, setIsHidden] = useState(false);
const [canBegin, setCanBegin] = useState(false);
const [apiKeyVerified, setApiKeyVerified] = useState<boolean>(false);
const [providerOptions, setProviderOptions] = useState<
WellKnownLLMProviderDescriptor[]
>([]);
const { refreshProviderInfo } = useProviderStatus();
const clientSetWelcomeFlowComplete = async () => {
setWelcomeFlowComplete();
refreshProviderInfo();
router.refresh();
};
useEffect(() => {
@@ -81,193 +52,35 @@ export function _WelcomeModal({ user }: { user: User | null }) {
fetchProviderInfo();
}, []);
if (isHidden) {
return null;
}
let title;
let body;
switch (selectedFlow) {
case "search":
title = undefined;
body = (
<div className="max-h-[85vh] overflow-y-auto px-4 pb-4">
<BackButton behaviorOverride={() => setSelectedFlow(null)} />
<div className="mt-3">
<Text className="font-bold flex">
{apiKeyVerified && (
<FiCheckCircle className="my-auto mr-2 text-success" />
)}
Step 1: Setup an LLM
</Text>
<div>
{apiKeyVerified ? (
<Text className="mt-2">
LLM setup complete!
<br /> <br />
If you want to change the key later, you&apos;ll be able to
easily to do so in the Admin Panel.
</Text>
) : (
<ApiKeyForm
onSuccess={() => setApiKeyVerified(true)}
providerOptions={providerOptions}
/>
)}
</div>
<Text className="font-bold mt-6 mb-2">
Step 2: Connect Data Sources
</Text>
<div>
<Text>
Connectors are the way that Danswer gets data from your
organization&apos;s various data sources. Once setup, we&apos;ll
automatically sync data from your apps and docs into Danswer, so
you can search through all of them in one place.
</Text>
<div className="flex mt-3">
<Link
href="/admin/add-connector"
onClick={(e) => {
e.preventDefault();
clientSetWelcomeFlowComplete();
router.push("/admin/add-connector");
}}
className="w-fit mx-auto"
>
<Button size="xs" icon={FiShare2} disabled={!apiKeyVerified}>
Setup your first connector!
</Button>
</Link>
</div>
</div>
</div>
</div>
);
break;
case "chat":
title = undefined;
body = (
<div className="mt-3 max-h-[85vh] overflow-y-auto px-4 pb-4">
<BackButton behaviorOverride={() => setSelectedFlow(null)} />
<div className="mt-3">
<Text className="font-bold flex">
{apiKeyVerified && (
<FiCheckCircle className="my-auto mr-2 text-success" />
)}
Step 1: Setup an LLM
</Text>
<div>
{apiKeyVerified ? (
<Text className="mt-2">
LLM setup complete!
<br /> <br />
If you want to change the key later or choose a different LLM,
you&apos;ll be able to easily to do so in the Admin Panel.
</Text>
) : (
<div>
<ApiKeyForm
onSuccess={() => setApiKeyVerified(true)}
providerOptions={providerOptions}
/>
</div>
)}
</div>
<Text className="font-bold mt-6 mb-2 flex">
Step 2: Start Chatting!
</Text>
<Text>
Click the button below to start chatting with the LLM setup above!
Don&apos;t worry, if you do decide later on you want to connect
your organization&apos;s knowledge, you can always do that in the{" "}
<Link
className="text-link"
href="/admin/add-connector"
onClick={(e) => {
e.preventDefault();
router.push("/admin/add-connector");
}}
>
Admin Panel
</Link>
.
</Text>
<div className="flex mt-3">
<Link
href="/chat"
onClick={(e) => {
e.preventDefault();
clientSetWelcomeFlowComplete();
router.push("/chat");
setIsHidden(true);
}}
className="w-fit mx-auto"
>
<Button size="xs" icon={FiShare2} disabled={!apiKeyVerified}>
Start chatting!
</Button>
</Link>
</div>
</div>
</div>
);
break;
default:
title = "🎉 Welcome to Danswer";
body = (
<>
<div>
<Text>How are you planning on using Danswer?</Text>
</div>
<Divider />
<UsageTypeSection
title="Search / Chat with Knowledge"
description={
<Text>
If you&apos;re looking to search through, chat with, or ask
direct questions of your organization&apos;s knowledge, then
this is the option for you!
</Text>
}
callToAction="Get Started"
onClick={() => setSelectedFlow("search")}
/>
<Divider />
<UsageTypeSection
title="Secure ChatGPT"
description={
<Text>
If you&apos;re looking for a pure ChatGPT-like experience, then
this is the option for you!
</Text>
}
callToAction="Get Started"
onClick={() => {
setSelectedFlow("chat");
}}
/>
{/* TODO: add a Slack option here */}
{/* <Divider />
<UsageTypeSection
title="AI-powered Slack Assistant"
description="If you're looking to setup a bot to auto-answer questions in Slack"
callToAction="Connect your company knowledge!"
link="/admin/add-connector"
/> */}
</>
);
}
return (
<Modal title={title} className="max-w-4xl">
<div className="text-base">{body}</div>
<Modal title={"Welcome to Danswer!"} width="w-full max-w-3xl">
<div>
<Text className="mb-4">
Danswer brings all your company&apos;s knowledge to your fingertips,
ready to be accessed instantly.
</Text>
<Text className="mb-4">
To get started, we need to set up an API key for the Language Model
(LLM) provider. This key allows Danswer to interact with the AI model,
enabling intelligent responses to your queries.
</Text>
<div className="max-h-[900px] overflow-y-scroll">
<ApiKeyForm
hidePopup
onSuccess={() => {
router.refresh();
refreshProviderInfo();
setCanBegin(true);
}}
providerOptions={providerOptions}
/>
</div>
<Divider />
<Button disabled={!canBegin} onClick={clientSetWelcomeFlowComplete}>
Get Started
</Button>
</div>
</Modal>
);
}

View File

@@ -8,9 +8,11 @@ import { CustomLLMProviderUpdateForm } from "@/app/admin/configuration/llm/Custo
export const ApiKeyForm = ({
onSuccess,
providerOptions,
hidePopup,
}: {
onSuccess: () => void;
providerOptions: WellKnownLLMProviderDescriptor[];
hidePopup?: boolean;
}) => {
const [popup, setPopup] = useState<{
message: string;
@@ -33,7 +35,10 @@ export const ApiKeyForm = ({
return (
<div>
{popup && <Popup message={popup.message} type={popup.type} />}
{!hidePopup && popup && (
<Popup message={popup.message} type={popup.type} />
)}
<TabGroup
index={providerNameToIndexMap.get(providerName) || 0}
onIndexChange={(index) =>

View File

@@ -21,10 +21,10 @@ export const ApiKeyModal = ({ hide }: { hide: () => void }) => {
return (
<Modal
title="Set an API Key!"
className="max-w-3xl"
width="max-w-3xl w-full"
onOutsideClick={() => hide()}
>
<div className="max-h-[75vh] overflow-y-auto flex flex-col px-4">
<div className="max-h-[75vh] overflow-y-auto flex flex-col">
<div>
<div className="mb-5 text-sm">
Please provide an API Key below in order to start using

View File

@@ -230,6 +230,8 @@ export const useUserGroups = (): {
const MODEL_DISPLAY_NAMES: { [key: string]: string } = {
// OpenAI models
"o1-mini": "O1 Mini",
"o1-preview": "O1 Preview",
"gpt-4": "GPT 4",
"gpt-4o": "GPT 4o",
"gpt-4o-2024-08-06": "GPT 4o (Structured Outputs)",
@@ -292,7 +294,7 @@ export function getDisplayNameForModel(modelName: string): string {
}
export const defaultModelsByProvider: { [name: string]: string[] } = {
openai: ["gpt-4", "gpt-4o", "gpt-4o-mini"],
openai: ["gpt-4", "gpt-4o", "gpt-4o-mini", "o1-mini", "o1-preview"],
bedrock: [
"meta.llama3-1-70b-instruct-v1:0",
"meta.llama3-1-8b-instruct-v1:0",