Compare commits

...

3 Commits
oops ... v1.8.3

Author SHA1 Message Date
Chris Weaver
5752a563c0 fix: image gen tool causing error (#5445) 2025-09-18 11:08:29 -07:00
edwin-onyx
1df04ca40d fix(infra): remove setfit dependency from api server (#5449) 2025-09-18 06:49:06 +00:00
Chris Weaver
4ae6297761 fix: non-image gen models (#5381) 2025-09-15 19:01:08 -07:00
6 changed files with 138 additions and 5 deletions

View File

@@ -21,6 +21,10 @@ env:
CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
# LLMs
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
jobs:
discover-test-dirs:
runs-on: ubuntu-latest

View File

@@ -31,12 +31,14 @@ jobs:
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
pip install --retries 5 --timeout 30 -r backend/requirements/model_server.txt
- name: Run Tests
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"

View File

@@ -193,7 +193,9 @@ def construct_tools(
custom_tool_config: CustomToolConfig | None = None,
allowed_tool_ids: list[int] | None = None,
) -> dict[int, list[Tool]]:
"""Constructs tools based on persona configuration and available APIs"""
"""Constructs tools based on persona configuration and available APIs.
Will simply skip tools that are not allowed/available."""
tool_dict: dict[int, list[Tool]] = {}
mcp_tool_cache: dict[int, dict[int, MCPTool]] = {}
@@ -212,6 +214,21 @@ def construct_tools(
db_tool_model.in_code_tool_id, db_session
)
try:
tool_is_available = tool_cls.is_available(db_session)
except Exception:
logger.exception(
"Failed checking availability for tool %s", tool_cls.__name__
)
tool_is_available = False
if not tool_is_available:
logger.debug(
"Skipping tool %s because it is not available",
tool_cls.__name__,
)
continue
# Handle Search Tool
if (
tool_cls.__name__ == SearchTool.__name__

View File

@@ -76,7 +76,6 @@ requests==2.32.5
requests-oauthlib==1.3.1
retry==0.9.2 # This pulls in py which is in CVE-2022-42969, must remove py from image
rfc3986==1.5.0
setfit==1.1.1
simple-salesforce==1.12.6
slack-sdk==3.20.2
SQLAlchemy[mypy]==2.0.15

View File

@@ -0,0 +1,112 @@
from __future__ import annotations
import os
from uuid import uuid4
from sqlalchemy.orm import Session
from onyx.chat.models import AnswerStreamPart
from onyx.chat.models import MessageResponseIDInfo
from onyx.chat.models import StreamingError
from onyx.chat.process_message import stream_chat_message_objects
from onyx.context.search.models import RetrievalDetails
from onyx.db.chat import create_chat_session
from onyx.db.llm import fetch_existing_llm_providers
from onyx.db.llm import remove_llm_provider
from onyx.db.llm import update_default_provider
from onyx.db.llm import upsert_llm_provider
from onyx.server.manage.llm.models import LLMProviderUpsertRequest
from onyx.server.manage.llm.models import ModelConfigurationUpsertRequest
from onyx.server.query_and_chat.models import CreateChatMessageRequest
from onyx.server.query_and_chat.streaming_models import MessageDelta
from onyx.server.query_and_chat.streaming_models import MessageStart
from onyx.server.query_and_chat.streaming_models import Packet
from tests.external_dependency_unit.conftest import create_test_user
def test_answer_with_only_anthropic_provider(
    db_session: Session,
    full_deployment_setup: None,
    mock_external_deps: None,
) -> None:
    """Verify chat answers stream correctly when Anthropic is the sole LLM provider."""
    api_key = os.environ.get("ANTHROPIC_API_KEY")
    assert api_key, "ANTHROPIC_API_KEY environment variable must be set"

    # Clear out every pre-existing provider so Anthropic is the only option.
    for existing in fetch_existing_llm_providers(db_session):
        remove_llm_provider(db_session, existing.id)

    model_name = "claude-3-5-sonnet-20240620"
    provider_record = upsert_llm_provider(
        LLMProviderUpsertRequest(
            name=f"anthropic-test-{uuid4().hex}",
            provider="anthropic",
            api_key=api_key,
            default_model_name=model_name,
            fast_default_model_name=model_name,
            is_public=True,
            groups=[],
            model_configurations=[
                ModelConfigurationUpsertRequest(name=model_name, is_visible=True)
            ],
            api_key_changed=True,
        ),
        db_session=db_session,
    )
    try:
        update_default_provider(provider_record.id, db_session)

        user = create_test_user(db_session, email_prefix="anthropic_only")
        session = create_chat_session(
            db_session=db_session,
            description="Anthropic only chat",
            user_id=user.id,
            persona_id=0,
        )
        request = CreateChatMessageRequest(
            chat_session_id=session.id,
            parent_message_id=None,
            message="hello",
            file_descriptors=[],
            search_doc_ids=None,
            retrieval_options=RetrievalDetails(),
        )

        # Materialize the full stream so each assertion below can scan it.
        packets: list[AnswerStreamPart] = list(
            stream_chat_message_objects(
                new_msg_req=request,
                user=user,
                db_session=db_session,
            )
        )

        assert packets, "Should receive streamed packets"
        assert all(
            not isinstance(p, StreamingError) for p in packets
        ), "No streaming errors expected with Anthropic provider"
        assert any(
            isinstance(p, MessageResponseIDInfo) for p in packets
        ), "Should include reserved assistant message ID"
        assert any(
            isinstance(p, Packet) and isinstance(p.obj, MessageStart)
            for p in packets
        ), "Stream should have a MessageStart packet"
        assert any(
            isinstance(p, Packet) and isinstance(p.obj, MessageDelta)
            for p in packets
        ), "Stream should have a MessageDelta packet"
    finally:
        # Always remove the provider we created, even if an assertion failed.
        remove_llm_provider(db_session, provider_record.id)

View File

@@ -22,7 +22,7 @@ import {
SendIcon,
StopGeneratingIcon,
} from "@/components/icons/icons";
import { OnyxDocument, SourceMetadata } from "@/lib/search/interfaces";
import { OnyxDocument } from "@/lib/search/interfaces";
import { ChatState } from "@/app/chat/interfaces";
import { useAssistantsContext } from "@/components/context/AssistantsContext";
import { CalendarIcon, TagIcon, XIcon, FolderIcon } from "lucide-react";
@@ -37,7 +37,6 @@ import { UnconfiguredLlmProviderText } from "@/components/chat/UnconfiguredLlmPr
import { DeepResearchToggle } from "./DeepResearchToggle";
import { ActionToggle } from "./ActionManagement";
import { SelectedTool } from "./SelectedTool";
import { getProviderIcon } from "@/app/admin/configuration/llm/utils";
const MAX_INPUT_HEIGHT = 200;
@@ -661,7 +660,7 @@ export const ChatInputBar = React.memo(function ChatInputBar({
<LLMPopover
llmProviders={llmProviders}
llmManager={llmManager}
requiresImageGeneration={true}
requiresImageGeneration={false}
currentAssistant={selectedAssistant}
/>