Mirror of https://github.com/onyx-dot-app/onyx.git, synced 2026-02-22 02:05:46 +00:00

Compare commits (45 commits)
857bd07dff
21471566d6
4d2ab5be85
129503b86f
3862df6691
86ae7c55fb
2405eb48ca
6ebd4e224f
afc8075cc3
71123f54a7
6061adb114
35300f6569
fe49e35ca4
804887fd31
fe1400aa36
5eddc89b5a
9a492ceb6d
3c54ae9de9
13f08f3ebb
bd9f15854f
366aa2a8ea
deee237c7e
100b4a0d16
70207b4b39
50826b6bef
3f648cbc31
c875a4774f
049091eb01
3dac24542b
194dcb593d
bf291d0c0a
8309f4a802
0ff2565125
e89dcd7f84
645e7e828e
2a54f14195
9209fc804b
b712877701
e6df32dcc3
eb81258a23
487ef4acc0
9b7cc83eae
ce3124f9e4
e69303e309
6e698ac84a
@@ -65,6 +65,7 @@ jobs:
           NEXT_PUBLIC_POSTHOG_KEY=${{ secrets.POSTHOG_KEY }}
           NEXT_PUBLIC_POSTHOG_HOST=${{ secrets.POSTHOG_HOST }}
           NEXT_PUBLIC_SENTRY_DSN=${{ secrets.SENTRY_DSN }}
+          NEXT_PUBLIC_GTM_ENABLED=true
         # needed due to weird interactions with the builds for different platforms
         no-cache: true
         labels: ${{ steps.meta.outputs.labels }}
.github/workflows/pr-chromatic-tests.yml (vendored, 14 lines changed)
@@ -3,12 +3,7 @@ concurrency:
   group: Run-Chromatic-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
   cancel-in-progress: true
 
-on:
-  merge_group:
-  pull_request:
-    branches:
-      - main
-      - 'release/**'
+on: push
 
 env:
   OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
@@ -16,6 +11,8 @@ env:
 
 jobs:
   playwright-tests:
     name: Playwright Tests
+    # See https://runs-on.com/runners/linux/
+    runs-on: [runs-on,runner=8cpu-linux-x64,ram=16,"run-id=${{ github.run_id }}"]
     steps:
@@ -108,7 +105,7 @@ jobs:
           cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
           cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
 
       - name: Start Docker containers
         run: |
           cd deployment/docker_compose
           ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
||||
@@ -193,7 +190,8 @@ jobs:
|
||||
docker compose -f docker-compose.dev.yml -p danswer-stack down -v
|
||||
|
||||
chromatic-tests:
|
||||
name: Run Chromatic
|
||||
name: Chromatic Tests
|
||||
|
||||
needs: playwright-tests
|
||||
runs-on: [runs-on,runner=8cpu-linux-x64,ram=16,"run-id=${{ github.run_id }}"]
|
||||
steps:
|
||||
|
||||
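GitHub's `||` expression operator returns its first truthy operand, so the concurrency group at the top of this workflow keys on the PR branch when one exists and falls back to the always-unique run id. A plain-Python sketch of that fallback chain (the function is illustrative, not part of the workflow, and it omits the `${{ github.workflow }}` component for brevity):

```python
# Illustrative only: mirrors how `github.head_ref || github.event.workflow_run.head_branch
# || github.run_id` resolves. The first non-empty value wins, so runs on the same PR
# branch share a group (and cancel each other via cancel-in-progress), while anything
# without a branch gets a unique group keyed by run id and is never cancelled.
def concurrency_group(head_ref: str | None, workflow_run_branch: str | None, run_id: int) -> str:
    return f"Run-Chromatic-Tests-{head_ref or workflow_run_branch or run_id}"

print(concurrency_group("my-feature", None, 11981112345))  # PR push: grouped by branch
print(concurrency_group(None, None, 11981112345))          # fallback: unique per run
```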
@@ -13,7 +13,10 @@ on:
 env:
   OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
   SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
+  CONFLUENCE_TEST_SPACE_URL: ${{ secrets.CONFLUENCE_TEST_SPACE_URL }}
+  CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
+  CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
 
 jobs:
   integration-tests:
     # See https://runs-on.com/runners/linux/
@@ -195,6 +198,9 @@ jobs:
           -e API_SERVER_HOST=api_server \
           -e OPENAI_API_KEY=${OPENAI_API_KEY} \
           -e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
+          -e CONFLUENCE_TEST_SPACE_URL=${CONFLUENCE_TEST_SPACE_URL} \
+          -e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \
+          -e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \
           -e TEST_WEB_HOSTNAME=test-runner \
           danswer/danswer-integration:test \
           /app/tests/integration/tests \
@@ -12,7 +12,7 @@
   <a href="https://docs.danswer.dev/" target="_blank">
     <img src="https://img.shields.io/badge/docs-view-blue" alt="Documentation">
   </a>
-  <a href="https://join.slack.com/t/danswer/shared_invite/zt-2lcmqw703-071hBuZBfNEOGUsLa5PXvQ" target="_blank">
+  <a href="https://join.slack.com/t/danswer/shared_invite/zt-2twesxdr6-5iQitKZQpgq~hYIZ~dv3KA" target="_blank">
     <img src="https://img.shields.io/badge/slack-join-blue.svg?logo=slack" alt="Slack">
   </a>
   <a href="https://discord.gg/TDJ59cGV2X" target="_blank">
@@ -135,7 +135,7 @@ Looking to contribute? Please check out the [Contribution Guide](CONTRIBUTING.md)
 
 ## ✨Contributors
 
-<a href="https://github.com/aryn-ai/sycamore/graphs/contributors">
+<a href="https://github.com/danswer-ai/danswer/graphs/contributors">
   <img alt="contributors" src="https://contrib.rocks/image?repo=danswer-ai/danswer"/>
 </a>
@@ -0,0 +1,59 @@
"""display custom llm models

Revision ID: 177de57c21c9
Revises: 4ee1287bd26a
Create Date: 2024-11-21 11:49:04.488677

"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy import and_

revision = "177de57c21c9"
down_revision = "4ee1287bd26a"
branch_labels = None
depends_on = None


def upgrade() -> None:
    conn = op.get_bind()
    llm_provider = sa.table(
        "llm_provider",
        sa.column("id", sa.Integer),
        sa.column("provider", sa.String),
        sa.column("model_names", postgresql.ARRAY(sa.String)),
        sa.column("display_model_names", postgresql.ARRAY(sa.String)),
    )

    excluded_providers = ["openai", "bedrock", "anthropic", "azure"]

    providers_to_update = sa.select(
        llm_provider.c.id,
        llm_provider.c.model_names,
        llm_provider.c.display_model_names,
    ).where(
        and_(
            ~llm_provider.c.provider.in_(excluded_providers),
            llm_provider.c.model_names.isnot(None),
        )
    )

    results = conn.execute(providers_to_update).fetchall()

    for provider_id, model_names, display_model_names in results:
        if display_model_names is None:
            display_model_names = []

        combined_model_names = list(set(display_model_names + model_names))
        update_stmt = (
            llm_provider.update()
            .where(llm_provider.c.id == provider_id)
            .values(display_model_names=combined_model_names)
        )
        conn.execute(update_stmt)


def downgrade() -> None:
    pass
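For every non-excluded provider, the upgrade step unions `model_names` into `display_model_names`. A standalone sketch of that merge, with made-up model names standing in for real provider data:

```python
# Standalone illustration of the migration's merge step. display_model_names
# ends up as the union of the old display list and all configured model names;
# set() deduplicates but does not preserve order.
model_names = ["llama-3.1-8b", "llama-3.1-70b", "mixtral-8x7b"]
display_model_names = ["llama-3.1-70b"]

combined_model_names = list(set(display_model_names + model_names))
assert sorted(combined_model_names) == sorted(set(model_names + display_model_names))
print(combined_model_names)  # all three names, in arbitrary order
```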
@@ -0,0 +1,280 @@
"""add_multiple_slack_bot_support

Revision ID: 4ee1287bd26a
Revises: 47e5bef3a1d7
Create Date: 2024-11-06 13:15:53.302644

"""
import logging
from typing import cast
from alembic import op
import sqlalchemy as sa
from sqlalchemy.orm import Session
from danswer.key_value_store.factory import get_kv_store
from danswer.db.models import SlackBot
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision = "4ee1287bd26a"
down_revision = "47e5bef3a1d7"
branch_labels: None = None
depends_on: None = None

# Configure logging
logger = logging.getLogger("alembic.runtime.migration")
logger.setLevel(logging.INFO)


def upgrade() -> None:
    logger.info(f"{revision}: create_table: slack_bot")
    # Create new slack_bot table
    op.create_table(
        "slack_bot",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("enabled", sa.Boolean(), nullable=False, server_default="true"),
        sa.Column("bot_token", sa.LargeBinary(), nullable=False),
        sa.Column("app_token", sa.LargeBinary(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("bot_token"),
        sa.UniqueConstraint("app_token"),
    )

    # Create new slack_channel_config table
    op.create_table(
        "slack_channel_config",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("slack_bot_id", sa.Integer(), nullable=True),
        sa.Column("persona_id", sa.Integer(), nullable=True),
        sa.Column("channel_config", postgresql.JSONB(), nullable=False),
        sa.Column("response_type", sa.String(), nullable=False),
        sa.Column(
            "enable_auto_filters", sa.Boolean(), nullable=False, server_default="false"
        ),
        sa.ForeignKeyConstraint(
            ["slack_bot_id"],
            ["slack_bot.id"],
        ),
        sa.ForeignKeyConstraint(
            ["persona_id"],
            ["persona.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )

    # Handle existing Slack bot tokens first
    logger.info(f"{revision}: Checking for existing Slack bot.")
    bot_token = None
    app_token = None
    first_row_id = None

    try:
        tokens = cast(dict, get_kv_store().load("slack_bot_tokens_config_key"))
    except Exception:
        logger.warning("No existing Slack bot tokens found.")
        tokens = {}

    bot_token = tokens.get("bot_token")
    app_token = tokens.get("app_token")

    if bot_token and app_token:
        logger.info(f"{revision}: Found bot and app tokens.")

        session = Session(bind=op.get_bind())
        new_slack_bot = SlackBot(
            name="Slack Bot (Migrated)",
            enabled=True,
            bot_token=bot_token,
            app_token=app_token,
        )
        session.add(new_slack_bot)
        session.commit()
        first_row_id = new_slack_bot.id

    # Create a default bot if none exists
    # This is in case there are no slack tokens but there are channels configured
    op.execute(
        sa.text(
            """
            INSERT INTO slack_bot (name, enabled, bot_token, app_token)
            SELECT 'Default Bot', true, '', ''
            WHERE NOT EXISTS (SELECT 1 FROM slack_bot)
            RETURNING id;
            """
        )
    )

    # Get the bot ID to use (either from existing migration or newly created)
    bot_id_query = sa.text(
        """
        SELECT COALESCE(
            :first_row_id,
            (SELECT id FROM slack_bot ORDER BY id ASC LIMIT 1)
        ) as bot_id;
        """
    )
    result = op.get_bind().execute(bot_id_query, {"first_row_id": first_row_id})
    bot_id = result.scalar()

    # CTE (Common Table Expression) that transforms the old slack_bot_config table data
    # This splits up the channel_names into their own rows
    channel_names_cte = """
        WITH channel_names AS (
            SELECT
                sbc.id as config_id,
                sbc.persona_id,
                sbc.response_type,
                sbc.enable_auto_filters,
                jsonb_array_elements_text(sbc.channel_config->'channel_names') as channel_name,
                sbc.channel_config->>'respond_tag_only' as respond_tag_only,
                sbc.channel_config->>'respond_to_bots' as respond_to_bots,
                sbc.channel_config->'respond_member_group_list' as respond_member_group_list,
                sbc.channel_config->'answer_filters' as answer_filters,
                sbc.channel_config->'follow_up_tags' as follow_up_tags
            FROM slack_bot_config sbc
        )
    """

    # Insert the channel names into the new slack_channel_config table
    insert_statement = """
        INSERT INTO slack_channel_config (
            slack_bot_id,
            persona_id,
            channel_config,
            response_type,
            enable_auto_filters
        )
        SELECT
            :bot_id,
            channel_name.persona_id,
            jsonb_build_object(
                'channel_name', channel_name.channel_name,
                'respond_tag_only',
                COALESCE((channel_name.respond_tag_only)::boolean, false),
                'respond_to_bots',
                COALESCE((channel_name.respond_to_bots)::boolean, false),
                'respond_member_group_list',
                COALESCE(channel_name.respond_member_group_list, '[]'::jsonb),
                'answer_filters',
                COALESCE(channel_name.answer_filters, '[]'::jsonb),
                'follow_up_tags',
                COALESCE(channel_name.follow_up_tags, '[]'::jsonb)
            ),
            channel_name.response_type,
            channel_name.enable_auto_filters
        FROM channel_names channel_name;
    """

    op.execute(sa.text(channel_names_cte + insert_statement).bindparams(bot_id=bot_id))

    # Clean up old tokens if they existed
    try:
        if bot_token and app_token:
            logger.info(f"{revision}: Removing old bot and app tokens.")
            get_kv_store().delete("slack_bot_tokens_config_key")
    except Exception:
        logger.warning("Tried to delete tokens in dynamic config but failed.")

    # Rename the table
    op.rename_table(
        "slack_bot_config__standard_answer_category",
        "slack_channel_config__standard_answer_category",
    )

    # Rename the column
    op.alter_column(
        "slack_channel_config__standard_answer_category",
        "slack_bot_config_id",
        new_column_name="slack_channel_config_id",
    )

    # Drop the table with CASCADE to handle dependent objects
    op.execute("DROP TABLE slack_bot_config CASCADE")

    logger.info(f"{revision}: Migration complete.")


def downgrade() -> None:
    # Recreate the old slack_bot_config table
    op.create_table(
        "slack_bot_config",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("persona_id", sa.Integer(), nullable=True),
        sa.Column("channel_config", postgresql.JSONB(), nullable=False),
        sa.Column("response_type", sa.String(), nullable=False),
        sa.Column("enable_auto_filters", sa.Boolean(), nullable=False),
        sa.ForeignKeyConstraint(
            ["persona_id"],
            ["persona.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )

    # Migrate data back to the old format
    # Group by persona_id to combine channel names back into arrays
    op.execute(
        sa.text(
            """
            INSERT INTO slack_bot_config (
                persona_id,
                channel_config,
                response_type,
                enable_auto_filters
            )
            SELECT DISTINCT ON (persona_id)
                persona_id,
                jsonb_build_object(
                    'channel_names', (
                        SELECT jsonb_agg(c.channel_config->>'channel_name')
                        FROM slack_channel_config c
                        WHERE c.persona_id = scc.persona_id
                    ),
                    'respond_tag_only', (channel_config->>'respond_tag_only')::boolean,
                    'respond_to_bots', (channel_config->>'respond_to_bots')::boolean,
                    'respond_member_group_list', channel_config->'respond_member_group_list',
                    'answer_filters', channel_config->'answer_filters',
                    'follow_up_tags', channel_config->'follow_up_tags'
                ),
                response_type,
                enable_auto_filters
            FROM slack_channel_config scc
            WHERE persona_id IS NOT NULL;
            """
        )
    )

    # Rename the table back
    op.rename_table(
        "slack_channel_config__standard_answer_category",
        "slack_bot_config__standard_answer_category",
    )

    # Rename the column back
    op.alter_column(
        "slack_bot_config__standard_answer_category",
        "slack_channel_config_id",
        new_column_name="slack_bot_config_id",
    )

    # Try to save the first bot's tokens back to KV store
    try:
        first_bot = (
            op.get_bind()
            .execute(
                sa.text(
                    "SELECT bot_token, app_token FROM slack_bot ORDER BY id LIMIT 1"
                )
            )
            .first()
        )
        if first_bot and first_bot.bot_token and first_bot.app_token:
            tokens = {
                "bot_token": first_bot.bot_token,
                "app_token": first_bot.app_token,
            }
            get_kv_store().store("slack_bot_tokens_config_key", tokens)
    except Exception:
        logger.warning("Failed to save tokens back to KV store")

    # Drop the new tables in reverse order
    op.drop_table("slack_channel_config")
    op.drop_table("slack_bot")
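The CTE in this migration fans each legacy slack_bot_config row out into one slack_channel_config row per entry in its `channel_names` array, with `COALESCE(...)` supplying defaults for absent flags. A rough plain-Python equivalent of that transform, using invented sample data (dictionaries stand in for table rows; this is not part of the migration):

```python
# Rough Python equivalent of the CTE + INSERT pair: each name in channel_names
# becomes its own per-channel config row, and missing flags default the same
# way the SQL COALESCE(...) calls do.
legacy_row = {
    "persona_id": 7,
    "response_type": "citations",
    "enable_auto_filters": False,
    "channel_config": {
        "channel_names": ["#support", "#general"],
        "respond_tag_only": "true",
        # respond_to_bots / answer_filters / follow_up_tags absent -> defaults below
    },
}

cfg = legacy_row["channel_config"]
per_channel_rows = [
    {
        "persona_id": legacy_row["persona_id"],
        "response_type": legacy_row["response_type"],
        "enable_auto_filters": legacy_row["enable_auto_filters"],
        "channel_config": {
            "channel_name": name,
            "respond_tag_only": cfg.get("respond_tag_only") == "true",
            "respond_to_bots": cfg.get("respond_to_bots") == "true",
            "answer_filters": cfg.get("answer_filters") or [],
            "follow_up_tags": cfg.get("follow_up_tags") or [],
        },
    }
    for name in cfg["channel_names"]
]
print(len(per_channel_rows))  # 2 -- one row per channel
```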
@@ -7,6 +7,7 @@ Create Date: 2024-10-26 13:06:06.937969
 """
 from alembic import op
 from sqlalchemy.orm import Session
+from sqlalchemy import text
 
 # Import your models and constants
 from danswer.db.models import (
@@ -15,7 +16,6 @@ from danswer.db.models import (
     Credential,
     IndexAttempt,
 )
-from danswer.configs.constants import DocumentSource
 
 
 # revision identifiers, used by Alembic.
@@ -30,13 +30,11 @@ def upgrade() -> None:
     bind = op.get_bind()
     session = Session(bind=bind)
 
-    connectors_to_delete = (
-        session.query(Connector)
-        .filter(Connector.source == DocumentSource.REQUESTTRACKER)
-        .all()
-    )
+    # Get connectors using raw SQL
+    result = bind.execute(
+        text("SELECT id FROM connector WHERE source = 'requesttracker'")
+    )
 
-    connector_ids = [connector.id for connector in connectors_to_delete]
+    connector_ids = [row[0] for row in result]
 
     if connector_ids:
         cc_pairs_to_delete = (
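Swapping `session.query(Connector)` for raw SQL decouples this migration from the current ORM models: migrations run against historical schemas, so they should keep working even after the `Connector` model or the `DocumentSource` enum changes or disappears. A self-contained sketch of the pattern, with an in-memory SQLite database standing in for Postgres (illustration only, not the project's code):

```python
# Self-contained sketch of the raw-SQL migration pattern. The query never
# touches ORM models, so it cannot break when those models drift away from
# the schema that existed when this migration was written.
from sqlalchemy import create_engine, text

engine = create_engine("sqlite://")
with engine.begin() as bind:
    bind.execute(text("CREATE TABLE connector (id INTEGER PRIMARY KEY, source TEXT)"))
    bind.execute(
        text("INSERT INTO connector (id, source) VALUES (1, 'requesttracker'), (2, 'web')")
    )
    result = bind.execute(
        text("SELECT id FROM connector WHERE source = 'requesttracker'")
    )
    connector_ids = [row[0] for row in result]

print(connector_ids)  # [1]
```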
@@ -1,7 +1,7 @@
 """add creator to cc pair
 
 Revision ID: 9cf5c00f72fe
-Revises: c0fd6e4da83a
+Revises: 26b931506ecb
 Create Date: 2024-11-12 15:16:42.682902
 
 """
@@ -0,0 +1,29 @@
"""add auto scroll to user model

Revision ID: a8c2065484e6
Revises: 177de57c21c9
Create Date: 2024-11-22 17:34:09.690295

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = "a8c2065484e6"
down_revision = "177de57c21c9"
branch_labels = None
depends_on = None


def upgrade() -> None:
    # Add the nullable auto_scroll column with no server default;
    # NULL means the user has not set an explicit preference
    op.add_column(
        "user",
        sa.Column("auto_scroll", sa.Boolean(), nullable=True, server_default=None),
    )


def downgrade() -> None:
    # Remove the auto_scroll column
    op.drop_column("user", "auto_scroll")
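Because `auto_scroll` is nullable with no server default, existing rows get NULL rather than an explicit True or False. A hypothetical read-side helper (not from the codebase) showing how such a column is typically resolved:

```python
# Hypothetical helper: NULL means "no explicit user preference", so the
# application substitutes its own default at read time instead of the database
# baking one in at migration time.
def resolve_auto_scroll(user_value: bool | None, app_default: bool = True) -> bool:
    return app_default if user_value is None else user_value

print(resolve_auto_scroll(None))   # True  -> falls back to the app default
print(resolve_auto_scroll(False))  # False -> explicit preference wins
```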
backend/branch_commits.csv (new file, 551 lines)
@@ -0,0 +1,551 @@
Branch,Commit Hash,Author,Date,Subject
DAN-108,548c081fd6515c2e8b912d145c135e292db4613e,pablodanswer,2024-11-20,k
DAN-108,0d4abfdc85fdb62c347d0f649744f1b7c12e8011,pablodanswer,2024-11-20,folder clarity
a,36eee45a03c3227a9b070e18a043e16fe5179cb9,pablodanswer,2024-11-21,llm provider causing re render in effect
account_for_json,b37d0b91e6a6596af91e1fa32786591b76e05a67,pablodanswer,2024-11-14,fix single quote block in llm answer
account_for_json,4e0c048acba88f4c83d7c83af52bb0932234ddad,pablodanswer,2024-11-14,nit
account_for_json,a0371a6750476fccc3b9892a7c58d72182c92507,pablodanswer,2024-11-14,minor logic update
account_for_json,4f1c4baa80f7b747633bb3d528aed6de5b11f639,pablodanswer,2024-11-14,minor cosmetic update
account_for_json,b6ef7e713a4eca3d65aa411604e8f67ad5efdd87,pablodanswer,2024-11-14,k
account_for_json,66df9b6f7dae8bce61e35615d715ddefc6406614,pablodanswer,2024-11-14,improved fallback logic
account_for_json,0473888ccdb5219cc39f275652bfeb72a420b5d9,pablodanswer,2024-11-13,silence warning
accurate_user_counting,06f3a4590c05665b04851b30860aa431ad4b7217,pablodanswer,2024-11-02,ensure we remove users in time
accurate_user_counting,6e75ba007302ce9adc4469b86695aee4b4b5c513,pablodanswer,2024-11-02,validate
accurate_user_counting,11f3729ebb9f67b8e568c01a9ce1d098560033cf,pablodanswer,2024-11-02,update register
add_csv_display,e7b044cf38cd3e25fdbe17ea8fcac3e8c17d9570,pablodanswer,2024-11-03,nit
add_csv_display,93ec944a01ec87d87a4bf2b85c1164b7625a1259,pablodanswer,2024-11-02,update requirements
add_csv_display,00f8e431ff81d7980c8d2c166bdad5f899752379,pablodanswer,2024-11-02,create portal for modal
add_csv_display,a019a812bef27a20bd2e94d558974c55ded63035,pablodanswer,2024-11-02,restructure
add_csv_display,eabc519f062b5e0fec3b2c29e89f109606e747bc,pablodanswer,2024-11-01,add downloading
add_csv_display,4dbd74cacb350ebbf5ce0554239f999503a14d8f,pablodanswer,2024-11-01,add CSV display
add_tool_formats,e7361dcb17a1d205627e46c87861f5be4dc06a03,pablodanswer,2024-11-03,add multiple formats to tools
add_tool_formats,00f8e431ff81d7980c8d2c166bdad5f899752379,pablodanswer,2024-11-02,create portal for modal
add_tool_formats,a019a812bef27a20bd2e94d558974c55ded63035,pablodanswer,2024-11-02,restructure
add_tool_formats,eabc519f062b5e0fec3b2c29e89f109606e747bc,pablodanswer,2024-11-01,add downloading
add_tool_formats,4dbd74cacb350ebbf5ce0554239f999503a14d8f,pablodanswer,2024-11-01,add CSV display
admin_wonkiness,8a7f032acb35fca9260f1f15e48a6114279a1dc0,pablodanswer,2024-11-20,valid props
api_keys_are_not_users,39c3e3f84b56f2b1d661f723fe9650503d8602ad,pablodanswer,2024-11-01,typing
api_keys_are_not_users,cab9c925cc09b636e026f36057795a775d6a8289,pablodanswer,2024-11-01,don't count api keys as users
assistant_categories,425da2250c6cade36e9dfe4aa9eaca9f60ad7c1f,pablodanswer,2024-11-18,alembic (once again)
assistant_categories,c079165c60d58d781bb399220f0041a57dd27cde,pablodanswer,2024-11-18,alembic
assistant_categories,dc5f9e5aa2fbf1a502474bc56cbe9a5eaa34ed91,pablodanswer,2024-11-11,nit
assistant_categories,7ed84cf536aa5be737f4eff25e244def9987cfb3,pablodanswer,2024-11-11,typing
assistant_categories,30a58ad86d96f841103f9bf5ef92355ba7550e72,pablodanswer,2024-11-11,finalize
assistant_categories,4c5d0a45fd07dffa42717c78f4b20025ca7c67ad,pablodanswer,2024-11-11,update typing
assistant_categories,ed7c62b450dd1b42a8e399c8abcaac8ccb006b1d,pablodanswer,2024-11-11,minor update to tests
assistant_categories,501c6afdd0a8e4c67ee8ae864392549a19f68b85,pablodanswer,2024-11-11,post rebase update
assistant_categories,8cd7e50b26d8ac5d5311c1ffc4517c35c2a9a6b6,pablodanswer,2024-11-08,add tests
assistant_categories,ca0eb6f03344cf833b2aba45c5fbe4d01a112c6f,pablodanswer,2024-11-07,nit
assistant_categories,2041484a515ebaedaf05dc0e19e3cb5095b34018,pablodanswer,2024-11-07,update assistant category display
assistant_categories,a124d4e2229bcb9a9f1caf269c444357e4749700,pablodanswer,2024-11-07,finalize
assistant_categories,59fa1d07f10b7f44010207d54547b947ca789fe1,pablodanswer,2024-11-05,functionality finalized
assistant_categories,0a226b47e55dc6767dde8f478729616d1b4870f1,pablodanswer,2024-11-05,add assistant categories v1
assistant_clarity,71c60c52dd37ccebd2d4f8862676d5f21a64acf1,pablodanswer,2024-11-12,minor update
assistant_clarity,72f05a13485dab5a8ddd0d0e5ac7d4e98aed01a2,pablodanswer,2024-11-12,delete code
assistant_clarity,0c22f8ab20c32043c9e1f5f991989a07ecbd6387,pablodanswer,2024-11-12,delete code!
assistant_clarity,e376032f14621d645fda23f058b5712c33224e82,pablodanswer,2024-11-12,update paradigm
assistant_clarity,3f2738006951ffcf58ea59473da3070e8023a9d0,pablodanswer,2024-11-12,alembic fix
assistant_clarity,233f186fecb9eba7eefd6aa493ce70b299f68ac6,pablodanswer,2024-11-12,slight rejigger
assistant_clarity,0582306d9be29f7c3daff7b7d5a2c1ef1517e033,pablodanswer,2024-11-12,k
assistant_clarity,4f699b2591fe190abf1d68fefb3f2841c0f7f68e,pablodanswer,2024-11-12,add minor clarity
assistant_clarity,bc6d47a6c5702d102cc04c16e56426a1561fe3e5,pablodanswer,2024-11-12,minor clean up
assistant_clarity,09ec137a5f6fb230a0c39a67b19e9f772d3441ca,pablodanswer,2024-11-12,update organization
auth_categories,f51d87833e591bdcb9a650aa762060387a96a292,pablodanswer,2024-11-07,nit
auth_categories,01f93bab2f698bb0dc84bddb705de40a9a18e660,pablodanswer,2024-11-07,update assistant category display
auth_categories,b162e9f4c4c9ff4b9cd718f548cc20ab0e60be0f,pablodanswer,2024-11-07,finalize
auth_categories,c7097dffbd73e1b2d9b34ad67bbd8aa6e072c3b5,pablodanswer,2024-11-05,functionality finalized
auth_categories,653bbffb3cda5cbc41f61917e5634e22d70d5e26,pablodanswer,2024-11-05,add assistant categories v1
auto_prompts,06bc8f1f92e33af2c6bb1750936407ad8e29d3c0,pablodanswer,2024-10-28,base functionality
auto_prompts,8093ceeb45088c813fbb117302738b3d225c2f8b,pablodanswer,2024-10-28,formatting
auto_prompts,3d0ace1e450ac6d7271ddedc2ec122a2647be7df,pablodanswer,2024-10-28,minor nits
auto_prompts,553aba79dc41b928c163a83481b202ad56805aae,pablodanswer,2024-10-28,update based on feedback
auto_prompts,da038b317a0b5185ccc32297b01fcaa97ffbb429,pablodanswer,2024-09-21,remove logs
auto_prompts,6769dc373faf7576c2d0ac212735b88eae755293,pablodanswer,2024-09-21,minor udpate to ui
auto_prompts,b35e05315c4c506da87524fe788a9cf5aacb7375,pablodanswer,2024-09-20,use display name + minor updates to models
auto_prompts,7cfd3d2d442255616ec5c477dc4b3eb0b2cad1ed,pablodanswer,2024-09-20,cleaner cards
auto_prompts,b2aa1c864b20274386a1bbe699a3ef7e094bd858,pablodanswer,2024-09-20,slightly cleaner animation
auto_prompts,d2f8177b8f1b9be8eebce520204018e6be59b03c,pablodanswer,2024-09-20,cleaner initial chat screen
back_to_danswer,262a405195e1b1b07c96e1ae4a39df76b690ed69,pablodanswer,2024-11-06,update redirect
beat_robustification,63959454df29709c149b71f82672c8752c646cfa,pablodanswer,2024-11-03,Remove locks (#3017)
beat_robustification,96027f1d732f26b407afd2b52641615a96d5402b,pablodanswer,2024-11-02,ensure versioned apps capture
beat_robustification,80ea6a36610775a0e57ec236f9a2bdaf419a51e5,pablodanswer,2024-11-01,typing
beat_robustification,527c409f81a7d31c8ff6ebd2be465418476eba74,pablodanswer,2024-11-01,update
beat_robustification,19ab457d926a05a0d61ada33684918a5d427e619,pablodanswer,2024-11-01,address comments
beat_robustification,f5b38cd9362b4c7b84357a6fcf2bbeb4c1e7c8a8,pablodanswer,2024-10-30,nit
beat_robustification,63d1cc56acdeba0430d5da9f8b752cd470df865f,pablodanswer,2024-10-30,reorg
beat_robustification,4436bec97019893c256ee1750e28e3061edfd771,pablodanswer,2024-10-30,validate
beat_robustification,90b7198d53ec8b383051925de16a2818653c4fe3,pablodanswer,2024-10-30,add validated + reformatted dynamic beat acquisition
better_image_assistant_prompt,e9abbcdefdf21eef2000fc61342e4129bfd1498f,pablodanswer,2024-11-03,nit
better_image_assistant_prompt,89f51078690bed44b2809aa5229f39b4d543d88e,pablodanswer,2024-11-02,k
better_image_assistant_prompt,6972874aac31dcccd4ff739484b6a5b563e62405,pablodanswer,2024-11-02,slight upgrade to prompts
bg_processing_improvements,48d24860e6f5401a265951b8e49e900ed6e40f63,pablodanswer,2024-11-03,improvements
branding_update,12bbf2ad972a1f8887e5f5eb427b88261ef5097c,pablodanswer,2024-10-28,add additional configuration options
bugfix/async,8b9e1a07d55b3f090d168768a74d09d60ba19649,pablodanswer,2024-11-11,typing
bugfix/async,b6301ffcb9bb35f6d73c28ffd502bfb01f49272a,pablodanswer,2024-11-11,spacing
bugfix/async,490ce0db18df25625446a4abe163790b96431645,pablodanswer,2024-11-11,cleaner approach
bugfix/async,b2ca13eaae905af768519a62a38d3d84c239cba8,pablodanswer,2024-11-11,treat async values differently
bugfix/curator_interface,a7312f62366cff5243e4b85c5c47e33e5da29f5c,pablodanswer,2024-11-21,remove values
bugfix/curator_interface,85e08df5219f0e2e793beb65a1ce4dc36f2481d4,pablodanswer,2024-11-21,update user role
bugfix/curator_interface,937a07d705a8620f47336c1c6c125ae6b025a950,pablodanswer,2024-11-21,update
bugfix/curator_interface,1130d456aaa6ea38aeeacd234ab82504e3c5fc68,pablodanswer,2024-11-21,update
bugfix/curator_interface,cf4cda235ce02bfdea1f1cd17ad4f6a2e0f7f9f7,pablodanswer,2024-11-21,update config
bugfix/curator_interface,5a07f727c0563061398f50ed253f1efc2f83c176,pablodanswer,2024-11-21,mystery solved
bugfix/index_attempt_logging_2,209514815547074a31b3121bf47e7b1e350e817d,Richard Kuo (Danswer),2024-11-21,Move unfenced check to check_for_indexing. implement a double check pattern for all indexing error checks
bugfix/indexing_redux,0c068c47c2cb729a0450910f0f6b6d04b340b131,Richard Kuo (Danswer),2024-11-17,Merge branch 'main' of https://github.com/danswer-ai/danswer into bugfix/indexing_redux
bugfix/indexing_redux,1dfde97a5a52a8c4c3996d14348e9fffe6073743,Richard Kuo (Danswer),2024-11-14,refactor unknown index attempts and redis lock
bugfix/indexing_redux,5d95976bf1bc13caaa21655777e8e84efb682cd2,Richard Kuo (Danswer),2024-11-14,raise indexing lock timeout
bugfix/pagination,1a009c6b6a3d52302e5bbdec20c75ce15a678f5c,pablodanswer,2024-11-07,minor update
bugfix/pagination,e8cd2630e2bee96496b30f637a169df863e11495,pablodanswer,2024-11-06,minor update
bugfix/pagination,d835de1f5219248f164221464b257b5a44c6ed8f,pablodanswer,2024-11-06,fixed query history
bugfix/pagination,c6d35a8ad6be86c28ba8d3645d171d22390cc9fa,pablodanswer,2024-11-06,update side
bugfix/pagination,a5641e5a5e001dc3a4740bfcdd53c9fafb64c20a,pablodanswer,2024-11-06,fix pagination
bugfix/pruning,c27308c812f536a5e7410a73b0940f63330fb3fb,pablodanswer,2024-10-30,clarity
calendar_clarity,7edb205a6837d0328062ecbb9a9318dd6e27f9d5,pablodanswer,2024-11-22,minor calendar cleanup
callout_clarity,a8787b7be8e66d06edeaa997390ca118d1abaaac,pablodanswer,2024-11-04,k
callout_clarity,585e6b7b2fec35e17f91d55354c48631cb773ca7,pablodanswer,2024-11-04,k
callout_clarity,bdbfb62946b644ddf011a2e03a1a9b2158899f36,pablodanswer,2024-11-04,ensure props aligned
cascade_search,9c975d829d0b67d245da18e905781c22578f413f,pablodanswer,2024-10-30,minor foreign key update
clean-jira-pr,1eec84a6693add96e571eca96cf181bd32ab42f4,hagen-danswer,2024-11-20,cleanup
clean-jira-pr,658951f66dfe2cb97e20f590f71f46bcb8b1f1ef,hagen-danswer,2024-11-20,more cleanup of Jira connector
clean-jira-pr,da153ef5179592cfa11f9ce271c187739e242432,hagen-danswer,2024-11-20,fixed testing
clean-jira-pr,82118e0837d486e8d66fb7eb26d523c4fa79f8a2,hagen-danswer,2024-11-20,Added Slim connector for Jira
cloud_auth,bcce7733aa5bb2f3af2842d8e9938af6c5597c9c,pablodanswer,2024-11-11,typing
cloud_auth,eeeb84c66bf1d5aefd16ad20f9727a61b2ddc5f3,pablodanswer,2024-11-11,minor modification to be best practice
cloud_auth,a7b13762264b67ac720db21552c3a6c0f42e7c9d,pablodanswer,2024-11-11,k
cloud_auth,1c020d11c4d4257732a7fca17eecbde979e42804,pablodanswer,2024-11-11,minor clarity
cloud_auth,cb6fad26b8ec9f77a7bc82a94da8e6748bbc20f0,pablodanswer,2024-11-11,cloud auth referral source
cohere,444ad36c0801810fadfcc4a0c1f355004f59e317,pablodanswer,2024-11-13,config
cohere,227faf87c690ef9b30fbe79b1582ad36a4ec95b2,pablodanswer,2024-11-11,update config
cohere,1bf33a6b7ae5fc84a779c3c6d9d8c514523b5af9,pablodanswer,2024-11-11,ensure we properly expose name(space) for slackbot
cohere,15bd1d0ca6461ba7a9a1d2f468aea5f981e8750e,pablodanswer,2024-11-11,update configs
cohere,ce48d189aa6f9f83a6a62b353ea04bd16659d0e2,pablodanswer,2024-11-11,update
cohere,43b82e50cfdf9a1a260bde312a7e7e4f2929425b,pablodanswer,2024-11-11,update
cohere,1d06787e1d5734c25e703ba4f4b2d7df6c8bac01,pablodanswer,2024-11-11,minor improvement
cohere,8386d30f9230565136d2133b7c5cbcb623980761,pablodanswer,2024-11-11,finalize
cohere,374e51221881fcd722876efa9f53080342f3dcbd,pablodanswer,2024-11-10,add cohere default
cohere_default,8f67dc310fa1177430b8a47cfa685b4de4af105c,pablodanswer,2024-11-11,update
cohere_default,ad7d18968075a932a4539ac37d5432fa99fe99f4,pablodanswer,2024-11-11,minor improvement
cohere_default,72730a5ba3cef93523bfba9ee63994e5a1c0d63f,pablodanswer,2024-11-11,finalize
cohere_default,df8bd6daf46c1fce951efb50aaeff5e7cbc4b74a,pablodanswer,2024-11-10,add cohere default
cohere_default,6b78ab0a99bb5727df35c1dfc23c5e39008211ae,pablodanswer,2024-11-11,Cleaner EE fallback for no op (#3106)
cohere_default,e97bf1d4e28bcbf32080c3a339d0e2ac3d6d0253,Chris Weaver,2024-11-11,New assistants api (#3097)
cohere_default,293dbfb8eb7b3ac4d2878b7a72068b829b9e3469,rkuo-danswer,2024-11-09,re-enable helm (#3053)
cohere_default,f4a61202a7b6de8a011d67896b16e14f94eb981a,pablodanswer,2024-11-09,Silence auth logs (#3098)
cohere_default,53f9d94ceb7a6a8da2a0c2d94fee6971adb29bbf,pablodanswer,2024-11-11,revert
cohere_default,5058d898b8532881c517e14c22ca5c32784288fe,pablodanswer,2024-11-11,update some configs
cohere_default,bc7de4ec1b9832059426ed74f2755c9548852459,pablodanswer,2024-11-11,moderate slackbot switch
cohere_default,3ad98078f5205c2df5a3ea96cc165b982256a975,pablodanswer,2024-11-10,finalized keda
cohere_default,0fb12b42f10bae3d8633717f763fa42271349442,pablodanswer,2024-11-10,minor update
cohere_default,158329a3cc659d666328dac36bac7c5ffa87e084,pablodanswer,2024-11-10,finalize slackbot improvements
cohere_default,7f1a50823baf0f5bbab89587e7df6f03fe552e27,pablodanswer,2024-11-10,fix typing
cohere_default,0e76bcef454e0c09cb83ce91834730fdd084d930,pablodanswer,2024-11-10,add improved cloud configuration
csv_limits,45be7156c52d3b32799d67139998de7892c3490e,pablodanswer,2024-11-11,minor enforcement of CSV length for internal processing
custom_llm_display_fix,01efa818bcc82eef92457cbe4acd6c3c2fab60f0,pablodanswer,2024-11-21,Revert "clean horizontal scrollbar"
custom_llm_display_fix,dec279a9602825243ed7df4b7a5592ccd267bddd,pablodanswer,2024-11-21,update migration
custom_llm_display_fix,4b03c0e6e24b36725f4501edb81f46dc2812ff4f,pablodanswer,2024-11-21,k
custom_llm_display_fix,17eb0d3086b6249c806f51a0a45c78c927249bcd,pablodanswer,2024-11-21,ensure proper migration
custom_llm_display_fix,0f638229f56966e480d3479de5f9a3108750afc8,pablodanswer,2024-11-20,provider fix
custom_llm_display_fix,fa592a1b7a69897110a928a222b19eaef3b7267a,pablodanswer,2024-11-21,clean horizontal scrollbar
danswer_authorization_header,856c2debd98187b28e341940dafeb97eed81cad9,pablodanswer,2024-10-29,add danswer api key header
default_keys,4907d2271950fb2f45c56c21e6d641b616c02ad7,pablodanswer,2024-11-03,naming
default_keys,8766502f6dd125a43ef6cc9e9a20cec1c8f3ae8a,pablodanswer,2024-11-03,add cohere as well
default_keys,589e141bc9d2ed30c467257596f346c4824934a7,pablodanswer,2024-11-03,add default api keys for cloud users
default_prompts,d1926d47b5b65aeb01c103d7c44fa5bb63e4fb1c,pablodanswer,2024-11-06,update default live assistant logic
default_prompts,f457bdb49128b010da04612f598ef0e0810dcf7c,pablodanswer,2024-11-06,update starter message
default_prompts,00adc2d0e0cd23d7c9664b68f4caa7859bdb4eeb,Yuhong Sun,2024-11-06,touchup
default_prompts,f56b139d8dbcc44248080719fa9f3c81afdf1e81,pablodanswer,2024-11-06,nit
default_prompts,09cd3c6c2792b94e7db220a921095f0af8054e0c,pablodanswer,2024-11-06,minor update to refresh
default_prompts,32a688b6277b918afd7497f483ef457b85dc9d05,pablodanswer,2024-11-06,udpate refresh logic
default_prompts,719fb914f5094f3a35095cbb8e0c75aa4f0d0c45,pablodanswer,2024-11-06,update ux + spacing
default_prompts,7c5df1cf69e8c890cc02e27b2ba2edeac9c3c22a,pablodanswer,2024-11-05,fallback to all assistants
default_prompts,8a900b732dd67215718e07273cc62c881b6786e4,pablodanswer,2024-11-03,formating nits
default_prompts,eab00d7247cf0853b6a83888ae581c63c8c59981,pablodanswer,2024-11-03,nit
default_prompts,9460009ed306a135110bc88cc6b75f3779df96d0,pablodanswer,2024-11-03,update typing
default_prompts,4f1aa7f1ff04debb39b6ea8ea79de3d01254f4a5,pablodanswer,2024-11-03,validate
default_prompts,c97b8938920b4406477f252b01a1e561b3b24f31,pablodanswer,2024-11-03,k
default_prompts,074334e20d2208f52bbf00bda76e3e79494977c2,pablodanswer,2024-11-03,update user preferences
default_prompts,85b50855c0778fb34fc32441e7c3791b905485fa,pablodanswer,2024-11-03,update persona defaults
default_schema_slack,87931b759feb1431ce96090bd390e3e28cb30208,pablodanswer,2024-11-08,adjust default postgres schema for slack listener
detailed_filters,bde4b4029af5334699e226afbd77ba0753a04797,pablodanswer,2024-11-18,update date range filter
detailed_filters,d77629fc318db896c5b9f53c45c33dfad5038e6b,pablodanswer,2024-11-05,clarity updates
detailed_filters,0038c32213681db3dab29dee2f21324743fc6d94,pablodanswer,2024-11-05,add new complicated filters
double_auth,a7173eb689100c9abd1b68aeab890a992da32cbc,pablodanswer,2024-10-27,ports
double_auth,45170a28fc8417b6f0de7ac97c643a36e4c03284,pablodanswer,2024-10-27,fix nagging double auth issue
dropdown,c29beaf403a7722e1ee638cc50c8551931f8c5d9,pablodanswer,2024-11-13,combobox
dropdown,46f84d15f8af635123557056542829a14d5fca60,pablodanswer,2024-11-13,content scroll differences
dropdown,e8c93199f24cac94b73e8ac923b43b3159af74c9,pablodanswer,2024-11-13,minor dropdown fix
fallback_context,3734e683e1719d9f6abe9e80e475a4c2c275cdaf,pablodanswer,2024-11-07,ensure proper attribution
fallback_context,886e8c7b6e30328c1d95277f22dde48af2cb1a99,pablodanswer,2024-11-07,update comments
fallback_context,4916d66df0ec3d348caafe6c40c5e16fb28381b1,pablodanswer,2024-11-07,clearer
fallback_context,6ae512fc4e909a52e90c548f9674b60d536bdc54,pablodanswer,2024-11-06,update typing
fallback_context,159c8ee22df75036d3db59c292fa13632982b427,pablodanswer,2024-11-06,add sentinel value
feat/cert_clarity,35307d4f384039ef0df8f979e34912ab1cd4e201,pablodanswer,2024-10-30,first pass
feat/cert_clarity,e6b9ebc198973a84dc9412302e6b98a24b0a2ce3,pablodanswer,2024-10-29,ensure functionality
feat/cert_mount,a32e34b5571d60a4b8b8a1d62328b9a77fb0ad27,pablodanswer,2024-10-30,simplify
feat/cert_mount,2dc7b08a9cb73164479c03dfd4b4fed162029399,pablodanswer,2024-10-30,first pass
feat/cert_mount,e6b9ebc198973a84dc9412302e6b98a24b0a2ce3,pablodanswer,2024-10-29,ensure functionality
feat/certificate,152e8c422bb9c6bf7b08221dcfe44a60d7a2de22,pablodanswer,2024-11-01,nit
feat/certificate,45498a5f51a8efa9955c18fe5cb53b2d0f41ebd3,pablodanswer,2024-10-31,k
feat/certificate,9ecf237435cd8a5b0ac60ebaca8d26840ab0abed,pablodanswer,2024-10-31,minor clean up
feat/certificate,fed2c5666cb54d3edcfe14319e3f7d7befbed78e,pablodanswer,2024-10-30,remove now unneeded COPY command
feat/certificate,56b3f2fa999db64aec3fd069b1de2bc77d00a6b6,pablodanswer,2024-10-30,simplify
feat/certificate,7d03f3aa8cb8a4ada9af8551db62364eb8e2c217,pablodanswer,2024-10-30,first pass
feat/silence_unauth_logs,d2ba35ca45ca77701075813fd64858b04c4e9eb2,pablodanswer,2024-11-09,k
feat/silence_unauth_logs,923176ef6e1e1941f8dc461d1d7b1d76f88c4e1b,pablodanswer,2024-11-09,remove unnecessary line
feat/silence_unauth_logs,888ce3e0ced3a63c57f7ec2221059d0012e772c2,pablodanswer,2024-11-09,silence auth logs
feat/tenant_posthog,35ed1d2108dd1a28cf63ba45f776d8a25b91b5d7,pablodanswer,2024-10-27,nit
feat/tenant_posthog,d1a9e0f6c4618aa4a7e5029dbbeb6179a40ff5c7,pablodanswer,2024-10-27,distinguish tenants in posthog
fix-answer-with-specified-doc-ids,5fbcc70518bd5d1be00d6595f3fc690f81c52f21,pablodanswer,2024-11-01,minor logging updates for clarity
fix-answer-with-specified-doc-ids,7db0de9505c3510a4db76e98a47d5b079056dc93,pablodanswer,2024-10-31,minor typo
fix-answer-with-specified-doc-ids,18b4a8a26331bc013b49e486e2bf82c5ce4bfe73,pablodanswer,2024-10-31,fix stop generating
fix-answer-with-specified-doc-ids,98660be16459038b438d12616bd6f00dde418b95,Weves,2024-10-31,Fix UT
fix-answer-with-specified-doc-ids,3620266bddfbf1fca309ff2fe97f72bda7462979,Weves,2024-10-31,Remove unused exception
fix-answer-with-specified-doc-ids,2132a430cc64abd869632c0f55a35bdc42b30be9,Weves,2024-10-31,Fix image generation slowness
fix-answer-with-specified-doc-ids,24e34019ce25314c5e749d38dd0895a1c3d5141e,Weves,2024-10-31,More testing
fix-answer-with-specified-doc-ids,3cd4ed5052277428dc06343f53e0e6486af26208,Weves,2024-10-31,Testing
fix-answer-with-specified-doc-ids,200bb96853d6d96a99093f6e915fe9721ab5c6b3,Weves,2024-10-31,Add quote support
fix-answer-with-specified-doc-ids,5a0c6d003607dfb9a7445a6a87df9a6062b73bc6,Weves,2024-10-02,Fix
fix-openai-tokenizer,566e4cfd0f39db0a1fbc7c7fae040bcf98482f62,pablodanswer,2024-11-08,minor updates
fix-openai-tokenizer,3b09f3e53e7a8f948cd36255fd53423d7b5827d0,pablodanswer,2024-11-07,minor organizational update
fix-openai-tokenizer,75d5e6b8b6e81c77063fd79b4cfe532366da723a,pablodanswer,2024-11-07,minor update to ensure consistency
fix-openai-tokenizer,362bb3557246e86de131c223acdf2adf17fb14e4,pablodanswer,2024-11-06,nit
fix-openai-tokenizer,6d100d81d284dc98143bb8c94c16c25d64c56633,pablodanswer,2024-11-06,clean up test embeddings
fix-openai-tokenizer,c5be5dc4c9710b684d0954a5224a75c090befe94,Yuhong Sun,2024-11-05,k
fix_missing_json,1f6cc578c425f8bbe3b320f65f191f09c8fcfa0b,pablodanswer,2024-11-20,k
fix_missing_json,d95b7d6695ba087f0b9da9bdf245f7c34e503499,pablodanswer,2024-11-20,k
fix_missing_json,b75d4af102739a2b9e3ec2dff301f4affd08b3e5,pablodanswer,2024-11-20,remove logs
fix_missing_json,559d9ed6d4fd27de8941a104c9c83322a75abea6,pablodanswer,2024-11-20,k
fix_missing_json,9c900d658979341ce0d8c3c2eb87e7cfafd8ccf9,pablodanswer,2024-11-20,initial steps
formatting_niceties,e2b47fa84c828e1c9f6ab0dd510e2eb83faeb877,pablodanswer,2024-11-20,update styling
formatting_niceties,e4916209d6c9f4ed5765d7ae20f77903ffd93e9b,pablodanswer,2024-11-20,search bar formatting
graceful_failure,03245a4366adeb1668a337b37d070d09922f5531,pablodanswer,2024-10-28,fail gracefully on provider fetch
gtm,acff050f6b2bec0368571e0936f9342b7bcd3919,pablodanswer,2024-11-20,update github workflow
gtm,b96260442d02c9298ed110ba97f5e9eff1ed9100,pablodanswer,2024-11-20,add gtm for cloud build
gtm_v2,4f96ddf9e69923ef1209c5586c73eb40b0418aaa,pablodanswer,2024-11-21,quick fix
horizontal_scrollbar,fa82e8c74cac273563badadec0c04176575ffbbb,pablodanswer,2024-11-21,account for additional edge case
horizontal_scrollbar,fa592a1b7a69897110a928a222b19eaef3b7267a,pablodanswer,2024-11-21,clean horizontal scrollbar
improved_cert,3b19c075ad6e8930d785943b24e46b2c08555c3a,pablodanswer,2024-11-07,minor improvements
improved_cloud,379d569c61801f0c093b7474f888392aa2cb1249,pablodanswer,2024-11-11,include reset engine!
improved_cloud,53f9d94ceb7a6a8da2a0c2d94fee6971adb29bbf,pablodanswer,2024-11-11,revert
improved_cloud,5058d898b8532881c517e14c22ca5c32784288fe,pablodanswer,2024-11-11,update some configs
improved_cloud,bc7de4ec1b9832059426ed74f2755c9548852459,pablodanswer,2024-11-11,moderate slackbot switch
improved_cloud,3ad98078f5205c2df5a3ea96cc165b982256a975,pablodanswer,2024-11-10,finalized keda
improved_cloud,0fb12b42f10bae3d8633717f763fa42271349442,pablodanswer,2024-11-10,minor update
improved_cloud,158329a3cc659d666328dac36bac7c5ffa87e084,pablodanswer,2024-11-10,finalize slackbot improvements
improved_cloud,7f1a50823baf0f5bbab89587e7df6f03fe552e27,pablodanswer,2024-11-10,fix typing
improved_cloud,0e76bcef454e0c09cb83ce91834730fdd084d930,pablodanswer,2024-11-10,add improved cloud configuration
indent,95ded1611c7d2199438b863c54f327eba632a5b0,pablodanswer,2024-10-27,add indent to scan_iter
indexing_improvements,ff8e5612c9cd67a642314632658f5a55814f7c5e,pablodanswer,2024-11-05,minor
individual_deployments,fe83d549a356d802ee1e693c8739db7563ed5ddc,pablodanswer,2024-11-02,add k8s configs
individual_deployments,0e42bb64579328d18ff01049a7aaa2a0b49be142,pablodanswer,2024-10-31,remove unecessary locks
individual_deployments,41ec9b23309a3bbfe598018832fbf5d3fe91c5e1,pablodanswer,2024-10-31,minor
individual_deployments,9e4e848b98f35056dcf3df6f0815651e9fe56eba,pablodanswer,2024-10-30,initial removal of locks!
individual_deployments,1407652e3b5825fae7a90a0d5818ef67ec44f50d,pablodanswer,2024-10-30,nit
individual_deployments,2758ff7efd4dd47e891ef77c05985d6407e4cbd7,pablodanswer,2024-10-30,reorg
individual_deployments,0718d5740b714a0222eb2520c6c2f0e70c095aa1,pablodanswer,2024-10-30,validate
individual_deployments,922f3487fbd7585ce6a7251ff0644cbeca921133,pablodanswer,2024-10-30,add validated + reformatted dynamic beat acquisition
json_account,f4b3f8356a5911cb4a0610773b824bc6e6eb8c73,pablodanswer,2024-11-14,fix single quote block in llm answer
k8s_jobs,7124ce0b9a56f0b5dc45a733fe95cd581f9894a4,pablodanswer,2024-11-02,improve workers
k8s_jobs,10ab08420479ab056d807cbf0942c67a1dd6e7c7,pablodanswer,2024-11-02,improved timeouts + worker configs
k8s_jobs,9bc478fa1b7f1418fadfbd067383d67b417472aa,pablodanswer,2024-11-02,k
k8s_jobs,930e392d69ecd1058a73c0dfb0e2e021232921fc,pablodanswer,2024-11-02,update config
k8s_jobs,6d14ceeadf958cd1e7600b667b69ce0f3bf86830,pablodanswer,2024-11-02,k
k8s_jobs,efdf95eb232870f83677b2b424ffaa117463649a,pablodanswer,2024-11-02,add k8s configs
k8s_jobs,f687d3987cd9514f9fe587e563729ce27b8ff224,pablodanswer,2024-11-02,k
k8s_jobs,af4c9361a926867a992239daa283900300d7247e,pablodanswer,2024-11-02,nit
k8s_jobs,f74366bbd8699f9987ed8229e3368a5d7be71a53,pablodanswer,2024-11-01,update
k8s_jobs,734fcdca98aa5eeaa99d9936fa8db716eda93ad7,pablodanswer,2024-10-31,remove unecessary locks
k8s_jobs,dbc44315ad3cbf79509bd14a4025c2ecc4a6f86e,pablodanswer,2024-10-31,minor
k8s_jobs,d80049262406a0c30e9ad0fc647bddb23cbfbad9,pablodanswer,2024-10-30,initial removal of locks!
k8s_jobs,5646675ae094f39f3e7ead937cbcfd3fb7c7f24f,pablodanswer,2024-10-30,add validated + reformatted dynamic beat acquisition
k8s_jobs,01bdcad4f038c5d4c642ca14680593988c28bf96,pablodanswer,2024-11-02,ensure versioned apps capture
k8s_jobs,0994ac396612855ecac9afbce6ef9b8bd7e54742,pablodanswer,2024-11-01,typing
k8s_jobs,8ff8a88d5b6ad2d02a653f959c39cfeeda9ef54c,pablodanswer,2024-11-01,update
k8s_jobs,e11aee38ba5946a1453693fdc3bbd20d703d9e10,pablodanswer,2024-11-01,address comments
k8s_jobs,53c6d16c3cdc7ffb3eebd3e7b73474025ef6cafc,pablodanswer,2024-10-30,nit
k8s_jobs,a85b2a9745587c4e783e040496dee1ac83e492c9,pablodanswer,2024-10-30,reorg
k8s_jobs,4ace16c905b47b97990de0ab0ef3c029870f9be0,pablodanswer,2024-10-30,validate
k8s_jobs,89293ecc730387a864be6efc01230fedffdc7b82,pablodanswer,2024-10-30,add validated + reformatted dynamic beat acquisition
lenient_counting,4836a74e1e2789051b6d1454b7f2bd22daced61a,pablodanswer,2024-11-13,nit
lenient_counting,f7514011ef4cf62d80ab9afe170320b2e4135da2,pablodanswer,2024-11-13,lenient counting
max_height_scroll,c354912c704b0aa31737bfd41d4bd8f0c7d85769,pablodanswer,2024-11-20,ensure everythigng has a default max height in selectorformfield
migrate_tenant_upgrades_to_data_plane,572298aa8920d51320db5fff518f66fee6e42117,pablodanswer,2024-11-05,nit
migrate_tenant_upgrades_to_data_plane,40b55197ac8336e6ef081074ea65fc4b0cbeb27c,pablodanswer,2024-11-05,minor config update
migrate_tenant_upgrades_to_data_plane,4b9d868ecb78dedd3816ae7bc28e8f856881c6f4,pablodanswer,2024-11-04,minor pydantic update
migrate_tenant_upgrades_to_data_plane,1295c3a38e827024d89ba56fe3c846fcbe204bc0,pablodanswer,2024-11-04,ensure proper conditional
migrate_tenant_upgrades_to_data_plane,f2ac56d80213125f1f5d465b21a6a2e4b47566a2,pablodanswer,2024-11-04,improve import logic
migrate_tenant_upgrades_to_data_plane,fcdb3891bf196ef7e1f10e9d7a0a77512c752710,pablodanswer,2024-11-04,update provisioning
migrate_tenant_upgrades_to_data_plane,9a5d60c9a3df0891a769615e540af8332c0b416c,pablodanswer,2024-11-04,simplify
migrate_tenant_upgrades_to_data_plane,b512f35521bcb8c8ee9e748dae493028093f05bb,pablodanswer,2024-11-04,k
migrate_tenant_upgrades_to_data_plane,b872b7e778f7e0bd92e6eac9317e74e3157c12e1,pablodanswer,2024-11-04,minor clean up
migrate_tenant_upgrades_to_data_plane,b7847d16686419fe024d361cfaf2212a4decc397,pablodanswer,2024-11-04,minor cleanup
migrate_tenant_upgrades_to_data_plane,2f03ddb1bedada32576cb52bfa2cf36074fbb9fe,pablodanswer,2024-11-04,functional but scrappy
migrate_tenant_upgrades_to_data_plane,dc001a3b7b48df659bc64c2486ceded5eea3ed0f,pablodanswer,2024-11-04,add provisioning on data plane
minor,c7d58616b5943768e2e581751f4ede7a4f3292da,pablodanswer,2024-11-22,k
minor,351ee543a0773ecb6acf99f3888dd648091d7f85,pablodanswer,2024-11-22,k
minor_fixes,ea58c3259505aaa53c66343243667959ca79ecb8,pablodanswer,2024-11-05,minor changes
minor_fixes,cbf577cf4623c8352664058d21b1a80ae7ab4299,pablodanswer,2024-11-05,nit
minor_fixes,20d2301a7e594ad803c0486d63d056653c5b8c83,pablodanswer,2024-11-05,minor config update
minor_fixes,fdf9601375464f3e7f49d4472dbc3eeacd1eab8f,pablodanswer,2024-11-05,form
minor_fixes,7421328695641e943c7083639483fa36e4e9cfdb,pablodanswer,2024-11-04,minor pydantic update
minor_fixes,d600d63876e7100894c47a7dc9120b689a55521f,pablodanswer,2024-11-04,ensure proper conditional
minor_fixes,e7cae46867207789088df6611dbafc78650c8ace,pablodanswer,2024-11-04,improve import logic
minor_fixes,b0894320f99fea9cb13a94a5fbb5a1e9523ef460,pablodanswer,2024-11-04,update provisioning
minor_fixes,e623b494568d0bcc74937628984b6cc574aed9a6,pablodanswer,2024-11-04,simplify
minor_fixes,99d91bd658e812996bcc03d0be29e57277b8fb67,pablodanswer,2024-11-04,k
minor_fixes,77c180be0f8e91b9f997b90f631e18d41ba8fde2,pablodanswer,2024-11-04,minor clean up
minor_fixes,baaed72297ef248dc5dc422f0e5adcdff7599416,pablodanswer,2024-11-04,minor cleanup
minor_fixes,ab7fa7f6d0c3f1a59d97b5450262cb4ef6f8481d,pablodanswer,2024-11-04,functional but scrappy
minor_fixes,acf3ede8b4baf044391176aacd3bba6f80bb4b3f,pablodanswer,2024-11-04,add provisioning on data plane
minor_nits,bfcd418ecd9523376c605263565a9714ceeb3a18,pablodanswer,2024-11-09,k
minor_nits,5dfcb94964f977bb603865858e1e6aa6582454fd,pablodanswer,2024-11-09,update colors
minor_nits,a287cd94cd8090fefee7c1d20cc494b894bf39c1,pablodanswer,2024-11-09,nit
minor_nits,2d9586b059cfb1cb8e1f6c0fccc696af6ba8873d,pablodanswer,2024-11-08,nit
minor_nits,5dcc3692a7748ed20d49adef5f7672d45f600a4a,pablodanswer,2024-11-08,moderate component fixes
minor_slack_fixes,425a678a5350ad5716c3efd6a60c78f6a9c2738e,pablodanswer,2024-11-20,reset time
minor_slack_fixes,14adbcb497365f9e93c21aeb0476cffc72cab643,pablodanswer,2024-11-20,update slack redirect + token missing check
misc_color_cleanup,83c8f04e5a183a289f76b809d9aabdd4ea0e664b,pablodanswer,2024-11-03,formatting
misc_color_cleanup,334ff6fb5ab2e450e1e0709be16870b1ed07dae3,pablodanswer,2024-11-03,ensure tool call renders
misc_color_cleanup,94262264e768cdc28ffe4fc31b2947c0cf3774a3,pablodanswer,2024-11-03,ensure tailwind config evaluates properly + update textarea -> input
misc_color_cleanup,40cb9e9cdb4561eac777ede08ace88219d12ad96,pablodanswer,2024-11-02,additional minor nits
misc_color_cleanup,2e81962a74567c0c510d911a22aee385c56b3207,pablodanswer,2024-11-02,nit
misc_color_cleanup,76ca7eb3f2cf2408fee330f540987e6238cd632e,pablodanswer,2024-11-01,nit
misc_color_cleanup,7269b7a4aa986dbba654be4b375bea1d9334fe01,pablodanswer,2024-11-01,additional nits
misc_color_cleanup,4726a10fd7503882554d1dfaf1541657ffb45a04,pablodanswer,2024-11-01,misc color clean up
mobile_scroll,eca41cc514446a2c0b2c756add3164462fb2c49d,pablodanswer,2024-11-11,improved mobile scroll
modals,8093ceeb45088c813fbb117302738b3d225c2f8b,pablodanswer,2024-10-28,formatting
modals,3d0ace1e450ac6d7271ddedc2ec122a2647be7df,pablodanswer,2024-10-28,minor nits
modals,553aba79dc41b928c163a83481b202ad56805aae,pablodanswer,2024-10-28,update based on feedback
modals,da038b317a0b5185ccc32297b01fcaa97ffbb429,pablodanswer,2024-09-21,remove logs
modals,6769dc373faf7576c2d0ac212735b88eae755293,pablodanswer,2024-09-21,minor udpate to ui
modals,b35e05315c4c506da87524fe788a9cf5aacb7375,pablodanswer,2024-09-20,use display name + minor updates to models
modals,7cfd3d2d442255616ec5c477dc4b3eb0b2cad1ed,pablodanswer,2024-09-20,cleaner cards
modals,b2aa1c864b20274386a1bbe699a3ef7e094bd858,pablodanswer,2024-09-20,slightly cleaner animation
modals,d2f8177b8f1b9be8eebce520204018e6be59b03c,pablodanswer,2024-09-20,cleaner initial chat screen
more_theming,1744d29bd6f6740fb20bbbf8b5651cd60edbf127,pablodanswer,2024-11-21,k
more_theming,fa592a1b7a69897110a928a222b19eaef3b7267a,pablodanswer,2024-11-21,clean horizontal scrollbar
multi_api_key,67e347a47fd2e4aa9efe7b17c7b177166c893d10,pablodanswer,2024-10-31,clean
multi_api_key,3fb6e9bef96da888fa366a16f102358eb8e990e0,pablodanswer,2024-10-31,nit
multi_api_key,c4514fe68f58a03da0c3c3efae78ad23e2eb88c9,pablodanswer,2024-10-30,organization
multi_api_key,5b19209129542b885e123a51ce3da93b741d49d2,pablodanswer,2024-10-30,basic multi tenant api key
new_seq_tool_calling,59e9a33b30ece8d41340787d9d9a82e9a07a8f24,pablodanswer,2024-11-18,k
new_seq_tool_calling,6e60437c565a185475c715efbbef6caca1cfc2fb,pablodanswer,2024-11-17,quick nits
new_seq_tool_calling,9cde51f1a2ca1df2f753c9b6d7910b8f9623d8a4,pablodanswer,2024-11-07,scalable but not formalized
new_seq_tool_calling,8b8952f117e4d05bb484bc5dec1c12d4fbbafcca,pablodanswer,2024-11-07,k
new_seq_tool_calling,dc01eea610817ab821ded6e5ce584f81fe1ba065,pablodanswer,2024-11-07,add logs
new_seq_tool_calling,c89d8318c093c860037a839494876eff649f5d26,pablodanswer,2024-11-07,add image prompt citations
new_seq_tool_calling,3f2d6557dcb5964dbb9ed88ade743f74a4285411,pablodanswer,2024-11-07,functioning albeit janky
new_seq_tool_calling,b3818877afc406f9500e7bef1f2b7e233faf76fa,pablodanswer,2024-11-07,initial functioning update
new_theming_updates,102c264fd06232bbc4c7a23615add5cf7c0618be,pablodanswer,2024-11-21,minor updates
new_theming_updates,1744d29bd6f6740fb20bbbf8b5651cd60edbf127,pablodanswer,2024-11-21,k
new_theming_updates,fa592a1b7a69897110a928a222b19eaef3b7267a,pablodanswer,2024-11-21,clean horizontal scrollbar
nit,c68602f456c66279e760bd25067cfdfe03841f8a,pablodanswer,2024-11-10,specifically apply flex none to in progress!
nit_mx,c5147db1ae5387e8fd5672779689485142fb1b1d,pablodanswer,2024-11-20,formatting
nit_mx,3a6a74569544ee7d74c6b62a5a56730331838095,pablodanswer,2024-11-20,ensure margin properly applied
nit_redis,85843632c5fe61a425d425feef6480c639471af7,pablodanswer,2024-10-28,add srem and sadd to tenant wrapper
no_locks!,f687d3987cd9514f9fe587e563729ce27b8ff224,pablodanswer,2024-11-02,k
no_locks!,af4c9361a926867a992239daa283900300d7247e,pablodanswer,2024-11-02,nit
no_locks!,f74366bbd8699f9987ed8229e3368a5d7be71a53,pablodanswer,2024-11-01,update
no_locks!,734fcdca98aa5eeaa99d9936fa8db716eda93ad7,pablodanswer,2024-10-31,remove unecessary locks
no_locks!,dbc44315ad3cbf79509bd14a4025c2ecc4a6f86e,pablodanswer,2024-10-31,minor
no_locks!,d80049262406a0c30e9ad0fc647bddb23cbfbad9,pablodanswer,2024-10-30,initial removal of locks!
no_locks!,5646675ae094f39f3e7ead937cbcfd3fb7c7f24f,pablodanswer,2024-10-30,add validated + reformatted dynamic beat acquisition
no_locks!,01bdcad4f038c5d4c642ca14680593988c28bf96,pablodanswer,2024-11-02,ensure versioned apps capture
no_locks!,0994ac396612855ecac9afbce6ef9b8bd7e54742,pablodanswer,2024-11-01,typing
no_locks!,8ff8a88d5b6ad2d02a653f959c39cfeeda9ef54c,pablodanswer,2024-11-01,update
no_locks!,e11aee38ba5946a1453693fdc3bbd20d703d9e10,pablodanswer,2024-11-01,address comments
no_locks!,53c6d16c3cdc7ffb3eebd3e7b73474025ef6cafc,pablodanswer,2024-10-30,nit
no_locks!,a85b2a9745587c4e783e040496dee1ac83e492c9,pablodanswer,2024-10-30,reorg
no_locks!,4ace16c905b47b97990de0ab0ef3c029870f9be0,pablodanswer,2024-10-30,validate
no_locks!,89293ecc730387a864be6efc01230fedffdc7b82,pablodanswer,2024-10-30,add validated + reformatted dynamic beat acquisition
pinned,233713cde3516c05b857f878ff452c7714a91c48,pablodanswer,2024-11-20,hide animations
pinned,c0b17b4c51376d99685976430b9c4153c35e2ffa,Yuhong Sun,2024-11-20,k
pinned,15f30b00507e337ec9ee85624fc0cc574eb7b952,Yuhong Sun,2024-11-20,k
pinned,39d9df9b1b58dd2621bd575fa6c7ec720864d3bb,pablodanswer,2024-11-18,k
point_to_proper_docker_repository,9893301f113691111669bc2ab05a7c3abf19ae32,pablodanswer,2024-11-09,raise exits
point_to_proper_docker_repository,2344327112c01db8b2226dea0e02b2a8aa9ca875,pablodanswer,2024-11-09,ensure .github changes are passed
point_to_proper_docker_repository,caa2966ebc607fb8d2899ee78573ed2454983efb,pablodanswer,2024-11-09,robustify cloud deployment + include initial KEDA configuration
prev_doc,44f82fa928b79e7f51b41a0ee67cc93067880be3,pablodanswer,2024-11-22,k
prev_doc,2c7c9fbc130b8f0c717fa9fa4e5d2f6073f92be5,pablodanswer,2024-11-22,revert to previous doc select logic
prompting,4d8edad71ace767917a612dc628e266bd267d7d5,pablodanswer,2024-11-17,k
prompting,b1265619a27a849f2fbb9ba85b440a8b1b698d7d,pablodanswer,2024-11-16,add proper category delineation
prompting,dfe2c305866ad414143ce479b0601f8a61e615ea,pablodanswer,2024-11-05,post rebase cleanup
prompting,236c19230f5165e24ef557db53d863953faa714a,pablodanswer,2024-11-05,add auto-generated starter messages
proper_tenant_reset,4376bf773a81278ab92846673f193207be96052a,pablodanswer,2024-10-31,minor formatting
proper_tenant_reset,95f660db67b1327208fde82ae043511f2187452f,pablodanswer,2024-10-31,clear comment
proper_tenant_reset,1cdb5af9a1519ef8d63c94bf39256b00d4a8bdd2,pablodanswer,2024-10-31,add proper tenant reset
proper_token_default,4e0c048acba88f4c83d7c83af52bb0932234ddad,pablodanswer,2024-11-14,nit
proper_token_default,a0371a6750476fccc3b9892a7c58d72182c92507,pablodanswer,2024-11-14,minor logic update
proper_token_default,4f1c4baa80f7b747633bb3d528aed6de5b11f639,pablodanswer,2024-11-14,minor cosmetic update
proper_token_default,b6ef7e713a4eca3d65aa411604e8f67ad5efdd87,pablodanswer,2024-11-14,k
proper_token_default,66df9b6f7dae8bce61e35615d715ddefc6406614,pablodanswer,2024-11-14,improved fallback logic
proper_token_default,0473888ccdb5219cc39f275652bfeb72a420b5d9,pablodanswer,2024-11-13,silence warning
regenerate_clarity,3e232c39193b1c67bda9d732c1c2ee77ee14c721,pablodanswer,2024-10-29,minor udpate
regenerate_clarity,49e2da1c5c4fa34a8568ba0b3f08e79cd17cec93,pablodanswer,2024-10-29,add regeneration clarity
remove_ee,132802b295b805292f427039617a00e04dca2ae9,pablodanswer,2024-11-09,k
remove_ee,23883441f87ac3cd4e2ee717d2b033c3e7da9398,pablodanswer,2024-11-09,ensure callable
remove_ee,f43ed0b6b9391e66e210c5d90acf7a2409c3300b,pablodanswer,2024-11-09,finalize
remove_ee,fa42e5fa470e340e9b17fed5a3bd0e7976c6255e,pablodanswer,2024-11-08,finalize
remove_ee,625b5c52a044027b3d469286910a3cdd1c6bee02,pablodanswer,2024-11-08,update
remove_ee,239200dfc46f6cf18d7e689341b56a8baecdc0f6,pablodanswer,2024-11-08,update
remove_ee,5b70a8fa6f65d8513670c3bbbfd6cec13c76d530,pablodanswer,2024-11-08,general cleanup
remove_ee,14dfd6d29e178af9cfeb79ae20b7a846c5958966,pablodanswer,2024-11-08,move token rate limit to non-ee
remove_ee,dc4fdbb312881585fbc860b7aaff5adb9af4d8c5,pablodanswer,2024-11-08,finalize previous migration
|
||||
remove_ee,cfd3d90493fad0af75569c98b6cfc9effa37b471,pablodanswer,2024-11-08,move api key to non-ee
|
||||
remove_empty_directory,81e1ac918364467e3009eae376930199e3e2943f,pablodanswer,2024-10-28,remove empty directory
|
||||
remove_endpoint,14f57d6475d835da6dfacc4ebd254e25618b3100,pablodanswer,2024-10-31,remove endpoint
|
||||
rerender,1392f2454061914ac8c5f6302318a24064034a5b,pablodanswer,2024-11-21,k
|
||||
rerender,617e6d905363cc91ca154bba0f6f2a11888b35e6,pablodanswer,2024-11-21,unused
|
||||
rerender,da36e208cd53ae25a2c89a4cf0c598333898387a,pablodanswer,2024-11-21,clean
|
||||
rerender,36eee45a03c3227a9b070e18a043e16fe5179cb9,pablodanswer,2024-11-21,llm provider causing re render in effect
|
||||
reset_all,bde1510923d69ca0eb57340da6b59f9035e3de0a,pablodanswer,2024-11-04,ensure we reset all
|
||||
search_chat_rework,931461bc8404fc51f15f0b75ae77e3a772a05989,pablodanswer,2024-11-21,v1
|
||||
sequential_messages,5fbcc70518bd5d1be00d6595f3fc690f81c52f21,pablodanswer,2024-11-01,minor logging updates for clarity
|
||||
sequential_messages,7db0de9505c3510a4db76e98a47d5b079056dc93,pablodanswer,2024-10-31,minor typo
|
||||
sequential_messages,18b4a8a26331bc013b49e486e2bf82c5ce4bfe73,pablodanswer,2024-10-31,fix stop generating
|
||||
sequential_messages,98660be16459038b438d12616bd6f00dde418b95,Weves,2024-10-31,Fix UT
|
||||
sequential_messages,3620266bddfbf1fca309ff2fe97f72bda7462979,Weves,2024-10-31,Remove unused exception
|
||||
sequential_messages,2132a430cc64abd869632c0f55a35bdc42b30be9,Weves,2024-10-31,Fix image generation slowness
|
||||
sequential_messages,24e34019ce25314c5e749d38dd0895a1c3d5141e,Weves,2024-10-31,More testing
|
||||
sequential_messages,3cd4ed5052277428dc06343f53e0e6486af26208,Weves,2024-10-31,Testing
|
||||
sequential_messages,200bb96853d6d96a99093f6e915fe9721ab5c6b3,Weves,2024-10-31,Add quote support
|
||||
sequential_messages,5a0c6d003607dfb9a7445a6a87df9a6062b73bc6,Weves,2024-10-02,Fix
|
||||
shadcn,fe9be6669538db406a0c67959dcf4c91e8d4858b,pablodanswer,2024-10-28,button + input updates
|
||||
shadcn,7cccb775c1f1385bc50131f7d548519d95ac64cd,pablodanswer,2024-10-28,initialization
|
||||
sheet_update,98aa32055203d32a6d25eb1266deab6c58a176fb,pablodanswer,2024-11-21,update configuration
|
||||
sheet_update,026134805a1418f32b61973f55571756ba102c09,pablodanswer,2024-11-21,finalized
|
||||
sheet_update,36c1fc23d087f41db06e2680233a1ade7e65e594,pablodanswer,2024-11-21,k
|
||||
sheet_update,3a4804b4b7d54fd3db576b698b5187d8dc0aa5ca,pablodanswer,2024-11-20,add multiple sheet stuff
|
||||
sheet_update,5e326bcd08d019103f78da1c8a4a45ba4e401353,pablodanswer,2024-11-20,update sheet
|
||||
sheet_update,d7f2a3e112c00bda2813933d673fb18080d6de6d,pablodanswer,2024-11-20,k
|
||||
sheet_update,3eaf2a883a5fb52169af2ba2e0571189fb3712eb,pablodanswer,2024-11-20,quick pass
|
||||
show_logs,189d62b72e0a2183ac3b25ea62eaea1b4db4366b,pablodanswer,2024-11-08,k
|
||||
show_logs,89cb3b503cf219d90338110cec34d288892c27ed,pablodanswer,2024-11-08,minor updates
|
||||
show_logs,cdda24f9ea4bc54f6a6c49d7848b63b2b5dacc9e,pablodanswer,2024-11-08,remove log
|
||||
show_logs,6dc4ca344c927b5e9c02b28662252a4067a2f7da,pablodanswer,2024-11-08,k
|
||||
show_logs,f91bac1cd90da5070247e70682e38adbe2722ce2,pablodanswer,2024-11-08,improved logging
|
||||
show_logs,5e25488d0af1e1939a366fe12ab42949daaa77f1,pablodanswer,2024-11-08,add additional logs
|
||||
silence_log,7400652fe70f86da3c8aab2a41f26103e395d739,pablodanswer,2024-11-20,silence small error
|
||||
single_tool_call,0230920240fa46e06e1cc66fb67fa42f5caf81b3,pablodanswer,2024-11-01,finalize migration
|
||||
single_tool_call,e7859e8bb4ea8409657cf0a7464724a5192e953e,pablodanswer,2024-11-01,single tool call per message
|
||||
single_tool_call,fd3937179f14968b4103c634a83430f7ae9303bc,pablodanswer,2024-11-01,minor logging updates for clarity
|
||||
single_tool_call,7a5a8f68a6e663d2b91badd47847193c92b523d0,pablodanswer,2024-10-31,minor typo
|
||||
single_tool_call,122cd2082e4ddd4a56992f5f8c36b9853057581a,pablodanswer,2024-10-31,fix stop generating
|
||||
single_tool_call,7384874e54a8ebc136b41efbe0842a327262b738,Weves,2024-10-31,Fix UT
|
||||
single_tool_call,2b06789d5133029d99763037ded18766e8d04d74,Weves,2024-10-31,Remove unused exception
|
||||
single_tool_call,4bdfd117370ac126e1bdc6e32f0192d59c51dd57,Weves,2024-10-31,Fix image generation slowness
|
||||
single_tool_call,6d4ccc354514ff328473a1c35974521c465aa2f5,Weves,2024-10-31,More testing
|
||||
single_tool_call,ef0ad8f8fce4eebc38cc9291047b84e5162572f3,Weves,2024-10-31,Testing
|
||||
single_tool_call,99b076412aa3501cbff75d7521c4cedb8f793c34,Weves,2024-10-31,Add quote support
|
||||
single_tool_call,499272ef25961ddb0861ee2a6ff6d978ea1e7772,Weves,2024-10-02,Fix
|
||||
slack_scaling,dd958cff6b0999190c5116e0354497207231d5d6,pablodanswer,2024-10-30,minor foreign key update
|
||||
super_user,0cc09c8b4d9ba0dca350a799ddc265fca38f4b90,pablodanswer,2024-11-02,nits
|
||||
super_user,ec8ae2b5f4491e3de0701ba31ae3124d8f549e66,pablodanswer,2024-11-02,add super user
|
||||
swap_buttons_cards,e6ce503bbbbed4d70734d11ebccc0db4994f69e0,pablodanswer,2024-11-01,nits
|
||||
swap_buttons_cards,680a160b2560594c3c99d4f1e8cffc3bfea66064,pablodanswer,2024-11-01,update colors
|
||||
swap_buttons_cards,748c99d655739c1bb7da0a25e2829c0d706ff810,pablodanswer,2024-10-31,clean build
|
||||
swap_buttons_cards,a222b9d3e7819e9a7e525b6994248caa167c8ac1,pablodanswer,2024-10-30,list item + configuration updates
|
||||
swap_buttons_cards,df38bde21a0f457fb6be4c1b66fae196ae32ec20,pablodanswer,2024-10-30,nits
|
||||
swap_buttons_cards,ddb22e659d1fb4cd8f30ec952e68db683f5a746e,pablodanswer,2024-10-29,fully swapped
|
||||
swap_buttons_cards,d91e54759a022acf478467b0906ee1a2867aa2ca,pablodanswer,2024-10-29,remove tremor
|
||||
swap_buttons_cards,f6117b0f16581bac8fbd181e13a5dbc061c5debb,pablodanswer,2024-10-29,begin date picker + badge transfer
|
||||
swap_buttons_cards,a8a73590bb24a59371c985931ac5dde96674f5b0,pablodanswer,2024-10-29,fix compiling
|
||||
swap_buttons_cards,5f4f0c0ebb3f12e9de996661eb722561a048311b,pablodanswer,2024-10-29,migrate cards
|
||||
swap_buttons_cards,8b8173bef0f05997c04ef9899d557d0f0a205767,pablodanswer,2024-10-29,minor updates
|
||||
swap_buttons_cards,92b7fe45b1bd1ea39252cd8a4ac6a323a548f518,pablodanswer,2024-10-28,migrate badges
|
||||
swap_buttons_cards,74091415c43c39080bd07c1ef9fc683ecc9742e2,pablodanswer,2024-10-28,migrate dividers + buttons
|
||||
swap_buttons_cards,80f9af73d0adcb06c8228b868632bdecc362d616,pablodanswer,2024-10-28,button + input updates
|
||||
swap_buttons_cards,efbeb2716536ea6b08fac40c1e074698a534ea11,pablodanswer,2024-10-28,initialization
|
||||
switch-to-turbopack,09f5fea799633152f59fb9a54451d922eb4914e0,pablodanswer,2024-11-02,slight modification
|
||||
switch-to-turbopack,f7ac9ae034605ac59a9c97650ebd6956d5628ed6,Weves,2024-11-02,Fix prettier
|
||||
switch-to-turbopack,e42f4c98c487f671887de0c43680a659a9132753,Weves,2024-11-01,Style
|
||||
switch-to-turbopack,f800017b21c2618ae51f16ef4f5d9b5e930f01fc,Weves,2024-11-01,Style
|
||||
switch-to-turbopack,7f5744974644d6cbbcf41815e27f9017de76d738,Weves,2024-11-01,Fix charts
|
||||
switch-to-turbopack,2b6514e75489842c8de0aae99d705e22daee9461,Weves,2024-11-01,Upgrade react
|
||||
switch-to-turbopack,85d5857dbcbbf353a883abf7681c85a48dc4f724,Weves,2024-11-01,Remove override
|
||||
switch-to-turbopack,7760230bf771cb6d3b0fca46b6e0bb35677ad5ee,Weves,2024-11-01,Update nextjs version
|
||||
switch-to-turbopack,a3be5be8c6c2bf653de9df48e6a3dfc01144f849,Weves,2024-11-01,Remove unintended change
|
||||
switch-to-turbopack,4d3fdba81ee2ccace76380b0b7318a5a5ed0ab79,Chris Weaver,2024-10-26,Upgrade to NextJS 15 + use turbopacK
|
||||
temp/include_file61,20d29eb51cca799b9cc04552dd083bf202c760bc,pablodanswer,2024-11-03,temporary update
|
||||
tenant_task_logger,02251aab75bad74647ba526654950b131748eb45,pablodanswer,2024-11-21,update
|
||||
tenant_task_logger,805575ef183348ce55a7d8749db477422d0b30de,pablodanswer,2024-11-09,don't prevent seeding
|
||||
tenant_task_logger,7146d02d553c568d99e7efd97a3b185f783a219a,pablodanswer,2024-11-06,update app base
|
||||
tenant_task_logger,6c360ccc483de4ce42fc88724a55f793398a1445,pablodanswer,2024-11-05,remove logs from beat
|
||||
tenant_task_logger,8773f215688e6775ebdf65bb5edda0f1e6080787,pablodanswer,2024-11-05,append
|
||||
tenant_task_logger,d715c8be8a0465551e4d5670a43bf52d1d4635de,pablodanswer,2024-11-05,remove tenant id logs
|
||||
tenant_task_logger,fa592a1b7a69897110a928a222b19eaef3b7267a,pablodanswer,2024-11-21,clean horizontal scrollbar
|
||||
text_view,5d1a664fdc8c712aa644452b061e76b3302f714a,pablodanswer,2024-11-20,nit
|
||||
text_view,b13a1d1d851b924f7b8f402894526d92712b09fa,pablodanswer,2024-11-18,k
|
||||
text_view,77ab27f982af152818dcb9b4390da80113f17e72,pablodanswer,2024-11-15,update
|
||||
text_view,61135ed7db5168d5517b8f11aed05e14b1aba471,pablodanswer,2024-11-14,basic log
|
||||
text_view,7c13ca547fc42988ef9ca10bd4a354a0fd4473cc,pablodanswer,2024-11-14,minor testing update
|
||||
text_view,46f9f0dc947da29271b16e893152402421cc1c85,pablodanswer,2024-11-14,update tests
|
||||
text_view,756b56d2cd63b7792de532d05a03bbaac2c80960,pablodanswer,2024-11-13,wip tests
|
||||
text_view,180c176136b46424021d4f0ca84052afae4946dd,pablodanswer,2024-11-13,minor docker file update
|
||||
text_view,fa8a92875bc8c3637c7aa0eac937bc3a0818e66a,pablodanswer,2024-11-13,remove left over string
|
||||
text_view,c6907ebebe9391140e272ebe0e89b6b6d207f8f5,pablodanswer,2024-11-13,finalize
|
||||
text_view,709b87d56d0e770c1ee6240cfbd4bc76743eb521,pablodanswer,2024-11-13,finalized
|
||||
text_view,b8df6e22d2d15a099aea2bc3b2e7d4c67b446ae8,pablodanswer,2024-11-13,k
|
||||
text_view,ba977e3f5dae439f4ec6b62edc717ada5f49e1f5,pablodanswer,2024-11-12,minor typing update
|
||||
text_view,ed5ed616efd0dceee374b2de5bec69adb4553a62,pablodanswer,2024-11-12,typing
|
||||
text_view,ff4f3bb211485274250eed299247631cc2f1d9a3,pablodanswer,2024-11-12,update text view
|
||||
text_view,e38fd6f7c76f3133fc407d99428a7286328843b6,pablodanswer,2024-11-12,update text view
|
||||
text_view,c76602b7be9968643726f2a8818d27d290d400dd,pablodanswer,2024-11-12,k
|
||||
text_view,62abe2511b8975ce050c4712a095372bf1d1ddc7,pablodanswer,2024-11-11,initial display
|
||||
theming,e1eff26216e42897db4e49a02cb7bb13e9425422,pablodanswer,2024-11-18,nit
|
||||
theming,4b1d428f71fd8993c516f35d8c4fa502c40baaae,pablodanswer,2024-11-18,add additional theming options
|
||||
theming_updated,f95813e381acf7590e094f774c0811f375cde670,pablodanswer,2024-11-21,update neutral
|
||||
theming_updated,804887fd311a783306f160591bc273866388a9f0,pablodanswer,2024-11-21,update
|
||||
theming_updates,c6556857cceacce98b8a90f9a42c4ddfac3b7884,pablodanswer,2024-10-30,update our tailwind config
|
||||
theming_updates,592394caeae4414bd87108ef9f8de65b77226e37,pablodanswer,2024-10-30,enforce colors
|
||||
theming_updates,8f2b0eb72d55347091339c9ba39e2c12f238a776,pablodanswer,2024-10-30,remove pr
|
||||
theming_updates,f92f8e7a73c238fc44ccca746d6fb597c5ad5cb8,pablodanswer,2024-10-30,nit
|
||||
theming_updates,5c6fc34d6316e033b5e258b9a469fa1bd8ea3167,pablodanswer,2024-10-30,add comments
|
||||
theming_updates,3472fb27371f59b454a4b27a699e2160b801ab46,pablodanswer,2024-10-30,ensure tailwind theme updated
|
||||
theming_updates,8210c8930b005cfe6248618373a708b150e412f2,pablodanswer,2024-10-29,naming
|
||||
theming_updates,e6b9ebc198973a84dc9412302e6b98a24b0a2ce3,pablodanswer,2024-10-29,ensure functionality
|
||||
tool_call_per_message,bd0259c05ff9364a99670582ff1cd804fc1b12b7,pablodanswer,2024-11-03,validated
|
||||
tool_call_per_message,381aadd24e897e28215964404048c84d7aeaa1df,pablodanswer,2024-11-03,remove print
|
||||
tool_call_per_message,90c711322dc19a6c4092a60beb5905ded89079d6,pablodanswer,2024-11-01,k
|
||||
tool_call_per_message,20a36e5f46755a55c022dd422c4d31e9abc24d46,pablodanswer,2024-11-01,validate simplify
|
||||
tool_call_per_message,9b3a008ef42d31227290f0ddfbc5b37daa82f360,pablodanswer,2024-11-01,minor image generation fix
|
||||
tool_call_per_message,a958903bd74c78457ef487debfb6084cd8ab6b2b,pablodanswer,2024-11-01,finalize migration
|
||||
tool_call_per_message,4ea0aceca97734ddca8d1f60da930668e0561694,pablodanswer,2024-11-01,single tool call per message
|
||||
tool_csv_image,8015e84531263cda72d7ca281ed0f790c0d0bb3f,pablodanswer,2024-11-03,add multiple formats to tools
|
||||
tool_search,04be3fcbf7e128136f38760845f5d39197c94a5e,pablodanswer,2024-11-15,k
|
||||
tool_search,601d497ed7acd05709384098a3132e1240d32932,pablodanswer,2024-11-15,add tests
|
||||
tool_search,4de18b2e23222fc2c628982db8659d17c136adfa,pablodanswer,2024-11-07,update
|
||||
tool_search,30e6e9b6dc8bebcc98fcf430fbd77af62faffd1a,pablodanswer,2024-11-07,somewhat cleaner
|
||||
tool_search,ac64d4aa71cca26898a0eeb8d849a15a60945e69,pablodanswer,2024-11-06,remove logs
|
||||
tool_search,1fd949ccfc6984904020ee50a845b119acd1f0be,pablodanswer,2024-11-06,finish functionality
|
||||
tool_search,1253eb27f62c81780def9e37e5498b42321d6f49,pablodanswer,2024-11-06,k
|
||||
tool_search,7dafd72d8c37ab505b35596fb3630c738b58688b,pablodanswer,2024-11-06,first pass
|
||||
tooltips,5fe453e18565a9c2f3b8f20520fb7868b5e08675,pablodanswer,2024-11-04,nit: fix delay duration
|
||||
tooltips,4bb9c461ef4c81543690f51c29c6c39949d3e882,pablodanswer,2024-11-04,clean up tooltips
|
||||
typo,4f2f4e6534605287678fa046524a3ffd705e8ab4,pablodanswer,2024-11-18,(minor) typo
|
||||
uf_theming,fe49e35ca476c494d0a9f36eb6cfea3e99ed0427,pablodanswer,2024-11-22,ensure added
|
||||
uf_theming,804887fd311a783306f160591bc273866388a9f0,pablodanswer,2024-11-21,update
|
||||
undo_temporary_fix,59fcdbaf5a096cc1bcd4599a1c0d7a256ca744f0,pablodanswer,2024-11-03,nit
|
||||
undo_temporary_fix,c3118f91b9958e736704277b5d3f98a10e3943c2,pablodanswer,2024-11-03,Revert temporary modifications
|
||||
update-confluence-behaviour,cc769b8bb9b47da9c955e70174bd498fb0b3231a,hagen-danswer,2024-11-15,has issue with boolean form
|
||||
update-confluence-behaviour,e44646dd799c7f95db1df9616e83241344ef0035,hagen-danswer,2024-11-15,fixed mnore treljsertjoslijt
|
||||
update-confluence-behaviour,b623630934171868c815b62e30be055fc6f06ec8,hagen-danswer,2024-11-15,whoops!
|
||||
update-confluence-behaviour,790db4f8ea6bcb02df170d2892c57ccb50aaa119,hagen-danswer,2024-11-15,so good
|
||||
update-confluence-behaviour,ccd6b8f38113b70ba3acf3beda199fa8ee6e3bab,hagen-danswer,2024-11-15,added key
|
||||
update-confluence-behaviour,4beffa4be3ed029fe23c95ce08c5d18c9314e54e,hagen-danswer,2024-11-15,details!
|
||||
update-confluence-behaviour,dacb1870dc98c986e1105fc797603957a2de4b5a,hagen-danswer,2024-11-15,copy change
|
||||
update-confluence-behaviour,008d6cac8e86429884bd38bbe21a23dac96be123,hagen-danswer,2024-11-15,frontend cleanup
|
||||
update-confluence-behaviour,f3310fbc73c45773dc19c2ef8da9f2fe4336b559,hagen-danswer,2024-11-15,fixed service account tests
|
||||
update-confluence-behaviour,c7819a2c5735f812e150718a3620e4bf90ca6a1e,hagen-danswer,2024-11-15,fixed oauth admin tests
|
||||
update-confluence-behaviour,f3fa6f1442910969f24ec4193b8cea3744f5847d,hagen-danswer,2024-11-15,reworked drive+confluence frontend and implied backend changes
|
||||
user_defaults,fff98ddc15d8a94b44ffbaf2225545bc2c4c01b6,pablodanswer,2024-11-12,minor clarity
|
||||
heads/v0.13.0-cloud.beta.0,102c264fd06232bbc4c7a23615add5cf7c0618be,pablodanswer,2024-11-21,minor updates
|
||||
heads/v0.13.0-cloud.beta.0,1744d29bd6f6740fb20bbbf8b5651cd60edbf127,pablodanswer,2024-11-21,k
|
||||
heads/v0.13.0-cloud.beta.0,fa592a1b7a69897110a928a222b19eaef3b7267a,pablodanswer,2024-11-21,clean horizontal scrollbar
|
||||
validate,afc8075cc3076261c8b98a4fe30822641fb9d2cf,pablodanswer,2024-11-22,add filters to chat
|
||||
validate,71123f54a753f243015f7f6bac62c3b8d1e6d05b,pablodanswer,2024-11-22,several steps
|
||||
validate,6061adb114ef20c4bf6567c9450ae51a2938c927,pablodanswer,2024-11-22,remove chat / search toggle
|
||||
validate,35300f65699862f982016284567ef12974ae05c2,pablodanswer,2024-11-22,update
|
||||
validate,fe49e35ca476c494d0a9f36eb6cfea3e99ed0427,pablodanswer,2024-11-22,ensure added
|
||||
validate,804887fd311a783306f160591bc273866388a9f0,pablodanswer,2024-11-21,update
|
||||
vespa_improvements,7c27de6fdcc6172bc1ff4e9522711210f2113e86,pablodanswer,2024-11-14,minor configuration updates
|
||||
|
@@ -17,16 +17,21 @@ def set_no_auth_user_preferences(
 def load_no_auth_user_preferences(store: KeyValueStore) -> UserPreferences:
+    print("LOADING NO AUTH USER PREFERENCES")
     try:
         preferences_data = cast(
             Mapping[str, Any], store.load(KV_NO_AUTH_USER_PREFERENCES_KEY)
         )
+        print("PREFERENCES DATA", preferences_data)
         return UserPreferences(**preferences_data)
     except KvKeyNotFoundError:
-        return UserPreferences(chosen_assistants=None, default_model=None)
+        return UserPreferences(
+            chosen_assistants=None, default_model=None, auto_scroll=True
+        )


 def fetch_no_auth_user(store: KeyValueStore) -> UserInfo:
+    print("FETCHING NO AUTH USER")
     return UserInfo(
         id="__no_auth_user__",
         email="anonymous@danswer.ai",
@@ -1,5 +1,6 @@
 import multiprocessing
 from typing import Any
+from typing import cast

 from celery import bootsteps  # type: ignore
 from celery import Celery

@@ -95,6 +96,15 @@ def on_worker_init(sender: Any, **kwargs: Any) -> None:
     # by the primary worker. This is unnecessary in the multi tenant scenario
     r = get_redis_client(tenant_id=None)

+    # Log the role and slave count - being connected to a slave or slave count > 0 could be problematic
+    info: dict[str, Any] = cast(dict, r.info("replication"))
+    role: str = cast(str, info.get("role"))
+    connected_slaves: int = info.get("connected_slaves", 0)
+
+    logger.info(
+        f"Redis INFO REPLICATION: role={role} connected_slaves={connected_slaves}"
+    )
+
     # For the moment, we're assuming that we are the only primary worker
     # that should be running.
     # TODO: maybe check for or clean up another zombie primary worker if we detect it
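For reference, the replication check added above can be exercised on its own. A minimal sketch using the standard redis-py client (the helper name is illustrative, not from the repo):

```python
from typing import Any, cast

import redis  # assumes the standard redis-py package


def log_replication_state(r: redis.Redis) -> None:
    # INFO REPLICATION reports "role" ("master" or "slave") and the replica count
    info: dict[str, Any] = cast(dict, r.info("replication"))
    role = info.get("role")
    connected_slaves = info.get("connected_slaves", 0)
    # For a single-writer setup, being connected to a replica (or seeing any
    # replicas at all) can indicate an unexpected Redis topology
    print(f"Redis replication: role={role} connected_slaves={connected_slaves}")
```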
@@ -4,7 +4,6 @@ from typing import Any

 from sqlalchemy.orm import Session

-from danswer.background.indexing.run_indexing import RunIndexingCallbackInterface
 from danswer.configs.app_configs import MAX_PRUNING_DOCUMENT_RETRIEVAL_PER_MINUTE
 from danswer.connectors.cross_connector_utils.rate_limit_wrapper import (
     rate_limit_builder,

@@ -17,6 +16,7 @@ from danswer.connectors.models import Document
 from danswer.db.connector_credential_pair import get_connector_credential_pair
 from danswer.db.enums import TaskStatus
 from danswer.db.models import TaskQueueState
+from danswer.indexing.indexing_heartbeat import IndexingHeartbeatInterface
 from danswer.redis.redis_connector import RedisConnector
 from danswer.server.documents.models import DeletionAttemptSnapshot
 from danswer.utils.logger import setup_logger

@@ -78,7 +78,7 @@ def document_batch_to_ids(

 def extract_ids_from_runnable_connector(
     runnable_connector: BaseConnector,
-    callback: RunIndexingCallbackInterface | None = None,
+    callback: IndexingHeartbeatInterface | None = None,
 ) -> set[str]:
     """
     If the SlimConnector hasnt been implemented for the given connector, just pull

@@ -111,10 +111,15 @@ def extract_ids_from_runnable_connector(
     for doc_batch in doc_batch_generator:
         if callback:
             if callback.should_stop():
-                raise RuntimeError("Stop signal received")
-            callback.progress(len(doc_batch))
+                raise RuntimeError(
+                    "extract_ids_from_runnable_connector: Stop signal detected"
+                )

         all_connector_doc_ids.update(doc_batch_processing_func(doc_batch))

+        if callback:
+            callback.progress("extract_ids_from_runnable_connector", len(doc_batch))
+
     return all_connector_doc_ids
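Taken together, these hunks replace the old RunIndexingCallbackInterface with IndexingHeartbeatInterface, whose progress() call gains a tag naming the call site. A sketch of the contract as reconstructed from the call sites in this diff (not copied from the repo):

```python
from abc import ABC, abstractmethod


class IndexingHeartbeatInterface(ABC):
    """Callback contract reconstructed from the call sites in this diff."""

    @abstractmethod
    def should_stop(self) -> bool:
        """Return True to ask the looping caller to abort."""

    @abstractmethod
    def progress(self, tag: str, amount: int) -> None:
        """Report progress; tag names the caller, e.g. "_run_indexing"."""
```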
@@ -19,7 +19,7 @@ from danswer.db.engine import get_session_with_tenant
 from danswer.db.enums import ConnectorCredentialPairStatus
 from danswer.db.search_settings import get_all_search_settings
 from danswer.redis.redis_connector import RedisConnector
-from danswer.redis.redis_connector_delete import RedisConnectorDeletionFenceData
+from danswer.redis.redis_connector_delete import RedisConnectorDeletePayload
 from danswer.redis.redis_pool import get_redis_client

@@ -118,7 +118,7 @@ def try_generate_document_cc_pair_cleanup_tasks(
         return None

     # set a basic fence to start
-    fence_payload = RedisConnectorDeletionFenceData(
+    fence_payload = RedisConnectorDeletePayload(
         num_tasks=None,
         submitted=datetime.now(timezone.utc),
     )
@@ -29,7 +29,7 @@ from danswer.utils.logger import setup_logger
 from ee.danswer.db.connector_credential_pair import get_all_auto_sync_cc_pairs
 from ee.danswer.db.external_perm import ExternalUserGroup
 from ee.danswer.db.external_perm import replace_user__ext_group_for_cc_pair
-from ee.danswer.external_permissions.sync_params import EXTERNAL_GROUP_SYNC_PERIOD
+from ee.danswer.external_permissions.sync_params import EXTERNAL_GROUP_SYNC_PERIODS
 from ee.danswer.external_permissions.sync_params import GROUP_PERMISSIONS_FUNC_MAP

 logger = setup_logger()

@@ -66,9 +66,9 @@ def _is_external_group_sync_due(cc_pair: ConnectorCredentialPair) -> bool:
     if last_ext_group_sync is None:
         return True

-    source_sync_period = EXTERNAL_GROUP_SYNC_PERIOD
+    source_sync_period = EXTERNAL_GROUP_SYNC_PERIODS.get(cc_pair.connector.source)

-    # If EXTERNAL_GROUP_SYNC_PERIOD is None, we always run the sync.
+    # If EXTERNAL_GROUP_SYNC_PERIODS is None, we always run the sync.
     if not source_sync_period:
         return True
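The singular-to-plural rename turns the sync cadence into a per-source lookup. A hedged sketch of the idea (the mapping contents below are hypothetical; the real values live in ee.danswer.external_permissions.sync_params):

```python
# Hypothetical contents for illustration only
EXTERNAL_GROUP_SYNC_PERIODS: dict[str, int] = {
    "confluence": 12 * 60 * 60,  # assumed: sync Confluence groups every 12h
}


def source_sync_period(source: str) -> int | None:
    # A missing entry returns None, which the caller treats as "always sync"
    return EXTERNAL_GROUP_SYNC_PERIODS.get(source)
```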
@@ -16,7 +16,6 @@ from sqlalchemy.orm import Session
 from danswer.background.celery.apps.app_base import task_logger
 from danswer.background.indexing.job_client import SimpleJobClient
 from danswer.background.indexing.run_indexing import run_indexing_entrypoint
-from danswer.background.indexing.run_indexing import RunIndexingCallbackInterface
 from danswer.configs.app_configs import DISABLE_INDEX_UPDATE_ON_SWAP
 from danswer.configs.constants import CELERY_INDEXING_LOCK_TIMEOUT
 from danswer.configs.constants import CELERY_VESPA_SYNC_BEAT_LOCK_TIMEOUT

@@ -42,6 +41,7 @@ from danswer.db.models import SearchSettings
 from danswer.db.search_settings import get_current_search_settings
 from danswer.db.search_settings import get_secondary_search_settings
 from danswer.db.swap_index import check_index_swap
+from danswer.indexing.indexing_heartbeat import IndexingHeartbeatInterface
 from danswer.natural_language_processing.search_nlp_models import EmbeddingModel
 from danswer.natural_language_processing.search_nlp_models import warm_up_bi_encoder
 from danswer.redis.redis_connector import RedisConnector

@@ -57,7 +57,7 @@ from shared_configs.configs import SENTRY_DSN
 logger = setup_logger()


-class RunIndexingCallback(RunIndexingCallbackInterface):
+class IndexingCallback(IndexingHeartbeatInterface):
     def __init__(
         self,
         stop_key: str,

@@ -73,6 +73,7 @@ class RunIndexingCallback(RunIndexingCallbackInterface):
         self.started: datetime = datetime.now(timezone.utc)
         self.redis_lock.reacquire()

+        self.last_tag: str = ""
         self.last_lock_reacquire: datetime = datetime.now(timezone.utc)

     def should_stop(self) -> bool:

@@ -80,15 +81,17 @@ class RunIndexingCallback(RunIndexingCallbackInterface):
             return True
         return False

-    def progress(self, amount: int) -> None:
+    def progress(self, tag: str, amount: int) -> None:
         try:
             self.redis_lock.reacquire()
+            self.last_tag = tag
             self.last_lock_reacquire = datetime.now(timezone.utc)
         except LockError:
             logger.exception(
-                f"RunIndexingCallback - lock.reacquire exceptioned. "
+                f"IndexingCallback - lock.reacquire exceptioned. "
                 f"lock_timeout={self.redis_lock.timeout} "
                 f"start={self.started} "
+                f"last_tag={self.last_tag} "
                 f"last_reacquired={self.last_lock_reacquire} "
                 f"now={datetime.now(timezone.utc)}"
             )

@@ -192,7 +195,8 @@ def check_for_indexing(self: Task, *, tenant_id: str | None) -> int | None:
             )
             if attempt_id:
                 task_logger.info(
-                    f"Indexing queued: index_attempt={attempt_id} "
+                    f"Connector indexing queued: "
+                    f"index_attempt={attempt_id} "
                     f"cc_pair={cc_pair.id} "
                     f"search_settings={search_settings_instance.id} "
                 )

@@ -383,7 +387,6 @@ def try_creating_indexing_task(
         payload.index_attempt_id = index_attempt_id
         payload.celery_task_id = result.id
         redis_connector_index.set_fence(payload)
-
     except Exception:
         redis_connector_index.set_fence(None)
         task_logger.exception(

@@ -516,7 +519,8 @@ def connector_indexing_task(
         logger.debug("Sentry DSN not provided, skipping Sentry initialization")

     logger.info(
-        f"Indexing spawned task starting: attempt={index_attempt_id} "
+        f"Indexing spawned task starting: "
+        f"attempt={index_attempt_id} "
         f"tenant={tenant_id} "
         f"cc_pair={cc_pair_id} "
         f"search_settings={search_settings_id}"

@@ -618,7 +622,7 @@ def connector_indexing_task(
     )

     # define a callback class
-    callback = RunIndexingCallback(
+    callback = IndexingCallback(
         redis_connector.stop.fence_key,
         redis_connector_index.generator_progress_key,
         lock,
@@ -12,7 +12,7 @@ from sqlalchemy.orm import Session

 from danswer.background.celery.apps.app_base import task_logger
 from danswer.background.celery.celery_utils import extract_ids_from_runnable_connector
-from danswer.background.celery.tasks.indexing.tasks import RunIndexingCallback
+from danswer.background.celery.tasks.indexing.tasks import IndexingCallback
 from danswer.configs.app_configs import ALLOW_SIMULTANEOUS_PRUNING
 from danswer.configs.app_configs import JOB_TIMEOUT
 from danswer.configs.constants import CELERY_PRUNING_LOCK_TIMEOUT

@@ -39,7 +39,14 @@ logger = setup_logger()


 def _is_pruning_due(cc_pair: ConnectorCredentialPair) -> bool:
-    """Returns boolean indicating if pruning is due."""
+    """Returns boolean indicating if pruning is due.
+
+    Next pruning time is calculated as a delta from the last successful prune, or the
+    last successful indexing if pruning has never succeeded.
+
+    TODO(rkuo): consider whether we should allow pruning to be immediately rescheduled
+    if pruning fails (which is what it does now). A backoff could be reasonable.
+    """

     # skip pruning if no prune frequency is set
     # pruning can still be forced via the API which will run a pruning task directly

@@ -225,6 +232,8 @@ def connector_pruning_generator_task(
     pruning_ctx_dict["request_id"] = self.request.id
     pruning_ctx.set(pruning_ctx_dict)

+    task_logger.info(f"Pruning generator starting: cc_pair={cc_pair_id}")
+
     redis_connector = RedisConnector(tenant_id, cc_pair_id)

     r = get_redis_client(tenant_id=tenant_id)

@@ -255,6 +264,11 @@ def connector_pruning_generator_task(
             )
             return

+        task_logger.info(
+            f"Pruning generator running connector: "
+            f"cc_pair={cc_pair_id} "
+            f"connector_source={cc_pair.connector.source}"
+        )
         runnable_connector = instantiate_connector(
             db_session,
             cc_pair.connector.source,

@@ -263,12 +277,13 @@ def connector_pruning_generator_task(
             cc_pair.credential,
         )

-        callback = RunIndexingCallback(
+        callback = IndexingCallback(
             redis_connector.stop.fence_key,
             redis_connector.prune.generator_progress_key,
             lock,
             r,
         )
+
         # a list of docs in the source
         all_connector_doc_ids: set[str] = extract_ids_from_runnable_connector(
             runnable_connector, callback

@@ -290,8 +305,8 @@ def connector_pruning_generator_task(
         task_logger.info(
             f"Pruning set collected: "
             f"cc_pair={cc_pair_id} "
-            f"docs_to_remove={len(doc_ids_to_remove)} "
-            f"doc_source={cc_pair.connector.source}"
+            f"connector_source={cc_pair.connector.source} "
+            f"docs_to_remove={len(doc_ids_to_remove)}"
         )

         task_logger.info(

@@ -314,10 +329,10 @@ def connector_pruning_generator_task(
             f"Failed to run pruning: cc_pair={cc_pair_id} connector={connector_id}"
         )

-        redis_connector.prune.generator_clear()
-        redis_connector.prune.taskset_clear()
-        redis_connector.prune.set_fence(False)
+        redis_connector.prune.reset()
         raise e
     finally:
         if lock.owned():
             lock.release()
+
+    task_logger.info(f"Pruning generator finished: cc_pair={cc_pair_id}")
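The expanded _is_pruning_due docstring pins down the scheduling rule: the next prune time is a delta from the last successful prune, falling back to the last successful index when pruning has never succeeded. A minimal sketch of that rule (the parameter names are assumptions, not the repo's model fields):

```python
from datetime import datetime, timedelta, timezone


def is_pruning_due(
    last_pruned: datetime | None,  # assumed: last successful prune
    last_indexed: datetime | None,  # assumed: last successful index
    prune_freq_seconds: int | None,  # assumed: configured prune frequency
) -> bool:
    if prune_freq_seconds is None:
        # no frequency set; pruning can still be forced via the API
        return False
    reference = last_pruned or last_indexed
    if reference is None:
        # nothing has ever completed, so there is nothing to prune yet
        return False
    next_prune = reference + timedelta(seconds=prune_freq_seconds)
    return datetime.now(timezone.utc) >= next_prune
```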
@@ -177,7 +177,17 @@ def document_by_cc_pair_cleanup_task(
                 f"Max celery task retries reached. Marking doc as dirty for reconciliation: "
                 f"tenant={tenant_id} doc={document_id}"
             )
-            with get_session_with_tenant(tenant_id):
+            with get_session_with_tenant(tenant_id) as db_session:
+                # delete the cc pair relationship now and let reconciliation clean it up
+                # in vespa
+                delete_document_by_connector_credential_pair__no_commit(
+                    db_session=db_session,
+                    document_id=document_id,
+                    connector_credential_pair_identifier=ConnectorCredentialPairIdentifier(
+                        connector_id=connector_id,
+                        credential_id=credential_id,
+                    ),
+                )
                 mark_document_as_modified(document_id, db_session)
             return False
@@ -444,11 +444,22 @@ def monitor_connector_deletion_taskset(
         db_session, cc_pair.connector_id, cc_pair.credential_id
     )
     if len(doc_ids) > 0:
-        # if this happens, documents somehow got added while deletion was in progress. Likely a bug
-        # gating off pruning and indexing work before deletion starts
+        # NOTE(rkuo): if this happens, documents somehow got added while
+        # deletion was in progress. Likely a bug gating off pruning and indexing
+        # work before deletion starts.
         task_logger.warning(
-            f"Connector deletion - documents still found after taskset completion: "
-            f"cc_pair={cc_pair_id} num={len(doc_ids)}"
+            "Connector deletion - documents still found after taskset completion. "
+            "Clearing the current deletion attempt and allowing deletion to restart: "
+            f"cc_pair={cc_pair_id} "
+            f"docs_deleted={fence_data.num_tasks} "
+            f"docs_remaining={len(doc_ids)}"
         )
+
+        # We don't want to waive off why we get into this state, but resetting
+        # our attempt and letting the deletion restart is a good way to recover
+        redis_connector.delete.reset()
         raise RuntimeError(
             "Connector deletion - documents still found after taskset completion"
         )

     # clean up the rest of the related Postgres entities

@@ -512,8 +523,7 @@ def monitor_connector_deletion_taskset(
         f"docs_deleted={fence_data.num_tasks}"
     )

-    redis_connector.delete.taskset_clear()
-    redis_connector.delete.set_fence(None)
+    redis_connector.delete.reset()


 def monitor_ccpair_pruning_taskset(
@@ -645,26 +655,34 @@ def monitor_ccpair_indexing_taskset(
     result_state = result.state

     status_int = redis_connector_index.get_completion()
-    if status_int is None:
+    if status_int is None:  # completion signal not set ... check for errors
+        # If we get here, and then the task both sets the completion signal and finishes,
+        # we will incorrectly abort the task. We must check result state, then check
+        # get_completion again to avoid the race condition.
         if result_state in READY_STATES:
-            # IF the task state is READY, THEN generator_complete should be set
-            # if it isn't, then the worker crashed
-            task_logger.info(
-                f"Connector indexing aborted: "
-                f"cc_pair={cc_pair_id} "
-                f"search_settings={search_settings_id} "
-                f"elapsed_submitted={elapsed_submitted.total_seconds():.2f}"
-            )
-
-            index_attempt = get_index_attempt(db_session, payload.index_attempt_id)
-            if index_attempt:
-                mark_attempt_failed(
-                    index_attempt_id=payload.index_attempt_id,
-                    db_session=db_session,
-                    failure_reason="Connector indexing aborted or exceptioned.",
-                )
-
-            redis_connector_index.reset()
+            if redis_connector_index.get_completion() is None:
+                # IF the task state is READY, THEN generator_complete should be set
+                # if it isn't, then the worker crashed
+                msg = (
+                    f"Connector indexing aborted or exceptioned: "
+                    f"attempt={payload.index_attempt_id} "
+                    f"celery_task={payload.celery_task_id} "
+                    f"result_state={result_state} "
+                    f"cc_pair={cc_pair_id} "
+                    f"search_settings={search_settings_id} "
+                    f"elapsed_submitted={elapsed_submitted.total_seconds():.2f}"
+                )
+                task_logger.warning(msg)
+
+                redis_connector_index.reset()
+                index_attempt = get_index_attempt(db_session, payload.index_attempt_id)
+                if index_attempt:
+                    mark_attempt_failed(
+                        index_attempt_id=payload.index_attempt_id,
+                        db_session=db_session,
+                        failure_reason=msg,
+                    )
         return

     status_enum = HTTPStatus(status_int)
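The comment added in this hunk spells out the race being closed: a worker can set its completion signal and reach a READY state between the monitor's two reads, so the monitor must re-check the signal after observing the state before declaring a crash. A sketch of the pattern under those assumptions:

```python
from collections.abc import Callable

READY_STATES = {"SUCCESS", "FAILURE", "REVOKED"}  # assumed Celery ready states


def worker_crashed(
    result_state: str,
    get_completion: Callable[[], int | None],
) -> bool:
    """True only if the task finished without ever signaling completion."""
    if result_state not in READY_STATES:
        return False  # still running; no conclusion possible yet
    # Re-check the completion signal *after* seeing a ready state: the task
    # may have set it and exited between the first read and the state read.
    return get_completion() is None
```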
@@ -1,7 +1,5 @@
 import time
 import traceback
-from abc import ABC
-from abc import abstractmethod
 from datetime import datetime
 from datetime import timedelta
 from datetime import timezone

@@ -31,7 +29,7 @@ from danswer.db.models import IndexingStatus
 from danswer.db.models import IndexModelStatus
 from danswer.document_index.factory import get_default_document_index
 from danswer.indexing.embedder import DefaultIndexingEmbedder
-from danswer.indexing.indexing_heartbeat import IndexingHeartbeat
+from danswer.indexing.indexing_heartbeat import IndexingHeartbeatInterface
 from danswer.indexing.indexing_pipeline import build_indexing_pipeline
 from danswer.utils.logger import setup_logger
 from danswer.utils.logger import TaskAttemptSingleton

@@ -42,19 +40,6 @@ logger = setup_logger()
 INDEXING_TRACER_NUM_PRINT_ENTRIES = 5


-class RunIndexingCallbackInterface(ABC):
-    """Defines a callback interface to be passed to
-    to run_indexing_entrypoint."""
-
-    @abstractmethod
-    def should_stop(self) -> bool:
-        """Signal to stop the looping function in flight."""
-
-    @abstractmethod
-    def progress(self, amount: int) -> None:
-        """Send progress updates to the caller."""
-
-
 def _get_connector_runner(
     db_session: Session,
     attempt: IndexAttempt,

@@ -106,7 +91,7 @@ def _run_indexing(
     db_session: Session,
     index_attempt: IndexAttempt,
     tenant_id: str | None,
-    callback: RunIndexingCallbackInterface | None = None,
+    callback: IndexingHeartbeatInterface | None = None,
 ) -> None:
     """
     1. Get documents which are either new or updated from specified application

@@ -138,13 +123,7 @@ def _run_indexing(

     embedding_model = DefaultIndexingEmbedder.from_db_search_settings(
         search_settings=search_settings,
-        heartbeat=IndexingHeartbeat(
-            index_attempt_id=index_attempt.id,
-            db_session=db_session,
-            # let the world know we're still making progress after
-            # every 10 batches
-            freq=10,
-        ),
+        callback=callback,
     )

     indexing_pipeline = build_indexing_pipeline(

@@ -157,6 +136,7 @@ def _run_indexing(
         ),
         db_session=db_session,
         tenant_id=tenant_id,
+        callback=callback,
     )

     db_cc_pair = index_attempt.connector_credential_pair

@@ -228,7 +208,9 @@ def _run_indexing(
             # contents still need to be initially pulled.
             if callback:
                 if callback.should_stop():
-                    raise RuntimeError("Connector stop signal detected")
+                    raise RuntimeError(
+                        "_run_indexing: Connector stop signal detected"
+                    )

             # TODO: should we move this into the above callback instead?
             db_session.refresh(db_cc_pair)

@@ -289,7 +271,7 @@ def _run_indexing(
                 db_session.commit()

                 if callback:
-                    callback.progress(len(doc_batch))
+                    callback.progress("_run_indexing", len(doc_batch))

                 # This new value is updated every batch, so UI can refresh per batch update
                 update_docs_indexed(

@@ -419,7 +401,7 @@ def run_indexing_entrypoint(
     tenant_id: str | None,
     connector_credential_pair_id: int,
     is_ee: bool = False,
-    callback: RunIndexingCallbackInterface | None = None,
+    callback: IndexingHeartbeatInterface | None = None,
 ) -> None:
     try:
         if is_ee:
@@ -5,7 +5,7 @@ personas:
   # this is for DanswerBot to use when tagged in a non-configured channel
   # Careful setting specific IDs, this won't autoincrement the next ID value for postgres
   - id: 0
-    name: "Knowledge"
+    name: "Search"
     description: >
       Assistant with access to documents from your Connected Sources.
     # Default Prompt objects attached to the persona, see prompts.yaml
@@ -605,6 +605,7 @@ def stream_chat_message_objects(
                 additional_headers=custom_tool_additional_headers,
             ),
         )
+
     tools: list[Tool] = []
     for tool_list in tool_dict.values():
         tools.extend(tool_list)
@@ -60,7 +60,6 @@ KV_GMAIL_CRED_KEY = "gmail_app_credential"
 KV_GMAIL_SERVICE_ACCOUNT_KEY = "gmail_service_account_key"
 KV_GOOGLE_DRIVE_CRED_KEY = "google_drive_app_credential"
 KV_GOOGLE_DRIVE_SERVICE_ACCOUNT_KEY = "google_drive_service_account_key"
-KV_SLACK_BOT_TOKENS_CONFIG_KEY = "slack_bot_tokens_config_key"
 KV_GEN_AI_KEY_CHECK_TIME = "genai_api_key_last_check_time"
 KV_SETTINGS_KEY = "danswer_settings"
 KV_CUSTOMER_UUID_KEY = "customer_uuid"
@@ -5,9 +5,9 @@ from io import BytesIO
 from typing import Any
 from typing import Optional

-import boto3
-from botocore.client import Config
-from mypy_boto3_s3 import S3Client
+import boto3  # type: ignore
+from botocore.client import Config  # type: ignore
+from mypy_boto3_s3 import S3Client  # type: ignore

 from danswer.configs.app_configs import INDEX_BATCH_SIZE
 from danswer.configs.constants import BlobType
@@ -7,9 +7,9 @@ from danswer.configs.app_configs import CONFLUENCE_CONNECTOR_LABELS_TO_SKIP
 from danswer.configs.app_configs import CONTINUE_ON_CONNECTOR_FAILURE
 from danswer.configs.app_configs import INDEX_BATCH_SIZE
 from danswer.configs.constants import DocumentSource
+from danswer.connectors.confluence.onyx_confluence import build_confluence_client
 from danswer.connectors.confluence.onyx_confluence import OnyxConfluence
 from danswer.connectors.confluence.utils import attachment_to_content
-from danswer.connectors.confluence.utils import build_confluence_client
 from danswer.connectors.confluence.utils import build_confluence_document_id
 from danswer.connectors.confluence.utils import datetime_from_string
 from danswer.connectors.confluence.utils import extract_text_from_confluence_html

@@ -70,7 +70,7 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
     ) -> None:
         self.batch_size = batch_size
         self.continue_on_failure = continue_on_failure
-        self.confluence_client: OnyxConfluence | None = None
+        self._confluence_client: OnyxConfluence | None = None
         self.is_cloud = is_cloud

         # Remove trailing slash from wiki_base if present

@@ -97,39 +97,44 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
         self.cql_label_filter = ""
         if labels_to_skip:
             labels_to_skip = list(set(labels_to_skip))
-            comma_separated_labels = ",".join(f"'{label}'" for label in labels_to_skip)
+            comma_separated_labels = ",".join(
+                f"'{quote(label)}'" for label in labels_to_skip
+            )
             self.cql_label_filter = f" and label not in ({comma_separated_labels})"

+    @property
+    def confluence_client(self) -> OnyxConfluence:
+        if self._confluence_client is None:
+            raise ConnectorMissingCredentialError("Confluence")
+        return self._confluence_client
+
     def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None:
         # see https://github.com/atlassian-api/atlassian-python-api/blob/master/atlassian/rest_client.py
         # for a list of other hidden constructor args
-        self.confluence_client = build_confluence_client(
-            credentials_json=credentials,
+        self._confluence_client = build_confluence_client(
+            credentials=credentials,
             is_cloud=self.is_cloud,
             wiki_base=self.wiki_base,
         )
         return None

     def _get_comment_string_for_page_id(self, page_id: str) -> str:
-        if self.confluence_client is None:
-            raise ConnectorMissingCredentialError("Confluence")
-
         comment_string = ""

         comment_cql = f"type=comment and container='{page_id}'"
         comment_cql += self.cql_label_filter

         expand = ",".join(_COMMENT_EXPANSION_FIELDS)
-        for comments in self.confluence_client.paginated_cql_page_retrieval(
+        for comment in self.confluence_client.paginated_cql_retrieval(
             cql=comment_cql,
             expand=expand,
         ):
-            for comment in comments:
-                comment_string += "\nComment:\n"
-                comment_string += extract_text_from_confluence_html(
-                    confluence_client=self.confluence_client,
-                    confluence_object=comment,
-                )
+            comment_string += "\nComment:\n"
+            comment_string += extract_text_from_confluence_html(
+                confluence_client=self.confluence_client,
+                confluence_object=comment,
+                fetched_titles=set(),
+            )

         return comment_string

@@ -141,9 +146,6 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
         If its a page, it extracts the text, adds the comments for the document text.
         If its an attachment, it just downloads the attachment and converts that into a document.
         """
-        if self.confluence_client is None:
-            raise ConnectorMissingCredentialError("Confluence")
-
         # The url and the id are the same
         object_url = build_confluence_document_id(
             self.wiki_base, confluence_object["_links"]["webui"], self.is_cloud

@@ -153,16 +155,19 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
         # Extract text from page
         if confluence_object["type"] == "page":
             object_text = extract_text_from_confluence_html(
-                self.confluence_client, confluence_object
+                confluence_client=self.confluence_client,
+                confluence_object=confluence_object,
+                fetched_titles={confluence_object.get("title", "")},
             )
             # Add comments to text
             object_text += self._get_comment_string_for_page_id(confluence_object["id"])
         elif confluence_object["type"] == "attachment":
             object_text = attachment_to_content(
-                self.confluence_client, confluence_object
+                confluence_client=self.confluence_client, attachment=confluence_object
             )

         if object_text is None:
             # This only happens for attachments that are not parseable
             return None

         # Get space name
@@ -193,44 +198,39 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
         )

     def _fetch_document_batches(self) -> GenerateDocumentsOutput:
-        if self.confluence_client is None:
-            raise ConnectorMissingCredentialError("Confluence")
-
         doc_batch: list[Document] = []
         confluence_page_ids: list[str] = []

         page_query = self.cql_page_query + self.cql_label_filter + self.cql_time_filter
         # Fetch pages as Documents
-        for page_batch in self.confluence_client.paginated_cql_page_retrieval(
+        for page in self.confluence_client.paginated_cql_retrieval(
             cql=page_query,
             expand=",".join(_PAGE_EXPANSION_FIELDS),
             limit=self.batch_size,
         ):
-            for page in page_batch:
-                confluence_page_ids.append(page["id"])
-                doc = self._convert_object_to_document(page)
-                if doc is not None:
-                    doc_batch.append(doc)
-                    if len(doc_batch) >= self.batch_size:
-                        yield doc_batch
-                        doc_batch = []
+            confluence_page_ids.append(page["id"])
+            doc = self._convert_object_to_document(page)
+            if doc is not None:
+                doc_batch.append(doc)
+                if len(doc_batch) >= self.batch_size:
+                    yield doc_batch
+                    doc_batch = []

         # Fetch attachments as Documents
         for confluence_page_id in confluence_page_ids:
             attachment_cql = f"type=attachment and container='{confluence_page_id}'"
             attachment_cql += self.cql_label_filter
             # TODO: maybe should add time filter as well?
-            for attachments in self.confluence_client.paginated_cql_page_retrieval(
+            for attachment in self.confluence_client.paginated_cql_retrieval(
                 cql=attachment_cql,
                 expand=",".join(_ATTACHMENT_EXPANSION_FIELDS),
             ):
-                for attachment in attachments:
-                    doc = self._convert_object_to_document(attachment)
-                    if doc is not None:
-                        doc_batch.append(doc)
-                        if len(doc_batch) >= self.batch_size:
-                            yield doc_batch
-                            doc_batch = []
+                doc = self._convert_object_to_document(attachment)
+                if doc is not None:
+                    doc_batch.append(doc)
+                    if len(doc_batch) >= self.batch_size:
+                        yield doc_batch
+                        doc_batch = []

         if doc_batch:
             yield doc_batch
@@ -255,52 +255,47 @@ class ConfluenceConnector(LoadConnector, PollConnector, SlimConnector):
         start: SecondsSinceUnixEpoch | None = None,
         end: SecondsSinceUnixEpoch | None = None,
     ) -> GenerateSlimDocumentOutput:
-        if self.confluence_client is None:
-            raise ConnectorMissingCredentialError("Confluence")
-
         doc_metadata_list: list[SlimDocument] = []

         restrictions_expand = ",".join(_RESTRICTIONS_EXPANSION_FIELDS)

         page_query = self.cql_page_query + self.cql_label_filter
-        for pages in self.confluence_client.cql_paginate_all_expansions(
+        for page in self.confluence_client.cql_paginate_all_expansions(
            cql=page_query,
            expand=restrictions_expand,
        ):
-            for page in pages:
-                # If the page has restrictions, add them to the perm_sync_data
-                # These will be used by doc_sync.py to sync permissions
-                perm_sync_data = {
-                    "restrictions": page.get("restrictions", {}),
-                    "space_key": page.get("space", {}).get("key"),
-                }
+            # If the page has restrictions, add them to the perm_sync_data
+            # These will be used by doc_sync.py to sync permissions
+            perm_sync_data = {
+                "restrictions": page.get("restrictions", {}),
+                "space_key": page.get("space", {}).get("key"),
+            }

-                doc_metadata_list.append(
-                    SlimDocument(
-                        id=build_confluence_document_id(
-                            self.wiki_base,
-                            page["_links"]["webui"],
-                            self.is_cloud,
-                        ),
-                        perm_sync_data=perm_sync_data,
-                    )
-                )
-                attachment_cql = f"type=attachment and container='{page['id']}'"
-                attachment_cql += self.cql_label_filter
-                for attachments in self.confluence_client.cql_paginate_all_expansions(
-                    cql=attachment_cql,
-                    expand=restrictions_expand,
-                ):
-                    for attachment in attachments:
-                        doc_metadata_list.append(
-                            SlimDocument(
-                                id=build_confluence_document_id(
-                                    self.wiki_base,
-                                    attachment["_links"]["webui"],
-                                    self.is_cloud,
-                                ),
-                                perm_sync_data=perm_sync_data,
-                            )
-                        )
-                yield doc_metadata_list
-                doc_metadata_list = []
+            doc_metadata_list.append(
+                SlimDocument(
+                    id=build_confluence_document_id(
+                        self.wiki_base,
+                        page["_links"]["webui"],
+                        self.is_cloud,
+                    ),
+                    perm_sync_data=perm_sync_data,
+                )
+            )
+            attachment_cql = f"type=attachment and container='{page['id']}'"
+            attachment_cql += self.cql_label_filter
+            for attachment in self.confluence_client.cql_paginate_all_expansions(
+                cql=attachment_cql,
+                expand=restrictions_expand,
+            ):
+                doc_metadata_list.append(
+                    SlimDocument(
+                        id=build_confluence_document_id(
+                            self.wiki_base,
+                            attachment["_links"]["webui"],
+                            self.is_cloud,
+                        ),
+                        perm_sync_data=perm_sync_data,
+                    )
+                )
+            yield doc_metadata_list
+            doc_metadata_list = []
@@ -20,6 +20,10 @@ F = TypeVar("F", bound=Callable[..., Any])

 RATE_LIMIT_MESSAGE_LOWERCASE = "Rate limit exceeded".lower()

+# https://jira.atlassian.com/browse/CONFCLOUD-76433
+_PROBLEMATIC_EXPANSIONS = "body.storage.value"
+_REPLACEMENT_EXPANSIONS = "body.view.value"
+

 class ConfluenceRateLimitError(Exception):
     pass

@@ -80,7 +84,7 @@ def handle_confluence_rate_limit(confluence_call: F) -> F:
     def wrapped_call(*args: list[Any], **kwargs: Any) -> Any:
         MAX_RETRIES = 5

-        TIMEOUT = 3600
+        TIMEOUT = 600
         timeout_at = time.monotonic() + TIMEOUT

         for attempt in range(MAX_RETRIES):

@@ -95,6 +99,10 @@ def handle_confluence_rate_limit(confluence_call: F) -> F:
                 return confluence_call(*args, **kwargs)
             except HTTPError as e:
                 delay_until = _handle_http_error(e, attempt)
+                logger.warning(
+                    f"HTTPError in confluence call. "
+                    f"Retrying in {delay_until} seconds..."
+                )
                 while time.monotonic() < delay_until:
                     # in the future, check a signal here to exit
                     time.sleep(1)

@@ -141,7 +149,7 @@ class OnyxConfluence(Confluence):

     def _paginate_url(
         self, url_suffix: str, limit: int | None = None
-    ) -> Iterator[list[dict[str, Any]]]:
+    ) -> Iterator[dict[str, Any]]:
         """
         This will paginate through the top level query.
         """
@@ -153,46 +161,43 @@ class OnyxConfluence(Confluence):
|
||||
|
||||
while url_suffix:
|
||||
try:
|
||||
logger.debug(f"Making confluence call to {url_suffix}")
|
||||
next_response = self.get(url_suffix)
|
||||
except Exception as e:
|
||||
logger.exception("Error in danswer_cql: \n")
|
||||
raise e
|
||||
yield next_response.get("results", [])
|
||||
logger.warning(f"Error in confluence call to {url_suffix}")
|
||||
|
||||
# If the problematic expansion is in the url, replace it
|
||||
# with the replacement expansion and try again
|
||||
# If that fails, raise the error
|
||||
if _PROBLEMATIC_EXPANSIONS not in url_suffix:
|
||||
logger.exception(f"Error in confluence call to {url_suffix}")
|
||||
raise e
|
||||
logger.warning(
|
||||
f"Replacing {_PROBLEMATIC_EXPANSIONS} with {_REPLACEMENT_EXPANSIONS}"
|
||||
" and trying again."
|
||||
)
|
||||
url_suffix = url_suffix.replace(
|
||||
_PROBLEMATIC_EXPANSIONS,
|
||||
_REPLACEMENT_EXPANSIONS,
|
||||
)
|
||||
continue
|
||||
|
||||
# yield the results individually
|
||||
yield from next_response.get("results", [])
|
||||
|
||||
url_suffix = next_response.get("_links", {}).get("next")
|
||||
|
||||
-    def paginated_groups_retrieval(
-        self,
-        limit: int | None = None,
-    ) -> Iterator[list[dict[str, Any]]]:
-        return self._paginate_url("rest/api/group", limit)
-
-    def paginated_group_members_retrieval(
-        self,
-        group_name: str,
-        limit: int | None = None,
-    ) -> Iterator[list[dict[str, Any]]]:
-        group_name = quote(group_name)
-        return self._paginate_url(f"rest/api/group/{group_name}/member", limit)
-
-    def paginated_cql_user_retrieval(
+    def paginated_cql_retrieval(
         self,
         cql: str,
         expand: str | None = None,
         limit: int | None = None,
-    ) -> Iterator[list[dict[str, Any]]]:
+    ) -> Iterator[dict[str, Any]]:
         """
         The content/search endpoint can be used to fetch pages, attachments, and comments.
         """
         expand_string = f"&expand={expand}" if expand else ""
-        return self._paginate_url(
-            f"rest/api/search/user?cql={cql}{expand_string}", limit
-        )
-
-    def paginated_cql_page_retrieval(
-        self,
-        cql: str,
-        expand: str | None = None,
-        limit: int | None = None,
-    ) -> Iterator[list[dict[str, Any]]]:
-        expand_string = f"&expand={expand}" if expand else ""
-        return self._paginate_url(
+        yield from self._paginate_url(
             f"rest/api/content/search?cql={cql}{expand_string}", limit
         )
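
After this consolidation, callers receive one Confluence object per iteration instead of a page-sized list. A hypothetical caller (the client instance, space key, and CQL are illustrative):

```python
# Stream pages from one space, one dict at a time
cql = "type=page and space='ENG'"
for page in confluence_client.paginated_cql_retrieval(cql=cql, expand="body.storage.value"):
    print(page["id"], page.get("title"))
```
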
@@ -201,7 +206,7 @@ class OnyxConfluence(Confluence):
         cql: str,
         expand: str | None = None,
         limit: int | None = None,
-    ) -> Iterator[list[dict[str, Any]]]:
+    ) -> Iterator[dict[str, Any]]:
         """
         This function will paginate through the top level query first, then
         paginate through all of the expansions.
@@ -221,6 +226,110 @@ class OnyxConfluence(Confluence):
         for item in data:
             _traverse_and_update(item)

-        for results in self.paginated_cql_page_retrieval(cql, expand, limit):
-            _traverse_and_update(results)
-            yield results
+        for confluence_object in self.paginated_cql_retrieval(cql, expand, limit):
+            _traverse_and_update(confluence_object)
+            yield confluence_object
+
+    def paginated_cql_user_retrieval(
+        self,
+        expand: str | None = None,
+        limit: int | None = None,
+    ) -> Iterator[dict[str, Any]]:
+        """
+        The search/user endpoint can be used to fetch users.
+        It's a separate endpoint from the content/search endpoint used only for users.
+        Otherwise it's very similar to the content/search endpoint.
+        """
+        cql = "type=user"
+        url = "rest/api/search/user" if self.cloud else "rest/api/search"
+        expand_string = f"&expand={expand}" if expand else ""
+        url += f"?cql={cql}{expand_string}"
+        yield from self._paginate_url(url, limit)
+
+    def paginated_groups_by_user_retrieval(
+        self,
+        user: dict[str, Any],
+        limit: int | None = None,
+    ) -> Iterator[dict[str, Any]]:
+        """
+        This is not an SQL-like query.
+        It's a Confluence-specific endpoint that can be used to fetch groups.
+        """
+        user_field = "accountId" if self.cloud else "key"
+        user_value = user["accountId"] if self.cloud else user["userKey"]
+        # Server uses userKey (but calls it key during the API call), Cloud uses accountId
+        user_query = f"{user_field}={quote(user_value)}"
+
+        url = f"rest/api/user/memberof?{user_query}"
+        yield from self._paginate_url(url, limit)
+
+    def paginated_groups_retrieval(
+        self,
+        limit: int | None = None,
+    ) -> Iterator[dict[str, Any]]:
+        """
+        This is not an SQL-like query.
+        It's a Confluence-specific endpoint that can be used to fetch groups.
+        """
+        yield from self._paginate_url("rest/api/group", limit)
+
+    def paginated_group_members_retrieval(
+        self,
+        group_name: str,
+        limit: int | None = None,
+    ) -> Iterator[dict[str, Any]]:
+        """
+        This is not an SQL-like query.
+        It's a Confluence-specific endpoint that can be used to fetch the members of a group.
+        THIS DOESN'T WORK FOR SERVER because it breaks when there is a slash in the group name.
+        E.g. neither "test/group" nor "test%2Fgroup" works for Confluence.
+        """
+        group_name = quote(group_name)
+        yield from self._paginate_url(f"rest/api/group/{group_name}/member", limit)
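
Together these iterators support a permission-sync walk: every user, then the groups each user belongs to. A hypothetical sketch (the nesting of the cloud search result under a `user` key is an assumption here, not confirmed by the diff):

```python
for result in confluence_client.paginated_cql_user_retrieval():
    user = result.get("user", result)  # assumed shape: cloud may nest the user object
    for group in confluence_client.paginated_groups_by_user_retrieval(user):
        print(user.get("displayName"), "->", group.get("name"))
```
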
+
+
+def _validate_connector_configuration(
+    credentials: dict[str, Any],
+    is_cloud: bool,
+    wiki_base: str,
+) -> None:
+    # test connection with direct client, no retries
+    confluence_client_without_retries = Confluence(
+        api_version="cloud" if is_cloud else "latest",
+        url=wiki_base.rstrip("/"),
+        username=credentials["confluence_username"] if is_cloud else None,
+        password=credentials["confluence_access_token"] if is_cloud else None,
+        token=credentials["confluence_access_token"] if not is_cloud else None,
+    )
+    spaces = confluence_client_without_retries.get_all_spaces(limit=1)
+
+    if not spaces:
+        raise RuntimeError(
+            f"No spaces found at {wiki_base}! "
+            "Check your credentials and wiki_base and make sure "
+            "is_cloud is set correctly."
+        )
+
+
+def build_confluence_client(
+    credentials: dict[str, Any],
+    is_cloud: bool,
+    wiki_base: str,
+) -> OnyxConfluence:
+    _validate_connector_configuration(
+        credentials=credentials,
+        is_cloud=is_cloud,
+        wiki_base=wiki_base,
+    )
+    return OnyxConfluence(
+        api_version="cloud" if is_cloud else "latest",
+        # Remove trailing slash from wiki_base if present
+        url=wiki_base.rstrip("/"),
+        # passing in username causes issues for Confluence data center
+        username=credentials["confluence_username"] if is_cloud else None,
+        password=credentials["confluence_access_token"] if is_cloud else None,
+        token=credentials["confluence_access_token"] if not is_cloud else None,
+        backoff_and_retry=True,
+        max_backoff_retries=10,
+        max_backoff_seconds=60,
+    )
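
Construction now validates the credentials with a non-retrying client before handing back the retrying `OnyxConfluence`. A hypothetical call, using the credential keys shown in the diff and an illustrative URL:

```python
credentials = {
    "confluence_username": "bot@example.com",
    "confluence_access_token": "api-token",
}
client = build_confluence_client(
    credentials=credentials,
    is_cloud=True,
    wiki_base="https://example.atlassian.net/wiki/",  # trailing slash is stripped
)
```
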
@@ -2,6 +2,7 @@ import io
 from datetime import datetime
 from datetime import timezone
 from typing import Any
+from urllib.parse import quote

 import bs4

@@ -71,7 +72,9 @@ def _get_user(confluence_client: OnyxConfluence, user_id: str) -> str:


 def extract_text_from_confluence_html(
-    confluence_client: OnyxConfluence, confluence_object: dict[str, Any]
+    confluence_client: OnyxConfluence,
+    confluence_object: dict[str, Any],
+    fetched_titles: set[str],
 ) -> str:
     """Parse a Confluence HTML page and replace each user ID with the real
     user display name
@@ -79,7 +82,7 @@ def extract_text_from_confluence_html(
     Args:
         confluence_object (dict): The confluence object as a dict
         confluence_client (Confluence): Confluence client
-
+        fetched_titles (set[str]): The titles of the pages that have already been fetched
     Returns:
         str: loaded and formatted Confluence page
     """

@@ -101,38 +104,72 @@ def extract_text_from_confluence_html(
         # Include @ sign for tagging, more clear for LLM
         user.replaceWith("@" + _get_user(confluence_client, user_id))

-    for html_page_reference in soup.findAll("ri:page"):
+    for html_page_reference in soup.findAll("ac:structured-macro"):
+        # Here, we only want to process page-within-page macros
+        if html_page_reference.attrs.get("ac:name") != "include":
+            continue
+
+        page_data = html_page_reference.find("ri:page")
+        if not page_data:
+            logger.warning(
+                f"Skipping retrieval of {html_page_reference} because page data is missing"
+            )
+            continue
+
+        page_title = page_data.attrs.get("ri:content-title")
+        if not page_title:
+            # only fetch pages that have a title
+            logger.warning(
+                f"Skipping retrieval of {html_page_reference} because it has no title"
+            )
+            continue
+
+        if page_title in fetched_titles:
+            # prevent recursive fetching of pages
+            logger.debug(f"Skipping {page_title} because it has already been fetched")
+            continue
+
+        fetched_titles.add(page_title)
+
         # Wrap this in a try-except because there are some pages that might not exist
         try:
-            page_title = html_page_reference.attrs["ri:content-title"]
-            if not page_title:
-                continue
-
-            page_query = f"type=page and title='{page_title}'"
+            page_query = f"type=page and title='{quote(page_title)}'"

             page_contents: dict[str, Any] | None = None
             # Confluence enforces title uniqueness, so we should only get one result here
-            for page_batch in confluence_client.paginated_cql_page_retrieval(
+            for page in confluence_client.paginated_cql_retrieval(
                 cql=page_query,
                 expand="body.storage.value",
                 limit=1,
             ):
-                page_contents = page_batch[0]
+                page_contents = page
                 break
-        except Exception:
+        except Exception as e:
             logger.warning(
-                f"Error getting page contents for object {confluence_object}"
+                f"Error getting page contents for object {confluence_object}: {e}"
             )
             continue

         if not page_contents:
             continue

         text_from_page = extract_text_from_confluence_html(
-            confluence_client, page_contents
+            confluence_client=confluence_client,
+            confluence_object=page_contents,
+            fetched_titles=fetched_titles,
         )

         html_page_reference.replaceWith(text_from_page)

+    for html_link_body in soup.findAll("ac:link-body"):
+        # This extracts the text from inline links in the page so they can be
+        # represented in the document text as plain text
+        try:
+            text_from_link = html_link_body.text
+            html_link_body.replaceWith(f"(LINK TEXT: {text_from_link})")
+        except Exception as e:
+            logger.warning(f"Error processing ac:link-body: {e}")
+
     return format_document_soup(soup)
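
The `fetched_titles` set is a recursion guard: pages that include each other, directly or through a chain, are expanded at most once. The shape of the guard, as a minimal sketch with a hypothetical `get_included_titles` helper:

```python
def expand_page(title: str, fetched_titles: set[str], get_included_titles) -> None:
    if title in fetched_titles:
        return  # already expanded somewhere up the call chain
    fetched_titles.add(title)
    for included in get_included_titles(title):
        expand_page(included, fetched_titles, get_included_titles)
```
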
@@ -232,20 +269,3 @@ def datetime_from_string(datetime_string: str) -> datetime:
         datetime_object = datetime_object.astimezone(timezone.utc)

     return datetime_object
-
-
-def build_confluence_client(
-    credentials_json: dict[str, Any], is_cloud: bool, wiki_base: str
-) -> OnyxConfluence:
-    return OnyxConfluence(
-        api_version="cloud" if is_cloud else "latest",
-        # Remove trailing slash from wiki_base if present
-        url=wiki_base.rstrip("/"),
-        # passing in username causes issues for Confluence data center
-        username=credentials_json["confluence_username"] if is_cloud else None,
-        password=credentials_json["confluence_access_token"] if is_cloud else None,
-        token=credentials_json["confluence_access_token"] if not is_cloud else None,
-        backoff_and_retry=True,
-        max_backoff_retries=60,
-        max_backoff_seconds=60,
-    )

@@ -1,8 +1,8 @@
 import os
+from collections.abc import Iterable
 from datetime import datetime
 from datetime import timezone
 from typing import Any
-from urllib.parse import urlparse

 from jira import JIRA
 from jira.resources import Issue
@@ -12,129 +12,93 @@ from danswer.configs.app_configs import JIRA_CONNECTOR_LABELS_TO_SKIP
 from danswer.configs.app_configs import JIRA_CONNECTOR_MAX_TICKET_SIZE
 from danswer.configs.constants import DocumentSource
 from danswer.connectors.cross_connector_utils.miscellaneous_utils import time_str_to_utc
+from danswer.connectors.danswer_jira.utils import best_effort_basic_expert_info
+from danswer.connectors.danswer_jira.utils import best_effort_get_field_from_issue
+from danswer.connectors.danswer_jira.utils import build_jira_client
+from danswer.connectors.danswer_jira.utils import build_jira_url
+from danswer.connectors.danswer_jira.utils import extract_jira_project
+from danswer.connectors.danswer_jira.utils import extract_text_from_adf
+from danswer.connectors.danswer_jira.utils import get_comment_strs
 from danswer.connectors.interfaces import GenerateDocumentsOutput
+from danswer.connectors.interfaces import GenerateSlimDocumentOutput
 from danswer.connectors.interfaces import LoadConnector
 from danswer.connectors.interfaces import PollConnector
 from danswer.connectors.interfaces import SecondsSinceUnixEpoch
-from danswer.connectors.models import BasicExpertInfo
+from danswer.connectors.interfaces import SlimConnector
 from danswer.connectors.models import ConnectorMissingCredentialError
 from danswer.connectors.models import Document
 from danswer.connectors.models import Section
+from danswer.connectors.models import SlimDocument
 from danswer.utils.logger import setup_logger


 logger = setup_logger()
-PROJECT_URL_PAT = "projects"

 JIRA_API_VERSION = os.environ.get("JIRA_API_VERSION") or "2"
+_JIRA_SLIM_PAGE_SIZE = 500
+_JIRA_FULL_PAGE_SIZE = 50


-def extract_jira_project(url: str) -> tuple[str, str]:
-    parsed_url = urlparse(url)
-    jira_base = parsed_url.scheme + "://" + parsed_url.netloc
-
-    # Split the path by '/' and find the position of 'projects' to get the project name
-    split_path = parsed_url.path.split("/")
-    if PROJECT_URL_PAT in split_path:
-        project_pos = split_path.index(PROJECT_URL_PAT)
-        if len(split_path) > project_pos + 1:
-            jira_project = split_path[project_pos + 1]
-        else:
-            raise ValueError("No project name found in the URL")
-    else:
-        raise ValueError("'projects' not found in the URL")
-
-    return jira_base, jira_project
-
-
-def extract_text_from_adf(adf: dict | None) -> str:
-    """Extracts plain text from Atlassian Document Format:
-    https://developer.atlassian.com/cloud/jira/platform/apis/document/structure/
-
-    WARNING: This function is incomplete and will e.g. skip lists!
-    """
-    texts = []
-    if adf is not None and "content" in adf:
-        for block in adf["content"]:
-            if "content" in block:
-                for item in block["content"]:
-                    if item["type"] == "text":
-                        texts.append(item["text"])
-    return " ".join(texts)
-
-
-def best_effort_get_field_from_issue(jira_issue: Issue, field: str) -> Any:
-    if hasattr(jira_issue.fields, field):
-        return getattr(jira_issue.fields, field)
-
-    try:
-        return jira_issue.raw["fields"][field]
-    except Exception:
-        return None
-
-
-def _get_comment_strs(
-    jira: Issue, comment_email_blacklist: tuple[str, ...] = ()
-) -> list[str]:
-    comment_strs = []
-    for comment in jira.fields.comment.comments:
-        try:
-            body_text = (
-                comment.body
-                if JIRA_API_VERSION == "2"
-                else extract_text_from_adf(comment.raw["body"])
-            )
-
-            if (
-                hasattr(comment, "author")
-                and hasattr(comment.author, "emailAddress")
-                and comment.author.emailAddress in comment_email_blacklist
-            ):
-                continue  # Skip adding comment if author's email is in blacklist
-
-            comment_strs.append(body_text)
-        except Exception as e:
-            logger.error(f"Failed to process comment due to an error: {e}")
-            continue
-
-    return comment_strs
+def _paginate_jql_search(
+    jira_client: JIRA,
+    jql: str,
+    max_results: int,
+    fields: str | None = None,
+) -> Iterable[Issue]:
+    start = 0
+    while True:
+        logger.debug(
+            f"Fetching Jira issues with JQL: {jql}, "
+            f"starting at {start}, max results: {max_results}"
+        )
+        issues = jira_client.search_issues(
+            jql_str=jql,
+            startAt=start,
+            maxResults=max_results,
+            fields=fields,
+        )
+
+        for issue in issues:
+            if isinstance(issue, Issue):
+                yield issue
+            else:
+                raise Exception(f"Found Jira object not of type Issue: {issue}")
+
+        if len(issues) < max_results:
+            break
+
+        start += max_results
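
`_paginate_jql_search` is plain offset pagination over `search_issues`: a page shorter than `max_results` marks the end. Hypothetical usage (client and JQL are illustrative):

```python
for issue in _paginate_jql_search(
    jira_client=jira_client,
    jql='project = "ENG" ORDER BY updated DESC',
    max_results=50,
):
    print(issue.key)
```
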
 def fetch_jira_issues_batch(
-    jql: str,
-    start_index: int,
     jira_client: JIRA,
-    batch_size: int = INDEX_BATCH_SIZE,
+    jql: str,
+    batch_size: int,
     comment_email_blacklist: tuple[str, ...] = (),
     labels_to_skip: set[str] | None = None,
-) -> tuple[list[Document], int]:
-    doc_batch = []
-
-    batch = jira_client.search_issues(
-        jql,
-        startAt=start_index,
-        maxResults=batch_size,
-    )
-
-    for jira in batch:
-        if type(jira) != Issue:
-            logger.warning(f"Found Jira object not of type Issue {jira}")
-            continue
-
-        if labels_to_skip and any(
-            label in jira.fields.labels for label in labels_to_skip
-        ):
-            logger.info(
-                f"Skipping {jira.key} because it has a label to skip. Found "
-                f"labels: {jira.fields.labels}. Labels to skip: {labels_to_skip}."
-            )
-            continue
+) -> Iterable[Document]:
+    for issue in _paginate_jql_search(
+        jira_client=jira_client,
+        jql=jql,
+        max_results=batch_size,
+    ):
+        if labels_to_skip:
+            if any(label in issue.fields.labels for label in labels_to_skip):
+                logger.info(
+                    f"Skipping {issue.key} because it has a label to skip. Found "
+                    f"labels: {issue.fields.labels}. Labels to skip: {labels_to_skip}."
+                )
+                continue

         description = (
-            jira.fields.description
+            issue.fields.description
             if JIRA_API_VERSION == "2"
-            else extract_text_from_adf(jira.raw["fields"]["description"])
+            else extract_text_from_adf(issue.raw["fields"]["description"])
         )
-        comments = _get_comment_strs(jira, comment_email_blacklist)
+        comments = get_comment_strs(
+            issue=issue,
+            comment_email_blacklist=comment_email_blacklist,
+        )
         ticket_content = f"{description}\n" + "\n".join(
             [f"Comment: {comment}" for comment in comments if comment]
         )
@@ -142,66 +106,53 @@ def fetch_jira_issues_batch(
         # Check ticket size
         if len(ticket_content.encode("utf-8")) > JIRA_CONNECTOR_MAX_TICKET_SIZE:
             logger.info(
-                f"Skipping {jira.key} because it exceeds the maximum size of "
+                f"Skipping {issue.key} because it exceeds the maximum size of "
                 f"{JIRA_CONNECTOR_MAX_TICKET_SIZE} bytes."
             )
             continue

-        page_url = f"{jira_client.client_info()}/browse/{jira.key}"
+        page_url = f"{jira_client.client_info()}/browse/{issue.key}"

         people = set()
         try:
-            people.add(
-                BasicExpertInfo(
-                    display_name=jira.fields.creator.displayName,
-                    email=jira.fields.creator.emailAddress,
-                )
-            )
+            creator = best_effort_get_field_from_issue(issue, "creator")
+            if basic_expert_info := best_effort_basic_expert_info(creator):
+                people.add(basic_expert_info)
         except Exception:
             # Author should exist but if not, doesn't matter
             pass

         try:
-            people.add(
-                BasicExpertInfo(
-                    display_name=jira.fields.assignee.displayName,  # type: ignore
-                    email=jira.fields.assignee.emailAddress,  # type: ignore
-                )
-            )
+            assignee = best_effort_get_field_from_issue(issue, "assignee")
+            if basic_expert_info := best_effort_basic_expert_info(assignee):
+                people.add(basic_expert_info)
         except Exception:
             # Author should exist but if not, doesn't matter
             pass

         metadata_dict = {}
-        priority = best_effort_get_field_from_issue(jira, "priority")
-        if priority:
+        if priority := best_effort_get_field_from_issue(issue, "priority"):
             metadata_dict["priority"] = priority.name
-        status = best_effort_get_field_from_issue(jira, "status")
-        if status:
+        if status := best_effort_get_field_from_issue(issue, "status"):
             metadata_dict["status"] = status.name
-        resolution = best_effort_get_field_from_issue(jira, "resolution")
-        if resolution:
+        if resolution := best_effort_get_field_from_issue(issue, "resolution"):
             metadata_dict["resolution"] = resolution.name
-        labels = best_effort_get_field_from_issue(jira, "labels")
-        if labels:
+        if labels := best_effort_get_field_from_issue(issue, "labels"):
             metadata_dict["label"] = labels

-        doc_batch.append(
-            Document(
-                id=page_url,
-                sections=[Section(link=page_url, text=ticket_content)],
-                source=DocumentSource.JIRA,
-                semantic_identifier=jira.fields.summary,
-                doc_updated_at=time_str_to_utc(jira.fields.updated),
-                primary_owners=list(people) or None,
-                # TODO add secondary_owners (commenters) if needed
-                metadata=metadata_dict,
-            )
-        )
-
-    return doc_batch, len(batch)
+        yield Document(
+            id=page_url,
+            sections=[Section(link=page_url, text=ticket_content)],
+            source=DocumentSource.JIRA,
+            semantic_identifier=issue.fields.summary,
+            doc_updated_at=time_str_to_utc(issue.fields.updated),
+            primary_owners=list(people) or None,
+            # TODO add secondary_owners (commenters) if needed
+            metadata=metadata_dict,
+        )
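
`fetch_jira_issues_batch` now yields `Document`s one at a time; the connector methods below regroup them into `batch_size` lists. That regrouping pattern, as a standalone sketch:

```python
from collections.abc import Iterable, Iterator
from itertools import islice
from typing import TypeVar

T = TypeVar("T")


def batched(items: Iterable[T], batch_size: int) -> Iterator[list[T]]:
    """Drain a generator into fixed-size lists, emitting the remainder last."""
    iterator = iter(items)
    while batch := list(islice(iterator, batch_size)):
        yield batch
```

Note that the connector code below differs slightly: it yields the trailing batch even when it is empty.
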
-class JiraConnector(LoadConnector, PollConnector):
+class JiraConnector(LoadConnector, PollConnector, SlimConnector):
     def __init__(
         self,
         jira_project_url: str,
@@ -213,8 +164,8 @@ class JiraConnector(LoadConnector, PollConnector):
         labels_to_skip: list[str] = JIRA_CONNECTOR_LABELS_TO_SKIP,
     ) -> None:
         self.batch_size = batch_size
-        self.jira_base, self.jira_project = extract_jira_project(jira_project_url)
-        self.jira_client: JIRA | None = None
+        self.jira_base, self._jira_project = extract_jira_project(jira_project_url)
+        self._jira_client: JIRA | None = None
         self._comment_email_blacklist = comment_email_blacklist or []

         self.labels_to_skip = set(labels_to_skip)
@@ -223,54 +174,45 @@ class JiraConnector(LoadConnector, PollConnector):
     def comment_email_blacklist(self) -> tuple:
         return tuple(email.strip() for email in self._comment_email_blacklist)

+    @property
+    def jira_client(self) -> JIRA:
+        if self._jira_client is None:
+            raise ConnectorMissingCredentialError("Jira")
+        return self._jira_client
+
+    @property
+    def quoted_jira_project(self) -> str:
+        # Quote the project name to handle reserved words
+        return f'"{self._jira_project}"'
+
     def load_credentials(self, credentials: dict[str, Any]) -> dict[str, Any] | None:
-        api_token = credentials["jira_api_token"]
-        # if user provide an email we assume it's cloud
-        if "jira_user_email" in credentials:
-            email = credentials["jira_user_email"]
-            self.jira_client = JIRA(
-                basic_auth=(email, api_token),
-                server=self.jira_base,
-                options={"rest_api_version": JIRA_API_VERSION},
-            )
-        else:
-            self.jira_client = JIRA(
-                token_auth=api_token,
-                server=self.jira_base,
-                options={"rest_api_version": JIRA_API_VERSION},
-            )
+        self._jira_client = build_jira_client(
+            credentials=credentials,
+            jira_base=self.jira_base,
+        )
         return None

     def load_from_state(self) -> GenerateDocumentsOutput:
-        if self.jira_client is None:
-            raise ConnectorMissingCredentialError("Jira")
-
-        # Quote the project name to handle reserved words
-        quoted_project = f'"{self.jira_project}"'
-        start_ind = 0
-        while True:
-            doc_batch, fetched_batch_size = fetch_jira_issues_batch(
-                jql=f"project = {quoted_project}",
-                start_index=start_ind,
-                jira_client=self.jira_client,
-                batch_size=self.batch_size,
-                comment_email_blacklist=self.comment_email_blacklist,
-                labels_to_skip=self.labels_to_skip,
-            )
-
-            if doc_batch:
-                yield doc_batch
-
-            start_ind += fetched_batch_size
-            if fetched_batch_size < self.batch_size:
-                break
+        jql = f"project = {self.quoted_jira_project}"
+
+        document_batch = []
+        for doc in fetch_jira_issues_batch(
+            jira_client=self.jira_client,
+            jql=jql,
+            batch_size=_JIRA_FULL_PAGE_SIZE,
+            comment_email_blacklist=self.comment_email_blacklist,
+            labels_to_skip=self.labels_to_skip,
+        ):
+            document_batch.append(doc)
+            if len(document_batch) >= self.batch_size:
+                yield document_batch
+                document_batch = []
+
+        yield document_batch

     def poll_source(
         self, start: SecondsSinceUnixEpoch, end: SecondsSinceUnixEpoch
     ) -> GenerateDocumentsOutput:
-        if self.jira_client is None:
-            raise ConnectorMissingCredentialError("Jira")
-
         start_date_str = datetime.fromtimestamp(start, tz=timezone.utc).strftime(
             "%Y-%m-%d %H:%M"
         )
@@ -278,31 +220,54 @@ class JiraConnector(LoadConnector, PollConnector):
             "%Y-%m-%d %H:%M"
         )

-        # Quote the project name to handle reserved words
-        quoted_project = f'"{self.jira_project}"'
         jql = (
-            f"project = {quoted_project} AND "
+            f"project = {self.quoted_jira_project} AND "
             f"updated >= '{start_date_str}' AND "
             f"updated <= '{end_date_str}'"
         )

-        start_ind = 0
-        while True:
-            doc_batch, fetched_batch_size = fetch_jira_issues_batch(
-                jql=jql,
-                start_index=start_ind,
-                jira_client=self.jira_client,
-                batch_size=self.batch_size,
-                comment_email_blacklist=self.comment_email_blacklist,
-                labels_to_skip=self.labels_to_skip,
-            )
-
-            if doc_batch:
-                yield doc_batch
-
-            start_ind += fetched_batch_size
-            if fetched_batch_size < self.batch_size:
-                break
+        document_batch = []
+        for doc in fetch_jira_issues_batch(
+            jira_client=self.jira_client,
+            jql=jql,
+            batch_size=_JIRA_FULL_PAGE_SIZE,
+            comment_email_blacklist=self.comment_email_blacklist,
+            labels_to_skip=self.labels_to_skip,
+        ):
+            document_batch.append(doc)
+            if len(document_batch) >= self.batch_size:
+                yield document_batch
+                document_batch = []
+
+        yield document_batch
+
+    def retrieve_all_slim_documents(
+        self,
+        start: SecondsSinceUnixEpoch | None = None,
+        end: SecondsSinceUnixEpoch | None = None,
+    ) -> GenerateSlimDocumentOutput:
+        jql = f"project = {self.quoted_jira_project}"
+
+        slim_doc_batch = []
+        for issue in _paginate_jql_search(
+            jira_client=self.jira_client,
+            jql=jql,
+            max_results=_JIRA_SLIM_PAGE_SIZE,
+            fields="key",
+        ):
+            issue_key = best_effort_get_field_from_issue(issue, "key")
+            id = build_jira_url(self.jira_client, issue_key)
+            slim_doc_batch.append(
+                SlimDocument(
+                    id=id,
+                    perm_sync_data=None,
+                )
+            )
+            if len(slim_doc_batch) >= _JIRA_SLIM_PAGE_SIZE:
+                yield slim_doc_batch
+                slim_doc_batch = []
+
+        yield slim_doc_batch


 if __name__ == "__main__":
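
A hypothetical end-to-end run of the new slim path, which fetches only issue keys (500 per page) to build document IDs; the URL and credential values are illustrative:

```python
connector = JiraConnector("https://example.atlassian.net/projects/ENG")
connector.load_credentials(
    {"jira_user_email": "bot@example.com", "jira_api_token": "token"}
)
for slim_batch in connector.retrieve_all_slim_documents():
    print(f"got {len(slim_batch)} issue ids")
```
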
@@ -1,17 +1,136 @@
 """Module with custom fields processing functions"""
+import os
 from typing import Any
 from typing import List
+from urllib.parse import urlparse

 from jira import JIRA
 from jira.resources import CustomFieldOption
 from jira.resources import Issue
 from jira.resources import User

+from danswer.connectors.models import BasicExpertInfo
 from danswer.utils.logger import setup_logger

 logger = setup_logger()

+
+PROJECT_URL_PAT = "projects"
+JIRA_API_VERSION = os.environ.get("JIRA_API_VERSION") or "2"
+
+
+def best_effort_basic_expert_info(obj: Any) -> BasicExpertInfo | None:
+    display_name = None
+    email = None
+    if hasattr(obj, "display_name"):
+        display_name = obj.display_name
+    else:
+        display_name = obj.get("displayName")
+
+    if hasattr(obj, "emailAddress"):
+        email = obj.emailAddress
+    else:
+        email = obj.get("emailAddress")
+
+    if not email and not display_name:
+        return None
+
+    return BasicExpertInfo(display_name=display_name, email=email)
+
+
+def best_effort_get_field_from_issue(jira_issue: Issue, field: str) -> Any:
+    if hasattr(jira_issue.fields, field):
+        return getattr(jira_issue.fields, field)
+
+    try:
+        return jira_issue.raw["fields"][field]
+    except Exception:
+        return None
+
+
+def extract_text_from_adf(adf: dict | None) -> str:
+    """Extracts plain text from Atlassian Document Format:
+    https://developer.atlassian.com/cloud/jira/platform/apis/document/structure/
+
+    WARNING: This function is incomplete and will e.g. skip lists!
+    """
+    texts = []
+    if adf is not None and "content" in adf:
+        for block in adf["content"]:
+            if "content" in block:
+                for item in block["content"]:
+                    if item["type"] == "text":
+                        texts.append(item["text"])
+    return " ".join(texts)
+
+
+def build_jira_url(jira_client: JIRA, issue_key: str) -> str:
+    return f"{jira_client.client_info()}/browse/{issue_key}"
+
+
+def build_jira_client(credentials: dict[str, Any], jira_base: str) -> JIRA:
+    api_token = credentials["jira_api_token"]
+    # if the user provides an email, we assume it's cloud
+    if "jira_user_email" in credentials:
+        email = credentials["jira_user_email"]
+        return JIRA(
+            basic_auth=(email, api_token),
+            server=jira_base,
+            options={"rest_api_version": JIRA_API_VERSION},
+        )
+    else:
+        return JIRA(
+            token_auth=api_token,
+            server=jira_base,
+            options={"rest_api_version": JIRA_API_VERSION},
+        )
+
+
+def extract_jira_project(url: str) -> tuple[str, str]:
+    parsed_url = urlparse(url)
+    jira_base = parsed_url.scheme + "://" + parsed_url.netloc
+
+    # Split the path by '/' and find the position of 'projects' to get the project name
+    split_path = parsed_url.path.split("/")
+    if PROJECT_URL_PAT in split_path:
+        project_pos = split_path.index(PROJECT_URL_PAT)
+        if len(split_path) > project_pos + 1:
+            jira_project = split_path[project_pos + 1]
+        else:
+            raise ValueError("No project name found in the URL")
+    else:
+        raise ValueError("'projects' not found in the URL")
+
+    return jira_base, jira_project
+
+
+def get_comment_strs(
+    issue: Issue, comment_email_blacklist: tuple[str, ...] = ()
+) -> list[str]:
+    comment_strs = []
+    for comment in issue.fields.comment.comments:
+        try:
+            body_text = (
+                comment.body
+                if JIRA_API_VERSION == "2"
+                else extract_text_from_adf(comment.raw["body"])
+            )
+
+            if (
+                hasattr(comment, "author")
+                and hasattr(comment.author, "emailAddress")
+                and comment.author.emailAddress in comment_email_blacklist
+            ):
+                continue  # Skip adding comment if author's email is in blacklist
+
+            comment_strs.append(body_text)
+        except Exception as e:
+            logger.error(f"Failed to process comment due to an error: {e}")
+            continue
+
+    return comment_strs
+
+
 class CustomFieldExtractor:
     @staticmethod
     def _process_custom_field_value(value: Any) -> str:
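
A tiny worked example of `extract_text_from_adf` on a two-paragraph ADF document (shape per the Atlassian docs linked in the docstring):

```python
adf_doc = {
    "type": "doc",
    "content": [
        {"type": "paragraph", "content": [{"type": "text", "text": "Hello"}]},
        {"type": "paragraph", "content": [{"type": "text", "text": "world"}]},
    ],
}
assert extract_text_from_adf(adf_doc) == "Hello world"
```
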
@@ -2,6 +2,7 @@ import io
 from datetime import datetime
 from datetime import timezone

+from googleapiclient.discovery import build  # type: ignore
 from googleapiclient.errors import HttpError  # type: ignore

 from danswer.configs.app_configs import CONTINUE_ON_CONNECTOR_FAILURE
@@ -48,6 +49,67 @@ def _extract_sections_basic(
         return [Section(link=link, text=UNSUPPORTED_FILE_TYPE_CONTENT)]

     try:
+        if mime_type == GDriveMimeType.SPREADSHEET.value:
+            try:
+                sheets_service = build(
+                    "sheets", "v4", credentials=service._http.credentials
+                )
+                spreadsheet = (
+                    sheets_service.spreadsheets()
+                    .get(spreadsheetId=file["id"])
+                    .execute()
+                )
+
+                sections = []
+                for sheet in spreadsheet["sheets"]:
+                    sheet_name = sheet["properties"]["title"]
+                    sheet_id = sheet["properties"]["sheetId"]
+
+                    # Get sheet dimensions
+                    grid_properties = sheet["properties"].get("gridProperties", {})
+                    row_count = grid_properties.get("rowCount", 1000)
+                    column_count = grid_properties.get("columnCount", 26)
+
+                    # Convert column count to letter (e.g., 26 -> Z, 27 -> AA)
+                    end_column = ""
+                    while column_count:
+                        column_count, remainder = divmod(column_count - 1, 26)
+                        end_column = chr(65 + remainder) + end_column
+
+                    range_name = f"'{sheet_name}'!A1:{end_column}{row_count}"
+
+                    try:
+                        result = (
+                            sheets_service.spreadsheets()
+                            .values()
+                            .get(spreadsheetId=file["id"], range=range_name)
+                            .execute()
+                        )
+                        values = result.get("values", [])
+
+                        if values:
+                            text = f"Sheet: {sheet_name}\n"
+                            for row in values:
+                                text += "\t".join(str(cell) for cell in row) + "\n"
+                            sections.append(
+                                Section(
+                                    link=f"{link}#gid={sheet_id}",
+                                    text=text,
+                                )
+                            )
+                    except HttpError as e:
+                        logger.warning(
+                            f"Error fetching data for sheet '{sheet_name}': {e}"
+                        )
+                        continue
+                return sections
+
+            except Exception as e:
+                logger.warning(
+                    f"Ran into exception '{e}' when pulling data from Google Sheet '{file['name']}'."
+                    " Falling back to basic extraction."
+                )
+
         if mime_type in [
             GDriveMimeType.DOC.value,
             GDriveMimeType.PPT.value,
@@ -65,6 +127,7 @@ def _extract_sections_basic(
             .decode("utf-8")
         )
         return [Section(link=link, text=text)]
+
     elif mime_type in [
         GDriveMimeType.PLAIN_TEXT.value,
         GDriveMimeType.MARKDOWN.value,
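
The base-26 conversion above has no zero digit, which is why each `divmod` subtracts one first. Extracted as a standalone helper with worked cases:

```python
def column_count_to_letter(column_count: int) -> str:
    letters = ""
    while column_count:
        column_count, remainder = divmod(column_count - 1, 26)
        letters = chr(65 + remainder) + letters  # 65 == ord("A")
    return letters


assert column_count_to_letter(26) == "Z"
assert column_count_to_letter(27) == "AA"
assert column_count_to_letter(702) == "ZZ"
```
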
@@ -2,8 +2,8 @@ import os

 from sqlalchemy.orm import Session

-from danswer.db.models import SlackBotConfig
-from danswer.db.slack_bot_config import fetch_slack_bot_configs
+from danswer.db.models import SlackChannelConfig
+from danswer.db.slack_channel_config import fetch_slack_channel_configs


 VALID_SLACK_FILTERS = [
@@ -13,46 +13,52 @@ VALID_SLACK_FILTERS = [
 ]


-def get_slack_bot_config_for_channel(
-    channel_name: str | None, db_session: Session
-) -> SlackBotConfig | None:
+def get_slack_channel_config_for_bot_and_channel(
+    db_session: Session,
+    slack_bot_id: int,
+    channel_name: str | None,
+) -> SlackChannelConfig | None:
     if not channel_name:
         return None

-    slack_bot_configs = fetch_slack_bot_configs(db_session=db_session)
+    slack_bot_configs = fetch_slack_channel_configs(
+        db_session=db_session, slack_bot_id=slack_bot_id
+    )
     for config in slack_bot_configs:
-        if channel_name in config.channel_config["channel_names"]:
+        if channel_name in config.channel_config["channel_name"]:
             return config

     return None


-def validate_channel_names(
-    channel_names: list[str],
-    current_slack_bot_config_id: int | None,
+def validate_channel_name(
     db_session: Session,
-) -> list[str]:
-    """Make sure that these channel_names don't exist in other slack bot configs.
-    Returns a list of cleaned up channel names (e.g. '#' removed if present)"""
-    slack_bot_configs = fetch_slack_bot_configs(db_session=db_session)
-    cleaned_channel_names = [
-        channel_name.lstrip("#").lower() for channel_name in channel_names
-    ]
-    for slack_bot_config in slack_bot_configs:
-        if slack_bot_config.id == current_slack_bot_config_id:
+    current_slack_bot_id: int,
+    channel_name: str,
+    current_slack_channel_config_id: int | None,
+) -> str:
+    """Make sure that this channel_name does not exist in other Slack channel configs.
+    Returns a cleaned-up channel name (e.g. '#' removed if present)"""
+    slack_bot_configs = fetch_slack_channel_configs(
+        db_session=db_session,
+        slack_bot_id=current_slack_bot_id,
+    )
+    cleaned_channel_name = channel_name.lstrip("#").lower()
+    for slack_channel_config in slack_bot_configs:
+        if slack_channel_config.id == current_slack_channel_config_id:
             continue

-        for channel_name in cleaned_channel_names:
-            if channel_name in slack_bot_config.channel_config["channel_names"]:
-                raise ValueError(
-                    f"Channel name '{channel_name}' already exists in "
-                    "another slack bot config"
-                )
+        if cleaned_channel_name == slack_channel_config.channel_config["channel_name"]:
+            raise ValueError(
+                f"Channel name '{channel_name}' already exists in "
+                "another Slack channel config within the Slack bot named: "
+                f"{slack_channel_config.slack_bot.name}"
+            )

-    return cleaned_channel_names
+    return cleaned_channel_name
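
Hypothetical admin-side usage of the new validator (`db_session` and `bot` stand in for real objects):

```python
cleaned = validate_channel_name(
    db_session=db_session,
    current_slack_bot_id=bot.id,
    channel_name="#Support",
    current_slack_channel_config_id=None,  # None while creating a new config
)
assert cleaned == "support"
```
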


-# Scaling configurations for multi-tenant Slack bot handling
+# Scaling configurations for multi-tenant Slack channel handling
 TENANT_LOCK_EXPIRATION = 1800  # How long a pod can hold exclusive access to a tenant before other pods can acquire it
 TENANT_HEARTBEAT_INTERVAL = (
     15  # How often pods send heartbeats to indicate they are still processing a tenant

@@ -13,7 +13,7 @@ from danswer.connectors.slack.utils import expert_info_from_slack_id
 from danswer.connectors.slack.utils import make_slack_api_rate_limited
 from danswer.danswerbot.slack.blocks import build_follow_up_resolved_blocks
 from danswer.danswerbot.slack.blocks import get_document_feedback_blocks
-from danswer.danswerbot.slack.config import get_slack_bot_config_for_channel
+from danswer.danswerbot.slack.config import get_slack_channel_config_for_bot_and_channel
 from danswer.danswerbot.slack.constants import DISLIKE_BLOCK_ACTION_ID
 from danswer.danswerbot.slack.constants import FeedbackVisibility
 from danswer.danswerbot.slack.constants import LIKE_BLOCK_ACTION_ID
@@ -117,8 +117,10 @@ def handle_generate_answer_button(
     )

     with get_session_with_tenant(client.tenant_id) as db_session:
-        slack_bot_config = get_slack_bot_config_for_channel(
-            channel_name=channel_name, db_session=db_session
+        slack_channel_config = get_slack_channel_config_for_bot_and_channel(
+            db_session=db_session,
+            slack_bot_id=client.slack_bot_id,
+            channel_name=channel_name,
         )

         handle_regular_answer(
@@ -133,7 +135,7 @@ def handle_generate_answer_button(
                 is_bot_msg=False,
                 is_bot_dm=False,
             ),
-            slack_bot_config=slack_bot_config,
+            slack_channel_config=slack_channel_config,
             receiver_ids=None,
             client=client.web_client,
             tenant_id=client.tenant_id,
@@ -256,11 +258,13 @@ def handle_followup_button(
     channel_name, is_dm = get_channel_name_from_id(
         client=client.web_client, channel_id=channel_id
     )
-    slack_bot_config = get_slack_bot_config_for_channel(
-        channel_name=channel_name, db_session=db_session
+    slack_channel_config = get_slack_channel_config_for_bot_and_channel(
+        db_session=db_session,
+        slack_bot_id=client.slack_bot_id,
+        channel_name=channel_name,
     )
-    if slack_bot_config:
-        tag_names = slack_bot_config.channel_config.get("follow_up_tags")
+    if slack_channel_config:
+        tag_names = slack_channel_config.channel_config.get("follow_up_tags")
     remaining = None
     if tag_names:
         tag_ids, remaining = fetch_user_ids_from_emails(
|
||||
from danswer.danswerbot.slack.utils import slack_usage_report
|
||||
from danswer.danswerbot.slack.utils import update_emote_react
|
||||
from danswer.db.engine import get_session_with_tenant
|
||||
from danswer.db.models import SlackBotConfig
|
||||
from danswer.db.models import SlackChannelConfig
|
||||
from danswer.db.users import add_slack_user_if_not_exists
|
||||
from danswer.utils.logger import setup_logger
|
||||
from shared_configs.configs import SLACK_CHANNEL_ID
|
||||
@@ -106,7 +106,7 @@ def remove_scheduled_feedback_reminder(
|
||||
|
||||
def handle_message(
|
||||
message_info: SlackMessageInfo,
|
||||
slack_bot_config: SlackBotConfig | None,
|
||||
slack_channel_config: SlackChannelConfig | None,
|
||||
client: WebClient,
|
||||
feedback_reminder_id: str | None,
|
||||
tenant_id: str | None,
|
||||
@@ -140,7 +140,7 @@ def handle_message(
|
||||
)
|
||||
|
||||
document_set_names: list[str] | None = None
|
||||
persona = slack_bot_config.persona if slack_bot_config else None
|
||||
persona = slack_channel_config.persona if slack_channel_config else None
|
||||
prompt = None
|
||||
if persona:
|
||||
document_set_names = [
|
||||
@@ -152,8 +152,8 @@ def handle_message(
|
||||
respond_member_group_list = None
|
||||
|
||||
channel_conf = None
|
||||
if slack_bot_config and slack_bot_config.channel_config:
|
||||
channel_conf = slack_bot_config.channel_config
|
||||
if slack_channel_config and slack_channel_config.channel_config:
|
||||
channel_conf = slack_channel_config.channel_config
|
||||
if not bypass_filters and "answer_filters" in channel_conf:
|
||||
if (
|
||||
"questionmark_prefilter" in channel_conf["answer_filters"]
|
||||
@@ -219,7 +219,7 @@ def handle_message(
|
||||
used_standard_answer = handle_standard_answers(
|
||||
message_info=message_info,
|
||||
receiver_ids=send_to,
|
||||
slack_bot_config=slack_bot_config,
|
||||
slack_channel_config=slack_channel_config,
|
||||
prompt=prompt,
|
||||
logger=logger,
|
||||
client=client,
|
||||
@@ -231,7 +231,7 @@ def handle_message(
|
||||
# if no standard answer applies, try a regular answer
|
||||
issue_with_regular_answer = handle_regular_answer(
|
||||
message_info=message_info,
|
||||
slack_bot_config=slack_bot_config,
|
||||
slack_channel_config=slack_channel_config,
|
||||
receiver_ids=send_to,
|
||||
client=client,
|
||||
channel=channel,
|
||||
|
||||

@@ -34,8 +34,8 @@ from danswer.danswerbot.slack.utils import SlackRateLimiter
 from danswer.danswerbot.slack.utils import update_emote_react
 from danswer.db.engine import get_session_with_tenant
 from danswer.db.models import Persona
-from danswer.db.models import SlackBotConfig
 from danswer.db.models import SlackBotResponseType
+from danswer.db.models import SlackChannelConfig
 from danswer.db.persona import fetch_persona_by_id
 from danswer.db.search_settings import get_current_search_settings
 from danswer.db.users import get_user_by_email
@@ -81,7 +81,7 @@ def rate_limits(

 def handle_regular_answer(
     message_info: SlackMessageInfo,
-    slack_bot_config: SlackBotConfig | None,
+    slack_channel_config: SlackChannelConfig | None,
     receiver_ids: list[str] | None,
     client: WebClient,
     channel: str,
@@ -96,7 +96,7 @@ def handle_regular_answer(
     disable_cot: bool = DANSWER_BOT_DISABLE_COT,
     reflexion: bool = ENABLE_DANSWERBOT_REFLEXION,
 ) -> bool:
-    channel_conf = slack_bot_config.channel_config if slack_bot_config else None
+    channel_conf = slack_channel_config.channel_config if slack_channel_config else None

     messages = message_info.thread_messages
     message_ts_to_respond_to = message_info.msg_to_respond
@@ -108,7 +108,7 @@ def handle_regular_answer(
         user = get_user_by_email(message_info.email, db_session)

     document_set_names: list[str] | None = None
-    persona = slack_bot_config.persona if slack_bot_config else None
+    persona = slack_channel_config.persona if slack_channel_config else None
     prompt = None
     if persona:
         document_set_names = [
@@ -120,9 +120,9 @@ def handle_regular_answer(

     bypass_acl = False
     if (
-        slack_bot_config
-        and slack_bot_config.persona
-        and slack_bot_config.persona.document_sets
+        slack_channel_config
+        and slack_channel_config.persona
+        and slack_channel_config.persona.document_sets
     ):
         # For Slack channels, use the full document set, admin will be warned when configuring it
         # with non-public document sets
@@ -131,8 +131,8 @@ def handle_regular_answer(
     # figure out if we want to use citations or quotes
     use_citations = (
         not DANSWER_BOT_USE_QUOTES
-        if slack_bot_config is None
-        else slack_bot_config.response_type == SlackBotResponseType.CITATIONS
+        if slack_channel_config is None
+        else slack_channel_config.response_type == SlackBotResponseType.CITATIONS
     )

     if not message_ts_to_respond_to and not is_bot_msg:
@@ -234,8 +234,8 @@ def handle_regular_answer(
     #     persona.llm_filter_extraction if persona is not None else True
     # )
     auto_detect_filters = (
-        slack_bot_config.enable_auto_filters
-        if slack_bot_config is not None
+        slack_channel_config.enable_auto_filters
+        if slack_channel_config is not None
         else False
     )
     retrieval_details = RetrievalDetails(

@@ -3,7 +3,7 @@ from sqlalchemy.orm import Session

 from danswer.danswerbot.slack.models import SlackMessageInfo
 from danswer.db.models import Prompt
-from danswer.db.models import SlackBotConfig
+from danswer.db.models import SlackChannelConfig
 from danswer.utils.logger import DanswerLoggingAdapter
 from danswer.utils.logger import setup_logger
 from danswer.utils.variable_functionality import fetch_versioned_implementation
@@ -14,7 +14,7 @@ logger = setup_logger()
 def handle_standard_answers(
     message_info: SlackMessageInfo,
     receiver_ids: list[str] | None,
-    slack_bot_config: SlackBotConfig | None,
+    slack_channel_config: SlackChannelConfig | None,
     prompt: Prompt | None,
     logger: DanswerLoggingAdapter,
     client: WebClient,
@@ -29,7 +29,7 @@ def handle_standard_answers(
     return versioned_handle_standard_answers(
         message_info=message_info,
         receiver_ids=receiver_ids,
-        slack_bot_config=slack_bot_config,
+        slack_channel_config=slack_channel_config,
         prompt=prompt,
         logger=logger,
         client=client,
@@ -40,7 +40,7 @@ def handle_standard_answers(
 def _handle_standard_answers(
     message_info: SlackMessageInfo,
     receiver_ids: list[str] | None,
-    slack_bot_config: SlackBotConfig | None,
+    slack_channel_config: SlackChannelConfig | None,
     prompt: Prompt | None,
     logger: DanswerLoggingAdapter,
     client: WebClient,

@@ -4,6 +4,7 @@ import signal
 import sys
 import threading
 import time
+from collections.abc import Callable
 from threading import Event
 from types import FrameType
 from typing import Any
@@ -16,6 +17,7 @@ from prometheus_client import start_http_server
 from slack_sdk import WebClient
 from slack_sdk.socket_mode.request import SocketModeRequest
 from slack_sdk.socket_mode.response import SocketModeResponse
+from sqlalchemy.orm import Session

 from danswer.configs.app_configs import POD_NAME
 from danswer.configs.app_configs import POD_NAMESPACE
@@ -25,7 +27,7 @@ from danswer.configs.danswerbot_configs import DANSWER_BOT_REPHRASE_MESSAGE
 from danswer.configs.danswerbot_configs import DANSWER_BOT_RESPOND_EVERY_CHANNEL
 from danswer.configs.danswerbot_configs import NOTIFY_SLACKBOT_NO_ANSWER
 from danswer.connectors.slack.utils import expert_info_from_slack_id
-from danswer.danswerbot.slack.config import get_slack_bot_config_for_channel
+from danswer.danswerbot.slack.config import get_slack_channel_config_for_bot_and_channel
 from danswer.danswerbot.slack.config import MAX_TENANTS_PER_POD
 from danswer.danswerbot.slack.config import TENANT_ACQUISITION_INTERVAL
 from danswer.danswerbot.slack.config import TENANT_HEARTBEAT_EXPIRATION
@@ -54,20 +56,20 @@ from danswer.danswerbot.slack.handlers.handle_message import (
 )
 from danswer.danswerbot.slack.handlers.handle_message import schedule_feedback_reminder
 from danswer.danswerbot.slack.models import SlackMessageInfo
-from danswer.danswerbot.slack.tokens import fetch_tokens
 from danswer.danswerbot.slack.utils import check_message_limit
 from danswer.danswerbot.slack.utils import decompose_action_id
 from danswer.danswerbot.slack.utils import get_channel_name_from_id
-from danswer.danswerbot.slack.utils import get_danswer_bot_app_id
+from danswer.danswerbot.slack.utils import get_danswer_bot_slack_bot_id
 from danswer.danswerbot.slack.utils import read_slack_thread
 from danswer.danswerbot.slack.utils import remove_danswer_bot_tag
 from danswer.danswerbot.slack.utils import rephrase_slack_message
 from danswer.danswerbot.slack.utils import respond_in_thread
 from danswer.danswerbot.slack.utils import TenantSocketModeClient
-from danswer.db.engine import CURRENT_TENANT_ID_CONTEXTVAR
 from danswer.db.engine import get_all_tenant_ids
 from danswer.db.engine import get_session_with_tenant
+from danswer.db.models import SlackBot
 from danswer.db.search_settings import get_current_search_settings
+from danswer.db.slack_bot import fetch_slack_bots
 from danswer.key_value_store.interface import KvKeyNotFoundError
 from danswer.natural_language_processing.search_nlp_models import EmbeddingModel
 from danswer.natural_language_processing.search_nlp_models import warm_up_bi_encoder
@@ -82,6 +84,8 @@ from shared_configs.configs import MODEL_SERVER_HOST
 from shared_configs.configs import MODEL_SERVER_PORT
 from shared_configs.configs import POSTGRES_DEFAULT_SCHEMA
 from shared_configs.configs import SLACK_CHANNEL_ID
+from shared_configs.contextvars import CURRENT_TENANT_ID_CONTEXTVAR
+

 logger = setup_logger()

@@ -113,8 +117,10 @@ class SlackbotHandler:
     def __init__(self) -> None:
         logger.info("Initializing SlackbotHandler")
         self.tenant_ids: Set[str | None] = set()
-        self.socket_clients: Dict[str | None, TenantSocketModeClient] = {}
-        self.slack_bot_tokens: Dict[str | None, SlackBotTokens] = {}
+        # The keys for these dictionaries are tuples of (tenant_id, slack_bot_id)
+        self.socket_clients: Dict[tuple[str | None, int], TenantSocketModeClient] = {}
+        self.slack_bot_tokens: Dict[tuple[str | None, int], SlackBotTokens] = {}

         self.running = True
         self.pod_id = self.get_pod_id()
         self._shutdown_event = Event()
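
Keying both dictionaries by `(tenant_id, slack_bot_id)` lets one tenant run several Slack bots side by side. The lookup pattern, as a small sketch:

```python
# One client per (tenant, bot); iteration recovers both halves of the key.
socket_clients: dict[tuple[str | None, int], "TenantSocketModeClient"] = {}

for (tenant_id, slack_bot_id), client in socket_clients.items():
    print(f"tenant={tenant_id} bot={slack_bot_id} -> {client}")
```
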
@@ -169,6 +175,52 @@ class SlackbotHandler:
                 logger.exception(f"Error in heartbeat loop: {e}")
             self._shutdown_event.wait(timeout=TENANT_HEARTBEAT_INTERVAL)

+    def _manage_clients_per_tenant(
+        self, db_session: Session, tenant_id: str | None, bot: SlackBot
+    ) -> None:
+        slack_bot_tokens = SlackBotTokens(
+            bot_token=bot.bot_token,
+            app_token=bot.app_token,
+        )
+        tenant_bot_pair = (tenant_id, bot.id)
+
+        # If the tokens are not set, we need to close the socket client and delete the tokens
+        # for the tenant and app
+        if not slack_bot_tokens:
+            logger.debug(
+                f"No Slack bot token found for tenant {tenant_id}, bot {bot.id}"
+            )
+            if tenant_bot_pair in self.socket_clients:
+                asyncio.run(self.socket_clients[tenant_bot_pair].close())
+                del self.socket_clients[tenant_bot_pair]
+                del self.slack_bot_tokens[tenant_bot_pair]
+            return
+
+        tokens_exist = tenant_bot_pair in self.slack_bot_tokens
+        tokens_changed = (
+            tokens_exist and slack_bot_tokens != self.slack_bot_tokens[tenant_bot_pair]
+        )
+        if not tokens_exist or tokens_changed:
+            if tokens_exist:
+                logger.info(
+                    f"Slack Bot tokens have changed for tenant {tenant_id}, bot {bot.id} - reconnecting"
+                )
+            else:
+                search_settings = get_current_search_settings(db_session)
+                embedding_model = EmbeddingModel.from_db_model(
+                    search_settings=search_settings,
+                    server_host=MODEL_SERVER_HOST,
+                    server_port=MODEL_SERVER_PORT,
+                )
+                warm_up_bi_encoder(embedding_model=embedding_model)
+
+            self.slack_bot_tokens[tenant_bot_pair] = slack_bot_tokens
+
+            if tenant_bot_pair in self.socket_clients:
+                asyncio.run(self.socket_clients[tenant_bot_pair].close())
+
+            self.start_socket_client(bot.id, tenant_id, slack_bot_tokens)
+
     def acquire_tenants(self) -> None:
         tenant_ids = get_all_tenant_ids()
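
`_manage_clients_per_tenant` reconnects when tokens are new or changed; the decision reduces to a cache check, assuming `SlackBotTokens` compares by value:

```python
def needs_restart(cache: dict, key: tuple, fresh_tokens) -> bool:
    return key not in cache or cache[key] != fresh_tokens
```
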
@@ -203,6 +255,7 @@ class SlackbotHandler:
                 continue

             logger.debug(f"Acquired lock for tenant {tenant_id}")
+
             self.tenant_ids.add(tenant_id)

         for tenant_id in self.tenant_ids:
@@ -212,57 +265,20 @@ class SlackbotHandler:
             try:
                 with get_session_with_tenant(tenant_id) as db_session:
                     try:
-                        logger.debug(
-                            f"Setting tenant ID context variable for tenant {tenant_id}"
-                        )
-                        slack_bot_tokens = fetch_tokens()
-                        logger.debug(f"Fetched Slack bot tokens for tenant {tenant_id}")
-                        logger.debug(
-                            f"Reset tenant ID context variable for tenant {tenant_id}"
-                        )
-
-                        if not slack_bot_tokens:
-                            logger.debug(
-                                f"No Slack bot token found for tenant {tenant_id}"
-                            )
-                            if tenant_id in self.socket_clients:
-                                asyncio.run(self.socket_clients[tenant_id].close())
-                                del self.socket_clients[tenant_id]
-                                del self.slack_bot_tokens[tenant_id]
-                            continue
-
-                        if (
-                            tenant_id not in self.slack_bot_tokens
-                            or slack_bot_tokens != self.slack_bot_tokens[tenant_id]
-                        ):
-                            if tenant_id in self.slack_bot_tokens:
-                                logger.info(
-                                    f"Slack Bot tokens have changed for tenant {tenant_id} - reconnecting"
-                                )
-                            else:
-                                search_settings = get_current_search_settings(
-                                    db_session
-                                )
-                                embedding_model = EmbeddingModel.from_db_model(
-                                    search_settings=search_settings,
-                                    server_host=MODEL_SERVER_HOST,
-                                    server_port=MODEL_SERVER_PORT,
-                                )
-                                warm_up_bi_encoder(embedding_model=embedding_model)
-
-                            self.slack_bot_tokens[tenant_id] = slack_bot_tokens
-
-                            if self.socket_clients.get(tenant_id):
-                                asyncio.run(self.socket_clients[tenant_id].close())
-
-                            self.start_socket_client(tenant_id, slack_bot_tokens)
+                        bots = fetch_slack_bots(db_session=db_session)
+                        for bot in bots:
+                            self._manage_clients_per_tenant(
+                                db_session=db_session,
+                                tenant_id=tenant_id,
+                                bot=bot,
+                            )

                     except KvKeyNotFoundError:
                         logger.debug(f"Missing Slack Bot tokens for tenant {tenant_id}")
-                        if self.socket_clients.get(tenant_id):
-                            asyncio.run(self.socket_clients[tenant_id].close())
-                            del self.socket_clients[tenant_id]
-                            del self.slack_bot_tokens[tenant_id]
+                        if (tenant_id, bot.id) in self.socket_clients:
+                            asyncio.run(self.socket_clients[tenant_id, bot.id].close())
+                            del self.socket_clients[tenant_id, bot.id]
+                            del self.slack_bot_tokens[tenant_id, bot.id]
             except Exception as e:
                 logger.exception(f"Error handling tenant {tenant_id}: {e}")
             finally:
@@ -281,26 +297,37 @@ class SlackbotHandler:
)

def start_socket_client(
self, tenant_id: str | None, slack_bot_tokens: SlackBotTokens
self, slack_bot_id: int, tenant_id: str | None, slack_bot_tokens: SlackBotTokens
) -> None:
logger.info(f"Starting socket client for tenant {tenant_id}")
socket_client = _get_socket_client(slack_bot_tokens, tenant_id)
logger.info(
f"Starting socket client for tenant: {tenant_id}, app: {slack_bot_id}"
)
socket_client: TenantSocketModeClient = _get_socket_client(
slack_bot_tokens, tenant_id, slack_bot_id
)

# Append the event handler
process_slack_event = create_process_slack_event()
socket_client.socket_mode_request_listeners.append(process_slack_event)  # type: ignore

# Establish a WebSocket connection to the Socket Mode servers
logger.info(f"Connecting socket client for tenant {tenant_id}")
logger.info(
f"Connecting socket client for tenant: {tenant_id}, app: {slack_bot_id}"
)
socket_client.connect()
self.socket_clients[tenant_id] = socket_client
logger.info(f"Started SocketModeClient for tenant {tenant_id}")
self.socket_clients[tenant_id, slack_bot_id] = socket_client
self.tenant_ids.add(tenant_id)
logger.info(
f"Started SocketModeClient for tenant: {tenant_id}, app: {slack_bot_id}"
)

def stop_socket_clients(self) -> None:
logger.info(f"Stopping {len(self.socket_clients)} socket clients")
for tenant_id, client in self.socket_clients.items():
if client:
asyncio.run(client.close())
logger.info(f"Stopped SocketModeClient for tenant {tenant_id}")
for (tenant_id, slack_bot_id), client in self.socket_clients.items():
asyncio.run(client.close())
logger.info(
f"Stopped SocketModeClient for tenant: {tenant_id}, app: {slack_bot_id}"
)

def shutdown(self, signum: int | None, frame: FrameType | None) -> None:
if not self.running:
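Reviewer note: the handler now keys its socket clients by the (tenant_id, slack_bot_id) pair instead of tenant alone, so a single tenant can run several Slack apps concurrently. A minimal sketch of the bookkeeping this implies; SocketClient, register, and close_all are illustrative stand-ins, not names from this diff:

# Minimal sketch of tuple-keyed client bookkeeping; `SocketClient`,
# `register`, and `close_all` are illustrative stand-ins.
import asyncio

class SocketClient:
    async def close(self) -> None:  # placeholder for slack_sdk's close()
        ...

socket_clients: dict[tuple[str | None, int], SocketClient] = {}

def register(tenant_id: str | None, slack_bot_id: int, client: SocketClient) -> None:
    # one connection per (tenant, bot) pair
    socket_clients[tenant_id, slack_bot_id] = client

def close_all() -> None:
    for (tenant_id, slack_bot_id), client in socket_clients.items():
        asyncio.run(client.close())
    socket_clients.clear()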
@@ -384,7 +411,7 @@ def prefilter_requests(req: SocketModeRequest, client: TenantSocketModeClient) -
)
return False

bot_tag_id = get_danswer_bot_app_id(client.web_client)
bot_tag_id = get_danswer_bot_slack_bot_id(client.web_client)
if event_type == "message":
is_dm = event.get("channel_type") == "im"
is_tagged = bot_tag_id and bot_tag_id in msg
@@ -407,13 +434,15 @@ def prefilter_requests(req: SocketModeRequest, client: TenantSocketModeClient) -
)

with get_session_with_tenant(client.tenant_id) as db_session:
slack_bot_config = get_slack_bot_config_for_channel(
channel_name=channel_name, db_session=db_session
slack_channel_config = get_slack_channel_config_for_bot_and_channel(
db_session=db_session,
slack_bot_id=client.slack_bot_id,
channel_name=channel_name,
)
# If DanswerBot is not specifically tagged and the channel is not set to respond to bots, ignore the message
if (not bot_tag_id or bot_tag_id not in msg) and (
not slack_bot_config
or not slack_bot_config.channel_config.get("respond_to_bots")
not slack_channel_config
or not slack_channel_config.channel_config.get("respond_to_bots")
):
channel_specific_logger.info("Ignoring message from bot")
return False
@@ -618,14 +647,16 @@ def process_message(
token = CURRENT_TENANT_ID_CONTEXTVAR.set(client.tenant_id)
try:
with get_session_with_tenant(client.tenant_id) as db_session:
slack_bot_config = get_slack_bot_config_for_channel(
channel_name=channel_name, db_session=db_session
slack_channel_config = get_slack_channel_config_for_bot_and_channel(
db_session=db_session,
slack_bot_id=client.slack_bot_id,
channel_name=channel_name,
)

# Be careful about this default, don't want to accidentally spam every channel
# Users should be able to DM slack bot in their private channels though
if (
slack_bot_config is None
slack_channel_config is None
and not respond_every_channel
# Can't have configs for DMs so don't toss them out
and not is_dm
@@ -636,9 +667,10 @@ def process_message(
return

follow_up = bool(
slack_bot_config
and slack_bot_config.channel_config
and slack_bot_config.channel_config.get("follow_up_tags") is not None
slack_channel_config
and slack_channel_config.channel_config
and slack_channel_config.channel_config.get("follow_up_tags")
is not None
)
feedback_reminder_id = schedule_feedback_reminder(
details=details, client=client.web_client, include_followup=follow_up
@@ -646,7 +678,7 @@ def process_message(

failed = handle_message(
message_info=details,
slack_bot_config=slack_bot_config,
slack_channel_config=slack_channel_config,
client=client.web_client,
feedback_reminder_id=feedback_reminder_id,
tenant_id=client.tenant_id,
@@ -698,26 +730,32 @@ def view_routing(req: SocketModeRequest, client: TenantSocketModeClient) -> None
return process_feedback(req, client)


def process_slack_event(client: TenantSocketModeClient, req: SocketModeRequest) -> None:
# Always respond right away, if Slack doesn't receive these frequently enough
# it will assume the Bot is DEAD!!! :(
acknowledge_message(req, client)
def create_process_slack_event() -> (
Callable[[TenantSocketModeClient, SocketModeRequest], None]
):
def process_slack_event(
client: TenantSocketModeClient, req: SocketModeRequest
) -> None:
# Always respond right away, if Slack doesn't receive these frequently enough
# it will assume the Bot is DEAD!!! :(
acknowledge_message(req, client)

try:
if req.type == "interactive":
if req.payload.get("type") == "block_actions":
return action_routing(req, client)
elif req.payload.get("type") == "view_submission":
return view_routing(req, client)
elif req.type == "events_api" or req.type == "slash_commands":
return process_message(req, client)
except Exception as e:
logger.exception(f"Failed to process slack event. Error: {e}")
logger.error(f"Slack request payload: {req.payload}")
try:
if req.type == "interactive":
if req.payload.get("type") == "block_actions":
return action_routing(req, client)
elif req.payload.get("type") == "view_submission":
return view_routing(req, client)
elif req.type == "events_api" or req.type == "slash_commands":
return process_message(req, client)
except Exception:
logger.exception("Failed to process slack event")

return process_slack_event


def _get_socket_client(
slack_bot_tokens: SlackBotTokens, tenant_id: str | None
slack_bot_tokens: SlackBotTokens, tenant_id: str | None, slack_bot_id: int
) -> TenantSocketModeClient:
# For more info on how to set this up, checkout the docs:
# https://docs.danswer.dev/slack_bot_setup
@@ -726,6 +764,7 @@ def _get_socket_client(
app_token=slack_bot_tokens.app_token,
web_client=WebClient(token=slack_bot_tokens.bot_token),
tenant_id=tenant_id,
slack_bot_id=slack_bot_id,
)
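Reviewer note: the event handler is now produced by a factory so each socket client registers its own callable rather than sharing one module-level function. A minimal sketch of the closure pattern, with placeholder Client/Request types standing in for the slack_sdk ones:

# Minimal sketch of the factory-closure pattern used above; `Client` and
# `Request` are illustrative placeholders for the slack_sdk types.
from collections.abc import Callable

class Client: ...
class Request: ...

def create_handler() -> Callable[[Client, Request], None]:
    def handler(client: Client, req: Request) -> None:
        # each listener registered on a socket client is a fresh closure,
        # so per-client state cannot leak between bots
        print(f"handling {req} for {client}")

    return handler

# usage: one handler instance per socket client
handler = create_handler()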
@@ -1,28 +0,0 @@
import os
from typing import cast

from danswer.configs.constants import KV_SLACK_BOT_TOKENS_CONFIG_KEY
from danswer.key_value_store.factory import get_kv_store
from danswer.server.manage.models import SlackBotTokens


def fetch_tokens() -> SlackBotTokens:
# first check env variables
app_token = os.environ.get("DANSWER_BOT_SLACK_APP_TOKEN")
bot_token = os.environ.get("DANSWER_BOT_SLACK_BOT_TOKEN")
if app_token and bot_token:
return SlackBotTokens(app_token=app_token, bot_token=bot_token)

dynamic_config_store = get_kv_store()
return SlackBotTokens(
**cast(dict, dynamic_config_store.load(key=KV_SLACK_BOT_TOKENS_CONFIG_KEY))
)


def save_tokens(
tokens: SlackBotTokens,
) -> None:
dynamic_config_store = get_kv_store()
dynamic_config_store.store(
key=KV_SLACK_BOT_TOKENS_CONFIG_KEY, val=dict(tokens), encrypt=True
)
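Reviewer note: this file is removed outright (-1,28 +0,0); tokens no longer come from environment variables or the key-value store, but from the per-bot bot_token/app_token columns on the new SlackBot table. A hedged sketch of the replacement lookup, reusing this diff's fetch_slack_bots helper (the tokens_for_all_bots wrapper itself is illustrative):

# Sketch only: derive SlackBotTokens from DB rows instead of env/KV storage.
# `SlackBotTokens` and `fetch_slack_bots` appear elsewhere in this diff;
# the `tokens_for_all_bots` helper itself is illustrative.
from sqlalchemy.orm import Session

from danswer.db.slack_bot import fetch_slack_bots
from danswer.server.manage.models import SlackBotTokens

def tokens_for_all_bots(db_session: Session) -> dict[int, SlackBotTokens]:
    # one token pair per configured Slack app
    return {
        bot.id: SlackBotTokens(bot_token=bot.bot_token, app_token=bot.app_token)
        for bot in fetch_slack_bots(db_session=db_session)
    }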
@@ -30,7 +30,6 @@ from danswer.configs.danswerbot_configs import (
from danswer.connectors.slack.utils import make_slack_api_rate_limited
from danswer.connectors.slack.utils import SlackTextCleaner
from danswer.danswerbot.slack.constants import FeedbackVisibility
from danswer.danswerbot.slack.tokens import fetch_tokens
from danswer.db.engine import get_session_with_tenant
from danswer.db.users import get_user_by_email
from danswer.llm.exceptions import GenAIDisabledException
@@ -47,16 +46,16 @@ from danswer.utils.text_processing import replace_whitespaces_w_space
logger = setup_logger()


_DANSWER_BOT_APP_ID: str | None = None
_DANSWER_BOT_SLACK_BOT_ID: str | None = None
_DANSWER_BOT_MESSAGE_COUNT: int = 0
_DANSWER_BOT_COUNT_START_TIME: float = time.time()


def get_danswer_bot_app_id(web_client: WebClient) -> Any:
global _DANSWER_BOT_APP_ID
if _DANSWER_BOT_APP_ID is None:
_DANSWER_BOT_APP_ID = web_client.auth_test().get("user_id")
return _DANSWER_BOT_APP_ID
def get_danswer_bot_slack_bot_id(web_client: WebClient) -> Any:
global _DANSWER_BOT_SLACK_BOT_ID
if _DANSWER_BOT_SLACK_BOT_ID is None:
_DANSWER_BOT_SLACK_BOT_ID = web_client.auth_test().get("user_id")
return _DANSWER_BOT_SLACK_BOT_ID


def check_message_limit() -> bool:
@@ -137,15 +136,10 @@ def update_emote_react(


def remove_danswer_bot_tag(message_str: str, client: WebClient) -> str:
bot_tag_id = get_danswer_bot_app_id(web_client=client)
bot_tag_id = get_danswer_bot_slack_bot_id(web_client=client)
return re.sub(rf"<@{bot_tag_id}>\s", "", message_str)


def get_web_client() -> WebClient:
slack_tokens = fetch_tokens()
return WebClient(token=slack_tokens.bot_token)


@retry(
tries=DANSWER_BOT_NUM_RETRIES,
delay=0.25,
@@ -437,9 +431,9 @@ def read_slack_thread(
)
message_type = MessageType.USER
else:
self_app_id = get_danswer_bot_app_id(client)
self_slack_bot_id = get_danswer_bot_slack_bot_id(client)

if reply.get("user") == self_app_id:
if reply.get("user") == self_slack_bot_id:
# DanswerBot response
message_type = MessageType.ASSISTANT
user_sem_id = "Assistant"
@@ -582,6 +576,9 @@ def get_feedback_visibility() -> FeedbackVisibility:


class TenantSocketModeClient(SocketModeClient):
def __init__(self, tenant_id: str | None, *args: Any, **kwargs: Any):
def __init__(
self, tenant_id: str | None, slack_bot_id: int, *args: Any, **kwargs: Any
):
super().__init__(*args, **kwargs)
self.tenant_id = tenant_id
self.slack_bot_id = slack_bot_id
@@ -126,6 +126,7 @@ class User(SQLAlchemyBaseUserTableUUID, Base):

# if specified, controls the assistants that are shown to the user + their order
# if not specified, all assistants are shown
auto_scroll: Mapped[bool] = mapped_column(Boolean, default=True)
chosen_assistants: Mapped[list[int] | None] = mapped_column(
postgresql.JSONB(), nullable=True, default=None
)
@@ -350,11 +351,11 @@ class StandardAnswer__StandardAnswerCategory(Base):
)


class SlackBotConfig__StandardAnswerCategory(Base):
__tablename__ = "slack_bot_config__standard_answer_category"
class SlackChannelConfig__StandardAnswerCategory(Base):
__tablename__ = "slack_channel_config__standard_answer_category"

slack_bot_config_id: Mapped[int] = mapped_column(
ForeignKey("slack_bot_config.id"), primary_key=True
slack_channel_config_id: Mapped[int] = mapped_column(
ForeignKey("slack_channel_config.id"), primary_key=True
)
standard_answer_category_id: Mapped[int] = mapped_column(
ForeignKey("standard_answer_category.id"), primary_key=True
@@ -1181,7 +1182,7 @@ class LLMProvider(Base):
default_model_name: Mapped[str] = mapped_column(String)
fast_default_model_name: Mapped[str | None] = mapped_column(String, nullable=True)

# Models to actually disp;aly to users
# Models to actually display to users
# If nulled out, we assume in the application logic we should present all
display_model_names: Mapped[list[str] | None] = mapped_column(
postgresql.ARRAY(String), nullable=True
@@ -1472,7 +1473,7 @@ class ChannelConfig(TypedDict):
"""NOTE: is a `TypedDict` so it can be used as a type hint for a JSONB column
in Postgres"""

channel_names: list[str]
channel_name: str
respond_tag_only: NotRequired[bool]  # defaults to False
respond_to_bots: NotRequired[bool]  # defaults to False
respond_member_group_list: NotRequired[list[str]]
@@ -1487,10 +1488,11 @@ class SlackBotResponseType(str, PyEnum):
CITATIONS = "citations"


class SlackBotConfig(Base):
__tablename__ = "slack_bot_config"
class SlackChannelConfig(Base):
__tablename__ = "slack_channel_config"

id: Mapped[int] = mapped_column(primary_key=True)
slack_bot_id: Mapped[int] = mapped_column(ForeignKey("slack_bot.id"), nullable=True)
persona_id: Mapped[int | None] = mapped_column(
ForeignKey("persona.id"), nullable=True
)
@@ -1507,10 +1509,30 @@ class SlackBotConfig(Base):
)

persona: Mapped[Persona | None] = relationship("Persona")
slack_bot: Mapped["SlackBot"] = relationship(
"SlackBot",
back_populates="slack_channel_configs",
)
standard_answer_categories: Mapped[list["StandardAnswerCategory"]] = relationship(
"StandardAnswerCategory",
secondary=SlackBotConfig__StandardAnswerCategory.__table__,
back_populates="slack_bot_configs",
secondary=SlackChannelConfig__StandardAnswerCategory.__table__,
back_populates="slack_channel_configs",
)


class SlackBot(Base):
__tablename__ = "slack_bot"

id: Mapped[int] = mapped_column(primary_key=True)
name: Mapped[str] = mapped_column(String)
enabled: Mapped[bool] = mapped_column(Boolean, default=True)

bot_token: Mapped[str] = mapped_column(EncryptedString(), unique=True)
app_token: Mapped[str] = mapped_column(EncryptedString(), unique=True)

slack_channel_configs: Mapped[list[SlackChannelConfig]] = relationship(
"SlackChannelConfig",
back_populates="slack_bot",
)
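Reviewer note: the schema now has one SlackBot row (with encrypted token columns) owning many SlackChannelConfig rows through slack_bot_id. A short sketch of walking that relationship, assuming only the models shown above (the channel_names_for_bot helper is illustrative):

# Sketch of the new one-bot-to-many-channel-configs relationship;
# assumes only the SlackBot / SlackChannelConfig models shown in this diff.
from sqlalchemy import select
from sqlalchemy.orm import Session

from danswer.db.models import SlackBot

def channel_names_for_bot(db_session: Session, slack_bot_id: int) -> list[str]:
    bot = db_session.scalar(select(SlackBot).where(SlackBot.id == slack_bot_id))
    if bot is None:
        return []
    # channel_config is a ChannelConfig TypedDict with a "channel_name" key
    return [
        config.channel_config["channel_name"]
        for config in bot.slack_channel_configs
    ]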
@@ -1749,9 +1771,9 @@ class StandardAnswerCategory(Base):
secondary=StandardAnswer__StandardAnswerCategory.__table__,
back_populates="categories",
)
slack_bot_configs: Mapped[list["SlackBotConfig"]] = relationship(
"SlackBotConfig",
secondary=SlackBotConfig__StandardAnswerCategory.__table__,
slack_channel_configs: Mapped[list["SlackChannelConfig"]] = relationship(
"SlackChannelConfig",
secondary=SlackChannelConfig__StandardAnswerCategory.__table__,
back_populates="standard_answer_categories",
)


backend/danswer/db/slack_bot.py (new file, 76 lines)
@@ -0,0 +1,76 @@
from collections.abc import Sequence

from sqlalchemy import select
from sqlalchemy.orm import Session

from danswer.db.models import SlackBot


def insert_slack_bot(
    db_session: Session,
    name: str,
    enabled: bool,
    bot_token: str,
    app_token: str,
) -> SlackBot:
    slack_bot = SlackBot(
        name=name,
        enabled=enabled,
        bot_token=bot_token,
        app_token=app_token,
    )
    db_session.add(slack_bot)
    db_session.commit()

    return slack_bot


def update_slack_bot(
    db_session: Session,
    slack_bot_id: int,
    name: str,
    enabled: bool,
    bot_token: str,
    app_token: str,
) -> SlackBot:
    slack_bot = db_session.scalar(select(SlackBot).where(SlackBot.id == slack_bot_id))
    if slack_bot is None:
        raise ValueError(f"Unable to find Slack Bot with ID {slack_bot_id}")

    # update the app
    slack_bot.name = name
    slack_bot.enabled = enabled
    slack_bot.bot_token = bot_token
    slack_bot.app_token = app_token

    db_session.commit()

    return slack_bot


def fetch_slack_bot(
    db_session: Session,
    slack_bot_id: int,
) -> SlackBot:
    slack_bot = db_session.scalar(select(SlackBot).where(SlackBot.id == slack_bot_id))
    if slack_bot is None:
        raise ValueError(f"Unable to find Slack Bot with ID {slack_bot_id}")

    return slack_bot


def remove_slack_bot(
    db_session: Session,
    slack_bot_id: int,
) -> None:
    slack_bot = fetch_slack_bot(
        db_session=db_session,
        slack_bot_id=slack_bot_id,
    )

    db_session.delete(slack_bot)
    db_session.commit()


def fetch_slack_bots(db_session: Session) -> Sequence[SlackBot]:
    return db_session.scalars(select(SlackBot)).all()
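A quick usage sketch of the new CRUD helpers; the session handling mirrors get_session_with_tenant as used elsewhere in this diff, and the token strings are placeholders:

# Illustrative usage of the new slack_bot CRUD helpers; token strings are fake.
from danswer.db.engine import get_session_with_tenant
from danswer.db.slack_bot import fetch_slack_bots, insert_slack_bot

with get_session_with_tenant(None) as db_session:
    bot = insert_slack_bot(
        db_session=db_session,
        name="support-bot",
        enabled=True,
        bot_token="xoxb-...",  # placeholder
        app_token="xapp-...",  # placeholder
    )
    print(bot.id, [b.name for b in fetch_slack_bots(db_session=db_session)])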
@@ -9,8 +9,8 @@ from danswer.db.constants import SLACK_BOT_PERSONA_PREFIX
from danswer.db.models import ChannelConfig
from danswer.db.models import Persona
from danswer.db.models import Persona__DocumentSet
from danswer.db.models import SlackBotConfig
from danswer.db.models import SlackBotResponseType
from danswer.db.models import SlackChannelConfig
from danswer.db.models import User
from danswer.db.persona import get_default_prompt
from danswer.db.persona import mark_persona_as_deleted
@@ -22,8 +22,8 @@ from danswer.utils.variable_functionality import (
)


def _build_persona_name(channel_names: list[str]) -> str:
return f"{SLACK_BOT_PERSONA_PREFIX}{'-'.join(channel_names)}"
def _build_persona_name(channel_name: str) -> str:
return f"{SLACK_BOT_PERSONA_PREFIX}{channel_name}"


def _cleanup_relationships(db_session: Session, persona_id: int) -> None:
@@ -38,9 +38,9 @@ def _cleanup_relationships(db_session: Session, persona_id: int) -> None:
db_session.delete(rel)


def create_slack_bot_persona(
def create_slack_channel_persona(
db_session: Session,
channel_names: list[str],
channel_name: str,
document_set_ids: list[int],
existing_persona_id: int | None = None,
num_chunks: float = MAX_CHUNKS_FED_TO_CHAT,
@@ -48,11 +48,11 @@ def create_slack_bot_persona(
) -> Persona:
"""NOTE: does not commit changes"""

# create/update persona associated with the slack bot
persona_name = _build_persona_name(channel_names)
# create/update persona associated with the Slack channel
persona_name = _build_persona_name(channel_name)
default_prompt = get_default_prompt(db_session)
persona = upsert_persona(
user=None,  # Slack Bot Personas are not attached to users
user=None,  # Slack channel Personas are not attached to users
persona_id=existing_persona_id,
name=persona_name,
description="",
@@ -78,14 +78,15 @@ def _no_ee_standard_answer_categories(*args: Any, **kwargs: Any) -> list:
return []


def insert_slack_bot_config(
def insert_slack_channel_config(
db_session: Session,
slack_bot_id: int,
persona_id: int | None,
channel_config: ChannelConfig,
response_type: SlackBotResponseType,
standard_answer_category_ids: list[int],
enable_auto_filters: bool,
db_session: Session,
) -> SlackBotConfig:
) -> SlackChannelConfig:
versioned_fetch_standard_answer_categories_by_ids = (
fetch_versioned_implementation_with_fallback(
"danswer.db.standard_answer",
@@ -110,34 +111,37 @@ def insert_slack_bot_config(
f"Some or all categories with ids {standard_answer_category_ids} do not exist"
)

slack_bot_config = SlackBotConfig(
slack_channel_config = SlackChannelConfig(
slack_bot_id=slack_bot_id,
persona_id=persona_id,
channel_config=channel_config,
response_type=response_type,
standard_answer_categories=existing_standard_answer_categories,
enable_auto_filters=enable_auto_filters,
)
db_session.add(slack_bot_config)
db_session.add(slack_channel_config)
db_session.commit()

return slack_bot_config
return slack_channel_config


def update_slack_bot_config(
slack_bot_config_id: int,
def update_slack_channel_config(
db_session: Session,
slack_channel_config_id: int,
persona_id: int | None,
channel_config: ChannelConfig,
response_type: SlackBotResponseType,
standard_answer_category_ids: list[int],
enable_auto_filters: bool,
db_session: Session,
) -> SlackBotConfig:
slack_bot_config = db_session.scalar(
select(SlackBotConfig).where(SlackBotConfig.id == slack_bot_config_id)
) -> SlackChannelConfig:
slack_channel_config = db_session.scalar(
select(SlackChannelConfig).where(
SlackChannelConfig.id == slack_channel_config_id
)
)
if slack_bot_config is None:
if slack_channel_config is None:
raise ValueError(
f"Unable to find slack bot config with ID {slack_bot_config_id}"
f"Unable to find Slack channel config with ID {slack_channel_config_id}"
)

versioned_fetch_standard_answer_categories_by_ids = (
@@ -159,25 +163,25 @@ def update_slack_bot_config(
)

# get the existing persona id before updating the object
existing_persona_id = slack_bot_config.persona_id
existing_persona_id = slack_channel_config.persona_id

# update the config
# NOTE: need to do this before cleaning up the old persona or else we
# will encounter `violates foreign key constraint` errors
slack_bot_config.persona_id = persona_id
slack_bot_config.channel_config = channel_config
slack_bot_config.response_type = response_type
slack_bot_config.standard_answer_categories = list(
slack_channel_config.persona_id = persona_id
slack_channel_config.channel_config = channel_config
slack_channel_config.response_type = response_type
slack_channel_config.standard_answer_categories = list(
existing_standard_answer_categories
)
slack_bot_config.enable_auto_filters = enable_auto_filters
slack_channel_config.enable_auto_filters = enable_auto_filters

# if the persona has changed, then clean up the old persona
if persona_id != existing_persona_id and existing_persona_id:
existing_persona = db_session.scalar(
select(Persona).where(Persona.id == existing_persona_id)
)
# if the existing persona was one created just for use with this Slack Bot,
# if the existing persona was one created just for use with this Slack channel,
# then clean it up
if existing_persona and existing_persona.name.startswith(
SLACK_BOT_PERSONA_PREFIX
@@ -188,28 +192,30 @@ def update_slack_bot_config(

db_session.commit()

return slack_bot_config
return slack_channel_config


def remove_slack_bot_config(
slack_bot_config_id: int,
user: User | None,
def remove_slack_channel_config(
db_session: Session,
slack_channel_config_id: int,
user: User | None,
) -> None:
slack_bot_config = db_session.scalar(
select(SlackBotConfig).where(SlackBotConfig.id == slack_bot_config_id)
slack_channel_config = db_session.scalar(
select(SlackChannelConfig).where(
SlackChannelConfig.id == slack_channel_config_id
)
)
if slack_bot_config is None:
if slack_channel_config is None:
raise ValueError(
f"Unable to find slack bot config with ID {slack_bot_config_id}"
f"Unable to find Slack channel config with ID {slack_channel_config_id}"
)

existing_persona_id = slack_bot_config.persona_id
existing_persona_id = slack_channel_config.persona_id
if existing_persona_id:
existing_persona = db_session.scalar(
select(Persona).where(Persona.id == existing_persona_id)
)
# if the existing persona was one created just for use with this Slack Bot,
# if the existing persona was one created just for use with this Slack channel,
# then clean it up
if existing_persona and existing_persona.name.startswith(
SLACK_BOT_PERSONA_PREFIX
@@ -221,17 +227,28 @@ def remove_slack_bot_config(
persona_id=existing_persona_id, user=user, db_session=db_session
)

db_session.delete(slack_bot_config)
db_session.delete(slack_channel_config)
db_session.commit()


def fetch_slack_bot_config(
db_session: Session, slack_bot_config_id: int
) -> SlackBotConfig | None:
def fetch_slack_channel_configs(
db_session: Session, slack_bot_id: int | None = None
) -> Sequence[SlackChannelConfig]:
if not slack_bot_id:
return db_session.scalars(select(SlackChannelConfig)).all()

return db_session.scalars(
select(SlackChannelConfig).where(
SlackChannelConfig.slack_bot_id == slack_bot_id
)
).all()


def fetch_slack_channel_config(
db_session: Session, slack_channel_config_id: int
) -> SlackChannelConfig | None:
return db_session.scalar(
select(SlackBotConfig).where(SlackBotConfig.id == slack_bot_config_id)
select(SlackChannelConfig).where(
SlackChannelConfig.id == slack_channel_config_id
)
)


def fetch_slack_bot_configs(db_session: Session) -> Sequence[SlackBotConfig]:
return db_session.scalars(select(SlackBotConfig)).all()
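Reviewer note: fetch_slack_channel_configs doubles as list-all and filter-by-bot. A small sketch of both call shapes (the slack_bot_id value is made up):

# Sketch: the optional slack_bot_id narrows the result set; with no argument
# every SlackChannelConfig row is returned.
from danswer.db.engine import get_session_with_tenant
from danswer.db.slack_channel_config import fetch_slack_channel_configs

with get_session_with_tenant(None) as db_session:
    all_configs = fetch_slack_channel_configs(db_session=db_session)
    bot_configs = fetch_slack_channel_configs(db_session=db_session, slack_bot_id=1)
    print(len(all_configs), len(bot_configs))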
@@ -2,6 +2,7 @@ import concurrent.futures
import json
from datetime import datetime
from datetime import timezone
from http import HTTPStatus

import httpx
from retry import retry
@@ -194,6 +195,14 @@ def _index_vespa_chunk(
logger.exception(
f"Failed to index document: '{document.id}'. Got response: '{res.text}'"
)
if isinstance(e, httpx.HTTPStatusError):
if e.response.status_code == HTTPStatus.INSUFFICIENT_STORAGE:
logger.error(
"NOTE: HTTP Status 507 Insufficient Storage usually means "
"you need to allocate more memory or disk space to the "
"Vespa/index container."
)

raise e
@@ -10,7 +10,7 @@ from danswer.connectors.cross_connector_utils.miscellaneous_utils import (
get_metadata_keys_to_ignore,
)
from danswer.connectors.models import Document
from danswer.indexing.indexing_heartbeat import Heartbeat
from danswer.indexing.indexing_heartbeat import IndexingHeartbeatInterface
from danswer.indexing.models import DocAwareChunk
from danswer.natural_language_processing.utils import BaseTokenizer
from danswer.utils.logger import setup_logger
@@ -125,7 +125,7 @@ class Chunker:
chunk_token_limit: int = DOC_EMBEDDING_CONTEXT_SIZE,
chunk_overlap: int = CHUNK_OVERLAP,
mini_chunk_size: int = MINI_CHUNK_SIZE,
heartbeat: Heartbeat | None = None,
callback: IndexingHeartbeatInterface | None = None,
) -> None:
from llama_index.text_splitter import SentenceSplitter

@@ -134,7 +134,7 @@ class Chunker:
self.enable_multipass = enable_multipass
self.enable_large_chunks = enable_large_chunks
self.tokenizer = tokenizer
self.heartbeat = heartbeat
self.callback = callback

self.blurb_splitter = SentenceSplitter(
tokenizer=tokenizer.tokenize,
@@ -356,9 +356,14 @@ class Chunker:
def chunk(self, documents: list[Document]) -> list[DocAwareChunk]:
final_chunks: list[DocAwareChunk] = []
for document in documents:
final_chunks.extend(self._handle_single_document(document))
if self.callback:
if self.callback.should_stop():
raise RuntimeError("Chunker.chunk: Stop signal detected")

if self.heartbeat:
self.heartbeat.heartbeat()
chunks = self._handle_single_document(document)
final_chunks.extend(chunks)

if self.callback:
self.callback.progress("Chunker.chunk", len(chunks))

return final_chunks
@@ -2,7 +2,7 @@ from abc import ABC
from abc import abstractmethod

from danswer.db.models import SearchSettings
from danswer.indexing.indexing_heartbeat import Heartbeat
from danswer.indexing.indexing_heartbeat import IndexingHeartbeatInterface
from danswer.indexing.models import ChunkEmbedding
from danswer.indexing.models import DocAwareChunk
from danswer.indexing.models import IndexChunk
@@ -34,7 +34,7 @@ class IndexingEmbedder(ABC):
api_url: str | None,
api_version: str | None,
deployment_name: str | None,
heartbeat: Heartbeat | None,
callback: IndexingHeartbeatInterface | None,
):
self.model_name = model_name
self.normalize = normalize
@@ -60,7 +60,7 @@ class IndexingEmbedder(ABC):
server_host=INDEXING_MODEL_SERVER_HOST,
server_port=INDEXING_MODEL_SERVER_PORT,
retrim_content=True,
heartbeat=heartbeat,
callback=callback,
)

@abstractmethod
@@ -83,7 +83,7 @@ class DefaultIndexingEmbedder(IndexingEmbedder):
api_url: str | None = None,
api_version: str | None = None,
deployment_name: str | None = None,
heartbeat: Heartbeat | None = None,
callback: IndexingHeartbeatInterface | None = None,
):
super().__init__(
model_name,
@@ -95,7 +95,7 @@ class DefaultIndexingEmbedder(IndexingEmbedder):
api_url,
api_version,
deployment_name,
heartbeat,
callback,
)

@log_function_time()
@@ -201,7 +203,9 @@ class DefaultIndexingEmbedder(IndexingEmbedder):

@classmethod
def from_db_search_settings(
cls, search_settings: SearchSettings, heartbeat: Heartbeat | None = None
cls,
search_settings: SearchSettings,
callback: IndexingHeartbeatInterface | None = None,
) -> "DefaultIndexingEmbedder":
return cls(
model_name=search_settings.model_name,
@@ -213,5 +215,5 @@ class DefaultIndexingEmbedder(IndexingEmbedder):
api_url=search_settings.api_url,
api_version=search_settings.api_version,
deployment_name=search_settings.deployment_name,
heartbeat=heartbeat,
callback=callback,
)
@@ -1,41 +1,15 @@
import abc
from typing import Any

from sqlalchemy import func
from sqlalchemy.orm import Session

from danswer.db.index_attempt import get_index_attempt
from danswer.utils.logger import setup_logger

logger = setup_logger()
from abc import ABC
from abc import abstractmethod


class Heartbeat(abc.ABC):
"""Useful for any long-running work that goes through a bunch of items
and needs to occasionally give updates on progress.
e.g. chunking, embedding, updating vespa, etc."""
class IndexingHeartbeatInterface(ABC):
"""Defines a callback interface to be passed to
to run_indexing_entrypoint."""

@abc.abstractmethod
def heartbeat(self, metadata: Any = None) -> None:
raise NotImplementedError
@abstractmethod
def should_stop(self) -> bool:
"""Signal to stop the looping function in flight."""


class IndexingHeartbeat(Heartbeat):
def __init__(self, index_attempt_id: int, db_session: Session, freq: int):
self.cnt = 0

self.index_attempt_id = index_attempt_id
self.db_session = db_session
self.freq = freq

def heartbeat(self, metadata: Any = None) -> None:
self.cnt += 1
if self.cnt % self.freq == 0:
index_attempt = get_index_attempt(
db_session=self.db_session, index_attempt_id=self.index_attempt_id
)
if index_attempt:
index_attempt.time_updated = func.now()
self.db_session.commit()
else:
logger.error("Index attempt not found, this should not happen!")
@abstractmethod
def progress(self, tag: str, amount: int) -> None:
"""Send progress updates to the caller."""
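Reviewer note: the push-style Heartbeat is replaced by a two-way IndexingHeartbeatInterface: should_stop gives the caller cooperative cancellation, and progress replaces the periodic heartbeat. A minimal conforming implementation; the threading.Event-based stop flag is an assumption for illustration, not something this diff prescribes:

# Minimal illustrative implementation of the new callback interface;
# the threading.Event-based cancellation is an assumption, not from the diff.
import threading

from danswer.indexing.indexing_heartbeat import IndexingHeartbeatInterface

class SimpleCallback(IndexingHeartbeatInterface):
    def __init__(self) -> None:
        self.stop_event = threading.Event()
        self.total = 0

    def should_stop(self) -> bool:
        # checked by Chunker.chunk and EmbeddingModel._batch_encode_texts
        return self.stop_event.is_set()

    def progress(self, tag: str, amount: int) -> None:
        self.total += amount
        print(f"{tag}: +{amount} (total {self.total})")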
@@ -1,7 +1,9 @@
import traceback
from functools import partial
from http import HTTPStatus
from typing import Protocol

import httpx
from pydantic import BaseModel
from pydantic import ConfigDict
from sqlalchemy.orm import Session
@@ -32,7 +34,7 @@ from danswer.document_index.interfaces import DocumentIndex
from danswer.document_index.interfaces import DocumentMetadata
from danswer.indexing.chunker import Chunker
from danswer.indexing.embedder import IndexingEmbedder
from danswer.indexing.indexing_heartbeat import IndexingHeartbeat
from danswer.indexing.indexing_heartbeat import IndexingHeartbeatInterface
from danswer.indexing.models import DocAwareChunk
from danswer.indexing.models import DocMetadataAwareIndexChunk
from danswer.utils.logger import setup_logger
@@ -154,6 +156,14 @@ def index_doc_batch_with_handler(
tenant_id=tenant_id,
)
except Exception as e:
if isinstance(e, httpx.HTTPStatusError):
if e.response.status_code == HTTPStatus.INSUFFICIENT_STORAGE:
logger.error(
"NOTE: HTTP Status 507 Insufficient Storage indicates "
"you need to allocate more memory or disk space to the "
"Vespa/index container."
)

if INDEXING_EXCEPTION_LIMIT == 0:
raise

@@ -404,6 +414,7 @@ def build_indexing_pipeline(
ignore_time_skip: bool = False,
attempt_id: int | None = None,
tenant_id: str | None = None,
callback: IndexingHeartbeatInterface | None = None,
) -> IndexingPipelineProtocol:
"""Builds a pipeline which takes in a list (batch) of docs and indexes them."""
search_settings = get_current_search_settings(db_session)
@@ -430,13 +441,8 @@ def build_indexing_pipeline(
tokenizer=embedder.embedding_model.tokenizer,
enable_multipass=multipass,
enable_large_chunks=enable_large_chunks,
# after every doc, update status in case there are a bunch of
# really long docs
heartbeat=IndexingHeartbeat(
index_attempt_id=attempt_id, db_session=db_session, freq=1
)
if attempt_id
else None,
# after every doc, update status in case there are a bunch of really long docs
callback=callback,
)

return partial(
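Reviewer note: build_indexing_pipeline no longer constructs an IndexingHeartbeat itself; the caller owns the callback and the builder merely forwards it to the Chunker and embedder. A self-contained sketch of that dependency-injection shape; every name below is an illustrative stand-in, not the real pipeline:

# Sketch of the dependency-injection pattern adopted here: the caller owns the
# callback and the builder just forwards it. All names are illustrative.
from collections.abc import Callable
from dataclasses import dataclass

class Callback:
    def should_stop(self) -> bool:
        return False

    def progress(self, tag: str, amount: int) -> None:
        print(tag, amount)

@dataclass
class Chunker:
    callback: Callback | None = None

def build_pipeline(callback: Callback | None = None) -> Callable[[list[str]], int]:
    chunker = Chunker(callback=callback)  # forwarded, never constructed inside

    def pipeline(docs: list[str]) -> int:
        for _doc in docs:
            if chunker.callback and chunker.callback.should_stop():
                raise RuntimeError("stop signal detected")
            if chunker.callback:
                chunker.callback.progress("pipeline", 1)
        return len(docs)

    return pipeline

print(build_pipeline(Callback())(["a", "b"]))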
@@ -231,16 +231,16 @@ class QuotesProcessor:

model_previous = self.model_output
self.model_output += token

if not self.found_answer_start:
m = answer_pattern.search(self.model_output)
if m:
self.found_answer_start = True

# Prevent heavy cases of hallucinations
if self.is_json_prompt and len(self.model_output) > 70:
logger.warning("LLM did not produce json as prompted")
if self.is_json_prompt and len(self.model_output) > 400:
self.found_answer_end = True
logger.warning("LLM did not produce json as prompted")
logger.debug("Model output thus far:", self.model_output)
return

remaining = self.model_output[m.end() :]
@@ -16,7 +16,7 @@ from danswer.configs.model_configs import (
)
from danswer.configs.model_configs import DOC_EMBEDDING_CONTEXT_SIZE
from danswer.db.models import SearchSettings
from danswer.indexing.indexing_heartbeat import Heartbeat
from danswer.indexing.indexing_heartbeat import IndexingHeartbeatInterface
from danswer.natural_language_processing.utils import get_tokenizer
from danswer.natural_language_processing.utils import tokenizer_trim_content
from danswer.utils.logger import setup_logger
@@ -99,7 +99,7 @@ class EmbeddingModel:
api_url: str | None,
provider_type: EmbeddingProvider | None,
retrim_content: bool = False,
heartbeat: Heartbeat | None = None,
callback: IndexingHeartbeatInterface | None = None,
api_version: str | None = None,
deployment_name: str | None = None,
) -> None:
@@ -116,7 +116,7 @@ class EmbeddingModel:
self.tokenizer = get_tokenizer(
model_name=model_name, provider_type=provider_type
)
self.heartbeat = heartbeat
self.callback = callback

model_server_url = build_model_server_url(server_host, server_port)
self.embed_server_endpoint = f"{model_server_url}/encoder/bi-encoder-embed"
@@ -160,6 +160,10 @@ class EmbeddingModel:

embeddings: list[Embedding] = []
for idx, text_batch in enumerate(text_batches, start=1):
if self.callback:
if self.callback.should_stop():
raise RuntimeError("_batch_encode_texts detected stop signal")

logger.debug(f"Encoding batch {idx} of {len(text_batches)}")
embed_request = EmbedRequest(
model_name=self.model_name,
@@ -179,8 +183,8 @@ class EmbeddingModel:
response = self._make_model_server_request(embed_request)
embeddings.extend(response.embeddings)

if self.heartbeat:
self.heartbeat.heartbeat()
if self.callback:
self.callback.progress("_batch_encode_texts", 1)
return embeddings

def encode(
@@ -14,9 +14,10 @@ from danswer.configs.constants import DanswerCeleryPriority
from danswer.configs.constants import DanswerCeleryQueues
from danswer.db.connector_credential_pair import get_connector_credential_pair_from_id
from danswer.db.document import construct_document_select_for_connector_credential_pair
from danswer.db.models import Document as DbDocument


class RedisConnectorDeletionFenceData(BaseModel):
class RedisConnectorDeletePayload(BaseModel):
num_tasks: int | None
submitted: datetime

@@ -53,20 +54,18 @@ class RedisConnectorDelete:
return False

@property
def payload(self) -> RedisConnectorDeletionFenceData | None:
def payload(self) -> RedisConnectorDeletePayload | None:
# read related data and evaluate/print task progress
fence_bytes = cast(bytes, self.redis.get(self.fence_key))
if fence_bytes is None:
return None

fence_str = fence_bytes.decode("utf-8")
payload = RedisConnectorDeletionFenceData.model_validate_json(
cast(str, fence_str)
)
payload = RedisConnectorDeletePayload.model_validate_json(cast(str, fence_str))

return payload

def set_fence(self, payload: RedisConnectorDeletionFenceData | None) -> None:
def set_fence(self, payload: RedisConnectorDeletePayload | None) -> None:
if not payload:
self.redis.delete(self.fence_key)
return
@@ -98,7 +97,8 @@ class RedisConnectorDelete:
stmt = construct_document_select_for_connector_credential_pair(
cc_pair.connector_id, cc_pair.credential_id
)
for doc in db_session.scalars(stmt).yield_per(1):
for doc_temp in db_session.scalars(stmt).yield_per(1):
doc: DbDocument = doc_temp
current_time = time.monotonic()
if current_time - last_lock_time >= (
CELERY_VESPA_SYNC_BEAT_LOCK_TIMEOUT / 4
@@ -130,6 +130,10 @@ class RedisConnectorDelete:

return len(async_results)

def reset(self) -> None:
self.redis.delete(self.taskset_key)
self.redis.delete(self.fence_key)

@staticmethod
def remove_from_taskset(id: int, task_id: str, r: redis.Redis) -> None:
taskset_key = f"{RedisConnectorDelete.TASKSET_PREFIX}_{id}"

@@ -150,6 +150,12 @@ class RedisConnectorPrune:

return len(async_results)

def reset(self) -> None:
self.redis.delete(self.generator_progress_key)
self.redis.delete(self.generator_complete_key)
self.redis.delete(self.taskset_key)
self.redis.delete(self.fence_key)

@staticmethod
def remove_from_taskset(id: int, task_id: str, r: redis.Redis) -> None:
taskset_key = f"{RedisConnectorPrune.TASKSET_PREFIX}_{id}"
@@ -865,21 +865,31 @@ def connector_run_once(
)
if attempt_id:
logger.info(
f"try_creating_indexing_task succeeded: cc_pair={cc_pair.id} attempt_id={attempt_id}"
f"connector_run_once - try_creating_indexing_task succeeded: "
f"connector={run_info.connector_id} "
f"cc_pair={cc_pair.id} "
f"attempt={attempt_id} "
)
index_attempt_ids.append(attempt_id)
else:
logger.info(f"try_creating_indexing_task failed: cc_pair={cc_pair.id}")
logger.info(
f"connector_run_once - try_creating_indexing_task failed: "
f"connector={run_info.connector_id} "
f"cc_pair={cc_pair.id}"
)

if not index_attempt_ids:
msg = "No new indexing attempts created, indexing jobs are queued or running."
logger.info(msg)
raise HTTPException(
status_code=400,
detail="No new indexing attempts created, indexing jobs are queued or running.",
detail=msg,
)

msg = f"Successfully created {len(index_attempt_ids)} index attempts. {index_attempt_ids}"
return StatusResponse(
success=True,
message=f"Successfully created {len(index_attempt_ids)} index attempts",
message=msg,
data=index_attempt_ids,
)
@@ -13,8 +13,9 @@ from danswer.configs.constants import AuthType
from danswer.danswerbot.slack.config import VALID_SLACK_FILTERS
from danswer.db.models import AllowedAnswerFilters
from danswer.db.models import ChannelConfig
from danswer.db.models import SlackBotConfig as SlackBotConfigModel
from danswer.db.models import SlackBot as SlackAppModel
from danswer.db.models import SlackBotResponseType
from danswer.db.models import SlackChannelConfig as SlackChannelConfigModel
from danswer.db.models import User
from danswer.search.models import SavedSearchSettings
from danswer.server.features.persona.models import PersonaSnapshot
@@ -44,6 +45,7 @@ class UserPreferences(BaseModel):
visible_assistants: list[int] = []
recent_assistants: list[int] | None = None
default_model: str | None = None
auto_scroll: bool | None = None


class UserInfo(BaseModel):
@@ -78,6 +80,7 @@ class UserInfo(BaseModel):
role=user.role,
preferences=(
UserPreferences(
auto_scroll=user.auto_scroll,
chosen_assistants=user.chosen_assistants,
default_model=user.default_model,
hidden_assistants=user.hidden_assistants,
@@ -127,22 +130,36 @@ class HiddenUpdateRequest(BaseModel):
hidden: bool


class AutoScrollRequest(BaseModel):
auto_scroll: bool | None


class SlackBotCreationRequest(BaseModel):
name: str
enabled: bool

bot_token: str
app_token: str


class SlackBotTokens(BaseModel):
bot_token: str
app_token: str
model_config = ConfigDict(frozen=True)


class SlackBotConfigCreationRequest(BaseModel):
# currently, a persona is created for each slack bot config
class SlackChannelConfigCreationRequest(BaseModel):
slack_bot_id: int
# currently, a persona is created for each Slack channel config
# in the future, `document_sets` will probably be replaced
# by an optional `PersonaSnapshot` object. Keeping it like this
# for now for simplicity / speed of development
document_sets: list[int] | None = None
persona_id: (
int | None
) = None  # NOTE: only one of `document_sets` / `persona_id` should be set
channel_names: list[str]

# NOTE: only one of `document_sets` / `persona_id` should be set
persona_id: int | None = None

channel_name: str
respond_tag_only: bool = False
respond_to_bots: bool = False
enable_auto_filters: bool = False
@@ -165,14 +182,17 @@ class SlackBotConfigCreationRequest(BaseModel):
return value

@model_validator(mode="after")
def validate_document_sets_and_persona_id(self) -> "SlackBotConfigCreationRequest":
def validate_document_sets_and_persona_id(
self,
) -> "SlackChannelConfigCreationRequest":
if self.document_sets and self.persona_id:
raise ValueError("Only one of `document_sets` / `persona_id` should be set")

return self


class SlackBotConfig(BaseModel):
class SlackChannelConfig(BaseModel):
slack_bot_id: int
id: int
persona: PersonaSnapshot | None
channel_config: ChannelConfig
@@ -183,25 +203,53 @@ class SlackBotConfig(BaseModel):

@classmethod
def from_model(
cls, slack_bot_config_model: SlackBotConfigModel
) -> "SlackBotConfig":
cls, slack_channel_config_model: SlackChannelConfigModel
) -> "SlackChannelConfig":
return cls(
id=slack_bot_config_model.id,
id=slack_channel_config_model.id,
slack_bot_id=slack_channel_config_model.slack_bot_id,
persona=(
PersonaSnapshot.from_model(
slack_bot_config_model.persona, allow_deleted=True
slack_channel_config_model.persona, allow_deleted=True
)
if slack_bot_config_model.persona
if slack_channel_config_model.persona
else None
),
channel_config=slack_bot_config_model.channel_config,
response_type=slack_bot_config_model.response_type,
channel_config=slack_channel_config_model.channel_config,
response_type=slack_channel_config_model.response_type,
# XXX this is going away soon
standard_answer_categories=[
StandardAnswerCategory.from_model(standard_answer_category_model)
for standard_answer_category_model in slack_bot_config_model.standard_answer_categories
for standard_answer_category_model in slack_channel_config_model.standard_answer_categories
],
enable_auto_filters=slack_bot_config_model.enable_auto_filters,
enable_auto_filters=slack_channel_config_model.enable_auto_filters,
)


class SlackBot(BaseModel):
"""
This model is identical to the SlackAppModel, but it contains
a `configs_count` field to make it easier to fetch the number
of SlackChannelConfigs associated with a SlackBot.
"""

id: int
name: str
enabled: bool
configs_count: int

bot_token: str
app_token: str

@classmethod
def from_model(cls, slack_bot_model: SlackAppModel) -> "SlackBot":
return cls(
id=slack_bot_model.id,
name=slack_bot_model.name,
enabled=slack_bot_model.enabled,
bot_token=slack_bot_model.bot_token,
app_token=slack_bot_model.app_token,
configs_count=len(slack_bot_model.slack_channel_configs),
)
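For reference, a standalone sketch of the mutual-exclusion validation used above (pydantic v2 model_validator); field names mirror the diff, but this trimmed model omits the request's other fields such as response_type:

# Standalone sketch of the mutual-exclusion validation used above
# (pydantic v2 model_validator); field names mirror the diff.
from pydantic import BaseModel, model_validator

class ChannelConfigRequest(BaseModel):
    slack_bot_id: int
    channel_name: str
    document_sets: list[int] | None = None
    persona_id: int | None = None

    @model_validator(mode="after")
    def validate_document_sets_and_persona_id(self) -> "ChannelConfigRequest":
        if self.document_sets and self.persona_id:
            raise ValueError("Only one of `document_sets` / `persona_id` should be set")
        return self

ok = ChannelConfigRequest(slack_bot_id=1, channel_name="ask-danswer", document_sets=[3])
print(ok.channel_name)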
@@ -4,53 +4,57 @@ from fastapi import HTTPException
from sqlalchemy.orm import Session

from danswer.auth.users import current_admin_user
from danswer.danswerbot.slack.config import validate_channel_names
from danswer.danswerbot.slack.tokens import fetch_tokens
from danswer.danswerbot.slack.tokens import save_tokens
from danswer.danswerbot.slack.config import validate_channel_name
from danswer.db.constants import SLACK_BOT_PERSONA_PREFIX
from danswer.db.engine import get_session
from danswer.db.models import ChannelConfig
from danswer.db.models import User
from danswer.db.persona import get_persona_by_id
from danswer.db.slack_bot_config import create_slack_bot_persona
from danswer.db.slack_bot_config import fetch_slack_bot_config
from danswer.db.slack_bot_config import fetch_slack_bot_configs
from danswer.db.slack_bot_config import insert_slack_bot_config
from danswer.db.slack_bot_config import remove_slack_bot_config
from danswer.db.slack_bot_config import update_slack_bot_config
from danswer.key_value_store.interface import KvKeyNotFoundError
from danswer.server.manage.models import SlackBotConfig
from danswer.server.manage.models import SlackBotConfigCreationRequest
from danswer.server.manage.models import SlackBotTokens
from danswer.db.slack_bot import fetch_slack_bot
from danswer.db.slack_bot import fetch_slack_bots
from danswer.db.slack_bot import insert_slack_bot
from danswer.db.slack_bot import remove_slack_bot
from danswer.db.slack_bot import update_slack_bot
from danswer.db.slack_channel_config import create_slack_channel_persona
from danswer.db.slack_channel_config import fetch_slack_channel_config
from danswer.db.slack_channel_config import fetch_slack_channel_configs
from danswer.db.slack_channel_config import insert_slack_channel_config
from danswer.db.slack_channel_config import remove_slack_channel_config
from danswer.db.slack_channel_config import update_slack_channel_config
from danswer.server.manage.models import SlackBot
from danswer.server.manage.models import SlackBotCreationRequest
from danswer.server.manage.models import SlackChannelConfig
from danswer.server.manage.models import SlackChannelConfigCreationRequest


router = APIRouter(prefix="/manage")


def _form_channel_config(
slack_bot_config_creation_request: SlackBotConfigCreationRequest,
current_slack_bot_config_id: int | None,
db_session: Session,
slack_channel_config_creation_request: SlackChannelConfigCreationRequest,
current_slack_channel_config_id: int | None,
) -> ChannelConfig:
raw_channel_names = slack_bot_config_creation_request.channel_names
respond_tag_only = slack_bot_config_creation_request.respond_tag_only
raw_channel_name = slack_channel_config_creation_request.channel_name
respond_tag_only = slack_channel_config_creation_request.respond_tag_only
respond_member_group_list = (
slack_bot_config_creation_request.respond_member_group_list
slack_channel_config_creation_request.respond_member_group_list
)
answer_filters = slack_bot_config_creation_request.answer_filters
follow_up_tags = slack_bot_config_creation_request.follow_up_tags
answer_filters = slack_channel_config_creation_request.answer_filters
follow_up_tags = slack_channel_config_creation_request.follow_up_tags

if not raw_channel_names:
if not raw_channel_name:
raise HTTPException(
status_code=400,
detail="Must provide at least one channel name",
)

try:
cleaned_channel_names = validate_channel_names(
channel_names=raw_channel_names,
current_slack_bot_config_id=current_slack_bot_config_id,
cleaned_channel_name = validate_channel_name(
db_session=db_session,
channel_name=raw_channel_name,
current_slack_channel_config_id=current_slack_channel_config_id,
current_slack_bot_id=slack_channel_config_creation_request.slack_bot_id,
)
except ValueError as e:
raise HTTPException(
@@ -65,7 +69,7 @@ def _form_channel_config(
)

channel_config: ChannelConfig = {
"channel_names": cleaned_channel_names,
"channel_name": cleaned_channel_name,
}
if respond_tag_only is not None:
channel_config["respond_tag_only"] = respond_tag_only
@@ -78,69 +82,73 @@ def _form_channel_config(

channel_config[
"respond_to_bots"
] = slack_bot_config_creation_request.respond_to_bots
] = slack_channel_config_creation_request.respond_to_bots

return channel_config
@router.post("/admin/slack-bot/config")
|
||||
def create_slack_bot_config(
|
||||
slack_bot_config_creation_request: SlackBotConfigCreationRequest,
|
||||
@router.post("/admin/slack-app/channel")
|
||||
def create_slack_channel_config(
|
||||
slack_channel_config_creation_request: SlackChannelConfigCreationRequest,
|
||||
db_session: Session = Depends(get_session),
|
||||
_: User | None = Depends(current_admin_user),
|
||||
) -> SlackBotConfig:
|
||||
) -> SlackChannelConfig:
|
||||
channel_config = _form_channel_config(
|
||||
slack_bot_config_creation_request, None, db_session
|
||||
db_session=db_session,
|
||||
slack_channel_config_creation_request=slack_channel_config_creation_request,
|
||||
current_slack_channel_config_id=None,
|
||||
)
|
||||
|
||||
persona_id = None
|
||||
if slack_bot_config_creation_request.persona_id is not None:
|
||||
persona_id = slack_bot_config_creation_request.persona_id
|
||||
elif slack_bot_config_creation_request.document_sets:
|
||||
persona_id = create_slack_bot_persona(
|
||||
if slack_channel_config_creation_request.persona_id is not None:
|
||||
persona_id = slack_channel_config_creation_request.persona_id
|
||||
elif slack_channel_config_creation_request.document_sets:
|
||||
persona_id = create_slack_channel_persona(
|
||||
db_session=db_session,
|
||||
channel_names=channel_config["channel_names"],
|
||||
document_set_ids=slack_bot_config_creation_request.document_sets,
|
||||
channel_name=channel_config["channel_name"],
|
||||
document_set_ids=slack_channel_config_creation_request.document_sets,
|
||||
existing_persona_id=None,
|
||||
).id
|
||||
|
||||
slack_bot_config_model = insert_slack_bot_config(
|
||||
slack_channel_config_model = insert_slack_channel_config(
|
||||
slack_bot_id=slack_channel_config_creation_request.slack_bot_id,
|
||||
persona_id=persona_id,
|
||||
channel_config=channel_config,
|
||||
response_type=slack_bot_config_creation_request.response_type,
|
||||
# XXX this is going away soon
|
||||
standard_answer_category_ids=slack_bot_config_creation_request.standard_answer_categories,
|
||||
response_type=slack_channel_config_creation_request.response_type,
|
||||
standard_answer_category_ids=slack_channel_config_creation_request.standard_answer_categories,
|
||||
db_session=db_session,
|
||||
enable_auto_filters=slack_bot_config_creation_request.enable_auto_filters,
|
||||
enable_auto_filters=slack_channel_config_creation_request.enable_auto_filters,
|
||||
)
|
||||
return SlackBotConfig.from_model(slack_bot_config_model)
|
||||
return SlackChannelConfig.from_model(slack_channel_config_model)
|
||||
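Reviewer note: the admin route moves from /admin/slack-bot/config to /admin/slack-app/channel under the existing /manage prefix. A hedged example of calling the new endpoint; the host, auth handling, and exact required body fields are assumptions, not taken from this diff:

# Illustrative call to the new endpoint; base URL, auth, and the exact
# required body fields are assumptions, not taken from this diff.
import requests

resp = requests.post(
    "http://localhost:8080/manage/admin/slack-app/channel",
    json={
        "slack_bot_id": 1,
        "channel_name": "ask-danswer",
        "document_sets": [3],
    },
    timeout=30,
)
print(resp.status_code, resp.json())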

@router.patch("/admin/slack-bot/config/{slack_bot_config_id}")
def patch_slack_bot_config(
slack_bot_config_id: int,
slack_bot_config_creation_request: SlackBotConfigCreationRequest,
@router.patch("/admin/slack-app/channel/{slack_channel_config_id}")
def patch_slack_channel_config(
slack_channel_config_id: int,
slack_channel_config_creation_request: SlackChannelConfigCreationRequest,
db_session: Session = Depends(get_session),
_: User | None = Depends(current_admin_user),
) -> SlackBotConfig:
) -> SlackChannelConfig:
channel_config = _form_channel_config(
slack_bot_config_creation_request, slack_bot_config_id, db_session
db_session=db_session,
slack_channel_config_creation_request=slack_channel_config_creation_request,
current_slack_channel_config_id=slack_channel_config_id,
)

persona_id = None
if slack_bot_config_creation_request.persona_id is not None:
persona_id = slack_bot_config_creation_request.persona_id
elif slack_bot_config_creation_request.document_sets:
existing_slack_bot_config = fetch_slack_bot_config(
db_session=db_session, slack_bot_config_id=slack_bot_config_id
if slack_channel_config_creation_request.persona_id is not None:
persona_id = slack_channel_config_creation_request.persona_id
elif slack_channel_config_creation_request.document_sets:
existing_slack_channel_config = fetch_slack_channel_config(
db_session=db_session, slack_channel_config_id=slack_channel_config_id
)
if existing_slack_bot_config is None:
if existing_slack_channel_config is None:
raise HTTPException(
status_code=404,
detail="Slack bot config not found",
detail="Slack channel config not found",
)

existing_persona_id = existing_slack_bot_config.persona_id
existing_persona_id = existing_slack_channel_config.persona_id
if existing_persona_id is not None:
persona = get_persona_by_id(
persona_id=existing_persona_id,
@@ -155,62 +163,133 @@ def patch_slack_bot_config(
# for this DanswerBot config
existing_persona_id = None
else:
existing_persona_id = existing_slack_bot_config.persona_id
existing_persona_id = existing_slack_channel_config.persona_id

persona_id = create_slack_bot_persona(
persona_id = create_slack_channel_persona(
db_session=db_session,
channel_names=channel_config["channel_names"],
document_set_ids=slack_bot_config_creation_request.document_sets,
channel_name=channel_config["channel_name"],
document_set_ids=slack_channel_config_creation_request.document_sets,
existing_persona_id=existing_persona_id,
enable_auto_filters=slack_bot_config_creation_request.enable_auto_filters,
enable_auto_filters=slack_channel_config_creation_request.enable_auto_filters,
).id

slack_bot_config_model = update_slack_bot_config(
slack_bot_config_id=slack_bot_config_id,
slack_channel_config_model = update_slack_channel_config(
db_session=db_session,
slack_channel_config_id=slack_channel_config_id,
persona_id=persona_id,
channel_config=channel_config,
response_type=slack_bot_config_creation_request.response_type,
standard_answer_category_ids=slack_bot_config_creation_request.standard_answer_categories,
db_session=db_session,
enable_auto_filters=slack_bot_config_creation_request.enable_auto_filters,
response_type=slack_channel_config_creation_request.response_type,
standard_answer_category_ids=slack_channel_config_creation_request.standard_answer_categories,
enable_auto_filters=slack_channel_config_creation_request.enable_auto_filters,
)
return SlackBotConfig.from_model(slack_bot_config_model)
return SlackChannelConfig.from_model(slack_channel_config_model)


@router.delete("/admin/slack-bot/config/{slack_bot_config_id}")
def delete_slack_bot_config(
slack_bot_config_id: int,
@router.delete("/admin/slack-app/channel/{slack_channel_config_id}")
def delete_slack_channel_config(
slack_channel_config_id: int,
db_session: Session = Depends(get_session),
|
||||
user: User | None = Depends(current_admin_user),
|
||||
) -> None:
|
||||
remove_slack_bot_config(
|
||||
slack_bot_config_id=slack_bot_config_id, user=user, db_session=db_session
|
||||
remove_slack_channel_config(
|
||||
db_session=db_session,
|
||||
slack_channel_config_id=slack_channel_config_id,
|
||||
user=user,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/admin/slack-bot/config")
|
||||
def list_slack_bot_configs(
|
||||
@router.get("/admin/slack-app/channel")
|
||||
def list_slack_channel_configs(
|
||||
db_session: Session = Depends(get_session),
|
||||
_: User | None = Depends(current_admin_user),
|
||||
) -> list[SlackBotConfig]:
|
||||
slack_bot_config_models = fetch_slack_bot_configs(db_session=db_session)
|
||||
) -> list[SlackChannelConfig]:
|
||||
slack_channel_config_models = fetch_slack_channel_configs(db_session=db_session)
|
||||
return [
|
||||
SlackBotConfig.from_model(slack_bot_config_model)
|
||||
for slack_bot_config_model in slack_bot_config_models
|
||||
SlackChannelConfig.from_model(slack_channel_config_model)
|
||||
for slack_channel_config_model in slack_channel_config_models
|
||||
]
|
||||
|
||||
|
||||
@router.put("/admin/slack-bot/tokens")
|
||||
def put_tokens(
|
||||
tokens: SlackBotTokens,
|
||||
@router.post("/admin/slack-app/bots")
|
||||
def create_bot(
|
||||
slack_bot_creation_request: SlackBotCreationRequest,
|
||||
db_session: Session = Depends(get_session),
|
||||
_: User | None = Depends(current_admin_user),
|
||||
) -> SlackBot:
|
||||
slack_bot_model = insert_slack_bot(
|
||||
db_session=db_session,
|
||||
name=slack_bot_creation_request.name,
|
||||
enabled=slack_bot_creation_request.enabled,
|
||||
bot_token=slack_bot_creation_request.bot_token,
|
||||
app_token=slack_bot_creation_request.app_token,
|
||||
)
|
||||
return SlackBot.from_model(slack_bot_model)
|
||||
|
||||
|
||||
@router.patch("/admin/slack-app/bots/{slack_bot_id}")
|
||||
def patch_bot(
|
||||
slack_bot_id: int,
|
||||
slack_bot_creation_request: SlackBotCreationRequest,
|
||||
db_session: Session = Depends(get_session),
|
||||
_: User | None = Depends(current_admin_user),
|
||||
) -> SlackBot:
|
||||
slack_bot_model = update_slack_bot(
|
||||
db_session=db_session,
|
||||
slack_bot_id=slack_bot_id,
|
||||
name=slack_bot_creation_request.name,
|
||||
enabled=slack_bot_creation_request.enabled,
|
||||
bot_token=slack_bot_creation_request.bot_token,
|
||||
app_token=slack_bot_creation_request.app_token,
|
||||
)
|
||||
return SlackBot.from_model(slack_bot_model)
|
||||
|
||||
|
||||
@router.delete("/admin/slack-app/bots/{slack_bot_id}")
|
||||
def delete_bot(
|
||||
slack_bot_id: int,
|
||||
db_session: Session = Depends(get_session),
|
||||
_: User | None = Depends(current_admin_user),
|
||||
) -> None:
|
||||
save_tokens(tokens=tokens)
|
||||
remove_slack_bot(
|
||||
db_session=db_session,
|
||||
slack_bot_id=slack_bot_id,
|
||||
)
|
||||
|
||||
|
||||
@router.get("/admin/slack-bot/tokens")
|
||||
def get_tokens(_: User | None = Depends(current_admin_user)) -> SlackBotTokens:
|
||||
try:
|
||||
return fetch_tokens()
|
||||
except KvKeyNotFoundError:
|
||||
raise HTTPException(status_code=404, detail="No tokens found")
|
||||
@router.get("/admin/slack-app/bots/{slack_bot_id}")
|
||||
def get_bot_by_id(
|
||||
slack_bot_id: int,
|
||||
db_session: Session = Depends(get_session),
|
||||
_: User | None = Depends(current_admin_user),
|
||||
) -> SlackBot:
|
||||
slack_bot_model = fetch_slack_bot(
|
||||
db_session=db_session,
|
||||
slack_bot_id=slack_bot_id,
|
||||
)
|
||||
return SlackBot.from_model(slack_bot_model)
|
||||
|
||||
|
||||
@router.get("/admin/slack-app/bots")
|
||||
def list_bots(
|
||||
db_session: Session = Depends(get_session),
|
||||
_: User | None = Depends(current_admin_user),
|
||||
) -> list[SlackBot]:
|
||||
slack_bot_models = fetch_slack_bots(db_session=db_session)
|
||||
return [
|
||||
SlackBot.from_model(slack_bot_model) for slack_bot_model in slack_bot_models
|
||||
]
|
||||
|
||||
|
||||
@router.get("/admin/slack-app/bots/{bot_id}/config")
|
||||
def list_bot_configs(
|
||||
bot_id: int,
|
||||
db_session: Session = Depends(get_session),
|
||||
_: User | None = Depends(current_admin_user),
|
||||
) -> list[SlackChannelConfig]:
|
||||
slack_bot_config_models = fetch_slack_channel_configs(
|
||||
db_session=db_session, slack_bot_id=bot_id
|
||||
)
|
||||
return [
|
||||
SlackChannelConfig.from_model(slack_bot_config_model)
|
||||
for slack_bot_config_model in slack_bot_config_models
|
||||
]
|
||||
|
||||
@@ -52,6 +52,7 @@ from danswer.db.users import list_users
from danswer.db.users import validate_user_role_update
from danswer.key_value_store.factory import get_kv_store
from danswer.server.manage.models import AllUsersResponse
from danswer.server.manage.models import AutoScrollRequest
from danswer.server.manage.models import UserByEmail
from danswer.server.manage.models import UserInfo
from danswer.server.manage.models import UserPreferences
@@ -493,11 +494,14 @@ def verify_user_logged_in(
    # if auth type is disabled, return a dummy user with preferences from
    # the key-value store
    if AUTH_TYPE == AuthType.DISABLED:
        print("FETCHING NO AUTH USER")
        store = get_kv_store()
        return fetch_no_auth_user(store)
        user = fetch_no_auth_user(store)
        print("ll ", user)
        return user

        raise BasicAuthenticationError(detail="User Not Authenticated")

    print("not disabled", user)
    if user.oidc_expiry and user.oidc_expiry < datetime.now(timezone.utc):
        raise BasicAuthenticationError(
            detail="Access denied. User's OIDC token has expired.",
@@ -581,6 +585,30 @@ def update_user_recent_assistants(
    db_session.commit()


@router.patch("/auto-scroll")
def update_user_auto_scroll(
    request: AutoScrollRequest,
    user: User | None = Depends(current_user),
    db_session: Session = Depends(get_session),
) -> None:
    if user is None:
        if AUTH_TYPE == AuthType.DISABLED:
            store = get_kv_store()
            no_auth_user = fetch_no_auth_user(store)
            no_auth_user.preferences.auto_scroll = request.auto_scroll
            set_no_auth_user_preferences(store, no_auth_user.preferences)
            return
        else:
            raise RuntimeError("This should never happen")

    db_session.execute(
        update(User)
        .where(User.id == user.id)  # type: ignore
        .values(auto_scroll=request.auto_scroll)
    )
    db_session.commit()


@router.patch("/user/default-model")
def update_user_default_model(
    request: ChosenDefaultModelRequest,
@@ -83,6 +83,7 @@ class CreateChatMessageRequest(ChunkContext):
    message: str
    # Files that we should attach to this message
    file_descriptors: list[FileDescriptor]

    # If no prompt provided, uses the largest prompt of the chat session
    # but really this should be explicitly specified, only in the simplified APIs is this inferred
    # Use prompt_id 0 to use the system default prompt which is Answer-Question

@@ -41,33 +41,10 @@ class Notification(BaseModel):
class Settings(BaseModel):
    """General settings"""

    chat_page_enabled: bool = True
    search_page_enabled: bool = True
    default_page: PageType = PageType.SEARCH
    maximum_chat_retention_days: int | None = None
    gpu_enabled: bool | None = None
    product_gating: GatingType = GatingType.NONE

    def check_validity(self) -> None:
        chat_page_enabled = self.chat_page_enabled
        search_page_enabled = self.search_page_enabled
        default_page = self.default_page

        if chat_page_enabled is False and search_page_enabled is False:
            raise ValueError(
                "One of `search_page_enabled` and `chat_page_enabled` must be True."
            )

        if default_page == PageType.CHAT and chat_page_enabled is False:
            raise ValueError(
                "The default page cannot be 'chat' if the chat page is disabled."
            )

        if default_page == PageType.SEARCH and search_page_enabled is False:
            raise ValueError(
                "The default page cannot be 'search' if the search page is disabled."
            )


class UserSettings(Settings):
    notifications: list[Notification]
@@ -4,6 +4,10 @@ import re
import string
from urllib.parse import quote

from danswer.utils.logger import setup_logger


logger = setup_logger(__name__)

ESCAPE_SEQUENCE_RE = re.compile(
    r"""
@@ -77,7 +81,8 @@ def extract_embedded_json(s: str) -> dict:
    last_brace_index = s.rfind("}")

    if first_brace_index == -1 or last_brace_index == -1:
        raise ValueError("No valid json found")
        logger.warning("No valid json found, assuming answer is entire string")
        return {"answer": s, "quotes": []}

    json_str = s[first_brace_index : last_brace_index + 1]
    try:
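Read as a whole, this hunk turns a hard `ValueError` into a graceful fallback when the model output contains no JSON object. A minimal self-contained sketch of the updated behavior follows; the `try` body is cut off in the hunk, so the decoding tail below is an assumption:

import json
import logging

logger = logging.getLogger(__name__)


def extract_embedded_json(s: str) -> dict:
    # locate the outermost {...} span, if any
    first_brace_index = s.find("{")
    last_brace_index = s.rfind("}")

    if first_brace_index == -1 or last_brace_index == -1:
        # new behavior: treat the whole string as the answer instead of raising
        logger.warning("No valid json found, assuming answer is entire string")
        return {"answer": s, "quotes": []}

    json_str = s[first_brace_index : last_brace_index + 1]
    try:
        return json.loads(json_str)
    except json.JSONDecodeError:
        # assumed fallback; the real error handling is not shown in the hunk
        return {"answer": s, "quotes": []}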
@@ -19,7 +19,7 @@ from danswer.db.chat import get_chat_messages_by_sessions
from danswer.db.chat import get_chat_sessions_by_slack_thread_id
from danswer.db.chat import get_or_create_root_message
from danswer.db.models import Prompt
from danswer.db.models import SlackBotConfig
from danswer.db.models import SlackChannelConfig
from danswer.db.models import StandardAnswer as StandardAnswerModel
from danswer.utils.logger import DanswerLoggingAdapter
from danswer.utils.logger import setup_logger
@@ -80,7 +80,7 @@ def oneoff_standard_answers(
def _handle_standard_answers(
    message_info: SlackMessageInfo,
    receiver_ids: list[str] | None,
    slack_bot_config: SlackBotConfig | None,
    slack_channel_config: SlackChannelConfig | None,
    prompt: Prompt | None,
    logger: DanswerLoggingAdapter,
    client: WebClient,
@@ -95,12 +95,12 @@ def _handle_standard_answers(
    we still need to respond to the users.
    """
    # if no channel config, then no standard answers are configured
    if not slack_bot_config:
    if not slack_channel_config:
        return False

    slack_thread_id = message_info.thread_to_respond
    configured_standard_answer_categories = (
        slack_bot_config.standard_answer_categories if slack_bot_config else []
        slack_channel_config.standard_answer_categories if slack_channel_config else []
    )
    configured_standard_answers = set(
        [
@@ -150,7 +150,9 @@ def _handle_standard_answers(
            db_session=db_session,
            description="",
            user_id=None,
            persona_id=slack_bot_config.persona.id if slack_bot_config.persona else 0,
            persona_id=slack_channel_config.persona.id
            if slack_channel_config.persona
            else 0,
            danswerbot_flow=True,
            slack_thread_id=slack_thread_id,
            one_shot=True,
@@ -411,6 +411,8 @@ def _validate_curator_status__no_commit(
        .all()
    )

    # if the user is a curator in any of their groups, set their role to CURATOR
    # otherwise, set their role to BASIC
    if curator_relationships:
        user.role = UserRole.CURATOR
    elif user.role == UserRole.CURATOR:
@@ -436,6 +438,15 @@ def update_user_curator_relationship(
    user = fetch_user_by_id(db_session, set_curator_request.user_id)
    if not user:
        raise ValueError(f"User with id '{set_curator_request.user_id}' not found")

    if user.role == UserRole.ADMIN:
        raise ValueError(
            f"User '{user.email}' is an admin and therefore has all permissions "
            "of a curator. If you'd like this user to only have curator permissions, "
            "you must update their role to BASIC then assign them to be CURATOR in the "
            "appropriate groups."
        )

    requested_user_groups = fetch_user_groups_for_user(
        db_session=db_session,
        user_id=set_curator_request.user_id,
@@ -1,7 +1,5 @@
from typing import Any

from danswer.connectors.confluence.onyx_confluence import build_confluence_client
from danswer.connectors.confluence.onyx_confluence import OnyxConfluence
from danswer.connectors.confluence.utils import build_confluence_client
from danswer.connectors.confluence.utils import get_user_email_from_username__server
from danswer.db.models import ConnectorCredentialPair
from danswer.utils.logger import setup_logger
@@ -11,26 +9,30 @@ from ee.danswer.db.external_perm import ExternalUserGroup
logger = setup_logger()


def _get_group_members_email_paginated(
def _build_group_member_email_map(
    confluence_client: OnyxConfluence,
    group_name: str,
) -> set[str]:
    members: list[dict[str, Any]] = []
    for member_batch in confluence_client.paginated_group_members_retrieval(group_name):
        members.extend(member_batch)

    group_member_emails: set[str] = set()
    for member in members:
        email = member.get("email")
) -> dict[str, set[str]]:
    group_member_emails: dict[str, set[str]] = {}
    for user_result in confluence_client.paginated_cql_user_retrieval():
        user = user_result["user"]
        email = user.get("email")
        if not email:
            user_name = member.get("username")
            # This field is only present in Confluence Server
            user_name = user.get("username")
            # If it is present, try to get the email using a Server-specific method
            if user_name:
                email = get_user_email_from_username__server(
                    confluence_client=confluence_client,
                    user_name=user_name,
                )
        if email:
            group_member_emails.add(email)
        if not email:
            # If we still don't have an email, skip this user
            continue

        for group in confluence_client.paginated_groups_by_user_retrieval(user):
            # group name uniqueness is enforced by Confluence, so we can use it as a group ID
            group_id = group["name"]
            group_member_emails.setdefault(group_id, set()).add(email)

    return group_member_emails

@@ -38,31 +40,20 @@ def _get_group_members_email_paginated(
def confluence_group_sync(
    cc_pair: ConnectorCredentialPair,
) -> list[ExternalUserGroup]:
    is_cloud = cc_pair.connector.connector_specific_config.get("is_cloud", False)
    confluence_client = build_confluence_client(
        credentials_json=cc_pair.credential.credential_json,
        is_cloud=is_cloud,
        credentials=cc_pair.credential.credential_json,
        is_cloud=cc_pair.connector.connector_specific_config.get("is_cloud", False),
        wiki_base=cc_pair.connector.connector_specific_config["wiki_base"],
    )

    # Get all group names
    group_names: list[str] = []
    for group_batch in confluence_client.paginated_groups_retrieval():
        for group in group_batch:
            if group_name := group.get("name"):
                group_names.append(group_name)

    # For each group name, get all members and create a danswer group
    group_member_email_map = _build_group_member_email_map(
        confluence_client=confluence_client,
    )
    danswer_groups: list[ExternalUserGroup] = []
    for group_name in group_names:
        group_member_emails = _get_group_members_email_paginated(
            confluence_client, group_name
        )
        if not group_member_emails:
            continue
    for group_id, group_member_emails in group_member_email_map.items():
        danswer_groups.append(
            ExternalUserGroup(
                id=group_name,
                id=group_id,
                user_emails=list(group_member_emails),
            )
        )
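The rewrite replaces per-group member lookups with a single pass over all users that is then inverted into a group-to-members map, so each user and each user-to-group edge is fetched once. A minimal sketch of that inversion pattern with stand-in data; only the two paginated client methods named in the hunk are assumed to exist on the real client:

from collections.abc import Callable, Iterable
from typing import Any


def build_group_member_email_map(
    users: Iterable[dict[str, Any]],
    groups_for_user: Callable[[dict[str, Any]], Iterable[dict[str, Any]]],
) -> dict[str, set[str]]:
    # invert the user -> groups relation into group -> member emails
    group_member_emails: dict[str, set[str]] = {}
    for user in users:
        email = user.get("email")
        if not email:
            continue  # users without a resolvable email are skipped
        for group in groups_for_user(user):
            # group names are unique in Confluence, so they double as IDs
            group_member_emails.setdefault(group["name"], set()).add(email)
    return group_member_emails


# stand-in data demonstrating the shape of the result
users = [{"email": "a@x.com"}, {"email": None}, {"email": "b@x.com"}]
memberships = {
    "a@x.com": [{"name": "eng"}],
    "b@x.com": [{"name": "eng"}, {"name": "ops"}],
}
print(build_group_member_email_map(users, lambda u: memberships.get(u["email"], [])))
# {'eng': {'a@x.com', 'b@x.com'}, 'ops': {'b@x.com'}}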
@@ -55,7 +55,12 @@ DOC_PERMISSION_SYNC_PERIODS: dict[DocumentSource, int] = {
    DocumentSource.SLACK: 5 * 60,
}

EXTERNAL_GROUP_SYNC_PERIOD: int = 30  # 30 seconds
# If nothing is specified here, we run the doc_sync every time the celery beat runs
EXTERNAL_GROUP_SYNC_PERIODS: dict[DocumentSource, int] = {
    # Polling is not supported so we fetch all group permissions every 60 seconds
    DocumentSource.GOOGLE_DRIVE: 60,
    DocumentSource.CONFLUENCE: 60,
}


def check_if_valid_sync_source(source_type: DocumentSource) -> bool:
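Per the comment in the hunk, a source missing from the dict is synced on every beat tick, while listed sources are rate-limited to their period. A sketch of how such a per-source period table is typically consulted; the last-run bookkeeping here is hypothetical and not taken from the diff:

import time

_last_group_sync: dict[str, float] = {}  # hypothetical in-process bookkeeping


def group_sync_is_due(source: str, periods: dict[str, int]) -> bool:
    period = periods.get(source)
    if period is None:
        return True  # no entry: sync on every celery beat run
    now = time.monotonic()
    last = _last_group_sync.get(source)
    if last is not None and now - last < period:
        return False  # still inside the configured period
    _last_group_sync[source] = now
    return True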
@@ -92,10 +92,12 @@ class ChatSessionMinimal(BaseModel):
    name: str | None
    first_user_message: str
    first_ai_message: str
    persona_name: str | None
    assistant_id: int | None
    assistant_name: str | None
    time_created: datetime
    feedback_type: QAFeedbackType | Literal["mixed"] | None
    flow_type: SessionType
    conversation_length: int


class ChatSessionSnapshot(BaseModel):
@@ -103,7 +105,8 @@ class ChatSessionSnapshot(BaseModel):
    user_email: str
    name: str | None
    messages: list[MessageSnapshot]
    persona_name: str | None
    assistant_id: int | None
    assistant_name: str | None
    time_created: datetime
    flow_type: SessionType

@@ -146,7 +149,7 @@ class QuestionAnswerPairSnapshot(BaseModel):
            retrieved_documents=ai_message.documents,
            feedback_type=ai_message.feedback_type,
            feedback_text=ai_message.feedback_text,
            persona_name=chat_session_snapshot.persona_name,
            persona_name=chat_session_snapshot.assistant_name,
            user_email=get_display_email(chat_session_snapshot.user_email),
            time_created=user_message.time_created,
            flow_type=chat_session_snapshot.flow_type,
@@ -257,12 +260,20 @@ def fetch_and_process_chat_session_history_minimal(
                name=chat_session.description,
                first_user_message=first_user_message,
                first_ai_message=first_ai_message,
                persona_name=chat_session.persona.name
                if chat_session.persona
                else None,
                assistant_id=chat_session.persona_id,
                assistant_name=(
                    chat_session.persona.name if chat_session.persona else None
                ),
                time_created=chat_session.time_created,
                feedback_type=feedback_type,
                flow_type=flow_type,
                conversation_length=len(
                    [
                        m
                        for m in chat_session.messages
                        if m.message_type != MessageType.SYSTEM
                    ]
                ),
            )
        )

@@ -327,7 +338,8 @@ def snapshot_from_chat_session(
            for message in messages
            if message.message_type != MessageType.SYSTEM
        ],
        persona_name=chat_session.persona.name if chat_session.persona else None,
        assistant_id=chat_session.persona_id,
        assistant_name=chat_session.persona.name if chat_session.persona else None,
        time_created=chat_session.time_created,
        flow_type=flow_type,
    )
26  backend/script.sh  Executable file
@@ -0,0 +1,26 @@
#!/bin/bash

# Number of recent branches to show
num_branches=5

# Get recent branches
recent_branches=$(git for-each-ref --sort=-committerdate --format='%(refname:short)' --count=$num_branches refs/heads/)

# Loop through recent branches
for branch in $recent_branches; do
    echo "Branch: $branch"
    echo "Last commit: $(git log -1 --pretty=format:"%cr" $branch)"

    # Get the number of commits ahead/behind master
    ahead_behind=$(git rev-list --left-right --count master...$branch)
    ahead=$(echo $ahead_behind | cut -f2 -d$'\t')
    behind=$(echo $ahead_behind | cut -f1 -d$'\t')

    echo "Commits ahead of master: $ahead"
    echo "Commits behind master: $behind"

    # Get the number of lines changed compared to master
    lines_changed=$(git diff --shortstat master...$branch)
    echo "Changes compared to master: $lines_changed"
    echo "----------------------------------------"
done
25  backend/test.sh  Executable file
@@ -0,0 +1,25 @@
#!/bin/bash

# Output CSV file
output_file="branch_commits.csv"

# Write CSV header
echo "Branch,Commit Hash,Author,Date,Subject" > "$output_file"

# Get all local branches except main
branches=$(git for-each-ref --format='%(refname:short)' refs/heads/ | grep -v '^main$')

# Loop through each branch
for branch in $branches; do
    # Get commits for the branch
    commits=$(git log main..$branch --pretty=format:"%H,%an,%ad,%s" --date=short)

    # If there are commits, add them to the CSV
    if [ ! -z "$commits" ]; then
        while IFS= read -r commit; do
            echo "$branch,$commit" >> "$output_file"
        done <<< "$commits"
    fi
done

echo "CSV file created: $output_file"
@@ -0,0 +1,84 @@
import os
from datetime import datetime
from datetime import timezone

from danswer.server.documents.models import DocumentSource
from tests.integration.common_utils.managers.cc_pair import CCPairManager
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.test_models import DATestUser


def test_connector_creation(reset: None) -> None:
    # Creating an admin user (first user created is automatically an admin)
    admin_user: DATestUser = UserManager.create(name="admin_user")

    # create connectors
    cc_pair_1 = CCPairManager.create_from_scratch(
        source=DocumentSource.INGESTION_API,
        user_performing_action=admin_user,
    )

    cc_pair_info = CCPairManager.get_single(
        cc_pair_1.id, user_performing_action=admin_user
    )
    assert cc_pair_info
    assert cc_pair_info.creator
    assert str(cc_pair_info.creator) == admin_user.id
    assert cc_pair_info.creator_email == admin_user.email


def test_overlapping_connector_creation(reset: None) -> None:
    """Tests that connectors indexing the same documents don't interfere with each other.
    A previous bug involved document by cc pair entries not being added for new connectors
    when the docs existed already via another connector and were up to date relative to the source.
    """
    admin_user: DATestUser = UserManager.create(name="admin_user")

    config = {
        "wiki_base": os.environ["CONFLUENCE_TEST_SPACE_URL"],
        "space": "DailyConne",
        "is_cloud": True,
        "page_id": "",
    }

    credential = {
        "confluence_username": os.environ["CONFLUENCE_USER_NAME"],
        "confluence_access_token": os.environ["CONFLUENCE_ACCESS_TOKEN"],
    }

    # store the time before we create the connector so that we know after
    # when the indexing should have started
    now = datetime.now(timezone.utc)

    # create connector
    cc_pair_1 = CCPairManager.create_from_scratch(
        source=DocumentSource.CONFLUENCE,
        connector_specific_config=config,
        credential_json=credential,
        user_performing_action=admin_user,
    )

    CCPairManager.wait_for_indexing(
        cc_pair_1, now, timeout=120, user_performing_action=admin_user
    )

    now = datetime.now(timezone.utc)

    cc_pair_2 = CCPairManager.create_from_scratch(
        source=DocumentSource.CONFLUENCE,
        connector_specific_config=config,
        credential_json=credential,
        user_performing_action=admin_user,
    )

    CCPairManager.wait_for_indexing(
        cc_pair_2, now, timeout=120, user_performing_action=admin_user
    )

    info_1 = CCPairManager.get_single(cc_pair_1.id, user_performing_action=admin_user)
    assert info_1

    info_2 = CCPairManager.get_single(cc_pair_2.id, user_performing_action=admin_user)
    assert info_2

    assert info_1.num_docs_indexed == info_2.num_docs_indexed
@@ -29,78 +29,6 @@ from tests.integration.common_utils.test_models import DATestUserGroup
from tests.integration.common_utils.vespa import vespa_fixture


def test_connector_creation(reset: None) -> None:
    # Creating an admin user (first user created is automatically an admin)
    admin_user: DATestUser = UserManager.create(name="admin_user")

    # create connectors
    cc_pair_1 = CCPairManager.create_from_scratch(
        source=DocumentSource.INGESTION_API,
        user_performing_action=admin_user,
    )

    cc_pair_info = CCPairManager.get_single(
        cc_pair_1.id, user_performing_action=admin_user
    )
    assert cc_pair_info
    assert cc_pair_info.creator
    assert str(cc_pair_info.creator) == admin_user.id
    assert cc_pair_info.creator_email == admin_user.email


# TODO(rkuo): will enable this once i have credentials on github
# def test_overlapping_connector_creation(reset: None) -> None:
#     # Creating an admin user (first user created is automatically an admin)
#     admin_user: DATestUser = UserManager.create(name="admin_user")

#     config = {
#         "wiki_base": os.environ["CONFLUENCE_TEST_SPACE_URL"],
#         "space": os.environ["CONFLUENCE_TEST_SPACE"],
#         "is_cloud": True,
#         "page_id": "",
#     }

#     credential = {
#         "confluence_username": os.environ["CONFLUENCE_USER_NAME"],
#         "confluence_access_token": os.environ["CONFLUENCE_ACCESS_TOKEN"],
#     }

#     # store the time before we create the connector so that we know after
#     # when the indexing should have started
#     now = datetime.now(timezone.utc)

#     # create connector
#     cc_pair_1 = CCPairManager.create_from_scratch(
#         source=DocumentSource.CONFLUENCE,
#         connector_specific_config=config,
#         credential_json=credential,
#         user_performing_action=admin_user,
#     )

#     CCPairManager.wait_for_indexing(
#         cc_pair_1, now, timeout=60, user_performing_action=admin_user
#     )

#     cc_pair_2 = CCPairManager.create_from_scratch(
#         source=DocumentSource.CONFLUENCE,
#         connector_specific_config=config,
#         credential_json=credential,
#         user_performing_action=admin_user,
#     )

#     CCPairManager.wait_for_indexing(
#         cc_pair_2, now, timeout=60, user_performing_action=admin_user
#     )

#     info_1 = CCPairManager.get_single(cc_pair_1.id)
#     assert info_1

#     info_2 = CCPairManager.get_single(cc_pair_2.id)
#     assert info_2

#     assert info_1.num_docs_indexed == info_2.num_docs_indexed


def test_connector_deletion(reset: None, vespa_client: vespa_fixture) -> None:
    # Creating an admin user (first user created is automatically an admin)
    admin_user: DATestUser = UserManager.create(name="admin_user")
@@ -0,0 +1,119 @@
from datetime import datetime
from datetime import timedelta
from datetime import timezone

import requests

from danswer.configs.constants import QAFeedbackType
from danswer.configs.constants import SessionType
from tests.integration.common_utils.constants import API_SERVER_URL
from tests.integration.common_utils.managers.api_key import APIKeyManager
from tests.integration.common_utils.managers.cc_pair import CCPairManager
from tests.integration.common_utils.managers.chat import ChatSessionManager
from tests.integration.common_utils.managers.document import DocumentManager
from tests.integration.common_utils.managers.llm_provider import LLMProviderManager
from tests.integration.common_utils.managers.user import UserManager
from tests.integration.common_utils.test_models import DATestUser


def test_query_history_endpoints(reset: None) -> None:
    # Create admin user and required resources
    admin_user: DATestUser = UserManager.create(name="admin_user")
    cc_pair = CCPairManager.create_from_scratch(user_performing_action=admin_user)
    api_key = APIKeyManager.create(user_performing_action=admin_user)
    LLMProviderManager.create(user_performing_action=admin_user)

    # Seed a document
    cc_pair.documents = []
    cc_pair.documents.append(
        DocumentManager.seed_doc_with_content(
            cc_pair=cc_pair,
            content="The company's revenue in Q1 was $1M",
            api_key=api_key,
        )
    )

    # Create chat session and send a message
    chat_session = ChatSessionManager.create(
        persona_id=0,
        description="Test chat session",
        user_performing_action=admin_user,
    )

    ChatSessionManager.send_message(
        chat_session_id=chat_session.id,
        message="What was the Q1 revenue?",
        user_performing_action=admin_user,
    )

    # Test get chat session history endpoint
    end_time = datetime.now(tz=timezone.utc)
    start_time = end_time - timedelta(days=1)

    response = requests.get(
        f"{API_SERVER_URL}/admin/chat-session-history",
        params={
            "start": start_time.isoformat(),
            "end": end_time.isoformat(),
        },
        headers=admin_user.headers,
    )
    assert response.status_code == 200
    history_response = response.json()

    # Verify we got back the one chat session we created
    assert len(history_response) == 1

    # Verify the first chat session details
    first_session = history_response[0]
    first_chat_id = first_session["id"]
    assert first_session["user_email"] == admin_user.email
    assert first_session["name"] == "Test chat session"
    assert first_session["first_user_message"] == "What was the Q1 revenue?"
    assert first_session["first_ai_message"] is not None
    assert first_session["assistant_id"] == 0
    assert first_session["feedback_type"] is None
    assert first_session["flow_type"] == SessionType.CHAT.value
    assert first_session["conversation_length"] == 2  # User message + AI response

    # Test get specific chat session endpoint
    response = requests.get(
        f"{API_SERVER_URL}/admin/chat-session-history/{first_chat_id}",
        headers=admin_user.headers,
    )
    assert response.status_code == 200
    session_details = response.json()

    # Verify the session details
    assert session_details["id"] == first_chat_id
    assert len(session_details["messages"]) > 0
    assert session_details["flow_type"] == SessionType.CHAT.value

    # Test CSV export endpoint
    response = requests.get(
        f"{API_SERVER_URL}/admin/query-history-csv",
        headers=admin_user.headers,
    )
    assert response.status_code == 200
    assert response.headers["Content-Type"] == "text/csv; charset=utf-8"
    assert "Content-Disposition" in response.headers

    # Verify CSV content
    csv_content = response.content.decode()
    assert "chat_session_id" in csv_content
    assert "user_message" in csv_content
    assert "ai_response" in csv_content

    # Test filtering by feedback
    response = requests.get(
        f"{API_SERVER_URL}/admin/chat-session-history",
        params={
            "feedback_type": QAFeedbackType.LIKE.value,
            "start": start_time.isoformat(),
            "end": end_time.isoformat(),
        },
        headers=admin_user.headers,
    )
    assert response.status_code == 200
    history_response = response.json()
    assert len(history_response) == 0
@@ -1,4 +1,3 @@
from collections.abc import Callable
from collections.abc import Generator
from typing import Any
from unittest.mock import MagicMock
@@ -18,49 +17,48 @@ def mock_jira_client() -> MagicMock:

@pytest.fixture
def mock_issue_small() -> MagicMock:
    issue = MagicMock()
    issue.key = "SMALL-1"
    issue.fields.description = "Small description"
    issue.fields.comment.comments = [
    issue = MagicMock(spec=Issue)
    fields = MagicMock()
    fields.description = "Small description"
    fields.comment = MagicMock()
    fields.comment.comments = [
        MagicMock(body="Small comment 1"),
        MagicMock(body="Small comment 2"),
    ]
    issue.fields.creator.displayName = "John Doe"
    issue.fields.creator.emailAddress = "john@example.com"
    issue.fields.summary = "Small Issue"
    issue.fields.updated = "2023-01-01T00:00:00+0000"
    issue.fields.labels = []
    fields.creator = MagicMock()
    fields.creator.displayName = "John Doe"
    fields.creator.emailAddress = "john@example.com"
    fields.summary = "Small Issue"
    fields.updated = "2023-01-01T00:00:00+0000"
    fields.labels = []

    issue.fields = fields
    issue.key = "SMALL-1"
    return issue


@pytest.fixture
def mock_issue_large() -> MagicMock:
    # This will be larger than 100KB
    issue = MagicMock()
    issue.key = "LARGE-1"
    issue.fields.description = "a" * 99_000
    issue.fields.comment.comments = [
    issue = MagicMock(spec=Issue)
    fields = MagicMock()
    fields.description = "a" * 99_000
    fields.comment = MagicMock()
    fields.comment.comments = [
        MagicMock(body="Large comment " * 1000),
        MagicMock(body="Another large comment " * 1000),
    ]
    issue.fields.creator.displayName = "Jane Doe"
    issue.fields.creator.emailAddress = "jane@example.com"
    issue.fields.summary = "Large Issue"
    issue.fields.updated = "2023-01-02T00:00:00+0000"
    issue.fields.labels = []
    fields.creator = MagicMock()
    fields.creator.displayName = "Jane Doe"
    fields.creator.emailAddress = "jane@example.com"
    fields.summary = "Large Issue"
    fields.updated = "2023-01-02T00:00:00+0000"
    fields.labels = []

    issue.fields = fields
    issue.key = "LARGE-1"
    return issue


@pytest.fixture
def patched_type() -> Callable[[Any], type]:
    def _patched_type(obj: Any) -> type:
        if isinstance(obj, MagicMock):
            return Issue
        return type(obj)

    return _patched_type


@pytest.fixture
def mock_jira_api_version() -> Generator[Any, Any, Any]:
    with patch("danswer.connectors.danswer_jira.connector.JIRA_API_VERSION", "2"):
@@ -69,11 +67,9 @@ def mock_jira_api_version() -> Generator[Any, Any, Any]:

@pytest.fixture
def patched_environment(
    patched_type: type,
    mock_jira_api_version: MockFixture,
) -> Generator[Any, Any, Any]:
    with patch("danswer.connectors.danswer_jira.connector.type", patched_type):
        yield
    yield


def test_fetch_jira_issues_batch_small_ticket(
@@ -83,9 +79,8 @@ def test_fetch_jira_issues_batch_small_ticket(
) -> None:
    mock_jira_client.search_issues.return_value = [mock_issue_small]

    docs, count = fetch_jira_issues_batch("project = TEST", 0, mock_jira_client)
    docs = list(fetch_jira_issues_batch(mock_jira_client, "project = TEST", 50))

    assert count == 1
    assert len(docs) == 1
    assert docs[0].id.endswith("/SMALL-1")
    assert "Small description" in docs[0].sections[0].text
@@ -100,9 +95,8 @@ def test_fetch_jira_issues_batch_large_ticket(
) -> None:
    mock_jira_client.search_issues.return_value = [mock_issue_large]

    docs, count = fetch_jira_issues_batch("project = TEST", 0, mock_jira_client)
    docs = list(fetch_jira_issues_batch(mock_jira_client, "project = TEST", 50))

    assert count == 1
    assert len(docs) == 0  # The large ticket should be skipped


@@ -114,9 +108,8 @@ def test_fetch_jira_issues_batch_mixed_tickets(
) -> None:
    mock_jira_client.search_issues.return_value = [mock_issue_small, mock_issue_large]

    docs, count = fetch_jira_issues_batch("project = TEST", 0, mock_jira_client)
    docs = list(fetch_jira_issues_batch(mock_jira_client, "project = TEST", 50))

    assert count == 2
    assert len(docs) == 1  # Only the small ticket should be included
    assert docs[0].id.endswith("/SMALL-1")

@@ -130,7 +123,6 @@ def test_fetch_jira_issues_batch_custom_size_limit(
) -> None:
    mock_jira_client.search_issues.return_value = [mock_issue_small, mock_issue_large]

    docs, count = fetch_jira_issues_batch("project = TEST", 0, mock_jira_client)
    docs = list(fetch_jira_issues_batch(mock_jira_client, "project = TEST", 50))

    assert count == 2
    assert len(docs) == 0  # Both tickets should be skipped due to the low size limit
@@ -1,15 +1,16 @@
from typing import Any

import pytest

from danswer.indexing.indexing_heartbeat import Heartbeat
from danswer.indexing.indexing_heartbeat import IndexingHeartbeatInterface


class MockHeartbeat(Heartbeat):
class MockHeartbeat(IndexingHeartbeatInterface):
    def __init__(self) -> None:
        self.call_count = 0

    def heartbeat(self, metadata: Any = None) -> None:
    def should_stop(self) -> bool:
        return False

    def progress(self, tag: str, amount: int) -> None:
        self.call_count += 1


@@ -74,7 +74,7 @@ def test_chunker_heartbeat(
    chunker = Chunker(
        tokenizer=embedder.embedding_model.tokenizer,
        enable_multipass=False,
        heartbeat=mock_heartbeat,
        callback=mock_heartbeat,
    )

    chunks = chunker.chunk([document])
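The interface change splits the old heartbeat() call into a cancellation check (should_stop) and a progress report (progress), which is exactly what MockHeartbeat implements above. A sketch of how a worker loop might drive such a callback; the loop itself is illustrative, not from the diff:

def process_items(items, callback) -> int:
    """Drive any callback exposing should_stop() and progress(tag, amount)."""
    done = 0
    for item in items:
        if callback.should_stop():
            break  # cooperative cancellation between units of work
        done += 1  # stand-in for the real per-item work
        callback.progress("items", 1)
    return done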
@@ -1,80 +0,0 @@
from unittest.mock import MagicMock
from unittest.mock import patch

import pytest
from sqlalchemy.orm import Session

from danswer.db.index_attempt import IndexAttempt
from danswer.indexing.indexing_heartbeat import IndexingHeartbeat


@pytest.fixture
def mock_db_session() -> MagicMock:
    return MagicMock(spec=Session)


@pytest.fixture
def mock_index_attempt() -> MagicMock:
    return MagicMock(spec=IndexAttempt)


def test_indexing_heartbeat(
    mock_db_session: MagicMock, mock_index_attempt: MagicMock
) -> None:
    with patch(
        "danswer.indexing.indexing_heartbeat.get_index_attempt"
    ) as mock_get_index_attempt:
        mock_get_index_attempt.return_value = mock_index_attempt

        heartbeat = IndexingHeartbeat(
            index_attempt_id=1, db_session=mock_db_session, freq=5
        )

        # Test that heartbeat doesn't update before freq is reached
        for _ in range(4):
            heartbeat.heartbeat()

        mock_db_session.commit.assert_not_called()

        # Test that heartbeat updates when freq is reached
        heartbeat.heartbeat()

        mock_get_index_attempt.assert_called_once_with(
            db_session=mock_db_session, index_attempt_id=1
        )
        assert mock_index_attempt.time_updated is not None
        mock_db_session.commit.assert_called_once()

        # Reset mock calls
        mock_db_session.reset_mock()
        mock_get_index_attempt.reset_mock()

        # Test that heartbeat updates again after freq more calls
        for _ in range(5):
            heartbeat.heartbeat()

        mock_get_index_attempt.assert_called_once()
        mock_db_session.commit.assert_called_once()


def test_indexing_heartbeat_not_found(mock_db_session: MagicMock) -> None:
    with patch(
        "danswer.indexing.indexing_heartbeat.get_index_attempt"
    ) as mock_get_index_attempt, patch(
        "danswer.indexing.indexing_heartbeat.logger"
    ) as mock_logger:
        mock_get_index_attempt.return_value = None

        heartbeat = IndexingHeartbeat(
            index_attempt_id=1, db_session=mock_db_session, freq=1
        )

        heartbeat.heartbeat()

        mock_get_index_attempt.assert_called_once_with(
            db_session=mock_db_session, index_attempt_id=1
        )
        mock_logger.error.assert_called_once_with(
            "Index attempt not found, this should not happen!"
        )
        mock_db_session.commit.assert_not_called()
@@ -324,8 +324,13 @@ def test_lengthy_prefixed_json_with_quotes() -> None:
    assert quotes[0] == "Document"


def test_prefixed_json_with_quotes() -> None:
def test_json_with_lengthy_prefix_and_quotes() -> None:
    tokens = [
        "*** Based on the provided documents, there does not appear to be any information ",
        "directly relevant to answering which documents are my favorite. ",
        "The documents seem to be focused on describing the Danswer product ",
        "and its features/use cases. Since I do not have personal preferences ",
        "for documents, I will provide a general response:\n\n",
        "```",
        "json",
        "\n",
@@ -5,7 +5,7 @@
For general information, please read the instructions in this [README](https://github.com/danswer-ai/danswer/blob/main/deployment/README.md).

## Deploy in a system without GPU support
This part is elaborated precisely in in this [README](https://github.com/danswer-ai/danswer/blob/main/deployment/README.md) in section *Docker Compose*. If you have any questions, please feel free to open an issue or get in touch in slack for support.
This part is elaborated precisely in this [README](https://github.com/danswer-ai/danswer/blob/main/deployment/README.md) in section *Docker Compose*. If you have any questions, please feel free to open an issue or get in touch in slack for support.

## Deploy in a system with GPU support
Running Model servers with GPU support while indexing and querying can result in significant improvements in performance. This is highly recommended if you have access to resources. Currently, Danswer offloads embedding model and tokenizers to the GPU VRAM and the size needed depends on chosen embedding model. For example, the embedding model `nomic-ai/nomic-embed-text-v1` takes up about 1GB of VRAM. That means running this model for inference and embedding pipeline would require roughly 2GB of VRAM.
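A back-of-the-envelope version of the sizing arithmetic in that paragraph; the two-pipeline multiplier reflects the inference and the indexing/embedding model servers each loading the model, and the 1GB figure is the example given above:

model_vram_gb = 1.0  # e.g. nomic-ai/nomic-embed-text-v1, per the paragraph above
pipelines = 2        # inference model server + indexing/embedding model server
print(f"approximate VRAM required: {model_vram_gb * pipelines:.0f} GB")  # -> 2 GB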
@@ -188,8 +188,6 @@ services:
      - CELERY_WORKER_LIGHT_PREFETCH_MULTIPLIER=${CELERY_WORKER_LIGHT_PREFETCH_MULTIPLIER:-}

      # Danswer SlackBot Configs
      - DANSWER_BOT_SLACK_APP_TOKEN=${DANSWER_BOT_SLACK_APP_TOKEN:-}
      - DANSWER_BOT_SLACK_BOT_TOKEN=${DANSWER_BOT_SLACK_BOT_TOKEN:-}
      - DANSWER_BOT_DISABLE_DOCS_ONLY_ANSWER=${DANSWER_BOT_DISABLE_DOCS_ONLY_ANSWER:-}
      - DANSWER_BOT_FEEDBACK_VISIBILITY=${DANSWER_BOT_FEEDBACK_VISIBILITY:-}
      - DANSWER_BOT_DISPLAY_ERROR_MSGS=${DANSWER_BOT_DISPLAY_ERROR_MSGS:-}

@@ -167,8 +167,6 @@ services:
      - NOTION_CONNECTOR_ENABLE_RECURSIVE_PAGE_LOOKUP=${NOTION_CONNECTOR_ENABLE_RECURSIVE_PAGE_LOOKUP:-}
      - GITHUB_CONNECTOR_BASE_URL=${GITHUB_CONNECTOR_BASE_URL:-}
      # Danswer SlackBot Configs
      - DANSWER_BOT_SLACK_APP_TOKEN=${DANSWER_BOT_SLACK_APP_TOKEN:-}
      - DANSWER_BOT_SLACK_BOT_TOKEN=${DANSWER_BOT_SLACK_BOT_TOKEN:-}
      - DANSWER_BOT_DISABLE_DOCS_ONLY_ANSWER=${DANSWER_BOT_DISABLE_DOCS_ONLY_ANSWER:-}
      - DANSWER_BOT_FEEDBACK_VISIBILITY=${DANSWER_BOT_FEEDBACK_VISIBILITY:-}
      - DANSWER_BOT_DISPLAY_ERROR_MSGS=${DANSWER_BOT_DISPLAY_ERROR_MSGS:-}

@@ -9,12 +9,6 @@ WEB_DOMAIN=http://localhost:3000

# NOTE: Generative AI configurations are done via the UI now

# If you want to setup a slack bot to answer questions automatically in Slack
# channels it is added to, you must specify the two below.
# More information in the guide here: https://docs.danswer.dev/slack_bot_setup
#DANSWER_BOT_SLACK_APP_TOKEN=
#DANSWER_BOT_SLACK_BOT_TOKEN=


# The following are for configuring User Authentication, supported flows are:
# disabled

@@ -387,8 +387,6 @@ auth:
  oauth_client_id: ""
  oauth_client_secret: ""
  oauth_cookie_secret: ""
  danswer_bot_slack_app_token: ""
  danswer_bot_slack_bot_token: ""
  redis_password: "redis_password"
  # will be overridden by the existingSecret if set
  secretName: "danswer-secrets"
@@ -400,8 +398,6 @@ auth:
  oauth_client_id: ""
  oauth_client_secret: ""
  oauth_cookie_secret: ""
  danswer_bot_slack_app_token: ""
  danswer_bot_slack_bot_token: ""
  redis_password: "password"

configMap:
@@ -451,8 +447,6 @@ configMap:
  GONG_CONNECTOR_START_TIME: ""
  NOTION_CONNECTOR_ENABLE_RECURSIVE_PAGE_LOOKUP: ""
  # DanswerBot SlackBot Configs
  # DANSWER_BOT_SLACK_APP_TOKEN: ""
  # DANSWER_BOT_SLACK_BOT_TOKEN: ""
  DANSWER_BOT_DISABLE_DOCS_ONLY_ANSWER: ""
  DANSWER_BOT_DISPLAY_ERROR_MSGS: ""
  DANSWER_BOT_RESPOND_EVERY_CHANNEL: ""

@@ -62,8 +62,6 @@ data:
  GONG_CONNECTOR_START_TIME: ""
  NOTION_CONNECTOR_ENABLE_RECURSIVE_PAGE_LOOKUP: ""
  # DanswerBot SlackBot Configs
  DANSWER_BOT_SLACK_APP_TOKEN: ""
  DANSWER_BOT_SLACK_BOT_TOKEN: ""
  DANSWER_BOT_DISABLE_DOCS_ONLY_ANSWER: ""
  DANSWER_BOT_DISPLAY_ERROR_MSGS: ""
  DANSWER_BOT_RESPOND_EVERY_CHANNEL: ""
4  web/.gitignore  vendored
@@ -34,3 +34,7 @@ yarn-error.log*
# typescript
*.tsbuildinfo
next-env.d.ts

/admin_auth.json
/build-archive.log


@@ -69,6 +69,9 @@ ENV NEXT_PUBLIC_POSTHOG_HOST=${NEXT_PUBLIC_POSTHOG_HOST}
ARG NEXT_PUBLIC_SENTRY_DSN
ENV NEXT_PUBLIC_SENTRY_DSN=${NEXT_PUBLIC_SENTRY_DSN}

ARG NEXT_PUBLIC_GTM_ENABLED
ENV NEXT_PUBLIC_GTM_ENABLED=${NEXT_PUBLIC_GTM_ENABLED}

RUN npx next build

# Step 2. Production image, copy all the files and run next
@@ -134,9 +137,12 @@ ARG NEXT_PUBLIC_POSTHOG_KEY
ARG NEXT_PUBLIC_POSTHOG_HOST
ENV NEXT_PUBLIC_POSTHOG_KEY=${NEXT_PUBLIC_POSTHOG_KEY}
ENV NEXT_PUBLIC_POSTHOG_HOST=${NEXT_PUBLIC_POSTHOG_HOST}

ARG NEXT_PUBLIC_SENTRY_DSN
ENV NEXT_PUBLIC_SENTRY_DSN=${NEXT_PUBLIC_SENTRY_DSN}

ARG NEXT_PUBLIC_GTM_ENABLED
ENV NEXT_PUBLIC_GTM_ENABLED=${NEXT_PUBLIC_GTM_ENABLED}

# Note: Don't expose ports here, Compose will handle that for us if necessary.
# If you want to run this without compose, specify the ports to
103  web/package-lock.json  generated
@@ -15,11 +15,13 @@
        "@headlessui/react": "^2.2.0",
        "@headlessui/tailwindcss": "^0.2.1",
        "@phosphor-icons/react": "^2.0.8",
        "@radix-ui/react-checkbox": "^1.1.2",
        "@radix-ui/react-dialog": "^1.0.5",
        "@radix-ui/react-popover": "^1.1.2",
        "@radix-ui/react-select": "^2.1.2",
        "@radix-ui/react-separator": "^1.1.0",
        "@radix-ui/react-slot": "^1.1.0",
        "@radix-ui/react-switch": "^1.1.1",
        "@radix-ui/react-tabs": "^1.1.1",
        "@radix-ui/react-tooltip": "^1.1.3",
        "@sentry/nextjs": "^8.34.0",
@@ -2661,6 +2663,57 @@
      }
    },
    "node_modules/@radix-ui/react-checkbox": {
      "version": "1.1.2",
      "resolved": "https://registry.npmjs.org/@radix-ui/react-checkbox/-/react-checkbox-1.1.2.tgz",
      "integrity": "sha512-/i0fl686zaJbDQLNKrkCbMyDm6FQMt4jg323k7HuqitoANm9sE23Ql8yOK3Wusk34HSLKDChhMux05FnP6KUkw==",
      "license": "MIT",
      "dependencies": {
        "@radix-ui/primitive": "1.1.0",
        "@radix-ui/react-compose-refs": "1.1.0",
        "@radix-ui/react-context": "1.1.1",
        "@radix-ui/react-presence": "1.1.1",
        "@radix-ui/react-primitive": "2.0.0",
        "@radix-ui/react-use-controllable-state": "1.1.0",
        "@radix-ui/react-use-previous": "1.1.0",
        "@radix-ui/react-use-size": "1.1.0"
      },
      "peerDependencies": {
        "@types/react": "*",
        "@types/react-dom": "*",
        "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
        "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
      },
      "peerDependenciesMeta": {
        "@types/react": {
          "optional": true
        },
        "@types/react-dom": {
          "optional": true
        }
      }
    },
    "node_modules/@radix-ui/react-checkbox/node_modules/@radix-ui/primitive": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.0.tgz",
      "integrity": "sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==",
      "license": "MIT"
    },
    "node_modules/@radix-ui/react-checkbox/node_modules/@radix-ui/react-context": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.1.tgz",
      "integrity": "sha512-UASk9zi+crv9WteK/NU4PLvOoL3OuE6BWVKNF6hPRBtYBDXQ2u5iu3O59zUlJiTVvkyuycnqrztsHVJwcK9K+Q==",
      "license": "MIT",
      "peerDependencies": {
        "@types/react": "*",
        "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
      },
      "peerDependenciesMeta": {
        "@types/react": {
          "optional": true
        }
      }
    },
    "node_modules/@radix-ui/react-collection": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/@radix-ui/react-collection/-/react-collection-1.1.0.tgz",
@@ -3821,6 +3874,56 @@
      }
    },
    "node_modules/@radix-ui/react-switch": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.1.1.tgz",
      "integrity": "sha512-diPqDDoBcZPSicYoMWdWx+bCPuTRH4QSp9J+65IvtdS0Kuzt67bI6n32vCj8q6NZmYW/ah+2orOtMwcX5eQwIg==",
      "license": "MIT",
      "dependencies": {
        "@radix-ui/primitive": "1.1.0",
        "@radix-ui/react-compose-refs": "1.1.0",
        "@radix-ui/react-context": "1.1.1",
        "@radix-ui/react-primitive": "2.0.0",
        "@radix-ui/react-use-controllable-state": "1.1.0",
        "@radix-ui/react-use-previous": "1.1.0",
        "@radix-ui/react-use-size": "1.1.0"
      },
      "peerDependencies": {
        "@types/react": "*",
        "@types/react-dom": "*",
        "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
        "react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
      },
      "peerDependenciesMeta": {
        "@types/react": {
          "optional": true
        },
        "@types/react-dom": {
          "optional": true
        }
      }
    },
    "node_modules/@radix-ui/react-switch/node_modules/@radix-ui/primitive": {
      "version": "1.1.0",
      "resolved": "https://registry.npmjs.org/@radix-ui/primitive/-/primitive-1.1.0.tgz",
      "integrity": "sha512-4Z8dn6Upk0qk4P74xBhZ6Hd/w0mPEzOOLxy4xiPXOXqjF7jZS0VAKk7/x/H6FyY2zCkYJqePf1G5KmkmNJ4RBA==",
      "license": "MIT"
    },
    "node_modules/@radix-ui/react-switch/node_modules/@radix-ui/react-context": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/@radix-ui/react-context/-/react-context-1.1.1.tgz",
      "integrity": "sha512-UASk9zi+crv9WteK/NU4PLvOoL3OuE6BWVKNF6hPRBtYBDXQ2u5iu3O59zUlJiTVvkyuycnqrztsHVJwcK9K+Q==",
      "license": "MIT",
      "peerDependencies": {
        "@types/react": "*",
        "react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
      },
      "peerDependenciesMeta": {
        "@types/react": {
          "optional": true
        }
      }
    },
    "node_modules/@radix-ui/react-tabs": {
      "version": "1.1.1",
      "resolved": "https://registry.npmjs.org/@radix-ui/react-tabs/-/react-tabs-1.1.1.tgz",
@@ -17,11 +17,13 @@
|
||||
"@headlessui/react": "^2.2.0",
|
||||
"@headlessui/tailwindcss": "^0.2.1",
|
||||
"@phosphor-icons/react": "^2.0.8",
|
||||
"@radix-ui/react-checkbox": "^1.1.2",
|
||||
"@radix-ui/react-dialog": "^1.0.5",
|
||||
"@radix-ui/react-popover": "^1.1.2",
|
||||
"@radix-ui/react-select": "^2.1.2",
|
||||
"@radix-ui/react-separator": "^1.1.0",
|
||||
"@radix-ui/react-slot": "^1.1.0",
|
||||
"@radix-ui/react-switch": "^1.1.1",
|
||||
"@radix-ui/react-tabs": "^1.1.1",
|
||||
"@radix-ui/react-tooltip": "^1.1.3",
|
||||
"@sentry/nextjs": "^8.34.0",
|
||||
|
||||
@@ -1,8 +1,8 @@
import { defineConfig } from "@playwright/test";
import { defineConfig, devices } from "@playwright/test";

export default defineConfig({
  // Other Playwright config options
  testDir: "./tests/e2e", // Folder for test files
  reporter: "list",
  // Configure paths for screenshots
  // expect: {
  //   toMatchSnapshot: {
@@ -11,4 +11,30 @@ export default defineConfig({
  //   },
  // reporter: [["html", { outputFolder: "test-results/output/report" }]], // HTML report location
  // outputDir: "test-results/output/screenshots", // Set output folder for test artifacts
  projects: [
    {
      // dependency for admin workflows
      name: "admin_setup",
      testMatch: /.*admin_auth\.setup\.ts/,
    },
    {
      // tests admin workflows
      name: "chromium-admin",
      grep: /@admin/,
      use: {
        ...devices["Desktop Chrome"],
        // Use prepared auth state.
        storageState: "admin_auth.json",
      },
      dependencies: ["admin_setup"],
    },
    {
      // tests logged out / guest workflows
      name: "chromium-guest",
      grep: /@guest/,
      use: {
        ...devices["Desktop Chrome"],
      },
    },
  ],
});

@@ -29,9 +29,7 @@ import { deleteApiKey, regenerateApiKey } from "./lib";
import { DanswerApiKeyForm } from "./DanswerApiKeyForm";
import { APIKey } from "./types";

const API_KEY_TEXT = `
API Keys allow you to access Danswer APIs programmatically. Click the button below to generate a new API Key.
`;
const API_KEY_TEXT = `API Keys allow you to access Danswer APIs programmatically. Click the button below to generate a new API Key.`;

function NewApiKeyModal({
  apiKey,

@@ -25,6 +25,7 @@ export default async function Page(props: { params: Promise<{ id: string }> }) {
      <CardSection>
        <AssistantEditor
          {...values}
          admin
          defaultPublic={true}
          redirectType={SuccessfulPersonaUpdateRedirectType.ADMIN}
        />

@@ -1,23 +0,0 @@
import { errorHandlingFetcher } from "@/lib/fetcher";
import { SlackBotConfig, SlackBotTokens } from "@/lib/types";
import useSWR, { mutate } from "swr";

export const useSlackBotConfigs = () => {
  const url = "/api/manage/admin/slack-bot/config";
  const swrResponse = useSWR<SlackBotConfig[]>(url, errorHandlingFetcher);

  return {
    ...swrResponse,
    refreshSlackBotConfigs: () => mutate(url),
  };
};

export const useSlackBotTokens = () => {
  const url = "/api/manage/admin/slack-bot/tokens";
  const swrResponse = useSWR<SlackBotTokens>(url, errorHandlingFetcher);

  return {
    ...swrResponse,
    refreshSlackBotTokens: () => mutate(url),
  };
};
@@ -1,65 +0,0 @@
import { AdminPageTitle } from "@/components/admin/Title";
import { CPUIcon } from "@/components/icons/icons";
import { SlackBotCreationForm } from "../SlackBotConfigCreationForm";
import { fetchSS } from "@/lib/utilsSS";
import { ErrorCallout } from "@/components/ErrorCallout";
import { DocumentSet } from "@/lib/types";
import { BackButton } from "@/components/BackButton";
import {
  FetchAssistantsResponse,
  fetchAssistantsSS,
} from "@/lib/assistants/fetchAssistantsSS";
import { getStandardAnswerCategoriesIfEE } from "@/components/standardAnswers/getStandardAnswerCategoriesIfEE";

async function Page() {
  const tasks = [fetchSS("/manage/document-set"), fetchAssistantsSS()];
  const [
    documentSetsResponse,
    [assistants, assistantsFetchError],
    standardAnswerCategoriesResponse,
  ] = (await Promise.all(tasks)) as [
    Response,
    FetchAssistantsResponse,
    Response,
  ];

  const eeStandardAnswerCategoryResponse =
    await getStandardAnswerCategoriesIfEE();

  if (!documentSetsResponse.ok) {
    return (
      <ErrorCallout
        errorTitle="Something went wrong :("
        errorMsg={`Failed to fetch document sets - ${await documentSetsResponse.text()}`}
      />
    );
  }
  const documentSets = (await documentSetsResponse.json()) as DocumentSet[];

  if (assistantsFetchError) {
    return (
      <ErrorCallout
        errorTitle="Something went wrong :("
        errorMsg={`Failed to fetch assistants - ${assistantsFetchError}`}
      />
    );
  }

  return (
    <div className="container mx-auto">
      <BackButton />
      <AdminPageTitle
        icon={<CPUIcon size={32} />}
        title="New Slack Bot Config"
      />

      <SlackBotCreationForm
        documentSets={documentSets}
        personas={assistants}
        standardAnswerCategoryResponse={eeStandardAnswerCategoryResponse}
      />
    </div>
  );
}

export default Page;
@@ -1,304 +0,0 @@
"use client";

import { ThreeDotsLoader } from "@/components/Loading";
import { PageSelector } from "@/components/PageSelector";
import { EditIcon, SlackIcon, TrashIcon } from "@/components/icons/icons";
import { SlackBotConfig } from "@/lib/types";
import { useState } from "react";
import { useSlackBotConfigs, useSlackBotTokens } from "./hooks";
import { PopupSpec, usePopup } from "@/components/admin/connectors/Popup";
import { deleteSlackBotConfig, isPersonaASlackBotPersona } from "./lib";
import { SlackBotTokensForm } from "./SlackBotTokensForm";
import { AdminPageTitle } from "@/components/admin/Title";
import {
  Table,
  TableBody,
  TableCell,
  TableHead,
  TableHeader,
  TableRow,
} from "@/components/ui/table";
import Text from "@/components/ui/text";
import Title from "@/components/ui/title";
import { FiArrowUpRight, FiChevronDown, FiChevronUp } from "react-icons/fi";
import Link from "next/link";
import { InstantSSRAutoRefresh } from "@/components/SSRAutoRefresh";
import { ErrorCallout } from "@/components/ErrorCallout";
import { Button } from "@/components/ui/button";

const numToDisplay = 50;

const SlackBotConfigsTable = ({
  slackBotConfigs,
  refresh,
  setPopup,
}: {
  slackBotConfigs: SlackBotConfig[];
  refresh: () => void;
  setPopup: (popupSpec: PopupSpec | null) => void;
}) => {
  const [page, setPage] = useState(1);

  // sort by id for consistent ordering
  slackBotConfigs.sort((a, b) => {
    if (a.id < b.id) {
      return -1;
    } else if (a.id > b.id) {
      return 1;
    } else {
      return 0;
    }
  });

  return (
    <div>
      <Table>
        <TableHeader>
          <TableRow>
            <TableHead>Channels</TableHead>
            <TableHead>Assistant</TableHead>
            <TableHead>Document Sets</TableHead>
            <TableHead>Delete</TableHead>
          </TableRow>
        </TableHeader>
        <TableBody>
          {slackBotConfigs
            .slice(numToDisplay * (page - 1), numToDisplay * page)
            .map((slackBotConfig) => {
              return (
                <TableRow key={slackBotConfig.id}>
                  <TableCell>
                    <div className="flex gap-x-2">
                      <Link
                        className="cursor-pointer my-auto"
                        href={`/admin/bot/${slackBotConfig.id}`}
                      >
                        <EditIcon />
                      </Link>
                      <div className="my-auto">
                        {slackBotConfig.channel_config.channel_names
                          .map((channel_name) => `#${channel_name}`)
                          .join(", ")}
                      </div>
                    </div>
                  </TableCell>
                  <TableCell>
                    {slackBotConfig.persona &&
                    !isPersonaASlackBotPersona(slackBotConfig.persona) ? (
                      <Link
                        href={`/admin/assistants/${slackBotConfig.persona.id}`}
                        className="text-blue-500 flex"
                      >
                        <FiArrowUpRight className="my-auto mr-1" />
                        {slackBotConfig.persona.name}
                      </Link>
                    ) : (
                      "-"
                    )}
                  </TableCell>
                  <TableCell>
                    {" "}
                    <div>
                      {slackBotConfig.persona &&
                      slackBotConfig.persona.document_sets.length > 0
                        ? slackBotConfig.persona.document_sets
                            .map((documentSet) => documentSet.name)
                            .join(", ")
                        : "-"}
                    </div>
                  </TableCell>
                  <TableCell>
                    {" "}
                    <div
                      className="cursor-pointer"
                      onClick={async () => {
                        const response = await deleteSlackBotConfig(
                          slackBotConfig.id
                        );
                        if (response.ok) {
                          setPopup({
                            message: `Slack bot config "${slackBotConfig.id}" deleted`,
                            type: "success",
                          });
                        } else {
                          const errorMsg = await response.text();
                          setPopup({
                            message: `Failed to delete Slack bot config - ${errorMsg}`,
                            type: "error",
                          });
                        }
                        refresh();
                      }}
                    >
                      <TrashIcon />
                    </div>
                  </TableCell>
                </TableRow>
              );
            })}
        </TableBody>
      </Table>

      <div className="mt-3 flex">
        <div className="mx-auto">
          <PageSelector
            totalPages={Math.ceil(slackBotConfigs.length / numToDisplay)}
            currentPage={page}
            onPageChange={(newPage) => setPage(newPage)}
          />
        </div>
      </div>
    </div>
  );
};

const Main = () => {
  const [slackBotTokensModalIsOpen, setSlackBotTokensModalIsOpen] =
    useState(false);
  const { popup, setPopup } = usePopup();
  const {
    data: slackBotConfigs,
    isLoading: isSlackBotConfigsLoading,
    error: slackBotConfigsError,
    refreshSlackBotConfigs,
  } = useSlackBotConfigs();

  const { data: slackBotTokens, refreshSlackBotTokens } = useSlackBotTokens();

  if (isSlackBotConfigsLoading) {
    return <ThreeDotsLoader />;
  }

  if (slackBotConfigsError || !slackBotConfigs) {
    return (
      <ErrorCallout
        errorTitle="Error loading slack bot configs"
        errorMsg={
          slackBotConfigsError.info?.message ||
          slackBotConfigsError.info?.detail
        }
      />
    );
  }

  return (
    <div className="mb-8">
      {popup}

      <Text className="mb-2">
        Setup a Slack bot that connects to Danswer. Once setup, you will be able
        to ask questions to Danswer directly from Slack. Additionally, you can:
      </Text>

      <Text className="mb-2">
        <ul className="list-disc mt-2 ml-4">
          <li>
            Setup DanswerBot to automatically answer questions in certain
            channels.
          </li>
          <li>
            Choose which document sets DanswerBot should answer from, depending
            on the channel the question is being asked.
          </li>
          <li>
            Directly message DanswerBot to search just as you would in the web
            UI.
          </li>
        </ul>
      </Text>

      <Text className="mb-6">
        Follow the{" "}
        <a
          className="text-blue-500"
          href="https://docs.danswer.dev/slack_bot_setup"
          target="_blank"
          rel="noreferrer"
        >
          guide{" "}
        </a>
        found in the Danswer documentation to get started!
      </Text>

      <Title>Step 1: Configure Slack Tokens</Title>
      {!slackBotTokens ? (
        <div className="mt-3">
          <SlackBotTokensForm
            onClose={() => refreshSlackBotTokens()}
            setPopup={setPopup}
          />
        </div>
      ) : (
        <>
          <Text className="italic mt-3">Tokens saved!</Text>
          <Button
            onClick={() => {
              setSlackBotTokensModalIsOpen(!slackBotTokensModalIsOpen);
            }}
            variant="outline"
            className="mt-2"
            icon={slackBotTokensModalIsOpen ? FiChevronUp : FiChevronDown}
          >
            {slackBotTokensModalIsOpen ? "Hide" : "Edit Tokens"}
          </Button>
          {slackBotTokensModalIsOpen && (
            <div className="mt-3">
              <SlackBotTokensForm
                onClose={() => {
                  refreshSlackBotTokens();
                  setSlackBotTokensModalIsOpen(false);
                }}
                setPopup={setPopup}
                existingTokens={slackBotTokens}
              />
            </div>
          )}
        </>
      )}
      {slackBotTokens && (
        <>
          <Title className="mb-2 mt-4">Step 2: Setup DanswerBot</Title>
          <Text className="mb-3">
            Configure Danswer to automatically answer questions in Slack
            channels. By default, Danswer only responds in channels where a
            configuration is setup unless it is explicitly tagged.
          </Text>

          <div className="mb-2"></div>

          <Link className="flex mb-3 w-fit" href="/admin/bot/new">
            <Button className="my-auto" variant="next">
              New Slack Bot Configuration
            </Button>
          </Link>

          {slackBotConfigs.length > 0 && (
            <div className="mt-8">
              <SlackBotConfigsTable
                slackBotConfigs={slackBotConfigs}
                refresh={refreshSlackBotConfigs}
                setPopup={setPopup}
              />
            </div>
          )}
        </>
      )}
    </div>
  );
};

const Page = () => {
  return (
    <div className="container mx-auto">
      <AdminPageTitle
        icon={<SlackIcon size={32} />}
        title="Slack Bot Configuration"
      />
      <InstantSSRAutoRefresh />

      <Main />
    </div>
  );
};

export default Page;
31
web/src/app/admin/bots/SlackBotCreationForm.tsx
Normal file
@@ -0,0 +1,31 @@
"use client";

import { usePopup } from "@/components/admin/connectors/Popup";
import { useRouter } from "next/navigation";
import { useState } from "react";
import { SlackTokensForm } from "./SlackTokensForm";

export const NewSlackBotForm = ({}: {}) => {
  const [formValues] = useState({
    name: "",
    enabled: true,
    bot_token: "",
    app_token: "",
  });
  const { popup, setPopup } = usePopup();
  const router = useRouter();

  return (
    <div>
      {popup}
      <div className="p-4">
        <SlackTokensForm
          isUpdate={false}
          initialValues={formValues}
          setPopup={setPopup}
          router={router}
        />
      </div>
    </div>
  );
};
121
web/src/app/admin/bots/SlackBotTable.tsx
Normal file
@@ -0,0 +1,121 @@
"use client";

import { PageSelector } from "@/components/PageSelector";
import { SlackBot } from "@/lib/types";
import { useRouter } from "next/navigation";
import { useEffect, useState } from "react";
import { FiCheck, FiEdit, FiXCircle } from "react-icons/fi";
import {
  Table,
  TableBody,
  TableCell,
  TableHead,
  TableHeader,
  TableRow,
} from "@/components/ui/table";

const NUM_IN_PAGE = 20;

function ClickableTableRow({
  url,
  children,
  ...props
}: {
  url: string;
  children: React.ReactNode;
  [key: string]: any;
}) {
  const router = useRouter();

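  // Prefetch the row's destination route so clicking navigates without delay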
  useEffect(() => {
    router.prefetch(url);
  }, [router]);

  const navigate = () => {
    router.push(url);
  };

  return (
    <TableRow {...props} onClick={navigate}>
      {children}
    </TableRow>
  );
}

export function SlackBotTable({ slackBots }: { slackBots: SlackBot[] }) {
  const [page, setPage] = useState(1);

  // sort by id for consistent ordering
  slackBots.sort((a, b) => {
    if (a.id < b.id) {
      return -1;
    } else if (a.id > b.id) {
      return 1;
    } else {
      return 0;
    }
  });

  const slackBotsForPage = slackBots.slice(
    NUM_IN_PAGE * (page - 1),
    NUM_IN_PAGE * page
  );

  return (
    <div>
      <Table>
        <TableHeader>
          <TableRow>
            <TableHead>Name</TableHead>
            <TableHead>Channel Count</TableHead>
            <TableHead>Enabled</TableHead>
          </TableRow>
        </TableHeader>
        <TableBody>
          {slackBotsForPage.map((slackBot) => {
            return (
              <ClickableTableRow
                url={`/admin/bots/${slackBot.id}`}
                key={slackBot.id}
                className="hover:bg-muted cursor-pointer"
              >
                <TableCell>
                  <div className="flex items-center">
                    <FiEdit className="mr-4" />
                    {slackBot.name}
                  </div>
                </TableCell>
                <TableCell>{slackBot.configs_count}</TableCell>
                <TableCell>
                  {slackBot.enabled ? (
                    <FiCheck className="text-emerald-600" size="18" />
                  ) : (
                    <FiXCircle className="text-red-600" size="18" />
                  )}
                </TableCell>
              </ClickableTableRow>
            );
          })}
        </TableBody>
      </Table>
      {slackBots.length > NUM_IN_PAGE && (
        <div className="mt-3 flex">
          <div className="mx-auto">
            <PageSelector
              totalPages={Math.ceil(slackBots.length / NUM_IN_PAGE)}
              currentPage={page}
              onPageChange={(newPage) => {
                setPage(newPage);
                window.scrollTo({
                  top: 0,
                  left: 0,
                  behavior: "smooth",
                });
              }}
            />
          </div>
        </div>
      )}
    </div>
  );
}
@@ -1,34 +1,52 @@
import { Form, Formik } from "formik";
import * as Yup from "yup";
import { PopupSpec } from "@/components/admin/connectors/Popup";
import { SlackBotTokens } from "@/lib/types";
import { SlackBot } from "@/lib/types";
import { TextFormField } from "@/components/admin/connectors/Field";
import { setSlackBotTokens } from "./lib";
import CardSection from "@/components/admin/CardSection";
import { Button } from "@/components/ui/button";
import { updateSlackBot, SlackBotCreationRequest } from "./new/lib";

interface SlackBotTokensFormProps {
  onClose: () => void;
  setPopup: (popupSpec: PopupSpec | null) => void;
  existingTokens?: SlackBotTokens;
  existingSlackApp?: SlackBot;
  onTokensSet?: (tokens: { bot_token: string; app_token: string }) => void;
  embedded?: boolean;
  noForm?: boolean;
}

export const SlackBotTokensForm = ({
  onClose,
  setPopup,
  existingTokens,
  existingSlackApp,
  onTokensSet,
  embedded = true,
  noForm = true,
}: SlackBotTokensFormProps) => {
  const Wrapper = embedded ? "div" : CardSection;

  const FormWrapper = noForm ? "div" : Form;

  return (
    <CardSection>
    <Wrapper className="w-full">
      <Formik
        initialValues={existingTokens || { app_token: "", bot_token: "" }}
        initialValues={existingSlackApp || { app_token: "", bot_token: "" }}
        validationSchema={Yup.object().shape({
          channel_names: Yup.array().of(Yup.string().required()),
          document_sets: Yup.array().of(Yup.number()),
          bot_token: Yup.string().required(),
          app_token: Yup.string().required(),
        })}
        onSubmit={async (values, formikHelpers) => {
          if (embedded && onTokensSet) {
            onTokensSet(values);
            return;
          }

          formikHelpers.setSubmitting(true);
          const response = await setSlackBotTokens(values);
          const response = await updateSlackBot(
            existingSlackApp?.id || 0,
            values as SlackBotCreationRequest
          );
          formikHelpers.setSubmitting(false);
          if (response.ok) {
            setPopup({
@@ -46,25 +64,29 @@ export const SlackBotTokensForm = ({
        }}
      >
        {({ isSubmitting }) => (
          <Form>
          <FormWrapper className="w-full">
            <TextFormField
              width="w-full"
              name="bot_token"
              label="Slack Bot Token"
              type="password"
            />
            <TextFormField
              width="w-full"
              name="app_token"
              label="Slack App Token"
              type="password"
            />
            <div className="flex">
              <Button type="submit" disabled={isSubmitting} variant="submit">
                Set Tokens
              </Button>
            </div>
          </Form>
            {!embedded && (
              <div className="flex w-full">
                <Button type="submit" disabled={isSubmitting} variant="submit">
                  Set Tokens
                </Button>
              </div>
            )}
          </FormWrapper>
        )}
      </Formik>
    </CardSection>
    </Wrapper>
  );
};
170
web/src/app/admin/bots/SlackBotUpdateForm.tsx
Normal file
@@ -0,0 +1,170 @@
"use client";

import { usePopup } from "@/components/admin/connectors/Popup";
import { SlackBot } from "@/lib/types";
import { useRouter } from "next/navigation";
import { ChevronDown, ChevronRight } from "lucide-react";
import { useState, useEffect, useRef } from "react";
import { updateSlackBotField } from "@/lib/updateSlackBotField";
import { Checkbox } from "@/app/admin/settings/SettingsForm";
import { SlackTokensForm } from "./SlackTokensForm";
import { SourceIcon } from "@/components/SourceIcon";
import { EditableStringFieldDisplay } from "@/components/EditableStringFieldDisplay";
import { deleteSlackBot } from "./new/lib";
import { GenericConfirmModal } from "@/components/modals/GenericConfirmModal";
import { FiTrash } from "react-icons/fi";
import { Button } from "@/components/ui/button";

export const ExistingSlackBotForm = ({
  existingSlackBot,
  refreshSlackBot,
}: {
  existingSlackBot: SlackBot;
  refreshSlackBot?: () => void;
}) => {
  const [isExpanded, setIsExpanded] = useState(false);
  const [formValues, setFormValues] = useState(existingSlackBot);
  const { popup, setPopup } = usePopup();
  const router = useRouter();
  const dropdownRef = useRef<HTMLDivElement>(null);
  const [showDeleteModal, setShowDeleteModal] = useState(false);

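  // Persist a single field change to the backend and surface the result in a popup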
  const handleUpdateField = async (
    field: keyof SlackBot,
    value: string | boolean
  ) => {
    try {
      const response = await updateSlackBotField(
        existingSlackBot,
        field,
        value
      );
      if (!response.ok) {
        throw new Error(await response.text());
      }
      setPopup({
        message: `Connector ${field} updated successfully`,
        type: "success",
      });
    } catch (error) {
      setPopup({
        message: `Failed to update connector ${field}`,
        type: "error",
      });
    }
    setFormValues((prev) => ({ ...prev, [field]: value }));
  };

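  // Collapse the token dropdown whenever the user clicks outside of it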
  useEffect(() => {
    const handleClickOutside = (event: MouseEvent) => {
      if (
        dropdownRef.current &&
        !dropdownRef.current.contains(event.target as Node) &&
        isExpanded
      ) {
        setIsExpanded(false);
      }
    };

    document.addEventListener("mousedown", handleClickOutside);
    return () => {
      document.removeEventListener("mousedown", handleClickOutside);
    };
  }, [isExpanded]);

  return (
    <div>
      {popup}
      <div className="flex items-center justify-between">
        <div className="flex items-center gap-2">
          <div className="my-auto">
            <SourceIcon iconSize={36} sourceType={"slack"} />
          </div>
          <EditableStringFieldDisplay
            value={formValues.name}
            isEditable={true}
            onUpdate={(value) => handleUpdateField("name", value)}
            scale={2.5}
          />
        </div>

        <div className="flex flex-col" ref={dropdownRef}>
          <div className="flex items-center gap-4">
            <div className="border rounded-lg border-gray-200">
              <div
                className="flex items-center gap-2 cursor-pointer hover:bg-gray-100 p-2"
                onClick={() => setIsExpanded(!isExpanded)}
              >
                {isExpanded ? (
                  <ChevronDown size={20} />
                ) : (
                  <ChevronRight size={20} />
                )}
                <span>Update Tokens</span>
              </div>
            </div>
            <Button
              variant="destructive"
              onClick={() => setShowDeleteModal(true)}
              icon={FiTrash}
              tooltip="Click to delete"
            >
              Delete
            </Button>
          </div>

          {isExpanded && (
            <div className="bg-white border rounded-lg border-gray-200 shadow-lg absolute mt-12 right-0 z-10 w-full md:w-3/4 lg:w-1/2">
              <div className="p-4">
                <SlackTokensForm
                  isUpdate={true}
                  initialValues={formValues}
                  existingSlackBotId={existingSlackBot.id}
                  refreshSlackBot={refreshSlackBot}
                  setPopup={setPopup}
                  router={router}
                />
              </div>
            </div>
          )}
        </div>
      </div>
      <div className="mt-4">
        <div className="inline-block border rounded-lg border-gray-200 px-2 py-2">
          <Checkbox
            label="Enabled"
            checked={formValues.enabled}
            onChange={(e) => handleUpdateField("enabled", e.target.checked)}
          />
        </div>
        {showDeleteModal && (
          <GenericConfirmModal
            title="Delete Slack Bot"
            message="Are you sure you want to delete this Slack bot? This action cannot be undone."
            confirmText="Delete"
            onClose={() => setShowDeleteModal(false)}
            onConfirm={async () => {
              try {
                const response = await deleteSlackBot(existingSlackBot.id);
                if (!response.ok) {
                  throw new Error(await response.text());
                }
                setPopup({
                  message: "Slack bot deleted successfully",
                  type: "success",
                });
                router.push("/admin/bots");
              } catch (error) {
                setPopup({
                  message: "Failed to delete Slack bot",
                  type: "error",
                });
              }
              setShowDeleteModal(false);
            }}
          />
        )}
      </div>
    </div>
  );
};
108
web/src/app/admin/bots/SlackTokensForm.tsx
Normal file
@@ -0,0 +1,108 @@
"use client";

import { TextFormField } from "@/components/admin/connectors/Field";
import { Form, Formik } from "formik";
import * as Yup from "yup";
import { createSlackBot, updateSlackBot } from "./new/lib";
import { Button } from "@/components/ui/button";
import { SourceIcon } from "@/components/SourceIcon";

export const SlackTokensForm = ({
  isUpdate,
  initialValues,
  existingSlackBotId,
  refreshSlackBot,
  setPopup,
  router,
}: {
  isUpdate: boolean;
  initialValues: any;
  existingSlackBotId?: number;
  refreshSlackBot?: () => void;
  setPopup: (popup: { message: string; type: "error" | "success" }) => void;
  router: any;
}) => (
  <Formik
    initialValues={initialValues}
    validationSchema={Yup.object().shape({
      bot_token: Yup.string().required(),
      app_token: Yup.string().required(),
      name: Yup.string().required(),
    })}
    onSubmit={async (values, formikHelpers) => {
      formikHelpers.setSubmitting(true);

      let response;
      if (isUpdate) {
        response = await updateSlackBot(existingSlackBotId!, values);
      } else {
        response = await createSlackBot(values);
      }
      formikHelpers.setSubmitting(false);
      if (response.ok) {
        if (refreshSlackBot) {
          refreshSlackBot();
        }
        const responseJson = await response.json();
        const botId = isUpdate ? existingSlackBotId : responseJson.id;
        setPopup({
          message: isUpdate
            ? "Successfully updated Slack Bot!"
            : "Successfully created Slack Bot!",
          type: "success",
        });
        router.push(`/admin/bots/${encodeURIComponent(botId)}`);
      } else {
        const responseJson = await response.json();
        const errorMsg = responseJson.detail || responseJson.message;
        setPopup({
          message: isUpdate
            ? `Error updating Slack Bot - ${errorMsg}`
            : `Error creating Slack Bot - ${errorMsg}`,
          type: "error",
        });
      }
    }}
    enableReinitialize={true}
  >
    {({ isSubmitting, setFieldValue, values }) => (
      <Form className="w-full">
        {!isUpdate && (
          <div className="flex items-center gap-2 mb-4">
            <div className="my-auto">
              <SourceIcon iconSize={36} sourceType={"slack"} />
            </div>
            <TextFormField name="name" label="Slack Bot Name" type="text" />
          </div>
        )}

        {!isUpdate && (
          <div className="mb-4">
            Please enter your Slack Bot Token and Slack App Token to give
            Danswerbot access to your Slack!
          </div>
        )}
        <TextFormField
          name="bot_token"
          label="Slack Bot Token"
          type="password"
        />
        <TextFormField
          name="app_token"
          label="Slack App Token"
          type="password"
        />
        <div className="flex justify-end w-full mt-4">
          <Button
            type="submit"
            disabled={isSubmitting}
            variant="submit"
            size="default"
          >
            {isUpdate ? "Update!" : "Create!"}
          </Button>
        </div>
      </Form>
    )}
  </Formik>
);
157
web/src/app/admin/bots/[bot-id]/SlackChannelConfigsTable.tsx
Normal file
@@ -0,0 +1,157 @@
"use client";

import { PageSelector } from "@/components/PageSelector";
import { PopupSpec } from "@/components/admin/connectors/Popup";
import { EditIcon, TrashIcon } from "@/components/icons/icons";
import { SlackChannelConfig } from "@/lib/types";
import {
  Table,
  TableBody,
  TableCell,
  TableHead,
  TableHeader,
  TableRow,
} from "@/components/ui/table";
import Link from "next/link";
import { useState } from "react";
import { FiArrowUpRight } from "react-icons/fi";
import { deleteSlackChannelConfig, isPersonaASlackBotPersona } from "./lib";

const numToDisplay = 50;

export function SlackChannelConfigsTable({
  slackBotId,
  slackChannelConfigs,
  refresh,
  setPopup,
}: {
  slackBotId: number;
  slackChannelConfigs: SlackChannelConfig[];
  refresh: () => void;
  setPopup: (popupSpec: PopupSpec | null) => void;
}) {
  const [page, setPage] = useState(1);

  // sort by id for consistent ordering
  slackChannelConfigs.sort((a, b) => {
    if (a.id < b.id) {
      return -1;
    } else if (a.id > b.id) {
      return 1;
    } else {
      return 0;
    }
  });

  return (
    <div>
      <div className="rounded-md border">
        <Table>
          <TableHeader>
            <TableRow>
              <TableHead>Channel</TableHead>
              <TableHead>Persona</TableHead>
              <TableHead>Document Sets</TableHead>
              <TableHead>Delete</TableHead>
            </TableRow>
          </TableHeader>
          <TableBody>
            {slackChannelConfigs
              .slice(numToDisplay * (page - 1), numToDisplay * page)
              .map((slackChannelConfig) => {
                return (
                  <TableRow key={slackChannelConfig.id}>
                    <TableCell>
                      <div className="flex gap-x-2">
                        <Link
                          className="cursor-pointer my-auto"
                          href={`/admin/bots/${slackBotId}/channels/${slackChannelConfig.id}`}
                        >
                          <EditIcon />
                        </Link>
                        <div className="my-auto">
                          {"#" + slackChannelConfig.channel_config.channel_name}
                        </div>
                      </div>
                    </TableCell>
                    <TableCell>
                      {slackChannelConfig.persona &&
                      !isPersonaASlackBotPersona(slackChannelConfig.persona) ? (
                        <Link
                          href={`/admin/assistants/${slackChannelConfig.persona.id}`}
                          className="text-blue-500 flex hover:underline"
                        >
                          <FiArrowUpRight className="my-auto mr-1" />
                          {slackChannelConfig.persona.name}
                        </Link>
                      ) : (
                        "-"
                      )}
                    </TableCell>
                    <TableCell>
                      <div>
                        {slackChannelConfig.persona &&
                        slackChannelConfig.persona.document_sets.length > 0
                          ? slackChannelConfig.persona.document_sets
                              .map((documentSet) => documentSet.name)
                              .join(", ")
                          : "-"}
                      </div>
                    </TableCell>
                    <TableCell>
                      <div
                        className="cursor-pointer hover:text-destructive"
                        onClick={async () => {
                          const response = await deleteSlackChannelConfig(
                            slackChannelConfig.id
                          );
                          if (response.ok) {
                            setPopup({
                              message: `Slack bot config "${slackChannelConfig.id}" deleted`,
                              type: "success",
                            });
                          } else {
                            const errorMsg = await response.text();
                            setPopup({
                              message: `Failed to delete Slack bot config - ${errorMsg}`,
                              type: "error",
                            });
                          }
                          refresh();
                        }}
                      >
                        <TrashIcon />
                      </div>
                    </TableCell>
                  </TableRow>
                );
              })}

            {/* Empty row with message when table has no data */}
            {slackChannelConfigs.length === 0 && (
              <TableRow>
                <TableCell
                  colSpan={4}
                  className="text-center text-muted-foreground"
                >
                  Please add a New Slack Bot Configuration to begin chatting
                  with Danswer!
                </TableCell>
              </TableRow>
            )}
          </TableBody>
        </Table>
      </div>

      <div className="mt-3 flex">
        <div className="mx-auto">
          <PageSelector
            totalPages={Math.ceil(slackChannelConfigs.length / numToDisplay)}
            currentPage={page}
            onPageChange={(newPage) => setPage(newPage)}
          />
        </div>
      </div>
    </div>
  );
}
@@ -3,47 +3,55 @@
import { ArrayHelpers, FieldArray, Form, Formik } from "formik";
import * as Yup from "yup";
import { usePopup } from "@/components/admin/connectors/Popup";
import { DocumentSet, SlackBotConfig } from "@/lib/types";
import { DocumentSet, SlackChannelConfig } from "@/lib/types";
import {
  BooleanFormField,
  Label,
  SelectorFormField,
  SubLabel,
  TextArrayField,
  TextFormField,
} from "@/components/admin/connectors/Field";
import {
  createSlackBotConfig,
  createSlackChannelConfig,
  isPersonaASlackBotPersona,
  updateSlackBotConfig,
} from "./lib";
import { Separator } from "@/components/ui/separator";
  updateSlackChannelConfig,
} from "../lib";
import CardSection from "@/components/admin/CardSection";
import { Button } from "@/components/ui/button";
import { useRouter } from "next/navigation";
import { Persona } from "../assistants/interfaces";
import { Persona } from "@/app/admin/assistants/interfaces";
import { useState } from "react";
import { AdvancedOptionsToggle } from "@/components/AdvancedOptionsToggle";
import { DocumentSetSelectable } from "@/components/documentSet/DocumentSetSelectable";
import CollapsibleSection from "../assistants/CollapsibleSection";
import CollapsibleSection from "@/app/admin/assistants/CollapsibleSection";
import { StandardAnswerCategoryResponse } from "@/components/standardAnswers/getStandardAnswerCategoriesIfEE";
import { StandardAnswerCategoryDropdownField } from "@/components/standardAnswers/StandardAnswerCategoryDropdown";
import {
  Tabs,
  TabsList,
  TabsTrigger,
  TabsContent,
} from "@/components/ui/fully_wrapped_tabs";

export const SlackBotCreationForm = ({
export const SlackChannelConfigCreationForm = ({
  slack_bot_id,
  documentSets,
  personas,
  standardAnswerCategoryResponse,
  existingSlackBotConfig,
  existingSlackChannelConfig,
}: {
  slack_bot_id: number;
  documentSets: DocumentSet[];
  personas: Persona[];
  standardAnswerCategoryResponse: StandardAnswerCategoryResponse;
  existingSlackBotConfig?: SlackBotConfig;
  existingSlackChannelConfig?: SlackChannelConfig;
}) => {
  const isUpdate = existingSlackBotConfig !== undefined;
  const isUpdate = existingSlackChannelConfig !== undefined;
  const { popup, setPopup } = usePopup();
  const router = useRouter();
  const existingSlackBotUsesPersona = existingSlackBotConfig?.persona
    ? !isPersonaASlackBotPersona(existingSlackBotConfig.persona)
  const existingSlackBotUsesPersona = existingSlackChannelConfig?.persona
    ? !isPersonaASlackBotPersona(existingSlackChannelConfig.persona)
    : false;
  const [usingPersonas, setUsingPersonas] = useState(
    existingSlackBotUsesPersona
@@ -58,48 +66,52 @@ export const SlackBotCreationForm = ({
      {popup}
      <Formik
        initialValues={{
          channel_names: existingSlackBotConfig
            ? existingSlackBotConfig.channel_config.channel_names
            : ([""] as string[]),
          slack_bot_id: slack_bot_id,
          channel_name:
            existingSlackChannelConfig?.channel_config.channel_name,
          answer_validity_check_enabled: (
            existingSlackBotConfig?.channel_config?.answer_filters || []
            existingSlackChannelConfig?.channel_config?.answer_filters || []
          ).includes("well_answered_postfilter"),
          questionmark_prefilter_enabled: (
            existingSlackBotConfig?.channel_config?.answer_filters || []
            existingSlackChannelConfig?.channel_config?.answer_filters || []
          ).includes("questionmark_prefilter"),
          respond_tag_only:
            existingSlackBotConfig?.channel_config?.respond_tag_only || false,
            existingSlackChannelConfig?.channel_config?.respond_tag_only ||
            false,
          respond_to_bots:
            existingSlackBotConfig?.channel_config?.respond_to_bots || false,
            existingSlackChannelConfig?.channel_config?.respond_to_bots ||
            false,
          enable_auto_filters:
            existingSlackBotConfig?.enable_auto_filters || false,
            existingSlackChannelConfig?.enable_auto_filters || false,
          respond_member_group_list:
            existingSlackBotConfig?.channel_config
            existingSlackChannelConfig?.channel_config
              ?.respond_member_group_list ?? [],
          still_need_help_enabled:
            existingSlackBotConfig?.channel_config?.follow_up_tags !==
            existingSlackChannelConfig?.channel_config?.follow_up_tags !==
            undefined,
          follow_up_tags:
            existingSlackBotConfig?.channel_config?.follow_up_tags,
            existingSlackChannelConfig?.channel_config?.follow_up_tags,
          document_sets:
            existingSlackBotConfig && existingSlackBotConfig.persona
              ? existingSlackBotConfig.persona.document_sets.map(
            existingSlackChannelConfig && existingSlackChannelConfig.persona
              ? existingSlackChannelConfig.persona.document_sets.map(
                  (documentSet) => documentSet.id
                )
              : ([] as number[]),
          // prettier-ignore
          persona_id:
            existingSlackBotConfig?.persona &&
            !isPersonaASlackBotPersona(existingSlackBotConfig.persona)
              ? existingSlackBotConfig.persona.id
            existingSlackChannelConfig?.persona &&
            !isPersonaASlackBotPersona(existingSlackChannelConfig.persona)
              ? existingSlackChannelConfig.persona.id
              : knowledgePersona?.id ?? null,
          response_type: existingSlackBotConfig?.response_type || "citations",
          standard_answer_categories: existingSlackBotConfig
            ? existingSlackBotConfig.standard_answer_categories
          response_type:
            existingSlackChannelConfig?.response_type || "citations",
          standard_answer_categories: existingSlackChannelConfig
            ? existingSlackChannelConfig.standard_answer_categories
            : [],
        }}
        validationSchema={Yup.object().shape({
          channel_names: Yup.array().of(Yup.string()),
          slack_bot_id: Yup.number().required(),
          channel_name: Yup.string(),
          response_type: Yup.string()
            .oneOf(["quotes", "citations"])
            .required(),
@@ -118,12 +130,10 @@ export const SlackBotCreationForm = ({
        onSubmit={async (values, formikHelpers) => {
          formikHelpers.setSubmitting(true);

          // remove empty channel names
          const cleanedValues = {
            ...values,
            channel_names: values.channel_names.filter(
              (channelName) => channelName !== ""
            ),
            slack_bot_id: slack_bot_id,
            channel_name: values.channel_name!,
            respond_member_group_list: values.respond_member_group_list,
            usePersona: usingPersonas,
            standard_answer_categories: values.standard_answer_categories.map(
@@ -139,16 +149,16 @@ export const SlackBotCreationForm = ({
          }
          let response;
          if (isUpdate) {
            response = await updateSlackBotConfig(
              existingSlackBotConfig.id,
            response = await updateSlackChannelConfig(
              existingSlackChannelConfig.id,
              cleanedValues
            );
          } else {
            response = await createSlackBotConfig(cleanedValues);
            response = await createSlackChannelConfig(cleanedValues);
          }
          formikHelpers.setSubmitting(false);
          if (response.ok) {
            router.push(`/admin/bot?u=${Date.now()}`);
            router.push(`/admin/bots/${slack_bot_id}`);
          } else {
            const responseJson = await response.json();
            const errorMsg = responseJson.detail || responseJson.message;
@@ -164,53 +174,38 @@ export const SlackBotCreationForm = ({
        {({ isSubmitting, values, setFieldValue }) => (
          <Form>
            <div className="px-6 pb-6 pt-4 w-full">
              <TextArrayField
                name="channel_names"
                label="Channel Names"
                values={values}
                subtext="The names of the Slack channels you want this configuration to apply to.
                  For example, #ask-danswer."
                minFields={1}
                placeholder="Enter channel name..."
              <TextFormField
                name="channel_name"
                label="Slack Channel Name:"
              />

              <div className="mt-6">
                <Label>Knowledge Sources</Label>

                <SubLabel>
                  Controls which information DanswerBot will pull from when
                  answering questions.
                </SubLabel>

                <div className="flex mt-4">
                  <button
                    type="button"
                    onClick={() => setUsingPersonas(false)}
                    className={`p-2 font-bold text-xs mr-3 ${
                      !usingPersonas
                        ? "rounded bg-background-900 text-text-100 underline"
                        : "hover:underline bg-background-100"
                    }`}
                  >
                    Document Sets
                  </button>
                <Tabs
                  defaultValue="document_sets"
                  className="w-full mt-4"
                  value={usingPersonas ? "assistants" : "document_sets"}
                  onValueChange={(value) =>
                    setUsingPersonas(value === "assistants")
                  }
                >
                  <TabsList>
                    <TabsTrigger value="document_sets">
                      Document Sets
                    </TabsTrigger>
                    <TabsTrigger value="assistants">Assistants</TabsTrigger>
                  </TabsList>

                  <button
                    type="button"
                    onClick={() => setUsingPersonas(true)}
                    className={`p-2 font-bold text-xs ${
                      usingPersonas
                        ? "rounded bg-background-900 text-text-100 underline"
                        : "hover:underline bg-background-100"
                    }`}
                  >
                    Assistants
                  </button>
                </div>

                <div className="mt-4">
                  {/* TODO: make this look nicer */}
                  {usingPersonas ? (
                  <TabsContent value="assistants">
                    <SubLabel>
                      Select the assistant DanswerBot will use while answering
                      questions in Slack.
                    </SubLabel>
                    <SelectorFormField
                      name="persona_id"
                      options={personas.map((persona) => {
@@ -220,7 +215,17 @@ export const SlackBotCreationForm = ({
                        };
                      })}
                    />
                  ) : (
                  </TabsContent>

                  <TabsContent value="document_sets">
                    <SubLabel>
                      Select the document sets DanswerBot will use while
                      answering questions in Slack.
                    </SubLabel>
                    <SubLabel>
                      Note: If No Document Sets are selected, DanswerBot will
                      search through all connected documents.
                    </SubLabel>
                    <FieldArray
                      name="document_sets"
                      render={(arrayHelpers: ArrayHelpers) => (
@@ -248,21 +253,14 @@ export const SlackBotCreationForm = ({
                          );
                        })}
                      </div>
                      <div>
                        <SubLabel>
                          Note: If left blank, DanswerBot will search
                          through all connected documents.
                        </SubLabel>
                      </div>
                      <div></div>
                    </div>
                  )}
                    />
                  )}
                </div>
                  </TabsContent>
                </Tabs>
              </div>

              <Separator />

              <AdvancedOptionsToggle
                showAdvancedOptions={showAdvancedOptions}
                setShowAdvancedOptions={setShowAdvancedOptions}
@@ -1,10 +1,9 @@
import { AdminPageTitle } from "@/components/admin/Title";
import { CPUIcon } from "@/components/icons/icons";
import { SlackBotCreationForm } from "../SlackBotConfigCreationForm";
import { SourceIcon } from "@/components/SourceIcon";
import { SlackChannelConfigCreationForm } from "../SlackChannelConfigCreationForm";
import { fetchSS } from "@/lib/utilsSS";
import { ErrorCallout } from "@/components/ErrorCallout";
import { DocumentSet, SlackBotConfig } from "@/lib/types";
import Text from "@/components/ui/text";
import { DocumentSet, SlackChannelConfig } from "@/lib/types";
import { BackButton } from "@/components/BackButton";
import { InstantSSRAutoRefresh } from "@/components/SSRAutoRefresh";
import {
@@ -13,16 +12,18 @@ import {
} from "@/lib/assistants/fetchAssistantsSS";
import { getStandardAnswerCategoriesIfEE } from "@/components/standardAnswers/getStandardAnswerCategoriesIfEE";

async function Page(props: { params: Promise<{ id: string }> }) {
async function EditslackChannelConfigPage(props: {
  params: Promise<{ id: number }>;
}) {
  const params = await props.params;
  const tasks = [
    fetchSS("/manage/admin/slack-bot/config"),
    fetchSS("/manage/admin/slack-app/channel"),
    fetchSS("/manage/document-set"),
    fetchAssistantsSS(),
  ];

  const [
    slackBotsResponse,
    slackChannelsResponse,
    documentSetsResponse,
    [assistants, assistantsFetchError],
  ] = (await Promise.all(tasks)) as [
@@ -34,24 +35,26 @@ async function Page(props: { params: Promise<{ id: string }> }) {
  const eeStandardAnswerCategoryResponse =
    await getStandardAnswerCategoriesIfEE();

  if (!slackBotsResponse.ok) {
  if (!slackChannelsResponse.ok) {
    return (
      <ErrorCallout
        errorTitle="Something went wrong :("
        errorMsg={`Failed to fetch slack bots - ${await slackBotsResponse.text()}`}
        errorMsg={`Failed to fetch Slack Channels - ${await slackChannelsResponse.text()}`}
      />
    );
  }
  const allSlackBotConfigs =
    (await slackBotsResponse.json()) as SlackBotConfig[];
  const slackBotConfig = allSlackBotConfigs.find(
    (config) => config.id.toString() === params.id
  const allslackChannelConfigs =
    (await slackChannelsResponse.json()) as SlackChannelConfig[];

  const slackChannelConfig = allslackChannelConfigs.find(
    (config) => config.id === Number(params.id)
  );
  if (!slackBotConfig) {

  if (!slackChannelConfig) {
    return (
      <ErrorCallout
        errorTitle="Something went wrong :("
        errorMsg={`Did not find Slack Bot config with ID: ${params.id}`}
        errorMsg={`Did not find Slack Channel config with ID: ${params.id}`}
      />
    );
  }
@@ -81,23 +84,19 @@ async function Page(props: { params: Promise<{ id: string }> }) {

      <BackButton />
      <AdminPageTitle
        icon={<CPUIcon size={32} />}
        title="Edit Slack Bot Config"
        icon={<SourceIcon sourceType={"slack"} iconSize={32} />}
        title="Edit Slack Channel Config"
      />

      <Text className="mb-8">
        Edit the existing configuration below! This config will determine how
        DanswerBot behaves in the specified channels.
      </Text>

      <SlackBotCreationForm
      <SlackChannelConfigCreationForm
        slack_bot_id={slackChannelConfig.slack_bot_id}
        documentSets={documentSets}
        personas={assistants}
        standardAnswerCategoryResponse={eeStandardAnswerCategoryResponse}
        existingSlackBotConfig={slackBotConfig}
        existingSlackChannelConfig={slackChannelConfig}
      />
    </div>
  );
}

export default Page;
export default EditslackChannelConfigPage;
76
web/src/app/admin/bots/[bot-id]/channels/new/page.tsx
Normal file
@@ -0,0 +1,76 @@
import { AdminPageTitle } from "@/components/admin/Title";
import { SlackChannelConfigCreationForm } from "../SlackChannelConfigCreationForm";
import { fetchSS } from "@/lib/utilsSS";
import { ErrorCallout } from "@/components/ErrorCallout";
import { DocumentSet } from "@/lib/types";
import { BackButton } from "@/components/BackButton";
import { fetchAssistantsSS } from "@/lib/assistants/fetchAssistantsSS";
import {
  getStandardAnswerCategoriesIfEE,
  StandardAnswerCategoryResponse,
} from "@/components/standardAnswers/getStandardAnswerCategoriesIfEE";
import { redirect } from "next/navigation";
import { Persona } from "../../../../assistants/interfaces";
import { SourceIcon } from "@/components/SourceIcon";

async function NewChannelConfigPage(props: {
  params: Promise<{ "bot-id": string }>;
}) {
  const unwrappedParams = await props.params;
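  // The route param arrives as a string; parse it and fall back to the bots list if it is not a valid id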
  const slack_bot_id_raw = unwrappedParams?.["bot-id"] || null;
  const slack_bot_id = slack_bot_id_raw
    ? parseInt(slack_bot_id_raw as string, 10)
    : null;
  if (!slack_bot_id || isNaN(slack_bot_id)) {
    redirect("/admin/bots");
    return null;
  }

  const [
    documentSetsResponse,
    assistantsResponse,
    standardAnswerCategoryResponse,
  ] = await Promise.all([
    fetchSS("/manage/document-set") as Promise<Response>,
    fetchAssistantsSS() as Promise<[Persona[], string | null]>,
    getStandardAnswerCategoriesIfEE() as Promise<StandardAnswerCategoryResponse>,
  ]);

  if (!documentSetsResponse.ok) {
    return (
      <ErrorCallout
        errorTitle="Something went wrong :("
        errorMsg={`Failed to fetch document sets - ${await documentSetsResponse.text()}`}
      />
    );
  }
  const documentSets = (await documentSetsResponse.json()) as DocumentSet[];

  if (assistantsResponse[1]) {
    return (
      <ErrorCallout
        errorTitle="Something went wrong :("
        errorMsg={`Failed to fetch assistants - ${assistantsResponse[1]}`}
      />
    );
  }

  return (
    <div className="container mx-auto">
      <BackButton />
      <AdminPageTitle
        icon={<SourceIcon iconSize={32} sourceType={"slack"} />}
        title="Configure DanswerBot for Slack Channel"
      />

      <SlackChannelConfigCreationForm
        slack_bot_id={slack_bot_id}
        documentSets={documentSets}
        personas={assistantsResponse[0]}
        standardAnswerCategoryResponse={standardAnswerCategoryResponse}
      />
    </div>
  );
}

export default NewChannelConfigPage;
43
web/src/app/admin/bots/[bot-id]/hooks.ts
Normal file
@@ -0,0 +1,43 @@
import { errorHandlingFetcher } from "@/lib/fetcher";
import { SlackBot, SlackChannelConfig } from "@/lib/types";
import useSWR, { mutate } from "swr";

export const useSlackChannelConfigs = () => {
  const url = "/api/manage/admin/slack-app/channel";
  const swrResponse = useSWR<SlackChannelConfig[]>(url, errorHandlingFetcher);

  return {
    ...swrResponse,
    refreshSlackChannelConfigs: () => mutate(url),
  };
};

export const useSlackBots = () => {
  const url = "/api/manage/admin/slack-app/bots";
  const swrResponse = useSWR<SlackBot[]>(url, errorHandlingFetcher);

  return {
    ...swrResponse,
    refreshSlackBots: () => mutate(url),
  };
};

export const useSlackBot = (botId: number) => {
  const url = `/api/manage/admin/slack-app/bots/${botId}`;
  const swrResponse = useSWR<SlackBot>(url, errorHandlingFetcher);

  return {
    ...swrResponse,
    refreshSlackBot: () => mutate(url),
  };
};

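// Channel configs scoped to a single Slack bot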
export const useSlackChannelConfigsByBot = (botId: number) => {
  const url = `/api/manage/admin/slack-app/bots/${botId}/config`;
  const swrResponse = useSWR<SlackChannelConfig[]>(url, errorHandlingFetcher);

  return {
    ...swrResponse,
    refreshSlackChannelConfigs: () => mutate(url),
  };
};
@@ -3,13 +3,14 @@ import {
   SlackBotResponseType,
   SlackBotTokens,
 } from "@/lib/types";
-import { Persona } from "../assistants/interfaces";
+import { Persona } from "@/app/admin/assistants/interfaces";

-interface SlackBotConfigCreationRequest {
+interface SlackChannelConfigCreationRequest {
+  slack_bot_id: number;
   document_sets: number[];
   persona_id: number | null;
   enable_auto_filters: boolean;
-  channel_names: string[];
+  channel_name: string;
   answer_validity_check_enabled: boolean;
   questionmark_prefilter_enabled: boolean;
   respond_tag_only: boolean;
@@ -22,7 +23,7 @@ interface SlackBotConfigCreationRequest {
 }

 const buildFiltersFromCreationRequest = (
-  creationRequest: SlackBotConfigCreationRequest
+  creationRequest: SlackChannelConfigCreationRequest
 ): string[] => {
   const answerFilters = [] as string[];
   if (creationRequest.answer_validity_check_enabled) {
@@ -35,10 +36,11 @@ const buildFiltersFromCreationRequest = (
 };

 const buildRequestBodyFromCreationRequest = (
-  creationRequest: SlackBotConfigCreationRequest
+  creationRequest: SlackChannelConfigCreationRequest
 ) => {
   return JSON.stringify({
-    channel_names: creationRequest.channel_names,
+    slack_bot_id: creationRequest.slack_bot_id,
+    channel_name: creationRequest.channel_name,
     respond_tag_only: creationRequest.respond_tag_only,
     respond_to_bots: creationRequest.respond_to_bots,
     enable_auto_filters: creationRequest.enable_auto_filters,
@@ -53,10 +55,10 @@ const buildRequestBodyFromCreationRequest = (
   });
 };

-export const createSlackBotConfig = async (
-  creationRequest: SlackBotConfigCreationRequest
+export const createSlackChannelConfig = async (
+  creationRequest: SlackChannelConfigCreationRequest
 ) => {
-  return fetch("/api/manage/admin/slack-bot/config", {
+  return fetch("/api/manage/admin/slack-app/channel", {
     method: "POST",
     headers: {
       "Content-Type": "application/json",
@@ -65,11 +67,11 @@ export const createSlackBotConfig = async (
   });
 };

-export const updateSlackBotConfig = async (
+export const updateSlackChannelConfig = async (
   id: number,
-  creationRequest: SlackBotConfigCreationRequest
+  creationRequest: SlackChannelConfigCreationRequest
 ) => {
-  return fetch(`/api/manage/admin/slack-bot/config/${id}`, {
+  return fetch(`/api/manage/admin/slack-app/channel/${id}`, {
     method: "PATCH",
     headers: {
       "Content-Type": "application/json",
@@ -78,8 +80,8 @@ export const updateSlackBotConfig = async (
   });
 };

-export const deleteSlackBotConfig = async (id: number) => {
-  return fetch(`/api/manage/admin/slack-bot/config/${id}`, {
+export const deleteSlackChannelConfig = async (id: number) => {
+  return fetch(`/api/manage/admin/slack-app/channel/${id}`, {
     method: "DELETE",
     headers: {
       "Content-Type": "application/json",
@@ -87,16 +89,6 @@ export const deleteSlackBotConfig = async (id: number) => {
   });
 };

-export const setSlackBotTokens = async (slackBotTokens: SlackBotTokens) => {
-  return fetch(`/api/manage/admin/slack-bot/tokens`, {
-    method: "PUT",
-    headers: {
-      "Content-Type": "application/json",
-    },
-    body: JSON.stringify(slackBotTokens),
-  });
-};
-
 export function isPersonaASlackBotPersona(persona: Persona) {
   return persona.name.startsWith("__slack_bot_persona__");
 }
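Taken together, this diff scopes channel configuration under a single bot: the request now carries slack_bot_id and one channel_name instead of a channel_names list, and the endpoints move from /api/manage/admin/slack-bot/config to /api/manage/admin/slack-app/channel. A sketch of a call against the new shape follows; the values are illustrative, and since the hunk truncates the interface, the remaining required fields are stubbed behind a cast rather than invented:

// Illustrative only: field values are made up, and fields the diff does
// not show are left out via the cast. Not the project's actual usage.
const response = await createSlackChannelConfig({
  slack_bot_id: 7, // new: each config is owned by one bot
  channel_name: "support", // replaces the old channel_names: string[]
  document_sets: [],
  persona_id: null,
  enable_auto_filters: false,
  answer_validity_check_enabled: false,
  questionmark_prefilter_enabled: false,
  respond_tag_only: false,
  // ...remaining fields elided in the hunk above
} as SlackChannelConfigCreationRequest);
if (response.ok) {
  // e.g. call refreshSlackChannelConfigs() from the hook above
}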
118 web/src/app/admin/bots/[bot-id]/page.tsx Normal file
@@ -0,0 +1,118 @@
"use client";

import { use } from "react";
import { BackButton } from "@/components/BackButton";
import { ErrorCallout } from "@/components/ErrorCallout";
import { ThreeDotsLoader } from "@/components/Loading";
import { InstantSSRAutoRefresh } from "@/components/SSRAutoRefresh";
import { usePopup } from "@/components/admin/connectors/Popup";
import Link from "next/link";
import { SlackChannelConfigsTable } from "./SlackChannelConfigsTable";
import { useSlackBot, useSlackChannelConfigsByBot } from "./hooks";
import { ExistingSlackBotForm } from "../SlackBotUpdateForm";
import { FiPlusSquare } from "react-icons/fi";
import { Separator } from "@/components/ui/separator";

function SlackBotEditPage({
  params,
}: {
  params: Promise<{ "bot-id": string }>;
}) {
  // Unwrap the params promise
  const unwrappedParams = use(params);
  const { popup, setPopup } = usePopup();

  console.log("unwrappedParams", unwrappedParams);
  const {
    data: slackBot,
    isLoading: isSlackBotLoading,
    error: slackBotError,
    refreshSlackBot,
  } = useSlackBot(Number(unwrappedParams["bot-id"]));

  const {
    data: slackChannelConfigs,
    isLoading: isSlackChannelConfigsLoading,
    error: slackChannelConfigsError,
    refreshSlackChannelConfigs,
  } = useSlackChannelConfigsByBot(Number(unwrappedParams["bot-id"]));

  if (isSlackBotLoading || isSlackChannelConfigsLoading) {
    return <ThreeDotsLoader />;
  }

  if (slackBotError || !slackBot) {
    const errorMsg =
      slackBotError?.info?.message ||
      slackBotError?.info?.detail ||
      "An unknown error occurred";
    return (
      <ErrorCallout
        errorTitle="Something went wrong :("
        errorMsg={`Failed to fetch Slack Bot ${unwrappedParams["bot-id"]}: ${errorMsg}`}
      />
    );
  }

  if (slackChannelConfigsError || !slackChannelConfigs) {
    const errorMsg =
      slackChannelConfigsError?.info?.message ||
      slackChannelConfigsError?.info?.detail ||
      "An unknown error occurred";
    return (
      <ErrorCallout
        errorTitle="Something went wrong :("
        errorMsg={`Failed to fetch Slack Bot ${unwrappedParams["bot-id"]}: ${errorMsg}`}
      />
    );
  }

  return (
    <div className="container mx-auto">
      <InstantSSRAutoRefresh />

      <BackButton routerOverride="/admin/bots" />

      <ExistingSlackBotForm
        existingSlackBot={slackBot}
        refreshSlackBot={refreshSlackBot}
      />
      <Separator />

      <div className="my-8" />

      <Link
        className="
          flex
          py-2
          px-4
          mt-2
          border
          border-border
          h-fit
          cursor-pointer
          hover:bg-hover
          text-sm
          w-80
        "
        href={`/admin/bots/new?slack_bot_id=${unwrappedParams["bot-id"]}`}
      >
        <div className="mx-auto flex">
          <FiPlusSquare className="my-auto mr-2" />
          New Slack Channel Configuration
        </div>
      </Link>

      <div className="mt-8">
        <SlackChannelConfigsTable
          slackBotId={slackBot.id}
          slackChannelConfigs={slackChannelConfigs}
          refresh={refreshSlackChannelConfigs}
          setPopup={setPopup}
        />
      </div>
    </div>
  );
}

export default SlackBotEditPage;
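The params: Promise<{ "bot-id": string }> signature and the use(params) unwrap reflect Next.js 15's move to asynchronous route params: in a client component, params now arrives as a Promise and React's use() suspends until it resolves. A minimal sketch of the same pattern in isolation; the route segment and field name here are illustrative, not from this diff:

// Minimal sketch (not from the diff): unwrapping async route params
// with React's `use` in a Next.js 15-style client component.
"use client";

import { use } from "react";

export default function ExamplePage({
  params,
}: {
  params: Promise<{ id: string }>; // params arrive as a Promise
}) {
  const { id } = use(params); // suspends until the promise resolves
  return <p>Editing item {id}</p>;
}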
Some files were not shown because too many files have changed in this diff.