Compare commits

..

23 Commits

Author SHA1 Message Date
pablodanswer
08b26c3227 update folder logic 2024-12-14 17:00:22 -08:00
pablodanswer
2cc72255d2 cloud settings -> billing 2024-12-14 17:00:22 -08:00
pablonyx
0c3dab8e8d Make doc count query more efficient (#3461) 2024-12-14 16:26:36 -08:00
Yuhong Sun
47735e2044 Rebrand Seeding Docs (#3467) 2024-12-14 16:08:13 -08:00
pablonyx
1eeab8c773 Update gmail test configuration
Update gmail test configuration
2024-12-14 14:53:45 -08:00
pablodanswer
e9b41bddc9 gmail configuration update 2024-12-14 14:53:02 -08:00
Yuhong Sun
73a86b9019 Reenable Seeding (#3464) 2024-12-14 12:26:08 -08:00
rkuo-danswer
12c426c87b Merge pull request #3458 from onyx-dot-app/bugfix/connector_tests
test changing back emails
2024-12-13 20:30:55 -08:00
Richard Kuo
06aeab6d59 fix scope typo 2024-12-13 20:21:10 -08:00
Richard Kuo
9b7e67004c Revert "test changing back emails"
This reverts commit 626ce74aa3.
2024-12-13 20:20:54 -08:00
Richard Kuo
626ce74aa3 test changing back emails 2024-12-13 18:18:01 -08:00
pablonyx
cec63465eb Improved invited users
Improved invited users
2024-12-13 17:22:32 -08:00
pablodanswer
5f4b31d322 k 2024-12-13 17:21:54 -08:00
pablonyx
ab5e515a5a Organize frontend tests
Organize frontend tests
2024-12-13 14:58:43 -08:00
pablodanswer
699a02902a nit 2024-12-13 12:50:02 -08:00
pablodanswer
c85157f734 k 2024-12-13 12:48:50 -08:00
pablodanswer
824844bf84 post rebase fix 2024-12-13 12:08:03 -08:00
pablodanswer
a6ab8a8da4 organize fe tests 2024-12-13 12:06:26 -08:00
pablodanswer
40719eb542 github workflow reference updates 2024-12-13 11:50:46 -08:00
pablonyx
e8c72f9e82 Minor Docker Reference Updates
Minor Docker Reference Updates
2024-12-13 11:50:21 -08:00
pablodanswer
0ba77963c4 update nit references 2024-12-13 11:49:27 -08:00
pablonyx
86f2892349 Merge pull request #3439 from onyx-dot-app/goodbye_danswer
Introducing Onyx!
2024-12-13 11:43:00 -08:00
pablonyx
a01a9b9a99 nit (#3441) 2024-12-13 18:04:46 +00:00
82 changed files with 11301 additions and 4321 deletions

View File

@@ -6,7 +6,7 @@ on:
- "*"
env:
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'danswer/danswer-backend-cloud' || 'danswer/danswer-backend' }}
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-backend-cloud' || 'onyxdotapp/onyx-backend' }}
LATEST_TAG: ${{ contains(github.ref_name, 'latest') }}
jobs:
@@ -44,7 +44,7 @@ jobs:
${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}
${{ env.LATEST_TAG == 'true' && format('{0}:latest', env.REGISTRY_IMAGE) || '' }}
build-args: |
DANSWER_VERSION=${{ github.ref_name }}
ONYX_VERSION=${{ github.ref_name }}
# trivy has their own rate limiting issues causing this action to flake
# we worked around it by hardcoding to different db repos in env
@@ -57,7 +57,7 @@ jobs:
TRIVY_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-db:2"
TRIVY_JAVA_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-java-db:1"
with:
# To run locally: trivy image --severity HIGH,CRITICAL danswer/danswer-backend
# To run locally: trivy image --severity HIGH,CRITICAL onyxdotapp/onyx-backend
image-ref: docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}
severity: "CRITICAL,HIGH"
trivyignores: ./backend/.trivyignore

View File

@@ -7,7 +7,7 @@ on:
- "*"
env:
REGISTRY_IMAGE: danswer/danswer-web-server-cloud
REGISTRY_IMAGE: onyxdotapp/onyx-web-server-cloud
LATEST_TAG: ${{ contains(github.ref_name, 'latest') }}
jobs:
@@ -60,7 +60,7 @@ jobs:
platforms: ${{ matrix.platform }}
push: true
build-args: |
DANSWER_VERSION=${{ github.ref_name }}
ONYX_VERSION=${{ github.ref_name }}
NEXT_PUBLIC_CLOUD_ENABLED=true
NEXT_PUBLIC_POSTHOG_KEY=${{ secrets.POSTHOG_KEY }}
NEXT_PUBLIC_POSTHOG_HOST=${{ secrets.POSTHOG_HOST }}

View File

@@ -6,7 +6,7 @@ on:
- "*"
env:
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'danswer/danswer-model-server-cloud' || 'danswer/danswer-model-server' }}
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-model-server-cloud' || 'onyxdotapp/onyx-model-server' }}
LATEST_TAG: ${{ contains(github.ref_name, 'latest') }}
jobs:
@@ -38,7 +38,7 @@ jobs:
${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}
${{ env.LATEST_TAG == 'true' && format('{0}:latest', env.REGISTRY_IMAGE) || '' }}
build-args: |
DANSWER_VERSION=${{ github.ref_name }}
ONYX_VERSION=${{ github.ref_name }}
# trivy has their own rate limiting issues causing this action to flake
# we worked around it by hardcoding to different db repos in env
@@ -51,5 +51,5 @@ jobs:
TRIVY_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-db:2"
TRIVY_JAVA_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-java-db:1"
with:
image-ref: docker.io/danswer/danswer-model-server:${{ github.ref_name }}
image-ref: docker.io/onyxdotapp/onyx-model-server:${{ github.ref_name }}
severity: "CRITICAL,HIGH"

View File

@@ -3,12 +3,12 @@ name: Build and Push Web Image on Tag
on:
push:
tags:
- '*'
- "*"
env:
REGISTRY_IMAGE: danswer/danswer-web-server
REGISTRY_IMAGE: onyxdotapp/onyx-web-server
LATEST_TAG: ${{ contains(github.ref_name, 'latest') }}
jobs:
build:
runs-on:
@@ -27,11 +27,11 @@ jobs:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
@@ -40,16 +40,16 @@ jobs:
tags: |
type=raw,value=${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}
type=raw,value=${{ env.LATEST_TAG == 'true' && format('{0}:latest', env.REGISTRY_IMAGE) || '' }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push by digest
id: build
uses: docker/build-push-action@v5
@@ -59,18 +59,18 @@ jobs:
platforms: ${{ matrix.platform }}
push: true
build-args: |
DANSWER_VERSION=${{ github.ref_name }}
# needed due to weird interactions with the builds for different platforms
ONYX_VERSION=${{ github.ref_name }}
# needed due to weird interactions with the builds for different platforms
no-cache: true
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
@@ -90,42 +90,42 @@ jobs:
path: /tmp/digests
pattern: digests-*
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
# trivy has their own rate limiting issues causing this action to flake
# we worked around it by hardcoding to different db repos in env
# can re-enable when they figure it out
# https://github.com/aquasecurity/trivy/discussions/7538
# https://github.com/aquasecurity/trivy-action/issues/389
# trivy has their own rate limiting issues causing this action to flake
# we worked around it by hardcoding to different db repos in env
# can re-enable when they figure it out
# https://github.com/aquasecurity/trivy/discussions/7538
# https://github.com/aquasecurity/trivy-action/issues/389
- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@master
env:
TRIVY_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-db:2'
TRIVY_JAVA_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-java-db:1'
TRIVY_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-db:2"
TRIVY_JAVA_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-java-db:1"
with:
image-ref: docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}
severity: 'CRITICAL,HIGH'
severity: "CRITICAL,HIGH"

View File

@@ -7,31 +7,31 @@ on:
workflow_dispatch:
inputs:
version:
description: 'The version (ie v0.0.1) to tag as latest'
description: "The version (ie v0.0.1) to tag as latest"
required: true
jobs:
tag:
# See https://runs-on.com/runners/linux/
# use a lower powered instance since this just does i/o to docker hub
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}"]
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Login to Docker Hub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Enable Docker CLI experimental features
run: echo "DOCKER_CLI_EXPERIMENTAL=enabled" >> $GITHUB_ENV
- name: Enable Docker CLI experimental features
run: echo "DOCKER_CLI_EXPERIMENTAL=enabled" >> $GITHUB_ENV
- name: Pull, Tag and Push Web Server Image
run: |
docker buildx imagetools create -t danswer/danswer-web-server:latest danswer/danswer-web-server:${{ github.event.inputs.version }}
- name: Pull, Tag and Push Web Server Image
run: |
docker buildx imagetools create -t onyxdotapp/onyx-web-server:latest onyxdotapp/onyx-web-server:${{ github.event.inputs.version }}
- name: Pull, Tag and Push API Server Image
run: |
docker buildx imagetools create -t danswer/danswer-backend:latest danswer/danswer-backend:${{ github.event.inputs.version }}
- name: Pull, Tag and Push API Server Image
run: |
docker buildx imagetools create -t onyxdotapp/onyx-backend:latest onyxdotapp/onyx-backend:${{ github.event.inputs.version }}

View File

@@ -8,43 +8,42 @@ on:
workflow_dispatch:
inputs:
hotfix_commit:
description: 'Hotfix commit hash'
description: "Hotfix commit hash"
required: true
hotfix_suffix:
description: 'Hotfix branch suffix (e.g. hotfix/v0.8-{suffix})'
description: "Hotfix branch suffix (e.g. hotfix/v0.8-{suffix})"
required: true
release_branch_pattern:
description: 'Release branch pattern (regex)'
description: "Release branch pattern (regex)"
required: true
default: 'release/.*'
default: "release/.*"
auto_merge:
description: 'Automatically merge the hotfix PRs'
description: "Automatically merge the hotfix PRs"
required: true
type: choice
default: 'true'
default: "true"
options:
- true
- false
jobs:
hotfix_release_branches:
permissions: write-all
# See https://runs-on.com/runners/linux/
# use a lower powered instance since this just does i/o to docker hub
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}"]
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
steps:
# needs RKUO_DEPLOY_KEY for write access to merge PR's
- name: Checkout Repository
uses: actions/checkout@v4
with:
ssh-key: "${{ secrets.RKUO_DEPLOY_KEY }}"
fetch-depth: 0
- name: Set up Git user
run: |
git config user.name "Richard Kuo [bot]"
git config user.email "rkuo[bot]@danswer.ai"
git config user.email "rkuo[bot]@onyx.app"
- name: Fetch All Branches
run: |
@@ -62,10 +61,10 @@ jobs:
echo "No release branches found matching pattern '${{ github.event.inputs.release_branch_pattern }}'."
exit 1
fi
echo "Found release branches:"
echo "$BRANCHES"
# Join the branches into a single line separated by commas
BRANCHES_JOINED=$(echo "$BRANCHES" | tr '\n' ',' | sed 's/,$//')
@@ -169,4 +168,4 @@ jobs:
echo "Failed to merge pull request #$PR_NUMBER."
fi
fi
done
done

View File

@@ -4,7 +4,7 @@ name: Backport on Merge
on:
pull_request:
types: [closed] # Later we check for merge so only PRs that go in can get backported
types: [closed] # Later we check for merge so only PRs that go in can get backported
permissions:
contents: write
@@ -26,9 +26,9 @@ jobs:
- name: Set up Git user
run: |
git config user.name "Richard Kuo [bot]"
git config user.email "rkuo[bot]@danswer.ai"
git config user.email "rkuo[bot]@onyx.app"
git fetch --prune
- name: Check for Backport Checkbox
id: checkbox-check
run: |
@@ -51,14 +51,14 @@ jobs:
# Fetch latest tags for beta and stable
LATEST_BETA_TAG=$(git tag -l "v[0-9]*.[0-9]*.[0-9]*-beta.[0-9]*" | grep -E "^v[0-9]+\.[0-9]+\.[0-9]+-beta\.[0-9]+$" | grep -v -- "-cloud" | sort -Vr | head -n 1)
LATEST_STABLE_TAG=$(git tag -l "v[0-9]*.[0-9]*.[0-9]*" | grep -E "^v[0-9]+\.[0-9]+\.[0-9]+$" | sort -Vr | head -n 1)
# Handle case where no beta tags exist
if [[ -z "$LATEST_BETA_TAG" ]]; then
NEW_BETA_TAG="v1.0.0-beta.1"
else
NEW_BETA_TAG=$(echo $LATEST_BETA_TAG | awk -F '[.-]' '{print $1 "." $2 "." $3 "-beta." ($NF+1)}')
fi
# Increment latest stable tag
NEW_STABLE_TAG=$(echo $LATEST_STABLE_TAG | awk -F '.' '{print $1 "." $2 "." ($3+1)}')
echo "latest_beta_tag=$LATEST_BETA_TAG" >> $GITHUB_OUTPUT
@@ -80,10 +80,10 @@ jobs:
run: |
set -e
echo "Backporting to beta ${{ steps.list-branches.outputs.beta }} and stable ${{ steps.list-branches.outputs.stable }}"
# Echo the merge commit SHA
echo "Merge commit SHA: ${{ github.event.pull_request.merge_commit_sha }}"
# Fetch all history for all branches and tags
git fetch --prune
@@ -98,7 +98,7 @@ jobs:
echo "Cherry-pick to beta failed due to conflicts."
exit 1
}
# Create new beta branch/tag
git tag ${{ steps.list-branches.outputs.new_beta_tag }}
# Push the changes and tag to the beta branch using PAT
@@ -110,13 +110,13 @@ jobs:
echo "Last 5 commits on stable branch:"
git log -n 5 --pretty=format:"%H"
echo "" # Newline for formatting
# Cherry-pick the merge commit from the merged PR
git cherry-pick -m 1 ${{ github.event.pull_request.merge_commit_sha }} || {
echo "Cherry-pick to stable failed due to conflicts."
exit 1
}
# Create new stable branch/tag
git tag ${{ steps.list-branches.outputs.new_stable_tag }}
# Push the changes and tag to the stable branch using PAT

View File

@@ -14,18 +14,19 @@ jobs:
name: Playwright Tests
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,ram=16,"run-id=${{ github.run_id }}"]
runs-on:
[runs-on, runner=8cpu-linux-x64, ram=16, "run-id=${{ github.run_id }}"]
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: 'pip'
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
@@ -35,7 +36,7 @@ jobs:
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
pip install --retries 5 --timeout 30 -r backend/requirements/model_server.txt
- name: Setup node
uses: actions/setup-node@v4
with:
@@ -48,7 +49,7 @@ jobs:
- name: Install playwright browsers
working-directory: ./web
run: npx playwright install --with-deps
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
@@ -60,13 +61,13 @@ jobs:
# tag every docker image with "test" so that we can spin up the correct set
# of images during testing
# we use the runs-on cache for docker builds
# in conjunction with runs-on runners, it has better speed and unlimited caching
# https://runs-on.com/caching/s3-cache-for-github-actions/
# https://runs-on.com/caching/docker/
# https://github.com/moby/buildkit#s3-cache-experimental
# images are built and run locally for testing purposes. Not pushed.
- name: Build Web Docker image
@@ -75,7 +76,7 @@ jobs:
context: ./web
file: ./web/Dockerfile
platforms: linux/amd64
tags: danswer/danswer-web-server:test
tags: onyxdotapp/onyx-web-server:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/web-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
@@ -87,7 +88,7 @@ jobs:
context: ./backend
file: ./backend/Dockerfile
platforms: linux/amd64
tags: danswer/danswer-backend:test
tags: onyxdotapp/onyx-backend:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/backend/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
@@ -99,7 +100,7 @@ jobs:
context: ./backend
file: ./backend/Dockerfile.model_server
platforms: linux/amd64
tags: danswer/danswer-model-server:test
tags: onyxdotapp/onyx-model-server:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
@@ -110,6 +111,7 @@ jobs:
cd deployment/docker_compose
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
AUTH_TYPE=basic \
GEN_AI_API_KEY=${{ secrets.OPENAI_API_KEY }} \
REQUIRE_EMAIL_VERIFICATION=false \
DISABLE_TELEMETRY=true \
IMAGE_TAG=test \
@@ -119,12 +121,12 @@ jobs:
- name: Wait for service to be ready
run: |
echo "Starting wait-for-service script..."
docker logs -f danswer-stack-api_server-1 &
start_time=$(date +%s)
timeout=300 # 5 minutes in seconds
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
@@ -152,7 +154,7 @@ jobs:
- name: Run pytest playwright test init
working-directory: ./backend
env:
env:
PYTEST_IGNORE_SKIP: true
run: pytest -s tests/integration/tests/playwright/test_playwright.py
@@ -168,7 +170,7 @@ jobs:
name: test-results
path: ./web/test-results
retention-days: 30
# save before stopping the containers so the logs can be captured
- name: Save Docker logs
if: success() || failure()
@@ -176,7 +178,7 @@ jobs:
cd deployment/docker_compose
docker compose -f docker-compose.dev.yml -p danswer-stack logs > docker-compose.log
mv docker-compose.log ${{ github.workspace }}/docker-compose.log
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@v4
@@ -191,35 +193,36 @@ jobs:
chromatic-tests:
name: Chromatic Tests
needs: playwright-tests
runs-on: [runs-on,runner=8cpu-linux-x64,ram=16,"run-id=${{ github.run_id }}"]
runs-on:
[runs-on, runner=8cpu-linux-x64, ram=16, "run-id=${{ github.run_id }}"]
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Setup node
uses: actions/setup-node@v4
with:
node-version: 22
- name: Install node dependencies
working-directory: ./web
run: npm ci
- name: Download Playwright test results
uses: actions/download-artifact@v4
with:
name: test-results
path: ./web/test-results
- name: Run Chromatic
uses: chromaui/action@latest
with:
playwright: true
projectToken: ${{ secrets.CHROMATIC_PROJECT_TOKEN }}
workingDir: ./web
env:
env:
CHROMATIC_ARCHIVE_LOCATION: ./test-results

View File

@@ -8,7 +8,7 @@ on:
pull_request:
branches:
- main
- 'release/**'
- "release/**"
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
@@ -16,11 +16,12 @@ env:
CONFLUENCE_TEST_SPACE_URL: ${{ secrets.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
jobs:
integration-tests:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,ram=16,"run-id=${{ github.run_id }}"]
runs-on:
[runs-on, runner=8cpu-linux-x64, ram=16, "run-id=${{ github.run_id }}"]
steps:
- name: Checkout code
uses: actions/checkout@v4
@@ -36,21 +37,21 @@ jobs:
# tag every docker image with "test" so that we can spin up the correct set
# of images during testing
# We don't need to build the Web Docker image since it's not yet used
# in the integration tests. We have a separate action to verify that it builds
# in the integration tests. We have a separate action to verify that it builds
# successfully.
- name: Pull Web Docker image
run: |
docker pull danswer/danswer-web-server:latest
docker tag danswer/danswer-web-server:latest danswer/danswer-web-server:test
docker pull onyxdotapp/onyx-web-server:latest
docker tag onyxdotapp/onyx-web-server:latest onyxdotapp/onyx-web-server:test
# we use the runs-on cache for docker builds
# in conjunction with runs-on runners, it has better speed and unlimited caching
# https://runs-on.com/caching/s3-cache-for-github-actions/
# https://runs-on.com/caching/docker/
# https://github.com/moby/buildkit#s3-cache-experimental
# images are built and run locally for testing purposes. Not pushed.
- name: Build Backend Docker image
uses: ./.github/actions/custom-build-and-push
@@ -58,7 +59,7 @@ jobs:
context: ./backend
file: ./backend/Dockerfile
platforms: linux/amd64
tags: danswer/danswer-backend:test
tags: onyxdotapp/onyx-backend:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/backend/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
@@ -70,19 +71,19 @@ jobs:
context: ./backend
file: ./backend/Dockerfile.model_server
platforms: linux/amd64
tags: danswer/danswer-model-server:test
tags: onyxdotapp/onyx-model-server:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
- name: Build integration test Docker image
uses: ./.github/actions/custom-build-and-push
with:
context: ./backend
file: ./backend/tests/integration/Dockerfile
platforms: linux/amd64
tags: danswer/danswer-integration:test
tags: onyxdotapp/onyx-integration:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/integration/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
@@ -119,7 +120,7 @@ jobs:
-e TEST_WEB_HOSTNAME=test-runner \
-e AUTH_TYPE=cloud \
-e MULTI_TENANT=true \
danswer/danswer-integration:test \
onyxdotapp/onyx-integration:test \
/app/tests/integration/multitenant_tests
continue-on-error: true
id: run_multitenant_tests
@@ -131,15 +132,14 @@ jobs:
exit 1
else
echo "All integration tests passed successfully."
fi
fi
- name: Stop multi-tenant Docker containers
run: |
cd deployment/docker_compose
docker compose -f docker-compose.dev.yml -p danswer-stack down -v
- name: Start Docker containers
- name: Start Docker containers
run: |
cd deployment/docker_compose
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
@@ -153,12 +153,12 @@ jobs:
- name: Wait for service to be ready
run: |
echo "Starting wait-for-service script..."
docker logs -f danswer-stack-api_server-1 &
start_time=$(date +%s)
timeout=300 # 5 minutes in seconds
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
@@ -202,7 +202,7 @@ jobs:
-e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \
-e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \
-e TEST_WEB_HOSTNAME=test-runner \
danswer/danswer-integration:test \
onyxdotapp/onyx-integration:test \
/app/tests/integration/tests \
/app/tests/integration/connector_job_tests
continue-on-error: true
@@ -229,7 +229,7 @@ jobs:
run: |
cd deployment/docker_compose
docker compose -f docker-compose.dev.yml -p danswer-stack down -v
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@v4

View File

@@ -2,53 +2,52 @@ name: Nightly Tag Push
on:
schedule:
- cron: '0 10 * * *' # Runs every day at 2 AM PST / 3 AM PDT / 10 AM UTC
- cron: "0 10 * * *" # Runs every day at 2 AM PST / 3 AM PDT / 10 AM UTC
permissions:
contents: write # Allows pushing tags to the repository
contents: write # Allows pushing tags to the repository
jobs:
create-and-push-tag:
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}"]
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
steps:
# actions using GITHUB_TOKEN cannot trigger another workflow, but we do want this to trigger docker pushes
# see https://github.com/orgs/community/discussions/27028#discussioncomment-3254367 for the workaround we
# implement here which needs an actual user's deploy key
- name: Checkout code
uses: actions/checkout@v4
with:
ssh-key: "${{ secrets.RKUO_DEPLOY_KEY }}"
# actions using GITHUB_TOKEN cannot trigger another workflow, but we do want this to trigger docker pushes
# see https://github.com/orgs/community/discussions/27028#discussioncomment-3254367 for the workaround we
# implement here which needs an actual user's deploy key
- name: Checkout code
uses: actions/checkout@v4
with:
ssh-key: "${{ secrets.RKUO_DEPLOY_KEY }}"
- name: Set up Git user
run: |
git config user.name "Richard Kuo [bot]"
git config user.email "rkuo[bot]@danswer.ai"
- name: Set up Git user
run: |
git config user.name "Richard Kuo [bot]"
git config user.email "rkuo[bot]@onyx.app"
- name: Check for existing nightly tag
id: check_tag
run: |
if git tag --points-at HEAD --list "nightly-latest*" | grep -q .; then
echo "A tag starting with 'nightly-latest' already exists on HEAD."
echo "tag_exists=true" >> $GITHUB_OUTPUT
else
echo "No tag starting with 'nightly-latest' exists on HEAD."
echo "tag_exists=false" >> $GITHUB_OUTPUT
fi
# don't tag again if HEAD already has a nightly-latest tag on it
- name: Create Nightly Tag
if: steps.check_tag.outputs.tag_exists == 'false'
env:
DATE: ${{ github.run_id }}
run: |
TAG_NAME="nightly-latest-$(date +'%Y%m%d')"
echo "Creating tag: $TAG_NAME"
git tag $TAG_NAME
- name: Check for existing nightly tag
id: check_tag
run: |
if git tag --points-at HEAD --list "nightly-latest*" | grep -q .; then
echo "A tag starting with 'nightly-latest' already exists on HEAD."
echo "tag_exists=true" >> $GITHUB_OUTPUT
else
echo "No tag starting with 'nightly-latest' exists on HEAD."
echo "tag_exists=false" >> $GITHUB_OUTPUT
fi
- name: Push Tag
if: steps.check_tag.outputs.tag_exists == 'false'
run: |
TAG_NAME="nightly-latest-$(date +'%Y%m%d')"
git push origin $TAG_NAME
# don't tag again if HEAD already has a nightly-latest tag on it
- name: Create Nightly Tag
if: steps.check_tag.outputs.tag_exists == 'false'
env:
DATE: ${{ github.run_id }}
run: |
TAG_NAME="nightly-latest-$(date +'%Y%m%d')"
echo "Creating tag: $TAG_NAME"
git tag $TAG_NAME
- name: Push Tag
if: steps.check_tag.outputs.tag_exists == 'false'
run: |
TAG_NAME="nightly-latest-$(date +'%Y%m%d')"
git push origin $TAG_NAME

View File

@@ -7,13 +7,13 @@ have a contract or agreement with DanswerAI, you are not permitted to use the En
Edition features outside of personal development or testing purposes. Please reach out to \
founders@onyx.app for more information. Please visit https://github.com/onyx-dot-app/onyx"
# Default DANSWER_VERSION, typically overriden during builds by GitHub Actions.
ARG DANSWER_VERSION=0.8-dev
ENV DANSWER_VERSION=${DANSWER_VERSION} \
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.8-dev
ENV ONYX_VERSION=${ONYX_VERSION} \
DANSWER_RUNNING_IN_DOCKER="true"
RUN echo "DANSWER_VERSION: ${DANSWER_VERSION}"
RUN echo "ONYX_VERSION: ${ONYX_VERSION}"
# Install system dependencies
# cmake needed for psycopg (postgres)
# libpq-dev needed for psycopg (postgres)

View File

@@ -6,13 +6,13 @@ AI models for Onyx. This container and all the code is MIT Licensed and free for
You can find it at https://hub.docker.com/r/onyx/onyx-model-server. For more details, \
visit https://github.com/onyx-dot-app/onyx."
# Default DANSWER_VERSION, typically overriden during builds by GitHub Actions.
ARG DANSWER_VERSION=0.8-dev
ENV DANSWER_VERSION=${DANSWER_VERSION} \
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.8-dev
ENV ONYX_VERSION=${ONYX_VERSION} \
DANSWER_RUNNING_IN_DOCKER="true"
RUN echo "DANSWER_VERSION: ${DANSWER_VERSION}"
RUN echo "ONYX_VERSION: ${ONYX_VERSION}"
COPY ./requirements/model_server.txt /tmp/requirements.txt
RUN pip install --no-cache-dir --upgrade \

View File

@@ -0,0 +1,32 @@
"""Add composite index to document_by_connector_credential_pair

Adds a composite index over (connector_id, credential_id) so that
lookups and aggregations keyed by a connector/credential pair — e.g.
the per-cc-pair document count query — can use an index scan instead
of a sequential scan.

Revision ID: dab04867cd88
Revises: 54a74a0417fc
Create Date: 2024-12-13 22:43:20.119990
"""
from alembic import op

# revision identifiers, used by Alembic.
revision = "dab04867cd88"
down_revision = "54a74a0417fc"
branch_labels = None
depends_on = None


def upgrade() -> None:
    """Create the composite (connector_id, credential_id) index."""
    # Composite index on (connector_id, credential_id)
    op.create_index(
        "idx_document_cc_pair_connector_credential",
        "document_by_connector_credential_pair",
        ["connector_id", "credential_id"],
        # non-unique: many documents can belong to the same cc-pair
        unique=False,
    )


def downgrade() -> None:
    """Drop the index created in upgrade()."""
    op.drop_index(
        "idx_document_cc_pair_connector_credential",
        table_name="document_by_connector_credential_pair",
    )

View File

@@ -1,3 +1,3 @@
import os
__version__ = os.environ.get("DANSWER_VERSION", "") or "Development"
__version__ = os.environ.get("ONYX_VERSION", "") or "Development"

View File

@@ -12,7 +12,7 @@ GOOGLE_SCOPES = {
"https://www.googleapis.com/auth/admin.directory.user.readonly",
],
DocumentSource.GMAIL: [
"https://www.googleapis.com/auth/gmail.readonfromly",
"https://www.googleapis.com/auth/gmail.readonly",
"https://www.googleapis.com/auth/admin.directory.user.readonly",
"https://www.googleapis.com/auth/admin.directory.group.readonly",
],
@@ -44,9 +44,9 @@ USER_FIELDS = "nextPageToken, users(primaryEmail)"
MISSING_SCOPES_ERROR_STR = "client not authorized for any of the scopes requested"
# Documentation and error messages
SCOPE_DOC_URL = "https://docs.danswer.dev/connectors/google_drive/overview"
SCOPE_DOC_URL = "https://docs.onyx.app/connectors/google_drive/overview"
ONYX_SCOPE_INSTRUCTIONS = (
"You have upgraded Danswer without updating the Google Auth scopes. "
"You have upgraded Onyx without updating the Google Auth scopes. "
f"Please refer to the documentation to learn how to update the scopes: {SCOPE_DOC_URL}"
)

View File

@@ -12,6 +12,7 @@ from sqlalchemy import func
from sqlalchemy import or_
from sqlalchemy import Select
from sqlalchemy import select
from sqlalchemy import tuple_
from sqlalchemy.dialects.postgresql import insert
from sqlalchemy.engine.util import TransactionalContext
from sqlalchemy.exc import OperationalError
@@ -210,6 +211,10 @@ def get_document_counts_for_cc_pairs(
db_session: Session, cc_pair_identifiers: list[ConnectorCredentialPairIdentifier]
) -> Sequence[tuple[int, int, int]]:
"""Returns a sequence of tuples of (connector_id, credential_id, document count)"""
# Prepare a list of (connector_id, credential_id) tuples
cc_ids = [(x.connector_id, x.credential_id) for x in cc_pair_identifiers]
stmt = (
select(
DocumentByConnectorCredentialPair.connector_id,
@@ -217,17 +222,10 @@ def get_document_counts_for_cc_pairs(
func.count(),
)
.where(
or_(
*[
and_(
DocumentByConnectorCredentialPair.connector_id
== cc_pair_identifier.connector_id,
DocumentByConnectorCredentialPair.credential_id
== cc_pair_identifier.credential_id,
)
for cc_pair_identifier in cc_pair_identifiers
]
)
tuple_(
DocumentByConnectorCredentialPair.connector_id,
DocumentByConnectorCredentialPair.credential_id,
).in_(cc_ids)
)
.group_by(
DocumentByConnectorCredentialPair.connector_id,

View File

@@ -865,6 +865,15 @@ class DocumentByConnectorCredentialPair(Base):
"Credential", back_populates="documents_by_credential"
)
__table_args__ = (
Index(
"idx_document_cc_pair_connector_credential",
"connector_id",
"credential_id",
unique=False,
),
)
"""
Messages Tables

View File

@@ -239,9 +239,9 @@ def get_application() -> FastAPI:
include_router_with_global_prefix_prepended(application, chat_router)
include_router_with_global_prefix_prepended(application, query_router)
include_router_with_global_prefix_prepended(application, document_router)
include_router_with_global_prefix_prepended(application, user_router)
include_router_with_global_prefix_prepended(application, admin_query_router)
include_router_with_global_prefix_prepended(application, admin_router)
include_router_with_global_prefix_prepended(application, user_router)
include_router_with_global_prefix_prepended(application, connector_router)
include_router_with_global_prefix_prepended(application, credential_router)
include_router_with_global_prefix_prepended(application, cc_pair_router)

File diff suppressed because it is too large Load Diff

View File

@@ -206,9 +206,9 @@ def list_all_users(
invited=[InvitedUserSnapshot(email=email) for email in invited_emails][
invited_page * USERS_PAGE_SIZE : (invited_page + 1) * USERS_PAGE_SIZE
],
accepted_pages=accepted_count // USERS_PAGE_SIZE + 1,
invited_pages=invited_count // USERS_PAGE_SIZE + 1,
slack_users_pages=slack_users_count // USERS_PAGE_SIZE + 1,
accepted_pages=(accepted_count + USERS_PAGE_SIZE - 1) // USERS_PAGE_SIZE,
invited_pages=(invited_count + USERS_PAGE_SIZE - 1) // USERS_PAGE_SIZE,
slack_users_pages=(slack_users_count + USERS_PAGE_SIZE - 1) // USERS_PAGE_SIZE,
)

View File

@@ -39,6 +39,7 @@ from onyx.key_value_store.interface import KvKeyNotFoundError
from onyx.natural_language_processing.search_nlp_models import EmbeddingModel
from onyx.natural_language_processing.search_nlp_models import warm_up_bi_encoder
from onyx.natural_language_processing.search_nlp_models import warm_up_cross_encoder
from onyx.seeding.load_docs import seed_initial_documents
from onyx.seeding.load_yamls import load_chat_yamls
from onyx.server.manage.llm.models import LLMProviderUpsertRequest
from onyx.server.settings.store import load_settings
@@ -150,7 +151,7 @@ def setup_onyx(
# update multipass indexing setting based on GPU availability
update_default_multipass_indexing(db_session)
# seed_initial_documents(db_session, tenant_id, cohere_enabled)
seed_initial_documents(db_session, tenant_id, cohere_enabled)
def translate_saved_search_settings(db_session: Session) -> None:

View File

@@ -54,7 +54,7 @@ def parse_credentials(env_str: str) -> dict:
@pytest.fixture
def google_gmail_oauth_connector_factory() -> Callable[..., GmailConnector]:
def _connector_factory(
primary_admin_email: str = "admin@danswer-test.com",
primary_admin_email: str = "admin@onyx-test.com",
) -> GmailConnector:
print("Creating GmailConnector with OAuth credentials")
connector = GmailConnector()
@@ -76,7 +76,7 @@ def google_gmail_oauth_connector_factory() -> Callable[..., GmailConnector]:
@pytest.fixture
def google_gmail_service_acct_connector_factory() -> Callable[..., GmailConnector]:
def _connector_factory(
primary_admin_email: str = "admin@danswer-test.com",
primary_admin_email: str = "admin@onyx-test.com",
) -> GmailConnector:
print("Creating GmailConnector with service account credentials")
connector = GmailConnector()

View File

@@ -13,18 +13,18 @@ _THREAD_1_END_TIME = 1730569000
"""
This thread was 4 emails long:
admin@danswer-test.com -> test-group-1@onyx-test.com (containing test_user_1 and test_user_2)
test_user_1@onyx-test.com -> admin@danswer-test.com
admin@danswer-test.com -> test_user_2@onyx-test.com + BCC: test_user_3@onyx-test.com
test_user_3@onyx-test.com -> admin@danswer-test.com
admin@onyx-test.com -> test-group-1@onyx-test.com (containing test_user_1 and test_user_2)
test_user_1@onyx-test.com -> admin@onyx-test.com
admin@onyx-test.com -> test_user_2@onyx-test.com + BCC: test_user_3@onyx-test.com
test_user_3@onyx-test.com -> admin@onyx-test.com
"""
_THREAD_1_BY_ID: dict[str, dict[str, Any]] = {
"192edefb315737c3": {
"email": "admin@danswer-test.com",
"email": "admin@onyx-test.com",
"sections_count": 4,
"primary_owners": set(
[
"admin@danswer-test.com",
"admin@onyx-test.com",
"test_user_1@onyx-test.com",
"test_user_3@onyx-test.com",
]
@@ -32,7 +32,7 @@ _THREAD_1_BY_ID: dict[str, dict[str, Any]] = {
"secondary_owners": set(
[
"test-group-1@onyx-test.com",
"admin@danswer-test.com",
"admin@onyx-test.com",
"test_user_2@onyx-test.com",
"test_user_3@onyx-test.com",
]
@@ -41,15 +41,13 @@ _THREAD_1_BY_ID: dict[str, dict[str, Any]] = {
"192edf020d2f5def": {
"email": "test_user_1@onyx-test.com",
"sections_count": 2,
"primary_owners": set(["admin@danswer-test.com", "test_user_1@onyx-test.com"]),
"secondary_owners": set(
["test-group-1@onyx-test.com", "admin@danswer-test.com"]
),
"primary_owners": set(["admin@onyx-test.com", "test_user_1@onyx-test.com"]),
"secondary_owners": set(["test-group-1@onyx-test.com", "admin@onyx-test.com"]),
},
"192edf020ae90aab": {
"email": "test_user_2@onyx-test.com",
"sections_count": 2,
"primary_owners": set(["admin@danswer-test.com"]),
"primary_owners": set(["admin@onyx-test.com"]),
"secondary_owners": set(
["test-group-1@onyx-test.com", "test_user_2@onyx-test.com"]
),
@@ -57,10 +55,10 @@ _THREAD_1_BY_ID: dict[str, dict[str, Any]] = {
"192edf18316015fa": {
"email": "test_user_3@onyx-test.com",
"sections_count": 2,
"primary_owners": set(["admin@danswer-test.com", "test_user_3@onyx-test.com"]),
"primary_owners": set(["admin@onyx-test.com", "test_user_3@onyx-test.com"]),
"secondary_owners": set(
[
"admin@danswer-test.com",
"admin@onyx-test.com",
"test_user_2@onyx-test.com",
"test_user_3@onyx-test.com",
]

View File

@@ -28,12 +28,12 @@ load_env_vars()
_USER_TO_OAUTH_CREDENTIALS_MAP = {
"admin@danswer-test.com": "GOOGLE_DRIVE_OAUTH_CREDENTIALS_JSON_STR",
"admin@onyx-test.com": "GOOGLE_DRIVE_OAUTH_CREDENTIALS_JSON_STR",
"test_user_1@onyx-test.com": "GOOGLE_DRIVE_OAUTH_CREDENTIALS_JSON_STR_TEST_USER_1",
}
_USER_TO_SERVICE_ACCOUNT_CREDENTIALS_MAP = {
"admin@danswer-test.com": "GOOGLE_DRIVE_SERVICE_ACCOUNT_JSON_STR",
"admin@onyx-test.com": "GOOGLE_DRIVE_SERVICE_ACCOUNT_JSON_STR",
}

View File

@@ -53,7 +53,7 @@ SECTIONS_FOLDER_URL = (
"https://drive.google.com/drive/u/5/folders/1loe6XJ-pJxu9YYPv7cF3Hmz296VNzA33"
)
ADMIN_EMAIL = "admin@danswer-test.com"
ADMIN_EMAIL = "admin@onyx-test.com"
TEST_USER_1_EMAIL = "test_user_1@onyx-test.com"
TEST_USER_2_EMAIL = "test_user_2@onyx-test.com"
TEST_USER_3_EMAIL = "test_user_3@onyx-test.com"

View File

@@ -15,7 +15,7 @@ from tests.integration.connector_job_tests.slack.slack_api_utils import SlackMan
def slack_test_setup() -> Generator[tuple[dict[str, Any], dict[str, Any]], None, None]:
slack_client = SlackManager.get_slack_client(os.environ["SLACK_BOT_TOKEN"])
admin_user_id = SlackManager.build_slack_user_email_id_map(slack_client)[
"admin@danswer-test.com"
"admin@onyx-test.com"
]
(

View File

@@ -35,7 +35,7 @@ def test_slack_permission_sync(
# Creating an admin user (first user created is automatically an admin)
admin_user: DATestUser = UserManager.create(
email="admin@danswer-test.com",
email="admin@onyx-test.com",
)
# Creating a non-admin user
@@ -231,7 +231,7 @@ def test_slack_group_permission_sync(
# Creating an admin user (first user created is automatically an admin)
admin_user: DATestUser = UserManager.create(
email="admin@danswer-test.com",
email="admin@onyx-test.com",
)
# Creating a non-admin user

View File

@@ -34,7 +34,7 @@ def test_slack_prune(
# Creating an admin user (first user created is automatically an admin)
admin_user: DATestUser = UserManager.create(
email="admin@danswer-test.com",
email="admin@onyx-test.com",
)
# Creating a non-admin user

View File

@@ -23,7 +23,7 @@
},
{
"name": "From",
"value": "Test Admin Admin <admin@danswer-test.com>"
"value": "Test Admin Admin <admin@onyx-test.com>"
},
{
"name": "To",
@@ -56,7 +56,7 @@
"headers": [
{
"name": "Delivered-To",
"value": "admin@danswer-test.com"
"value": "admin@onyx-test.com"
},
{
"name": "Received",
@@ -84,7 +84,7 @@
},
{
"name": "Received",
"value": "from mail-sor-f41.google.com (mail-sor-f41.google.com. [209.85.220.41]) by mx.google.com with SMTPS id a1e0cc1a2514c-855dae589a1sor1192309241.6.2024.11.02.10.33.34 for <admin@danswer-test.com> (Google Transport Security); Sat, 02 Nov 2024 10:33:34 -0700 (PDT)"
"value": "from mail-sor-f41.google.com (mail-sor-f41.google.com. [209.85.220.41]) by mx.google.com with SMTPS id a1e0cc1a2514c-855dae589a1sor1192309241.6.2024.11.02.10.33.34 for <admin@onyx-test.com> (Google Transport Security); Sat, 02 Nov 2024 10:33:34 -0700 (PDT)"
},
{
"name": "Received-SPF",
@@ -144,7 +144,7 @@
},
{
"name": "To",
"value": "Test Admin Admin <admin@danswer-test.com>"
"value": "Test Admin Admin <admin@onyx-test.com>"
},
{
"name": "Content-Type",
@@ -201,7 +201,7 @@
},
{
"name": "From",
"value": "Test Admin Admin <admin@danswer-test.com>"
"value": "Test Admin Admin <admin@onyx-test.com>"
},
{
"name": "To",
@@ -234,7 +234,7 @@
"headers": [
{
"name": "Delivered-To",
"value": "admin@danswer-test.com"
"value": "admin@onyx-test.com"
},
{
"name": "Received",
@@ -262,7 +262,7 @@
},
{
"name": "Received",
"value": "from mail-sor-f41.google.com (mail-sor-f41.google.com. [209.85.220.41]) by mx.google.com with SMTPS id 71dfb90a1353d-5106f3f9037sor1051490e0c.7.2024.11.02.10.35.06 for <admin@danswer-test.com> (Google Transport Security); Sat, 02 Nov 2024 10:35:06 -0700 (PDT)"
"value": "from mail-sor-f41.google.com (mail-sor-f41.google.com. [209.85.220.41]) by mx.google.com with SMTPS id 71dfb90a1353d-5106f3f9037sor1051490e0c.7.2024.11.02.10.35.06 for <admin@onyx-test.com> (Google Transport Security); Sat, 02 Nov 2024 10:35:06 -0700 (PDT)"
},
{
"name": "Received-SPF",
@@ -322,7 +322,7 @@
},
{
"name": "To",
"value": "Test Admin Admin <admin@danswer-test.com>"
"value": "Test Admin Admin <admin@onyx-test.com>"
},
{
"name": "Content-Type",

View File

@@ -14,7 +14,7 @@ spec:
spec:
containers:
- name: celery-beat
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.4
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.20
imagePullPolicy: IfNotPresent
command:
[
@@ -30,7 +30,7 @@ spec:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: DANSWER_VERSION
- name: ONYX_VERSION
value: "v0.11.0-cloud.beta.8"
envFrom:
- configMapRef:

View File

@@ -14,7 +14,7 @@ spec:
spec:
containers:
- name: celery-worker-heavy
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.4
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.20
imagePullPolicy: IfNotPresent
command:
[
@@ -33,7 +33,7 @@ spec:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: DANSWER_VERSION
- name: ONYX_VERSION
value: "v0.11.0-cloud.beta.8"
envFrom:
- configMapRef:

View File

@@ -14,7 +14,7 @@ spec:
spec:
containers:
- name: celery-worker-indexing
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.4
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.20
imagePullPolicy: IfNotPresent
command:
[
@@ -35,7 +35,7 @@ spec:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: DANSWER_VERSION
- name: ONYX_VERSION
value: "v0.11.0-cloud.beta.8"
envFrom:
- configMapRef:

View File

@@ -14,7 +14,7 @@ spec:
spec:
containers:
- name: celery-worker-light
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.4
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.20
imagePullPolicy: IfNotPresent
command:
[
@@ -35,7 +35,7 @@ spec:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: DANSWER_VERSION
- name: ONYX_VERSION
value: "v0.11.0-cloud.beta.8"
envFrom:
- configMapRef:

View File

@@ -14,7 +14,7 @@ spec:
spec:
containers:
- name: celery-worker-primary
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.4
image: onyxdotapp/onyx-backend-cloud:v0.14.0-cloud.beta.20
imagePullPolicy: IfNotPresent
command:
[
@@ -35,7 +35,7 @@ spec:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: DANSWER_VERSION
- name: ONYX_VERSION
value: "v0.11.0-cloud.beta.8"
envFrom:
- configMapRef:

View File

@@ -47,6 +47,7 @@ services:
- LITELLM_EXTRA_HEADERS=${LITELLM_EXTRA_HEADERS:-}
- BING_API_KEY=${BING_API_KEY:-}
- DISABLE_LLM_DOC_RELEVANCE=${DISABLE_LLM_DOC_RELEVANCE:-}
- GEN_AI_API_KEY=${GEN_AI_API_KEY:-}
# if set, allows for the use of the token budget system
- TOKEN_BUDGET_GLOBALLY_ENABLED=${TOKEN_BUDGET_GLOBALLY_ENABLED:-}
# Enables the use of bedrock models
@@ -141,6 +142,7 @@ services:
- GENERATIVE_MODEL_ACCESS_CHECK_FREQ=${GENERATIVE_MODEL_ACCESS_CHECK_FREQ:-}
- DISABLE_LITELLM_STREAMING=${DISABLE_LITELLM_STREAMING:-}
- LITELLM_EXTRA_HEADERS=${LITELLM_EXTRA_HEADERS:-}
- GEN_AI_API_KEY=${GEN_AI_API_KEY:-}
- BING_API_KEY=${BING_API_KEY:-}
# Query Options
- DOC_TIME_DECAY=${DOC_TIME_DECAY:-} # Recency Bias for search results, decay at 1 / (1 + DOC_TIME_DECAY * x years)

View File

@@ -7,10 +7,10 @@ have a contract or agreement with DanswerAI, you are not permitted to use the En
Edition features outside of personal development or testing purposes. Please reach out to \
founders@onyx.app for more information. Please visit https://github.com/onyx-dot-app/onyx"
# Default DANSWER_VERSION, typically overridden during builds by GitHub Actions.
ARG DANSWER_VERSION=0.8-dev
ENV DANSWER_VERSION=${DANSWER_VERSION}
RUN echo "DANSWER_VERSION: ${DANSWER_VERSION}"
# Default ONYX_VERSION, typically overridden during builds by GitHub Actions.
ARG ONYX_VERSION=0.8-dev
ENV ONYX_VERSION=${ONYX_VERSION}
RUN echo "ONYX_VERSION: ${ONYX_VERSION}"
# Step 1. Install dependencies + rebuild the source code only when needed
FROM base AS builder

View File

@@ -1,6 +1,6 @@
// Get Onyx Web Version
const { version: package_version } = require("./package.json"); // version from package.json
const env_version = process.env.DANSWER_VERSION; // version from env variable
const env_version = process.env.ONYX_VERSION; // version from env variable
// Use env version if set & valid, otherwise default to package version
const version = env_version || package_version;

View File

@@ -16,7 +16,7 @@ export default defineConfig({
{
// dependency for admin workflows
name: "admin_setup",
testMatch: /.*\admin_auth.setup\.ts/,
testMatch: /.*\admin_auth\.setup\.ts/,
},
{
// tests admin workflows

Binary file not shown.

After

Width:  |  Height:  |  Size: 58 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 49 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 28 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 51 KiB

View File

@@ -595,6 +595,7 @@ export function AssistantEditor({
tooltip="Used to identify the Assistant in the UI."
label="Name"
placeholder="e.g. 'Email Assistant'"
aria-label="assistant-name-input"
/>
<TextFormField
@@ -602,6 +603,7 @@ export function AssistantEditor({
name="description"
label="Description"
placeholder="e.g. 'Use this Assistant to help draft professional emails'"
data-testid="assistant-description-input"
/>
<TextFormField
@@ -610,9 +612,7 @@ export function AssistantEditor({
label="Instructions"
isTextArea={true}
placeholder="e.g. 'You are a professional email writing assistant that always uses a polite enthusiastic tone, emphasizes action items, and leaves blanks for the human to fill in when you have unknowns'"
onChange={(e) => {
setFieldValue("system_prompt", e.target.value);
}}
data-testid="assistant-instructions-input"
/>
<div>

View File

@@ -24,6 +24,7 @@ import { CHAT_SESSION_ID_KEY } from "@/lib/drag/constants";
import Cookies from "js-cookie";
import { Popover } from "@/components/popover/Popover";
import { ChatSession } from "../interfaces";
import { useChatContext } from "@/components/context/ChatContext";
const FolderItem = ({
folder,
currentChatId,
@@ -40,6 +41,7 @@ const FolderItem = ({
showShareModal: ((chatSession: ChatSession) => void) | undefined;
showDeleteModal: ((chatSession: ChatSession) => void) | undefined;
}) => {
const { refreshChatSessions } = useChatContext();
const [isExpanded, setIsExpanded] = useState<boolean>(isInitiallyExpanded);
const [isEditing, setIsEditing] = useState<boolean>(initiallySelected);
const [editedFolderName, setEditedFolderName] = useState<string>(
@@ -93,7 +95,7 @@ const FolderItem = ({
if (!continueEditing) {
setIsEditing(false);
}
router.refresh(); // Refresh values to update the sidebar
router.refresh();
} catch (error) {
setPopup({ message: "Failed to save folder name", type: "error" });
}
@@ -154,7 +156,8 @@ const FolderItem = ({
const chatSessionId = event.dataTransfer.getData(CHAT_SESSION_ID_KEY);
try {
await addChatToFolder(folder.folder_id, chatSessionId);
router.refresh(); // Refresh to show the updated folder contents
await refreshChatSessions();
router.refresh();
} catch (error) {
setPopup({
message: "Failed to add chat session to folder",

View File

@@ -1,6 +1,6 @@
import { AdminPageTitle } from "@/components/admin/Title";
import BillingInformationPage from "./BillingInformationPage";
import { FaCloud } from "react-icons/fa";
import { MdOutlineCreditCard } from "react-icons/md";
export interface BillingInformation {
seats: number;
@@ -14,8 +14,8 @@ export default function page() {
return (
<div className="container max-w-4xl">
<AdminPageTitle
title="Cloud Settings"
icon={<FaCloud size={32} className="my-auto" />}
title="Billing Information"
icon={<MdOutlineCreditCard size={32} className="my-auto" />}
/>
<BillingInformationPage />
</div>

View File

@@ -47,12 +47,8 @@ export function Modal({
}, []);
const handleMouseDown = (e: React.MouseEvent<HTMLDivElement>) => {
if (
onOutsideClick &&
modalRef.current &&
!modalRef.current.contains(e.target as Node) &&
!isEventWithinRef(e.nativeEvent, modalRef)
) {
// Only close if the user clicked exactly on the overlay (and not on a child element).
if (onOutsideClick && e.target === e.currentTarget) {
onOutsideClick();
}
};

View File

@@ -30,7 +30,7 @@ import { User } from "@/lib/types";
import { usePathname } from "next/navigation";
import { SettingsContext } from "../settings/SettingsProvider";
import { useContext } from "react";
import { Cloud } from "@phosphor-icons/react";
import { MdOutlineCreditCard } from "react-icons/md";
export function ClientLayout({
user,
@@ -386,14 +386,14 @@ export function ClientLayout({
{
name: (
<div className="flex">
<Cloud
<MdOutlineCreditCard
className="text-icon-settings-sidebar"
size={18}
/>
<div className="ml-1">Cloud Settings</div>
<div className="ml-1">Billing</div>
</div>
),
link: "/admin/cloud-settings",
link: "/admin/billing",
},
]
: []),

View File

@@ -7,7 +7,7 @@ import {
import { redirect } from "next/navigation";
import { ClientLayout } from "./ClientLayout";
import {
SERVER_SIDE_ONLY__CLOUD_ENABLED,
NEXT_PUBLIC_CLOUD_ENABLED,
SERVER_SIDE_ONLY__PAID_ENTERPRISE_FEATURES_ENABLED,
} from "@/lib/constants";
import { AnnouncementBanner } from "../header/AnnouncementBanner";
@@ -45,7 +45,7 @@ export async function Layout({ children }: { children: React.ReactNode }) {
return (
<ClientLayout
enableEnterprise={SERVER_SIDE_ONLY__PAID_ENTERPRISE_FEATURES_ENABLED}
enableCloud={SERVER_SIDE_ONLY__CLOUD_ENABLED}
enableCloud={NEXT_PUBLIC_CLOUD_ENABLED}
user={user}
>
<AnnouncementBanner />

View File

@@ -145,6 +145,7 @@ export function TextFormField({
min,
onChange,
width,
vertical,
}: {
value?: string;
name: string;
@@ -170,6 +171,7 @@ export function TextFormField({
min?: number;
onChange?: (e: React.ChangeEvent<HTMLInputElement>) => void;
width?: string;
vertical?: boolean;
}) {
let heightString = defaultHeight || "";
if (isTextArea && !heightString) {
@@ -209,14 +211,16 @@ export function TextFormField({
return (
<div className={`w-full ${width}`}>
<div className="flex flex-col gap-x-2 items-start">
{!removeLabel && (
<Label className={sizeClass.label} small={small}>
{label}
</Label>
)}
{optional ? <span>(optional) </span> : ""}
{tooltip && <ToolTipDetails>{tooltip}</ToolTipDetails>}
<div className={`flex ${vertical ? "flex-col" : "flex-row"} items-start`}>
<div className="flex gap-x-2 items-center">
{!removeLabel && (
<Label className={sizeClass.label} small={small}>
{label}
</Label>
)}
{optional ? <span>(optional) </span> : ""}
{tooltip && <ToolTipDetails>{tooltip}</ToolTipDetails>}
</div>
{error ? (
<ManualErrorMessage>{error}</ManualErrorMessage>
) : (
@@ -237,6 +241,7 @@ export function TextFormField({
as={isTextArea ? "textarea" : "input"}
type={type}
defaultValue={value}
data-testid={name}
name={name}
id={name}
className={`

View File

@@ -139,10 +139,9 @@ export default function FunctionalHeader({
<NewChatIcon size={20} />
</div>
</Link>
</div>
<div
style={{ transition: "width 0.30s ease-out" }}
className={`
<div
style={{ transition: "width 0.30s ease-out" }}
className={`
hidden
md:flex
mx-auto
@@ -153,7 +152,8 @@ export default function FunctionalHeader({
h-full
${documentSidebarToggled ? "w-[400px]" : "w-[0px]"}
`}
/>
/>
</div>
{page != "assistants" && (
<div

View File

@@ -82,7 +82,7 @@ export function AnnouncementBanner() {
Your trial is ending soon - submit your billing information to
continue using Onyx.{" "}
<Link
href="/admin/cloud-settings"
href="/admin/billing"
className="ml-2 underline cursor-pointer"
>
Update here

View File

@@ -44,6 +44,7 @@ const EditPropertyModal = ({
</h2>
<TextFormField
vertical
label={propertyDetails || ""}
name="propertyValue"
placeholder="Property value"

View File

@@ -74,7 +74,4 @@ export const NEXT_PUBLIC_CLOUD_ENABLED =
export const REGISTRATION_URL =
process.env.INTERNAL_URL || "http://127.0.0.1:3001";
export const SERVER_SIDE_ONLY__CLOUD_ENABLED =
process.env.NEXT_PUBLIC_CLOUD_ENABLED?.toLowerCase() === "true";
export const TEST_ENV = process.env.TEST_ENV?.toLowerCase() === "true";

View File

@@ -2,7 +2,7 @@ import { cookies } from "next/headers";
import { User } from "./types";
import { buildUrl } from "./utilsSS";
import { ReadonlyRequestCookies } from "next/dist/server/web/spec-extension/adapters/request-cookies";
import { AuthType, SERVER_SIDE_ONLY__CLOUD_ENABLED } from "./constants";
import { AuthType, NEXT_PUBLIC_CLOUD_ENABLED } from "./constants";
export interface AuthTypeMetadata {
authType: AuthType;
@@ -22,7 +22,7 @@ export const getAuthTypeMetadataSS = async (): Promise<AuthTypeMetadata> => {
let authType: AuthType;
// Override fastapi users auth so we can use both
if (SERVER_SIDE_ONLY__CLOUD_ENABLED) {
if (NEXT_PUBLIC_CLOUD_ENABLED) {
authType = "cloud";
} else {
authType = data.auth_type as AuthType;

View File

@@ -14,7 +14,7 @@ export const config = {
"/admin/standard-answer/:path*",
// Cloud only
"/admin/cloud-settings/:path*",
"/admin/billing/:path*",
],
};

View File

@@ -1,14 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Connectors - Add Connector",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/add-connector");
await expect(page.locator("h1.text-3xl")).toHaveText("Add Connector");
await expect(page.locator("h1.text-lg").nth(0)).toHaveText(/^Storage/);
}
);

View File

@@ -1,19 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - User Management - API Keys",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/api-key");
await expect(page.locator("h1.text-3xl")).toHaveText("API Keys");
await expect(page.locator("p.text-sm")).toHaveText(
/^API Keys allow you to access Onyx APIs programmatically/
);
await expect(
page.getByRole("button", { name: "Create API Key" })
).toHaveCount(1);
}
);

View File

@@ -1,19 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Custom Assistants - Assistants",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/assistants");
await expect(page.locator("h1.text-3xl")).toHaveText("Assistants");
await expect(page.locator("p.text-sm").nth(0)).toHaveText(
/^Assistants are a way to build/
);
const generalTextLocator = page.locator("tr.border-b td").nth(1);
await expect(generalTextLocator.locator("p.text")).toHaveText("General");
}
);

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Custom Assistants - Slack Bots",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/bots");
await expect(page.locator("h1.text-3xl")).toHaveText("Slack Bots");
await expect(page.locator("p.text-sm").nth(0)).toHaveText(
/^Setup Slack bots that connect to Onyx./
);
}
);

View File

@@ -1,18 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Configuration - Document Processing",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto(
"http://localhost:3000/admin/configuration/document-processing"
);
await expect(page.locator("h1.text-3xl")).toHaveText("Document Processing");
await expect(page.locator("h3.text-2xl")).toHaveText(
"Process with Unstructured API"
);
}
);

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Configuration - LLM",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/configuration/llm");
await expect(page.locator("h1.text-3xl")).toHaveText("LLM Setup");
await expect(page.locator("h1.text-lg").nth(0)).toHaveText(
"Enabled LLM Providers"
);
}
);

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Configuration - Search Settings",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/configuration/search");
await expect(page.locator("h1.text-3xl")).toHaveText("Search Settings");
await expect(page.locator("h1.text-lg").nth(0)).toHaveText(
"Embedding Model"
);
}
);

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Document Management - Feedback",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/documents/explorer");
await expect(page.locator("h1.text-3xl")).toHaveText("Document Explorer");
await expect(page.locator("div.flex.text-emphasis.mt-3")).toHaveText(
"Search for a document above to modify its boost or hide it from searches."
);
}
);

View File

@@ -1,19 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Document Management - Feedback",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/documents/feedback");
await expect(page.locator("h1.text-3xl")).toHaveText("Document Feedback");
await expect(page.locator("h1.text-lg").nth(0)).toHaveText(
"Most Liked Documents"
);
await expect(page.locator("h1.text-lg").nth(1)).toHaveText(
"Most Disliked Documents"
);
}
);

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Document Management - Document Sets",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/documents/sets");
await expect(page.locator("h1.text-3xl")).toHaveText("Document Sets");
await expect(page.locator("p.text-sm")).toHaveText(
/^Document Sets allow you to group logically connected documents into a single bundle./
);
}
);

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - User Management - Groups",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/groups");
await expect(page.locator("h1.text-3xl")).toHaveText("Manage User Groups");
await expect(
page.getByRole("button", { name: "Create New User Group" })
).toHaveCount(1);
}
);

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Connectors - Existing Connectors",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/indexing/status");
await expect(page.locator("h1.text-3xl")).toHaveText("Existing Connectors");
await expect(page.locator("p.text-sm")).toHaveText(
/^It looks like you don't have any connectors setup yet./
);
}
);

View File

@@ -0,0 +1,52 @@
import { test, expect } from "@playwright/test";
import chromaticSnpashots from "./chromaticSnpashots.json";
import type { Page } from "@playwright/test";
async function verifyAdminPageNavigation(
page: Page,
path: string,
pageTitle: string,
options?: {
paragraphText?: string | RegExp;
buttonName?: string;
subHeaderText?: string;
}
) {
await page.goto(`http://localhost:3000/admin/${path}`);
await expect(page.locator("h1.text-3xl")).toHaveText(pageTitle);
if (options?.paragraphText) {
await expect(page.locator("p.text-sm").nth(0)).toHaveText(
options.paragraphText
);
}
if (options?.buttonName) {
await expect(
page.getByRole("button", { name: options.buttonName })
).toHaveCount(1);
}
if (options?.subHeaderText) {
await expect(page.locator("h1.text-lg").nth(0)).toHaveText(
options.subHeaderText
);
}
}
for (const chromaticSnapshot of chromaticSnpashots) {
test(
`Admin - ${chromaticSnapshot.name}`,
{
tag: "@admin",
},
async ({ page }) => {
await verifyAdminPageNavigation(
page,
chromaticSnapshot.path,
chromaticSnapshot.pageTitle,
chromaticSnapshot.options
);
}
);
}

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Performance - Custom Analytics",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/performance/custom-analytics");
await expect(page.locator("h1.text-3xl")).toHaveText("Custom Analytics");
await expect(page.locator("div.font-medium").nth(0)).toHaveText(
"Custom Analytics is not enabled."
);
}
);

View File

@@ -1,22 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test.describe("Admin Performance Query History", () => {
// Ignores the diff for elements targeted by the specified list of selectors
// exclude button since they change based on the date
test.use({ ignoreSelectors: ["button"] });
test(
"Admin - Performance - Query History",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/performance/query-history");
await expect(page.locator("h1.text-3xl")).toHaveText("Query History");
await expect(page.locator("p.text-sm").nth(0)).toHaveText(
"Feedback Type"
);
}
);
});

View File

@@ -1,20 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test.describe("Admin Performance Usage", () => {
// Ignores the diff for elements targeted by the specified list of selectors
// exclude button and svg since they change based on the date
test.use({ ignoreSelectors: ["button", "svg"] });
test(
"Admin - Performance - Usage Statistics",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
await page.goto("http://localhost:3000/admin/performance/usage");
await expect(page.locator("h1.text-3xl")).toHaveText("Usage Statistics");
await expect(page.locator("h1.text-lg").nth(0)).toHaveText("Usage");
await expect(page.locator("h1.text-lg").nth(1)).toHaveText("Feedback");
}
);
});

View File

@@ -1,19 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
test(
"Admin - Settings - Workspace Settings",
{
tag: "@admin",
},
async ({ page }, testInfo) => {
// Test simple loading
await page.goto("http://localhost:3000/admin/settings");
await expect(page.locator("h1.text-3xl")).toHaveText("Workspace Settings");
await expect(page.locator("p.text-sm").nth(0)).toHaveText(
/^Manage general Onyx settings applicable to all users in the workspace./
);
await expect(
page.getByRole("button", { name: "Set Retention Limit" })
).toHaveCount(1);
}
);

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
// Smoke test: the Standard Answers admin page loads with its heading
// and descriptive paragraph.
test(
  "Admin - Custom Assistants - Standard Answers",
  {
    tag: "@admin",
  },
  async ({ page }, testInfo) => {
    // Navigate and verify the page heading.
    await page.goto("http://localhost:3000/admin/standard-answer");
    const heading = page.locator("h1.text-3xl");
    await expect(heading).toHaveText("Standard Answers");

    // The intro paragraph opens with the management blurb.
    const intro = page.locator("p.text-sm").nth(0);
    await expect(intro).toHaveText(
      /^Manage the standard answers for pre-defined questions./
    );
  }
);

View File

@@ -1,22 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
// Smoke test: the Token Rate Limits admin page renders its heading,
// intro paragraph, create button, and the global-limits section title.
test(
  "Admin - User Management - Token Rate Limits",
  {
    tag: "@admin",
  },
  // `testInfo` was declared but never used — dropped from the callback.
  async ({ page }) => {
    // Test simple loading
    await page.goto("http://localhost:3000/admin/token-rate-limits");
    await expect(page.locator("h1.text-3xl")).toHaveText("Token Rate Limits");
    // Regex intentionally mirrors the app's on-screen copy verbatim
    // (including its phrasing) so the assertion tracks the real UI text.
    await expect(page.locator("p.text-sm").nth(0)).toHaveText(
      /^Token rate limits enable you control how many tokens can be spent in a given time period./
    );
    await expect(
      page.getByRole("button", { name: "Create a Token Rate Limit" })
    ).toHaveCount(1);
    await expect(page.locator("h1.text-lg")).toHaveText(
      "Global Token Rate Limits"
    );
  }
);

View File

@@ -1,16 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
// Smoke test: the Tools admin page loads with its heading and
// descriptive paragraph.
test(
  "Admin - Custom Assistants - Tools",
  {
    tag: "@admin",
  },
  async ({ page }, testInfo) => {
    // Navigate and verify the page heading.
    await page.goto("http://localhost:3000/admin/tools");
    const heading = page.locator("h1.text-3xl");
    await expect(heading).toHaveText("Tools");

    // Single small paragraph describing what tools are for.
    const description = page.locator("p.text-sm");
    await expect(description).toHaveText(
      "Tools allow assistants to retrieve information or take actions."
    );
  }
);

View File

@@ -1,13 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
// Smoke test: the Manage Users admin page loads with its heading.
// NOTE(review): the title says "Groups" but the page under test is
// /admin/users ("Manage Users") — presumably the title should say
// "Users"; confirm before renaming, since test titles feed reporting.
test(
  "Admin - User Management - Groups",
  {
    tag: "@admin",
  },
  // `testInfo` was declared but never used — dropped from the callback.
  async ({ page }) => {
    // Test simple loading
    await page.goto("http://localhost:3000/admin/users");
    await expect(page.locator("h1.text-3xl")).toHaveText("Manage Users");
  }
);

View File

@@ -1,18 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
// Smoke test: the Whitelabeling admin page loads with its heading,
// the two form labels, and the update button.
test(
  "Admin - Performance - Whitelabeling",
  {
    tag: "@admin",
  },
  async ({ page }, testInfo) => {
    // Navigate and verify the page heading.
    await page.goto("http://localhost:3000/admin/whitelabeling");
    const heading = page.locator("h1.text-3xl");
    await expect(heading).toHaveText("Whitelabeling");

    // Form labels for the two whitelabeling inputs, in order.
    const labels = page.locator("div.block");
    await expect(labels.nth(0)).toHaveText("Application Name");
    await expect(labels.nth(1)).toHaveText("Custom Logo");

    // Exactly one "Update" button should be rendered.
    const updateButton = page.getByRole("button", { name: "Update" });
    await expect(updateButton).toHaveCount(1);
  }
);

View File

@@ -1,27 +0,0 @@
import { test, expect } from "@chromatic-com/playwright";
// Smoke test: the chat page loads with the "General" assistant label,
// the rounded avatar element, and its SVG icon.
test(
  "Chat",
  {
    tag: "@admin",
  },
  async ({ page }, testInfo) => {
    // Navigate to the chat page.
    await page.goto("http://localhost:3000/chat");

    // The assistant name "General" appears in the header element.
    const assistantName = page.locator("div.flex.items-center span.font-bold");
    await expect(assistantName).toHaveText("General");

    // The rounded avatar container should be visible.
    const avatar = page.locator(
      "div.flex.justify-center div.bg-black.rounded-full"
    );
    await expect(avatar).toBeVisible();

    // ...as should the SVG icon inside the centered container.
    const icon = page.locator("div.flex.justify-center svg.w-5.h-5");
    await expect(icon).toBeVisible();
  }
);

View File

@@ -0,0 +1,108 @@
[
{
"name": "Document Management - Explorer",
"path": "documents/explorer",
"pageTitle": "Document Explorer"
},
{
"name": "Connectors - Add Connector",
"path": "add-connector",
"pageTitle": "Add Connector",
"options": {
"subHeaderText": "Storage"
}
},
{
"name": "Custom Assistants - Assistants",
"path": "assistants",
"pageTitle": "Assistants",
"options": {
"paragraphText": "Assistants are a way to build custom search/question-answering experiences for different use cases."
}
},
{
"name": "Configuration - Document Processing",
"path": "configuration/document-processing",
"pageTitle": "Document Processing"
},
{
"name": "Document Management - Document Sets",
"path": "documents/sets",
"pageTitle": "Document Sets",
"options": {
"paragraphText": "Document Sets allow you to group logically connected documents into a single bundle. These can then be used as a filter when performing searches to control the scope of information Onyx searches over."
}
},
{
"name": "Custom Assistants - Slack Bots",
"path": "bots",
"pageTitle": "Slack Bots",
"options": {
"paragraphText": "Setup Slack bots that connect to Onyx. Once setup, you will be able to ask questions to Onyx directly from Slack. Additionally, you can:"
}
},
{
"name": "Custom Assistants - Standard Answers",
"path": "standard-answer",
"pageTitle": "Standard Answers"
},
{
"name": "Performance - Usage Statistics",
"path": "performance/usage",
"pageTitle": "Usage Statistics"
},
{
"name": "Document Management - Feedback",
"path": "documents/feedback",
"pageTitle": "Document Feedback",
"options": {
"subHeaderText": "Most Liked Documents"
}
},
{
"name": "Configuration - LLM",
"path": "configuration/llm",
"pageTitle": "LLM Setup"
},
{
"name": "Connectors - Existing Connectors",
"path": "indexing/status",
"pageTitle": "Existing Connectors"
},
{
"name": "User Management - Groups",
"path": "groups",
"pageTitle": "Manage User Groups"
},
{
"name": "Performance - Whitelabeling",
"path": "whitelabeling",
"pageTitle": "Whitelabeling"
},
{
"name": "Configuration - Search Settings",
"path": "configuration/search",
"pageTitle": "Search Settings",
"options": {
"subHeaderText": "Embedding Model"
}
},
{
"name": "Custom Assistants - Tools",
"path": "tools",
"pageTitle": "Tools",
"options": {
"paragraphText": "Tools allow assistants to retrieve information or take actions."
}
},
{
"name": "User Management - Token Rate Limits",
"path": "token-rate-limits",
"pageTitle": "Token Rate Limits",
"options": {
"paragraphText": "Token rate limits enable you control how many tokens can be spent in a given time period. With token rate limits, you can:",
"buttonName": "Create a Token Rate Limit",
"subHeaderText": "Global Token Rate Limits"
}
}
]

View File

@@ -1,4 +1,3 @@
// constants.js
export const TEST_CREDENTIALS = {
email: "admin_user@test.com",
password: "test",