Compare commits

1216 Commits

Author SHA1 Message Date
Dane Urban
5848975679 Remove comment 2026-01-08 19:21:24 -08:00
Dane Urban
dcc330010e Remove comment 2026-01-08 19:21:08 -08:00
Dane Urban
d0f5f1f5ae Handle error and log 2026-01-08 19:20:28 -08:00
Dane Urban
3e475993ff Change which event loop we get 2026-01-08 19:16:12 -08:00
Dane Urban
7c2b5fa822 Change logging 2026-01-08 17:29:00 -08:00
Dane Urban
409cfdc788 nits 2026-01-08 17:23:08 -08:00
dependabot[bot]
7a9a132739 chore(deps): bump werkzeug from 3.1.4 to 3.1.5 in /backend/requirements (#7300)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-01-09 00:08:17 +00:00
dependabot[bot]
33bad8c37b chore(deps): bump authlib from 1.6.5 to 1.6.6 in /backend/requirements (#7299)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-01-08 23:28:19 +00:00
Raunak Bhagat
9241ff7a75 refactor: migrate hooks to /hooks directory and update imports (#7295) 2026-01-08 14:57:06 -08:00
Chris Weaver
0a25bc30ec fix: auto-pause (#7289) 2026-01-08 14:45:30 -08:00
Raunak Bhagat
e359732f4c feat: add SvgEmpty icon and alphabetize icon exports (#7294) 2026-01-08 21:40:55 +00:00
Evan Lohn
be47866a4d chore: logging confluence perm sync errors better (#7291) 2026-01-08 20:24:03 +00:00
Wenxi
8a20540559 fix: use tag constraint name instead of index elements (#7288) 2026-01-08 18:52:12 +00:00
Jamison Lahman
e6e1f2860a chore(fe): remove items-center from onboarding cards (#7285) 2026-01-08 18:28:36 +00:00
Evan Lohn
fc3f433df7 fix: usage limits for indexing (#7287) 2026-01-08 18:26:52 +00:00
Evan Lohn
016caf453b fix: indexing and usage bugs (#7279) 2026-01-08 17:08:20 +00:00
Jamison Lahman
a9de25053f refactor(fe): remove "container" divs (#7271) 2026-01-08 07:23:51 +00:00
SubashMohan
8ef8dfdeb7 Cleanup/userfile indexing (#7221) 2026-01-08 05:07:19 +00:00
Danelegend
0643b626d9 fix(files): Display protected file errors (#7265)
Co-authored-by: Dane Urban <durban@Danes-MacBook-Pro.local>
2026-01-08 00:31:26 +00:00
Yuhong Sun
64a0eb52e0 chore: limit Deep Research to sequential calls only (#7275) 2026-01-08 00:03:09 +00:00
Evan Lohn
b82ffc82cf chore: upgrade client libs (#7249) 2026-01-07 23:59:57 +00:00
Danelegend
b3014b9911 fix(ui): deep research flag in chat edit (#7276)
Co-authored-by: Dane Urban <durban@Danes-MacBook-Pro.local>
2026-01-07 23:52:52 +00:00
Yuhong Sun
439707c395 chore: exa prompt fix (#7274) 2026-01-07 23:36:27 +00:00
dependabot[bot]
65351aa8bd chore(deps): bump marshmallow from 3.26.1 to 3.26.2 in /backend/requirements (#6970)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-01-07 23:33:59 +00:00
Wenxi
b44ee07eaf feat: improved backend driven notifications and new notification display (#7246) 2026-01-07 22:57:49 +00:00
Justin Tahara
065d391c08 fix(web crawler): Fixing decoding bytes issue (#7270) 2026-01-07 22:32:33 +00:00
dependabot[bot]
14fe3b375f chore(deps): bump urllib3 from 2.6.2 to 2.6.3 in /backend/requirements (#7272)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-01-07 21:47:53 +00:00
dependabot[bot]
bb1b96dded chore(deps): bump preact from 10.27.2 to 10.28.2 in /web (#7267)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-07 21:17:10 +00:00
Evan Lohn
9f949ae2d9 fix: custom llm provider prompt caching type safety (#7269) 2026-01-07 20:41:53 +00:00
acaprau
975c0e8009 feat(opensearch): Some low hanging fruit for Vespa <-> OpenSearch data parity (#7252) 2026-01-07 20:36:12 +00:00
Jamison Lahman
3dfb38c460 fix(fe): Failed indexing colors support dark theme (#7264) 2026-01-07 11:52:46 -08:00
Jamison Lahman
a1512a0485 fix(fe): fix InputComboBox shrinking when disabled (#7266) 2026-01-07 19:43:39 +00:00
roshan
8ea3bacd38 feat(evals): weekly eval runs (#7236) 2026-01-07 19:39:13 +00:00
Jamison Lahman
6b560b8162 fix(fe): admin containers apply bottom padding (#7263) 2026-01-07 18:34:53 +00:00
Jamison Lahman
3b750939ed fix(fe): move Text horizontal padding to pseudo-element (#7226)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2026-01-07 18:14:33 +00:00
Yuhong Sun
bd4cb17a48 chore: agent pin behavior (#7261) 2026-01-07 18:11:33 +00:00
SubashMohan
485cd9a311 feat(projects): enhance FileCard component with className prop to fix width issue (#7259) 2026-01-07 18:04:59 +00:00
SubashMohan
2108c72353 feat(chat): add custom copy behavior for HumanMessage component (#7257) 2026-01-07 18:04:55 +00:00
Danelegend
98f43fb6ab fix(files): propagate file error from backend (#7245)
Co-authored-by: Dane Urban <durban@Danes-MacBook-Pro.local>
2026-01-07 17:43:15 +00:00
Danelegend
e112ebb371 chore: add msoffcrypto-tool (#7247)
Co-authored-by: Dane Urban <durban@Danes-MacBook-Pro.local>
2026-01-07 17:38:09 +00:00
Jamison Lahman
f88cbcfe27 revert: "chore(deployments): prefer release environment (#6997)" (#7260) 2026-01-07 07:06:56 -08:00
Wenxi
0df0b10d3a feat: add public tag for api reference docs (#7227) 2026-01-07 06:09:36 +00:00
Jamison Lahman
ed0d12452a chore(deployments): don't treat ad-hoc releases as dry-runs (#7256) 2026-01-06 21:57:51 -08:00
Wenxi
dc7cb80594 fix: don't pass tool_choice for mistral provider (#7255) 2026-01-07 05:42:59 +00:00
Yuhong Sun
4312b24945 feat: Fix last cycle LLM did not return an answer (#7254) 2026-01-07 05:41:44 +00:00
Justin Tahara
afd920bb33 fix(users): Multi-tenant signup (#7237) 2026-01-06 18:38:05 -08:00
Jamison Lahman
d009b12aa7 chore(gha): paths-filter depends on actions/checkout (#7244) 2026-01-06 17:11:45 -08:00
Jamison Lahman
596b3d9f3e chore(gha): skip all of zizmor when applicable (#7243) 2026-01-06 17:08:50 -08:00
Jamison Lahman
1981c912b7 chore(gha): conditionally run zizmor (#7240) 2026-01-06 16:18:33 -08:00
Jamison Lahman
68b1bb8448 chore(gha): pin uv version w/ chart-testing-action (#7239) 2026-01-06 16:03:37 -08:00
Jamison Lahman
4676b5017f chore(whitespace): format pr-helm-chart-testing.yml (#7238) 2026-01-06 16:01:02 -08:00
Danelegend
eb7b6a5ce1 fix(chat): enable exclusion of failed chat sessions from api (#7233)
Co-authored-by: Dane Urban <durban@Danes-MacBook-Pro.local>
2026-01-06 23:04:35 +00:00
Justin Tahara
87d6df2621 fix(user): Block Malicious Accounts (#7235) 2026-01-06 14:52:44 -08:00
Danelegend
13b4108b53 fix: serper api key errors when adding (#7217)
Co-authored-by: Dane Urban <durban@Danes-MacBook-Pro.local>
2026-01-06 22:42:07 +00:00
acaprau
13e806b625 feat(opensearch): Add OpenSearch document index interface (#7143) 2026-01-06 22:35:47 +00:00
Nikolas Garza
f4f7839d84 fix: sidebar button shifting on hover (#7234) 2026-01-06 21:54:39 +00:00
Jamison Lahman
2dbf1c3b1f chore(devtools): ods with no args outputs help (#7230) 2026-01-06 21:14:26 +00:00
dependabot[bot]
288d4147c3 chore(deps): bump pynacl from 1.6.1 to 1.6.2 in /backend/requirements (#7228)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-01-06 20:56:17 +00:00
dependabot[bot]
fee27b2274 chore(deps): bump aiohttp from 3.13.2 to 3.13.3 in /backend/requirements (#7216)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-06 20:53:40 +00:00
Justin Tahara
340e938627 fix(chat): Math Formatting (#7229) 2026-01-06 20:37:34 +00:00
roshan
6faa47e0f7 fix: json serialize tool calls and other types in prompt cache (#7225) 2026-01-06 19:16:19 +00:00
Evan Lohn
ba6801f5af chore: add logs to tenant overrides (#7215) 2026-01-06 18:46:38 +00:00
SubashMohan
d7447eb8af fix(projects): project folder button not expandable (#7223) 2026-01-06 18:22:39 +00:00
SubashMohan
196f890a68 feat(image-generation): Add Azure OpenAI GPT image models (#7224) 2026-01-06 17:37:09 +00:00
Justin Tahara
3ac96572c3 fix(open_url): Parse PDF files with Open URL Tool (#7219) 2026-01-06 17:32:35 +00:00
SubashMohan
3d8ae22b3a seeds(config): image gen from llm providers (#7198) 2026-01-06 16:17:40 +00:00
SubashMohan
233d06ec0e feat(api): Enhance API key handling and masking in image generation (#7220) 2026-01-06 14:17:03 +05:30
Justin Tahara
9ff82ac740 fix(chat): Thinking in Regen Chat (#7213) 2026-01-06 04:13:11 +00:00
Justin Tahara
b15f01fd78 fix(ui): Image Gen Tooltip for Agent Workflow (#7211) 2026-01-06 02:57:08 +00:00
Nikolas Garza
6480cf6738 fix(fe): chat input box spacing and sizing fixes (#7204) 2026-01-06 01:38:08 +00:00
Justin Tahara
c521a4397a chore(llm): Remove Claude Opus 3 (#7214) 2026-01-06 01:34:58 +00:00
Evan Lohn
41a8d86df3 feat: prompt cache 3 (#6605) 2026-01-06 00:39:39 +00:00
roshan
735cf926e4 feat(evals): multi-turn evals (#7210) 2026-01-05 23:33:19 +00:00
Justin Tahara
035e73655f fix(ui): Update coloring for Doc Set Tooltip (#7208) 2026-01-05 22:12:42 +00:00
roshan
f317420f58 feat(evals): set log level for eval runs to warning (#7209) 2026-01-05 22:11:45 +00:00
Justin Tahara
d50a84f2e4 fix(ui): Remove Open URL Filter for Agents (#7205) 2026-01-05 21:16:30 +00:00
Justin Tahara
9b441e3686 feat(braintrust): Cost Tracking (#7201) 2026-01-05 20:44:53 +00:00
Justin Tahara
c4c1e16f19 fix(braintrust): Implement actual TTFA Metric (#7169) 2026-01-05 20:31:47 +00:00
Evan Lohn
9044e0f5fa feat: per-tenant usage limits (#7197) 2026-01-05 19:01:00 +00:00
Jamison Lahman
a180e1337b chore(fe): replace js isHovered with css hover effects (#7200) 2026-01-05 09:01:55 -08:00
Evan Lohn
6ca72291bc fix: llm usage tracking for dr (#7196) 2026-01-05 01:24:20 +00:00
Evan Lohn
c23046f7c0 chore: bump limits on cloud LLM usage (#7195) 2026-01-04 21:38:13 +00:00
Evan Lohn
d5f66ac146 feat: cloud usage limits (#7192) 2026-01-04 06:51:12 +00:00
Yuhong Sun
241fc8f877 feat: Deep Research Internal Search Tuning (#7193) 2026-01-03 22:54:23 -08:00
Jamison Lahman
f1ea41b519 chore(whitespace): ignore refactor rev (#7191) 2026-01-02 23:52:48 -08:00
Jamison Lahman
ed3f72bc75 refactor(whitespace): rm react fragment (#7190) 2026-01-02 23:49:39 -08:00
Jamison Lahman
2247e3cf8e chore(fe): rm unnecessary spacer from chat ui (#7189) 2026-01-02 23:42:54 -08:00
Jamison Lahman
47c49d86e8 chore(fe): improve human chat responsiveness (#7187) 2026-01-02 23:26:52 -08:00
Yuhong Sun
8c11330d46 feat: Easy send message nonstreaming (#7186) 2026-01-02 19:46:54 -08:00
Chris Weaver
22ac22c17d feat: improve display for models that are no longer present (#7184) 2026-01-03 02:39:06 +00:00
Yuhong Sun
c0a6a0fb4a feat: nonstreaming send chat message api (#7181) 2026-01-03 02:33:17 +00:00
Chris Weaver
7f31a39dc2 fix: regenerate models stuck in perma loading state (#7182)
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-01-03 02:18:34 +00:00
Yuhong Sun
f1f61690e3 chore: spacing (#7183) 2026-01-02 17:57:55 -08:00
Jamison Lahman
8c3e17bbe5 revert: "chore(pre-commit): run uv-sync in active venv" (#7178) 2026-01-03 01:16:01 +00:00
Yuhong Sun
a1ab3678a0 chore: Plugin issue (#7179) 2026-01-02 16:43:51 -08:00
Yuhong Sun
2d79ed7bb4 New send message api (#7167) 2026-01-02 23:57:54 +00:00
Justin Tahara
f472fd763e fix(braintrust): Span Attributes Association (#7174) 2026-01-02 15:20:10 -08:00
Jamison Lahman
e47b2fccb4 chore(playwright): fix Exa configure tests (#7176) 2026-01-02 15:10:54 -08:00
acaprau
17a6fc4ebf chore(opensearch): Add external dep tests for OpenSearchClient (#7155) 2026-01-02 22:28:46 +00:00
acaprau
391c8c5cf7 feat(opensearch): Add OpenSearch client (#7137)
Flaky connector tests are failing for reasons unrelated to this PR; all other tests pass.
2026-01-02 14:11:14 -08:00
Jamison Lahman
d0e3ee1055 chore(deployments): prefer release environment (#6997) 2026-01-02 22:00:33 +00:00
Jamison Lahman
dc760cf580 chore(playwright): prefer baseURL (#7171) 2026-01-02 13:30:10 -08:00
Justin Tahara
d49931fce1 fix(braintrust): Fix Tenant ID to Token Association (#7173) 2026-01-02 13:10:34 -08:00
Jamison Lahman
41d1d265a0 chore(docker): .dockerignore /tests/ (#7172) 2026-01-02 20:19:52 +00:00
Chris Weaver
45a2207662 chore: cleanup old LLM provider update mechanism (#7170) 2026-01-02 20:14:27 +00:00
Justin Tahara
725ed6a523 fix(braintrust): Updating naming for metric (#7168) 2026-01-02 20:06:43 +00:00
acaprau
2452671420 feat(opensearch): Add OpenSearch queries (#7133) 2026-01-02 19:05:43 +00:00
Jamison Lahman
a4a767f146 fix(ollama): rm unsupported tool_choice option (#7156) 2026-01-02 18:55:57 +00:00
Wenxi
8304fbd14c fix: don't pass selected tab to connector specific config (#7165) 2026-01-02 18:19:33 +00:00
Jamison Lahman
7db7d4c965 chore(docker): publish inference_model_server port 9000 in dev (#7166) 2026-01-02 10:04:45 -08:00
SubashMohan
2cc2b5aee9 feat(image-generation): e2e tests (#7164) 2026-01-02 19:13:59 +05:30
SubashMohan
0c35ffe468 feat(config): Image generation frontend (#7019) 2026-01-02 11:36:57 +00:00
SubashMohan
adece3f812 Tests/theme (#7163)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-02 16:14:13 +05:30
Jamison Lahman
b44349e67d chore(blame): introduce .git-blame-ignore-revs to ignore refactors (#7162) 2026-01-01 22:23:34 -08:00
Jamison Lahman
3134e5f840 refactor(whitespace): rm temporary react fragments (#7161) 2026-01-01 22:10:31 -08:00
dependabot[bot]
5b8223b6af chore(deps): bump qs from 6.14.0 to 6.14.1 in /web (#7147)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2026-01-02 05:05:00 +00:00
Jamison Lahman
30ab85f5a0 chore(fe): follow up styling fixes to #7129 (#7160) 2026-01-01 19:58:43 -08:00
Jamison Lahman
daa343c30b perf(chat): memoize chat messages (#7157)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2026-01-01 19:10:18 -08:00
devin-ai-integration[bot]
c67936a4c1 fix: non-thinking responses not displaying until page refresh (#7123)
Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: roshan@onyx.app <rohod04@gmail.com>
Co-authored-by: Wenxi <wenxi@onyx.app>
Co-authored-by: Chris <chris@onyx.app>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
Co-authored-by: Yuhong Sun <yuhongsun96@gmail.com>
Co-authored-by: Raunak Bhagat <r@rabh.io>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Co-authored-by: SubashMohan <subashmohan75@gmail.com>
Co-authored-by: Justin Tahara <105671973+justin-tahara@users.noreply.github.com>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
Co-authored-by: roshan <38771624+rohoswagger@users.noreply.github.com>
2026-01-01 21:15:55 +00:00
Jamison Lahman
4578c268ed perf(chat): consolidate chat UI layout style (#7129) 2026-01-01 13:10:47 -08:00
roshan
7658917fe8 feat: running evals locally (#7145) 2026-01-01 18:39:08 +00:00
roshan
fd4695d5bd feat: add tool call validation to eval cli (#7144)
Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
2026-01-01 15:46:05 +00:00
devin-ai-integration[bot]
a25362a709 fix: check stop signal during active streaming (#7151)
Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: roshan@onyx.app <rohod04@gmail.com>
2026-01-01 15:33:03 +00:00
SubashMohan
1eb4962861 refactor: White-labelling (#6938) 2026-01-01 09:55:58 +00:00
Nikolas Garza
aa1c956608 fix: Duplicate model provider sections for unenriched LLM models (#7148) 2026-01-01 03:03:40 +00:00
Chris Weaver
19e5c47f85 fix: when onboarding flow shows up (#7154) 2025-12-31 18:29:36 -08:00
Chris Weaver
872a2ed58a feat: add new models to cloud (#7149) 2026-01-01 01:50:26 +00:00
Jessica Singh
42047a4dce feat(tools): extend open_url to handle indexed content urls (#6822) 2026-01-01 01:31:28 +00:00
Chris Weaver
a3a9847d76 fix: onboarding display (#7153) 2025-12-31 17:19:00 -08:00
Yuhong Sun
3ade17c380 chore: fix linter issues (#7122) 2025-12-31 16:48:33 -08:00
Chris Weaver
9150ba1905 fix: skip failing tests (#7152) 2026-01-01 00:08:46 +00:00
Justin Tahara
cb14e84750 feat(connectors): Add Deletion Popup (#7054) 2025-12-31 22:12:57 +00:00
Chris Weaver
c916517342 feat: add auto LLM model updates from GitHub config (#6830)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-31 14:02:08 -08:00
Justin Tahara
45b902c950 fix(desktop): Disable reload on Mac (#7141) 2025-12-31 21:06:02 +00:00
Nikolas Garza
981b43e47b fix: prevent Slack federated search query multiplication (#7125) 2025-12-31 20:41:50 +00:00
Yuhong Sun
b5c45cbce0 Chat Flow Readme (#7142) 2025-12-31 11:15:48 -08:00
Yuhong Sun
451f10343e Update README.md (#7140) 2025-12-31 10:11:31 -08:00
SubashMohan
ceeed2a562 Feat/image config backend (#6961) 2025-12-31 11:39:32 +00:00
SubashMohan
bcc7a7f264 refactor(modals): All modals use new Modal component (#6729) 2025-12-31 07:54:08 +00:00
SubashMohan
972ef34b92 Fix/input combobox dropdown (#7015) 2025-12-31 13:01:03 +05:30
Raunak Bhagat
9d11d1f218 feat: Refreshed agent creation page (#6241)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-31 05:09:07 +00:00
Chris Weaver
4db68853cd fix: openai provider identification on the admin panel (#7135) 2025-12-31 02:14:46 +00:00
Wenxi
b08fafc66b fix: make litellm testing script prettier (#7136) 2025-12-30 18:08:25 -08:00
Wenxi
1e61bf401e fix: lazy load tracing providers to avoid spamming logs when not configured (#7134) 2025-12-31 02:03:33 +00:00
Chris Weaver
0541c2989d fix: downgrade (#7132) 2025-12-31 01:45:41 +00:00
Yuhong Sun
743b996698 fix: Remove Default Reminder (#7131) 2025-12-31 00:55:16 +00:00
Chris Weaver
16e77aebfc refactor: onboarding forms (#7105) 2025-12-30 16:56:13 -08:00
Yuhong Sun
944f4a2464 fix: reenable force search parameter (#7130) 2025-12-31 00:27:17 +00:00
Nikolas Garza
67db7c0346 fix: suppress Jest act() warning spam in test output (#7127) 2025-12-30 22:32:15 +00:00
Jamison Lahman
8e47cd4e4f chore(fe): baseline align inline code spans (#7128) 2025-12-30 22:21:59 +00:00
devin-ai-integration[bot]
e8a4fca0a3 fix: persist onboarding flow until user explicitly finishes (#7111)
Co-authored-by: Devin AI <158243242+devin-ai-integration[bot]@users.noreply.github.com>
Co-authored-by: Chris <chris@onyx.app>
2025-12-30 21:42:04 +00:00
Wenxi
6d783ca691 fix: gemini default location global (#7124) 2025-12-30 21:15:57 +00:00
Yuhong Sun
283317bd65 chore: prompts (#7108) 2025-12-30 12:22:21 -08:00
acaprau
2afbc74224 feat: Add OpenSearch schema (#7118) 2025-12-30 19:55:34 +00:00
acaprau
5b273de8be chore: Add script to restart OpenSearch container (#7110) 2025-12-30 19:48:30 +00:00
roshan
a0a24147b5 fix: stop-generation for deep research (#7050)
Co-authored-by: Raunak Bhagat <r@rabh.io>
Co-authored-by: acaprau <48705707+acaprau@users.noreply.github.com>
Co-authored-by: Justin Tahara <105671973+justin-tahara@users.noreply.github.com>
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-12-30 19:17:28 +00:00
roshan
fd31da3159 chore: clean up stop signal redis fence (#7119) 2025-12-30 18:55:21 +00:00
Yuhong Sun
cd76ac876b fix: MIT integration tests (#7121) 2025-12-30 10:51:36 -08:00
Jamison Lahman
8f205172eb chore(gha): ensure uv cache is pruned before upload (#7120) 2025-12-30 10:50:08 -08:00
roshan
be70fa21e3 fix: stop-generation for non-deep research (#7045)
Co-authored-by: Raunak Bhagat <r@rabh.io>
Co-authored-by: acaprau <48705707+acaprau@users.noreply.github.com>
Co-authored-by: Justin Tahara <105671973+justin-tahara@users.noreply.github.com>
2025-12-30 18:41:20 +00:00
roshan
0687bddb6f fix: popover max height setting (#7093)
Co-authored-by: Cursor Agent <cursoragent@cursor.com>
2025-12-30 18:40:54 +00:00
roshan
73091118e3 fix: rendering parallel research agents cleanly (#7078) 2025-12-30 18:40:45 +00:00
Wenxi
bf8590a637 feat: add z indices for confirmation modal (#7114) 2025-12-30 18:40:16 +00:00
Chris Weaver
8a6d597496 perf: update web/STANDARDS.md + add standards to CLAUDE.md / AGENTS.md (#7039)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-12-30 09:36:58 -08:00
Jamison Lahman
f0bc538f60 chore(fe): fix some Text that should be spans (#7112) 2025-12-30 08:06:15 -08:00
Jamison Lahman
0b6d9347bb fix(ux): Share Chat modal uses CopyIconButton (#7116) 2025-12-30 08:05:02 -08:00
Raunak Bhagat
415538f9f8 refactor: Improve form field components (#7104) 2025-12-29 23:26:56 -08:00
Jamison Lahman
969261f314 chore(desktop): disable nightly builds (#7115) 2025-12-29 22:42:39 -08:00
Jamison Lahman
eaa4d5d434 chore(desktop): remove duplicate startup log, onyx-desktop (#7113) 2025-12-29 19:58:25 -08:00
acaprau
19e6900d96 chore: Add opensearch-py 3.0.0 (#7103) 2025-12-30 03:50:22 +00:00
Jamison Lahman
f3535b94a0 chore(docker): add healthchecks (#7089)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-12-29 19:29:16 -08:00
Jamison Lahman
383aa222ba chore(fe): refresh chat Stack Trace button (#7092) 2025-12-29 18:29:58 -08:00
Yuhong Sun
f32b21400f chore: Fix Tests (#7107) 2025-12-29 17:24:40 -08:00
Jamison Lahman
5d5e71900e chore(fe): Text default span follow up (#7106) 2025-12-29 17:22:09 -08:00
Yuhong Sun
06ce7484b3 chore: docker compose no MCP server (#7100) 2025-12-29 16:40:15 -08:00
Jamison Lahman
700db01b33 chore(fe): make Text component default to span (#7096) 2025-12-29 16:30:09 -08:00
acaprau
521e9f108f fix: The update method for the new Vespa interface should correctly handle None chunk_count (#7098) 2025-12-30 00:23:37 +00:00
Yuhong Sun
1dfb62bb69 chore: Remove unused resources from model server (#7094) 2025-12-29 16:18:37 -08:00
Wenxi
14a1b3d197 fix: get_tenant_users script invalid sql stmt (#7097) 2025-12-29 23:58:11 +00:00
Chris Weaver
f3feac84f3 refactor: llm provider forms (#7006) 2025-12-29 14:09:52 -08:00
roshan
d6e7c11c92 fix: think tool newline unescaping (#7086)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-29 20:34:12 +00:00
Jamison Lahman
d66eef36d3 feat(ux): include a copy button for chat stack traces (#7091) 2025-12-29 19:59:38 +00:00
Wenxi
05fd974968 refactor: let litellm handle translating reasoning_effort to anthropic thinking (#7090) 2025-12-29 19:55:54 +00:00
roshan
ad882e587d fix: parallel tool tab hover (#7083)
Co-authored-by: Cursor Agent <cursoragent@cursor.com>
2025-12-29 18:01:39 +00:00
Jamison Lahman
f2b1f20161 chore(gha): playwright and integration are optional on merge_group (#7080) 2025-12-29 17:42:50 +00:00
Raunak Bhagat
6ec3b4c6cf feat: Add warnings support to Formik input layouts (#7087) 2025-12-29 09:30:30 -08:00
roshan
529a2e0336 chore: bolding enhancement (#7002)
Co-authored-by: Cursor Agent <cursoragent@cursor.com>
2025-12-29 03:27:37 +00:00
Wenxi
35602519c5 feat: add litellm debugging scripts (#7085)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-28 14:30:12 -08:00
Wenxi
7e0b773247 feat: centralized llm provider names (#7084) 2025-12-28 20:50:37 +00:00
Wenxi
924b5e5c70 refactor: stopgap cleanup core litellm arg processing (#7065) 2025-12-28 19:54:54 +00:00
Chris Weaver
cfcb09070d fix: improve URL handling (#7079) 2025-12-27 21:09:31 -08:00
Jamison Lahman
27b0fee3c4 chore(pre-commit): rm check-yaml (#7081) 2025-12-27 12:16:13 -08:00
Jamison Lahman
5617e86b14 chore(tests): use pytest-alembic to validate migrations (#7069) 2025-12-27 19:16:49 +00:00
Jamison Lahman
b909eb0205 chore(alembic): fix new_chat_history downgrade (#7073) 2025-12-27 16:56:56 +00:00
Raunak Bhagat
2a821134c0 refactor: Improve shared components (#7077) 2025-12-26 22:37:47 -08:00
Raunak Bhagat
ad632e4440 fix: Update context API (#7076) 2025-12-26 22:02:00 -08:00
Raunak Bhagat
153e313021 refactor: reorganize hooks to web/src/hooks directory (#7071) 2025-12-26 21:01:40 -08:00
Raunak Bhagat
abc80d7feb feat: add actions-layouts and improve input-layouts (#7072) 2025-12-26 21:01:17 -08:00
Jamison Lahman
1a96e894fe chore(deps): pin uv in CI (#7074) 2025-12-26 20:40:05 -08:00
Jamison Lahman
5a09a73df8 chore(tests): delete skipped migration tests (#7070) 2025-12-27 04:19:59 +00:00
Jamison Lahman
02723291b3 chore(gha): remove fetch-depth: 0 from playwright (#7066) 2025-12-27 02:10:20 +00:00
Justin Tahara
324388fefc chore(envvar): Cleaning up Unused EnvVars (#7067) 2025-12-26 17:57:32 -08:00
Justin Tahara
4a119e869b chore(envvar): Cleanup Unused envvars (#7056) 2025-12-27 01:32:52 +00:00
Jamison Lahman
20127ba115 chore(docker): move docker-bake.hcl to toplevel (#7064) 2025-12-27 01:04:05 +00:00
Justin Tahara
3d6344073d fix(ui): Align Web Search Page (#7061) 2025-12-26 16:17:28 -08:00
Justin Tahara
7dd98b717b fix(ui): Align Performance Pages (#7062) 2025-12-26 16:05:34 -08:00
Wenxi
0ce5667444 fix: default to global region for gemini models (#7060) 2025-12-26 23:08:17 +00:00
Wenxi
b03414e643 chore: removed unnecessary monkey patch (#7058) 2025-12-26 22:41:09 +00:00
Jamison Lahman
7a67de2d72 chore(github): make PR template instructions comments (#7053) 2025-12-26 21:00:14 +00:00
roshan
300bf58715 fix: remove dr feature flag (#7052) 2025-12-26 20:58:08 +00:00
Justin Tahara
b2bd0ddc50 fix(chat): Custom Agent Chat Rename (#7051) 2025-12-26 20:46:40 +00:00
Justin Tahara
a3d847b05c fix(ui): Copy Traceback button (#7049) 2025-12-26 19:29:29 +00:00
acaprau
d529d0672d fix: test_connector_pause_while_indexing keeps timing out, lower the number of docs to wait for to 4 from 16 (#6976) 2025-12-26 17:33:57 +00:00
Raunak Bhagat
f98a5e1119 fix: Overlay ordering bug (#7048) 2025-12-26 09:00:29 -08:00
Raunak Bhagat
6ec0b09139 feat: Add small icons + scripts + readme to Opal (#7046) 2025-12-26 08:06:57 -08:00
roshan
53691fc95a chore: refactor search tool renderer (#7044) 2025-12-25 22:04:11 -05:00
Jamison Lahman
3400e2a14d chore(desktop): skip desktop on beta tags (#7043) 2025-12-25 13:41:05 -08:00
roshan
d8cc1f7a2c chore: clean up unused feature flag (#7042) 2025-12-25 16:35:53 -05:00
roshan
2098e910dd chore: clean up search renderer v2 (#7041) 2025-12-25 16:31:26 -05:00
Jamison Lahman
e5491d6f79 revert: "chore(fe): enable reactRemoveProperties" (#7040) 2025-12-25 12:00:52 -08:00
Raunak Bhagat
a8934a083a feat: Add useOnChangeValue hook and update form components (#7036) 2025-12-25 11:40:39 -08:00
Chris Weaver
80e9507e01 fix: google index names (#7038) 2025-12-25 17:56:22 +00:00
Raunak Bhagat
60d3be5fe2 refactor: Improve form hook to handle events directly (#7035) 2025-12-25 02:16:47 -08:00
Raunak Bhagat
b481cc36d0 refactor: Update form field components to use new hook (#7034) 2025-12-25 01:54:07 -08:00
Raunak Bhagat
65c5da8912 feat: Create new InputDatePicker component (#7023) 2025-12-24 23:23:47 -08:00
Jamison Lahman
0a0366e6ca chore(fe): enable reactRemoveProperties (#7030) 2025-12-25 05:12:36 +00:00
Jamison Lahman
84a623e884 chore(fe): remove reliance on data-testid prop (#7031) 2025-12-24 20:44:28 -08:00
roshan
6b91607b17 chore: feature flag for deep research (#7022) 2025-12-24 21:38:34 -05:00
Wenxi
82fb737ad9 fix: conditional tool choice param for anthropic (#7029) 2025-12-25 00:25:19 +00:00
Justin Tahara
eed49e699e fix(docprocessing): Cleaning up Events (#7025) 2025-12-24 12:25:43 -08:00
Justin Tahara
3cc7afd334 fix(chat): Copy functionality (#7027) 2025-12-24 12:22:02 -08:00
Jamison Lahman
bcbfd28234 chore(fe): "Copy code"->"Copy" (#7018) 2025-12-24 11:38:02 -08:00
Rohit V
faa47d9691 chore(docs): update docker compose command in CONTRIBUTING.md (#7020)
Co-authored-by: Rohit V <rohit.v@thoughtspot.com>
2025-12-24 11:18:12 -08:00
Wenxi
6649561bf3 fix: multiple tool calls unit test (#7026) 2025-12-24 18:08:12 +00:00
Wenxi
026cda0468 fix: force tool with openai (#7024) 2025-12-24 09:37:14 -08:00
Raunak Bhagat
64297e5996 feat: add formik field components and helpers (#7017)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-24 08:09:24 -08:00
Raunak Bhagat
c517137c0a refactor: Update CSS stylings for SidebarTab component (#7016) 2025-12-23 22:56:06 -08:00
SubashMohan
cbfbe0bbbe fix(onboarding): Azure llm url parsing (#6950) 2025-12-24 12:17:31 +05:30
Raunak Bhagat
13ca4c6650 refactor: remove icon prop from UserFilesModal (#7014) 2025-12-23 22:35:42 -08:00
Raunak Bhagat
e8d9e36d62 refactor: SidebarTab fixes (#7012)
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2025-12-24 06:06:06 +00:00
Jamison Lahman
77e4f3c574 fix(fe): right sidebar buttons don't inherit href (#7007)
Co-authored-by: Raunak Bhagat <r@rabh.io>
2025-12-24 04:41:22 +00:00
Chris Weaver
2bdc06201a fix: improve scrollbar for code blocks (#7013) 2025-12-24 03:38:09 +00:00
Yuhong Sun
077ba9624c fix: parallel tool call with openai (#7010) 2025-12-23 19:07:23 -08:00
Raunak Bhagat
81eb1a1c7c fix: Fix import error (#7011) 2025-12-23 19:00:10 -08:00
Yuhong Sun
1a16fef783 feat: DEEP RESEARCH ALPHA HUZZAH (#7001) 2025-12-23 18:45:43 -08:00
Yuhong Sun
027692d5eb chore: bump litellm version (#7009) 2025-12-23 18:09:21 -08:00
Raunak Bhagat
3a889f7069 refactor: Add more comprehensive layout components (#6989) 2025-12-23 17:54:32 -08:00
Raunak Bhagat
20d67bd956 feat: Add new components to refresh-components (#6991)
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
2025-12-23 17:53:59 -08:00
acaprau
8d6b6accaf feat(new vector db interface): Plug in retrievals for Vespa (#6966) 2025-12-23 23:30:59 +00:00
Chris Weaver
ed76b4eb55 fix: masking (#7003) 2025-12-23 23:23:03 +00:00
Raunak Bhagat
7613c100d1 feat: update icons (#6988) 2025-12-23 15:11:33 -08:00
Raunak Bhagat
c52d3412de refactor: add more helpful utility hooks (#6987) 2025-12-23 14:38:13 -08:00
Jamison Lahman
96b6162b52 chore(desktop): fix windows version (#6999) 2025-12-23 22:21:30 +00:00
Yuhong Sun
502ed8909b chore: Tuning Deep Research (#7000) 2025-12-23 14:19:20 -08:00
roshan
8de75dd033 feat: deep research (#6936)
Co-authored-by: Yuhong Sun <yuhongsun96@gmail.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
Co-authored-by: Cursor Agent <cursoragent@cursor.com>
2025-12-23 21:24:27 +00:00
Wenxi
74e3668e38 chore: cleanup drupal connector nits (#6998) 2025-12-23 21:24:21 +00:00
Justin Tahara
2475a9ef92 fix(gdrive): Investigation Logging (#6996) 2025-12-23 13:26:44 -08:00
rexjohannes
690f54c441 feat: Drupal Wiki connector (#4773)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-23 19:28:23 +00:00
Jamison Lahman
71bb0c029e chore(desktop): deployment automation for the desktop app (#6990) 2025-12-23 09:20:59 -08:00
Yuhong Sun
ccf890a129 Small Tuning (#6986) 2025-12-22 20:13:17 -08:00
acaprau
a7bfdebddf feat(new vector db interface): Implement retrievals for Vespa (#6963) 2025-12-23 03:00:38 +00:00
Yuhong Sun
6fc5ca12a3 Fine grained Braintrust tracing (#6985) 2025-12-22 19:08:49 -08:00
Wenxi
8298452522 feat: add open book icon (#6984) 2025-12-22 19:00:31 -08:00
Wenxi
2559327636 fix: allow chat file previewing and fix csv rendering (#6915) 2025-12-23 02:08:42 +00:00
Yuhong Sun
ef185ce2c8 feat: DR Tab for intermediate reports and Index increment for final report section end (#6983) 2025-12-22 18:10:45 -08:00
Wenxi
a04fee5cbd feat: add optional image parsing for docx (#6981) 2025-12-22 17:45:44 -08:00
Justin Tahara
e507378244 fix(vertex-ai): Bump Default Batch Size (#6982) 2025-12-22 17:21:55 -08:00
Justin Tahara
e6be3f85b2 fix(gemini): No Asyncio (#6980) 2025-12-23 01:07:40 +00:00
acaprau
cc96e303ce feat(new vector db interface): Plug in delete for Vespa (#6867)
Co-authored-by: Yuhong Sun <yuhongsun96@gmail.com>
2025-12-23 00:54:52 +00:00
Nikolas Garza
e0fcb1f860 feat(fe): speed up pre-commit TypeScript type checking with tsgo (#6978) 2025-12-23 00:22:42 +00:00
roshan
f5442c431d feat: add PacketException handling (#6968) 2025-12-23 00:09:51 +00:00
acaprau
652e5848e5 feat(new vector db interface): Implement delete for Vespa (#6866)
Co-authored-by: Yuhong Sun <yuhongsun96@gmail.com>
2025-12-22 23:58:32 +00:00
Wenxi
3fa1896316 fix: download cloud svg (#6977) 2025-12-22 14:54:33 -08:00
roshan
f855ecab11 feat: add dr loop tracing (#6971) 2025-12-22 21:35:29 +00:00
Jamison Lahman
fd26176e7d revert: "fix(fe): make recent chat sidebar buttons links" (#6967) 2025-12-22 12:12:48 -08:00
Justin Tahara
8986f67779 fix(docprocessing): Reusing Threads (#6916) 2025-12-22 19:03:46 +00:00
Nikolas Garza
42f2d4aca5 feat(teams): Enable Auto Sync Permissions for Teams connector (#6648) 2025-12-22 18:57:01 +00:00
Evan Lohn
7116d24a8c fix: small MCP UI changes (#6862) 2025-12-22 18:09:36 +00:00
Justin Tahara
7f4593be32 fix(vertex): Infinite Embedding (#6917) 2025-12-22 10:43:11 -08:00
Wenxi
f47e25e693 feat(ingestion): restore delete api (#6962) 2025-12-22 10:06:43 -08:00
acaprau
877184ae97 feat(new vector db interface): Plug in update for Vespa (#6792) 2025-12-22 16:25:13 +00:00
acaprau
54961ec8ef fix: test_multi_llm.py::test_multiple_tool_calls callsite fix (#6959) 2025-12-22 08:06:13 -08:00
Raunak Bhagat
e797971ce5 fix: Layout fix + CSR updates (#6958)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-22 08:00:39 -08:00
Jamison Lahman
566cca70d8 chore(fe): conditionally render header on chatSession (#6955) 2025-12-22 02:37:01 -08:00
Jamison Lahman
be2d0e2b5d chore(fe): prevent header continuous render (#6954) 2025-12-22 00:46:21 -08:00
Jamison Lahman
692f937ca4 chore(fmt): fix prettier (#6953) 2025-12-22 00:30:21 -08:00
Jamison Lahman
11de1ceb65 chore(ts): typedRoutes = true (#6930) 2025-12-22 00:21:44 -08:00
Jamison Lahman
19993b4679 chore(chat): refactor chat header (#6952)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-22 00:20:46 -08:00
Yuhong Sun
9063827782 Enable DR on the backend (#6948) 2025-12-21 18:25:24 -08:00
Yuhong Sun
0cc6fa49d7 DR Minor tweaking (#6947) 2025-12-21 17:23:52 -08:00
roshan
3f3508b668 fix: sanitize postgres to remove nul characters (#6934) 2025-12-22 00:19:25 +00:00
Jamison Lahman
1c3a88daf8 perf(chat): avoid re-rendering chat on ChatInput change (#6945) 2025-12-21 16:15:34 -08:00
Yuhong Sun
92f30bbad9 Fix misalignment in DR failed agents (#6946) 2025-12-21 15:07:45 -08:00
Yuhong Sun
4abf43d85b DR bug fixes (#6944) 2025-12-21 14:56:52 -08:00
Jamison Lahman
b08f9adb23 chore(perf): frontend stats overlay in dev (#6840)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-21 22:12:54 +00:00
Yuhong Sun
7a915833bb More correct packet handling (#6943) 2025-12-21 13:48:27 -08:00
Jamison Lahman
9698b700e6 fix(desktop): Linux-specific fixes (#6928) 2025-12-21 20:39:52 +00:00
Jamison Lahman
fd944acc5b fix(fe): chat content links use proper hrefs (#6939) 2025-12-21 12:09:20 -08:00
Yuhong Sun
a1309257f5 Log (#6937) 2025-12-20 23:28:28 -08:00
Yuhong Sun
6266dc816d feat: Deep Research Citation Handling (#6935) 2025-12-20 22:46:20 -08:00
Jamison Lahman
83c011a9e4 chore(deps): upgrade urllib3 2.6.1->2.6.2 (#6932) 2025-12-20 20:21:10 -08:00
Yuhong Sun
8d1ac81d09 Citation Processing (#6933) 2025-12-20 20:08:24 -08:00
Yuhong Sun
d8cd4c9928 feat: DR fix a couple issues with saving (#6931) 2025-12-20 18:28:04 -08:00
Jamison Lahman
5caa4fdaa0 fix(chat): attached images are flush right (#6927) 2025-12-20 07:20:14 -08:00
Jamison Lahman
f22f33564b fix(fe): ensure error messages have padding (#6926) 2025-12-20 07:03:27 -08:00
Jamison Lahman
f86d282a47 chore(fe): ensure chat padding on medium size viewport (#6925) 2025-12-20 06:38:16 -08:00
Jamison Lahman
ece1edb80f fix(fe): make recent chat sidebar buttons links (#6924) 2025-12-20 06:04:59 -08:00
Jamison Lahman
c9c17e19f3 fix(chat): only scroll to bottom on page load (#6923) 2025-12-20 05:01:56 -08:00
Jamison Lahman
40e834e0b8 fix(fe): make "New Session" button a link (#6922) 2025-12-20 04:29:22 -08:00
Jamison Lahman
45bd82d031 fix(style): floating scroll down is z-sticky (#6921) 2025-12-20 04:12:48 -08:00
Yuhong Sun
27c1619c3d feat: hyperparams (#6920) 2025-12-19 20:32:00 -08:00
Yuhong Sun
8cfeb85c43 feat: Deep Research packets streaming done (#6919) 2025-12-19 20:23:02 -08:00
Yuhong Sun
491b550ebc feat: Deep Research more stuff (#6918) 2025-12-19 19:14:22 -08:00
Chris Weaver
1a94dfd113 fix: reasoning width (#6914) 2025-12-20 02:24:46 +00:00
Jamison Lahman
bcd9d7ae41 fix(install): handle non-semver docker-compose versions (#6913) 2025-12-19 18:17:44 -08:00
Vinit
98b4353632 fix: use consistent INSTALL_ROOT instead of pwd for deployment paths (#6680)
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2025-12-20 01:25:51 +00:00
Yuhong Sun
f071b280d4 feat: Deep Research packets (#6912) 2025-12-19 17:18:56 -08:00
acaprau
f7ebaa42fc feat(new vector db interface): Implement update for Vespa (#6790) 2025-12-20 00:56:23 +00:00
Justin Tahara
11737c2069 fix(vespa): Handling Rate Limits (#6878) 2025-12-20 00:52:11 +00:00
Jamison Lahman
1712253e5f fix(fe): Set up provider logos are equal size (#6900) 2025-12-20 00:50:31 +00:00
Yuhong Sun
de8f292fce feat: DR packets cont (#6910) 2025-12-19 16:47:03 -08:00
Jamison Lahman
bbe5058131 chore(mypy): "ragas.metrics" [import-not-found] (#6909) 2025-12-19 16:35:45 -08:00
Yuhong Sun
45fc5e3c97 chore: Tool interface (#6908) 2025-12-19 16:12:21 -08:00
Yuhong Sun
5c976815cc Mypy (#6906) 2025-12-19 15:50:30 -08:00
Justin Tahara
3ea4b6e6cc feat(desktop): Make Desktop App (#6690)
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2025-12-19 15:49:21 -08:00
Yuhong Sun
7b75c0049b chore: minor refactor (#6905) 2025-12-19 15:37:27 -08:00
Yuhong Sun
04bdce55f4 chore: Placement used in more places (#6904) 2025-12-19 15:07:48 -08:00
Yuhong Sun
2446b1898e chore: Test Manager class (#6903) 2025-12-19 14:58:55 -08:00
Yuhong Sun
6f22a2f656 chore: Update Packet structure to make the positioning info an object (#6899) 2025-12-19 14:12:39 -08:00
Justin Tahara
e307a84863 fix(agents): Fix User File Search (#6895) 2025-12-19 21:42:28 +00:00
Chris Weaver
2dd27f25cb feat: allow cmd+click on connector rows in admin panel (#6894) 2025-12-19 21:39:23 +00:00
Nikolas Garza
e402c0e3b4 fix: fix Icon React Compiler error in LLMPopover when searching models (#6891) 2025-12-19 21:16:41 +00:00
Jamison Lahman
2721c8582a chore(pre-commit): run uv-sync in active venv (#6898) 2025-12-19 13:44:00 -08:00
Yuhong Sun
43c8b7a712 feat: Deep Research substep initial (#6896) 2025-12-19 13:30:25 -08:00
acaprau
f473b85acd feat(new vector db interface): Plug in hybrid_retrieval for Vespa (#6752) 2025-12-19 21:03:19 +00:00
Nikolas Garza
02cd84c39a fix(slack): limit thread context fetch to top N messages by relevance (#6861) 2025-12-19 20:26:30 +00:00
Raunak Bhagat
46d17d6c64 fix: Fix header on AgentsNavigationPage (#6873) 2025-12-19 20:15:44 +00:00
Jamison Lahman
10ad536491 chore(mypy): enable warn-unused-ignores (#6893) 2025-12-19 12:00:30 -08:00
acaprau
ccabc1a7a7 feat(new vector db interface): Implement hybrid_retrieval for Vespa (#6750) 2025-12-19 19:32:48 +00:00
Chris Weaver
8e262e4da8 feat: make first runs be high priority (#6871) 2025-12-19 19:05:15 +00:00
Raunak Bhagat
79dea9d901 Revert "refactor: Consolidate chat and agents contexts" (#6872)
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
2025-12-19 11:11:33 -08:00
Yuhong Sun
2f650bbef8 chore: Matplotlib for mypy (#6892) 2025-12-19 10:47:59 -08:00
Jamison Lahman
021e67ca71 chore(pre-commit): "Check lazy imports" prefers active venv (#6890) 2025-12-19 10:04:02 -08:00
roshan
87ae024280 fix icon button z-index (#6889) 2025-12-19 09:52:47 -08:00
SubashMohan
5092429557 Feat/tests GitHub perm sync (#6882) 2025-12-19 17:26:55 +00:00
Nikolas Garza
dc691199f5 fix: persist user-selected connector sources on follow-up messages (#6865) 2025-12-19 17:26:48 +00:00
Jamison Lahman
1662c391f0 fix(fe): chat attachment alignment regression (#6884) 2025-12-19 07:44:34 -08:00
Jamison Lahman
08aefbc115 fix(style): bottom message padding on small screen (#6883) 2025-12-19 06:50:43 -08:00
Jamison Lahman
fb6342daa9 fix(style): chat page is flush left on small screens (#6881) 2025-12-19 06:37:35 -08:00
Jamison Lahman
4e7adcc9ee chore(devtools): pass debug auth token with server-side requests (#6836) 2025-12-19 04:07:53 -08:00
Wenxi
aa4b3d8a24 fix(tests): add research agent tool to tool seeding test (#6877) 2025-12-18 23:09:18 -08:00
Wenxi
f3bc459b6e fix(anthropic): parse chat history tool calls correctly for anthropic models (#6876) 2025-12-18 22:28:34 -08:00
Yuhong Sun
87cab60b01 feat: Deep Research Tool (#6875) 2025-12-18 20:30:36 -08:00
Yuhong Sun
08ab73caf8 fix: Reasoning (#6874) 2025-12-18 19:00:13 -08:00
Justin Tahara
675761c81e fix(users): Clean up Invited Users who are Active (#6857) 2025-12-19 01:43:32 +00:00
Raunak Bhagat
18e15c6da6 refactor: Consolidate chat and agents contexts (#6834) 2025-12-19 01:31:02 +00:00
Yuhong Sun
e1f77e2e17 feat: Deep Research works till the end (#6870) 2025-12-18 17:18:08 -08:00
Justin Tahara
4ef388b2dc fix(tf): Instance Configurability (#6869) 2025-12-18 17:15:05 -08:00
Justin Tahara
031485232b fix(admin): Sidebar Scroll (#6853) 2025-12-19 00:39:27 +00:00
Wenxi
c0debefaf6 fix(bandaid): admin pages bottom padding (#6856) 2025-12-18 16:49:32 -08:00
Nikolas Garza
bbebe5f201 fix: reset actions popover to main menu on open (#6863) 2025-12-19 00:24:01 +00:00
Yuhong Sun
ac9cb22fee feat: deep research continued (#6864) 2025-12-18 15:51:13 -08:00
Wenxi
5e281ce2e6 refactor: unify mimetype and file extensions (#6849) 2025-12-18 23:08:26 +00:00
Chris Weaver
9ea5b7a424 chore: better cloud metrics (#6851) 2025-12-18 22:47:41 +00:00
Justin Tahara
e0b83fad4c fix(web): Avoiding Bot Detection issues (#6845) 2025-12-18 22:43:38 +00:00
Chris Weaver
7191b9010d fix: handle 401s in attachment fetching (#6858)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-18 14:52:05 -08:00
Yuhong Sun
fb3428ed37 feat: deep research more dev stuff (#6854) 2025-12-18 14:09:46 -08:00
Chris Weaver
444ad297da chore: remove fast model (#6841) 2025-12-18 20:38:13 +00:00
roshan
f46df421a7 fix: correct tool response pairing for parallel tool calls in llm_loop (#6846) 2025-12-18 11:46:34 -08:00
Yuhong Sun
98a2e12090 feat: DR continued work (#6848) 2025-12-18 11:36:34 -08:00
Jamison Lahman
36bfa8645e chore(gha): run playwright and jest similar to other tests (#6844) 2025-12-18 18:41:16 +00:00
roshan
56e71d7f6c fix: text view auto focus on button (#6843) 2025-12-18 10:18:43 -08:00
roshan
e0d172615b fix: TextView tooltip z-index (#6842) 2025-12-18 10:11:40 -08:00
Shahar Mazor
bde52b13d4 feat: add file management capabilities (#5623)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Wenxi <wenxi@onyx.app>
2025-12-18 17:40:24 +00:00
SubashMohan
b273d91512 feat(actions): add passthrough auth (#6665) 2025-12-18 10:58:52 +00:00
Jamison Lahman
1fbe76a607 fix(fe): center-align credential update icons (#6837) 2025-12-18 02:43:24 -08:00
Jamison Lahman
6ee7316130 fix(fe): avoid chat message shift on hover (#6835) 2025-12-17 23:44:09 -08:00
Raunak Bhagat
51802f46bb fix: Open sub menu on tool force (#6813) 2025-12-18 05:16:43 +00:00
Jamison Lahman
d430444424 fix(fe): apply z-sticky to ChatInput (#6827) 2025-12-17 21:04:34 -08:00
Yuhong Sun
17fff6c805 fix: reasoning with 5 series (#6833) 2025-12-17 20:16:48 -08:00
Yuhong Sun
a33f6e8416 fix: LLM can hallucinate tool calls (#6832) 2025-12-17 19:45:31 -08:00
Nikolas Garza
d157649069 fix(llm-popover): hide provider group when single provider (#6820) 2025-12-17 19:30:48 -08:00
Wenxi
77bbb9f7a7 fix: decrement litellm and openai broken versions (#6831) 2025-12-17 19:09:06 -08:00
Yuhong Sun
996b5177d9 feat: parallel tool calling (#6779)
Co-authored-by: rohoswagger <rohod04@gmail.com>
2025-12-17 18:59:34 -08:00
acaprau
ab9a3ba970 feat(new vector db interface): Plug in index for Vespa (#6659)
Co-authored-by: Yuhong Sun <yuhongsun96@gmail.com>
2025-12-18 01:42:08 +00:00
Yuhong Sun
87c1f0ab10 feat: more orchestrator stuff (#6826) 2025-12-17 17:12:22 -08:00
acaprau
dcea1d88e5 feat(new vector db interface): Implement index for Vespa (#6658)
Co-authored-by: Yuhong Sun <yuhongsun96@gmail.com>
2025-12-18 00:26:07 +00:00
Nikolas Garza
cc481e20d3 feat: ee license tracking - API Endpoints (#6812) 2025-12-18 00:24:01 +00:00
Nikolas Garza
4d141a8f68 feat: ee license tracking - DB and Cache Operations (#6811) 2025-12-17 23:53:28 +00:00
Wenxi
cb32c81d1b refactor(web search): use refreshed modal, improve ux, add playwright tests (#6791) 2025-12-17 15:24:47 -08:00
Nikolas Garza
64f327fdef feat: ee license tracking - Crypto Verification Utils (#6810) 2025-12-17 22:41:12 +00:00
Yuhong Sun
902d6112c3 feat: Deep Research orchestration start (#6825) 2025-12-17 14:53:25 -08:00
Jamison Lahman
f71e3b9151 chore(devtools): address hatch.version.raw-options review comment (#6823) 2025-12-17 14:52:06 -08:00
Nikolas Garza
dd7e1520c5 feat: ee license tracking - Data Plane Models + Database Schema (#6809) 2025-12-17 21:26:33 +00:00
Jamison Lahman
97553de299 chore(devtools): go onboarding docs + replace hatch-vcs w/ code script (#6819) 2025-12-17 13:27:43 -08:00
Justin Tahara
c80ab8b200 fix(jira): Handle Errors better (#6816) 2025-12-17 21:12:14 +00:00
Jamison Lahman
85c4ddce39 chore(frontend): optionally inject auth cookie into requests (#6794)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-12-17 20:43:36 +00:00
Wenxi
1caa860f8e fix(file upload): properly convert and process files uploaded directly to chat (#6815)
Co-authored-by: _htz_ <100520465+1htz2@users.noreply.github.com>
2025-12-17 12:38:14 -08:00
trial-danswer
7181cc41af feat: adding support for SearXNG as an option for web search. It operates a… (#6653)
Co-authored-by: Weves <chrisweaver101@gmail.com>
2025-12-17 12:27:19 -08:00
Chris Weaver
959b8c320d fix: don't leave redis ports exposed (#6814) 2025-12-17 12:06:10 -08:00
roshan
96fd0432ff fix(tool): default tool descriptions assistant -> agent (#6788)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-17 19:12:17 +00:00
Jamison Lahman
4c73a03f57 chore(fe): followups to 7f79e34aa (#6808) 2025-12-17 18:36:31 +00:00
Raunak Bhagat
e57713e376 fix: Clean up DocumentsSidebar (#6805) 2025-12-17 09:00:14 -08:00
Jamison Lahman
21ea320323 fix(style): standardize projects page layout (#6807) 2025-12-17 01:11:09 -08:00
Jamison Lahman
bac9c48e53 fix(style): "More Agents" page is responsive (#6806) 2025-12-17 01:01:13 -08:00
roshan
7f79e34aa4 fix(projects): add special logic for internal search tool when no connectors available (#6774)
Co-authored-by: Yuhong Sun <yuhongsun96@gmail.com>
2025-12-17 06:45:03 +00:00
Jamison Lahman
f1a81d45a1 chore(fe): popover component uses z-index.css (#6804) 2025-12-16 23:07:31 -08:00
Jamison Lahman
285755a540 chore(pre-commit): fix uv.lock after filelock "upgrade" (#6803) 2025-12-16 22:16:19 -08:00
Justin Tahara
89003ad2d8 chore(tf): Update VPC calling (#6798) 2025-12-17 05:38:50 +00:00
Yuhong Sun
9f93f97259 feat(vectordb): New Document Index Interface (#5700) 2025-12-17 03:28:02 +00:00
Yuhong Sun
f702eebbe7 chore: some readme updates (#6802) 2025-12-16 19:53:23 -08:00
Yuhong Sun
8487e1856b feat: Deep Research first couple stages (#6801) 2025-12-16 19:40:54 -08:00
acaprau
a36445f840 fix(devtools): restart_containers.sh should source venv before running alembic (#6795) 2025-12-17 02:33:21 +00:00
roshan
7f30293b0e chore: improved error handling and display for agent failure types (#6784) 2025-12-17 02:30:24 +00:00
acaprau
619d9528b4 fix(devtools): CLAUDE.md.template makes reference to a venv that does not exist (#6796) 2025-12-17 02:29:47 +00:00
Yuhong Sun
6f83c669e7 feat: enable skip clarification (#6797) 2025-12-16 18:25:15 -08:00
Chris Weaver
c3e5f48cb4 fix: horrible typo in README (#6793) 2025-12-16 17:05:57 -08:00
Justin Tahara
fdf8fe391c fix(ui): Search Settings Active Only (#6657) 2025-12-16 17:00:06 -08:00
Raunak Bhagat
f1d6bb9e02 refactor: Transfer all icons to @opal/icons (#6755) 2025-12-17 00:16:44 +00:00
Justin Tahara
9a64a717dc fix(users): User Groups Race Condition (#6710) 2025-12-17 00:11:07 +00:00
Raunak Bhagat
aa0f475e01 refactor: Add new z-indexing file (#6789) 2025-12-16 23:56:13 +00:00
Nikolas Garza
75238dc353 fix: attach user credentials to assistant requests (#6785) 2025-12-16 23:15:31 +00:00
Nikolas Garza
9e19803244 chore: bump fallback max token limit to 32k (#6787) 2025-12-16 23:09:47 +00:00
dependabot[bot]
5cabd32638 chore(deps): Bump filelock from 3.15.4 to 3.20.1 in /backend/requirements (#6781)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-16 22:36:09 +00:00
Justin Tahara
4ccd88c331 fix(confluence): Skip attachments gracefully (#6769) 2025-12-16 22:34:16 +00:00
Justin Tahara
5a80b98320 feat(cleanup): No Bastion Setup (#6562) 2025-12-16 14:51:05 -08:00
Jamison Lahman
ff109d9f5c chore(style): fix chat page scrollbar after padding change (#6780) 2025-12-16 22:08:12 +00:00
Justin Tahara
4cc276aca9 fix(helm): Add Update Strategy (#6782) 2025-12-16 14:19:20 -08:00
Jamison Lahman
29f0df2c93 fix(style): increase tooltip z-index (#6778) 2025-12-16 21:30:19 +00:00
Nikolas Garza
e2edcf0e0b fix: improve ux for fed slack config error handling (#6699) 2025-12-16 21:23:11 +00:00
Chris Weaver
9396fc547d fix: confluence params (#6773) 2025-12-16 20:53:39 +00:00
Jamison Lahman
c089903aad fix: chat page overflow on small screens (#6723) 2025-12-16 13:03:07 -08:00
Chris Weaver
95471f64e9 fix: main chat page w/ overridden app name (#6775) 2025-12-16 12:56:15 -08:00
Jamison Lahman
13c1619d01 fix(style): center-ish align chat icon on small screen (#6727) 2025-12-16 20:10:09 +00:00
Justin Tahara
ddb5068847 fix(helm): Redis Operator Name (#6770) 2025-12-16 20:07:00 +00:00
Nikolas Garza
81a4f654c2 fix: scrollable container height for popover.tsx (#6772) 2025-12-16 20:04:33 +00:00
Jamison Lahman
9393c56a21 fix: remove unnecessary chat display tabindex (#6722) 2025-12-16 20:00:01 +00:00
Nikolas Garza
1ee96ff99c fix(llm): fix custom provider detection and model filtering (#6766) 2025-12-16 19:14:38 +00:00
Jamison Lahman
6bb00d2c6b chore(gha): run connector tests when uv.lock changes (#6768) 2025-12-16 18:44:06 +00:00
Wenxi
d9cc923c6a fix(hubspot): api client and urllib conflict (#6765) 2025-12-16 18:35:24 +00:00
Evan Lohn
bfbba0f036 chore: gpt 5.2 model naming (#6754) 2025-12-16 10:38:29 -08:00
Wenxi
ccf6911f97 chore: alembic readme nit (#6767) 2025-12-16 10:20:50 -08:00
Wenxi
15c9c2ba8e fix(llms): only save model configs for active/usable LLMs (#6758) 2025-12-16 17:54:47 +00:00
Wenxi
8b3fedf480 fix(web search): clamp google pse max results to api max (#6764) 2025-12-16 09:47:56 -08:00
Jamison Lahman
b8dc0749ee chore(tests): allow REDIS_CLOUD_PYTEST_PASSWORD to be empty (#6249) 2025-12-16 02:53:28 -08:00
Jamison Lahman
d6426458c6 chore(hygiene): rm unused secrets (#6762) 2025-12-16 02:29:56 -08:00
Jamison Lahman
941c4d6a54 chore(gha): use ods openapi in CI (#6761) 2025-12-16 02:04:42 -08:00
Jamison Lahman
653b65da66 chore(devtools): replace check_lazy_imports.py w/ ods check-lazy-imports (#6760) 2025-12-16 01:05:08 -08:00
Jamison Lahman
503e70be02 chore(deployment): fetch-depth: 0 for check-version-tag (#6759) 2025-12-15 23:51:37 -08:00
Nikolas Garza
9c19493160 fix: llm popover scroll (#6757) 2025-12-16 05:24:28 +00:00
Nikolas Garza
933315646b fix(llm): restore default models and filter obsolete/duplicate models from API (#6731) 2025-12-16 03:11:38 +00:00
Nikolas Garza
d2061f8a26 chore(ui): LLM popover improvements (#6742) 2025-12-15 19:36:00 -08:00
Jamison Lahman
6a98f0bf3c chore(devtools): ods openapi to generate schema and client (#6748) 2025-12-15 19:34:12 -08:00
Jamison Lahman
2f4d39d834 chore(devtools): ods check-lazy-imports (#6751) 2025-12-15 18:54:49 -08:00
Raunak Bhagat
40f8bcc6f8 refactor: Clean up message display (#6706) 2025-12-15 18:48:32 -08:00
Wenxi
af9ed73f00 fix(llms): reduce list of openai models (#6753) 2025-12-16 02:28:17 +00:00
acaprau
bf28041f4e feat(agents pagination): FE changes for pagination to the agents admin page (#6516)
Co-authored-by: Andrei <andrei@Andreis-MacBook-Pro.local>
2025-12-16 02:21:43 +00:00
Wenxi
395d5927b7 fix(llms): destructure fetched_model_configurations (#6749) 2025-12-16 01:33:16 +00:00
Jamison Lahman
c96f24e37c chore(deployment): run check-version-tag in debug mode (#6747) 2025-12-15 17:15:51 -08:00
Emerson Gomes
070519f823 Add LLM Session Tracking for Budget Control and Observability (#6564)
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
Co-authored-by: Wenxi Onyx <wenxi@onyx.app>
2025-12-15 23:45:25 +00:00
Jamison Lahman
a7dc1c0f3b chore(gha): remove duplicate check-lazy-imports (#6746) 2025-12-15 15:38:13 -08:00
Jamison Lahman
a947e44926 chore(gha): uv run openapi-generator-cli instead of docker (#6737) 2025-12-15 22:00:39 +00:00
Evan Lohn
a6575b6254 feat: allow updating embedding API key (#6707) 2025-12-15 19:21:05 +00:00
Wenxi
31733a9c7c fix(projects): don't disable internal search when no project files are uploaded (#6732) 2025-12-15 10:53:17 -08:00
dependabot[bot]
5415e2faf1 chore(deps): Bump actions/setup-node from 6.0.0 to 6.1.0 (#6735)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-15 18:34:29 +00:00
dependabot[bot]
749f720dfd chore(deps): Bump actions/checkout from 6.0.0 to 6.0.1 (#6734)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-15 18:32:05 +00:00
Wenxi
eac79cfdf2 chore: disable coda tests temporarily until we fully configure (#6733) 2025-12-15 10:19:28 -08:00
Chris Weaver
e3b1202731 fix: mypy (#6724) 2025-12-15 09:46:02 -08:00
Yuhong Sun
6df13cc2de feat: Handle repeat calls to internal search (#6728) 2025-12-14 23:59:35 -08:00
Yuhong Sun
682f660aa3 feat: Minor touchups on DR (#6726) 2025-12-14 23:00:30 -08:00
Yuhong Sun
c4670ea86c feat: Deep Research Clarification Stage (#6725) 2025-12-14 22:55:39 -08:00
ethan
a6757eb49f feat: add coda connector (#6558)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-12-14 19:49:55 -08:00
Justin Tahara
cd372fb585 fix(asana): Cleaning up Errors (#6689) 2025-12-15 02:07:05 +00:00
Chris Weaver
45fa0d9b32 fix: package-lock.json (#6721) 2025-12-14 17:36:48 -08:00
Chris Weaver
45091f2ee2 fix: add darwin (#6634) 2025-12-14 17:14:16 -08:00
Chris Weaver
43a3cb89b9 fix: env vars for tests (#6720) 2025-12-14 16:37:06 -08:00
Chris Weaver
9428eaed8d fix: copying markdown tables into spreadsheets (#6717) 2025-12-14 23:01:07 +00:00
Chris Weaver
dd29d989ff chore: ignore plans dir (#6718) 2025-12-14 14:50:21 -08:00
Chris Weaver
f44daa2116 fix: remove bottom logo (#6716) 2025-12-14 22:09:27 +00:00
Justin Tahara
212cbcb683 fix(redis): Adding missing TTLs (#6708) 2025-12-13 02:15:09 +00:00
Justin Tahara
aaad573c3f feat(helm): Add Default Redis Configs (#6709) 2025-12-13 02:10:27 +00:00
Jamison Lahman
e1325e84ae chore(pre-commit): test selection w/ merge-group & postsubmits (#6705) 2025-12-13 00:08:39 +00:00
Evan Lohn
e759cdd4ab fix: mcp server name and desc updates (#6692) 2025-12-12 07:04:46 +00:00
Yuhong Sun
2ed6607e10 chore: A few frontend cleanups (#6700) 2025-12-11 19:47:51 -08:00
dependabot[bot]
ba5b9cf395 chore(deps): Bump next from 16.0.7 to 16.0.10 in /web (#6695)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-11 19:18:37 -08:00
Yuhong Sun
bab23f62b8 fix: Citation on replay bug (#6697) 2025-12-11 19:17:06 -08:00
Yuhong Sun
d72e2e4081 fix: Search tool reasoning level (#6696) 2025-12-11 18:28:01 -08:00
Raunak Bhagat
4ed2d08336 fix: Fix custom-agent-avatar-invocation (#6644) 2025-12-11 16:20:39 -08:00
Yuhong Sun
24a0ceee18 chore: fix llm interface (#6691) 2025-12-11 15:44:44 -08:00
Jamison Lahman
d8fba38780 chore(gha): replace pre-commit with prek (#6684)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-10 17:23:08 -08:00
Justin Tahara
5f358a1e20 fix(users): Add Race Condition Handling (#6639) 2025-12-09 07:43:47 -10:00
Evan Lohn
00b0c23e13 fix(web): handle br encoding of sitemap (#6647) 2025-12-09 04:03:56 +00:00
Chris Weaver
2103ed9e81 fix: tag race condition (#6674) 2025-12-08 17:01:07 -10:00
Chris Weaver
2c5ab72312 chore: only pause after repeated failure on cloud (#6673) 2025-12-08 16:44:13 -10:00
roshan
672d1ca8fa fix: toast for non-admin onboarding flow (#6651) 2025-12-07 00:48:18 +00:00
Jamison Lahman
a418de4287 chore(devtools): upgrade onyx-devtools 0.0.3->0.1.0 (#6663) 2025-12-06 10:48:46 -08:00
Jamison Lahman
349aba6c02 chore(devtools): upgrade onyx-devtools 0.0.2->0.0.3 (#6662) 2025-12-06 10:10:02 -08:00
Jamison Lahman
18a7bdc292 chore(devtools): ods db operations (#6661)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-06 09:53:25 -08:00
Raunak Bhagat
c658fd4c7d refactor: Modal cleanup (#6614) 2025-12-05 19:40:30 -08:00
Yuhong Sun
f1e87dda5b chore: LLM step to give packets that can be modified as needed (#6641) 2025-12-05 17:12:55 -08:00
roshan
b93edb3e89 feat: standardize placeholders in default system prompt (#6643) 2025-12-06 00:13:36 +00:00
Jamison Lahman
dc4e76bd64 chore(vscode): migrate install python reqs to uv (#6654) 2025-12-05 16:15:58 -08:00
Justin Tahara
c4242ad17a fix(ui): Normalize Emails (#6636) 2025-12-05 23:26:06 +00:00
roshan
a4dee62660 fix: add reciprocal rank score for web search docs based on ordering (#6625) 2025-12-05 22:53:07 +00:00
Nikolas Garza
2d2c76ec7b feat(llm): fetch dynamic provider models directly from source APIs (#6619) 2025-12-05 22:22:56 +00:00
dependabot[bot]
d80025138d chore(deps): Bump urllib3 from 2.5.0 to 2.6.0 in /backend/requirements (#6638)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-05 21:28:41 +00:00
Evan Lohn
90ec595936 fix: hitting endpoint with undefined persona (#6637) 2025-12-05 20:33:00 +00:00
Jamison Lahman
f30e88a61b chore(dev): make "dev" an optional-dependency (#6640) 2025-12-05 10:51:23 -08:00
roshan
9c04e9269f feat: add standard for default tools -> make openURL a default tool (#6581)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Wenxi Onyx <wenxi@onyx.app>
Co-authored-by: Raunak Bhagat <r@rabh.io>
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
Co-authored-by: Yuhong Sun <yuhongsun96@gmail.com>
Co-authored-by: SubashMohan <subashmohan75@gmail.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: Justin Tahara <105671973+justin-tahara@users.noreply.github.com>
Co-authored-by: Roshan Desai <rohoswagger@rohoswagger-onyx.local>
Co-authored-by: acaprau <48705707+acaprau@users.noreply.github.com>
Co-authored-by: Andrei <andrei@Andreis-MacBook-Pro.local>
2025-12-05 18:02:50 +00:00
Jamison Lahman
8c65fcd193 chore(devtools): simplify compile requirements (#6630) 2025-12-05 10:29:07 -08:00
Jamison Lahman
f42e3eb823 chore(docs): prefer uv over pip (#6628) 2025-12-05 10:28:03 -08:00
Yuhong Sun
9b76ed085c feat: deep research prompts (#6635) 2025-12-05 10:15:37 -08:00
Chris Weaver
0eb4d039ae fix: only re-index active connectors (#6631) 2025-12-05 17:52:44 +00:00
Justin Tahara
3c0b66a174 fix(sharepoint): Shared link fix (#6607) 2025-12-05 17:35:10 +00:00
Chris Weaver
895a8e774e fix: add default-groups=all (#6632) 2025-12-05 17:30:06 +00:00
SubashMohan
c14ea4dbb9 refactor(actions): improved flexibility of actioncard and few ui changes (#6597) 2025-12-05 07:06:33 +00:00
Wenxi
80b1e07586 fix(llms): sanitize vision providers (#6624) 2025-12-05 03:03:21 +00:00
Nikolas Garza
59b243d585 chore(slack): add better typing + move some logs from debug to info (#6613)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-05 03:02:28 +00:00
Jamison Lahman
d4ae3d1cb5 chore(devtools): upgrade onyx-devtools 0.0.1->0.0.2 (#6623) 2025-12-04 18:36:21 -08:00
Jamison Lahman
ed0a86c681 chore(deps): make backend/ a uv workspace (#6460) 2025-12-04 18:30:04 -08:00
dependabot[bot]
e825e5732f chore(deps): Bump aiohttp from 3.12.14 to 3.13.2 in /backend (#6406)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-04 18:28:05 -08:00
dependabot[bot]
a93854ae70 chore(deps): Bump sendgrid from 6.11.0 to 6.12.5 in /backend (#6408)
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2025-12-05 02:04:24 +00:00
Yuhong Sun
fc8767a04f chore: delete unused code (#6622) 2025-12-04 17:50:17 -08:00
Jamison Lahman
6c231e7ad1 chore(devtools): QOL improvements for cherry-pick script (#6620) 2025-12-04 17:27:27 -08:00
Wenxi
bac751d4a9 feat(helm): add mcp server (#6586) 2025-12-05 00:57:05 +00:00
Jessica Singh
3e0f386d5b fix(web search ui): make font sizes consistent (#6606) 2025-12-05 00:09:21 +00:00
Chris Weaver
edb6957268 fix: litellm w/ azure reasoning mode (#6612) 2025-12-04 23:49:55 +00:00
Jamison Lahman
0348d11fb2 chore(mypy): type-check tools/ (#6615)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-12-04 23:44:34 +00:00
Wenxi
fe514eada0 fix(docs): update admin docs links (#6611) 2025-12-04 23:05:09 +00:00
acaprau
e7672b89bb feat(agents admin page): Make display priority adjustments PATCH instead of PUT, allowing granular edits + small cleanups (#6565)
Co-authored-by: Andrei <andrei@Andreis-MacBook-Pro.local>
2025-12-04 22:27:04 +00:00
Nikolas Garza
c1494660e1 fix: slack bot fixes for channel filtering, spammy logs, and fed slack searching (#6588) 2025-12-04 21:35:48 +00:00
roshan
7ee3df6b92 fix: frontend continues shimmering when tool call stopped partway (#6544)
Co-authored-by: Roshan Desai <rohoswagger@rohoswagger-onyx.local>
2025-12-04 20:46:46 +00:00
Wenxi
54afed0d23 fix(api): limit ingestion api to curators and admins (#6608) 2025-12-04 20:43:49 +00:00
Justin Tahara
1c776fcc73 fix(persona): Fix sorting logic (#6602) 2025-12-04 11:30:32 -08:00
Jamison Lahman
340ddce294 chore(mypy): un-ignore braintrust missing import (#6603) 2025-12-04 11:30:05 -08:00
Nikolas Garza
e166c1b095 chore: bump react version for sec vuln (#6600) 2025-12-04 17:04:30 +00:00
SubashMohan
84be68ef7c refactor(MCP): mcp backend and schema (#6475) 2025-12-04 08:24:44 +00:00
Yuhong Sun
90e9af82bf chore: Cleanup chat turn and prompts (#6589) 2025-12-03 23:46:06 -08:00
Raunak Bhagat
7f36fb2a4c refactor: Refresh "Agent Icon" to the new "Agent Avatar" standard (#6509) 2025-12-03 21:18:54 -08:00
Nikolas Garza
307464a736 feat: surface better model names in the ui/chat bar (#6514) 2025-12-04 04:51:38 +00:00
Raunak Bhagat
1d5c8bdb20 refactor: Icon cleanup (#6573) 2025-12-04 04:16:40 +00:00
dependabot[bot]
6de626ecc3 chore(deps): Bump next from 16.0.1 to 16.0.7 in /web (#6563)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Wenxi Onyx <wenxi@onyx.app>
2025-12-03 19:52:05 -08:00
roshan
6663c81aa6 fix: use absolute path for icon imports (#6585) 2025-12-04 02:29:10 +00:00
dependabot[bot]
35ca94c17e chore(deps): Bump werkzeug from 3.1.1 to 3.1.4 in /backend/requirements (#6521)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2025-12-03 18:08:21 -08:00
Jamison Lahman
431f652be8 chore(pre-commit): upgrade some hooks to latest (#6583) 2025-12-03 18:07:00 -08:00
Yuhong Sun
6535d85ceb chore: Prompt builder update (#6582) 2025-12-03 17:30:17 -08:00
Chris Weaver
3a349d6ab3 fix: jira attribute error (#6584) 2025-12-03 17:26:21 -08:00
Chris Weaver
ddae686dc7 fix: workaround for bugged Confluence API (#6311) 2025-12-04 01:03:51 +00:00
roshan
0e42891cbf fix: install node dependencies for quality-checks pre-commit hook (#6580) 2025-12-04 00:38:43 +00:00
Chris Weaver
823b28b4a7 fix: improve jira perm sync handling (#6575) 2025-12-03 23:45:34 +00:00
Jamison Lahman
828036ceb8 chore(devtools): introduce the Onyx Developer Script, ods (#6559) 2025-12-03 23:45:09 +00:00
Wenxi
2a40ceab26 refactor(API): replace redundant api key dep from ingestion endpoints (#6568) 2025-12-03 23:39:27 +00:00
Yuhong Sun
f03f2bff78 chore: continue cleanup of dead files (#6579) 2025-12-03 15:46:44 -08:00
Raunak Bhagat
f9a548fbe9 refactor: Input styles (#6571) 2025-12-03 22:31:45 +00:00
Wenxi
8b45f911ff refactor(openapi generation): generate python client with openapi generation script for one click integration test setup (#6574) 2025-12-03 21:47:20 +00:00
Yuhong Sun
ae64ded7bb Removing LangGraph code (#6578) 2025-12-03 14:07:18 -08:00
Jamison Lahman
7287e3490d chore(pre-commit): disable mypy hook (#6576) 2025-12-03 13:57:00 -08:00
Yuhong Sun
7681c11585 chore: Removing Retrievaldoc (#6577) 2025-12-03 13:49:22 -08:00
Richard Guan
365e31a7f3 chore(tool): call output fix (#6572) 2025-12-03 21:28:06 +00:00
Nikolas Garza
dd33886946 chore: add fe type checking to pre-commit hooks (#6569) 2025-12-03 20:29:08 +00:00
Raunak Bhagat
6cdd5b7d3e fix: Fix failing type checks in message feedback tests (#6567)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-03 12:47:45 -08:00
Yuhong Sun
7b6ae2b72a chore: Cleanup PreviousMessage class (#6570) 2025-12-03 12:37:02 -08:00
Yuhong Sun
629502ef6a fix: Basic Reenabling Code Interpreter (#6566) 2025-12-03 11:50:11 -08:00
Yuhong Sun
927e8addb5 fix: Reasoning Block Linebreaks (#6552) 2025-12-03 18:28:26 +00:00
Evan Lohn
14712af431 fix: expand special casing around sharepoint shared drives (#6539) 2025-12-03 18:12:19 +00:00
Richard Guan
4b38b91674 chore(framework): cleanup (#6538) 2025-12-03 18:01:11 +00:00
Emerson Gomes
508c248032 fix: prevent heartbeat timeout state pollution in validation loop (#5782)
Co-authored-by: Claude <noreply@anthropic.com>
2025-12-03 10:08:53 -08:00
Emerson Gomes
45db59eab1 db: remove duplicate chunk_stats deletion in delete_documents_complete__no_commit (#5792) 2025-12-03 10:02:57 -08:00
Yuhong Sun
5a14055a29 feat: Some UI enhancements for tools (#6550)
Co-authored-by: SubashMohan <subashmohan75@gmail.com>
2025-12-03 16:42:49 +00:00
Nikolas Garza
a698f01cab feat: add model metadata enrichments for LiteLLM (#6541)
Co-authored-by: Justin Tahara <105671973+justin-tahara@users.noreply.github.com>
2025-12-03 06:18:02 +00:00
Jamison Lahman
4e4bf197cf chore(gha): docker cache from HEAD (#6549) 2025-12-03 03:57:07 +00:00
dependabot[bot]
517b0d1e70 chore(deps): Bump mcp from 1.19.0 to 1.23.0 in /backend/requirements (#6526)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2025-12-03 02:55:16 +00:00
Yuhong Sun
7b2b163d4e chore: Removes the translation layer for the new backend packets (#6546) 2025-12-03 02:40:55 +00:00
Jamison Lahman
29b28c8352 chore(deployment): run tests on tag push (#6543) 2025-12-03 01:49:21 +00:00
Jamison Lahman
83b624b658 chore(gha): /uv pip install/uv run --with/ (#6545) 2025-12-02 17:48:22 -08:00
Jamison Lahman
d3cd68014a chore(gha): persist docker cache intra-PR builds (#6524) 2025-12-03 01:14:10 +00:00
Jamison Lahman
64d9fd97ec chore(zizmor): upgrade and track version via pyproject (#6542) 2025-12-02 17:12:10 -08:00
Jamison Lahman
7a9e2ebec6 chore(deployment): check if tagged correctly (#6537) 2025-12-03 00:39:57 +00:00
Richard Guan
51a69d7e55 chore(tracing): add tracing to new backend (#6532) 2025-12-02 22:38:23 +00:00
Nikolas Garza
f19362ce27 fix: eager load persona in slack channel config (#6535) 2025-12-02 22:13:24 +00:00
Justin Tahara
0c3330c105 chore(test): Playwright for User Feedback (#6534) 2025-12-02 21:14:12 +00:00
きわみざむらい
81cb0f2518 fix: Add proper DISABLE_MODEL_SERVER environment variable support (#6468)
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2025-12-02 21:11:09 +00:00
Chris Weaver
beb4e619e7 feat: move to client side rendering + incremental loading (#6464)
Co-authored-by: Claude <noreply@anthropic.com>
2025-12-02 12:30:43 -08:00
Yuhong Sun
0fa1d5b0ca Update search_tool.py description (#6531) 2025-12-02 11:08:36 -08:00
Yuhong Sun
1e30882222 Update README.md (#6530) 2025-12-02 11:07:19 -08:00
Yuhong Sun
42996a63fe README for DB Models (#6529) 2025-12-02 11:00:48 -08:00
Yuhong Sun
4a38068192 Knowledge for future (#6528) 2025-12-02 10:48:49 -08:00
Emerson Gomes
97f66b68c1 Harden markdown link protocol handling (#6517) 2025-12-02 17:49:44 +00:00
Wenxi
aeafd83cd1 fix(migration): new chat history downgrade (#6527) 2025-12-02 17:47:33 +00:00
Justin Tahara
0ba9a873e9 feat(pginto): Support IAM Auth (#6520) 2025-12-01 22:40:09 -06:00
Justin Tahara
b72bac993f feat(helm): PGInto Workflow (#6519) 2025-12-01 21:54:06 -06:00
Yuhong Sun
9572c63089 Fix Alembic Downgrade just in case (#6515) 2025-12-01 18:01:38 -08:00
Nikolas Garza
c4505cdb06 chore: remove fed slack entities button on doc set edit page (#6385) 2025-12-02 01:26:30 +00:00
Jamison Lahman
9055691c38 chore(docker): explicitly default env to empty string (#6511) 2025-12-02 01:25:39 +00:00
Raunak Bhagat
1afa7b0689 fix: Edit separator (#6513) 2025-12-01 17:15:23 -08:00
Evan Lohn
72c96a502e feat: mcp pass through oauth (#6469) 2025-12-02 00:35:08 +00:00
acaprau
093b399472 feat(persona): Add GET paginated personas to REST API (#6448)
Co-authored-by: Andrei <andrei@Andreis-MacBook-Pro.local>
2025-12-02 00:14:47 +00:00
Jamison Lahman
d89dd3c76b chore(gha): remove duplicate python checks (#6510) 2025-12-01 16:19:15 -08:00
dependabot[bot]
a24d0aa26d chore(deps): Bump actions/upload-artifact from 4 to 5 (#6502)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2025-12-01 23:37:28 +00:00
dependabot[bot]
5e581c2c60 chore(deps): Bump actions/setup-python from 6.0.0 to 6.1.0 (#6501)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-12-01 23:36:43 +00:00
dependabot[bot]
17ea20ef5c chore(deps): Bump astral-sh/setup-uv from 3.2.4 to 7.1.4 (#6503)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-12-01 23:32:06 +00:00
Justin Tahara
0b8207ef4c fix(feedback): API Endpoint fix (#6500) 2025-12-01 17:28:32 -06:00
Yuhong Sun
c26da8dc75 feat: Updated Processing for Context to the LLM (#6485)
Co-authored-by: Vega <33913017+weijia619@users.noreply.github.com>
2025-12-01 14:41:13 -08:00
Richard Guan
f1f3cd392c chore(fix): langfuse tracing (#6505) 2025-12-01 21:54:53 +00:00
Justin Tahara
36e31e9ffa fix(helm): Code Interpreter Chart release (#6506) 2025-12-01 15:41:20 -06:00
Chris Weaver
f57c12cdaa feat: add code-interpreter to helm chart (#6489) 2025-12-01 13:37:56 -08:00
brano-rohlik
514c76c3ea feat(vertex-ai): add Gemini 3 Pro and Claude Opus 4.5 models (#6481) 2025-12-01 09:37:52 -08:00
Chris Weaver
921e82b839 fix: code interpreter UI (#6498) 2025-12-01 09:36:39 -08:00
Chris Weaver
6b7c6c9a37 fix: icon coloring in Renderer (#6491) 2025-11-30 18:53:26 -08:00
SubashMohan
53ae1b598b fix(WebSearch): adjust Separator styling for improved layout consistency (#6487) 2025-11-30 11:45:37 +05:30
SubashMohan
83e756bf05 fix(Projects): file ordering in project panel (#6334) 2025-11-29 19:40:01 +00:00
Jamison Lahman
19b485cffd chore(deps): upgrade supervisor 4.2.5->4.3.0 (#6466) 2025-11-26 18:38:13 -05:00
Jamison Lahman
f5a99053ac chore(deps): upgrade dropbox 11.36.2->12.0.2 (#6467) 2025-11-26 18:10:13 -05:00
Chris Weaver
91f0377dd5 chore: enable code interpreter tests (#6404) 2025-11-26 14:55:07 -08:00
Jamison Lahman
25522dfbb8 chore(gha): setup-python accepts requirements to install (#6463) 2025-11-26 17:27:30 -05:00
Jamison Lahman
b0e124ec89 chore(deps): upgrade pytest-asyncio 0.22.0->1.3.0 (#6461) 2025-11-26 16:39:52 -05:00
Raunak Bhagat
b699a65384 refactor: Edit Modal.Header to be more concise and adherent to mocks (#6452) 2025-11-26 13:17:51 -08:00
Jamison Lahman
cc82d6e506 chore(deps): remove non-dev packages (#6462) 2025-11-26 16:17:01 -05:00
Jamison Lahman
8a6db7474d chore(gha): assert GHA jobs have timeouts (#6455) 2025-11-26 18:14:23 +00:00
Jamison Lahman
fd9aea212b chore(dev): run mypy and uv-sync on pre-commit (#6454) 2025-11-26 17:24:28 +00:00
acaprau
4aed383e49 chore(logs): When final doc for context pruning gets pruned, that probably doesn't need to be an error (#6451)
Co-authored-by: Andrei <andrei@Andreis-MacBook-Pro.local>
2025-11-25 22:41:47 -08:00
Justin Tahara
d0ce313b1a fix(google): Fix embedding scopes (#6450) 2025-11-25 22:10:42 -06:00
Jamison Lahman
4d32c9f5e0 chore(python): use uv to manage and compile requirements (#6291) 2025-11-26 03:01:52 +00:00
Justin Tahara
158fe31b71 fix(azure): Normalizing Azure Target URIs (#6443) 2025-11-26 00:19:22 +00:00
Raunak Bhagat
97cddc1dd4 fix: Line item cleanup (#6444)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-25 16:11:16 -08:00
Chris Weaver
c520a4ec17 fix: spinner during CSV load (#6441)
Co-authored-by: SubashMohan <subashmohan75@gmail.com>
2025-11-25 22:01:55 +00:00
Raunak Bhagat
9c1f8cc98c refactor: Line item cleanup (#6434) 2025-11-25 13:51:17 -08:00
Justin Tahara
58ba8cc68a chore(langfuse): Remove Env Var (#6440) 2025-11-25 15:32:15 -06:00
Evan Lohn
a307b0d366 fix: use raw mcp url (#6432) 2025-11-25 21:10:03 +00:00
Wenxi
e34f58e994 refactor(tests): use PATManager for tests that use PATs (#6438) 2025-11-25 15:39:49 -05:00
Justin Tahara
7f6dd2dc93 feat(api): Add Users to Group Endpoint (#6427) 2025-11-25 20:12:20 +00:00
Wenxi
ef3daa58b3 feat(claude): update claude models (#6433) 2025-11-25 14:24:47 -05:00
Raunak Bhagat
972c33046e fix: Responsiveness flash fix (#6422) 2025-11-25 10:27:41 -08:00
Jamison Lahman
802248c4e4 chore(python): update stale external type stubs (#6429) 2025-11-25 17:46:12 +00:00
Justin Tahara
f359c44183 fix(gemini): Migrate from Vertex AI to Gemini (#6424) 2025-11-25 17:16:55 +00:00
Jamison Lahman
bab2220091 chore(db): onyx_list_tenants.py --csv -n [count] (#6425) 2025-11-25 17:01:35 +00:00
Wenxi
bc35354ced feat(MCP): basic Onyx MCP server with search tools and indexed sources resource (#6309) 2025-11-25 02:30:55 +00:00
Jamison Lahman
742dd23fdd chore(deps): upgrade psutil: 5.9.8->7.1.3 (#6300) 2025-11-25 01:23:31 +00:00
Richard Guan
ea5690db81 chore(hotfix): tool choice bug (#6417) 2025-11-24 14:41:57 -08:00
Justin Tahara
853ca635d2 feat(helm): Add Deployment Labels (#6421) 2025-11-24 16:25:15 -06:00
Wenxi
c4d2fc9492 feat(API): make EE query APIs CE (#6411) 2025-11-24 21:15:52 +00:00
Justin Tahara
7aa12c0a36 feat(claude): Adding Opus 4.5 (#6415) 2025-11-24 15:27:58 -06:00
Wenxi
e74cf14401 chore(docker): make container startup checks case-insensitive (#6412) 2025-11-24 20:48:23 +00:00
Justin Tahara
75c42ffa9d feat(claude): Add Gov Cloud Names (#6414) 2025-11-24 20:40:23 +00:00
dependabot[bot]
d6fbb7affd chore(deps): Bump actions/checkout from 4.3.0 to 6.0.0 (#6410)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
2025-11-24 18:57:28 +00:00
dependabot[bot]
75cee70bbb chore(deps): Bump actions/stale from 9.1.0 to 10.1.0 (#6409)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-24 18:26:02 +00:00
Jamison Lahman
1c8b819aa2 chore(gha): fix zizmor for .github/actions/ (#6399) 2025-11-24 17:11:43 +00:00
Nikolas Garza
b7cf33a4cc fix: prevent slack bot from always using fed slack connector (#6400) 2025-11-24 02:37:28 +00:00
Nikolas Garza
b06459f674 chore: unify checkbox implementation across the frontend (#6345) 2025-11-23 21:18:56 +00:00
Raunak Bhagat
920db6b3c2 fix: Input select state-rendering fix (#6402) 2025-11-23 12:32:33 -08:00
Raunak Bhagat
b7e4b65a74 refactor: Consolidate hover, active, and focus styles (#6397) 2025-11-23 19:09:40 +00:00
Raunak Bhagat
e648e0f725 fix: Fix non-persistence issue with input-select (#6398) 2025-11-23 10:24:12 -08:00
SubashMohan
c8a3368fce fix(projects): Add Create New Project option in chat move menu (#6353) 2025-11-23 10:17:05 +00:00
SubashMohan
f74b02ad9e feat(UserFilesModal): add file count divider and conditional rendering (#6379) 2025-11-23 15:47:59 +05:30
SubashMohan
65b59c4a73 feat(projects): Improved Folder Icon Animation (#6354) 2025-11-23 09:46:25 +00:00
Raunak Bhagat
b74bcd0efc refactor: Separator cleanup (#6396) 2025-11-22 20:39:36 -08:00
Raunak Bhagat
8c133b3853 refactor: Input select (#6290)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-11-23 02:00:20 +00:00
Raunak Bhagat
67554cef96 refactor: Input text area cleanup (#6395) 2025-11-22 17:37:54 -08:00
Jamison Lahman
07e03f3677 fix(docker): chown /app directory (#6390) 2025-11-22 07:29:58 +00:00
Chris Weaver
33fee46d71 feat: code interpreter (python) (#6136) 2025-11-22 07:19:25 +00:00
Jamison Lahman
72f5e3d38f chore(dev): docker-compose.multitenant-dev respects HOST_PORT (#6388) 2025-11-22 07:11:43 +00:00
Jamison Lahman
f89380ad87 chore(gha): increase playwright runner volume size: 40->50gb (#6392) 2025-11-21 21:48:35 -08:00
Raunak Bhagat
e6f00098f2 refactor: (light) Refresh of the "Shared Chat Display" (#6387)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-21 20:58:46 -08:00
Evan Lohn
9100afa594 feat: Allow attaching mcp tools to default assistant (#6343) 2025-11-21 17:29:34 -08:00
Raunak Bhagat
93d2febf2a fix: Update buttons and stylings for new-team-modal (#6384) 2025-11-21 21:26:51 +00:00
Raunak Bhagat
693286411a feat: Responsiveness (#6383)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-21 21:01:27 +00:00
Justin Tahara
01a3064ca3 fix(testrail): Linting (#6382) 2025-11-21 10:50:08 -08:00
sashank-rayapudi-ai
09a80265ee feat(testrail): Implement a read-only custom connector for Testrail (#6084) 2025-11-21 10:16:40 -08:00
Wenxi
2a77481c1e test(onboarding): add playwright test for onboarding flow (#6376) 2025-11-21 12:23:37 -05:00
Jamison Lahman
6838487689 chore(deployments): separate flag for model-server, enable nightly (#6377) 2025-11-21 04:29:41 +00:00
Jamison Lahman
1713c24080 chore(docker): break up model-server model layers (#6370) 2025-11-21 03:47:47 +00:00
Chris Weaver
73b3a2525a fix: chat switching (#6374) 2025-11-20 18:32:54 -08:00
Wenxi
59738d9243 feat: cross link cookies (#6371) 2025-11-21 02:03:52 +00:00
Wenxi
c0ff9c623b feat(APIs): web search apis and indexed sources api (#6363) 2025-11-20 20:23:06 -05:00
Jessica Singh
c03979209a fix(ui): icon alignment + color (#6373) 2025-11-20 17:16:10 -08:00
Justin Tahara
a0b7639693 fix(connectors): Normalizing Onyx Metadata Connector Type (#6315) 2025-11-21 00:46:45 +00:00
Raunak Bhagat
e3ede3c186 fix: Sidebar fixes (#6358) 2025-11-21 00:35:31 +00:00
Jessica Singh
092dbebdf2 fix(migration): exa env var into db (#6366) 2025-11-21 00:12:09 +00:00
Justin Tahara
838e2fe924 chore(bedrock): Add better logging (#6368) 2025-11-20 23:38:19 +00:00
Chris Weaver
48e2bfa3eb chore: prevent sentry spam on fake issue (#6369) 2025-11-20 22:47:30 +00:00
Jamison Lahman
2a004ad257 chore(deployments): fix nightly tagging + add alerts & workflow_dispatch (#6367) 2025-11-20 21:55:24 +00:00
Wenxi
416c7fd75e chore(WebSearch): remove old web search env vars and update tooltip (#6365)
Co-authored-by: justin-tahara <justintahara@gmail.com>
2025-11-20 21:09:24 +00:00
Justin Tahara
a4372b461f feat(helm): Add Tolerations and Affinity (#6362) 2025-11-20 20:25:20 +00:00
mristau-alltrails
7eb13db6d9 SECURITY FIX: CVE-2023-38545 and CVE-2023-38546 (#6356) 2025-11-20 20:11:35 +00:00
Justin Tahara
c0075d5f59 fix(docprocessing): Pause Failing Connectors (#6350) 2025-11-20 19:14:56 +00:00
Wenxi
475a3afe56 fix(connector): handle hubspot ticket with None content (#6357) 2025-11-20 13:35:46 -05:00
SubashMohan
bf5b8e7bae fix(Project): project pending issues (#6099) 2025-11-20 17:53:08 +00:00
Jamison Lahman
4ff28c897b chore(dev): nginx container port 80 respects HOST_PORT_80 (#6338) 2025-11-20 17:48:10 +00:00
SubashMohan
ec9e9be42e Fix/user file modal (#6333) 2025-11-20 16:41:38 +00:00
Nikolas Garza
af5fa8fe54 fix: web search and image generation tool playwright test failures (#6347) 2025-11-20 07:13:05 +00:00
Jamison Lahman
03a9e9e068 chore(gha): playwright browser cache is arch-aware (#6351) 2025-11-20 03:28:53 +00:00
Richard Guan
ad81c3f9eb chore(tracing): updates (#6322) 2025-11-20 00:58:00 +00:00
Jamison Lahman
62129f4ab9 chore(gha): require playwright passing on merge (#6346) 2025-11-20 00:55:19 +00:00
Jamison Lahman
b30d38c747 chore(gha): fix zizmor issues (#6344) 2025-11-19 23:57:34 +00:00
Nikolas Garza
0596b57501 fix: featured assistant typo (#6341) 2025-11-19 14:44:54 -08:00
Jamison Lahman
482b2c4204 chore(gha): run uvx zizmor --fix=all (#6342) 2025-11-19 14:26:45 -08:00
Jamison Lahman
df155835b1 chore(docker): docker bake UX (#6339)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-19 14:19:53 -08:00
Richard Guan
fd0762a1ee chore(agent): framework query improvements (#6297) 2025-11-19 21:43:33 +00:00
Jamison Lahman
bd41618dd9 chore(deployments): correctly set --debug for docker build (#6337) 2025-11-19 11:04:15 -08:00
Justin Tahara
5a7c6312af feat(jwt): JIT provision from token (#6252) 2025-11-19 10:06:20 -08:00
Raunak Bhagat
a477508bd7 fix: Fix header flashing (#6331)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-19 09:27:49 -08:00
Raunak Bhagat
8ac34a8433 refactor: input type in fixes (#6335) 2025-11-19 08:31:39 -08:00
Raunak Bhagat
2c51466bc3 fix: Some minor touch-ups for the new modal (#6332) 2025-11-19 14:03:15 +00:00
Raunak Bhagat
62966bd172 fix: Switch fix (#6279) 2025-11-19 01:40:40 -08:00
Jamison Lahman
a8d4482b59 chore(deployments): set provenance=false and flag debug (#6330) 2025-11-18 22:26:53 -08:00
Jamison Lahman
dd42a45008 chore(deployments): flag to disable docker caching (#6328) 2025-11-19 04:07:07 +00:00
Jessica Singh
a368556282 feat(web search providers): adding support and changing env var approach (#6273) 2025-11-19 02:49:54 +00:00
Evan Lohn
679d1a5ef6 fix: openpyxl bug (#6317) 2025-11-19 00:59:46 +00:00
Nikolas Garza
12e49cd661 fix: slack config forms + scope issues (#6318) 2025-11-18 16:49:16 -08:00
Jamison Lahman
1859a0ad79 chore(gha): run zizmor (#6326) 2025-11-18 16:10:07 -08:00
Jamison Lahman
9199d146be fix(tests): test_partial_match_in_model_map AssertionError (#6321) 2025-11-18 16:06:01 -08:00
Jamison Lahman
9c1208ffd6 chore(deployments): separate builds by platform (#6314) 2025-11-18 14:49:23 -08:00
Jamison Lahman
c3387e33eb chore(deployments): remove DEPLOYMENT from cache path (#6319) 2025-11-18 14:16:09 -08:00
Jamison Lahman
c37f633a37 chore(deployments): remove driver-opts from model-server build (#6313) 2025-11-18 10:45:24 -08:00
Justin Tahara
6677e12e55 chore(vespa): Update version (#6299) 2025-11-18 09:50:38 -08:00
SubashMohan
7175b93a4c enhancement(onboarding): Replacing Select input with combobox (#6048) 2025-11-18 17:40:57 +05:30
SubashMohan
fbbcd9646d fix(onboarding): Header animated icon (#6098) 2025-11-18 12:24:42 +05:30
SubashMohan
7afc9d417c feat(modal): Implement a new modal component (#6289) 2025-11-17 23:37:35 +00:00
Wenxi
a905f2d3fb chore: pydantic v2 model configs (#6302) 2025-11-17 23:24:41 +00:00
Jamison Lahman
3d1994a515 chore(deployments): run trivy scanners separate from build and push (#6301) 2025-11-17 23:16:16 +00:00
dependabot[bot]
7f507c7be0 chore(deps): Bump actions/setup-python from 4.9.1 to 6.0.0 (#6296)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-17 20:38:08 +00:00
Jamison Lahman
c0e418d63e chore(deployment): notifications on build failures (#6298) 2025-11-17 20:20:21 +00:00
dependabot[bot]
db49e14f12 chore(deps): Bump docker/login-action from 1.14.1 to 3.6.0 (#6295)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-17 20:19:48 +00:00
dependabot[bot]
e87d6403e8 chore(deps): Bump helm/kind-action from 1.12.0 to 1.13.0 (#6294)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-17 19:48:13 +00:00
Richard Guan
2b6e02a775 chore(internal): search prune sections (#6247) 2025-11-17 18:40:42 +00:00
Justin Tahara
26e1f349b9 fix(index attempts): Preserve some attempts (#6266) 2025-11-17 18:06:26 +00:00
Jamison Lahman
ba83d7e6c3 chore(docker): generate OpenAPI schema/client with docker (#6286) 2025-11-17 17:20:07 +00:00
dependabot[bot]
f869e44497 chore(deps-dev): Bump js-yaml from 3.14.1 to 3.14.2 in /web (#6293)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-17 17:17:37 +00:00
Jamison Lahman
b367a60680 chore(gha): replace background docker pulls with docker-compose (#6287) 2025-11-17 17:11:56 +00:00
Jamison Lahman
98a7e8b7e2 chore(docker): avoid ONYX_VERSION invalidating the docker cache (#6288) 2025-11-17 17:10:54 +00:00
Nikolas Garza
f93752a2b3 fix: disable aggressive caching for Next.js static assets in dev (#6280)
Co-authored-by: Nikolas Garza <nikolas@unknowna6c9beeb7428.attlocal.net>
2025-11-17 09:15:51 -08:00
Evan Lohn
0d20140cad fix: mcp fixes (#6080) 2025-11-17 08:58:49 -08:00
Wenxi
bdd6dc036e fix(ui): new action form spacing fixes (#6285) 2025-11-17 05:14:05 +00:00
dependabot[bot]
27fe196df3 chore(deps): Bump nanoid from 3.3.7 to 3.3.8 in /examples/widget (#3405)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-17 03:36:35 +00:00
Wenxi
18dad51bf8 fix(tests): pause connector while indexing timeout (#6282) 2025-11-16 22:51:49 +00:00
Wenxi
b6d60fb6a9 fix(permsync): don't fail on empty group ids (#6281) 2025-11-16 22:02:03 +00:00
Wenxi
86e7975c42 chore: foss sync readme (#6256) 2025-11-16 13:18:13 -08:00
Jamison Lahman
bb1fb2250e chore(scripts): only run check_lazy_imports on changed files (#6275) 2025-11-16 18:31:53 +00:00
Nikolas Garza
8fdc3411ed feat(slack federated search scoping - 4/4): Add frontend connector config support (#6181)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-11-16 10:29:44 -08:00
Chris Weaver
d5038e8e68 fix: assistant reordering (#6278) 2025-11-16 09:07:56 -08:00
Jamison Lahman
bc035a78e4 chore(deployment): increase model-server builder to 40GB disk (#6277) 2025-11-16 05:17:11 +00:00
Jamison Lahman
9e1043b2fa chore(mypy): color output in CI (#6274) 2025-11-16 05:12:50 +00:00
SubashMohan
107e83bf2a refactor(chat): Apply Ownership Checks Only to Current Message User Files (#6240) 2025-11-16 05:06:35 +00:00
SubashMohan
f5aade9f69 fix(userfiles): remove fixed width in AssistantEditor and ProjectContextPanel (#6239) 2025-11-15 11:50:37 +00:00
dependabot[bot]
9b9ca43671 chore(deps): bump next from 14.2.27 to 14.2.32 in /examples/widget (#5395)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-15 07:07:40 +00:00
Nikolas Garza
0c61cc3f65 feat(slack federated search scoping - 3/4): Add connector-level config support (#6178)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-11-15 04:42:23 +00:00
Nikolas Garza
553853c7f4 feat(slack federated search scoping - 2/4): Add query construction and filtering (#6175)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-11-15 04:11:28 +00:00
Jamison Lahman
15a05663ca chore(docker): install node deps before copying source (#6261) 2025-11-15 03:55:11 +00:00
Jamison Lahman
940773b9c5 chore(deployments): fix cross-platform related issues (#6272) 2025-11-15 03:24:26 +00:00
Nikolas Garza
a95ae6e88b feat(slack federated search scoping - 1/4): Add entity filtering config (#6174)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-11-15 02:47:52 +00:00
Raunak Bhagat
369f923929 refactor: Implement a proper Switch component (#6270) 2025-11-15 02:28:58 +00:00
Raunak Bhagat
3eefbfb646 fix: Fix header for white-labelling (#6271) 2025-11-14 18:27:29 -08:00
Justin Tahara
3919a2d0a2 fix(gdrive): Missing Id Field (#6262) 2025-11-14 17:59:34 -08:00
Justin Tahara
4553e811b0 feat(github): Showcasing our Github Repo Ranking (#6267) 2025-11-14 16:54:34 -08:00
Justin Tahara
7f7389692e fix(reformat): Teams Test (#6268) 2025-11-14 16:53:19 -08:00
Richard Guan
30147c03cf chore(fix): agent sdk replacement message formatting (#6180) 2025-11-14 14:51:37 -08:00
Wenxi
dc48ccc117 fix(teams): mypy (#6259) 2025-11-14 14:42:30 -08:00
Alex Kim
ee366c50c4 fix(teams): handle OData parsing errors with special characters (#6115)
Co-authored-by: Jessica Singh <86633231+jessicasingh7@users.noreply.github.com>
2025-11-14 14:38:58 -08:00
sktbcpraha
caf92a6cce fix: Assistant instruction ignored (#6243) 2025-11-14 14:30:14 -08:00
Jamison Lahman
259bc9d64b chore(deployments): fix actions/checkout typo (#6255) 2025-11-14 21:48:12 +00:00
dependabot[bot]
60664f7e5b chore(deps-dev): bump js-yaml from 4.1.0 to 4.1.1 in /examples/widget (#6248)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-14 21:09:16 +00:00
Wenxi
07f55c6ae2 fix: readme (#6254) 2025-11-14 13:31:29 -08:00
Wenxi
256ece05a6 chore: readme (#6253) 2025-11-14 13:26:53 -08:00
Jamison Lahman
530d6d8284 chore(deployments): simplify pipeline + cross-platform builds (#6250) 2025-11-14 13:16:20 -08:00
Chris Weaver
6299bc30b6 fix: playwright test (#6244)
Co-authored-by: Nikolas Garza <90273783+nmgarza5@users.noreply.github.com>
2025-11-14 12:26:50 -08:00
Jamison Lahman
0607ea9741 chore(deployments): add ability to trigger dry-run build (#6246) 2025-11-14 11:22:22 -08:00
Chris Weaver
3ba4bdfe78 fix: gpt-5 output formatting (#6245) 2025-11-14 10:55:17 -08:00
Chris Weaver
a9165ad329 feat: allow switchover with active connectors only (#6226) 2025-11-14 16:52:07 +00:00
Raunak Bhagat
24aea2d7ce refactor: Edit button types (#6235) 2025-11-14 16:21:08 +00:00
SubashMohan
aa30008419 feat(component): new switch component (#6212) 2025-11-14 08:46:53 +00:00
Raunak Bhagat
3605676f61 fix: Fix inputs overflowing in Settings page (#6238) 2025-11-14 06:24:25 +00:00
Raunak Bhagat
1faa9e7812 refactor: Updated Modals API (#6227)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-13 21:48:28 -08:00
Jamison Lahman
d85b702cac chore(deployments): remove unnecessary install of build-essentials (#6234) 2025-11-14 04:33:28 +00:00
Jamison Lahman
a724f53e5b chore(deployments): prefer ecr over s3 as docker cache backend (#6232) 2025-11-13 19:39:55 -08:00
Chris Weaver
68fcc5cb8a fix: signup button (#6228) 2025-11-13 19:02:05 -08:00
Justin Tahara
3eb1ca01a2 fix(docprocessing): OOM cleanup (#6223) 2025-11-13 18:24:59 -08:00
Chris Weaver
c2c3d1a722 feat: allow disabling the default assistant (#6222) 2025-11-13 17:42:47 -08:00
Wenxi
f79a8533fb fix: show agent descriptions (#6219) 2025-11-13 14:17:43 -08:00
Jamison Lahman
c1dce9fabd chore(runs-on): define custom AMI specs (#6216) 2025-11-13 22:01:07 +00:00
Jamison Lahman
244bf82c7a chore(gha): prefer venv over installing python packages to the system (#6213) 2025-11-13 17:39:54 +00:00
Jamison Lahman
188ea3faff chore(gha): prefer Github-hosted for simple steps (#6208) 2025-11-13 02:37:48 +00:00
Justin Tahara
c04f624891 fix(slack): Fixing the link coloring (#6203) 2025-11-13 02:32:50 +00:00
Jamison Lahman
43ae02a870 chore(gha): remove custom cloudwatch metrics (#6202) 2025-11-13 00:12:13 +00:00
Jamison Lahman
14123926a7 chore(gha): final runs-on migration nits (#6170) 2025-11-12 23:00:25 +00:00
Justin Tahara
d14d1b833f fix(slack): Show Channels when Editing Fed Slack (#6200) 2025-11-12 22:30:49 +00:00
Nikolas Garza
ff06f10af6 fix: type checking for multiToolTestHelpers (#6199)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-11-12 14:36:04 -08:00
Justin Tahara
5d26c1bafc fix(slackbot): Switch between document set and assistant (#6198) 2025-11-12 22:21:27 +00:00
dependabot[bot]
dbf06c6a1b chore(deps): bump aquasecurity/trivy-action from 0.29.0 to 0.33.1 (#6194)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-12 19:43:15 +00:00
dependabot[bot]
d31e83900f chore(deps): bump docker/setup-buildx-action from 1.7.0 to 3.11.1 (#6196)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-12 19:25:04 +00:00
dependabot[bot]
1ac92e6bd0 chore(deps-dev): bump types-urllib3 from 1.26.25.11 to 1.26.25.14 in /backend (#6193)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-12 19:21:32 +00:00
dependabot[bot]
5e159c35f3 chore(deps): bump pilosus/action-pip-license-checker from 2.0.0 to 3.1.0 (#6191)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-12 19:00:23 +00:00
Raunak Bhagat
550271abd9 feat: Share chat button in top right corner (#6186)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-12 11:08:23 -08:00
Nikolas Garza
db3d0bfb34 feat: improve usability of react testing framework + multi-tool renderer tests (#5973)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-11-12 10:48:49 -08:00
Nikolas Garza
860bdd3c0f chore: run playwright projects as separate jobs (#6190)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-11-12 18:28:19 +00:00
Jamison Lahman
3bc63b30ce chore(deps): dependabot for python (#6188) 2025-11-12 18:18:27 +00:00
dependabot[bot]
78a23eeec0 chore(deps): bump pypdf from 6.0.0 to 6.1.3 in /backend/requirements (#5866)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-12 18:15:05 +00:00
Raunak Bhagat
096e4029ba build: Revert to using webpack instead of turbopack (#6185)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-12 18:10:17 +00:00
SubashMohan
e8da5722df feat(upload): increase token limit to 100k and configurable skip (#6187) 2025-11-12 09:53:37 -08:00
Jamison Lahman
a1a261f68e chore(lint): introduce actionlint, github actions linter (#6184) 2025-11-12 03:39:17 +00:00
Jamison Lahman
ac57b10240 chore(gha): ensure run-id is unique, fix hanging jobs (#6183) 2025-11-12 01:25:59 +00:00
Richard Guan
ce35e01ce3 chore(hotfix): for configuration (#6182) 2025-11-12 00:59:28 +00:00
Richard Guan
808f82de0b chore(agent sdk): make alternative to openai agent sdk (#6153) 2025-11-11 16:25:19 -08:00
Jamison Lahman
9518bd14bb chore(gha): explicit spot pricing (#6177) 2025-11-11 23:52:54 +00:00
Justin Tahara
54eb655634 fix(gdrive): Checkbox fix (#6171) 2025-11-11 22:39:36 +00:00
Wenxi
a773c398af fix: safari input bar quadrupling new lines (#6173) 2025-11-11 13:31:23 -08:00
Jamison Lahman
53131e7669 chore(gha): run whitespace fixers on actions (#6172) 2025-11-11 13:06:59 -08:00
Richard Guan
d5cb56b0e9 chore(llm): interface decoupled from langchain (#6128) 2025-11-11 19:48:25 +00:00
Wenxi
de6226e192 fix: img input support check false vs. none (#6169) 2025-11-11 11:21:58 -08:00
Jamison Lahman
a1d502804a chore(gha): migrate pr-integration-tests off blacksmith (#6164) 2025-11-11 19:06:56 +00:00
Jamison Lahman
76fc01968b chore(gha): de-dupe python setup for external dep tests (#6159)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-11 09:38:01 -08:00
Jamison Lahman
f9de82c135 chore(runs-on): more instance families and use price-capacity-optimized (#6165) 2025-11-11 09:37:50 -08:00
Justin Tahara
db4b074938 fix(pegasus): Cleanup (#6163) 2025-11-11 09:26:58 -08:00
Justin Tahara
bc5a574cf1 fix(embedding): Fix Deletion of Same Name (#6149) 2025-11-10 19:37:21 -08:00
Jamison Lahman
c14414c9be feat(pre-commit): run check-yaml on .github/ (#6160) 2025-11-11 02:21:50 +00:00
Justin Tahara
770bfcf360 fix(gpt-5): Catch all (#6162) 2025-11-10 18:35:06 -08:00
Chris Weaver
67c1099f98 fix: improve /llm/provider performance (#6158) 2025-11-10 17:01:56 -08:00
Jamison Lahman
67eb54734f chore(gha): migrate playwright tests to runs-on (#6154) 2025-11-10 15:51:14 -08:00
Justin Tahara
f819fdf09b feat(auth): Allow JIT even with Invite List (#6157) 2025-11-10 14:36:59 -08:00
Justin Tahara
b39a4a075a fix(cohere): Add Billing Handler (#6156) 2025-11-10 14:31:01 -08:00
Justin Tahara
8a244aff0d feat(api): Paginated Document Search (#6155) 2025-11-10 14:10:36 -08:00
Jamison Lahman
6a74e54eda feat(gha): python tests use uv w/ caching (#6152) 2025-11-10 12:10:21 -08:00
Jamison Lahman
e87818c961 feat(gha): enable npm caching in CI (#6151) 2025-11-10 11:34:06 -08:00
dependabot[bot]
fbec393faa chore(deps): bump actions/download-artifact from 4.3.0 to 6.0.0 (#6147)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-10 11:27:45 -08:00
dependabot[bot]
da167e93ab chore(deps): bump actions/upload-artifact from 4.6.2 to 5.0.0 (#6146)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-10 11:27:36 -08:00
dependabot[bot]
91c0b273bf chore(deps): bump actions/setup-node from 4.4.0 to 6.0.0 (#6148)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-10 11:14:01 -08:00
Jamison Lahman
72d1cfa36a chore(gha): docker-login follow up (#6150) 2025-11-10 10:57:34 -08:00
Jamison Lahman
1f45ebc818 fix(gha): docker login for all external image fetching (#6139) 2025-11-10 10:34:02 -08:00
Chris Weaver
c1428d03f5 fix: infinite render on embedding model page (#6144) 2025-11-10 09:39:32 -08:00
Chris Weaver
904bcdb0fe chore: change log-level of keyword extraction failure (#6135) 2025-11-08 14:52:38 -08:00
Nikolas Garza
9caf136f0e feat: llm access controls (#5819) 2025-11-08 10:36:14 -08:00
Raunak Bhagat
e06ad214cb fix: White labelling (#6133)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-11-07 18:39:04 -08:00
Chris Weaver
fe53ae9d79 fix: package-lock.json (#6106) 2025-11-07 18:10:01 -08:00
Jamison Lahman
5a2796d285 chore(gha): pr-python-checks instance update (#6129) 2025-11-07 17:29:25 -08:00
Justin Tahara
aba5bee4d7 fix(ui): Make Private Groups selectable again (#6116) 2025-11-07 17:10:39 -08:00
Justin Tahara
a0eaf126be feat(azure): Support OpenAI Image models (#6107) 2025-11-07 17:10:24 -08:00
Justin Tahara
28712aab1d fix(vercel): Remove deprecated fields (#6130) 2025-11-07 17:09:41 -08:00
Justin Tahara
25de38fcf7 fix(chat): Adding buffer for instructions (#6125) 2025-11-07 16:33:37 -08:00
Justin Tahara
53123e2870 fix(upload): File type handling (#6126) 2025-11-07 16:25:13 -08:00
Jamison Lahman
fa8487a1a8 chore(gha): reduce size of pr-quality-check instance (#6123) 2025-11-07 16:21:20 -08:00
Jamison Lahman
3f0bcd516d fix(gha): fix terraform pre-commit test (#6124) 2025-11-07 15:26:29 -08:00
Justin Tahara
76d25ff489 fix(tool): Hide Okta Tool (#6120) 2025-11-07 13:36:23 -08:00
Wenxi
f99d0285f9 fix: openapi generation required fields (#6117) 2025-11-07 18:45:30 +00:00
Raunak Bhagat
988221550e fix: Sidebar sizing fix (#6113) 2025-11-06 19:43:31 -08:00
Raunak Bhagat
6b636c1b90 feat: Sidebar anim (#6111) 2025-11-06 19:32:06 -08:00
Justin Tahara
1fee528d86 fix(icons): Update Token Rate Limits page (#6112) 2025-11-06 17:35:55 -08:00
Wenxi
815ab5ebc2 fix: hydration errors (#6100)
Co-authored-by: Raunak Bhagat <r@rabh.io>
2025-11-06 17:14:26 -08:00
Jessica Singh
c8553331ae fix(teams): increase timeout w special char team names (#6086)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-06 23:22:25 +00:00
Raunak Bhagat
362da495ff refactor: Update tooltip colours (#6093)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-11-06 14:49:22 -08:00
Richard Guan
abb444cd85 chore(prompting): fix default behavior assistant (#6103) 2025-11-06 21:25:41 +00:00
Justin Tahara
fc7e6f798d fix(bedrock): Add Gov Cloud regions (#6105) 2025-11-06 13:18:59 -08:00
Jamison Lahman
8b39d60bca chore(gha): migrate connectors-check to uv w/ caching (#6102) 2025-11-06 19:55:24 +00:00
Justin Tahara
9ac8331cd3 fix(gdrive): Add support for domain link only filtering (#6076) 2025-11-06 19:53:39 +00:00
Raunak Bhagat
295dc2d28c refactor: New Opal component library (#6062)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-11-06 18:50:32 +00:00
Raunak Bhagat
c15266227a refactor: Update how disabled actions are rendered (#6094) 2025-11-06 10:09:03 -08:00
Raunak Bhagat
1c54b357ee fix: Fix modal dragging issue (#6095) 2025-11-06 10:05:18 -08:00
Jamison Lahman
791346eca8 chore(gha): migrate external dependency tests to uv (#6083) 2025-11-06 07:17:25 +00:00
Chris Weaver
2d2a2452bf fix: setDisplayComplete not called for ollama (#6092) 2025-11-05 22:04:00 -08:00
dependabot[bot]
4d3094c09f chore(deps): bump aws-actions/configure-aws-credentials from 4.3.1 to 5.1.0 (#6089)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-06 05:28:20 +00:00
dependabot[bot]
882e5f999d chore(deps): bump helm/chart-testing-action from 2.7.0 to 2.8.0 (#6090)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-11-06 05:25:29 +00:00
dependabot[bot]
c2cf3991a0 chore(deps): bump actions/github-script from 7.1.0 to 8.0.0 (#6091)
Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Jamison Lahman <jamison@lahman.dev>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-05 21:04:30 -08:00
Wenxi
402dfdad2c fix: disable PAT when AUTH_TYPE is disabled (#6088) 2025-11-05 20:00:20 -08:00
Jamison Lahman
ef8de62478 chore(deps): basic dependabot setup for actions (#6087) 2025-11-06 02:28:37 +00:00
Evan Lohn
8101be42ea feat: gmail connector checkpointing (#6040) 2025-11-05 18:03:28 -08:00
Chris Weaver
0c615cd76d fix: infinite render on React 19 (#6085) 2025-11-05 17:54:21 -08:00
Wenxi
421e9899b8 fix: preload user settings correctly (#6063) 2025-11-06 01:13:43 +00:00
Jamison Lahman
6379423dfc feat(gha): persist the mypy_cache (#6079)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-06 00:49:56 +00:00
Jamison Lahman
1c742e675a fix(gha): Don't wait for vespa server (#6081) 2025-11-06 00:21:54 +00:00
Richard Guan
5c3b2320a7 chore(simplified): tools (#6064) 2025-11-05 16:12:14 -08:00
Richard Guan
198fc145fc chore(custom): instructions (#6055) 2025-11-05 22:05:30 +00:00
Jamison Lahman
0f84391f60 chore(gha): migrate mypy workflow to uv w/ caching (#6074) 2025-11-05 14:03:57 -08:00
Jamison Lahman
1e101f8028 chore(gha): pin workflow versions (#6058)
SHAs are more secure than version tags.
2025-11-05 13:29:58 -08:00
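For illustration of the rationale in #6058 (pinning workflow steps by commit SHA rather than by mutable version tag), a minimal, hypothetical GitHub Actions sketch; the action name is real, but the SHA below is a placeholder, not taken from this repo:

```yaml
# A tag like @v6 is mutable: its maintainer can re-point it at different code.
# A full commit SHA is immutable: the step always resolves to the same code.
steps:
  - uses: actions/checkout@v6  # mutable tag reference
  - uses: actions/checkout@0123456789abcdef0123456789abcdef01234567  # v6 pinned by SHA (placeholder)
```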
Wenxi
7e40cbe0d1 fix: honor pw min length env var on fe (#6065) 2025-11-05 11:43:44 -08:00
Chris Weaver
a528dbe241 fix: airgapped (#6067) 2025-11-04 23:34:37 -08:00
Chris Weaver
587cca4b13 feat: nextjs upgrade + react compiler (#6060) 2025-11-04 19:52:53 -08:00
Wenxi
990842c1cf feat(PAT): Final/3 add tests (#6047) 2025-11-04 15:23:01 -08:00
Wenxi
a3a420a6de feat(PAT): 3/3 PAT frontend (#6046) 2025-11-04 14:50:54 -08:00
Wenxi
03c2e62aee feat(PAT): 2/3 PAT APIs (#6045) 2025-11-04 14:50:26 -08:00
Wenxi
b7d7c62a7c feat(PAT): 1/3 PAT backend, crud, shared utils with API key (#6044) 2025-11-04 14:50:06 -08:00
Yuhong Sun
6f5c466f38 Ollama Img Compat (#6057) 2025-11-04 12:15:33 -08:00
Yuhong Sun
2b19b84245 Upgrade when version latest (#6056) 2025-11-04 11:57:00 -08:00
Justin Tahara
16e1b45cee fix(helm): Remove OAUTH Cookie secret (#6054) 2025-11-04 10:29:55 -08:00
Richard Guan
d9c2793341 chore(new): framework enhancements (#6039) 2025-11-04 00:44:22 +00:00
Chris Weaver
644920f2f9 fix: misc fixes for ollama reasoning (#6043)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-11-03 15:14:16 -08:00
Raunak Bhagat
e38e96d4d1 fix: Fix signin button (#6041) 2025-11-03 13:48:47 -08:00
Wenxi
fc84e83fdb fix: don't re-direct to login when auth disabled (#6042) 2025-11-03 21:08:37 +00:00
Richard Guan
c428ad6dfe chore(robustness): quick fixes for ollama -- error handling fixes + litellm register lifecycle fix (#6024) 2025-11-03 12:58:49 -08:00
Wenxi
98a7a04633 chore: update vertex ai names (#6029) 2025-11-03 12:03:06 -08:00
Nikolas Garza
9b42a8c1f3 fix: update playwright tests for agent creation with knowledge (#5892)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
Co-authored-by: Raunak Bhagat <r@rabh.io>
2025-11-03 19:26:35 +00:00
Raunak Bhagat
eacc663d5b fix: Remove dbg-red (#6038) 2025-11-03 10:00:29 -08:00
Raunak Bhagat
3020ee5964 fix: Fix "Projects" renaming UI (#6037) 2025-11-03 09:56:34 -08:00
Chris Weaver
472d080239 feat: support reasoning (#6004)
Co-authored-by: Richard Guan <rguan72@gmail.com>
2025-11-03 09:47:44 -08:00
SubashMohan
922069bfd3 Enhancement/new login UI (#6009)
Co-authored-by: Yuhong Sun <yuhongsun96@gmail.com>
2025-11-03 09:31:06 -08:00
Jessica Singh
bffca81477 fix(citations): icon not visible in ui (#6003)
Co-authored-by: Raunak Bhagat <r@rabh.io>
2025-11-03 08:12:53 +00:00
Raunak Bhagat
561b487102 fix: Update transience state when like/dislike buttons are pressed (#6036) 2025-11-02 22:26:54 -08:00
Raunak Bhagat
cc9b14c99b feat: New agents nav (#6006) 2025-11-02 22:26:25 -08:00
Raunak Bhagat
de674a19e0 fix: Create new CopyIconButton component (#6035) 2025-11-02 18:35:33 -08:00
Chris Weaver
79114bf92c feat: add image previews (#6030) 2025-11-02 11:25:07 -08:00
Wenxi
b5dccd96b3 fix: don't fail filestore cleanup (#6018) 2025-11-02 04:09:21 +00:00
Wenxi
a55cc5a537 fix: don't flash connectors tab and cache federated connectors (#6019) 2025-11-01 18:18:42 +00:00
Wenxi
cdf3cc444b chore(claude): track plans/ and use CLAUDE/AGENT templates (#5993) 2025-11-01 11:23:31 -07:00
Richard Guan
cd3941f4b7 chore(reduce): multitenant flakiness (#6021) 2025-11-01 18:06:25 +00:00
Wenxi
0182743619 feat: tag beta images (#6022) 2025-11-01 01:49:19 +00:00
Nikolas Garza
0e2f596aa2 fix: dark/light coloring of manage subscription button (#6026)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-10-31 17:42:28 -07:00
Justin Tahara
0be45676b7 fix(helm): Adding config for db_readonly_user (#6025) 2025-10-31 17:30:34 -07:00
Nikolas Garza
30a3470001 fix: text overlap for tool responses when expanded (#5960)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-10-31 23:55:32 +00:00
Richard Guan
d52fa83afa chore(make): simple agent framework default on (#6017) 2025-10-31 14:11:35 -07:00
Richard Guan
9eb5643cc3 chore(fix): mypy check (#6008) 2025-10-31 10:36:37 -07:00
Justin Tahara
afe34218b8 fix(vespa): Adjust node count (#6016) 2025-10-31 10:36:26 -07:00
Richard Guan
4776947dfa chore(ollama): ollama support (#5963) 2025-10-31 00:26:27 -07:00
trial2onyx
c4bc25f540 feat: Load random documents in document explorer page for empty queries (#5966)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
2025-10-30 15:45:04 -07:00
Dominic Feliton
b77078b339 fix(web-connector): empty semantic identifiers from trailing / with PDF URLs (#5997)
Co-authored-by: Dominic Feliton <37809476+dominicfeliton@users.noreply.github.com>
2025-10-30 15:28:51 -07:00
SubashMohan
88b28a303b fix(chat): enhance file upload handling and improve file removal (#5975) 2025-10-30 10:08:46 +05:30
Raunak Bhagat
59d7d3905a fix: Fix bug in which bottom-padding of mask would not get applied (#5994) 2025-10-30 01:53:04 +00:00
Justin Tahara
a48fe7550a fix: Better Logs for Rate Limits (#5988) 2025-10-30 01:50:14 +00:00
Nikolas Garza
c25a99955c fix: always convert strings to document sources (#5992)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-10-30 00:30:14 +00:00
Justin Tahara
ac509f865a chore(helm): Reducing Helm Chart Resource Requests/Limits (#5980) 2025-10-29 15:51:34 -07:00
Justin Tahara
5819389ae8 fix(message): Process UUID properly (#5989) 2025-10-29 15:51:19 -07:00
Justin Tahara
eae5774cdc fix(helm): Bump test version (#5978) 2025-10-29 13:38:25 -07:00
Raunak Bhagat
8fed0a8138 perf: Remove admin sidebar delay (#5985) 2025-10-29 19:31:43 +00:00
Nikolas Garza
c04196941d fix: fix linear icon in dark mode (#5971)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-10-29 17:08:38 +00:00
Nikolas Garza
19461955ed feat(e2e-testing): record playwright traces for CI failures (#5923)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-10-29 10:08:50 -07:00
Raunak Bhagat
cb3152ff5c refactor: Update button naming colour states (#5972) 2025-10-29 09:58:26 -07:00
Wenxi
cf187e8f58 fix: stop redis from spawning anonymous volumes (#5969) 2025-10-29 01:22:12 +00:00
Evan Lohn
deaa3df42f fix: deprecated confluence oauth api (#5962)
Co-authored-by: Wenxi Onyx <wenxi@onyx.app>
2025-10-28 17:00:14 -07:00
Richard Guan
d6e98bfbc8 chore(turn): fix sending multiple messages (#5961) 2025-10-28 22:41:52 +00:00
Raunak Bhagat
ff58ad0b87 fix: More MCP fixes (#5933) 2025-10-28 14:32:31 -07:00
Justin Tahara
eb7cb02cc0 fix(saml): Align Cookie Usage (#5954) 2025-10-28 19:15:30 +00:00
Evan Lohn
7876d8da1b fix: jira connector creation (#5956)
Co-authored-by: Wenxi Onyx <wenxi@onyx.app>
2025-10-28 18:47:14 +00:00
Justin Tahara
8a6f83115e fix(gmail): Adding Size Thresholds (#5948) 2025-10-27 20:56:59 -07:00
Richard Guan
b7f81aed10 chore(add): tests for citation and force tool use and some cleanup (#5953) 2025-10-28 01:36:49 +00:00
Richard Guan
a415a997cf chore(reduce): model strength for tests (#5726) 2025-10-28 00:47:36 +00:00
Justin Tahara
7781afd74e fix(gdrive): Adding fallback logic for Web View Links (#5952) 2025-10-27 23:59:41 +00:00
Evan Lohn
d0a4f4ce66 fix: always trigger DR (#5831) 2025-10-27 23:32:38 +00:00
Richard Guan
ba00de8904 chore(citation): processing (#5904) 2025-10-27 16:11:24 -07:00
Justin Tahara
91f21bb22b fix(openpyxl): Workbook issue (#5950) 2025-10-27 15:40:08 -07:00
Justin Tahara
491f3127c5 fix(misc): Update Date Checker (#5947) 2025-10-27 15:39:58 -07:00
Richard Guan
0987fb852b chore(force): tool use fix (#5930) 2025-10-27 21:15:37 +00:00
Justin Tahara
5f68141335 fix(vespa): Reducing Number of Nodes (#5942) 2025-10-27 14:26:38 -07:00
Wenxi
b5793ee522 fix: failing web connector test due to sync api (#5936) 2025-10-27 12:39:27 -07:00
Wenxi
238c244fec fix: standardize and make user settings pretty (#5922) 2025-10-27 12:38:52 -07:00
Wenxi
c103a878b7 fix: chat feedback (#5896) 2025-10-27 11:55:06 -07:00
Justin Tahara
03deb064cc fix(ui): Remove Bubble Text for non-search ui chats (#5887) 2025-10-27 11:32:20 -07:00
Yuhong Sun
09062195b4 Script to generate test data (#5935) 2025-10-27 10:51:57 -07:00
Raunak Bhagat
dc57a5451c feat: Update pinning behaviour (#5934) 2025-10-27 10:51:21 -07:00
Raunak Bhagat
781f60a5ab fix: Edit recent files UI (#5879) 2025-10-27 00:10:51 -07:00
Raunak Bhagat
423961fefb refactor: Replace all instances of the old button with the new refreshed Button (#5889) 2025-10-26 23:02:41 -07:00
Chris Weaver
324b6ceeef refactor: remove custom spacing definitions (#5928)
Co-authored-by: Raunak Bhagat <r@rabh.io>
2025-10-26 22:32:18 -07:00
Nikolas Garza
d9e14bf5da fix: decrease background blur in modals (#5823)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
Co-authored-by: Raunak Bhagat <r@rabh.io>
2025-10-26 22:25:26 -07:00
Wenxi
eb2cb1bb25 fix: auth logic, create util func, and add edge check in middleware (#5931) 2025-10-26 19:08:34 -07:00
Chris Weaver
0de9f47694 fix: change url at the start of a new chat rather than at the end of … (#5932) 2025-10-26 17:20:04 -07:00
Chris Weaver
2757f3936c fix: remove default agent from sidebar (#5929) 2025-10-26 15:22:34 -07:00
Chris Weaver
8ba61e9123 fix: code rendering in chat (#5927) 2025-10-26 13:38:20 -07:00
Chris Weaver
c10d7fbc32 fix: copy button switch to check (#5926) 2025-10-25 17:50:09 -07:00
Chris Weaver
b6ed217781 feat: oauth tools/fe (#5844) 2025-10-24 23:39:11 -07:00
Chris Weaver
7d20f73f71 feat: Oauth tools/support removing (#5876) 2025-10-24 20:02:12 -07:00
Nikolas Garza
2b306255f9 fix: better handling of dark/light icons (#5909)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-10-24 23:46:58 +00:00
Wenxi
e149d08d47 feat: save user's theme preference (#5908) 2025-10-24 23:28:23 +00:00
Chris Weaver
e98ddb9fe6 feat: center input bar (#5919) 2025-10-24 16:22:00 -07:00
Chris Weaver
b9a5297694 fix: input bar thickness (#5917) 2025-10-24 16:20:50 -07:00
Chris Weaver
4666312df2 fix: initial screen when no LLM provider is set up (#5912) 2025-10-24 14:08:36 -07:00
Evan Lohn
d4e524cd83 fix: mcp chat frontend part1 (#5913) 2025-10-24 11:50:17 -07:00
trial2onyx
a719228034 chore(gha): disable docker caching for backend images (#5910)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
2025-10-24 18:13:51 +00:00
Chris Weaver
2fe8b5e33a fix: deep research disable (#5911) 2025-10-24 11:15:08 -07:00
Justin Tahara
af243b0ef5 chore(tf): Clean up and add linting (#5905) 2025-10-23 18:23:37 -07:00
Wenxi
c96ac04619 feat: show personal name if provided (#5898) 2025-10-23 17:45:04 -07:00
Wenxi
e2f2950fee fix: make entire query history row clickable (#5894) 2025-10-23 17:44:16 -07:00
Chris Weaver
8b84c59d29 fix: add __init__.py in empty ee dir (#5903) 2025-10-23 17:34:52 -07:00
Chris Weaver
b718a276cf fix: add global (#5902) 2025-10-23 17:29:27 -07:00
Raunak Bhagat
700511720f refactor: Remove assistant icon (#5882)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-10-23 17:23:26 -07:00
Raunak Bhagat
6bd1719156 fix: Shared chat UI (#5895) 2025-10-23 17:21:54 -07:00
Richard Guan
c8bfe9e0a1 chore(integration): test instructions and image gen test (#5897) 2025-10-23 23:45:46 +00:00
Chris Weaver
037bc04740 fix: set git config early (#5900) 2025-10-23 16:59:30 -07:00
Chris Weaver
c3704d47df fix: add back empty ee dir (#5899) 2025-10-23 16:51:48 -07:00
Richard Guan
397a153ff6 chore(fix): bring llm prompts to spec (#5863) 2025-10-23 22:56:56 +00:00
Chris Weaver
870c432ccf fix: sharepoint .msg handling + lazy load check fix (#5497) 2025-10-23 16:00:49 -07:00
Chris Weaver
c4a81a590f fix: add license (#5891) 2025-10-23 15:08:45 -07:00
Nikolas Garza
017c095eed Revert "chore: add fe type check to pre-commit" (#5893) 2025-10-23 14:40:58 -07:00
Nikolas Garza
ee37d21aa4 fix: fix typing errors in react tests (#5881)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-10-23 21:08:48 +00:00
Nikolas Garza
e492d88b2d chore: add fe type check to pre-commit (#5883)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.local>
2025-10-23 20:46:48 +00:00
Wenxi
3512fdcd9d fix: don't set env vars if they're empty (aws bedrock) (#5886)
Co-authored-by: EC2 Default User <ec2-user@ip-172-31-7-79.us-east-2.compute.internal>
2025-10-23 20:45:53 +00:00
Chris Weaver
3550795cab fix: make_foss_repo.sh (#5890) 2025-10-23 14:02:12 -07:00
Chris Weaver
b26306d678 fix: foss repo syncing (#5888) 2025-10-23 13:43:04 -07:00
Chris Weaver
85140b4ba6 feat: FOSS repo sync (#5885) 2025-10-23 13:39:28 -07:00
Jessica Singh
c241f79f97 fix(team special char): ampersand fix (#5877) 2025-10-23 19:22:36 +00:00
Chris Weaver
9808dec6b7 feat: oauth tool apis (#5840) 2025-10-23 11:59:31 -07:00
Wenxi
632c74af6d chore: Update CONTRIBUTING.md (#5880) 2025-10-23 11:41:49 -07:00
trial2onyx
79073d878c chore(docker): migrate integration image to docker bake and de-dupe (#5873)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-10-23 17:17:45 +00:00
Chris Weaver
620df88c51 fix: citation look (#5871) 2025-10-23 10:41:33 -07:00
Chris Weaver
717f05975d feat: add OAuthTokenManager (#5838) 2025-10-23 10:26:47 -07:00
Chris Weaver
d2176342c1 feat: add CRUD operations for OAuth Tools (#5837) 2025-10-23 10:21:36 -07:00
Wenxi
bb198b05e1 feat: update icons (#5864) 2025-10-23 10:12:55 -07:00
Chris Weaver
085013d8c3 feat: add DB models for OAuthTools (#5836) 2025-10-23 09:50:04 -07:00
Nikolas Garza
e46f632570 fix: allow user knowledge (file uploads) always (#5857)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
2025-10-22 23:37:25 +00:00
Justin Tahara
bbb4b9eda3 fix(docker): Clean up USE_IAM_AUTH log (#5870) 2025-10-22 15:59:12 -07:00
Richard Guan
12b7c7d4dd chore(ripsecrets): ripsecrets (#5868) 2025-10-22 22:02:57 +00:00
trial2onyx
464967340b chore(docker): prefer uv for installing python system packages (#5861)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
2025-10-22 21:48:41 +00:00
trial2onyx
a2308c2f45 chore(gha): deduplicate prepare-build and migrate to uv (#5862)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
2025-10-22 21:20:37 +00:00
trial2onyx
2ee9f79f71 chore(docker): remove empty echo ONYX_VERSION layers (#5848)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
2025-10-22 20:36:36 +00:00
trial2onyx
c3904b7c96 fix(release): correctly set ONYX_VERSION in model-server image (#5847)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
2025-10-22 19:56:55 +00:00
trial2onyx
5009dcf911 chore(docker): avoid duplicating cached models layer (#5845)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
2025-10-22 19:56:42 +00:00
trial2onyx
c7b4a0fad9 chore(github): flag and enable docker build caching (#5839)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
2025-10-22 19:56:23 +00:00
Raunak Bhagat
60a402fcab Render chat and project button popovers using the PopoverMenu component (#5858) 2025-10-21 20:37:22 -07:00
Raunak Bhagat
c9bb078a37 Edit height of mask again (#5856) 2025-10-21 20:22:51 -07:00
Raunak Bhagat
c36c2a6c8d fix: Edit height of mask (#5855) 2025-10-21 20:17:39 -07:00
Raunak Bhagat
f9e2f9cbb4 refactor: Remove hover state on chatbutton rename (#5850) 2025-10-21 19:44:57 -07:00
Raunak Bhagat
0b7c808480 refactor: "Unnest" admin panel button (#5852) 2025-10-21 19:30:00 -07:00
Justin Tahara
0a6ff30ee4 fix(ui): Update spacing for the API Key page (#5826) 2025-10-21 18:26:14 -07:00
Raunak Bhagat
dc036eb452 fix: Spacings update (#5846) 2025-10-21 18:11:13 -07:00
Justin Tahara
ee950b9cbd fix(ui): Document Processing revamp (#5825) 2025-10-21 17:56:06 -07:00
Justin Tahara
dd71765849 fix(internal search): Restore functionality (#5843) 2025-10-21 16:54:10 -07:00
Raunak Bhagat
dc6b97f1b1 refactor: Edit message generation ui (#5816) 2025-10-21 16:51:14 -07:00
Richard Guan
d960c23b6a chore(fix): input images in msg (#5798) 2025-10-21 20:33:00 +00:00
Richard Guan
d9c753ba92 chore(simple): agent small adjustments (#5729) 2025-10-21 20:32:57 +00:00
Chris Weaver
60234dd6da feat: Improve litellm model map logic (#5829) 2025-10-21 13:22:35 -07:00
Justin Tahara
f88ef2e9ff fix(ui): Align Default Assistant Page (#5828) 2025-10-21 19:12:30 +00:00
Chris Weaver
6b479a01ea feat: run tasks for gated tenants (#5827) 2025-10-21 11:47:39 -07:00
Wenxi
248fe416e1 chore: update template reference to sso ee (#5830) 2025-10-21 11:39:12 -07:00
trial2onyx
cbea4bb75c chore(docker): avoid chown-ing playwright cache (#5805)
Co-authored-by: Onyx Trialee 2 <onyxtrial2@Onyxs-MBP.attlocal.net>
2025-10-21 17:23:49 +00:00
Justin Tahara
4a147a48dc fix(ui): Update Upload Image and Generate Icon buttons (#5824) 2025-10-21 10:41:57 -07:00
Chris Weaver
a77025cd46 fix: adjust deletion threshold (#5818) 2025-10-21 10:37:10 -07:00
Jessica Singh
d10914ccc6 fix(teams connector): special char bug (#5767) 2025-10-21 10:27:37 -07:00
Nikolas Garza
7d44d48f87 fix: switch out OnyxSparkleIcon for OnyxIcon for default assistant (#5806)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
2025-10-20 17:48:30 -07:00
Chris Weaver
82fd0e0316 fix: extra sidebar spacing (#5811) 2025-10-20 17:48:02 -07:00
Wenxi
d7e4c47ef1 fix: custom llm setup fixes (#5804) 2025-10-20 17:47:38 -07:00
Wenxi
799b0df1cb fix: don't set new default provider when deleted provider was not default (#5812)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-10-20 17:42:47 -07:00
Justin Tahara
b31d36564a fix(ui): Make Document Sets editable (#5809) 2025-10-20 17:41:08 -07:00
Chris Weaver
84df0a1bf9 feat: more cleanup script improvements (#5803) 2025-10-20 17:29:38 -07:00
Justin Tahara
dbc53fe176 fix(ui): Set as Default for LLM (#5795) 2025-10-20 17:21:12 -07:00
Wenxi
1e4ba93daa feat: optimistically rename chat sidebar items (#5810)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-10-20 17:01:44 -07:00
Wenxi
d872715620 feat: azure parse deployment name (#5807)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
Co-authored-by: Evan Lohn <evan@danswer.ai>
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-10-20 16:52:56 -07:00
Chris Weaver
46ad541ebc fix: white-labelling assistant logo (#5808) 2025-10-20 16:52:26 -07:00
Raunak Bhagat
613907a06f fix: Fix colouring for all error pages (#5802) 2025-10-20 15:42:41 -07:00
Wenxi
ff723992d1 feat: openrouter support (#5772) 2025-10-20 14:24:12 -07:00
Raunak Bhagat
bda3c6b189 fix: Edit background colour (#5800) 2025-10-20 14:09:53 -07:00
Evan Lohn
264d1de994 chore: disable contextual rag in the cloud (#5801) 2025-10-20 13:52:57 -07:00
Nikolas Garza
335571ce79 feat: Add React testing framework (#5778) 2025-10-20 13:49:44 -07:00
Chris Weaver
4d3fac2574 feat: enhance tenant cleanup (#5788) 2025-10-20 13:29:04 -07:00
Evan Lohn
7c229dd103 fix: reduce spam of org info toast (#5794) 2025-10-20 13:01:05 -07:00
Evan Lohn
b5df182a36 chore: hide search settings in the cloud (#5796) 2025-10-20 13:00:48 -07:00
Justin Tahara
7e7cfa4187 fix(ui): Initial Index Attempt Tooltip (#5789) 2025-10-20 12:57:34 -07:00
Justin Tahara
69d8430288 fix(ui): Create Button Type (#5797) 2025-10-20 12:48:39 -07:00
Raunak Bhagat
467d294b30 fix: Add white-labelling back (#5757) 2025-10-19 13:25:25 -07:00
Chris Weaver
ba2dd18233 feat: improve performance of deletion scripts (#5787) 2025-10-19 13:02:19 -07:00
Chris Weaver
891eeb0212 feat: add new fields to usage report (#5784) 2025-10-19 12:37:57 -07:00
Wenxi
9085731ff0 fix: add latest check to merge step (#5781) 2025-10-18 18:26:21 -07:00
Chris Weaver
f5d88c47f4 fix: docker-tag-latest.yml (#5780) 2025-10-18 09:06:14 -07:00
Raunak Bhagat
807e5c21b0 fix: Fix styling (#5776) 2025-10-17 18:49:33 -07:00
Raunak Bhagat
1bcd795011 fix: Font loading fix (#5773) 2025-10-17 18:39:30 -07:00
Raunak Bhagat
aae357df40 fix: Fix document sidebar positioning + update stylings (#5769)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-10-17 18:26:53 -07:00
Justin Tahara
4f03e85c57 fix(llm): Cleaning up models (#5771) 2025-10-17 23:48:08 +00:00
Nikolas Garza
c3411fb28d feat: read latest permission sync from the frontend (#5687)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MBP.attlocal.net>
2025-10-17 23:43:55 +00:00
Richard Guan
b3d1b1f4aa chore(langfuse): tracing (#5753) 2025-10-17 22:44:22 +00:00
Justin Tahara
cbb86c12aa fix(bedrock): Make Region Selectable (#5770) 2025-10-17 22:26:24 +00:00
Chris Weaver
8fd606b713 fix: documents in chat flow (#5762) 2025-10-17 13:56:38 -07:00
Nikolas Garza
d69170ee13 chore: cleanup some console.logs (#5766)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
2025-10-17 13:52:44 -07:00
Justin Tahara
e356c5308c fix(ui): Cleaning up the Edit Action page (#5765) 2025-10-17 13:52:36 -07:00
Wenxi
3026ac8912 feat: blob connector and test enhancements (#5746) 2025-10-17 13:52:03 -07:00
Justin Tahara
0cee7c849f feat(curators): Allow curators to customize Actions (#5752) 2025-10-17 19:07:58 +00:00
Yuhong Sun
14bfb7fd0c No Bash in Background Container (#5761) 2025-10-17 11:15:11 -07:00
Justin Tahara
804e48a3da fix(ui): Fix Available Methods Table (#5756) 2025-10-17 17:29:18 +00:00
SubashMohan
907271656e fix: Fix "Projects" new UI components (#5662) 2025-10-17 17:21:36 +00:00
Chris Weaver
1f11dd3e46 refactor: make OIDC / SAML MIT licensed (#5739) 2025-10-17 16:25:50 +00:00
Raunak Bhagat
048561ce0b fix: Fix colours for error page (#5758) 2025-10-17 09:35:34 -07:00
Raunak Bhagat
8718f10c38 fix: Fix all tooltips rendering raw text (#5755) 2025-10-17 09:34:46 -07:00
Evan Lohn
ab4d820089 feat: user info personalization (#5743) 2025-10-17 00:49:36 +00:00
Justin Tahara
77ae4f1a45 feat(users): Add User Counts (#5750) 2025-10-16 18:17:17 -07:00
Raunak Bhagat
8fd1f42a1c docs: Add a new standards file for the web directory (#5749) 2025-10-16 18:03:25 -07:00
Chris Weaver
b94c7e581b fix: quality checks (#5747)
Co-authored-by: Evan Lohn <evan@danswer.ai>
2025-10-16 16:58:24 -07:00
Wenxi
c90ff701dc chore: move gh non-secrets to vars (#5744) 2025-10-16 16:42:24 -07:00
Justin Tahara
b1ad58c5af fix(ui): Fix Invite Modal (#5748) 2025-10-16 16:30:48 -07:00
Eli Ben-Shoshan
345f9b3497 feat: added support to generate sha256 checksum before uploading file to object store (#5734)
Co-authored-by: Eli Ben-Shoshan <ebs@ufl.edu>
2025-10-16 14:36:12 -07:00
Justin Tahara
4671d18d4f fix(sso): Fix Logout UI (#5741) 2025-10-16 17:24:07 +00:00
Wenxi
f0598be875 fix: s3 connector citation bugs (#5740) 2025-10-16 10:08:13 -07:00
Justin Tahara
eb361c6434 feat(onboarding): Pin Featured Agents to New Users (#5736) 2025-10-15 16:11:56 -07:00
Nikolas Garza
e39b0a921c feat: plumb auto sync permission attempts to celery tasks (#5686)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
2025-10-15 21:15:35 +00:00
Jessica Singh
2dd8a8c788 fix(slack bot ui): update tokens dark mode (#5728)
Co-authored-by: Raunak Bhagat <r@rabh.io>
2025-10-15 21:14:41 +00:00
Raunak Bhagat
8b79e2e90b feat: Unpin agent (#5721)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-10-15 11:20:49 -07:00
Nikolas Garza
d05941d1bd feat: basic db methods to create, update, delete permission sync attempts (#5682)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
2025-10-15 17:28:18 +00:00
Raunak Bhagat
50070fb264 refactor: Make colours in AppSidebar darker (#5725) 2025-10-15 10:15:36 -07:00
edwin-onyx
5792d8d5ed fix(infra): consolidate more celery workers into background worker for default lightweight mode (#5718) 2025-10-15 15:48:18 +00:00
Raunak Bhagat
e1c4b33cf7 fix: Edit AccessRestrictedPage component to render the proper colours (#5724) 2025-10-15 03:10:47 +00:00
Richard Guan
2c2f6e7c23 feat(framework): simple agent to feature flags (#5692)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-10-14 20:27:07 -07:00
Raunak Bhagat
3d30233d46 fix: Edit rendering issues with attached files (CSVs, images, text files, all other files) (#5708)
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-10-14 18:26:35 -07:00
Justin Tahara
875f8cff5c feat(google drive): Add small log (#5723) 2025-10-14 17:21:22 -07:00
Raunak Bhagat
6e4686a09f fix: Update colour of checkbox (#5707) 2025-10-14 22:47:31 +00:00
Justin Tahara
237c18e15e fix(ui): Fix Assistant Image not showing up in Sidebar (#5722) 2025-10-14 21:47:42 +00:00
Justin Tahara
a71d80329d fix(admin): Properly show Unique User count (#5717) 2025-10-14 21:44:09 +00:00
Justin Tahara
91c392b4fc fix(logout): Fix logout again for new UI (#5719) 2025-10-14 21:28:49 +00:00
Justin Tahara
a25df4002d fix(document sets): Delete Federated Slack Document Sets (#5716) 2025-10-14 18:45:57 +00:00
Nikolas Garza
436a5add88 feat: tables/migration for permission syncing attempts (#5681)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
2025-10-14 18:43:02 +00:00
edwin-onyx
3a4bb239b1 fix(infra): consolidate heavy, monitoring, and user file worker into one (#5558)
Co-authored-by: Edwin Luo <edwin@parafin.com>
2025-10-14 01:19:47 +00:00
Raunak Bhagat
2acb4cfdb6 fix: Fix selector colour (#5705) 2025-10-14 00:41:27 +00:00
Justin Tahara
f1d626adb0 fix(ui): Updated Document Sets UI (#5706) 2025-10-14 00:27:21 +00:00
Justin Tahara
5ca604f186 fix(slack): Fix Fed Slack Gear Button Error (#5704) 2025-10-13 23:16:26 +00:00
Chris Weaver
c19c76c3ad feat: tenant cleanup (#5703) 2025-10-13 14:19:02 -07:00
Justin Tahara
4555f6badc fix(entra): JWT Passthrough for Entra (#5697) 2025-10-13 19:57:11 +00:00
Justin Tahara
71bd643537 fix(helm): File Processing fix for helm (#5696) 2025-10-12 15:14:31 -07:00
SubashMohan
23f70f0a96 fix(indexing page): Improve page loading time (#5695) 2025-10-11 10:16:28 -07:00
Evan Lohn
c97672559a feat: org info (#5694) 2025-10-11 04:10:52 +00:00
Evan Lohn
243f0bbdbd fix: assorted mcp improvements (#5684) 2025-10-11 03:51:00 +00:00
Chris Weaver
0a5ca7f1cf feat: gemini-embedding-001 + search settings fixes (#5691) 2025-10-10 18:04:17 -07:00
Justin Tahara
8d56d213ec fix(ui): Update UI change (#5688) 2025-10-10 23:24:20 +00:00
Richard Guan
cea2ea924b feat(Simple Agent): [part1 - backwards compatible changes] (#5569) 2025-10-10 22:06:54 +00:00
Richard Guan
569d205e31 feat(flags): posthog feature flags (#5690) 2025-10-10 21:54:07 +00:00
Chris Weaver
9feff5002f fix: chat tweaks (#5685) 2025-10-09 22:04:56 -07:00
Nikolas Garza
a1314e49a3 fix: use system node version for prettier pre-commit hook (#5679)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
2025-10-09 17:21:19 -07:00
Nikolas Garza
463f839154 fix: show canceled status when indexing is canceled (#5675)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
2025-10-09 17:02:53 -07:00
Nikolas Garza
5a0fe3c1d1 fix: surface user friendly model names for bedrock models (#5680)
Co-authored-by: Nikolas Garza <nikolas@Nikolass-MacBook-Pro.attlocal.net>
2025-10-09 17:00:11 -07:00
Raunak Bhagat
8ac5c86c1e fix: Fix failing tests (#5628) 2025-10-09 22:34:49 +00:00
Chris Weaver
d803b48edd feat: Increase nginx default timeout (#5677) 2025-10-09 22:13:18 +00:00
Wenxi
bc3adcdc89 fix: non-image gen models and add test (#5678) 2025-10-09 14:38:15 -07:00
Raunak Bhagat
95e27f1c30 refactor: Clean up some context files (#5672) 2025-10-09 19:27:29 +00:00
Justin Tahara
d0724312db fix(helm): Removing duplicate exec (#5676) 2025-10-09 12:16:14 -07:00
Justin Tahara
5b1021f20b fix(eml): Fixing EML to Text Function (#5674) 2025-10-09 10:59:26 -07:00
Chris Weaver
55cdbe396f fix: action toggle (#5670) 2025-10-08 19:03:06 -07:00
Raunak Bhagat
e8fe0fecd2 feat: Update formik colours (action + danger) to the new colour palette (#5668) 2025-10-08 14:42:26 -07:00
SubashMohan
5b4fc91a3e Fix/doc id migration task (#5620) 2025-10-08 14:40:12 -07:00
Raunak Bhagat
afd2d8c362 fix: Comment out keystroke hijacking (#5659) 2025-10-08 11:59:27 -07:00
Evan Lohn
8a8cf13089 feat: attachments are separate docs (#5641) 2025-10-08 10:29:06 -07:00
Evan Lohn
c7e872d4e3 feat: selectively run sf and hubspot tests (#5657) 2025-10-08 10:25:00 -07:00
Wenxi
1dbe926518 chore: bump dependabot backlog (#5653) 2025-10-08 09:57:27 -07:00
Chris Weaver
d095bec6df fix: deep research hiding logic (#5660) 2025-10-08 09:05:35 -07:00
Raunak Bhagat
58e8d501a1 fix: Add settings sections back (#5661) 2025-10-08 02:54:07 +00:00
Chris Weaver
a39782468b refactor: improve modal behavior (#5649) 2025-10-07 19:10:17 -07:00
Justin Tahara
d747b48d22 fix(Blob Storage): Add Chunking + Size Limits (#5638) 2025-10-07 19:02:01 -07:00
Yuhong Sun
817de23854 Remove document seeding (#5656) 2025-10-07 18:41:39 -07:00
Richard Guan
6474d30ba0 fix(braintrust): don't decorate generate and clean up unused code (#5636) 2025-10-07 15:11:47 -07:00
Paulius Klyvis
6c9635373a fix: ensure now and dt are in utc in gtp search (#5605) 2025-10-07 14:50:55 -07:00
Wenxi
1a945b6f94 chore: update comm links (#5650) 2025-10-07 14:40:54 -07:00
Chris Weaver
526c76fa08 fix: assistant creation (#5648) 2025-10-07 14:20:44 -07:00
Justin Tahara
932e62531f fix(UI): Update User Settings Model Selection (#5630) 2025-10-07 14:11:24 -07:00
edwin-onyx
83768e2ff1 fix(infra): lazy load nltk and some more (#5634) 2025-10-07 13:53:50 -07:00
Chris Weaver
f23b6506f4 fix: sidebar state persistence (#5647) 2025-10-07 13:25:16 -07:00
Chris Weaver
5f09318302 fix: pin text + create click (#5646) 2025-10-07 12:52:51 -07:00
Justin Tahara
674e789036 fix(playwright): Update Email Password Form (#5644) 2025-10-07 12:18:08 -07:00
Chris Weaver
cb514e6e34 fix: align text with icon (#5645) 2025-10-07 12:08:04 -07:00
Justin Tahara
965dad785c fix(images): Update Image Gen workflow after Refactor (#5631) 2025-10-07 12:03:48 -07:00
Chris Weaver
c9558224d2 feat: improved markdown spacing (#5643) 2025-10-07 12:02:24 -07:00
Chris Weaver
c2dbd3fd1e fix: slack bot creation (#5637) 2025-10-07 11:40:25 -07:00
Wenxi
d27c2b1b4e chore: update contributing readmes (#5635) 2025-10-07 10:24:58 -07:00
edwin-onyx
8c52444bda fix(infra): lazy load and don't warm up model server models (#5527)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
Co-authored-by: greptile-apps[bot] <165735046+greptile-apps[bot]@users.noreply.github.com>
2025-10-07 09:07:40 -07:00
Justin Tahara
b4caa85cd4 fix(infra): Nginx updates (#5627) 2025-10-06 13:37:23 -07:00
Wenxi
57163dd936 chore: pin prettier pre-commit version and run on web for comm prs (#5624) 2025-10-06 12:08:46 -07:00
Chris Weaver
15f2a0bf60 fix: regression (#5625) 2025-10-06 12:02:31 -07:00
Justin Tahara
aeae7ebdef fix(SAML): Add additional Email Fields (#5557) 2025-10-06 12:00:43 -07:00
Raunak Bhagat
eaa14a5ce0 feat: UI Refresh (#5529)
Co-authored-by: SubashMohan <subashmohan75@gmail.com>
2025-10-05 23:04:14 -07:00
Shahar Mazor
b07c834e83 Add RTL support (#5609) 2025-10-05 12:24:38 -07:00
Chris Weaver
97cd308ef7 fix: remove duplicate "file" option in Add Connector page (#5612) 2025-10-05 11:59:39 -07:00
Nils
28cdab7a70 feat: support SharePoint Teams URLs (#5498)
Co-authored-by: nsklei <nils.kleinrahm@pledoc.de>
2025-10-05 11:34:35 -07:00
Chris Weaver
ad9aa01819 ci: adjust latest/edge tags (#5610) 2025-10-04 14:30:40 -07:00
Justin Tahara
508a88c8d7 fix(helm): Migrate from Bitnami NGINX (#5599) 2025-10-03 17:45:59 -07:00
Justin Tahara
b6f81fbb8e fix(SSO): Logout functionality fixed (#5600) 2025-10-03 16:58:51 -07:00
Chris Weaver
b9b66396ec fix: try reduce playwright flake (#5598) 2025-10-03 13:10:32 -07:00
Justin Tahara
dd20b9ef4c fix(helm): MinIO Migration from Bitnami (#5597) 2025-10-03 12:32:27 -07:00
Evan Lohn
e1f7e8cacf feat: better interface for slim connectors (#5592) 2025-10-03 10:51:14 -07:00
Justin Tahara
fd567279fd fix(helm): Chart dependency for DB chart (#5596) 2025-10-03 10:43:27 -07:00
Justin Tahara
1427eb3cf0 fix(helm): Remove Bitnami Dependency for DB Charts (#5593) 2025-10-03 10:38:13 -07:00
trial-danswer
e70be0f816 feat: add serper web search provider (#5545) 2025-10-03 10:35:33 -07:00
Justin Tahara
0014c7cff7 Revert "fix(github): Revert cache being turned off" (#5594) 2025-10-03 09:48:44 -07:00
Richard Guan
1c23dbeaee fix(mcp): asyncio simple sync run (#5591) 2025-10-03 01:29:57 +00:00
Chris Weaver
b2b122a24b fix: jira perm sync (#5585) 2025-10-02 16:32:22 -07:00
Wenxi
033ae74b0e fix: allow web connector to recurse www even if not specified (#5584) 2025-10-02 16:14:54 -07:00
Evan Lohn
c593fb4866 fix(github): Revert cache being turned off (#5589) 2025-10-02 15:36:01 -07:00
trial-danswer
b9580ef346 feat: Add download users (#5563) 2025-10-02 15:30:27 -07:00
Wenxi
4df3a9204f fix: reindex logic and allow seeded docs to refresh (#5578) 2025-10-02 15:22:24 -07:00
Wenxi
e0ad313a60 chore: bump playwright version (#5581) 2025-10-02 15:16:54 -07:00
Evan Lohn
a2bfb46edd fix: deny invalid space keys (#5570) 2025-10-02 15:01:17 -07:00
Evan Lohn
25e3371bee fix: minor mcp fixes + test (#5564) 2025-10-02 13:05:56 -07:00
Chris Weaver
4b9b306140 feat: enable DR by default (#5576) 2025-10-02 12:53:16 -07:00
Wenxi
ccf55136be feat: ollama official support (#5509) 2025-10-02 10:47:16 -07:00
Evan Lohn
a13db828f3 Revert "fix(github): Revert cache being turned off" (#5575) 2025-10-02 10:22:43 -07:00
SubashMohan
b7d56d0645 increase docid migration task priority (#5571) 2025-10-02 21:42:55 +05:30
SubashMohan
9ac70d35a8 Fix/indexattempt deletion failure (#5573) 2025-10-02 08:42:16 -07:00
Evan Lohn
7da792dd27 fix: user info 404s (#5567) 2025-10-01 23:20:31 +00:00
Chris Weaver
136c2f4082 Skip flakey test (#5566) 2025-10-01 15:14:18 -07:00
edwin-onyx
67bd14e801 fix(infra): lazy import litellm and some more pkgs and add layer to connector instantiation for lazy loading (#5488)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-10-01 15:05:27 -07:00
Evan Lohn
8c9a20be7a fix: atlassian scoped tokens (#5483) 2025-10-01 14:53:28 -07:00
Wenxi
0427845502 fix: weird text wrapping (#5565) 2025-10-01 14:41:49 -07:00
Chris Weaver
a85a5a324e More prod fixes (#5556) 2025-09-30 14:43:21 -07:00
SubashMohan
78f1fb5bf4 fix(projects): Fix Migration (#5550) 2025-09-30 12:56:40 -07:00
Evan Lohn
6a8a214324 fix: avoid attempting to retrieve with non-org owners (#5555) 2025-09-30 12:55:03 -07:00
Justin Tahara
884266c009 fix(saml): Update the route to take GET's and transform to POST (#5554) 2025-09-30 11:28:07 -07:00
Chris Weaver
2c422215e6 Fix prod compose (#5553) 2025-09-30 10:41:38 -07:00
joachim-danswer
32fe185bb4 fix: set gpt-5 thinking setting (#5539) 2025-09-30 09:57:36 -07:00
Chris Weaver
c2758a28d5 fix: project migration tweak (#5544) 2025-09-29 19:57:09 -07:00
Justin Tahara
5cda2e0173 feat(LLM): Add Claude Sonnet 4.5 (#5543) 2025-09-29 17:58:18 -07:00
Evan Lohn
9e885a68b3 feat: mcp client v2 (#5481) 2025-09-29 17:01:32 -07:00
Justin Tahara
376fc86b0c fix(saml): GET Method for SAML Callback (#5538) 2025-09-29 15:08:44 -07:00
Chris Weaver
2eb1444d80 fix: more test hardening (#5537) 2025-09-29 13:54:56 -07:00
SubashMohan
bd6ebe4718 feat(chat): add popup handling for image file selection in ChatInputBar (#5536) 2025-09-29 11:02:18 -07:00
Chris Weaver
691d63bc0f fix: remove console.log (#5533) 2025-09-29 10:54:54 -07:00
Chris Weaver
dfd4d9abef fix: playwright tests (#5522) 2025-09-29 09:04:10 -07:00
SubashMohan
4cb39bc150 fix chat issue and change view icon (#5525) 2025-09-29 12:28:07 +05:30
Chris Weaver
4e357478e0 fix: package-lock.json (#5530) 2025-09-28 13:43:42 -07:00
Wenxi
b5b1b3287c fix: update package lock after projects merge (#5514) 2025-09-28 13:00:32 -07:00
Wenxi
2f58a972eb fix: launch template post projects merge (#5528) 2025-09-28 12:57:54 -07:00
Yuhong Sun
6b39d8eed9 Docker Version Check (#5523) 2025-09-27 19:03:43 -07:00
Chris Weaver
f81c34d040 fix: editing/regeneration (#5521) 2025-09-27 17:43:03 -07:00
Yuhong Sun
0771b1f476 SQL plaintext file (#5520) 2025-09-27 15:36:44 -07:00
Jessica Singh
eedd2ba3fe fix(source selection): enable all by default and persist choice (#5511) 2025-09-26 17:15:40 -07:00
Chris Weaver
98554e5025 feat: small projects UX tweaks (#5513) 2025-09-26 15:33:37 -07:00
Justin Tahara
dcd2cad6b4 fix(infra): Increment Helm Version for Projects (#5512) 2025-09-26 13:59:27 -07:00
Chris Weaver
189f4bb071 fix: add bitbucket env vars (#5510) 2025-09-26 12:38:59 -07:00
SubashMohan
7eeab8fb80 feat(projects): add project creation and management (#5248)
Co-authored-by: Weves <chrisweaver101@gmail.com>
2025-09-26 12:05:20 -07:00
Justin Tahara
60f83dd0db fix(gmail): Skip over emails that don't have gmail enabled (#5506) 2025-09-25 19:57:47 -07:00
Jessica Singh
2618602fd6 fix(source filter): dark mode support (#5505) 2025-09-25 18:10:48 -07:00
Chris Weaver
b80f96de85 fix: LlmPopover after filling in an initial model (#5504) 2025-09-25 17:09:22 -07:00
edwin-onyx
74a15b2c01 fix(infra): fix some dependency hells and add some lazy loading to reduce celery worker RAM usage (#5478)
Co-authored-by: cubic-dev-ai[bot] <191113872+cubic-dev-ai[bot]@users.noreply.github.com>
2025-09-25 16:12:26 -07:00
Jessica Singh
408b80ce51 feat(source selection): adding source selection for internal search in chat (#5455) 2025-09-25 16:12:02 -07:00
Wenxi
e82b68c1b0 fix: update seeded docs connector name (#5502) 2025-09-25 15:58:54 -07:00
Justin Tahara
af5eec648b fix(playwright): Add new fix for Playwright test (#5503) 2025-09-25 15:34:24 -07:00
Chris Weaver
d186c5e82e feat(docker): Add DEV_MODE flag for exposing service ports (#5499)
Co-authored-by: Claude <noreply@anthropic.com>
Co-authored-by: justin-tahara <justintahara@gmail.com>
2025-09-25 15:08:20 -07:00
Justin Tahara
4420a50aed fix(github): Revert cache being turned off (#5487) 2025-09-25 14:07:58 -07:00
Justin Tahara
9caa6ea7ff feat(infra): Default to HPA w/ KEDA option (#5480) 2025-09-25 11:58:19 -07:00
Yuhong Sun
8d7b217d33 Deployment README (#5496) 2025-09-25 11:34:30 -07:00
Yuhong Sun
57908769f1 Port 80 (#5495) 2025-09-25 11:10:41 -07:00
2238 changed files with 219547 additions and 128210 deletions

8 .git-blame-ignore-revs Normal file

@@ -0,0 +1,8 @@
# Exclude these commits from git blame (e.g. mass reformatting).
# These are ignored by GitHub automatically.
# To enable this locally, run:
#
# git config blame.ignoreRevsFile .git-blame-ignore-revs
3134e5f840c12c8f32613ce520101a047c89dcc2 # refactor(whitespace): rm temporary react fragments (#7161)
ed3f72bc75f3e3a9ae9e4d8cd38278f9c97e78b4 # refactor(whitespace): rm react fragment #7190
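
For local use, a minimal sketch of the setup described in the comment above, wrapped in a CI step (the step itself and the blamed file path are hypothetical):

  - name: Blame with mass-reformatting commits ignored
    run: |
      # Point git blame at the ignore list committed at the repo root
      git config blame.ignoreRevsFile .git-blame-ignore-revs
      # Hypothetical path; any tracked file works
      git blame web/src/app/page.tsx | head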

7 .github/CODEOWNERS vendored

@@ -1,3 +1,10 @@
* @onyx-dot-app/onyx-core-team
# Helm charts Owners
/helm/ @justin-tahara
# Web standards updates
/web/STANDARDS.md @raunakab @Weves
# Agent context files
/CLAUDE.md.template @Weves
/AGENTS.md.template @Weves

43 .github/actionlint.yml vendored Normal file

@@ -0,0 +1,43 @@
self-hosted-runner:
# Labels of self-hosted runner in array of strings.
labels:
- extras=ecr-cache
- extras=s3-cache
- hdd=256
- runs-on
- runner=1cpu-linux-arm64
- runner=1cpu-linux-x64
- runner=2cpu-linux-arm64
- runner=2cpu-linux-x64
- runner=4cpu-linux-arm64
- runner=4cpu-linux-x64
- runner=8cpu-linux-arm64
- runner=8cpu-linux-x64
- runner=16cpu-linux-arm64
- runner=16cpu-linux-x64
- ubuntu-slim # Currently in public preview
- volume=40gb
- volume=50gb
# Configuration variables in array of strings defined in your repository or
# organization. `null` means disabling configuration variables check.
# Empty array means no configuration variable is allowed.
config-variables: null
# Configuration for file paths. The keys are glob patterns to match to file
# paths relative to the repository root. The values are the configurations for
# the file paths. Note that the path separator is always '/'.
# The following configurations are available.
#
# "ignore" is an array of regular expression patterns. Matched error messages
# are ignored. This is similar to the "-ignore" command line option.
paths:
# Glob pattern relative to the repository root for matching files. The path separator is always '/'.
# This example configures any YAML file under the '.github/workflows/' directory.
.github/workflows/**/*.{yml,yaml}:
# TODO: These are real and should be fixed eventually.
ignore:
- 'shellcheck reported issue in this script: SC2038:.+'
- 'shellcheck reported issue in this script: SC2046:.+'
- 'shellcheck reported issue in this script: SC2086:.+'
- 'shellcheck reported issue in this script: SC2193:.+'
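
actionlint discovers this configuration automatically at .github/actionlint.yml. A minimal lint job sketch, assuming the actionlint binary is already installed on the runner (the job name is hypothetical):

  lint-workflows:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Lint GitHub workflows
        run: actionlint # reads .github/actionlint.yml from the repo root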


@@ -1,135 +0,0 @@
name: 'Build and Push Docker Image with Retry'
description: 'Attempts to build and push a Docker image, with a retry on failure'
inputs:
context:
description: 'Build context'
required: true
file:
description: 'Dockerfile location'
required: true
platforms:
description: 'Target platforms'
required: true
pull:
description: 'Always attempt to pull a newer version of the image'
required: false
default: 'true'
push:
description: 'Push the image to registry'
required: false
default: 'true'
load:
description: 'Load the image into Docker daemon'
required: false
default: 'true'
tags:
description: 'Image tags'
required: true
no-cache:
description: 'Read from cache'
required: false
default: 'false'
cache-from:
description: 'Cache sources'
required: false
cache-to:
description: 'Cache destinations'
required: false
outputs:
description: 'Output destinations'
required: false
provenance:
description: 'Generate provenance attestation'
required: false
default: 'false'
build-args:
description: 'Build arguments'
required: false
retry-wait-time:
description: 'Time to wait before attempt 2 in seconds'
required: false
default: '60'
retry-wait-time-2:
description: 'Time to wait before attempt 3 in seconds'
required: false
default: '120'
runs:
using: "composite"
steps:
- name: Build and push Docker image (Attempt 1 of 3)
id: buildx1
uses: docker/build-push-action@v6
continue-on-error: true
with:
context: ${{ inputs.context }}
file: ${{ inputs.file }}
platforms: ${{ inputs.platforms }}
pull: ${{ inputs.pull }}
push: ${{ inputs.push }}
load: ${{ inputs.load }}
tags: ${{ inputs.tags }}
no-cache: ${{ inputs.no-cache }}
cache-from: ${{ inputs.cache-from }}
cache-to: ${{ inputs.cache-to }}
outputs: ${{ inputs.outputs }}
provenance: ${{ inputs.provenance }}
build-args: ${{ inputs.build-args }}
- name: Wait before attempt 2
if: steps.buildx1.outcome != 'success'
run: |
echo "First attempt failed. Waiting ${{ inputs.retry-wait-time }} seconds before retry..."
sleep ${{ inputs.retry-wait-time }}
shell: bash
- name: Build and push Docker image (Attempt 2 of 3)
id: buildx2
if: steps.buildx1.outcome != 'success'
uses: docker/build-push-action@v6
with:
context: ${{ inputs.context }}
file: ${{ inputs.file }}
platforms: ${{ inputs.platforms }}
pull: ${{ inputs.pull }}
push: ${{ inputs.push }}
load: ${{ inputs.load }}
tags: ${{ inputs.tags }}
no-cache: ${{ inputs.no-cache }}
cache-from: ${{ inputs.cache-from }}
cache-to: ${{ inputs.cache-to }}
outputs: ${{ inputs.outputs }}
provenance: ${{ inputs.provenance }}
build-args: ${{ inputs.build-args }}
- name: Wait before attempt 3
if: steps.buildx1.outcome != 'success' && steps.buildx2.outcome != 'success'
run: |
echo "Second attempt failed. Waiting ${{ inputs.retry-wait-time-2 }} seconds before retry..."
sleep ${{ inputs.retry-wait-time-2 }}
shell: bash
- name: Build and push Docker image (Attempt 3 of 3)
id: buildx3
if: steps.buildx1.outcome != 'success' && steps.buildx2.outcome != 'success'
uses: docker/build-push-action@v6
with:
context: ${{ inputs.context }}
file: ${{ inputs.file }}
platforms: ${{ inputs.platforms }}
pull: ${{ inputs.pull }}
push: ${{ inputs.push }}
load: ${{ inputs.load }}
tags: ${{ inputs.tags }}
no-cache: ${{ inputs.no-cache }}
cache-from: ${{ inputs.cache-from }}
cache-to: ${{ inputs.cache-to }}
outputs: ${{ inputs.outputs }}
provenance: ${{ inputs.provenance }}
build-args: ${{ inputs.build-args }}
- name: Report failure
if: steps.buildx1.outcome != 'success' && steps.buildx2.outcome != 'success' && steps.buildx3.outcome != 'success'
run: |
echo "All attempts failed. Possible transient infrastucture issues? Try again later or inspect logs for details."
shell: bash
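
For reference, a caller of this now-removed composite action would have looked roughly like the sketch below; the action's directory path is elided in this diff, so the uses: path is a placeholder, and the image tag is hypothetical:

  - name: Build and push backend image (with retry)
    uses: ./.github/actions/<removed-action-dir> # placeholder; path not shown in this diff
    with:
      context: ./backend
      file: ./backend/Dockerfile
      platforms: linux/amd64
      tags: onyxdotapp/onyx-backend:test # hypothetical tag
      retry-wait-time: "30"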


@@ -0,0 +1,17 @@
name: "Setup Playwright"
description: "Sets up Playwright and system deps (assumes Python and Playwright are installed)"
runs:
using: "composite"
steps:
- name: Cache playwright cache
uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-${{ runner.arch }}-playwright-${{ hashFiles('backend/requirements/default.txt') }}
restore-keys: |
${{ runner.os }}-${{ runner.arch }}-playwright-
- name: Install playwright
shell: bash
run: |
playwright install chromium --with-deps
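
A sketch of consuming this action from a job (the uses: paths are assumptions inferred from the action names; per the description above, Python and the playwright package must already be installed, which the setup-python step handles here):

  steps:
    - uses: actions/checkout@v4
    - name: Setup Python and dependencies
      uses: ./.github/actions/setup-python # assumed path
      with:
        requirements: |
          backend/requirements/default.txt
    - name: Setup Playwright
      uses: ./.github/actions/setup-playwright # assumed path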


@@ -0,0 +1,76 @@
name: "Setup Python and Install Dependencies"
description: "Sets up Python with uv and installs deps"
inputs:
requirements:
description: "Newline-separated list of requirement files to install (relative to repo root)"
required: true
runs:
using: "composite"
steps:
- name: Compute requirements hash
id: req-hash
shell: bash
env:
REQUIREMENTS: ${{ inputs.requirements }}
run: |
# Hash the contents of the specified requirement files
hash=""
while IFS= read -r req; do
if [ -n "$req" ] && [ -f "$req" ]; then
hash="$hash$(sha256sum "$req")"
fi
done <<< "$REQUIREMENTS"
echo "hash=$(echo "$hash" | sha256sum | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
# NOTE: This comes before Setup uv since clean-ups run in reverse chronological order
# such that Setup uv's prune-cache is able to prune the cache before we upload.
- name: Cache uv cache directory
uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
with:
path: ~/.cache/uv
key: ${{ runner.os }}-uv-${{ steps.req-hash.outputs.hash }}
restore-keys: |
${{ runner.os }}-uv-
- name: Setup uv
uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a # ratchet:astral-sh/setup-uv@v7
with:
version: "0.9.9"
# TODO: Enable caching once there is a uv.lock file checked in.
# with:
# enable-cache: true
- name: Setup Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # ratchet:actions/setup-python@v5
with:
python-version: "3.11"
- name: Create virtual environment
shell: bash
env:
VENV_DIR: ${{ runner.temp }}/venv
run: | # zizmor: ignore[github-env]
uv venv "$VENV_DIR"
# Validate path before adding to GITHUB_PATH to prevent code injection
if [ -d "$VENV_DIR/bin" ]; then
realpath "$VENV_DIR/bin" >> "$GITHUB_PATH"
else
echo "Error: $VENV_DIR/bin does not exist"
exit 1
fi
- name: Install Python dependencies with uv
shell: bash
env:
REQUIREMENTS: ${{ inputs.requirements }}
run: |
# Build the uv pip install command with each requirement file as array elements
cmd=("uv" "pip" "install")
while IFS= read -r req; do
# Skip empty lines
if [ -n "$req" ]; then
cmd+=("-r" "$req")
fi
done <<< "$REQUIREMENTS"
echo "Running: ${cmd[*]}"
"${cmd[@]}"

102 .github/actions/slack-notify/action.yml vendored Normal file

@@ -0,0 +1,102 @@
name: "Slack Notify on Failure"
description: "Sends a Slack notification when a workflow fails"
inputs:
webhook-url:
description: "Slack webhook URL (can also use SLACK_WEBHOOK_URL env var)"
required: false
failed-jobs:
description: "List of failed job names (newline-separated)"
required: false
title:
description: "Title for the notification"
required: false
default: "🚨 Workflow Failed"
ref-name:
description: "Git ref name (tag/branch)"
required: false
runs:
using: "composite"
steps:
- name: Send Slack notification
shell: bash
env:
SLACK_WEBHOOK_URL: ${{ inputs.webhook-url }}
FAILED_JOBS: ${{ inputs.failed-jobs }}
TITLE: ${{ inputs.title }}
REF_NAME: ${{ inputs.ref-name }}
REPO: ${{ github.repository }}
WORKFLOW: ${{ github.workflow }}
RUN_NUMBER: ${{ github.run_number }}
RUN_ID: ${{ github.run_id }}
SERVER_URL: ${{ github.server_url }}
GITHUB_REF_NAME: ${{ github.ref_name }}
run: |
if [ -z "$SLACK_WEBHOOK_URL" ]; then
echo "webhook-url input or SLACK_WEBHOOK_URL env var is not set, skipping notification"
exit 0
fi
# Build workflow URL
WORKFLOW_URL="${SERVER_URL}/${REPO}/actions/runs/${RUN_ID}"
# Use ref_name from input or fall back to github.ref_name
if [ -z "$REF_NAME" ]; then
REF_NAME="$GITHUB_REF_NAME"
fi
# Escape JSON special characters
escape_json() {
local input="$1"
# Escape backslashes first (but preserve \n sequences)
# Protect \n sequences temporarily
input=$(printf '%s' "$input" | sed 's/\\n/\x01NL\x01/g')
# Escape remaining backslashes
input=$(printf '%s' "$input" | sed 's/\\/\\\\/g')
# Restore \n sequences (single backslash, will be correct in JSON)
input=$(printf '%s' "$input" | sed 's/\x01NL\x01/\\n/g')
# Escape quotes
printf '%s' "$input" | sed 's/"/\\"/g'
}
REF_NAME_ESC=$(escape_json "$REF_NAME")
FAILED_JOBS_ESC=$(escape_json "$FAILED_JOBS")
WORKFLOW_URL_ESC=$(escape_json "$WORKFLOW_URL")
TITLE_ESC=$(escape_json "$TITLE")
# Build JSON payload piece by piece
# Note: FAILED_JOBS_ESC already contains \n sequences that should remain as \n in JSON
PAYLOAD="{"
PAYLOAD="${PAYLOAD}\"text\":\"${TITLE_ESC}\","
PAYLOAD="${PAYLOAD}\"blocks\":[{"
PAYLOAD="${PAYLOAD}\"type\":\"header\","
PAYLOAD="${PAYLOAD}\"text\":{\"type\":\"plain_text\",\"text\":\"${TITLE_ESC}\"}"
PAYLOAD="${PAYLOAD}},{"
PAYLOAD="${PAYLOAD}\"type\":\"section\","
PAYLOAD="${PAYLOAD}\"fields\":["
if [ -n "$REF_NAME" ]; then
PAYLOAD="${PAYLOAD}{\"type\":\"mrkdwn\",\"text\":\"*Ref:*\\n${REF_NAME_ESC}\"},"
fi
PAYLOAD="${PAYLOAD}{\"type\":\"mrkdwn\",\"text\":\"*Run ID:*\\n#${RUN_NUMBER}\"}"
PAYLOAD="${PAYLOAD}]"
PAYLOAD="${PAYLOAD}}"
if [ -n "$FAILED_JOBS" ]; then
PAYLOAD="${PAYLOAD},{"
PAYLOAD="${PAYLOAD}\"type\":\"section\","
PAYLOAD="${PAYLOAD}\"text\":{\"type\":\"mrkdwn\",\"text\":\"*Failed Jobs:*\\n${FAILED_JOBS_ESC}\"}"
PAYLOAD="${PAYLOAD}}"
fi
PAYLOAD="${PAYLOAD},{"
PAYLOAD="${PAYLOAD}\"type\":\"actions\","
PAYLOAD="${PAYLOAD}\"elements\":[{"
PAYLOAD="${PAYLOAD}\"type\":\"button\","
PAYLOAD="${PAYLOAD}\"text\":{\"type\":\"plain_text\",\"text\":\"View Workflow Run\"},"
PAYLOAD="${PAYLOAD}\"url\":\"${WORKFLOW_URL_ESC}\""
PAYLOAD="${PAYLOAD}}]"
PAYLOAD="${PAYLOAD}}"
PAYLOAD="${PAYLOAD}]"
PAYLOAD="${PAYLOAD}}"
curl -X POST -H 'Content-type: application/json' \
--data "$PAYLOAD" \
"$SLACK_WEBHOOK_URL"

24 .github/dependabot.yml vendored Normal file

@@ -0,0 +1,24 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
cooldown:
default-days: 7
open-pull-requests-limit: 3
assignees:
- "jmelahman"
labels:
- "dependabot:actions"
- package-ecosystem: "pip"
directory: "/backend"
schedule:
interval: "weekly"
cooldown:
default-days: 7
open-pull-requests-limit: 3
assignees:
- "jmelahman"
labels:
- "dependabot:python"


@@ -1,10 +1,10 @@
## Description
[Provide a brief description of the changes in this PR]
<!--- Provide a brief description of the changes in this PR --->
## How Has This Been Tested?
[Describe the tests you ran to verify your changes]
<!--- Describe the tests you ran to verify your changes --->
## Additional Options

1 .github/runs-on.yml vendored Normal file

@@ -0,0 +1 @@
_extend: .github-private


@@ -1,24 +0,0 @@
name: Check Lazy Imports
on:
merge_group:
pull_request:
branches:
- main
- 'release/**'
jobs:
check-lazy-imports:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: '3.11'
- name: Check lazy imports
run: python3 backend/scripts/check_lazy_imports.py

1247 .github/workflows/deployment.yml vendored Normal file
File diff suppressed because it is too large


@@ -1,166 +0,0 @@
name: Build and Push Backend Image on Tag
on:
push:
tags:
- "*"
env:
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-backend-cloud' || 'onyxdotapp/onyx-backend' }}
DEPLOYMENT: ${{ contains(github.ref_name, 'cloud') && 'cloud' || 'standalone' }}
# don't tag cloud images with "latest"
LATEST_TAG: ${{ contains(github.ref_name, 'latest') && !contains(github.ref_name, 'cloud') }}
jobs:
build-and-push:
# TODO: investigate a matrix build like the web container
# See https://runs-on.com/runners/linux/
runs-on:
- runs-on
- runner=${{ matrix.platform == 'linux/amd64' && '8cpu-linux-x64' || '8cpu-linux-arm64' }}
- run-id=${{ github.run_id }}
- tag=platform-${{ matrix.platform }}
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout code
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
latest=false
tags: |
type=raw,value=${{ github.ref_name }}
type=raw,value=${{ env.LATEST_TAG == 'true' && 'latest' || '' }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Install build-essential
run: |
sudo apt-get update
sudo apt-get install -y build-essential
- name: Backend Image Docker Build and Push
id: build
uses: docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile
platforms: ${{ matrix.platform }}
push: true
build-args: |
ONYX_VERSION=${{ github.ref_name }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/backend-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/backend-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: backend-digests-${{ env.PLATFORM_PAIR }}-${{ github.run_id }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
runs-on: ubuntu-latest
needs:
- build-and-push
steps:
# Needed for trivyignore
- name: Checkout
uses: actions/checkout@v4
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: backend-digests-*-${{ github.run_id }}
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
latest=false
tags: |
type=raw,value=${{ github.ref_name }}
type=raw,value=${{ env.LATEST_TAG == 'true' && 'latest' || '' }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
# trivy has their own rate limiting issues causing this action to flake
# we worked around it by hardcoding to different db repos in env
# can re-enable when they figure it out
# https://github.com/aquasecurity/trivy/discussions/7538
# https://github.com/aquasecurity/trivy-action/issues/389
# Security: Using pinned digest (0.65.0@sha256:a22415a38938a56c379387a8163fcb0ce38b10ace73e593475d3658d578b2436)
# Security: No Docker socket mount needed for remote registry scanning
- name: Run Trivy vulnerability scanner
uses: nick-fields/retry@v3
with:
timeout_minutes: 30
max_attempts: 3
retry_wait_seconds: 10
command: |
docker run --rm -v $HOME/.cache/trivy:/root/.cache/trivy \
-v ${{ github.workspace }}/backend/.trivyignore:/tmp/.trivyignore:ro \
-e TRIVY_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-db:2" \
-e TRIVY_JAVA_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-java-db:1" \
-e TRIVY_USERNAME="${{ secrets.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ secrets.DOCKER_TOKEN }}" \
aquasec/trivy@sha256:a22415a38938a56c379387a8163fcb0ce38b10ace73e593475d3658d578b2436 \
image \
--skip-version-check \
--timeout 20m \
--severity CRITICAL,HIGH \
--ignorefile /tmp/.trivyignore \
docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}


@@ -1,158 +0,0 @@
name: Build and Push Cloud Web Image on Tag
# Identical to the web container build, but with correct image tag and build args
on:
push:
tags:
- "*cloud*"
env:
REGISTRY_IMAGE: onyxdotapp/onyx-web-server-cloud
DEPLOYMENT: cloud
jobs:
build:
runs-on:
- runs-on
- runner=${{ matrix.platform == 'linux/amd64' && '8cpu-linux-x64' || '8cpu-linux-arm64' }}
- run-id=${{ github.run_id }}
- tag=platform-${{ matrix.platform }}
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
latest=false
tags: |
type=raw,value=${{ github.ref_name }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: ./web
file: ./web/Dockerfile
platforms: ${{ matrix.platform }}
push: true
build-args: |
ONYX_VERSION=${{ github.ref_name }}
NEXT_PUBLIC_CLOUD_ENABLED=true
NEXT_PUBLIC_POSTHOG_KEY=${{ secrets.POSTHOG_KEY }}
NEXT_PUBLIC_POSTHOG_HOST=${{ secrets.POSTHOG_HOST }}
NEXT_PUBLIC_SENTRY_DSN=${{ secrets.SENTRY_DSN }}
NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=${{ secrets.STRIPE_PUBLISHABLE_KEY }}
NEXT_PUBLIC_GTM_ENABLED=true
NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=true
NEXT_PUBLIC_INCLUDE_ERROR_POPUP_SUPPORT_LINK=true
NODE_OPTIONS=--max-old-space-size=8192
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/cloudweb-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/cloudweb-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
# no-cache needed due to weird interactions with the builds for different platforms
# NOTE(rkuo): this may not be true any more with the proper cache prefixing by architecture - currently testing with it off
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: cloudweb-digests-${{ env.PLATFORM_PAIR }}-${{ github.run_id }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
runs-on: ubuntu-latest
needs:
- build
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: cloudweb-digests-*-${{ github.run_id }}
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
latest=false
tags: |
type=raw,value=${{ github.ref_name }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
# trivy has their own rate limiting issues causing this action to flake
# we worked around it by hardcoding to different db repos in env
# can re-enable when they figure it out
# https://github.com/aquasecurity/trivy/discussions/7538
# https://github.com/aquasecurity/trivy-action/issues/389
- name: Run Trivy vulnerability scanner
uses: nick-fields/retry@v3
with:
timeout_minutes: 30
max_attempts: 3
retry_wait_seconds: 10
command: |
docker run --rm -v $HOME/.cache/trivy:/root/.cache/trivy \
-e TRIVY_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-db:2" \
-e TRIVY_JAVA_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-java-db:1" \
-e TRIVY_USERNAME="${{ secrets.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ secrets.DOCKER_TOKEN }}" \
aquasec/trivy@sha256:a22415a38938a56c379387a8163fcb0ce38b10ace73e593475d3658d578b2436 \
image \
--skip-version-check \
--timeout 20m \
--severity CRITICAL,HIGH \
docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}


@@ -1,183 +0,0 @@
name: Build and Push Model Server Image on Tag
on:
push:
tags:
- "*"
env:
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-model-server-cloud' || 'onyxdotapp/onyx-model-server' }}
DOCKER_BUILDKIT: 1
BUILDKIT_PROGRESS: plain
DEPLOYMENT: ${{ contains(github.ref_name, 'cloud') && 'cloud' || 'standalone' }}
# don't tag cloud images with "latest"
LATEST_TAG: ${{ contains(github.ref_name, 'latest') && !contains(github.ref_name, 'cloud') }}
jobs:
# Bypassing this for now as the idea of not building is glitching
# releases and builds that depend on everything being tagged in docker
# 1) Preliminary job to check if the changed files are relevant
# check_model_server_changes:
# runs-on: ubuntu-latest
# outputs:
# changed: ${{ steps.check.outputs.changed }}
# steps:
# - name: Checkout code
# uses: actions/checkout@v4
#
# - name: Check if relevant files changed
# id: check
# run: |
# # Default to "false"
# echo "changed=false" >> $GITHUB_OUTPUT
#
# # Compare the previous commit (github.event.before) to the current one (github.sha)
# # If any file in backend/model_server/** or backend/Dockerfile.model_server is changed,
# # set changed=true
# if git diff --name-only ${{ github.event.before }} ${{ github.sha }} \
# | grep -E '^backend/model_server/|^backend/Dockerfile.model_server'; then
# echo "changed=true" >> $GITHUB_OUTPUT
# fi
check_model_server_changes:
runs-on: ubuntu-latest
outputs:
changed: "true"
steps:
- name: Bypass check and set output
run: echo "changed=true" >> $GITHUB_OUTPUT
build-amd64:
needs: [check_model_server_changes]
if: needs.check_model_server_changes.outputs.changed == 'true'
runs-on:
[runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}-amd64"]
env:
PLATFORM_PAIR: linux-amd64
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: System Info
run: |
df -h
free -h
docker system prune -af --volumes
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver-opts: |
image=moby/buildkit:latest
network=host
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and Push AMD64
uses: docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile.model_server
platforms: linux/amd64
push: true
tags: ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-amd64
build-args: |
DANSWER_VERSION=${{ github.ref_name }}
outputs: type=registry
provenance: false
cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
# no-cache: true
build-arm64:
needs: [check_model_server_changes]
if: needs.check_model_server_changes.outputs.changed == 'true'
runs-on:
[runs-on, runner=8cpu-linux-arm64, "run-id=${{ github.run_id }}-arm64"]
env:
PLATFORM_PAIR: linux-arm64
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: System Info
run: |
df -h
free -h
docker system prune -af --volumes
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
with:
driver-opts: |
image=moby/buildkit:latest
network=host
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and Push ARM64
uses: docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile.model_server
platforms: linux/arm64
push: true
tags: ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-arm64
build-args: |
DANSWER_VERSION=${{ github.ref_name }}
outputs: type=registry
provenance: false
cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
merge-and-scan:
needs: [build-amd64, build-arm64, check_model_server_changes]
if: needs.check_model_server_changes.outputs.changed == 'true'
runs-on: ubuntu-latest
steps:
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Create and Push Multi-arch Manifest
run: |
docker buildx create --use
docker buildx imagetools create -t ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }} \
${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-amd64 \
${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-arm64
if [[ "${{ env.LATEST_TAG }}" == "true" ]]; then
docker buildx imagetools create -t ${{ env.REGISTRY_IMAGE }}:latest \
${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-amd64 \
${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-arm64
fi
- name: Run Trivy vulnerability scanner
uses: nick-fields/retry@v3
with:
timeout_minutes: 30
max_attempts: 3
retry_wait_seconds: 10
command: |
docker run --rm -v $HOME/.cache/trivy:/root/.cache/trivy \
-e TRIVY_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-db:2" \
-e TRIVY_JAVA_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-java-db:1" \
-e TRIVY_USERNAME="${{ secrets.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ secrets.DOCKER_TOKEN }}" \
aquasec/trivy@sha256:a22415a38938a56c379387a8163fcb0ce38b10ace73e593475d3658d578b2436 \
image \
--skip-version-check \
--timeout 20m \
--severity CRITICAL,HIGH \
docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}

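For reference, the per-architecture build plus manifest-stitch pattern this workflow uses, as a standalone sketch (image name and tag are placeholders):

docker buildx build --platform linux/amd64 -t example/app:v1.0.0-amd64 --push .
docker buildx build --platform linux/arm64 -t example/app:v1.0.0-arm64 --push .
# imagetools stitches the per-arch tags into one multi-arch manifest; no local pull needed
docker buildx imagetools create -t example/app:v1.0.0 \
  example/app:v1.0.0-amd64 \
  example/app:v1.0.0-arm64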

@@ -1,169 +0,0 @@
name: Build and Push Web Image on Tag
on:
push:
tags:
- "*"
env:
REGISTRY_IMAGE: onyxdotapp/onyx-web-server
LATEST_TAG: ${{ contains(github.ref_name, 'latest') }}
DEPLOYMENT: standalone
jobs:
precheck:
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
outputs:
should-run: ${{ steps.set-output.outputs.should-run }}
steps:
- name: Check if tag contains "cloud"
id: set-output
run: |
if [[ "${{ github.ref_name }}" == *cloud* ]]; then
echo "should-run=false" >> "$GITHUB_OUTPUT"
else
echo "should-run=true" >> "$GITHUB_OUTPUT"
fi
build:
needs: precheck
if: needs.precheck.outputs.should-run == 'true'
runs-on:
- runs-on
- runner=${{ matrix.platform == 'linux/amd64' && '8cpu-linux-x64' || '8cpu-linux-arm64' }}
- run-id=${{ github.run_id }}
- tag=platform-${{ matrix.platform }}
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
latest=false
tags: |
type=raw,value=${{ github.ref_name }}
type=raw,value=${{ env.LATEST_TAG == 'true' && 'latest' || '' }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push by digest
id: build
uses: docker/build-push-action@v6
with:
context: ./web
file: ./web/Dockerfile
platforms: ${{ matrix.platform }}
push: true
build-args: |
ONYX_VERSION=${{ github.ref_name }}
NODE_OPTIONS=--max-old-space-size=8192
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/web-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/web-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
# no-cache needed due to weird interactions between the builds for different platforms
# NOTE(rkuo): this may no longer be true with proper per-architecture cache prefixing - currently testing with it off
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: web-digests-${{ env.PLATFORM_PAIR }}-${{ github.run_id }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
needs:
- build
if: needs.precheck.outputs.should-run == 'true'
runs-on: ubuntu-latest
steps:
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: web-digests-*-${{ github.run_id }}
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
latest=false
tags: |
type=raw,value=${{ github.ref_name }}
type=raw,value=${{ env.LATEST_TAG == 'true' && 'latest' || '' }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
# trivy has its own rate-limiting issues causing this action to flake
# we worked around them by hardcoding different db repos in env
# can re-enable the defaults when they figure it out
# https://github.com/aquasecurity/trivy/discussions/7538
# https://github.com/aquasecurity/trivy-action/issues/389
- name: Run Trivy vulnerability scanner
uses: nick-fields/retry@v3
with:
timeout_minutes: 30
max_attempts: 3
retry_wait_seconds: 10
command: |
docker run --rm -v $HOME/.cache/trivy:/root/.cache/trivy \
-e TRIVY_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-db:2" \
-e TRIVY_JAVA_DB_REPOSITORY="public.ecr.aws/aquasecurity/trivy-java-db:1" \
-e TRIVY_USERNAME="${{ secrets.DOCKER_USERNAME }}" \
-e TRIVY_PASSWORD="${{ secrets.DOCKER_TOKEN }}" \
aquasec/trivy@sha256:a22415a38938a56c379387a8163fcb0ce38b10ace73e593475d3658d578b2436 \
image \
--skip-version-check \
--timeout 20m \
--severity CRITICAL,HIGH \
docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}

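The merge step above expands the metadata-action JSON into repeated -t flags with jq; a self-contained sketch with a made-up payload:

# hypothetical output of docker/metadata-action
DOCKER_METADATA_OUTPUT_JSON='{"tags":["example/web:v1.2.3","example/web:latest"]}'
jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON"
# prints: -t example/web:v1.2.3 -t example/web:latest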
.github/workflows/docker-tag-beta.yml

@@ -0,0 +1,51 @@
# This workflow is set up to be manually triggered via the GitHub Actions tab.
# Given a version, it will tag the backend, web server, and model server images as "beta".
name: Tag Beta Version
on:
workflow_dispatch:
inputs:
version:
description: "The version (ie v1.0.0-beta.0) to tag as beta"
required: true
permissions:
contents: read
jobs:
tag:
# See https://runs-on.com/runners/linux/
# use a lower-powered instance since this just does I/O to Docker Hub
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}-tag"]
timeout-minutes: 45
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Enable Docker CLI experimental features
run: echo "DOCKER_CLI_EXPERIMENTAL=enabled" >> $GITHUB_ENV
- name: Pull, Tag and Push Web Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-web-server:beta onyxdotapp/onyx-web-server:${VERSION}
- name: Pull, Tag and Push API Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-backend:beta onyxdotapp/onyx-backend:${VERSION}
- name: Pull, Tag and Push Model Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-model-server:beta onyxdotapp/onyx-model-server:${VERSION}

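Assuming the GitHub CLI is available, the dispatch input above can also be supplied from a terminal (version value illustrative):

gh workflow run docker-tag-beta.yml -f version=v1.0.0-beta.0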

@@ -10,17 +10,21 @@ on:
description: "The version (ie v0.0.1) to tag as latest"
required: true
permissions:
contents: read
jobs:
tag:
# See https://runs-on.com/runners/linux/
# use a lower-powered instance since this just does I/O to Docker Hub
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}-tag"]
timeout-minutes: 45
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v1
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
@@ -29,9 +33,19 @@ jobs:
run: echo "DOCKER_CLI_EXPERIMENTAL=enabled" >> $GITHUB_ENV
- name: Pull, Tag and Push Web Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-web-server:latest onyxdotapp/onyx-web-server:${{ github.event.inputs.version }}
docker buildx imagetools create -t onyxdotapp/onyx-web-server:latest onyxdotapp/onyx-web-server:${VERSION}
- name: Pull, Tag and Push API Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-backend:latest onyxdotapp/onyx-backend:${{ github.event.inputs.version }}
docker buildx imagetools create -t onyxdotapp/onyx-backend:latest onyxdotapp/onyx-backend:${VERSION}
- name: Pull, Tag and Push Model Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-model-server:latest onyxdotapp/onyx-model-server:${VERSION}


@@ -12,22 +12,27 @@ jobs:
permissions:
contents: write
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Checkout
uses: actions/checkout@v4
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
fetch-depth: 0
persist-credentials: false
- name: Install Helm CLI
uses: azure/setup-helm@v4
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # ratchet:azure/setup-helm@v4
with:
version: v3.12.1
- name: Add required Helm repositories
run: |
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add onyx-vespa https://onyx-dot-app.github.io/vespa-helm-charts
helm repo add keda https://kedacore.github.io/charts
helm repo add cloudnative-pg https://cloudnative-pg.github.io/charts
helm repo add ot-container-kit https://ot-container-kit.github.io/helm-charts
helm repo add minio https://charts.min.io/
helm repo add code-interpreter https://onyx-dot-app.github.io/code-interpreter/
helm repo update
- name: Build chart dependencies
@@ -41,7 +46,7 @@ jobs:
done
- name: Publish Helm charts to gh-pages
uses: stefanprodan/helm-gh-pages@v1.7.0
uses: stefanprodan/helm-gh-pages@0ad2bb377311d61ac04ad9eb6f252fb68e207260 # ratchet:stefanprodan/helm-gh-pages@v1.7.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
charts_dir: deployment/helm/charts

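A hedged sketch of the repo-then-dependencies flow this job performs before publishing; the chart path is assumed from charts_dir above:

helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo update
# vendor the declared dependencies into charts/ before packaging
helm dependency build deployment/helm/charts/onyx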

@@ -1,171 +0,0 @@
# This workflow is intended to be manually triggered via the GitHub Actions tab.
# Given a hotfix commit, it will attempt to open a PR against each release branch and,
# by default, auto-merge them.
name: Hotfix release branches
on:
workflow_dispatch:
inputs:
hotfix_commit:
description: "Hotfix commit hash"
required: true
hotfix_suffix:
description: "Hotfix branch suffix (e.g. hotfix/v0.8-{suffix})"
required: true
release_branch_pattern:
description: "Release branch pattern (regex)"
required: true
default: "release/.*"
auto_merge:
description: "Automatically merge the hotfix PRs"
required: true
type: choice
default: "true"
options:
- true
- false
jobs:
hotfix_release_branches:
permissions: write-all
# See https://runs-on.com/runners/linux/
# use a lower-powered instance since this just does lightweight git and GitHub API I/O
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
steps:
# needs RKUO_DEPLOY_KEY for write access to merge PRs
- name: Checkout Repository
uses: actions/checkout@v4
with:
ssh-key: "${{ secrets.RKUO_DEPLOY_KEY }}"
fetch-depth: 0
- name: Set up Git user
run: |
git config user.name "Richard Kuo [bot]"
git config user.email "rkuo[bot]@onyx.app"
- name: Fetch All Branches
run: |
git fetch --all --prune
- name: Verify Hotfix Commit Exists
run: |
git rev-parse --verify "${{ github.event.inputs.hotfix_commit }}" || { echo "Commit not found: ${{ github.event.inputs.hotfix_commit }}"; exit 1; }
- name: Get Release Branches
id: get_release_branches
run: |
BRANCHES=$(git branch -r | grep -E "${{ github.event.inputs.release_branch_pattern }}" | sed 's|origin/||' | tr -d ' ')
if [ -z "$BRANCHES" ]; then
echo "No release branches found matching pattern '${{ github.event.inputs.release_branch_pattern }}'."
exit 1
fi
echo "Found release branches:"
echo "$BRANCHES"
# Join the branches into a single line separated by commas
BRANCHES_JOINED=$(echo "$BRANCHES" | tr '\n' ',' | sed 's/,$//')
# Set the branches as an output
echo "branches=$BRANCHES_JOINED" >> $GITHUB_OUTPUT
# notes on all the vagaries of wiring up automated PRs
# https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs
# we must use a custom token for GH_TOKEN to trigger the subsequent PR checks
- name: Create and Merge Pull Requests to Matching Release Branches
env:
HOTFIX_COMMIT: ${{ github.event.inputs.hotfix_commit }}
HOTFIX_SUFFIX: ${{ github.event.inputs.hotfix_suffix }}
AUTO_MERGE: ${{ github.event.inputs.auto_merge }}
GH_TOKEN: ${{ secrets.RKUO_PERSONAL_ACCESS_TOKEN }}
run: |
# Get the branches from the previous step
BRANCHES="${{ steps.get_release_branches.outputs.branches }}"
# Convert BRANCHES to an array
IFS=$',' read -ra BRANCH_ARRAY <<< "$BRANCHES"
# Loop through each release branch and create and merge a PR
for RELEASE_BRANCH in "${BRANCH_ARRAY[@]}"; do
echo "Processing $RELEASE_BRANCH..."
# Parse out the release version by removing "release/" from the branch name
RELEASE_VERSION=${RELEASE_BRANCH#release/}
echo "Release version parsed: $RELEASE_VERSION"
HOTFIX_BRANCH="hotfix/${RELEASE_VERSION}-${HOTFIX_SUFFIX}"
echo "Creating PR from $HOTFIX_BRANCH to $RELEASE_BRANCH"
# Checkout the release branch
echo "Checking out $RELEASE_BRANCH"
git checkout "$RELEASE_BRANCH"
# Create the new hotfix branch
if git rev-parse --verify "$HOTFIX_BRANCH" >/dev/null 2>&1; then
echo "Hotfix branch $HOTFIX_BRANCH already exists. Skipping branch creation."
else
echo "Branching $RELEASE_BRANCH to $HOTFIX_BRANCH"
git checkout -b "$HOTFIX_BRANCH"
fi
# Check if the hotfix commit is a merge commit (merge commits have a second parent)
if git rev-parse --verify --quiet "$HOTFIX_COMMIT^2" >/dev/null; then
# -m 1 uses the target branch as the base (which is what we want)
echo "Hotfix commit $HOTFIX_COMMIT is a merge commit, using -m 1 for cherry-pick"
CHERRY_PICK_CMD="git cherry-pick -m 1 $HOTFIX_COMMIT"
else
CHERRY_PICK_CMD="git cherry-pick $HOTFIX_COMMIT"
fi
# Perform the cherry-pick
echo "Executing: $CHERRY_PICK_CMD"
eval "$CHERRY_PICK_CMD"
if [ $? -ne 0 ]; then
echo "Cherry-pick failed for $HOTFIX_COMMIT on $HOTFIX_BRANCH. Aborting..."
git cherry-pick --abort
continue
fi
# Push the hotfix branch to the remote
echo "Pushing $HOTFIX_BRANCH..."
git push origin "$HOTFIX_BRANCH"
echo "Hotfix branch $HOTFIX_BRANCH created and pushed."
# Check if PR already exists
EXISTING_PR=$(gh pr list --head "$HOTFIX_BRANCH" --base "$RELEASE_BRANCH" --state open --json number --jq '.[0].number')
if [ -n "$EXISTING_PR" ]; then
echo "An open PR already exists: #$EXISTING_PR. Skipping..."
continue
fi
# Create a new PR and capture the output
PR_OUTPUT=$(gh pr create --title "Merge $HOTFIX_BRANCH into $RELEASE_BRANCH" \
--body "Automated PR to merge \`$HOTFIX_BRANCH\` into \`$RELEASE_BRANCH\`." \
--head "$HOTFIX_BRANCH" --base "$RELEASE_BRANCH")
# Extract the URL from the output
PR_URL=$(echo "$PR_OUTPUT" | grep -Eo 'https://github.com/[^ ]+')
echo "Pull request created: $PR_URL"
# Extract PR number from URL
PR_NUMBER=$(basename "$PR_URL")
echo "Pull request created: $PR_NUMBER"
if [ "$AUTO_MERGE" == "true" ]; then
echo "Attempting to merge pull request #$PR_NUMBER"
# Attempt to merge the PR
gh pr merge "$PR_NUMBER" --merge --auto --delete-branch
if [ $? -eq 0 ]; then
echo "Pull request #$PR_NUMBER merged successfully."
else
# Optionally, handle the error or continue
echo "Failed to merge pull request #$PR_NUMBER."
fi
fi
done

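The cherry-pick branch above hinges on git's mainline selection for merge commits; the core pattern in isolation (hash is a placeholder):

git cherry-pick abc1234          # ordinary commit: replay its diff directly
git cherry-pick -m 1 abc1234     # merge commit: diff against parent 1, the branch it was merged into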
.github/workflows/merge-group.yml

@@ -0,0 +1,31 @@
name: Merge Group-Specific
on:
merge_group:
permissions:
contents: read
jobs:
# This job immediately succeeds to satisfy branch protection rules on merge_group events.
# There is a similarly named "required" job in pr-integration-tests.yml which runs the actual
# integration tests. That job runs on both pull_request and merge_group events, and this job
# exists solely to provide a fast-passing check with the same name for branch protection.
# The actual tests remain enforced on presubmit (pull_request events).
required:
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Success
run: echo "Success"
# This job immediately succeeds to satisfy branch protection rules on merge_group events.
# There is a similarly named "playwright-required" job in pr-playwright-tests.yml which runs
# the actual playwright tests. That job runs on both pull_request and merge_group events, and
# this job exists solely to provide a fast-passing check with the same name for branch protection.
# The actual tests remain enforced on presubmit (pull_request events).
playwright-required:
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Success
run: echo "Success"


@@ -7,12 +7,13 @@ permissions:
# contents: write # only for delete-branch option
issues: write
pull-requests: write
jobs:
stale:
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/stale@v9
- uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # ratchet:actions/stale@v10
with:
stale-issue-message: 'This issue is stale because it has been open 75 days with no activity. Remove stale label or comment or this will be closed in 15 days.'
stale-pr-message: 'This PR is stale because it has been open 75 days with no activity. Remove stale label or comment or this will be closed in 15 days.'
@@ -20,4 +21,3 @@ jobs:
close-pr-message: 'This PR was closed because it has been stalled for 90 days with no activity.'
days-before-stale: 75
# days-before-close: 90 # uncomment after we test stale behavior


@@ -15,19 +15,25 @@ on:
permissions:
actions: read
contents: read
security-events: write
jobs:
scan-licenses:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}"]
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}-scan-licenses"]
timeout-minutes: 45
permissions:
actions: read
contents: read
security-events: write
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # ratchet:actions/setup-python@v6
with:
python-version: '3.11'
cache: 'pip'
@@ -35,7 +41,7 @@ jobs:
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
- name: Get explicit and transitive dependencies
run: |
python -m pip install --upgrade pip
@@ -43,28 +49,30 @@ jobs:
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
pip install --retries 5 --timeout 30 -r backend/requirements/model_server.txt
pip freeze > requirements-all.txt
- name: Check python
id: license_check_report
uses: pilosus/action-pip-license-checker@v2
uses: pilosus/action-pip-license-checker@e909b0226ff49d3235c99c4585bc617f49fff16a # ratchet:pilosus/action-pip-license-checker@v3
with:
requirements: 'requirements-all.txt'
fail: 'Copyleft'
exclude: '(?i)^(pylint|aio[-_]*).*'
- name: Print report
if: always()
run: echo "${{ steps.license_check_report.outputs.report }}"
env:
REPORT: ${{ steps.license_check_report.outputs.report }}
run: echo "$REPORT"
- name: Install npm dependencies
working-directory: ./web
run: npm ci
# be careful enabling the sarif and upload as it may spam the security tab
# with a huge amount of items. Work out the issues before enabling upload.
# with a huge amount of items. Work out the issues before enabling upload.
# - name: Run Trivy vulnerability scanner in repo mode
# if: always()
# uses: aquasecurity/trivy-action@0.29.0
# uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # ratchet:aquasecurity/trivy-action@0.33.1
# with:
# scan-type: fs
# scan-ref: .
@@ -73,7 +81,7 @@ jobs:
# severity: HIGH,CRITICAL
# # format: sarif
# # output: trivy-results.sarif
#
#
# # - name: Upload Trivy scan results to GitHub Security tab
# # uses: github/codeql-action/upload-sarif@v3
# # with:
@@ -81,14 +89,15 @@ jobs:
scan-trivy:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}"]
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}-scan-trivy"]
timeout-minutes: 45
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
@@ -98,7 +107,7 @@ jobs:
run: docker pull onyxdotapp/onyx-backend:latest
- name: Run Trivy vulnerability scanner on backend
uses: aquasecurity/trivy-action@0.29.0
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # ratchet:aquasecurity/trivy-action@0.33.1
env:
TRIVY_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-db:2'
TRIVY_JAVA_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-java-db:1'
@@ -112,9 +121,9 @@ jobs:
# Web server
- name: Pull web server docker image
run: docker pull onyxdotapp/onyx-web-server:latest
- name: Run Trivy vulnerability scanner on web server
uses: aquasecurity/trivy-action@0.29.0
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # ratchet:aquasecurity/trivy-action@0.33.1
env:
TRIVY_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-db:2'
TRIVY_JAVA_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-java-db:1'
@@ -130,7 +139,7 @@ jobs:
run: docker pull onyxdotapp/onyx-model-server:latest
- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@0.29.0
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # ratchet:aquasecurity/trivy-action@0.33.1
env:
TRIVY_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-db:2'
TRIVY_JAVA_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-java-db:1'
@@ -139,4 +148,4 @@ jobs:
scanners: license
severity: HIGH,CRITICAL
vuln-type: library
exit-code: 0
exit-code: 0

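The TRIVY_*_DB_REPOSITORY overrides above avoid the default registry's rate limits; the same pattern works for a local scan (image name is a placeholder):

TRIVY_DB_REPOSITORY=public.ecr.aws/aquasecurity/trivy-db:2 \
TRIVY_JAVA_DB_REPOSITORY=public.ecr.aws/aquasecurity/trivy-java-db:1 \
trivy image --severity CRITICAL,HIGH example/app:latest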
.github/workflows/pr-database-tests.yml

@@ -0,0 +1,62 @@
name: Database Tests
concurrency:
group: Database-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
pull_request:
branches:
- main
- "release/**"
push:
tags:
- "v*.*.*"
permissions:
contents: read
jobs:
database-tests:
runs-on:
- runs-on
- runner=2cpu-linux-arm64
- "run-id=${{ github.run_id }}-database-tests"
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Setup Python and Install Dependencies
uses: ./.github/actions/setup-python-and-install-dependencies
with:
requirements: |
backend/requirements/default.txt
backend/requirements/dev.txt
- name: Generate OpenAPI schema and Python client
shell: bash
run: |
ods openapi all
# needed for pulling external images; otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Start Docker containers
working-directory: ./deployment/docker_compose
run: |
docker compose -f docker-compose.yml -f docker-compose.dev.yml up -d \
relational_db
- name: Run Database Tests
working-directory: ./backend
run: pytest -m alembic tests/integration/tests/migrations/

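The two -f flags layer the dev override onto the base compose file, and the trailing argument limits startup to the named service; to inspect what the layered files resolve to:

docker compose -f docker-compose.yml -f docker-compose.dev.yml config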

@@ -1,39 +1,63 @@
name: External Dependency Unit Tests
concurrency:
group: External-Dependency-Unit-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
pull_request:
branches: [main]
push:
tags:
- "v*.*.*"
permissions:
contents: read
env:
# AWS
S3_AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
S3_AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
# AWS credentials for S3-specific test
S3_AWS_ACCESS_KEY_ID_FOR_TEST: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
S3_AWS_SECRET_ACCESS_KEY_FOR_TEST: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
# MinIO
S3_ENDPOINT_URL: "http://localhost:9004"
S3_AWS_ACCESS_KEY_ID: "minioadmin"
S3_AWS_SECRET_ACCESS_KEY: "minioadmin"
# Confluence
CONFLUENCE_TEST_SPACE_URL: ${{ secrets.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_TEST_SPACE: ${{ secrets.CONFLUENCE_TEST_SPACE }}
CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_TEST_SPACE: ${{ vars.CONFLUENCE_TEST_SPACE }}
CONFLUENCE_TEST_PAGE_ID: ${{ secrets.CONFLUENCE_TEST_PAGE_ID }}
CONFLUENCE_IS_CLOUD: ${{ secrets.CONFLUENCE_IS_CLOUD }}
CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }}
# Jira
JIRA_ADMIN_API_TOKEN: ${{ secrets.JIRA_ADMIN_API_TOKEN }}
# LLMs
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
VERTEX_CREDENTIALS: ${{ secrets.VERTEX_CREDENTIALS }}
VERTEX_LOCATION: ${{ vars.VERTEX_LOCATION }}
# Code Interpreter
# TODO: debug why this is failing and enable
CODE_INTERPRETER_BASE_URL: http://localhost:8000
jobs:
discover-test-dirs:
runs-on: ubuntu-latest
# NOTE: GitHub-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
outputs:
test-dirs: ${{ steps.set-matrix.outputs.test-dirs }}
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Discover test directories
id: set-matrix
run: |
@@ -44,8 +68,12 @@ jobs:
external-dependency-unit-tests:
needs: discover-test-dirs
# Use larger runner with more resources for Vespa
runs-on: [runs-on, runner=16cpu-linux-x64, "run-id=${{ github.run_id }}"]
runs-on:
- runs-on
- runner=2cpu-linux-arm64
- ${{ format('run-id={0}-external-dependency-unit-tests-job-{1}', github.run_id, strategy['job-index']) }}
- extras=s3-cache
timeout-minutes: 45
strategy:
fail-fast: false
matrix:
@@ -54,43 +82,55 @@ jobs:
env:
PYTHONPATH: ./backend
MODEL_SERVER_HOST: "disabled"
DISABLE_TELEMETRY: "true"
steps:
- name: Checkout code
uses: actions/checkout@v4
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Set up Python
uses: actions/setup-python@v5
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
persist-credentials: false
- name: Setup Python and Install Dependencies
uses: ./.github/actions/setup-python-and-install-dependencies
with:
requirements: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/ee.txt
- name: Install Dependencies
- name: Setup Playwright
uses: ./.github/actions/setup-playwright
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Create .env file for Docker Compose
run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
playwright install chromium
playwright install-deps chromium
cat <<EOF > deployment/docker_compose/.env
CODE_INTERPRETER_BETA_ENABLED=true
DISABLE_TELEMETRY=true
EOF
- name: Set up Standard Dependencies
run: |
cd deployment/docker_compose
docker compose up -d minio relational_db cache index
- name: Wait for services
run: |
echo "Waiting for services to be ready..."
sleep 30
# Wait for Vespa specifically
echo "Waiting for Vespa to be ready..."
timeout 300 bash -c 'until curl -f -s http://localhost:8081/ApplicationStatus > /dev/null 2>&1; do echo "Vespa not ready, waiting..."; sleep 10; done' || echo "Vespa timeout - continuing anyway"
echo "Services should be ready now"
docker compose \
-f docker-compose.yml \
-f docker-compose.dev.yml \
up -d \
minio \
relational_db \
cache \
index \
code-interpreter
- name: Run migrations
run: |
@@ -101,10 +141,39 @@ jobs:
- name: Run Tests for ${{ matrix.test-dir }}
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
env:
TEST_DIR: ${{ matrix.test-dir }}
run: |
py.test \
--durations=8 \
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/external_dependency_unit/${{ matrix.test-dir }}
backend/tests/external_dependency_unit/${TEST_DIR}
- name: Collect Docker logs on failure
if: failure()
run: |
mkdir -p docker-logs
cd deployment/docker_compose
# Get list of running containers
containers=$(docker compose -f docker-compose.yml -f docker-compose.dev.yml ps -q)
# Collect logs from each container
for container in $containers; do
container_name=$(docker inspect --format='{{.Name}}' $container | sed 's/^\///')
echo "Collecting logs from $container_name..."
docker logs $container > ../../docker-logs/${container_name}.log 2>&1
done
cd ../..
echo "Docker logs collected in docker-logs directory"
- name: Upload Docker logs
if: failure()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v5
with:
name: docker-logs-${{ matrix.test-dir }}
path: docker-logs/
retention-days: 7

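The run-id entries above use the expression-language format() helper with the strategy context so each matrix entry gets a uniquely named ephemeral runner; a minimal illustrative fragment (directory names hypothetical):

strategy:
  matrix:
    test-dir: [alpha, beta]
runs-on:
  - runs-on
  - runner=2cpu-linux-arm64
  - ${{ format('run-id={0}-unit-job-{1}', github.run_id, strategy['job-index']) }}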

@@ -1,206 +1,250 @@
name: Helm - Lint and Test Charts
concurrency:
group: Helm-Lint-and-Test-Charts-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
pull_request:
branches: [ main ]
workflow_dispatch: # Allows manual triggering
branches: [main]
push:
tags:
- "v*.*.*"
workflow_dispatch: # Allows manual triggering
permissions:
contents: read
jobs:
helm-chart-check:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,hdd=256,"run-id=${{ github.run_id }}"]
runs-on:
[
runs-on,
runner=8cpu-linux-x64,
hdd=256,
"run-id=${{ github.run_id }}-helm-chart-check",
]
timeout-minutes: 45
# fetch-depth 0 is required for helm/chart-testing-action
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Helm
uses: azure/setup-helm@v4.2.0
with:
version: v3.17.0
- name: Set up chart-testing
uses: helm/chart-testing-action@v2.7.0
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
fetch-depth: 0
persist-credentials: false
# even though we specify chart-dirs in ct.yaml, it isn't used by ct for the list-changed command...
- name: Run chart-testing (list-changed)
id: list-changed
run: |
echo "default_branch: ${{ github.event.repository.default_branch }}"
changed=$(ct list-changed --remote origin --target-branch ${{ github.event.repository.default_branch }} --chart-dirs deployment/helm/charts)
echo "list-changed output: $changed"
if [[ -n "$changed" ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
- name: Set up Helm
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # ratchet:azure/setup-helm@v4.3.1
with:
version: v3.19.0
# uncomment to force run chart-testing
# - name: Force run chart-testing (list-changed)
# id: list-changed
# run: echo "changed=true" >> $GITHUB_OUTPUT
# lint all charts if any changes were detected
- name: Run chart-testing (lint)
if: steps.list-changed.outputs.changed == 'true'
run: ct lint --config ct.yaml --all
# the following would lint only changed charts, but linting isn't expensive
# run: ct lint --config ct.yaml --target-branch ${{ github.event.repository.default_branch }}
- name: Set up chart-testing
# NOTE: This is Jamison's patch from https://github.com/helm/chart-testing-action/pull/194
uses: helm/chart-testing-action@8958a6ac472cbd8ee9a8fbb6f1acbc1b0e966e44 # zizmor: ignore[impostor-commit]
with:
uv_version: "0.9.9"
- name: Create kind cluster
if: steps.list-changed.outputs.changed == 'true'
uses: helm/kind-action@v1.12.0
# even though we specify chart-dirs in ct.yaml, it isn't used by ct for the list-changed command...
- name: Run chart-testing (list-changed)
id: list-changed
env:
DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
run: |
echo "default_branch: ${DEFAULT_BRANCH}"
changed=$(ct list-changed --remote origin --target-branch ${DEFAULT_BRANCH} --chart-dirs deployment/helm/charts)
echo "list-changed output: $changed"
if [[ -n "$changed" ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
fi
- name: Pre-install cluster status check
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Pre-install Cluster Status ==="
kubectl get nodes -o wide
kubectl get pods --all-namespaces
kubectl get storageclass
# uncomment to force run chart-testing
# - name: Force run chart-testing (list-changed)
# id: list-changed
# run: echo "changed=true" >> $GITHUB_OUTPUT
# lint all charts if any changes were detected
- name: Run chart-testing (lint)
if: steps.list-changed.outputs.changed == 'true'
run: ct lint --config ct.yaml --all
# the following would lint only changed charts, but linting isn't expensive
# run: ct lint --config ct.yaml --target-branch ${{ github.event.repository.default_branch }}
- name: Add Helm repositories and update
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Adding Helm repositories ==="
helm repo add bitnami https://charts.bitnami.com/bitnami
helm repo add vespa https://onyx-dot-app.github.io/vespa-helm-charts
helm repo update
- name: Create kind cluster
if: steps.list-changed.outputs.changed == 'true'
uses: helm/kind-action@92086f6be054225fa813e0a4b13787fc9088faab # ratchet:helm/kind-action@v1.13.0
- name: Pre-pull critical images
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Pre-pulling critical images to avoid timeout ==="
# Get kind cluster name
KIND_CLUSTER=$(kubectl config current-context | sed 's/kind-//')
echo "Kind cluster: $KIND_CLUSTER"
# Pre-pull images that are likely to be used
echo "Pre-pulling PostgreSQL image..."
docker pull postgres:15-alpine || echo "Failed to pull postgres:15-alpine"
kind load docker-image postgres:15-alpine --name $KIND_CLUSTER || echo "Failed to load postgres image"
echo "Pre-pulling Redis image..."
docker pull redis:7-alpine || echo "Failed to pull redis:7-alpine"
kind load docker-image redis:7-alpine --name $KIND_CLUSTER || echo "Failed to load redis image"
echo "Pre-pulling Onyx images..."
docker pull docker.io/onyxdotapp/onyx-web-server:latest || echo "Failed to pull onyx web server"
docker pull docker.io/onyxdotapp/onyx-backend:latest || echo "Failed to pull onyx backend"
kind load docker-image docker.io/onyxdotapp/onyx-web-server:latest --name $KIND_CLUSTER || echo "Failed to load onyx web server"
kind load docker-image docker.io/onyxdotapp/onyx-backend:latest --name $KIND_CLUSTER || echo "Failed to load onyx backend"
echo "=== Images loaded into Kind cluster ==="
docker exec $KIND_CLUSTER-control-plane crictl images | grep -E "(postgres|redis|onyx)" || echo "Some images may still be loading..."
- name: Pre-install cluster status check
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Pre-install Cluster Status ==="
kubectl get nodes -o wide
kubectl get pods --all-namespaces
kubectl get storageclass
- name: Validate chart dependencies
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Validating chart dependencies ==="
cd deployment/helm/charts/onyx
helm dependency update
helm lint .
- name: Add Helm repositories and update
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Adding Helm repositories ==="
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add vespa https://onyx-dot-app.github.io/vespa-helm-charts
helm repo add cloudnative-pg https://cloudnative-pg.github.io/charts
helm repo add ot-container-kit https://ot-container-kit.github.io/helm-charts
helm repo add minio https://charts.min.io/
helm repo add code-interpreter https://onyx-dot-app.github.io/code-interpreter/
helm repo update
- name: Run chart-testing (install) with enhanced monitoring
timeout-minutes: 25
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Starting chart installation with monitoring ==="
# Function to monitor cluster state
monitor_cluster() {
while true; do
echo "=== Cluster Status Check at $(date) ==="
# Only show non-running pods to reduce noise
NON_RUNNING_PODS=$(kubectl get pods --all-namespaces --field-selector=status.phase!=Running,status.phase!=Succeeded --no-headers 2>/dev/null | wc -l)
if [ "$NON_RUNNING_PODS" -gt 0 ]; then
echo "Non-running pods:"
kubectl get pods --all-namespaces --field-selector=status.phase!=Running,status.phase!=Succeeded
- name: Install Redis operator
if: steps.list-changed.outputs.changed == 'true'
shell: bash
run: |
echo "=== Installing redis-operator CRDs ==="
helm upgrade --install redis-operator ot-container-kit/redis-operator \
--namespace redis-operator --create-namespace --wait --timeout 300s
- name: Pre-pull required images
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Pre-pulling required images to avoid timeout ==="
KIND_CLUSTER=$(kubectl config current-context | sed 's/kind-//')
echo "Kind cluster: $KIND_CLUSTER"
IMAGES=(
"ghcr.io/cloudnative-pg/cloudnative-pg:1.27.0"
"quay.io/opstree/redis:v7.0.15"
"docker.io/onyxdotapp/onyx-web-server:latest"
)
for image in "${IMAGES[@]}"; do
echo "Pre-pulling $image"
if docker pull "$image"; then
kind load docker-image "$image" --name "$KIND_CLUSTER" || echo "Failed to load $image into kind"
else
echo "All pods running successfully"
echo "Failed to pull $image"
fi
# Only show recent events if there are issues
RECENT_EVENTS=$(kubectl get events --sort-by=.lastTimestamp --all-namespaces --field-selector=type!=Normal 2>/dev/null | tail -5)
if [ -n "$RECENT_EVENTS" ]; then
echo "Recent warnings/errors:"
echo "$RECENT_EVENTS"
fi
sleep 60
done
}
# Start monitoring in background
monitor_cluster &
MONITOR_PID=$!
# Set up cleanup
cleanup() {
echo "=== Cleaning up monitoring process ==="
kill $MONITOR_PID 2>/dev/null || true
echo "=== Images loaded into Kind cluster ==="
docker exec "$KIND_CLUSTER"-control-plane crictl images | grep -E "(cloudnative-pg|redis|onyx)" || echo "Some images may still be loading..."
- name: Validate chart dependencies
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Validating chart dependencies ==="
cd deployment/helm/charts/onyx
helm dependency update
helm lint .
- name: Run chart-testing (install) with enhanced monitoring
timeout-minutes: 25
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Starting chart installation with monitoring ==="
# Function to monitor cluster state
monitor_cluster() {
while true; do
echo "=== Cluster Status Check at $(date) ==="
# Only show non-running pods to reduce noise
NON_RUNNING_PODS=$(kubectl get pods --all-namespaces --field-selector=status.phase!=Running,status.phase!=Succeeded --no-headers 2>/dev/null | wc -l)
if [ "$NON_RUNNING_PODS" -gt 0 ]; then
echo "Non-running pods:"
kubectl get pods --all-namespaces --field-selector=status.phase!=Running,status.phase!=Succeeded
else
echo "All pods running successfully"
fi
# Only show recent events if there are issues
RECENT_EVENTS=$(kubectl get events --sort-by=.lastTimestamp --all-namespaces --field-selector=type!=Normal 2>/dev/null | tail -5)
if [ -n "$RECENT_EVENTS" ]; then
echo "Recent warnings/errors:"
echo "$RECENT_EVENTS"
fi
sleep 60
done
}
# Start monitoring in background
monitor_cluster &
MONITOR_PID=$!
# Set up cleanup
cleanup() {
echo "=== Cleaning up monitoring process ==="
kill $MONITOR_PID 2>/dev/null || true
echo "=== Final cluster state ==="
kubectl get pods --all-namespaces
kubectl get events --all-namespaces --sort-by=.lastTimestamp | tail -20
}
# Trap cleanup on exit
trap cleanup EXIT
# Run the actual installation with detailed logging
echo "=== Starting ct install ==="
set +e
ct install --all \
--helm-extra-set-args="\
--set=nginx.enabled=false \
--set=minio.enabled=false \
--set=vespa.enabled=false \
--set=slackbot.enabled=false \
--set=postgresql.enabled=true \
--set=postgresql.nameOverride=cloudnative-pg \
--set=postgresql.cluster.storage.storageClass=standard \
--set=redis.enabled=true \
--set=redis.storageSpec.volumeClaimTemplate.spec.storageClassName=standard \
--set=webserver.replicaCount=1 \
--set=api.replicaCount=0 \
--set=inferenceCapability.replicaCount=0 \
--set=indexCapability.replicaCount=0 \
--set=celery_beat.replicaCount=0 \
--set=celery_worker_heavy.replicaCount=0 \
--set=celery_worker_docfetching.replicaCount=0 \
--set=celery_worker_docprocessing.replicaCount=0 \
--set=celery_worker_light.replicaCount=0 \
--set=celery_worker_monitoring.replicaCount=0 \
--set=celery_worker_primary.replicaCount=0 \
--set=celery_worker_user_file_processing.replicaCount=0 \
--set=celery_worker_user_files_indexing.replicaCount=0" \
--helm-extra-args="--timeout 900s --debug" \
--debug --config ct.yaml
CT_EXIT=$?
set -e
if [[ $CT_EXIT -ne 0 ]]; then
echo "ct install failed with exit code $CT_EXIT"
exit $CT_EXIT
else
echo "=== Installation completed successfully ==="
fi
kubectl get pods --all-namespaces
- name: Post-install verification
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Post-install verification ==="
kubectl get pods --all-namespaces
kubectl get services --all-namespaces
# Only show issues if they exist
kubectl describe pods --all-namespaces | grep -A 5 -B 2 "Failed\|Error\|Warning" || echo "No pod issues found"
- name: Cleanup on failure
if: failure() && steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Cleanup on failure ==="
echo "=== Final cluster state ==="
kubectl get pods --all-namespaces
kubectl get events --all-namespaces --sort-by=.lastTimestamp | tail -20
}
# Trap cleanup on exit
trap cleanup EXIT
# Run the actual installation with detailed logging
echo "=== Starting ct install ==="
ct install --all \
--helm-extra-set-args="\
--set=nginx.enabled=false \
--set=minio.enabled=false \
--set=vespa.enabled=false \
--set=slackbot.enabled=false \
--set=postgresql.enabled=true \
--set=postgresql.primary.persistence.enabled=false \
--set=redis.enabled=true \
--set=webserver.replicaCount=1 \
--set=api.replicaCount=0 \
--set=inferenceCapability.replicaCount=0 \
--set=indexCapability.replicaCount=0 \
--set=celery_beat.replicaCount=0 \
--set=celery_worker_heavy.replicaCount=0 \
--set=celery_worker_docfetching.replicaCount=0 \
--set=celery_worker_docprocessing.replicaCount=0 \
--set=celery_worker_light.replicaCount=0 \
--set=celery_worker_monitoring.replicaCount=0 \
--set=celery_worker_primary.replicaCount=0 \
--set=celery_worker_user_files_indexing.replicaCount=0" \
--helm-extra-args="--timeout 900s --debug" \
--debug --config ct.yaml
echo "=== Installation completed successfully ==="
kubectl get pods --all-namespaces
kubectl get events --all-namespaces --sort-by=.lastTimestamp | tail -10
- name: Post-install verification
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Post-install verification ==="
kubectl get pods --all-namespaces
kubectl get services --all-namespaces
# Only show issues if they exist
kubectl describe pods --all-namespaces | grep -A 5 -B 2 "Failed\|Error\|Warning" || echo "No pod issues found"
echo "=== Pod descriptions for debugging ==="
kubectl describe pods --all-namespaces | grep -A 10 -B 3 "Failed\|Error\|Warning\|Pending" || echo "No problematic pods found"
- name: Cleanup on failure
if: failure() && steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Cleanup on failure ==="
echo "=== Final cluster state ==="
kubectl get pods --all-namespaces
kubectl get events --all-namespaces --sort-by=.lastTimestamp | tail -10
echo "=== Pod descriptions for debugging ==="
kubectl describe pods --all-namespaces | grep -A 10 -B 3 "Failed\|Error\|Warning\|Pending" || echo "No problematic pods found"
echo "=== Recent logs for debugging ==="
kubectl logs --all-namespaces --tail=50 | grep -i "error\|timeout\|failed\|pull" || echo "No error logs found"
echo "=== Helm releases ==="
helm list --all-namespaces
# the following would install only changed charts, but we only have one chart so
# don't worry about that for now
# run: ct install --target-branch ${{ github.event.repository.default_branch }}
echo "=== Recent logs for debugging ==="
kubectl logs --all-namespaces --tail=50 | grep -i "error\|timeout\|failed\|pull" || echo "No error logs found"
echo "=== Helm releases ==="
helm list --all-namespaces
# the following would install only changed charts, but we only have one chart so
# don't worry about that for now
# run: ct install --target-branch ${{ github.event.repository.default_branch }}

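The pre-pull steps above rely on kind's image side-loading so in-cluster pulls hit the node's local store; the core pattern in isolation (cluster name is illustrative):

docker pull postgres:15-alpine
kind load docker-image postgres:15-alpine --name chart-testing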

@@ -9,41 +9,54 @@ on:
branches:
- main
- "release/**"
push:
tags:
- "v*.*.*"
permissions:
contents: read
env:
# Private Registry Configuration
PRIVATE_REGISTRY: experimental-registry.blacksmith.sh:5000
PRIVATE_REGISTRY_USERNAME: ${{ secrets.PRIVATE_REGISTRY_USERNAME }}
PRIVATE_REGISTRY_PASSWORD: ${{ secrets.PRIVATE_REGISTRY_PASSWORD }}
# Test Environment Variables
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
CONFLUENCE_TEST_SPACE_URL: ${{ secrets.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }}
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
JIRA_API_TOKEN_SCOPED: ${{ secrets.JIRA_API_TOKEN_SCOPED }}
PERM_SYNC_SHAREPOINT_CLIENT_ID: ${{ secrets.PERM_SYNC_SHAREPOINT_CLIENT_ID }}
PERM_SYNC_SHAREPOINT_PRIVATE_KEY: ${{ secrets.PERM_SYNC_SHAREPOINT_PRIVATE_KEY }}
PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD: ${{ secrets.PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD }}
PERM_SYNC_SHAREPOINT_DIRECTORY_ID: ${{ secrets.PERM_SYNC_SHAREPOINT_DIRECTORY_ID }}
EXA_API_KEY: ${{ secrets.EXA_API_KEY }}
GITHUB_PERMISSION_SYNC_TEST_ACCESS_TOKEN: ${{ secrets.ONYX_GITHUB_PERMISSION_SYNC_TEST_ACCESS_TOKEN }}
GITHUB_PERMISSION_SYNC_TEST_ACCESS_TOKEN_CLASSIC: ${{ secrets.ONYX_GITHUB_PERMISSION_SYNC_TEST_ACCESS_TOKEN_CLASSIC }}
GITHUB_ADMIN_EMAIL: ${{ secrets.ONYX_GITHUB_ADMIN_EMAIL }}
GITHUB_TEST_USER_1_EMAIL: ${{ secrets.ONYX_GITHUB_TEST_USER_1_EMAIL }}
GITHUB_TEST_USER_2_EMAIL: ${{ secrets.ONYX_GITHUB_TEST_USER_2_EMAIL }}
jobs:
discover-test-dirs:
runs-on: blacksmith-2vcpu-ubuntu-2404-arm
# NOTE: GitHub-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
outputs:
test-dirs: ${{ steps.set-matrix.outputs.test-dirs }}
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Discover test directories
id: set-matrix
run: |
# Find all leaf-level directories in both test directories
tests_dirs=$(find backend/tests/integration/tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" -exec basename {} \; | sort)
tests_dirs=$(find backend/tests/integration/tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" ! -name "mcp" -exec basename {} \; | sort)
connector_dirs=$(find backend/tests/integration/connector_job_tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" -exec basename {} \; | sort)
# Create JSON array with directory info
@@ -59,141 +72,189 @@ jobs:
all_dirs="[${all_dirs%,}]"
echo "test-dirs=$all_dirs" >> $GITHUB_OUTPUT
prepare-build:
runs-on: blacksmith-2vcpu-ubuntu-2404-arm
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
- name: Generate OpenAPI schema
working-directory: ./backend
env:
PYTHONPATH: "."
run: |
python scripts/onyx_openapi_schema.py --filename generated/openapi.json
- name: Generate OpenAPI Python client
working-directory: ./backend
run: |
docker run --rm \
-v "${{ github.workspace }}/backend/generated:/local" \
openapitools/openapi-generator-cli generate \
-i /local/openapi.json \
-g python \
-o /local/onyx_openapi_client \
--package-name onyx_openapi_client \
--skip-validate-spec \
--openapi-normalizer "SIMPLIFY_ONEOF_ANYOF=true,SET_OAS3_NULLABLE=true"
- name: Upload OpenAPI artifacts
uses: actions/upload-artifact@v4
with:
name: openapi-artifacts
path: backend/generated/
build-backend-image:
runs-on: blacksmith-16vcpu-ubuntu-2404-arm
runs-on:
[
runs-on,
runner=1cpu-linux-arm64,
"run-id=${{ github.run_id }}-build-backend-image",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@v4
- name: Login to Private Registry
uses: docker/login-action@v3
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
registry: ${{ env.PRIVATE_REGISTRY }}
username: ${{ env.PRIVATE_REGISTRY_USERNAME }}
password: ${{ env.PRIVATE_REGISTRY_PASSWORD }}
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
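# e.g. REF_NAME "release/1.2" becomes cache suffix "release-1.2" (registry tags cannot contain "/")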
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: useblacksmith/setup-docker-builder@v1
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Backend Docker image
uses: useblacksmith/build-push-action@v2
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile
platforms: linux/arm64
tags: ${{ env.PRIVATE_REGISTRY }}/integration-test-onyx-backend:test-${{ github.run_id }}
push: true
outputs: type=registry
no-cache: true
tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-backend-test-${{ github.run_id }}
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache
type=registry,ref=onyxdotapp/onyx-backend:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
build-model-server-image:
runs-on: blacksmith-16vcpu-ubuntu-2404-arm
runs-on:
[
runs-on,
runner=1cpu-linux-arm64,
"run-id=${{ github.run_id }}-build-model-server-image",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@v4
- name: Login to Private Registry
uses: docker/login-action@v3
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
registry: ${{ env.PRIVATE_REGISTRY }}
username: ${{ env.PRIVATE_REGISTRY_USERNAME }}
password: ${{ env.PRIVATE_REGISTRY_PASSWORD }}
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: useblacksmith/setup-docker-builder@v1
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Model Server Docker image
uses: useblacksmith/build-push-action@v2
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile.model_server
platforms: linux/arm64
tags: ${{ env.PRIVATE_REGISTRY }}/integration-test-onyx-model-server:test-${{ github.run_id }}
push: true
outputs: type=registry
provenance: false
no-cache: true
tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-model-server-test-${{ github.run_id }}
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache
type=registry,ref=onyxdotapp/onyx-model-server:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache,mode=max
build-integration-image:
needs: prepare-build
runs-on:
[
runs-on,
runner=2cpu-linux-arm64,
"run-id=${{ github.run_id }}-build-integration-image",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Download OpenAPI artifacts
uses: actions/download-artifact@v4
with:
name: openapi-artifacts
path: backend/generated/
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling openapitools/openapi-generator-cli
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Build and push integration test image with Docker Bake
env:
INTEGRATION_REPOSITORY: ${{ env.RUNS_ON_ECR_CACHE }}
TAG: integration-test-${{ github.run_id }}
CACHE_SUFFIX: ${{ steps.format-branch.outputs.cache-suffix }}
HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
run: |
docker buildx bake --push \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA} \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX} \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache \
--set backend.cache-from=type=registry,ref=onyxdotapp/onyx-backend:latest \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA},mode=max \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX},mode=max \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache,mode=max \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA} \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX} \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA},mode=max \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX},mode=max \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache,mode=max \
integration
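Because the cache wiring for bake is easy to get wrong, `docker buildx bake --print` is a handy check: it resolves the bake file plus all --set overrides into JSON without building anything. A local sketch, assuming the repo's bake file defines the backend and integration targets as the overrides above imply:
# Resolve the merged build definition without building:
docker buildx bake --print integration | jq '.target | keys'
# Inspect the combined cache sources for one target:
docker buildx bake --print integration | jq '.target.integration."cache-from"'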
integration-tests:
needs:
@@ -203,7 +264,12 @@ jobs:
build-model-server-image,
build-integration-image,
]
runs-on:
- runs-on
- runner=4cpu-linux-arm64
- ${{ format('run-id={0}-integration-tests-job-{1}', github.run_id, strategy['job-index']) }}
- extras=ecr-cache
timeout-minutes: 45
strategy:
fail-fast: false
@@ -211,58 +277,47 @@ jobs:
test-dir: ${{ fromJson(needs.discover-test-dirs.outputs.test-dirs) }}
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
# NOTE: Use pre-ping/null pool to reduce flakiness due to dropped connections
# NOTE: don't need web server for integration tests
- name: Create .env file for Docker Compose
env:
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
cat <<EOF > deployment/docker_compose/.env
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true
AUTH_TYPE=basic
POSTGRES_POOL_PRE_PING=true
POSTGRES_USE_NULL_POOL=true
REQUIRE_EMAIL_VERIFICATION=false
DISABLE_TELEMETRY=true
ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID}
ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:integration-test-model-server-test-${RUN_ID}
INTEGRATION_TESTS_MODE=true
CHECK_TTL_MANAGEMENT_TASK_FREQUENCY_IN_HOURS=0.001
AUTO_LLM_UPDATE_INTERVAL_SECONDS=1
MCP_SERVER_ENABLED=true
EOF
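Compose reads the .env file sitting next to the compose files, so the image overrides take effect without inline variables. A quick local sanity check (a sketch, not part of the workflow):
cd deployment/docker_compose
# The rendered config should show the ECR-tagged images, not onyxdotapp/* defaults:
docker compose -f docker-compose.yml -f docker-compose.dev.yml config | grep 'image:'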
- name: Start Docker containers
run: |
cd deployment/docker_compose
docker compose -f docker-compose.yml -f docker-compose.dev.yml up \
relational_db \
index \
cache \
@@ -274,39 +329,45 @@ jobs:
-d
id: start_docker
- name: Wait for services to be ready
run: |
echo "Starting wait-for-service script..."
docker logs -f onyx-api_server-1 &
wait_for_service() {
local url=$1
local label=$2
local timeout=${3:-300} # default 5 minutes
local start_time
start_time=$(date +%s)
while true; do
local current_time
current_time=$(date +%s)
local elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. ${label} did not become ready in $timeout seconds."
exit 1
fi
local response
response=$(curl -s -o /dev/null -w "%{http_code}" "$url" || echo "curl_error")
if [ "$response" = "200" ]; then
echo "${label} is ready!"
break
elif [ "$response" = "curl_error" ]; then
echo "Curl encountered an error while checking ${label}. Retrying in 5 seconds..."
else
echo "${label} not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
}
wait_for_service "http://localhost:8080/health" "API server"
echo "Finished waiting for services."
- name: Start Mock Services
run: |
@@ -315,7 +376,7 @@ jobs:
-p mock-it-services-stack up -d
- name: Run Integration Tests for ${{ matrix.test-dir.name }}
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3
with:
timeout_minutes: 20
max_attempts: 3
@@ -336,21 +397,29 @@ jobs:
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e EXA_API_KEY=${EXA_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e CONFLUENCE_TEST_SPACE_URL=${CONFLUENCE_TEST_SPACE_URL} \
-e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \
-e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \
-e CONFLUENCE_ACCESS_TOKEN_SCOPED=${CONFLUENCE_ACCESS_TOKEN_SCOPED} \
-e JIRA_BASE_URL=${JIRA_BASE_URL} \
-e JIRA_USER_EMAIL=${JIRA_USER_EMAIL} \
-e JIRA_API_TOKEN=${JIRA_API_TOKEN} \
-e JIRA_API_TOKEN_SCOPED=${JIRA_API_TOKEN_SCOPED} \
-e PERM_SYNC_SHAREPOINT_CLIENT_ID=${PERM_SYNC_SHAREPOINT_CLIENT_ID} \
-e PERM_SYNC_SHAREPOINT_PRIVATE_KEY="${PERM_SYNC_SHAREPOINT_PRIVATE_KEY}" \
-e PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD=${PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD} \
-e PERM_SYNC_SHAREPOINT_DIRECTORY_ID=${PERM_SYNC_SHAREPOINT_DIRECTORY_ID} \
-e GITHUB_PERMISSION_SYNC_TEST_ACCESS_TOKEN=${GITHUB_PERMISSION_SYNC_TEST_ACCESS_TOKEN} \
-e GITHUB_PERMISSION_SYNC_TEST_ACCESS_TOKEN_CLASSIC=${GITHUB_PERMISSION_SYNC_TEST_ACCESS_TOKEN_CLASSIC} \
-e GITHUB_ADMIN_EMAIL=${GITHUB_ADMIN_EMAIL} \
-e GITHUB_TEST_USER_1_EMAIL=${GITHUB_TEST_USER_1_EMAIL} \
-e GITHUB_TEST_USER_2_EMAIL=${GITHUB_TEST_USER_2_EMAIL} \
-e TEST_WEB_HOSTNAME=test-runner \
-e MOCK_CONNECTOR_SERVER_HOST=mock_connector_server \
-e MOCK_CONNECTOR_SERVER_PORT=8001 \
${{ env.RUNS_ON_ECR_CACHE }}:integration-test-${{ github.run_id }} \
/app/tests/integration/${{ matrix.test-dir.path }}
# ------------------------------------------------------------
@@ -369,56 +438,41 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
with:
name: docker-all-logs-${{ matrix.test-dir.name }}
path: ${{ github.workspace }}/docker-compose.log
# ------------------------------------------------------------
- name: Stop Docker containers
if: always()
run: |
cd deployment/docker_compose
docker compose down -v
multitenant-tests:
needs:
[
build-backend-image,
build-model-server-image,
build-integration-image,
]
runs-on:
[
runs-on,
runner=8cpu-linux-arm64,
"run-id=${{ github.run_id }}-multitenant-tests",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Start Docker containers for multi-tenant tests
env:
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
cd deployment/docker_compose
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
@@ -426,7 +480,9 @@ jobs:
AUTH_TYPE=cloud \
REQUIRE_EMAIL_VERIFICATION=false \
DISABLE_TELEMETRY=true \
OPENAI_DEFAULT_API_KEY=${OPENAI_API_KEY} \
ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID} \
ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:integration-test-model-server-test-${RUN_ID} \
DEV_MODE=true \
docker compose -f docker-compose.multitenant-dev.yml up \
relational_db \
@@ -467,6 +523,9 @@ jobs:
echo "Finished waiting for service."
- name: Run Multi-Tenant Integration Tests
env:
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
echo "Running multi-tenant integration tests..."
docker run --rm --network onyx_default \
@@ -482,6 +541,7 @@ jobs:
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e EXA_API_KEY=${EXA_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e TEST_WEB_HOSTNAME=test-runner \
-e AUTH_TYPE=cloud \
@@ -489,9 +549,8 @@ jobs:
-e SKIP_RESET=true \
-e REQUIRE_EMAIL_VERIFICATION=false \
-e DISABLE_TELEMETRY=true \
-e DEV_MODE=true \
${ECR_CACHE}:integration-test-${RUN_ID} \
/app/tests/integration/multitenant_tests
- name: Dump API server logs (multi-tenant)
@@ -508,7 +567,7 @@ jobs:
- name: Upload logs (multi-tenant)
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
with:
name: docker-all-logs-multitenant
path: ${{ github.workspace }}/docker-compose-multitenant.log
@@ -519,18 +578,13 @@ jobs:
cd deployment/docker_compose
docker compose -f docker-compose.multitenant-dev.yml down -v
required:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
needs: [integration-tests, multitenant-tests]
if: ${{ always() }}
steps:
- name: Check job status
if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
run: exit 1

.github/workflows/pr-jest-tests.yml

@@ -0,0 +1,51 @@
name: Run Jest Tests
concurrency:
group: Run-Jest-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
pull_request:
branches:
- main
- "release/**"
push:
tags:
- "v*.*.*"
permissions:
contents: read
jobs:
jest-tests:
name: Jest Tests
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Setup node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v4
with:
node-version: 22
cache: "npm"
cache-dependency-path: ./web/package-lock.json
- name: Install node dependencies
working-directory: ./web
run: npm ci
- name: Run Jest tests
working-directory: ./web
run: npm test -- --ci --coverage --maxWorkers=50%
- name: Upload coverage reports
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
with:
name: jest-coverage-${{ github.run_id }}
path: ./web/coverage
retention-days: 7


@@ -1,7 +1,7 @@
name: PR Labeler
on:
pull_request:
branches:
- main
types:
@@ -12,11 +12,11 @@ on:
permissions:
contents: read
pull-requests: write
jobs:
validate_pr_title:
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Check PR title for Conventional Commits
env:
@@ -27,7 +27,7 @@ jobs:
echo "::error::❌ Your PR title does not follow the Conventional Commits format.
This check ensures that all pull requests use clear, consistent titles that help automate changelogs and improve project history.
Please update your PR title to follow the Conventional Commits style.
Here is a link to a blog post explaining why we've adopted the Conventional Commits style for our PR titles: https://xfuture-blog.com/working-with-conventional-commits
**Here are some examples of valid PR titles:**


@@ -1,12 +1,19 @@
name: Ensure PR references Linear
concurrency:
group: Ensure-PR-references-Linear-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
pull_request:
types: [opened, edited, reopened, synchronize]
permissions:
contents: read
jobs:
linear-check:
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Check PR body for Linear link or override
env:


@@ -6,22 +6,26 @@ concurrency:
on:
merge_group:
types: [checks_requested]
push:
tags:
- "v*.*.*"
permissions:
contents: read
env:
# Test Environment Variables
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
EXA_API_KEY: ${{ secrets.EXA_API_KEY }}
CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }}
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
JIRA_API_TOKEN_SCOPED: ${{ secrets.JIRA_API_TOKEN_SCOPED }}
PERM_SYNC_SHAREPOINT_CLIENT_ID: ${{ secrets.PERM_SYNC_SHAREPOINT_CLIENT_ID }}
PERM_SYNC_SHAREPOINT_PRIVATE_KEY: ${{ secrets.PERM_SYNC_SHAREPOINT_PRIVATE_KEY }}
PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD: ${{ secrets.PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD }}
@@ -29,18 +33,22 @@ env:
jobs:
discover-test-dirs:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
outputs:
test-dirs: ${{ steps.set-matrix.outputs.test-dirs }}
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Discover test directories
id: set-matrix
run: |
# Find all leaf-level directories in both test directories
tests_dirs=$(find backend/tests/integration/tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" ! -name "mcp" -exec basename {} \; | sort)
connector_dirs=$(find backend/tests/integration/connector_job_tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" -exec basename {} \; | sort)
# Create JSON array with directory info
@@ -56,141 +64,189 @@ jobs:
all_dirs="[${all_dirs%,}]"
echo "test-dirs=$all_dirs" >> $GITHUB_OUTPUT
prepare-build:
runs-on: blacksmith-2vcpu-ubuntu-2404-arm
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
- name: Generate OpenAPI schema
working-directory: ./backend
env:
PYTHONPATH: "."
run: |
python scripts/onyx_openapi_schema.py --filename generated/openapi.json
- name: Generate OpenAPI Python client
working-directory: ./backend
run: |
docker run --rm \
-v "${{ github.workspace }}/backend/generated:/local" \
openapitools/openapi-generator-cli generate \
-i /local/openapi.json \
-g python \
-o /local/onyx_openapi_client \
--package-name onyx_openapi_client \
--skip-validate-spec \
--openapi-normalizer "SIMPLIFY_ONEOF_ANYOF=true,SET_OAS3_NULLABLE=true"
- name: Upload OpenAPI artifacts
uses: actions/upload-artifact@v4
with:
name: openapi-artifacts
path: backend/generated/
build-backend-image:
runs-on:
[
runs-on,
runner=1cpu-linux-arm64,
"run-id=${{ github.run_id }}-build-backend-image",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile
platforms: linux/arm64
push: true
outputs: type=registry
tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-backend-test-${{ github.run_id }}
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache
type=registry,ref=onyxdotapp/onyx-backend:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
build-model-server-image:
runs-on:
[
runs-on,
runner=1cpu-linux-arm64,
"run-id=${{ github.run_id }}-build-model-server-image",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Model Server Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile.model_server
platforms: linux/arm64
push: true
outputs: type=registry
provenance: false
tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-model-server-test-${{ github.run_id }}
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache
type=registry,ref=onyxdotapp/onyx-model-server:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache,mode=max
build-integration-image:
needs: prepare-build
runs-on:
[
runs-on,
runner=2cpu-linux-arm64,
"run-id=${{ github.run_id }}-build-integration-image",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Download OpenAPI artifacts
uses: actions/download-artifact@v4
with:
name: openapi-artifacts
path: backend/generated/
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling openapitools/openapi-generator-cli
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push integration test image with Docker Bake
env:
INTEGRATION_REPOSITORY: ${{ env.RUNS_ON_ECR_CACHE }}
TAG: integration-test-${{ github.run_id }}
CACHE_SUFFIX: ${{ steps.format-branch.outputs.cache-suffix }}
HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
run: |
docker buildx bake --push \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA} \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX} \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache \
--set backend.cache-from=type=registry,ref=onyxdotapp/onyx-backend:latest \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA},mode=max \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX},mode=max \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache,mode=max \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA} \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX} \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA},mode=max \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX},mode=max \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache,mode=max \
integration
integration-tests-mit:
needs:
@@ -200,8 +256,12 @@ jobs:
build-model-server-image,
build-integration-image,
]
runs-on:
- runs-on
- runner=4cpu-linux-arm64
- ${{ format('run-id={0}-integration-tests-mit-job-{1}', github.run_id, strategy['job-index']) }}
- extras=ecr-cache
timeout-minutes: 45
strategy:
fail-fast: false
@@ -209,56 +269,45 @@ jobs:
test-dir: ${{ fromJson(needs.discover-test-dirs.outputs.test-dirs) }}
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
# NOTE: Use pre-ping/null pool to reduce flakiness due to dropped connections
# NOTE: don't need web server for integration tests
- name: Create .env file for Docker Compose
env:
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
cat <<EOF > deployment/docker_compose/.env
AUTH_TYPE=basic
POSTGRES_POOL_PRE_PING=true
POSTGRES_USE_NULL_POOL=true
REQUIRE_EMAIL_VERIFICATION=false
DISABLE_TELEMETRY=true
ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID}
ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:integration-test-model-server-test-${RUN_ID}
INTEGRATION_TESTS_MODE=true
MCP_SERVER_ENABLED=true
AUTO_LLM_UPDATE_INTERVAL_SECONDS=1
EOF
- name: Start Docker containers
run: |
cd deployment/docker_compose
docker compose -f docker-compose.yml -f docker-compose.dev.yml up \
relational_db \
index \
cache \
@@ -270,39 +319,45 @@ jobs:
-d
id: start_docker
- name: Wait for services to be ready
run: |
echo "Starting wait-for-service script..."
docker logs -f onyx-api_server-1 &
wait_for_service() {
local url=$1
local label=$2
local timeout=${3:-300} # default 5 minutes
local start_time
start_time=$(date +%s)
while true; do
local current_time
current_time=$(date +%s)
local elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. ${label} did not become ready in $timeout seconds."
exit 1
fi
local response
response=$(curl -s -o /dev/null -w "%{http_code}" "$url" || echo "curl_error")
if [ "$response" = "200" ]; then
echo "${label} is ready!"
break
elif [ "$response" = "curl_error" ]; then
echo "Curl encountered an error while checking ${label}. Retrying in 5 seconds..."
else
echo "${label} not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
}
wait_for_service "http://localhost:8080/health" "API server"
echo "Finished waiting for services."
- name: Start Mock Services
run: |
@@ -312,7 +367,7 @@ jobs:
# NOTE: Use pre-ping/null to reduce flakiness due to dropped connections
- name: Run Integration Tests for ${{ matrix.test-dir.name }}
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3
with:
timeout_minutes: 20
max_attempts: 3
@@ -333,13 +388,16 @@ jobs:
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e EXA_API_KEY=${EXA_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e CONFLUENCE_TEST_SPACE_URL=${CONFLUENCE_TEST_SPACE_URL} \
-e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \
-e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \
-e CONFLUENCE_ACCESS_TOKEN_SCOPED=${CONFLUENCE_ACCESS_TOKEN_SCOPED} \
-e JIRA_BASE_URL=${JIRA_BASE_URL} \
-e JIRA_USER_EMAIL=${JIRA_USER_EMAIL} \
-e JIRA_API_TOKEN=${JIRA_API_TOKEN} \
-e JIRA_API_TOKEN_SCOPED=${JIRA_API_TOKEN_SCOPED} \
-e PERM_SYNC_SHAREPOINT_CLIENT_ID=${PERM_SYNC_SHAREPOINT_CLIENT_ID} \
-e PERM_SYNC_SHAREPOINT_PRIVATE_KEY="${PERM_SYNC_SHAREPOINT_PRIVATE_KEY}" \
-e PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD=${PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD} \
@@ -347,7 +405,7 @@ jobs:
-e TEST_WEB_HOSTNAME=test-runner \
-e MOCK_CONNECTOR_SERVER_HOST=mock_connector_server \
-e MOCK_CONNECTOR_SERVER_PORT=8001 \
${{ env.RUNS_ON_ECR_CACHE }}:integration-test-${{ github.run_id }} \
/app/tests/integration/${{ matrix.test-dir.path }}
# ------------------------------------------------------------
@@ -366,31 +424,19 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
with:
name: docker-all-logs-${{ matrix.test-dir.name }}
path: ${{ github.workspace }}/docker-compose.log
# ------------------------------------------------------------
- name: Stop Docker containers
if: always()
run: |
cd deployment/docker_compose
docker compose down -v
required:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
needs: [integration-tests-mit]
if: ${{ always() }}
steps:
- name: Check job status
if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
run: exit 1


@@ -3,16 +3,20 @@ concurrency:
group: Run-Playwright-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
pull_request:
branches:
- main
- "release/**"
push:
tags:
- "v*.*.*"
permissions:
contents: read
env:
BUILDX_NO_DEFAULT_ATTESTATIONS: 1
# Test Environment Variables
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
@@ -23,173 +27,295 @@ env:
SLACK_CLIENT_ID: ${{ secrets.SLACK_CLIENT_ID }}
SLACK_CLIENT_SECRET: ${{ secrets.SLACK_CLIENT_SECRET }}
# for MCP Oauth tests
MCP_OAUTH_CLIENT_ID: ${{ secrets.MCP_OAUTH_CLIENT_ID }}
MCP_OAUTH_CLIENT_SECRET: ${{ secrets.MCP_OAUTH_CLIENT_SECRET }}
MCP_OAUTH_ISSUER: ${{ secrets.MCP_OAUTH_ISSUER }}
MCP_OAUTH_JWKS_URI: ${{ secrets.MCP_OAUTH_JWKS_URI }}
MCP_OAUTH_USERNAME: ${{ vars.MCP_OAUTH_USERNAME }}
MCP_OAUTH_PASSWORD: ${{ secrets.MCP_OAUTH_PASSWORD }}
# for MCP API Key tests
MCP_API_KEY: test-api-key-12345
MCP_API_KEY_TEST_PORT: 8005
MCP_API_KEY_TEST_URL: http://host.docker.internal:8005/mcp
MCP_API_KEY_SERVER_HOST: 0.0.0.0
MCP_API_KEY_SERVER_PUBLIC_HOST: host.docker.internal
MOCK_LLM_RESPONSE: true
MCP_TEST_SERVER_PORT: 8004
MCP_TEST_SERVER_URL: http://host.docker.internal:8004/mcp
MCP_TEST_SERVER_PUBLIC_URL: http://host.docker.internal:8004/mcp
MCP_TEST_SERVER_BIND_HOST: 0.0.0.0
MCP_TEST_SERVER_PUBLIC_HOST: host.docker.internal
MCP_SERVER_HOST: 0.0.0.0
MCP_SERVER_PUBLIC_HOST: host.docker.internal
MCP_SERVER_PUBLIC_URL: http://host.docker.internal:8004/mcp
jobs:
build-web-image:
runs-on:
[
runs-on,
runner=4cpu-linux-arm64,
"run-id=${{ github.run_id }}-build-web-image",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling external images; otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Web Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./web
file: ./web/Dockerfile
platforms: linux/arm64
tags: ${{ env.RUNS_ON_ECR_CACHE }}:playwright-test-web-${{ github.run_id }}
push: true
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache
type=registry,ref=onyxdotapp/onyx-web-server:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
build-backend-image:
runs-on:
[
runs-on,
runner=1cpu-linux-arm64,
"run-id=${{ github.run_id }}-build-backend-image",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling external images; otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile
platforms: linux/arm64
tags: ${{ env.RUNS_ON_ECR_CACHE }}:playwright-test-backend-${{ github.run_id }}
push: true
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache
type=registry,ref=onyxdotapp/onyx-backend:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
build-model-server-image:
runs-on:
[
runs-on,
runner=1cpu-linux-arm64,
"run-id=${{ github.run_id }}-build-model-server-image",
"extras=ecr-cache",
]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling external images; otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Model Server Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile.model_server
platforms: linux/arm64
tags: ${{ env.RUNS_ON_ECR_CACHE }}:playwright-test-model-server-${{ github.run_id }}
push: true
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache
type=registry,ref=onyxdotapp/onyx-model-server:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
playwright-tests:
needs: [build-web-image, build-backend-image, build-model-server-image]
name: Playwright Tests (${{ matrix.project }})
runs-on:
- runs-on
- runner=8cpu-linux-arm64
- "run-id=${{ github.run_id }}-playwright-tests-${{ matrix.project }}"
- "extras=ecr-cache"
- volume=50gb
timeout-minutes: 45
strategy:
fail-fast: false
matrix:
project: [admin, no-auth, exclusive]
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Setup node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v4
with:
node-version: 22
cache: "npm"
cache-dependency-path: ./web/package-lock.json
- name: Install node dependencies
working-directory: ./web
run: npm ci
- name: Cache playwright cache
uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-playwright-npm-${{ hashFiles('web/package-lock.json') }}
restore-keys: |
${{ runner.os }}-playwright-npm-
- name: Install playwright browsers
working-directory: ./web
run: npx playwright install --with-deps
- name: Create .env file for Docker Compose
env:
OPENAI_API_KEY_VALUE: ${{ env.OPENAI_API_KEY }}
EXA_API_KEY_VALUE: ${{ env.EXA_API_KEY }}
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
cat <<EOF > deployment/docker_compose/.env
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true
AUTH_TYPE=basic
GEN_AI_API_KEY=${OPENAI_API_KEY_VALUE}
EXA_API_KEY=${EXA_API_KEY_VALUE}
REQUIRE_EMAIL_VERIFICATION=false
DISABLE_TELEMETRY=true
ONYX_BACKEND_IMAGE=${ECR_CACHE}:playwright-test-backend-${RUN_ID}
ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:playwright-test-model-server-${RUN_ID}
ONYX_WEB_SERVER_IMAGE=${ECR_CACHE}:playwright-test-web-${RUN_ID}
EOF
if [ "${{ matrix.project }}" = "no-auth" ]; then
echo "PLAYWRIGHT_FORCE_EMPTY_LLM_PROVIDERS=true" >> deployment/docker_compose/.env
fi
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Start Docker containers
run: |
cd deployment/docker_compose
docker compose -f docker-compose.yml -f docker-compose.dev.yml -f docker-compose.mcp-oauth-test.yml -f docker-compose.mcp-api-key-test.yml up -d
id: start_docker
- name: Wait for service to be ready
@@ -204,15 +330,15 @@ jobs:
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. Service did not become ready in 5 minutes."
exit 1
fi
# Use curl with error handling to ignore specific exit code 56
response=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:8080/health || echo "curl_error")
if [ "$response" = "200" ]; then
echo "Service is ready!"
break
@@ -221,43 +347,129 @@ jobs:
else
echo "Service not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
echo "Finished waiting for service."
- name: Wait for MCP OAuth mock server
run: |
echo "Waiting for MCP OAuth mock server on port ${MCP_TEST_SERVER_PORT:-8004}..."
start_time=$(date +%s)
timeout=120
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. MCP OAuth mock server did not become ready in ${timeout}s."
exit 1
fi
if curl -sf "http://localhost:${MCP_TEST_SERVER_PORT:-8004}/healthz" > /dev/null; then
echo "MCP OAuth mock server is ready!"
break
fi
sleep 3
done
- name: Wait for MCP API Key mock server
run: |
echo "Waiting for MCP API Key mock server on port ${MCP_API_KEY_TEST_PORT:-8005}..."
start_time=$(date +%s)
timeout=120
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. MCP API Key mock server did not become ready in ${timeout}s."
exit 1
fi
if curl -sf "http://localhost:${MCP_API_KEY_TEST_PORT:-8005}/healthz" > /dev/null; then
echo "MCP API Key mock server is ready!"
break
fi
sleep 3
done
- name: Wait for web server to be ready
run: |
echo "Waiting for web server on port 3000..."
start_time=$(date +%s)
timeout=120
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. Web server did not become ready in ${timeout}s."
exit 1
fi
if curl -sf "http://localhost:3000/api/health" > /dev/null 2>&1 || \
curl -sf "http://localhost:3000/" > /dev/null 2>&1; then
echo "Web server is ready!"
break
fi
echo "Web server not ready yet. Retrying in 3 seconds..."
sleep 3
done
- name: Run Playwright tests
working-directory: ./web
run: npx playwright test
env:
PROJECT: ${{ matrix.project }}
run: |
# Create test-results directory to ensure it exists for artifact upload
mkdir -p test-results
if [ "${PROJECT}" = "no-auth" ]; then
export PLAYWRIGHT_FORCE_EMPTY_LLM_PROVIDERS=true
fi
npx playwright test --project ${PROJECT}
- uses: actions/upload-artifact@v4
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
if: always()
with:
# Chromatic automatically defaults to the test-results directory.
# Replace with the path to your custom directory and adjust the CHROMATIC_ARCHIVE_LOCATION environment variable accordingly.
name: test-results
path: ./web/test-results
# Includes test results and trace.zip files
name: playwright-test-results-${{ matrix.project }}-${{ github.run_id }}
path: ./web/test-results/
retention-days: 30
# save before stopping the containers so the logs can be captured
- name: Save Docker logs
if: success() || failure()
env:
WORKSPACE: ${{ github.workspace }}
run: |
cd deployment/docker_compose
docker compose logs > docker-compose.log
mv docker-compose.log ${{ github.workspace }}/docker-compose.log
mv docker-compose.log ${WORKSPACE}/docker-compose.log
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
with:
name: docker-logs
name: docker-logs-${{ matrix.project }}-${{ github.run_id }}
path: ${{ github.workspace }}/docker-compose.log
- name: Stop Docker containers
run: |
cd deployment/docker_compose
docker compose down -v
playwright-required:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
needs: [playwright-tests]
if: ${{ always() }}
steps:
- name: Check job status
if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
run: exit 1
# NOTE: Chromatic UI diff testing is currently disabled.
# We are using Playwright for local and CI testing without visual regression checks.
@@ -276,12 +488,12 @@ jobs:
# ]
# steps:
# - name: Checkout code
# uses: actions/checkout@v4
# uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
# with:
# fetch-depth: 0
# - name: Setup node
# uses: actions/setup-node@v4
# uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v4
# with:
# node-version: 22
@@ -290,7 +502,7 @@ jobs:
# run: npm ci
# - name: Download Playwright test results
# uses: actions/download-artifact@v4
# uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # ratchet:actions/download-artifact@v4
# with:
# name: test-results
# path: ./web/test-results


@@ -1,4 +1,7 @@
name: Python Checks
concurrency:
group: Python-Checks-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
@@ -6,62 +9,60 @@ on:
branches:
- main
- 'release/**'
push:
tags:
- "v*.*.*"
permissions:
contents: read
jobs:
mypy-check:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}"]
# Note: Mypy seems quite optimized for x64 compared to arm64.
# Similarly, mypy is single-threaded and incremental, so 2cpu is sufficient.
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}-mypy-check", "extras=s3-cache"]
timeout-minutes: 45
steps:
- name: Checkout code
uses: actions/checkout@v4
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: 'pip'
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
- run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
pip install --retries 5 --timeout 30 -r backend/requirements/model_server.txt
- name: Setup Python and Install Dependencies
uses: ./.github/actions/setup-python-and-install-dependencies
with:
requirements: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
backend/requirements/ee.txt
- name: Generate OpenAPI schema
working-directory: ./backend
env:
PYTHONPATH: "."
run: |
python scripts/onyx_openapi_schema.py --filename generated/openapi.json
- name: Generate OpenAPI schema and Python client
shell: bash
run: |
ods openapi all
- name: Generate OpenAPI Python client
working-directory: ./backend
run: |
docker run --rm \
-v "${{ github.workspace }}/backend/generated:/local" \
openapitools/openapi-generator-cli generate \
-i /local/openapi.json \
-g python \
-o /local/onyx_openapi_client \
--package-name onyx_openapi_client \
--skip-validate-spec \
--openapi-normalizer "SIMPLIFY_ONEOF_ANYOF=true,SET_OAS3_NULLABLE=true"
- name: Run MyPy
run: |
cd backend
mypy .
- name: Cache mypy cache
if: ${{ vars.DISABLE_MYPY_CACHE != 'true' }}
uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
with:
path: backend/.mypy_cache
key: mypy-${{ runner.os }}-${{ hashFiles('**/*.py', '**/*.pyi', 'backend/pyproject.toml') }}
restore-keys: |
mypy-${{ runner.os }}-
- name: Check import order with reorder-python-imports
run: |
cd backend
find ./onyx -name "*.py" | xargs reorder-python-imports --py311-plus
- name: Run MyPy
working-directory: ./backend
env:
MYPY_FORCE_COLOR: 1
TERM: xterm-256color
run: mypy .
- name: Check code formatting with Black
run: |
cd backend
black --check .
- name: Run MyPy (tools/)
env:
MYPY_FORCE_COLOR: 1
TERM: xterm-256color
run: mypy tools/


@@ -1,30 +1,49 @@
name: Connector Tests
concurrency:
group: Connector-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
pull_request:
branches: [main]
push:
tags:
- "v*.*.*"
schedule:
# This cron expression runs the job daily at 16:00 UTC (9am PT)
- cron: "0 16 * * *"
permissions:
contents: read
env:
# AWS
AWS_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS: ${{ secrets.AWS_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS }}
AWS_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS: ${{ secrets.AWS_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS }}
# Cloudflare R2
R2_ACCOUNT_ID_DAILY_CONNECTOR_TESTS: ${{ vars.R2_ACCOUNT_ID_DAILY_CONNECTOR_TESTS }}
R2_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS: ${{ secrets.R2_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS }}
R2_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS: ${{ secrets.R2_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS }}
# Google Cloud Storage
GCS_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS: ${{ secrets.GCS_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS }}
GCS_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS: ${{ secrets.GCS_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS }}
# Confluence
CONFLUENCE_TEST_SPACE_URL: ${{ secrets.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_TEST_SPACE: ${{ secrets.CONFLUENCE_TEST_SPACE }}
CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_TEST_SPACE: ${{ vars.CONFLUENCE_TEST_SPACE }}
CONFLUENCE_TEST_PAGE_ID: ${{ secrets.CONFLUENCE_TEST_PAGE_ID }}
CONFLUENCE_IS_CLOUD: ${{ secrets.CONFLUENCE_IS_CLOUD }}
CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }}
# Jira
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
JIRA_API_TOKEN_SCOPED: ${{ secrets.JIRA_API_TOKEN_SCOPED }}
# Gong
GONG_ACCESS_KEY: ${{ secrets.GONG_ACCESS_KEY }}
@@ -54,22 +73,22 @@ env:
HUBSPOT_ACCESS_TOKEN: ${{ secrets.HUBSPOT_ACCESS_TOKEN }}
# IMAP
IMAP_HOST: ${{ secrets.IMAP_HOST }}
IMAP_USERNAME: ${{ secrets.IMAP_USERNAME }}
IMAP_HOST: ${{ vars.IMAP_HOST }}
IMAP_USERNAME: ${{ vars.IMAP_USERNAME }}
IMAP_PASSWORD: ${{ secrets.IMAP_PASSWORD }}
IMAP_MAILBOXES: ${{ secrets.IMAP_MAILBOXES }}
IMAP_MAILBOXES: ${{ vars.IMAP_MAILBOXES }}
# Airtable
AIRTABLE_TEST_BASE_ID: ${{ secrets.AIRTABLE_TEST_BASE_ID }}
AIRTABLE_TEST_TABLE_ID: ${{ secrets.AIRTABLE_TEST_TABLE_ID }}
AIRTABLE_TEST_TABLE_NAME: ${{ secrets.AIRTABLE_TEST_TABLE_NAME }}
AIRTABLE_TEST_BASE_ID: ${{ vars.AIRTABLE_TEST_BASE_ID }}
AIRTABLE_TEST_TABLE_ID: ${{ vars.AIRTABLE_TEST_TABLE_ID }}
AIRTABLE_TEST_TABLE_NAME: ${{ vars.AIRTABLE_TEST_TABLE_NAME }}
AIRTABLE_ACCESS_TOKEN: ${{ secrets.AIRTABLE_ACCESS_TOKEN }}
# Sharepoint
SHAREPOINT_CLIENT_ID: ${{ secrets.SHAREPOINT_CLIENT_ID }}
SHAREPOINT_CLIENT_ID: ${{ vars.SHAREPOINT_CLIENT_ID }}
SHAREPOINT_CLIENT_SECRET: ${{ secrets.SHAREPOINT_CLIENT_SECRET }}
SHAREPOINT_CLIENT_DIRECTORY_ID: ${{ secrets.SHAREPOINT_CLIENT_DIRECTORY_ID }}
SHAREPOINT_SITE: ${{ secrets.SHAREPOINT_SITE }}
SHAREPOINT_CLIENT_DIRECTORY_ID: ${{ vars.SHAREPOINT_CLIENT_DIRECTORY_ID }}
SHAREPOINT_SITE: ${{ vars.SHAREPOINT_SITE }}
# Github
ACCESS_TOKEN_GITHUB: ${{ secrets.ACCESS_TOKEN_GITHUB }}
@@ -96,36 +115,66 @@ env:
TEAMS_DIRECTORY_ID: ${{ secrets.TEAMS_DIRECTORY_ID }}
TEAMS_SECRET: ${{ secrets.TEAMS_SECRET }}
# Bitbucket
BITBUCKET_WORKSPACE: ${{ secrets.BITBUCKET_WORKSPACE }}
BITBUCKET_REPOSITORIES: ${{ secrets.BITBUCKET_REPOSITORIES }}
BITBUCKET_PROJECTS: ${{ secrets.BITBUCKET_PROJECTS }}
BITBUCKET_EMAIL: ${{ vars.BITBUCKET_EMAIL }}
BITBUCKET_API_TOKEN: ${{ secrets.BITBUCKET_API_TOKEN }}
# Fireflies
FIREFLIES_API_KEY: ${{ secrets.FIREFLIES_API_KEY }}
jobs:
connectors-check:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}"]
runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}-connectors-check", "extras=s3-cache"]
timeout-minutes: 45
env:
PYTHONPATH: ./backend
DISABLE_TELEMETRY: "true"
steps:
- name: Checkout code
uses: actions/checkout@v4
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Set up Python
uses: actions/setup-python@v5
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
persist-credentials: false
- name: Setup Python and Install Dependencies
uses: ./.github/actions/setup-python-and-install-dependencies
with:
requirements: |
backend/requirements/default.txt
backend/requirements/dev.txt
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
playwright install chromium
playwright install-deps chromium
- name: Setup Playwright
uses: ./.github/actions/setup-playwright
- name: Run Tests
- name: Detect Connector changes
id: changes
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # ratchet:dorny/paths-filter@v3
with:
filters: |
hubspot:
- 'backend/onyx/connectors/hubspot/**'
- 'backend/tests/daily/connectors/hubspot/**'
- 'uv.lock'
salesforce:
- 'backend/onyx/connectors/salesforce/**'
- 'backend/tests/daily/connectors/salesforce/**'
- 'uv.lock'
github:
- 'backend/onyx/connectors/github/**'
- 'backend/tests/daily/connectors/github/**'
- 'uv.lock'
file_processing:
- 'backend/onyx/file_processing/**'
- 'uv.lock'
- name: Run Tests (excluding HubSpot, Salesforce, GitHub, and Coda)
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
py.test \
@@ -135,14 +184,59 @@ jobs:
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/daily/connectors
backend/tests/daily/connectors \
--ignore backend/tests/daily/connectors/hubspot \
--ignore backend/tests/daily/connectors/salesforce \
--ignore backend/tests/daily/connectors/github \
--ignore backend/tests/daily/connectors/coda
- name: Run HubSpot Connector Tests
if: ${{ github.event_name == 'schedule' || steps.changes.outputs.hubspot == 'true' || steps.changes.outputs.file_processing == 'true' }}
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
py.test \
-n 8 \
--dist loadfile \
--durations=8 \
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/daily/connectors/hubspot
- name: Run Salesforce Connector Tests
if: ${{ github.event_name == 'schedule' || steps.changes.outputs.salesforce == 'true' || steps.changes.outputs.file_processing == 'true' }}
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
py.test \
-n 8 \
--dist loadfile \
--durations=8 \
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/daily/connectors/salesforce
- name: Run GitHub Connector Tests
if: ${{ github.event_name == 'schedule' || steps.changes.outputs.github == 'true' || steps.changes.outputs.file_processing == 'true' }}
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
py.test \
-n 8 \
--dist loadfile \
--durations=8 \
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/daily/connectors/github
- name: Alert on Failure
if: failure() && github.event_name == 'schedule'
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO: ${{ github.repository }}
RUN_ID: ${{ github.run_id }}
run: |
curl -X POST \
-H 'Content-type: application/json' \
--data '{"text":"Scheduled Connector Tests failed! Check the run at: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' \
--data "{\"text\":\"Scheduled Connector Tests failed! Check the run at: https://github.com/${REPO}/actions/runs/${RUN_ID}\"}" \
$SLACK_WEBHOOK


@@ -10,12 +10,15 @@ on:
description: 'Branch to run the workflow on'
required: false
default: 'main'
permissions:
contents: read
env:
# Bedrock
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION_NAME: ${{ secrets.AWS_REGION_NAME }}
AWS_REGION_NAME: ${{ vars.AWS_REGION_NAME }}
# API keys for testing
COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }}
@@ -23,22 +26,25 @@ env:
LITELLM_API_URL: ${{ secrets.LITELLM_API_URL }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
AZURE_API_KEY: ${{ secrets.AZURE_API_KEY }}
AZURE_API_URL: ${{ secrets.AZURE_API_URL }}
AZURE_API_URL: ${{ vars.AZURE_API_URL }}
jobs:
model-check:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}"]
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}-model-check"]
timeout-minutes: 45
env:
PYTHONPATH: ./backend
steps:
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Login to Docker Hub
uses: docker/login-action@v3
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
@@ -53,9 +59,9 @@ jobs:
run: |
docker pull onyxdotapp/onyx-model-server:latest
docker tag onyxdotapp/onyx-model-server:latest onyxdotapp/onyx-model-server:test
- name: Set up Python
uses: actions/setup-python@v5
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # ratchet:actions/setup-python@v6
with:
python-version: "3.11"
cache: "pip"
@@ -90,15 +96,15 @@ jobs:
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. Service did not become ready in 5 minutes."
exit 1
fi
# Use curl with error handling to ignore specific exit code 56
response=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:9000/api/health || echo "curl_error")
if [ "$response" = "200" ]; then
echo "Service is ready!"
break
@@ -107,11 +113,11 @@ jobs:
else
echo "Service not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
echo "Finished waiting for service."
- name: Run Tests
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
@@ -122,12 +128,14 @@ jobs:
if: failure() && github.event_name == 'schedule'
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO: ${{ github.repository }}
RUN_ID: ${{ github.run_id }}
run: |
curl -X POST \
-H 'Content-type: application/json' \
--data '{"text":"Scheduled Model Tests failed! Check the run at: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' \
--data "{\"text\":\"Scheduled Model Tests failed! Check the run at: https://github.com/${REPO}/actions/runs/${RUN_ID}\"}" \
$SLACK_WEBHOOK
- name: Dump all-container logs (optional)
if: always()
run: |
@@ -136,14 +144,7 @@ jobs:
- name: Upload logs
if: always()
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
with:
name: docker-all-logs
path: ${{ github.workspace }}/docker-compose.log
- name: Stop Docker containers
if: always()
run: |
cd deployment/docker_compose
docker compose -f docker-compose.model-server-test.yml down -v


@@ -1,4 +1,7 @@
name: Python Unit Tests
concurrency:
group: Python-Unit-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
@@ -6,39 +9,41 @@ on:
branches:
- main
- 'release/**'
push:
tags:
- "v*.*.*"
permissions:
contents: read
jobs:
backend-check:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}"]
runs-on: [runs-on, runner=2cpu-linux-arm64, "run-id=${{ github.run_id }}-backend-check"]
timeout-minutes: 45
env:
PYTHONPATH: ./backend
REDIS_CLOUD_PYTEST_PASSWORD: ${{ secrets.REDIS_CLOUD_PYTEST_PASSWORD }}
SF_USERNAME: ${{ secrets.SF_USERNAME }}
SF_PASSWORD: ${{ secrets.SF_PASSWORD }}
SF_SECURITY_TOKEN: ${{ secrets.SF_SECURITY_TOKEN }}
steps:
- name: Checkout code
uses: actions/checkout@v4
DISABLE_TELEMETRY: "true"
- name: Set up Python
uses: actions/setup-python@v5
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
python-version: '3.11'
cache: 'pip'
cache-dependency-path: |
persist-credentials: false
- name: Setup Python and Install Dependencies
uses: ./.github/actions/setup-python-and-install-dependencies
with:
requirements: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
pip install --retries 5 --timeout 30 -r backend/requirements/model_server.txt
backend/requirements/ee.txt
- name: Run Tests
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"


@@ -6,18 +6,44 @@ concurrency:
on:
merge_group:
pull_request: null
push:
branches:
- main
tags:
- "v*.*.*"
permissions:
contents: read
jobs:
quality-checks:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}"]
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
fetch-depth: 0
- uses: actions/setup-python@v5
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # ratchet:actions/setup-python@v6
with:
python-version: "3.11"
- uses: pre-commit/action@v3.0.1
- name: Setup Terraform
uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # ratchet:hashicorp/setup-terraform@v3
- name: Setup node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v6
with: # zizmor: ignore[cache-poisoning]
node-version: 22
cache: "npm"
cache-dependency-path: ./web/package-lock.json
- name: Install node dependencies
working-directory: ./web
run: npm ci
- uses: j178/prek-action@91fd7d7cf70ae1dee9f4f44e7dfa5d1073fe6623 # ratchet:j178/prek-action@v1
with:
extra_args: ${{ github.event_name == 'pull_request' && format('--from-ref {0} --to-ref {1}', github.event.pull_request.base.sha, github.event.pull_request.head.sha) || '' }}
prek-version: '0.2.21'
extra-args: ${{ github.event_name == 'pull_request' && format('--from-ref {0} --to-ref {1}', github.event.pull_request.base.sha, github.event.pull_request.head.sha) || github.event_name == 'merge_group' && format('--from-ref {0} --to-ref {1}', github.event.merge_group.base_sha, github.event.merge_group.head_sha) || github.ref_name == 'main' && '--all-files' || '' }}
- name: Check Actions
uses: giner/check-actions@28d366c7cbbe235f9624a88aa31a628167eee28c # ratchet:giner/check-actions@v1.0.1
with:
check_permissions: false
check_versions: false

.github/workflows/release-devtools.yml (new file)

@@ -0,0 +1,41 @@
name: Release Devtools
on:
push:
tags:
- "ods/v*.*.*"
jobs:
pypi:
runs-on: ubuntu-latest
environment:
name: release-devtools
permissions:
id-token: write
timeout-minutes: 10
strategy:
matrix:
os-arch:
- { goos: "linux", goarch: "amd64" }
- { goos: "linux", goarch: "arm64" }
- { goos: "windows", goarch: "amd64" }
- { goos: "windows", goarch: "arm64" }
- { goos: "darwin", goarch: "amd64" }
- { goos: "darwin", goarch: "arm64" }
- { goos: "", goarch: "" }
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
fetch-depth: 0
- uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a # ratchet:astral-sh/setup-uv@v7
with:
enable-cache: false
version: "0.9.9"
- run: |
GOOS="${{ matrix.os-arch.goos }}" \
GOARCH="${{ matrix.os-arch.goarch }}" \
uv build --wheel
working-directory: tools/ods
- run: uv publish
working-directory: tools/ods

.github/workflows/sync_foss.yml (new file)

@@ -0,0 +1,49 @@
name: Sync FOSS Repo
on:
schedule:
# Run daily at 3am PT (11am UTC during PST)
- cron: '0 11 * * *'
workflow_dispatch:
jobs:
sync-foss:
runs-on: ubuntu-latest
timeout-minutes: 45
permissions:
contents: read
steps:
- name: Checkout main Onyx repo
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
fetch-depth: 0
persist-credentials: false
- name: Install git-filter-repo
run: |
sudo apt-get update && sudo apt-get install -y git-filter-repo
- name: Configure SSH for deploy key
env:
FOSS_REPO_DEPLOY_KEY: ${{ secrets.FOSS_REPO_DEPLOY_KEY }}
run: |
mkdir -p ~/.ssh
echo "$FOSS_REPO_DEPLOY_KEY" > ~/.ssh/id_ed25519
chmod 600 ~/.ssh/id_ed25519
ssh-keyscan github.com >> ~/.ssh/known_hosts
- name: Set Git config
run: |
git config --global user.name "onyx-bot"
git config --global user.email "bot@onyx.app"
- name: Build FOSS version
run: bash backend/scripts/make_foss_repo.sh
- name: Push to FOSS repo
env:
FOSS_REPO_URL: git@github.com:onyx-dot-app/onyx-foss.git
run: |
cd /tmp/foss_repo
git remote add public "$FOSS_REPO_URL"
git push --force public main


@@ -3,27 +3,30 @@ name: Nightly Tag Push
on:
schedule:
- cron: "0 10 * * *" # Runs every day at 2 AM PST / 3 AM PDT / 10 AM UTC
workflow_dispatch:
permissions:
contents: write # Allows pushing tags to the repository
jobs:
create-and-push-tag:
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
runs-on: ubuntu-slim
timeout-minutes: 45
steps:
# actions using GITHUB_TOKEN cannot trigger another workflow, but we do want this to trigger docker pushes
# see https://github.com/orgs/community/discussions/27028#discussioncomment-3254367 for the workaround we
# implement here which needs an actual user's deploy key
- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
ssh-key: "${{ secrets.RKUO_DEPLOY_KEY }}"
ssh-key: "${{ secrets.DEPLOY_KEY }}"
persist-credentials: true
- name: Set up Git user
run: |
git config user.name "Richard Kuo [bot]"
git config user.email "rkuo[bot]@onyx.app"
git config user.name "Onyx Bot [bot]"
git config user.email "onyx-bot[bot]@onyx.app"
- name: Check for existing nightly tag
id: check_tag
@@ -51,3 +54,12 @@ jobs:
run: |
TAG_NAME="nightly-latest-$(date +'%Y%m%d')"
git push origin $TAG_NAME
- name: Send Slack notification
if: failure()
uses: ./.github/actions/slack-notify
with:
webhook-url: ${{ secrets.MONITOR_DEPLOYMENTS_WEBHOOK }}
title: "🚨 Nightly Tag Push Failed"
ref-name: ${{ github.ref_name }}
failed-jobs: "create-and-push-tag"

.github/workflows/zizmor.yml (new file)

@@ -0,0 +1,50 @@
name: Run Zizmor
on:
push:
branches: ["main"]
pull_request:
branches: ["**"]
permissions: {}
jobs:
zizmor:
name: zizmor
runs-on: ubuntu-slim
timeout-minutes: 45
permissions:
security-events: write # needed for SARIF uploads
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6.0.1
with:
persist-credentials: false
- name: Detect changes
id: filter
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # ratchet:dorny/paths-filter@v3
with:
filters: |
zizmor:
- '.github/**'
- name: Install the latest version of uv
if: steps.filter.outputs.zizmor == 'true' || github.ref_name == 'main'
uses: astral-sh/setup-uv@ed21f2f24f8dd64503750218de024bcf64c7250a # ratchet:astral-sh/setup-uv@v7
with:
enable-cache: false
version: "0.9.9"
- name: Run zizmor
if: steps.filter.outputs.zizmor == 'true' || github.ref_name == 'main'
run: uv run --no-sync --with zizmor zizmor --format=sarif . > results.sarif
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
if: steps.filter.outputs.zizmor == 'true' || github.ref_name == 'main'
uses: github/codeql-action/upload-sarif@ba454b8ab46733eb6145342877cd148270bb77ab # ratchet:github/codeql-action/upload-sarif@codeql-bundle-v2.23.5
with:
sarif_file: results.sarif
category: zizmor

.gitignore

@@ -1,6 +1,7 @@
# editors
.vscode
.zed
.cursor
# macos
.DS_store
@@ -18,6 +19,7 @@ backend/tests/regression/search_quality/eval-*
backend/tests/regression/search_quality/search_eval_config.yaml
backend/tests/regression/search_quality/*.json
backend/onyx/evals/data/
backend/onyx/evals/one_off/*.json
*.log
# secret files
@@ -27,10 +29,16 @@ settings.json
# others
/deployment/data/nginx/app.conf
/deployment/data/nginx/mcp.conf.inc
/deployment/data/nginx/mcp_upstream.conf.inc
*.sw?
/backend/tests/regression/answer_quality/search_test_config.yaml
*.egg-info
# Claude
AGENTS.md
CLAUDE.md
# Local .terraform directories
**/.terraform/*
@@ -40,3 +48,11 @@ settings.json
# Local .terraform.lock.hcl file
.terraform.lock.hcl
node_modules
# MCP configs
.playwright-mcp
# plans
plans/


@@ -1,8 +0,0 @@
{
"mcpServers": {
"onyx-mcp": {
"type": "http",
"url": "http://localhost:8000/mcp"
}
}
}


@@ -1,75 +1,159 @@
default_install_hook_types:
- pre-commit
- post-checkout
- post-merge
- post-rewrite
repos:
- repo: https://github.com/psf/black
rev: 25.1.0
- repo: https://github.com/astral-sh/uv-pre-commit
# From: https://github.com/astral-sh/uv-pre-commit/pull/53/commits/d30b4298e4fb63ce8609e29acdbcf4c9018a483c
rev: d30b4298e4fb63ce8609e29acdbcf4c9018a483c
hooks:
- id: black
language_version: python3.11
- id: uv-sync
args: ["--locked", "--all-extras"]
- id: uv-lock
files: ^pyproject\.toml$
- id: uv-export
name: uv-export default.txt
args:
[
"--no-emit-project",
"--no-default-groups",
"--no-hashes",
"--extra",
"backend",
"-o",
"backend/requirements/default.txt",
]
files: ^(pyproject\.toml|uv\.lock|backend/requirements/.*\.txt)$
- id: uv-export
name: uv-export dev.txt
args:
[
"--no-emit-project",
"--no-default-groups",
"--no-hashes",
"--extra",
"dev",
"-o",
"backend/requirements/dev.txt",
]
files: ^(pyproject\.toml|uv\.lock|backend/requirements/.*\.txt)$
- id: uv-export
name: uv-export ee.txt
args:
[
"--no-emit-project",
"--no-default-groups",
"--no-hashes",
"--extra",
"ee",
"-o",
"backend/requirements/ee.txt",
]
files: ^(pyproject\.toml|uv\.lock|backend/requirements/.*\.txt)$
- id: uv-export
name: uv-export model_server.txt
args:
[
"--no-emit-project",
"--no-default-groups",
"--no-hashes",
"--extra",
"model_server",
"-o",
"backend/requirements/model_server.txt",
]
files: ^(pyproject\.toml|uv\.lock|backend/requirements/.*\.txt)$
- id: uv-run
name: Check lazy imports
args: ["--active", "--with=onyx-devtools", "ods", "check-lazy-imports"]
files: ^backend/(?!\.venv/).*\.py$
# NOTE: This takes ~6s on a single, large module which is prohibitively slow.
# - id: uv-run
# name: mypy
# args: ["--all-extras", "mypy"]
# pass_filenames: true
# files: ^backend/.*\.py$
- repo: https://github.com/rhysd/actionlint
rev: a443f344ff32813837fa49f7aa6cbc478d770e62 # frozen: v1.7.9
hooks:
- id: actionlint
- repo: https://github.com/psf/black
rev: 8a737e727ac5ab2f1d4cf5876720ed276dc8dc4b # frozen: 25.1.0
hooks:
- id: black
language_version: python3.11
# this is a fork which keeps compatibility with black
- repo: https://github.com/wimglenn/reorder-python-imports-black
rev: v3.14.0
rev: f55cd27f90f0cf0ee775002c2383ce1c7820013d # frozen: v3.14.0
hooks:
- id: reorder-python-imports
args: ['--py311-plus', '--application-directories=backend/']
# need to ignore alembic files, since reorder-python-imports gets confused
# and thinks that alembic is a local package since there is a folder
# in the backend directory called `alembic`
exclude: ^backend/alembic/
- id: reorder-python-imports
args: ["--py311-plus", "--application-directories=backend/"]
# need to ignore alembic files, since reorder-python-imports gets confused
# and thinks that alembic is a local package since there is a folder
# in the backend directory called `alembic`
exclude: ^backend/alembic/
# These settings will remove unused imports with side effects
# Note: The repo currently does not and should not have imports with side effects
- repo: https://github.com/PyCQA/autoflake
rev: v2.3.1
rev: 0544741e2b4a22b472d9d93e37d4ea9153820bb1 # frozen: v2.3.1
hooks:
- id: autoflake
args: [ '--remove-all-unused-imports', '--remove-unused-variables', '--in-place' , '--recursive']
args:
[
"--remove-all-unused-imports",
"--remove-unused-variables",
"--in-place",
"--recursive",
]
- repo: https://github.com/golangci/golangci-lint
rev: 9f61b0f53f80672872fced07b6874397c3ed197b # frozen: v2.7.2
hooks:
- id: golangci-lint
entry: bash -c "find tools/ -name go.mod -print0 | xargs -0 -I{} bash -c 'cd \"$(dirname {})\" && golangci-lint run ./...'"
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.11.4
rev: 971923581912ef60a6b70dbf0c3e9a39563c9d47 # frozen: v0.11.4
hooks:
- id: ruff
- repo: https://github.com/pre-commit/mirrors-prettier
rev: v3.1.0
rev: ffb6a759a979008c0e6dff86e39f4745a2d9eac4 # frozen: v3.1.0
hooks:
- id: prettier
types_or: [html, css, javascript, ts, tsx]
additional_dependencies:
- prettier
- id: prettier
types_or: [html, css, javascript, ts, tsx]
language_version: system
- repo: https://github.com/sirwart/ripsecrets
rev: 7d94620933e79b8acaa0cd9e60e9864b07673d86 # frozen: v0.1.11
hooks:
- id: ripsecrets
args:
- --additional-pattern
- ^sk-[A-Za-z0-9_\-]{20,}$
- repo: local
hooks:
- id: check-lazy-imports
name: Check lazy imports are not directly imported
entry: python3 backend/scripts/check_lazy_imports.py
- id: terraform-fmt
name: terraform fmt
entry: terraform fmt -recursive
language: system
files: ^backend/(?!\.venv/).*\.py$
pass_filenames: false
files: \.tf$
# We would like to have a mypy pre-commit hook, but due to the fact that
# pre-commit runs in its own isolated environment, we would need to install
# and keep in sync all dependencies so mypy has access to the appropriate type
# stubs. This does not seem worth it at the moment, so for now we will stick to
# having mypy run via Github Actions / manually by contributors
# - repo: https://github.com/pre-commit/mirrors-mypy
# rev: v1.1.1
# hooks:
# - id: mypy
# exclude: ^tests/
# # below are needed for type stubs since pre-commit runs in its own
# # isolated environment. Unfortunately, this needs to be kept in sync
# # with requirements/dev.txt + requirements/default.txt
# additional_dependencies: [
# alembic==1.10.4,
# types-beautifulsoup4==4.12.0.3,
# types-html5lib==1.1.11.13,
# types-oauthlib==3.2.0.9,
# types-psycopg2==2.9.21.10,
# types-python-dateutil==2.8.19.13,
# types-regex==2023.3.23.1,
# types-requests==2.28.11.17,
# types-retry==0.9.9.3,
# types-urllib3==1.26.25.11
# ]
# # TODO: add back once errors are addressed
# # args: [--strict]
# Uses tsgo (TypeScript's native Go compiler) for ~10x faster type checking.
# This is a preview package - if it breaks:
# 1. Try updating: cd web && npm update @typescript/native-preview
# 2. Or fallback to tsc: replace 'tsgo' with 'tsc' below
- id: typescript-check
name: TypeScript type check
entry: bash -c 'cd web && npx tsgo --noEmit --project tsconfig.types.json'
language: system
pass_filenames: false
files: ^web/.*\.(ts|tsx)$


@@ -1,66 +1,59 @@
# Copy this file to .env in the .vscode folder
# Fill in the <REPLACE THIS> values as needed, it is recommended to set the GEN_AI_API_KEY value to avoid having to set up an LLM in the UI
# Also check out danswer/backend/scripts/restart_containers.sh for a script to restart the containers which Danswer relies on outside of VSCode/Cursor processes
# Copy this file to .env in the .vscode folder.
# Fill in the <REPLACE THIS> values as needed; it is recommended to set the
# GEN_AI_API_KEY value to avoid having to set up an LLM in the UI.
# Also check out onyx/backend/scripts/restart_containers.sh for a script to
# restart the containers which Onyx relies on outside of VSCode/Cursor
# processes.
# For local dev, often user Authentication is not needed
# For local dev, often user Authentication is not needed.
AUTH_TYPE=disabled
# Skip warm up for dev
SKIP_WARM_UP=True
# Always keep these on for Dev
# Logs all model prompts to stdout
# Always keep these on for Dev.
# Logs model prompts, reasoning, and answer to stdout.
LOG_ONYX_MODEL_INTERACTIONS=True
# More verbose logging
LOG_LEVEL=debug
# This passes top N results to LLM an additional time for reranking prior to answer generation
# This step is quite heavy on token usage so we disable it for dev generally
# This passes top N results to LLM an additional time for reranking prior to
# answer generation.
# This step is quite heavy on token usage so we disable it for dev generally.
DISABLE_LLM_DOC_RELEVANCE=False
# Useful if you want to toggle auth on/off (google_oauth/OIDC specifically)
# Useful if you want to toggle auth on/off (google_oauth/OIDC specifically).
OAUTH_CLIENT_ID=<REPLACE THIS>
OAUTH_CLIENT_SECRET=<REPLACE THIS>
OPENID_CONFIG_URL=<REPLACE THIS>
SAML_CONF_DIR=/<ABSOLUTE PATH TO ONYX>/onyx/backend/ee/onyx/configs/saml_config
# Generally not useful for dev, we don't generally want to set up an SMTP server for dev
# Generally not useful for dev, we don't generally want to set up an SMTP server
# for dev.
REQUIRE_EMAIL_VERIFICATION=False
# Set these so if you wipe the DB, you don't end up having to go through the UI every time
# Set these so if you wipe the DB, you don't end up having to go through the UI
# every time.
GEN_AI_API_KEY=<REPLACE THIS>
OPENAI_API_KEY=<REPLACE THIS>
# If answer quality isn't important for dev, use gpt-4o-mini since it's cheaper
# If answer quality isn't important for dev, use gpt-4o-mini since it's cheaper.
GEN_AI_MODEL_VERSION=gpt-4o
FAST_GEN_AI_MODEL_VERSION=gpt-4o
# For Danswer Slack Bot, overrides the UI values so no need to set this up via UI every time
# Only needed if using DanswerBot
#ONYX_BOT_SLACK_APP_TOKEN=<REPLACE THIS>
#ONYX_BOT_SLACK_BOT_TOKEN=<REPLACE THIS>
# Python stuff
PYTHONPATH=../backend
PYTHONUNBUFFERED=1
# Internet Search
EXA_API_KEY=<REPLACE THIS>
# Enable the full set of Danswer Enterprise Edition features
# NOTE: DO NOT ENABLE THIS UNLESS YOU HAVE A PAID ENTERPRISE LICENSE (or if you are using this for local testing/development)
# Enable the full set of Danswer Enterprise Edition features.
# NOTE: DO NOT ENABLE THIS UNLESS YOU HAVE A PAID ENTERPRISE LICENSE (or if you
# are using this for local testing/development).
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=False
# Agent Search configs # TODO: Remove give proper namings
AGENT_RETRIEVAL_STATS=False # Note: This setting will incur substantial re-ranking effort
AGENT_RERANKING_STATS=True
AGENT_MAX_QUERY_RETRIEVAL_RESULTS=20
AGENT_RERANKING_MAX_QUERY_RETRIEVAL_RESULTS=20
# S3 File Store Configuration (MinIO for local development)
S3_ENDPOINT_URL=http://localhost:9004
@@ -68,11 +61,24 @@ S3_FILE_STORE_BUCKET_NAME=onyx-file-store-bucket
S3_AWS_ACCESS_KEY_ID=minioadmin
S3_AWS_SECRET_ACCESS_KEY=minioadmin
# Show extra/uncommon connectors
# Show extra/uncommon connectors.
SHOW_EXTRA_CONNECTORS=True
# Local langsmith tracing
LANGSMITH_TRACING="true"
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
LANGSMITH_API_KEY=<REPLACE_THIS>
LANGSMITH_PROJECT=<REPLACE_THIS>
LANGSMITH_PROJECT=<REPLACE_THIS>
# Local Confluence OAuth testing
# OAUTH_CONFLUENCE_CLOUD_CLIENT_ID=<REPLACE_THIS>
# OAUTH_CONFLUENCE_CLOUD_CLIENT_SECRET=<REPLACE_THIS>
# NEXT_PUBLIC_TEST_ENV=True
# OpenSearch
# Arbitrary password is fine for local development.
OPENSEARCH_INITIAL_ADMIN_PASSWORD=<REPLACE THIS>

File diff suppressed because it is too large.


@@ -1,13 +1,13 @@
# AGENTS.md
This file provides guidance to Codex when working with code in this repository.
This file provides guidance to AI agents when working with code in this repository.
## KEY NOTES
- If you run into any missing python dependency errors, try running your command with `source backend/.venv/bin/activate` \
- If you run into any missing python dependency errors, try running your command with `source .venv/bin/activate` \
to assume the python venv.
- To make tests work, check the `.env` file at the root of the project to find an OpenAI key.
- If using `playwright` to explore the frontend, you can usually log in with username `a@test.com` and password
- If using `playwright` to explore the frontend, you can usually log in with username `a@example.com` and password
`a`. The app can be accessed at `http://localhost:3000`.
- You should assume that all Onyx services are running. To verify, you can check the `backend/log` directory to
make sure we see logs coming out from the relevant service.
@@ -70,7 +70,12 @@ Onyx uses Celery for asynchronous task processing with multiple specialized work
- Single thread (monitoring doesn't need parallelism)
- Cloud-specific monitoring tasks
8. **Beat Worker** (`beat`)
8. **User File Processing Worker** (`user_file_processing`)
- Processes user-uploaded files
- Handles user file indexing and project synchronization
- Configurable concurrency
9. **Beat Worker** (`beat`)
- Celery's scheduler for periodic tasks
- Uses DynamicTenantScheduler for multi-tenant support
- Schedules tasks like:
@@ -82,6 +87,31 @@ Onyx uses Celery for asynchronous task processing with multiple specialized work
- Monitoring tasks (every 5 minutes)
- Cleanup tasks (hourly)
#### Worker Deployment Modes
Onyx supports two deployment modes for background workers, controlled by the `USE_LIGHTWEIGHT_BACKGROUND_WORKER` environment variable:
**Lightweight Mode** (default, `USE_LIGHTWEIGHT_BACKGROUND_WORKER=true`):
- Runs a single consolidated `background` worker that handles all background tasks:
- Pruning operations (from `heavy` worker)
- Knowledge graph processing (from `kg_processing` worker)
- Monitoring tasks (from `monitoring` worker)
- User file processing (from `user_file_processing` worker)
- Lower resource footprint (single worker process)
- Suitable for smaller deployments or development environments
- Default concurrency: 6 threads
**Standard Mode** (`USE_LIGHTWEIGHT_BACKGROUND_WORKER=false`):
- Runs separate specialized workers as documented above (heavy, kg_processing, monitoring, user_file_processing)
- Better isolation and scalability
- Can scale individual workers independently based on workload
- Suitable for production deployments with higher load
The deployment mode affects:
- **Backend**: Worker processes spawned by supervisord or dev scripts
- **Helm**: Which Kubernetes deployments are created
- **Dev Environment**: Which workers `dev_run_background_jobs.py` spawns
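To make the toggle concrete, here is a minimal sketch of switching modes in a dev environment (the `backend/scripts/` path for `dev_run_background_jobs.py` is an assumption, not confirmed by this diff):
```bash
# Lightweight mode (the default): one consolidated `background` worker
# covers pruning, KG processing, monitoring, and user file processing.
USE_LIGHTWEIGHT_BACKGROUND_WORKER=true python backend/scripts/dev_run_background_jobs.py

# Standard mode: spawn the separate specialized workers instead.
USE_LIGHTWEIGHT_BACKGROUND_WORKER=false python backend/scripts/dev_run_background_jobs.py
```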
#### Key Features
- **Thread-based Workers**: All workers use thread pools (not processes) for stability
@@ -151,6 +181,286 @@ web/
└── src/lib/ # Utilities & business logic
```
## Frontend Standards
### 1. Import Standards
**Always use absolute imports with the `@` prefix.**
**Reason:** Moving files around becomes easier since you don't also have to update those import statements. This makes modifications to the codebase much nicer.
```typescript
// ✅ Good
import { Button } from "@/components/ui/button";
import { useAuth } from "@/hooks/useAuth";
import { Text } from "@/refresh-components/texts/Text";
// ❌ Bad
import { Button } from "../../../components/ui/button";
import { useAuth } from "./hooks/useAuth";
```
### 2. React Component Functions
**Prefer regular functions over arrow functions for React components.**
**Reason:** Functions just become easier to read.
```typescript
// ✅ Good
function UserProfile({ userId }: UserProfileProps) {
return <div>User Profile</div>
}
// ❌ Bad
const UserProfile = ({ userId }: UserProfileProps) => {
return <div>User Profile</div>
}
```
### 3. Props Interface Extraction
**Extract prop types into their own interface definitions.**
**Reason:** Functions just become easier to read.
```typescript
// ✅ Good
interface UserCardProps {
user: User
showActions?: boolean
onEdit?: (userId: string) => void
}
function UserCard({ user, showActions = false, onEdit }: UserCardProps) {
return <div>User Card</div>
}
// ❌ Bad
function UserCard({
user,
showActions = false,
onEdit
}: {
user: User
showActions?: boolean
onEdit?: (userId: string) => void
}) {
return <div>User Card</div>
}
```
### 4. Spacing Guidelines
**Prefer padding over margins for spacing.**
**Reason:** We want to consolidate usage to paddings instead of margins.
```typescript
// ✅ Good
<div className="p-4 space-y-2">
<div className="p-2">Content</div>
</div>
// ❌ Bad
<div className="m-4 space-y-2">
<div className="m-2">Content</div>
</div>
```
### 5. Tailwind Dark Mode
**Strictly forbid using the `dark:` modifier in Tailwind classes, except for logo icon handling.**
**Reason:** The `colors.css` file already, VERY CAREFULLY, defines what the exact opposite colour of each light-mode colour is. Overriding this behaviour is VERY bad and will lead to horrible UI breakages.
**Exception:** The `createLogoIcon` helper in `web/src/components/icons/icons.tsx` uses `dark:` modifiers (`dark:invert`, `dark:hidden`, `dark:block`) to handle third-party logo icons that cannot automatically adapt through `colors.css`. This is the ONLY acceptable use of dark mode modifiers.
```typescript
// ✅ Good - Standard components use `web/tailwind-themes/tailwind.config.js` / `web/src/app/css/colors.css`
<div className="bg-background-neutral-03 text-text-02">
Content
</div>
// ✅ Good - Logo icons with dark mode handling via createLogoIcon
export const GithubIcon = createLogoIcon(githubLightIcon, {
monochromatic: true, // Will apply dark:invert internally
});
export const GitbookIcon = createLogoIcon(gitbookLightIcon, {
darkSrc: gitbookDarkIcon, // Will use dark:hidden/dark:block internally
});
// ❌ Bad - Manual dark mode overrides
<div className="bg-white dark:bg-black text-black dark:text-white">
Content
</div>
```
### 6. Class Name Utilities
**Use the `cn` utility instead of raw string formatting for classNames.**
**Reason:** `cn` calls are easier to read. `cn` also handles more complex inputs (e.g., string arrays) by flattening each element, and it filters out falsy values, so conditionals like `myCondition && "some-tailwind-class"` (which evaluates to `false` when `myCondition` is `false`) are dropped automatically.
```typescript
import { cn } from '@/lib/utils'
// ✅ Good
<div className={cn(
'base-class',
isActive && 'active-class',
className
)}>
Content
</div>
// ❌ Bad
<div className={`base-class ${isActive ? 'active-class' : ''} ${className}`}>
Content
</div>
```
### 7. Custom Hooks Organization
**Follow a "hook-per-file" layout. Each hook should live in its own file within `web/src/hooks`.**
**Reason:** This is just a layout preference. Keeps code clean.
```typescript
// web/src/hooks/useUserData.ts
export function useUserData(userId: string) {
// hook implementation
}
// web/src/hooks/useLocalStorage.ts
export function useLocalStorage<T>(key: string, initialValue: T) {
// hook implementation
}
```
### 8. Icon Usage
**ONLY use icons from the `web/src/icons` directory. Do NOT use icons from `react-icons`, `lucide`, or other external libraries.**
**Reason:** We have a very carefully curated selection of icons that match our Onyx guidelines. We do NOT want to muddy those up with different aesthetic stylings.
```typescript
// ✅ Good
import SvgX from "@/icons/x";
import SvgMoreHorizontal from "@/icons/more-horizontal";
// ❌ Bad
import { User } from "lucide-react";
import { FiSearch } from "react-icons/fi";
```
**Missing Icons**: If an icon is needed but doesn't exist in the `web/src/icons` directory, import it from Figma using the Figma MCP tool and add it to the icons directory.
If you need help with this step, reach out to `raunak@onyx.app`.
### 9. Text Rendering
**Prefer using the `refresh-components/texts/Text` component for all text rendering. Avoid "naked" text nodes.**
**Reason:** The `Text` component is fully compliant with the stylings provided in Figma. It provides easy utilities to specify the text-colour and font-size in the form of flags. Super duper easy.
```typescript
// ✅ Good
import { Text } from '@/refresh-components/texts/Text'
function UserCard({ name }: { name: string }) {
return (
// The `text03` flag renders the text in the 3rd-scale grey.
// The `mainAction` flag applies the "main-action" font, line-height, and
// weight, as described in the Figma.
<Text text03 mainAction>
{name}
</Text>
)
}
// ❌ Bad
function UserCard({ name }: { name: string }) {
return (
<div>
<h2>{name}</h2>
<p>User details</p>
</div>
)
}
```
### 10. Component Usage
**Heavily avoid raw HTML input components. Always use components from the `web/src/refresh-components` or `web/lib/opal/src` directory.**
**Reason:** We've put in a lot of effort to unify the components that are rendered in the Onyx app. Using raw components breaks the entire UI of the application, and leaves it in a muddier state than before.
```typescript
// ✅ Good
import Button from '@/refresh-components/buttons/Button'
import InputTypeIn from '@/refresh-components/inputs/InputTypeIn'
import SvgPlusCircle from '@/icons/plus-circle'
function ContactForm() {
return (
<form>
<InputTypeIn placeholder="Search..." />
<Button type="submit" leftIcon={SvgPlusCircle}>Submit</Button>
</form>
)
}
// ❌ Bad
function ContactForm() {
return (
<form>
<input placeholder="Name" />
<textarea placeholder="Message" />
<button type="submit">Submit</button>
</form>
)
}
```
### 11. Colors
**Always use custom overrides for colors and borders rather than built in Tailwind CSS colors. These overrides live in `web/tailwind-themes/tailwind.config.js`.**
**Reason:** Our custom color system uses CSS variables that automatically handle dark mode and maintain design consistency across the app. Standard Tailwind colors bypass this system.
**Available color categories:**
- **Text:** `text-01` through `text-05`, `text-inverted-XX`
- **Backgrounds:** `background-neutral-XX`, `background-tint-XX` (and inverted variants)
- **Borders:** `border-01` through `border-05`, `border-inverted-XX`
- **Actions:** `action-link-XX`, `action-danger-XX`
- **Status:** `status-info-XX`, `status-success-XX`, `status-warning-XX`, `status-error-XX`
- **Theme:** `theme-primary-XX`, `theme-red-XX`, `theme-blue-XX`, etc.
```typescript
// ✅ Good - Use custom Onyx color classes
<div className="bg-background-neutral-01 border border-border-02" />
<div className="bg-background-tint-02 border border-border-01" />
<div className="bg-status-success-01" />
<div className="bg-action-link-01" />
<div className="bg-theme-primary-05" />
// ❌ Bad - Do NOT use standard Tailwind colors
<div className="bg-gray-100 border border-gray-300 text-gray-600" />
<div className="bg-white border border-slate-200" />
<div className="bg-green-100 text-green-700" />
<div className="bg-blue-100 text-blue-600" />
<div className="bg-indigo-500" />
```
### 12. Data Fetching
**Prefer using `useSWR` for data fetching. Data should generally be fetched on the client side. Components that need data should display a loader / placeholder while waiting for that data. Prefer loading data within the component that needs it rather than at the top level and passing it down.**
**Reason:** Client side fetching allows us to load the skeleton of the page without waiting for data to load, leading to a snappier UX. Loading data where needed reduces dependencies between a component and its parent component(s).
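A minimal sketch of this pattern (the `/api/users/...` endpoint, fetcher, and `UserGreeting` component are illustrative assumptions, not an actual Onyx API):
```typescript
import useSWR from "swr";
import { Text } from "@/refresh-components/texts/Text";

// Hypothetical fetcher; the real app's fetcher may differ.
const fetcher = (url: string) => fetch(url).then((res) => res.json());

interface User {
  id: string;
  name: string;
}

interface UserGreetingProps {
  userId: string;
}

function UserGreeting({ userId }: UserGreetingProps) {
  // Fetch on the client; render a placeholder until the data arrives.
  const { data, error, isLoading } = useSWR<User>(
    `/api/users/${userId}`,
    fetcher
  );

  if (isLoading) return <Text text03>Loading...</Text>;
  if (error || !data) return <Text text03>Failed to load user.</Text>;
  return <Text mainAction>Hello, {data.name}</Text>;
}
```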
## Database & Migrations
### Running Migrations
@@ -164,13 +474,15 @@ alembic -n schema_private upgrade head
### Creating Migrations
```bash
# Auto-generate migration
alembic revision --autogenerate -m "description"
# Create migration
alembic revision -m "description"
# Multi-tenant migration
alembic -n schema_private revision --autogenerate -m "description"
alembic -n schema_private revision -m "description"
```
Write the migration manually and place it in the file that alembic creates when running the above command.
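For example, a hand-written migration body might look like this (a sketch: the revision IDs, table, and column are placeholders; alembic creates the actual file and IDs):
```python
"""add nickname to user

The revision identifiers below are placeholders; alembic fills in real
values in the file it generates.
"""
import sqlalchemy as sa
from alembic import op

revision = "abc123def456"  # placeholder
down_revision = "fedcba654321"  # placeholder


def upgrade() -> None:
    # Add the new column; keep it nullable so existing rows stay valid.
    op.add_column("user", sa.Column("nickname", sa.String(), nullable=True))


def downgrade() -> None:
    op.drop_column("user", "nickname")
```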
## Testing Strategy
There are 4 main types of tests within Onyx:
@@ -263,14 +575,6 @@ will be tailing their logs to this file.
- Token management and rate limiting
- Custom prompts and agent actions
## UI/UX Patterns
- Tailwind CSS with design system in `web/src/components/ui/`
- Radix UI and Headless UI for accessible components
- SWR for data fetching and caching
- Form validation with react-hook-form
- Error handling with popup notifications
## Creating a Plan
When creating a plan in the `plans` directory, make sure to include at least these elements:

CLAUDE.md (deleted)

@@ -1,295 +0,0 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## KEY NOTES
- If you run into any missing python dependency errors, try running your command with `source backend/.venv/bin/activate` \
to assume the python venv.
- To make tests work, check the `.env` file at the root of the project to find an OpenAI key.
- If using `playwright` to explore the frontend, you can usually log in with username `a@test.com` and password
`a`. The app can be accessed at `http://localhost:3000`.
- You should assume that all Onyx services are running. To verify, you can check the `backend/log` directory to
make sure we see logs coming out from the relevant service.
- To connect to the Postgres database, use: `docker exec -it onyx-relational_db-1 psql -U postgres -c "<SQL>"`
- When making calls to the backend, always go through the frontend. E.g. make a call to `http://localhost:3000/api/persona` not `http://localhost:8080/api/persona`
- Put ALL db operations under the `backend/onyx/db` / `backend/ee/onyx/db` directories. Don't run queries
outside of those directories.
## Project Overview
**Onyx** (formerly Danswer) is an open-source Gen-AI and Enterprise Search platform that connects to company documents, apps, and people. It features a modular architecture with both Community Edition (MIT licensed) and Enterprise Edition offerings.
### Background Workers (Celery)
Onyx uses Celery for asynchronous task processing with multiple specialized workers:
#### Worker Types
1. **Primary Worker** (`celery_app.py`)
- Coordinates core background tasks and system-wide operations
- Handles connector management, document sync, pruning, and periodic checks
- Runs with 4 threads concurrency
- Tasks: connector deletion, vespa sync, pruning, LLM model updates, user file sync
2. **Docfetching Worker** (`docfetching`)
- Fetches documents from external data sources (connectors)
- Spawns docprocessing tasks for each document batch
- Implements watchdog monitoring for stuck connectors
- Configurable concurrency (default from env)
3. **Docprocessing Worker** (`docprocessing`)
- Processes fetched documents through the indexing pipeline:
- Upserts documents to PostgreSQL
- Chunks documents and adds contextual information
- Embeds chunks via model server
- Writes chunks to Vespa vector database
- Updates document metadata
- Configurable concurrency (default from env)
4. **Light Worker** (`light`)
- Handles lightweight, fast operations
- Tasks: vespa operations, document permissions sync, external group sync
- Higher concurrency for quick tasks
5. **Heavy Worker** (`heavy`)
- Handles resource-intensive operations
- Primary task: document pruning operations
- Runs with 4 threads concurrency
6. **KG Processing Worker** (`kg_processing`)
- Handles Knowledge Graph processing and clustering
- Builds relationships between documents
- Runs clustering algorithms
- Configurable concurrency
7. **Monitoring Worker** (`monitoring`)
- System health monitoring and metrics collection
- Monitors Celery queues, process memory, and system status
- Single thread (monitoring doesn't need parallelism)
- Cloud-specific monitoring tasks
8. **Beat Worker** (`beat`)
- Celery's scheduler for periodic tasks
- Uses DynamicTenantScheduler for multi-tenant support
- Schedules tasks like:
- Indexing checks (every 15 seconds)
- Connector deletion checks (every 20 seconds)
- Vespa sync checks (every 20 seconds)
- Pruning checks (every 20 seconds)
- KG processing (every 60 seconds)
- Monitoring tasks (every 5 minutes)
- Cleanup tasks (hourly)
#### Key Features
- **Thread-based Workers**: All workers use thread pools (not processes) for stability
- **Tenant Awareness**: Multi-tenant support with per-tenant task isolation. There is a
middleware layer that automatically finds the appropriate tenant ID when sending tasks
via Celery Beat.
- **Task Prioritization**: High, Medium, Low priority queues
- **Monitoring**: Built-in heartbeat and liveness checking
- **Failure Handling**: Automatic retry and failure recovery mechanisms
- **Redis Coordination**: Inter-process communication via Redis
- **PostgreSQL State**: Task state and metadata stored in PostgreSQL
#### Important Notes
**Defining Tasks**:
- Always use `@shared_task` rather than `@celery_app`
- Put tasks under `background/celery/tasks/` or `ee/background/celery/tasks`
**Defining APIs**:
When creating new FastAPI APIs, do NOT use the `response_model` field. Instead, just type the
function.
**Testing Updates**:
If you make any updates to a celery worker and you want to test these changes, you will need
to ask me to restart the celery worker. There is no auto-restart on code-change mechanism.
### Code Quality
```bash
# Install and run pre-commit hooks
pre-commit install
pre-commit run --all-files
```
NOTE: Always make sure everything is strictly typed (both in Python and Typescript).
## Architecture Overview
### Technology Stack
- **Backend**: Python 3.11, FastAPI, SQLAlchemy, Alembic, Celery
- **Frontend**: Next.js 15+, React 18, TypeScript, Tailwind CSS
- **Database**: PostgreSQL with Redis caching
- **Search**: Vespa vector database
- **Auth**: OAuth2, SAML, multi-provider support
- **AI/ML**: LangChain, LiteLLM, multiple embedding models
### Directory Structure
```
backend/
├── onyx/
│   ├── auth/                  # Authentication & authorization
│   ├── chat/                  # Chat functionality & LLM interactions
│   ├── connectors/            # Data source connectors
│   ├── db/                    # Database models & operations
│   ├── document_index/        # Vespa integration
│   ├── federated_connectors/  # External search connectors
│   ├── llm/                   # LLM provider integrations
│   └── server/                # API endpoints & routers
├── ee/                        # Enterprise Edition features
├── alembic/                   # Database migrations
└── tests/                     # Test suites
web/
├── src/app/         # Next.js app router pages
├── src/components/  # Reusable React components
└── src/lib/         # Utilities & business logic
```
## Database & Migrations
### Running Migrations
```bash
# Standard migrations
alembic upgrade head
# Multi-tenant (Enterprise)
alembic -n schema_private upgrade head
```
### Creating Migrations
```bash
# Auto-generate migration
alembic revision --autogenerate -m "description"
# Multi-tenant migration
alembic -n schema_private revision --autogenerate -m "description"
```
## Testing Strategy
There are 4 main types of tests within Onyx:
### Unit Tests
These should not assume any Onyx/external services are available to be called.
Interactions with the outside world should be mocked using `unittest.mock`. Generally, only
write these for complex, isolated modules, e.g. `citation_processing.py`.
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest -xv backend/tests/unit
```
### External Dependency Unit Tests
These tests assume that all external dependencies of Onyx are available and callable (e.g. Postgres, Redis,
MinIO/S3, Vespa are running + OpenAI can be called + any request to the internet is fine + etc.).
However, the actual Onyx containers are not running; in these tests we call the function under test directly.
We can also mock components/calls at will.
The goal of these tests is to minimize mocking while giving some flexibility to mock things that are flaky,
need strictly controlled behavior, or need to have their internal behavior validated (e.g. verify a function is called
with certain args, something that would be impossible with proper integration tests).
A great example of this type of test is `backend/tests/external_dependency_unit/connectors/confluence/test_confluence_group_sync.py`.
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest backend/tests/external_dependency_unit
```
### Integration Tests
Standard integration tests. Every test in `backend/tests/integration` runs against a real Onyx deployment. We cannot
mock anything in these tests. Prefer writing integration tests (or External Dependency Unit Tests if mocking/internal
verification is necessary) over any other type of test.
Tests are parallelized at a directory level.
When writing integration tests, make sure to check the root `conftest.py` for useful fixtures + the `backend/tests/integration/common_utils` directory for utilities. Prefer calling the appropriate Manager
class in the utils (if one exists) over directly calling the APIs with a library like `requests`. Prefer using fixtures rather than
calling the utilities directly (e.g. do NOT create admin users with
`admin_user = UserManager.create(name="admin_user")`; instead use the `admin_user` fixture).
A great example of this type of test is `backend/tests/integration/dev_apis/test_simple_chat_api.py`.
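To make the fixture preference concrete (the test body is elided; the type annotation is omitted here):
```python
# ✅ Good - request the fixture provided by the root conftest.py
def test_admin_can_create_persona(admin_user) -> None:
    ...


# ❌ Bad - do NOT construct the user inline:
# admin_user = UserManager.create(name="admin_user")
```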
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest backend/tests/integration
```
### Playwright (E2E) Tests
These tests are an even more complete version of the Integration Tests mentioned above. All Onyx services are
running, *including* the Web Server.
Use these tests for anything that requires significant frontend <-> backend coordination.
Tests are located at `web/tests/e2e`. Tests are written in TypeScript.
To run them:
```bash
npx playwright test <TEST_NAME>
```
## Logs
When (1) writing integration tests or (2) doing live tests (e.g. curl / playwright) you can access
logs via the `backend/log/<service_name>_debug.log` file. All Onyx services (api_server, web_server, celery_X)
write their logs to this file.
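For example, to follow the API server logs while testing:
```bash
tail -f backend/log/api_server_debug.log
```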
## Security Considerations
- Never commit API keys or secrets to repository
- Use encrypted credential storage for connector credentials
- Follow RBAC patterns for new features
- Implement proper input validation with Pydantic models
- Use parameterized queries to prevent SQL injection
## AI/LLM Integration
- Multiple LLM providers supported via LiteLLM
- Configurable models per feature (chat, search, embeddings)
- Streaming support for real-time responses
- Token management and rate limiting
- Custom prompts and agent actions
## UI/UX Patterns
- Tailwind CSS with design system in `web/src/components/ui/`
- Radix UI and Headless UI for accessible components
- SWR for data fetching and caching
- Form validation with react-hook-form
- Error handling with popup notifications
## Creating a Plan
When creating a plan in the `plans` directory, make sure to include at least these elements:
**Issues to Address**
What the change is meant to do.
**Important Notes**
Things you come across in your research that are important to the implementation.
**Implementation strategy**
How you are going to make the changes happen. High level approach.
**Tests**
What unit (use rarely), external dependency unit, integration, and playwright tests you plan to write to
verify the correct behavior. Don't overtest. Usually, a given change only needs one type of test.
Do NOT include these: *Timeline*, *Rollback plan*
This is a minimal list - feel free to include more. Do NOT write code as part of your plan.
Keep it high level. You can reference certain files or functions though.
Before writing your plan, make sure to do research. Explore the relevant sections in the codebase.

CLAUDE.md.template
@@ -0,0 +1,604 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## KEY NOTES
- If you run into any missing Python dependency errors, try running your command after `source .venv/bin/activate`
  to activate the Python venv.
- To make tests work, check the `.env` file at the root of the project to find an OpenAI key.
- If using `playwright` to explore the frontend, you can usually log in with username `a@example.com` and password
`a`. The app can be accessed at `http://localhost:3000`.
- You should assume that all Onyx services are running. To verify, you can check the `backend/log` directory to
make sure we see logs coming out from the relevant service.
- To connect to the Postgres database, use: `docker exec -it onyx-relational_db-1 psql -U postgres -c "<SQL>"`
- When making calls to the backend, always go through the frontend. E.g. make a call to `http://localhost:3000/api/persona` not `http://localhost:8080/api/persona`
- Put ALL db operations under the `backend/onyx/db` / `backend/ee/onyx/db` directories. Don't run queries
outside of those directories.
## Project Overview
**Onyx** (formerly Danswer) is an open-source Gen-AI and Enterprise Search platform that connects to company documents, apps, and people. It features a modular architecture with both Community Edition (MIT licensed) and Enterprise Edition offerings.
### Background Workers (Celery)
Onyx uses Celery for asynchronous task processing with multiple specialized workers:
#### Worker Types
1. **Primary Worker** (`celery_app.py`)
- Coordinates core background tasks and system-wide operations
- Handles connector management, document sync, pruning, and periodic checks
- Runs with 4 threads concurrency
- Tasks: connector deletion, vespa sync, pruning, LLM model updates, user file sync
2. **Docfetching Worker** (`docfetching`)
- Fetches documents from external data sources (connectors)
- Spawns docprocessing tasks for each document batch
- Implements watchdog monitoring for stuck connectors
- Configurable concurrency (default from env)
3. **Docprocessing Worker** (`docprocessing`)
- Processes fetched documents through the indexing pipeline:
- Upserts documents to PostgreSQL
- Chunks documents and adds contextual information
- Embeds chunks via model server
- Writes chunks to Vespa vector database
- Updates document metadata
- Configurable concurrency (default from env)
4. **Light Worker** (`light`)
- Handles lightweight, fast operations
- Tasks: vespa operations, document permissions sync, external group sync
- Higher concurrency for quick tasks
5. **Heavy Worker** (`heavy`)
- Handles resource-intensive operations
- Primary task: document pruning operations
- Runs with 4 threads concurrency
6. **KG Processing Worker** (`kg_processing`)
- Handles Knowledge Graph processing and clustering
- Builds relationships between documents
- Runs clustering algorithms
- Configurable concurrency
7. **Monitoring Worker** (`monitoring`)
- System health monitoring and metrics collection
- Monitors Celery queues, process memory, and system status
- Single thread (monitoring doesn't need parallelism)
- Cloud-specific monitoring tasks
8. **User File Processing Worker** (`user_file_processing`)
- Processes user-uploaded files
- Handles user file indexing and project synchronization
- Configurable concurrency
9. **Beat Worker** (`beat`)
- Celery's scheduler for periodic tasks
- Uses DynamicTenantScheduler for multi-tenant support
- Schedules tasks like:
- Indexing checks (every 15 seconds)
- Connector deletion checks (every 20 seconds)
- Vespa sync checks (every 20 seconds)
- Pruning checks (every 20 seconds)
- KG processing (every 60 seconds)
- Monitoring tasks (every 5 minutes)
- Cleanup tasks (hourly)
#### Worker Deployment Modes
Onyx supports two deployment modes for background workers, controlled by the `USE_LIGHTWEIGHT_BACKGROUND_WORKER` environment variable:
**Lightweight Mode** (default, `USE_LIGHTWEIGHT_BACKGROUND_WORKER=true`):
- Runs a single consolidated `background` worker that handles all background tasks:
- Light worker tasks (Vespa operations, permissions sync, deletion)
- Document processing (indexing pipeline)
- Document fetching (connector data retrieval)
- Pruning operations (from `heavy` worker)
- Knowledge graph processing (from `kg_processing` worker)
- Monitoring tasks (from `monitoring` worker)
- User file processing (from `user_file_processing` worker)
- Lower resource footprint (fewer worker processes)
- Suitable for smaller deployments or development environments
- Default concurrency: 20 threads (increased to handle combined workload)
**Standard Mode** (`USE_LIGHTWEIGHT_BACKGROUND_WORKER=false`):
- Runs separate specialized workers as documented above (light, docprocessing, docfetching, heavy, kg_processing, monitoring, user_file_processing)
- Better isolation and scalability
- Can scale individual workers independently based on workload
- Suitable for production deployments with higher load
The deployment mode affects:
- **Backend**: Worker processes spawned by supervisord or dev scripts
- **Helm**: Which Kubernetes deployments are created
- **Dev Environment**: Which workers `dev_run_background_jobs.py` spawns
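For example, to launch the dev workers in standard (multi-worker) mode — the exact invocation is illustrative; adjust to however you normally run `dev_run_background_jobs.py`:
```bash
USE_LIGHTWEIGHT_BACKGROUND_WORKER=false python dev_run_background_jobs.py
```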
#### Key Features
- **Thread-based Workers**: All workers use thread pools (not processes) for stability
- **Tenant Awareness**: Multi-tenant support with per-tenant task isolation. There is a
middleware layer that automatically finds the appropriate tenant ID when sending tasks
via Celery Beat.
- **Task Prioritization**: High, Medium, Low priority queues
- **Monitoring**: Built-in heartbeat and liveness checking
- **Failure Handling**: Automatic retry and failure recovery mechanisms
- **Redis Coordination**: Inter-process communication via Redis
- **PostgreSQL State**: Task state and metadata stored in PostgreSQL
#### Important Notes
**Defining Tasks**:
- Always use `@shared_task` rather than `@celery_app` (see the sketch below)
- Put tasks under `background/celery/tasks/` or `ee/background/celery/tasks`
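A minimal sketch of the convention (the module path and task name are illustrative, not real ones):
```python
# e.g. backend/onyx/background/celery/tasks/example/tasks.py (illustrative location)
from celery import shared_task


@shared_task(name="example_periodic_check")
def example_periodic_check() -> None:
    # @shared_task binds to whichever Celery app the worker runs under,
    # rather than hard-coding a dependency on a specific app instance.
    ...
```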
**Defining APIs**:
When creating new FastAPI APIs, do NOT use the `response_model` field. Instead, just type the
function.
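A minimal sketch (the router path and model are illustrative):
```python
from fastapi import APIRouter
from pydantic import BaseModel

router = APIRouter()


class PersonaSnapshot(BaseModel):
    id: int
    name: str


@router.get("/persona/{persona_id}")  # note: no response_model=... here
def get_persona(persona_id: int) -> PersonaSnapshot:
    # FastAPI derives the response schema from the return annotation
    ...
```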
**Testing Updates**:
If you make any updates to a celery worker and you want to test these changes, you will need
to ask me to restart the celery worker. There is no auto-restart on code-change mechanism.
### Code Quality
```bash
# Install and run pre-commit hooks
pre-commit install
pre-commit run --all-files
```
NOTE: Always make sure everything is strictly typed (both in Python and TypeScript).
## Architecture Overview
### Technology Stack
- **Backend**: Python 3.11, FastAPI, SQLAlchemy, Alembic, Celery
- **Frontend**: Next.js 15+, React 18, TypeScript, Tailwind CSS
- **Database**: PostgreSQL with Redis caching
- **Search**: Vespa vector database
- **Auth**: OAuth2, SAML, multi-provider support
- **AI/ML**: LangChain, LiteLLM, multiple embedding models
### Directory Structure
```
backend/
├── onyx/
│   ├── auth/                  # Authentication & authorization
│   ├── chat/                  # Chat functionality & LLM interactions
│   ├── connectors/            # Data source connectors
│   ├── db/                    # Database models & operations
│   ├── document_index/        # Vespa integration
│   ├── federated_connectors/  # External search connectors
│   ├── llm/                   # LLM provider integrations
│   └── server/                # API endpoints & routers
├── ee/                        # Enterprise Edition features
├── alembic/                   # Database migrations
└── tests/                     # Test suites
web/
├── src/app/         # Next.js app router pages
├── src/components/  # Reusable React components
└── src/lib/         # Utilities & business logic
```
## Frontend Standards
### 1. Import Standards
**Always use absolute imports with the `@` prefix.**
**Reason:** Moving files around becomes easier since you don't also have to update those import statements. This makes modifications to the codebase much nicer.
```typescript
// ✅ Good
import { Button } from "@/components/ui/button";
import { useAuth } from "@/hooks/useAuth";
import { Text } from "@/refresh-components/texts/Text";
// ❌ Bad
import { Button } from "../../../components/ui/button";
import { useAuth } from "./hooks/useAuth";
```
### 2. React Component Functions
**Prefer regular functions over arrow functions for React components.**
**Reason:** Functions just become easier to read.
```typescript
// ✅ Good
function UserProfile({ userId }: UserProfileProps) {
  return <div>User Profile</div>
}

// ❌ Bad
const UserProfile = ({ userId }: UserProfileProps) => {
  return <div>User Profile</div>
}
```
### 3. Props Interface Extraction
**Extract prop types into their own interface definitions.**
**Reason:** Component signatures just become easier to read.
```typescript
// ✅ Good
interface UserCardProps {
  user: User
  showActions?: boolean
  onEdit?: (userId: string) => void
}

function UserCard({ user, showActions = false, onEdit }: UserCardProps) {
  return <div>User Card</div>
}

// ❌ Bad
function UserCard({
  user,
  showActions = false,
  onEdit
}: {
  user: User
  showActions?: boolean
  onEdit?: (userId: string) => void
}) {
  return <div>User Card</div>
}
```
### 4. Spacing Guidelines
**Prefer padding over margins for spacing.**
**Reason:** We want to consolidate spacing on padding instead of margins; padding composes more predictably (margins can collapse) and keeps spacing owned by the element itself.
```typescript
// ✅ Good
<div className="p-4 space-y-2">
<div className="p-2">Content</div>
</div>
// ❌ Bad
<div className="m-4 space-y-2">
<div className="m-2">Content</div>
</div>
```
### 5. Tailwind Dark Mode
**Strictly forbid using the `dark:` modifier in Tailwind classes, except for logo icon handling.**
**Reason:** The `colors.css` file already, VERY CAREFULLY, defines the exact opposite color of each light-mode color. Overriding this behavior is VERY bad and will lead to horrible UI breakages.
**Exception:** The `createLogoIcon` helper in `web/src/components/icons/icons.tsx` uses `dark:` modifiers (`dark:invert`, `dark:hidden`, `dark:block`) to handle third-party logo icons that cannot automatically adapt through `colors.css`. This is the ONLY acceptable use of dark mode modifiers.
```typescript
// ✅ Good - Standard components use `tailwind-themes/tailwind.config.js` / `src/app/css/colors.css`
<div className="bg-background-neutral-03 text-text-02">
Content
</div>
// ✅ Good - Logo icons with dark mode handling via createLogoIcon
export const GithubIcon = createLogoIcon(githubLightIcon, {
monochromatic: true, // Will apply dark:invert internally
});
export const GitbookIcon = createLogoIcon(gitbookLightIcon, {
darkSrc: gitbookDarkIcon, // Will use dark:hidden/dark:block internally
});
// ❌ Bad - Manual dark mode overrides
<div className="bg-white dark:bg-black text-black dark:text-white">
Content
</div>
```
### 6. Class Name Utilities
**Use the `cn` utility instead of raw string formatting for classNames.**
**Reason:** `cn` calls are easier to read. They also format more complex types (i.e., string arrays) properly by flattening each element, which lets conditionals (i.e., `myCondition && "some-tailwind-class"`, which evaluates to `false` when `myCondition` is `false`) get filtered out.
```typescript
import { cn } from '@/lib/utils'
// ✅ Good
<div className={cn(
  'base-class',
  isActive && 'active-class',
  className
)}>
  Content
</div>

// ❌ Bad
<div className={`base-class ${isActive ? 'active-class' : ''} ${className}`}>
  Content
</div>
```
### 7. Custom Hooks Organization
**Follow a "hook-per-file" layout. Each hook should live in its own file within `web/src/hooks`.**
**Reason:** This is just a layout preference. Keeps code clean.
```typescript
// web/src/hooks/useUserData.ts
export function useUserData(userId: string) {
  // hook implementation
}

// web/src/hooks/useLocalStorage.ts
export function useLocalStorage<T>(key: string, initialValue: T) {
  // hook implementation
}
```
### 8. Icon Usage
**ONLY use icons from the `web/src/icons` directory. Do NOT use icons from `react-icons`, `lucide`, or other external libraries.**
**Reason:** We have a very carefully curated selection of icons that match our Onyx guidelines. We do NOT want to muddy those up with different aesthetic stylings.
```typescript
// ✅ Good
import SvgX from "@/icons/x";
import SvgMoreHorizontal from "@/icons/more-horizontal";
// ❌ Bad
import { User } from "lucide-react";
import { FiSearch } from "react-icons/fi";
```
**Missing Icons**: If an icon is needed but doesn't exist in the `web/src/icons` directory, import it from Figma using the Figma MCP tool and add it to the icons directory.
If you need help with this step, reach out to `raunak@onyx.app`.
### 9. Text Rendering
**Prefer using the `refresh-components/texts/Text` component for all text rendering. Avoid "naked" text nodes.**
**Reason:** The `Text` component is fully compliant with the stylings provided in Figma. It provides easy utilities to specify the text color and font size in the form of flags. Super duper easy.
```typescript
// ✅ Good
import { Text } from '@/refresh-components/texts/Text'

function UserCard({ name }: { name: string }) {
  return (
    <Text
      // The `text03` flag colors the rendered text with the 3rd-scale grey
      text03
      // The `mainAction` flag applies the "main-action" font, line-height, and weight described in the Figma
      mainAction
    >
      {name}
    </Text>
  )
}

// ❌ Bad
function UserCard({ name }: { name: string }) {
  return (
    <div>
      <h2>{name}</h2>
      <p>User details</p>
    </div>
  )
}
```
### 10. Component Usage
**Heavily avoid raw HTML input components. Always use components from the `web/src/refresh-components` or `web/lib/opal/src` directory.**
**Reason:** We've put in a lot of effort to unify the components that are rendered in the Onyx app. Using raw components breaks the entire UI of the application, and leaves it in a muddier state than before.
```typescript
// ✅ Good
import Button from '@/refresh-components/buttons/Button'
import InputTypeIn from '@/refresh-components/inputs/InputTypeIn'
import SvgPlusCircle from '@/icons/plus-circle'
function ContactForm() {
  return (
    <form>
      <InputTypeIn placeholder="Search..." />
      <Button type="submit" leftIcon={SvgPlusCircle}>Submit</Button>
    </form>
  )
}

// ❌ Bad
function ContactForm() {
  return (
    <form>
      <input placeholder="Name" />
      <textarea placeholder="Message" />
      <button type="submit">Submit</button>
    </form>
  )
}
```
### 11. Colors
**Always use custom overrides for colors and borders rather than built in Tailwind CSS colors. These overrides live in `web/tailwind-themes/tailwind.config.js`.**
**Reason:** Our custom color system uses CSS variables that automatically handle dark mode and maintain design consistency across the app. Standard Tailwind colors bypass this system.
**Available color categories:**
- **Text:** `text-01` through `text-05`, `text-inverted-XX`
- **Backgrounds:** `background-neutral-XX`, `background-tint-XX` (and inverted variants)
- **Borders:** `border-01` through `border-05`, `border-inverted-XX`
- **Actions:** `action-link-XX`, `action-danger-XX`
- **Status:** `status-info-XX`, `status-success-XX`, `status-warning-XX`, `status-error-XX`
- **Theme:** `theme-primary-XX`, `theme-red-XX`, `theme-blue-XX`, etc.
```typescript
// ✅ Good - Use custom Onyx color classes
<div className="bg-background-neutral-01 border border-border-02" />
<div className="bg-background-tint-02 border border-border-01" />
<div className="bg-status-success-01" />
<div className="bg-action-link-01" />
<div className="bg-theme-primary-05" />
// ❌ Bad - Do NOT use standard Tailwind colors
<div className="bg-gray-100 border border-gray-300 text-gray-600" />
<div className="bg-white border border-slate-200" />
<div className="bg-green-100 text-green-700" />
<div className="bg-blue-100 text-blue-600" />
<div className="bg-indigo-500" />
```
### 12. Data Fetching
**Prefer using `useSWR` for data fetching. Data should generally be fetched on the client side. Components that need data should display a loader / placeholder while waiting for that data. Prefer loading data within the component that needs it rather than at the top level and passing it down.**
**Reason:** Client side fetching allows us to load the skeleton of the page without waiting for data to load, leading to a snappier UX. Loading data where needed reduces dependencies between a component and its parent component(s).
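A minimal sketch of the pattern (the `Persona` type, the `/api/persona` endpoint, and the fetcher are illustrative):
```typescript
import useSWR from "swr";
import { Text } from "@/refresh-components/texts/Text";

interface Persona {
  id: number;
  name: string;
}

// Illustrative fetcher; the endpoint goes through the frontend proxy
const fetcher = (url: string) => fetch(url).then((res) => res.json());

function PersonaList() {
  // Fetch inside the component that needs the data, not in a parent
  const { data, isLoading } = useSWR<Persona[]>("/api/persona", fetcher);

  // Render a placeholder while waiting for the data
  if (isLoading || !data) return <Text text03>Loading personas...</Text>;

  return (
    <div className="p-2">
      {data.map((persona) => (
        <Text key={persona.id}>{persona.name}</Text>
      ))}
    </div>
  );
}
```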
## Database & Migrations
### Running Migrations
```bash
# Standard migrations
alembic upgrade head
# Multi-tenant (Enterprise)
alembic -n schema_private upgrade head
```
### Creating Migrations
```bash
# Create migration
alembic revision -m "description"
# Multi-tenant migration
alembic -n schema_private revision -m "description"
```
Write the migration manually in the file that alembic creates when you run the above command.
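A minimal sketch of a hand-written migration (the revision IDs, table, and column are illustrative; alembic generates the real IDs and dates):
```python
"""add example_flag to user

Revision ID: <generated_revision_id>
Revises: <previous_revision_id>
Create Date: <generated>

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "<generated_revision_id>"
down_revision = "<previous_revision_id>"
branch_labels = None
depends_on = None


def upgrade() -> None:
    op.add_column(
        "user",
        sa.Column("example_flag", sa.Boolean(), nullable=True),
    )


def downgrade() -> None:
    op.drop_column("user", "example_flag")
```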
## Testing Strategy
First, you must activate the virtual environment with `source .venv/bin/activate`.
There are 4 main types of tests within Onyx:
### Unit Tests
These should not assume any Onyx/external services are available to be called.
Interactions with the outside world should be mocked using `unittest.mock`. Generally, only
write these for complex, isolated modules, e.g. `citation_processing.py`.
To run them:
```bash
pytest -xv backend/tests/unit
```
### External Dependency Unit Tests
These tests assume that all external dependencies of Onyx are available and callable (e.g. Postgres, Redis,
MinIO/S3, Vespa are running + OpenAI can be called + any request to the internet is fine + etc.).
However, the actual Onyx containers are not running; in these tests we call the function under test directly.
We can also mock components/calls at will.
The goal of these tests is to minimize mocking while giving some flexibility to mock things that are flaky,
need strictly controlled behavior, or need to have their internal behavior validated (e.g. verify a function is called
with certain args, something that would be impossible with proper integration tests).
A great example of this type of test is `backend/tests/external_dependency_unit/connectors/confluence/test_confluence_group_sync.py`.
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest backend/tests/external_dependency_unit
```
### Integration Tests
Standard integration tests. Every test in `backend/tests/integration` runs against a real Onyx deployment. We cannot
mock anything in these tests. Prefer writing integration tests (or External Dependency Unit Tests if mocking/internal
verification is necessary) over any other type of test.
Tests are parallelized at a directory level.
When writing integration tests, make sure to check the root `conftest.py` for useful fixtures + the `backend/tests/integration/common_utils` directory for utilities. Prefer calling the appropriate Manager
class in the utils (if one exists) over directly calling the APIs with a library like `requests`. Prefer using fixtures rather than
calling the utilities directly (e.g. do NOT create admin users with
`admin_user = UserManager.create(name="admin_user")`; instead use the `admin_user` fixture).
A great example of this type of test is `backend/tests/integration/dev_apis/test_simple_chat_api.py`.
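To make the fixture preference concrete (the test body is elided; the type annotation is omitted here):
```python
# ✅ Good - request the fixture provided by the root conftest.py
def test_admin_can_create_persona(admin_user) -> None:
    ...


# ❌ Bad - do NOT construct the user inline:
# admin_user = UserManager.create(name="admin_user")
```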
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest backend/tests/integration
```
### Playwright (E2E) Tests
These tests are an even more complete version of the Integration Tests mentioned above. All Onyx services are
running, *including* the Web Server.
Use these tests for anything that requires significant frontend <-> backend coordination.
Tests are located at `web/tests/e2e`. Tests are written in TypeScript.
To run them:
```bash
npx playwright test <TEST_NAME>
```
## Logs
When (1) writing integration tests or (2) doing live tests (e.g. curl / playwright) you can access
logs via the `backend/log/<service_name>_debug.log` file. All Onyx services (api_server, web_server, celery_X)
write their logs to this file.
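For example, to follow the API server logs while testing:
```bash
tail -f backend/log/api_server_debug.log
```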
## Security Considerations
- Never commit API keys or secrets to repository
- Use encrypted credential storage for connector credentials
- Follow RBAC patterns for new features
- Implement proper input validation with Pydantic models
- Use parameterized queries to prevent SQL injection
## AI/LLM Integration
- Multiple LLM providers supported via LiteLLM
- Configurable models per feature (chat, search, embeddings)
- Streaming support for real-time responses
- Token management and rate limiting
- Custom prompts and agent actions
## Creating a Plan
When creating a plan in the `plans` directory, make sure to include at least these elements:
**Issues to Address**
What the change is meant to do.
**Important Notes**
Things you come across in your research that are important to the implementation.
**Implementation strategy**
How you are going to make the changes happen. High level approach.
**Tests**
What unit (use rarely), external dependency unit, integration, and playwright tests you plan to write to
verify the correct behavior. Don't overtest. Usually, a given change only needs one type of test.
Do NOT include these: *Timeline*, *Rollback plan*
This is a minimal list - feel free to include more. Do NOT write code as part of your plan.
Keep it high level. You can reference certain files or functions though.
Before writing your plan, make sure to do research. Explore the relevant sections in the codebase.

CONTRIBUTING.md
@@ -13,8 +13,7 @@ As an open source project in a rapidly changing space, we welcome all contributi
The [GitHub Issues](https://github.com/onyx-dot-app/onyx/issues) page is a great place to start for contribution ideas.
To ensure that your contribution is aligned with the project's direction, please reach out to any maintainer on the Onyx team
via [Slack](https://join.slack.com/t/onyx-dot-app/shared_invite/zt-34lu4m7xg-TsKGO6h8PDvR5W27zTdyhA) /
[Discord](https://discord.gg/TDJ59cGV2X) or [email](mailto:founders@onyx.app).
via [Discord](https://discord.gg/4NA5SbzrWb) or [email](mailto:hello@onyx.app).
Issues that have been explicitly approved by the maintainers (aligned with the direction of the project)
will be marked with the `approved by maintainers` label.
@@ -28,8 +27,7 @@ Your input is vital to making sure that Onyx moves in the right direction.
Before starting on implementation, please raise a GitHub issue.
Also, always feel free to message the founders (Chris Weaver / Yuhong Sun) on
[Slack](https://join.slack.com/t/onyx-dot-app/shared_invite/zt-34lu4m7xg-TsKGO6h8PDvR5W27zTdyhA) /
[Discord](https://discord.gg/TDJ59cGV2X) directly about anything at all.
[Discord](https://discord.gg/4NA5SbzrWb) directly about anything at all.
### Contributing Code
@@ -46,9 +44,7 @@ Our goal is to make contributing as easy as possible. If you run into any issues
That way we can help future contributors and users can avoid the same issue.
We also have support channels and generally interesting discussions on our
[Slack](https://join.slack.com/t/onyx-dot-app/shared_invite/zt-2twesxdr6-5iQitKZQpgq~hYIZ~dv3KA)
and
[Discord](https://discord.gg/TDJ59cGV2X).
[Discord](https://discord.gg/4NA5SbzrWb).
We would love to see you there!
@@ -75,12 +71,12 @@ If using a higher version, sometimes some libraries will not be available (i.e.
#### Backend: Python requirements
Currently, we use pip and recommend creating a virtual environment.
Currently, we use [uv](https://docs.astral.sh/uv/) and recommend creating a [virtual environment](https://docs.astral.sh/uv/pip/environments/#using-a-virtual-environment).
For convenience here's a command for it:
```bash
python -m venv .venv
uv venv .venv --python 3.11
source .venv/bin/activate
```
@@ -99,26 +95,26 @@ If using PowerShell, the command slightly differs:
Install the required python dependencies:
```bash
pip install -r backend/requirements/default.txt
pip install -r backend/requirements/dev.txt
pip install -r backend/requirements/ee.txt
pip install -r backend/requirements/model_server.txt
uv sync --all-extras
```
Install Playwright for Python (headless browser required by the Web Connector)
In the activated Python virtualenv, install Playwright for Python by running:
Install Playwright for Python (headless browser required by the Web Connector):
```bash
playwright install
uv run playwright install
```
You may have to deactivate and reactivate your virtualenv for `playwright` to appear on your path.
#### Frontend: Node dependencies
Install [Node.js and npm](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) for the frontend.
Once the above is done, navigate to `onyx/web` run:
Onyx uses Node v22.20.0. We highly recommend you use [Node Version Manager (nvm)](https://github.com/nvm-sh/nvm)
to manage your Node installations. Once installed, you can run
```bash
nvm install 22 && nvm use 22
node -v # verify your active version
```
Navigate to `onyx/web` and run:
```bash
npm i
@@ -129,36 +125,30 @@ npm i
### Backend
For the backend, you'll need to setup pre-commit hooks (black / reorder-python-imports).
First, install pre-commit (if you don't have it already) following the instructions
[here](https://pre-commit.com/#installation).
With the virtual environment active, install the pre-commit library with:
Then run:
```bash
pip install pre-commit
```
Then, from the `onyx/backend` directory, run:
```bash
pre-commit install
uv run pre-commit install
```
Additionally, we use `mypy` for static type checking.
Onyx is fully type-annotated, and we want to keep it that way!
To run the mypy checks manually, run `python -m mypy .` from the `onyx/backend` directory.
To run the mypy checks manually, run `uv run mypy .` from the `onyx/backend` directory.
### Web
We use `prettier` for formatting. The desired version (2.8.8) will be installed via a `npm i` from the `onyx/web` directory.
We use `prettier` for formatting. The desired version will be installed via a `npm i` from the `onyx/web` directory.
To run the formatter, use `npx prettier --write .` from the `onyx/web` directory.
Please double check that prettier passes before creating a pull request.
Pre-commit will also run prettier automatically on files you've recently touched. If re-formatted, your commit will fail.
Re-stage your changes and commit again.
# Running the application for development
## Developing using VSCode Debugger (recommended)
We highly recommend using VSCode debugger for development.
**We highly recommend using VSCode debugger for development.**
See [CONTRIBUTING_VSCODE.md](./CONTRIBUTING_VSCODE.md) for more details.
Otherwise, you can follow the instructions below to run the application for development.
@@ -171,7 +161,7 @@ You will need Docker installed to run these containers.
First navigate to `onyx/deployment/docker_compose`, then start up Postgres/Vespa/Redis/MinIO with:
```bash
docker compose up -d index relational_db cache minio
docker compose -f docker-compose.yml -f docker-compose.dev.yml up -d index relational_db cache minio
```
(index refers to Vespa, relational_db refers to Postgres, and cache refers to Redis)

CONTRIBUTING_VSCODE.md
@@ -21,6 +21,9 @@ Before starting, make sure the Docker Daemon is running.
5. You can set breakpoints by clicking to the left of line numbers to help debug while the app is running
6. Use the debug toolbar to step through code, inspect variables, etc.
Note: Clear and Restart External Volumes and Containers will reset your postgres and Vespa (relational-db and index).
Only run this if you are okay with wiping your data.
## Features
- Hot reload is enabled for the web server and API servers

README.md
@@ -1,29 +1,34 @@
<a name="readme-top"></a>
<h2 align="center">
<a href="https://www.onyx.app/"> <img width="50%" src="https://github.com/onyx-dot-app/onyx/blob/logo/OnyxLogoCropped.jpg?raw=true)" /></a>
<a href="https://www.onyx.app/?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme"> <img width="50%" src="https://github.com/onyx-dot-app/onyx/blob/logo/OnyxLogoCropped.jpg?raw=true" /></a>
</h2>
<p align="center">Open Source AI Platform</p>
<p align="center">
<a href="https://discord.gg/TDJ59cGV2X" target="_blank">
<img src="https://img.shields.io/badge/discord-join-blue.svg?logo=discord&logoColor=white" alt="Discord">
<img src="https://img.shields.io/badge/discord-join-blue.svg?logo=discord&logoColor=white" alt="Discord" />
</a>
<a href="https://docs.onyx.app/" target="_blank">
<img src="https://img.shields.io/badge/docs-view-blue" alt="Documentation">
<a href="https://docs.onyx.app/?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme" target="_blank">
<img src="https://img.shields.io/badge/docs-view-blue" alt="Documentation" />
</a>
<a href="https://docs.onyx.app/" target="_blank">
<img src="https://img.shields.io/website?url=https://www.onyx.app&up_message=visit&up_color=blue" alt="Documentation">
<a href="https://www.onyx.app/?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme" target="_blank">
<img src="https://img.shields.io/website?url=https://www.onyx.app&up_message=visit&up_color=blue" alt="Documentation" />
</a>
<a href="https://github.com/onyx-dot-app/onyx/blob/main/LICENSE" target="_blank">
<img src="https://img.shields.io/static/v1?label=license&message=MIT&color=blue" alt="License">
<img src="https://img.shields.io/static/v1?label=license&message=MIT&color=blue" alt="License" />
</a>
</p>
<p align="center">
<a href="https://trendshift.io/repositories/12516" target="_blank">
<img src="https://trendshift.io/api/badge/repositories/12516" alt="onyx-dot-app/onyx | Trendshift" style="width: 250px; height: 55px;" />
</a>
</p>
**[Onyx](https://www.onyx.app/)** is a feature-rich, self-hostable Chat UI that works with any LLM. It is easy to deploy and can run in a completely airgapped environment.
**[Onyx](https://www.onyx.app/?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme)** is a feature-rich, self-hostable Chat UI that works with any LLM. It is easy to deploy and can run in a completely airgapped environment.
Onyx comes loaded with advanced features like Agents, Web Search, RAG, MCP, Deep Research, Connectors to 40+ knowledge sources, and more.
@@ -52,7 +57,7 @@ Onyx comes loaded with advanced features like Agents, Web Search, RAG, MCP, Deep
Onyx works with all LLMs (like OpenAI, Anthropic, Gemini, etc.) and self-hosted LLMs (like Ollama, vLLM, etc.)
To learn more about the features, check out our [documentation](https://docs.onyx.app/welcome)!
To learn more about the features, check out our [documentation](https://docs.onyx.app/welcome?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme)!
@@ -60,13 +65,13 @@ To learn more about the features, check out our [documentation](https://docs.ony
Onyx supports deployments in Docker, Kubernetes, Terraform, along with guides for major cloud providers.
See guides below:
- [Docker](https://docs.onyx.app/deployment/local/docker) or [Quickstart](https://docs.onyx.app/deployment/getting_started/quickstart) (best for most users)
- [Kubernetes](https://docs.onyx.app/deployment/local/kubernetes) (best for large teams)
- [Terraform](https://docs.onyx.app/deployment/local/terraform) (best for teams already using Terraform)
- Cloud specific guides (best if specifically using [AWS EKS](https://docs.onyx.app/deployment/cloud/aws/eks), [Azure VMs](https://docs.onyx.app/deployment/cloud/azure), etc.)
- [Docker](https://docs.onyx.app/deployment/local/docker?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme) or [Quickstart](https://docs.onyx.app/deployment/getting_started/quickstart?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme) (best for most users)
- [Kubernetes](https://docs.onyx.app/deployment/local/kubernetes?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme) (best for large teams)
- [Terraform](https://docs.onyx.app/deployment/local/terraform?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme) (best for teams already using Terraform)
- Cloud specific guides (best if specifically using [AWS EKS](https://docs.onyx.app/deployment/cloud/aws/eks?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme), [Azure VMs](https://docs.onyx.app/deployment/cloud/azure?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme), etc.)
> [!TIP]
> **To try Onyx for free without deploying, check out [Onyx Cloud](https://cloud.onyx.app/signup)**.
> **To try Onyx for free without deploying, check out [Onyx Cloud](https://cloud.onyx.app/signup?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme)**.
@@ -90,7 +95,7 @@ There are two editions of Onyx:
- Onyx Community Edition (CE) is available freely under the MIT license.
- Onyx Enterprise Edition (EE) includes extra features that are primarily useful for larger organizations.
For feature details, check out [our website](https://www.onyx.app/pricing).
For feature details, check out [our website](https://www.onyx.app/pricing?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme).

@@ -15,3 +15,4 @@ build/
dist/
.coverage
htmlcov/
model_server/legacy/

backend/Dockerfile
@@ -7,16 +7,20 @@ have a contract or agreement with DanswerAI, you are not permitted to use the En
Edition features outside of personal development or testing purposes. Please reach out to \
founders@onyx.app for more information. Please visit https://github.com/onyx-dot-app/onyx"
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.0.0-dev
# DO_NOT_TRACK is used to disable telemetry for Unstructured
ENV ONYX_VERSION=${ONYX_VERSION} \
DANSWER_RUNNING_IN_DOCKER="true" \
ENV DANSWER_RUNNING_IN_DOCKER="true" \
DO_NOT_TRACK="true" \
PLAYWRIGHT_BROWSERS_PATH="/app/.cache/ms-playwright"
# Create non-root user for security best practices
RUN groupadd -g 1001 onyx && \
useradd -u 1001 -g onyx -m -s /bin/bash onyx && \
mkdir -p /var/log/onyx && \
chmod 755 /var/log/onyx && \
chown onyx:onyx /var/log/onyx
COPY --from=ghcr.io/astral-sh/uv:0.9.9 /uv /uvx /bin/
RUN echo "ONYX_VERSION: ${ONYX_VERSION}"
# Install system dependencies
# cmake needed for psycopg (postgres)
# libpq-dev needed for psycopg (postgres)
@@ -48,22 +52,20 @@ RUN apt-get update && \
# Remove py which is pulled in by retry, py is not needed and is a CVE
COPY ./requirements/default.txt /tmp/requirements.txt
COPY ./requirements/ee.txt /tmp/ee-requirements.txt
RUN pip install --no-cache-dir --upgrade \
--retries 5 \
--timeout 30 \
RUN uv pip install --system --no-cache-dir --upgrade \
-r /tmp/requirements.txt \
-r /tmp/ee-requirements.txt && \
pip uninstall -y py && \
playwright install chromium && \
playwright install-deps chromium && \
ln -s /usr/local/bin/supervisord /usr/bin/supervisord
# Cleanup for CVEs and size reduction
# https://github.com/tornadoweb/tornado/issues/3107
# xserver-common and xvfb included by playwright installation but not needed after
# perl-base is part of the base Python Debian image but not needed for Onyx functionality
# perl-base could only be removed with --allow-remove-essential
RUN apt-get update && \
chown -R onyx:onyx /app && \
ln -s /usr/local/bin/supervisord /usr/bin/supervisord && \
# Cleanup for CVEs and size reduction
# https://github.com/tornadoweb/tornado/issues/3107
# xserver-common and xvfb included by playwright installation but not needed after
# perl-base is part of the base Python Debian image but not needed for Onyx functionality
# perl-base could only be removed with --allow-remove-essential
apt-get update && \
apt-get remove -y --allow-remove-essential \
perl-base \
xserver-common \
@@ -73,15 +75,16 @@ RUN apt-get update && \
libxmlsec1-dev \
pkg-config \
gcc && \
apt-get install -y libxmlsec1-openssl && \
# Install here to avoid some packages being cleaned up above
apt-get install -y \
libxmlsec1-openssl \
# Install postgresql-client for easy manual tests
postgresql-client && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/* && \
rm -rf ~/.cache/uv /tmp/*.txt && \
rm -f /usr/local/lib/python3.11/site-packages/tornado/test/test.key
# Install postgresql-client for easy manual tests
# Install it here to avoid it being cleaned up above
RUN apt-get update && apt-get install -y postgresql-client
# Pre-downloading models for setups with limited egress
RUN python -c "from tokenizers import Tokenizer; \
Tokenizer.from_pretrained('nomic-ai/nomic-embed-text-v1')"
@@ -92,38 +95,40 @@ nltk.download('stopwords', quiet=True); \
nltk.download('punkt_tab', quiet=True);"
# nltk.download('wordnet', quiet=True); introduce this back if lemmatization is needed
# Pre-downloading tiktoken for setups with limited egress
RUN python -c "import tiktoken; \
tiktoken.get_encoding('cl100k_base')"
# Set up application files
WORKDIR /app
# Enterprise Version Files
COPY ./ee /app/ee
COPY --chown=onyx:onyx ./ee /app/ee
COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
# Set up application files
COPY ./onyx /app/onyx
COPY ./shared_configs /app/shared_configs
COPY ./alembic /app/alembic
COPY ./alembic_tenants /app/alembic_tenants
COPY ./alembic.ini /app/alembic.ini
COPY --chown=onyx:onyx ./onyx /app/onyx
COPY --chown=onyx:onyx ./shared_configs /app/shared_configs
COPY --chown=onyx:onyx ./alembic /app/alembic
COPY --chown=onyx:onyx ./alembic_tenants /app/alembic_tenants
COPY --chown=onyx:onyx ./alembic.ini /app/alembic.ini
COPY supervisord.conf /usr/etc/supervisord.conf
COPY ./static /app/static
COPY --chown=onyx:onyx ./static /app/static
# Escape hatch scripts
COPY ./scripts/debugging /app/scripts/debugging
COPY ./scripts/force_delete_connector_by_id.py /app/scripts/force_delete_connector_by_id.py
COPY --chown=onyx:onyx ./scripts/debugging /app/scripts/debugging
COPY --chown=onyx:onyx ./scripts/force_delete_connector_by_id.py /app/scripts/force_delete_connector_by_id.py
COPY --chown=onyx:onyx ./scripts/supervisord_entrypoint.sh /app/scripts/supervisord_entrypoint.sh
RUN chmod +x /app/scripts/supervisord_entrypoint.sh
# Put logo in assets
COPY ./assets /app/assets
COPY --chown=onyx:onyx ./assets /app/assets
ENV PYTHONPATH=/app
# Create non-root user for security best practices
RUN groupadd -g 1001 onyx && \
useradd -u 1001 -g onyx -m -s /bin/bash onyx && \
chown -R onyx:onyx /app && \
mkdir -p /var/log/onyx && \
chmod 755 /var/log/onyx && \
chown onyx:onyx /var/log/onyx
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.0.0-dev
ENV ONYX_VERSION=${ONYX_VERSION}
# Default command which does nothing
# This container is used by api server and background which specify their own CMD

backend/Dockerfile.model_server
@@ -1,4 +1,29 @@
FROM python:3.11.7-slim-bookworm
# Base stage with dependencies
FROM python:3.11.7-slim-bookworm AS base
ENV DANSWER_RUNNING_IN_DOCKER="true" \
HF_HOME=/app/.cache/huggingface
COPY --from=ghcr.io/astral-sh/uv:0.9.9 /uv /uvx /bin/
RUN mkdir -p /app/.cache/huggingface
COPY ./requirements/model_server.txt /tmp/requirements.txt
RUN uv pip install --system --no-cache-dir --upgrade \
-r /tmp/requirements.txt && \
rm -rf ~/.cache/uv /tmp/*.txt
# Stage for downloading embedding models
FROM base AS embedding-models
RUN python -c "from huggingface_hub import snapshot_download; \
snapshot_download('nomic-ai/nomic-embed-text-v1');"
# Initialize SentenceTransformer to cache the custom architecture
RUN python -c "from sentence_transformers import SentenceTransformer; \
SentenceTransformer(model_name_or_path='nomic-ai/nomic-embed-text-v1', trust_remote_code=True);"
# Final stage - combine all downloads
FROM base AS final
LABEL com.danswer.maintainer="founders@onyx.app"
LABEL com.danswer.description="This image is for the Onyx model server which runs all of the \
@@ -6,68 +31,17 @@ AI models for Onyx. This container and all the code is MIT Licensed and free for
You can find it at https://hub.docker.com/r/onyx/onyx-model-server. For more details, \
visit https://github.com/onyx-dot-app/onyx."
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.0.0-dev
ENV ONYX_VERSION=${ONYX_VERSION} \
DANSWER_RUNNING_IN_DOCKER="true" \
HF_HOME=/app/.cache/huggingface
RUN echo "ONYX_VERSION: ${ONYX_VERSION}"
# Create non-root user for security best practices
RUN mkdir -p /app && \
groupadd -g 1001 onyx && \
useradd -u 1001 -g onyx -m -s /bin/bash onyx && \
chown -R onyx:onyx /app && \
RUN groupadd -g 1001 onyx && \
useradd -u 1001 -g onyx -m -s /bin/bash onyx && \
mkdir -p /var/log/onyx && \
chmod 755 /var/log/onyx && \
chown onyx:onyx /var/log/onyx
# --- add toolchain needed for Rust/Python builds (fastuuid) ---
ENV RUSTUP_HOME=/usr/local/rustup \
CARGO_HOME=/usr/local/cargo \
PATH=/usr/local/cargo/bin:$PATH
RUN set -eux; \
apt-get update && apt-get install -y --no-install-recommends \
build-essential \
pkg-config \
curl \
ca-certificates \
&& rm -rf /var/lib/apt/lists/* \
# Install latest stable Rust (supports Cargo.lock v4)
&& curl -sSf https://sh.rustup.rs | sh -s -- -y --profile minimal --default-toolchain stable \
&& rustc --version && cargo --version
COPY ./requirements/model_server.txt /tmp/requirements.txt
RUN pip install --no-cache-dir --upgrade \
--retries 5 \
--timeout 30 \
-r /tmp/requirements.txt
RUN apt-get remove -y --allow-remove-essential perl-base && \
apt-get autoremove -y
# Pre-downloading models for setups with limited egress
# Download tokenizers, distilbert for the Onyx model
# Download model weights
# Run Nomic to pull in the custom architecture and have it cached locally
RUN python -c "from transformers import AutoTokenizer; \
AutoTokenizer.from_pretrained('distilbert-base-uncased'); \
AutoTokenizer.from_pretrained('mixedbread-ai/mxbai-rerank-xsmall-v1'); \
from huggingface_hub import snapshot_download; \
snapshot_download(repo_id='onyx-dot-app/hybrid-intent-token-classifier'); \
snapshot_download(repo_id='onyx-dot-app/information-content-model'); \
snapshot_download('nomic-ai/nomic-embed-text-v1'); \
snapshot_download('mixedbread-ai/mxbai-rerank-xsmall-v1'); \
from sentence_transformers import SentenceTransformer; \
SentenceTransformer(model_name_or_path='nomic-ai/nomic-embed-text-v1', trust_remote_code=True);"
# In case the user has volumes mounted to /app/.cache/huggingface that they've downloaded while
# running Onyx, move the current contents of the cache folder to a temporary location to ensure
# running Onyx, move the current contents of the cache folder to a temporary location to ensure
# it's preserved in order to combine with the user's cache contents
RUN mv /app/.cache/huggingface /app/.cache/temp_huggingface && \
chown -R onyx:onyx /app
COPY --chown=onyx:onyx --from=embedding-models /app/.cache/huggingface /app/.cache/temp_huggingface
WORKDIR /app
@@ -86,4 +60,8 @@ COPY ./model_server /app/model_server
ENV PYTHONPATH=/app
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.0.0-dev
ENV ONYX_VERSION=${ONYX_VERSION}
CMD ["uvicorn", "model_server.main:app", "--host", "0.0.0.0", "--port", "9000"]

backend/alembic/README.md
@@ -7,8 +7,12 @@ Onyx migrations use a generic single-database configuration with an async dbapi.
## To generate new migrations:
run from onyx/backend:
`alembic revision --autogenerate -m <DESCRIPTION_OF_MIGRATION>`
From onyx/backend, run:
`alembic revision -m <DESCRIPTION_OF_MIGRATION>`
Note: you cannot use the `--autogenerate` flag as the automatic schema parsing does not work.
Manually populate the upgrade and downgrade in your new migration.
More info can be found here: https://alembic.sqlalchemy.org/en/latest/autogenerate.html

backend/alembic/env.py
@@ -39,7 +39,9 @@ config = context.config
if config.config_file_name is not None and config.attributes.get(
    "configure_logger", True
):
    fileConfig(config.config_file_name)
    # disable_existing_loggers=False prevents breaking pytest's caplog fixture
    # See: https://pytest-alembic.readthedocs.io/en/latest/setup.html#caplog-issues
    fileConfig(config.config_file_name, disable_existing_loggers=False)

target_metadata = [Base.metadata, ResultModelBase.metadata]
@@ -460,8 +462,49 @@ def run_migrations_offline() -> None:
def run_migrations_online() -> None:
    logger.info("run_migrations_online starting.")
    asyncio.run(run_async_migrations())
    """Run migrations in 'online' mode.

    Supports pytest-alembic by checking for a pre-configured connection
    in context.config.attributes["connection"]. If present, uses that
    connection/engine directly instead of creating a new async engine.
    """
    # Check if pytest-alembic is providing a connection/engine
    connectable = context.config.attributes.get("connection", None)

    if connectable is not None:
        # pytest-alembic is providing an engine - use it directly
        logger.info("run_migrations_online starting (pytest-alembic mode).")

        # For pytest-alembic, we use the default schema (public)
        schema_name = context.config.attributes.get(
            "schema_name", POSTGRES_DEFAULT_SCHEMA
        )

        # pytest-alembic passes an Engine, we need to get a connection from it
        with connectable.connect() as connection:
            # Set search path for the schema
            connection.execute(text(f'SET search_path TO "{schema_name}"'))

            context.configure(
                connection=connection,
                target_metadata=target_metadata,  # type: ignore
                include_object=include_object,
                version_table_schema=schema_name,
                include_schemas=True,
                compare_type=True,
                compare_server_default=True,
                script_location=config.get_main_option("script_location"),
            )

            with context.begin_transaction():
                context.run_migrations()

            # Commit the transaction to ensure changes are visible to next migration
            connection.commit()
    else:
        # Normal operation - use async migrations
        logger.info("run_migrations_online starting.")
        asyncio.run(run_async_migrations())
if context.is_offline_mode():

@@ -0,0 +1,153 @@
"""add permission sync attempt tables
Revision ID: 03d710ccf29c
Revises: 96a5702df6aa
Create Date: 2025-09-11 13:30:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "03d710ccf29c" # Generate a new unique ID
down_revision = "96a5702df6aa"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create the permission sync status enum
permission_sync_status_enum = sa.Enum(
"not_started",
"in_progress",
"success",
"canceled",
"failed",
"completed_with_errors",
name="permissionsyncstatus",
native_enum=False,
)
# Create doc_permission_sync_attempt table
op.create_table(
"doc_permission_sync_attempt",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("connector_credential_pair_id", sa.Integer(), nullable=False),
sa.Column("status", permission_sync_status_enum, nullable=False),
sa.Column("total_docs_synced", sa.Integer(), nullable=True),
sa.Column("docs_with_permission_errors", sa.Integer(), nullable=True),
sa.Column("error_message", sa.Text(), nullable=True),
sa.Column(
"time_created",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("time_started", sa.DateTime(timezone=True), nullable=True),
sa.Column("time_finished", sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(
["connector_credential_pair_id"],
["connector_credential_pair.id"],
),
sa.PrimaryKeyConstraint("id"),
)
# Create indexes for doc_permission_sync_attempt
op.create_index(
"ix_doc_permission_sync_attempt_time_created",
"doc_permission_sync_attempt",
["time_created"],
unique=False,
)
op.create_index(
"ix_permission_sync_attempt_latest_for_cc_pair",
"doc_permission_sync_attempt",
["connector_credential_pair_id", "time_created"],
unique=False,
)
op.create_index(
"ix_permission_sync_attempt_status_time",
"doc_permission_sync_attempt",
["status", sa.text("time_finished DESC")],
unique=False,
)
# Create external_group_permission_sync_attempt table
# connector_credential_pair_id is nullable - group syncs can be global (e.g., Confluence)
op.create_table(
"external_group_permission_sync_attempt",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("connector_credential_pair_id", sa.Integer(), nullable=True),
sa.Column("status", permission_sync_status_enum, nullable=False),
sa.Column("total_users_processed", sa.Integer(), nullable=True),
sa.Column("total_groups_processed", sa.Integer(), nullable=True),
sa.Column("total_group_memberships_synced", sa.Integer(), nullable=True),
sa.Column("error_message", sa.Text(), nullable=True),
sa.Column(
"time_created",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column("time_started", sa.DateTime(timezone=True), nullable=True),
sa.Column("time_finished", sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(
["connector_credential_pair_id"],
["connector_credential_pair.id"],
),
sa.PrimaryKeyConstraint("id"),
)
# Create indexes for external_group_permission_sync_attempt
op.create_index(
"ix_external_group_permission_sync_attempt_time_created",
"external_group_permission_sync_attempt",
["time_created"],
unique=False,
)
op.create_index(
"ix_group_sync_attempt_cc_pair_time",
"external_group_permission_sync_attempt",
["connector_credential_pair_id", "time_created"],
unique=False,
)
op.create_index(
"ix_group_sync_attempt_status_time",
"external_group_permission_sync_attempt",
["status", sa.text("time_finished DESC")],
unique=False,
)
def downgrade() -> None:
# Drop indexes
op.drop_index(
"ix_group_sync_attempt_status_time",
table_name="external_group_permission_sync_attempt",
)
op.drop_index(
"ix_group_sync_attempt_cc_pair_time",
table_name="external_group_permission_sync_attempt",
)
op.drop_index(
"ix_external_group_permission_sync_attempt_time_created",
table_name="external_group_permission_sync_attempt",
)
op.drop_index(
"ix_permission_sync_attempt_status_time",
table_name="doc_permission_sync_attempt",
)
op.drop_index(
"ix_permission_sync_attempt_latest_for_cc_pair",
table_name="doc_permission_sync_attempt",
)
op.drop_index(
"ix_doc_permission_sync_attempt_time_created",
table_name="doc_permission_sync_attempt",
)
# Drop tables
op.drop_table("external_group_permission_sync_attempt")
op.drop_table("doc_permission_sync_attempt")

@@ -0,0 +1,33 @@
"""add theme_preference to user
Revision ID: 09995b8811eb
Revises: 3d1cca026fe8
Create Date: 2025-10-24 08:58:50.246949
"""
from alembic import op
import sqlalchemy as sa
from onyx.db.enums import ThemePreference
# revision identifiers, used by Alembic.
revision = "09995b8811eb"
down_revision = "3d1cca026fe8"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"user",
sa.Column(
"theme_preference",
sa.Enum(ThemePreference, native_enum=False),
nullable=True,
),
)
def downgrade() -> None:
op.drop_column("user", "theme_preference")

@@ -0,0 +1,389 @@
"""Migration 2: User file data preparation and backfill
Revision ID: 0cd424f32b1d
Revises: 9b66d3156fc6
Create Date: 2025-09-22 09:44:42.727034
This migration populates the new columns added in migration 1.
It prepares data for the UUID transition and relationship migration.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
import logging
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "0cd424f32b1d"
down_revision = "9b66d3156fc6"
branch_labels = None
depends_on = None
def upgrade() -> None:
"""Populate new columns with data."""
bind = op.get_bind()
inspector = sa.inspect(bind)
# === Step 1: Populate user_file.new_id ===
user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
has_new_id = "new_id" in user_file_columns
if has_new_id:
logger.info("Populating user_file.new_id with UUIDs...")
# Count rows needing UUIDs
null_count = bind.execute(
text("SELECT COUNT(*) FROM user_file WHERE new_id IS NULL")
).scalar_one()
if null_count > 0:
logger.info(f"Generating UUIDs for {null_count} user_file records...")
# Populate in batches to avoid long locks
batch_size = 10000
total_updated = 0
while True:
result = bind.execute(
text(
"""
UPDATE user_file
SET new_id = gen_random_uuid()
WHERE new_id IS NULL
AND id IN (
SELECT id FROM user_file
WHERE new_id IS NULL
LIMIT :batch_size
)
"""
),
{"batch_size": batch_size},
)
updated = result.rowcount
total_updated += updated
if updated < batch_size:
break
logger.info(f" Updated {total_updated}/{null_count} records...")
logger.info(f"Generated UUIDs for {total_updated} user_file records")
# Verify all records have UUIDs
remaining_null = bind.execute(
text("SELECT COUNT(*) FROM user_file WHERE new_id IS NULL")
).scalar_one()
if remaining_null > 0:
raise Exception(
f"Failed to populate all user_file.new_id values ({remaining_null} NULL)"
)
# Lock down the column
op.alter_column("user_file", "new_id", nullable=False)
op.alter_column("user_file", "new_id", server_default=None)
logger.info("Locked down user_file.new_id column")
# === Step 2: Populate persona__user_file.user_file_id_uuid ===
persona_user_file_columns = [
col["name"] for col in inspector.get_columns("persona__user_file")
]
if has_new_id and "user_file_id_uuid" in persona_user_file_columns:
logger.info("Populating persona__user_file.user_file_id_uuid...")
# Count rows needing update
null_count = bind.execute(
text(
"""
SELECT COUNT(*) FROM persona__user_file
WHERE user_file_id IS NOT NULL AND user_file_id_uuid IS NULL
"""
)
).scalar_one()
if null_count > 0:
logger.info(f"Updating {null_count} persona__user_file records...")
# Update in batches
batch_size = 10000
total_updated = 0
while True:
result = bind.execute(
text(
"""
UPDATE persona__user_file p
SET user_file_id_uuid = uf.new_id
FROM user_file uf
WHERE p.user_file_id = uf.id
AND p.user_file_id_uuid IS NULL
AND p.persona_id IN (
SELECT persona_id
FROM persona__user_file
WHERE user_file_id_uuid IS NULL
LIMIT :batch_size
)
"""
),
{"batch_size": batch_size},
)
updated = result.rowcount
total_updated += updated
if updated < batch_size:
break
logger.info(f" Updated {total_updated}/{null_count} records...")
logger.info(f"Updated {total_updated} persona__user_file records")
# Verify all records are populated
remaining_null = bind.execute(
text(
"""
SELECT COUNT(*) FROM persona__user_file
WHERE user_file_id IS NOT NULL AND user_file_id_uuid IS NULL
"""
)
).scalar_one()
if remaining_null > 0:
raise Exception(
f"Failed to populate all persona__user_file.user_file_id_uuid values ({remaining_null} NULL)"
)
op.alter_column("persona__user_file", "user_file_id_uuid", nullable=False)
logger.info("Locked down persona__user_file.user_file_id_uuid column")
# === Step 3: Create user_project records from chat_folder ===
if "chat_folder" in inspector.get_table_names():
logger.info("Creating user_project records from chat_folder...")
result = bind.execute(
text(
"""
INSERT INTO user_project (user_id, name)
SELECT cf.user_id, cf.name
FROM chat_folder cf
WHERE NOT EXISTS (
SELECT 1
FROM user_project up
WHERE up.user_id = cf.user_id AND up.name = cf.name
)
"""
)
)
logger.info(f"Created {result.rowcount} user_project records from chat_folder")
# === Step 4: Populate chat_session.project_id ===
chat_session_columns = [
col["name"] for col in inspector.get_columns("chat_session")
]
if "folder_id" in chat_session_columns and "project_id" in chat_session_columns:
logger.info("Populating chat_session.project_id...")
# Count sessions needing update
null_count = bind.execute(
text(
"""
SELECT COUNT(*) FROM chat_session
WHERE project_id IS NULL AND folder_id IS NOT NULL
"""
)
).scalar_one()
if null_count > 0:
logger.info(f"Updating {null_count} chat_session records...")
result = bind.execute(
text(
"""
UPDATE chat_session cs
SET project_id = up.id
FROM chat_folder cf
JOIN user_project up ON up.user_id = cf.user_id AND up.name = cf.name
WHERE cs.folder_id = cf.id AND cs.project_id IS NULL
"""
)
)
logger.info(f"Updated {result.rowcount} chat_session records")
# Verify all records are populated
remaining_null = bind.execute(
text(
"""
SELECT COUNT(*) FROM chat_session
WHERE project_id IS NULL AND folder_id IS NOT NULL
"""
)
).scalar_one()
if remaining_null > 0:
logger.warning(
f"Warning: {remaining_null} chat_session records could not be mapped to projects"
)
# === Step 5: Update plaintext FileRecord IDs/display names to UUID scheme ===
# Prior to UUID migration, plaintext cache files were stored with file_id like 'plain_text_<int_id>'.
# After migration, we use 'plaintext_<uuid>' (note the name change to 'plaintext_').
# This step remaps existing FileRecord rows to the new naming while preserving object_key/bucket.
logger.info("Updating plaintext FileRecord ids and display names to UUID scheme...")
# Count legacy plaintext records that can be mapped to UUID user_file ids
count_query = text(
"""
SELECT COUNT(*)
FROM file_record fr
JOIN user_file uf ON fr.file_id = CONCAT('plaintext_', uf.id::text)
WHERE LOWER(fr.file_origin::text) = 'plaintext_cache'
"""
)
legacy_count = bind.execute(count_query).scalar_one()
if legacy_count and legacy_count > 0:
logger.info(f"Found {legacy_count} legacy plaintext file records to update")
# Update display_name first for readability (safe regardless of rename)
bind.execute(
text(
"""
UPDATE file_record fr
SET display_name = CONCAT('Plaintext for user file ', uf.new_id::text)
FROM user_file uf
WHERE LOWER(fr.file_origin::text) = 'plaintext_cache'
AND fr.file_id = CONCAT('plaintext_', uf.id::text)
"""
)
)
# Remap file_id from 'plaintext_<int>' -> 'plaintext_<uuid>' using the transitional
# new_id, matching exactly on 'plaintext_<int>' against existing user_file ids
# so unrelated rows are never renamed
result = bind.execute(
text(
"""
UPDATE file_record fr
SET file_id = CONCAT('plaintext_', uf.new_id::text)
FROM user_file uf
WHERE LOWER(fr.file_origin::text) = 'plaintext_cache'
AND fr.file_id = CONCAT('plaintext_', uf.id::text)
"""
)
)
logger.info(
f"Updated {result.rowcount} plaintext file_record ids to UUID scheme"
)
# === Step 6: Ensure document_id_migrated default TRUE and backfill existing FALSE ===
# New records should default to migrated=True so the migration task won't run for them.
# Existing rows that had a legacy document_id should be marked as not migrated to be processed.
# Backfill existing records: if document_id is not null, set to FALSE
bind.execute(
text(
"""
UPDATE user_file
SET document_id_migrated = FALSE
WHERE document_id IS NOT NULL
"""
)
)
# === Step 7: Backfill user_file.status from index_attempt ===
logger.info("Backfilling user_file.status from index_attempt...")
# Update user_file status based on latest index attempt
# Using CTEs instead of temp tables for asyncpg compatibility
result = bind.execute(
text(
"""
WITH latest_attempt AS (
SELECT DISTINCT ON (ia.connector_credential_pair_id)
ia.connector_credential_pair_id,
ia.status
FROM index_attempt ia
ORDER BY ia.connector_credential_pair_id, ia.time_updated DESC
),
uf_to_ccp AS (
SELECT DISTINCT uf.id AS uf_id, ccp.id AS cc_pair_id
FROM user_file uf
JOIN document_by_connector_credential_pair dcc
ON dcc.id = REPLACE(uf.document_id, 'USER_FILE_CONNECTOR__', 'FILE_CONNECTOR__')
JOIN connector_credential_pair ccp
ON ccp.connector_id = dcc.connector_id
AND ccp.credential_id = dcc.credential_id
)
UPDATE user_file uf
SET status = CASE
WHEN la.status IN ('NOT_STARTED', 'IN_PROGRESS') THEN 'PROCESSING'
WHEN la.status = 'SUCCESS' THEN 'COMPLETED'
ELSE 'FAILED'
END
FROM uf_to_ccp ufc
LEFT JOIN latest_attempt la
ON la.connector_credential_pair_id = ufc.cc_pair_id
WHERE uf.id = ufc.uf_id
AND uf.status = 'PROCESSING'
"""
)
)
logger.info(f"Updated status for {result.rowcount} user_file records")
logger.info("Migration 2 (data preparation) completed successfully")
def downgrade() -> None:
"""Reset populated data to allow clean downgrade of schema."""
bind = op.get_bind()
inspector = sa.inspect(bind)
logger.info("Starting downgrade of data preparation...")
# Reset user_file columns to allow nulls before data removal
if "user_file" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("user_file")]
if "new_id" in columns:
op.alter_column(
"user_file",
"new_id",
nullable=True,
server_default=sa.text("gen_random_uuid()"),
)
# Optionally clear the data
# bind.execute(text("UPDATE user_file SET new_id = NULL"))
logger.info("Reset user_file.new_id to nullable")
# Reset persona__user_file.user_file_id_uuid
if "persona__user_file" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("persona__user_file")]
if "user_file_id_uuid" in columns:
op.alter_column("persona__user_file", "user_file_id_uuid", nullable=True)
# Optionally clear the data
# bind.execute(text("UPDATE persona__user_file SET user_file_id_uuid = NULL"))
logger.info("Reset persona__user_file.user_file_id_uuid to nullable")
# Note: We don't delete user_project records or reset chat_session.project_id
# as these might be in use and can be handled by the schema downgrade
# Reset user_file.status to default
if "user_file" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("user_file")]
if "status" in columns:
bind.execute(text("UPDATE user_file SET status = 'PROCESSING'"))
logger.info("Reset user_file.status to default")
logger.info("Downgrade completed successfully")


@@ -0,0 +1,261 @@
"""Migration 3: User file relationship migration
Revision ID: 16c37a30adf2
Revises: 0cd424f32b1d
Create Date: 2025-09-22 09:47:34.175596
This migration converts folder-based relationships to project-based relationships.
It migrates persona__user_folder to persona__user_file and populates project__user_file.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
import logging
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "16c37a30adf2"
down_revision = "0cd424f32b1d"
branch_labels = None
depends_on = None
def upgrade() -> None:
"""Migrate folder-based relationships to project-based relationships."""
bind = op.get_bind()
inspector = sa.inspect(bind)
# === Step 1: Migrate persona__user_folder to persona__user_file ===
table_names = inspector.get_table_names()
if "persona__user_folder" in table_names and "user_file" in table_names:
user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
has_new_id = "new_id" in user_file_columns
if has_new_id and "folder_id" in user_file_columns:
logger.info(
"Migrating persona__user_folder relationships to persona__user_file..."
)
# Count relationships to migrate (asyncpg-compatible)
count_query = text(
"""
SELECT COUNT(*)
FROM (
SELECT DISTINCT puf.persona_id, uf.id
FROM persona__user_folder puf
JOIN user_file uf ON uf.folder_id = puf.user_folder_id
WHERE NOT EXISTS (
SELECT 1
FROM persona__user_file p2
WHERE p2.persona_id = puf.persona_id
AND p2.user_file_id = uf.id
)
) AS distinct_pairs
"""
)
to_migrate = bind.execute(count_query).scalar_one()
if to_migrate > 0:
logger.info(f"Creating {to_migrate} persona-file relationships...")
# Migrate in batches to avoid memory issues
batch_size = 10000
total_inserted = 0
while True:
# Insert batch directly using subquery (asyncpg compatible)
result = bind.execute(
text(
"""
INSERT INTO persona__user_file (persona_id, user_file_id, user_file_id_uuid)
SELECT DISTINCT puf.persona_id, uf.id as file_id, uf.new_id
FROM persona__user_folder puf
JOIN user_file uf ON uf.folder_id = puf.user_folder_id
WHERE NOT EXISTS (
SELECT 1
FROM persona__user_file p2
WHERE p2.persona_id = puf.persona_id
AND p2.user_file_id = uf.id
)
LIMIT :batch_size
"""
),
{"batch_size": batch_size},
)
inserted = result.rowcount
total_inserted += inserted
if inserted < batch_size:
break
logger.info(
f" Migrated {total_inserted}/{to_migrate} relationships..."
)
logger.info(
f"Created {total_inserted} persona__user_file relationships"
)
# === Step 2: Add foreign key for chat_session.project_id ===
chat_session_fks = inspector.get_foreign_keys("chat_session")
fk_exists = any(
fk["name"] == "fk_chat_session_project_id" for fk in chat_session_fks
)
if not fk_exists:
logger.info("Adding foreign key constraint for chat_session.project_id...")
op.create_foreign_key(
"fk_chat_session_project_id",
"chat_session",
"user_project",
["project_id"],
["id"],
)
logger.info("Added foreign key constraint")
# === Step 3: Populate project__user_file from user_file.folder_id ===
user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
has_new_id = "new_id" in user_file_columns
if has_new_id and "folder_id" in user_file_columns:
logger.info("Populating project__user_file from folder relationships...")
# Count relationships to create
count_query = text(
"""
SELECT COUNT(*)
FROM user_file uf
WHERE uf.folder_id IS NOT NULL
AND NOT EXISTS (
SELECT 1
FROM project__user_file puf
WHERE puf.project_id = uf.folder_id
AND puf.user_file_id = uf.new_id
)
"""
)
to_create = bind.execute(count_query).scalar_one()
if to_create > 0:
logger.info(f"Creating {to_create} project-file relationships...")
# Insert in batches
batch_size = 10000
total_inserted = 0
while True:
result = bind.execute(
text(
"""
INSERT INTO project__user_file (project_id, user_file_id)
SELECT uf.folder_id, uf.new_id
FROM user_file uf
WHERE uf.folder_id IS NOT NULL
AND NOT EXISTS (
SELECT 1
FROM project__user_file puf
WHERE puf.project_id = uf.folder_id
AND puf.user_file_id = uf.new_id
)
LIMIT :batch_size
ON CONFLICT (project_id, user_file_id) DO NOTHING
"""
),
{"batch_size": batch_size},
)
inserted = result.rowcount
total_inserted += inserted
if inserted < batch_size:
break
logger.info(f" Created {total_inserted}/{to_create} relationships...")
logger.info(f"Created {total_inserted} project__user_file relationships")
# === Step 4: Create index on chat_session.project_id ===
try:
indexes = [ix.get("name") for ix in inspector.get_indexes("chat_session")]
except Exception:
indexes = []
if "ix_chat_session_project_id" not in indexes:
logger.info("Creating index on chat_session.project_id...")
op.create_index(
"ix_chat_session_project_id", "chat_session", ["project_id"], unique=False
)
logger.info("Created index")
logger.info("Migration 3 (relationship migration) completed successfully")
def downgrade() -> None:
"""Remove migrated relationships and constraints."""
bind = op.get_bind()
inspector = sa.inspect(bind)
logger.info("Starting downgrade of relationship migration...")
# Drop index on chat_session.project_id
try:
indexes = [ix.get("name") for ix in inspector.get_indexes("chat_session")]
if "ix_chat_session_project_id" in indexes:
op.drop_index("ix_chat_session_project_id", "chat_session")
logger.info("Dropped index on chat_session.project_id")
except Exception:
pass
# Drop foreign key constraint
try:
chat_session_fks = inspector.get_foreign_keys("chat_session")
fk_exists = any(
fk["name"] == "fk_chat_session_project_id" for fk in chat_session_fks
)
if fk_exists:
op.drop_constraint(
"fk_chat_session_project_id", "chat_session", type_="foreignkey"
)
logger.info("Dropped foreign key constraint on chat_session.project_id")
except Exception:
pass
# Clear project__user_file relationships (but keep the table for migration 1 to handle)
if "project__user_file" in inspector.get_table_names():
result = bind.execute(text("DELETE FROM project__user_file"))
logger.info(f"Cleared {result.rowcount} records from project__user_file")
# Remove migrated persona__user_file relationships
# Only remove those that came from folder relationships
if all(
table in inspector.get_table_names()
for table in ["persona__user_file", "persona__user_folder", "user_file"]
):
user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
if "folder_id" in user_file_columns:
result = bind.execute(
text(
"""
DELETE FROM persona__user_file puf
WHERE EXISTS (
SELECT 1
FROM user_file uf
JOIN persona__user_folder puf2
ON puf2.user_folder_id = uf.folder_id
WHERE puf.persona_id = puf2.persona_id
AND puf.user_file_id = uf.id
)
"""
)
)
logger.info(
f"Removed {result.rowcount} migrated persona__user_file relationships"
)
logger.info("Downgrade completed successfully")


@@ -0,0 +1,29 @@
"""add is_clarification to chat_message
Revision ID: 18b5b2524446
Revises: 87c52ec39f84
Create Date: 2025-01-16
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "18b5b2524446"
down_revision = "87c52ec39f84"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"chat_message",
sa.Column(
"is_clarification", sa.Boolean(), nullable=False, server_default="false"
),
)
def downgrade() -> None:
op.drop_column("chat_message", "is_clarification")


@@ -0,0 +1,89 @@
"""add internet search and content provider tables
Revision ID: 1f2a3b4c5d6e
Revises: 9drpiiw74ljy
Create Date: 2025-11-10 19:45:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "1f2a3b4c5d6e"
down_revision = "9drpiiw74ljy"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"internet_search_provider",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column("name", sa.String(), nullable=False, unique=True),
sa.Column("provider_type", sa.String(), nullable=False),
sa.Column("api_key", sa.LargeBinary(), nullable=True),
sa.Column("config", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column(
"is_active", sa.Boolean(), nullable=False, server_default=sa.text("false")
),
sa.Column(
"time_created",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"time_updated",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
)
op.create_index(
"ix_internet_search_provider_is_active",
"internet_search_provider",
["is_active"],
)
op.create_table(
"internet_content_provider",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column("name", sa.String(), nullable=False, unique=True),
sa.Column("provider_type", sa.String(), nullable=False),
sa.Column("api_key", sa.LargeBinary(), nullable=True),
sa.Column("config", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column(
"is_active", sa.Boolean(), nullable=False, server_default=sa.text("false")
),
sa.Column(
"time_created",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"time_updated",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
)
op.create_index(
"ix_internet_content_provider_is_active",
"internet_content_provider",
["is_active"],
)
def downgrade() -> None:
op.drop_index(
"ix_internet_content_provider_is_active", table_name="internet_content_provider"
)
op.drop_table("internet_content_provider")
op.drop_index(
"ix_internet_search_provider_is_active", table_name="internet_search_provider"
)
op.drop_table("internet_search_provider")


@@ -12,8 +12,8 @@ import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "23957775e5f5"
down_revision = "bc9771dccadf"
branch_labels = None # type: ignore
depends_on = None # type: ignore
branch_labels = None
depends_on = None
def upgrade() -> None:


@@ -0,0 +1,27 @@
"""add last refreshed at mcp server
Revision ID: 2a391f840e85
Revises: 4cebcbc9b2ae
Create Date: 2025-12-06 15:19:59.766066
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "2a391f840e85"
down_revision = "4cebcbc9b2ae"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"mcp_server",
sa.Column("last_refreshed_at", sa.DateTime(timezone=True), nullable=True),
)
def downgrade() -> None:
op.drop_column("mcp_server", "last_refreshed_at")


@@ -0,0 +1,72 @@
"""add switchover_type field and remove background_reindex_enabled
Revision ID: 2acdef638fc2
Revises: a4f23d6b71c8
Create Date: 2025-01-XX XX:XX:XX.XXXXXX
"""
from alembic import op
import sqlalchemy as sa
from onyx.db.enums import SwitchoverType
# revision identifiers, used by Alembic.
revision = "2acdef638fc2"
down_revision = "a4f23d6b71c8"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add switchover_type column with default value of REINDEX
op.add_column(
"search_settings",
sa.Column(
"switchover_type",
sa.Enum(SwitchoverType, native_enum=False),
nullable=False,
server_default=SwitchoverType.REINDEX.value,
),
)
# Migrate existing data: set switchover_type based on background_reindex_enabled
# REINDEX where background_reindex_enabled=True, INSTANT where False
op.execute(
"""
UPDATE search_settings
SET switchover_type = CASE
WHEN background_reindex_enabled = true THEN 'REINDEX'
ELSE 'INSTANT'
END
"""
)
# Remove the background_reindex_enabled column (replaced by switchover_type)
op.drop_column("search_settings", "background_reindex_enabled")
def downgrade() -> None:
# Re-add the background_reindex_enabled column with default value of True
op.add_column(
"search_settings",
sa.Column(
"background_reindex_enabled",
sa.Boolean(),
nullable=False,
server_default="true",
),
)
# Set background_reindex_enabled based on switchover_type
op.execute(
"""
UPDATE search_settings
SET background_reindex_enabled = CASE
WHEN switchover_type = 'INSTANT' THEN false
ELSE true
END
"""
)
# Remove the switchover_type column
op.drop_column("search_settings", "switchover_type")


@@ -0,0 +1,228 @@
"""Migration 6: User file schema cleanup
Revision ID: 2b75d0a8ffcb
Revises: 3a78dba1080a
Create Date: 2025-09-22 10:09:26.375377
This migration removes legacy columns and tables after data migration is complete.
It should only be run after verifying all data has been successfully migrated.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
import logging
import fastapi_users_db_sqlalchemy
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "2b75d0a8ffcb"
down_revision = "3a78dba1080a"
branch_labels = None
depends_on = None
def upgrade() -> None:
"""Remove legacy columns and tables."""
bind = op.get_bind()
inspector = sa.inspect(bind)
logger.info("Starting schema cleanup...")
# === Step 1: Verify data migration is complete ===
logger.info("Verifying data migration completion...")
# Check if any chat sessions still have folder_id references
chat_session_columns = [
col["name"] for col in inspector.get_columns("chat_session")
]
if "folder_id" in chat_session_columns:
orphaned_count = bind.execute(
text(
"""
SELECT COUNT(*) FROM chat_session
WHERE folder_id IS NOT NULL AND project_id IS NULL
"""
)
).scalar_one()
if orphaned_count > 0:
logger.warning(
f"WARNING: {orphaned_count} chat_session records still have "
f"folder_id without project_id. Proceeding anyway."
)
# === Step 2: Drop chat_session.folder_id ===
if "folder_id" in chat_session_columns:
logger.info("Dropping chat_session.folder_id...")
# Drop foreign key constraint first
op.execute(
"ALTER TABLE chat_session DROP CONSTRAINT IF EXISTS chat_session_chat_folder_fk"
)
op.execute(
"ALTER TABLE chat_session DROP CONSTRAINT IF EXISTS chat_session_folder_fk"
)
# Drop the column
op.drop_column("chat_session", "folder_id")
logger.info("Dropped chat_session.folder_id")
# === Step 3: Drop persona__user_folder table ===
if "persona__user_folder" in inspector.get_table_names():
logger.info("Dropping persona__user_folder table...")
# Check for any remaining data
remaining = bind.execute(
text("SELECT COUNT(*) FROM persona__user_folder")
).scalar_one()
if remaining > 0:
logger.warning(
f"WARNING: Dropping persona__user_folder with {remaining} records"
)
op.drop_table("persona__user_folder")
logger.info("Dropped persona__user_folder table")
# === Step 4: Drop chat_folder table ===
if "chat_folder" in inspector.get_table_names():
logger.info("Dropping chat_folder table...")
# Check for any remaining data
remaining = bind.execute(text("SELECT COUNT(*) FROM chat_folder")).scalar_one()
if remaining > 0:
logger.warning(f"WARNING: Dropping chat_folder with {remaining} records")
op.drop_table("chat_folder")
logger.info("Dropped chat_folder table")
# === Step 5: Drop user_file legacy columns ===
user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
# Drop folder_id
if "folder_id" in user_file_columns:
logger.info("Dropping user_file.folder_id...")
op.drop_column("user_file", "folder_id")
logger.info("Dropped user_file.folder_id")
# Drop cc_pair_id (already handled in migration 5, but be sure)
if "cc_pair_id" in user_file_columns:
logger.info("Dropping user_file.cc_pair_id...")
# Drop any remaining foreign key constraints
bind.execute(
text(
"""
DO $$
DECLARE r RECORD;
BEGIN
FOR r IN (
SELECT conname
FROM pg_constraint c
JOIN pg_class t ON c.conrelid = t.oid
WHERE c.contype = 'f'
AND t.relname = 'user_file'
AND EXISTS (
SELECT 1 FROM pg_attribute a
WHERE a.attrelid = t.oid
AND a.attname = 'cc_pair_id'
)
) LOOP
EXECUTE format('ALTER TABLE user_file DROP CONSTRAINT IF EXISTS %I', r.conname);
END LOOP;
END$$;
"""
)
)
op.drop_column("user_file", "cc_pair_id")
logger.info("Dropped user_file.cc_pair_id")
# === Step 6: Clean up any remaining constraints ===
logger.info("Cleaning up remaining constraints...")
# Drop any unique constraints on removed columns
op.execute(
"ALTER TABLE user_file DROP CONSTRAINT IF EXISTS user_file_cc_pair_id_key"
)
logger.info("Migration 6 (schema cleanup) completed successfully")
logger.info("Legacy schema has been fully removed")
def downgrade() -> None:
"""Recreate dropped columns and tables (structure only, no data)."""
bind = op.get_bind()
inspector = sa.inspect(bind)
logger.warning("Downgrading schema cleanup - recreating structure only, no data!")
# Recreate user_file columns
if "user_file" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("user_file")]
if "cc_pair_id" not in columns:
op.add_column(
"user_file", sa.Column("cc_pair_id", sa.Integer(), nullable=True)
)
if "folder_id" not in columns:
op.add_column(
"user_file", sa.Column("folder_id", sa.Integer(), nullable=True)
)
# Recreate persona__user_folder table
if "persona__user_folder" not in inspector.get_table_names():
op.create_table(
"persona__user_folder",
sa.Column("persona_id", sa.Integer(), nullable=False),
sa.Column("user_folder_id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("persona_id", "user_folder_id"),
sa.ForeignKeyConstraint(["persona_id"], ["persona.id"]),
sa.ForeignKeyConstraint(["user_folder_id"], ["user_project.id"]),
)
# Recreate chat_folder table and related structures
if "chat_folder" not in inspector.get_table_names():
op.create_table(
"chat_folder",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column(
"user_id",
fastapi_users_db_sqlalchemy.generics.GUID(),
nullable=True,
),
sa.Column("name", sa.String(), nullable=True),
sa.Column("display_priority", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["user.id"],
name="chat_folder_user_id_fkey",
),
sa.PrimaryKeyConstraint("id"),
)
# Add folder_id back to chat_session
if "chat_session" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("chat_session")]
if "folder_id" not in columns:
op.add_column(
"chat_session", sa.Column("folder_id", sa.Integer(), nullable=True)
)
# Add foreign key if chat_folder exists
if "chat_folder" in inspector.get_table_names():
op.create_foreign_key(
"chat_session_chat_folder_fk",
"chat_session",
"chat_folder",
["folder_id"],
["id"],
)
logger.info("Downgrade completed - structure recreated but data is lost")


@@ -0,0 +1,46 @@
"""usage_limits
Revision ID: 2b90f3af54b8
Revises: 9a0296d7421e
Create Date: 2026-01-03 16:55:30.449692
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "2b90f3af54b8"
down_revision = "9a0296d7421e"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"tenant_usage",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column(
"window_start", sa.DateTime(timezone=True), nullable=False, index=True
),
sa.Column("llm_cost_cents", sa.Float(), nullable=False, server_default="0.0"),
sa.Column("chunks_indexed", sa.Integer(), nullable=False, server_default="0"),
sa.Column("api_calls", sa.Integer(), nullable=False, server_default="0"),
sa.Column(
"non_streaming_api_calls", sa.Integer(), nullable=False, server_default="0"
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=True,
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("window_start", name="uq_tenant_usage_window"),
)
def downgrade() -> None:
op.drop_index("ix_tenant_usage_window_start", table_name="tenant_usage")
op.drop_table("tenant_usage")


@@ -0,0 +1,298 @@
"""Migration 5: User file legacy data cleanup
Revision ID: 3a78dba1080a
Revises: 7cc3fcc116c1
Create Date: 2025-09-22 10:04:27.986294
This migration removes legacy user-file documents and connector_credential_pairs.
It performs bulk deletions of obsolete data after the UUID migration.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as psql
from sqlalchemy import text
import logging
from typing import List
import uuid
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "3a78dba1080a"
down_revision = "7cc3fcc116c1"
branch_labels = None
depends_on = None
def batch_delete(
bind: sa.engine.Connection,
table_name: str,
id_column: str,
ids: List[str | int | uuid.UUID],
batch_size: int = 1000,
id_type: str = "int",
) -> int:
"""Delete records in batches to avoid memory issues and timeouts."""
total_count = len(ids)
if total_count == 0:
return 0
logger.info(
f"Starting batch deletion of {total_count} records from {table_name}..."
)
# Determine appropriate ARRAY type
if id_type == "uuid":
array_type = psql.ARRAY(psql.UUID(as_uuid=True))
elif id_type == "int":
array_type = psql.ARRAY(sa.Integer())
else:
array_type = psql.ARRAY(sa.String())
total_deleted = 0
failed_batches = []
for i in range(0, total_count, batch_size):
batch_ids = ids[i : i + batch_size]
try:
stmt = text(
f"DELETE FROM {table_name} WHERE {id_column} = ANY(:ids)"
).bindparams(sa.bindparam("ids", value=batch_ids, type_=array_type))
result = bind.execute(stmt)
total_deleted += result.rowcount
# Log progress every 10 batches or at completion
batch_num = (i // batch_size) + 1
if batch_num % 10 == 0 or i + batch_size >= total_count:
logger.info(
f" Deleted {min(i + batch_size, total_count)}/{total_count} records "
f"({total_deleted} actual) from {table_name}"
)
except Exception as e:
logger.error(f"Failed to delete batch {(i // batch_size) + 1}: {e}")
failed_batches.append((i, min(i + batch_size, total_count)))
if failed_batches:
logger.warning(
f"Failed to delete {len(failed_batches)} batches from {table_name}. "
f"Total deleted: {total_deleted}/{total_count}"
)
# Fail the migration to avoid silently succeeding on partial cleanup
raise RuntimeError(
f"Batch deletion failed for {table_name}: "
f"{len(failed_batches)} failed batches out of "
f"{(total_count + batch_size - 1) // batch_size}."
)
return total_deleted
def upgrade() -> None:
"""Remove legacy user-file documents and connector_credential_pairs."""
bind = op.get_bind()
inspector = sa.inspect(bind)
logger.info("Starting legacy data cleanup...")
# === Step 1: Identify and delete user-file documents ===
logger.info("Identifying user-file documents to delete...")
# Get document IDs to delete
doc_rows = bind.execute(
text(
"""
SELECT DISTINCT dcc.id AS document_id
FROM document_by_connector_credential_pair dcc
JOIN connector_credential_pair u
ON u.connector_id = dcc.connector_id
AND u.credential_id = dcc.credential_id
WHERE u.is_user_file IS TRUE
"""
)
).fetchall()
doc_ids = [r[0] for r in doc_rows]
if doc_ids:
logger.info(f"Found {len(doc_ids)} user-file documents to delete")
# Delete dependent rows first
tables_to_clean = [
("document_retrieval_feedback", "document_id"),
("document__tag", "document_id"),
("chunk_stats", "document_id"),
]
for table_name, column_name in tables_to_clean:
if table_name in inspector.get_table_names():
# document_id is a string in these tables
deleted = batch_delete(
bind, table_name, column_name, doc_ids, id_type="str"
)
logger.info(f"Deleted {deleted} records from {table_name}")
# Delete document_by_connector_credential_pair entries
deleted = batch_delete(
bind, "document_by_connector_credential_pair", "id", doc_ids, id_type="str"
)
logger.info(f"Deleted {deleted} document_by_connector_credential_pair records")
# Delete documents themselves
deleted = batch_delete(bind, "document", "id", doc_ids, id_type="str")
logger.info(f"Deleted {deleted} document records")
else:
logger.info("No user-file documents found to delete")
# === Step 2: Clean up user-file connector_credential_pairs ===
logger.info("Cleaning up user-file connector_credential_pairs...")
# Get cc_pair IDs
cc_pair_rows = bind.execute(
text(
"""
SELECT id AS cc_pair_id
FROM connector_credential_pair
WHERE is_user_file IS TRUE
"""
)
).fetchall()
cc_pair_ids = [r[0] for r in cc_pair_rows]
if cc_pair_ids:
logger.info(
f"Found {len(cc_pair_ids)} user-file connector_credential_pairs to clean up"
)
# Delete related records
# Clean child tables first to satisfy foreign key constraints,
# then the parent tables
tables_to_clean = [
("index_attempt_errors", "connector_credential_pair_id"),
("index_attempt", "connector_credential_pair_id"),
("background_error", "cc_pair_id"),
("document_set__connector_credential_pair", "connector_credential_pair_id"),
("user_group__connector_credential_pair", "cc_pair_id"),
]
for table_name, column_name in tables_to_clean:
if table_name in inspector.get_table_names():
deleted = batch_delete(
bind, table_name, column_name, cc_pair_ids, id_type="int"
)
logger.info(f"Deleted {deleted} records from {table_name}")
# === Step 3: Identify connectors and credentials to delete ===
logger.info("Identifying orphaned connectors and credentials...")
# Get connectors used only by user-file cc_pairs
connector_rows = bind.execute(
text(
"""
SELECT DISTINCT ccp.connector_id
FROM connector_credential_pair ccp
WHERE ccp.is_user_file IS TRUE
AND ccp.connector_id != 0 -- Exclude system default
AND NOT EXISTS (
SELECT 1
FROM connector_credential_pair c2
WHERE c2.connector_id = ccp.connector_id
AND c2.is_user_file IS NOT TRUE
)
"""
)
).fetchall()
userfile_only_connector_ids = [r[0] for r in connector_rows]
# Get credentials used only by user-file cc_pairs
credential_rows = bind.execute(
text(
"""
SELECT DISTINCT ccp.credential_id
FROM connector_credential_pair ccp
WHERE ccp.is_user_file IS TRUE
AND ccp.credential_id != 0 -- Exclude public/default
AND NOT EXISTS (
SELECT 1
FROM connector_credential_pair c2
WHERE c2.credential_id = ccp.credential_id
AND c2.is_user_file IS NOT TRUE
)
"""
)
).fetchall()
userfile_only_credential_ids = [r[0] for r in credential_rows]
# === Step 4: Delete the cc_pairs themselves ===
if cc_pair_ids:
# Remove FK dependency from user_file first
bind.execute(
text(
"""
DO $$
DECLARE r RECORD;
BEGIN
FOR r IN (
SELECT conname
FROM pg_constraint c
JOIN pg_class t ON c.conrelid = t.oid
JOIN pg_class ft ON c.confrelid = ft.oid
WHERE c.contype = 'f'
AND t.relname = 'user_file'
AND ft.relname = 'connector_credential_pair'
) LOOP
EXECUTE format('ALTER TABLE user_file DROP CONSTRAINT IF EXISTS %I', r.conname);
END LOOP;
END$$;
"""
)
)
# Delete cc_pairs
deleted = batch_delete(
bind, "connector_credential_pair", "id", cc_pair_ids, id_type="int"
)
logger.info(f"Deleted {deleted} connector_credential_pair records")
# === Step 5: Delete orphaned connectors ===
if userfile_only_connector_ids:
deleted = batch_delete(
bind, "connector", "id", userfile_only_connector_ids, id_type="int"
)
logger.info(f"Deleted {deleted} orphaned connector records")
# === Step 6: Delete orphaned credentials ===
if userfile_only_credential_ids:
# Clean up credential__user_group mappings first
deleted = batch_delete(
bind,
"credential__user_group",
"credential_id",
userfile_only_credential_ids,
id_type="int",
)
logger.info(f"Deleted {deleted} credential__user_group records")
# Delete credentials
deleted = batch_delete(
bind, "credential", "id", userfile_only_credential_ids, id_type="int"
)
logger.info(f"Deleted {deleted} orphaned credential records")
logger.info("Migration 5 (legacy data cleanup) completed successfully")
def downgrade() -> None:
"""Cannot restore deleted data - requires backup restoration."""
logger.error("CRITICAL: Downgrading data cleanup cannot restore deleted data!")
logger.error("Data restoration requires backup files or database backup.")
# raise NotImplementedError(
# "Downgrade of legacy data cleanup is not supported. "
# "Deleted data must be restored from backups."
# )
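batch_delete binds each id batch as a typed Postgres array and compares with = ANY(:ids) rather than expanding an IN (...) list, so the statement text stays identical across batches. A stripped-down sketch of that binding (table and id type here are illustrative):

import sqlalchemy as sa
from sqlalchemy import text
from sqlalchemy.dialects import postgresql as psql

def delete_ids(bind, ids: list[str]) -> int:
    stmt = text("DELETE FROM document WHERE id = ANY(:ids)").bindparams(
        sa.bindparam("ids", value=ids, type_=psql.ARRAY(sa.String()))
    )
    return bind.execute(stmt).rowcount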


@@ -0,0 +1,89 @@
"""seed_exa_provider_from_env
Revision ID: 3c9a65f1207f
Revises: 1f2a3b4c5d6e
Create Date: 2025-11-20 19:18:00.000000
"""
from __future__ import annotations
import os
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from dotenv import load_dotenv, find_dotenv
from onyx.utils.encryption import encrypt_string_to_bytes
revision = "3c9a65f1207f"
down_revision = "1f2a3b4c5d6e"
branch_labels = None
depends_on = None
EXA_PROVIDER_NAME = "Exa"
def _get_internet_search_table(metadata: sa.MetaData) -> sa.Table:
return sa.Table(
"internet_search_provider",
metadata,
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("name", sa.String),
sa.Column("provider_type", sa.String),
sa.Column("api_key", sa.LargeBinary),
sa.Column("config", postgresql.JSONB),
sa.Column("is_active", sa.Boolean),
sa.Column(
"time_created",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"time_updated",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
)
def upgrade() -> None:
load_dotenv(find_dotenv())
exa_api_key = os.environ.get("EXA_API_KEY")
if not exa_api_key:
return
bind = op.get_bind()
metadata = sa.MetaData()
table = _get_internet_search_table(metadata)
existing = bind.execute(
sa.select(table.c.id).where(table.c.name == EXA_PROVIDER_NAME)
).first()
if existing:
return
encrypted_key = encrypt_string_to_bytes(exa_api_key)
has_active_provider = bind.execute(
sa.select(table.c.id).where(table.c.is_active.is_(True))
).first()
bind.execute(
table.insert().values(
name=EXA_PROVIDER_NAME,
provider_type="exa",
api_key=encrypted_key,
config=None,
is_active=not bool(has_active_provider),
)
)
def downgrade() -> None:
return
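Reading the seeded key back presumably goes through the inverse helper. A sketch assuming a decrypt_bytes_to_string counterpart exists next to encrypt_string_to_bytes (that name is an assumption, not confirmed by this migration):

from sqlalchemy import text
from onyx.utils.encryption import decrypt_bytes_to_string  # assumed counterpart

def load_exa_api_key(bind) -> str | None:
    row = bind.execute(
        text("SELECT api_key FROM internet_search_provider WHERE name = 'Exa'")
    ).first()
    return decrypt_bytes_to_string(row[0]) if row and row[0] else None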


@@ -0,0 +1,121 @@
"""add_oauth_config_and_user_tokens
Revision ID: 3d1cca026fe8
Revises: c8a93a2af083
Create Date: 2025-10-21 13:27:34.274721
"""
from alembic import op
import fastapi_users_db_sqlalchemy
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "3d1cca026fe8"
down_revision = "c8a93a2af083"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create oauth_config table
op.create_table(
"oauth_config",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(), nullable=False),
sa.Column("authorization_url", sa.Text(), nullable=False),
sa.Column("token_url", sa.Text(), nullable=False),
sa.Column("client_id", sa.LargeBinary(), nullable=False),
sa.Column("client_secret", sa.LargeBinary(), nullable=False),
sa.Column("scopes", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column(
"additional_params",
postgresql.JSONB(astext_type=sa.Text()),
nullable=True,
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name"),
)
# Create oauth_user_token table
op.create_table(
"oauth_user_token",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("oauth_config_id", sa.Integer(), nullable=False),
sa.Column(
"user_id",
fastapi_users_db_sqlalchemy.generics.GUID(),
nullable=False,
),
sa.Column("token_data", sa.LargeBinary(), nullable=False),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(
["oauth_config_id"], ["oauth_config.id"], ondelete="CASCADE"
),
sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("oauth_config_id", "user_id", name="uq_oauth_user_token"),
)
# Create index on user_id for efficient user-based token lookups
# Note: unique constraint on (oauth_config_id, user_id) already creates
# an index for config-based lookups
op.create_index(
"ix_oauth_user_token_user_id",
"oauth_user_token",
["user_id"],
)
# Add oauth_config_id column to tool table
op.add_column("tool", sa.Column("oauth_config_id", sa.Integer(), nullable=True))
# Create foreign key from tool to oauth_config
op.create_foreign_key(
"tool_oauth_config_fk",
"tool",
"oauth_config",
["oauth_config_id"],
["id"],
ondelete="SET NULL",
)
def downgrade() -> None:
# Drop foreign key from tool to oauth_config
op.drop_constraint("tool_oauth_config_fk", "tool", type_="foreignkey")
# Drop oauth_config_id column from tool table
op.drop_column("tool", "oauth_config_id")
# Drop index on user_id
op.drop_index("ix_oauth_user_token_user_id", table_name="oauth_user_token")
# Drop oauth_user_token table (will cascade delete tokens)
op.drop_table("oauth_user_token")
# Drop oauth_config table
op.drop_table("oauth_config")


@@ -0,0 +1,28 @@
"""reset userfile document_id_migrated field
Revision ID: 40926a4dab77
Revises: 64bd5677aeb6
Create Date: 2025-10-06 16:10:32.898668
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "40926a4dab77"
down_revision = "64bd5677aeb6"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Set all existing records to not migrated
op.execute(
"UPDATE user_file SET document_id_migrated = FALSE "
"WHERE document_id_migrated IS DISTINCT FROM FALSE;"
)
def downgrade() -> None:
# No-op
pass
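IS DISTINCT FROM FALSE is the NULL-safe form of the comparison, so the statement also resets rows the earlier backfill never touched:

# Per-row behavior of the WHERE clause above:
#   TRUE  IS DISTINCT FROM FALSE -> TRUE   (row reset)
#   NULL  IS DISTINCT FROM FALSE -> TRUE   (row reset; a plain != would yield NULL)
#   FALSE IS DISTINCT FROM FALSE -> FALSE  (row already FALSE, skipped)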


@@ -11,7 +11,7 @@ from pydantic import BaseModel, ConfigDict
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from onyx.llm.llm_provider_options import (
from onyx.llm.well_known_providers.llm_provider_options import (
fetch_model_names_for_provider_as_set,
fetch_visible_model_names_for_provider_as_set,
)


@@ -0,0 +1,27 @@
"""add tab_index to tool_call
Revision ID: 4cebcbc9b2ae
Revises: a1b2c3d4e5f6
Create Date: 2025-12-16
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "4cebcbc9b2ae"
down_revision = "a1b2c3d4e5f6"
branch_labels: None = None
depends_on: None = None
def upgrade() -> None:
op.add_column(
"tool_call",
sa.Column("tab_index", sa.Integer(), nullable=False, server_default="0"),
)
def downgrade() -> None:
op.drop_column("tool_call", "tab_index")


@@ -62,6 +62,11 @@ def upgrade() -> None:
)
"""
)
# Drop the temporary table to avoid conflicts if migration runs again
# (e.g., during upgrade -> downgrade -> upgrade cycles in tests)
op.execute("DROP TABLE IF EXISTS temp_connector_credential")
# If no exception was raised, alter the column
op.alter_column("credential", "source", nullable=True) # TODO modify
# # ### end Alembic commands ###


@@ -0,0 +1,104 @@
"""add_open_url_tool
Revision ID: 4f8a2b3c1d9e
Revises: a852cbe15577
Create Date: 2025-11-24 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "4f8a2b3c1d9e"
down_revision = "a852cbe15577"
branch_labels = None
depends_on = None
OPEN_URL_TOOL = {
"name": "OpenURLTool",
"display_name": "Open URL",
"description": (
"The Open URL Action allows the agent to fetch and read contents of web pages."
),
"in_code_tool_id": "OpenURLTool",
"enabled": True,
}
def upgrade() -> None:
conn = op.get_bind()
# Check if tool already exists
existing = conn.execute(
sa.text("SELECT id FROM tool WHERE in_code_tool_id = :in_code_tool_id"),
{"in_code_tool_id": OPEN_URL_TOOL["in_code_tool_id"]},
).fetchone()
if existing:
tool_id = existing[0]
# Update existing tool
conn.execute(
sa.text(
"""
UPDATE tool
SET name = :name,
display_name = :display_name,
description = :description
WHERE in_code_tool_id = :in_code_tool_id
"""
),
OPEN_URL_TOOL,
)
else:
# Insert new tool
conn.execute(
sa.text(
"""
INSERT INTO tool (name, display_name, description, in_code_tool_id, enabled)
VALUES (:name, :display_name, :description, :in_code_tool_id, :enabled)
"""
),
OPEN_URL_TOOL,
)
# Get the newly inserted tool's id
result = conn.execute(
sa.text("SELECT id FROM tool WHERE in_code_tool_id = :in_code_tool_id"),
{"in_code_tool_id": OPEN_URL_TOOL["in_code_tool_id"]},
).fetchone()
tool_id = result[0] # type: ignore
# Associate the tool with all existing personas
# Get all persona IDs
persona_ids = conn.execute(sa.text("SELECT id FROM persona")).fetchall()
for (persona_id,) in persona_ids:
# Check if association already exists
exists = conn.execute(
sa.text(
"""
SELECT 1 FROM persona__tool
WHERE persona_id = :persona_id AND tool_id = :tool_id
"""
),
{"persona_id": persona_id, "tool_id": tool_id},
).fetchone()
if not exists:
conn.execute(
sa.text(
"""
INSERT INTO persona__tool (persona_id, tool_id)
VALUES (:persona_id, :tool_id)
"""
),
{"persona_id": persona_id, "tool_id": tool_id},
)
def downgrade() -> None:
# We don't remove the tool on downgrade since it's fine to have it around.
# If we upgrade again, it will be a no-op.
pass
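The per-persona loop above issues two statements per persona. The same association can be done set-based in a single statement; a sketch of that alternative (not what this migration actually runs):

from sqlalchemy import text

def associate_tool_with_all_personas(conn, tool_id: int) -> int:
    result = conn.execute(
        text(
            """
            INSERT INTO persona__tool (persona_id, tool_id)
            SELECT p.id, :tool_id
            FROM persona p
            WHERE NOT EXISTS (
                SELECT 1 FROM persona__tool pt
                WHERE pt.persona_id = p.id AND pt.tool_id = :tool_id
            )
            """
        ),
        {"tool_id": tool_id},
    )
    return result.rowcount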


@@ -0,0 +1,35 @@
"""backend driven notification details
Revision ID: 5c3dca366b35
Revises: 9087b548dd69
Create Date: 2026-01-06 16:03:11.413724
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "5c3dca366b35"
down_revision = "9087b548dd69"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"notification",
sa.Column(
"title", sa.String(), nullable=False, server_default="New Notification"
),
)
op.add_column(
"notification",
sa.Column("description", sa.String(), nullable=True, server_default=""),
)
def downgrade() -> None:
op.drop_column("notification", "title")
op.drop_column("notification", "description")


@@ -0,0 +1,88 @@
"""add_personal_access_token_table
Revision ID: 5e1c073d48a3
Revises: 09995b8811eb
Create Date: 2025-10-30 17:30:24.308521
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "5e1c073d48a3"
down_revision = "09995b8811eb"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create personal_access_token table
op.create_table(
"personal_access_token",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(), nullable=False),
sa.Column("hashed_token", sa.String(length=64), nullable=False),
sa.Column("token_display", sa.String(), nullable=False),
sa.Column(
"user_id",
postgresql.UUID(as_uuid=True),
nullable=False,
),
sa.Column(
"expires_at",
sa.DateTime(timezone=True),
nullable=True,
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"last_used_at",
sa.DateTime(timezone=True),
nullable=True,
),
sa.Column(
"is_revoked",
sa.Boolean(),
server_default=sa.text("false"),
nullable=False,
),
sa.ForeignKeyConstraint(
["user_id"],
["user.id"],
ondelete="CASCADE",
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("hashed_token"),
)
# Create indexes
op.create_index(
"ix_personal_access_token_expires_at",
"personal_access_token",
["expires_at"],
unique=False,
)
op.create_index(
"ix_pat_user_created",
"personal_access_token",
["user_id", sa.text("created_at DESC")],
unique=False,
)
def downgrade() -> None:
# Drop indexes first
op.drop_index("ix_pat_user_created", table_name="personal_access_token")
op.drop_index(
"ix_personal_access_token_expires_at", table_name="personal_access_token"
)
# Drop table
op.drop_table("personal_access_token")


@@ -0,0 +1,55 @@
"""update_default_persona_prompt
Revision ID: 5e6f7a8b9c0d
Revises: 4f8a2b3c1d9e
Create Date: 2025-11-30 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "5e6f7a8b9c0d"
down_revision = "4f8a2b3c1d9e"
branch_labels = None
depends_on = None
DEFAULT_PERSONA_ID = 0
# ruff: noqa: E501, W605 start
DEFAULT_SYSTEM_PROMPT = """
You are a highly capable, thoughtful, and precise assistant. Your goal is to deeply understand the user's intent, ask clarifying questions when needed, think step-by-step through complex problems, provide clear and accurate answers, and proactively anticipate helpful follow-up information. Always prioritize being truthful, nuanced, insightful, and efficient.
The current date is [[CURRENT_DATETIME]].{citation_reminder_or_empty}
# Response Style
You use different text styles, bolding, emojis (sparingly), block quotes, and other formatting to make your responses more readable and engaging.
You use proper Markdown and LaTeX to format your responses for math, scientific, and chemical formulas, symbols, etc.: '$$\\n[expression]\\n$$' for standalone cases and '\\( [expression] \\)' when inline.
For code you prefer to use Markdown and specify the language.
You can use horizontal rules (---) to separate sections of your responses.
You can use Markdown tables to format your responses for data, lists, and other structured information.
""".lstrip()
# ruff: noqa: E501, W605 end
def upgrade() -> None:
conn = op.get_bind()
conn.execute(
sa.text(
"""
UPDATE persona
SET system_prompt = :system_prompt
WHERE id = :persona_id
"""
),
{"system_prompt": DEFAULT_SYSTEM_PROMPT, "persona_id": DEFAULT_PERSONA_ID},
)
def downgrade() -> None:
# We don't revert the system prompt on downgrade since we don't know
# what the previous value was. The new prompt is a reasonable default.
pass


@@ -0,0 +1,44 @@
"""add_created_at_in_project_userfile
Revision ID: 6436661d5b65
Revises: c7e9f4a3b2d1
Create Date: 2025-11-24 11:50:24.536052
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "6436661d5b65"
down_revision = "c7e9f4a3b2d1"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add created_at column to project__user_file table
op.add_column(
"project__user_file",
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
)
# Add composite index on (project_id, created_at DESC)
op.create_index(
"ix_project__user_file_project_id_created_at",
"project__user_file",
["project_id", sa.text("created_at DESC")],
)
def downgrade() -> None:
# Remove composite index on (project_id, created_at)
op.drop_index(
"ix_project__user_file_project_id_created_at", table_name="project__user_file"
)
# Remove created_at column from project__user_file table
op.drop_column("project__user_file", "created_at")


@@ -0,0 +1,37 @@
"""Add image input support to model config
Revision ID: 64bd5677aeb6
Revises: b30353be4eec
Create Date: 2025-09-28 15:48:12.003612
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "64bd5677aeb6"
down_revision = "b30353be4eec"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"model_configuration",
sa.Column("supports_image_input", sa.Boolean(), nullable=True),
)
# This appears to be left over from when model visibility was introduced as a
# nullable field. Set any NULL is_visible values to False.
connection = op.get_bind()
connection.execute(
sa.text(
"UPDATE model_configuration SET is_visible = false WHERE is_visible IS NULL"
)
)
def downgrade() -> None:
op.drop_column("model_configuration", "supports_image_input")


@@ -0,0 +1,75 @@
"""nullify_default_task_prompt
Revision ID: 699221885109
Revises: 7e490836d179
Create Date: 2025-12-30 10:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "699221885109"
down_revision = "7e490836d179"
branch_labels = None
depends_on = None
DEFAULT_PERSONA_ID = 0
def upgrade() -> None:
# Make task_prompt column nullable
# Note: The model had nullable=True but the DB column was NOT NULL until this point
op.alter_column(
"persona",
"task_prompt",
nullable=True,
)
# Set task_prompt to NULL for the default persona
conn = op.get_bind()
conn.execute(
sa.text(
"""
UPDATE persona
SET task_prompt = NULL
WHERE id = :persona_id
"""
),
{"persona_id": DEFAULT_PERSONA_ID},
)
def downgrade() -> None:
# Restore task_prompt to empty string for the default persona
conn = op.get_bind()
conn.execute(
sa.text(
"""
UPDATE persona
SET task_prompt = ''
WHERE id = :persona_id AND task_prompt IS NULL
"""
),
{"persona_id": DEFAULT_PERSONA_ID},
)
# Set any remaining NULL task_prompts to empty string before making non-nullable
conn.execute(
sa.text(
"""
UPDATE persona
SET task_prompt = ''
WHERE task_prompt IS NULL
"""
)
)
# Revert task_prompt column to not nullable
op.alter_column(
"persona",
"task_prompt",
nullable=False,
)


@@ -0,0 +1,37 @@
"""add queries and is web fetch to iteration answer
Revision ID: 6f4f86aef280
Revises: 03d710ccf29c
Create Date: 2025-10-14 18:08:30.920123
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "6f4f86aef280"
down_revision = "03d710ccf29c"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add is_web_fetch column
op.add_column(
"research_agent_iteration_sub_step",
sa.Column("is_web_fetch", sa.Boolean(), nullable=True),
)
# Add queries column
op.add_column(
"research_agent_iteration_sub_step",
sa.Column("queries", postgresql.JSONB(), nullable=True),
)
def downgrade() -> None:
op.drop_column("research_agent_iteration_sub_step", "queries")
op.drop_column("research_agent_iteration_sub_step", "is_web_fetch")


@@ -0,0 +1,54 @@
"""add image generation config table
Revision ID: 7206234e012a
Revises: 699221885109
Create Date: 2025-12-21 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "7206234e012a"
down_revision = "699221885109"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"image_generation_config",
sa.Column("image_provider_id", sa.String(), primary_key=True),
sa.Column("model_configuration_id", sa.Integer(), nullable=False),
sa.Column("is_default", sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(
["model_configuration_id"],
["model_configuration.id"],
ondelete="CASCADE",
),
)
op.create_index(
"ix_image_generation_config_is_default",
"image_generation_config",
["is_default"],
unique=False,
)
op.create_index(
"ix_image_generation_config_model_configuration_id",
"image_generation_config",
["model_configuration_id"],
unique=False,
)
def downgrade() -> None:
op.drop_index(
"ix_image_generation_config_model_configuration_id",
table_name="image_generation_config",
)
op.drop_index(
"ix_image_generation_config_is_default", table_name="image_generation_config"
)
op.drop_table("image_generation_config")


@@ -45,8 +45,23 @@ def upgrade() -> None:
def downgrade() -> None:
op.drop_constraint(
"chat_session_chat_folder_fk", "chat_session", type_="foreignkey"
)
op.drop_column("chat_session", "folder_id")
op.drop_table("chat_folder")
bind = op.get_bind()
inspector = sa.inspect(bind)
if "chat_session" in inspector.get_table_names():
chat_session_fks = {
fk.get("name") for fk in inspector.get_foreign_keys("chat_session")
}
if "chat_session_chat_folder_fk" in chat_session_fks:
op.drop_constraint(
"chat_session_chat_folder_fk", "chat_session", type_="foreignkey"
)
chat_session_columns = {
col["name"] for col in inspector.get_columns("chat_session")
}
if "folder_id" in chat_session_columns:
op.drop_column("chat_session", "folder_id")
if "chat_folder" in inspector.get_table_names():
op.drop_table("chat_folder")


@@ -10,7 +10,7 @@ from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from onyx.llm.llm_provider_options import (
from onyx.llm.well_known_providers.llm_provider_options import (
fetch_model_names_for_provider_as_set,
fetch_visible_model_names_for_provider_as_set,
)


@@ -0,0 +1,27 @@
"""Add display_name to model_configuration
Revision ID: 7bd55f264e1b
Revises: e8f0d2a38171
Create Date: 2025-12-04
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "7bd55f264e1b"
down_revision = "e8f0d2a38171"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"model_configuration",
sa.Column("display_name", sa.String(), nullable=True),
)
def downgrade() -> None:
op.drop_column("model_configuration", "display_name")

View File

@@ -0,0 +1,341 @@
"""Migration 4: User file UUID primary key swap
Revision ID: 7cc3fcc116c1
Revises: 16c37a30adf2
Create Date: 2025-09-22 09:54:38.292952
This migration performs the critical UUID primary key swap on the user_file table.
It updates all foreign key references to use UUIDs instead of integers.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as psql
import logging
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "7cc3fcc116c1"
down_revision = "16c37a30adf2"
branch_labels = None
depends_on = None
def upgrade() -> None:
"""Swap user_file primary key from integer to UUID."""
bind = op.get_bind()
inspector = sa.inspect(bind)
# Verify we're in the expected state
user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
if "new_id" not in user_file_columns:
logger.warning(
"user_file.new_id not found - migration may have already been applied"
)
return
logger.info("Starting UUID primary key swap...")
# === Step 1: Update persona__user_file foreign key to UUID ===
logger.info("Updating persona__user_file foreign key...")
# Drop existing foreign key constraints
op.execute(
"ALTER TABLE persona__user_file DROP CONSTRAINT IF EXISTS persona__user_file_user_file_id_uuid_fkey"
)
op.execute(
"ALTER TABLE persona__user_file DROP CONSTRAINT IF EXISTS persona__user_file_user_file_id_fkey"
)
# Create new foreign key to user_file.new_id
op.create_foreign_key(
"persona__user_file_user_file_id_fkey",
"persona__user_file",
"user_file",
local_cols=["user_file_id_uuid"],
remote_cols=["new_id"],
)
# Drop the old integer column and rename UUID column
op.execute("ALTER TABLE persona__user_file DROP COLUMN IF EXISTS user_file_id")
op.alter_column(
"persona__user_file",
"user_file_id_uuid",
new_column_name="user_file_id",
existing_type=psql.UUID(as_uuid=True),
nullable=False,
)
# Recreate composite primary key
op.execute(
"ALTER TABLE persona__user_file DROP CONSTRAINT IF EXISTS persona__user_file_pkey"
)
op.execute(
"ALTER TABLE persona__user_file ADD PRIMARY KEY (persona_id, user_file_id)"
)
logger.info("Updated persona__user_file to use UUID foreign key")
# === Step 2: Perform the primary key swap on user_file ===
logger.info("Swapping user_file primary key to UUID...")
# Drop the primary key constraint
op.execute("ALTER TABLE user_file DROP CONSTRAINT IF EXISTS user_file_pkey")
# Drop the old id column and rename new_id to id
op.execute("ALTER TABLE user_file DROP COLUMN IF EXISTS id")
op.alter_column(
"user_file",
"new_id",
new_column_name="id",
existing_type=psql.UUID(as_uuid=True),
nullable=False,
)
# Set default for new inserts
op.alter_column(
"user_file",
"id",
existing_type=psql.UUID(as_uuid=True),
server_default=sa.text("gen_random_uuid()"),
)
# Create new primary key
op.execute("ALTER TABLE user_file ADD PRIMARY KEY (id)")
logger.info("Swapped user_file primary key to UUID")
# === Step 3: Update foreign key constraints ===
logger.info("Updating foreign key constraints...")
# Recreate persona__user_file foreign key to point to user_file.id
# Drop existing FK first to break dependency on the unique constraint
op.execute(
"ALTER TABLE persona__user_file DROP CONSTRAINT IF EXISTS persona__user_file_user_file_id_fkey"
)
# Drop the unique constraint on (formerly) new_id BEFORE recreating the FK,
# so the FK will bind to the primary key instead of the unique index.
op.execute("ALTER TABLE user_file DROP CONSTRAINT IF EXISTS uq_user_file_new_id")
# Now recreate FK to the primary key column
op.create_foreign_key(
"persona__user_file_user_file_id_fkey",
"persona__user_file",
"user_file",
local_cols=["user_file_id"],
remote_cols=["id"],
)
# Add foreign keys for project__user_file
existing_fks = inspector.get_foreign_keys("project__user_file")
has_user_file_fk = any(
fk.get("referred_table") == "user_file"
and fk.get("constrained_columns") == ["user_file_id"]
for fk in existing_fks
)
if not has_user_file_fk:
op.create_foreign_key(
"fk_project__user_file_user_file_id",
"project__user_file",
"user_file",
["user_file_id"],
["id"],
)
logger.info("Added project__user_file -> user_file foreign key")
has_project_fk = any(
fk.get("referred_table") == "user_project"
and fk.get("constrained_columns") == ["project_id"]
for fk in existing_fks
)
if not has_project_fk:
op.create_foreign_key(
"fk_project__user_file_project_id",
"project__user_file",
"user_project",
["project_id"],
["id"],
)
logger.info("Added project__user_file -> user_project foreign key")
# === Step 4: Mark files for document_id migration ===
logger.info("Marking files for background document_id migration...")
logger.info("Migration 4 (UUID primary key swap) completed successfully")
logger.info(
"NOTE: Background task will update document IDs in Vespa and search_doc"
)
def downgrade() -> None:
"""Revert UUID primary key back to integer (data destructive!)."""
logger.error("CRITICAL: Downgrading UUID primary key swap is data destructive!")
logger.error(
"This will break all UUID-based references created after the migration."
)
logger.error("Only proceed if absolutely necessary and have backups.")
bind = op.get_bind()
inspector = sa.inspect(bind)
# Capture existing primary key definitions so we can restore them after swaps
persona_pk = inspector.get_pk_constraint("persona__user_file") or {}
persona_pk_name = persona_pk.get("name")
persona_pk_cols = persona_pk.get("constrained_columns") or []
project_pk = inspector.get_pk_constraint("project__user_file") or {}
project_pk_name = project_pk.get("name")
project_pk_cols = project_pk.get("constrained_columns") or []
# Drop foreign keys that reference the UUID primary key
op.drop_constraint(
"persona__user_file_user_file_id_fkey",
"persona__user_file",
type_="foreignkey",
)
op.drop_constraint(
"fk_project__user_file_user_file_id",
"project__user_file",
type_="foreignkey",
)
# Drop primary keys that rely on the UUID column so we can replace it
if persona_pk_name:
op.drop_constraint(persona_pk_name, "persona__user_file", type_="primary")
if project_pk_name:
op.drop_constraint(project_pk_name, "project__user_file", type_="primary")
# Rebuild integer IDs on user_file using a sequence-backed column
op.execute("CREATE SEQUENCE IF NOT EXISTS user_file_id_seq")
op.add_column(
"user_file",
sa.Column(
"id_int",
sa.Integer(),
server_default=sa.text("nextval('user_file_id_seq')"),
nullable=False,
),
)
op.execute("ALTER SEQUENCE user_file_id_seq OWNED BY user_file.id_int")
# Prepare integer foreign key columns on referencing tables
op.add_column(
"persona__user_file",
sa.Column("user_file_id_int", sa.Integer(), nullable=True),
)
op.add_column(
"project__user_file",
sa.Column("user_file_id_int", sa.Integer(), nullable=True),
)
# Populate the new integer foreign key columns by mapping from the UUID IDs
op.execute(
"""
UPDATE persona__user_file AS p
SET user_file_id_int = uf.id_int
FROM user_file AS uf
WHERE p.user_file_id = uf.id
"""
)
op.execute(
"""
UPDATE project__user_file AS p
SET user_file_id_int = uf.id_int
FROM user_file AS uf
WHERE p.user_file_id = uf.id
"""
)
op.alter_column(
"persona__user_file",
"user_file_id_int",
existing_type=sa.Integer(),
nullable=False,
)
op.alter_column(
"project__user_file",
"user_file_id_int",
existing_type=sa.Integer(),
nullable=False,
)
# Remove the UUID foreign key columns and rename the integer replacements
op.drop_column("persona__user_file", "user_file_id")
op.alter_column(
"persona__user_file",
"user_file_id_int",
new_column_name="user_file_id",
existing_type=sa.Integer(),
nullable=False,
)
op.drop_column("project__user_file", "user_file_id")
op.alter_column(
"project__user_file",
"user_file_id_int",
new_column_name="user_file_id",
existing_type=sa.Integer(),
nullable=False,
)
# Swap the user_file primary key back to the integer column
op.drop_constraint("user_file_pkey", "user_file", type_="primary")
op.drop_column("user_file", "id")
op.alter_column(
"user_file",
"id_int",
new_column_name="id",
existing_type=sa.Integer(),
)
op.alter_column(
"user_file",
"id",
existing_type=sa.Integer(),
nullable=False,
server_default=sa.text("nextval('user_file_id_seq')"),
)
op.execute("ALTER SEQUENCE user_file_id_seq OWNED BY user_file.id")
op.execute(
"""
SELECT setval(
'user_file_id_seq',
GREATEST(COALESCE(MAX(id), 1), 1),
MAX(id) IS NOT NULL
)
FROM user_file
"""
)
op.create_primary_key("user_file_pkey", "user_file", ["id"])
# Restore primary keys on referencing tables
if persona_pk_cols:
op.create_primary_key(
"persona__user_file_pkey", "persona__user_file", persona_pk_cols
)
if project_pk_cols:
op.create_primary_key(
"project__user_file_pkey",
"project__user_file",
project_pk_cols,
)
# Recreate foreign keys pointing at the integer primary key
op.create_foreign_key(
"persona__user_file_user_file_id_fkey",
"persona__user_file",
"user_file",
["user_file_id"],
["id"],
)
op.create_foreign_key(
"fk_project__user_file_user_file_id",
"project__user_file",
"user_file",
["user_file_id"],
["id"],
)
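
The ordering in Step 3 is the subtle part: Postgres records which unique index a foreign key depends on, so uq_user_file_new_id must be dropped before the FK is recreated; otherwise the FK would bind to the soon-to-be-gone unique constraint instead of the new primary key. A quick post-migration check, assuming a reachable database (the DSN is hypothetical):

import sqlalchemy as sa

engine = sa.create_engine("postgresql://localhost/onyx")  # hypothetical DSN
inspector = sa.inspect(engine)

# The primary key should now be the UUID id column...
assert inspector.get_pk_constraint("user_file")["constrained_columns"] == ["id"]

# ...and persona__user_file should reference it directly.
assert any(
    fk["referred_table"] == "user_file" and fk["referred_columns"] == ["id"]
    for fk in inspector.get_foreign_keys("persona__user_file")
)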

View File

@@ -0,0 +1,80 @@
"""nullify_default_system_prompt
Revision ID: 7e490836d179
Revises: c1d2e3f4a5b6
Create Date: 2025-12-29 16:54:36.635574
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "7e490836d179"
down_revision = "c1d2e3f4a5b6"
branch_labels = None
depends_on = None
# This is the default system prompt from the previous migration (87c52ec39f84)
# ruff: noqa: E501, W605 start
PREVIOUS_DEFAULT_SYSTEM_PROMPT = """
You are a highly capable, thoughtful, and precise assistant. Your goal is to deeply understand the user's intent, ask clarifying questions when needed, think step-by-step through complex problems, provide clear and accurate answers, and proactively anticipate helpful follow-up information. Always prioritize being truthful, nuanced, insightful, and efficient.
The current date is [[CURRENT_DATETIME]].[[CITATION_GUIDANCE]]
# Response Style
You use different text styles, bolding, emojis (sparingly), block quotes, and other formatting to make your responses more readable and engaging.
You use proper Markdown and LaTeX to format your responses for math, scientific, and chemical formulas, symbols, etc.: '$$\\n[expression]\\n$$' for standalone cases and '\\( [expression] \\)' when inline.
For code you prefer to use Markdown and specify the language.
You can use horizontal rules (---) to separate sections of your responses.
You can use Markdown tables to format your responses for data, lists, and other structured information.
""".lstrip()
# ruff: noqa: E501, W605 end
def upgrade() -> None:
# Make system_prompt column nullable (model already has nullable=True but DB doesn't)
op.alter_column(
"persona",
"system_prompt",
nullable=True,
)
# Set system_prompt to NULL where it matches the previous default
conn = op.get_bind()
conn.execute(
sa.text(
"""
UPDATE persona
SET system_prompt = NULL
WHERE system_prompt = :previous_default
"""
),
{"previous_default": PREVIOUS_DEFAULT_SYSTEM_PROMPT},
)
def downgrade() -> None:
# Restore the default system prompt for personas that have NULL
# Note: This may restore the prompt to personas that originally had NULL
# before this migration, but there's no way to distinguish them
conn = op.get_bind()
conn.execute(
sa.text(
"""
UPDATE persona
SET system_prompt = :previous_default
WHERE system_prompt IS NULL
"""
),
{"previous_default": PREVIOUS_DEFAULT_SYSTEM_PROMPT},
)
# Revert system_prompt column to not nullable
op.alter_column(
"persona",
"system_prompt",
nullable=False,
)
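
Because the UPDATE matches the previous default byte-for-byte, any persona whose prompt was edited, even by whitespace, keeps its current value. A dry-run count can show how many rows the upgrade would null out, assuming a reachable database (the DSN is hypothetical; the constant is the one defined above):

import sqlalchemy as sa

engine = sa.create_engine("postgresql://localhost/onyx")  # hypothetical DSN
with engine.connect() as conn:
    count = conn.execute(
        sa.text("SELECT count(*) FROM persona WHERE system_prompt = :prev"),
        {"prev": PREVIOUS_DEFAULT_SYSTEM_PROMPT},  # constant defined above
    ).scalar()
    print(f"{count} personas still carry the old default prompt")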

View File

@@ -42,13 +42,13 @@ def upgrade() -> None:
sa.Column(
"created_at",
sa.DateTime(timezone=True),
-            server_default=sa.text("now()"), # type: ignore
+            server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
-            server_default=sa.text("now()"), # type: ignore
+            server_default=sa.text("now()"),
nullable=False,
),
)
@@ -63,13 +63,13 @@ def upgrade() -> None:
sa.Column(
"created_at",
sa.DateTime(timezone=True),
-            server_default=sa.text("now()"), # type: ignore
+            server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
-            server_default=sa.text("now()"), # type: ignore
+            server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(

View File

@@ -0,0 +1,55 @@
"""update_default_system_prompt
Revision ID: 87c52ec39f84
Revises: 7bd55f264e1b
Create Date: 2025-12-05 15:54:06.002452
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "87c52ec39f84"
down_revision = "7bd55f264e1b"
branch_labels = None
depends_on = None
DEFAULT_PERSONA_ID = 0
# ruff: noqa: E501, W605 start
DEFAULT_SYSTEM_PROMPT = """
You are a highly capable, thoughtful, and precise assistant. Your goal is to deeply understand the user's intent, ask clarifying questions when needed, think step-by-step through complex problems, provide clear and accurate answers, and proactively anticipate helpful follow-up information. Always prioritize being truthful, nuanced, insightful, and efficient.
The current date is [[CURRENT_DATETIME]].[[CITATION_GUIDANCE]]
# Response Style
You use different text styles, bolding, emojis (sparingly), block quotes, and other formatting to make your responses more readable and engaging.
You use proper Markdown and LaTeX to format your responses for math, scientific, and chemical formulas, symbols, etc.: '$$\\n[expression]\\n$$' for standalone cases and '\\( [expression] \\)' when inline.
For code you prefer to use Markdown and specify the language.
You can use horizontal rules (---) to separate sections of your responses.
You can use Markdown tables to format your responses for data, lists, and other structured information.
""".lstrip()
# ruff: noqa: E501, W605 end
def upgrade() -> None:
conn = op.get_bind()
conn.execute(
sa.text(
"""
UPDATE persona
SET system_prompt = :system_prompt
WHERE id = :persona_id
"""
),
{"system_prompt": DEFAULT_SYSTEM_PROMPT, "persona_id": DEFAULT_PERSONA_ID},
)
def downgrade() -> None:
# We don't revert the system prompt on downgrade since we don't know
# what the previous value was. The new prompt is a reasonable default.
pass
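
The [[CURRENT_DATETIME]] and [[CITATION_GUIDANCE]] placeholders are stored verbatim, so substitution presumably happens at request time in application code. A purely hypothetical illustration of such a substitution (this helper is not part of the codebase):

from datetime import datetime, timezone


def render_system_prompt(template: str, citation_guidance: str = "") -> str:
    # Hypothetical helper; the real substitution lives elsewhere in the app.
    now = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M")
    return template.replace("[[CURRENT_DATETIME]]", now).replace(
        "[[CITATION_GUIDANCE]]", citation_guidance
    )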

View File

@@ -0,0 +1,136 @@
"""seed_default_image_gen_config
Revision ID: 9087b548dd69
Revises: 2b90f3af54b8
Create Date: 2026-01-05 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "9087b548dd69"
down_revision = "2b90f3af54b8"
branch_labels = None
depends_on = None
# Constants for default image generation config
# Source: web/src/app/admin/configuration/image-generation/constants.ts
IMAGE_PROVIDER_ID = "openai_gpt_image_1"
MODEL_NAME = "gpt-image-1"
PROVIDER_NAME = "openai"
def upgrade() -> None:
conn = op.get_bind()
# Check if image_generation_config table already has records
existing_configs = (
conn.execute(sa.text("SELECT COUNT(*) FROM image_generation_config")).scalar()
or 0
)
if existing_configs > 0:
# Skip if configs already exist - user may have configured manually
return
# Find the first OpenAI LLM provider
openai_provider = conn.execute(
sa.text(
"""
SELECT id, api_key
FROM llm_provider
WHERE provider = :provider
ORDER BY id
LIMIT 1
"""
),
{"provider": PROVIDER_NAME},
).fetchone()
if not openai_provider:
# No OpenAI provider found - nothing to do
return
source_provider_id, api_key = openai_provider
# Create new LLM provider for image generation (clone only api_key)
result = conn.execute(
sa.text(
"""
INSERT INTO llm_provider (
name, provider, api_key, api_base, api_version,
deployment_name, default_model_name, is_public,
is_default_provider, is_default_vision_provider, is_auto_mode
)
VALUES (
:name, :provider, :api_key, NULL, NULL,
NULL, :default_model_name, :is_public,
NULL, NULL, :is_auto_mode
)
RETURNING id
"""
),
{
"name": f"Image Gen - {IMAGE_PROVIDER_ID}",
"provider": PROVIDER_NAME,
"api_key": api_key,
"default_model_name": MODEL_NAME,
"is_public": True,
"is_auto_mode": False,
},
)
new_provider_id = result.scalar()
# Create model configuration
result = conn.execute(
sa.text(
"""
INSERT INTO model_configuration (
llm_provider_id, name, is_visible, max_input_tokens,
supports_image_input, display_name
)
VALUES (
:llm_provider_id, :name, :is_visible, :max_input_tokens,
:supports_image_input, :display_name
)
RETURNING id
"""
),
{
"llm_provider_id": new_provider_id,
"name": MODEL_NAME,
"is_visible": True,
"max_input_tokens": None,
"supports_image_input": False,
"display_name": None,
},
)
model_config_id = result.scalar()
# Create image generation config
conn.execute(
sa.text(
"""
INSERT INTO image_generation_config (
image_provider_id, model_configuration_id, is_default
)
VALUES (
:image_provider_id, :model_configuration_id, :is_default
)
"""
),
{
"image_provider_id": IMAGE_PROVIDER_ID,
"model_configuration_id": model_config_id,
"is_default": True,
},
)
def downgrade() -> None:
# We don't remove the config on downgrade since it's safe to keep around
# If we upgrade again, it will be a no-op due to the existing records check
pass
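
The COUNT(*) guard makes this seed re-runnable: once any config row exists, or no OpenAI provider is configured, the migration is a no-op. A quick look at what it seeded, assuming a reachable database (the DSN is hypothetical):

import sqlalchemy as sa

engine = sa.create_engine("postgresql://localhost/onyx")  # hypothetical DSN
with engine.connect() as conn:
    row = conn.execute(
        sa.text(
            "SELECT image_provider_id, is_default FROM image_generation_config"
        )
    ).first()
    # Expect the seeded openai_gpt_image_1 default, or None if no
    # OpenAI provider existed when the migration ran.
    print(row)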

View File

@@ -0,0 +1,45 @@
"""mcp_tool_enabled
Revision ID: 96a5702df6aa
Revises: 40926a4dab77
Create Date: 2025-10-09 12:10:21.733097
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "96a5702df6aa"
down_revision = "40926a4dab77"
branch_labels = None
depends_on = None
DELETE_DISABLED_TOOLS_SQL = "DELETE FROM tool WHERE enabled = false"
def upgrade() -> None:
op.add_column(
"tool",
sa.Column(
"enabled",
sa.Boolean(),
nullable=False,
server_default=sa.true(),
),
)
op.create_index(
"ix_tool_mcp_server_enabled",
"tool",
["mcp_server_id", "enabled"],
)
    # Remove the server default so the application controls defaulting
op.alter_column("tool", "enabled", server_default=None)
def downgrade() -> None:
    # Disabled tools would silently come back as enabled once the column
    # is dropped, so remove them first.
    op.execute(DELETE_DISABLED_TOOLS_SQL)
op.drop_index("ix_tool_mcp_server_enabled", table_name="tool")
op.drop_column("tool", "enabled")

View File

@@ -0,0 +1,33 @@
"""add_is_auto_mode_to_llm_provider
Revision ID: 9a0296d7421e
Revises: 7206234e012a
Create Date: 2025-12-17 18:14:29.620981
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "9a0296d7421e"
down_revision = "7206234e012a"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"llm_provider",
sa.Column(
"is_auto_mode",
sa.Boolean(),
nullable=False,
server_default="false",
),
)
def downgrade() -> None:
op.drop_column("llm_provider", "is_auto_mode")

View File

@@ -0,0 +1,268 @@
"""Migration 1: User file schema additions
Revision ID: 9b66d3156fc6
Revises: b4ef3ae0bf6e
Create Date: 2025-09-22 09:42:06.086732
This migration adds new columns and tables without modifying existing data.
It is safe to run and can be easily rolled back.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as psql
import logging
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "9b66d3156fc6"
down_revision = "b4ef3ae0bf6e"
branch_labels = None
depends_on = None
def upgrade() -> None:
"""Add new columns and tables without modifying existing data."""
# Enable pgcrypto for UUID generation
op.execute("CREATE EXTENSION IF NOT EXISTS pgcrypto")
bind = op.get_bind()
inspector = sa.inspect(bind)
# === USER_FILE: Add new columns ===
logger.info("Adding new columns to user_file table...")
user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
# Check if ID is already UUID (in case of re-run after partial migration)
id_is_uuid = any(
col["name"] == "id" and "uuid" in str(col["type"]).lower()
for col in inspector.get_columns("user_file")
)
# Add transitional UUID column only if ID is not already UUID
if "new_id" not in user_file_columns and not id_is_uuid:
op.add_column(
"user_file",
sa.Column(
"new_id",
psql.UUID(as_uuid=True),
nullable=True,
server_default=sa.text("gen_random_uuid()"),
),
)
op.create_unique_constraint("uq_user_file_new_id", "user_file", ["new_id"])
logger.info("Added new_id column to user_file")
# Add status column
if "status" not in user_file_columns:
op.add_column(
"user_file",
sa.Column(
"status",
sa.Enum(
"PROCESSING",
"COMPLETED",
"FAILED",
"CANCELED",
name="userfilestatus",
native_enum=False,
),
nullable=False,
server_default="PROCESSING",
),
)
logger.info("Added status column to user_file")
# Add other tracking columns
if "chunk_count" not in user_file_columns:
op.add_column(
"user_file", sa.Column("chunk_count", sa.Integer(), nullable=True)
)
logger.info("Added chunk_count column to user_file")
if "last_accessed_at" not in user_file_columns:
op.add_column(
"user_file",
sa.Column("last_accessed_at", sa.DateTime(timezone=True), nullable=True),
)
logger.info("Added last_accessed_at column to user_file")
if "needs_project_sync" not in user_file_columns:
op.add_column(
"user_file",
sa.Column(
"needs_project_sync",
sa.Boolean(),
nullable=False,
server_default=sa.text("false"),
),
)
logger.info("Added needs_project_sync column to user_file")
if "last_project_sync_at" not in user_file_columns:
op.add_column(
"user_file",
sa.Column(
"last_project_sync_at", sa.DateTime(timezone=True), nullable=True
),
)
logger.info("Added last_project_sync_at column to user_file")
if "document_id_migrated" not in user_file_columns:
op.add_column(
"user_file",
sa.Column(
"document_id_migrated",
sa.Boolean(),
nullable=False,
server_default=sa.text("true"),
),
)
logger.info("Added document_id_migrated column to user_file")
# === USER_FOLDER -> USER_PROJECT rename ===
table_names = set(inspector.get_table_names())
if "user_folder" in table_names:
logger.info("Updating user_folder table...")
# Make description nullable first
op.alter_column("user_folder", "description", nullable=True)
# Rename table if user_project doesn't exist
if "user_project" not in table_names:
op.execute("ALTER TABLE user_folder RENAME TO user_project")
logger.info("Renamed user_folder to user_project")
elif "user_project" in table_names:
# If already renamed, ensure column nullability
project_cols = [col["name"] for col in inspector.get_columns("user_project")]
if "description" in project_cols:
op.alter_column("user_project", "description", nullable=True)
# Add instructions column to user_project
inspector = sa.inspect(bind) # Refresh after rename
if "user_project" in inspector.get_table_names():
project_columns = [col["name"] for col in inspector.get_columns("user_project")]
if "instructions" not in project_columns:
op.add_column(
"user_project",
sa.Column("instructions", sa.String(), nullable=True),
)
logger.info("Added instructions column to user_project")
# === CHAT_SESSION: Add project_id ===
chat_session_columns = [
col["name"] for col in inspector.get_columns("chat_session")
]
if "project_id" not in chat_session_columns:
op.add_column(
"chat_session",
sa.Column("project_id", sa.Integer(), nullable=True),
)
logger.info("Added project_id column to chat_session")
# === PERSONA__USER_FILE: Add UUID column ===
persona_user_file_columns = [
col["name"] for col in inspector.get_columns("persona__user_file")
]
if "user_file_id_uuid" not in persona_user_file_columns:
op.add_column(
"persona__user_file",
sa.Column("user_file_id_uuid", psql.UUID(as_uuid=True), nullable=True),
)
logger.info("Added user_file_id_uuid column to persona__user_file")
# === PROJECT__USER_FILE: Create new table ===
if "project__user_file" not in inspector.get_table_names():
op.create_table(
"project__user_file",
sa.Column("project_id", sa.Integer(), nullable=False),
sa.Column("user_file_id", psql.UUID(as_uuid=True), nullable=False),
sa.PrimaryKeyConstraint("project_id", "user_file_id"),
)
logger.info("Created project__user_file table")
# Only create the index if it doesn't exist
existing_indexes = [
ix["name"] for ix in inspector.get_indexes("project__user_file")
]
if "idx_project__user_file_user_file_id" not in existing_indexes:
op.create_index(
"idx_project__user_file_user_file_id",
"project__user_file",
["user_file_id"],
)
logger.info(
"Created index idx_project__user_file_user_file_id on project__user_file"
)
logger.info("Migration 1 (schema additions) completed successfully")
def downgrade() -> None:
"""Remove added columns and tables."""
bind = op.get_bind()
inspector = sa.inspect(bind)
logger.info("Starting downgrade of schema additions...")
# Drop project__user_file table
if "project__user_file" in inspector.get_table_names():
        # No explicit drop_index needed: idx_project__user_file_user_file_id is dropped with the table.
op.drop_table("project__user_file")
logger.info("Dropped project__user_file table")
# Remove columns from persona__user_file
if "persona__user_file" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("persona__user_file")]
if "user_file_id_uuid" in columns:
op.drop_column("persona__user_file", "user_file_id_uuid")
logger.info("Dropped user_file_id_uuid from persona__user_file")
# Remove columns from chat_session
if "chat_session" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("chat_session")]
if "project_id" in columns:
op.drop_column("chat_session", "project_id")
logger.info("Dropped project_id from chat_session")
# Rename user_project back to user_folder and remove instructions
if "user_project" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("user_project")]
if "instructions" in columns:
op.drop_column("user_project", "instructions")
op.execute("ALTER TABLE user_project RENAME TO user_folder")
# Update NULL descriptions to empty string before setting NOT NULL constraint
op.execute("UPDATE user_folder SET description = '' WHERE description IS NULL")
op.alter_column("user_folder", "description", nullable=False)
logger.info("Renamed user_project back to user_folder")
# Remove columns from user_file
if "user_file" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("user_file")]
columns_to_drop = [
"document_id_migrated",
"last_project_sync_at",
"needs_project_sync",
"last_accessed_at",
"chunk_count",
"status",
]
for col in columns_to_drop:
if col in columns:
op.drop_column("user_file", col)
logger.info(f"Dropped {col} from user_file")
if "new_id" in columns:
op.drop_constraint("uq_user_file_new_id", "user_file", type_="unique")
op.drop_column("user_file", "new_id")
logger.info("Dropped new_id from user_file")
    # The status column was created with native_enum=False, so no Postgres enum
    # type exists; this DROP TYPE is only a harmless safety net.
    bind.execute(sa.text("DROP TYPE IF EXISTS userfilestatus"))
logger.info("Downgrade completed successfully")

View File

@@ -0,0 +1,97 @@
"""add config to federated_connector
Revision ID: 9drpiiw74ljy
Revises: 2acdef638fc2
Create Date: 2025-11-03 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "9drpiiw74ljy"
down_revision = "2acdef638fc2"
branch_labels = None
depends_on = None
def upgrade() -> None:
connection = op.get_bind()
# Check if column already exists in current schema
result = connection.execute(
sa.text(
"""
SELECT column_name
FROM information_schema.columns
WHERE table_schema = current_schema()
AND table_name = 'federated_connector'
AND column_name = 'config'
"""
)
)
column_exists = result.fetchone() is not None
# Add config column with default empty object (only if it doesn't exist)
if not column_exists:
op.add_column(
"federated_connector",
sa.Column(
"config", postgresql.JSONB(), nullable=False, server_default="{}"
),
)
# Data migration: Single bulk update for all Slack connectors
connection.execute(
sa.text(
"""
WITH connector_configs AS (
SELECT
fc.id as connector_id,
CASE
WHEN fcds.entities->'channels' IS NOT NULL
AND jsonb_typeof(fcds.entities->'channels') = 'array'
AND jsonb_array_length(fcds.entities->'channels') > 0
THEN
jsonb_build_object(
'channels', fcds.entities->'channels',
'search_all_channels', false
) ||
CASE
WHEN fcds.entities->'include_dm' IS NOT NULL
THEN jsonb_build_object('include_dm', fcds.entities->'include_dm')
ELSE '{}'::jsonb
END
ELSE
jsonb_build_object('search_all_channels', true) ||
CASE
WHEN fcds.entities->'include_dm' IS NOT NULL
THEN jsonb_build_object('include_dm', fcds.entities->'include_dm')
ELSE '{}'::jsonb
END
END as config
FROM federated_connector fc
LEFT JOIN LATERAL (
SELECT entities
FROM federated_connector__document_set
WHERE federated_connector_id = fc.id
AND entities IS NOT NULL
ORDER BY id
LIMIT 1
) fcds ON true
WHERE fc.source = 'FEDERATED_SLACK'
AND fcds.entities IS NOT NULL
)
UPDATE federated_connector fc
SET config = cc.config
FROM connector_configs cc
WHERE fc.id = cc.connector_id
"""
)
)
def downgrade() -> None:
op.drop_column("federated_connector", "config")

View File

@@ -0,0 +1,62 @@
"""update_default_tool_descriptions
Revision ID: a01bf2971c5d
Revises: 18b5b2524446
Create Date: 2025-12-16 15:21:25.656375
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "a01bf2971c5d"
down_revision = "18b5b2524446"
branch_labels = None
depends_on = None
# new tool descriptions (12/2025)
TOOL_DESCRIPTIONS = {
"SearchTool": "The Search Action allows the agent to search through connected knowledge to help build an answer.",
"ImageGenerationTool": (
"The Image Generation Action allows the agent to use DALL-E 3 or GPT-IMAGE-1 to generate images. "
"The action will be used when the user asks the agent to generate an image."
),
"WebSearchTool": (
"The Web Search Action allows the agent "
"to perform internet searches for up-to-date information."
),
"KnowledgeGraphTool": (
"The Knowledge Graph Search Action allows the agent to search the "
"Knowledge Graph for information. This tool can (for now) only be active in the KG Beta Agent, "
"and it requires the Knowledge Graph to be enabled."
),
"OktaProfileTool": (
"The Okta Profile Action allows the agent to fetch the current user's information from Okta. "
"This may include the user's name, email, phone number, address, and other details such as their "
"manager and direct reports."
),
}
def upgrade() -> None:
    conn = op.get_bind()
    # Alembic already runs each migration inside a transaction, so no explicit
    # BEGIN/COMMIT/ROLLBACK is needed; a failure in any UPDATE rolls back the
    # whole batch.
    for tool_id, description in TOOL_DESCRIPTIONS.items():
        conn.execute(
            sa.text(
                "UPDATE tool SET description = :description WHERE in_code_tool_id = :tool_id"
            ),
            {"description": description, "tool_id": tool_id},
        )
def downgrade() -> None:
pass

View File

@@ -0,0 +1,49 @@
"""add license table
Revision ID: a1b2c3d4e5f6
Revises: a01bf2971c5d
Create Date: 2025-12-04 10:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "a1b2c3d4e5f6"
down_revision = "a01bf2971c5d"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"license",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column("license_data", sa.Text(), nullable=False),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.func.now(),
nullable=False,
),
)
# Singleton pattern - only ever one row in this table
op.create_index(
"idx_license_singleton",
"license",
[sa.text("(true)")],
unique=True,
)
def downgrade() -> None:
op.drop_index("idx_license_singleton", table_name="license")
op.drop_table("license")

View File

@@ -0,0 +1,27 @@
"""Remove fast_default_model_name from llm_provider
Revision ID: a2b3c4d5e6f7
Revises: 2a391f840e85
Create Date: 2024-12-17
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "a2b3c4d5e6f7"
down_revision = "2a391f840e85"
branch_labels: None = None
depends_on: None = None
def upgrade() -> None:
op.drop_column("llm_provider", "fast_default_model_name")
def downgrade() -> None:
op.add_column(
"llm_provider",
sa.Column("fast_default_model_name", sa.String(), nullable=True),
)

View File

@@ -0,0 +1,39 @@
"""remove userfile related deprecated fields
Revision ID: a3c1a7904cd0
Revises: 5c3dca366b35
Create Date: 2026-01-06 13:00:30.634396
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "a3c1a7904cd0"
down_revision = "5c3dca366b35"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.drop_column("user_file", "document_id")
op.drop_column("user_file", "document_id_migrated")
op.drop_column("connector_credential_pair", "is_user_file")
def downgrade() -> None:
op.add_column(
"connector_credential_pair",
sa.Column("is_user_file", sa.Boolean(), nullable=False, server_default="false"),
)
op.add_column(
"user_file",
sa.Column("document_id", sa.String(), nullable=True),
)
op.add_column(
"user_file",
sa.Column(
"document_id_migrated", sa.Boolean(), nullable=False, server_default="true"
),
)

Some files were not shown because too many files have changed in this diff.