Compare commits

..

2178 Commits

Author SHA1 Message Date
github-merge-queue[bot]
4fc095e507 Publish onyx-0.4.32.tgz 2026-02-24 01:25:09 +00:00
github-merge-queue[bot]
b91c3eeb67 Publish onyx-0.4.32.tgz 2026-02-24 01:21:31 +00:00
github-merge-queue[bot]
02384b25c8 Publish onyx-0.4.32.tgz 2026-02-24 01:19:43 +00:00
github-merge-queue[bot]
dd1b7f7eb5 Publish onyx-0.4.32.tgz 2026-02-24 01:08:07 +00:00
github-merge-queue[bot]
7b95363d01 Publish onyx-0.4.32.tgz 2026-02-24 01:07:11 +00:00
github-merge-queue[bot]
96e5db7473 Publish onyx-0.4.32.tgz 2026-02-24 00:29:16 +00:00
github-merge-queue[bot]
edf6e33f96 Publish onyx-0.4.32.tgz 2026-02-24 00:11:43 +00:00
github-merge-queue[bot]
718e5a3160 Publish onyx-0.4.32.tgz 2026-02-24 00:10:25 +00:00
github-merge-queue[bot]
6be9e296e4 Publish onyx-0.4.32.tgz 2026-02-23 23:59:39 +00:00
github-merge-queue[bot]
7b089a11e6 Publish onyx-0.4.32.tgz 2026-02-23 23:48:36 +00:00
github-merge-queue[bot]
25e441cc92 Publish onyx-0.4.32.tgz 2026-02-23 22:52:45 +00:00
github-merge-queue[bot]
97d7135a57 Publish onyx-0.4.32.tgz 2026-02-23 22:31:44 +00:00
github-merge-queue[bot]
030d383da5 Publish onyx-0.4.32.tgz 2026-02-23 21:34:06 +00:00
github-merge-queue[bot]
6979be0099 Publish onyx-0.4.32.tgz 2026-02-23 21:33:14 +00:00
github-merge-queue[bot]
cc6ab4820c Publish onyx-0.4.32.tgz 2026-02-23 21:22:33 +00:00
github-merge-queue[bot]
ffe2bc2c5e Publish onyx-0.4.32.tgz 2026-02-23 21:17:22 +00:00
github-merge-queue[bot]
0ad7d9459c Publish onyx-0.4.32.tgz 2026-02-23 20:36:42 +00:00
github-merge-queue[bot]
196ffda3f1 Publish onyx-0.4.32.tgz 2026-02-23 19:34:16 +00:00
github-merge-queue[bot]
0706e418b3 Publish onyx-0.4.32.tgz 2026-02-23 13:45:58 +00:00
github-merge-queue[bot]
53e4971290 Publish onyx-0.4.32.tgz 2026-02-23 05:17:25 +00:00
github-merge-queue[bot]
68e62976a5 Publish onyx-0.4.32.tgz 2026-02-23 01:48:38 +00:00
github-merge-queue[bot]
1971302205 Publish onyx-0.4.32.tgz 2026-02-22 07:29:23 +00:00
github-merge-queue[bot]
aac6735187 Publish onyx-0.4.32.tgz 2026-02-22 06:52:49 +00:00
github-merge-queue[bot]
0680a93907 Publish onyx-0.4.32.tgz 2026-02-22 06:20:15 +00:00
github-merge-queue[bot]
62c4cab5e2 Publish onyx-0.4.32.tgz 2026-02-22 01:09:18 +00:00
github-merge-queue[bot]
d43e2c9aa3 Publish onyx-0.4.32.tgz 2026-02-21 04:59:06 +00:00
github-merge-queue[bot]
4b8b83fad4 Publish onyx-0.4.32.tgz 2026-02-21 04:55:14 +00:00
github-merge-queue[bot]
7bac783da8 Publish onyx-0.4.32.tgz 2026-02-21 04:10:10 +00:00
github-merge-queue[bot]
63c8e1d08e Publish onyx-0.4.32.tgz 2026-02-21 04:03:31 +00:00
github-merge-queue[bot]
7704f69c5a Publish onyx-0.4.32.tgz 2026-02-21 03:54:36 +00:00
github-merge-queue[bot]
1945b7ad81 Publish onyx-0.4.32.tgz 2026-02-21 03:05:07 +00:00
github-merge-queue[bot]
ba4aeeba6f Publish onyx-0.4.32.tgz 2026-02-21 02:44:52 +00:00
github-merge-queue[bot]
44513eac44 Publish onyx-0.4.32.tgz 2026-02-21 02:22:55 +00:00
github-merge-queue[bot]
90b4c54cde Publish onyx-0.4.32.tgz 2026-02-21 02:14:45 +00:00
github-merge-queue[bot]
9ba769f0ad Publish onyx-0.4.32.tgz 2026-02-21 02:03:31 +00:00
github-merge-queue[bot]
09f8733514 Publish onyx-0.4.32.tgz 2026-02-21 01:37:52 +00:00
github-merge-queue[bot]
090d1892f8 Publish onyx-0.4.32.tgz 2026-02-21 00:21:17 +00:00
github-merge-queue[bot]
80e91155e0 Publish onyx-0.4.32.tgz 2026-02-20 22:50:40 +00:00
github-merge-queue[bot]
33feb30399 Publish onyx-0.4.32.tgz 2026-02-20 22:35:18 +00:00
github-merge-queue[bot]
7ef9da9908 Publish onyx-0.4.32.tgz 2026-02-20 21:55:31 +00:00
github-merge-queue[bot]
62889c09dd Publish onyx-0.4.32.tgz 2026-02-20 20:59:16 +00:00
nmgarza5
425f69d3e8 Publish onyx-0.4.32.tgz 2026-02-20 20:45:16 +00:00
github-merge-queue[bot]
5f417377e9 Publish onyx-0.4.32.tgz 2026-02-20 20:10:12 +00:00
github-merge-queue[bot]
cedce7ea37 Publish onyx-0.4.32.tgz 2026-02-20 19:46:28 +00:00
nmgarza5
91e73938e9 Publish onyx-0.4.32.tgz 2026-02-20 19:39:59 +00:00
github-merge-queue[bot]
b92513422d Publish onyx-0.4.30.tgz 2026-02-19 19:33:28 +00:00
github-merge-queue[bot]
28767dcf3f Publish onyx-0.4.30.tgz 2026-02-19 19:23:26 +00:00
github-merge-queue[bot]
dafbd904ce Publish onyx-0.4.30.tgz 2026-02-19 18:38:37 +00:00
github-merge-queue[bot]
04fd1a7950 Publish onyx-0.4.30.tgz 2026-02-19 17:34:47 +00:00
justin-tahara
bb0dddb8b4 Publish onyx-0.4.30.tgz 2026-02-19 17:20:41 +00:00
github-merge-queue[bot]
c7416353d1 Publish onyx-0.4.29.tgz 2026-02-19 14:09:39 +00:00
github-merge-queue[bot]
ea1884e406 Publish onyx-0.4.29.tgz 2026-02-19 05:51:03 +00:00
github-merge-queue[bot]
7dd2cf9bfa Publish onyx-0.4.29.tgz 2026-02-19 05:40:44 +00:00
github-merge-queue[bot]
1bb01920bc Publish onyx-0.4.29.tgz 2026-02-19 04:34:05 +00:00
github-merge-queue[bot]
c4383c79aa Publish onyx-0.4.29.tgz 2026-02-19 04:11:27 +00:00
github-merge-queue[bot]
cc2327e450 Publish onyx-0.4.29.tgz 2026-02-19 03:52:56 +00:00
github-merge-queue[bot]
e0722389f4 Publish onyx-0.4.29.tgz 2026-02-19 02:39:38 +00:00
github-merge-queue[bot]
41bfed4092 Publish onyx-0.4.29.tgz 2026-02-19 02:32:05 +00:00
github-merge-queue[bot]
b2c1783912 Publish onyx-0.4.29.tgz 2026-02-19 01:44:27 +00:00
github-merge-queue[bot]
32b7a02d68 Publish onyx-0.4.29.tgz 2026-02-19 01:32:22 +00:00
github-merge-queue[bot]
74b68cec5b Publish onyx-0.4.29.tgz 2026-02-19 01:31:51 +00:00
github-merge-queue[bot]
b0b2dde40e Publish onyx-0.4.29.tgz 2026-02-19 00:20:45 +00:00
github-merge-queue[bot]
ac3c88025f Publish onyx-0.4.29.tgz 2026-02-18 23:47:22 +00:00
github-merge-queue[bot]
a674b677df Publish onyx-0.4.29.tgz 2026-02-18 23:13:49 +00:00
github-merge-queue[bot]
0363adac66 Publish onyx-0.4.29.tgz 2026-02-18 23:06:34 +00:00
github-merge-queue[bot]
4b064282ed Publish onyx-0.4.29.tgz 2026-02-18 22:39:05 +00:00
github-merge-queue[bot]
0fa4dbbe1c Publish onyx-0.4.29.tgz 2026-02-18 22:25:47 +00:00
github-merge-queue[bot]
ec2e3a5e9d Publish onyx-0.4.29.tgz 2026-02-18 22:04:05 +00:00
github-merge-queue[bot]
abe2c0d41d Publish onyx-0.4.29.tgz 2026-02-18 21:58:16 +00:00
github-merge-queue[bot]
2736848853 Publish onyx-0.4.29.tgz 2026-02-18 21:46:14 +00:00
github-merge-queue[bot]
ede74236ac Publish onyx-0.4.29.tgz 2026-02-18 21:45:06 +00:00
github-merge-queue[bot]
0d8a299cfd Publish onyx-0.4.29.tgz 2026-02-18 21:34:29 +00:00
github-merge-queue[bot]
f052905377 Publish onyx-0.4.29.tgz 2026-02-18 21:34:03 +00:00
github-merge-queue[bot]
704ea9c6f7 Publish onyx-0.4.29.tgz 2026-02-18 21:33:23 +00:00
github-merge-queue[bot]
ee60a72bf0 Publish onyx-0.4.29.tgz 2026-02-18 21:20:44 +00:00
github-merge-queue[bot]
aaa056f8ca Publish onyx-0.4.29.tgz 2026-02-18 20:56:53 +00:00
github-merge-queue[bot]
29590d9f24 Publish onyx-0.4.29.tgz 2026-02-18 20:16:36 +00:00
github-merge-queue[bot]
dc8d63b805 Publish onyx-0.4.29.tgz 2026-02-18 20:02:47 +00:00
jmelahman
0b034612e1 Publish onyx-0.4.29.tgz 2026-02-18 19:22:18 +00:00
github-merge-queue[bot]
fabb7522a4 Publish onyx-0.4.29.tgz 2026-02-18 18:45:19 +00:00
github-merge-queue[bot]
779d182ece Publish onyx-0.4.29.tgz 2026-02-18 15:24:00 +00:00
github-merge-queue[bot]
9246384dc2 Publish onyx-0.4.29.tgz 2026-02-18 05:49:01 +00:00
github-merge-queue[bot]
0eb18888ae Publish onyx-0.4.29.tgz 2026-02-18 05:38:59 +00:00
github-merge-queue[bot]
1a6d2d4a2f Publish onyx-0.4.29.tgz 2026-02-18 05:16:55 +00:00
github-merge-queue[bot]
acbd97599a Publish onyx-0.4.29.tgz 2026-02-18 05:09:53 +00:00
jmelahman
58807b7be4 Publish onyx-0.4.29.tgz 2026-02-18 04:57:13 +00:00
github-merge-queue[bot]
afa55dfea8 Publish onyx-0.4.29.tgz 2026-02-18 04:34:50 +00:00
github-merge-queue[bot]
0bba288ffc Publish onyx-0.4.29.tgz 2026-02-18 03:54:26 +00:00
github-merge-queue[bot]
a20f9c3b09 Publish onyx-0.4.29.tgz 2026-02-18 02:15:21 +00:00
jmelahman
be7fb3bc84 Publish onyx-0.4.29.tgz 2026-02-18 01:52:55 +00:00
github-merge-queue[bot]
00ab2cb7f7 Publish onyx-0.4.29.tgz 2026-02-18 01:51:47 +00:00
github-merge-queue[bot]
9879ed606e Publish onyx-0.4.29.tgz 2026-02-18 01:04:40 +00:00
github-merge-queue[bot]
5f08263b92 Publish onyx-0.4.29.tgz 2026-02-18 00:48:42 +00:00
jmelahman
ca64fd709b Publish onyx-0.4.29.tgz 2026-02-18 00:26:07 +00:00
jmelahman
f7147e5219 Publish onyx-0.4.29.tgz 2026-02-18 00:23:54 +00:00
github-merge-queue[bot]
a8791d9636 Publish onyx-0.4.29.tgz 2026-02-18 00:07:06 +00:00
github-merge-queue[bot]
e70728dda1 Publish onyx-0.4.29.tgz 2026-02-18 00:04:39 +00:00
jmelahman
be6257e503 Publish onyx-0.4.29.tgz 2026-02-17 23:38:27 +00:00
github-merge-queue[bot]
6d034bead8 Publish onyx-0.4.29.tgz 2026-02-17 23:35:21 +00:00
jmelahman
74d9738bd3 Publish onyx-0.4.29.tgz 2026-02-17 23:12:54 +00:00
github-merge-queue[bot]
c163a83c14 Publish onyx-0.4.29.tgz 2026-02-17 23:02:44 +00:00
github-merge-queue[bot]
313f2b2e5b Publish onyx-0.4.29.tgz 2026-02-17 22:51:06 +00:00
github-merge-queue[bot]
f1fd9f92d0 Publish onyx-0.4.29.tgz 2026-02-17 22:40:16 +00:00
github-merge-queue[bot]
79cd778dba Publish onyx-0.4.29.tgz 2026-02-17 22:01:31 +00:00
github-merge-queue[bot]
3f055c7fa3 Publish onyx-0.4.29.tgz 2026-02-17 22:00:32 +00:00
github-merge-queue[bot]
46f921dce0 Publish onyx-0.4.29.tgz 2026-02-17 21:43:26 +00:00
github-merge-queue[bot]
d13d16d24f Publish onyx-0.4.29.tgz 2026-02-17 21:22:56 +00:00
github-merge-queue[bot]
6c740746a5 Publish onyx-0.4.28.tgz 2026-02-17 21:17:25 +00:00
github-merge-queue[bot]
d62b7be5a2 Publish onyx-0.4.28.tgz 2026-02-17 20:28:45 +00:00
github-merge-queue[bot]
112a192373 Publish onyx-0.4.28.tgz 2026-02-17 19:50:24 +00:00
github-merge-queue[bot]
9d47b3264d Publish onyx-0.4.28.tgz 2026-02-17 18:32:52 +00:00
github-merge-queue[bot]
c8b3b6ffe6 Publish onyx-0.4.28.tgz 2026-02-17 18:25:20 +00:00
github-merge-queue[bot]
49372ec265 Publish onyx-0.4.28.tgz 2026-02-17 18:23:58 +00:00
github-merge-queue[bot]
625452850a Publish onyx-0.4.28.tgz 2026-02-17 18:20:04 +00:00
justin-tahara
95a413c36b Publish onyx-0.4.28.tgz 2026-02-17 15:20:25 +00:00
github-merge-queue[bot]
b0b9e12d3d Publish onyx-0.4.27.tgz 2026-02-17 08:49:49 +00:00
github-merge-queue[bot]
88cb357e13 Publish onyx-0.4.27.tgz 2026-02-17 05:29:47 +00:00
Danelegend
35b179e1df Publish onyx-0.4.27.tgz 2026-02-17 02:54:17 +00:00
github-merge-queue[bot]
79c3b141e3 Publish onyx-0.4.27.tgz 2026-02-17 02:44:19 +00:00
github-merge-queue[bot]
6ecbe5e64a Publish onyx-0.4.27.tgz 2026-02-17 01:28:38 +00:00
github-merge-queue[bot]
1b48e04303 Publish onyx-0.4.27.tgz 2026-02-17 00:37:30 +00:00
github-merge-queue[bot]
6239a2ec29 Publish onyx-0.4.27.tgz 2026-02-17 00:03:23 +00:00
github-merge-queue[bot]
213e69783f Publish onyx-0.4.27.tgz 2026-02-16 23:20:30 +00:00
github-merge-queue[bot]
3ad3fd94b9 Publish onyx-0.4.27.tgz 2026-02-16 18:30:57 +00:00
github-merge-queue[bot]
18f6eea110 Publish onyx-0.4.27.tgz 2026-02-16 11:29:53 +00:00
github-merge-queue[bot]
adb86590b6 Publish onyx-0.4.27.tgz 2026-02-16 03:09:32 +00:00
github-merge-queue[bot]
d8f125535e Publish onyx-0.4.27.tgz 2026-02-16 00:52:50 +00:00
github-merge-queue[bot]
e9d59640d3 Publish onyx-0.4.27.tgz 2026-02-15 23:46:56 +00:00
github-merge-queue[bot]
cb8dd7a2ff Publish onyx-0.4.27.tgz 2026-02-15 20:19:57 +00:00
github-merge-queue[bot]
d6696a09f8 Publish onyx-0.4.27.tgz 2026-02-14 02:51:44 +00:00
github-merge-queue[bot]
45d8214d77 Publish onyx-0.4.27.tgz 2026-02-14 02:36:47 +00:00
github-merge-queue[bot]
b9047451db Publish onyx-0.4.27.tgz 2026-02-14 02:21:30 +00:00
github-merge-queue[bot]
17cabe213a Publish onyx-0.4.27.tgz 2026-02-14 02:11:49 +00:00
github-merge-queue[bot]
89153d3bd0 Publish onyx-0.4.27.tgz 2026-02-14 02:04:21 +00:00
github-merge-queue[bot]
588aefdabb Publish onyx-0.4.27.tgz 2026-02-14 01:45:37 +00:00
github-merge-queue[bot]
df370f71bf Publish onyx-0.4.27.tgz 2026-02-14 01:24:51 +00:00
github-merge-queue[bot]
803e4d4471 Publish onyx-0.4.27.tgz 2026-02-14 01:19:47 +00:00
github-merge-queue[bot]
9ce23097cd Publish onyx-0.4.27.tgz 2026-02-14 00:38:29 +00:00
github-merge-queue[bot]
fa7d7ed306 Publish onyx-0.4.27.tgz 2026-02-14 00:27:47 +00:00
github-merge-queue[bot]
7e29d36183 Publish onyx-0.4.27.tgz 2026-02-13 23:56:23 +00:00
github-merge-queue[bot]
f33291804b Publish onyx-0.4.27.tgz 2026-02-13 23:44:28 +00:00
github-merge-queue[bot]
82cfa6b786 Publish onyx-0.4.27.tgz 2026-02-13 23:39:36 +00:00
github-merge-queue[bot]
590c222d29 Publish onyx-0.4.27.tgz 2026-02-13 23:12:53 +00:00
github-merge-queue[bot]
4949334bdf Publish onyx-0.4.27.tgz 2026-02-13 22:52:56 +00:00
github-merge-queue[bot]
0dbf47d9f7 Publish onyx-0.4.27.tgz 2026-02-13 22:24:31 +00:00
github-merge-queue[bot]
101e794c24 Publish onyx-0.4.27.tgz 2026-02-13 22:08:57 +00:00
justin-tahara
35785391f7 Publish onyx-0.4.27.tgz 2026-02-13 21:38:49 +00:00
github-merge-queue[bot]
f0320db1a0 Publish onyx-0.4.27.tgz 2026-02-13 21:36:08 +00:00
github-merge-queue[bot]
d413019cbe Publish onyx-0.4.27.tgz 2026-02-13 21:28:24 +00:00
github-merge-queue[bot]
9fd5d09210 Publish onyx-0.4.27.tgz 2026-02-13 21:24:03 +00:00
github-merge-queue[bot]
7c28c44bc3 Publish onyx-0.4.27.tgz 2026-02-13 21:20:36 +00:00
github-merge-queue[bot]
a0358e62ee Publish onyx-0.4.27.tgz 2026-02-13 21:16:35 +00:00
github-merge-queue[bot]
5b87e1764e Publish onyx-0.4.27.tgz 2026-02-13 21:11:08 +00:00
github-merge-queue[bot]
e30a6b77be Publish onyx-0.4.27.tgz 2026-02-13 21:02:06 +00:00
jmelahman
6ae8912ee3 Publish onyx-0.4.27.tgz 2026-02-13 20:51:10 +00:00
jmelahman
4a2291955a Publish onyx-0.4.27.tgz 2026-02-13 20:30:41 +00:00
jmelahman
f902a19bd0 Publish onyx-0.4.27.tgz 2026-02-13 19:07:19 +00:00
jmelahman
997feadce8 Publish onyx-0.4.27.tgz 2026-02-13 18:08:16 +00:00
github-merge-queue[bot]
75b67e05bd Publish onyx-0.4.27.tgz 2026-02-13 17:28:31 +00:00
github-merge-queue[bot]
1a55d58eb4 Publish onyx-0.4.27.tgz 2026-02-13 17:27:42 +00:00
github-merge-queue[bot]
755f7637de Publish onyx-0.4.27.tgz 2026-02-13 07:46:20 +00:00
github-merge-queue[bot]
f87866d65d Publish onyx-0.4.27.tgz 2026-02-13 04:36:49 +00:00
jmelahman
99a4ca57ed Publish onyx-0.4.27.tgz 2026-02-13 03:54:08 +00:00
yuhongsun96
6239aa82a1 Publish onyx-0.4.27.tgz 2026-02-13 03:45:29 +00:00
github-merge-queue[bot]
c63906edf8 Publish onyx-0.4.27.tgz 2026-02-13 02:58:51 +00:00
github-merge-queue[bot]
36dd724fd8 Publish onyx-0.4.27.tgz 2026-02-13 02:10:35 +00:00
github-merge-queue[bot]
b5bcb926b4 Publish onyx-0.4.27.tgz 2026-02-13 01:48:09 +00:00
github-merge-queue[bot]
1189593d57 Publish onyx-0.4.27.tgz 2026-02-13 01:44:57 +00:00
github-merge-queue[bot]
fdc60214c4 Publish onyx-0.4.27.tgz 2026-02-13 01:25:15 +00:00
github-merge-queue[bot]
0344fab93e Publish onyx-0.4.27.tgz 2026-02-13 00:25:48 +00:00
github-merge-queue[bot]
bd4dca91a6 Publish onyx-0.4.27.tgz 2026-02-13 00:11:21 +00:00
github-merge-queue[bot]
e599f08a76 Publish onyx-0.4.27.tgz 2026-02-12 23:52:37 +00:00
github-merge-queue[bot]
2384cf79a3 Publish onyx-0.4.27.tgz 2026-02-12 23:50:22 +00:00
github-merge-queue[bot]
a85e85a003 Publish onyx-0.4.27.tgz 2026-02-12 23:20:20 +00:00
github-merge-queue[bot]
6434db854a Publish onyx-0.4.27.tgz 2026-02-12 23:19:53 +00:00
github-merge-queue[bot]
0cddc70073 Publish onyx-0.4.27.tgz 2026-02-12 23:07:48 +00:00
github-merge-queue[bot]
8df682c3fa Publish onyx-0.4.27.tgz 2026-02-12 23:05:13 +00:00
github-merge-queue[bot]
437b3bc1fb Publish onyx-0.4.27.tgz 2026-02-12 22:59:12 +00:00
github-merge-queue[bot]
fde8078b2f Publish onyx-0.4.27.tgz 2026-02-12 22:49:08 +00:00
github-merge-queue[bot]
2e93b63f7f Publish onyx-0.4.27.tgz 2026-02-12 22:38:22 +00:00
github-merge-queue[bot]
862e515648 Publish onyx-0.4.27.tgz 2026-02-12 22:00:44 +00:00
github-merge-queue[bot]
b3ee2e3865 Publish onyx-0.4.27.tgz 2026-02-12 21:47:25 +00:00
github-merge-queue[bot]
65ed4fa92e Publish onyx-0.4.27.tgz 2026-02-12 21:42:17 +00:00
github-merge-queue[bot]
c163ec26c5 Publish onyx-0.4.27.tgz 2026-02-12 20:41:40 +00:00
github-merge-queue[bot]
7736287b32 Publish onyx-0.4.27.tgz 2026-02-12 20:31:28 +00:00
github-merge-queue[bot]
90ecd9ed9e Publish onyx-0.4.27.tgz 2026-02-12 20:20:28 +00:00
jmelahman
1dfec3c872 Publish onyx-0.4.27.tgz 2026-02-12 19:16:21 +00:00
github-merge-queue[bot]
35dbff6f43 Publish onyx-0.4.27.tgz 2026-02-12 19:01:36 +00:00
github-merge-queue[bot]
18b858aa10 Publish onyx-0.4.27.tgz 2026-02-12 18:47:46 +00:00
github-merge-queue[bot]
8a5deac14b Publish onyx-0.4.27.tgz 2026-02-12 18:01:49 +00:00
github-merge-queue[bot]
d79ef3a8d5 Publish onyx-0.4.27.tgz 2026-02-12 17:27:09 +00:00
github-merge-queue[bot]
a3f3d5fac8 Publish onyx-0.4.27.tgz 2026-02-12 14:42:27 +00:00
github-merge-queue[bot]
2c3de42224 Publish onyx-0.4.27.tgz 2026-02-12 09:16:55 +00:00
github-merge-queue[bot]
da1b32edfa Publish onyx-0.4.27.tgz 2026-02-12 09:10:43 +00:00
github-merge-queue[bot]
98e15027e8 Publish onyx-0.4.27.tgz 2026-02-12 09:09:19 +00:00
github-merge-queue[bot]
2f65739265 Publish onyx-0.4.27.tgz 2026-02-12 08:41:01 +00:00
github-merge-queue[bot]
11584840e7 Publish onyx-0.4.27.tgz 2026-02-12 08:37:15 +00:00
github-merge-queue[bot]
a6552e1047 Publish onyx-0.4.27.tgz 2026-02-12 06:24:20 +00:00
github-merge-queue[bot]
42a5d2a6c4 Publish onyx-0.4.27.tgz 2026-02-12 03:44:27 +00:00
github-merge-queue[bot]
70848307da Publish onyx-0.4.27.tgz 2026-02-12 02:13:09 +00:00
github-merge-queue[bot]
6f8ade19b1 Publish onyx-0.4.27.tgz 2026-02-12 02:10:50 +00:00
github-merge-queue[bot]
06e8bd7f5b Publish onyx-0.4.27.tgz 2026-02-12 01:45:22 +00:00
github-merge-queue[bot]
e2ac492837 Publish onyx-0.4.27.tgz 2026-02-12 01:23:18 +00:00
github-merge-queue[bot]
bf3b91e104 Publish onyx-0.4.27.tgz 2026-02-12 00:35:29 +00:00
github-merge-queue[bot]
4ea0a41b94 Publish onyx-0.4.27.tgz 2026-02-12 00:04:12 +00:00
github-merge-queue[bot]
1a03568d27 Publish onyx-0.4.27.tgz 2026-02-12 00:00:54 +00:00
github-merge-queue[bot]
36cbf8ed67 Publish onyx-0.4.27.tgz 2026-02-11 23:48:56 +00:00
github-merge-queue[bot]
26c9271a01 Publish onyx-0.4.27.tgz 2026-02-11 23:37:18 +00:00
github-merge-queue[bot]
a4d7388798 Publish onyx-0.4.27.tgz 2026-02-11 23:18:49 +00:00
jmelahman
eb02aeaa49 Publish onyx-0.4.27.tgz 2026-02-11 23:10:19 +00:00
jmelahman
b4be07c7f5 Publish onyx-0.4.27.tgz 2026-02-11 23:05:16 +00:00
github-merge-queue[bot]
502e20d1f2 Publish onyx-0.4.27.tgz 2026-02-11 23:04:45 +00:00
jmelahman
0aa8a333f3 Publish onyx-0.4.27.tgz 2026-02-11 22:41:46 +00:00
jmelahman
2c8258ac7b Publish onyx-0.4.27.tgz 2026-02-11 22:36:15 +00:00
github-merge-queue[bot]
49a92d296e Publish onyx-0.4.27.tgz 2026-02-11 22:23:55 +00:00
github-merge-queue[bot]
523f4b4294 Publish onyx-0.4.27.tgz 2026-02-11 22:01:23 +00:00
github-merge-queue[bot]
a29d1ac904 Publish onyx-0.4.27.tgz 2026-02-11 21:53:20 +00:00
github-merge-queue[bot]
73da445b17 Publish onyx-0.4.27.tgz 2026-02-11 21:17:23 +00:00
github-merge-queue[bot]
cd17e1e51e Publish onyx-0.4.26.tgz 2026-02-11 21:14:22 +00:00
github-merge-queue[bot]
d1818e3005 Publish onyx-0.4.26.tgz 2026-02-11 20:20:08 +00:00
github-merge-queue[bot]
4a59880c0e Publish onyx-0.4.26.tgz 2026-02-11 20:06:46 +00:00
github-merge-queue[bot]
7d5940a810 Publish onyx-0.4.26.tgz 2026-02-11 19:42:21 +00:00
github-merge-queue[bot]
db68b6ed67 Publish onyx-0.4.26.tgz 2026-02-11 19:24:40 +00:00
github-merge-queue[bot]
2daa6020c1 Publish onyx-0.4.25.tgz 2026-02-11 18:55:53 +00:00
github-merge-queue[bot]
1dd0ef54e4 Publish onyx-0.4.25.tgz 2026-02-11 18:47:11 +00:00
github-merge-queue[bot]
e41369c06f Publish onyx-0.4.25.tgz 2026-02-11 18:32:54 +00:00
github-merge-queue[bot]
fa60c97d8b Publish onyx-0.4.25.tgz 2026-02-11 18:17:05 +00:00
github-merge-queue[bot]
c4497bfdaf Publish onyx-0.4.25.tgz 2026-02-11 16:20:38 +00:00
github-merge-queue[bot]
a7f6c4cd3f Publish onyx-0.4.25.tgz 2026-02-11 06:36:54 +00:00
github-merge-queue[bot]
1424d8a3a6 Publish onyx-0.4.25.tgz 2026-02-11 06:33:28 +00:00
github-merge-queue[bot]
9524bbbd5f Publish onyx-0.4.25.tgz 2026-02-11 04:41:18 +00:00
github-merge-queue[bot]
acfcb6b869 Publish onyx-0.4.25.tgz 2026-02-11 03:46:24 +00:00
github-merge-queue[bot]
618e5dbb25 Publish onyx-0.4.25.tgz 2026-02-11 02:53:23 +00:00
github-merge-queue[bot]
5575f2e78c Publish onyx-0.4.25.tgz 2026-02-11 02:41:39 +00:00
github-merge-queue[bot]
6278cbaa2e Publish onyx-0.4.25.tgz 2026-02-11 02:36:02 +00:00
github-merge-queue[bot]
2978c21f87 Publish onyx-0.4.25.tgz 2026-02-11 02:24:21 +00:00
github-merge-queue[bot]
2b6f880d3a Publish onyx-0.4.25.tgz 2026-02-11 02:22:24 +00:00
github-merge-queue[bot]
b6959842d5 Publish onyx-0.4.25.tgz 2026-02-11 02:15:15 +00:00
github-merge-queue[bot]
79ff22778c Publish onyx-0.4.25.tgz 2026-02-11 01:20:47 +00:00
github-merge-queue[bot]
8d974940e5 Publish onyx-0.4.25.tgz 2026-02-11 01:10:42 +00:00
github-merge-queue[bot]
f1b0b4d87d Publish onyx-0.4.25.tgz 2026-02-11 01:06:28 +00:00
github-merge-queue[bot]
719e997ec9 Publish onyx-0.4.25.tgz 2026-02-11 01:02:31 +00:00
github-merge-queue[bot]
d8dce7a2dc Publish onyx-0.4.25.tgz 2026-02-10 23:18:40 +00:00
github-merge-queue[bot]
96cd7e111d Publish onyx-0.4.25.tgz 2026-02-10 22:49:51 +00:00
github-merge-queue[bot]
c458bae6b7 Publish onyx-0.4.25.tgz 2026-02-10 21:49:09 +00:00
github-merge-queue[bot]
aa628f192b Publish onyx-0.4.25.tgz 2026-02-10 20:09:25 +00:00
github-merge-queue[bot]
b4c0bc2172 Publish onyx-0.4.25.tgz 2026-02-10 18:59:19 +00:00
github-merge-queue[bot]
3c5c4dd32f Publish onyx-0.4.25.tgz 2026-02-10 18:44:44 +00:00
jmelahman
4cbe10dc77 Publish onyx-0.4.25.tgz 2026-02-10 16:10:45 +00:00
github-merge-queue[bot]
ecd87ea0f9 Publish onyx-0.4.25.tgz 2026-02-10 04:28:20 +00:00
github-merge-queue[bot]
ce69796fea Publish onyx-0.4.25.tgz 2026-02-10 04:14:13 +00:00
github-merge-queue[bot]
03db362615 Publish onyx-0.4.25.tgz 2026-02-10 03:44:39 +00:00
github-merge-queue[bot]
ce559d0646 Publish onyx-0.4.25.tgz 2026-02-10 03:26:29 +00:00
github-merge-queue[bot]
1c8d5ba39f Publish onyx-0.4.25.tgz 2026-02-10 02:56:51 +00:00
github-merge-queue[bot]
671902c020 Publish onyx-0.4.25.tgz 2026-02-10 02:11:44 +00:00
github-merge-queue[bot]
67e085cca9 Publish onyx-0.4.25.tgz 2026-02-10 02:11:05 +00:00
github-merge-queue[bot]
d78b762dd6 Publish onyx-0.4.25.tgz 2026-02-10 01:28:19 +00:00
github-merge-queue[bot]
57b72a9159 Publish onyx-0.4.25.tgz 2026-02-10 00:49:18 +00:00
github-merge-queue[bot]
b9266d6aaf Publish onyx-0.4.25.tgz 2026-02-09 23:39:34 +00:00
github-merge-queue[bot]
6fc09154e0 Publish onyx-0.4.25.tgz 2026-02-09 23:06:11 +00:00
github-merge-queue[bot]
52684a0e69 Publish onyx-0.4.25.tgz 2026-02-09 22:59:04 +00:00
github-merge-queue[bot]
9ac8c465f4 Publish onyx-0.4.25.tgz 2026-02-09 22:19:33 +00:00
github-merge-queue[bot]
c571cf7502 Publish onyx-0.4.25.tgz 2026-02-09 22:12:03 +00:00
github-merge-queue[bot]
be1a341a06 Publish onyx-0.4.25.tgz 2026-02-09 21:57:42 +00:00
github-merge-queue[bot]
69f07ac734 Publish onyx-0.4.25.tgz 2026-02-09 21:51:10 +00:00
github-merge-queue[bot]
c45ef02d36 Publish onyx-0.4.25.tgz 2026-02-09 21:33:31 +00:00
jmelahman
a058e87e17 Publish onyx-0.4.25.tgz 2026-02-09 21:21:03 +00:00
jmelahman
a12bc75c02 Publish onyx-0.4.25.tgz 2026-02-09 21:20:35 +00:00
jmelahman
f50ed1ae89 Publish onyx-0.4.25.tgz 2026-02-09 21:19:25 +00:00
github-merge-queue[bot]
ac210a438c Publish onyx-0.4.25.tgz 2026-02-09 21:12:55 +00:00
github-merge-queue[bot]
637d2070ce Publish onyx-0.4.25.tgz 2026-02-09 20:52:10 +00:00
github-merge-queue[bot]
e3ce46182b Publish onyx-0.4.25.tgz 2026-02-09 20:41:54 +00:00
jmelahman
6c307f3aa8 Publish onyx-0.4.25.tgz 2026-02-09 20:35:08 +00:00
jmelahman
6a883d8521 Publish onyx-0.4.25.tgz 2026-02-09 20:19:25 +00:00
jmelahman
b6df242ea5 Publish onyx-0.4.25.tgz 2026-02-09 20:18:29 +00:00
jmelahman
6dd2c1d850 Publish onyx-0.4.25.tgz 2026-02-09 20:15:38 +00:00
jmelahman
5f8e11a826 Publish onyx-0.4.25.tgz 2026-02-09 20:14:27 +00:00
github-merge-queue[bot]
0ad5e5cd21 Publish onyx-0.4.25.tgz 2026-02-09 18:20:00 +00:00
github-merge-queue[bot]
12dbf9e8bb Publish onyx-0.4.25.tgz 2026-02-09 18:05:23 +00:00
github-merge-queue[bot]
3bcecebb81 Publish onyx-0.4.25.tgz 2026-02-09 18:00:06 +00:00
github-merge-queue[bot]
db71b7e767 Publish onyx-0.4.25.tgz 2026-02-09 17:33:47 +00:00
github-merge-queue[bot]
5844b7eb15 Publish onyx-0.4.25.tgz 2026-02-09 08:02:04 +00:00
github-merge-queue[bot]
b7a70c920a Publish onyx-0.4.25.tgz 2026-02-09 08:00:51 +00:00
github-merge-queue[bot]
0188be28aa Publish onyx-0.4.25.tgz 2026-02-09 07:33:14 +00:00
github-merge-queue[bot]
bbf68bdb08 Publish onyx-0.4.25.tgz 2026-02-09 04:36:06 +00:00
github-merge-queue[bot]
97061e12f6 Publish onyx-0.4.25.tgz 2026-02-09 04:19:06 +00:00
github-merge-queue[bot]
8aa94186c6 Publish onyx-0.4.25.tgz 2026-02-09 00:37:18 +00:00
github-merge-queue[bot]
26791128c1 Publish onyx-0.4.25.tgz 2026-02-08 22:34:56 +00:00
github-merge-queue[bot]
31c6e2bcb4 Publish onyx-0.4.25.tgz 2026-02-08 19:59:29 +00:00
github-merge-queue[bot]
352349de22 Publish onyx-0.4.25.tgz 2026-02-07 08:47:16 +00:00
github-merge-queue[bot]
47dd0f69df Publish onyx-0.4.25.tgz 2026-02-07 07:06:06 +00:00
github-merge-queue[bot]
e1a397167d Publish onyx-0.4.25.tgz 2026-02-07 03:45:16 +00:00
github-merge-queue[bot]
78dd178e1e Publish onyx-0.4.25.tgz 2026-02-07 00:43:22 +00:00
github-merge-queue[bot]
a97bee3b33 Publish onyx-0.4.25.tgz 2026-02-07 00:41:34 +00:00
github-merge-queue[bot]
196044b228 Publish onyx-0.4.25.tgz 2026-02-06 22:14:55 +00:00
github-merge-queue[bot]
651f350f4a Publish onyx-0.4.25.tgz 2026-02-06 22:04:51 +00:00
github-merge-queue[bot]
1cb72818e9 Publish onyx-0.4.25.tgz 2026-02-06 21:54:20 +00:00
github-merge-queue[bot]
8bef2b1390 Publish onyx-0.4.25.tgz 2026-02-06 21:07:31 +00:00
github-merge-queue[bot]
bee47a662e Publish onyx-0.4.25.tgz 2026-02-06 18:42:53 +00:00
github-merge-queue[bot]
ba004e8b1f Publish onyx-0.4.25.tgz 2026-02-06 18:35:46 +00:00
github-merge-queue[bot]
05cffec4ad Publish onyx-0.4.25.tgz 2026-02-06 18:10:19 +00:00
github-merge-queue[bot]
c4e8ee5a62 Publish onyx-0.4.25.tgz 2026-02-06 18:02:50 +00:00
github-merge-queue[bot]
e565646c7c Publish onyx-0.4.25.tgz 2026-02-06 17:15:45 +00:00
github-merge-queue[bot]
1ce5a79c4c Publish onyx-0.4.25.tgz 2026-02-06 04:46:15 +00:00
github-merge-queue[bot]
577798e692 Publish onyx-0.4.25.tgz 2026-02-06 04:05:52 +00:00
github-merge-queue[bot]
7452038633 Publish onyx-0.4.25.tgz 2026-02-06 03:39:35 +00:00
github-merge-queue[bot]
96432b453c Publish onyx-0.4.25.tgz 2026-02-06 03:35:51 +00:00
github-merge-queue[bot]
c0ee11d972 Publish onyx-0.4.25.tgz 2026-02-06 02:52:23 +00:00
github-merge-queue[bot]
b41dc2739e Publish onyx-0.4.25.tgz 2026-02-06 02:48:21 +00:00
github-merge-queue[bot]
f0fb7135ec Publish onyx-0.4.25.tgz 2026-02-06 02:28:34 +00:00
github-merge-queue[bot]
af47628af8 Publish onyx-0.4.25.tgz 2026-02-06 01:48:11 +00:00
github-merge-queue[bot]
729c06ab42 Publish onyx-0.4.25.tgz 2026-02-06 00:54:52 +00:00
github-merge-queue[bot]
b11a325de8 Publish onyx-0.4.25.tgz 2026-02-06 00:21:02 +00:00
wenxi-onyx
2d063ececa Publish onyx-0.4.25.tgz 2026-02-05 23:33:39 +00:00
github-merge-queue[bot]
269aa5c18a Publish onyx-0.4.25.tgz 2026-02-05 23:32:26 +00:00
github-merge-queue[bot]
9df4268cdc Publish onyx-0.4.25.tgz 2026-02-05 23:22:58 +00:00
github-merge-queue[bot]
e5da468272 Publish onyx-0.4.25.tgz 2026-02-05 22:54:04 +00:00
github-merge-queue[bot]
5c904c3c7c Publish onyx-0.4.25.tgz 2026-02-05 21:20:57 +00:00
github-merge-queue[bot]
4e0538fdd7 Publish onyx-0.4.25.tgz 2026-02-05 20:59:08 +00:00
github-merge-queue[bot]
2c5f08bf69 Publish onyx-0.4.25.tgz 2026-02-05 20:49:07 +00:00
github-merge-queue[bot]
47d820b661 Publish onyx-0.4.25.tgz 2026-02-05 20:43:21 +00:00
github-merge-queue[bot]
9d940c2188 Publish onyx-0.4.25.tgz 2026-02-05 20:31:38 +00:00
github-merge-queue[bot]
98d60df338 Publish onyx-0.4.25.tgz 2026-02-05 20:24:21 +00:00
github-merge-queue[bot]
46f2e27180 Publish onyx-0.4.25.tgz 2026-02-05 20:19:22 +00:00
github-merge-queue[bot]
e3052641fd Publish onyx-0.4.25.tgz 2026-02-05 20:15:26 +00:00
github-merge-queue[bot]
4ce8739334 Publish onyx-0.4.25.tgz 2026-02-05 19:52:47 +00:00
github-merge-queue[bot]
45fd853cd2 Publish onyx-0.4.25.tgz 2026-02-05 19:46:23 +00:00
github-merge-queue[bot]
653dbafc66 Publish onyx-0.4.25.tgz 2026-02-05 19:23:45 +00:00
github-merge-queue[bot]
3b26b355d3 Publish onyx-0.4.25.tgz 2026-02-05 19:05:55 +00:00
github-merge-queue[bot]
867e87eedb Publish onyx-0.4.25.tgz 2026-02-05 18:53:43 +00:00
github-merge-queue[bot]
d1daa7d397 Publish onyx-0.4.25.tgz 2026-02-05 18:40:15 +00:00
github-merge-queue[bot]
6c51b940d2 Publish onyx-0.4.25.tgz 2026-02-05 18:34:39 +00:00
github-merge-queue[bot]
2884a622d8 Publish onyx-0.4.25.tgz 2026-02-05 18:05:10 +00:00
github-merge-queue[bot]
8a26ae3f4e Publish onyx-0.4.25.tgz 2026-02-05 07:09:19 +00:00
yuhongsun96
b3a730bbe6 Publish onyx-0.4.25.tgz 2026-02-05 05:08:26 +00:00
github-merge-queue[bot]
91a70f8028 Publish onyx-0.4.25.tgz 2026-02-05 02:57:07 +00:00
github-merge-queue[bot]
83eb7fc872 Publish onyx-0.4.25.tgz 2026-02-05 02:34:59 +00:00
github-merge-queue[bot]
a279b61e07 Publish onyx-0.4.25.tgz 2026-02-05 01:29:33 +00:00
github-merge-queue[bot]
a11404203d Publish onyx-0.4.25.tgz 2026-02-05 01:21:32 +00:00
github-merge-queue[bot]
1b9b4b1121 Publish onyx-0.4.25.tgz 2026-02-05 00:47:16 +00:00
github-merge-queue[bot]
303bc834fb Publish onyx-0.4.25.tgz 2026-02-05 00:41:49 +00:00
github-merge-queue[bot]
fceae97798 Publish onyx-0.4.25.tgz 2026-02-04 23:23:26 +00:00
github-merge-queue[bot]
21a115eedc Publish onyx-0.4.25.tgz 2026-02-04 22:24:50 +00:00
github-merge-queue[bot]
71085bd575 Publish onyx-0.4.25.tgz 2026-02-04 21:58:03 +00:00
github-merge-queue[bot]
b16155bf06 Publish onyx-0.4.25.tgz 2026-02-04 21:45:29 +00:00
github-merge-queue[bot]
e9276ff234 Publish onyx-0.4.25.tgz 2026-02-04 21:38:55 +00:00
github-merge-queue[bot]
7b18d86c47 Publish onyx-0.4.25.tgz 2026-02-04 20:02:05 +00:00
github-merge-queue[bot]
b8bde12676 Publish onyx-0.4.25.tgz 2026-02-04 19:03:39 +00:00
github-merge-queue[bot]
e48c059aed Publish onyx-0.4.25.tgz 2026-02-04 17:52:47 +00:00
github-merge-queue[bot]
92c10aa013 Publish onyx-0.4.25.tgz 2026-02-04 09:48:30 +00:00
yuhongsun96
ea7659cc89 Publish onyx-0.4.25.tgz 2026-02-04 07:13:24 +00:00
github-merge-queue[bot]
e974bc1ce1 Publish onyx-0.4.25.tgz 2026-02-04 07:04:54 +00:00
github-merge-queue[bot]
5f2f26ef1e Publish onyx-0.4.25.tgz 2026-02-04 05:22:41 +00:00
github-merge-queue[bot]
4c3d5ad778 Publish onyx-0.4.25.tgz 2026-02-04 04:56:03 +00:00
github-merge-queue[bot]
e9b35e0bc6 Publish onyx-0.4.25.tgz 2026-02-04 03:27:27 +00:00
github-merge-queue[bot]
a1d9f52450 Publish onyx-0.4.25.tgz 2026-02-04 01:58:58 +00:00
github-merge-queue[bot]
542c3130ac Publish onyx-0.4.25.tgz 2026-02-04 01:03:32 +00:00
github-merge-queue[bot]
0eb1d970d9 Publish onyx-0.4.25.tgz 2026-02-04 00:33:32 +00:00
yuhongsun96
f2f8869707 Publish onyx-0.4.25.tgz 2026-02-03 23:29:53 +00:00
yuhongsun96
646c1456ae Publish onyx-0.4.25.tgz 2026-02-03 23:22:53 +00:00
yuhongsun96
2e7f67fddb Publish onyx-0.4.25.tgz 2026-02-03 23:08:23 +00:00
github-merge-queue[bot]
be12647305 Publish onyx-0.4.25.tgz 2026-02-03 22:54:35 +00:00
github-merge-queue[bot]
aa7cef1576 Publish onyx-0.4.25.tgz 2026-02-03 22:47:14 +00:00
yuhongsun96
e1d49a428d Publish onyx-0.4.25.tgz 2026-02-03 22:40:54 +00:00
github-merge-queue[bot]
d17cdcda72 Publish onyx-0.4.25.tgz 2026-02-03 19:48:19 +00:00
github-merge-queue[bot]
465955eeeb Publish onyx-0.4.25.tgz 2026-02-03 19:31:30 +00:00
github-merge-queue[bot]
ad0e069d3d Publish onyx-0.4.25.tgz 2026-02-03 18:02:31 +00:00
github-merge-queue[bot]
4952e0464c Publish onyx-0.4.25.tgz 2026-02-03 17:59:17 +00:00
nmgarza5
7efd66e877 Publish onyx-0.4.25.tgz 2026-02-03 16:37:10 +00:00
jessicasingh7
13c91ab5d0 Publish onyx-0.4.25.tgz 2026-02-03 08:36:15 +00:00
github-merge-queue[bot]
0e2f840f09 Publish onyx-0.4.25.tgz 2026-02-03 06:41:46 +00:00
github-merge-queue[bot]
2dd73186b4 Publish onyx-0.4.25.tgz 2026-02-03 05:19:04 +00:00
github-merge-queue[bot]
b280b00169 Publish onyx-0.4.25.tgz 2026-02-03 04:12:33 +00:00
github-merge-queue[bot]
3fe1a5e9f9 Publish onyx-0.4.25.tgz 2026-02-03 02:54:31 +00:00
github-merge-queue[bot]
47da373fce Publish onyx-0.4.25.tgz 2026-02-03 02:19:59 +00:00
github-merge-queue[bot]
d8cd54a580 Publish onyx-0.4.25.tgz 2026-02-03 02:05:35 +00:00
github-merge-queue[bot]
34418253d4 Publish onyx-0.4.25.tgz 2026-02-03 01:47:09 +00:00
github-merge-queue[bot]
dfc1117179 Publish onyx-0.4.25.tgz 2026-02-03 01:35:36 +00:00
github-merge-queue[bot]
1e215f1ee2 Publish onyx-0.4.25.tgz 2026-02-03 01:22:57 +00:00
github-merge-queue[bot]
c639e5de9d Publish onyx-0.4.25.tgz 2026-02-03 00:53:54 +00:00
github-merge-queue[bot]
d8d1aa1f37 Publish onyx-0.4.25.tgz 2026-02-03 00:52:28 +00:00
github-merge-queue[bot]
6291605ddb Publish onyx-0.4.25.tgz 2026-02-03 00:33:59 +00:00
github-merge-queue[bot]
1fb928ac07 Publish onyx-0.4.25.tgz 2026-02-03 00:22:08 +00:00
github-merge-queue[bot]
fb74017631 Publish onyx-0.4.25.tgz 2026-02-03 00:17:48 +00:00
github-merge-queue[bot]
115a9f2b76 Publish onyx-0.4.25.tgz 2026-02-02 23:51:47 +00:00
github-merge-queue[bot]
044a5f0dfa Publish onyx-0.4.25.tgz 2026-02-02 23:49:26 +00:00
github-merge-queue[bot]
045f72aaf9 Publish onyx-0.4.25.tgz 2026-02-02 23:12:22 +00:00
github-merge-queue[bot]
e96084b4a9 Publish onyx-0.4.24.tgz 2026-02-02 18:54:11 +00:00
github-merge-queue[bot]
5211983bf0 Publish onyx-0.4.23.tgz 2026-02-02 18:41:27 +00:00
github-merge-queue[bot]
28534088d4 Publish onyx-0.4.23.tgz 2026-02-02 17:43:08 +00:00
github-merge-queue[bot]
91586da94a Publish onyx-0.4.23.tgz 2026-02-02 15:04:01 +00:00
github-merge-queue[bot]
467742bfd8 Publish onyx-0.4.23.tgz 2026-02-02 15:00:37 +00:00
Danelegend
bd36aab974 Publish onyx-0.4.23.tgz 2026-02-02 03:36:56 +00:00
yuhongsun96
a189025bf8 Publish onyx-0.4.23.tgz 2026-02-02 01:18:45 +00:00
github-merge-queue[bot]
de1ed3466c Publish onyx-0.4.23.tgz 2026-02-01 22:45:47 +00:00
github-merge-queue[bot]
6209d81930 Publish onyx-0.4.23.tgz 2026-02-01 22:33:34 +00:00
github-merge-queue[bot]
80e0942e85 Publish onyx-0.4.23.tgz 2026-02-01 21:37:27 +00:00
github-merge-queue[bot]
dab1083187 Publish onyx-0.4.23.tgz 2026-02-01 20:33:54 +00:00
github-merge-queue[bot]
704cffca9b Publish onyx-0.4.23.tgz 2026-02-01 20:11:10 +00:00
github-merge-queue[bot]
7bd5b0876d Publish onyx-0.4.23.tgz 2026-02-01 17:58:37 +00:00
github-merge-queue[bot]
cbaf5fdb8a Publish onyx-0.4.23.tgz 2026-02-01 09:01:17 +00:00
yuhongsun96
23a8f45511 Publish onyx-0.4.23.tgz 2026-02-01 07:45:40 +00:00
yuhongsun96
e4e6dd51f5 Publish onyx-0.4.23.tgz 2026-02-01 07:43:38 +00:00
yuhongsun96
3f7eb20075 Publish onyx-0.4.23.tgz 2026-02-01 07:17:26 +00:00
yuhongsun96
75983de1c3 Publish onyx-0.4.23.tgz 2026-02-01 07:12:45 +00:00
yuhongsun96
282c87a508 Publish onyx-0.4.23.tgz 2026-02-01 06:58:56 +00:00
yuhongsun96
2d66c5d775 Publish onyx-0.4.23.tgz 2026-02-01 06:24:22 +00:00
yuhongsun96
0daa7c9997 Publish onyx-0.4.23.tgz 2026-02-01 05:30:04 +00:00
yuhongsun96
db19521361 Publish onyx-0.4.23.tgz 2026-02-01 04:22:06 +00:00
github-merge-queue[bot]
cad802c6e4 Publish onyx-0.4.23.tgz 2026-02-01 03:42:51 +00:00
yuhongsun96
d2d5632e4c Publish onyx-0.4.23.tgz 2026-02-01 03:27:01 +00:00
github-merge-queue[bot]
031131e812 Publish onyx-0.4.23.tgz 2026-02-01 03:22:38 +00:00
github-merge-queue[bot]
16c0c7322c Publish onyx-0.4.23.tgz 2026-02-01 03:12:11 +00:00
github-merge-queue[bot]
9ebeb4d178 Publish onyx-0.4.23.tgz 2026-02-01 02:43:09 +00:00
github-merge-queue[bot]
ce6d7ed6c8 Publish onyx-0.4.23.tgz 2026-02-01 02:39:16 +00:00
github-merge-queue[bot]
9157e6b009 Publish onyx-0.4.23.tgz 2026-02-01 02:07:09 +00:00
github-merge-queue[bot]
2ff26b015a Publish onyx-0.4.23.tgz 2026-02-01 01:40:41 +00:00
github-merge-queue[bot]
9fdd879386 Publish onyx-0.4.23.tgz 2026-02-01 00:31:53 +00:00
yuhongsun96
624f060374 Publish onyx-0.4.23.tgz 2026-01-31 23:45:28 +00:00
github-merge-queue[bot]
0b3cfbb44e Publish onyx-0.4.23.tgz 2026-01-31 23:42:27 +00:00
github-merge-queue[bot]
a39553c8b5 Publish onyx-0.4.23.tgz 2026-01-31 23:18:22 +00:00
github-merge-queue[bot]
b0ed7781e6 Publish onyx-0.4.23.tgz 2026-01-31 22:25:49 +00:00
github-merge-queue[bot]
8f3fcfc91d Publish onyx-0.4.23.tgz 2026-01-31 22:15:15 +00:00
github-merge-queue[bot]
79fdc073f5 Publish onyx-0.4.23.tgz 2026-01-31 19:54:51 +00:00
github-merge-queue[bot]
5a3a59cc66 Publish onyx-0.4.23.tgz 2026-01-31 13:53:22 +00:00
github-merge-queue[bot]
9c0400addf Publish onyx-0.4.23.tgz 2026-01-31 03:13:19 +00:00
github-merge-queue[bot]
5d98cceca4 Publish onyx-0.4.23.tgz 2026-01-31 02:30:07 +00:00
github-merge-queue[bot]
7a1b89037a Publish onyx-0.4.23.tgz 2026-01-31 02:27:33 +00:00
github-merge-queue[bot]
4fb2e4767b Publish onyx-0.4.23.tgz 2026-01-31 01:24:58 +00:00
github-merge-queue[bot]
4a561c03ae Publish onyx-0.4.23.tgz 2026-01-31 00:54:14 +00:00
github-merge-queue[bot]
e459734d6e Publish onyx-0.4.23.tgz 2026-01-31 00:33:16 +00:00
github-merge-queue[bot]
101a75101f Publish onyx-0.4.23.tgz 2026-01-31 00:23:50 +00:00
nmgarza5
5f169f86a7 Publish onyx-0.4.23.tgz 2026-01-31 00:17:27 +00:00
github-merge-queue[bot]
1c07a69e8d Publish onyx-0.4.23.tgz 2026-01-30 23:39:40 +00:00
github-merge-queue[bot]
5691c4956a Publish onyx-0.4.23.tgz 2026-01-30 23:05:57 +00:00
github-merge-queue[bot]
f8ecb64a34 Publish onyx-0.4.23.tgz 2026-01-30 22:59:31 +00:00
github-merge-queue[bot]
1c9342de93 Publish onyx-0.4.23.tgz 2026-01-30 22:40:33 +00:00
github-merge-queue[bot]
6279ab8b58 Publish onyx-0.4.23.tgz 2026-01-30 22:35:29 +00:00
github-merge-queue[bot]
508a0fdf8c Publish onyx-0.4.23.tgz 2026-01-30 22:13:29 +00:00
github-merge-queue[bot]
5e2e846444 Publish onyx-0.4.23.tgz 2026-01-30 21:26:49 +00:00
github-merge-queue[bot]
0b9aec3a58 Publish onyx-0.4.23.tgz 2026-01-30 20:59:37 +00:00
github-merge-queue[bot]
456d09e35c Publish onyx-0.4.23.tgz 2026-01-30 20:55:24 +00:00
github-merge-queue[bot]
419c150efe Publish onyx-0.4.23.tgz 2026-01-30 20:51:03 +00:00
github-merge-queue[bot]
a28fa85f4b Publish onyx-0.4.23.tgz 2026-01-30 20:02:54 +00:00
github-merge-queue[bot]
f1f530b96e Publish onyx-0.4.23.tgz 2026-01-30 19:36:08 +00:00
github-merge-queue[bot]
cb6c8e1af8 Publish onyx-0.4.23.tgz 2026-01-30 18:37:13 +00:00
github-merge-queue[bot]
c660c91c6e Publish onyx-0.4.23.tgz 2026-01-30 18:13:02 +00:00
github-merge-queue[bot]
0eae90cb65 Publish onyx-0.4.23.tgz 2026-01-30 17:43:58 +00:00
github-merge-queue[bot]
8dc8dba85f Publish onyx-0.4.23.tgz 2026-01-30 12:20:05 +00:00
github-merge-queue[bot]
7466375fab Publish onyx-0.4.23.tgz 2026-01-30 11:50:45 +00:00
github-merge-queue[bot]
5adbed866c Publish onyx-0.4.23.tgz 2026-01-30 10:44:17 +00:00
github-merge-queue[bot]
7bcdefdb50 Publish onyx-0.4.23.tgz 2026-01-30 10:05:15 +00:00
github-merge-queue[bot]
8a2b1bc140 Publish onyx-0.4.23.tgz 2026-01-30 07:55:16 +00:00
github-merge-queue[bot]
402a535cb3 Publish onyx-0.4.23.tgz 2026-01-30 06:58:15 +00:00
github-merge-queue[bot]
c2c5704645 Publish onyx-0.4.23.tgz 2026-01-30 06:09:46 +00:00
github-merge-queue[bot]
8798938b03 Publish onyx-0.4.23.tgz 2026-01-30 05:03:31 +00:00
github-merge-queue[bot]
fd994703a5 Publish onyx-0.4.23.tgz 2026-01-30 05:00:27 +00:00
github-merge-queue[bot]
2f4a08a7ee Publish onyx-0.4.23.tgz 2026-01-30 04:58:10 +00:00
github-merge-queue[bot]
4c1d7f4c0c Publish onyx-0.4.23.tgz 2026-01-30 03:53:58 +00:00
acaprau
135eead407 Publish onyx-0.4.23.tgz 2026-01-30 02:43:15 +00:00
evan-onyx
a99301deab Publish onyx-0.4.23.tgz 2026-01-30 02:41:34 +00:00
github-merge-queue[bot]
f77f5297b8 Publish onyx-0.4.23.tgz 2026-01-30 01:37:18 +00:00
github-merge-queue[bot]
59577803c5 Publish onyx-0.4.23.tgz 2026-01-30 01:00:18 +00:00
github-merge-queue[bot]
e2c8d35f24 Publish onyx-0.4.23.tgz 2026-01-30 00:55:20 +00:00
github-merge-queue[bot]
971135fbd3 Publish onyx-0.4.23.tgz 2026-01-30 00:42:32 +00:00
github-merge-queue[bot]
20e92499f6 Publish onyx-0.4.23.tgz 2026-01-30 00:26:37 +00:00
github-merge-queue[bot]
68d5628107 Publish onyx-0.4.23.tgz 2026-01-30 00:12:30 +00:00
raunakab
579d2faec9 Publish onyx-0.4.23.tgz 2026-01-29 23:45:37 +00:00
github-merge-queue[bot]
4fe7cf2947 Publish onyx-0.4.23.tgz 2026-01-29 23:35:18 +00:00
github-merge-queue[bot]
81e8db020b Publish onyx-0.4.22.tgz 2026-01-29 23:06:39 +00:00
github-merge-queue[bot]
4ff3fd9fb5 Publish onyx-0.4.22.tgz 2026-01-29 23:00:13 +00:00
github-merge-queue[bot]
a6b4e00345 Publish onyx-0.4.22.tgz 2026-01-29 21:44:14 +00:00
jessicasingh7
927818540b Publish onyx-0.4.21.tgz 2026-01-29 20:58:14 +00:00
github-merge-queue[bot]
a6f7c16cbe Publish onyx-0.4.21.tgz 2026-01-29 20:49:22 +00:00
github-merge-queue[bot]
729005e8e7 Publish onyx-0.4.21.tgz 2026-01-29 20:01:47 +00:00
github-merge-queue[bot]
07524d53bd Publish onyx-0.4.21.tgz 2026-01-29 18:17:48 +00:00
github-merge-queue[bot]
c67294318e Publish onyx-0.4.21.tgz 2026-01-29 18:05:09 +00:00
github-merge-queue[bot]
29668bc298 Publish onyx-0.4.21.tgz 2026-01-29 17:52:21 +00:00
github-merge-queue[bot]
48bc278509 Publish onyx-0.4.21.tgz 2026-01-29 16:13:57 +00:00
raunakab
ba70761e76 Publish onyx-0.4.21.tgz 2026-01-29 08:52:49 +00:00
rohoswagger
6ceebcf6a7 Publish onyx-0.4.21.tgz 2026-01-29 06:59:59 +00:00
github-merge-queue[bot]
1a0253bdf7 Publish onyx-0.4.21.tgz 2026-01-29 06:40:54 +00:00
raunakab
54b737ad14 Publish onyx-0.4.21.tgz 2026-01-29 06:19:49 +00:00
rohoswagger
65efee19aa Publish onyx-0.4.21.tgz 2026-01-29 04:07:40 +00:00
rohoswagger
bfc7d0e1ac Publish onyx-0.4.21.tgz 2026-01-29 03:17:53 +00:00
github-merge-queue[bot]
7219dde6d1 Publish onyx-0.4.21.tgz 2026-01-29 01:33:32 +00:00
github-merge-queue[bot]
2dfcfc4d5f Publish onyx-0.4.21.tgz 2026-01-29 01:27:03 +00:00
github-merge-queue[bot]
ee5cd89a25 Publish onyx-0.4.21.tgz 2026-01-29 00:36:26 +00:00
Weves
9f2f0130ca Publish onyx-0.4.21.tgz 2026-01-29 00:20:23 +00:00
github-merge-queue[bot]
34cb12b5c9 Publish onyx-0.4.21.tgz 2026-01-29 00:17:10 +00:00
Weves
8dd3ef4d02 Publish onyx-0.4.21.tgz 2026-01-29 00:13:18 +00:00
github-merge-queue[bot]
567e6db41d Publish onyx-0.4.21.tgz 2026-01-28 23:55:03 +00:00
github-merge-queue[bot]
a595ba2d6a Publish onyx-0.4.21.tgz 2026-01-28 23:06:33 +00:00
github-merge-queue[bot]
f99261dd7e Publish onyx-0.4.21.tgz 2026-01-28 22:59:06 +00:00
github-merge-queue[bot]
4db3b43c78 Publish onyx-0.4.21.tgz 2026-01-28 22:58:22 +00:00
github-merge-queue[bot]
0fdcbee366 Publish onyx-0.4.21.tgz 2026-01-28 22:41:53 +00:00
jmelahman
a4c02a8cc2 Publish onyx-0.4.21.tgz 2026-01-28 22:34:38 +00:00
jmelahman
4fbcad7b30 Publish onyx-0.4.21.tgz 2026-01-28 22:32:03 +00:00
jmelahman
66ebae7bc5 Publish onyx-0.4.21.tgz 2026-01-28 22:31:36 +00:00
github-merge-queue[bot]
7853b7f367 Publish onyx-0.4.21.tgz 2026-01-28 22:30:38 +00:00
github-merge-queue[bot]
e3fa88108d Publish onyx-0.4.21.tgz 2026-01-28 22:24:32 +00:00
jmelahman
56bcf06e25 Publish onyx-0.4.21.tgz 2026-01-28 22:20:29 +00:00
jmelahman
5d571b5e6c Publish onyx-0.4.21.tgz 2026-01-28 22:13:13 +00:00
jmelahman
d03aebdbdc Publish onyx-0.4.21.tgz 2026-01-28 22:12:46 +00:00
rohoswagger
4761589b7a Publish onyx-0.4.21.tgz 2026-01-28 22:11:51 +00:00
jmelahman
2699fc2d25 Publish onyx-0.4.21.tgz 2026-01-28 22:11:24 +00:00
github-merge-queue[bot]
f29812e501 Publish onyx-0.4.21.tgz 2026-01-28 21:55:42 +00:00
github-merge-queue[bot]
0b1647bfdb Publish onyx-0.4.21.tgz 2026-01-28 21:38:32 +00:00
github-merge-queue[bot]
1d1bfb4452 Publish onyx-0.4.21.tgz 2026-01-28 21:13:20 +00:00
github-merge-queue[bot]
6b3cf7041f Publish onyx-0.4.21.tgz 2026-01-28 21:07:50 +00:00
github-merge-queue[bot]
39e712c572 Publish onyx-0.4.21.tgz 2026-01-28 20:46:10 +00:00
github-merge-queue[bot]
b87e6c1347 Publish onyx-0.4.21.tgz 2026-01-28 20:33:39 +00:00
github-merge-queue[bot]
d22abbe764 Publish onyx-0.4.21.tgz 2026-01-28 20:29:19 +00:00
github-merge-queue[bot]
6f786df1fe Publish onyx-0.4.21.tgz 2026-01-28 20:18:19 +00:00
github-merge-queue[bot]
51c1518c7d Publish onyx-0.4.21.tgz 2026-01-28 19:11:13 +00:00
github-merge-queue[bot]
2610b52839 Publish onyx-0.4.21.tgz 2026-01-28 18:58:46 +00:00
github-merge-queue[bot]
b403364db8 Publish onyx-0.4.21.tgz 2026-01-28 18:38:26 +00:00
github-merge-queue[bot]
037eafcd83 Publish onyx-0.4.21.tgz 2026-01-28 09:20:15 +00:00
github-merge-queue[bot]
8e5a8dcbaa Publish onyx-0.4.21.tgz 2026-01-28 08:22:34 +00:00
github-merge-queue[bot]
0c12d2462d Publish onyx-0.4.21.tgz 2026-01-28 06:51:50 +00:00
Weves
0c375759f8 Publish onyx-0.4.21.tgz 2026-01-28 06:45:26 +00:00
Weves
cc4dccd6b0 Publish onyx-0.4.21.tgz 2026-01-28 06:32:51 +00:00
wenxi-onyx
ca9f7a45fe Publish onyx-0.4.21.tgz 2026-01-28 06:13:33 +00:00
github-merge-queue[bot]
19ff3330c8 Publish onyx-0.4.21.tgz 2026-01-28 06:10:14 +00:00
github-merge-queue[bot]
8e4dcc2cb8 Publish onyx-0.4.21.tgz 2026-01-28 06:05:11 +00:00
rohoswagger
3eee8c3537 Publish onyx-0.4.21.tgz 2026-01-28 05:41:22 +00:00
github-merge-queue[bot]
927f3ab259 Publish onyx-0.4.21.tgz 2026-01-28 05:36:27 +00:00
github-merge-queue[bot]
181a29b768 Publish onyx-0.4.21.tgz 2026-01-28 05:20:41 +00:00
rohoswagger
27484bb2f3 Publish onyx-0.4.21.tgz 2026-01-28 05:14:22 +00:00
github-merge-queue[bot]
0c5db07012 Publish onyx-0.4.21.tgz 2026-01-28 04:40:34 +00:00
github-merge-queue[bot]
2166d34e52 Publish onyx-0.4.21.tgz 2026-01-28 04:36:00 +00:00
github-merge-queue[bot]
8f8d4102cf Publish onyx-0.4.21.tgz 2026-01-28 04:11:41 +00:00
yuhongsun96
fb18042a42 Publish onyx-0.4.21.tgz 2026-01-28 03:46:03 +00:00
github-merge-queue[bot]
8fbb0aab44 Publish onyx-0.4.21.tgz 2026-01-28 03:43:44 +00:00
github-merge-queue[bot]
fe36725ae2 Publish onyx-0.4.21.tgz 2026-01-28 03:36:08 +00:00
github-merge-queue[bot]
139b7d7e73 Publish onyx-0.4.21.tgz 2026-01-28 03:18:05 +00:00
Weves
d050904a94 Publish onyx-0.4.21.tgz 2026-01-28 03:01:57 +00:00
github-merge-queue[bot]
eb6f36dce0 Publish onyx-0.4.21.tgz 2026-01-28 02:59:50 +00:00
github-merge-queue[bot]
44a0495746 Publish onyx-0.4.21.tgz 2026-01-28 02:57:07 +00:00
github-merge-queue[bot]
1ab16d8b0b Publish onyx-0.4.21.tgz 2026-01-28 02:52:48 +00:00
rohoswagger
afb3322478 Publish onyx-0.4.21.tgz 2026-01-28 02:46:28 +00:00
github-merge-queue[bot]
796a777560 Publish onyx-0.4.21.tgz 2026-01-28 01:51:29 +00:00
github-merge-queue[bot]
7e21244567 Publish onyx-0.4.21.tgz 2026-01-28 01:43:10 +00:00
github-merge-queue[bot]
94ce49f295 Publish onyx-0.4.21.tgz 2026-01-28 01:37:48 +00:00
github-merge-queue[bot]
d83d466edd Publish onyx-0.4.21.tgz 2026-01-28 01:37:26 +00:00
rohoswagger
cb73699150 Publish onyx-0.4.21.tgz 2026-01-28 01:31:23 +00:00
nmgarza5
f3c062e5b3 Publish onyx-0.4.21.tgz 2026-01-28 01:03:09 +00:00
github-merge-queue[bot]
742f7d22a3 Publish onyx-0.4.21.tgz 2026-01-28 00:11:12 +00:00
Weves
c176d37533 Publish onyx-0.4.21.tgz 2026-01-27 23:47:30 +00:00
github-merge-queue[bot]
fac386f59c Publish onyx-0.4.21.tgz 2026-01-27 23:36:59 +00:00
github-merge-queue[bot]
2e298b3b42 Publish onyx-0.4.21.tgz 2026-01-27 23:13:30 +00:00
github-merge-queue[bot]
365bc26b00 Publish onyx-0.4.21.tgz 2026-01-27 22:37:25 +00:00
github-merge-queue[bot]
3221d727c5 Publish onyx-0.4.21.tgz 2026-01-27 22:12:31 +00:00
github-merge-queue[bot]
30853bd822 Publish onyx-0.4.21.tgz 2026-01-27 22:06:08 +00:00
github-merge-queue[bot]
d1d3e5ad46 Publish onyx-0.4.21.tgz 2026-01-27 21:55:43 +00:00
github-merge-queue[bot]
86f18d6e59 Publish onyx-0.4.21.tgz 2026-01-27 21:00:19 +00:00
github-merge-queue[bot]
3e14b3df69 Publish onyx-0.4.21.tgz 2026-01-27 20:39:01 +00:00
github-merge-queue[bot]
999eec73e6 Publish onyx-0.4.21.tgz 2026-01-27 19:55:35 +00:00
github-merge-queue[bot]
ccc0742543 Publish onyx-0.4.21.tgz 2026-01-27 19:50:22 +00:00
github-merge-queue[bot]
fb8bf31a2e Publish onyx-0.4.21.tgz 2026-01-27 19:36:49 +00:00
github-merge-queue[bot]
76dd684060 Publish onyx-0.4.21.tgz 2026-01-27 18:57:09 +00:00
Weves
7e59f1c06e Publish onyx-0.4.21.tgz 2026-01-27 18:50:25 +00:00
github-merge-queue[bot]
b17b322b03 Publish onyx-0.4.21.tgz 2026-01-27 18:43:46 +00:00
github-merge-queue[bot]
e8be17e306 Publish onyx-0.4.21.tgz 2026-01-27 18:43:24 +00:00
github-merge-queue[bot]
53920fb1f0 Publish onyx-0.4.21.tgz 2026-01-27 18:20:23 +00:00
github-merge-queue[bot]
55877d1bee Publish onyx-0.4.21.tgz 2026-01-27 17:26:53 +00:00
github-merge-queue[bot]
e1bde506c9 Publish onyx-0.4.20.tgz 2026-01-27 17:26:26 +00:00
github-merge-queue[bot]
26484a8e3e Publish onyx-0.4.20.tgz 2026-01-27 17:20:00 +00:00
github-merge-queue[bot]
aeeff4c9ce Publish onyx-0.4.20.tgz 2026-01-27 17:04:06 +00:00
github-merge-queue[bot]
9a9f89891f Publish onyx-0.4.20.tgz 2026-01-27 05:34:26 +00:00
github-merge-queue[bot]
aa566088cb Publish onyx-0.4.20.tgz 2026-01-27 04:04:11 +00:00
wenxi-onyx
5cf0ced3ab Publish onyx-0.4.20.tgz 2026-01-27 03:10:12 +00:00
Weves
3c09316cce Publish onyx-0.4.20.tgz 2026-01-27 02:27:53 +00:00
wenxi-onyx
546352ba5f Publish onyx-0.4.20.tgz 2026-01-27 01:13:38 +00:00
github-merge-queue[bot]
d92779046e Publish onyx-0.4.20.tgz 2026-01-27 00:45:43 +00:00
github-merge-queue[bot]
ec9806923d Publish onyx-0.4.20.tgz 2026-01-26 23:49:20 +00:00
justin-tahara
fc7281bc46 Publish onyx-0.4.20.tgz 2026-01-26 21:22:26 +00:00
jmelahman
29ae285099 Publish onyx-0.4.20.tgz 2026-01-26 16:56:12 +00:00
github-merge-queue[bot]
93d52a1d6a Publish onyx-0.4.20.tgz 2026-01-26 10:30:46 +00:00
github-merge-queue[bot]
791290cc59 Publish onyx-0.4.20.tgz 2026-01-26 04:50:35 +00:00
github-merge-queue[bot]
ba20e213d5 Publish onyx-0.4.20.tgz 2026-01-26 04:47:20 +00:00
github-merge-queue[bot]
bf1c4d26f6 Publish onyx-0.4.20.tgz 2026-01-25 07:51:32 +00:00
github-merge-queue[bot]
6573862991 Publish onyx-0.4.20.tgz 2026-01-25 04:48:49 +00:00
github-merge-queue[bot]
9b14f0b8c9 Publish onyx-0.4.20.tgz 2026-01-24 21:03:16 +00:00
yuhongsun96
21d7098c05 Publish onyx-0.4.20.tgz 2026-01-24 20:59:45 +00:00
nmgarza5
01b97443e5 Publish onyx-0.4.20.tgz 2026-01-24 19:49:15 +00:00
github-merge-queue[bot]
090bd0f099 Publish onyx-0.4.20.tgz 2026-01-24 01:51:58 +00:00
github-merge-queue[bot]
fed6aa7733 Publish onyx-0.4.20.tgz 2026-01-23 23:32:24 +00:00
github-merge-queue[bot]
adc2afd1f3 Publish onyx-0.4.20.tgz 2026-01-23 20:59:45 +00:00
github-merge-queue[bot]
db411f65bc Publish onyx-0.4.19.tgz 2026-01-23 19:54:20 +00:00
jmelahman
a532c80189 Publish onyx-0.4.19.tgz 2026-01-23 18:22:13 +00:00
github-merge-queue[bot]
60603ea818 Publish onyx-0.4.19.tgz 2026-01-23 17:10:08 +00:00
jmelahman
6d8eb292cb Publish onyx-0.4.19.tgz 2026-01-23 16:50:45 +00:00
github-merge-queue[bot]
968dd7ac43 Publish onyx-0.4.19.tgz 2026-01-23 05:59:29 +00:00
github-merge-queue[bot]
87e7ee1797 Publish onyx-0.4.19.tgz 2026-01-23 05:26:04 +00:00
jmelahman
7fdb025d73 Publish onyx-0.4.19.tgz 2026-01-23 04:42:10 +00:00
yuhongsun96
c7deba27b1 Publish onyx-0.4.19.tgz 2026-01-23 03:24:57 +00:00
github-merge-queue[bot]
a5473a1a28 Publish onyx-0.4.19.tgz 2026-01-23 02:51:44 +00:00
github-merge-queue[bot]
18ea27f85e Publish onyx-0.4.19.tgz 2026-01-23 01:53:20 +00:00
jmelahman
0ce8580277 Publish onyx-0.4.19.tgz 2026-01-22 23:18:14 +00:00
github-merge-queue[bot]
50ee097f82 Publish onyx-0.4.19.tgz 2026-01-22 20:13:36 +00:00
jmelahman
42f6177420 Publish onyx-0.4.19.tgz 2026-01-22 19:27:01 +00:00
github-merge-queue[bot]
2e47d9fcb6 Publish onyx-0.4.19.tgz 2026-01-22 18:22:24 +00:00
justin-tahara
817561c88e Publish onyx-0.4.19.tgz 2026-01-22 18:11:00 +00:00
github-merge-queue[bot]
1fb72046bc Publish onyx-0.4.19.tgz 2026-01-22 18:05:46 +00:00
github-merge-queue[bot]
9c31df2391 Publish onyx-0.4.19.tgz 2026-01-22 17:56:50 +00:00
github-merge-queue[bot]
ad2a141953 Publish onyx-0.4.19.tgz 2026-01-22 17:47:07 +00:00
github-merge-queue[bot]
1366cdefde Publish onyx-0.4.18.tgz 2026-01-21 18:48:07 +00:00
github-merge-queue[bot]
045d6203f7 Publish onyx-0.4.18.tgz 2026-01-21 16:48:44 +00:00
raunakab
d9ee3c5726 Publish onyx-0.4.18.tgz 2026-01-21 08:30:06 +00:00
github-merge-queue[bot]
48cf83a076 Publish onyx-0.4.18.tgz 2026-01-21 07:38:08 +00:00
github-merge-queue[bot]
f9e70cea61 Publish onyx-0.4.18.tgz 2026-01-21 04:38:37 +00:00
github-merge-queue[bot]
562dac0840 Publish onyx-0.4.18.tgz 2026-01-21 04:28:30 +00:00
yuhongsun96
d3ca839863 Publish onyx-0.4.18.tgz 2026-01-21 03:50:49 +00:00
github-merge-queue[bot]
5d9bcee1f4 Publish onyx-0.4.18.tgz 2026-01-21 03:48:49 +00:00
jmelahman
85d94be44e Publish onyx-0.4.18.tgz 2026-01-21 03:20:40 +00:00
github-merge-queue[bot]
61d4e73902 Publish onyx-0.4.18.tgz 2026-01-21 02:01:46 +00:00
github-merge-queue[bot]
8afe49c8e2 Publish onyx-0.4.18.tgz 2026-01-21 01:39:46 +00:00
github-merge-queue[bot]
8e37d4e9c8 Publish onyx-0.4.18.tgz 2026-01-21 01:11:21 +00:00
github-merge-queue[bot]
11592597e9 Publish onyx-0.4.18.tgz 2026-01-21 00:44:32 +00:00
yuhongsun96
e1c810ed88 Publish onyx-0.4.18.tgz 2026-01-21 00:18:43 +00:00
github-merge-queue[bot]
840218b0a5 Publish onyx-0.4.18.tgz 2026-01-21 00:09:37 +00:00
github-merge-queue[bot]
3ac06954c5 Publish onyx-0.4.18.tgz 2026-01-20 23:41:09 +00:00
yuhongsun96
b048a0382b Publish onyx-0.4.18.tgz 2026-01-20 23:14:23 +00:00
justin-tahara
3e823eb61b Publish onyx-0.4.18.tgz 2026-01-20 22:45:10 +00:00
github-merge-queue[bot]
7bb7186c30 Publish onyx-0.4.18.tgz 2026-01-20 22:40:54 +00:00
jmelahman
f36178cdeb Publish onyx-0.4.18.tgz 2026-01-20 22:30:03 +00:00
github-merge-queue[bot]
cdd24ce3e7 Publish onyx-0.4.18.tgz 2026-01-20 22:17:05 +00:00
justin-tahara
208bf9fa2d Publish onyx-0.4.18.tgz 2026-01-20 22:08:05 +00:00
github-merge-queue[bot]
a06d1a2d08 Publish onyx-0.4.18.tgz 2026-01-20 22:02:24 +00:00
yuhongsun96
a6ac5b6e30 Publish onyx-0.4.18.tgz 2026-01-20 21:36:48 +00:00
github-merge-queue[bot]
74f739dcbb Publish onyx-0.4.18.tgz 2026-01-20 21:16:31 +00:00
raunakab
ad7592c511 Publish onyx-0.4.18.tgz 2026-01-20 21:05:10 +00:00
github-merge-queue[bot]
e9f2da764b Publish onyx-0.4.18.tgz 2026-01-20 20:58:04 +00:00
github-merge-queue[bot]
a161b0e6c9 Publish onyx-0.4.18.tgz 2026-01-20 20:37:52 +00:00
github-merge-queue[bot]
1af9ff5876 Publish onyx-0.4.18.tgz 2026-01-20 20:36:48 +00:00
github-merge-queue[bot]
8069352584 Publish onyx-0.4.18.tgz 2026-01-20 19:10:40 +00:00
github-merge-queue[bot]
a6df0f570c Publish onyx-0.4.18.tgz 2026-01-20 19:06:59 +00:00
jmelahman
961d676a47 Publish onyx-0.4.18.tgz 2026-01-20 19:03:41 +00:00
github-merge-queue[bot]
00e8a334f3 Publish onyx-0.4.18.tgz 2026-01-20 18:26:09 +00:00
github-merge-queue[bot]
5179b98499 Publish onyx-0.4.18.tgz 2026-01-20 17:15:57 +00:00
github-merge-queue[bot]
dae67fee85 Publish onyx-0.4.18.tgz 2026-01-20 07:12:50 +00:00
github-merge-queue[bot]
3c357a1c25 Publish onyx-0.4.18.tgz 2026-01-20 06:22:48 +00:00
jmelahman
63cf169cbf Publish onyx-0.4.18.tgz 2026-01-20 03:48:01 +00:00
jmelahman
daa8d7db4d Publish onyx-0.4.18.tgz 2026-01-20 03:20:48 +00:00
yuhongsun96
652a1e2b9f Publish onyx-0.4.18.tgz 2026-01-20 03:05:15 +00:00
yuhongsun96
9bae8138d2 Publish onyx-0.4.18.tgz 2026-01-20 02:57:56 +00:00
github-merge-queue[bot]
e59035920c Publish onyx-0.4.18.tgz 2026-01-20 01:28:43 +00:00
github-merge-queue[bot]
30d0588d1e Publish onyx-0.4.18.tgz 2026-01-20 00:22:31 +00:00
github-merge-queue[bot]
90cabe2469 Publish onyx-0.4.18.tgz 2026-01-20 00:06:26 +00:00
github-merge-queue[bot]
16586a375a Publish onyx-0.4.18.tgz 2026-01-19 23:19:26 +00:00
yuhongsun96
c4f72a61a9 Publish onyx-0.4.18.tgz 2026-01-19 21:37:44 +00:00
github-merge-queue[bot]
afe75479e4 Publish onyx-0.4.18.tgz 2026-01-19 21:25:00 +00:00
github-merge-queue[bot]
08ed29dda0 Publish onyx-0.4.18.tgz 2026-01-19 20:29:31 +00:00
github-merge-queue[bot]
80b406413f Publish onyx-0.4.18.tgz 2026-01-19 18:48:37 +00:00
github-merge-queue[bot]
ac289b7cfa Publish onyx-0.4.18.tgz 2026-01-19 18:08:48 +00:00
github-merge-queue[bot]
d937cf39b7 Publish onyx-0.4.18.tgz 2026-01-19 04:37:36 +00:00
github-merge-queue[bot]
085d602d4f Publish onyx-0.4.18.tgz 2026-01-19 02:53:38 +00:00
github-merge-queue[bot]
684ee1f15d Publish onyx-0.4.18.tgz 2026-01-19 00:33:37 +00:00
github-merge-queue[bot]
8f88a6bea0 Publish onyx-0.4.18.tgz 2026-01-18 23:18:01 +00:00
yuhongsun96
6fdd157db8 Publish onyx-0.4.18.tgz 2026-01-18 04:06:09 +00:00
github-merge-queue[bot]
3c495f8d4a Publish onyx-0.4.18.tgz 2026-01-18 02:04:22 +00:00
yuhongsun96
d3ec3e1035 Publish onyx-0.4.18.tgz 2026-01-17 23:56:56 +00:00
github-merge-queue[bot]
e3b178bcdc Publish onyx-0.4.18.tgz 2026-01-17 02:15:05 +00:00
github-merge-queue[bot]
468f1af394 Publish onyx-0.4.18.tgz 2026-01-17 02:04:30 +00:00
github-merge-queue[bot]
30ba8f7efd Publish onyx-0.4.18.tgz 2026-01-17 01:55:56 +00:00
jmelahman
084c418e51 Publish onyx-0.4.18.tgz 2026-01-17 01:04:26 +00:00
yuhongsun96
170e70ccf3 Publish onyx-0.4.18.tgz 2026-01-17 00:33:47 +00:00
github-merge-queue[bot]
9ffcd931ca Publish onyx-0.4.18.tgz 2026-01-17 00:03:57 +00:00
github-merge-queue[bot]
b2f451b019 Publish onyx-0.4.18.tgz 2026-01-16 23:55:08 +00:00
github-merge-queue[bot]
fb6ff08acc Publish onyx-0.4.18.tgz 2026-01-16 23:41:48 +00:00
yuhongsun96
377ae0f170 Publish onyx-0.4.18.tgz 2026-01-16 23:26:59 +00:00
github-merge-queue[bot]
56dbaff3a2 Publish onyx-0.4.18.tgz 2026-01-16 22:16:29 +00:00
github-merge-queue[bot]
c03c1081d0 Publish onyx-0.4.18.tgz 2026-01-16 21:46:32 +00:00
github-merge-queue[bot]
303c82002b Publish onyx-0.4.18.tgz 2026-01-16 20:32:10 +00:00
github-merge-queue[bot]
ccd4eebab0 Publish onyx-0.4.18.tgz 2026-01-16 20:25:54 +00:00
github-merge-queue[bot]
bab1da82db Publish onyx-0.4.18.tgz 2026-01-16 20:14:59 +00:00
yuhongsun96
1fb06f69a4 Publish onyx-0.4.18.tgz 2026-01-16 19:51:39 +00:00
yuhongsun96
8444c39394 Publish onyx-0.4.18.tgz 2026-01-16 19:24:50 +00:00
github-merge-queue[bot]
f874612225 Publish onyx-0.4.18.tgz 2026-01-16 18:16:14 +00:00
github-merge-queue[bot]
9399413ceb Publish onyx-0.4.18.tgz 2026-01-16 17:58:55 +00:00
github-merge-queue[bot]
8e7cf40190 Publish onyx-0.4.18.tgz 2026-01-16 16:39:26 +00:00
github-merge-queue[bot]
0cda20238c Publish onyx-0.4.18.tgz 2026-01-16 10:37:49 +00:00
github-merge-queue[bot]
3d1dce8f20 Publish onyx-0.4.18.tgz 2026-01-16 10:13:32 +00:00
github-merge-queue[bot]
ec3b1b102e Publish onyx-0.4.18.tgz 2026-01-16 10:06:58 +00:00
github-merge-queue[bot]
18394374b1 Publish onyx-0.4.18.tgz 2026-01-16 10:05:40 +00:00
github-merge-queue[bot]
6edafaab51 Publish onyx-0.4.18.tgz 2026-01-16 09:36:14 +00:00
jmelahman
883cbdbd03 Publish onyx-0.4.18.tgz 2026-01-16 09:03:47 +00:00
github-merge-queue[bot]
9651cdc5a0 Publish onyx-0.4.18.tgz 2026-01-16 08:48:48 +00:00
github-merge-queue[bot]
b6b8f7d368 Publish onyx-0.4.18.tgz 2026-01-16 03:54:33 +00:00
github-merge-queue[bot]
88591bb89a Publish onyx-0.4.18.tgz 2026-01-16 03:52:14 +00:00
github-merge-queue[bot]
1971958c8a Publish onyx-0.4.18.tgz 2026-01-16 03:18:02 +00:00
github-merge-queue[bot]
c16c59bf9d Publish onyx-0.4.18.tgz 2026-01-16 02:07:24 +00:00
github-merge-queue[bot]
6853104215 Publish onyx-0.4.18.tgz 2026-01-16 01:46:41 +00:00
github-merge-queue[bot]
7bd9ddd7d6 Publish onyx-0.4.18.tgz 2026-01-16 01:08:07 +00:00
github-merge-queue[bot]
6fb17fc641 Publish onyx-0.4.18.tgz 2026-01-16 00:18:47 +00:00
github-merge-queue[bot]
122a00063d Publish onyx-0.4.18.tgz 2026-01-16 00:00:06 +00:00
github-merge-queue[bot]
5856e5b852 Publish onyx-0.4.18.tgz 2026-01-15 23:09:26 +00:00
jmelahman
64931247b4 Publish onyx-0.4.18.tgz 2026-01-15 20:36:48 +00:00
jmelahman
44a935b03c Publish onyx-0.4.18.tgz 2026-01-15 20:28:09 +00:00
jmelahman
8f98863b04 Publish onyx-0.4.18.tgz 2026-01-15 19:50:53 +00:00
jmelahman
7a0df41eec Publish onyx-0.4.18.tgz 2026-01-15 19:11:22 +00:00
github-merge-queue[bot]
37513fd749 Publish onyx-0.4.18.tgz 2026-01-15 17:52:40 +00:00
github-merge-queue[bot]
3b9b513a65 Publish onyx-0.4.18.tgz 2026-01-15 17:16:27 +00:00
jmelahman
df1aa8fa36 Publish onyx-0.4.18.tgz 2026-01-15 16:09:17 +00:00
github-merge-queue[bot]
206cdec607 Publish onyx-0.4.18.tgz 2026-01-15 02:26:20 +00:00
github-merge-queue[bot]
77162612ba Publish onyx-0.4.18.tgz 2026-01-15 01:48:12 +00:00
github-merge-queue[bot]
80a145f096 Publish onyx-0.4.18.tgz 2026-01-15 01:21:37 +00:00
github-merge-queue[bot]
9aa559852e Publish onyx-0.4.18.tgz 2026-01-15 00:46:11 +00:00
github-merge-queue[bot]
993b9c3414 Publish onyx-0.4.18.tgz 2026-01-15 00:02:23 +00:00
github-merge-queue[bot]
06252bfa6b Publish onyx-0.4.18.tgz 2026-01-14 23:22:54 +00:00
yuhongsun96
e241dc11ea Publish onyx-0.4.18.tgz 2026-01-14 22:06:50 +00:00
github-merge-queue[bot]
3ed6056423 Publish onyx-0.4.18.tgz 2026-01-14 21:09:49 +00:00
github-merge-queue[bot]
2bd7e7ebb9 Publish onyx-0.4.18.tgz 2026-01-14 20:41:53 +00:00
github-merge-queue[bot]
c5ca0195c1 Publish onyx-0.4.18.tgz 2026-01-14 20:10:14 +00:00
github-merge-queue[bot]
f8795bd4bf Publish onyx-0.4.18.tgz 2026-01-14 20:04:46 +00:00
github-merge-queue[bot]
3b9f7892a4 Publish onyx-0.4.18.tgz 2026-01-14 19:56:47 +00:00
github-merge-queue[bot]
bb2669da77 Publish onyx-0.4.18.tgz 2026-01-14 19:55:46 +00:00
github-merge-queue[bot]
c0ffd54089 Publish onyx-0.4.18.tgz 2026-01-14 19:43:34 +00:00
github-merge-queue[bot]
c2ec3f3ed8 Publish onyx-0.4.18.tgz 2026-01-14 19:39:47 +00:00
github-merge-queue[bot]
2cb1b196f4 Publish onyx-0.4.18.tgz 2026-01-14 17:56:54 +00:00
github-merge-queue[bot]
713811e4cc Publish onyx-0.4.18.tgz 2026-01-14 17:24:45 +00:00
github-merge-queue[bot]
acb3017bc6 Publish onyx-0.4.18.tgz 2026-01-14 05:17:29 +00:00
github-merge-queue[bot]
dd6513c1af Publish onyx-0.4.18.tgz 2026-01-14 03:12:57 +00:00
github-merge-queue[bot]
b6d9e631dd Publish onyx-0.4.18.tgz 2026-01-14 02:52:58 +00:00
github-merge-queue[bot]
a7a44a9058 Publish onyx-0.4.18.tgz 2026-01-14 02:05:58 +00:00
github-merge-queue[bot]
74d1cd9bb6 Publish onyx-0.4.18.tgz 2026-01-14 00:34:12 +00:00
github-merge-queue[bot]
3ef3ac5e26 Publish onyx-0.4.18.tgz 2026-01-14 00:12:52 +00:00
github-merge-queue[bot]
385ef1f28e Publish onyx-0.4.18.tgz 2026-01-13 23:45:09 +00:00
github-merge-queue[bot]
899f6653dd Publish onyx-0.4.18.tgz 2026-01-13 23:42:07 +00:00
github-merge-queue[bot]
6f527cf169 Publish onyx-0.4.18.tgz 2026-01-13 22:42:08 +00:00
github-merge-queue[bot]
1d76f8fb8c Publish onyx-0.4.18.tgz 2026-01-13 22:21:50 +00:00
jmelahman
16f58a21cd Publish onyx-0.4.18.tgz 2026-01-13 22:10:15 +00:00
jmelahman
62c7755556 Publish onyx-0.4.18.tgz 2026-01-13 21:59:12 +00:00
jmelahman
2d6b066a54 Publish onyx-0.4.18.tgz 2026-01-13 21:22:52 +00:00
jmelahman
f559e8636c Publish onyx-0.4.18.tgz 2026-01-13 21:22:12 +00:00
github-merge-queue[bot]
a63dd5eb32 Publish onyx-0.4.18.tgz 2026-01-13 20:47:51 +00:00
github-merge-queue[bot]
dbbfc869d1 Publish onyx-0.4.18.tgz 2026-01-13 19:53:40 +00:00
github-merge-queue[bot]
f73f6033a9 Publish onyx-0.4.18.tgz 2026-01-13 19:53:11 +00:00
github-merge-queue[bot]
11d3fb9598 Publish onyx-0.4.18.tgz 2026-01-13 19:41:19 +00:00
github-merge-queue[bot]
4973aa54d3 Publish onyx-0.4.18.tgz 2026-01-13 19:29:24 +00:00
jmelahman
f97ee9e287 Publish onyx-0.4.18.tgz 2026-01-13 19:23:01 +00:00
github-merge-queue[bot]
6f0219e79c Publish onyx-0.4.18.tgz 2026-01-13 19:09:25 +00:00
github-merge-queue[bot]
67e1d7240d Publish onyx-0.4.18.tgz 2026-01-13 18:41:54 +00:00
github-merge-queue[bot]
f6b08a227f Publish onyx-0.4.18.tgz 2026-01-13 18:04:04 +00:00
github-merge-queue[bot]
bd55da78dd Publish onyx-0.4.18.tgz 2026-01-13 03:57:49 +00:00
github-merge-queue[bot]
5593b791c1 Publish onyx-0.4.18.tgz 2026-01-13 03:45:18 +00:00
github-merge-queue[bot]
55c89559b0 Publish onyx-0.4.18.tgz 2026-01-13 01:48:23 +00:00
github-merge-queue[bot]
17b2dbdcee Publish onyx-0.4.18.tgz 2026-01-13 01:10:22 +00:00
github-merge-queue[bot]
9ed2afb29a Publish onyx-0.4.18.tgz 2026-01-13 00:04:37 +00:00
github-merge-queue[bot]
7e5335d15c Publish onyx-0.4.18.tgz 2026-01-12 23:27:01 +00:00
github-merge-queue[bot]
60014c48b4 Publish onyx-0.4.18.tgz 2026-01-12 23:08:44 +00:00
github-merge-queue[bot]
5113a17a6c Publish onyx-0.4.18.tgz 2026-01-12 23:02:50 +00:00
github-merge-queue[bot]
0eb1269df4 Publish onyx-0.4.18.tgz 2026-01-12 22:00:04 +00:00
github-merge-queue[bot]
6f3f81a8cd Publish onyx-0.4.18.tgz 2026-01-12 21:54:19 +00:00
jmelahman
20323b613c Publish onyx-0.4.18.tgz 2026-01-12 21:49:29 +00:00
jmelahman
af6d2b291a Publish onyx-0.4.18.tgz 2026-01-12 21:13:16 +00:00
jmelahman
40fabdc291 Publish onyx-0.4.18.tgz 2026-01-12 20:53:03 +00:00
github-merge-queue[bot]
40a5731223 Publish onyx-0.4.18.tgz 2026-01-12 20:42:46 +00:00
github-merge-queue[bot]
58e0345903 Publish onyx-0.4.18.tgz 2026-01-12 20:35:15 +00:00
github-merge-queue[bot]
d198431fe2 Publish onyx-0.4.18.tgz 2026-01-12 19:53:26 +00:00
github-merge-queue[bot]
f82a425f43 Publish onyx-0.4.18.tgz 2026-01-12 19:19:12 +00:00
wenxi-onyx
4b9f633be9 Publish onyx-0.4.18.tgz 2026-01-12 19:11:10 +00:00
github-merge-queue[bot]
c07a94ff9c Publish onyx-0.4.18.tgz 2026-01-12 18:28:08 +00:00
github-merge-queue[bot]
2d469f7b72 Publish onyx-0.4.18.tgz 2026-01-12 12:14:26 +00:00
raunakab
95cff1789c Publish onyx-0.4.18.tgz 2026-01-12 11:28:35 +00:00
github-merge-queue[bot]
c17a2958c0 Publish onyx-0.4.18.tgz 2026-01-12 11:03:20 +00:00
github-merge-queue[bot]
4482c6d38a Publish onyx-0.4.18.tgz 2026-01-12 07:06:33 +00:00
github-merge-queue[bot]
3cdf929ac4 Publish onyx-0.4.18.tgz 2026-01-12 06:32:05 +00:00
github-merge-queue[bot]
73fc930f23 Publish onyx-0.4.18.tgz 2026-01-12 02:08:55 +00:00
github-merge-queue[bot]
13fc11418e Publish onyx-0.4.18.tgz 2026-01-12 01:17:56 +00:00
github-merge-queue[bot]
e38a6416fd Publish onyx-0.4.18.tgz 2026-01-11 22:24:36 +00:00
github-merge-queue[bot]
1682c548a1 Publish onyx-0.4.18.tgz 2026-01-11 22:21:43 +00:00
github-merge-queue[bot]
46d95399b9 Publish onyx-0.4.18.tgz 2026-01-11 21:19:24 +00:00
github-merge-queue[bot]
f2b2590f1a Publish onyx-0.4.18.tgz 2026-01-11 21:09:46 +00:00
github-merge-queue[bot]
81f0ed7c89 Publish onyx-0.4.18.tgz 2026-01-11 20:51:01 +00:00
wenxi-onyx
8efeaff739 Publish onyx-0.4.18.tgz 2026-01-11 20:45:50 +00:00
github-merge-queue[bot]
b2edfad49d Publish onyx-0.4.18.tgz 2026-01-11 11:27:24 +00:00
jmelahman
a403976793 Publish onyx-0.4.18.tgz 2026-01-11 06:02:40 +00:00
jmelahman
fd466eb12d Publish onyx-0.4.18.tgz 2026-01-11 05:23:07 +00:00
jmelahman
58c31a84c6 Publish onyx-0.4.18.tgz 2026-01-11 04:39:58 +00:00
jmelahman
721344dad6 Publish onyx-0.4.18.tgz 2026-01-11 04:02:13 +00:00
github-merge-queue[bot]
0516f199f6 Publish onyx-0.4.18.tgz 2026-01-11 00:35:07 +00:00
github-merge-queue[bot]
79d3c5fe4d Publish onyx-0.4.18.tgz 2026-01-11 00:29:32 +00:00
Weves
21f48e9e9c Publish onyx-0.4.18.tgz 2026-01-11 00:01:32 +00:00
wenxi-onyx
4dee622ff0 Publish onyx-0.4.18.tgz 2026-01-10 23:13:07 +00:00
Weves
91f5febad7 Publish onyx-0.4.18.tgz 2026-01-10 21:44:25 +00:00
github-merge-queue[bot]
0395a3bb30 Publish onyx-0.4.18.tgz 2026-01-10 06:36:43 +00:00
github-merge-queue[bot]
e52818f4fa Publish onyx-0.4.18.tgz 2026-01-10 05:19:07 +00:00
github-merge-queue[bot]
4fd649eda3 Publish onyx-0.4.18.tgz 2026-01-10 05:02:47 +00:00
yuhongsun96
88acc1a307 Publish onyx-0.4.18.tgz 2026-01-10 04:40:54 +00:00
yuhongsun96
e92b9cb353 Publish onyx-0.4.18.tgz 2026-01-10 04:38:24 +00:00
jmelahman
63839739fe Publish onyx-0.4.18.tgz 2026-01-10 04:21:15 +00:00
github-merge-queue[bot]
af938d8f89 Publish onyx-0.4.18.tgz 2026-01-10 00:22:37 +00:00
github-merge-queue[bot]
6ae461b9c9 Publish onyx-0.4.18.tgz 2026-01-10 00:16:22 +00:00
github-merge-queue[bot]
db1ede41a1 Publish onyx-0.4.18.tgz 2026-01-09 23:59:21 +00:00
yuhongsun96
9744b24040 Publish onyx-0.4.18.tgz 2026-01-09 23:54:12 +00:00
yuhongsun96
65544c0ba5 Publish onyx-0.4.18.tgz 2026-01-09 23:28:51 +00:00
nmgarza5
d668bec674 Publish onyx-0.4.18.tgz 2026-01-09 22:21:44 +00:00
github-merge-queue[bot]
01e1463540 Publish onyx-0.4.18.tgz 2026-01-09 20:10:19 +00:00
github-merge-queue[bot]
12e344e59f Publish onyx-0.4.18.tgz 2026-01-09 19:13:44 +00:00
github-merge-queue[bot]
8f4710def0 Publish onyx-0.4.18.tgz 2026-01-09 18:51:13 +00:00
github-merge-queue[bot]
5a8bc6c132 Publish onyx-0.4.18.tgz 2026-01-09 18:20:23 +00:00
github-merge-queue[bot]
7199976b7b Publish onyx-0.4.18.tgz 2026-01-09 17:52:14 +00:00
github-merge-queue[bot]
1b302d0b31 Publish onyx-0.4.18.tgz 2026-01-09 17:22:10 +00:00
github-merge-queue[bot]
390375b318 Publish onyx-0.4.18.tgz 2026-01-09 05:52:07 +00:00
github-merge-queue[bot]
ba0978f2e9 Publish onyx-0.4.18.tgz 2026-01-09 04:06:49 +00:00
github-merge-queue[bot]
d0a868536b Publish onyx-0.4.18.tgz 2026-01-09 03:31:37 +00:00
raunakab
4a0e8acd68 Publish onyx-0.4.18.tgz 2026-01-09 03:26:30 +00:00
github-merge-queue[bot]
6de08bc8c5 Publish onyx-0.4.18.tgz 2026-01-09 03:15:24 +00:00
github-merge-queue[bot]
3e9a0bd99a Publish onyx-0.4.18.tgz 2026-01-09 02:56:15 +00:00
github-merge-queue[bot]
4abd0d7de9 Publish onyx-0.4.18.tgz 2026-01-09 00:14:17 +00:00
github-merge-queue[bot]
9cab9b3287 Publish onyx-0.4.18.tgz 2026-01-08 23:34:17 +00:00
raunakab
005136ffbc Publish onyx-0.4.18.tgz 2026-01-08 22:57:56 +00:00
Weves
80e1be19b3 Publish onyx-0.4.18.tgz 2026-01-08 22:46:18 +00:00
github-merge-queue[bot]
1c2008d348 Publish onyx-0.4.18.tgz 2026-01-08 21:46:51 +00:00
github-merge-queue[bot]
fb236d1664 Publish onyx-0.4.18.tgz 2026-01-08 20:29:48 +00:00
github-merge-queue[bot]
337ea7acef Publish onyx-0.4.18.tgz 2026-01-08 18:58:15 +00:00
github-merge-queue[bot]
771d2b2937 Publish onyx-0.4.18.tgz 2026-01-08 18:35:04 +00:00
github-merge-queue[bot]
51b27b283b Publish onyx-0.4.18.tgz 2026-01-08 18:32:40 +00:00
github-merge-queue[bot]
556198653d Publish onyx-0.4.18.tgz 2026-01-08 17:14:52 +00:00
github-merge-queue[bot]
23fb312855 Publish onyx-0.4.18.tgz 2026-01-08 07:29:51 +00:00
github-merge-queue[bot]
98d3f94d64 Publish onyx-0.4.18.tgz 2026-01-08 05:13:20 +00:00
github-merge-queue[bot]
02d93cdde7 Publish onyx-0.4.17.tgz 2026-01-08 00:37:51 +00:00
github-merge-queue[bot]
4a4f5b4423 Publish onyx-0.4.17.tgz 2026-01-08 00:10:26 +00:00
github-merge-queue[bot]
d72adfab5a Publish onyx-0.4.17.tgz 2026-01-08 00:07:42 +00:00
github-merge-queue[bot]
f6288898cf Publish onyx-0.4.17.tgz 2026-01-07 23:58:37 +00:00
github-merge-queue[bot]
3e6210befb Publish onyx-0.4.17.tgz 2026-01-07 23:42:00 +00:00
github-merge-queue[bot]
4b02046be1 Publish onyx-0.4.17.tgz 2026-01-07 23:40:02 +00:00
github-merge-queue[bot]
9289564aa5 Publish onyx-0.4.17.tgz 2026-01-07 23:03:51 +00:00
github-merge-queue[bot]
111f7c6558 Publish onyx-0.4.17.tgz 2026-01-07 22:38:20 +00:00
github-merge-queue[bot]
18c72e4554 Publish onyx-0.4.17.tgz 2026-01-07 21:53:37 +00:00
github-merge-queue[bot]
82736ff925 Publish onyx-0.4.17.tgz 2026-01-07 21:23:03 +00:00
github-merge-queue[bot]
dfabd71aae Publish onyx-0.4.17.tgz 2026-01-07 20:47:52 +00:00
github-merge-queue[bot]
f741bf2c36 Publish onyx-0.4.17.tgz 2026-01-07 20:42:06 +00:00
jmelahman
575934d517 Publish onyx-0.4.17.tgz 2026-01-07 19:53:33 +00:00
github-merge-queue[bot]
f748454ebb Publish onyx-0.4.17.tgz 2026-01-07 19:50:03 +00:00
github-merge-queue[bot]
d5b0072989 Publish onyx-0.4.17.tgz 2026-01-07 19:45:13 +00:00
github-merge-queue[bot]
041260341a Publish onyx-0.4.17.tgz 2026-01-07 18:39:41 +00:00
github-merge-queue[bot]
1f184e7afc Publish onyx-0.4.17.tgz 2026-01-07 18:20:42 +00:00
github-merge-queue[bot]
781dab3ea5 Publish onyx-0.4.17.tgz 2026-01-07 18:17:36 +00:00
github-merge-queue[bot]
70473d7b7e Publish onyx-0.4.17.tgz 2026-01-07 18:11:12 +00:00
github-merge-queue[bot]
c694b33fff Publish onyx-0.4.17.tgz 2026-01-07 17:49:55 +00:00
github-merge-queue[bot]
ff3f1cb607 Publish onyx-0.4.17.tgz 2026-01-07 17:45:21 +00:00
jmelahman
966a27ebee Publish onyx-0.4.17.tgz 2026-01-07 15:07:43 +00:00
github-merge-queue[bot]
a524321921 Publish onyx-0.4.17.tgz 2026-01-07 06:15:31 +00:00
jmelahman
a03c47a17f Publish onyx-0.4.17.tgz 2026-01-07 05:58:34 +00:00
github-merge-queue[bot]
d7bb26174a Publish onyx-0.4.17.tgz 2026-01-07 05:48:40 +00:00
github-merge-queue[bot]
fa51203707 Publish onyx-0.4.17.tgz 2026-01-07 05:47:22 +00:00
justin-tahara
937861e721 Publish onyx-0.4.17.tgz 2026-01-07 02:38:55 +00:00
jmelahman
f411a48650 Publish onyx-0.4.17.tgz 2026-01-07 01:12:23 +00:00
jmelahman
d1934398dd Publish onyx-0.4.17.tgz 2026-01-07 01:09:31 +00:00
jmelahman
472db781af Publish onyx-0.4.17.tgz 2026-01-07 00:20:20 +00:00
jmelahman
92d2e6e11e Publish onyx-0.4.17.tgz 2026-01-07 00:04:17 +00:00
jmelahman
44b413d1c5 Publish onyx-0.4.17.tgz 2026-01-07 00:01:41 +00:00
github-merge-queue[bot]
ac1305675a Publish onyx-0.4.17.tgz 2026-01-06 23:09:27 +00:00
justin-tahara
da5ea3cf75 Publish onyx-0.4.17.tgz 2026-01-06 22:53:23 +00:00
github-merge-queue[bot]
5520fd611f Publish onyx-0.4.17.tgz 2026-01-06 22:48:50 +00:00
github-merge-queue[bot]
8dfa9abd57 Publish onyx-0.4.17.tgz 2026-01-06 22:40:04 +00:00
github-merge-queue[bot]
e00085a95f Publish onyx-0.4.17.tgz 2026-01-06 22:00:39 +00:00
github-merge-queue[bot]
8a831dde79 Publish onyx-0.4.17.tgz 2026-01-06 21:20:27 +00:00
github-merge-queue[bot]
d82466ffc8 Publish onyx-0.4.17.tgz 2026-01-06 21:03:03 +00:00
github-merge-queue[bot]
d73f2c759d Publish onyx-0.4.17.tgz 2026-01-06 20:59:25 +00:00
github-merge-queue[bot]
8fd1f04722 Publish onyx-0.4.17.tgz 2026-01-06 20:42:57 +00:00
github-merge-queue[bot]
ecdda76e5a Publish onyx-0.4.17.tgz 2026-01-06 19:21:23 +00:00
github-merge-queue[bot]
0221929b13 Publish onyx-0.4.17.tgz 2026-01-06 18:50:56 +00:00
github-merge-queue[bot]
cfe82b49c9 Publish onyx-0.4.17.tgz 2026-01-06 18:27:06 +00:00
github-merge-queue[bot]
60461fb87c Publish onyx-0.4.17.tgz 2026-01-06 17:41:42 +00:00
github-merge-queue[bot]
769fdfd801 Publish onyx-0.4.17.tgz 2026-01-06 17:37:13 +00:00
github-merge-queue[bot]
4806371d09 Publish onyx-0.4.17.tgz 2026-01-06 16:24:28 +00:00
Subash-Mohan
c52fdf9922 Publish onyx-0.4.17.tgz 2026-01-06 08:47:46 +00:00
github-merge-queue[bot]
e1b9ce21b5 Publish onyx-0.4.17.tgz 2026-01-06 04:17:28 +00:00
github-merge-queue[bot]
3253450e95 Publish onyx-0.4.17.tgz 2026-01-06 03:00:55 +00:00
github-merge-queue[bot]
fc97091163 Publish onyx-0.4.17.tgz 2026-01-06 01:41:54 +00:00
github-merge-queue[bot]
01f1457ec9 Publish onyx-0.4.17.tgz 2026-01-06 01:38:52 +00:00
github-merge-queue[bot]
448a167648 Publish onyx-0.4.17.tgz 2026-01-06 00:43:45 +00:00
github-merge-queue[bot]
acb50f0aa5 Publish onyx-0.4.17.tgz 2026-01-05 23:37:12 +00:00
github-merge-queue[bot]
7b9e6d2cd6 Publish onyx-0.4.17.tgz 2026-01-05 22:16:55 +00:00
github-merge-queue[bot]
73a0a1a8e3 Publish onyx-0.4.17.tgz 2026-01-05 21:20:29 +00:00
github-merge-queue[bot]
ab65222841 Publish onyx-0.4.17.tgz 2026-01-05 20:49:39 +00:00
github-merge-queue[bot]
207c3a4540 Publish onyx-0.4.17.tgz 2026-01-05 20:36:05 +00:00
github-merge-queue[bot]
f5291ab0ba Publish onyx-0.4.17.tgz 2026-01-05 19:05:27 +00:00
jmelahman
df5ea64183 Publish onyx-0.4.17.tgz 2026-01-05 17:02:39 +00:00
github-merge-queue[bot]
a84556be4d Publish onyx-0.4.17.tgz 2026-01-05 01:28:24 +00:00
github-merge-queue[bot]
0e1fa32a00 Publish onyx-0.4.17.tgz 2026-01-04 21:42:24 +00:00
github-merge-queue[bot]
00fd3531c6 Publish onyx-0.4.17.tgz 2026-01-04 06:58:27 +00:00
yuhongsun96
9628c4e965 Publish onyx-0.4.17.tgz 2026-01-04 06:55:05 +00:00
jmelahman
4c2faf0cf2 Publish onyx-0.4.17.tgz 2026-01-03 07:53:36 +00:00
jmelahman
96f2e1eb2b Publish onyx-0.4.17.tgz 2026-01-03 07:50:23 +00:00
jmelahman
eb8e834a33 Publish onyx-0.4.17.tgz 2026-01-03 07:43:33 +00:00
jmelahman
aedca55735 Publish onyx-0.4.17.tgz 2026-01-03 07:27:30 +00:00
yuhongsun96
1aee5f5a75 Publish onyx-0.4.17.tgz 2026-01-03 03:47:42 +00:00
github-merge-queue[bot]
5b58657310 Publish onyx-0.4.17.tgz 2026-01-03 02:42:52 +00:00
github-merge-queue[bot]
64f2be22e6 Publish onyx-0.4.17.tgz 2026-01-03 02:36:59 +00:00
github-merge-queue[bot]
cd51f73360 Publish onyx-0.4.17.tgz 2026-01-03 02:22:46 +00:00
yuhongsun96
fea6572f3f Publish onyx-0.4.17.tgz 2026-01-03 01:58:33 +00:00
github-merge-queue[bot]
e5992d8af4 Publish onyx-0.4.17.tgz 2026-01-03 01:20:11 +00:00
yuhongsun96
7385df89e0 Publish onyx-0.4.17.tgz 2026-01-03 00:44:33 +00:00
github-merge-queue[bot]
edc9429357 Publish onyx-0.4.17.tgz 2026-01-03 00:01:59 +00:00
justin-tahara
acd75331da Publish onyx-0.4.17.tgz 2026-01-02 23:20:56 +00:00
jmelahman
ab9d49bb24 Publish onyx-0.4.17.tgz 2026-01-02 23:11:35 +00:00
github-merge-queue[bot]
470f4c672d Publish onyx-0.4.17.tgz 2026-01-02 22:33:03 +00:00
acaprau
5201d425aa Publish onyx-0.4.17.tgz 2026-01-02 22:11:53 +00:00
github-merge-queue[bot]
9adabe161e Publish onyx-0.4.17.tgz 2026-01-02 22:04:40 +00:00
jmelahman
604910e623 Publish onyx-0.4.17.tgz 2026-01-02 21:30:48 +00:00
justin-tahara
75f7964a48 Publish onyx-0.4.17.tgz 2026-01-02 21:11:12 +00:00
github-merge-queue[bot]
bd96e6f29b Publish onyx-0.4.17.tgz 2026-01-02 20:25:51 +00:00
github-merge-queue[bot]
eac06c089e Publish onyx-0.4.17.tgz 2026-01-02 20:19:06 +00:00
github-merge-queue[bot]
bf1a7efd76 Publish onyx-0.4.17.tgz 2026-01-02 20:11:15 +00:00
github-merge-queue[bot]
12fa091100 Publish onyx-0.4.17.tgz 2026-01-02 19:10:29 +00:00
github-merge-queue[bot]
195603b783 Publish onyx-0.4.17.tgz 2026-01-02 18:59:55 +00:00
github-merge-queue[bot]
888a5a28b3 Publish onyx-0.4.17.tgz 2026-01-02 18:23:20 +00:00
jmelahman
0cfdde5f54 Publish onyx-0.4.17.tgz 2026-01-02 18:05:24 +00:00
Subash-Mohan
52a45b3238 Publish onyx-0.4.17.tgz 2026-01-02 13:44:44 +00:00
github-merge-queue[bot]
079ce11a07 Publish onyx-0.4.17.tgz 2026-01-02 11:40:44 +00:00
Subash-Mohan
a6633f5867 Publish onyx-0.4.17.tgz 2026-01-02 10:44:51 +00:00
jmelahman
e460d1b4dc Publish onyx-0.4.17.tgz 2026-01-02 06:24:22 +00:00
jmelahman
e37936cdd4 Publish onyx-0.4.17.tgz 2026-01-02 06:11:14 +00:00
github-merge-queue[bot]
dafdff5fb1 Publish onyx-0.4.17.tgz 2026-01-02 05:09:02 +00:00
jmelahman
17bb5ce214 Publish onyx-0.4.17.tgz 2026-01-02 03:59:27 +00:00
jmelahman
b4ef75f26d Publish onyx-0.4.17.tgz 2026-01-02 03:11:51 +00:00
github-merge-queue[bot]
f68aff4c12 Publish onyx-0.4.17.tgz 2026-01-01 21:19:51 +00:00
jmelahman
c159ecbd82 Publish onyx-0.4.17.tgz 2026-01-01 21:11:31 +00:00
github-merge-queue[bot]
737dbe0d29 Publish onyx-0.4.17.tgz 2026-01-01 18:43:18 +00:00
github-merge-queue[bot]
0ac9591ef7 Publish onyx-0.4.17.tgz 2026-01-01 15:50:23 +00:00
github-merge-queue[bot]
72662d1507 Publish onyx-0.4.17.tgz 2026-01-01 15:37:12 +00:00
github-merge-queue[bot]
cf8dab90dd Publish onyx-0.4.17.tgz 2026-01-01 09:59:59 +00:00
github-merge-queue[bot]
ee76192f40 Publish onyx-0.4.17.tgz 2026-01-01 03:07:59 +00:00
Weves
58485f966a Publish onyx-0.4.17.tgz 2026-01-01 02:30:17 +00:00
github-merge-queue[bot]
87c837e102 Publish onyx-0.4.17.tgz 2026-01-01 01:54:20 +00:00
github-merge-queue[bot]
c02ca5acb6 Publish onyx-0.4.17.tgz 2026-01-01 01:36:01 +00:00
Weves
f23cd30b37 Publish onyx-0.4.17.tgz 2026-01-01 01:19:40 +00:00
yuhongsun96
34d8d831a3 Publish onyx-0.4.17.tgz 2026-01-01 00:49:13 +00:00
github-merge-queue[bot]
4120981723 Publish onyx-0.4.17.tgz 2026-01-01 00:12:37 +00:00
github-merge-queue[bot]
6f7ee9d1a4 Publish onyx-0.4.17.tgz 2025-12-31 22:16:38 +00:00
Weves
6052aa11e8 Publish onyx-0.4.17.tgz 2025-12-31 22:02:49 +00:00
github-merge-queue[bot]
b0028f13f4 Publish onyx-0.4.17.tgz 2025-12-31 21:09:49 +00:00
github-merge-queue[bot]
beb602414b Publish onyx-0.4.17.tgz 2025-12-31 20:45:48 +00:00
yuhongsun96
3831d674e8 Publish onyx-0.4.17.tgz 2025-12-31 19:16:26 +00:00
yuhongsun96
6f578624c4 Publish onyx-0.4.17.tgz 2025-12-31 18:12:19 +00:00
github-merge-queue[bot]
1cfd22f35f Publish onyx-0.4.17.tgz 2025-12-31 11:43:37 +00:00
github-merge-queue[bot]
1bc109b663 Publish onyx-0.4.17.tgz 2025-12-31 07:58:04 +00:00
Subash-Mohan
965aa86e39 Publish onyx-0.4.17.tgz 2025-12-31 07:31:40 +00:00
github-merge-queue[bot]
4e46b763ed Publish onyx-0.4.17.tgz 2025-12-31 05:28:37 +00:00
github-merge-queue[bot]
11e635cadd Publish onyx-0.4.17.tgz 2025-12-31 02:18:35 +00:00
wenxi-onyx
5c22067803 Publish onyx-0.4.17.tgz 2025-12-31 02:09:08 +00:00
github-merge-queue[bot]
c1891e5a17 Publish onyx-0.4.17.tgz 2025-12-31 02:08:30 +00:00
github-merge-queue[bot]
6a5faefb1c Publish onyx-0.4.17.tgz 2025-12-31 01:49:32 +00:00
github-merge-queue[bot]
efaa5b94fb Publish onyx-0.4.17.tgz 2025-12-31 01:00:22 +00:00
Weves
de3f7c1be9 Publish onyx-0.4.17.tgz 2025-12-31 00:56:56 +00:00
github-merge-queue[bot]
90af8f7755 Publish onyx-0.4.17.tgz 2025-12-31 00:31:39 +00:00
github-merge-queue[bot]
2a32015dd6 Publish onyx-0.4.17.tgz 2025-12-30 22:36:11 +00:00
github-merge-queue[bot]
33c7808086 Publish onyx-0.4.17.tgz 2025-12-30 22:26:06 +00:00
github-merge-queue[bot]
defb7e041d Publish onyx-0.4.17.tgz 2025-12-30 21:45:55 +00:00
github-merge-queue[bot]
7f489a8d10 Publish onyx-0.4.17.tgz 2025-12-30 21:20:03 +00:00
yuhongsun96
3314954883 Publish onyx-0.4.17.tgz 2025-12-30 20:23:03 +00:00
github-merge-queue[bot]
c9f0da16fb Publish onyx-0.4.17.tgz 2025-12-30 19:59:56 +00:00
github-merge-queue[bot]
fd06cd55b0 Publish onyx-0.4.17.tgz 2025-12-30 19:52:17 +00:00
github-merge-queue[bot]
cd7a635cbe Publish onyx-0.4.17.tgz 2025-12-30 19:22:39 +00:00
github-merge-queue[bot]
30426f5b38 Publish onyx-0.4.17.tgz 2025-12-30 18:59:56 +00:00
yuhongsun96
a773598440 Publish onyx-0.4.17.tgz 2025-12-30 18:52:20 +00:00
jmelahman
a864216a7c Publish onyx-0.4.17.tgz 2025-12-30 18:50:46 +00:00
github-merge-queue[bot]
9b02c2c392 Publish onyx-0.4.17.tgz 2025-12-30 18:45:50 +00:00
github-merge-queue[bot]
a5fcce39fc Publish onyx-0.4.17.tgz 2025-12-30 18:44:39 +00:00
Weves
917e503bb6 Publish onyx-0.4.17.tgz 2025-12-30 17:37:44 +00:00
jmelahman
31ef198e75 Publish onyx-0.4.17.tgz 2025-12-30 16:06:57 +00:00
jmelahman
aececde888 Publish onyx-0.4.17.tgz 2025-12-30 16:05:42 +00:00
raunakab
76cdf69916 Publish onyx-0.4.17.tgz 2025-12-30 07:27:44 +00:00
jmelahman
c4c03ebc89 Publish onyx-0.4.17.tgz 2025-12-30 06:43:23 +00:00
jmelahman
56f982b940 Publish onyx-0.4.17.tgz 2025-12-30 03:59:13 +00:00
github-merge-queue[bot]
0057a53af1 Publish onyx-0.4.17.tgz 2025-12-30 03:56:11 +00:00
jmelahman
5fc3cfa5b0 Publish onyx-0.4.17.tgz 2025-12-30 03:29:56 +00:00
jmelahman
7ccf646bec Publish onyx-0.4.17.tgz 2025-12-30 02:30:36 +00:00
yuhongsun96
45bf1c0394 Publish onyx-0.4.17.tgz 2025-12-30 01:25:21 +00:00
jmelahman
6d697b6151 Publish onyx-0.4.17.tgz 2025-12-30 01:22:55 +00:00
yuhongsun96
74ccd5c59c Publish onyx-0.4.17.tgz 2025-12-30 00:41:11 +00:00
jmelahman
2fab387242 Publish onyx-0.4.17.tgz 2025-12-30 00:30:54 +00:00
github-merge-queue[bot]
184b1a4269 Publish onyx-0.4.17.tgz 2025-12-30 00:28:12 +00:00
yuhongsun96
f8421830d0 Publish onyx-0.4.17.tgz 2025-12-30 00:19:20 +00:00
github-merge-queue[bot]
777a4824fc Publish onyx-0.4.17.tgz 2025-12-30 00:01:58 +00:00
Weves
e105cf76f9 Publish onyx-0.4.17.tgz 2025-12-29 22:10:31 +00:00
github-merge-queue[bot]
b246906c43 Publish onyx-0.4.17.tgz 2025-12-29 20:38:05 +00:00
github-merge-queue[bot]
1c05950f06 Publish onyx-0.4.17.tgz 2025-12-29 20:04:03 +00:00
github-merge-queue[bot]
95ee450002 Publish onyx-0.4.17.tgz 2025-12-29 19:59:36 +00:00
github-merge-queue[bot]
e522426c94 Publish onyx-0.4.17.tgz 2025-12-29 18:05:42 +00:00
github-merge-queue[bot]
25298e3953 Publish onyx-0.4.17.tgz 2025-12-29 17:47:49 +00:00
raunakab
4a674e015c Publish onyx-0.4.17.tgz 2025-12-29 17:31:12 +00:00
github-merge-queue[bot]
502098cef9 Publish onyx-0.4.17.tgz 2025-12-29 03:42:32 +00:00
wenxi-onyx
1dd8b9534e Publish onyx-0.4.17.tgz 2025-12-28 22:31:00 +00:00
github-merge-queue[bot]
2415c90ada Publish onyx-0.4.17.tgz 2025-12-28 21:03:56 +00:00
github-merge-queue[bot]
aeb7224681 Publish onyx-0.4.17.tgz 2025-12-28 20:10:40 +00:00
Weves
d7263771cf Publish onyx-0.4.17.tgz 2025-12-28 05:10:09 +00:00
jmelahman
c7d72f1724 Publish onyx-0.4.17.tgz 2025-12-27 20:17:06 +00:00
github-merge-queue[bot]
6ef3c5fd66 Publish onyx-0.4.17.tgz 2025-12-27 19:31:19 +00:00
github-merge-queue[bot]
aa1586f742 Publish onyx-0.4.17.tgz 2025-12-27 17:13:16 +00:00
raunakab
79c7bc7b23 Publish onyx-0.4.17.tgz 2025-12-27 06:38:26 +00:00
raunakab
b777784aff Publish onyx-0.4.17.tgz 2025-12-27 06:02:45 +00:00
raunakab
90f721f54e Publish onyx-0.4.17.tgz 2025-12-27 05:02:15 +00:00
raunakab
c809001cc4 Publish onyx-0.4.17.tgz 2025-12-27 05:01:57 +00:00
jmelahman
31aa2cc581 Publish onyx-0.4.17.tgz 2025-12-27 04:40:47 +00:00
github-merge-queue[bot]
066b6c22e5 Publish onyx-0.4.17.tgz 2025-12-27 04:35:21 +00:00
github-merge-queue[bot]
442efdbba5 Publish onyx-0.4.17.tgz 2025-12-27 02:29:04 +00:00
justin-tahara
a1c22a8fba Publish onyx-0.4.17.tgz 2025-12-27 01:58:14 +00:00
github-merge-queue[bot]
791abb6e17 Publish onyx-0.4.17.tgz 2025-12-27 01:49:52 +00:00
github-merge-queue[bot]
d8c8c9e14b Publish onyx-0.4.17.tgz 2025-12-27 01:23:58 +00:00
justin-tahara
2e0457e4d3 Publish onyx-0.4.17.tgz 2025-12-27 00:18:08 +00:00
justin-tahara
7c384ff874 Publish onyx-0.4.17.tgz 2025-12-27 00:06:13 +00:00
github-merge-queue[bot]
6b1a7f7e6c Publish onyx-0.4.17.tgz 2025-12-26 23:24:08 +00:00
github-merge-queue[bot]
fb44edba0d Publish onyx-0.4.17.tgz 2025-12-26 22:57:08 +00:00
github-merge-queue[bot]
8c8c5c892a Publish onyx-0.4.17.tgz 2025-12-26 21:15:42 +00:00
github-merge-queue[bot]
fdff75e9e5 Publish onyx-0.4.17.tgz 2025-12-26 21:13:24 +00:00
github-merge-queue[bot]
5b1895deaa Publish onyx-0.4.17.tgz 2025-12-26 21:01:46 +00:00
github-merge-queue[bot]
201fe1e79e Publish onyx-0.4.17.tgz 2025-12-26 19:42:23 +00:00
github-merge-queue[bot]
bfb45632fb Publish onyx-0.4.17.tgz 2025-12-26 17:46:52 +00:00
raunakab
73a49a7822 Publish onyx-0.4.17.tgz 2025-12-26 17:01:13 +00:00
raunakab
afb738a06c Publish onyx-0.4.17.tgz 2025-12-26 16:07:38 +00:00
rohoswagger
de14705ae9 Publish onyx-0.4.17.tgz 2025-12-26 03:04:51 +00:00
jmelahman
1399469568 Publish onyx-0.4.17.tgz 2025-12-25 21:41:48 +00:00
rohoswagger
246022bbfe Publish onyx-0.4.17.tgz 2025-12-25 21:36:36 +00:00
rohoswagger
48a7baced9 Publish onyx-0.4.17.tgz 2025-12-25 21:32:08 +00:00
jmelahman
473c07d54a Publish onyx-0.4.17.tgz 2025-12-25 20:01:34 +00:00
raunakab
42815afc8e Publish onyx-0.4.17.tgz 2025-12-25 19:41:21 +00:00
github-merge-queue[bot]
3daff89ec7 Publish onyx-0.4.17.tgz 2025-12-25 18:09:52 +00:00
raunakab
2962d47c24 Publish onyx-0.4.17.tgz 2025-12-25 10:17:24 +00:00
raunakab
d7ca795b00 Publish onyx-0.4.17.tgz 2025-12-25 09:54:47 +00:00
raunakab
a3c81da469 Publish onyx-0.4.17.tgz 2025-12-25 07:24:34 +00:00
github-merge-queue[bot]
0a112fe1b2 Publish onyx-0.4.17.tgz 2025-12-25 05:26:30 +00:00
jmelahman
12d49e3ef3 Publish onyx-0.4.17.tgz 2025-12-25 04:45:18 +00:00
rohoswagger
748bd87985 Publish onyx-0.4.17.tgz 2025-12-25 02:39:16 +00:00
github-merge-queue[bot]
c3b820855c Publish onyx-0.4.17.tgz 2025-12-25 00:38:49 +00:00
justin-tahara
3b0ca278de Publish onyx-0.4.17.tgz 2025-12-24 20:26:26 +00:00
justin-tahara
70f04e6c63 Publish onyx-0.4.17.tgz 2025-12-24 20:22:40 +00:00
jmelahman
72d166ef3d Publish onyx-0.4.17.tgz 2025-12-24 19:38:47 +00:00
jmelahman
aa7d487576 Publish onyx-0.4.17.tgz 2025-12-24 19:18:54 +00:00
github-merge-queue[bot]
4295080357 Publish onyx-0.4.17.tgz 2025-12-24 18:23:28 +00:00
wenxi-onyx
c97ffa0165 Publish onyx-0.4.17.tgz 2025-12-24 17:38:07 +00:00
raunakab
5b9923369c Publish onyx-0.4.17.tgz 2025-12-24 16:10:08 +00:00
raunakab
2230d0d7da Publish onyx-0.4.17.tgz 2025-12-24 06:56:49 +00:00
Subash-Mohan
f90a623839 Publish onyx-0.4.17.tgz 2025-12-24 06:48:10 +00:00
raunakab
2d45232402 Publish onyx-0.4.17.tgz 2025-12-24 06:36:18 +00:00
github-merge-queue[bot]
10c6adb185 Publish onyx-0.4.17.tgz 2025-12-24 06:21:43 +00:00
github-merge-queue[bot]
64f00ecc91 Publish onyx-0.4.17.tgz 2025-12-24 04:54:46 +00:00
github-merge-queue[bot]
8020bb5242 Publish onyx-0.4.17.tgz 2025-12-24 03:51:25 +00:00
yuhongsun96
016ce702d6 Publish onyx-0.4.17.tgz 2025-12-24 03:08:03 +00:00
raunakab
e8b59adec7 Publish onyx-0.4.17.tgz 2025-12-24 03:01:23 +00:00
yuhongsun96
f8e41b4e88 Publish onyx-0.4.17.tgz 2025-12-24 02:46:22 +00:00
yuhongsun96
f8b063700a Publish onyx-0.4.17.tgz 2025-12-24 02:09:59 +00:00
raunakab
7422151cf6 Publish onyx-0.4.17.tgz 2025-12-24 01:55:17 +00:00
raunakab
ffadcb2324 Publish onyx-0.4.17.tgz 2025-12-24 01:54:40 +00:00
github-merge-queue[bot]
b6ae3a1bfb Publish onyx-0.4.17.tgz 2025-12-23 23:46:11 +00:00
github-merge-queue[bot]
0c40dc17c2 Publish onyx-0.4.17.tgz 2025-12-23 23:38:26 +00:00
raunakab
c5d5f138cc Publish onyx-0.4.17.tgz 2025-12-23 23:12:18 +00:00
raunakab
03a8c570ba Publish onyx-0.4.17.tgz 2025-12-23 22:38:58 +00:00
github-merge-queue[bot]
23c7f75bf2 Publish onyx-0.4.17.tgz 2025-12-23 22:37:59 +00:00
yuhongsun96
d42362189b Publish onyx-0.4.17.tgz 2025-12-23 22:20:00 +00:00
github-merge-queue[bot]
37886b0b45 Publish onyx-0.4.17.tgz 2025-12-23 21:44:43 +00:00
justin-tahara
1450b3fa1a Publish onyx-0.4.17.tgz 2025-12-23 21:27:28 +00:00
github-merge-queue[bot]
c04ab841f4 Publish onyx-0.4.17.tgz 2025-12-23 19:44:27 +00:00
jmelahman
d98e7e48a3 Publish onyx-0.4.17.tgz 2025-12-23 17:21:40 +00:00
yuhongsun96
7af945da0f Publish onyx-0.4.17.tgz 2025-12-23 04:14:03 +00:00
github-merge-queue[bot]
7c407da697 Publish onyx-0.4.17.tgz 2025-12-23 03:26:41 +00:00
yuhongsun96
7dd2ccf39c Publish onyx-0.4.17.tgz 2025-12-23 03:09:56 +00:00
wenxi-onyx
33f967d9f7 Publish onyx-0.4.17.tgz 2025-12-23 03:01:07 +00:00
github-merge-queue[bot]
d9f5fa84c6 Publish onyx-0.4.17.tgz 2025-12-23 02:26:39 +00:00
yuhongsun96
418e8797f1 Publish onyx-0.4.17.tgz 2025-12-23 02:11:24 +00:00
wenxi-onyx
fd0d12cc24 Publish onyx-0.4.17.tgz 2025-12-23 01:46:24 +00:00
justin-tahara
458e8106e7 Publish onyx-0.4.17.tgz 2025-12-23 01:22:34 +00:00
github-merge-queue[bot]
bfd7498a3d Publish onyx-0.4.17.tgz 2025-12-23 01:22:22 +00:00
github-merge-queue[bot]
ca290e8061 Publish onyx-0.4.17.tgz 2025-12-23 01:09:51 +00:00
github-merge-queue[bot]
fb287bad7a Publish onyx-0.4.17.tgz 2025-12-23 00:39:12 +00:00
github-merge-queue[bot]
5f5049f779 Publish onyx-0.4.17.tgz 2025-12-23 00:26:22 +00:00
github-merge-queue[bot]
8b66df511c Publish onyx-0.4.17.tgz 2025-12-23 00:14:26 +00:00
wenxi-onyx
d6ff7bb2e5 Publish onyx-0.4.17.tgz 2025-12-22 22:55:08 +00:00
github-merge-queue[bot]
1551eca12c Publish onyx-0.4.17.tgz 2025-12-22 21:50:34 +00:00
jmelahman
4e4823a91e Publish onyx-0.4.17.tgz 2025-12-22 20:13:26 +00:00
github-merge-queue[bot]
14294cd513 Publish onyx-0.4.17.tgz 2025-12-22 19:19:02 +00:00
github-merge-queue[bot]
1bca23ffdc Publish onyx-0.4.17.tgz 2025-12-22 19:12:52 +00:00
github-merge-queue[bot]
7f410ad9d8 Publish onyx-0.4.17.tgz 2025-12-22 18:57:52 +00:00
justin-tahara
30c065748b Publish onyx-0.4.17.tgz 2025-12-22 18:45:08 +00:00
wenxi-onyx
3b342ac800 Publish onyx-0.4.17.tgz 2025-12-22 18:07:26 +00:00
github-merge-queue[bot]
10dbc0150f Publish onyx-0.4.17.tgz 2025-12-22 16:41:06 +00:00
acaprau
f26ef858af Publish onyx-0.4.17.tgz 2025-12-22 16:06:54 +00:00
raunakab
3a1ecd4470 Publish onyx-0.4.17.tgz 2025-12-22 16:01:17 +00:00
jmelahman
17c793556b Publish onyx-0.4.17.tgz 2025-12-22 10:37:41 +00:00
jmelahman
0b413b75f6 Publish onyx-0.4.17.tgz 2025-12-22 08:47:08 +00:00
jmelahman
b317bdddb7 Publish onyx-0.4.17.tgz 2025-12-22 08:31:02 +00:00
jmelahman
33baf24173 Publish onyx-0.4.17.tgz 2025-12-22 08:22:24 +00:00
jmelahman
09b2258269 Publish onyx-0.4.17.tgz 2025-12-22 08:21:31 +00:00
yuhongsun96
1817fd71bb Publish onyx-0.4.17.tgz 2025-12-22 02:26:05 +00:00
yuhongsun96
de930f6f9b Publish onyx-0.4.17.tgz 2025-12-22 01:24:38 +00:00
github-merge-queue[bot]
d887ff2a8e Publish onyx-0.4.17.tgz 2025-12-22 00:34:40 +00:00
jmelahman
63bdcff060 Publish onyx-0.4.17.tgz 2025-12-22 00:16:17 +00:00
yuhongsun96
382b11ae48 Publish onyx-0.4.17.tgz 2025-12-21 23:08:24 +00:00
yuhongsun96
3661670b42 Publish onyx-0.4.17.tgz 2025-12-21 22:57:39 +00:00
github-merge-queue[bot]
0fd15f2bb4 Publish onyx-0.4.17.tgz 2025-12-21 22:26:18 +00:00
yuhongsun96
c1cd609f15 Publish onyx-0.4.17.tgz 2025-12-21 21:49:07 +00:00
github-merge-queue[bot]
803e6cefd9 Publish onyx-0.4.17.tgz 2025-12-21 20:53:54 +00:00
jmelahman
246c47f6f8 Publish onyx-0.4.17.tgz 2025-12-21 20:10:07 +00:00
yuhongsun96
85a46e7a2d Publish onyx-0.4.17.tgz 2025-12-21 07:29:06 +00:00
yuhongsun96
3ddd497f80 Publish onyx-0.4.17.tgz 2025-12-21 06:46:57 +00:00
jmelahman
ef29c3663f Publish onyx-0.4.17.tgz 2025-12-21 04:21:49 +00:00
yuhongsun96
3822392875 Publish onyx-0.4.17.tgz 2025-12-21 04:09:05 +00:00
yuhongsun96
a5fdef5c1d Publish onyx-0.4.17.tgz 2025-12-21 02:28:47 +00:00
jmelahman
7a50d21ed5 Publish onyx-0.4.17.tgz 2025-12-20 15:20:57 +00:00
jmelahman
4c38f3d510 Publish onyx-0.4.17.tgz 2025-12-20 15:04:05 +00:00
jmelahman
3fc66af7c9 Publish onyx-0.4.17.tgz 2025-12-20 14:38:56 +00:00
jmelahman
6838b1dd20 Publish onyx-0.4.17.tgz 2025-12-20 14:05:36 +00:00
jmelahman
08459d643e Publish onyx-0.4.17.tgz 2025-12-20 13:02:41 +00:00
jmelahman
e85b0b1b30 Publish onyx-0.4.17.tgz 2025-12-20 12:30:01 +00:00
jmelahman
eb9e8362f1 Publish onyx-0.4.17.tgz 2025-12-20 12:13:26 +00:00
yuhongsun96
2a6baf8f4b Publish onyx-0.4.17.tgz 2025-12-20 04:32:42 +00:00
yuhongsun96
c0bfccb479 Publish onyx-0.4.17.tgz 2025-12-20 04:23:45 +00:00
yuhongsun96
be10b599ba Publish onyx-0.4.17.tgz 2025-12-20 03:15:02 +00:00
github-merge-queue[bot]
3a1d5e5bdc Publish onyx-0.4.17.tgz 2025-12-20 02:40:50 +00:00
github-merge-queue[bot]
d8e6fbea95 Publish onyx-0.4.17.tgz 2025-12-20 01:45:12 +00:00
yuhongsun96
1bb202ae7c Publish onyx-0.4.17.tgz 2025-12-20 01:19:44 +00:00
github-merge-queue[bot]
2d66631d9b Publish onyx-0.4.17.tgz 2025-12-20 01:12:49 +00:00
github-merge-queue[bot]
9596d0046f Publish onyx-0.4.17.tgz 2025-12-20 01:10:37 +00:00
github-merge-queue[bot]
4b723fb026 Publish onyx-0.4.17.tgz 2025-12-20 01:05:56 +00:00
yuhongsun96
7d19c451bb Publish onyx-0.4.17.tgz 2025-12-20 00:47:43 +00:00
jmelahman
5e811c69ed Publish onyx-0.4.17.tgz 2025-12-20 00:36:25 +00:00
yuhongsun96
acbdc34702 Publish onyx-0.4.17.tgz 2025-12-20 00:13:04 +00:00
yuhongsun96
62162485f9 Publish onyx-0.4.17.tgz 2025-12-19 23:51:05 +00:00
justin-tahara
c2ed8c98d1 Publish onyx-0.4.17.tgz 2025-12-19 23:50:01 +00:00
yuhongsun96
8760915ecd Publish onyx-0.4.17.tgz 2025-12-19 23:38:30 +00:00
yuhongsun96
1cb1e16d31 Publish onyx-0.4.17.tgz 2025-12-19 23:10:13 +00:00
yuhongsun96
cd5ff24bdf Publish onyx-0.4.17.tgz 2025-12-19 23:00:04 +00:00
yuhongsun96
0cda5ea120 Publish onyx-0.4.17.tgz 2025-12-19 22:13:14 +00:00
github-merge-queue[bot]
748a4f1859 Publish onyx-0.4.17.tgz 2025-12-19 22:03:10 +00:00
jmelahman
4b56d37c48 Publish onyx-0.4.17.tgz 2025-12-19 21:44:38 +00:00
yuhongsun96
1564526e2b Publish onyx-0.4.17.tgz 2025-12-19 21:31:02 +00:00
github-merge-queue[bot]
e4dd2dca34 Publish onyx-0.4.17.tgz 2025-12-19 21:19:05 +00:00
github-merge-queue[bot]
55f29eef37 Publish onyx-0.4.17.tgz 2025-12-19 20:42:41 +00:00
github-merge-queue[bot]
f52aef5697 Publish onyx-0.4.17.tgz 2025-12-19 20:32:34 +00:00
jmelahman
e73b08b1b5 Publish onyx-0.4.17.tgz 2025-12-19 20:01:07 +00:00
github-merge-queue[bot]
ea72f4281f Publish onyx-0.4.17.tgz 2025-12-19 19:49:32 +00:00
github-merge-queue[bot]
632f51f62e Publish onyx-0.4.17.tgz 2025-12-19 19:26:56 +00:00
raunakab
ae00cb671a Publish onyx-0.4.17.tgz 2025-12-19 19:12:17 +00:00
yuhongsun96
e0fbb64dde Publish onyx-0.4.17.tgz 2025-12-19 18:48:44 +00:00
jmelahman
b1db38765d Publish onyx-0.4.17.tgz 2025-12-19 18:04:51 +00:00
rohoswagger
f6d663b638 Publish onyx-0.4.17.tgz 2025-12-19 17:53:21 +00:00
github-merge-queue[bot]
0658019b9d Publish onyx-0.4.17.tgz 2025-12-19 17:42:53 +00:00
jmelahman
bf76262630 Publish onyx-0.4.17.tgz 2025-12-19 15:45:12 +00:00
jmelahman
6c90993695 Publish onyx-0.4.17.tgz 2025-12-19 14:51:21 +00:00
jmelahman
c113f49222 Publish onyx-0.4.17.tgz 2025-12-19 14:38:14 +00:00
jmelahman
32087cf85b Publish onyx-0.4.17.tgz 2025-12-19 12:08:35 +00:00
wenxi-onyx
46df314861 Publish onyx-0.4.17.tgz 2025-12-19 07:09:58 +00:00
wenxi-onyx
025921093f Publish onyx-0.4.17.tgz 2025-12-19 06:29:14 +00:00
yuhongsun96
47e11967a7 Publish onyx-0.4.17.tgz 2025-12-19 04:31:12 +00:00
yuhongsun96
60ff7459c5 Publish onyx-0.4.17.tgz 2025-12-19 03:00:52 +00:00
github-merge-queue[bot]
fa90278315 Publish onyx-0.4.17.tgz 2025-12-19 01:59:20 +00:00
github-merge-queue[bot]
330c1a0a8f Publish onyx-0.4.17.tgz 2025-12-19 01:45:34 +00:00
yuhongsun96
82e62e78ae Publish onyx-0.4.17.tgz 2025-12-19 01:18:48 +00:00
justin-tahara
7c6ac5f4e8 Publish onyx-0.4.17.tgz 2025-12-19 01:15:48 +00:00
github-merge-queue[bot]
ec49127cd5 Publish onyx-0.4.17.tgz 2025-12-19 01:03:57 +00:00
wenxi-onyx
a2c7ed8c9f Publish onyx-0.4.17.tgz 2025-12-19 00:50:16 +00:00
github-merge-queue[bot]
4a7a708e72 Publish onyx-0.4.17.tgz 2025-12-19 00:38:51 +00:00
yuhongsun96
f98258370e Publish onyx-0.4.17.tgz 2025-12-18 23:51:50 +00:00
github-merge-queue[bot]
7282bea848 Publish onyx-0.4.17.tgz 2025-12-18 23:28:10 +00:00
github-merge-queue[bot]
f5ae8b0280 Publish onyx-0.4.17.tgz 2025-12-18 23:07:59 +00:00
Weves
7b03bc9813 Publish onyx-0.4.17.tgz 2025-12-18 22:52:49 +00:00
yuhongsun96
e37ff4a899 Publish onyx-0.4.17.tgz 2025-12-18 22:10:22 +00:00
github-merge-queue[bot]
b6b204291d Publish onyx-0.4.17.tgz 2025-12-18 20:54:34 +00:00
yuhongsun96
23d827d939 Publish onyx-0.4.17.tgz 2025-12-18 19:47:11 +00:00
yuhongsun96
c387ef1dca Publish onyx-0.4.17.tgz 2025-12-18 19:37:17 +00:00
github-merge-queue[bot]
94c16fd705 Publish onyx-0.4.17.tgz 2025-12-18 18:56:58 +00:00
rohoswagger
90ccccc1e5 Publish onyx-0.4.17.tgz 2025-12-18 18:19:23 +00:00
rohoswagger
912240e139 Publish onyx-0.4.17.tgz 2025-12-18 18:12:24 +00:00
github-merge-queue[bot]
1a14a0c38d Publish onyx-0.4.17.tgz 2025-12-18 17:54:07 +00:00
github-merge-queue[bot]
69d3903808 Publish onyx-0.4.17.tgz 2025-12-18 11:16:09 +00:00
jmelahman
b5f6c17f42 Publish onyx-0.4.17.tgz 2025-12-18 10:44:07 +00:00
jmelahman
dd2bfe1252 Publish onyx-0.4.17.tgz 2025-12-18 07:44:44 +00:00
github-merge-queue[bot]
7d1a6a68e6 Publish onyx-0.4.17.tgz 2025-12-18 05:39:43 +00:00
jmelahman
8dc93216f4 Publish onyx-0.4.17.tgz 2025-12-18 05:05:18 +00:00
yuhongsun96
28bb4ea931 Publish onyx-0.4.17.tgz 2025-12-18 04:17:31 +00:00
yuhongsun96
b387d7eb94 Publish onyx-0.4.17.tgz 2025-12-18 03:46:08 +00:00
nmgarza5
4c9fe3c543 Publish onyx-0.4.17.tgz 2025-12-18 03:31:26 +00:00
wenxi-onyx
2e9fb7368e Publish onyx-0.4.17.tgz 2025-12-18 03:09:40 +00:00
yuhongsun96
4e15917e0b Publish onyx-0.4.17.tgz 2025-12-18 03:00:14 +00:00
github-merge-queue[bot]
d93eb4ba0e Publish onyx-0.4.17.tgz 2025-12-18 01:57:20 +00:00
yuhongsun96
344c673504 Publish onyx-0.4.17.tgz 2025-12-18 01:13:06 +00:00
github-merge-queue[bot]
a40842dd60 Publish onyx-0.4.17.tgz 2025-12-18 00:41:23 +00:00
github-merge-queue[bot]
8cd43ebc2c Publish onyx-0.4.17.tgz 2025-12-18 00:38:43 +00:00
github-merge-queue[bot]
e5832a0d60 Publish onyx-0.4.17.tgz 2025-12-18 00:08:21 +00:00
wenxi-onyx
acd25c914d Publish onyx-0.4.17.tgz 2025-12-17 23:25:24 +00:00
github-merge-queue[bot]
2a091a79b7 Publish onyx-0.4.17.tgz 2025-12-17 23:07:47 +00:00
yuhongsun96
9c99709322 Publish onyx-0.4.17.tgz 2025-12-17 22:54:07 +00:00
jmelahman
416e8fb580 Publish onyx-0.4.17.tgz 2025-12-17 22:52:53 +00:00
github-merge-queue[bot]
d87c479085 Publish onyx-0.4.17.tgz 2025-12-17 21:42:35 +00:00
jmelahman
192a0818b9 Publish onyx-0.4.17.tgz 2025-12-17 21:28:23 +00:00
github-merge-queue[bot]
999042f475 Publish onyx-0.4.17.tgz 2025-12-17 21:26:19 +00:00
github-merge-queue[bot]
1d4150e8eb Publish onyx-0.4.17.tgz 2025-12-17 21:00:36 +00:00
wenxi-onyx
2548d5bb67 Publish onyx-0.4.17.tgz 2025-12-17 20:38:55 +00:00
Weves
fdf955d6ca Publish onyx-0.4.17.tgz 2025-12-17 20:27:57 +00:00
Weves
4ad13fede3 Publish onyx-0.4.17.tgz 2025-12-17 20:09:00 +00:00
github-merge-queue[bot]
2fa85dcc6c Publish onyx-0.4.17.tgz 2025-12-17 19:28:44 +00:00
github-merge-queue[bot]
3d95859b66 Publish onyx-0.4.17.tgz 2025-12-17 18:51:44 +00:00
raunakab
b1ee288296 Publish onyx-0.4.17.tgz 2025-12-17 17:00:58 +00:00
jmelahman
8c09b03a97 Publish onyx-0.4.17.tgz 2025-12-17 09:11:52 +00:00
jmelahman
3c2e5400d5 Publish onyx-0.4.17.tgz 2025-12-17 09:01:56 +00:00
github-merge-queue[bot]
0c592d4b29 Publish onyx-0.4.17.tgz 2025-12-17 07:21:46 +00:00
jmelahman
8d9389a0aa Publish onyx-0.4.17.tgz 2025-12-17 07:08:16 +00:00
jmelahman
fa345f616b Publish onyx-0.4.17.tgz 2025-12-17 06:17:03 +00:00
github-merge-queue[bot]
b44465d88b Publish onyx-0.4.17.tgz 2025-12-17 05:53:17 +00:00
github-merge-queue[bot]
63393e9dd0 Publish onyx-0.4.17.tgz 2025-12-17 04:08:08 +00:00
yuhongsun96
3eb2c3816a Publish onyx-0.4.17.tgz 2025-12-17 03:54:06 +00:00
yuhongsun96
951e0d8dc7 Publish onyx-0.4.17.tgz 2025-12-17 03:41:30 +00:00
github-merge-queue[bot]
98b2aef3e4 Publish onyx-0.4.17.tgz 2025-12-17 03:00:36 +00:00
github-merge-queue[bot]
cce9ea783e Publish onyx-0.4.17.tgz 2025-12-17 02:53:36 +00:00
github-merge-queue[bot]
6f4d8e612a Publish onyx-0.4.17.tgz 2025-12-17 02:53:05 +00:00
yuhongsun96
4daa7ac0ea Publish onyx-0.4.17.tgz 2025-12-17 02:26:02 +00:00
justin-tahara
9b1a09e152 Publish onyx-0.4.17.tgz 2025-12-17 01:01:05 +00:00
github-merge-queue[bot]
2ef9f72cda Publish onyx-0.4.17.tgz 2025-12-17 00:40:58 +00:00
github-merge-queue[bot]
c5bfd21752 Publish onyx-0.4.17.tgz 2025-12-17 00:27:58 +00:00
github-merge-queue[bot]
38e61ae794 Publish onyx-0.4.17.tgz 2025-12-17 00:19:42 +00:00
github-merge-queue[bot]
ec7a4217b0 Publish onyx-0.4.17.tgz 2025-12-16 23:37:50 +00:00
github-merge-queue[bot]
727e0b8d89 Publish onyx-0.4.17.tgz 2025-12-16 23:32:11 +00:00
github-merge-queue[bot]
e5dfdc5bc3 Publish onyx-0.4.17.tgz 2025-12-16 23:14:55 +00:00
github-merge-queue[bot]
ed552377dc Publish onyx-0.4.17.tgz 2025-12-16 23:06:51 +00:00
justin-tahara
7cc484e8bc Publish onyx-0.4.17.tgz 2025-12-16 22:51:50 +00:00
github-merge-queue[bot]
958c219cef Publish onyx-0.4.17.tgz 2025-12-16 22:34:00 +00:00
justin-tahara
d94ea1049f Publish onyx-0.4.17.tgz 2025-12-16 22:20:01 +00:00
github-merge-queue[bot]
994e1f8afa Publish onyx-0.4.16.tgz 2025-12-16 21:45:40 +00:00
github-merge-queue[bot]
ddecbf6c47 Publish onyx-0.4.16.tgz 2025-12-16 21:38:20 +00:00
github-merge-queue[bot]
c5d0172351 Publish onyx-0.4.16.tgz 2025-12-16 21:16:35 +00:00
jmelahman
6d29345fb7 Publish onyx-0.4.16.tgz 2025-12-16 21:03:49 +00:00
Weves
71dd57b92c Publish onyx-0.4.16.tgz 2025-12-16 20:57:01 +00:00
github-merge-queue[bot]
88e1d984b3 Publish onyx-0.4.16.tgz 2025-12-16 20:32:45 +00:00
github-merge-queue[bot]
e64150be85 Publish onyx-0.4.15.tgz 2025-12-16 19:37:31 +00:00
github-merge-queue[bot]
9b3e6d5fb1 Publish onyx-0.4.15.tgz 2025-12-16 19:06:23 +00:00
github-merge-queue[bot]
c85706b40c Publish onyx-0.4.15.tgz 2025-12-16 19:01:38 +00:00
wenxi-onyx
b4ef57663c Publish onyx-0.4.15.tgz 2025-12-16 18:39:07 +00:00
wenxi-onyx
c0983c0278 Publish onyx-0.4.15.tgz 2025-12-16 18:21:40 +00:00
github-merge-queue[bot]
0e66cd9fe5 Publish onyx-0.4.15.tgz 2025-12-16 18:09:25 +00:00
wenxi-onyx
c2a3d1f817 Publish onyx-0.4.15.tgz 2025-12-16 17:48:34 +00:00
jmelahman
7fa94f772f Publish onyx-0.4.15.tgz 2025-12-16 10:54:21 +00:00
jmelahman
3bf0715dca Publish onyx-0.4.15.tgz 2025-12-16 10:30:35 +00:00
jmelahman
747b18431b Publish onyx-0.4.15.tgz 2025-12-16 10:05:23 +00:00
jmelahman
02d7fb2a83 Publish onyx-0.4.15.tgz 2025-12-16 09:05:46 +00:00
jmelahman
282167c0ec Publish onyx-0.4.15.tgz 2025-12-16 07:52:19 +00:00
github-merge-queue[bot]
ab4d7d22b6 Publish onyx-0.4.15.tgz 2025-12-16 05:36:19 +00:00
github-merge-queue[bot]
8395dbbb6f Publish onyx-0.4.15.tgz 2025-12-16 03:50:31 +00:00
nmgarza5
9f14b965e4 Publish onyx-0.4.15.tgz 2025-12-16 03:36:42 +00:00
jmelahman
edb3905a22 Publish onyx-0.4.15.tgz 2025-12-16 03:34:48 +00:00
jmelahman
1521445c04 Publish onyx-0.4.15.tgz 2025-12-16 02:55:35 +00:00
raunakab
66009f6739 Publish onyx-0.4.15.tgz 2025-12-16 02:49:10 +00:00
github-merge-queue[bot]
c2bc449b35 Publish onyx-0.4.15.tgz 2025-12-16 02:42:46 +00:00
github-merge-queue[bot]
17476fdcf0 Publish onyx-0.4.15.tgz 2025-12-16 02:36:44 +00:00
github-merge-queue[bot]
00e92cdfc1 Publish onyx-0.4.15.tgz 2025-12-16 01:48:34 +00:00
jmelahman
dcfb327981 Publish onyx-0.4.15.tgz 2025-12-16 01:16:30 +00:00
github-merge-queue[bot]
93ef56dca9 Publish onyx-0.4.15.tgz 2025-12-16 00:08:06 +00:00
jmelahman
42851099b4 Publish onyx-0.4.15.tgz 2025-12-15 23:38:53 +00:00
github-merge-queue[bot]
f8afba56f5 Publish onyx-0.4.15.tgz 2025-12-15 22:17:07 +00:00
github-merge-queue[bot]
f058af3c2a Publish onyx-0.4.15.tgz 2025-12-15 19:36:08 +00:00
wenxi-onyx
e9b4fe3e39 Publish onyx-0.4.15.tgz 2025-12-15 18:54:05 +00:00
github-merge-queue[bot]
c3a767030f Publish onyx-0.4.15.tgz 2025-12-15 18:52:19 +00:00
github-merge-queue[bot]
37409c6dcd Publish onyx-0.4.15.tgz 2025-12-15 18:48:39 +00:00
wenxi-onyx
2d3f8e4eb0 Publish onyx-0.4.15.tgz 2025-12-15 18:20:03 +00:00
Weves
c12616e83c Publish onyx-0.4.15.tgz 2025-12-15 17:46:40 +00:00
yuhongsun96
3833e1ebf8 Publish onyx-0.4.15.tgz 2025-12-15 08:00:10 +00:00
yuhongsun96
8cb3e5fc65 Publish onyx-0.4.15.tgz 2025-12-15 07:01:06 +00:00
yuhongsun96
e2d6750c2f Publish onyx-0.4.15.tgz 2025-12-15 06:56:23 +00:00
Weves
2da27140da Publish onyx-0.4.15.tgz 2025-12-15 03:50:39 +00:00
github-merge-queue[bot]
a41747006d Publish onyx-0.4.15.tgz 2025-12-15 02:22:19 +00:00
Weves
7345eb8472 Publish onyx-0.4.15.tgz 2025-12-15 01:37:32 +00:00
Weves
17a7fbbd50 Publish onyx-0.4.15.tgz 2025-12-15 01:14:57 +00:00
Weves
553f7b6105 Publish onyx-0.4.15.tgz 2025-12-15 00:37:46 +00:00
github-merge-queue[bot]
e778af7676 Publish onyx-0.4.15.tgz 2025-12-14 23:15:00 +00:00
Weves
6f1080bff8 Publish onyx-0.4.15.tgz 2025-12-14 22:51:01 +00:00
github-merge-queue[bot]
344148d751 Publish onyx-0.4.15.tgz 2025-12-14 22:22:06 +00:00
github-merge-queue[bot]
c437ce2f8d Publish onyx-0.4.15.tgz 2025-12-13 02:29:45 +00:00
github-merge-queue[bot]
87919aea44 Publish onyx-0.4.15.tgz 2025-12-13 02:26:55 +00:00
github-merge-queue[bot]
6ee3fc7d40 Publish onyx-0.4.14.tgz 2025-12-13 00:21:23 +00:00
github-merge-queue[bot]
dc3b11a2fc Publish onyx-0.4.14.tgz 2025-12-12 07:39:22 +00:00
yuhongsun96
debb922649 Publish onyx-0.4.14.tgz 2025-12-12 03:48:30 +00:00
jmelahman
14ea3b466b Publish onyx-0.4.14.tgz 2025-12-12 03:19:18 +00:00
yuhongsun96
ebf2c8d2f1 Publish onyx-0.4.14.tgz 2025-12-12 03:17:43 +00:00
yuhongsun96
42a8b66c74 Publish onyx-0.4.14.tgz 2025-12-12 02:28:36 +00:00
raunakab
d6191c30ad Publish onyx-0.4.14.tgz 2025-12-12 00:22:39 +00:00
yuhongsun96
2a4461f913 Publish onyx-0.4.14.tgz 2025-12-11 23:45:26 +00:00
jmelahman
3682358fc0 Publish onyx-0.4.14.tgz 2025-12-11 01:23:47 +00:00
justin-tahara
c1d3c439f1 Publish onyx-0.4.14.tgz 2025-12-09 17:44:23 +00:00
github-merge-queue[bot]
b2ee3119a5 Publish onyx-0.4.14.tgz 2025-12-09 04:26:11 +00:00
Weves
520237e881 Publish onyx-0.4.14.tgz 2025-12-09 03:01:43 +00:00
Weves
019e148db3 Publish onyx-0.4.14.tgz 2025-12-09 02:44:51 +00:00
github-merge-queue[bot]
e498ba629c Publish onyx-0.4.14.tgz 2025-12-07 01:02:05 +00:00
jmelahman
b548fff581 Publish onyx-0.4.14.tgz 2025-12-06 18:49:27 +00:00
jmelahman
bf660c5f16 Publish onyx-0.4.14.tgz 2025-12-06 18:10:41 +00:00
jmelahman
e5624ab298 Publish onyx-0.4.14.tgz 2025-12-06 17:53:58 +00:00
raunakab
837a57950a Publish onyx-0.4.14.tgz 2025-12-06 03:41:07 +00:00
yuhongsun96
2890d38439 Publish onyx-0.4.14.tgz 2025-12-06 01:13:35 +00:00
github-merge-queue[bot]
1bbfa63966 Publish onyx-0.4.14.tgz 2025-12-06 00:30:03 +00:00
jmelahman
5878abfe17 Publish onyx-0.4.14.tgz 2025-12-06 00:17:33 +00:00
github-merge-queue[bot]
cea5c23d8b Publish onyx-0.4.14.tgz 2025-12-05 23:49:04 +00:00
github-merge-queue[bot]
e8cbcbb5d0 Publish onyx-0.4.14.tgz 2025-12-05 23:15:05 +00:00
github-merge-queue[bot]
f1c7aae545 Publish onyx-0.4.14.tgz 2025-12-05 22:36:47 +00:00
github-merge-queue[bot]
99e4ec6353 Publish onyx-0.4.14.tgz 2025-12-05 21:51:44 +00:00
github-merge-queue[bot]
0209cbb1f6 Publish onyx-0.4.14.tgz 2025-12-05 20:52:25 +00:00
jmelahman
243589a3cd Publish onyx-0.4.14.tgz 2025-12-05 18:52:05 +00:00
github-merge-queue[bot]
4c4c29fbcd Publish onyx-0.4.14.tgz 2025-12-05 18:45:29 +00:00
jmelahman
0badf1c599 Publish onyx-0.4.14.tgz 2025-12-05 18:32:02 +00:00
jmelahman
846571208f Publish onyx-0.4.14.tgz 2025-12-05 18:28:48 +00:00
yuhongsun96
69b9fa834e Publish onyx-0.4.14.tgz 2025-12-05 18:16:22 +00:00
github-merge-queue[bot]
7b09025e5b Publish onyx-0.4.14.tgz 2025-12-05 18:07:29 +00:00
github-merge-queue[bot]
5e516f2cd4 Publish onyx-0.4.14.tgz 2025-12-05 17:54:15 +00:00
github-merge-queue[bot]
d17fb03813 Publish onyx-0.4.14.tgz 2025-12-05 17:45:43 +00:00
github-merge-queue[bot]
da1114bc05 Publish onyx-0.4.14.tgz 2025-12-05 07:19:22 +00:00
github-merge-queue[bot]
56ffc55f25 Publish onyx-0.4.14.tgz 2025-12-05 03:21:15 +00:00
github-merge-queue[bot]
528b894e29 Publish onyx-0.4.14.tgz 2025-12-05 03:18:35 +00:00
jmelahman
d9338763c8 Publish onyx-0.4.14.tgz 2025-12-05 02:36:59 +00:00
jmelahman
09dc8bfc99 Publish onyx-0.4.14.tgz 2025-12-05 02:30:44 +00:00
jmelahman
fd2de6e373 Publish onyx-0.4.14.tgz 2025-12-05 02:28:41 +00:00
github-merge-queue[bot]
b6f3ff8379 Publish onyx-0.4.14.tgz 2025-12-05 02:21:00 +00:00
yuhongsun96
e1a632cebe Publish onyx-0.4.14.tgz 2025-12-05 01:51:03 +00:00
jmelahman
918bf1e4f3 Publish onyx-0.4.14.tgz 2025-12-05 01:28:06 +00:00
github-merge-queue[bot]
79064f602d Publish onyx-0.4.14.tgz 2025-12-05 01:12:59 +00:00
github-merge-queue[bot]
9b656b76f9 Publish onyx-0.4.13.tgz 2025-12-05 00:24:23 +00:00
github-merge-queue[bot]
c637235b1e Publish onyx-0.4.13.tgz 2025-12-05 00:04:35 +00:00
github-merge-queue[bot]
19c098a8c1 Publish onyx-0.4.13.tgz 2025-12-04 23:59:43 +00:00
github-merge-queue[bot]
7d98599805 Publish onyx-0.4.13.tgz 2025-12-04 23:21:03 +00:00
github-merge-queue[bot]
aa33016bd0 Publish onyx-0.4.13.tgz 2025-12-04 22:40:03 +00:00
github-merge-queue[bot]
6d04b1718b Publish onyx-0.4.13.tgz 2025-12-04 21:50:55 +00:00
github-merge-queue[bot]
57bebf9582 Publish onyx-0.4.13.tgz 2025-12-04 21:01:06 +00:00
github-merge-queue[bot]
5a1b2c6de2 Publish onyx-0.4.13.tgz 2025-12-04 20:58:53 +00:00
justin-tahara
44b7eb4189 Publish onyx-0.4.13.tgz 2025-12-04 19:31:23 +00:00
jmelahman
9903c0e852 Publish onyx-0.4.13.tgz 2025-12-04 19:30:52 +00:00
github-merge-queue[bot]
1c5c234c81 Publish onyx-0.4.13.tgz 2025-12-04 17:17:42 +00:00
github-merge-queue[bot]
aef3ae1059 Publish onyx-0.4.13.tgz 2025-12-04 08:39:32 +00:00
yuhongsun96
837cabd1f0 Publish onyx-0.4.13.tgz 2025-12-04 07:46:43 +00:00
raunakab
ce8ce39d79 Publish onyx-0.4.13.tgz 2025-12-04 05:19:31 +00:00
github-merge-queue[bot]
fcb3987e1c Publish onyx-0.4.13.tgz 2025-12-04 05:08:24 +00:00
github-merge-queue[bot]
0ad7aae02f Publish onyx-0.4.13.tgz 2025-12-04 04:34:19 +00:00
wenxi-onyx
c415832662 Publish onyx-0.4.13.tgz 2025-12-04 03:52:43 +00:00
github-merge-queue[bot]
6408760fbf Publish onyx-0.4.13.tgz 2025-12-04 02:41:52 +00:00
jmelahman
15ec8d38a4 Publish onyx-0.4.13.tgz 2025-12-04 02:09:01 +00:00
jmelahman
d7e3fbdd51 Publish onyx-0.4.13.tgz 2025-12-04 02:07:33 +00:00
yuhongsun96
71a3434d89 Publish onyx-0.4.13.tgz 2025-12-04 01:30:57 +00:00
Weves
5b1202e08c Publish onyx-0.4.13.tgz 2025-12-04 01:27:01 +00:00
github-merge-queue[bot]
552cb9bd7e Publish onyx-0.4.13.tgz 2025-12-04 01:20:09 +00:00
github-merge-queue[bot]
e4df251ebb Publish onyx-0.4.13.tgz 2025-12-04 00:54:06 +00:00
github-merge-queue[bot]
c5a99ef84a Publish onyx-0.4.13.tgz 2025-12-04 00:07:20 +00:00
github-merge-queue[bot]
475f17ba88 Publish onyx-0.4.13.tgz 2025-12-04 00:05:10 +00:00
yuhongsun96
da3e6a7589 Publish onyx-0.4.13.tgz 2025-12-03 23:47:25 +00:00
github-merge-queue[bot]
065cb844c5 Publish onyx-0.4.13.tgz 2025-12-03 22:47:10 +00:00
github-merge-queue[bot]
3ade4b6e1d Publish onyx-0.4.13.tgz 2025-12-03 22:23:50 +00:00
yuhongsun96
550aa35735 Publish onyx-0.4.13.tgz 2025-12-03 22:08:03 +00:00
jmelahman
52bb13b7a0 Publish onyx-0.4.13.tgz 2025-12-03 21:57:35 +00:00
yuhongsun96
6894ebf1e6 Publish onyx-0.4.13.tgz 2025-12-03 21:50:00 +00:00
github-merge-queue[bot]
ef0cda4ada Publish onyx-0.4.13.tgz 2025-12-03 21:43:44 +00:00
github-merge-queue[bot]
0993f7a6c4 Publish onyx-0.4.13.tgz 2025-12-03 21:01:33 +00:00
raunakab
5a4dca0a68 Publish onyx-0.4.13.tgz 2025-12-03 20:48:28 +00:00
yuhongsun96
46d924877e Publish onyx-0.4.13.tgz 2025-12-03 20:38:11 +00:00
yuhongsun96
02b6b5d0f9 Publish onyx-0.4.13.tgz 2025-12-03 19:50:56 +00:00
github-merge-queue[bot]
280eafe630 Publish onyx-0.4.13.tgz 2025-12-03 18:43:38 +00:00
github-merge-queue[bot]
f9ded6b05a Publish onyx-0.4.13.tgz 2025-12-03 18:28:41 +00:00
github-merge-queue[bot]
44aaa808e0 Publish onyx-0.4.13.tgz 2025-12-03 18:24:38 +00:00
wenxi-onyx
f08dbc3dfd Publish onyx-0.4.13.tgz 2025-12-03 18:09:28 +00:00
wenxi-onyx
1952d80fdb Publish onyx-0.4.13.tgz 2025-12-03 18:03:36 +00:00
github-merge-queue[bot]
bf2e9f4e35 Publish onyx-0.4.13.tgz 2025-12-03 16:57:30 +00:00
github-merge-queue[bot]
ff2dddd0bf Publish onyx-0.4.13.tgz 2025-12-03 06:32:34 +00:00
github-merge-queue[bot]
d58b157007 Publish onyx-0.4.13.tgz 2025-12-03 04:10:08 +00:00
github-merge-queue[bot]
4a71fcf6a0 Publish onyx-0.4.13.tgz 2025-12-03 03:10:15 +00:00
github-merge-queue[bot]
b301a2f7f9 Publish onyx-0.4.13.tgz 2025-12-03 02:54:16 +00:00
github-merge-queue[bot]
601f1c3697 Publish onyx-0.4.13.tgz 2025-12-03 02:02:35 +00:00
jmelahman
4b7bbbb6f1 Publish onyx-0.4.13.tgz 2025-12-03 01:48:56 +00:00
github-merge-queue[bot]
b458030be8 Publish onyx-0.4.13.tgz 2025-12-03 01:28:53 +00:00
jmelahman
dc10feee5f Publish onyx-0.4.13.tgz 2025-12-03 01:12:48 +00:00
github-merge-queue[bot]
2aa5dbb5eb Publish onyx-0.4.13.tgz 2025-12-03 00:53:27 +00:00
github-merge-queue[bot]
b232017687 Publish onyx-0.4.13.tgz 2025-12-02 22:54:03 +00:00
github-merge-queue[bot]
cc4015c031 Publish onyx-0.4.13.tgz 2025-12-02 22:28:43 +00:00
github-merge-queue[bot]
c9bc1333d8 Publish onyx-0.4.13.tgz 2025-12-02 21:29:32 +00:00
github-merge-queue[bot]
f97a0c2d16 Publish onyx-0.4.13.tgz 2025-12-02 21:26:43 +00:00
Weves
7299145723 Publish onyx-0.4.13.tgz 2025-12-02 20:31:15 +00:00
yuhongsun96
008bb1fd77 Publish onyx-0.4.13.tgz 2025-12-02 19:09:10 +00:00
yuhongsun96
6538cc0775 Publish onyx-0.4.13.tgz 2025-12-02 19:07:55 +00:00
yuhongsun96
8d9ff6ed31 Publish onyx-0.4.13.tgz 2025-12-02 19:01:23 +00:00
yuhongsun96
10d2fffb10 Publish onyx-0.4.13.tgz 2025-12-02 18:50:58 +00:00
github-merge-queue[bot]
9d2e309ed4 Publish onyx-0.4.13.tgz 2025-12-02 18:05:04 +00:00
github-merge-queue[bot]
33fb005cf7 Publish onyx-0.4.13.tgz 2025-12-02 17:59:49 +00:00
justin-tahara
6a9d235e09 Publish onyx-0.4.13.tgz 2025-12-02 04:40:43 +00:00
justin-tahara
52780326c3 Publish onyx-0.4.13.tgz 2025-12-02 03:54:46 +00:00
yuhongsun96
87ca46f128 Publish onyx-0.4.12.tgz 2025-12-02 02:02:17 +00:00
github-merge-queue[bot]
6960594905 Publish onyx-0.4.12.tgz 2025-12-02 01:42:02 +00:00
raunakab
ee43b826e2 Publish onyx-0.4.12.tgz 2025-12-02 01:15:57 +00:00
github-merge-queue[bot]
864dce89e4 Publish onyx-0.4.12.tgz 2025-12-02 00:51:05 +00:00
github-merge-queue[bot]
3f9113c892 Publish onyx-0.4.12.tgz 2025-12-02 00:35:10 +00:00
jmelahman
a78b8f0bc3 Publish onyx-0.4.12.tgz 2025-12-02 00:19:58 +00:00
github-merge-queue[bot]
62e2bfa1e8 Publish onyx-0.4.12.tgz 2025-12-01 23:52:07 +00:00
github-merge-queue[bot]
12ba157fb8 Publish onyx-0.4.12.tgz 2025-12-01 23:44:49 +00:00
justin-tahara
36cafb38ab Publish onyx-0.4.12.tgz 2025-12-01 23:29:10 +00:00
yuhongsun96
ea9df74ca6 Publish onyx-0.4.12.tgz 2025-12-01 22:41:51 +00:00
github-merge-queue[bot]
fac9865674 Publish onyx-0.4.12.tgz 2025-12-01 22:11:21 +00:00
justin-tahara
5d134b848b Publish onyx-0.4.12.tgz 2025-12-01 21:42:09 +00:00
wenxi-onyx
b1a5c4131e Publish onyx-0.4.11.tgz 2025-12-01 17:38:28 +00:00
Weves
2d9f11e84e Publish onyx-0.4.11.tgz 2025-12-01 17:37:15 +00:00
Weves
09072038d4 Publish onyx-0.4.11.tgz 2025-12-01 02:53:59 +00:00
Subash-Mohan
01a5985c70 Publish onyx-0.4.11.tgz 2025-11-30 06:16:13 +00:00
github-merge-queue[bot]
cc08f460ee Publish onyx-0.4.11.tgz 2025-11-29 19:56:15 +00:00
jmelahman
8b7f180223 Publish onyx-0.4.11.tgz 2025-11-26 23:38:54 +00:00
jmelahman
fb4c84e09c Publish onyx-0.4.11.tgz 2025-11-26 23:10:52 +00:00
Weves
711587aeb2 Publish onyx-0.4.11.tgz 2025-11-26 22:55:42 +00:00
jmelahman
914601c437 Publish onyx-0.4.11.tgz 2025-11-26 22:28:07 +00:00
jmelahman
610c8e34fd Publish onyx-0.4.11.tgz 2025-11-26 21:40:42 +00:00
raunakab
e0a0ad2fdb Publish onyx-0.4.11.tgz 2025-11-26 21:18:33 +00:00
jmelahman
0681f03cdc Publish onyx-0.4.11.tgz 2025-11-26 21:17:40 +00:00
github-merge-queue[bot]
96b452c316 Publish onyx-0.4.11.tgz 2025-11-26 18:30:06 +00:00
github-merge-queue[bot]
b0fdcc1d4c Publish onyx-0.4.11.tgz 2025-11-26 17:47:49 +00:00
Weves
932be0135e Publish onyx-0.4.11.tgz 2025-11-26 06:42:25 +00:00
justin-tahara
3e567aecf2 Publish onyx-0.4.11.tgz 2025-11-26 04:11:22 +00:00
github-merge-queue[bot]
3cd087f859 Publish onyx-0.4.11.tgz 2025-11-26 03:24:18 +00:00
github-merge-queue[bot]
94305fad8f Publish onyx-0.4.11.tgz 2025-11-26 00:34:35 +00:00
raunakab
a2270e6787 Publish onyx-0.4.11.tgz 2025-11-26 00:11:52 +00:00
github-merge-queue[bot]
e6bbb71bd7 Publish onyx-0.4.11.tgz 2025-11-25 22:16:47 +00:00
raunakab
ffefb4cdaf Publish onyx-0.4.11.tgz 2025-11-25 21:51:54 +00:00
justin-tahara
f405aa45aa Publish onyx-0.4.11.tgz 2025-11-25 21:32:53 +00:00
github-merge-queue[bot]
0980c00bb9 Publish onyx-0.4.11.tgz 2025-11-25 21:30:48 +00:00
wenxi-onyx
2a49cd18e6 Publish onyx-0.4.11.tgz 2025-11-25 20:40:24 +00:00
github-merge-queue[bot]
3575238fac Publish onyx-0.4.11.tgz 2025-11-25 20:33:33 +00:00
wenxi-onyx
98a89e9855 Publish onyx-0.4.11.tgz 2025-11-25 19:25:21 +00:00
raunakab
eff360d5c1 Publish onyx-0.4.11.tgz 2025-11-25 18:28:16 +00:00
github-merge-queue[bot]
06fb8911ff Publish onyx-0.4.11.tgz 2025-11-25 18:03:13 +00:00
github-merge-queue[bot]
0e881d024d Publish onyx-0.4.11.tgz 2025-11-25 17:36:48 +00:00
github-merge-queue[bot]
b8c67d0453 Publish onyx-0.4.11.tgz 2025-11-25 17:16:34 +00:00
github-merge-queue[bot]
11be109ac8 Publish onyx-0.4.11.tgz 2025-11-25 02:44:53 +00:00
github-merge-queue[bot]
889ab179f9 Publish onyx-0.4.11.tgz 2025-11-25 01:35:55 +00:00
rguan72
9cd9da2ec1 Publish onyx-0.4.11.tgz 2025-11-24 22:42:31 +00:00
justin-tahara
49536bec45 Publish onyx-0.4.11.tgz 2025-11-24 22:25:56 +00:00
github-merge-queue[bot]
c366aad426 Publish onyx-0.4.10.tgz 2025-11-24 21:44:14 +00:00
justin-tahara
7870ead6c5 Publish onyx-0.4.10.tgz 2025-11-24 21:30:02 +00:00
github-merge-queue[bot]
500477484a Publish onyx-0.4.10.tgz 2025-11-24 21:17:55 +00:00
github-merge-queue[bot]
0b3acefdec Publish onyx-0.4.10.tgz 2025-11-24 19:15:32 +00:00
github-merge-queue[bot]
54bdc8f84b Publish onyx-0.4.10.tgz 2025-11-24 18:41:04 +00:00
github-merge-queue[bot]
363929b19a Publish onyx-0.4.10.tgz 2025-11-24 17:28:28 +00:00
github-merge-queue[bot]
2b4230e484 Publish onyx-0.4.10.tgz 2025-11-24 02:52:05 +00:00
github-merge-queue[bot]
a8b704a74c Publish onyx-0.4.10.tgz 2025-11-23 21:33:03 +00:00
raunakab
476e199c4d Publish onyx-0.4.10.tgz 2025-11-23 20:33:07 +00:00
github-merge-queue[bot]
ac8c82d953 Publish onyx-0.4.10.tgz 2025-11-23 19:25:00 +00:00
raunakab
a2a3b7d23f Publish onyx-0.4.10.tgz 2025-11-23 18:24:50 +00:00
github-merge-queue[bot]
fdf99a5d8a Publish onyx-0.4.10.tgz 2025-11-23 10:32:29 +00:00
Subash-Mohan
7e19c6e8fa Publish onyx-0.4.10.tgz 2025-11-23 10:18:41 +00:00
github-merge-queue[bot]
6fb86e5ca7 Publish onyx-0.4.10.tgz 2025-11-23 10:03:44 +00:00
raunakab
b983c80a45 Publish onyx-0.4.10.tgz 2025-11-23 04:40:16 +00:00
github-merge-queue[bot]
f80dfa5597 Publish onyx-0.4.10.tgz 2025-11-23 02:13:05 +00:00
raunakab
b0e1fc8473 Publish onyx-0.4.10.tgz 2025-11-23 01:38:32 +00:00
github-merge-queue[bot]
8d94382f29 Publish onyx-0.4.10.tgz 2025-11-22 07:45:21 +00:00
github-merge-queue[bot]
80cd4d1a0d Publish onyx-0.4.10.tgz 2025-11-22 07:34:59 +00:00
github-merge-queue[bot]
fd8b1eafb5 Publish onyx-0.4.10.tgz 2025-11-22 07:32:42 +00:00
nmgarza5
901d00ccbe Publish onyx-0.4.10.tgz 2025-11-22 05:49:10 +00:00
raunakab
f193689472 Publish onyx-0.4.10.tgz 2025-11-22 04:59:21 +00:00
evan-onyx
bb27c29a3b Publish onyx-0.4.10.tgz 2025-11-22 01:30:06 +00:00
github-merge-queue[bot]
0b1d64389f Publish onyx-0.4.10.tgz 2025-11-21 21:42:01 +00:00
github-merge-queue[bot]
6a767ad668 Publish onyx-0.4.10.tgz 2025-11-21 21:14:51 +00:00
justin-tahara
179a15e3ae Publish onyx-0.4.10.tgz 2025-11-21 18:50:39 +00:00
justin-tahara
f397d67c69 Publish onyx-0.4.10.tgz 2025-11-21 18:17:12 +00:00
wenxi-onyx
a6fd686142 Publish onyx-0.4.10.tgz 2025-11-21 17:24:24 +00:00
github-merge-queue[bot]
1f73ece782 Publish onyx-0.4.10.tgz 2025-11-21 04:47:57 +00:00
github-merge-queue[bot]
e25b49b0c2 Publish onyx-0.4.10.tgz 2025-11-21 04:25:18 +00:00
Weves
73b82dc63e Publish onyx-0.4.10.tgz 2025-11-21 02:33:34 +00:00
github-merge-queue[bot]
0d8c15cd2e Publish onyx-0.4.10.tgz 2025-11-21 02:18:55 +00:00
wenxi-onyx
9f9b1a91b0 Publish onyx-0.4.10.tgz 2025-11-21 01:23:43 +00:00
justin-tahara
968aef17e5 Publish onyx-0.4.10.tgz 2025-11-21 01:16:54 +00:00
github-merge-queue[bot]
7bc98a1403 Publish onyx-0.4.10.tgz 2025-11-21 01:01:16 +00:00
github-merge-queue[bot]
70cc37e967 Publish onyx-0.4.10.tgz 2025-11-21 00:50:15 +00:00
github-merge-queue[bot]
b860e6855b Publish onyx-0.4.10.tgz 2025-11-21 00:27:25 +00:00
github-merge-queue[bot]
c3d48d76a8 Publish onyx-0.4.10.tgz 2025-11-20 23:52:18 +00:00
github-merge-queue[bot]
0ce4d2d4f8 Publish onyx-0.4.10.tgz 2025-11-20 23:00:53 +00:00
github-merge-queue[bot]
7aa4514976 Publish onyx-0.4.10.tgz 2025-11-20 22:06:47 +00:00
github-merge-queue[bot]
c5f264cae4 Publish onyx-0.4.10.tgz 2025-11-20 21:22:47 +00:00
github-merge-queue[bot]
3b609149ea Publish onyx-0.4.9.tgz 2025-11-20 20:39:55 +00:00
github-merge-queue[bot]
6512411546 Publish onyx-0.4.8.tgz 2025-11-20 20:25:38 +00:00
wenxi-onyx
617ec34ac3 Publish onyx-0.4.8.tgz 2025-11-20 18:36:25 +00:00
github-merge-queue[bot]
1c8751483e Publish onyx-0.4.8.tgz 2025-11-20 18:05:16 +00:00
github-merge-queue[bot]
dec8e1657c Publish onyx-0.4.8.tgz 2025-11-20 18:01:23 +00:00
github-merge-queue[bot]
e3f8310d1b Publish onyx-0.4.8.tgz 2025-11-20 16:59:27 +00:00
github-merge-queue[bot]
77d06b04b1 Publish onyx-0.4.8.tgz 2025-11-20 07:26:10 +00:00
github-merge-queue[bot]
209b518a49 Publish onyx-0.4.8.tgz 2025-11-20 03:39:24 +00:00
github-merge-queue[bot]
c202b42ad2 Publish onyx-0.4.8.tgz 2025-11-20 01:12:52 +00:00
github-merge-queue[bot]
d167b4fa3e Publish onyx-0.4.8.tgz 2025-11-20 01:08:39 +00:00
github-merge-queue[bot]
337c30868b Publish onyx-0.4.8.tgz 2025-11-20 00:09:10 +00:00
nmgarza5
012d61be85 Publish onyx-0.4.8.tgz 2025-11-19 22:45:27 +00:00
jmelahman
790a7a68aa Publish onyx-0.4.8.tgz 2025-11-19 22:27:24 +00:00
jmelahman
fe636dd4c6 Publish onyx-0.4.8.tgz 2025-11-19 22:20:28 +00:00
github-merge-queue[bot]
13c876dd33 Publish onyx-0.4.8.tgz 2025-11-19 22:01:34 +00:00
jmelahman
de4fbb7392 Publish onyx-0.4.8.tgz 2025-11-19 19:04:54 +00:00
justin-tahara
6ed12c6957 Publish onyx-0.4.8.tgz 2025-11-19 18:06:53 +00:00
raunakab
5579e277dd Publish onyx-0.4.8.tgz 2025-11-19 17:28:26 +00:00
raunakab
593226404e Publish onyx-0.4.8.tgz 2025-11-19 16:32:14 +00:00
github-merge-queue[bot]
087ae426f1 Publish onyx-0.4.8.tgz 2025-11-19 14:30:33 +00:00
raunakab
eb794b53f3 Publish onyx-0.4.8.tgz 2025-11-19 09:41:18 +00:00
jmelahman
daf2981bd1 Publish onyx-0.4.8.tgz 2025-11-19 06:27:36 +00:00
github-merge-queue[bot]
969fee9345 Publish onyx-0.4.8.tgz 2025-11-19 04:20:39 +00:00
github-merge-queue[bot]
f3f65d5e22 Publish onyx-0.4.8.tgz 2025-11-19 03:02:46 +00:00
github-merge-queue[bot]
e058d52751 Publish onyx-0.4.8.tgz 2025-11-19 01:14:55 +00:00
nmgarza5
274b454fa7 Publish onyx-0.4.8.tgz 2025-11-19 00:49:52 +00:00
jmelahman
53d784cfd4 Publish onyx-0.4.8.tgz 2025-11-19 00:10:46 +00:00
jmelahman
e4f5c3cb30 Publish onyx-0.4.8.tgz 2025-11-19 00:06:37 +00:00
jmelahman
aa1ef9e6bd Publish onyx-0.4.8.tgz 2025-11-18 22:50:08 +00:00
jmelahman
91b4997924 Publish onyx-0.4.8.tgz 2025-11-18 22:16:44 +00:00
jmelahman
62d9bbcf34 Publish onyx-0.4.8.tgz 2025-11-18 18:46:30 +00:00
justin-tahara
4307991cb9 Publish onyx-0.4.8.tgz 2025-11-18 17:51:07 +00:00
Subash-Mohan
411f715e6f Publish onyx-0.4.7.tgz 2025-11-18 12:11:36 +00:00
Subash-Mohan
8dab6c049f Publish onyx-0.4.7.tgz 2025-11-18 06:55:17 +00:00
github-merge-queue[bot]
41aef33360 Publish onyx-0.4.7.tgz 2025-11-17 23:51:01 +00:00
github-merge-queue[bot]
940f471df7 Publish onyx-0.4.7.tgz 2025-11-17 23:38:50 +00:00
github-merge-queue[bot]
ab432eca02 Publish onyx-0.4.7.tgz 2025-11-17 23:29:14 +00:00
github-merge-queue[bot]
f5f1f48ecf Publish onyx-0.4.7.tgz 2025-11-17 20:48:49 +00:00
github-merge-queue[bot]
f4824d9979 Publish onyx-0.4.7.tgz 2025-11-17 20:33:31 +00:00
github-merge-queue[bot]
001e5c3362 Publish onyx-0.4.7.tgz 2025-11-17 20:30:23 +00:00
github-merge-queue[bot]
23676c21a4 Publish onyx-0.4.7.tgz 2025-11-17 20:02:59 +00:00
github-merge-queue[bot]
c8b5befe29 Publish onyx-0.4.7.tgz 2025-11-17 18:53:56 +00:00
github-merge-queue[bot]
d8b9bc18c8 Publish onyx-0.4.7.tgz 2025-11-17 18:19:44 +00:00
github-merge-queue[bot]
cbbd60c4cb Publish onyx-0.4.7.tgz 2025-11-17 17:36:44 +00:00
github-merge-queue[bot]
08a63d01bd Publish onyx-0.4.7.tgz 2025-11-17 17:35:02 +00:00
github-merge-queue[bot]
9587cc8591 Publish onyx-0.4.7.tgz 2025-11-17 17:34:26 +00:00
nmgarza5
a0a51c889c Publish onyx-0.4.7.tgz 2025-11-17 17:17:24 +00:00
evan-onyx
af81c1c8b4 Publish onyx-0.4.7.tgz 2025-11-17 16:59:25 +00:00
github-merge-queue[bot]
1727ed5951 Publish onyx-0.4.7.tgz 2025-11-17 05:31:29 +00:00
github-merge-queue[bot]
1b5d49c068 Publish onyx-0.4.7.tgz 2025-11-17 03:50:04 +00:00
github-merge-queue[bot]
218b20a399 Publish onyx-0.4.7.tgz 2025-11-16 23:06:48 +00:00
github-merge-queue[bot]
9cdaa10293 Publish onyx-0.4.7.tgz 2025-11-16 22:18:03 +00:00
wenxi-onyx
3655cc88dc Publish onyx-0.4.7.tgz 2025-11-16 21:18:53 +00:00
github-merge-queue[bot]
183428c3bd Publish onyx-0.4.7.tgz 2025-11-16 18:47:29 +00:00
nmgarza5
c471d03b1b Publish onyx-0.4.7.tgz 2025-11-16 18:30:13 +00:00
raunakab
147b71516a Publish onyx-0.4.7.tgz 2025-11-16 17:08:34 +00:00
github-merge-queue[bot]
1365d02223 Publish onyx-0.4.7.tgz 2025-11-16 05:35:51 +00:00
github-merge-queue[bot]
4c4957b8ab Publish onyx-0.4.7.tgz 2025-11-16 05:26:43 +00:00
github-merge-queue[bot]
6acf92476e Publish onyx-0.4.7.tgz 2025-11-16 05:20:43 +00:00
github-merge-queue[bot]
bb7457992f Publish onyx-0.4.7.tgz 2025-11-15 12:05:11 +00:00
github-merge-queue[bot]
3a95f0dd72 Publish onyx-0.4.7.tgz 2025-11-15 07:22:29 +00:00
github-merge-queue[bot]
dfa0bfaada Publish onyx-0.4.7.tgz 2025-11-15 04:57:36 +00:00
github-merge-queue[bot]
57c550ae95 Publish onyx-0.4.7.tgz 2025-11-15 04:26:59 +00:00
github-merge-queue[bot]
a7bc72e129 Publish onyx-0.4.7.tgz 2025-11-15 04:10:13 +00:00
github-merge-queue[bot]
a0cbc786c6 Publish onyx-0.4.7.tgz 2025-11-15 03:38:43 +00:00
github-merge-queue[bot]
3ceeeff964 Publish onyx-0.4.7.tgz 2025-11-15 03:03:38 +00:00
github-merge-queue[bot]
15313de8f7 Publish onyx-0.4.7.tgz 2025-11-15 02:43:27 +00:00
raunakab
18117e6674 Publish onyx-0.4.7.tgz 2025-11-15 02:28:09 +00:00
justin-tahara
9b3b3c8431 Publish onyx-0.4.7.tgz 2025-11-15 02:00:04 +00:00
justin-tahara
2b1ed6116a Publish onyx-0.4.7.tgz 2025-11-15 00:55:08 +00:00
justin-tahara
4cd5f7f924 Publish onyx-0.4.7.tgz 2025-11-15 00:53:51 +00:00
rguan72
ce5621787c Publish onyx-0.4.7.tgz 2025-11-14 22:52:12 +00:00
wenxi-onyx
2d4afd952f Publish onyx-0.4.7.tgz 2025-11-14 22:43:08 +00:00
wenxi-onyx
bba807c085 Publish onyx-0.4.7.tgz 2025-11-14 22:39:34 +00:00
wenxi-onyx
a7e6034baf Publish onyx-0.4.7.tgz 2025-11-14 22:30:46 +00:00
github-merge-queue[bot]
9fdf20548e Publish onyx-0.4.7.tgz 2025-11-14 22:10:23 +00:00
github-merge-queue[bot]
e83eb56495 Publish onyx-0.4.7.tgz 2025-11-14 21:46:34 +00:00
wenxi-onyx
8103e01148 Publish onyx-0.4.7.tgz 2025-11-14 21:32:21 +00:00
wenxi-onyx
e47d2a95f2 Publish onyx-0.4.7.tgz 2025-11-14 21:27:27 +00:00
jmelahman
c582e67db2 Publish onyx-0.4.7.tgz 2025-11-14 21:17:03 +00:00
Weves
4833251e92 Publish onyx-0.4.7.tgz 2025-11-14 20:27:25 +00:00
jmelahman
3baa1296e1 Publish onyx-0.4.7.tgz 2025-11-14 19:23:03 +00:00
Weves
f2c420d88b Publish onyx-0.4.7.tgz 2025-11-14 18:56:40 +00:00
github-merge-queue[bot]
503b58313b Publish onyx-0.4.7.tgz 2025-11-14 17:07:07 +00:00
github-merge-queue[bot]
0a82657da2 Publish onyx-0.4.7.tgz 2025-11-14 16:35:42 +00:00
github-merge-queue[bot]
90d95a7fde Publish onyx-0.4.7.tgz 2025-11-14 09:01:04 +00:00
github-merge-queue[bot]
e23c72b492 Publish onyx-0.4.7.tgz 2025-11-14 06:38:30 +00:00
raunakab
38a5d7c4be Publish onyx-0.4.7.tgz 2025-11-14 05:49:05 +00:00
github-merge-queue[bot]
cdfd85af95 Publish onyx-0.4.7.tgz 2025-11-14 04:47:47 +00:00
jmelahman
2ccc6a39a8 Publish onyx-0.4.7.tgz 2025-11-14 03:40:34 +00:00
Weves
a0a9337467 Publish onyx-0.4.7.tgz 2025-11-14 03:02:35 +00:00
justin-tahara
908ba42a10 Publish onyx-0.4.7.tgz 2025-11-14 02:25:41 +00:00
Weves
9f6163b9e9 Publish onyx-0.4.7.tgz 2025-11-14 01:43:24 +00:00
wenxi-onyx
ffdb30965d Publish onyx-0.4.7.tgz 2025-11-13 22:19:25 +00:00
github-merge-queue[bot]
1b507e3a14 Publish onyx-0.4.7.tgz 2025-11-13 22:16:21 +00:00
github-merge-queue[bot]
2dda377c1c Publish onyx-0.4.7.tgz 2025-11-13 17:54:18 +00:00
github-merge-queue[bot]
1a5cb435af Publish onyx-0.4.7.tgz 2025-11-13 02:53:13 +00:00
github-merge-queue[bot]
998db46bef Publish onyx-0.4.7.tgz 2025-11-13 02:49:04 +00:00
github-merge-queue[bot]
cf8ccdbf55 Publish onyx-0.4.7.tgz 2025-11-13 00:28:38 +00:00
github-merge-queue[bot]
e8a2292028 Publish onyx-0.4.7.tgz 2025-11-12 23:16:47 +00:00
github-merge-queue[bot]
80b864f163 Publish onyx-0.4.7.tgz 2025-11-12 22:52:31 +00:00
nmgarza5
15242f41d4 Publish onyx-0.4.7.tgz 2025-11-12 22:36:44 +00:00
github-merge-queue[bot]
816c87ee48 Publish onyx-0.4.7.tgz 2025-11-12 22:36:22 +00:00
github-merge-queue[bot]
6cf2c5248a Publish onyx-0.4.7.tgz 2025-11-12 19:58:44 +00:00
github-merge-queue[bot]
5e5d392b74 Publish onyx-0.4.7.tgz 2025-11-12 19:39:37 +00:00
github-merge-queue[bot]
1cc5e75b2d Publish onyx-0.4.7.tgz 2025-11-12 19:36:07 +00:00
github-merge-queue[bot]
a6ffdc5142 Publish onyx-0.4.7.tgz 2025-11-12 19:20:58 +00:00
raunakab
02ceb75a4e Publish onyx-0.4.7.tgz 2025-11-12 19:09:05 +00:00
nmgarza5
e29b0e7a10 Publish onyx-0.4.7.tgz 2025-11-12 18:49:28 +00:00
github-merge-queue[bot]
589d9019d3 Publish onyx-0.4.7.tgz 2025-11-12 18:44:56 +00:00
github-merge-queue[bot]
4f8a91ca6e Publish onyx-0.4.7.tgz 2025-11-12 18:32:10 +00:00
github-merge-queue[bot]
1e706fdccb Publish onyx-0.4.7.tgz 2025-11-12 18:31:58 +00:00
github-merge-queue[bot]
33c4dfd498 Publish onyx-0.4.7.tgz 2025-11-12 18:24:27 +00:00
Weves
67e1b2d6ee Publish onyx-0.4.7.tgz 2025-11-12 17:54:08 +00:00
github-merge-queue[bot]
f78b583bbd Publish onyx-0.4.7.tgz 2025-11-12 03:53:32 +00:00
github-merge-queue[bot]
0316ee7f17 Publish onyx-0.4.7.tgz 2025-11-12 01:40:40 +00:00
github-merge-queue[bot]
20f1e90e54 Publish onyx-0.4.7.tgz 2025-11-12 01:14:39 +00:00
rguan72
22508aac61 Publish onyx-0.4.7.tgz 2025-11-12 00:25:53 +00:00
github-merge-queue[bot]
fb8acda7b8 Publish onyx-0.4.7.tgz 2025-11-12 00:06:53 +00:00
github-merge-queue[bot]
6983c419d8 Publish onyx-0.4.7.tgz 2025-11-11 23:27:48 +00:00
wenxi-onyx
573235b404 Publish onyx-0.4.7.tgz 2025-11-11 21:31:58 +00:00
jmelahman
15a9761d6f Publish onyx-0.4.7.tgz 2025-11-11 21:07:36 +00:00
github-merge-queue[bot]
fce72063fe Publish onyx-0.4.7.tgz 2025-11-11 20:03:10 +00:00
wenxi-onyx
aeabc3d1f7 Publish onyx-0.4.7.tgz 2025-11-11 19:22:36 +00:00
github-merge-queue[bot]
fca1f4111b Publish onyx-0.4.7.tgz 2025-11-11 19:19:58 +00:00
jmelahman
232322374c Publish onyx-0.4.7.tgz 2025-11-11 17:38:37 +00:00
jmelahman
2fcf495734 Publish onyx-0.4.7.tgz 2025-11-11 17:38:26 +00:00
justin-tahara
e6ca0f0a90 Publish onyx-0.4.7.tgz 2025-11-11 17:27:31 +00:00
justin-tahara
7675aa7521 Publish onyx-0.4.7.tgz 2025-11-11 03:38:06 +00:00
github-merge-queue[bot]
828c652e19 Publish onyx-0.4.7.tgz 2025-11-11 02:53:30 +00:00
justin-tahara
85dc63ba4a Publish onyx-0.4.7.tgz 2025-11-11 02:35:38 +00:00
Weves
0d7d80f76a Publish onyx-0.4.7.tgz 2025-11-11 01:02:27 +00:00
jmelahman
4da6351546 Publish onyx-0.4.7.tgz 2025-11-10 23:51:46 +00:00
justin-tahara
07973e2528 Publish onyx-0.4.7.tgz 2025-11-10 22:40:11 +00:00
justin-tahara
d24d3d1a2a Publish onyx-0.4.7.tgz 2025-11-10 22:32:32 +00:00
justin-tahara
2f19680898 Publish onyx-0.4.7.tgz 2025-11-10 22:11:11 +00:00
jmelahman
a57abfe2ea Publish onyx-0.4.7.tgz 2025-11-10 20:10:53 +00:00
jmelahman
508fbbf107 Publish onyx-0.4.7.tgz 2025-11-10 19:34:42 +00:00
jmelahman
e276a0f5f3 Publish onyx-0.4.7.tgz 2025-11-10 19:28:23 +00:00
jmelahman
5a898bbec7 Publish onyx-0.4.7.tgz 2025-11-10 19:28:09 +00:00
jmelahman
c73a5fd2c0 Publish onyx-0.4.7.tgz 2025-11-10 19:14:34 +00:00
jmelahman
c51c3d79fb Publish onyx-0.4.7.tgz 2025-11-10 18:58:10 +00:00
jmelahman
505376e8c7 Publish onyx-0.4.7.tgz 2025-11-10 18:34:35 +00:00
Weves
cf654fe55f Publish onyx-0.4.7.tgz 2025-11-10 17:40:10 +00:00
Weves
fa7d5f81d6 Publish onyx-0.4.7.tgz 2025-11-08 22:53:12 +00:00
nmgarza5
1e0083964c Publish onyx-0.4.7.tgz 2025-11-08 18:36:46 +00:00
raunakab
1f5f1d2351 Publish onyx-0.4.7.tgz 2025-11-08 02:39:39 +00:00
Weves
819d10e629 Publish onyx-0.4.7.tgz 2025-11-08 02:10:40 +00:00
jmelahman
4591eb6017 Publish onyx-0.4.7.tgz 2025-11-08 01:29:57 +00:00
justin-tahara
56f477fe1c Publish onyx-0.4.7.tgz 2025-11-08 01:11:11 +00:00
justin-tahara
4ace01ff0f Publish onyx-0.4.7.tgz 2025-11-08 01:10:52 +00:00
justin-tahara
1f0ce10e34 Publish onyx-0.4.7.tgz 2025-11-08 01:10:12 +00:00
justin-tahara
fe8d09b47d Publish onyx-0.4.7.tgz 2025-11-08 00:34:24 +00:00
justin-tahara
1405fd6d82 Publish onyx-0.4.7.tgz 2025-11-08 00:25:43 +00:00
jmelahman
d4a4a3f3d0 Publish onyx-0.4.7.tgz 2025-11-08 00:21:54 +00:00
jmelahman
609661edae Publish onyx-0.4.7.tgz 2025-11-07 23:27:01 +00:00
justin-tahara
5f964108ce Publish onyx-0.4.7.tgz 2025-11-07 21:37:00 +00:00
github-merge-queue[bot]
6274cac375 Publish onyx-0.4.7.tgz 2025-11-07 19:03:23 +00:00
raunakab
acc9d986f9 Publish onyx-0.4.7.tgz 2025-11-07 03:44:03 +00:00
raunakab
c184c98ab7 Publish onyx-0.4.7.tgz 2025-11-07 03:32:35 +00:00
justin-tahara
e4116338ef Publish onyx-0.4.7.tgz 2025-11-07 01:36:28 +00:00
wenxi-onyx
9cf744cca2 Publish onyx-0.4.7.tgz 2025-11-07 01:15:03 +00:00
github-merge-queue[bot]
b64e5902b9 Publish onyx-0.4.7.tgz 2025-11-06 23:40:18 +00:00
raunakab
1871860d5a Publish onyx-0.4.7.tgz 2025-11-06 22:49:53 +00:00
github-merge-queue[bot]
960792e48f Publish onyx-0.4.7.tgz 2025-11-06 21:43:55 +00:00
justin-tahara
8c16183a0c Publish onyx-0.4.7.tgz 2025-11-06 21:19:31 +00:00
github-merge-queue[bot]
e6a5301db9 Publish onyx-0.4.7.tgz 2025-11-06 20:13:15 +00:00
github-merge-queue[bot]
a4a0a191e0 Publish onyx-0.4.7.tgz 2025-11-06 20:11:45 +00:00
github-merge-queue[bot]
a9317b5fe7 Publish onyx-0.4.7.tgz 2025-11-06 19:06:42 +00:00
raunakab
6ac641df8d Publish onyx-0.4.7.tgz 2025-11-06 18:09:36 +00:00
raunakab
6d039ffda8 Publish onyx-0.4.7.tgz 2025-11-06 18:07:26 +00:00
github-merge-queue[bot]
eda4a10d41 Publish onyx-0.4.7.tgz 2025-11-06 07:34:26 +00:00
Weves
b613210d2b Publish onyx-0.4.7.tgz 2025-11-06 06:04:35 +00:00
github-merge-queue[bot]
85326d1dda Publish onyx-0.4.7.tgz 2025-11-06 05:45:11 +00:00
github-merge-queue[bot]
362f6d57cf Publish onyx-0.4.7.tgz 2025-11-06 05:42:24 +00:00
jmelahman
a547b3e2d4 Publish onyx-0.4.7.tgz 2025-11-06 05:05:05 +00:00
wenxi-onyx
7c4a882de4 Publish onyx-0.4.7.tgz 2025-11-06 04:00:48 +00:00
github-merge-queue[bot]
b23b362233 Publish onyx-0.4.7.tgz 2025-11-06 02:45:40 +00:00
justin-tahara
66f8213ce2 Publish onyx-0.4.7.tgz 2025-11-06 02:04:01 +00:00
Weves
1e41a0169d Publish onyx-0.4.7.tgz 2025-11-06 01:54:54 +00:00
github-merge-queue[bot]
2a64a99d4d Publish onyx-0.4.7.tgz 2025-11-06 01:30:45 +00:00
github-merge-queue[bot]
57d25c6444 Publish onyx-0.4.7.tgz 2025-11-06 01:07:24 +00:00
github-merge-queue[bot]
8ee0e5e7e1 Publish onyx-0.4.7.tgz 2025-11-06 00:39:19 +00:00
rguan72
edd17a0e32 Publish onyx-0.4.7.tgz 2025-11-06 00:14:41 +00:00
github-merge-queue[bot]
661504000f Publish onyx-0.4.7.tgz 2025-11-05 22:24:51 +00:00
jmelahman
85a00d9d47 Publish onyx-0.4.7.tgz 2025-11-05 22:04:36 +00:00
jmelahman
cd7e60333c Publish onyx-0.4.7.tgz 2025-11-05 21:32:19 +00:00
wenxi-onyx
64d47e5ccf Publish onyx-0.4.7.tgz 2025-11-05 19:44:16 +00:00
Weves
657e955512 Publish onyx-0.4.7.tgz 2025-11-05 07:35:06 +00:00
Weves
b5bc32dca8 Publish onyx-0.4.7.tgz 2025-11-05 03:53:21 +00:00
wenxi-onyx
03f5e2c1eb Publish onyx-0.4.7.tgz 2025-11-04 23:23:44 +00:00
wenxi-onyx
224b30ffe7 Publish onyx-0.4.7.tgz 2025-11-04 22:51:26 +00:00
wenxi-onyx
8227acc59e Publish onyx-0.4.7.tgz 2025-11-04 22:50:54 +00:00
wenxi-onyx
2162f24bd0 Publish onyx-0.4.7.tgz 2025-11-04 22:50:40 +00:00
wenxi-onyx
cf2b31cdd9 Publish onyx-0.4.7.tgz 2025-11-04 20:16:01 +00:00
yuhongsun96
498446fb1b Publish onyx-0.4.7.tgz 2025-11-04 19:57:30 +00:00
justin-tahara
ba502a7a53 Publish onyx-0.4.7.tgz 2025-11-04 18:30:32 +00:00
github-merge-queue[bot]
0a2cc9168a Publish onyx-0.4.6.tgz 2025-11-04 01:01:01 +00:00
Weves
bf6c3f1d11 Publish onyx-0.4.6.tgz 2025-11-03 23:14:44 +00:00
raunakab
e2bb2c0e68 Publish onyx-0.4.6.tgz 2025-11-03 21:50:41 +00:00
github-merge-queue[bot]
a3e40509ba Publish onyx-0.4.6.tgz 2025-11-03 21:26:04 +00:00
rguan72
db78cdc071 Publish onyx-0.4.6.tgz 2025-11-03 20:59:20 +00:00
wenxi-onyx
1fe5bcb360 Publish onyx-0.4.6.tgz 2025-11-03 20:03:42 +00:00
github-merge-queue[bot]
82595f6d3e Publish onyx-0.4.6.tgz 2025-11-03 19:44:29 +00:00
raunakab
c7ab6daaab Publish onyx-0.4.6.tgz 2025-11-03 18:01:07 +00:00
raunakab
d8e1c30738 Publish onyx-0.4.6.tgz 2025-11-03 17:57:09 +00:00
Weves
2386f7a7cd Publish onyx-0.4.6.tgz 2025-11-03 17:48:20 +00:00
yuhongsun96
23a3443eb4 Publish onyx-0.4.6.tgz 2025-11-03 17:31:38 +00:00
github-merge-queue[bot]
b828c58bf0 Publish onyx-0.4.6.tgz 2025-11-03 08:30:22 +00:00
raunakab
702c4794c2 Publish onyx-0.4.6.tgz 2025-11-03 06:27:22 +00:00
raunakab
a94cd163db Publish onyx-0.4.6.tgz 2025-11-03 06:26:53 +00:00
raunakab
0ec699ad75 Publish onyx-0.4.6.tgz 2025-11-03 02:36:07 +00:00
Weves
e861525d2a Publish onyx-0.4.6.tgz 2025-11-02 19:25:42 +00:00
github-merge-queue[bot]
d6503e470c Publish onyx-0.4.6.tgz 2025-11-02 04:27:40 +00:00
github-merge-queue[bot]
191691a6bc Publish onyx-0.4.6.tgz 2025-11-01 18:40:57 +00:00
Weves
ab17323949 Publish onyx-0.4.6.tgz 2025-11-01 18:24:01 +00:00
github-merge-queue[bot]
71844b7e3d Publish onyx-0.4.6.tgz 2025-11-01 18:23:22 +00:00
github-merge-queue[bot]
627cfb2d11 Publish onyx-0.4.6.tgz 2025-11-01 02:06:11 +00:00
raunakab
642946f44b Publish onyx-0.4.6.tgz 2025-11-01 00:43:02 +00:00
justin-tahara
879a67ef10 Publish onyx-0.4.6.tgz 2025-11-01 00:31:08 +00:00
github-merge-queue[bot]
f727b18748 Publish onyx-0.4.5.tgz 2025-11-01 00:12:31 +00:00
rguan72
f95d9c46c1 Publish onyx-0.4.5.tgz 2025-10-31 21:12:07 +00:00
rguan72
3c444d6b3d Publish onyx-0.4.5.tgz 2025-10-31 17:37:21 +00:00
justin-tahara
0f98e36c78 Publish onyx-0.4.5.tgz 2025-10-31 17:37:07 +00:00
rguan72
6da707d770 Publish onyx-0.4.5.tgz 2025-10-31 07:26:56 +00:00
Weves
cd63baa02a Publish onyx-0.4.5.tgz 2025-10-30 22:45:40 +00:00
wenxi-onyx
0b0412312c Publish onyx-0.4.5.tgz 2025-10-30 22:29:22 +00:00
Subash-Mohan
8dade60dd6 Publish onyx-0.4.5.tgz 2025-10-30 04:39:15 +00:00
github-merge-queue[bot]
b2dda14a95 Publish onyx-0.4.5.tgz 2025-10-30 02:11:13 +00:00
github-merge-queue[bot]
7e8b53d68a Publish onyx-0.4.5.tgz 2025-10-30 02:08:05 +00:00
github-merge-queue[bot]
5d36a4b190 Publish onyx-0.4.5.tgz 2025-10-30 00:47:47 +00:00
justin-tahara
ed87cba01f Publish onyx-0.4.5.tgz 2025-10-29 22:52:46 +00:00
justin-tahara
8a46172d97 Publish onyx-0.4.4.tgz 2025-10-29 22:51:54 +00:00
justin-tahara
e85317662b Publish onyx-0.4.4.tgz 2025-10-29 20:39:47 +00:00
github-merge-queue[bot]
5969396815 Publish onyx-0.4.4.tgz 2025-10-29 01:39:22 +00:00
evan-onyx
72a832ef8b Publish onyx-0.4.4.tgz 2025-10-29 00:00:46 +00:00
github-merge-queue[bot]
52d56211bf Publish onyx-0.4.4.tgz 2025-10-28 22:59:03 +00:00
raunakab
efcef0b322 Publish onyx-0.4.4.tgz 2025-10-28 21:33:12 +00:00
github-merge-queue[bot]
b87e1d5836 Publish onyx-0.4.4.tgz 2025-10-28 19:33:10 +00:00
github-merge-queue[bot]
0dd3857e3b Publish onyx-0.4.4.tgz 2025-10-28 19:05:53 +00:00
justin-tahara
881cc4736e Publish onyx-0.4.4.tgz 2025-10-28 03:57:28 +00:00
github-merge-queue[bot]
318191157e Publish onyx-0.4.4.tgz 2025-10-28 01:53:50 +00:00
github-merge-queue[bot]
c89b441987 Publish onyx-0.4.4.tgz 2025-10-28 01:04:38 +00:00
github-merge-queue[bot]
0fa5b346ba Publish onyx-0.4.4.tgz 2025-10-28 00:18:30 +00:00
github-merge-queue[bot]
94b69c5d4b Publish onyx-0.4.4.tgz 2025-10-27 23:51:48 +00:00
rguan72
d135caddf7 Publish onyx-0.4.4.tgz 2025-10-27 23:11:56 +00:00
justin-tahara
98d695e26c Publish onyx-0.4.4.tgz 2025-10-27 22:40:35 +00:00
justin-tahara
5fd82473b2 Publish onyx-0.4.4.tgz 2025-10-27 22:40:26 +00:00
github-merge-queue[bot]
ddfd4960bc Publish onyx-0.4.4.tgz 2025-10-27 21:59:18 +00:00
justin-tahara
b18f392976 Publish onyx-0.4.4.tgz 2025-10-27 21:27:12 +00:00
wenxi-onyx
056352b1bf Publish onyx-0.4.4.tgz 2025-10-27 19:39:59 +00:00
wenxi-onyx
1956e4dcbf Publish onyx-0.4.4.tgz 2025-10-27 19:39:25 +00:00
wenxi-onyx
fb026b094c Publish onyx-0.4.4.tgz 2025-10-27 18:55:38 +00:00
raunakab
41ad3174ab Publish onyx-0.4.4.tgz 2025-10-27 18:33:50 +00:00
yuhongsun96
f39951fed4 Publish onyx-0.4.4.tgz 2025-10-27 17:53:34 +00:00
raunakab
f5b0771a92 Publish onyx-0.4.4.tgz 2025-10-27 17:51:55 +00:00
raunakab
01717998e1 Publish onyx-0.4.4.tgz 2025-10-27 07:11:42 +00:00
raunakab
f850f3aa95 Publish onyx-0.4.4.tgz 2025-10-27 06:03:21 +00:00
raunakab
d3aa953bf5 Publish onyx-0.4.4.tgz 2025-10-27 05:32:52 +00:00
raunakab
eae70b8308 Publish onyx-0.4.4.tgz 2025-10-27 05:26:00 +00:00
Weves
644160486a Publish onyx-0.4.4.tgz 2025-10-27 02:09:12 +00:00
Weves
93e1cd340a Publish onyx-0.4.4.tgz 2025-10-27 00:20:41 +00:00
Weves
998d86077a Publish onyx-0.4.4.tgz 2025-10-26 22:23:18 +00:00
Weves
73b21184f7 Publish onyx-0.4.4.tgz 2025-10-26 20:38:59 +00:00
Weves
ea87c8320a Publish onyx-0.4.4.tgz 2025-10-26 00:50:48 +00:00
Weves
e723c3dfce Publish onyx-0.4.4.tgz 2025-10-25 06:39:50 +00:00
Weves
1020973a53 Publish onyx-0.4.4.tgz 2025-10-25 03:02:51 +00:00
github-merge-queue[bot]
4d757b4f94 Publish onyx-0.4.4.tgz 2025-10-25 00:06:45 +00:00
github-merge-queue[bot]
08012e25fe Publish onyx-0.4.4.tgz 2025-10-24 23:47:08 +00:00
Weves
a26e3b22ee Publish onyx-0.4.4.tgz 2025-10-24 23:22:37 +00:00
Weves
79d54ce542 Publish onyx-0.4.4.tgz 2025-10-24 23:21:30 +00:00
Weves
f390b3dcc6 Publish onyx-0.4.4.tgz 2025-10-24 21:09:16 +00:00
evan-onyx
ab935234d8 Publish onyx-0.4.4.tgz 2025-10-24 18:50:51 +00:00
github-merge-queue[bot]
aff7ecffcf Publish onyx-0.4.4.tgz 2025-10-24 18:42:18 +00:00
Weves
e0f6d46b2e Publish onyx-0.4.4.tgz 2025-10-24 18:15:45 +00:00
justin-tahara
41afcf9504 Publish onyx-0.4.4.tgz 2025-10-24 01:24:16 +00:00
wenxi-onyx
cc6f45f6ad Publish onyx-0.4.4.tgz 2025-10-24 00:45:45 +00:00
wenxi-onyx
1b54ac165c Publish onyx-0.4.4.tgz 2025-10-24 00:44:54 +00:00
Weves
735b0a0e82 Publish onyx-0.4.4.tgz 2025-10-24 00:35:29 +00:00
Weves
f717326cae Publish onyx-0.4.4.tgz 2025-10-24 00:30:03 +00:00
raunakab
bb621480d5 Publish onyx-0.4.4.tgz 2025-10-24 00:24:06 +00:00
raunakab
461bc2b647 Publish onyx-0.4.4.tgz 2025-10-24 00:22:33 +00:00
github-merge-queue[bot]
8689d10631 Publish onyx-0.4.4.tgz 2025-10-24 00:16:30 +00:00
Weves
ca5f2b6912 Publish onyx-0.4.4.tgz 2025-10-24 00:00:09 +00:00
Weves
5e53e5768b Publish onyx-0.4.4.tgz 2025-10-23 23:52:25 +00:00
github-merge-queue[bot]
4fe74e4caa Publish onyx-0.4.4.tgz 2025-10-23 23:18:54 +00:00
Weves
5ef89e9ed5 Publish onyx-0.4.4.tgz 2025-10-23 23:01:32 +00:00
Weves
e81ef91f31 Publish onyx-0.4.4.tgz 2025-10-23 22:09:30 +00:00
nmgarza5
fbc42bc6d0 Publish onyx-0.4.4.tgz 2025-10-23 21:41:39 +00:00
github-merge-queue[bot]
86712bcae7 Publish onyx-0.4.4.tgz 2025-10-23 21:31:57 +00:00
github-merge-queue[bot]
3da70dbdeb Publish onyx-0.4.4.tgz 2025-10-23 21:23:14 +00:00
Weves
4f9bf3a4f3 Publish onyx-0.4.4.tgz 2025-10-23 21:02:59 +00:00
Weves
0f178da7d8 Publish onyx-0.4.4.tgz 2025-10-23 20:43:47 +00:00
Weves
a41845ae87 Publish onyx-0.4.4.tgz 2025-10-23 20:40:10 +00:00
github-merge-queue[bot]
2717da9dc7 Publish onyx-0.4.4.tgz 2025-10-23 19:41:00 +00:00
Weves
0ad85cdbcf Publish onyx-0.4.4.tgz 2025-10-23 19:00:07 +00:00
wenxi-onyx
0cf30673f7 Publish onyx-0.4.4.tgz 2025-10-23 18:42:53 +00:00
github-merge-queue[bot]
0464d4f516 Publish onyx-0.4.4.tgz 2025-10-23 17:58:52 +00:00
Weves
f269aed596 Publish onyx-0.4.4.tgz 2025-10-23 17:42:31 +00:00
Weves
4c1affc450 Publish onyx-0.4.4.tgz 2025-10-23 17:27:21 +00:00
Weves
20da7d8298 Publish onyx-0.4.4.tgz 2025-10-23 17:22:14 +00:00
wenxi-onyx
040666ad00 Publish onyx-0.4.4.tgz 2025-10-23 17:13:38 +00:00
Weves
2b3087a7d3 Publish onyx-0.4.4.tgz 2025-10-23 16:50:41 +00:00
github-merge-queue[bot]
555b000b2b Publish onyx-0.4.4.tgz 2025-10-22 23:53:51 +00:00
justin-tahara
7a40e51bdc Publish onyx-0.4.4.tgz 2025-10-22 22:59:48 +00:00
github-merge-queue[bot]
7d6d2dc341 Publish onyx-0.4.4.tgz 2025-10-22 22:24:08 +00:00
github-merge-queue[bot]
91e38c03be Publish onyx-0.4.4.tgz 2025-10-22 22:15:11 +00:00
github-merge-queue[bot]
1a50ee5056 Publish onyx-0.4.4.tgz 2025-10-22 21:40:43 +00:00
github-merge-queue[bot]
9cd1a5d2ab Publish onyx-0.4.4.tgz 2025-10-22 21:01:20 +00:00
github-merge-queue[bot]
6af5efc605 Publish onyx-0.4.4.tgz 2025-10-22 20:36:43 +00:00
github-merge-queue[bot]
6ff9d2e513 Publish onyx-0.4.4.tgz 2025-10-22 20:35:10 +00:00
raunakab
0cb9640509 Publish onyx-0.4.4.tgz 2025-10-22 03:37:57 +00:00
raunakab
0de637235a Publish onyx-0.4.4.tgz 2025-10-22 03:23:28 +00:00
raunakab
cc5cb6c87e Publish onyx-0.4.4.tgz 2025-10-22 03:18:12 +00:00
raunakab
af6d9d8d87 Publish onyx-0.4.4.tgz 2025-10-22 02:45:48 +00:00
raunakab
1e80836aa5 Publish onyx-0.4.4.tgz 2025-10-22 02:30:32 +00:00
justin-tahara
e804303b99 Publish onyx-0.4.4.tgz 2025-10-22 01:26:54 +00:00
raunakab
5a3d4d8949 Publish onyx-0.4.4.tgz 2025-10-22 01:11:47 +00:00
justin-tahara
d47dd3673b Publish onyx-0.4.4.tgz 2025-10-22 00:56:49 +00:00
justin-tahara
98fdbb4497 Publish onyx-0.4.4.tgz 2025-10-21 23:54:50 +00:00
raunakab
3a299b8069 Publish onyx-0.4.4.tgz 2025-10-21 23:51:50 +00:00
github-merge-queue[bot]
6b2c473ade Publish onyx-0.4.4.tgz 2025-10-21 21:09:38 +00:00
Weves
5d066efd89 Publish onyx-0.4.4.tgz 2025-10-21 20:23:28 +00:00
github-merge-queue[bot]
d9eb2ee362 Publish onyx-0.4.4.tgz 2025-10-21 19:41:25 +00:00
Weves
bbaf25eced Publish onyx-0.4.4.tgz 2025-10-21 18:48:17 +00:00
wenxi-onyx
42ee750b1f Publish onyx-0.4.4.tgz 2025-10-21 18:39:46 +00:00
github-merge-queue[bot]
6a8a9e67fc Publish onyx-0.4.4.tgz 2025-10-21 18:27:11 +00:00
justin-tahara
c277590dce Publish onyx-0.4.4.tgz 2025-10-21 17:42:59 +00:00
Weves
1adcc6135d Publish onyx-0.4.4.tgz 2025-10-21 17:39:30 +00:00
Weves
a4024e8b6a Publish onyx-0.4.4.tgz 2025-10-21 17:28:15 +00:00
wenxi-onyx
b981697dcb Publish onyx-0.4.4.tgz 2025-10-21 00:49:07 +00:00
Weves
e2877bd180 Publish onyx-0.4.4.tgz 2025-10-21 00:48:37 +00:00
wenxi-onyx
e66c07fbef Publish onyx-0.4.4.tgz 2025-10-21 00:48:16 +00:00
wenxi-onyx
d54ac2daf7 Publish onyx-0.4.4.tgz 2025-10-21 00:43:23 +00:00
justin-tahara
a00a28fb73 Publish onyx-0.4.4.tgz 2025-10-21 00:41:46 +00:00
Weves
e2041ac597 Publish onyx-0.4.4.tgz 2025-10-21 00:30:40 +00:00
justin-tahara
1a35f728c5 Publish onyx-0.4.4.tgz 2025-10-21 00:21:54 +00:00
wenxi-onyx
022fba787a Publish onyx-0.4.4.tgz 2025-10-21 00:02:22 +00:00
wenxi-onyx
048f6f39b3 Publish onyx-0.4.4.tgz 2025-10-20 23:53:47 +00:00
Weves
e65dfd9ba2 Publish onyx-0.4.4.tgz 2025-10-20 23:53:02 +00:00
raunakab
cdf57d463d Publish onyx-0.4.4.tgz 2025-10-20 22:43:21 +00:00
wenxi-onyx
a81f3c36ee Publish onyx-0.4.4.tgz 2025-10-20 21:24:52 +00:00
raunakab
580cd7fea1 Publish onyx-0.4.4.tgz 2025-10-20 21:10:31 +00:00
Weves
5ce59a5cca Publish onyx-0.4.4.tgz 2025-10-20 20:53:34 +00:00
nmgarza5
c893ebeaf5 Publish onyx-0.4.4.tgz 2025-10-20 20:50:16 +00:00
Weves
29abc18269 Publish onyx-0.4.4.tgz 2025-10-20 20:29:40 +00:00
evan-onyx
3abc32d45a Publish onyx-0.4.4.tgz 2025-10-20 20:01:39 +00:00
justin-tahara
8e6e0d543c Publish onyx-0.4.4.tgz 2025-10-20 19:58:16 +00:00
raunakab
9d8f7cc608 Publish onyx-0.4.4.tgz 2025-10-20 19:49:16 +00:00
Weves
931fedcb5f Publish onyx-0.4.4.tgz 2025-10-19 20:26:04 +00:00
Weves
ed5ba46bc2 Publish onyx-0.4.4.tgz 2025-10-19 20:02:48 +00:00
Weves
2be412f674 Publish onyx-0.4.4.tgz 2025-10-19 19:38:30 +00:00
Weves
350a21a73b Publish onyx-0.4.4.tgz 2025-10-19 01:26:55 +00:00
Weves
f58895d542 Publish onyx-0.4.4.tgz 2025-10-18 16:06:42 +00:00
raunakab
dea0276581 Publish onyx-0.4.4.tgz 2025-10-18 01:50:07 +00:00
wenxi-onyx
d3dec72421 Publish onyx-0.4.4.tgz 2025-10-18 01:27:26 +00:00
github-merge-queue[bot]
8504ad07aa Publish onyx-0.4.4.tgz 2025-10-18 00:20:58 +00:00
github-merge-queue[bot]
9983e60c15 Publish onyx-0.4.4.tgz 2025-10-18 00:18:39 +00:00
github-merge-queue[bot]
77a190cb78 Publish onyx-0.4.4.tgz 2025-10-17 23:10:35 +00:00
github-merge-queue[bot]
80b2299732 Publish onyx-0.4.4.tgz 2025-10-17 22:57:20 +00:00
Weves
66fe67b428 Publish onyx-0.4.4.tgz 2025-10-17 20:57:08 +00:00
justin-tahara
8c30d9b21d Publish onyx-0.4.4.tgz 2025-10-17 20:53:11 +00:00
wenxi-onyx
c32e4fdafc Publish onyx-0.4.4.tgz 2025-10-17 20:52:45 +00:00
github-merge-queue[bot]
5ac4b4123d Publish onyx-0.4.4.tgz 2025-10-17 19:37:12 +00:00
yuhongsun96
29e735905b Publish onyx-0.4.4.tgz 2025-10-17 18:15:48 +00:00
github-merge-queue[bot]
3866bdcae8 Publish onyx-0.4.4.tgz 2025-10-17 18:00:20 +00:00
github-merge-queue[bot]
4afcfc3de4 Publish onyx-0.4.4.tgz 2025-10-17 17:51:34 +00:00
github-merge-queue[bot]
5afe528792 Publish onyx-0.4.4.tgz 2025-10-17 17:14:13 +00:00
Weves
e2ce5f91ab Publish onyx-0.4.4.tgz 2025-10-17 16:36:09 +00:00
Weves
c665c0bb1a Publish onyx-0.4.4.tgz 2025-10-17 16:35:19 +00:00
github-merge-queue[bot]
10db34f283 Publish onyx-0.4.4.tgz 2025-10-17 01:50:13 +00:00
justin-tahara
57f5863be4 Publish onyx-0.4.4.tgz 2025-10-17 01:17:57 +00:00
raunakab
8a285a8c50 Publish onyx-0.4.4.tgz 2025-10-17 01:04:07 +00:00
evan-onyx
a184b35d2a Publish onyx-0.4.4.tgz 2025-10-16 23:59:00 +00:00
evan-onyx
1e73df14ad Publish onyx-0.4.4.tgz 2025-10-16 23:42:55 +00:00
justin-tahara
e786885742 Publish onyx-0.4.4.tgz 2025-10-16 23:31:16 +00:00
Weves
e1a30513cd Publish onyx-0.4.4.tgz 2025-10-16 21:36:45 +00:00
github-merge-queue[bot]
c4820f667f Publish onyx-0.4.4.tgz 2025-10-16 17:54:03 +00:00
wenxi-onyx
940b310b21 Publish onyx-0.4.4.tgz 2025-10-16 17:08:44 +00:00
justin-tahara
428af24b1b Publish onyx-0.4.4.tgz 2025-10-15 23:12:31 +00:00
github-merge-queue[bot]
d6e83c56e5 Publish onyx-0.4.4.tgz 2025-10-15 21:51:06 +00:00
github-merge-queue[bot]
08bc366848 Publish onyx-0.4.4.tgz 2025-10-15 21:50:33 +00:00
raunakab
727ea3ca0a Publish onyx-0.4.4.tgz 2025-10-15 18:21:21 +00:00
github-merge-queue[bot]
8aa9d90c63 Publish onyx-0.4.4.tgz 2025-10-15 18:06:36 +00:00
raunakab
181b38fdaa Publish onyx-0.4.4.tgz 2025-10-15 17:16:05 +00:00
github-merge-queue[bot]
5bcc231f6d Publish onyx-0.4.4.tgz 2025-10-15 16:18:58 +00:00
github-merge-queue[bot]
dc03dcfb4b Publish onyx-0.4.4.tgz 2025-10-15 03:57:17 +00:00
rguan72
8cbf5b10fb Publish onyx-0.4.4.tgz 2025-10-15 03:27:39 +00:00
raunakab
092512c6a0 Publish onyx-0.4.4.tgz 2025-10-15 01:27:05 +00:00
justin-tahara
d167e777e3 Publish onyx-0.4.4.tgz 2025-10-15 00:21:53 +00:00
github-merge-queue[bot]
384ba06b57 Publish onyx-0.4.4.tgz 2025-10-14 23:18:00 +00:00
github-merge-queue[bot]
bb56002f75 Publish onyx-0.4.4.tgz 2025-10-14 22:29:32 +00:00
github-merge-queue[bot]
48f74ccce1 Publish onyx-0.4.4.tgz 2025-10-14 22:21:33 +00:00
github-merge-queue[bot]
ce39602a6f Publish onyx-0.4.4.tgz 2025-10-14 22:05:17 +00:00
github-merge-queue[bot]
5401ad49f3 Publish onyx-0.4.4.tgz 2025-10-14 19:27:35 +00:00
github-merge-queue[bot]
06f70ac245 Publish onyx-0.4.4.tgz 2025-10-14 19:23:12 +00:00
github-merge-queue[bot]
f49597de80 Publish onyx-0.4.4.tgz 2025-10-14 02:20:32 +00:00
github-merge-queue[bot]
ea7293d28f Publish onyx-0.4.4.tgz 2025-10-14 01:14:17 +00:00
github-merge-queue[bot]
bfea2ed371 Publish onyx-0.4.4.tgz 2025-10-14 00:54:13 +00:00
github-merge-queue[bot]
2276a6ab2c Publish onyx-0.4.4.tgz 2025-10-13 23:45:06 +00:00
Weves
9605a956e3 Publish onyx-0.4.4.tgz 2025-10-13 21:19:35 +00:00
github-merge-queue[bot]
7580666382 Publish onyx-0.4.4.tgz 2025-10-13 20:29:37 +00:00
justin-tahara
a6267e1188 Publish onyx-0.4.4.tgz 2025-10-12 22:15:01 +00:00
Weves
fa4bd58324 Publish onyx-0.4.3.tgz 2025-10-11 17:16:57 +00:00
github-merge-queue[bot]
0c8b4035ed Publish onyx-0.4.3.tgz 2025-10-11 04:40:03 +00:00
github-merge-queue[bot]
847b873a34 Publish onyx-0.4.3.tgz 2025-10-11 04:19:46 +00:00
Weves
18aa2249c1 Publish onyx-0.4.3.tgz 2025-10-11 01:04:45 +00:00
github-merge-queue[bot]
b08f409b63 Publish onyx-0.4.3.tgz 2025-10-10 23:52:28 +00:00
github-merge-queue[bot]
c7b994f8ae Publish onyx-0.4.3.tgz 2025-10-10 22:36:23 +00:00
github-merge-queue[bot]
b596e9fc3e Publish onyx-0.4.3.tgz 2025-10-10 22:23:31 +00:00
Weves
6b68223165 Publish onyx-0.4.3.tgz 2025-10-10 05:05:25 +00:00
nmgarza5
9c6953eafa Publish onyx-0.4.3.tgz 2025-10-10 00:21:53 +00:00
nmgarza5
39ca5a1458 Publish onyx-0.4.3.tgz 2025-10-10 00:03:23 +00:00
nmgarza5
6853f74df3 Publish onyx-0.4.3.tgz 2025-10-10 00:00:49 +00:00
github-merge-queue[bot]
97c12f550b Publish onyx-0.4.3.tgz 2025-10-09 23:01:56 +00:00
github-merge-queue[bot]
8d72c93006 Publish onyx-0.4.3.tgz 2025-10-09 22:43:59 +00:00
wenxi-onyx
9aa9b2319f Publish onyx-0.4.2.tgz 2025-10-09 21:38:43 +00:00
github-merge-queue[bot]
ea4fbdde02 Publish onyx-0.4.2.tgz 2025-10-09 19:56:25 +00:00
justin-tahara
435d225df5 Publish onyx-0.4.2.tgz 2025-10-09 19:16:44 +00:00
justin-tahara
d16d5e3f2c Publish onyx-0.4.1.tgz 2025-10-09 18:01:26 +00:00
Weves
099f38c659 Publish onyx-0.4.1.tgz 2025-10-09 02:03:37 +00:00
Weves
df36a07b6c Publish onyx-0.4.1.tgz 2025-10-08 21:43:01 +00:00
Weves
e275f6b47e Publish onyx-0.4.1.tgz 2025-10-08 21:40:43 +00:00
raunakab
bcd0ac51c4 Publish onyx-0.4.1.tgz 2025-10-08 18:59:55 +00:00
evan-onyx
00e0b5aa7e Publish onyx-0.4.1.tgz 2025-10-08 17:29:43 +00:00
evan-onyx
f737d33896 Publish onyx-0.4.1.tgz 2025-10-08 17:26:04 +00:00
Weves
40d4e14b83 Publish onyx-0.4.1.tgz 2025-10-08 16:57:58 +00:00
Weves
7f98790d35 Publish onyx-0.4.1.tgz 2025-10-08 16:06:04 +00:00
github-merge-queue[bot]
588878725c Publish onyx-0.4.1.tgz 2025-10-08 03:23:17 +00:00
Weves
45fb0a5a4b Publish onyx-0.4.1.tgz 2025-10-08 02:10:47 +00:00
justin-tahara
9c5d23e217 Publish onyx-0.4.1.tgz 2025-10-08 02:02:33 +00:00
yuhongsun96
8c341731d1 Publish onyx-0.4.1.tgz 2025-10-08 01:42:12 +00:00
rguan72
87f1387cc8 Publish onyx-0.4.1.tgz 2025-10-07 22:12:24 +00:00
wenxi-onyx
d5cdd042fe Publish onyx-0.4.1.tgz 2025-10-07 21:51:23 +00:00
wenxi-onyx
c8511cea98 Publish onyx-0.4.1.tgz 2025-10-07 21:41:24 +00:00
Weves
d27ece6c17 Publish onyx-0.4.1.tgz 2025-10-07 21:21:19 +00:00
justin-tahara
ef668870c8 Publish onyx-0.4.1.tgz 2025-10-07 21:11:51 +00:00
Weves
08a0846f42 Publish onyx-0.4.1.tgz 2025-10-07 20:54:19 +00:00
Weves
d4c2a615d7 Publish onyx-0.4.1.tgz 2025-10-07 20:25:48 +00:00
Weves
85a5b3aa24 Publish onyx-0.4.1.tgz 2025-10-07 19:53:23 +00:00
justin-tahara
5f17937569 Publish onyx-0.4.1.tgz 2025-10-07 19:18:40 +00:00
Weves
76e26fead2 Publish onyx-0.4.1.tgz 2025-10-07 19:08:38 +00:00
justin-tahara
40a9eaa37b Publish onyx-0.4.1.tgz 2025-10-07 19:04:15 +00:00
Weves
a7fc2cdad3 Publish onyx-0.4.1.tgz 2025-10-07 19:02:58 +00:00
Weves
ea3d5cd08e Publish onyx-0.4.1.tgz 2025-10-07 18:40:57 +00:00
wenxi-onyx
778c5f0621 Publish onyx-0.4.1.tgz 2025-10-07 17:25:34 +00:00
Weves
3f0dcc255f Publish onyx-0.4.1.tgz 2025-10-07 16:09:01 +00:00
justin-tahara
8256738a71 Publish onyx-0.4.1.tgz 2025-10-06 20:37:56 +00:00
wenxi-onyx
5574fd2da5 Publish onyx-0.4.0.tgz 2025-10-06 19:09:15 +00:00
Weves
8ff1d06e84 Publish onyx-0.4.0.tgz 2025-10-06 19:03:09 +00:00
Weves
f6b67514f4 Publish onyx-0.4.0.tgz 2025-10-06 19:01:16 +00:00
raunakab
d9e72867fa Publish onyx-0.4.0.tgz 2025-10-06 06:04:41 +00:00
wenxi-onyx
dbc008c520 Publish onyx-0.4.0.tgz 2025-10-05 19:25:11 +00:00
Weves
d992fac05a Publish onyx-0.4.0.tgz 2025-10-05 19:00:12 +00:00
wenxi-onyx
4869d54a84 Publish onyx-0.4.0.tgz 2025-10-05 18:35:12 +00:00
Weves
06e00b9c87 Publish onyx-0.4.0.tgz 2025-10-04 21:31:13 +00:00
justin-tahara
ba87f5b027 Publish onyx-0.4.0.tgz 2025-10-04 00:46:25 +00:00
Weves
7c0b2223ca Publish onyx-0.3.5.tgz 2025-10-03 23:59:46 +00:00
Weves
7e88248f66 Publish onyx-0.3.5.tgz 2025-10-03 20:11:25 +00:00
justin-tahara
7be459980e Publish onyx-0.3.5.tgz 2025-10-03 19:33:22 +00:00
evan-onyx
549c48d76d Publish onyx-0.3.4.tgz 2025-10-03 17:52:12 +00:00
justin-tahara
44e875d903 Publish onyx-0.3.4.tgz 2025-10-03 17:45:43 +00:00
Weves
ae9313a400 Publish onyx-0.3.3.tgz 2025-10-03 17:36:43 +00:00
justin-tahara
44647361c8 Publish onyx-0.3.3.tgz 2025-10-03 16:50:06 +00:00
github-merge-queue[bot]
6d4cc2bae5 Publish onyx-0.3.3.tgz 2025-10-03 01:57:36 +00:00
Weves
04a223008d Publish onyx-0.3.3.tgz 2025-10-02 23:33:53 +00:00
wenxi-onyx
83d9d1e682 Publish onyx-0.3.3.tgz 2025-10-02 23:16:09 +00:00
evan-onyx
88486480dc Publish onyx-0.3.3.tgz 2025-10-02 22:37:18 +00:00
Weves
bb3676f69c Publish onyx-0.3.3.tgz 2025-10-02 22:31:38 +00:00
wenxi-onyx
122def562b Publish onyx-0.3.3.tgz 2025-10-02 22:23:35 +00:00
Weves
0706c6cc69 Publish onyx-0.3.3.tgz 2025-10-02 22:18:00 +00:00
evan-onyx
3c265b9825 Publish onyx-0.3.3.tgz 2025-10-02 22:02:36 +00:00
evan-onyx
92f99f0329 Publish onyx-0.3.3.tgz 2025-10-02 20:07:12 +00:00
Weves
a8ba1daa0e Publish onyx-0.3.3.tgz 2025-10-02 19:54:27 +00:00
wenxi-onyx
c763c215e1 Publish onyx-0.3.3.tgz 2025-10-02 17:48:21 +00:00
evan-onyx
56d2aa6a86 Publish onyx-0.3.3.tgz 2025-10-02 17:24:05 +00:00
Subash-Mohan
5552507432 Publish onyx-0.3.3.tgz 2025-10-02 16:14:02 +00:00
Weves
664556ddb5 Publish onyx-0.3.3.tgz 2025-10-02 15:43:36 +00:00
github-merge-queue[bot]
9af74b7b4d Publish onyx-0.3.3.tgz 2025-10-01 23:45:42 +00:00
Weves
aba219d435 Publish onyx-0.3.3.tgz 2025-10-01 22:15:31 +00:00
Weves
d46474f2ae Publish onyx-0.3.3.tgz 2025-10-01 22:06:32 +00:00
evan-onyx
b990c606bd Publish onyx-0.3.3.tgz 2025-10-01 21:54:35 +00:00
wenxi-onyx
8c6c223d21 Publish onyx-0.3.3.tgz 2025-10-01 21:43:01 +00:00
Weves
369836a0c1 Publish onyx-0.3.3.tgz 2025-09-30 21:44:50 +00:00
justin-tahara
a8dfb2e061 Publish onyx-0.3.3.tgz 2025-09-30 19:57:42 +00:00
justin-tahara
2e9b109718 Publish onyx-0.3.3.tgz 2025-09-30 19:56:13 +00:00
justin-tahara
1f02116d94 Publish onyx-0.3.3.tgz 2025-09-30 18:29:28 +00:00
Weves
c23ef63028 Publish onyx-0.3.3.tgz 2025-09-30 17:42:50 +00:00
Weves
354d4f9eda Publish onyx-0.3.3.tgz 2025-09-30 16:58:45 +00:00
Weves
f43c5c45dd Publish onyx-0.3.3.tgz 2025-09-30 02:58:16 +00:00
Weves
847f6fa830 Publish onyx-0.3.3.tgz 2025-09-30 00:59:20 +00:00
evan-onyx
cd8e9866b0 Publish onyx-0.3.3.tgz 2025-09-30 00:02:49 +00:00
Weves
9cf9f1404d Publish onyx-0.3.3.tgz 2025-09-29 22:09:51 +00:00
Weves
101be8e3bb Publish onyx-0.3.3.tgz 2025-09-29 20:56:01 +00:00
Weves
036b528757 Publish onyx-0.3.3.tgz 2025-09-29 18:03:25 +00:00
Weves
ce9fd2c3c8 Publish onyx-0.3.3.tgz 2025-09-29 17:56:02 +00:00
Weves
ac6c85bf35 Publish onyx-0.3.3.tgz 2025-09-29 16:05:21 +00:00
Subash-Mohan
fee2f623ae Publish onyx-0.3.3.tgz 2025-09-29 06:59:11 +00:00
Weves
af6ced79a8 Publish onyx-0.3.3.tgz 2025-09-28 20:44:58 +00:00
Weves
d0e1737d32 Publish onyx-0.3.3.tgz 2025-09-28 20:01:39 +00:00
Weves
554f5f3ab7 Publish onyx-0.3.3.tgz 2025-09-28 19:59:07 +00:00
yuhongsun96
33a4005a1d Publish onyx-0.3.3.tgz 2025-09-28 02:04:51 +00:00
Weves
ef12d7f822 Publish onyx-0.3.3.tgz 2025-09-28 00:44:18 +00:00
yuhongsun96
1ee54dbe73 Publish onyx-0.3.3.tgz 2025-09-27 22:38:01 +00:00
Weves
b70836e9d7 Publish onyx-0.3.3.tgz 2025-09-27 00:16:52 +00:00
Weves
610d2fc213 Publish onyx-0.3.3.tgz 2025-09-26 22:34:47 +00:00
justin-tahara
0f6eccfdfb Publish onyx-0.3.3.tgz 2025-09-26 21:00:42 +00:00
Weves
2b5276f995 Publish onyx-0.3.2.tgz 2025-09-26 02:58:51 +00:00
jessicasingh7
ec1831c198 Publish onyx-0.3.2.tgz 2025-09-26 01:11:55 +00:00
Weves
4b883eca52 Publish onyx-0.3.2.tgz 2025-09-26 00:10:31 +00:00
Weves
1a1f74fb0b Publish onyx-0.3.2.tgz 2025-09-25 23:13:36 +00:00
jessicasingh7
66e07d01f3 Publish onyx-0.3.2.tgz 2025-09-25 23:13:16 +00:00
wenxi-onyx
a968bcd433 Publish onyx-0.3.2.tgz 2025-09-25 23:00:06 +00:00
justin-tahara
a18b5253ad Publish onyx-0.3.2.tgz 2025-09-25 22:35:26 +00:00
justin-tahara
3cdb6e6531 Publish onyx-0.3.2.tgz 2025-09-25 22:09:30 +00:00
justin-tahara
cb5ff10ae7 Publish onyx-0.3.2.tgz 2025-09-25 21:09:11 +00:00
justin-tahara
0a7631204b Publish onyx-0.3.2.tgz 2025-09-25 18:59:26 +00:00
yuhongsun96
e006fc8e08 Publish onyx-0.3.1.tgz 2025-09-25 18:35:48 +00:00
yuhongsun96
7f8d28af37 Publish onyx-0.3.1.tgz 2025-09-25 18:12:00 +00:00
yuhongsun96
c93a88608f Publish onyx-0.3.1.tgz 2025-09-25 17:10:05 +00:00
yuhongsun96
e84d6170d3 Publish onyx-0.3.1.tgz 2025-09-25 16:07:03 +00:00
yuhongsun96
d9d06f884b Publish onyx-0.3.1.tgz 2025-09-25 03:34:42 +00:00
justin-tahara
de52c4a0f5 Publish onyx-0.3.1.tgz 2025-09-25 02:34:25 +00:00
justin-tahara
ff3b56ca95 Publish onyx-0.3.1.tgz 2025-09-25 02:31:15 +00:00
yuhongsun96
763e4077c9 Publish onyx-0.3.1.tgz 2025-09-24 22:33:10 +00:00
Weves
9487864920 Publish onyx-stack-0.3.1.tgz 2025-09-23 23:08:59 +00:00
jessicasingh7
9b356b48d8 Publish onyx-stack-0.3.1.tgz 2025-09-23 02:20:58 +00:00
justin-tahara
2bd2c1f33a Publish onyx-stack-0.3.1.tgz 2025-09-23 01:49:57 +00:00
justin-tahara
6c863481ad Publish onyx-stack-0.3.1.tgz 2025-09-23 01:49:37 +00:00
yuhongsun96
9c67e5f47a Publish onyx-stack-0.3.1.tgz 2025-09-23 00:26:50 +00:00
yuhongsun96
cf847d060e Publish onyx-stack-0.3.1.tgz 2025-09-23 00:10:54 +00:00
yuhongsun96
2e1f3a5cc5 Publish onyx-stack-0.3.1.tgz 2025-09-22 22:39:11 +00:00
Subash-Mohan
52e9960ca8 Publish onyx-stack-0.3.1.tgz 2025-09-22 13:54:59 +00:00
edwin-onyx
d566ca0c91 Publish onyx-stack-0.3.1.tgz 2025-09-22 03:44:32 +00:00
evan-onyx
a149cfa6a2 Publish onyx-stack-0.3.1.tgz 2025-09-20 00:11:46 +00:00
rguan72
35f72e40ab Publish onyx-stack-0.3.1.tgz 2025-09-19 21:11:44 +00:00
rguan72
82d2985eec Publish onyx-stack-0.3.1.tgz 2025-09-19 01:18:59 +00:00
evan-onyx
0a7a90d310 Publish onyx-stack-0.3.1.tgz 2025-09-18 23:37:08 +00:00
Weves
860c3e0ba1 Publish onyx-stack-0.3.1.tgz 2025-09-18 20:00:52 +00:00
Weves
3bb2bddf61 Publish onyx-stack-0.3.1.tgz 2025-09-18 18:10:37 +00:00
Weves
4d2b6e453a Publish onyx-stack-0.3.1.tgz 2025-09-18 06:49:57 +00:00
rguan72
0a9c5a619f Publish onyx-stack-0.3.1.tgz 2025-09-18 00:13:17 +00:00
wenxi-onyx
968e8b7ec8 Publish onyx-stack-0.3.1.tgz 2025-09-18 00:07:46 +00:00
Weves
8b1c15cef2 Publish onyx-stack-0.3.1.tgz 2025-09-17 23:41:04 +00:00
justin-tahara
2113dd4a75 Publish onyx-stack-0.3.1.tgz 2025-09-17 20:53:21 +00:00
rguan72
0b826e7095 Publish onyx-stack-0.3.1.tgz 2025-09-17 19:18:28 +00:00
Weves
86eb089c02 Publish onyx-stack-0.3.1.tgz 2025-09-17 06:26:42 +00:00
rguan72
4e39d5851e Publish onyx-stack-0.3.1.tgz 2025-09-17 01:59:11 +00:00
justin-tahara
ee411195a8 Publish onyx-stack-0.3.1.tgz 2025-09-16 23:59:53 +00:00
justin-tahara
229459835a Publish onyx-stack-0.3.0.tgz 2025-09-16 23:49:27 +00:00
Weves
25a0f3a53d Publish onyx-stack-0.3.0.tgz 2025-09-16 22:49:41 +00:00
justin-tahara
836e58ffc9 Publish onyx-stack-0.3.0.tgz 2025-09-16 21:57:12 +00:00
Weves
2ebe86565a Publish onyx-stack-0.2.12.tgz 2025-09-16 21:50:10 +00:00
justin-tahara
2ba0974117 Publish onyx-stack-0.2.12.tgz 2025-09-16 20:57:58 +00:00
wenxi-onyx
c87e52a699 Publish onyx-stack-0.2.11.tgz 2025-09-16 18:07:27 +00:00
evan-onyx
46ac793c69 Publish onyx-stack-0.2.11.tgz 2025-09-16 01:25:18 +00:00
Weves
c6abd907cb Publish onyx-stack-0.2.11.tgz 2025-09-15 23:47:04 +00:00
evan-onyx
b005accd56 Publish onyx-stack-0.2.11.tgz 2025-09-15 18:31:25 +00:00
Weves
f6c2891d65 Publish onyx-stack-0.2.11.tgz 2025-09-15 03:08:29 +00:00
Weves
0950d78cb5 Publish onyx-stack-0.2.11.tgz 2025-09-15 03:06:37 +00:00
Weves
5f6cb591ba Publish onyx-stack-0.2.11.tgz 2025-09-14 00:34:45 +00:00
github-merge-queue[bot]
de40793bf5 Publish onyx-stack-0.2.11.tgz 2025-09-13 12:53:35 +00:00
github-merge-queue[bot]
5ad7e97f05 Publish onyx-stack-0.2.11.tgz 2025-09-13 02:33:23 +00:00
wenxi-onyx
88d5a253bd Publish onyx-stack-0.2.11.tgz 2025-09-13 01:16:20 +00:00
Weves
3c747f839d Publish onyx-stack-0.2.11.tgz 2025-09-12 07:16:01 +00:00
justin-tahara
3abee88d15 Publish onyx-stack-0.2.11.tgz 2025-09-12 02:28:53 +00:00
Weves
be61941063 Publish onyx-stack-0.2.11.tgz 2025-09-12 01:22:02 +00:00
justin-tahara
f7c210176d Publish onyx-stack-0.2.11.tgz 2025-09-12 01:20:15 +00:00
Weves
c2ca0e2fa7 Publish onyx-stack-0.2.10.tgz 2025-09-11 21:07:30 +00:00
justin-tahara
227542631c Publish onyx-stack-0.2.10.tgz 2025-09-11 17:36:28 +00:00
wenxi-onyx
8f44d2c78a Publish onyx-stack-0.2.10.tgz 2025-09-11 04:50:11 +00:00
Weves
81ae79abb6 Publish onyx-stack-0.2.10.tgz 2025-09-11 02:30:18 +00:00
Weves
2122f1f0ed Publish onyx-stack-0.2.10.tgz 2025-09-11 02:30:04 +00:00
joachim-danswer
f99bcc7736 Publish onyx-stack-0.2.10.tgz 2025-09-10 23:40:32 +00:00
Weves
7db3a9a880 Publish onyx-stack-0.2.10.tgz 2025-09-10 23:20:59 +00:00
Weves
4c28b2ae1c Publish onyx-stack-0.2.10.tgz 2025-09-10 21:38:48 +00:00
Weves
3cf7b0a7ba Publish onyx-stack-0.2.10.tgz 2025-09-10 21:15:02 +00:00
Weves
d382508e5b Publish onyx-stack-0.2.10.tgz 2025-09-10 19:16:21 +00:00
justin-tahara
a5e457458c Publish onyx-stack-0.2.10.tgz 2025-09-10 17:25:00 +00:00
Weves
b3ca478411 Publish onyx-stack-0.2.9.tgz 2025-09-10 17:23:03 +00:00
Weves
e80554619a Publish onyx-stack-0.2.9.tgz 2025-09-09 22:53:17 +00:00
wenxi-onyx
5669ce29a7 Publish onyx-stack-0.2.9.tgz 2025-09-09 17:24:40 +00:00
justin-tahara
23ab597d6f Publish onyx-stack-0.2.9.tgz 2025-09-09 16:38:21 +00:00
Weves
d8e33695d5 Publish onyx-stack-0.2.9.tgz 2025-09-09 07:30:56 +00:00
wenxi-onyx
6fc0f1b724 Publish onyx-stack-0.2.9.tgz 2025-09-09 01:51:11 +00:00
wenxi-onyx
c602ccbec5 Publish onyx-stack-0.2.9.tgz 2025-09-09 01:07:34 +00:00
Weves
ee94fcfdd9 Publish onyx-stack-0.2.9.tgz 2025-09-09 00:48:27 +00:00
github-merge-queue[bot]
4da51c3d98 Publish onyx-stack-0.2.9.tgz 2025-09-08 23:00:13 +00:00
Weves
bc8f472e70 Publish onyx-stack-0.2.9.tgz 2025-09-08 21:01:39 +00:00
wenxi-onyx
240de9ead8 Publish onyx-stack-0.2.9.tgz 2025-09-08 20:50:22 +00:00
wenxi-onyx
8a22fb9157 Publish onyx-stack-0.2.9.tgz 2025-09-08 19:12:20 +00:00
Weves
49401be4f6 Publish onyx-stack-0.2.9.tgz 2025-09-08 06:18:19 +00:00
Weves
a468ee668d Publish onyx-stack-0.2.9.tgz 2025-09-07 21:45:45 +00:00
wenxi-onyx
c23a029aef Publish onyx-stack-0.2.9.tgz 2025-09-07 20:07:50 +00:00
Weves
94fcde3629 Publish onyx-stack-0.2.9.tgz 2025-09-07 18:58:45 +00:00
Weves
b1f1bf6fc4 Publish onyx-stack-0.2.9.tgz 2025-09-07 18:25:36 +00:00
wenxi-onyx
2d85a2be1f Publish onyx-stack-0.2.9.tgz 2025-09-07 02:30:36 +00:00
Weves
50c3cf8245 Publish onyx-stack-0.2.9.tgz 2025-09-07 01:28:02 +00:00
github-merge-queue[bot]
66a55d4e39 Publish onyx-stack-0.2.9.tgz 2025-09-06 21:34:50 +00:00
Weves
6ffbe431f1 Publish onyx-stack-0.2.9.tgz 2025-09-06 06:29:37 +00:00
Weves
22404f18e2 Publish onyx-stack-0.2.9.tgz 2025-09-06 06:14:54 +00:00
Weves
cc2b4c44c4 Publish onyx-stack-0.2.9.tgz 2025-09-06 04:07:44 +00:00
github-merge-queue[bot]
14fbcc5bc8 Publish onyx-stack-0.2.9.tgz 2025-09-06 02:16:46 +00:00
github-merge-queue[bot]
2f073e43db Publish onyx-stack-0.2.9.tgz 2025-09-05 21:06:41 +00:00
wenxi-onyx
50504fcf3a Publish onyx-stack-0.2.9.tgz 2025-09-05 17:22:35 +00:00
justin-tahara
640e1cbae1 Publish onyx-stack-0.2.9.tgz 2025-09-05 17:03:38 +00:00
github-merge-queue[bot]
11224351d1 Publish onyx-stack-0.2.9.tgz 2025-09-05 07:59:01 +00:00
Weves
997a4ad613 Publish onyx-stack-0.2.9.tgz 2025-09-05 04:52:58 +00:00
Weves
05fe9b743f Publish onyx-stack-0.2.9.tgz 2025-09-05 04:51:41 +00:00
Weves
310b3b3f8b Publish onyx-stack-0.2.9.tgz 2025-09-04 23:15:39 +00:00
wenxi-onyx
f0bbcb3a06 Publish onyx-stack-0.2.9.tgz 2025-09-04 19:07:43 +00:00
Weves
93fdc1793b Publish onyx-stack-0.2.9.tgz 2025-09-04 18:15:00 +00:00
Weves
e55fc27f0c Publish onyx-stack-0.2.9.tgz 2025-09-04 02:08:39 +00:00
wenxi-onyx
68786ecd38 Publish onyx-stack-0.2.9.tgz 2025-09-04 00:33:28 +00:00
wenxi-onyx
954c70f422 Publish onyx-stack-0.2.9.tgz 2025-09-04 00:33:13 +00:00
Weves
6080fe48a7 Publish onyx-stack-0.2.9.tgz 2025-09-03 23:26:43 +00:00
github-merge-queue[bot]
8ba887ccfe Publish onyx-stack-0.2.9.tgz 2025-09-03 23:13:08 +00:00
Weves
c2209c1522 Publish onyx-stack-0.2.9.tgz 2025-09-02 23:45:37 +00:00
github-merge-queue[bot]
85ce395df2 Publish onyx-stack-0.2.9.tgz 2025-09-02 22:53:33 +00:00
github-merge-queue[bot]
fd0d302113 Publish onyx-stack-0.2.9.tgz 2025-09-02 20:52:56 +00:00
Weves
48f3816451 Publish onyx-stack-0.2.9.tgz 2025-09-02 15:58:33 +00:00
wenxi-onyx
73b555c844 Publish onyx-stack-0.2.9.tgz 2025-09-02 04:54:13 +00:00
justin-tahara
ea21f88db8 Publish onyx-stack-0.2.9.tgz 2025-09-02 02:36:52 +00:00
justin-tahara
61670c4be4 Publish onyx-stack-0.2.9.tgz 2025-09-01 22:34:57 +00:00
justin-tahara
094970deb3 Publish onyx-stack-0.2.9.tgz 2025-09-01 22:34:09 +00:00
Weves
5fa375ac3b Publish onyx-stack-0.2.9.tgz 2025-09-01 20:34:01 +00:00
justin-tahara
fd4c5d7389 Publish onyx-stack-0.2.9.tgz 2025-09-01 17:53:28 +00:00
Weves
411e67dad7 Publish onyx-stack-0.2.9.tgz 2025-09-01 17:19:43 +00:00
Weves
9ec7c20f88 Publish onyx-stack-0.2.9.tgz 2025-09-01 17:19:09 +00:00
evan-onyx
d3f65cb8aa Publish onyx-stack-0.2.9.tgz 2025-09-01 16:53:48 +00:00
Weves
9f2e10020e Publish onyx-stack-0.2.9.tgz 2025-09-01 06:20:28 +00:00
Weves
bde4f8d10a Publish onyx-stack-0.2.9.tgz 2025-09-01 04:30:24 +00:00
Weves
625fa4d61a Publish onyx-stack-0.2.9.tgz 2025-09-01 03:57:17 +00:00
Weves
9d35781773 Publish onyx-stack-0.2.9.tgz 2025-09-01 02:51:01 +00:00
wenxi-onyx
94bf94ff1b Publish onyx-stack-0.2.9.tgz 2025-09-01 02:17:33 +00:00
Weves
ae5fe8de0e Publish onyx-stack-0.2.9.tgz 2025-08-31 21:30:06 +00:00
Weves
770bdacc67 Publish onyx-stack-0.2.9.tgz 2025-08-30 00:18:43 +00:00
Weves
d37bc1d5f3 Publish onyx-stack-0.2.9.tgz 2025-08-30 00:18:18 +00:00
wenxi-onyx
8575fbc284 Publish onyx-stack-0.2.9.tgz 2025-08-30 00:09:02 +00:00
Weves
649c3b9391 Publish onyx-stack-0.2.9.tgz 2025-08-30 00:00:09 +00:00
Weves
2ce2ee7614 Publish onyx-stack-0.2.9.tgz 2025-08-29 23:17:10 +00:00
Weves
42779ff17a Publish onyx-stack-0.2.9.tgz 2025-08-29 22:09:28 +00:00
Weves
f9c94693cd Publish onyx-stack-0.2.9.tgz 2025-08-29 21:03:17 +00:00
Weves
5dfb35507a Publish onyx-stack-0.2.9.tgz 2025-08-29 16:03:03 +00:00
Weves
23708dadfd Publish onyx-stack-0.2.9.tgz 2025-08-29 03:43:18 +00:00
wenxi-onyx
578a22c653 Publish onyx-stack-0.2.9.tgz 2025-08-28 21:18:51 +00:00
wenxi-onyx
12657196ca Publish onyx-stack-0.2.9.tgz 2025-08-28 21:18:30 +00:00
wenxi-onyx
65df5dd49d Publish onyx-stack-0.2.9.tgz 2025-08-28 20:36:02 +00:00
Weves
dca2d4cd6b Publish onyx-stack-0.2.9.tgz 2025-08-28 06:25:00 +00:00
rguan72
52b4930e49 Publish onyx-stack-0.2.9.tgz 2025-08-28 03:04:10 +00:00
Weves
8c8e1b239c Publish onyx-stack-0.2.9.tgz 2025-08-27 05:35:03 +00:00
wenxi-onyx
37d883a8a5 Publish onyx-stack-0.2.9.tgz 2025-08-27 01:16:02 +00:00
Weves
d1e4c357e9 Publish onyx-stack-0.2.9.tgz 2025-08-27 00:34:29 +00:00
wenxi-onyx
7f9a8615eb Publish onyx-stack-0.2.9.tgz 2025-08-27 00:28:18 +00:00
wenxi-onyx
faff1f34b5 Publish onyx-stack-0.2.9.tgz 2025-08-26 19:45:46 +00:00
wenxi-onyx
26c8f32e95 Publish onyx-stack-0.2.9.tgz 2025-08-26 19:34:05 +00:00
Weves
c0b027a88e Publish onyx-stack-0.2.9.tgz 2025-08-26 18:22:11 +00:00
evan-onyx
39021f5ecc Publish onyx-stack-0.2.9.tgz 2025-08-26 18:15:24 +00:00
wenxi-onyx
10d00ec489 Publish onyx-stack-0.2.9.tgz 2025-08-26 16:50:41 +00:00
Weves
564f5699c5 Publish onyx-stack-0.2.9.tgz 2025-08-26 07:27:43 +00:00
evan-onyx
fd2f3ad351 Publish onyx-stack-0.2.9.tgz 2025-08-25 23:43:56 +00:00
github-merge-queue[bot]
f2725db40c Publish onyx-stack-0.2.9.tgz 2025-08-25 23:28:03 +00:00
justin-tahara
af4b64304a Publish onyx-stack-0.2.9.tgz 2025-08-25 21:12:23 +00:00
evan-onyx
6abf464c56 Publish onyx-stack-0.2.9.tgz 2025-08-25 20:36:57 +00:00
github-merge-queue[bot]
4ba236e010 Publish onyx-stack-0.2.9.tgz 2025-08-25 19:29:42 +00:00
Subash-Mohan
a997968142 Publish onyx-stack-0.2.9.tgz 2025-08-25 02:38:45 +00:00
Weves
cd3700254a Publish onyx-stack-0.2.9.tgz 2025-08-25 00:44:56 +00:00
justin-tahara
b18842e7fe Publish onyx-stack-0.2.9.tgz 2025-08-23 01:14:16 +00:00
justin-tahara
65bfd97dcc Publish onyx-stack-0.2.9.tgz 2025-08-22 23:40:25 +00:00
justin-tahara
733e44786b Publish onyx-stack-0.2.9.tgz 2025-08-22 00:37:00 +00:00
evan-onyx
a1979f957b Publish onyx-stack-0.2.8.tgz 2025-08-22 00:26:34 +00:00
justin-tahara
bc85c1aa68 Publish onyx-stack-0.2.8.tgz 2025-08-21 21:16:11 +00:00
justin-tahara
db046e4d3c Publish onyx-stack-0.2.7.tgz 2025-08-21 16:44:26 +00:00
github-merge-queue[bot]
3aac46e58b Publish onyx-stack-0.2.7.tgz 2025-08-20 23:27:52 +00:00
justin-tahara
043fc279a8 Publish onyx-stack-0.2.7.tgz 2025-08-20 20:51:11 +00:00
justin-tahara
551dfd5d58 Publish onyx-stack-0.2.7.tgz 2025-08-20 20:51:01 +00:00
wenxi-onyx
4d0c140961 Publish onyx-stack-0.2.6.tgz 2025-08-20 16:15:17 +00:00
github-merge-queue[bot]
3a238277ec Publish onyx-stack-0.2.6.tgz 2025-08-20 05:11:52 +00:00
github-merge-queue[bot]
abf15bb4a6 Publish onyx-stack-0.2.6.tgz 2025-08-20 03:51:11 +00:00
Weves
b68d35d14f Publish onyx-stack-0.2.6.tgz 2025-08-19 22:17:15 +00:00
Weves
ec90a7370a Publish onyx-stack-0.2.6.tgz 2025-08-19 20:02:42 +00:00
justin-tahara
e34a035afc Publish onyx-stack-0.2.6.tgz 2025-08-19 00:46:58 +00:00
wenxi-onyx
7cbdd0b729 Publish onyx-stack-0.2.6.tgz 2025-08-18 21:35:18 +00:00
github-merge-queue[bot]
5bbb6b059b Publish onyx-stack-0.2.6.tgz 2025-08-18 18:25:51 +00:00
github-merge-queue[bot]
ae1635e93d Publish onyx-stack-0.2.6.tgz 2025-08-18 17:27:13 +00:00
github-merge-queue[bot]
0c2d3cbc1a Publish onyx-stack-0.2.6.tgz 2025-08-18 04:30:45 +00:00
github-merge-queue[bot]
102dcefc9d Publish onyx-stack-0.2.6.tgz 2025-08-17 23:37:06 +00:00
github-merge-queue[bot]
af3cc8edb9 Publish onyx-stack-0.2.6.tgz 2025-08-16 03:49:15 +00:00
wenxi-onyx
c96b27389e Publish onyx-stack-0.2.6.tgz 2025-08-16 01:20:47 +00:00
Weves
4b128f3e85 Publish onyx-stack-0.2.6.tgz 2025-08-15 22:44:16 +00:00
github-merge-queue[bot]
691f6797bb Publish onyx-stack-0.2.6.tgz 2025-08-15 01:53:14 +00:00
github-merge-queue[bot]
2e9f41a7f7 Publish onyx-stack-0.2.6.tgz 2025-08-15 00:59:46 +00:00
justin-tahara
d275e440ca Publish onyx-stack-0.2.6.tgz 2025-08-14 23:48:20 +00:00
wenxi-onyx
8f1d5fae8c Publish onyx-stack-0.2.6.tgz 2025-08-14 22:18:29 +00:00
wenxi-onyx
c904b50454 Publish onyx-stack-0.2.6.tgz 2025-08-14 20:21:22 +00:00
github-merge-queue[bot]
fb9d9fd995 Publish onyx-stack-0.2.5.tgz 2025-08-14 10:13:19 +00:00
github-merge-queue[bot]
fd599797db Publish onyx-stack-0.2.5.tgz 2025-08-13 21:21:27 +00:00
github-merge-queue[bot]
094f416441 Publish onyx-stack-0.2.5.tgz 2025-08-13 19:09:25 +00:00
Weves
ba9e777bdf Publish onyx-stack-0.2.5.tgz 2025-08-13 16:58:38 +00:00
github-merge-queue[bot]
e16ba43392 Publish onyx-stack-0.2.5.tgz 2025-08-13 02:23:28 +00:00
github-merge-queue[bot]
4d4ff04d2e Publish onyx-stack-0.2.5.tgz 2025-08-13 01:33:06 +00:00
github-merge-queue[bot]
ba1dcd346b Publish onyx-stack-0.2.5.tgz 2025-08-12 03:45:29 +00:00
github-merge-queue[bot]
3b5352a65a Publish onyx-stack-0.2.5.tgz 2025-08-11 18:16:34 +00:00
evan-onyx
1cb87fe4b1 Publish onyx-stack-0.2.5.tgz 2025-08-11 16:58:46 +00:00
github-merge-queue[bot]
f098eacf09 Publish onyx-stack-0.2.5.tgz 2025-08-10 23:50:24 +00:00
github-merge-queue[bot]
3afea18239 Publish onyx-stack-0.2.5.tgz 2025-08-10 20:09:42 +00:00
justin-tahara
26c6f60595 Publish onyx-stack-0.2.5.tgz 2025-08-09 01:41:59 +00:00
justin-tahara
df03438e34 Update index.yaml
Signed-off-by: justin-tahara <justin-tahara@users.noreply.github.com>
2025-08-07 02:06:52 +00:00
justin-tahara
88c2a87b18 Update index.yaml
Signed-off-by: justin-tahara <justin-tahara@users.noreply.github.com>
2025-08-05 21:12:35 +00:00
justin-tahara
e717796131 Add README.md 2025-07-30 16:39:07 -07:00
2107 changed files with 103404 additions and 194268 deletions

2
.github/CODEOWNERS vendored
View File

@@ -1,3 +1 @@
* @onyx-dot-app/onyx-core-team
# Helm charts Owners
/helm/ @justin-tahara

View File

@@ -1,43 +0,0 @@
self-hosted-runner:
# Labels of self-hosted runner in array of strings.
labels:
- extras=ecr-cache
- extras=s3-cache
- hdd=256
- runs-on
- runner=1cpu-linux-arm64
- runner=1cpu-linux-x64
- runner=2cpu-linux-arm64
- runner=2cpu-linux-x64
- runner=4cpu-linux-arm64
- runner=4cpu-linux-x64
- runner=8cpu-linux-arm64
- runner=8cpu-linux-x64
- runner=16cpu-linux-arm64
- runner=16cpu-linux-x64
- ubuntu-slim # Currently in public preview
- volume=40gb
- volume=50gb
# Configuration variables in array of strings defined in your repository or
# organization. `null` means disabling configuration variables check.
# Empty array means no configuration variable is allowed.
config-variables: null
# Configuration for file paths. The keys are glob patterns to match to file
# paths relative to the repository root. The values are the configurations for
# the file paths. Note that the path separator is always '/'.
# The following configurations are available.
#
# "ignore" is an array of regular expression patterns. Matched error messages
# are ignored. This is similar to the "-ignore" command line option.
paths:
# Glob pattern relative to the repository root for matching files. The path separator is always '/'.
# This example configures any YAML file under the '.github/workflows/' directory.
.github/workflows/**/*.{yml,yaml}:
# TODO: These are real and should be fixed eventually.
ignore:
- 'shellcheck reported issue in this script: SC2038:.+'
- 'shellcheck reported issue in this script: SC2046:.+'
- 'shellcheck reported issue in this script: SC2086:.+'
- 'shellcheck reported issue in this script: SC2193:.+'

View File

@@ -0,0 +1,116 @@
name: 'Build and Push Docker Image with Retry'
description: 'Attempts to build and push a Docker image, with a retry on failure'
inputs:
context:
description: 'Build context'
required: true
file:
description: 'Dockerfile location'
required: true
platforms:
description: 'Target platforms'
required: true
pull:
description: 'Always attempt to pull a newer version of the image'
required: false
default: 'true'
push:
description: 'Push the image to registry'
required: false
default: 'true'
load:
description: 'Load the image into Docker daemon'
required: false
default: 'true'
tags:
description: 'Image tags'
required: true
no-cache:
description: 'Read from cache'
required: false
default: 'false'
cache-from:
description: 'Cache sources'
required: false
cache-to:
description: 'Cache destinations'
required: false
retry-wait-time:
description: 'Time to wait before attempt 2 in seconds'
required: false
default: '60'
retry-wait-time-2:
description: 'Time to wait before attempt 3 in seconds'
required: false
default: '120'
runs:
using: "composite"
steps:
- name: Build and push Docker image (Attempt 1 of 3)
id: buildx1
uses: docker/build-push-action@v6
continue-on-error: true
with:
context: ${{ inputs.context }}
file: ${{ inputs.file }}
platforms: ${{ inputs.platforms }}
pull: ${{ inputs.pull }}
push: ${{ inputs.push }}
load: ${{ inputs.load }}
tags: ${{ inputs.tags }}
no-cache: ${{ inputs.no-cache }}
cache-from: ${{ inputs.cache-from }}
cache-to: ${{ inputs.cache-to }}
- name: Wait before attempt 2
if: steps.buildx1.outcome != 'success'
run: |
echo "First attempt failed. Waiting ${{ inputs.retry-wait-time }} seconds before retry..."
sleep ${{ inputs.retry-wait-time }}
shell: bash
- name: Build and push Docker image (Attempt 2 of 3)
id: buildx2
if: steps.buildx1.outcome != 'success'
uses: docker/build-push-action@v6
with:
context: ${{ inputs.context }}
file: ${{ inputs.file }}
platforms: ${{ inputs.platforms }}
pull: ${{ inputs.pull }}
push: ${{ inputs.push }}
load: ${{ inputs.load }}
tags: ${{ inputs.tags }}
no-cache: ${{ inputs.no-cache }}
cache-from: ${{ inputs.cache-from }}
cache-to: ${{ inputs.cache-to }}
- name: Wait before attempt 3
if: steps.buildx1.outcome != 'success' && steps.buildx2.outcome != 'success'
run: |
echo "Second attempt failed. Waiting ${{ inputs.retry-wait-time-2 }} seconds before retry..."
sleep ${{ inputs.retry-wait-time-2 }}
shell: bash
- name: Build and push Docker image (Attempt 3 of 3)
id: buildx3
if: steps.buildx1.outcome != 'success' && steps.buildx2.outcome != 'success'
uses: docker/build-push-action@v6
with:
context: ${{ inputs.context }}
file: ${{ inputs.file }}
platforms: ${{ inputs.platforms }}
pull: ${{ inputs.pull }}
push: ${{ inputs.push }}
load: ${{ inputs.load }}
tags: ${{ inputs.tags }}
no-cache: ${{ inputs.no-cache }}
cache-from: ${{ inputs.cache-from }}
cache-to: ${{ inputs.cache-to }}
- name: Report failure
if: steps.buildx1.outcome != 'success' && steps.buildx2.outcome != 'success' && steps.buildx3.outcome != 'success'
run: |
echo "All attempts failed. Possible transient infrastucture issues? Try again later or inspect logs for details."
shell: bash

View File

@@ -1,17 +0,0 @@
name: "Setup Playwright"
description: "Sets up Playwright and system deps (assumes Python and Playwright are installed)"
runs:
using: "composite"
steps:
- name: Cache playwright cache
uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-${{ runner.arch }}-playwright-${{ hashFiles('backend/requirements/default.txt') }}
restore-keys: |
${{ runner.os }}-${{ runner.arch }}-playwright-
- name: Install playwright
shell: bash
run: |
playwright install chromium --with-deps

View File

@@ -1,72 +0,0 @@
name: "Setup Python and Install Dependencies"
description: "Sets up Python with uv and installs deps"
inputs:
requirements:
description: "Newline-separated list of requirement files to install (relative to repo root)"
required: true
runs:
using: "composite"
steps:
- name: Setup uv
uses: astral-sh/setup-uv@caf0cab7a618c569241d31dcd442f54681755d39 # ratchet:astral-sh/setup-uv@v3
# TODO: Enable caching once there is a uv.lock file checked in.
# with:
# enable-cache: true
- name: Compute requirements hash
id: req-hash
shell: bash
env:
REQUIREMENTS: ${{ inputs.requirements }}
run: |
# Hash the contents of the specified requirement files
hash=""
while IFS= read -r req; do
if [ -n "$req" ] && [ -f "$req" ]; then
hash="$hash$(sha256sum "$req")"
fi
done <<< "$REQUIREMENTS"
echo "hash=$(echo "$hash" | sha256sum | cut -d' ' -f1)" >> "$GITHUB_OUTPUT"
- name: Cache uv cache directory
uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
with:
path: ~/.cache/uv
key: ${{ runner.os }}-uv-${{ steps.req-hash.outputs.hash }}
restore-keys: |
${{ runner.os }}-uv-
- name: Setup Python
uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # ratchet:actions/setup-python@v5
with:
python-version: "3.11"
- name: Create virtual environment
shell: bash
env:
VENV_DIR: ${{ runner.temp }}/venv
run: | # zizmor: ignore[github-env]
uv venv "$VENV_DIR"
# Validate path before adding to GITHUB_PATH to prevent code injection
if [ -d "$VENV_DIR/bin" ]; then
realpath "$VENV_DIR/bin" >> "$GITHUB_PATH"
else
echo "Error: $VENV_DIR/bin does not exist"
exit 1
fi
- name: Install Python dependencies with uv
shell: bash
env:
REQUIREMENTS: ${{ inputs.requirements }}
run: |
# Build the uv pip install command with each requirement file as array elements
cmd=("uv" "pip" "install")
while IFS= read -r req; do
# Skip empty lines
if [ -n "$req" ]; then
cmd+=("-r" "$req")
fi
done <<< "$REQUIREMENTS"
echo "Running: ${cmd[*]}"
"${cmd[@]}"

View File

@@ -1,102 +0,0 @@
name: "Slack Notify on Failure"
description: "Sends a Slack notification when a workflow fails"
inputs:
webhook-url:
description: "Slack webhook URL (can also use SLACK_WEBHOOK_URL env var)"
required: false
failed-jobs:
description: "List of failed job names (newline-separated)"
required: false
title:
description: "Title for the notification"
required: false
default: "🚨 Workflow Failed"
ref-name:
description: "Git ref name (tag/branch)"
required: false
runs:
using: "composite"
steps:
- name: Send Slack notification
shell: bash
env:
SLACK_WEBHOOK_URL: ${{ inputs.webhook-url }}
FAILED_JOBS: ${{ inputs.failed-jobs }}
TITLE: ${{ inputs.title }}
REF_NAME: ${{ inputs.ref-name }}
REPO: ${{ github.repository }}
WORKFLOW: ${{ github.workflow }}
RUN_NUMBER: ${{ github.run_number }}
RUN_ID: ${{ github.run_id }}
SERVER_URL: ${{ github.server_url }}
GITHUB_REF_NAME: ${{ github.ref_name }}
run: |
if [ -z "$SLACK_WEBHOOK_URL" ]; then
echo "webhook-url input or SLACK_WEBHOOK_URL env var is not set, skipping notification"
exit 0
fi
# Build workflow URL
WORKFLOW_URL="${SERVER_URL}/${REPO}/actions/runs/${RUN_ID}"
# Use ref_name from input or fall back to github.ref_name
if [ -z "$REF_NAME" ]; then
REF_NAME="$GITHUB_REF_NAME"
fi
# Escape JSON special characters
escape_json() {
local input="$1"
# Escape backslashes first (but preserve \n sequences)
# Protect \n sequences temporarily
input=$(printf '%s' "$input" | sed 's/\\n/\x01NL\x01/g')
# Escape remaining backslashes
input=$(printf '%s' "$input" | sed 's/\\/\\\\/g')
# Restore \n sequences (single backslash, will be correct in JSON)
input=$(printf '%s' "$input" | sed 's/\x01NL\x01/\\n/g')
# Escape quotes
printf '%s' "$input" | sed 's/"/\\"/g'
}
REF_NAME_ESC=$(escape_json "$REF_NAME")
FAILED_JOBS_ESC=$(escape_json "$FAILED_JOBS")
WORKFLOW_URL_ESC=$(escape_json "$WORKFLOW_URL")
TITLE_ESC=$(escape_json "$TITLE")
# Build JSON payload piece by piece
# Note: FAILED_JOBS_ESC already contains \n sequences that should remain as \n in JSON
PAYLOAD="{"
PAYLOAD="${PAYLOAD}\"text\":\"${TITLE_ESC}\","
PAYLOAD="${PAYLOAD}\"blocks\":[{"
PAYLOAD="${PAYLOAD}\"type\":\"header\","
PAYLOAD="${PAYLOAD}\"text\":{\"type\":\"plain_text\",\"text\":\"${TITLE_ESC}\"}"
PAYLOAD="${PAYLOAD}},{"
PAYLOAD="${PAYLOAD}\"type\":\"section\","
PAYLOAD="${PAYLOAD}\"fields\":["
if [ -n "$REF_NAME" ]; then
PAYLOAD="${PAYLOAD}{\"type\":\"mrkdwn\",\"text\":\"*Ref:*\\n${REF_NAME_ESC}\"},"
fi
PAYLOAD="${PAYLOAD}{\"type\":\"mrkdwn\",\"text\":\"*Run ID:*\\n#${RUN_NUMBER}\"}"
PAYLOAD="${PAYLOAD}]"
PAYLOAD="${PAYLOAD}}"
if [ -n "$FAILED_JOBS" ]; then
PAYLOAD="${PAYLOAD},{"
PAYLOAD="${PAYLOAD}\"type\":\"section\","
PAYLOAD="${PAYLOAD}\"text\":{\"type\":\"mrkdwn\",\"text\":\"*Failed Jobs:*\\n${FAILED_JOBS_ESC}\"}"
PAYLOAD="${PAYLOAD}}"
fi
PAYLOAD="${PAYLOAD},{"
PAYLOAD="${PAYLOAD}\"type\":\"actions\","
PAYLOAD="${PAYLOAD}\"elements\":[{"
PAYLOAD="${PAYLOAD}\"type\":\"button\","
PAYLOAD="${PAYLOAD}\"text\":{\"type\":\"plain_text\",\"text\":\"View Workflow Run\"},"
PAYLOAD="${PAYLOAD}\"url\":\"${WORKFLOW_URL_ESC}\""
PAYLOAD="${PAYLOAD}}]"
PAYLOAD="${PAYLOAD}}"
PAYLOAD="${PAYLOAD}]"
PAYLOAD="${PAYLOAD}}"
curl -X POST -H 'Content-type: application/json' \
--data "$PAYLOAD" \
"$SLACK_WEBHOOK_URL"

View File

@@ -1,24 +0,0 @@
version: 2
updates:
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "weekly"
cooldown:
default-days: 7
open-pull-requests-limit: 3
assignees:
- "jmelahman"
labels:
- "dependabot:actions"
- package-ecosystem: "pip"
directory: "/backend"
schedule:
interval: "weekly"
cooldown:
default-days: 7
open-pull-requests-limit: 3
assignees:
- "jmelahman"
labels:
- "dependabot:python"

View File

@@ -6,6 +6,9 @@
[Describe the tests you ran to verify your changes]
## Additional Options
## Backporting (check the box to trigger backport action)
Note: You have to check that the action passes, otherwise resolve the conflicts manually and tag the patches.
- [ ] This PR should be backported (make sure to check that the backport attempt succeeds)
- [ ] [Optional] Override Linear Check

1
.github/runs-on.yml vendored
View File

@@ -1 +0,0 @@
_extend: .github-private

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,156 @@
name: Build and Push Backend Image on Tag
on:
push:
tags:
- "*"
env:
REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-backend-cloud' || 'onyxdotapp/onyx-backend' }}
DEPLOYMENT: ${{ contains(github.ref_name, 'cloud') && 'cloud' || 'standalone' }}
# don't tag cloud images with "latest"
LATEST_TAG: ${{ contains(github.ref_name, 'latest') && !contains(github.ref_name, 'cloud') }}
jobs:
build-and-push:
# TODO: investigate a matrix build like the web container
# See https://runs-on.com/runners/linux/
runs-on:
- runs-on
- runner=${{ matrix.platform == 'linux/amd64' && '8cpu-linux-x64' || '8cpu-linux-arm64' }}
- run-id=${{ github.run_id }}
- tag=platform-${{ matrix.platform }}
strategy:
fail-fast: false
matrix:
platform:
- linux/amd64
- linux/arm64
steps:
- name: Prepare
run: |
platform=${{ matrix.platform }}
echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
- name: Checkout code
uses: actions/checkout@v4
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
latest=false
tags: |
type=raw,value=${{ github.ref_name }}
type=raw,value=${{ env.LATEST_TAG == 'true' && 'latest' || '' }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Install build-essential
run: |
sudo apt-get update
sudo apt-get install -y build-essential
- name: Backend Image Docker Build and Push
id: build
uses: docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile
platforms: ${{ matrix.platform }}
push: true
build-args: |
ONYX_VERSION=${{ github.ref_name }}
labels: ${{ steps.meta.outputs.labels }}
outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/backend-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/backend-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
- name: Export digest
run: |
mkdir -p /tmp/digests
digest="${{ steps.build.outputs.digest }}"
touch "/tmp/digests/${digest#sha256:}"
- name: Upload digest
uses: actions/upload-artifact@v4
with:
name: backend-digests-${{ env.PLATFORM_PAIR }}-${{ github.run_id }}
path: /tmp/digests/*
if-no-files-found: error
retention-days: 1
merge:
runs-on: ubuntu-latest
needs:
- build-and-push
steps:
# Needed for trivyignore
- name: Checkout
uses: actions/checkout@v4
- name: Download digests
uses: actions/download-artifact@v4
with:
path: /tmp/digests
pattern: backend-digests-*-${{ github.run_id }}
merge-multiple: true
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ${{ env.REGISTRY_IMAGE }}
flavor: |
latest=false
tags: |
type=raw,value=${{ github.ref_name }}
type=raw,value=${{ env.LATEST_TAG == 'true' && 'latest' || '' }}
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Create manifest list and push
working-directory: /tmp/digests
run: |
docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
$(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
- name: Inspect image
run: |
docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
# trivy has their own rate limiting issues causing this action to flake
# we worked around it by hardcoding to different db repos in env
# can re-enable when they figure it out
# https://github.com/aquasecurity/trivy/discussions/7538
# https://github.com/aquasecurity/trivy-action/issues/389
- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@master
env:
TRIVY_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-db:2"
TRIVY_JAVA_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-java-db:1"
TRIVY_USERNAME: ${{ secrets.DOCKER_USERNAME }}
TRIVY_PASSWORD: ${{ secrets.DOCKER_TOKEN }}
with:
# To run locally: trivy image --severity HIGH,CRITICAL onyxdotapp/onyx-backend
image-ref: docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}
severity: "CRITICAL,HIGH"
trivyignores: ./backend/.trivyignore

View File

@@ -0,0 +1,150 @@
name: Build and Push Cloud Web Image on Tag
# Identical to the web container build, but with correct image tag and build args.
# Triggered only by tags containing "cloud"; builds per-platform images by digest,
# then the `merge` job assembles the multi-arch manifest and runs a Trivy scan.
# (Indentation restored — the source view had flattened all YAML nesting.)
on:
  push:
    tags:
      - "*cloud*"

env:
  REGISTRY_IMAGE: onyxdotapp/onyx-web-server-cloud
  DEPLOYMENT: cloud

jobs:
  build:
    runs-on:
      - runs-on
      # x64 runner for amd64 builds, arm64 runner for arm64 builds
      - runner=${{ matrix.platform == 'linux/amd64' && '8cpu-linux-x64' || '8cpu-linux-arm64' }}
      - run-id=${{ github.run_id }}
      - tag=platform-${{ matrix.platform }}
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
      - name: Checkout
        uses: actions/checkout@v4
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY_IMAGE }}
          flavor: |
            latest=false
          tags: |
            type=raw,value=${{ github.ref_name }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}
      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          context: ./web
          file: ./web/Dockerfile
          platforms: ${{ matrix.platform }}
          push: true
          build-args: |
            ONYX_VERSION=${{ github.ref_name }}
            NEXT_PUBLIC_CLOUD_ENABLED=true
            NEXT_PUBLIC_POSTHOG_KEY=${{ secrets.POSTHOG_KEY }}
            NEXT_PUBLIC_POSTHOG_HOST=${{ secrets.POSTHOG_HOST }}
            NEXT_PUBLIC_SENTRY_DSN=${{ secrets.SENTRY_DSN }}
            NEXT_PUBLIC_STRIPE_PUBLISHABLE_KEY=${{ secrets.STRIPE_PUBLISHABLE_KEY }}
            NEXT_PUBLIC_GTM_ENABLED=true
            NEXT_PUBLIC_FORGOT_PASSWORD_ENABLED=true
            NEXT_PUBLIC_INCLUDE_ERROR_POPUP_SUPPORT_LINK=true
            NODE_OPTIONS=--max-old-space-size=8192
          labels: ${{ steps.meta.outputs.labels }}
          # Push by digest only; tagging happens in the `merge` job below.
          outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
          cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/cloudweb-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
          cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/cloudweb-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
        # no-cache needed due to weird interactions with the builds for different platforms
        # NOTE(rkuo): this may not be true any more with the proper cache prefixing by architecture - currently testing with it off
      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: cloudweb-digests-${{ env.PLATFORM_PAIR }}-${{ github.run_id }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

  merge:
    runs-on: ubuntu-latest
    needs:
      - build
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          pattern: cloudweb-digests-*-${{ github.run_id }}
          merge-multiple: true
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY_IMAGE }}
          flavor: |
            latest=false
          tags: |
            type=raw,value=${{ github.ref_name }}
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}
      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
      # trivy has their own rate limiting issues causing this action to flake
      # we worked around it by hardcoding to different db repos in env
      # can re-enable when they figure it out
      # https://github.com/aquasecurity/trivy/discussions/7538
      # https://github.com/aquasecurity/trivy-action/issues/389
      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@master
        env:
          TRIVY_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-db:2"
          TRIVY_JAVA_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-java-db:1"
          TRIVY_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          TRIVY_PASSWORD: ${{ secrets.DOCKER_TOKEN }}
        with:
          image-ref: docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}
          severity: "CRITICAL,HIGH"

View File

@@ -0,0 +1,176 @@
name: Build and Push Model Server Image on Tag
# Builds per-architecture model-server images, then merges them into a
# multi-arch manifest and scans with Trivy. Cloud tags go to the *-cloud repo.
# (Indentation restored — the source view had flattened all YAML nesting.)
on:
  push:
    tags:
      - "*"

env:
  REGISTRY_IMAGE: ${{ contains(github.ref_name, 'cloud') && 'onyxdotapp/onyx-model-server-cloud' || 'onyxdotapp/onyx-model-server' }}
  DOCKER_BUILDKIT: 1
  BUILDKIT_PROGRESS: plain
  DEPLOYMENT: ${{ contains(github.ref_name, 'cloud') && 'cloud' || 'standalone' }}
  # don't tag cloud images with "latest"
  LATEST_TAG: ${{ contains(github.ref_name, 'latest') && !contains(github.ref_name, 'cloud') }}

jobs:
  # Bypassing this for now as the idea of not building is glitching
  # releases and builds that depends on everything being tagged in docker
  # 1) Preliminary job to check if the changed files are relevant
  # check_model_server_changes:
  #   runs-on: ubuntu-latest
  #   outputs:
  #     changed: ${{ steps.check.outputs.changed }}
  #   steps:
  #     - name: Checkout code
  #       uses: actions/checkout@v4
  #
  #     - name: Check if relevant files changed
  #       id: check
  #       run: |
  #         # Default to "false"
  #         echo "changed=false" >> $GITHUB_OUTPUT
  #
  #         # Compare the previous commit (github.event.before) to the current one (github.sha)
  #         # If any file in backend/model_server/** or backend/Dockerfile.model_server is changed,
  #         # set changed=true
  #         if git diff --name-only ${{ github.event.before }} ${{ github.sha }} \
  #           | grep -E '^backend/model_server/|^backend/Dockerfile.model_server'; then
  #           echo "changed=true" >> $GITHUB_OUTPUT
  #         fi
  check_model_server_changes:
    runs-on: ubuntu-latest
    outputs:
      changed: "true"
    steps:
      - name: Bypass check and set output
        run: echo "changed=true" >> $GITHUB_OUTPUT

  build-amd64:
    needs: [check_model_server_changes]
    if: needs.check_model_server_changes.outputs.changed == 'true'
    runs-on:
      [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}-amd64"]
    env:
      PLATFORM_PAIR: linux-amd64
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: System Info
        run: |
          df -h
          free -h
          docker system prune -af --volumes
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver-opts: |
            image=moby/buildkit:latest
            network=host
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}
      - name: Build and Push AMD64
        uses: docker/build-push-action@v6
        with:
          context: ./backend
          file: ./backend/Dockerfile.model_server
          platforms: linux/amd64
          push: true
          tags: ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-amd64
          build-args: |
            DANSWER_VERSION=${{ github.ref_name }}
          outputs: type=registry
          provenance: false
          cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
          cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
        # no-cache: true

  build-arm64:
    needs: [check_model_server_changes]
    if: needs.check_model_server_changes.outputs.changed == 'true'
    # BUG FIX: was runner=8cpu-linux-x64 — an x64 runner cannot natively build
    # the linux/arm64 image (and no QEMU setup step exists here). Sibling
    # workflows select 8cpu-linux-arm64 for arm64 builds.
    runs-on:
      [runs-on, runner=8cpu-linux-arm64, "run-id=${{ github.run_id }}-arm64"]
    env:
      PLATFORM_PAIR: linux-arm64
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: System Info
        run: |
          df -h
          free -h
          docker system prune -af --volumes
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver-opts: |
            image=moby/buildkit:latest
            network=host
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}
      - name: Build and Push ARM64
        uses: docker/build-push-action@v6
        with:
          context: ./backend
          file: ./backend/Dockerfile.model_server
          platforms: linux/arm64
          push: true
          tags: ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-arm64
          build-args: |
            DANSWER_VERSION=${{ github.ref_name }}
          outputs: type=registry
          provenance: false
          cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
          cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max

  merge-and-scan:
    needs: [build-amd64, build-arm64, check_model_server_changes]
    if: needs.check_model_server_changes.outputs.changed == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}
      - name: Create and Push Multi-arch Manifest
        run: |
          docker buildx create --use
          docker buildx imagetools create -t ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }} \
            ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-amd64 \
            ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-arm64
          if [[ "${{ env.LATEST_TAG }}" == "true" ]]; then
            docker buildx imagetools create -t ${{ env.REGISTRY_IMAGE }}:latest \
              ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-amd64 \
              ${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}-arm64
          fi
      # trivy has their own rate limiting issues causing this action to flake
      # https://github.com/aquasecurity/trivy/discussions/7538
      # https://github.com/aquasecurity/trivy-action/issues/389
      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@master
        env:
          TRIVY_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-db:2"
          TRIVY_JAVA_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-java-db:1"
          TRIVY_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          TRIVY_PASSWORD: ${{ secrets.DOCKER_TOKEN }}
        with:
          image-ref: docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}
          severity: "CRITICAL,HIGH"
          timeout: "10m"

View File

@@ -0,0 +1,161 @@
name: Build and Push Web Image on Tag
# Builds the standalone web image for all non-cloud tags: a precheck job gates
# on the tag name, per-platform builds push by digest, and `merge` assembles
# the multi-arch manifest and runs a Trivy scan.
# (Indentation restored — the source view had flattened all YAML nesting.)
on:
  push:
    tags:
      - "*"

env:
  REGISTRY_IMAGE: onyxdotapp/onyx-web-server
  LATEST_TAG: ${{ contains(github.ref_name, 'latest') }}
  DEPLOYMENT: standalone

jobs:
  precheck:
    runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
    outputs:
      should-run: ${{ steps.set-output.outputs.should-run }}
    steps:
      - name: Check if tag contains "cloud"
        id: set-output
        run: |
          if [[ "${{ github.ref_name }}" == *cloud* ]]; then
            echo "should-run=false" >> "$GITHUB_OUTPUT"
          else
            echo "should-run=true" >> "$GITHUB_OUTPUT"
          fi

  build:
    needs: precheck
    if: needs.precheck.outputs.should-run == 'true'
    runs-on:
      - runs-on
      - runner=${{ matrix.platform == 'linux/amd64' && '8cpu-linux-x64' || '8cpu-linux-arm64' }}
      - run-id=${{ github.run_id }}
      - tag=platform-${{ matrix.platform }}
    strategy:
      fail-fast: false
      matrix:
        platform:
          - linux/amd64
          - linux/arm64
    steps:
      - name: Prepare
        run: |
          platform=${{ matrix.platform }}
          echo "PLATFORM_PAIR=${platform//\//-}" >> $GITHUB_ENV
      - name: Checkout
        uses: actions/checkout@v4
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY_IMAGE }}
          flavor: |
            latest=false
          tags: |
            type=raw,value=${{ github.ref_name }}
            type=raw,value=${{ env.LATEST_TAG == 'true' && 'latest' || '' }}
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}
      - name: Build and push by digest
        id: build
        uses: docker/build-push-action@v6
        with:
          context: ./web
          file: ./web/Dockerfile
          platforms: ${{ matrix.platform }}
          push: true
          build-args: |
            ONYX_VERSION=${{ github.ref_name }}
            NODE_OPTIONS=--max-old-space-size=8192
          labels: ${{ steps.meta.outputs.labels }}
          # Push by digest only; tagging happens in the `merge` job below.
          outputs: type=image,name=${{ env.REGISTRY_IMAGE }},push-by-digest=true,name-canonical=true,push=true
          cache-from: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/web-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
          cache-to: type=s3,prefix=cache/${{ github.repository }}/${{ env.DEPLOYMENT }}/web-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
        # no-cache needed due to weird interactions with the builds for different platforms
        # NOTE(rkuo): this may not be true any more with the proper cache prefixing by architecture - currently testing with it off
      - name: Export digest
        run: |
          mkdir -p /tmp/digests
          digest="${{ steps.build.outputs.digest }}"
          touch "/tmp/digests/${digest#sha256:}"
      - name: Upload digest
        uses: actions/upload-artifact@v4
        with:
          name: web-digests-${{ env.PLATFORM_PAIR }}-${{ github.run_id }}
          path: /tmp/digests/*
          if-no-files-found: error
          retention-days: 1

  merge:
    # BUG FIX: `precheck` must be listed in `needs` — a job's `needs.<id>`
    # context only contains jobs it depends on, so without it the `if`
    # expression below evaluates to an empty string and `merge` is always
    # skipped, leaving the multi-arch manifest unpublished.
    needs:
      - build
      - precheck
    if: needs.precheck.outputs.should-run == 'true'
    runs-on: ubuntu-latest
    steps:
      - name: Download digests
        uses: actions/download-artifact@v4
        with:
          path: /tmp/digests
          pattern: web-digests-*-${{ github.run_id }}
          merge-multiple: true
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Docker meta
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY_IMAGE }}
          flavor: |
            latest=false
          tags: |
            type=raw,value=${{ github.ref_name }}
            type=raw,value=${{ env.LATEST_TAG == 'true' && 'latest' || '' }}
      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}
      - name: Create manifest list and push
        working-directory: /tmp/digests
        run: |
          docker buildx imagetools create $(jq -cr '.tags | map("-t " + .) | join(" ")' <<< "$DOCKER_METADATA_OUTPUT_JSON") \
            $(printf '${{ env.REGISTRY_IMAGE }}@sha256:%s ' *)
      - name: Inspect image
        run: |
          docker buildx imagetools inspect ${{ env.REGISTRY_IMAGE }}:${{ steps.meta.outputs.version }}
      # trivy has their own rate limiting issues causing this action to flake
      # we worked around it by hardcoding to different db repos in env
      # can re-enable when they figure it out
      # https://github.com/aquasecurity/trivy/discussions/7538
      # https://github.com/aquasecurity/trivy-action/issues/389
      - name: Run Trivy vulnerability scanner
        uses: aquasecurity/trivy-action@master
        env:
          TRIVY_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-db:2"
          TRIVY_JAVA_DB_REPOSITORY: "public.ecr.aws/aquasecurity/trivy-java-db:1"
          TRIVY_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          TRIVY_PASSWORD: ${{ secrets.DOCKER_TOKEN }}
        with:
          image-ref: docker.io/${{ env.REGISTRY_IMAGE }}:${{ github.ref_name }}
          severity: "CRITICAL,HIGH"

View File

@@ -1,51 +0,0 @@
# This workflow is set up to be manually triggered via the GitHub Action tab.
# Given a version, it will tag those backend and webserver images as "beta".
name: Tag Beta Version
on:
workflow_dispatch:
inputs:
version:
description: "The version (ie v1.0.0-beta.0) to tag as beta"
required: true
permissions:
contents: read
jobs:
tag:
# See https://runs-on.com/runners/linux/
# use a lower powered instance since this just does i/o to docker hub
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}-tag"]
timeout-minutes: 45
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Enable Docker CLI experimental features
run: echo "DOCKER_CLI_EXPERIMENTAL=enabled" >> $GITHUB_ENV
- name: Pull, Tag and Push Web Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-web-server:beta onyxdotapp/onyx-web-server:${VERSION}
- name: Pull, Tag and Push API Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-backend:beta onyxdotapp/onyx-backend:${VERSION}
- name: Pull, Tag and Push Model Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-model-server:beta onyxdotapp/onyx-model-server:${VERSION}

View File

@@ -10,21 +10,17 @@ on:
description: "The version (ie v0.0.1) to tag as latest"
required: true
permissions:
contents: read
jobs:
tag:
# See https://runs-on.com/runners/linux/
# use a lower powered instance since this just does i/o to docker hub
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}-tag"]
timeout-minutes: 45
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v1
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
@@ -33,19 +29,9 @@ jobs:
run: echo "DOCKER_CLI_EXPERIMENTAL=enabled" >> $GITHUB_ENV
- name: Pull, Tag and Push Web Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-web-server:latest onyxdotapp/onyx-web-server:${VERSION}
docker buildx imagetools create -t onyxdotapp/onyx-web-server:latest onyxdotapp/onyx-web-server:${{ github.event.inputs.version }}
- name: Pull, Tag and Push API Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-backend:latest onyxdotapp/onyx-backend:${VERSION}
- name: Pull, Tag and Push Model Server Image
env:
VERSION: ${{ github.event.inputs.version }}
run: |
docker buildx imagetools create -t onyxdotapp/onyx-model-server:latest onyxdotapp/onyx-model-server:${VERSION}
docker buildx imagetools create -t onyxdotapp/onyx-backend:latest onyxdotapp/onyx-backend:${{ github.event.inputs.version }}

View File

@@ -1,55 +0,0 @@
name: Release Onyx Helm Charts
on:
push:
branches:
- main
permissions: write-all
jobs:
release:
permissions:
contents: write
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
fetch-depth: 0
persist-credentials: false
- name: Install Helm CLI
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # ratchet:azure/setup-helm@v4
with:
version: v3.12.1
- name: Add required Helm repositories
run: |
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add onyx-vespa https://onyx-dot-app.github.io/vespa-helm-charts
helm repo add cloudnative-pg https://cloudnative-pg.github.io/charts
helm repo add ot-container-kit https://ot-container-kit.github.io/helm-charts
helm repo add minio https://charts.min.io/
helm repo add code-interpreter https://onyx-dot-app.github.io/code-interpreter/
helm repo update
- name: Build chart dependencies
run: |
set -euo pipefail
for chart_dir in deployment/helm/charts/*; do
if [ -f "$chart_dir/Chart.yaml" ]; then
echo "Building dependencies for $chart_dir"
helm dependency build "$chart_dir"
fi
done
- name: Publish Helm charts to gh-pages
uses: stefanprodan/helm-gh-pages@0ad2bb377311d61ac04ad9eb6f252fb68e207260 # ratchet:stefanprodan/helm-gh-pages@v1.7.0
with:
token: ${{ secrets.GITHUB_TOKEN }}
charts_dir: deployment/helm/charts
branch: gh-pages
commit_username: ${{ github.actor }}
commit_email: ${{ github.actor }}@users.noreply.github.com

View File

@@ -0,0 +1,171 @@
# This workflow is intended to be manually triggered via the GitHub Action tab.
# Given a hotfix branch, it will attempt to open a PR to all release branches and
# by default auto merge them
name: Hotfix release branches
on:
workflow_dispatch:
inputs:
hotfix_commit:
description: "Hotfix commit hash"
required: true
hotfix_suffix:
description: "Hotfix branch suffix (e.g. hotfix/v0.8-{suffix})"
required: true
release_branch_pattern:
description: "Release branch pattern (regex)"
required: true
default: "release/.*"
auto_merge:
description: "Automatically merge the hotfix PRs"
required: true
type: choice
default: "true"
options:
- true
- false
jobs:
hotfix_release_branches:
permissions: write-all
# See https://runs-on.com/runners/linux/
# use a lower powered instance since this just does i/o to docker hub
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
steps:
# needs RKUO_DEPLOY_KEY for write access to merge PR's
- name: Checkout Repository
uses: actions/checkout@v4
with:
ssh-key: "${{ secrets.RKUO_DEPLOY_KEY }}"
fetch-depth: 0
- name: Set up Git user
run: |
git config user.name "Richard Kuo [bot]"
git config user.email "rkuo[bot]@onyx.app"
- name: Fetch All Branches
run: |
git fetch --all --prune
- name: Verify Hotfix Commit Exists
run: |
git rev-parse --verify "${{ github.event.inputs.hotfix_commit }}" || { echo "Commit not found: ${{ github.event.inputs.hotfix_commit }}"; exit 1; }
- name: Get Release Branches
id: get_release_branches
run: |
BRANCHES=$(git branch -r | grep -E "${{ github.event.inputs.release_branch_pattern }}" | sed 's|origin/||' | tr -d ' ')
if [ -z "$BRANCHES" ]; then
echo "No release branches found matching pattern '${{ github.event.inputs.release_branch_pattern }}'."
exit 1
fi
echo "Found release branches:"
echo "$BRANCHES"
# Join the branches into a single line separated by commas
BRANCHES_JOINED=$(echo "$BRANCHES" | tr '\n' ',' | sed 's/,$//')
# Set the branches as an output
echo "branches=$BRANCHES_JOINED" >> $GITHUB_OUTPUT
# notes on all the vagaries of wiring up automated PR's
# https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs
# we must use a custom token for GH_TOKEN to trigger the subsequent PR checks
- name: Create and Merge Pull Requests to Matching Release Branches
env:
HOTFIX_COMMIT: ${{ github.event.inputs.hotfix_commit }}
HOTFIX_SUFFIX: ${{ github.event.inputs.hotfix_suffix }}
AUTO_MERGE: ${{ github.event.inputs.auto_merge }}
GH_TOKEN: ${{ secrets.RKUO_PERSONAL_ACCESS_TOKEN }}
run: |
# Get the branches from the previous step
BRANCHES="${{ steps.get_release_branches.outputs.branches }}"
# Convert BRANCHES to an array
IFS=$',' read -ra BRANCH_ARRAY <<< "$BRANCHES"
# Loop through each release branch and create and merge a PR
for RELEASE_BRANCH in "${BRANCH_ARRAY[@]}"; do
echo "Processing $RELEASE_BRANCH..."
# Parse out the release version by removing "release/" from the branch name
RELEASE_VERSION=${RELEASE_BRANCH#release/}
echo "Release version parsed: $RELEASE_VERSION"
HOTFIX_BRANCH="hotfix/${RELEASE_VERSION}-${HOTFIX_SUFFIX}"
echo "Creating PR from $HOTFIX_BRANCH to $RELEASE_BRANCH"
# Checkout the release branch
echo "Checking out $RELEASE_BRANCH"
git checkout "$RELEASE_BRANCH"
# Create the new hotfix branch
if git rev-parse --verify "$HOTFIX_BRANCH" >/dev/null 2>&1; then
echo "Hotfix branch $HOTFIX_BRANCH already exists. Skipping branch creation."
else
echo "Branching $RELEASE_BRANCH to $HOTFIX_BRANCH"
git checkout -b "$HOTFIX_BRANCH"
fi
# Check if the hotfix commit is a merge commit
if git rev-list --merges -n 1 "$HOTFIX_COMMIT" >/dev/null 2>&1; then
# -m 1 uses the target branch as the base (which is what we want)
echo "Hotfix commit $HOTFIX_COMMIT is a merge commit, using -m 1 for cherry-pick"
CHERRY_PICK_CMD="git cherry-pick -m 1 $HOTFIX_COMMIT"
else
CHERRY_PICK_CMD="git cherry-pick $HOTFIX_COMMIT"
fi
# Perform the cherry-pick
echo "Executing: $CHERRY_PICK_CMD"
eval "$CHERRY_PICK_CMD"
if [ $? -ne 0 ]; then
echo "Cherry-pick failed for $HOTFIX_COMMIT on $HOTFIX_BRANCH. Aborting..."
git cherry-pick --abort
continue
fi
# Push the hotfix branch to the remote
echo "Pushing $HOTFIX_BRANCH..."
git push origin "$HOTFIX_BRANCH"
echo "Hotfix branch $HOTFIX_BRANCH created and pushed."
# Check if PR already exists
EXISTING_PR=$(gh pr list --head "$HOTFIX_BRANCH" --base "$RELEASE_BRANCH" --state open --json number --jq '.[0].number')
if [ -n "$EXISTING_PR" ]; then
echo "An open PR already exists: #$EXISTING_PR. Skipping..."
continue
fi
# Create a new PR and capture the output
PR_OUTPUT=$(gh pr create --title "Merge $HOTFIX_BRANCH into $RELEASE_BRANCH" \
--body "Automated PR to merge \`$HOTFIX_BRANCH\` into \`$RELEASE_BRANCH\`." \
--head "$HOTFIX_BRANCH" --base "$RELEASE_BRANCH")
# Extract the URL from the output
PR_URL=$(echo "$PR_OUTPUT" | grep -Eo 'https://github.com/[^ ]+')
echo "Pull request created: $PR_URL"
# Extract PR number from URL
PR_NUMBER=$(basename "$PR_URL")
echo "Pull request created: $PR_NUMBER"
if [ "$AUTO_MERGE" == "true" ]; then
echo "Attempting to merge pull request #$PR_NUMBER"
# Attempt to merge the PR
gh pr merge "$PR_NUMBER" --merge --auto --delete-branch
if [ $? -eq 0 ]; then
echo "Pull request #$PR_NUMBER merged successfully."
else
# Optionally, handle the error or continue
echo "Failed to merge pull request #$PR_NUMBER."
fi
fi
done

View File

@@ -7,13 +7,12 @@ permissions:
# contents: write # only for delete-branch option
issues: write
pull-requests: write
jobs:
stale:
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # ratchet:actions/stale@v10
- uses: actions/stale@v9
with:
stale-issue-message: 'This issue is stale because it has been open 75 days with no activity. Remove stale label or comment or this will be closed in 15 days.'
stale-pr-message: 'This PR is stale because it has been open 75 days with no activity. Remove stale label or comment or this will be closed in 15 days.'
@@ -21,3 +20,4 @@ jobs:
close-pr-message: 'This PR was closed because it has been stalled for 90 days with no activity.'
days-before-stale: 75
# days-before-close: 90 # uncomment after we test stale behavior

View File

@@ -15,25 +15,19 @@ on:
permissions:
actions: read
contents: read
security-events: write
jobs:
scan-licenses:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}-scan-licenses"]
timeout-minutes: 45
permissions:
actions: read
contents: read
security-events: write
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}"]
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # ratchet:actions/setup-python@v6
uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: 'pip'
@@ -41,7 +35,7 @@ jobs:
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
- name: Get explicit and transitive dependencies
run: |
python -m pip install --upgrade pip
@@ -49,30 +43,28 @@ jobs:
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
pip install --retries 5 --timeout 30 -r backend/requirements/model_server.txt
pip freeze > requirements-all.txt
- name: Check python
id: license_check_report
uses: pilosus/action-pip-license-checker@e909b0226ff49d3235c99c4585bc617f49fff16a # ratchet:pilosus/action-pip-license-checker@v3
uses: pilosus/action-pip-license-checker@v2
with:
requirements: 'requirements-all.txt'
fail: 'Copyleft'
exclude: '(?i)^(pylint|aio[-_]*).*'
- name: Print report
if: always()
env:
REPORT: ${{ steps.license_check_report.outputs.report }}
run: echo "$REPORT"
run: echo "${{ steps.license_check_report.outputs.report }}"
- name: Install npm dependencies
working-directory: ./web
run: npm ci
# be careful enabling the sarif and upload as it may spam the security tab
# with a huge amount of items. Work out the issues before enabling upload.
# with a huge amount of items. Work out the issues before enabling upload.
# - name: Run Trivy vulnerability scanner in repo mode
# if: always()
# uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # ratchet:aquasecurity/trivy-action@0.33.1
# uses: aquasecurity/trivy-action@0.29.0
# with:
# scan-type: fs
# scan-ref: .
@@ -81,7 +73,7 @@ jobs:
# severity: HIGH,CRITICAL
# # format: sarif
# # output: trivy-results.sarif
#
#
# # - name: Upload Trivy scan results to GitHub Security tab
# # uses: github/codeql-action/upload-sarif@v3
# # with:
@@ -89,15 +81,14 @@ jobs:
scan-trivy:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}-scan-trivy"]
timeout-minutes: 45
runs-on: [runs-on,runner=2cpu-linux-x64,"run-id=${{ github.run_id }}"]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v3
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
@@ -107,7 +98,7 @@ jobs:
run: docker pull onyxdotapp/onyx-backend:latest
- name: Run Trivy vulnerability scanner on backend
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # ratchet:aquasecurity/trivy-action@0.33.1
uses: aquasecurity/trivy-action@0.29.0
env:
TRIVY_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-db:2'
TRIVY_JAVA_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-java-db:1'
@@ -121,9 +112,9 @@ jobs:
# Web server
- name: Pull web server docker image
run: docker pull onyxdotapp/onyx-web-server:latest
- name: Run Trivy vulnerability scanner on web server
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # ratchet:aquasecurity/trivy-action@0.33.1
uses: aquasecurity/trivy-action@0.29.0
env:
TRIVY_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-db:2'
TRIVY_JAVA_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-java-db:1'
@@ -139,7 +130,7 @@ jobs:
run: docker pull onyxdotapp/onyx-model-server:latest
- name: Run Trivy vulnerability scanner
uses: aquasecurity/trivy-action@b6643a29fecd7f34b3597bc6acb0a98b03d33ff8 # ratchet:aquasecurity/trivy-action@0.33.1
uses: aquasecurity/trivy-action@0.29.0
env:
TRIVY_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-db:2'
TRIVY_JAVA_DB_REPOSITORY: 'public.ecr.aws/aquasecurity/trivy-java-db:1'
@@ -148,4 +139,4 @@ jobs:
scanners: license
severity: HIGH,CRITICAL
vuln-type: library
exit-code: 0
exit-code: 0

View File

@@ -0,0 +1,124 @@
name: Backport on Merge
# Note this workflow does not trigger the builds, be sure to manually tag the branches to trigger the builds
on:
pull_request:
types: [closed] # Later we check for merge so only PRs that go in can get backported
permissions:
contents: write
actions: write
jobs:
backport:
if: github.event.pull_request.merged == true
runs-on: ubuntu-latest
env:
GITHUB_TOKEN: ${{ secrets.YUHONG_GH_ACTIONS }}
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
ssh-key: "${{ secrets.RKUO_DEPLOY_KEY }}"
fetch-depth: 0
- name: Set up Git user
run: |
git config user.name "Richard Kuo [bot]"
git config user.email "rkuo[bot]@onyx.app"
git fetch --prune
- name: Check for Backport Checkbox
id: checkbox-check
run: |
PR_BODY="${{ github.event.pull_request.body }}"
if [[ "$PR_BODY" == *"[x] This PR should be backported"* ]]; then
echo "backport=true" >> $GITHUB_OUTPUT
else
echo "backport=false" >> $GITHUB_OUTPUT
fi
- name: List and sort release branches
id: list-branches
run: |
git fetch --all --tags
BRANCHES=$(git for-each-ref --format='%(refname:short)' refs/remotes/origin/release/* | sed 's|origin/release/||' | sort -Vr)
BETA=$(echo "$BRANCHES" | head -n 1)
STABLE=$(echo "$BRANCHES" | head -n 2 | tail -n 1)
echo "beta=release/$BETA" >> $GITHUB_OUTPUT
echo "stable=release/$STABLE" >> $GITHUB_OUTPUT
# Fetch latest tags for beta and stable
LATEST_BETA_TAG=$(git tag -l "v[0-9]*.[0-9]*.[0-9]*-beta.[0-9]*" | grep -E "^v[0-9]+\.[0-9]+\.[0-9]+-beta\.[0-9]+$" | grep -v -- "-cloud" | sort -Vr | head -n 1)
LATEST_STABLE_TAG=$(git tag -l "v[0-9]*.[0-9]*.[0-9]*" | grep -E "^v[0-9]+\.[0-9]+\.[0-9]+$" | sort -Vr | head -n 1)
# Handle case where no beta tags exist
if [[ -z "$LATEST_BETA_TAG" ]]; then
NEW_BETA_TAG="v1.0.0-beta.1"
else
NEW_BETA_TAG=$(echo $LATEST_BETA_TAG | awk -F '[.-]' '{print $1 "." $2 "." $3 "-beta." ($NF+1)}')
fi
# Increment latest stable tag
NEW_STABLE_TAG=$(echo $LATEST_STABLE_TAG | awk -F '.' '{print $1 "." $2 "." ($3+1)}')
echo "latest_beta_tag=$LATEST_BETA_TAG" >> $GITHUB_OUTPUT
echo "latest_stable_tag=$LATEST_STABLE_TAG" >> $GITHUB_OUTPUT
echo "new_beta_tag=$NEW_BETA_TAG" >> $GITHUB_OUTPUT
echo "new_stable_tag=$NEW_STABLE_TAG" >> $GITHUB_OUTPUT
- name: Echo branch and tag information
run: |
echo "Beta branch: ${{ steps.list-branches.outputs.beta }}"
echo "Stable branch: ${{ steps.list-branches.outputs.stable }}"
echo "Latest beta tag: ${{ steps.list-branches.outputs.latest_beta_tag }}"
echo "Latest stable tag: ${{ steps.list-branches.outputs.latest_stable_tag }}"
echo "New beta tag: ${{ steps.list-branches.outputs.new_beta_tag }}"
echo "New stable tag: ${{ steps.list-branches.outputs.new_stable_tag }}"
- name: Trigger Backport
if: steps.checkbox-check.outputs.backport == 'true'
run: |
set -e
echo "Backporting to beta ${{ steps.list-branches.outputs.beta }} and stable ${{ steps.list-branches.outputs.stable }}"
# Echo the merge commit SHA
echo "Merge commit SHA: ${{ github.event.pull_request.merge_commit_sha }}"
# Fetch all history for all branches and tags
git fetch --prune
# Reset and prepare the beta branch
git checkout ${{ steps.list-branches.outputs.beta }}
echo "Last 5 commits on beta branch:"
git log -n 5 --pretty=format:"%H"
echo "" # Newline for formatting
# Cherry-pick the merge commit from the merged PR
git cherry-pick -m 1 ${{ github.event.pull_request.merge_commit_sha }} || {
echo "Cherry-pick to beta failed due to conflicts."
exit 1
}
# Create new beta branch/tag
git tag ${{ steps.list-branches.outputs.new_beta_tag }}
# Push the changes and tag to the beta branch using PAT
git push origin ${{ steps.list-branches.outputs.beta }}
git push origin ${{ steps.list-branches.outputs.new_beta_tag }}
# Reset and prepare the stable branch
git checkout ${{ steps.list-branches.outputs.stable }}
echo "Last 5 commits on stable branch:"
git log -n 5 --pretty=format:"%H"
echo "" # Newline for formatting
# Cherry-pick the merge commit from the merged PR
git cherry-pick -m 1 ${{ github.event.pull_request.merge_commit_sha }} || {
echo "Cherry-pick to stable failed due to conflicts."
exit 1
}
# Create new stable branch/tag
git tag ${{ steps.list-branches.outputs.new_stable_tag }}
# Push the changes and tag to the stable branch using PAT
git push origin ${{ steps.list-branches.outputs.stable }}
git push origin ${{ steps.list-branches.outputs.new_stable_tag }}

View File

@@ -1,61 +1,35 @@
name: External Dependency Unit Tests
concurrency:
group: External-Dependency-Unit-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
pull_request:
branches: [main]
push:
tags:
- "v*.*.*"
permissions:
contents: read
env:
# AWS credentials for S3-specific test
S3_AWS_ACCESS_KEY_ID_FOR_TEST: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
S3_AWS_SECRET_ACCESS_KEY_FOR_TEST: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
# AWS
S3_AWS_ACCESS_KEY_ID: ${{ secrets.S3_AWS_ACCESS_KEY_ID }}
S3_AWS_SECRET_ACCESS_KEY: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
# MinIO
S3_ENDPOINT_URL: "http://localhost:9004"
S3_AWS_ACCESS_KEY_ID: "minioadmin"
S3_AWS_SECRET_ACCESS_KEY: "minioadmin"
# Confluence
CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_TEST_SPACE: ${{ vars.CONFLUENCE_TEST_SPACE }}
CONFLUENCE_TEST_SPACE_URL: ${{ secrets.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_TEST_SPACE: ${{ secrets.CONFLUENCE_TEST_SPACE }}
CONFLUENCE_TEST_PAGE_ID: ${{ secrets.CONFLUENCE_TEST_PAGE_ID }}
CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
CONFLUENCE_IS_CLOUD: ${{ secrets.CONFLUENCE_IS_CLOUD }}
CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }}
# Jira
JIRA_ADMIN_API_TOKEN: ${{ secrets.JIRA_ADMIN_API_TOKEN }}
# LLMs
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
# Code Interpreter
# TODO: debug why this is failing and enable
CODE_INTERPRETER_BASE_URL: http://localhost:8000
jobs:
discover-test-dirs:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
runs-on: ubuntu-latest
outputs:
test-dirs: ${{ steps.set-matrix.outputs.test-dirs }}
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Discover test directories
id: set-matrix
run: |
@@ -65,13 +39,9 @@ jobs:
external-dependency-unit-tests:
needs: discover-test-dirs
# Use larger runner with more resources for Vespa
runs-on:
- runs-on
- runner=2cpu-linux-arm64
- ${{ format('run-id={0}-external-dependency-unit-tests-job-{1}', github.run_id, strategy['job-index']) }}
- extras=s3-cache
timeout-minutes: 45
# See https://runs-on.com/runners/linux/
runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}"]
strategy:
fail-fast: false
matrix:
@@ -79,99 +49,46 @@ jobs:
env:
PYTHONPATH: ./backend
MODEL_SERVER_HOST: "disabled"
DISABLE_TELEMETRY: "true"
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Python and Install Dependencies
uses: ./.github/actions/setup-python-and-install-dependencies
- name: Set up Python
uses: actions/setup-python@v5
with:
requirements: |
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/ee.txt
- name: Setup Playwright
uses: ./.github/actions/setup-playwright
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Create .env file for Docker Compose
- name: Install Dependencies
run: |
cat <<EOF > deployment/docker_compose/.env
CODE_INTERPRETER_BETA_ENABLED=true
DISABLE_TELEMETRY=true
EOF
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
playwright install chromium
playwright install-deps chromium
- name: Set up Standard Dependencies
run: |
cd deployment/docker_compose
docker compose \
-f docker-compose.yml \
-f docker-compose.dev.yml \
up -d \
minio \
relational_db \
cache \
index \
code-interpreter
docker compose -f docker-compose.dev.yml -p onyx-stack up -d minio relational_db cache index
- name: Run migrations
run: |
cd backend
# Run migrations to head
alembic upgrade head
alembic heads --verbose
- name: Run Tests for ${{ matrix.test-dir }}
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
env:
TEST_DIR: ${{ matrix.test-dir }}
run: |
py.test \
-n 8 \
--dist loadfile \
--durations=8 \
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/external_dependency_unit/${TEST_DIR}
- name: Collect Docker logs on failure
if: failure()
run: |
mkdir -p docker-logs
cd deployment/docker_compose
# Get list of running containers
containers=$(docker compose -f docker-compose.yml -f docker-compose.dev.yml ps -q)
# Collect logs from each container
for container in $containers; do
container_name=$(docker inspect --format='{{.Name}}' $container | sed 's/^\///')
echo "Collecting logs from $container_name..."
docker logs $container > ../../docker-logs/${container_name}.log 2>&1
done
cd ../..
echo "Docker logs collected in docker-logs directory"
- name: Upload Docker logs
if: failure()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v5
with:
name: docker-logs-${{ matrix.test-dir }}
path: docker-logs/
retention-days: 7
backend/tests/external_dependency_unit/${{ matrix.test-dir }}

View File

@@ -1,50 +1,37 @@
name: Helm - Lint and Test Charts
concurrency:
group: Helm-Lint-and-Test-Charts-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
pull_request:
branches: [ main ]
push:
tags:
- "v*.*.*"
workflow_dispatch: # Allows manual triggering
permissions:
contents: read
jobs:
helm-chart-check:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,hdd=256,"run-id=${{ github.run_id }}-helm-chart-check"]
timeout-minutes: 45
runs-on: [runs-on,runner=8cpu-linux-x64,hdd=256,"run-id=${{ github.run_id }}"]
# fetch-depth 0 is required for helm/chart-testing-action
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Set up Helm
uses: azure/setup-helm@1a275c3b69536ee54be43f2070a358922e12c8d4 # ratchet:azure/setup-helm@v4.3.1
uses: azure/setup-helm@v4.2.0
with:
version: v3.19.0
version: v3.17.0
- name: Set up chart-testing
uses: helm/chart-testing-action@6ec842c01de15ebb84c8627d2744a0c2f2755c9f # ratchet:helm/chart-testing-action@v2.8.0
uses: helm/chart-testing-action@v2.7.0
# even though we specify chart-dirs in ct.yaml, it isn't used by ct for the list-changed command...
- name: Run chart-testing (list-changed)
id: list-changed
env:
DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
run: |
echo "default_branch: ${DEFAULT_BRANCH}"
changed=$(ct list-changed --remote origin --target-branch ${DEFAULT_BRANCH} --chart-dirs deployment/helm/charts)
echo "default_branch: ${{ github.event.repository.default_branch }}"
changed=$(ct list-changed --remote origin --target-branch ${{ github.event.repository.default_branch }} --chart-dirs deployment/helm/charts)
echo "list-changed output: $changed"
if [[ -n "$changed" ]]; then
echo "changed=true" >> "$GITHUB_OUTPUT"
@@ -54,7 +41,7 @@ jobs:
# - name: Force run chart-testing (list-changed)
# id: list-changed
# run: echo "changed=true" >> $GITHUB_OUTPUT
# lint all charts if any changes were detected
- name: Run chart-testing (lint)
if: steps.list-changed.outputs.changed == 'true'
@@ -64,179 +51,11 @@ jobs:
- name: Create kind cluster
if: steps.list-changed.outputs.changed == 'true'
uses: helm/kind-action@92086f6be054225fa813e0a4b13787fc9088faab # ratchet:helm/kind-action@v1.13.0
uses: helm/kind-action@v1.12.0
- name: Pre-install cluster status check
- name: Run chart-testing (install)
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Pre-install Cluster Status ==="
kubectl get nodes -o wide
kubectl get pods --all-namespaces
kubectl get storageclass
- name: Add Helm repositories and update
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Adding Helm repositories ==="
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo add vespa https://onyx-dot-app.github.io/vespa-helm-charts
helm repo add cloudnative-pg https://cloudnative-pg.github.io/charts
helm repo add ot-container-kit https://ot-container-kit.github.io/helm-charts
helm repo add minio https://charts.min.io/
helm repo add code-interpreter https://onyx-dot-app.github.io/code-interpreter/
helm repo update
- name: Install Redis operator
if: steps.list-changed.outputs.changed == 'true'
shell: bash
run: |
echo "=== Installing redis-operator CRDs ==="
helm upgrade --install redis-operator ot-container-kit/redis-operator \
--namespace redis-operator --create-namespace --wait --timeout 300s
- name: Pre-pull required images
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Pre-pulling required images to avoid timeout ==="
KIND_CLUSTER=$(kubectl config current-context | sed 's/kind-//')
echo "Kind cluster: $KIND_CLUSTER"
IMAGES=(
"ghcr.io/cloudnative-pg/cloudnative-pg:1.27.0"
"quay.io/opstree/redis:v7.0.15"
"docker.io/onyxdotapp/onyx-web-server:latest"
)
for image in "${IMAGES[@]}"; do
echo "Pre-pulling $image"
if docker pull "$image"; then
kind load docker-image "$image" --name "$KIND_CLUSTER" || echo "Failed to load $image into kind"
else
echo "Failed to pull $image"
fi
done
echo "=== Images loaded into Kind cluster ==="
docker exec "$KIND_CLUSTER"-control-plane crictl images | grep -E "(cloudnative-pg|redis|onyx)" || echo "Some images may still be loading..."
- name: Validate chart dependencies
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Validating chart dependencies ==="
cd deployment/helm/charts/onyx
helm dependency update
helm lint .
- name: Run chart-testing (install) with enhanced monitoring
timeout-minutes: 25
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Starting chart installation with monitoring ==="
# Function to monitor cluster state
monitor_cluster() {
while true; do
echo "=== Cluster Status Check at $(date) ==="
# Only show non-running pods to reduce noise
NON_RUNNING_PODS=$(kubectl get pods --all-namespaces --field-selector=status.phase!=Running,status.phase!=Succeeded --no-headers 2>/dev/null | wc -l)
if [ "$NON_RUNNING_PODS" -gt 0 ]; then
echo "Non-running pods:"
kubectl get pods --all-namespaces --field-selector=status.phase!=Running,status.phase!=Succeeded
else
echo "All pods running successfully"
fi
# Only show recent events if there are issues
RECENT_EVENTS=$(kubectl get events --sort-by=.lastTimestamp --all-namespaces --field-selector=type!=Normal 2>/dev/null | tail -5)
if [ -n "$RECENT_EVENTS" ]; then
echo "Recent warnings/errors:"
echo "$RECENT_EVENTS"
fi
sleep 60
done
}
# Start monitoring in background
monitor_cluster &
MONITOR_PID=$!
# Set up cleanup
cleanup() {
echo "=== Cleaning up monitoring process ==="
kill $MONITOR_PID 2>/dev/null || true
echo "=== Final cluster state ==="
kubectl get pods --all-namespaces
kubectl get events --all-namespaces --sort-by=.lastTimestamp | tail -20
}
# Trap cleanup on exit
trap cleanup EXIT
# Run the actual installation with detailed logging
echo "=== Starting ct install ==="
set +e
ct install --all \
--helm-extra-set-args="\
--set=nginx.enabled=false \
--set=minio.enabled=false \
--set=vespa.enabled=false \
--set=slackbot.enabled=false \
--set=postgresql.enabled=true \
--set=postgresql.nameOverride=cloudnative-pg \
--set=postgresql.cluster.storage.storageClass=standard \
--set=redis.enabled=true \
--set=redis.storageSpec.volumeClaimTemplate.spec.storageClassName=standard \
--set=webserver.replicaCount=1 \
--set=api.replicaCount=0 \
--set=inferenceCapability.replicaCount=0 \
--set=indexCapability.replicaCount=0 \
--set=celery_beat.replicaCount=0 \
--set=celery_worker_heavy.replicaCount=0 \
--set=celery_worker_docfetching.replicaCount=0 \
--set=celery_worker_docprocessing.replicaCount=0 \
--set=celery_worker_light.replicaCount=0 \
--set=celery_worker_monitoring.replicaCount=0 \
--set=celery_worker_primary.replicaCount=0 \
--set=celery_worker_user_file_processing.replicaCount=0 \
--set=celery_worker_user_files_indexing.replicaCount=0" \
--helm-extra-args="--timeout 900s --debug" \
--debug --config ct.yaml
CT_EXIT=$?
set -e
if [[ $CT_EXIT -ne 0 ]]; then
echo "ct install failed with exit code $CT_EXIT"
exit $CT_EXIT
else
echo "=== Installation completed successfully ==="
fi
kubectl get pods --all-namespaces
- name: Post-install verification
if: steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Post-install verification ==="
kubectl get pods --all-namespaces
kubectl get services --all-namespaces
# Only show issues if they exist
kubectl describe pods --all-namespaces | grep -A 5 -B 2 "Failed\|Error\|Warning" || echo "No pod issues found"
- name: Cleanup on failure
if: failure() && steps.list-changed.outputs.changed == 'true'
run: |
echo "=== Cleanup on failure ==="
echo "=== Final cluster state ==="
kubectl get pods --all-namespaces
kubectl get events --all-namespaces --sort-by=.lastTimestamp | tail -10
echo "=== Pod descriptions for debugging ==="
kubectl describe pods --all-namespaces | grep -A 10 -B 3 "Failed\|Error\|Warning\|Pending" || echo "No problematic pods found"
echo "=== Recent logs for debugging ==="
kubectl logs --all-namespaces --tail=50 | grep -i "error\|timeout\|failed\|pull" || echo "No error logs found"
echo "=== Helm releases ==="
helm list --all-namespaces
# the following would install only changed charts, but we only have one chart so
run: ct install --all --helm-extra-set-args="--set=nginx.enabled=false" --debug --config ct.yaml
# the following would install only changed charts, but we only have one chart so
# don't worry about that for now
# run: ct install --target-branch ${{ github.event.repository.default_branch }}

View File

@@ -9,452 +9,130 @@ on:
branches:
- main
- "release/**"
push:
tags:
- "v*.*.*"
permissions:
contents: read
env:
# Test Environment Variables
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
CONFLUENCE_TEST_SPACE_URL: ${{ secrets.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }}
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
JIRA_API_TOKEN_SCOPED: ${{ secrets.JIRA_API_TOKEN_SCOPED }}
PERM_SYNC_SHAREPOINT_CLIENT_ID: ${{ secrets.PERM_SYNC_SHAREPOINT_CLIENT_ID }}
PERM_SYNC_SHAREPOINT_PRIVATE_KEY: ${{ secrets.PERM_SYNC_SHAREPOINT_PRIVATE_KEY }}
PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD: ${{ secrets.PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD }}
PERM_SYNC_SHAREPOINT_DIRECTORY_ID: ${{ secrets.PERM_SYNC_SHAREPOINT_DIRECTORY_ID }}
EXA_API_KEY: ${{ secrets.EXA_API_KEY }}
PLATFORM_PAIR: linux-amd64
jobs:
discover-test-dirs:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
outputs:
test-dirs: ${{ steps.set-matrix.outputs.test-dirs }}
integration-tests:
# See https://runs-on.com/runners/linux/
runs-on:
[
runs-on,
runner=32cpu-linux-x64,
disk=large,
"run-id=${{ github.run_id }}",
]
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
persist-credentials: false
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/ee.txt
- run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
pip install --retries 5 --timeout 30 -r backend/requirements/ee.txt
- name: Discover test directories
id: set-matrix
run: |
# Find all leaf-level directories in both test directories
tests_dirs=$(find backend/tests/integration/tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" -exec basename {} \; | sort)
connector_dirs=$(find backend/tests/integration/connector_job_tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" -exec basename {} \; | sort)
# Create JSON array with directory info
all_dirs=""
for dir in $tests_dirs; do
all_dirs="$all_dirs{\"path\":\"tests/$dir\",\"name\":\"tests-$dir\"},"
done
for dir in $connector_dirs; do
all_dirs="$all_dirs{\"path\":\"connector_job_tests/$dir\",\"name\":\"connector-$dir\"},"
done
# Remove trailing comma and wrap in array
all_dirs="[${all_dirs%,}]"
echo "test-dirs=$all_dirs" >> $GITHUB_OUTPUT
build-backend-image:
runs-on: [runs-on, runner=1cpu-linux-arm64, "run-id=${{ github.run_id }}-build-backend-image", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
- name: Generate OpenAPI schema
working-directory: ./backend
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
PYTHONPATH: "."
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
python scripts/onyx_openapi_schema.py --filename generated/openapi.json
- name: Generate OpenAPI Python client
working-directory: ./backend
run: |
docker run --rm \
-v "${{ github.workspace }}/backend/generated:/local" \
openapitools/openapi-generator-cli generate \
-i /local/openapi.json \
-g python \
-o /local/onyx_openapi_client \
--package-name onyx_openapi_client
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v3
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
# tag every docker image with "test" so that we can spin up the correct set
# of images during testing
# We don't need to build the Web Docker image since it's not yet used
# in the integration tests. We have a separate action to verify that it builds
# successfully.
- name: Pull Web Docker image
run: |
docker pull onyxdotapp/onyx-web-server:latest
docker tag onyxdotapp/onyx-web-server:latest onyxdotapp/onyx-web-server:test
# we use the runs-on cache for docker builds
# in conjunction with runs-on runners, it has better speed and unlimited caching
# https://runs-on.com/caching/s3-cache-for-github-actions/
# https://runs-on.com/caching/docker/
# https://github.com/moby/buildkit#s3-cache-experimental
# images are built and run locally for testing purposes. Not pushed.
- name: Build Backend Docker image
uses: ./.github/actions/custom-build-and-push
with:
context: ./backend
file: ./backend/Dockerfile
push: true
tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-backend-test-${{ github.run_id }}
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache
type=registry,ref=onyxdotapp/onyx-backend:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
platforms: linux/amd64
tags: onyxdotapp/onyx-backend:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/backend-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/backend-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
build-model-server-image:
runs-on: [runs-on, runner=1cpu-linux-arm64, "run-id=${{ github.run_id }}-build-model-server-image", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Model Server Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
- name: Build Model Server Docker image
uses: ./.github/actions/custom-build-and-push
with:
context: ./backend
file: ./backend/Dockerfile.model_server
push: true
tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-model-server-test-${{ github.run_id }}
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache
type=registry,ref=onyxdotapp/onyx-model-server:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache,mode=max
platforms: linux/amd64
tags: onyxdotapp/onyx-model-server:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
build-integration-image:
runs-on: [runs-on, runner=2cpu-linux-arm64, "run-id=${{ github.run_id }}-build-integration-image", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
- name: Build integration test Docker image
uses: ./.github/actions/custom-build-and-push
with:
persist-credentials: false
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling openapitools/openapi-generator-cli
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Build and push integration test image with Docker Bake
env:
INTEGRATION_REPOSITORY: ${{ env.RUNS_ON_ECR_CACHE }}
TAG: integration-test-${{ github.run_id }}
CACHE_SUFFIX: ${{ steps.format-branch.outputs.cache-suffix }}
HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
run: |
cd backend && docker buildx bake --push \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA} \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX} \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache \
--set backend.cache-from=type=registry,ref=onyxdotapp/onyx-backend:latest \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA},mode=max \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX},mode=max \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache,mode=max \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA} \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX} \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA},mode=max \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX},mode=max \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache,mode=max \
integration
integration-tests:
needs:
[
discover-test-dirs,
build-backend-image,
build-model-server-image,
build-integration-image,
]
runs-on:
- runs-on
- runner=4cpu-linux-arm64
- ${{ format('run-id={0}-integration-tests-job-{1}', github.run_id, strategy['job-index']) }}
- extras=ecr-cache
timeout-minutes: 45
strategy:
fail-fast: false
matrix:
test-dir: ${{ fromJson(needs.discover-test-dirs.outputs.test-dirs) }}
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
# NOTE: Use pre-ping/null pool to reduce flakiness due to dropped connections
# NOTE: don't need web server for integration tests
- name: Create .env file for Docker Compose
env:
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
cat <<EOF > deployment/docker_compose/.env
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true
AUTH_TYPE=basic
POSTGRES_POOL_PRE_PING=true
POSTGRES_USE_NULL_POOL=true
REQUIRE_EMAIL_VERIFICATION=false
DISABLE_TELEMETRY=true
ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID}
ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:integration-test-model-server-test-${RUN_ID}
INTEGRATION_TESTS_MODE=true
CHECK_TTL_MANAGEMENT_TASK_FREQUENCY_IN_HOURS=0.001
MCP_SERVER_ENABLED=true
EOF
- name: Start Docker containers
run: |
cd deployment/docker_compose
docker compose -f docker-compose.yml -f docker-compose.dev.yml up \
relational_db \
index \
cache \
minio \
api_server \
inference_model_server \
indexing_model_server \
mcp_server \
background \
-d
id: start_docker
- name: Wait for services to be ready
run: |
echo "Starting wait-for-service script..."
wait_for_service() {
local url=$1
local label=$2
local timeout=${3:-300} # default 5 minutes
local start_time
start_time=$(date +%s)
while true; do
local current_time
current_time=$(date +%s)
local elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. ${label} did not become ready in $timeout seconds."
exit 1
fi
local response
response=$(curl -s -o /dev/null -w "%{http_code}" "$url" || echo "curl_error")
if [ "$response" = "200" ]; then
echo "${label} is ready!"
break
elif [ "$response" = "curl_error" ]; then
echo "Curl encountered an error while checking ${label}. Retrying in 5 seconds..."
else
echo "${label} not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
}
wait_for_service "http://localhost:8080/health" "API server"
test_dir="${{ matrix.test-dir.path }}"
if [ "$test_dir" = "tests/mcp" ]; then
wait_for_service "http://localhost:8090/health" "MCP server"
else
echo "Skipping MCP server wait for non-MCP suite: $test_dir"
fi
echo "Finished waiting for services."
- name: Start Mock Services
run: |
cd backend/tests/integration/mock_services
docker compose -f docker-compose.mock-it-services.yml \
-p mock-it-services-stack up -d
- name: Run Integration Tests for ${{ matrix.test-dir.name }}
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3
with:
timeout_minutes: 20
max_attempts: 3
retry_wait_seconds: 10
command: |
echo "Running integration tests for ${{ matrix.test-dir.path }}..."
docker run --rm --network onyx_default \
--name test-runner \
-e POSTGRES_HOST=relational_db \
-e POSTGRES_USER=postgres \
-e POSTGRES_PASSWORD=password \
-e POSTGRES_DB=postgres \
-e DB_READONLY_USER=db_readonly_user \
-e DB_READONLY_PASSWORD=password \
-e POSTGRES_POOL_PRE_PING=true \
-e POSTGRES_USE_NULL_POOL=true \
-e VESPA_HOST=index \
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e MCP_SERVER_HOST=mcp_server \
-e MCP_SERVER_PORT=8090 \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e EXA_API_KEY=${EXA_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e CONFLUENCE_TEST_SPACE_URL=${CONFLUENCE_TEST_SPACE_URL} \
-e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \
-e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \
-e CONFLUENCE_ACCESS_TOKEN_SCOPED=${CONFLUENCE_ACCESS_TOKEN_SCOPED} \
-e JIRA_BASE_URL=${JIRA_BASE_URL} \
-e JIRA_USER_EMAIL=${JIRA_USER_EMAIL} \
-e JIRA_API_TOKEN=${JIRA_API_TOKEN} \
-e JIRA_API_TOKEN_SCOPED=${JIRA_API_TOKEN_SCOPED} \
-e PERM_SYNC_SHAREPOINT_CLIENT_ID=${PERM_SYNC_SHAREPOINT_CLIENT_ID} \
-e PERM_SYNC_SHAREPOINT_PRIVATE_KEY="${PERM_SYNC_SHAREPOINT_PRIVATE_KEY}" \
-e PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD=${PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD} \
-e PERM_SYNC_SHAREPOINT_DIRECTORY_ID=${PERM_SYNC_SHAREPOINT_DIRECTORY_ID} \
-e TEST_WEB_HOSTNAME=test-runner \
-e MOCK_CONNECTOR_SERVER_HOST=mock_connector_server \
-e MOCK_CONNECTOR_SERVER_PORT=8001 \
${{ env.RUNS_ON_ECR_CACHE }}:integration-test-${{ github.run_id }} \
/app/tests/integration/${{ matrix.test-dir.path }}
# ------------------------------------------------------------
# Always gather logs BEFORE "down":
- name: Dump API server logs
if: always()
run: |
cd deployment/docker_compose
docker compose logs --no-color api_server > $GITHUB_WORKSPACE/api_server.log || true
- name: Dump all-container logs (optional)
if: always()
run: |
cd deployment/docker_compose
docker compose logs --no-color > $GITHUB_WORKSPACE/docker-compose.log || true
- name: Upload logs
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
with:
name: docker-all-logs-${{ matrix.test-dir.name }}
path: ${{ github.workspace }}/docker-compose.log
# ------------------------------------------------------------
multitenant-tests:
needs:
[
build-backend-image,
build-model-server-image,
build-integration-image,
]
runs-on: [runs-on, runner=8cpu-linux-arm64, "run-id=${{ github.run_id }}-multitenant-tests", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
context: ./backend
file: ./backend/tests/integration/Dockerfile
platforms: linux/amd64
tags: onyxdotapp/onyx-integration:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/integration-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/integration-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
# Start containers for multi-tenant tests
- name: Start Docker containers for multi-tenant tests
env:
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
cd deployment/docker_compose
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
@@ -462,56 +140,18 @@ jobs:
AUTH_TYPE=cloud \
REQUIRE_EMAIL_VERIFICATION=false \
DISABLE_TELEMETRY=true \
ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID} \
ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:integration-test-model-server-test-${RUN_ID} \
IMAGE_TAG=test \
DEV_MODE=true \
MCP_SERVER_ENABLED=true \
docker compose -f docker-compose.multitenant-dev.yml up \
relational_db \
index \
cache \
minio \
api_server \
inference_model_server \
indexing_model_server \
mcp_server \
background \
-d
docker compose -f docker-compose.multitenant-dev.yml -p onyx-stack up -d
id: start_docker_multi_tenant
- name: Wait for service to be ready (multi-tenant)
run: |
echo "Starting wait-for-service script for multi-tenant..."
docker logs -f onyx-api_server-1 &
start_time=$(date +%s)
timeout=300
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. Service did not become ready in 5 minutes."
exit 1
fi
response=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:8080/health || echo "curl_error")
if [ "$response" = "200" ]; then
echo "Service is ready!"
break
elif [ "$response" = "curl_error" ]; then
echo "Curl encountered an error; retrying..."
else
echo "Service not ready yet (HTTP $response). Retrying in 5 seconds..."
fi
sleep 5
done
echo "Finished waiting for service."
# In practice, `cloud` Auth type would require OAUTH credentials to be set.
- name: Run Multi-Tenant Integration Tests
env:
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
echo "Running multi-tenant integration tests..."
docker run --rm --network onyx_default \
echo "Waiting for 3 minutes to ensure API server is ready..."
sleep 180
echo "Running integration tests..."
docker run --rm --network onyx-stack_default \
--name test-runner \
-e POSTGRES_HOST=relational_db \
-e POSTGRES_USER=postgres \
@@ -523,53 +163,157 @@ jobs:
-e VESPA_HOST=index \
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e MCP_SERVER_HOST=mcp_server \
-e MCP_SERVER_PORT=8090 \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e EXA_API_KEY=${EXA_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e TEST_WEB_HOSTNAME=test-runner \
-e AUTH_TYPE=cloud \
-e MULTI_TENANT=true \
-e SKIP_RESET=true \
-e REQUIRE_EMAIL_VERIFICATION=false \
-e DISABLE_TELEMETRY=true \
-e IMAGE_TAG=test \
-e DEV_MODE=true \
${ECR_CACHE}:integration-test-${RUN_ID} \
onyxdotapp/onyx-integration:test \
/app/tests/integration/multitenant_tests
continue-on-error: true
id: run_multitenant_tests
- name: Dump API server logs (multi-tenant)
if: always()
- name: Check multi-tenant test results
run: |
cd deployment/docker_compose
docker compose -f docker-compose.multitenant-dev.yml logs --no-color api_server > $GITHUB_WORKSPACE/api_server_multitenant.log || true
- name: Dump all-container logs (multi-tenant)
if: always()
run: |
cd deployment/docker_compose
docker compose -f docker-compose.multitenant-dev.yml logs --no-color > $GITHUB_WORKSPACE/docker-compose-multitenant.log || true
- name: Upload logs (multi-tenant)
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
with:
name: docker-all-logs-multitenant
path: ${{ github.workspace }}/docker-compose-multitenant.log
if [ ${{ steps.run_multitenant_tests.outcome }} == 'failure' ]; then
echo "Multi-tenant integration tests failed. Exiting with error."
exit 1
else
echo "All multi-tenant integration tests passed successfully."
fi
- name: Stop multi-tenant Docker containers
run: |
cd deployment/docker_compose
docker compose -f docker-compose.multitenant-dev.yml -p onyx-stack down -v
# NOTE: Use pre-ping/null pool to reduce flakiness due to dropped connections
- name: Start Docker containers
run: |
cd deployment/docker_compose
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
AUTH_TYPE=basic \
POSTGRES_POOL_PRE_PING=true \
POSTGRES_USE_NULL_POOL=true \
REQUIRE_EMAIL_VERIFICATION=false \
DISABLE_TELEMETRY=true \
IMAGE_TAG=test \
INTEGRATION_TESTS_MODE=true \
CHECK_TTL_MANAGEMENT_TASK_FREQUENCY_IN_HOURS=0.001 \
docker compose -f docker-compose.dev.yml -p onyx-stack up -d
id: start_docker
- name: Wait for service to be ready
run: |
echo "Starting wait-for-service script..."
docker logs -f onyx-stack-api_server-1 &
start_time=$(date +%s)
timeout=300 # 5 minutes in seconds
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. Service did not become ready in 5 minutes."
exit 1
fi
# Use curl with error handling to ignore specific exit code 56
response=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:8080/health || echo "curl_error")
if [ "$response" = "200" ]; then
echo "Service is ready!"
break
elif [ "$response" = "curl_error" ]; then
echo "Curl encountered an error, possibly exit code 56. Continuing to retry..."
else
echo "Service not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
echo "Finished waiting for service."
- name: Start Mock Services
run: |
cd backend/tests/integration/mock_services
docker compose -f docker-compose.mock-it-services.yml \
-p mock-it-services-stack up -d
# NOTE: Use pre-ping/null to reduce flakiness due to dropped connections
- name: Run Standard Integration Tests
run: |
echo "Running integration tests..."
docker run --rm --network onyx-stack_default \
--name test-runner \
-e POSTGRES_HOST=relational_db \
-e POSTGRES_USER=postgres \
-e POSTGRES_PASSWORD=password \
-e DB_READONLY_USER=db_readonly_user \
-e DB_READONLY_PASSWORD=password \
-e POSTGRES_DB=postgres \
-e POSTGRES_POOL_PRE_PING=true \
-e POSTGRES_USE_NULL_POOL=true \
-e VESPA_HOST=index \
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e CONFLUENCE_TEST_SPACE_URL=${CONFLUENCE_TEST_SPACE_URL} \
-e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \
-e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \
-e JIRA_BASE_URL=${JIRA_BASE_URL} \
-e JIRA_USER_EMAIL=${JIRA_USER_EMAIL} \
-e JIRA_API_TOKEN=${JIRA_API_TOKEN} \
-e TEST_WEB_HOSTNAME=test-runner \
-e MOCK_CONNECTOR_SERVER_HOST=mock_connector_server \
-e MOCK_CONNECTOR_SERVER_PORT=8001 \
onyxdotapp/onyx-integration:test \
/app/tests/integration/tests \
/app/tests/integration/connector_job_tests
continue-on-error: true
id: run_tests
- name: Check test results
run: |
if [ ${{ steps.run_tests.outcome }} == 'failure' ]; then
echo "Integration tests failed. Exiting with error."
exit 1
else
echo "All integration tests passed successfully."
fi
# ------------------------------------------------------------
# Always gather logs BEFORE "down":
- name: Dump API server logs
if: always()
run: |
cd deployment/docker_compose
docker compose -f docker-compose.multitenant-dev.yml down -v
docker compose -f docker-compose.dev.yml -p onyx-stack logs --no-color api_server > $GITHUB_WORKSPACE/api_server.log || true
required:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
needs: [integration-tests, multitenant-tests]
if: ${{ always() }}
steps:
- name: Check job status
if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
run: exit 1
- name: Dump all-container logs (optional)
if: always()
run: |
cd deployment/docker_compose
docker compose -f docker-compose.dev.yml -p onyx-stack logs --no-color > $GITHUB_WORKSPACE/docker-compose.log || true
- name: Upload logs
if: always()
uses: actions/upload-artifact@v4
with:
name: docker-all-logs
path: ${{ github.workspace }}/docker-compose.log
# ------------------------------------------------------------
- name: Stop Docker containers
if: always()
run: |
cd deployment/docker_compose
docker compose -f docker-compose.dev.yml -p onyx-stack down -v

View File

@@ -1,44 +0,0 @@
name: Run Jest Tests
concurrency:
group: Run-Jest-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
push:
permissions:
contents: read
jobs:
jest-tests:
name: Jest Tests
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Setup node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v4
with:
node-version: 22
cache: "npm"
cache-dependency-path: ./web/package-lock.json
- name: Install node dependencies
working-directory: ./web
run: npm ci
- name: Run Jest tests
working-directory: ./web
run: npm test -- --ci --coverage --maxWorkers=50%
- name: Upload coverage reports
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
with:
name: jest-coverage-${{ github.run_id }}
path: ./web/coverage
retention-days: 7

View File

@@ -1,7 +1,7 @@
name: PR Labeler
on:
pull_request:
pull_request_target:
branches:
- main
types:
@@ -12,11 +12,11 @@ on:
permissions:
contents: read
pull-requests: write
jobs:
validate_pr_title:
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Check PR title for Conventional Commits
env:
@@ -27,7 +27,7 @@ jobs:
echo "::error::❌ Your PR title does not follow the Conventional Commits format.
This check ensures that all pull requests use clear, consistent titles that help automate changelogs and improve project history.
Please update your PR title to follow the Conventional Commits style.
Please update your PR title to follow the Conventional Commits style.
Here is a link to a blog explaining the reason why we've included the Conventional Commits style into our PR titles: https://xfuture-blog.com/working-with-conventional-commits
**Here are some examples of valid PR titles:**

View File

@@ -1,19 +1,12 @@
name: Ensure PR references Linear
concurrency:
group: Ensure-PR-references-Linear-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
pull_request:
types: [opened, edited, reopened, synchronize]
permissions:
contents: read
jobs:
linear-check:
runs-on: ubuntu-latest
timeout-minutes: 45
steps:
- name: Check PR body for Linear link or override
env:

View File

@@ -5,347 +5,176 @@ concurrency:
on:
merge_group:
types: [checks_requested]
push:
tags:
- "v*.*.*"
permissions:
contents: read
pull_request:
branches:
- main
- "release/**"
env:
# Test Environment Variables
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
EXA_API_KEY: ${{ secrets.EXA_API_KEY }}
CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
CONFLUENCE_TEST_SPACE_URL: ${{ secrets.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }}
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
JIRA_API_TOKEN_SCOPED: ${{ secrets.JIRA_API_TOKEN_SCOPED }}
PERM_SYNC_SHAREPOINT_CLIENT_ID: ${{ secrets.PERM_SYNC_SHAREPOINT_CLIENT_ID }}
PERM_SYNC_SHAREPOINT_PRIVATE_KEY: ${{ secrets.PERM_SYNC_SHAREPOINT_PRIVATE_KEY }}
PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD: ${{ secrets.PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD }}
PERM_SYNC_SHAREPOINT_DIRECTORY_ID: ${{ secrets.PERM_SYNC_SHAREPOINT_DIRECTORY_ID }}
PLATFORM_PAIR: linux-amd64
jobs:
discover-test-dirs:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
outputs:
test-dirs: ${{ steps.set-matrix.outputs.test-dirs }}
integration-tests-mit:
# See https://runs-on.com/runners/linux/
runs-on:
[
runs-on,
runner=32cpu-linux-x64,
disk=large,
"run-id=${{ github.run_id }}",
]
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
uses: actions/checkout@v4
- name: Setup Python
uses: actions/setup-python@v5
with:
persist-credentials: false
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
- run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
- name: Discover test directories
id: set-matrix
run: |
# Find all leaf-level directories in both test directories
tests_dirs=$(find backend/tests/integration/tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" -exec basename {} \; | sort)
connector_dirs=$(find backend/tests/integration/connector_job_tests -mindepth 1 -maxdepth 1 -type d ! -name "__pycache__" -exec basename {} \; | sort)
# Create JSON array with directory info
all_dirs=""
for dir in $tests_dirs; do
all_dirs="$all_dirs{\"path\":\"tests/$dir\",\"name\":\"tests-$dir\"},"
done
for dir in $connector_dirs; do
all_dirs="$all_dirs{\"path\":\"connector_job_tests/$dir\",\"name\":\"connector-$dir\"},"
done
# Remove trailing comma and wrap in array
all_dirs="[${all_dirs%,}]"
echo "test-dirs=$all_dirs" >> $GITHUB_OUTPUT
build-backend-image:
runs-on: [runs-on, runner=1cpu-linux-arm64, "run-id=${{ github.run_id }}-build-backend-image", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
- name: Generate OpenAPI schema
working-directory: ./backend
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
PYTHONPATH: "."
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
python scripts/onyx_openapi_schema.py --filename generated/openapi.json
- name: Generate OpenAPI Python client
working-directory: ./backend
run: |
docker run --rm \
-v "${{ github.workspace }}/backend/generated:/local" \
openapitools/openapi-generator-cli generate \
-i /local/openapi.json \
-g python \
-o /local/onyx_openapi_client \
--package-name onyx_openapi_client
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
uses: docker/setup-buildx-action@v3
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
# tag every docker image with "test" so that we can spin up the correct set
# of images during testing
# We don't need to build the Web Docker image since it's not yet used
# in the integration tests. We have a separate action to verify that it builds
# successfully.
- name: Pull Web Docker image
run: |
docker pull onyxdotapp/onyx-web-server:latest
docker tag onyxdotapp/onyx-web-server:latest onyxdotapp/onyx-web-server:test
# we use the runs-on cache for docker builds
# in conjunction with runs-on runners, it has better speed and unlimited caching
# https://runs-on.com/caching/s3-cache-for-github-actions/
# https://runs-on.com/caching/docker/
# https://github.com/moby/buildkit#s3-cache-experimental
# images are built and run locally for testing purposes. Not pushed.
- name: Build Backend Docker image
uses: ./.github/actions/custom-build-and-push
with:
context: ./backend
file: ./backend/Dockerfile
push: true
tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-backend-test-${{ github.run_id }}
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache
type=registry,ref=onyxdotapp/onyx-backend:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
platforms: linux/amd64
tags: onyxdotapp/onyx-backend:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/mit-integration-tests/backend-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/mit-integration-tests/backend-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
build-model-server-image:
runs-on: [runs-on, runner=1cpu-linux-arm64, "run-id=${{ github.run_id }}-build-model-server-image", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Model Server Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
- name: Build Model Server Docker image
uses: ./.github/actions/custom-build-and-push
with:
context: ./backend
file: ./backend/Dockerfile.model_server
push: true
tags: ${{ env.RUNS_ON_ECR_CACHE }}:integration-test-model-server-test-${{ github.run_id }}
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache
type=registry,ref=onyxdotapp/onyx-model-server:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache,mode=max
platforms: linux/amd64
tags: onyxdotapp/onyx-model-server:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/mit-integration-tests/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/mit-integration-tests/model-server-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
build-integration-image:
runs-on: [runs-on, runner=2cpu-linux-arm64, "run-id=${{ github.run_id }}-build-integration-image", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
- name: Build integration test Docker image
uses: ./.github/actions/custom-build-and-push
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling openapitools/openapi-generator-cli
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push integration test image with Docker Bake
env:
INTEGRATION_REPOSITORY: ${{ env.RUNS_ON_ECR_CACHE }}
TAG: integration-test-${{ github.run_id }}
CACHE_SUFFIX: ${{ steps.format-branch.outputs.cache-suffix }}
HEAD_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
run: |
cd backend && docker buildx bake --push \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA} \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX} \
--set backend.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache \
--set backend.cache-from=type=registry,ref=onyxdotapp/onyx-backend:latest \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${HEAD_SHA},mode=max \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache-${CACHE_SUFFIX},mode=max \
--set backend.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:backend-cache,mode=max \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA} \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX} \
--set integration.cache-from=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${HEAD_SHA},mode=max \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache-${CACHE_SUFFIX},mode=max \
--set integration.cache-to=type=registry,ref=${RUNS_ON_ECR_CACHE}:integration-cache,mode=max \
integration
integration-tests-mit:
needs:
[
discover-test-dirs,
build-backend-image,
build-model-server-image,
build-integration-image,
]
runs-on:
- runs-on
- runner=4cpu-linux-arm64
- ${{ format('run-id={0}-integration-tests-mit-job-{1}', github.run_id, strategy['job-index']) }}
- extras=ecr-cache
timeout-minutes: 45
strategy:
fail-fast: false
matrix:
test-dir: ${{ fromJson(needs.discover-test-dirs.outputs.test-dirs) }}
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
context: ./backend
file: ./backend/tests/integration/Dockerfile
platforms: linux/amd64
tags: onyxdotapp/onyx-integration:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/mit-integration-tests/integration-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/mit-integration-tests/integration-${{ env.PLATFORM_PAIR }}/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
# NOTE: Use pre-ping/null pool to reduce flakiness due to dropped connections
# NOTE: don't need web server for integration tests
- name: Create .env file for Docker Compose
env:
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
cat <<EOF > deployment/docker_compose/.env
AUTH_TYPE=basic
POSTGRES_POOL_PRE_PING=true
POSTGRES_USE_NULL_POOL=true
REQUIRE_EMAIL_VERIFICATION=false
DISABLE_TELEMETRY=true
ONYX_BACKEND_IMAGE=${ECR_CACHE}:integration-test-backend-test-${RUN_ID}
ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:integration-test-model-server-test-${RUN_ID}
INTEGRATION_TESTS_MODE=true
MCP_SERVER_ENABLED=true
EOF
- name: Start Docker containers
run: |
cd deployment/docker_compose
docker compose -f docker-compose.yml -f docker-compose.dev.yml up \
relational_db \
index \
cache \
minio \
api_server \
inference_model_server \
indexing_model_server \
mcp_server \
background \
-d
AUTH_TYPE=basic \
POSTGRES_POOL_PRE_PING=true \
POSTGRES_USE_NULL_POOL=true \
REQUIRE_EMAIL_VERIFICATION=false \
DISABLE_TELEMETRY=true \
IMAGE_TAG=test \
INTEGRATION_TESTS_MODE=true \
docker compose -f docker-compose.dev.yml -p onyx-stack up -d
id: start_docker
- name: Wait for services to be ready
- name: Wait for service to be ready
run: |
echo "Starting wait-for-service script..."
wait_for_service() {
local url=$1
local label=$2
local timeout=${3:-300} # default 5 minutes
local start_time
start_time=$(date +%s)
docker logs -f onyx-stack-api_server-1 &
while true; do
local current_time
current_time=$(date +%s)
local elapsed_time=$((current_time - start_time))
start_time=$(date +%s)
timeout=300 # 5 minutes in seconds
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. ${label} did not become ready in $timeout seconds."
exit 1
fi
local response
response=$(curl -s -o /dev/null -w "%{http_code}" "$url" || echo "curl_error")
if [ "$response" = "200" ]; then
echo "${label} is ready!"
break
elif [ "$response" = "curl_error" ]; then
echo "Curl encountered an error while checking ${label}. Retrying in 5 seconds..."
else
echo "${label} not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
}
wait_for_service "http://localhost:8080/health" "API server"
test_dir="${{ matrix.test-dir.path }}"
if [ "$test_dir" = "tests/mcp" ]; then
wait_for_service "http://localhost:8090/health" "MCP server"
else
echo "Skipping MCP server wait for non-MCP suite: $test_dir"
fi
echo "Finished waiting for services."
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. Service did not become ready in 5 minutes."
exit 1
fi
# Use curl with error handling to ignore specific exit code 56
response=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:8080/health || echo "curl_error")
if [ "$response" = "200" ]; then
echo "Service is ready!"
break
elif [ "$response" = "curl_error" ]; then
echo "Curl encountered an error, possibly exit code 56. Continuing to retry..."
else
echo "Service not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
echo "Finished waiting for service."
- name: Start Mock Services
run: |
@@ -354,49 +183,47 @@ jobs:
-p mock-it-services-stack up -d
# NOTE: Use pre-ping/null to reduce flakiness due to dropped connections
- name: Run Integration Tests for ${{ matrix.test-dir.name }}
uses: nick-fields/retry@ce71cc2ab81d554ebbe88c79ab5975992d79ba08 # ratchet:nick-fields/retry@v3
with:
timeout_minutes: 20
max_attempts: 3
retry_wait_seconds: 10
command: |
echo "Running integration tests for ${{ matrix.test-dir.path }}..."
docker run --rm --network onyx_default \
--name test-runner \
-e POSTGRES_HOST=relational_db \
-e POSTGRES_USER=postgres \
-e POSTGRES_PASSWORD=password \
-e POSTGRES_DB=postgres \
-e DB_READONLY_USER=db_readonly_user \
-e DB_READONLY_PASSWORD=password \
-e POSTGRES_POOL_PRE_PING=true \
-e POSTGRES_USE_NULL_POOL=true \
-e VESPA_HOST=index \
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e MCP_SERVER_HOST=mcp_server \
-e MCP_SERVER_PORT=8090 \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e EXA_API_KEY=${EXA_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e CONFLUENCE_TEST_SPACE_URL=${CONFLUENCE_TEST_SPACE_URL} \
-e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \
-e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \
-e CONFLUENCE_ACCESS_TOKEN_SCOPED=${CONFLUENCE_ACCESS_TOKEN_SCOPED} \
-e JIRA_BASE_URL=${JIRA_BASE_URL} \
-e JIRA_USER_EMAIL=${JIRA_USER_EMAIL} \
-e JIRA_API_TOKEN=${JIRA_API_TOKEN} \
-e JIRA_API_TOKEN_SCOPED=${JIRA_API_TOKEN_SCOPED} \
-e PERM_SYNC_SHAREPOINT_CLIENT_ID=${PERM_SYNC_SHAREPOINT_CLIENT_ID} \
-e PERM_SYNC_SHAREPOINT_PRIVATE_KEY="${PERM_SYNC_SHAREPOINT_PRIVATE_KEY}" \
-e PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD=${PERM_SYNC_SHAREPOINT_CERTIFICATE_PASSWORD} \
-e PERM_SYNC_SHAREPOINT_DIRECTORY_ID=${PERM_SYNC_SHAREPOINT_DIRECTORY_ID} \
-e TEST_WEB_HOSTNAME=test-runner \
-e MOCK_CONNECTOR_SERVER_HOST=mock_connector_server \
-e MOCK_CONNECTOR_SERVER_PORT=8001 \
${{ env.RUNS_ON_ECR_CACHE }}:integration-test-${{ github.run_id }} \
/app/tests/integration/${{ matrix.test-dir.path }}
- name: Run Standard Integration Tests
run: |
echo "Running integration tests..."
docker run --rm --network onyx-stack_default \
--name test-runner \
-e POSTGRES_HOST=relational_db \
-e POSTGRES_USER=postgres \
-e POSTGRES_PASSWORD=password \
-e POSTGRES_DB=postgres \
-e DB_READONLY_USER=db_readonly_user \
-e DB_READONLY_PASSWORD=password \
-e POSTGRES_POOL_PRE_PING=true \
-e POSTGRES_USE_NULL_POOL=true \
-e VESPA_HOST=index \
-e REDIS_HOST=cache \
-e API_SERVER_HOST=api_server \
-e OPENAI_API_KEY=${OPENAI_API_KEY} \
-e SLACK_BOT_TOKEN=${SLACK_BOT_TOKEN} \
-e CONFLUENCE_TEST_SPACE_URL=${CONFLUENCE_TEST_SPACE_URL} \
-e CONFLUENCE_USER_NAME=${CONFLUENCE_USER_NAME} \
-e CONFLUENCE_ACCESS_TOKEN=${CONFLUENCE_ACCESS_TOKEN} \
-e JIRA_BASE_URL=${JIRA_BASE_URL} \
-e JIRA_USER_EMAIL=${JIRA_USER_EMAIL} \
-e JIRA_API_TOKEN=${JIRA_API_TOKEN} \
-e TEST_WEB_HOSTNAME=test-runner \
-e MOCK_CONNECTOR_SERVER_HOST=mock_connector_server \
-e MOCK_CONNECTOR_SERVER_PORT=8001 \
onyxdotapp/onyx-integration:test \
/app/tests/integration/tests \
/app/tests/integration/connector_job_tests
continue-on-error: true
id: run_tests
- name: Check test results
run: |
if [ ${{ steps.run_tests.outcome }} == 'failure' ]; then
echo "Integration tests failed. Exiting with error."
exit 1
else
echo "All integration tests passed successfully."
fi
# ------------------------------------------------------------
# Always gather logs BEFORE "down":
@@ -404,30 +231,24 @@ jobs:
if: always()
run: |
cd deployment/docker_compose
docker compose logs --no-color api_server > $GITHUB_WORKSPACE/api_server.log || true
docker compose -f docker-compose.dev.yml -p onyx-stack logs --no-color api_server > $GITHUB_WORKSPACE/api_server.log || true
- name: Dump all-container logs (optional)
if: always()
run: |
cd deployment/docker_compose
docker compose logs --no-color > $GITHUB_WORKSPACE/docker-compose.log || true
docker compose -f docker-compose.dev.yml -p onyx-stack logs --no-color > $GITHUB_WORKSPACE/docker-compose.log || true
- name: Upload logs
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
uses: actions/upload-artifact@v4
with:
name: docker-all-logs-${{ matrix.test-dir.name }}
name: docker-all-logs
path: ${{ github.workspace }}/docker-compose.log
# ------------------------------------------------------------
required:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
needs: [integration-tests-mit]
if: ${{ always() }}
steps:
- name: Check job status
if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
run: exit 1
- name: Stop Docker containers
if: always()
run: |
cd deployment/docker_compose
docker compose -f docker-compose.dev.yml -p onyx-stack down -v

View File

@@ -3,302 +3,134 @@ concurrency:
group: Run-Playwright-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
push:
permissions:
contents: read
on: push
env:
# Test Environment Variables
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
GEN_AI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
EXA_API_KEY: ${{ secrets.EXA_API_KEY }}
# for federated slack tests
SLACK_CLIENT_ID: ${{ secrets.SLACK_CLIENT_ID }}
SLACK_CLIENT_SECRET: ${{ secrets.SLACK_CLIENT_SECRET }}
# for MCP Oauth tests
MCP_OAUTH_CLIENT_ID: ${{ secrets.MCP_OAUTH_CLIENT_ID }}
MCP_OAUTH_CLIENT_SECRET: ${{ secrets.MCP_OAUTH_CLIENT_SECRET }}
MCP_OAUTH_ISSUER: ${{ secrets.MCP_OAUTH_ISSUER }}
MCP_OAUTH_JWKS_URI: ${{ secrets.MCP_OAUTH_JWKS_URI }}
MCP_OAUTH_USERNAME: ${{ vars.MCP_OAUTH_USERNAME }}
MCP_OAUTH_PASSWORD: ${{ secrets.MCP_OAUTH_PASSWORD }}
# for MCP API Key tests
MCP_API_KEY: test-api-key-12345
MCP_API_KEY_TEST_PORT: 8005
MCP_API_KEY_TEST_URL: http://host.docker.internal:8005/mcp
MCP_API_KEY_SERVER_HOST: 0.0.0.0
MCP_API_KEY_SERVER_PUBLIC_HOST: host.docker.internal
MOCK_LLM_RESPONSE: true
MCP_TEST_SERVER_PORT: 8004
MCP_TEST_SERVER_URL: http://host.docker.internal:8004/mcp
MCP_TEST_SERVER_PUBLIC_URL: http://host.docker.internal:8004/mcp
MCP_TEST_SERVER_BIND_HOST: 0.0.0.0
MCP_TEST_SERVER_PUBLIC_HOST: host.docker.internal
MCP_SERVER_HOST: 0.0.0.0
MCP_SERVER_PUBLIC_HOST: host.docker.internal
MCP_SERVER_PUBLIC_URL: http://host.docker.internal:8004/mcp
PYTEST_PLAYWRIGHT_SKIP_INITIAL_RESET: true
jobs:
build-web-image:
runs-on: [runs-on, runner=4cpu-linux-arm64, "run-id=${{ github.run_id }}-build-web-image", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling external images otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Web Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./web
file: ./web/Dockerfile
platforms: linux/arm64
tags: ${{ env.RUNS_ON_ECR_CACHE }}:playwright-test-web-${{ github.run_id }}
push: true
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache
type=registry,ref=onyxdotapp/onyx-web-server:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:web-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
build-backend-image:
runs-on: [runs-on, runner=1cpu-linux-arm64, "run-id=${{ github.run_id }}-build-backend-image", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling external images otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Backend Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile
platforms: linux/arm64
tags: ${{ env.RUNS_ON_ECR_CACHE }}:playwright-test-backend-${{ github.run_id }}
push: true
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache
type=registry,ref=onyxdotapp/onyx-backend:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:backend-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
build-model-server-image:
runs-on: [runs-on, runner=1cpu-linux-arm64, "run-id=${{ github.run_id }}-build-model-server-image", "extras=ecr-cache"]
timeout-minutes: 45
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Format branch name for cache
id: format-branch
env:
PR_NUMBER: ${{ github.event.pull_request.number }}
REF_NAME: ${{ github.ref_name }}
run: |
if [ -n "${PR_NUMBER}" ]; then
CACHE_SUFFIX="${PR_NUMBER}"
else
# shellcheck disable=SC2001
CACHE_SUFFIX=$(echo "${REF_NAME}" | sed 's/[^A-Za-z0-9._-]/-/g')
fi
echo "cache-suffix=${CACHE_SUFFIX}" >> $GITHUB_OUTPUT
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # ratchet:docker/setup-buildx-action@v3
# needed for pulling external images otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
- name: Build and push Model Server Docker image
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # ratchet:docker/build-push-action@v6
with:
context: ./backend
file: ./backend/Dockerfile.model_server
platforms: linux/arm64
tags: ${{ env.RUNS_ON_ECR_CACHE }}:playwright-test-model-server-${{ github.run_id }}
push: true
cache-from: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }}
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache
type=registry,ref=onyxdotapp/onyx-model-server:latest
cache-to: |
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ github.event.pull_request.head.sha || github.sha }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache-${{ steps.format-branch.outputs.cache-suffix }},mode=max
type=registry,ref=${{ env.RUNS_ON_ECR_CACHE }}:model-server-cache,mode=max
no-cache: ${{ vars.DOCKER_NO_CACHE == 'true' }}
playwright-tests:
needs: [build-web-image, build-backend-image, build-model-server-image]
name: Playwright Tests (${{ matrix.project }})
runs-on:
- runs-on
- runner=8cpu-linux-arm64
- "run-id=${{ github.run_id }}-playwright-tests-${{ matrix.project }}"
- "extras=ecr-cache"
- volume=50gb
timeout-minutes: 45
strategy:
fail-fast: false
matrix:
project: [admin, no-auth, exclusive]
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
name: Playwright Tests
# See https://runs-on.com/runners/linux/
runs-on:
[
runs-on,
runner=32cpu-linux-x64,
disk=large,
"run-id=${{ github.run_id }}",
]
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
- run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
pip install --retries 5 --timeout 30 -r backend/requirements/model_server.txt
- name: Setup node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v4
uses: actions/setup-node@v4
with:
node-version: 22
cache: 'npm'
cache-dependency-path: ./web/package-lock.json
- name: Install node dependencies
working-directory: ./web
run: npm ci
- name: Cache playwright cache
uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
with:
path: ~/.cache/ms-playwright
key: ${{ runner.os }}-playwright-npm-${{ hashFiles('web/package-lock.json') }}
restore-keys: |
${{ runner.os }}-playwright-npm-
- name: Install playwright browsers
working-directory: ./web
run: npx playwright install --with-deps
- name: Create .env file for Docker Compose
env:
OPENAI_API_KEY_VALUE: ${{ env.OPENAI_API_KEY }}
EXA_API_KEY_VALUE: ${{ env.EXA_API_KEY }}
ECR_CACHE: ${{ env.RUNS_ON_ECR_CACHE }}
RUN_ID: ${{ github.run_id }}
run: |
cat <<EOF > deployment/docker_compose/.env
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true
AUTH_TYPE=basic
GEN_AI_API_KEY=${OPENAI_API_KEY_VALUE}
EXA_API_KEY=${EXA_API_KEY_VALUE}
REQUIRE_EMAIL_VERIFICATION=false
DISABLE_TELEMETRY=true
ONYX_BACKEND_IMAGE=${ECR_CACHE}:playwright-test-backend-${RUN_ID}
ONYX_MODEL_SERVER_IMAGE=${ECR_CACHE}:playwright-test-model-server-${RUN_ID}
ONYX_WEB_SERVER_IMAGE=${ECR_CACHE}:playwright-test-web-${RUN_ID}
EOF
if [ "${{ matrix.project }}" = "no-auth" ]; then
echo "PLAYWRIGHT_FORCE_EMPTY_LLM_PROVIDERS=true" >> deployment/docker_compose/.env
fi
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
# needed for pulling Vespa, Redis, Postgres, and Minio images
# otherwise, we hit the "Unauthenticated users" limit
# https://docs.docker.com/docker-hub/usage/
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
# tag every docker image with "test" so that we can spin up the correct set
# of images during testing
# we use the runs-on cache for docker builds
# in conjunction with runs-on runners, it has better speed and unlimited caching
# https://runs-on.com/caching/s3-cache-for-github-actions/
# https://runs-on.com/caching/docker/
# https://github.com/moby/buildkit#s3-cache-experimental
# images are built and run locally for testing purposes. Not pushed.
- name: Build Web Docker image
uses: ./.github/actions/custom-build-and-push
with:
context: ./web
file: ./web/Dockerfile
platforms: linux/amd64
tags: onyxdotapp/onyx-web-server:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/web-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/web-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
- name: Build Backend Docker image
uses: ./.github/actions/custom-build-and-push
with:
context: ./backend
file: ./backend/Dockerfile
platforms: linux/amd64
tags: onyxdotapp/onyx-backend:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/backend/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/backend/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
- name: Build Model Server Docker image
uses: ./.github/actions/custom-build-and-push
with:
context: ./backend
file: ./backend/Dockerfile.model_server
platforms: linux/amd64
tags: onyxdotapp/onyx-model-server:test
push: false
load: true
cache-from: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }}
cache-to: type=s3,prefix=cache/${{ github.repository }}/integration-tests/model-server/,region=${{ env.RUNS_ON_AWS_REGION }},bucket=${{ env.RUNS_ON_S3_BUCKET_CACHE }},mode=max
- name: Start Docker containers
run: |
cd deployment/docker_compose
docker compose -f docker-compose.yml -f docker-compose.dev.yml -f docker-compose.mcp-oauth-test.yml -f docker-compose.mcp-api-key-test.yml up -d
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
AUTH_TYPE=basic \
GEN_AI_API_KEY=${{ secrets.OPENAI_API_KEY }} \
REQUIRE_EMAIL_VERIFICATION=false \
DISABLE_TELEMETRY=true \
IMAGE_TAG=test \
docker compose -f docker-compose.dev.yml -p danswer-stack up -d
id: start_docker
- name: Wait for service to be ready
run: |
echo "Starting wait-for-service script..."
docker logs -f onyx-api_server-1 &
docker logs -f danswer-stack-api_server-1 &
start_time=$(date +%s)
timeout=300 # 5 minutes in seconds
@@ -306,15 +138,15 @@ jobs:
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. Service did not become ready in 5 minutes."
exit 1
fi
# Use curl with error handling to ignore specific exit code 56
response=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:8080/health || echo "curl_error")
if [ "$response" = "200" ]; then
echo "Service is ready!"
break
@@ -323,130 +155,49 @@ jobs:
else
echo "Service not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
echo "Finished waiting for service."
- name: Wait for MCP OAuth mock server
run: |
echo "Waiting for MCP OAuth mock server on port ${MCP_TEST_SERVER_PORT:-8004}..."
start_time=$(date +%s)
timeout=120
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. MCP OAuth mock server did not become ready in ${timeout}s."
exit 1
fi
if curl -sf "http://localhost:${MCP_TEST_SERVER_PORT:-8004}/healthz" > /dev/null; then
echo "MCP OAuth mock server is ready!"
break
fi
sleep 3
done
- name: Wait for MCP API Key mock server
run: |
echo "Waiting for MCP API Key mock server on port ${MCP_API_KEY_TEST_PORT:-8005}..."
start_time=$(date +%s)
timeout=120
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. MCP API Key mock server did not become ready in ${timeout}s."
exit 1
fi
if curl -sf "http://localhost:${MCP_API_KEY_TEST_PORT:-8005}/healthz" > /dev/null; then
echo "MCP API Key mock server is ready!"
break
fi
sleep 3
done
- name: Wait for web server to be ready
run: |
echo "Waiting for web server on port 3000..."
start_time=$(date +%s)
timeout=120
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. Web server did not become ready in ${timeout}s."
exit 1
fi
if curl -sf "http://localhost:3000/api/health" > /dev/null 2>&1 || \
curl -sf "http://localhost:3000/" > /dev/null 2>&1; then
echo "Web server is ready!"
break
fi
echo "Web server not ready yet. Retrying in 3 seconds..."
sleep 3
done
- name: Run pytest playwright test init
working-directory: ./backend
env:
PYTEST_IGNORE_SKIP: true
run: pytest -s tests/integration/tests/playwright/test_playwright.py
- name: Run Playwright tests
working-directory: ./web
env:
PROJECT: ${{ matrix.project }}
run: |
# Create test-results directory to ensure it exists for artifact upload
mkdir -p test-results
if [ "${PROJECT}" = "no-auth" ]; then
export PLAYWRIGHT_FORCE_EMPTY_LLM_PROVIDERS=true
fi
npx playwright test --project ${PROJECT}
run: npx playwright test
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
- uses: actions/upload-artifact@v4
if: always()
with:
# Includes test results and trace.zip files
name: playwright-test-results-${{ matrix.project }}-${{ github.run_id }}
path: ./web/test-results/
# Chromatic automatically defaults to the test-results directory.
# Replace with the path to your custom directory and adjust the CHROMATIC_ARCHIVE_LOCATION environment variable accordingly.
name: test-results
path: ./web/test-results
retention-days: 30
# save before stopping the containers so the logs can be captured
- name: Save Docker logs
if: success() || failure()
env:
WORKSPACE: ${{ github.workspace }}
run: |
cd deployment/docker_compose
docker compose logs > docker-compose.log
mv docker-compose.log ${WORKSPACE}/docker-compose.log
docker compose -f docker-compose.dev.yml -p danswer-stack logs > docker-compose.log
mv docker-compose.log ${{ github.workspace }}/docker-compose.log
- name: Upload logs
if: success() || failure()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
uses: actions/upload-artifact@v4
with:
name: docker-logs-${{ matrix.project }}-${{ github.run_id }}
name: docker-logs
path: ${{ github.workspace }}/docker-compose.log
playwright-required:
# NOTE: Github-hosted runners have about 20s faster queue times and are preferred here.
runs-on: ubuntu-slim
timeout-minutes: 45
needs: [playwright-tests]
if: ${{ always() }}
steps:
- name: Check job status
if: ${{ contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled') || contains(needs.*.result, 'skipped') }}
run: exit 1
- name: Stop Docker containers
run: |
cd deployment/docker_compose
docker compose -f docker-compose.dev.yml -p danswer-stack down -v
# NOTE: Chromatic UI diff testing is currently disabled.
# We are using Playwright for local and CI testing without visual regression checks.
@@ -465,12 +216,12 @@ jobs:
# ]
# steps:
# - name: Checkout code
# uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
# uses: actions/checkout@v4
# with:
# fetch-depth: 0
# - name: Setup node
# uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v4
# uses: actions/setup-node@v4
# with:
# node-version: 22
@@ -479,7 +230,7 @@ jobs:
# run: npm ci
# - name: Download Playwright test results
# uses: actions/download-artifact@d3f86a106a0bac45b974a628896c90dbdf5c8093 # ratchet:actions/download-artifact@v4
# uses: actions/download-artifact@v4
# with:
# name: test-results
# path: ./web/test-results

View File

@@ -1,7 +1,4 @@
name: Python Checks
concurrency:
group: Python-Checks-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
@@ -9,60 +6,60 @@ on:
branches:
- main
- 'release/**'
push:
tags:
- "v*.*.*"
permissions:
contents: read
jobs:
mypy-check:
# See https://runs-on.com/runners/linux/
# Note: Mypy seems quite optimized for x64 compared to arm64.
# Similarly, mypy is single-threaded and incremental, so 2cpu is sufficient.
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}-mypy-check", "extras=s3-cache"]
timeout-minutes: 45
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}"]
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
- name: Checkout code
uses: actions/checkout@v4
- name: Setup Python and Install Dependencies
uses: ./.github/actions/setup-python-and-install-dependencies
with:
requirements: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
backend/requirements/ee.txt
- name: Set up Python
uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: 'pip'
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
- run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
pip install --retries 5 --timeout 30 -r backend/requirements/model_server.txt
- name: Generate OpenAPI schema and Python client
shell: bash
run: |
ods openapi all
- name: Generate OpenAPI schema
working-directory: ./backend
env:
PYTHONPATH: "."
run: |
python scripts/onyx_openapi_schema.py --filename generated/openapi.json
- name: Cache mypy cache
if: ${{ vars.DISABLE_MYPY_CACHE != 'true' }}
uses: runs-on/cache@50350ad4242587b6c8c2baa2e740b1bc11285ff4 # ratchet:runs-on/cache@v4
with:
path: backend/.mypy_cache
key: mypy-${{ runner.os }}-${{ hashFiles('**/*.py', '**/*.pyi', 'backend/pyproject.toml') }}
restore-keys: |
mypy-${{ runner.os }}-
- name: Generate OpenAPI Python client
working-directory: ./backend
run: |
docker run --rm \
-v "${{ github.workspace }}/backend/generated:/local" \
openapitools/openapi-generator-cli generate \
-i /local/openapi.json \
-g python \
-o /local/onyx_openapi_client \
--package-name onyx_openapi_client \
- name: Run MyPy
run: |
cd backend
mypy .
- name: Run MyPy
working-directory: ./backend
env:
MYPY_FORCE_COLOR: 1
TERM: xterm-256color
run: mypy .
- name: Check import order with reorder-python-imports
run: |
cd backend
find ./onyx -name "*.py" | xargs reorder-python-imports --py311-plus
- name: Run MyPy (tools/)
env:
MYPY_FORCE_COLOR: 1
TERM: xterm-256color
run: mypy tools/
- name: Check code formatting with Black
run: |
cd backend
black --check .

View File

@@ -1,49 +1,30 @@
name: Connector Tests
concurrency:
group: Connector-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
pull_request:
branches: [main]
push:
tags:
- "v*.*.*"
schedule:
# This cron expression runs the job daily at 16:00 UTC (9am PT)
- cron: "0 16 * * *"
permissions:
contents: read
env:
# AWS
AWS_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS: ${{ secrets.AWS_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS }}
AWS_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS: ${{ secrets.AWS_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS }}
# Cloudflare R2
R2_ACCOUNT_ID_DAILY_CONNECTOR_TESTS: ${{ vars.R2_ACCOUNT_ID_DAILY_CONNECTOR_TESTS }}
R2_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS: ${{ secrets.R2_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS }}
R2_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS: ${{ secrets.R2_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS }}
# Google Cloud Storage
GCS_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS: ${{ secrets.GCS_ACCESS_KEY_ID_DAILY_CONNECTOR_TESTS }}
GCS_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS: ${{ secrets.GCS_SECRET_ACCESS_KEY_DAILY_CONNECTOR_TESTS }}
# Confluence
CONFLUENCE_TEST_SPACE_URL: ${{ vars.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_TEST_SPACE: ${{ vars.CONFLUENCE_TEST_SPACE }}
CONFLUENCE_TEST_SPACE_URL: ${{ secrets.CONFLUENCE_TEST_SPACE_URL }}
CONFLUENCE_TEST_SPACE: ${{ secrets.CONFLUENCE_TEST_SPACE }}
CONFLUENCE_TEST_PAGE_ID: ${{ secrets.CONFLUENCE_TEST_PAGE_ID }}
CONFLUENCE_USER_NAME: ${{ vars.CONFLUENCE_USER_NAME }}
CONFLUENCE_IS_CLOUD: ${{ secrets.CONFLUENCE_IS_CLOUD }}
CONFLUENCE_USER_NAME: ${{ secrets.CONFLUENCE_USER_NAME }}
CONFLUENCE_ACCESS_TOKEN: ${{ secrets.CONFLUENCE_ACCESS_TOKEN }}
CONFLUENCE_ACCESS_TOKEN_SCOPED: ${{ secrets.CONFLUENCE_ACCESS_TOKEN_SCOPED }}
# Jira
JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }}
JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }}
JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
JIRA_API_TOKEN_SCOPED: ${{ secrets.JIRA_API_TOKEN_SCOPED }}
# Gong
GONG_ACCESS_KEY: ${{ secrets.GONG_ACCESS_KEY }}
@@ -73,22 +54,22 @@ env:
HUBSPOT_ACCESS_TOKEN: ${{ secrets.HUBSPOT_ACCESS_TOKEN }}
# IMAP
IMAP_HOST: ${{ vars.IMAP_HOST }}
IMAP_USERNAME: ${{ vars.IMAP_USERNAME }}
IMAP_HOST: ${{ secrets.IMAP_HOST }}
IMAP_USERNAME: ${{ secrets.IMAP_USERNAME }}
IMAP_PASSWORD: ${{ secrets.IMAP_PASSWORD }}
IMAP_MAILBOXES: ${{ vars.IMAP_MAILBOXES }}
IMAP_MAILBOXES: ${{ secrets.IMAP_MAILBOXES }}
# Airtable
AIRTABLE_TEST_BASE_ID: ${{ vars.AIRTABLE_TEST_BASE_ID }}
AIRTABLE_TEST_TABLE_ID: ${{ vars.AIRTABLE_TEST_TABLE_ID }}
AIRTABLE_TEST_TABLE_NAME: ${{ vars.AIRTABLE_TEST_TABLE_NAME }}
AIRTABLE_TEST_BASE_ID: ${{ secrets.AIRTABLE_TEST_BASE_ID }}
AIRTABLE_TEST_TABLE_ID: ${{ secrets.AIRTABLE_TEST_TABLE_ID }}
AIRTABLE_TEST_TABLE_NAME: ${{ secrets.AIRTABLE_TEST_TABLE_NAME }}
AIRTABLE_ACCESS_TOKEN: ${{ secrets.AIRTABLE_ACCESS_TOKEN }}
# Sharepoint
SHAREPOINT_CLIENT_ID: ${{ vars.SHAREPOINT_CLIENT_ID }}
SHAREPOINT_CLIENT_ID: ${{ secrets.SHAREPOINT_CLIENT_ID }}
SHAREPOINT_CLIENT_SECRET: ${{ secrets.SHAREPOINT_CLIENT_SECRET }}
SHAREPOINT_CLIENT_DIRECTORY_ID: ${{ vars.SHAREPOINT_CLIENT_DIRECTORY_ID }}
SHAREPOINT_SITE: ${{ vars.SHAREPOINT_SITE }}
SHAREPOINT_CLIENT_DIRECTORY_ID: ${{ secrets.SHAREPOINT_CLIENT_DIRECTORY_ID }}
SHAREPOINT_SITE: ${{ secrets.SHAREPOINT_SITE }}
# Github
ACCESS_TOKEN_GITHUB: ${{ secrets.ACCESS_TOKEN_GITHUB }}
@@ -115,66 +96,36 @@ env:
TEAMS_DIRECTORY_ID: ${{ secrets.TEAMS_DIRECTORY_ID }}
TEAMS_SECRET: ${{ secrets.TEAMS_SECRET }}
# Bitbucket
BITBUCKET_WORKSPACE: ${{ secrets.BITBUCKET_WORKSPACE }}
BITBUCKET_REPOSITORIES: ${{ secrets.BITBUCKET_REPOSITORIES }}
BITBUCKET_PROJECTS: ${{ secrets.BITBUCKET_PROJECTS }}
BITBUCKET_EMAIL: ${{ vars.BITBUCKET_EMAIL }}
BITBUCKET_API_TOKEN: ${{ secrets.BITBUCKET_API_TOKEN }}
# Fireflies
FIREFLIES_API_KEY: ${{ secrets.FIREFLIES_API_KEY }}
jobs:
connectors-check:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}-connectors-check", "extras=s3-cache"]
timeout-minutes: 45
runs-on: [runs-on, runner=8cpu-linux-x64, "run-id=${{ github.run_id }}"]
env:
PYTHONPATH: ./backend
DISABLE_TELEMETRY: "true"
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Python and Install Dependencies
uses: ./.github/actions/setup-python-and-install-dependencies
- name: Set up Python
uses: actions/setup-python@v5
with:
requirements: |
python-version: "3.11"
cache: "pip"
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
- name: Setup Playwright
uses: ./.github/actions/setup-playwright
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
playwright install chromium
playwright install-deps chromium
- name: Detect Connector changes
id: changes
uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # ratchet:dorny/paths-filter@v3
with:
filters: |
hubspot:
- 'backend/onyx/connectors/hubspot/**'
- 'backend/tests/daily/connectors/hubspot/**'
- 'uv.lock'
salesforce:
- 'backend/onyx/connectors/salesforce/**'
- 'backend/tests/daily/connectors/salesforce/**'
- 'uv.lock'
github:
- 'backend/onyx/connectors/github/**'
- 'backend/tests/daily/connectors/github/**'
- 'uv.lock'
file_processing:
- 'backend/onyx/file_processing/**'
- 'uv.lock'
- name: Run Tests (excluding HubSpot, Salesforce, GitHub, and Coda)
- name: Run Tests
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
py.test \
@@ -184,59 +135,14 @@ jobs:
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/daily/connectors \
--ignore backend/tests/daily/connectors/hubspot \
--ignore backend/tests/daily/connectors/salesforce \
--ignore backend/tests/daily/connectors/github \
--ignore backend/tests/daily/connectors/coda
- name: Run HubSpot Connector Tests
if: ${{ github.event_name == 'schedule' || steps.changes.outputs.hubspot == 'true' || steps.changes.outputs.file_processing == 'true' }}
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
py.test \
-n 8 \
--dist loadfile \
--durations=8 \
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/daily/connectors/hubspot
- name: Run Salesforce Connector Tests
if: ${{ github.event_name == 'schedule' || steps.changes.outputs.salesforce == 'true' || steps.changes.outputs.file_processing == 'true' }}
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
py.test \
-n 8 \
--dist loadfile \
--durations=8 \
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/daily/connectors/salesforce
- name: Run GitHub Connector Tests
if: ${{ github.event_name == 'schedule' || steps.changes.outputs.github == 'true' || steps.changes.outputs.file_processing == 'true' }}
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
py.test \
-n 8 \
--dist loadfile \
--durations=8 \
-o junit_family=xunit2 \
-xv \
--ff \
backend/tests/daily/connectors/github
backend/tests/daily/connectors
- name: Alert on Failure
if: failure() && github.event_name == 'schedule'
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO: ${{ github.repository }}
RUN_ID: ${{ github.run_id }}
run: |
curl -X POST \
-H 'Content-type: application/json' \
--data "{\"text\":\"Scheduled Connector Tests failed! Check the run at: https://github.com/${REPO}/actions/runs/${RUN_ID}\"}" \
--data '{"text":"Scheduled Connector Tests failed! Check the run at: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' \
$SLACK_WEBHOOK

View File

@@ -10,15 +10,12 @@ on:
description: 'Branch to run the workflow on'
required: false
default: 'main'
permissions:
contents: read
env:
# Bedrock
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION_NAME: ${{ vars.AWS_REGION_NAME }}
AWS_REGION_NAME: ${{ secrets.AWS_REGION_NAME }}
# API keys for testing
COHERE_API_KEY: ${{ secrets.COHERE_API_KEY }}
@@ -26,25 +23,22 @@ env:
LITELLM_API_URL: ${{ secrets.LITELLM_API_URL }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
AZURE_API_KEY: ${{ secrets.AZURE_API_KEY }}
AZURE_API_URL: ${{ vars.AZURE_API_URL }}
AZURE_API_URL: ${{ secrets.AZURE_API_URL }}
jobs:
model-check:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}-model-check"]
timeout-minutes: 45
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}"]
env:
PYTHONPATH: ./backend
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Login to Docker Hub
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # ratchet:docker/login-action@v3
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKER_USERNAME }}
password: ${{ secrets.DOCKER_TOKEN }}
@@ -59,9 +53,9 @@ jobs:
run: |
docker pull onyxdotapp/onyx-model-server:latest
docker tag onyxdotapp/onyx-model-server:latest onyxdotapp/onyx-model-server:test
- name: Set up Python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # ratchet:actions/setup-python@v6
uses: actions/setup-python@v5
with:
python-version: "3.11"
cache: "pip"
@@ -83,7 +77,7 @@ jobs:
REQUIRE_EMAIL_VERIFICATION=false \
DISABLE_TELEMETRY=true \
IMAGE_TAG=test \
docker compose -f docker-compose.model-server-test.yml up -d indexing_model_server
docker compose -f docker-compose.model-server-test.yml -p onyx-stack up -d indexing_model_server
id: start_docker
- name: Wait for service to be ready
@@ -96,15 +90,15 @@ jobs:
while true; do
current_time=$(date +%s)
elapsed_time=$((current_time - start_time))
if [ $elapsed_time -ge $timeout ]; then
echo "Timeout reached. Service did not become ready in 5 minutes."
exit 1
fi
# Use curl with error handling to ignore specific exit code 56
response=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:9000/api/health || echo "curl_error")
if [ "$response" = "200" ]; then
echo "Service is ready!"
break
@@ -113,11 +107,11 @@ jobs:
else
echo "Service not ready yet (HTTP status $response). Retrying in 5 seconds..."
fi
sleep 5
done
echo "Finished waiting for service."
- name: Run Tests
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"
run: |
@@ -128,23 +122,28 @@ jobs:
if: failure() && github.event_name == 'schedule'
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }}
REPO: ${{ github.repository }}
RUN_ID: ${{ github.run_id }}
run: |
curl -X POST \
-H 'Content-type: application/json' \
--data "{\"text\":\"Scheduled Model Tests failed! Check the run at: https://github.com/${REPO}/actions/runs/${RUN_ID}\"}" \
--data '{"text":"Scheduled Model Tests failed! Check the run at: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' \
$SLACK_WEBHOOK
- name: Dump all-container logs (optional)
if: always()
run: |
cd deployment/docker_compose
docker compose -f docker-compose.model-server-test.yml logs --no-color > $GITHUB_WORKSPACE/docker-compose.log || true
docker compose -f docker-compose.model-server-test.yml -p onyx-stack logs --no-color > $GITHUB_WORKSPACE/docker-compose.log || true
- name: Upload logs
if: always()
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # ratchet:actions/upload-artifact@v4
uses: actions/upload-artifact@v4
with:
name: docker-all-logs
path: ${{ github.workspace }}/docker-compose.log
- name: Stop Docker containers
if: always()
run: |
cd deployment/docker_compose
docker compose -f docker-compose.model-server-test.yml -p onyx-stack down -v

View File

@@ -1,7 +1,4 @@
name: Python Unit Tests
concurrency:
group: Python-Unit-Tests-${{ github.workflow }}-${{ github.head_ref || github.event.workflow_run.head_branch || github.run_id }}
cancel-in-progress: true
on:
merge_group:
@@ -9,41 +6,37 @@ on:
branches:
- main
- 'release/**'
push:
tags:
- "v*.*.*"
permissions:
contents: read
jobs:
backend-check:
# See https://runs-on.com/runners/linux/
runs-on: [runs-on, runner=2cpu-linux-arm64, "run-id=${{ github.run_id }}-backend-check"]
timeout-minutes: 45
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}"]
env:
PYTHONPATH: ./backend
REDIS_CLOUD_PYTEST_PASSWORD: ${{ secrets.REDIS_CLOUD_PYTEST_PASSWORD }}
DISABLE_TELEMETRY: "true"
SF_USERNAME: ${{ secrets.SF_USERNAME }}
SF_PASSWORD: ${{ secrets.SF_PASSWORD }}
SF_SECURITY_TOKEN: ${{ secrets.SF_SECURITY_TOKEN }}
steps:
- uses: runs-on/action@cd2b598b0515d39d78c38a02d529db87d2196d1e # ratchet:runs-on/action@v2
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
uses: actions/checkout@v4
- name: Setup Python and Install Dependencies
uses: ./.github/actions/setup-python-and-install-dependencies
- name: Set up Python
uses: actions/setup-python@v5
with:
requirements: |
python-version: '3.11'
cache: 'pip'
cache-dependency-path: |
backend/requirements/default.txt
backend/requirements/dev.txt
backend/requirements/model_server.txt
backend/requirements/ee.txt
- name: Install Dependencies
run: |
python -m pip install --upgrade pip
pip install --retries 5 --timeout 30 -r backend/requirements/default.txt
pip install --retries 5 --timeout 30 -r backend/requirements/dev.txt
- name: Run Tests
shell: script -q -e -c "bash --noprofile --norc -eo pipefail {0}"

View File

@@ -6,44 +6,18 @@ concurrency:
on:
merge_group:
pull_request: null
push:
branches:
- main
tags:
- "v*.*.*"
permissions:
contents: read
jobs:
quality-checks:
runs-on: ubuntu-latest
timeout-minutes: 45
# See https://runs-on.com/runners/linux/
runs-on: [runs-on,runner=8cpu-linux-x64,"run-id=${{ github.run_id }}"]
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
- uses: actions/checkout@v4
with:
fetch-depth: 0
persist-credentials: false
- uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # ratchet:actions/setup-python@v6
- uses: actions/setup-python@v5
with:
python-version: "3.11"
- name: Setup Terraform
uses: hashicorp/setup-terraform@b9cd54a3c349d3f38e8881555d616ced269862dd # ratchet:hashicorp/setup-terraform@v3
- name: Setup node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # ratchet:actions/setup-node@v6
with: # zizmor: ignore[cache-poisoning]
node-version: 22
cache: "npm"
cache-dependency-path: ./web/package-lock.json
- name: Install node dependencies
working-directory: ./web
run: npm ci
- uses: j178/prek-action@91fd7d7cf70ae1dee9f4f44e7dfa5d1073fe6623 # ratchet:j178/prek-action@v1
- uses: pre-commit/action@v3.0.1
with:
prek-version: '0.2.21'
extra-args: ${{ github.event_name == 'pull_request' && format('--from-ref {0} --to-ref {1}', github.event.pull_request.base.sha, github.event.pull_request.head.sha) || github.event_name == 'merge_group' && format('--from-ref {0} --to-ref {1}', github.event.merge_group.base_sha, github.event.merge_group.head_sha) || github.ref_name == 'main' && '--all-files' || '' }}
- name: Check Actions
uses: giner/check-actions@28d366c7cbbe235f9624a88aa31a628167eee28c # ratchet:giner/check-actions@v1.0.1
with:
check_permissions: false
check_versions: false
extra_args: ${{ github.event_name == 'pull_request' && format('--from-ref {0} --to-ref {1}', github.event.pull_request.base.sha, github.event.pull_request.head.sha) || '' }}

View File

@@ -1,40 +0,0 @@
name: Release Devtools
on:
push:
tags:
- "ods/v*.*.*"
jobs:
pypi:
runs-on: ubuntu-latest
environment:
name: release-devtools
permissions:
id-token: write
timeout-minutes: 10
strategy:
matrix:
os-arch:
- {goos: "linux", goarch: "amd64"}
- {goos: "linux", goarch: "arm64"}
- {goos: "windows", goarch: "amd64"}
- {goos: "windows", goarch: "arm64"}
- {goos: "darwin", goarch: "amd64"}
- {goos: "darwin", goarch: "arm64"}
- {goos: "", goarch: ""}
steps:
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
persist-credentials: false
fetch-depth: 0
- uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # ratchet:astral-sh/setup-uv@v7
with:
enable-cache: false
- run: |
GOOS="${{ matrix.os-arch.goos }}" \
GOARCH="${{ matrix.os-arch.goarch }}" \
uv build --wheel
working-directory: tools/ods
- run: uv publish
working-directory: tools/ods

View File

@@ -1,49 +0,0 @@
name: Sync FOSS Repo
on:
schedule:
# Run daily at 3am PT (11am UTC during PST)
- cron: '0 11 * * *'
workflow_dispatch:
jobs:
sync-foss:
runs-on: ubuntu-latest
timeout-minutes: 45
permissions:
contents: read
steps:
- name: Checkout main Onyx repo
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
with:
fetch-depth: 0
persist-credentials: false
- name: Install git-filter-repo
run: |
sudo apt-get update && sudo apt-get install -y git-filter-repo
- name: Configure SSH for deploy key
env:
FOSS_REPO_DEPLOY_KEY: ${{ secrets.FOSS_REPO_DEPLOY_KEY }}
run: |
mkdir -p ~/.ssh
echo "$FOSS_REPO_DEPLOY_KEY" > ~/.ssh/id_ed25519
chmod 600 ~/.ssh/id_ed25519
ssh-keyscan github.com >> ~/.ssh/known_hosts
- name: Set Git config
run: |
git config --global user.name "onyx-bot"
git config --global user.email "bot@onyx.app"
- name: Build FOSS version
run: bash backend/scripts/make_foss_repo.sh
- name: Push to FOSS repo
env:
FOSS_REPO_URL: git@github.com:onyx-dot-app/onyx-foss.git
run: |
cd /tmp/foss_repo
git remote add public "$FOSS_REPO_URL"
git push --force public main

View File

@@ -3,30 +3,27 @@ name: Nightly Tag Push
on:
schedule:
- cron: "0 10 * * *" # Runs every day at 2 AM PST / 3 AM PDT / 10 AM UTC
workflow_dispatch:
permissions:
contents: write # Allows pushing tags to the repository
jobs:
create-and-push-tag:
runs-on: ubuntu-slim
timeout-minutes: 45
runs-on: [runs-on, runner=2cpu-linux-x64, "run-id=${{ github.run_id }}"]
steps:
# actions using GITHUB_TOKEN cannot trigger another workflow, but we do want this to trigger docker pushes
# see https://github.com/orgs/community/discussions/27028#discussioncomment-3254367 for the workaround we
# implement here which needs an actual user's deploy key
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6
uses: actions/checkout@v4
with:
ssh-key: "${{ secrets.DEPLOY_KEY }}"
persist-credentials: true
ssh-key: "${{ secrets.RKUO_DEPLOY_KEY }}"
- name: Set up Git user
run: |
git config user.name "Onyx Bot [bot]"
git config user.email "onyx-bot[bot]@onyx.app"
git config user.name "Richard Kuo [bot]"
git config user.email "rkuo[bot]@onyx.app"
- name: Check for existing nightly tag
id: check_tag
@@ -54,12 +51,3 @@ jobs:
run: |
TAG_NAME="nightly-latest-$(date +'%Y%m%d')"
git push origin $TAG_NAME
- name: Send Slack notification
if: failure()
uses: ./.github/actions/slack-notify
with:
webhook-url: ${{ secrets.MONITOR_DEPLOYMENTS_WEBHOOK }}
title: "🚨 Nightly Tag Push Failed"
ref-name: ${{ github.ref_name }}
failed-jobs: "create-and-push-tag"

View File

@@ -1,38 +0,0 @@
name: Run Zizmor
on:
push:
branches: ["main"]
pull_request:
branches: ["**"]
permissions: {}
jobs:
zizmor:
name: zizmor
runs-on: ubuntu-slim
timeout-minutes: 45
permissions:
security-events: write # needed for SARIF uploads
steps:
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # ratchet:actions/checkout@v6.0.1
with:
persist-credentials: false
- name: Install the latest version of uv
uses: astral-sh/setup-uv@1e862dfacbd1d6d858c55d9b792c756523627244 # ratchet:astral-sh/setup-uv@v7.1.4
with:
enable-cache: false
- name: Run zizmor
run: uv run --no-sync --with zizmor zizmor --format=sarif . > results.sarif
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Upload SARIF file
uses: github/codeql-action/upload-sarif@ba454b8ab46733eb6145342877cd148270bb77ab # ratchet:github/codeql-action/upload-sarif@codeql-bundle-v2.23.5
with:
sarif_file: results.sarif
category: zizmor

30
.gitignore vendored
View File

@@ -1,7 +1,6 @@
# editors
.vscode
.zed
.cursor
# macos
.DS_store
@@ -18,41 +17,12 @@ backend/tests/regression/answer_quality/test_data.json
backend/tests/regression/search_quality/eval-*
backend/tests/regression/search_quality/search_eval_config.yaml
backend/tests/regression/search_quality/*.json
backend/onyx/evals/data/
backend/onyx/evals/one_off/*.json
*.log
# secret files
.env
jira_test_env
settings.json
# others
/deployment/data/nginx/app.conf
/deployment/data/nginx/mcp.conf.inc
/deployment/data/nginx/mcp_upstream.conf.inc
*.sw?
/backend/tests/regression/answer_quality/search_test_config.yaml
*.egg-info
# Claude
AGENTS.md
CLAUDE.md
# Local .terraform directories
**/.terraform/*
# Local .tfstate files
*.tfstate
*.tfstate.*
# Local .terraform.lock.hcl file
.terraform.lock.hcl
node_modules
# MCP configs
.playwright-mcp
# plans
plans/

View File

@@ -1,64 +1,13 @@
default_install_hook_types:
- pre-commit
- post-checkout
- post-merge
- post-rewrite
repos:
- repo: https://github.com/astral-sh/uv-pre-commit
# From: https://github.com/astral-sh/uv-pre-commit/pull/53/commits/d30b4298e4fb63ce8609e29acdbcf4c9018a483c
rev: d30b4298e4fb63ce8609e29acdbcf4c9018a483c
hooks:
- id: uv-run
name: Check lazy imports
args: ["--with=onyx-devtools", "ods", "check-lazy-imports"]
files: ^backend/(?!\.venv/).*\.py$
- id: uv-sync
args: ["--locked", "--all-extras"]
- id: uv-lock
files: ^pyproject\.toml$
- id: uv-export
name: uv-export default.txt
args: ["--no-emit-project", "--no-default-groups", "--no-hashes", "--extra", "backend", "-o", "backend/requirements/default.txt"]
files: ^(pyproject\.toml|uv\.lock|backend/requirements/.*\.txt)$
- id: uv-export
name: uv-export dev.txt
args: ["--no-emit-project", "--no-default-groups", "--no-hashes", "--extra", "dev", "-o", "backend/requirements/dev.txt"]
files: ^(pyproject\.toml|uv\.lock|backend/requirements/.*\.txt)$
- id: uv-export
name: uv-export ee.txt
args: ["--no-emit-project", "--no-default-groups", "--no-hashes", "--extra", "ee", "-o", "backend/requirements/ee.txt"]
files: ^(pyproject\.toml|uv\.lock|backend/requirements/.*\.txt)$
- id: uv-export
name: uv-export model_server.txt
args: ["--no-emit-project", "--no-default-groups", "--no-hashes", "--extra", "model_server", "-o", "backend/requirements/model_server.txt"]
files: ^(pyproject\.toml|uv\.lock|backend/requirements/.*\.txt)$
# NOTE: This takes ~6s on a single, large module which is prohibitively slow.
# - id: uv-run
# name: mypy
# args: ["--all-extras", "mypy"]
# pass_filenames: true
# files: ^backend/.*\.py$
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: 3e8a8703264a2f4a69428a0aa4dcb512790b2c8c # frozen: v6.0.0
hooks:
- id: check-yaml
files: ^.github/
- repo: https://github.com/rhysd/actionlint
rev: a443f344ff32813837fa49f7aa6cbc478d770e62 # frozen: v1.7.9
hooks:
- id: actionlint
- repo: https://github.com/psf/black
rev: 8a737e727ac5ab2f1d4cf5876720ed276dc8dc4b # frozen: 25.1.0
rev: 25.1.0
hooks:
- id: black
language_version: python3.11
# this is a fork which keeps compatibility with black
- repo: https://github.com/wimglenn/reorder-python-imports-black
rev: f55cd27f90f0cf0ee775002c2383ce1c7820013d # frozen: v3.14.0
rev: v3.14.0
hooks:
- id: reorder-python-imports
args: ['--py311-plus', '--application-directories=backend/']
@@ -70,51 +19,48 @@ repos:
# These settings will remove unused imports with side effects
# Note: The repo currently does not and should not have imports with side effects
- repo: https://github.com/PyCQA/autoflake
rev: 0544741e2b4a22b472d9d93e37d4ea9153820bb1 # frozen: v2.3.1
rev: v2.3.1
hooks:
- id: autoflake
args: [ '--remove-all-unused-imports', '--remove-unused-variables', '--in-place' , '--recursive']
- repo: https://github.com/golangci/golangci-lint
rev: 9f61b0f53f80672872fced07b6874397c3ed197b # frozen: v2.7.2
hooks:
- id: golangci-lint
entry: bash -c "find tools/ -name go.mod -print0 | xargs -0 -I{} bash -c 'cd \"$(dirname {})\" && golangci-lint run ./...'"
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: 971923581912ef60a6b70dbf0c3e9a39563c9d47 # frozen: v0.11.4
rev: v0.11.4
hooks:
- id: ruff
- repo: https://github.com/pre-commit/mirrors-prettier
rev: ffb6a759a979008c0e6dff86e39f4745a2d9eac4 # frozen: v3.1.0
rev: v3.1.0
hooks:
- id: prettier
types_or: [html, css, javascript, ts, tsx]
language_version: system
additional_dependencies:
- prettier
- repo: https://github.com/sirwart/ripsecrets
rev: 7d94620933e79b8acaa0cd9e60e9864b07673d86 # frozen: v0.1.11
hooks:
- id: ripsecrets
args:
- --additional-pattern
- ^sk-[A-Za-z0-9_\-]{20,}$
- repo: local
hooks:
- id: terraform-fmt
name: terraform fmt
entry: terraform fmt -recursive
language: system
pass_filenames: false
files: \.tf$
- id: typescript-check
name: TypeScript type check
entry: bash -c 'cd web && npm run types:check'
language: system
pass_filenames: false
files: ^web/.*\.(ts|tsx)$
# We would like to have a mypy pre-commit hook, but due to the fact that
# pre-commit runs in its own isolated environment, we would need to install
# and keep in sync all dependencies so mypy has access to the appropriate type
# stubs. This does not seem worth it at the moment, so for now we will stick to
# having mypy run via Github Actions / manually by contributors
# - repo: https://github.com/pre-commit/mirrors-mypy
# rev: v1.1.1
# hooks:
# - id: mypy
# exclude: ^tests/
# # below are needed for type stubs since pre-commit runs in its own
# # isolated environment. Unfortunately, this needs to be kept in sync
# # with requirements/dev.txt + requirements/default.txt
# additional_dependencies: [
# alembic==1.10.4,
# types-beautifulsoup4==4.12.0.3,
# types-html5lib==1.1.11.13,
# types-oauthlib==3.2.0.9,
# types-psycopg2==2.9.21.10,
# types-python-dateutil==2.8.19.13,
# types-regex==2023.3.23.1,
# types-requests==2.28.11.17,
# types-retry==0.9.9.3,
# types-urllib3==1.26.25.11
# ]
# # TODO: add back once errors are addressed
# # args: [--strict]

View File

@@ -1,13 +1,16 @@
# Copy this file to .env in the .vscode folder
# Fill in the <REPLACE THIS> values as needed, it is recommended to set the GEN_AI_API_KEY value to avoid having to set up an LLM in the UI
# Also check out onyx/backend/scripts/restart_containers.sh for a script to restart the containers which Onyx relies on outside of VSCode/Cursor processes
# Also check out danswer/backend/scripts/restart_containers.sh for a script to restart the containers which Danswer relies on outside of VSCode/Cursor processes
# For local dev, often user Authentication is not needed
AUTH_TYPE=disabled
# Skip warm up for dev
SKIP_WARM_UP=True
# Always keep these on for Dev
# Logs model prompts, reasoning, and answer to stdout
LOG_ONYX_MODEL_INTERACTIONS=True
# Logs all model prompts to stdout
LOG_DANSWER_MODEL_INTERACTIONS=True
# More verbose logging
LOG_LEVEL=debug
@@ -20,9 +23,6 @@ DISABLE_LLM_DOC_RELEVANCE=False
# Useful if you want to toggle auth on/off (google_oauth/OIDC specifically)
OAUTH_CLIENT_ID=<REPLACE THIS>
OAUTH_CLIENT_SECRET=<REPLACE THIS>
OPENID_CONFIG_URL=<REPLACE THIS>
SAML_CONF_DIR=/<ABSOLUTE PATH TO ONYX>/onyx/backend/ee/onyx/configs/saml_config
# Generally not useful for dev, we don't generally want to set up an SMTP server for dev
REQUIRE_EMAIL_VERIFICATION=False
@@ -34,32 +34,34 @@ OPENAI_API_KEY=<REPLACE THIS>
GEN_AI_MODEL_VERSION=gpt-4o
FAST_GEN_AI_MODEL_VERSION=gpt-4o
# For Danswer Slack Bot, overrides the UI values so no need to set this up via UI every time
# Only needed if using DanswerBot
#DANSWER_BOT_SLACK_APP_TOKEN=<REPLACE THIS>
#DANSWER_BOT_SLACK_BOT_TOKEN=<REPLACE THIS>
# Python stuff
PYTHONPATH=../backend
PYTHONUNBUFFERED=1
# Internet Search
BING_API_KEY=<REPLACE THIS>
EXA_API_KEY=<REPLACE THIS>
# Enable the full set of Danswer Enterprise Edition features
# NOTE: DO NOT ENABLE THIS UNLESS YOU HAVE A PAID ENTERPRISE LICENSE (or if you are using this for local testing/development)
ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=False
# Agent Search configs # TODO: Remove give proper namings
AGENT_RETRIEVAL_STATS=False # Note: This setting will incur substantial re-ranking effort
AGENT_RERANKING_STATS=True
AGENT_MAX_QUERY_RETRIEVAL_RESULTS=20
AGENT_RERANKING_MAX_QUERY_RETRIEVAL_RESULTS=20
# S3 File Store Configuration (MinIO for local development)
S3_ENDPOINT_URL=http://localhost:9004
S3_FILE_STORE_BUCKET_NAME=onyx-file-store-bucket
S3_AWS_ACCESS_KEY_ID=minioadmin
S3_AWS_SECRET_ACCESS_KEY=minioadmin
# Show extra/uncommon connectors
SHOW_EXTRA_CONNECTORS=True
# Local langsmith tracing
LANGSMITH_TRACING="true"
LANGSMITH_ENDPOINT="https://api.smith.langchain.com"
LANGSMITH_API_KEY=<REPLACE_THIS>
LANGSMITH_PROJECT=<REPLACE_THIS>
# Local Confluence OAuth testing
# OAUTH_CONFLUENCE_CLOUD_CLIENT_ID=<REPLACE_THIS>
# OAUTH_CONFLUENCE_CLOUD_CLIENT_SECRET=<REPLACE_THIS>
# NEXT_PUBLIC_TEST_ENV=True

File diff suppressed because it is too large Load Diff

View File

@@ -1,327 +0,0 @@
# AGENTS.md
This file provides guidance to Codex when working with code in this repository.
## KEY NOTES
- If you run into any missing python dependency errors, try running your command with `source backend/.venv/bin/activate` \
to assume the python venv.
- To make tests work, check the `.env` file at the root of the project to find an OpenAI key.
- If using `playwright` to explore the frontend, you can usually log in with username `a@test.com` and password
`a`. The app can be accessed at `http://localhost:3000`.
- You should assume that all Onyx services are running. To verify, you can check the `backend/log` directory to
make sure we see logs coming out from the relevant service.
- To connect to the Postgres database, use: `docker exec -it onyx-relational_db-1 psql -U postgres -c "<SQL>"`
- When making calls to the backend, always go through the frontend. E.g. make a call to `http://localhost:3000/api/persona` not `http://localhost:8080/api/persona`
- Put ALL db operations under the `backend/onyx/db` / `backend/ee/onyx/db` directories. Don't run queries
outside of those directories.
## Project Overview
**Onyx** (formerly Danswer) is an open-source Gen-AI and Enterprise Search platform that connects to company documents, apps, and people. It features a modular architecture with both Community Edition (MIT licensed) and Enterprise Edition offerings.
### Background Workers (Celery)
Onyx uses Celery for asynchronous task processing with multiple specialized workers:
#### Worker Types
1. **Primary Worker** (`celery_app.py`)
- Coordinates core background tasks and system-wide operations
- Handles connector management, document sync, pruning, and periodic checks
- Runs with 4 threads concurrency
- Tasks: connector deletion, vespa sync, pruning, LLM model updates, user file sync
2. **Docfetching Worker** (`docfetching`)
- Fetches documents from external data sources (connectors)
- Spawns docprocessing tasks for each document batch
- Implements watchdog monitoring for stuck connectors
- Configurable concurrency (default from env)
3. **Docprocessing Worker** (`docprocessing`)
- Processes fetched documents through the indexing pipeline:
- Upserts documents to PostgreSQL
- Chunks documents and adds contextual information
- Embeds chunks via model server
- Writes chunks to Vespa vector database
- Updates document metadata
- Configurable concurrency (default from env)
4. **Light Worker** (`light`)
- Handles lightweight, fast operations
- Tasks: vespa operations, document permissions sync, external group sync
- Higher concurrency for quick tasks
5. **Heavy Worker** (`heavy`)
- Handles resource-intensive operations
- Primary task: document pruning operations
- Runs with 4 threads concurrency
6. **KG Processing Worker** (`kg_processing`)
- Handles Knowledge Graph processing and clustering
- Builds relationships between documents
- Runs clustering algorithms
- Configurable concurrency
7. **Monitoring Worker** (`monitoring`)
- System health monitoring and metrics collection
- Monitors Celery queues, process memory, and system status
- Single thread (monitoring doesn't need parallelism)
- Cloud-specific monitoring tasks
8. **User File Processing Worker** (`user_file_processing`)
- Processes user-uploaded files
- Handles user file indexing and project synchronization
- Configurable concurrency
9. **Beat Worker** (`beat`)
- Celery's scheduler for periodic tasks
- Uses DynamicTenantScheduler for multi-tenant support
- Schedules tasks like:
- Indexing checks (every 15 seconds)
- Connector deletion checks (every 20 seconds)
- Vespa sync checks (every 20 seconds)
- Pruning checks (every 20 seconds)
- KG processing (every 60 seconds)
- Monitoring tasks (every 5 minutes)
- Cleanup tasks (hourly)
#### Worker Deployment Modes
Onyx supports two deployment modes for background workers, controlled by the `USE_LIGHTWEIGHT_BACKGROUND_WORKER` environment variable:
**Lightweight Mode** (default, `USE_LIGHTWEIGHT_BACKGROUND_WORKER=true`):
- Runs a single consolidated `background` worker that handles all background tasks:
- Pruning operations (from `heavy` worker)
- Knowledge graph processing (from `kg_processing` worker)
- Monitoring tasks (from `monitoring` worker)
- User file processing (from `user_file_processing` worker)
- Lower resource footprint (single worker process)
- Suitable for smaller deployments or development environments
- Default concurrency: 6 threads
**Standard Mode** (`USE_LIGHTWEIGHT_BACKGROUND_WORKER=false`):
- Runs separate specialized workers as documented above (heavy, kg_processing, monitoring, user_file_processing)
- Better isolation and scalability
- Can scale individual workers independently based on workload
- Suitable for production deployments with higher load
The deployment mode affects:
- **Backend**: Worker processes spawned by supervisord or dev scripts
- **Helm**: Which Kubernetes deployments are created
- **Dev Environment**: Which workers `dev_run_background_jobs.py` spawns
#### Key Features
- **Thread-based Workers**: All workers use thread pools (not processes) for stability
- **Tenant Awareness**: Multi-tenant support with per-tenant task isolation. There is a
middleware layer that automatically finds the appropriate tenant ID when sending tasks
via Celery Beat.
- **Task Prioritization**: High, Medium, Low priority queues
- **Monitoring**: Built-in heartbeat and liveness checking
- **Failure Handling**: Automatic retry and failure recovery mechanisms
- **Redis Coordination**: Inter-process communication via Redis
- **PostgreSQL State**: Task state and metadata stored in PostgreSQL
#### Important Notes
**Defining Tasks**:
- Always use `@shared_task` rather than `@celery_app`
- Put tasks under `background/celery/tasks/` or `ee/background/celery/tasks`
**Defining APIs**:
When creating new FastAPI APIs, do NOT use the `response_model` field. Instead, just type the
function.
**Testing Updates**:
If you make any updates to a celery worker and you want to test these changes, you will need
to ask me to restart the celery worker. There is no auto-restart on code-change mechanism.
### Code Quality
```bash
# Install and run pre-commit hooks
pre-commit install
pre-commit run --all-files
```
NOTE: Always make sure everything is strictly typed (both in Python and Typescript).
## Architecture Overview
### Technology Stack
- **Backend**: Python 3.11, FastAPI, SQLAlchemy, Alembic, Celery
- **Frontend**: Next.js 15+, React 18, TypeScript, Tailwind CSS
- **Database**: PostgreSQL with Redis caching
- **Search**: Vespa vector database
- **Auth**: OAuth2, SAML, multi-provider support
- **AI/ML**: LangChain, LiteLLM, multiple embedding models
### Directory Structure
```
backend/
├── onyx/
│ ├── auth/ # Authentication & authorization
│ ├── chat/ # Chat functionality & LLM interactions
│ ├── connectors/ # Data source connectors
│ ├── db/ # Database models & operations
│ ├── document_index/ # Vespa integration
│ ├── federated_connectors/ # External search connectors
│ ├── llm/ # LLM provider integrations
│ └── server/ # API endpoints & routers
├── ee/ # Enterprise Edition features
├── alembic/ # Database migrations
└── tests/ # Test suites
web/
├── src/app/ # Next.js app router pages
├── src/components/ # Reusable React components
└── src/lib/ # Utilities & business logic
```
## Database & Migrations
### Running Migrations
```bash
# Standard migrations
alembic upgrade head
# Multi-tenant (Enterprise)
alembic -n schema_private upgrade head
```
### Creating Migrations
```bash
# Create migration
alembic revision -m "description"
# Multi-tenant migration
alembic -n schema_private revision -m "description"
```
Write the migration manually and place it in the file that alembic creates when running the above command.
## Testing Strategy
There are 4 main types of tests within Onyx:
### Unit Tests
These should not assume any Onyx/external services are available to be called.
Interactions with the outside world should be mocked using `unittest.mock`. Generally, only
write these for complex, isolated modules e.g. `citation_processing.py`.
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest -xv backend/tests/unit
```
### External Dependency Unit Tests
These tests assume that all external dependencies of Onyx are available and callable (e.g. Postgres, Redis,
MinIO/S3, Vespa are running + OpenAI can be called + any request to the internet is fine + etc.).
However, the actual Onyx containers are not running and with these tests we call the function to test directly.
We can also mock components/calls at will.
The goal with these tests is to minimize mocking while giving some flexibility to mock things that are flaky,
need strictly controlled behavior, or need to have their internal behavior validated (e.g. verify a function is called
with certain args, something that would be impossible with proper integration tests).
A great example of this type of test is `backend/tests/external_dependency_unit/connectors/confluence/test_confluence_group_sync.py`.
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest backend/tests/external_dependency_unit
```
### Integration Tests
Standard integration tests. Every test in `backend/tests/integration` runs against a real Onyx deployment. We cannot
mock anything in these tests. Prefer writing integration tests (or External Dependency Unit Tests if mocking/internal
verification is necessary) over any other type of test.
Tests are parallelized at a directory level.
When writing integration tests, make sure to check the root `conftest.py` for useful fixtures + the `backend/tests/integration/common_utils` directory for utilities. Prefer calling the appropriate Manager
class in the utils (if one exists) over directly calling the APIs with a library like `requests`. Prefer using fixtures rather than
calling the utilities directly (e.g. do NOT create admin users with
`admin_user = UserManager.create(name="admin_user")`, instead use the `admin_user` fixture).
A great example of this type of test is `backend/tests/integration/dev_apis/test_simple_chat_api.py`.
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest backend/tests/integration
```
### Playwright (E2E) Tests
These tests are an even more complete version of the Integration Tests mentioned above. They run all services of Onyx,
*including* the Web Server.
Use these tests for anything that requires significant frontend <-> backend coordination.
Tests are located at `web/tests/e2e`. Tests are written in TypeScript.
To run them:
```bash
npx playwright test <TEST_NAME>
```
## Logs
When (1) writing integration tests or (2) doing live tests (e.g. curl / playwright) you can get access
to logs via the `backend/log/<service_name>_debug.log` file. All Onyx services (api_server, web_server, celery_X)
will be tailing their logs to this file.
## Security Considerations
- Never commit API keys or secrets to repository
- Use encrypted credential storage for connector credentials
- Follow RBAC patterns for new features
- Implement proper input validation with Pydantic models
- Use parameterized queries to prevent SQL injection
## AI/LLM Integration
- Multiple LLM providers supported via LiteLLM
- Configurable models per feature (chat, search, embeddings)
- Streaming support for real-time responses
- Token management and rate limiting
- Custom prompts and agent actions
## UI/UX Patterns
- Tailwind CSS with design system in `web/src/components/ui/`
- Radix UI and Headless UI for accessible components
- SWR for data fetching and caching
- Form validation with react-hook-form
- Error handling with popup notifications
## Creating a Plan
When creating a plan in the `plans` directory, make sure to include at least these elements:
**Issues to Address**
What the change is meant to do.
**Important Notes**
Things you come across in your research that are important to the implementation.
**Implementation strategy**
How you are going to make the changes happen. High level approach.
**Tests**
What unit (use rarely), external dependency unit, integration, and playwright tests you plan to write to
verify the correct behavior. Don't overtest. Usually, a given change only needs one type of test.
Do NOT include these: *Timeline*, *Rollback plan*
This is a minimal list - feel free to include more. Do NOT write code as part of your plan.
Keep it high level. You can reference certain files or functions though.
Before writing your plan, make sure to do research. Explore the relevant sections in the codebase.

View File

@@ -1,332 +0,0 @@
# CLAUDE.md
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
## KEY NOTES
- If you run into any missing python dependency errors, try running your command with `source .venv/bin/activate` \
to activate the python venv.
- To make tests work, check the `.env` file at the root of the project to find an OpenAI key.
- If using `playwright` to explore the frontend, you can usually log in with username `a@test.com` and password
`a`. The app can be accessed at `http://localhost:3000`.
- You should assume that all Onyx services are running. To verify, you can check the `backend/log` directory to
make sure we see logs coming out from the relevant service.
- To connect to the Postgres database, use: `docker exec -it onyx-relational_db-1 psql -U postgres -c "<SQL>"`
- When making calls to the backend, always go through the frontend. E.g. make a call to `http://localhost:3000/api/persona` not `http://localhost:8080/api/persona`
- Put ALL db operations under the `backend/onyx/db` / `backend/ee/onyx/db` directories. Don't run queries
outside of those directories.
## Project Overview
**Onyx** (formerly Danswer) is an open-source Gen-AI and Enterprise Search platform that connects to company documents, apps, and people. It features a modular architecture with both Community Edition (MIT licensed) and Enterprise Edition offerings.
### Background Workers (Celery)
Onyx uses Celery for asynchronous task processing with multiple specialized workers:
#### Worker Types
1. **Primary Worker** (`celery_app.py`)
- Coordinates core background tasks and system-wide operations
- Handles connector management, document sync, pruning, and periodic checks
- Runs with 4 threads concurrency
- Tasks: connector deletion, vespa sync, pruning, LLM model updates, user file sync
2. **Docfetching Worker** (`docfetching`)
- Fetches documents from external data sources (connectors)
- Spawns docprocessing tasks for each document batch
- Implements watchdog monitoring for stuck connectors
- Configurable concurrency (default from env)
3. **Docprocessing Worker** (`docprocessing`)
- Processes fetched documents through the indexing pipeline:
- Upserts documents to PostgreSQL
- Chunks documents and adds contextual information
- Embeds chunks via model server
- Writes chunks to Vespa vector database
- Updates document metadata
- Configurable concurrency (default from env)
4. **Light Worker** (`light`)
- Handles lightweight, fast operations
- Tasks: vespa operations, document permissions sync, external group sync
- Higher concurrency for quick tasks
5. **Heavy Worker** (`heavy`)
- Handles resource-intensive operations
- Primary task: document pruning operations
- Runs with 4 threads concurrency
6. **KG Processing Worker** (`kg_processing`)
- Handles Knowledge Graph processing and clustering
- Builds relationships between documents
- Runs clustering algorithms
- Configurable concurrency
7. **Monitoring Worker** (`monitoring`)
- System health monitoring and metrics collection
- Monitors Celery queues, process memory, and system status
- Single thread (monitoring doesn't need parallelism)
- Cloud-specific monitoring tasks
8. **User File Processing Worker** (`user_file_processing`)
- Processes user-uploaded files
- Handles user file indexing and project synchronization
- Configurable concurrency
9. **Beat Worker** (`beat`)
- Celery's scheduler for periodic tasks
- Uses DynamicTenantScheduler for multi-tenant support
- Schedules tasks like:
- Indexing checks (every 15 seconds)
- Connector deletion checks (every 20 seconds)
- Vespa sync checks (every 20 seconds)
- Pruning checks (every 20 seconds)
- KG processing (every 60 seconds)
- Monitoring tasks (every 5 minutes)
- Cleanup tasks (hourly)
#### Worker Deployment Modes
Onyx supports two deployment modes for background workers, controlled by the `USE_LIGHTWEIGHT_BACKGROUND_WORKER` environment variable:
**Lightweight Mode** (default, `USE_LIGHTWEIGHT_BACKGROUND_WORKER=true`):
- Runs a single consolidated `background` worker that handles all background tasks:
- Light worker tasks (Vespa operations, permissions sync, deletion)
- Document processing (indexing pipeline)
- Document fetching (connector data retrieval)
- Pruning operations (from `heavy` worker)
- Knowledge graph processing (from `kg_processing` worker)
- Monitoring tasks (from `monitoring` worker)
- User file processing (from `user_file_processing` worker)
- Lower resource footprint (fewer worker processes)
- Suitable for smaller deployments or development environments
- Default concurrency: 20 threads (increased to handle combined workload)
**Standard Mode** (`USE_LIGHTWEIGHT_BACKGROUND_WORKER=false`):
- Runs separate specialized workers as documented above (light, docprocessing, docfetching, heavy, kg_processing, monitoring, user_file_processing)
- Better isolation and scalability
- Can scale individual workers independently based on workload
- Suitable for production deployments with higher load
The deployment mode affects:
- **Backend**: Worker processes spawned by supervisord or dev scripts
- **Helm**: Which Kubernetes deployments are created
- **Dev Environment**: Which workers `dev_run_background_jobs.py` spawns
#### Key Features
- **Thread-based Workers**: All workers use thread pools (not processes) for stability
- **Tenant Awareness**: Multi-tenant support with per-tenant task isolation. There is a
middleware layer that automatically finds the appropriate tenant ID when sending tasks
via Celery Beat.
- **Task Prioritization**: High, Medium, Low priority queues
- **Monitoring**: Built-in heartbeat and liveness checking
- **Failure Handling**: Automatic retry and failure recovery mechanisms
- **Redis Coordination**: Inter-process communication via Redis
- **PostgreSQL State**: Task state and metadata stored in PostgreSQL
#### Important Notes
**Defining Tasks**:
- Always use `@shared_task` rather than `@celery_app`
- Put tasks under `background/celery/tasks/` or `ee/background/celery/tasks`
**Defining APIs**:
When creating new FastAPI APIs, do NOT use the `response_model` field. Instead, just type the
function.
**Testing Updates**:
If you make any updates to a celery worker and you want to test these changes, you will need
to ask me to restart the celery worker. There is no auto-restart on code-change mechanism.
### Code Quality
```bash
# Install and run pre-commit hooks
pre-commit install
pre-commit run --all-files
```
NOTE: Always make sure everything is strictly typed (both in Python and Typescript).
## Architecture Overview
### Technology Stack
- **Backend**: Python 3.11, FastAPI, SQLAlchemy, Alembic, Celery
- **Frontend**: Next.js 15+, React 18, TypeScript, Tailwind CSS
- **Database**: PostgreSQL with Redis caching
- **Search**: Vespa vector database
- **Auth**: OAuth2, SAML, multi-provider support
- **AI/ML**: LangChain, LiteLLM, multiple embedding models
### Directory Structure
```
backend/
├── onyx/
│ ├── auth/ # Authentication & authorization
│ ├── chat/ # Chat functionality & LLM interactions
│ ├── connectors/ # Data source connectors
│ ├── db/ # Database models & operations
│ ├── document_index/ # Vespa integration
│ ├── federated_connectors/ # External search connectors
│ ├── llm/ # LLM provider integrations
│ └── server/ # API endpoints & routers
├── ee/ # Enterprise Edition features
├── alembic/ # Database migrations
└── tests/ # Test suites
web/
├── src/app/ # Next.js app router pages
├── src/components/ # Reusable React components
└── src/lib/ # Utilities & business logic
```
## Database & Migrations
### Running Migrations
```bash
# Standard migrations
alembic upgrade head
# Multi-tenant (Enterprise)
alembic -n schema_private upgrade head
```
### Creating Migrations
```bash
# Create migration
alembic revision -m "description"
# Multi-tenant migration
alembic -n schema_private revision -m "description"
```
Write the migration manually and place it in the file that alembic creates when running the above command.
## Testing Strategy
First, you must activate the virtual environment with `source .venv/bin/activate`.
There are 4 main types of tests within Onyx:
### Unit Tests
These should not assume any Onyx/external services are available to be called.
Interactions with the outside world should be mocked using `unittest.mock`. Generally, only
write these for complex, isolated modules e.g. `citation_processing.py`.
To run them:
```bash
pytest -xv backend/tests/unit
```
### External Dependency Unit Tests
These tests assume that all external dependencies of Onyx are available and callable (e.g. Postgres, Redis,
MinIO/S3, Vespa are running + OpenAI can be called + any request to the internet is fine + etc.).
However, the actual Onyx containers are not running and with these tests we call the function to test directly.
We can also mock components/calls at will.
The goal with these tests is to minimize mocking while giving some flexibility to mock things that are flakey,
need strictly controlled behavior, or need to have their internal behavior validated (e.g. verify a function is called
with certain args, something that would be impossible with proper integration tests).
A great example of this type of test is `backend/tests/external_dependency_unit/connectors/confluence/test_confluence_group_sync.py`.
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest backend/tests/external_dependency_unit
```
### Integration Tests
Standard integration tests. Every test in `backend/tests/integration` runs against a real Onyx deployment. We cannot
mock anything in these tests. Prefer writing integration tests (or External Dependency Unit Tests if mocking/internal
verification is necessary) over any other type of test.
Tests are parallelized at a directory level.
When writing integration tests, make sure to check the root `conftest.py` for useful fixtures + the `backend/tests/integration/common_utils` directory for utilities. Prefer calling the appropriate Manager
class in the utils over directly calling the APIs with a library like `requests`. Prefer using fixtures rather than
calling the utilities directly (e.g. do NOT create admin users with
`admin_user = UserManager.create(name="admin_user")`, instead use the `admin_user` fixture).
A great example of this type of test is `backend/tests/integration/dev_apis/test_simple_chat_api.py`.
To run them:
```bash
python -m dotenv -f .vscode/.env run -- pytest backend/tests/integration
```
### Playwright (E2E) Tests
These tests are an even more complete version of the Integration Tests mentioned above. They run all services of Onyx,
*including* the Web Server.
Use these tests for anything that requires significant frontend <-> backend coordination.
Tests are located at `web/tests/e2e`. Tests are written in TypeScript.
To run them:
```bash
npx playwright test <TEST_NAME>
```
## Logs
When (1) writing integration tests or (2) doing live tests (e.g. curl / playwright) you can get access
to logs via the `backend/log/<service_name>_debug.log` file. All Onyx services (api_server, web_server, celery_X)
will be tailing their logs to this file.
## Security Considerations
- Never commit API keys or secrets to repository
- Use encrypted credential storage for connector credentials
- Follow RBAC patterns for new features
- Implement proper input validation with Pydantic models
- Use parameterized queries to prevent SQL injection
## AI/LLM Integration
- Multiple LLM providers supported via LiteLLM
- Configurable models per feature (chat, search, embeddings)
- Streaming support for real-time responses
- Token management and rate limiting
- Custom prompts and agent actions
## UI/UX Patterns
- Tailwind CSS with design system in `web/src/components/ui/`
- Radix UI and Headless UI for accessible components
- SWR for data fetching and caching
- Form validation with react-hook-form
- Error handling with popup notifications
## Creating a Plan
When creating a plan in the `plans` directory, make sure to include at least these elements:
**Issues to Address**
What the change is meant to do.
**Important Notes**
Things you come across in your research that are important to the implementation.
**Implementation strategy**
How you are going to make the changes happen. High level approach.
**Tests**
What unit (use rarely), external dependency unit, integration, and playwright tests you plan to write to
verify the correct behavior. Don't overtest. Usually, a given change only needs one type of test.
Do NOT include these: *Timeline*, *Rollback plan*
This is a minimal list - feel free to include more. Do NOT write code as part of your plan.
Keep it high level. You can reference certain files or functions though.
Before writing your plan, make sure to do research. Explore the relevant sections in the codebase.

View File

@@ -13,7 +13,8 @@ As an open source project in a rapidly changing space, we welcome all contributi
The [GitHub Issues](https://github.com/onyx-dot-app/onyx/issues) page is a great place to start for contribution ideas.
To ensure that your contribution is aligned with the project's direction, please reach out to any maintainer on the Onyx team
via [Discord](https://discord.gg/4NA5SbzrWb) or [email](mailto:hello@onyx.app).
via [Slack](https://join.slack.com/t/onyx-dot-app/shared_invite/zt-34lu4m7xg-TsKGO6h8PDvR5W27zTdyhA) /
[Discord](https://discord.gg/TDJ59cGV2X) or [email](mailto:founders@onyx.app).
Issues that have been explicitly approved by the maintainers (aligned with the direction of the project)
will be marked with the `approved by maintainers` label.
@@ -27,7 +28,8 @@ Your input is vital to making sure that Onyx moves in the right direction.
Before starting on implementation, please raise a GitHub issue.
Also, always feel free to message the founders (Chris Weaver / Yuhong Sun) on
[Discord](https://discord.gg/4NA5SbzrWb) directly about anything at all.
[Slack](https://join.slack.com/t/onyx-dot-app/shared_invite/zt-34lu4m7xg-TsKGO6h8PDvR5W27zTdyhA) /
[Discord](https://discord.gg/TDJ59cGV2X) directly about anything at all.
### Contributing Code
@@ -44,7 +46,9 @@ Our goal is to make contributing as easy as possible. If you run into any issues
That way we can help future contributors and users can avoid the same issue.
We also have support channels and generally interesting discussions on our
[Discord](https://discord.gg/4NA5SbzrWb).
[Slack](https://join.slack.com/t/onyx-dot-app/shared_invite/zt-2twesxdr6-5iQitKZQpgq~hYIZ~dv3KA)
and
[Discord](https://discord.gg/TDJ59cGV2X).
We would love to see you there!
@@ -71,15 +75,19 @@ If using a higher version, sometimes some libraries will not be available (i.e.
#### Backend: Python requirements
Currently, we use [uv](https://docs.astral.sh/uv/) and recommend creating a [virtual environment](https://docs.astral.sh/uv/pip/environments/#using-a-virtual-environment).
Currently, we use pip and recommend creating a virtual environment.
For convenience here's a command for it:
```bash
uv venv .venv --python 3.11
python -m venv .venv
source .venv/bin/activate
```
> **Note:**
> This virtual environment MUST NOT be set up WITHIN the onyx directory if you plan on using mypy within certain IDEs.
> For simplicity, we recommend setting up the virtual environment outside of the onyx directory.
_For Windows, activate the virtual environment using Command Prompt:_
```bash
@@ -95,26 +103,26 @@ If using PowerShell, the command slightly differs:
Install the required python dependencies:
```bash
uv sync --all-extras
pip install -r onyx/backend/requirements/default.txt
pip install -r onyx/backend/requirements/dev.txt
pip install -r onyx/backend/requirements/ee.txt
pip install -r onyx/backend/requirements/model_server.txt
```
Install Playwright for Python (headless browser required by the Web Connector):
Install Playwright for Python (headless browser required by the Web Connector)
In the activated Python virtualenv, install Playwright for Python by running:
```bash
uv run playwright install
playwright install
```
You may have to deactivate and reactivate your virtualenv for `playwright` to appear on your path.
#### Frontend: Node dependencies
Onyx uses Node v22.20.0. We highly recommend you use [Node Version Manager (nvm)](https://github.com/nvm-sh/nvm)
to manage your Node installations. Once installed, you can run
```bash
nvm install 22 && nvm use 22
node -v # verify your active version
```
Navigate to `onyx/web` and run:
Install [Node.js and npm](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm) for the frontend.
Once the above is done, navigate to `onyx/web` and run:
```bash
npm i
@@ -125,30 +133,36 @@ npm i
### Backend
For the backend, you'll need to setup pre-commit hooks (black / reorder-python-imports).
First, install pre-commit (if you don't have it already) following the instructions
[here](https://pre-commit.com/#installation).
Then run:
With the virtual environment active, install the pre-commit library with:
```bash
uv run pre-commit install
pip install pre-commit
```
Then, from the `onyx/backend` directory, run:
```bash
pre-commit install
```
Additionally, we use `mypy` for static type checking.
Onyx is fully type-annotated, and we want to keep it that way!
To run the mypy checks manually, run `uv run mypy .` from the `onyx/backend` directory.
To run the mypy checks manually, run `python -m mypy .` from the `onyx/backend` directory.
### Web
We use `prettier` for formatting. The desired version will be installed via a `npm i` from the `onyx/web` directory.
We use `prettier` for formatting. The desired version (2.8.8) will be installed via a `npm i` from the `onyx/web` directory.
To run the formatter, use `npx prettier --write .` from the `onyx/web` directory.
Pre-commit will also run prettier automatically on files you've recently touched. If re-formatted, your commit will fail.
Re-stage your changes and commit again.
Please double check that prettier passes before creating a pull request.
# Running the application for development
## Developing using VSCode Debugger (recommended)
**We highly recommend using VSCode debugger for development.**
We highly recommend using VSCode debugger for development.
See [CONTRIBUTING_VSCODE.md](./CONTRIBUTING_VSCODE.md) for more details.
Otherwise, you can follow the instructions below to run the application for development.
@@ -161,7 +175,7 @@ You will need Docker installed to run these containers.
First navigate to `onyx/deployment/docker_compose`, then start up Postgres/Vespa/Redis/MinIO with:
```bash
docker compose up -d index relational_db cache minio
docker compose -f docker-compose.dev.yml -p onyx-stack up -d index relational_db cache minio
```
(index refers to Vespa, relational_db refers to Postgres, and cache refers to Redis)
@@ -243,7 +257,7 @@ You can run the full Onyx application stack from pre-built images including all
Navigate to `onyx/deployment/docker_compose` and run:
```bash
docker compose up -d
docker compose -f docker-compose.dev.yml -p onyx-stack up -d
```
After Docker pulls and starts these containers, navigate to `http://localhost:3000` to use Onyx.
@@ -251,7 +265,7 @@ After Docker pulls and starts these containers, navigate to `http://localhost:30
If you want to make changes to Onyx and run those changes in Docker, you can also build a local version of the Onyx container images that incorporates your changes like so:
```bash
docker compose up -d --build
docker compose -f docker-compose.dev.yml -p onyx-stack up -d --build
```

View File

@@ -5,7 +5,7 @@ This guide explains how to set up and use VSCode's debugging capabilities with t
## Initial Setup
1. **Environment Setup**:
- Copy `.vscode/env_template.txt` to `.vscode/.env`
- Copy `.vscode/.env.template` to `.vscode/.env`
- Fill in the necessary environment variables in `.vscode/.env`
2. **launch.json**:
- Copy `.vscode/launch.template.jsonc` to `.vscode/launch.json`
@@ -17,12 +17,10 @@ Before starting, make sure the Docker Daemon is running.
1. Open the Debug view in VSCode (Cmd+Shift+D on macOS)
2. From the dropdown at the top, select "Clear and Restart External Volumes and Containers" and press the green play button
3. From the dropdown at the top, select "Run All Onyx Services" and press the green play button
4. Now, you can navigate to onyx in your browser (default is http://localhost:3000) and start using the app
5. You can set breakpoints by clicking to the left of line numbers to help debug while the app is running
6. Use the debug toolbar to step through code, inspect variables, etc.
Note: Clear and Restart External Volumes and Containers will reset your postgres and Vespa (relational-db and index).
Only run this if you are okay with wiping your data.
4. CD into web, run "npm i" followed by npm run dev.
5. Now, you can navigate to onyx in your browser (default is http://localhost:3000) and start using the app
6. You can set breakpoints by clicking to the left of line numbers to help debug while the app is running
7. Use the debug toolbar to step through code, inspect variables, etc.
## Features

141
README.md
View File

@@ -1,108 +1,117 @@
<!-- ONYX_METADATA={"link": "https://github.com/onyx-dot-app/onyx/blob/main/README.md"} -->
<a name="readme-top"></a>
<h2 align="center">
<a href="https://www.onyx.app/?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme"> <img width="50%" src="https://github.com/onyx-dot-app/onyx/blob/logo/OnyxLogoCropped.jpg?raw=true" /></a>
<a href="https://www.onyx.app/"> <img width="50%" src="https://github.com/onyx-dot-app/onyx/blob/logo/OnyxLogoCropped.jpg?raw=true" /></a>
</h2>
<p align="center">Open Source AI Platform</p>
<p align="center">
<p align="center">Open Source Gen-AI + Enterprise Search.</p>
<p align="center">
<a href="https://discord.gg/TDJ59cGV2X" target="_blank">
<img src="https://img.shields.io/badge/discord-join-blue.svg?logo=discord&logoColor=white" alt="Discord" />
</a>
<a href="https://docs.onyx.app/?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme" target="_blank">
<img src="https://img.shields.io/badge/docs-view-blue" alt="Documentation" />
</a>
<a href="https://www.onyx.app/?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme" target="_blank">
<img src="https://img.shields.io/website?url=https://www.onyx.app&up_message=visit&up_color=blue" alt="Documentation" />
</a>
<a href="https://github.com/onyx-dot-app/onyx/blob/main/LICENSE" target="_blank">
<img src="https://img.shields.io/static/v1?label=license&message=MIT&color=blue" alt="License" />
</a>
<a href="https://docs.onyx.app/" target="_blank">
<img src="https://img.shields.io/badge/docs-view-blue" alt="Documentation">
</a>
<a href="https://join.slack.com/t/onyx-dot-app/shared_invite/zt-34lu4m7xg-TsKGO6h8PDvR5W27zTdyhA" target="_blank">
<img src="https://img.shields.io/badge/slack-join-blue.svg?logo=slack" alt="Slack">
</a>
<a href="https://discord.gg/TDJ59cGV2X" target="_blank">
<img src="https://img.shields.io/badge/discord-join-blue.svg?logo=discord&logoColor=white" alt="Discord">
</a>
<a href="https://github.com/onyx-dot-app/onyx/blob/main/README.md" target="_blank">
<img src="https://img.shields.io/static/v1?label=license&message=MIT&color=blue" alt="License">
</a>
</p>
<p align="center">
<a href="https://trendshift.io/repositories/12516" target="_blank">
<img src="https://trendshift.io/api/badge/repositories/12516" alt="onyx-dot-app/onyx | Trendshift" style="width: 250px; height: 55px;" />
</a>
</p>
<strong>[Onyx](https://www.onyx.app/)</strong> (formerly Danswer) is the AI platform connected to your company's docs, apps, and people.
Onyx provides a feature rich Chat interface and plugs into any LLM of your choice.
Keep knowledge and access controls synced across over 40 connectors like Google Drive, Slack, Confluence, Salesforce, etc.
Create custom AI agents with unique prompts, knowledge, and actions that the agents can take.
Onyx can be deployed securely anywhere and for any scale - on a laptop, on-premise, or to cloud.
**[Onyx](https://www.onyx.app/?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme)** is a feature-rich, self-hostable Chat UI that works with any LLM. It is easy to deploy and can run in a completely airgapped environment.
<h3>Feature Highlights</h3>
Onyx comes loaded with advanced features like Agents, Web Search, RAG, MCP, Deep Research, Connectors to 40+ knowledge sources, and more.
**Deep research over your team's knowledge:**
> [!TIP]
> Run Onyx with one command (or see deployment section below):
> ```
> curl -fsSL https://raw.githubusercontent.com/onyx-dot-app/onyx/main/deployment/docker_compose/install.sh > install.sh && chmod +x install.sh && ./install.sh
> ```
https://private-user-images.githubusercontent.com/32520769/414509312-48392e83-95d0-4fb5-8650-a396e05e0a32.mp4?jwt=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJnaXRodWIuY29tIiwiYXVkIjoicmF3LmdpdGh1YnVzZXJjb250ZW50LmNvbSIsImtleSI6ImtleTUiLCJleHAiOjE3Mzk5Mjg2MzYsIm5iZiI6MTczOTkyODMzNiwicGF0aCI6Ii8zMjUyMDc2OS80MTQ1MDkzMTItNDgzOTJlODMtOTVkMC00ZmI1LTg2NTAtYTM5NmUwNWUwYTMyLm1wND9YLUFtei1BbGdvcml0aG09QVdTNC1ITUFDLVNIQTI1NiZYLUFtei1DcmVkZW50aWFsPUFLSUFWQ09EWUxTQTUzUFFLNFpBJTJGMjAyNTAyMTklMkZ1cy1lYXN0LTElMkZzMyUyRmF3czRfcmVxdWVzdCZYLUFtei1EYXRlPTIwMjUwMjE5VDAxMjUzNlomWC1BbXotRXhwaXJlcz0zMDAmWC1BbXotU2lnbmF0dXJlPWFhMzk5Njg2Y2Y5YjFmNDNiYTQ2YzM5ZTg5YWJiYTU2NWMyY2YwNmUyODE2NWUxMDRiMWQxZWJmODI4YTA0MTUmWC1BbXotU2lnbmVkSGVhZGVycz1ob3N0In0.a9D8A0sgKE9AoaoE-mfFbJ6_OKYeqaf7TZ4Han2JfW8
****
**Use Onyx as a secure AI Chat with any LLM:**
![Onyx Chat Silent Demo](https://github.com/onyx-dot-app/onyx/releases/download/v0.21.1/OnyxChatSilentDemo.gif)
**Easily set up connectors to your apps:**
## ⭐ Features
- **🤖 Custom Agents:** Build AI Agents with unique instructions, knowledge and actions.
- **🌍 Web Search:** Browse the web with Google PSE, Exa, and Serper as well as an in-house scraper or Firecrawl.
- **🔍 RAG:** Best in class hybrid-search + knowledge graph for uploaded files and ingested documents from connectors.
- **🔄 Connectors:** Pull knowledge, metadata, and access information from over 40 applications.
- **🔬 Deep Research:** Get in depth answers with an agentic multi-step search.
- **▶️ Actions & MCP:** Give AI Agents the ability to interact with external systems.
- **💻 Code Interpreter:** Execute code to analyze data, render graphs and create files.
- **🎨 Image Generation:** Generate images based on user prompts.
- **👥 Collaboration:** Chat sharing, feedback gathering, user management, usage analytics, and more.
Onyx works with all LLMs (like OpenAI, Anthropic, Gemini, etc.) and self-hosted LLMs (like Ollama, vLLM, etc.)
To learn more about the features, check out our [documentation](https://docs.onyx.app/welcome?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme)!
![Onyx Connector Silent Demo](https://github.com/onyx-dot-app/onyx/releases/download/v0.21.1/OnyxConnectorSilentDemo.gif)
**Access Onyx where your team already works:**
## 🚀 Deployment
Onyx supports deployments in Docker, Kubernetes, Terraform, along with guides for major cloud providers.
See guides below:
- [Docker](https://docs.onyx.app/deployment/local/docker?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme) or [Quickstart](https://docs.onyx.app/deployment/getting_started/quickstart?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme) (best for most users)
- [Kubernetes](https://docs.onyx.app/deployment/local/kubernetes?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme) (best for large teams)
- [Terraform](https://docs.onyx.app/deployment/local/terraform?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme) (best for teams already using Terraform)
- Cloud specific guides (best if specifically using [AWS EKS](https://docs.onyx.app/deployment/cloud/aws/eks?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme), [Azure VMs](https://docs.onyx.app/deployment/cloud/azure?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme), etc.)
> [!TIP]
> **To try Onyx for free without deploying, check out [Onyx Cloud](https://cloud.onyx.app/signup?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme)**.
![Onyx Bot Demo](https://github.com/onyx-dot-app/onyx/releases/download/v0.21.1/OnyxBot.png)
## Deployment
**To try it out for free and get started in seconds, check out [Onyx Cloud](https://cloud.onyx.app/signup)**.
## 🔍 Other Notable Benefits
Onyx is built for teams of all sizes, from individual users to the largest global enterprises.
Onyx can also be run locally (even on a laptop) or deployed on a virtual machine with a single
`docker compose` command. Checkout our [docs](https://docs.onyx.app/quickstart) to learn more.
- **Enterprise Search**: far more than simple RAG, Onyx has custom indexing and retrieval that remains performant and accurate for scales of up to tens of millions of documents.
- **Security**: SSO (OIDC/SAML/OAuth2), RBAC, encryption of credentials, etc.
- **Management UI**: different user roles such as basic, curator, and admin.
- **Document Permissioning**: mirrors user access from external apps for RAG use cases.
We also have built-in support for high-availability/scalable deployment on Kubernetes.
References [here](https://github.com/onyx-dot-app/onyx/tree/main/deployment).
## 🔍 Other Notable Benefits of Onyx
- Custom deep learning models for indexing and inference time, only through Onyx + learning from user feedback.
- Flexible security features like SSO (OIDC/SAML/OAuth2), RBAC, encryption of credentials, etc.
- Knowledge curation features like document-sets, query history, usage analytics, etc.
- Scalable deployment options tested up to many tens of thousands users and hundreds of millions of documents.
## 🚧 Roadmap
To see ongoing and upcoming projects, check out our [roadmap](https://github.com/orgs/onyx-dot-app/projects/2)!
- New methods in information retrieval (StructRAG, LightGraphRAG, etc.)
- Personalized Search
- Organizational understanding and ability to locate and suggest experts from your team.
- Code Search
- SQL and Structured Query Language
## 🔌 Connectors
Keep knowledge and access in sync across 40+ connectors:
- Google Drive
- Confluence
- Slack
- Gmail
- Salesforce
- Microsoft Sharepoint
- Github
- Jira
- Zendesk
- Gong
- Microsoft Teams
- Dropbox
- Local Files
- Websites
- And more ...
See the full list [here](https://docs.onyx.app/connectors).
## 📚 Licensing
There are two editions of Onyx:
- Onyx Community Edition (CE) is available freely under the MIT license.
- Onyx Community Edition (CE) is available freely under the MIT Expat license. Simply follow the Deployment guide above.
- Onyx Enterprise Edition (EE) includes extra features that are primarily useful for larger organizations.
For feature details, check out [our website](https://www.onyx.app/pricing?utm_source=onyx_repo&utm_medium=github&utm_campaign=readme).
## 👪 Community
Join our open source community on **[Discord](https://discord.gg/TDJ59cGV2X)**!
For feature details, check out [our website](https://www.onyx.app/pricing).
To try the Onyx Enterprise Edition:
1. Checkout [Onyx Cloud](https://cloud.onyx.app/signup).
2. For self-hosting the Enterprise Edition, contact us at [founders@onyx.app](mailto:founders@onyx.app) or book a call with us on our [Cal](https://cal.com/team/onyx/founders).
## 💡 Contributing
Looking to contribute? Please check out the [Contribution Guide](CONTRIBUTING.md) for more details.

View File

@@ -7,20 +7,15 @@ have a contract or agreement with DanswerAI, you are not permitted to use the En
Edition features outside of personal development or testing purposes. Please reach out to \
founders@onyx.app for more information. Please visit https://github.com/onyx-dot-app/onyx"
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.0.0-dev
# DO_NOT_TRACK is used to disable telemetry for Unstructured
ENV DANSWER_RUNNING_IN_DOCKER="true" \
DO_NOT_TRACK="true" \
PLAYWRIGHT_BROWSERS_PATH="/app/.cache/ms-playwright"
ENV ONYX_VERSION=${ONYX_VERSION} \
DANSWER_RUNNING_IN_DOCKER="true" \
DO_NOT_TRACK="true"
# Create non-root user for security best practices
RUN groupadd -g 1001 onyx && \
useradd -u 1001 -g onyx -m -s /bin/bash onyx && \
mkdir -p /var/log/onyx && \
chmod 755 /var/log/onyx && \
chown onyx:onyx /var/log/onyx
COPY --from=ghcr.io/astral-sh/uv:0.9.9 /uv /uvx /bin/
RUN echo "ONYX_VERSION: ${ONYX_VERSION}"
# Install system dependencies
# cmake needed for psycopg (postgres)
# libpq-dev needed for psycopg (postgres)
@@ -52,20 +47,22 @@ RUN apt-get update && \
# Remove py which is pulled in by retry, py is not needed and is a CVE
COPY ./requirements/default.txt /tmp/requirements.txt
COPY ./requirements/ee.txt /tmp/ee-requirements.txt
RUN uv pip install --system --no-cache-dir --upgrade \
RUN pip install --no-cache-dir --upgrade \
--retries 5 \
--timeout 30 \
-r /tmp/requirements.txt \
-r /tmp/ee-requirements.txt && \
pip uninstall -y py && \
playwright install chromium && \
playwright install-deps chromium && \
chown -R onyx:onyx /app && \
ln -s /usr/local/bin/supervisord /usr/bin/supervisord && \
# Cleanup for CVEs and size reduction
# https://github.com/tornadoweb/tornado/issues/3107
# xserver-common and xvfb included by playwright installation but not needed after
# perl-base is part of the base Python Debian image but not needed for Onyx functionality
# perl-base could only be removed with --allow-remove-essential
apt-get update && \
ln -s /usr/local/bin/supervisord /usr/bin/supervisord
# Cleanup for CVEs and size reduction
# https://github.com/tornadoweb/tornado/issues/3107
# xserver-common and xvfb included by playwright installation but not needed after
# perl-base is part of the base Python Debian image but not needed for Onyx functionality
# perl-base could only be removed with --allow-remove-essential
RUN apt-get update && \
apt-get remove -y --allow-remove-essential \
perl-base \
xserver-common \
@@ -75,16 +72,15 @@ RUN uv pip install --system --no-cache-dir --upgrade \
libxmlsec1-dev \
pkg-config \
gcc && \
# Install here to avoid some packages being cleaned up above
apt-get install -y \
libxmlsec1-openssl \
# Install postgresql-client for easy manual tests
postgresql-client && \
apt-get install -y libxmlsec1-openssl && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/* && \
rm -rf ~/.cache/uv /tmp/*.txt && \
rm -f /usr/local/lib/python3.11/site-packages/tornado/test/test.key
# Install postgresql-client for easy manual tests
# Install it here to avoid it being cleaned up above
RUN apt-get update && apt-get install -y postgresql-client
# Pre-downloading models for setups with limited egress
RUN python -c "from tokenizers import Tokenizer; \
Tokenizer.from_pretrained('nomic-ai/nomic-embed-text-v1')"
@@ -95,41 +91,31 @@ nltk.download('stopwords', quiet=True); \
nltk.download('punkt_tab', quiet=True);"
# nltk.download('wordnet', quiet=True); introduce this back if lemmatization is needed
# Pre-downloading tiktoken for setups with limited egress
RUN python -c "import tiktoken; \
tiktoken.get_encoding('cl100k_base')"
# Set up application files
WORKDIR /app
# Enterprise Version Files
COPY --chown=onyx:onyx ./ee /app/ee
COPY ./ee /app/ee
COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf
# Set up application files
COPY --chown=onyx:onyx ./onyx /app/onyx
COPY --chown=onyx:onyx ./shared_configs /app/shared_configs
COPY --chown=onyx:onyx ./alembic /app/alembic
COPY --chown=onyx:onyx ./alembic_tenants /app/alembic_tenants
COPY --chown=onyx:onyx ./alembic.ini /app/alembic.ini
COPY ./onyx /app/onyx
COPY ./shared_configs /app/shared_configs
COPY ./alembic /app/alembic
COPY ./alembic_tenants /app/alembic_tenants
COPY ./alembic.ini /app/alembic.ini
COPY supervisord.conf /usr/etc/supervisord.conf
COPY --chown=onyx:onyx ./static /app/static
COPY ./static /app/static
# Escape hatch scripts
COPY --chown=onyx:onyx ./scripts/debugging /app/scripts/debugging
COPY --chown=onyx:onyx ./scripts/force_delete_connector_by_id.py /app/scripts/force_delete_connector_by_id.py
COPY --chown=onyx:onyx ./scripts/supervisord_entrypoint.sh /app/scripts/supervisord_entrypoint.sh
RUN chmod +x /app/scripts/supervisord_entrypoint.sh
COPY ./scripts/debugging /app/scripts/debugging
COPY ./scripts/force_delete_connector_by_id.py /app/scripts/force_delete_connector_by_id.py
# Put logo in assets
COPY --chown=onyx:onyx ./assets /app/assets
COPY ./assets /app/assets
ENV PYTHONPATH=/app
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.0.0-dev
ENV ONYX_VERSION=${ONYX_VERSION}
# Default command which does nothing
# This container is used by api server and background which specify their own CMD
CMD ["tail", "-f", "/dev/null"]

View File

@@ -1,42 +1,4 @@
# Base stage with dependencies
FROM python:3.11.7-slim-bookworm AS base
ENV DANSWER_RUNNING_IN_DOCKER="true" \
HF_HOME=/app/.cache/huggingface
COPY --from=ghcr.io/astral-sh/uv:0.9.9 /uv /uvx /bin/
RUN mkdir -p /app/.cache/huggingface
COPY ./requirements/model_server.txt /tmp/requirements.txt
RUN uv pip install --system --no-cache-dir --upgrade \
-r /tmp/requirements.txt && \
rm -rf ~/.cache/uv /tmp/*.txt
# Stage for downloading tokenizers
FROM base AS tokenizers
RUN python -c "from transformers import AutoTokenizer; \
AutoTokenizer.from_pretrained('distilbert-base-uncased'); \
AutoTokenizer.from_pretrained('mixedbread-ai/mxbai-rerank-xsmall-v1');"
# Stage for downloading Onyx models
FROM base AS onyx-models
RUN python -c "from huggingface_hub import snapshot_download; \
snapshot_download(repo_id='onyx-dot-app/hybrid-intent-token-classifier'); \
snapshot_download(repo_id='onyx-dot-app/information-content-model');"
# Stage for downloading embedding and reranking models
FROM base AS embedding-models
RUN python -c "from huggingface_hub import snapshot_download; \
snapshot_download('nomic-ai/nomic-embed-text-v1'); \
snapshot_download('mixedbread-ai/mxbai-rerank-xsmall-v1');"
# Initialize SentenceTransformer to cache the custom architecture
RUN python -c "from sentence_transformers import SentenceTransformer; \
SentenceTransformer(model_name_or_path='nomic-ai/nomic-embed-text-v1', trust_remote_code=True);"
# Final stage - combine all downloads
FROM base AS final
FROM python:3.11.7-slim-bookworm
LABEL com.danswer.maintainer="founders@onyx.app"
LABEL com.danswer.description="This image is for the Onyx model server which runs all of the \
@@ -44,19 +6,41 @@ AI models for Onyx. This container and all the code is MIT Licensed and free for
You can find it at https://hub.docker.com/r/onyx/onyx-model-server. For more details, \
visit https://github.com/onyx-dot-app/onyx."
# Create non-root user for security best practices
RUN groupadd -g 1001 onyx && \
useradd -u 1001 -g onyx -m -s /bin/bash onyx && \
mkdir -p /var/log/onyx && \
chmod 755 /var/log/onyx && \
chown onyx:onyx /var/log/onyx
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.0.0-dev
ENV ONYX_VERSION=${ONYX_VERSION} \
DANSWER_RUNNING_IN_DOCKER="true"
# In case the user has volumes mounted to /app/.cache/huggingface that they've downloaded while
# running Onyx, move the current contents of the cache folder to a temporary location to ensure
# it's preserved in order to combine with the user's cache contents
COPY --chown=onyx:onyx --from=tokenizers /app/.cache/huggingface /app/.cache/temp_huggingface
COPY --chown=onyx:onyx --from=onyx-models /app/.cache/huggingface /app/.cache/temp_huggingface
COPY --chown=onyx:onyx --from=embedding-models /app/.cache/huggingface /app/.cache/temp_huggingface
RUN echo "ONYX_VERSION: ${ONYX_VERSION}"
COPY ./requirements/model_server.txt /tmp/requirements.txt
RUN pip install --no-cache-dir --upgrade \
--retries 5 \
--timeout 30 \
-r /tmp/requirements.txt
RUN apt-get remove -y --allow-remove-essential perl-base && \
apt-get autoremove -y
# Pre-downloading models for setups with limited egress
# Download tokenizers, distilbert for the Onyx model
# Download model weights
# Run Nomic to pull in the custom architecture and have it cached locally
RUN python -c "from transformers import AutoTokenizer; \
AutoTokenizer.from_pretrained('distilbert-base-uncased'); \
AutoTokenizer.from_pretrained('mixedbread-ai/mxbai-rerank-xsmall-v1'); \
from huggingface_hub import snapshot_download; \
snapshot_download(repo_id='onyx-dot-app/hybrid-intent-token-classifier'); \
snapshot_download(repo_id='onyx-dot-app/information-content-model'); \
snapshot_download('nomic-ai/nomic-embed-text-v1'); \
snapshot_download('mixedbread-ai/mxbai-rerank-xsmall-v1'); \
from sentence_transformers import SentenceTransformer; \
SentenceTransformer(model_name_or_path='nomic-ai/nomic-embed-text-v1', trust_remote_code=True);"
# In case the user has volumes mounted to /root/.cache/huggingface that they've downloaded while
# running Onyx, don't overwrite it with the built in cache folder
RUN mv /root/.cache/huggingface /root/.cache/temp_huggingface
WORKDIR /app
@@ -75,8 +59,4 @@ COPY ./model_server /app/model_server
ENV PYTHONPATH=/app
# Default ONYX_VERSION, typically overriden during builds by GitHub Actions.
ARG ONYX_VERSION=0.0.0-dev
ENV ONYX_VERSION=${ONYX_VERSION}
CMD ["uvicorn", "model_server.main:app", "--host", "0.0.0.0", "--port", "9000"]

View File

@@ -7,12 +7,8 @@ Onyx migrations use a generic single-database configuration with an async dbapi.
## To generate new migrations:
From onyx/backend, run:
`alembic revision -m <DESCRIPTION_OF_MIGRATION>`
Note: you cannot use the `--autogenerate` flag as the automatic schema parsing does not work.
Manually populate the upgrade and downgrade in your new migration.
run from onyx/backend:
`alembic revision --autogenerate -m <DESCRIPTION_OF_MIGRATION>`
More info can be found here: https://alembic.sqlalchemy.org/en/latest/autogenerate.html

View File

@@ -1,153 +0,0 @@
"""add permission sync attempt tables
Revision ID: 03d710ccf29c
Revises: 96a5702df6aa
Create Date: 2025-09-11 13:30:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "03d710ccf29c" # Generate a new unique ID
down_revision = "96a5702df6aa"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create tracking tables for document- and group-permission sync attempts.

    Adds two append-only attempt tables (one per sync kind) plus indexes that
    support "latest attempt per cc_pair" and "recent attempts by status" queries.
    """
    # Create the permission sync status enum.
    # native_enum=False stores the value as VARCHAR + CHECK constraint instead of
    # a Postgres ENUM type, so adding states later needs no ALTER TYPE.
    permission_sync_status_enum = sa.Enum(
        "not_started",
        "in_progress",
        "success",
        "canceled",
        "failed",
        "completed_with_errors",
        name="permissionsyncstatus",
        native_enum=False,
    )

    # Create doc_permission_sync_attempt table
    op.create_table(
        "doc_permission_sync_attempt",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("connector_credential_pair_id", sa.Integer(), nullable=False),
        sa.Column("status", permission_sync_status_enum, nullable=False),
        # Progress counters are nullable: unknown until the sync run reports them.
        sa.Column("total_docs_synced", sa.Integer(), nullable=True),
        sa.Column("docs_with_permission_errors", sa.Integer(), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column(
            "time_created",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column("time_started", sa.DateTime(timezone=True), nullable=True),
        sa.Column("time_finished", sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(
            ["connector_credential_pair_id"],
            ["connector_credential_pair.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )

    # Create indexes for doc_permission_sync_attempt
    op.create_index(
        "ix_doc_permission_sync_attempt_time_created",
        "doc_permission_sync_attempt",
        ["time_created"],
        unique=False,
    )
    # Composite index serves "latest attempt for a given cc_pair" lookups.
    op.create_index(
        "ix_permission_sync_attempt_latest_for_cc_pair",
        "doc_permission_sync_attempt",
        ["connector_credential_pair_id", "time_created"],
        unique=False,
    )
    # time_finished DESC (expression element) supports "most recent runs in
    # status X" queries without an extra sort.
    op.create_index(
        "ix_permission_sync_attempt_status_time",
        "doc_permission_sync_attempt",
        ["status", sa.text("time_finished DESC")],
        unique=False,
    )

    # Create external_group_permission_sync_attempt table
    # connector_credential_pair_id is nullable - group syncs can be global (e.g., Confluence)
    op.create_table(
        "external_group_permission_sync_attempt",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("connector_credential_pair_id", sa.Integer(), nullable=True),
        sa.Column("status", permission_sync_status_enum, nullable=False),
        sa.Column("total_users_processed", sa.Integer(), nullable=True),
        sa.Column("total_groups_processed", sa.Integer(), nullable=True),
        sa.Column("total_group_memberships_synced", sa.Integer(), nullable=True),
        sa.Column("error_message", sa.Text(), nullable=True),
        sa.Column(
            "time_created",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column("time_started", sa.DateTime(timezone=True), nullable=True),
        sa.Column("time_finished", sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(
            ["connector_credential_pair_id"],
            ["connector_credential_pair.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )

    # Create indexes for external_group_permission_sync_attempt
    op.create_index(
        "ix_external_group_permission_sync_attempt_time_created",
        "external_group_permission_sync_attempt",
        ["time_created"],
        unique=False,
    )
    op.create_index(
        "ix_group_sync_attempt_cc_pair_time",
        "external_group_permission_sync_attempt",
        ["connector_credential_pair_id", "time_created"],
        unique=False,
    )
    op.create_index(
        "ix_group_sync_attempt_status_time",
        "external_group_permission_sync_attempt",
        ["status", sa.text("time_finished DESC")],
        unique=False,
    )
def downgrade() -> None:
    """Drop the permission sync attempt tables and their indexes.

    Mirror image of upgrade(): indexes are removed first, then the tables.
    """
    # (index_name, owning_table) pairs, dropped in reverse creation order.
    index_table_pairs = [
        ("ix_group_sync_attempt_status_time", "external_group_permission_sync_attempt"),
        ("ix_group_sync_attempt_cc_pair_time", "external_group_permission_sync_attempt"),
        (
            "ix_external_group_permission_sync_attempt_time_created",
            "external_group_permission_sync_attempt",
        ),
        ("ix_permission_sync_attempt_status_time", "doc_permission_sync_attempt"),
        ("ix_permission_sync_attempt_latest_for_cc_pair", "doc_permission_sync_attempt"),
        ("ix_doc_permission_sync_attempt_time_created", "doc_permission_sync_attempt"),
    ]
    for index_name, owning_table in index_table_pairs:
        op.drop_index(index_name, table_name=owning_table)

    # Tables go last, once nothing references them.
    op.drop_table("external_group_permission_sync_attempt")
    op.drop_table("doc_permission_sync_attempt")

View File

@@ -1,33 +0,0 @@
"""add theme_preference to user
Revision ID: 09995b8811eb
Revises: 3d1cca026fe8
Create Date: 2025-10-24 08:58:50.246949
"""
from alembic import op
import sqlalchemy as sa
from onyx.db.enums import ThemePreference
# revision identifiers, used by Alembic.
revision = "09995b8811eb"
down_revision = "3d1cca026fe8"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable theme_preference column to the user table."""
    # native_enum=False keeps the enum as a plain VARCHAR-backed column.
    theme_column = sa.Column(
        "theme_preference",
        sa.Enum(ThemePreference, native_enum=False),
        nullable=True,
    )
    op.add_column("user", theme_column)
def downgrade() -> None:
    """Remove the theme_preference column from the user table."""
    op.drop_column("user", "theme_preference")

View File

@@ -1,389 +0,0 @@
"""Migration 2: User file data preparation and backfill
Revision ID: 0cd424f32b1d
Revises: 9b66d3156fc6
Create Date: 2025-09-22 09:44:42.727034
This migration populates the new columns added in migration 1.
It prepares data for the UUID transition and relationship migration.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
import logging
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "0cd424f32b1d"
down_revision = "9b66d3156fc6"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Populate new columns with data.

    Second of three user-file migrations: fills the columns added by migration 1
    (UUIDs, project links, status) in batches, verifying each step before
    tightening NOT NULL constraints. All SQL is written to be asyncpg-compatible
    (no temp tables, no multi-statement constructs).
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)

    # === Step 1: Populate user_file.new_id ===
    user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
    has_new_id = "new_id" in user_file_columns

    if has_new_id:
        logger.info("Populating user_file.new_id with UUIDs...")

        # Count rows needing UUIDs
        null_count = bind.execute(
            text("SELECT COUNT(*) FROM user_file WHERE new_id IS NULL")
        ).scalar_one()

        if null_count > 0:
            logger.info(f"Generating UUIDs for {null_count} user_file records...")

            # Populate in batches to avoid long locks
            batch_size = 10000
            total_updated = 0

            while True:
                result = bind.execute(
                    text(
                        """
                        UPDATE user_file
                        SET new_id = gen_random_uuid()
                        WHERE new_id IS NULL
                        AND id IN (
                            SELECT id FROM user_file
                            WHERE new_id IS NULL
                            LIMIT :batch_size
                        )
                        """
                    ),
                    {"batch_size": batch_size},
                )
                updated = result.rowcount
                total_updated += updated

                # A short batch means no NULL rows remain.
                if updated < batch_size:
                    break

                logger.info(f" Updated {total_updated}/{null_count} records...")

            logger.info(f"Generated UUIDs for {total_updated} user_file records")

        # Verify all records have UUIDs before making the column NOT NULL.
        remaining_null = bind.execute(
            text("SELECT COUNT(*) FROM user_file WHERE new_id IS NULL")
        ).scalar_one()
        if remaining_null > 0:
            raise Exception(
                f"Failed to populate all user_file.new_id values ({remaining_null} NULL)"
            )

        # Lock down the column: NOT NULL and drop the transitional server default.
        op.alter_column("user_file", "new_id", nullable=False)
        op.alter_column("user_file", "new_id", server_default=None)
        logger.info("Locked down user_file.new_id column")

    # === Step 2: Populate persona__user_file.user_file_id_uuid ===
    persona_user_file_columns = [
        col["name"] for col in inspector.get_columns("persona__user_file")
    ]

    if has_new_id and "user_file_id_uuid" in persona_user_file_columns:
        logger.info("Populating persona__user_file.user_file_id_uuid...")

        # Count rows needing update
        null_count = bind.execute(
            text(
                """
                SELECT COUNT(*) FROM persona__user_file
                WHERE user_file_id IS NOT NULL AND user_file_id_uuid IS NULL
                """
            )
        ).scalar_one()

        if null_count > 0:
            logger.info(f"Updating {null_count} persona__user_file records...")

            # Update in batches
            batch_size = 10000
            total_updated = 0

            while True:
                # NOTE(review): batching is keyed on persona_id (not row id), so a
                # single batch may update more rows than batch_size when a persona
                # has many files; termination still holds because the verification
                # below raises if anything is left NULL.
                result = bind.execute(
                    text(
                        """
                        UPDATE persona__user_file p
                        SET user_file_id_uuid = uf.new_id
                        FROM user_file uf
                        WHERE p.user_file_id = uf.id
                        AND p.user_file_id_uuid IS NULL
                        AND p.persona_id IN (
                            SELECT persona_id
                            FROM persona__user_file
                            WHERE user_file_id_uuid IS NULL
                            LIMIT :batch_size
                        )
                        """
                    ),
                    {"batch_size": batch_size},
                )
                updated = result.rowcount
                total_updated += updated

                if updated < batch_size:
                    break

                logger.info(f" Updated {total_updated}/{null_count} records...")

            logger.info(f"Updated {total_updated} persona__user_file records")

        # Verify all records are populated
        remaining_null = bind.execute(
            text(
                """
                SELECT COUNT(*) FROM persona__user_file
                WHERE user_file_id IS NOT NULL AND user_file_id_uuid IS NULL
                """
            )
        ).scalar_one()
        if remaining_null > 0:
            raise Exception(
                f"Failed to populate all persona__user_file.user_file_id_uuid values ({remaining_null} NULL)"
            )

        op.alter_column("persona__user_file", "user_file_id_uuid", nullable=False)
        logger.info("Locked down persona__user_file.user_file_id_uuid column")

    # === Step 3: Create user_project records from chat_folder ===
    # Idempotent: NOT EXISTS guards against duplicate (user_id, name) projects.
    if "chat_folder" in inspector.get_table_names():
        logger.info("Creating user_project records from chat_folder...")
        result = bind.execute(
            text(
                """
                INSERT INTO user_project (user_id, name)
                SELECT cf.user_id, cf.name
                FROM chat_folder cf
                WHERE NOT EXISTS (
                    SELECT 1
                    FROM user_project up
                    WHERE up.user_id = cf.user_id AND up.name = cf.name
                )
                """
            )
        )
        logger.info(f"Created {result.rowcount} user_project records from chat_folder")

    # === Step 4: Populate chat_session.project_id ===
    chat_session_columns = [
        col["name"] for col in inspector.get_columns("chat_session")
    ]

    if "folder_id" in chat_session_columns and "project_id" in chat_session_columns:
        logger.info("Populating chat_session.project_id...")

        # Count sessions needing update
        null_count = bind.execute(
            text(
                """
                SELECT COUNT(*) FROM chat_session
                WHERE project_id IS NULL AND folder_id IS NOT NULL
                """
            )
        ).scalar_one()

        if null_count > 0:
            logger.info(f"Updating {null_count} chat_session records...")
            # Folders map to projects via the (user_id, name) pair created in Step 3.
            result = bind.execute(
                text(
                    """
                    UPDATE chat_session cs
                    SET project_id = up.id
                    FROM chat_folder cf
                    JOIN user_project up ON up.user_id = cf.user_id AND up.name = cf.name
                    WHERE cs.folder_id = cf.id AND cs.project_id IS NULL
                    """
                )
            )
            logger.info(f"Updated {result.rowcount} chat_session records")

        # Verify all records are populated; unmapped sessions are tolerated
        # (warning only), unlike the hard failures in Steps 1-2.
        remaining_null = bind.execute(
            text(
                """
                SELECT COUNT(*) FROM chat_session
                WHERE project_id IS NULL AND folder_id IS NOT NULL
                """
            )
        ).scalar_one()
        if remaining_null > 0:
            logger.warning(
                f"Warning: {remaining_null} chat_session records could not be mapped to projects"
            )

    # === Step 5: Update plaintext FileRecord IDs/display names to UUID scheme ===
    # Prior to UUID migration, plaintext cache files were stored with file_id like 'plain_text_<int_id>'.
    # After migration, we use 'plaintext_<uuid>' (note the name change to 'plaintext_').
    # This step remaps existing FileRecord rows to the new naming while preserving object_key/bucket.
    # NOTE(review): the comments above (and below) mention a legacy 'plain_text_<int>'
    # prefix, but every SQL statement here matches CONCAT('plaintext_', uf.id::text) —
    # i.e. the NEW prefix paired with the OLD integer id. If rows exist with a
    # 'plain_text_<int>' file_id they will not be remapped. Confirm the actual
    # legacy naming before relying on this step.
    logger.info("Updating plaintext FileRecord ids and display names to UUID scheme...")

    # Count legacy plaintext records that can be mapped to UUID user_file ids
    count_query = text(
        """
        SELECT COUNT(*)
        FROM file_record fr
        JOIN user_file uf ON fr.file_id = CONCAT('plaintext_', uf.id::text)
        WHERE LOWER(fr.file_origin::text) = 'plaintext_cache'
        """
    )
    legacy_count = bind.execute(count_query).scalar_one()

    if legacy_count and legacy_count > 0:
        logger.info(f"Found {legacy_count} legacy plaintext file records to update")

        # Update display_name first for readability (safe regardless of rename)
        bind.execute(
            text(
                """
                UPDATE file_record fr
                SET display_name = CONCAT('Plaintext for user file ', uf.new_id::text)
                FROM user_file uf
                WHERE LOWER(fr.file_origin::text) = 'plaintext_cache'
                AND fr.file_id = CONCAT('plaintext_', uf.id::text)
                """
            )
        )

        # Remap file_id from 'plaintext_<int>' -> 'plaintext_<uuid>' using transitional new_id
        # Use a single UPDATE ... WHERE file_id LIKE 'plain_text_%'
        # and ensure it aligns to existing user_file ids to avoid renaming unrelated rows
        result = bind.execute(
            text(
                """
                UPDATE file_record fr
                SET file_id = CONCAT('plaintext_', uf.new_id::text)
                FROM user_file uf
                WHERE LOWER(fr.file_origin::text) = 'plaintext_cache'
                AND fr.file_id = CONCAT('plaintext_', uf.id::text)
                """
            )
        )
        logger.info(
            f"Updated {result.rowcount} plaintext file_record ids to UUID scheme"
        )

    # === Step 6: Ensure document_id_migrated default TRUE and backfill existing FALSE ===
    # New records should default to migrated=True so the migration task won't run for them.
    # Existing rows that had a legacy document_id should be marked as not migrated to be processed.
    # Backfill existing records: if document_id is not null, set to FALSE
    bind.execute(
        text(
            """
            UPDATE user_file
            SET document_id_migrated = FALSE
            WHERE document_id IS NOT NULL
            """
        )
    )

    # === Step 7: Backfill user_file.status from index_attempt ===
    logger.info("Backfilling user_file.status from index_attempt...")

    # Update user_file status based on latest index attempt
    # Using CTEs instead of temp tables for asyncpg compatibility
    # Note: files with no matching index_attempt (la.status NULL via the LEFT
    # JOIN) fall through the CASE into the ELSE branch and are marked 'FAILED'.
    result = bind.execute(
        text(
            """
            WITH latest_attempt AS (
                SELECT DISTINCT ON (ia.connector_credential_pair_id)
                    ia.connector_credential_pair_id,
                    ia.status
                FROM index_attempt ia
                ORDER BY ia.connector_credential_pair_id, ia.time_updated DESC
            ),
            uf_to_ccp AS (
                SELECT DISTINCT uf.id AS uf_id, ccp.id AS cc_pair_id
                FROM user_file uf
                JOIN document_by_connector_credential_pair dcc
                    ON dcc.id = REPLACE(uf.document_id, 'USER_FILE_CONNECTOR__', 'FILE_CONNECTOR__')
                JOIN connector_credential_pair ccp
                    ON ccp.connector_id = dcc.connector_id
                    AND ccp.credential_id = dcc.credential_id
            )
            UPDATE user_file uf
            SET status = CASE
                WHEN la.status IN ('NOT_STARTED', 'IN_PROGRESS') THEN 'PROCESSING'
                WHEN la.status = 'SUCCESS' THEN 'COMPLETED'
                ELSE 'FAILED'
            END
            FROM uf_to_ccp ufc
            LEFT JOIN latest_attempt la
                ON la.connector_credential_pair_id = ufc.cc_pair_id
            WHERE uf.id = ufc.uf_id
            AND uf.status = 'PROCESSING'
            """
        )
    )
    logger.info(f"Updated status for {result.rowcount} user_file records")

    logger.info("Migration 2 (data preparation) completed successfully")
def downgrade() -> None:
    """Reset populated data to allow clean downgrade of schema.

    Relaxes the NOT NULL constraints tightened by upgrade() and resets
    user_file.status; populated UUIDs and project links are deliberately left
    in place for the schema migration (migration 1) to drop.
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)

    logger.info("Starting downgrade of data preparation...")

    # Reset user_file columns to allow nulls before data removal
    if "user_file" in inspector.get_table_names():
        columns = [col["name"] for col in inspector.get_columns("user_file")]
        if "new_id" in columns:
            # Restore the transitional server default dropped during upgrade.
            op.alter_column(
                "user_file",
                "new_id",
                nullable=True,
                server_default=sa.text("gen_random_uuid()"),
            )
            # Optionally clear the data
            # bind.execute(text("UPDATE user_file SET new_id = NULL"))
            logger.info("Reset user_file.new_id to nullable")

    # Reset persona__user_file.user_file_id_uuid
    if "persona__user_file" in inspector.get_table_names():
        columns = [col["name"] for col in inspector.get_columns("persona__user_file")]
        if "user_file_id_uuid" in columns:
            op.alter_column("persona__user_file", "user_file_id_uuid", nullable=True)
            # Optionally clear the data
            # bind.execute(text("UPDATE persona__user_file SET user_file_id_uuid = NULL"))
            logger.info("Reset persona__user_file.user_file_id_uuid to nullable")

    # Note: We don't delete user_project records or reset chat_session.project_id
    # as these might be in use and can be handled by the schema downgrade

    # Reset user_file.status to default
    if "user_file" in inspector.get_table_names():
        columns = [col["name"] for col in inspector.get_columns("user_file")]
        if "status" in columns:
            bind.execute(text("UPDATE user_file SET status = 'PROCESSING'"))
            logger.info("Reset user_file.status to default")

    logger.info("Downgrade completed successfully")

View File

@@ -1,261 +0,0 @@
"""Migration 3: User file relationship migration
Revision ID: 16c37a30adf2
Revises: 0cd424f32b1d
Create Date: 2025-09-22 09:47:34.175596
This migration converts folder-based relationships to project-based relationships.
It migrates persona__user_folder to persona__user_file and populates project__user_file.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
import logging
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "16c37a30adf2"
down_revision = "0cd424f32b1d"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Migrate folder-based relationships to project-based relationships.

    Third of three user-file migrations: converts persona__user_folder links to
    persona__user_file rows, populates project__user_file from user_file.folder_id,
    and wires up the chat_session.project_id FK and index. Every step is guarded
    by inspector checks so the migration is safe to re-run.
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)

    # === Step 1: Migrate persona__user_folder to persona__user_file ===
    table_names = inspector.get_table_names()
    if "persona__user_folder" in table_names and "user_file" in table_names:
        user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
        has_new_id = "new_id" in user_file_columns

        if has_new_id and "folder_id" in user_file_columns:
            logger.info(
                "Migrating persona__user_folder relationships to persona__user_file..."
            )

            # Count relationships to migrate (asyncpg-compatible)
            count_query = text(
                """
                SELECT COUNT(*)
                FROM (
                    SELECT DISTINCT puf.persona_id, uf.id
                    FROM persona__user_folder puf
                    JOIN user_file uf ON uf.folder_id = puf.user_folder_id
                    WHERE NOT EXISTS (
                        SELECT 1
                        FROM persona__user_file p2
                        WHERE p2.persona_id = puf.persona_id
                        AND p2.user_file_id = uf.id
                    )
                ) AS distinct_pairs
                """
            )
            to_migrate = bind.execute(count_query).scalar_one()

            if to_migrate > 0:
                logger.info(f"Creating {to_migrate} persona-file relationships...")

                # Migrate in batches to avoid memory issues
                batch_size = 10000
                total_inserted = 0

                while True:
                    # Insert batch directly using subquery (asyncpg compatible).
                    # The NOT EXISTS guard makes each batch skip pairs inserted by
                    # previous batches, so the loop converges without ORDER BY.
                    result = bind.execute(
                        text(
                            """
                            INSERT INTO persona__user_file (persona_id, user_file_id, user_file_id_uuid)
                            SELECT DISTINCT puf.persona_id, uf.id as file_id, uf.new_id
                            FROM persona__user_folder puf
                            JOIN user_file uf ON uf.folder_id = puf.user_folder_id
                            WHERE NOT EXISTS (
                                SELECT 1
                                FROM persona__user_file p2
                                WHERE p2.persona_id = puf.persona_id
                                AND p2.user_file_id = uf.id
                            )
                            LIMIT :batch_size
                            """
                        ),
                        {"batch_size": batch_size},
                    )
                    inserted = result.rowcount
                    total_inserted += inserted

                    if inserted < batch_size:
                        break

                    logger.info(
                        f" Migrated {total_inserted}/{to_migrate} relationships..."
                    )

                logger.info(
                    f"Created {total_inserted} persona__user_file relationships"
                )

    # === Step 2: Add foreign key for chat_session.project_id ===
    chat_session_fks = inspector.get_foreign_keys("chat_session")
    fk_exists = any(
        fk["name"] == "fk_chat_session_project_id" for fk in chat_session_fks
    )
    if not fk_exists:
        logger.info("Adding foreign key constraint for chat_session.project_id...")
        op.create_foreign_key(
            "fk_chat_session_project_id",
            "chat_session",
            "user_project",
            ["project_id"],
            ["id"],
        )
        logger.info("Added foreign key constraint")

    # === Step 3: Populate project__user_file from user_file.folder_id ===
    user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
    has_new_id = "new_id" in user_file_columns

    if has_new_id and "folder_id" in user_file_columns:
        logger.info("Populating project__user_file from folder relationships...")

        # Count relationships to create
        count_query = text(
            """
            SELECT COUNT(*)
            FROM user_file uf
            WHERE uf.folder_id IS NOT NULL
            AND NOT EXISTS (
                SELECT 1
                FROM project__user_file puf
                WHERE puf.project_id = uf.folder_id
                AND puf.user_file_id = uf.new_id
            )
            """
        )
        to_create = bind.execute(count_query).scalar_one()

        if to_create > 0:
            logger.info(f"Creating {to_create} project-file relationships...")

            # Insert in batches
            batch_size = 10000
            total_inserted = 0

            while True:
                # NOTE(review): ON CONFLICT (project_id, user_file_id) assumes a
                # unique constraint on that pair exists (presumably created in
                # migration 1) — verify. Also, rows skipped by DO NOTHING are not
                # counted in rowcount, which is what the loop's exit test uses.
                result = bind.execute(
                    text(
                        """
                        INSERT INTO project__user_file (project_id, user_file_id)
                        SELECT uf.folder_id, uf.new_id
                        FROM user_file uf
                        WHERE uf.folder_id IS NOT NULL
                        AND NOT EXISTS (
                            SELECT 1
                            FROM project__user_file puf
                            WHERE puf.project_id = uf.folder_id
                            AND puf.user_file_id = uf.new_id
                        )
                        LIMIT :batch_size
                        ON CONFLICT (project_id, user_file_id) DO NOTHING
                        """
                    ),
                    {"batch_size": batch_size},
                )
                inserted = result.rowcount
                total_inserted += inserted

                if inserted < batch_size:
                    break

                logger.info(f" Created {total_inserted}/{to_create} relationships...")

            logger.info(f"Created {total_inserted} project__user_file relationships")

    # === Step 4: Create index on chat_session.project_id ===
    # get_indexes can raise on some backends/missing tables; treat as "no indexes".
    try:
        indexes = [ix.get("name") for ix in inspector.get_indexes("chat_session")]
    except Exception:
        indexes = []
    if "ix_chat_session_project_id" not in indexes:
        logger.info("Creating index on chat_session.project_id...")
        op.create_index(
            "ix_chat_session_project_id", "chat_session", ["project_id"], unique=False
        )
        logger.info("Created index")

    logger.info("Migration 3 (relationship migration) completed successfully")
def downgrade() -> None:
    """Remove migrated relationships and constraints.

    Reverses upgrade() in the opposite order: drops the index and FK on
    chat_session.project_id, clears project__user_file, and deletes only the
    persona__user_file rows that were derived from folder relationships.
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)

    logger.info("Starting downgrade of relationship migration...")

    # Drop index on chat_session.project_id
    # Broad except: best-effort cleanup — a missing table/index must not abort
    # the downgrade.
    try:
        indexes = [ix.get("name") for ix in inspector.get_indexes("chat_session")]
        if "ix_chat_session_project_id" in indexes:
            op.drop_index("ix_chat_session_project_id", "chat_session")
            logger.info("Dropped index on chat_session.project_id")
    except Exception:
        pass

    # Drop foreign key constraint
    try:
        chat_session_fks = inspector.get_foreign_keys("chat_session")
        fk_exists = any(
            fk["name"] == "fk_chat_session_project_id" for fk in chat_session_fks
        )
        if fk_exists:
            op.drop_constraint(
                "fk_chat_session_project_id", "chat_session", type_="foreignkey"
            )
            logger.info("Dropped foreign key constraint on chat_session.project_id")
    except Exception:
        pass

    # Clear project__user_file relationships (but keep the table for migration 1 to handle)
    if "project__user_file" in inspector.get_table_names():
        result = bind.execute(text("DELETE FROM project__user_file"))
        logger.info(f"Cleared {result.rowcount} records from project__user_file")

    # Remove migrated persona__user_file relationships
    # Only remove those that came from folder relationships
    if all(
        table in inspector.get_table_names()
        for table in ["persona__user_file", "persona__user_folder", "user_file"]
    ):
        user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
        if "folder_id" in user_file_columns:
            result = bind.execute(
                text(
                    """
                    DELETE FROM persona__user_file puf
                    WHERE EXISTS (
                        SELECT 1
                        FROM user_file uf
                        JOIN persona__user_folder puf2
                            ON puf2.user_folder_id = uf.folder_id
                        WHERE puf.persona_id = puf2.persona_id
                        AND puf.user_file_id = uf.id
                    )
                    """
                )
            )
            logger.info(
                f"Removed {result.rowcount} migrated persona__user_file relationships"
            )

    logger.info("Downgrade completed successfully")

View File

@@ -1,29 +0,0 @@
"""add is_clarification to chat_message
Revision ID: 18b5b2524446
Revises: 87c52ec39f84
Create Date: 2025-01-16
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "18b5b2524446"
down_revision = "87c52ec39f84"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.add_column(
"chat_message",
sa.Column(
"is_clarification", sa.Boolean(), nullable=False, server_default="false"
),
)
def downgrade() -> None:
op.drop_column("chat_message", "is_clarification")

View File

@@ -1,89 +0,0 @@
"""add internet search and content provider tables
Revision ID: 1f2a3b4c5d6e
Revises: 9drpiiw74ljy
Create Date: 2025-11-10 19:45:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "1f2a3b4c5d6e"
down_revision = "9drpiiw74ljy"
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
"internet_search_provider",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column("name", sa.String(), nullable=False, unique=True),
sa.Column("provider_type", sa.String(), nullable=False),
sa.Column("api_key", sa.LargeBinary(), nullable=True),
sa.Column("config", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column(
"is_active", sa.Boolean(), nullable=False, server_default=sa.text("false")
),
sa.Column(
"time_created",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"time_updated",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
)
op.create_index(
"ix_internet_search_provider_is_active",
"internet_search_provider",
["is_active"],
)
op.create_table(
"internet_content_provider",
sa.Column("id", sa.Integer(), primary_key=True),
sa.Column("name", sa.String(), nullable=False, unique=True),
sa.Column("provider_type", sa.String(), nullable=False),
sa.Column("api_key", sa.LargeBinary(), nullable=True),
sa.Column("config", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column(
"is_active", sa.Boolean(), nullable=False, server_default=sa.text("false")
),
sa.Column(
"time_created",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"time_updated",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
)
op.create_index(
"ix_internet_content_provider_is_active",
"internet_content_provider",
["is_active"],
)
def downgrade() -> None:
op.drop_index(
"ix_internet_content_provider_is_active", table_name="internet_content_provider"
)
op.drop_table("internet_content_provider")
op.drop_index(
"ix_internet_search_provider_is_active", table_name="internet_search_provider"
)
op.drop_table("internet_search_provider")

View File

@@ -1,72 +0,0 @@
"""add switchover_type field and remove background_reindex_enabled
Revision ID: 2acdef638fc2
Revises: a4f23d6b71c8
Create Date: 2025-01-XX XX:XX:XX.XXXXXX
"""
from alembic import op
import sqlalchemy as sa
from onyx.db.enums import SwitchoverType
# revision identifiers, used by Alembic.
revision = "2acdef638fc2"
down_revision = "a4f23d6b71c8"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Add switchover_type column with default value of REINDEX
op.add_column(
"search_settings",
sa.Column(
"switchover_type",
sa.Enum(SwitchoverType, native_enum=False),
nullable=False,
server_default=SwitchoverType.REINDEX.value,
),
)
# Migrate existing data: set switchover_type based on background_reindex_enabled
# REINDEX where background_reindex_enabled=True, INSTANT where False
op.execute(
"""
UPDATE search_settings
SET switchover_type = CASE
WHEN background_reindex_enabled = true THEN 'REINDEX'
ELSE 'INSTANT'
END
"""
)
# Remove the background_reindex_enabled column (replaced by switchover_type)
op.drop_column("search_settings", "background_reindex_enabled")
def downgrade() -> None:
# Re-add the background_reindex_enabled column with default value of True
op.add_column(
"search_settings",
sa.Column(
"background_reindex_enabled",
sa.Boolean(),
nullable=False,
server_default="true",
),
)
# Set background_reindex_enabled based on switchover_type
op.execute(
"""
UPDATE search_settings
SET background_reindex_enabled = CASE
WHEN switchover_type = 'INSTANT' THEN false
ELSE true
END
"""
)
# Remove the switchover_type column
op.drop_column("search_settings", "switchover_type")

View File

@@ -1,228 +0,0 @@
"""Migration 6: User file schema cleanup
Revision ID: 2b75d0a8ffcb
Revises: 3a78dba1080a
Create Date: 2025-09-22 10:09:26.375377
This migration removes legacy columns and tables after data migration is complete.
It should only be run after verifying all data has been successfully migrated.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy import text
import logging
import fastapi_users_db_sqlalchemy
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "2b75d0a8ffcb"
down_revision = "3a78dba1080a"
branch_labels = None
depends_on = None
def upgrade() -> None:
"""Remove legacy columns and tables."""
bind = op.get_bind()
inspector = sa.inspect(bind)
logger.info("Starting schema cleanup...")
# === Step 1: Verify data migration is complete ===
logger.info("Verifying data migration completion...")
# Check if any chat sessions still have folder_id references
chat_session_columns = [
col["name"] for col in inspector.get_columns("chat_session")
]
if "folder_id" in chat_session_columns:
orphaned_count = bind.execute(
text(
"""
SELECT COUNT(*) FROM chat_session
WHERE folder_id IS NOT NULL AND project_id IS NULL
"""
)
).scalar_one()
if orphaned_count > 0:
logger.warning(
f"WARNING: {orphaned_count} chat_session records still have "
f"folder_id without project_id. Proceeding anyway."
)
# === Step 2: Drop chat_session.folder_id ===
if "folder_id" in chat_session_columns:
logger.info("Dropping chat_session.folder_id...")
# Drop foreign key constraint first
op.execute(
"ALTER TABLE chat_session DROP CONSTRAINT IF EXISTS chat_session_chat_folder_fk"
)
op.execute(
"ALTER TABLE chat_session DROP CONSTRAINT IF EXISTS chat_session_folder_fk"
)
# Drop the column
op.drop_column("chat_session", "folder_id")
logger.info("Dropped chat_session.folder_id")
# === Step 3: Drop persona__user_folder table ===
if "persona__user_folder" in inspector.get_table_names():
logger.info("Dropping persona__user_folder table...")
# Check for any remaining data
remaining = bind.execute(
text("SELECT COUNT(*) FROM persona__user_folder")
).scalar_one()
if remaining > 0:
logger.warning(
f"WARNING: Dropping persona__user_folder with {remaining} records"
)
op.drop_table("persona__user_folder")
logger.info("Dropped persona__user_folder table")
# === Step 4: Drop chat_folder table ===
if "chat_folder" in inspector.get_table_names():
logger.info("Dropping chat_folder table...")
# Check for any remaining data
remaining = bind.execute(text("SELECT COUNT(*) FROM chat_folder")).scalar_one()
if remaining > 0:
logger.warning(f"WARNING: Dropping chat_folder with {remaining} records")
op.drop_table("chat_folder")
logger.info("Dropped chat_folder table")
# === Step 5: Drop user_file legacy columns ===
user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
# Drop folder_id
if "folder_id" in user_file_columns:
logger.info("Dropping user_file.folder_id...")
op.drop_column("user_file", "folder_id")
logger.info("Dropped user_file.folder_id")
# Drop cc_pair_id (already handled in migration 5, but be sure)
if "cc_pair_id" in user_file_columns:
logger.info("Dropping user_file.cc_pair_id...")
# Drop any remaining foreign key constraints
bind.execute(
text(
"""
DO $$
DECLARE r RECORD;
BEGIN
FOR r IN (
SELECT conname
FROM pg_constraint c
JOIN pg_class t ON c.conrelid = t.oid
WHERE c.contype = 'f'
AND t.relname = 'user_file'
AND EXISTS (
SELECT 1 FROM pg_attribute a
WHERE a.attrelid = t.oid
AND a.attname = 'cc_pair_id'
)
) LOOP
EXECUTE format('ALTER TABLE user_file DROP CONSTRAINT IF EXISTS %I', r.conname);
END LOOP;
END$$;
"""
)
)
op.drop_column("user_file", "cc_pair_id")
logger.info("Dropped user_file.cc_pair_id")
# === Step 6: Clean up any remaining constraints ===
logger.info("Cleaning up remaining constraints...")
# Drop any unique constraints on removed columns
op.execute(
"ALTER TABLE user_file DROP CONSTRAINT IF EXISTS user_file_cc_pair_id_key"
)
logger.info("Migration 6 (schema cleanup) completed successfully")
logger.info("Legacy schema has been fully removed")
def downgrade() -> None:
"""Recreate dropped columns and tables (structure only, no data)."""
bind = op.get_bind()
inspector = sa.inspect(bind)
logger.warning("Downgrading schema cleanup - recreating structure only, no data!")
# Recreate user_file columns
if "user_file" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("user_file")]
if "cc_pair_id" not in columns:
op.add_column(
"user_file", sa.Column("cc_pair_id", sa.Integer(), nullable=True)
)
if "folder_id" not in columns:
op.add_column(
"user_file", sa.Column("folder_id", sa.Integer(), nullable=True)
)
# Recreate persona__user_folder table
if "persona__user_folder" not in inspector.get_table_names():
op.create_table(
"persona__user_folder",
sa.Column("persona_id", sa.Integer(), nullable=False),
sa.Column("user_folder_id", sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint("persona_id", "user_folder_id"),
sa.ForeignKeyConstraint(["persona_id"], ["persona.id"]),
sa.ForeignKeyConstraint(["user_folder_id"], ["user_project.id"]),
)
# Recreate chat_folder table and related structures
if "chat_folder" not in inspector.get_table_names():
op.create_table(
"chat_folder",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column(
"user_id",
fastapi_users_db_sqlalchemy.generics.GUID(),
nullable=True,
),
sa.Column("name", sa.String(), nullable=True),
sa.Column("display_priority", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(
["user_id"],
["user.id"],
name="chat_folder_user_id_fkey",
),
sa.PrimaryKeyConstraint("id"),
)
# Add folder_id back to chat_session
if "chat_session" in inspector.get_table_names():
columns = [col["name"] for col in inspector.get_columns("chat_session")]
if "folder_id" not in columns:
op.add_column(
"chat_session", sa.Column("folder_id", sa.Integer(), nullable=True)
)
# Add foreign key if chat_folder exists
if "chat_folder" in inspector.get_table_names():
op.create_foreign_key(
"chat_session_chat_folder_fk",
"chat_session",
"chat_folder",
["folder_id"],
["id"],
)
logger.info("Downgrade completed - structure recreated but data is lost")

View File

@@ -1,298 +0,0 @@
"""Migration 5: User file legacy data cleanup
Revision ID: 3a78dba1080a
Revises: 7cc3fcc116c1
Create Date: 2025-09-22 10:04:27.986294
This migration removes legacy user-file documents and connector_credential_pairs.
It performs bulk deletions of obsolete data after the UUID migration.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as psql
from sqlalchemy import text
import logging
from typing import List
import uuid
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "3a78dba1080a"
down_revision = "7cc3fcc116c1"
branch_labels = None
depends_on = None
def batch_delete(
bind: sa.engine.Connection,
table_name: str,
id_column: str,
ids: List[str | int | uuid.UUID],
batch_size: int = 1000,
id_type: str = "int",
) -> int:
"""Delete records in batches to avoid memory issues and timeouts."""
total_count = len(ids)
if total_count == 0:
return 0
logger.info(
f"Starting batch deletion of {total_count} records from {table_name}..."
)
# Determine appropriate ARRAY type
if id_type == "uuid":
array_type = psql.ARRAY(psql.UUID(as_uuid=True))
elif id_type == "int":
array_type = psql.ARRAY(sa.Integer())
else:
array_type = psql.ARRAY(sa.String())
total_deleted = 0
failed_batches = []
for i in range(0, total_count, batch_size):
batch_ids = ids[i : i + batch_size]
try:
stmt = text(
f"DELETE FROM {table_name} WHERE {id_column} = ANY(:ids)"
).bindparams(sa.bindparam("ids", value=batch_ids, type_=array_type))
result = bind.execute(stmt)
total_deleted += result.rowcount
# Log progress every 10 batches or at completion
batch_num = (i // batch_size) + 1
if batch_num % 10 == 0 or i + batch_size >= total_count:
logger.info(
f" Deleted {min(i + batch_size, total_count)}/{total_count} records "
f"({total_deleted} actual) from {table_name}"
)
except Exception as e:
logger.error(f"Failed to delete batch {(i // batch_size) + 1}: {e}")
failed_batches.append((i, min(i + batch_size, total_count)))
if failed_batches:
logger.warning(
f"Failed to delete {len(failed_batches)} batches from {table_name}. "
f"Total deleted: {total_deleted}/{total_count}"
)
# Fail the migration to avoid silently succeeding on partial cleanup
raise RuntimeError(
f"Batch deletion failed for {table_name}: "
f"{len(failed_batches)} failed batches out of "
f"{(total_count + batch_size - 1) // batch_size}."
)
return total_deleted
def upgrade() -> None:
"""Remove legacy user-file documents and connector_credential_pairs."""
bind = op.get_bind()
inspector = sa.inspect(bind)
logger.info("Starting legacy data cleanup...")
# === Step 1: Identify and delete user-file documents ===
logger.info("Identifying user-file documents to delete...")
# Get document IDs to delete
doc_rows = bind.execute(
text(
"""
SELECT DISTINCT dcc.id AS document_id
FROM document_by_connector_credential_pair dcc
JOIN connector_credential_pair u
ON u.connector_id = dcc.connector_id
AND u.credential_id = dcc.credential_id
WHERE u.is_user_file IS TRUE
"""
)
).fetchall()
doc_ids = [r[0] for r in doc_rows]
if doc_ids:
logger.info(f"Found {len(doc_ids)} user-file documents to delete")
# Delete dependent rows first
tables_to_clean = [
("document_retrieval_feedback", "document_id"),
("document__tag", "document_id"),
("chunk_stats", "document_id"),
]
for table_name, column_name in tables_to_clean:
if table_name in inspector.get_table_names():
# document_id is a string in these tables
deleted = batch_delete(
bind, table_name, column_name, doc_ids, id_type="str"
)
logger.info(f"Deleted {deleted} records from {table_name}")
# Delete document_by_connector_credential_pair entries
deleted = batch_delete(
bind, "document_by_connector_credential_pair", "id", doc_ids, id_type="str"
)
logger.info(f"Deleted {deleted} document_by_connector_credential_pair records")
# Delete documents themselves
deleted = batch_delete(bind, "document", "id", doc_ids, id_type="str")
logger.info(f"Deleted {deleted} document records")
else:
logger.info("No user-file documents found to delete")
# === Step 2: Clean up user-file connector_credential_pairs ===
logger.info("Cleaning up user-file connector_credential_pairs...")
# Get cc_pair IDs
cc_pair_rows = bind.execute(
text(
"""
SELECT id AS cc_pair_id
FROM connector_credential_pair
WHERE is_user_file IS TRUE
"""
)
).fetchall()
cc_pair_ids = [r[0] for r in cc_pair_rows]
if cc_pair_ids:
logger.info(
f"Found {len(cc_pair_ids)} user-file connector_credential_pairs to clean up"
)
# Delete related records
# Clean child tables first to satisfy foreign key constraints,
# then the parent tables
tables_to_clean = [
("index_attempt_errors", "connector_credential_pair_id"),
("index_attempt", "connector_credential_pair_id"),
("background_error", "cc_pair_id"),
("document_set__connector_credential_pair", "connector_credential_pair_id"),
("user_group__connector_credential_pair", "cc_pair_id"),
]
for table_name, column_name in tables_to_clean:
if table_name in inspector.get_table_names():
deleted = batch_delete(
bind, table_name, column_name, cc_pair_ids, id_type="int"
)
logger.info(f"Deleted {deleted} records from {table_name}")
# === Step 3: Identify connectors and credentials to delete ===
logger.info("Identifying orphaned connectors and credentials...")
# Get connectors used only by user-file cc_pairs
connector_rows = bind.execute(
text(
"""
SELECT DISTINCT ccp.connector_id
FROM connector_credential_pair ccp
WHERE ccp.is_user_file IS TRUE
AND ccp.connector_id != 0 -- Exclude system default
AND NOT EXISTS (
SELECT 1
FROM connector_credential_pair c2
WHERE c2.connector_id = ccp.connector_id
AND c2.is_user_file IS NOT TRUE
)
"""
)
).fetchall()
userfile_only_connector_ids = [r[0] for r in connector_rows]
# Get credentials used only by user-file cc_pairs
credential_rows = bind.execute(
text(
"""
SELECT DISTINCT ccp.credential_id
FROM connector_credential_pair ccp
WHERE ccp.is_user_file IS TRUE
AND ccp.credential_id != 0 -- Exclude public/default
AND NOT EXISTS (
SELECT 1
FROM connector_credential_pair c2
WHERE c2.credential_id = ccp.credential_id
AND c2.is_user_file IS NOT TRUE
)
"""
)
).fetchall()
userfile_only_credential_ids = [r[0] for r in credential_rows]
# === Step 4: Delete the cc_pairs themselves ===
if cc_pair_ids:
# Remove FK dependency from user_file first
bind.execute(
text(
"""
DO $$
DECLARE r RECORD;
BEGIN
FOR r IN (
SELECT conname
FROM pg_constraint c
JOIN pg_class t ON c.conrelid = t.oid
JOIN pg_class ft ON c.confrelid = ft.oid
WHERE c.contype = 'f'
AND t.relname = 'user_file'
AND ft.relname = 'connector_credential_pair'
) LOOP
EXECUTE format('ALTER TABLE user_file DROP CONSTRAINT IF EXISTS %I', r.conname);
END LOOP;
END$$;
"""
)
)
# Delete cc_pairs
deleted = batch_delete(
bind, "connector_credential_pair", "id", cc_pair_ids, id_type="int"
)
logger.info(f"Deleted {deleted} connector_credential_pair records")
# === Step 5: Delete orphaned connectors ===
if userfile_only_connector_ids:
deleted = batch_delete(
bind, "connector", "id", userfile_only_connector_ids, id_type="int"
)
logger.info(f"Deleted {deleted} orphaned connector records")
# === Step 6: Delete orphaned credentials ===
if userfile_only_credential_ids:
# Clean up credential__user_group mappings first
deleted = batch_delete(
bind,
"credential__user_group",
"credential_id",
userfile_only_credential_ids,
id_type="int",
)
logger.info(f"Deleted {deleted} credential__user_group records")
# Delete credentials
deleted = batch_delete(
bind, "credential", "id", userfile_only_credential_ids, id_type="int"
)
logger.info(f"Deleted {deleted} orphaned credential records")
logger.info("Migration 5 (legacy data cleanup) completed successfully")
def downgrade() -> None:
"""Cannot restore deleted data - requires backup restoration."""
logger.error("CRITICAL: Downgrading data cleanup cannot restore deleted data!")
logger.error("Data restoration requires backup files or database backup.")
# raise NotImplementedError(
# "Downgrade of legacy data cleanup is not supported. "
# "Deleted data must be restored from backups."
# )

View File

@@ -1,89 +0,0 @@
"""seed_exa_provider_from_env
Revision ID: 3c9a65f1207f
Revises: 1f2a3b4c5d6e
Create Date: 2025-11-20 19:18:00.000000
"""
from __future__ import annotations
import os
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from dotenv import load_dotenv, find_dotenv
from onyx.utils.encryption import encrypt_string_to_bytes
revision = "3c9a65f1207f"
down_revision = "1f2a3b4c5d6e"
branch_labels = None
depends_on = None
EXA_PROVIDER_NAME = "Exa"
def _get_internet_search_table(metadata: sa.MetaData) -> sa.Table:
return sa.Table(
"internet_search_provider",
metadata,
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("name", sa.String),
sa.Column("provider_type", sa.String),
sa.Column("api_key", sa.LargeBinary),
sa.Column("config", postgresql.JSONB),
sa.Column("is_active", sa.Boolean),
sa.Column(
"time_created",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
sa.Column(
"time_updated",
sa.DateTime(timezone=True),
nullable=False,
server_default=sa.text("now()"),
),
)
def upgrade() -> None:
load_dotenv(find_dotenv())
exa_api_key = os.environ.get("EXA_API_KEY")
if not exa_api_key:
return
bind = op.get_bind()
metadata = sa.MetaData()
table = _get_internet_search_table(metadata)
existing = bind.execute(
sa.select(table.c.id).where(table.c.name == EXA_PROVIDER_NAME)
).first()
if existing:
return
encrypted_key = encrypt_string_to_bytes(exa_api_key)
has_active_provider = bind.execute(
sa.select(table.c.id).where(table.c.is_active.is_(True))
).first()
bind.execute(
table.insert().values(
name=EXA_PROVIDER_NAME,
provider_type="exa",
api_key=encrypted_key,
config=None,
is_active=not bool(has_active_provider),
)
)
def downgrade() -> None:
return

View File

@@ -1,121 +0,0 @@
"""add_oauth_config_and_user_tokens
Revision ID: 3d1cca026fe8
Revises: c8a93a2af083
Create Date: 2025-10-21 13:27:34.274721
"""
from alembic import op
import fastapi_users_db_sqlalchemy
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "3d1cca026fe8"
down_revision = "c8a93a2af083"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create oauth_config table
op.create_table(
"oauth_config",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("name", sa.String(), nullable=False),
sa.Column("authorization_url", sa.Text(), nullable=False),
sa.Column("token_url", sa.Text(), nullable=False),
sa.Column("client_id", sa.LargeBinary(), nullable=False),
sa.Column("client_secret", sa.LargeBinary(), nullable=False),
sa.Column("scopes", postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.Column(
"additional_params",
postgresql.JSONB(astext_type=sa.Text()),
nullable=True,
),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("name"),
)
# Create oauth_user_token table
op.create_table(
"oauth_user_token",
sa.Column("id", sa.Integer(), nullable=False),
sa.Column("oauth_config_id", sa.Integer(), nullable=False),
sa.Column(
"user_id",
fastapi_users_db_sqlalchemy.generics.GUID(),
nullable=False,
),
sa.Column("token_data", sa.LargeBinary(), nullable=False),
sa.Column(
"created_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.Column(
"updated_at",
sa.DateTime(timezone=True),
server_default=sa.text("now()"),
nullable=False,
),
sa.ForeignKeyConstraint(
["oauth_config_id"], ["oauth_config.id"], ondelete="CASCADE"
),
sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("id"),
sa.UniqueConstraint("oauth_config_id", "user_id", name="uq_oauth_user_token"),
)
# Create index on user_id for efficient user-based token lookups
# Note: unique constraint on (oauth_config_id, user_id) already creates
# an index for config-based lookups
op.create_index(
"ix_oauth_user_token_user_id",
"oauth_user_token",
["user_id"],
)
# Add oauth_config_id column to tool table
op.add_column("tool", sa.Column("oauth_config_id", sa.Integer(), nullable=True))
# Create foreign key from tool to oauth_config
op.create_foreign_key(
"tool_oauth_config_fk",
"tool",
"oauth_config",
["oauth_config_id"],
["id"],
ondelete="SET NULL",
)
def downgrade() -> None:
# Drop foreign key from tool to oauth_config
op.drop_constraint("tool_oauth_config_fk", "tool", type_="foreignkey")
# Drop oauth_config_id column from tool table
op.drop_column("tool", "oauth_config_id")
# Drop index on user_id
op.drop_index("ix_oauth_user_token_user_id", table_name="oauth_user_token")
# Drop oauth_user_token table (will cascade delete tokens)
op.drop_table("oauth_user_token")
# Drop oauth_config table
op.drop_table("oauth_config")

View File

@@ -1,28 +0,0 @@
"""reset userfile document_id_migrated field
Revision ID: 40926a4dab77
Revises: 64bd5677aeb6
Create Date: 2025-10-06 16:10:32.898668
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "40926a4dab77"
down_revision = "64bd5677aeb6"
branch_labels = None
depends_on = None
def upgrade() -> None:
# Set all existing records to not migrated
op.execute(
"UPDATE user_file SET document_id_migrated = FALSE "
"WHERE document_id_migrated IS DISTINCT FROM FALSE;"
)
def downgrade() -> None:
# No-op
pass

View File

@@ -1,104 +0,0 @@
"""add_open_url_tool
Revision ID: 4f8a2b3c1d9e
Revises: a852cbe15577
Create Date: 2025-11-24 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "4f8a2b3c1d9e"
down_revision = "a852cbe15577"
branch_labels = None
depends_on = None
OPEN_URL_TOOL = {
"name": "OpenURLTool",
"display_name": "Open URL",
"description": (
"The Open URL Action allows the agent to fetch and read contents of web pages."
),
"in_code_tool_id": "OpenURLTool",
"enabled": True,
}
def upgrade() -> None:
conn = op.get_bind()
# Check if tool already exists
existing = conn.execute(
sa.text("SELECT id FROM tool WHERE in_code_tool_id = :in_code_tool_id"),
{"in_code_tool_id": OPEN_URL_TOOL["in_code_tool_id"]},
).fetchone()
if existing:
tool_id = existing[0]
# Update existing tool
conn.execute(
sa.text(
"""
UPDATE tool
SET name = :name,
display_name = :display_name,
description = :description
WHERE in_code_tool_id = :in_code_tool_id
"""
),
OPEN_URL_TOOL,
)
else:
# Insert new tool
conn.execute(
sa.text(
"""
INSERT INTO tool (name, display_name, description, in_code_tool_id, enabled)
VALUES (:name, :display_name, :description, :in_code_tool_id, :enabled)
"""
),
OPEN_URL_TOOL,
)
# Get the newly inserted tool's id
result = conn.execute(
sa.text("SELECT id FROM tool WHERE in_code_tool_id = :in_code_tool_id"),
{"in_code_tool_id": OPEN_URL_TOOL["in_code_tool_id"]},
).fetchone()
tool_id = result[0] # type: ignore
# Associate the tool with all existing personas
# Get all persona IDs
persona_ids = conn.execute(sa.text("SELECT id FROM persona")).fetchall()
for (persona_id,) in persona_ids:
# Check if association already exists
exists = conn.execute(
sa.text(
"""
SELECT 1 FROM persona__tool
WHERE persona_id = :persona_id AND tool_id = :tool_id
"""
),
{"persona_id": persona_id, "tool_id": tool_id},
).fetchone()
if not exists:
conn.execute(
sa.text(
"""
INSERT INTO persona__tool (persona_id, tool_id)
VALUES (:persona_id, :tool_id)
"""
),
{"persona_id": persona_id, "tool_id": tool_id},
)
def downgrade() -> None:
# We don't remove the tool on downgrade since it's fine to have it around.
# If we upgrade again, it will be a no-op.
pass

View File

@@ -1,380 +0,0 @@
"""merge_default_assistants_into_unified
Revision ID: 505c488f6662
Revises: d09fc20a3c66
Create Date: 2025-09-09 19:00:56.816626
"""
import json
from typing import Any
from typing import NamedTuple
from uuid import UUID
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "505c488f6662"
down_revision = "d09fc20a3c66"
branch_labels = None
depends_on = None
# Constants for the unified assistant
UNIFIED_ASSISTANT_NAME = "Assistant"
UNIFIED_ASSISTANT_DESCRIPTION = (
"Your AI assistant with search, web browsing, and image generation capabilities."
)
UNIFIED_ASSISTANT_NUM_CHUNKS = 25
UNIFIED_ASSISTANT_DISPLAY_PRIORITY = 0
UNIFIED_ASSISTANT_LLM_FILTER_EXTRACTION = True
UNIFIED_ASSISTANT_LLM_RELEVANCE_FILTER = False
UNIFIED_ASSISTANT_RECENCY_BIAS = "AUTO" # NOTE: needs to be capitalized
UNIFIED_ASSISTANT_CHUNKS_ABOVE = 0
UNIFIED_ASSISTANT_CHUNKS_BELOW = 0
UNIFIED_ASSISTANT_DATETIME_AWARE = True
# NOTE: tool specific prompts are handled on the fly and automatically injected
# into the prompt before passing to the LLM.
DEFAULT_SYSTEM_PROMPT = """
You are a highly capable, thoughtful, and precise assistant. Your goal is to deeply understand the \
user's intent, ask clarifying questions when needed, think step-by-step through complex problems, \
provide clear and accurate answers, and proactively anticipate helpful follow-up information. Always \
prioritize being truthful, nuanced, insightful, and efficient.
The current date is [[CURRENT_DATETIME]]
You use different text styles, bolding, emojis (sparingly), block quotes, and other formatting to make \
your responses more readable and engaging.
You use proper Markdown and LaTeX to format your responses for math, scientific, and chemical formulas, \
symbols, etc.: '$$\\n[expression]\\n$$' for standalone cases and '\\( [expression] \\)' when inline.
For code you prefer to use Markdown and specify the language.
You can use Markdown horizontal rules (---) to separate sections of your responses.
You can use Markdown tables to format your responses for data, lists, and other structured information.
""".strip()
INSERT_DICT: dict[str, Any] = {
"name": UNIFIED_ASSISTANT_NAME,
"description": UNIFIED_ASSISTANT_DESCRIPTION,
"system_prompt": DEFAULT_SYSTEM_PROMPT,
"num_chunks": UNIFIED_ASSISTANT_NUM_CHUNKS,
"display_priority": UNIFIED_ASSISTANT_DISPLAY_PRIORITY,
"llm_filter_extraction": UNIFIED_ASSISTANT_LLM_FILTER_EXTRACTION,
"llm_relevance_filter": UNIFIED_ASSISTANT_LLM_RELEVANCE_FILTER,
"recency_bias": UNIFIED_ASSISTANT_RECENCY_BIAS,
"chunks_above": UNIFIED_ASSISTANT_CHUNKS_ABOVE,
"chunks_below": UNIFIED_ASSISTANT_CHUNKS_BELOW,
"datetime_aware": UNIFIED_ASSISTANT_DATETIME_AWARE,
}
GENERAL_ASSISTANT_ID = -1
ART_ASSISTANT_ID = -3
class UserRow(NamedTuple):
"""Typed representation of user row from database query."""
id: UUID
chosen_assistants: list[int] | None
visible_assistants: list[int] | None
hidden_assistants: list[int] | None
pinned_assistants: list[int] | None
def upgrade() -> None:
    """Collapse all builtin assistants into a single unified assistant (ID 0).

    Steps (all inside one explicit transaction):
      1. Create or repurpose persona 0 as the unified assistant.
      2. Soft-delete every other builtin persona.
      3. Attach Search / ImageGeneration / (optional) WebSearch tools to persona 0.
      4. Repoint chat sessions from builtin personas to persona 0.
      5. Scrub builtin persona IDs from per-user preference lists.

    Raises:
        ValueError: if the SearchTool or ImageGenerationTool rows are missing
            (the tools migration must have run first).
    """
    conn = op.get_bind()
    # Start transaction
    # NOTE(review): Alembic usually wraps each migration in its own
    # transaction already; the explicit BEGIN/COMMIT here is assumed
    # intentional — confirm against env.py configuration.
    conn.execute(sa.text("BEGIN"))
    try:
        # Step 1: Create or update the unified assistant (ID 0)
        search_assistant = conn.execute(
            sa.text("SELECT * FROM persona WHERE id = 0")
        ).fetchone()
        if search_assistant:
            # Update existing Search assistant to be the unified assistant
            conn.execute(
                sa.text(
                    """
                    UPDATE persona
                    SET name = :name,
                        description = :description,
                        system_prompt = :system_prompt,
                        num_chunks = :num_chunks,
                        is_default_persona = true,
                        is_visible = true,
                        deleted = false,
                        display_priority = :display_priority,
                        llm_filter_extraction = :llm_filter_extraction,
                        llm_relevance_filter = :llm_relevance_filter,
                        recency_bias = :recency_bias,
                        chunks_above = :chunks_above,
                        chunks_below = :chunks_below,
                        datetime_aware = :datetime_aware,
                        starter_messages = null
                    WHERE id = 0
                    """
                ),
                INSERT_DICT,
            )
        else:
            # Create new unified assistant with ID 0
            conn.execute(
                sa.text(
                    """
                    INSERT INTO persona (
                        id, name, description, system_prompt, num_chunks,
                        is_default_persona, is_visible, deleted, display_priority,
                        llm_filter_extraction, llm_relevance_filter, recency_bias,
                        chunks_above, chunks_below, datetime_aware, starter_messages,
                        builtin_persona
                    ) VALUES (
                        0, :name, :description, :system_prompt, :num_chunks,
                        true, true, false, :display_priority, :llm_filter_extraction,
                        :llm_relevance_filter, :recency_bias, :chunks_above, :chunks_below,
                        :datetime_aware, null, true
                    )
                    """
                ),
                INSERT_DICT,
            )
        # Step 2: Mark ALL builtin assistants as deleted (except the unified assistant ID 0)
        conn.execute(
            sa.text(
                """
                UPDATE persona
                SET deleted = true, is_visible = false, is_default_persona = false
                WHERE builtin_persona = true AND id != 0
                """
            )
        )
        # Step 3: Add all built-in tools to the unified assistant
        # First, get the tool IDs for SearchTool, ImageGenerationTool, and WebSearchTool
        search_tool = conn.execute(
            sa.text("SELECT id FROM tool WHERE in_code_tool_id = 'SearchTool'")
        ).fetchone()
        if not search_tool:
            raise ValueError(
                "SearchTool not found in database. Ensure tools migration has run first."
            )
        image_gen_tool = conn.execute(
            sa.text("SELECT id FROM tool WHERE in_code_tool_id = 'ImageGenerationTool'")
        ).fetchone()
        if not image_gen_tool:
            raise ValueError(
                "ImageGenerationTool not found in database. Ensure tools migration has run first."
            )
        # WebSearchTool is optional - may not be configured
        web_search_tool = conn.execute(
            sa.text("SELECT id FROM tool WHERE in_code_tool_id = 'WebSearchTool'")
        ).fetchone()
        # Clear existing tool associations for persona 0
        conn.execute(sa.text("DELETE FROM persona__tool WHERE persona_id = 0"))
        # Add tools to the unified assistant
        conn.execute(
            sa.text(
                """
                INSERT INTO persona__tool (persona_id, tool_id)
                VALUES (0, :tool_id)
                ON CONFLICT DO NOTHING
                """
            ),
            {"tool_id": search_tool[0]},
        )
        conn.execute(
            sa.text(
                """
                INSERT INTO persona__tool (persona_id, tool_id)
                VALUES (0, :tool_id)
                ON CONFLICT DO NOTHING
                """
            ),
            {"tool_id": image_gen_tool[0]},
        )
        if web_search_tool:
            conn.execute(
                sa.text(
                    """
                    INSERT INTO persona__tool (persona_id, tool_id)
                    VALUES (0, :tool_id)
                    ON CONFLICT DO NOTHING
                    """
                ),
                {"tool_id": web_search_tool[0]},
            )
        # Step 4: Migrate existing chat sessions from all builtin assistants to unified assistant
        conn.execute(
            sa.text(
                """
                UPDATE chat_session
                SET persona_id = 0
                WHERE persona_id IN (
                    SELECT id FROM persona WHERE builtin_persona = true AND id != 0
                )
                """
            )
        )
        # Step 5: Migrate user preferences - remove references to all builtin assistants
        # First, get all builtin assistant IDs (except 0)
        builtin_assistants_result = conn.execute(
            sa.text(
                """
                SELECT id FROM persona
                WHERE builtin_persona = true AND id != 0
                """
            )
        ).fetchall()
        builtin_assistant_ids = [row[0] for row in builtin_assistants_result]
        # Get all users with preferences
        users_result = conn.execute(
            sa.text(
                """
                SELECT id, chosen_assistants, visible_assistants,
                       hidden_assistants, pinned_assistants
                FROM "user"
                """
            )
        ).fetchall()
        for user_row in users_result:
            user = UserRow(*user_row)
            user_id: UUID = user.id
            # Column name -> JSON-encoded new value; only changed columns are written.
            updates: dict[str, Any] = {}
            # Remove all builtin assistants from chosen_assistants
            if user.chosen_assistants:
                new_chosen: list[int] = [
                    assistant_id
                    for assistant_id in user.chosen_assistants
                    if assistant_id not in builtin_assistant_ids
                ]
                if new_chosen != user.chosen_assistants:
                    updates["chosen_assistants"] = json.dumps(new_chosen)
            # Remove all builtin assistants from visible_assistants
            if user.visible_assistants:
                new_visible: list[int] = [
                    assistant_id
                    for assistant_id in user.visible_assistants
                    if assistant_id not in builtin_assistant_ids
                ]
                if new_visible != user.visible_assistants:
                    updates["visible_assistants"] = json.dumps(new_visible)
            # Add all builtin assistants to hidden_assistants
            if user.hidden_assistants:
                new_hidden: list[int] = list(user.hidden_assistants)
                for old_id in builtin_assistant_ids:
                    if old_id not in new_hidden:
                        new_hidden.append(old_id)
                if new_hidden != user.hidden_assistants:
                    updates["hidden_assistants"] = json.dumps(new_hidden)
            else:
                updates["hidden_assistants"] = json.dumps(builtin_assistant_ids)
            # Remove all builtin assistants from pinned_assistants
            if user.pinned_assistants:
                new_pinned: list[int] = [
                    assistant_id
                    for assistant_id in user.pinned_assistants
                    if assistant_id not in builtin_assistant_ids
                ]
                if new_pinned != user.pinned_assistants:
                    updates["pinned_assistants"] = json.dumps(new_pinned)
            # Apply updates if any
            if updates:
                # Safe f-string interpolation: keys come from the fixed set
                # above, never from user input.
                set_clause = ", ".join([f"{k} = :{k}" for k in updates.keys()])
                updates["user_id"] = str(user_id)  # Convert UUID to string for SQL
                conn.execute(
                    sa.text(f'UPDATE "user" SET {set_clause} WHERE id = :user_id'),
                    updates,
                )
        # Commit transaction
        conn.execute(sa.text("COMMIT"))
    except Exception as e:
        # Rollback on error
        conn.execute(sa.text("ROLLBACK"))
        raise e
def downgrade() -> None:
    """Partially revert the unified-assistant migration.

    Restores visibility/default status of the Search (ID 0), General (ID -1)
    and Art (ID -3) assistants. Tool associations, names/descriptions, chat
    session persona IDs, and the other builtin assistants are NOT restored
    because upgrade() did not record their original state.
    """
    conn = op.get_bind()
    # Start transaction so all restores apply atomically.
    # NOTE(review): Alembic normally runs migrations inside its own
    # transaction already — explicit BEGIN/COMMIT assumed intentional.
    conn.execute(sa.text("BEGIN"))
    try:
        # Step 1: Keep Search assistant (ID 0) as default but restore original state
        conn.execute(
            sa.text(
                """
                UPDATE persona
                SET is_default_persona = true,
                    is_visible = true,
                    deleted = false
                WHERE id = 0
                """
            )
        )
        # Steps 2 & 3: Restore the General (ID -1) and Art (ID -3) assistants.
        # Both get the identical restore, so run one parameterized statement
        # per ID instead of duplicating the SQL.
        for assistant_id in (GENERAL_ASSISTANT_ID, ART_ASSISTANT_ID):
            conn.execute(
                sa.text(
                    """
                    UPDATE persona
                    SET deleted = false,
                        is_visible = true,
                        is_default_persona = true
                    WHERE id = :assistant_id
                    """
                ),
                {"assistant_id": assistant_id},
            )
        # Note: We don't restore the original tool associations, names, or descriptions
        # as those would require more complex logic to determine original state.
        # We also cannot restore original chat session persona_ids as we don't
        # have the original mappings.
        # Other builtin assistants remain deleted as per the requirement.
        # Commit transaction
        conn.execute(sa.text("COMMIT"))
    except Exception:
        # Roll back, then re-raise with the original traceback intact.
        # Bare `raise` (not `raise e`) is the idiomatic re-raise.
        conn.execute(sa.text("ROLLBACK"))
        raise

View File

@@ -1,115 +0,0 @@
"""add research agent database tables and chat message research fields
Revision ID: 5ae8240accb3
Revises: b558f51620b4
Create Date: 2025-08-06 14:29:24.691388
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "5ae8240accb3"
down_revision = "b558f51620b4"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add research-agent persistence: two new tables plus two chat_message columns."""
    # Add research_type and research_plan columns to chat_message table
    op.add_column(
        "chat_message",
        sa.Column("research_type", sa.String(), nullable=True),
    )
    op.add_column(
        "chat_message",
        sa.Column("research_plan", postgresql.JSONB(), nullable=True),
    )
    # Create research_agent_iteration table
    # One row per research iteration of a chat message. The unique
    # (primary_question_id, iteration_nr) pair is what the sub-step table's
    # composite FK below references.
    op.create_table(
        "research_agent_iteration",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column(
            "primary_question_id",
            sa.Integer(),
            sa.ForeignKey("chat_message.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("iteration_nr", sa.Integer(), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column("purpose", sa.String(), nullable=True),
        sa.Column("reasoning", sa.String(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "primary_question_id",
            "iteration_nr",
            name="_research_agent_iteration_unique_constraint",
        ),
    )
    # Create research_agent_iteration_sub_step table
    op.create_table(
        "research_agent_iteration_sub_step",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column(
            "primary_question_id",
            sa.Integer(),
            sa.ForeignKey("chat_message.id", ondelete="CASCADE"),
            nullable=False,
        ),
        # Self-reference: a sub-step may be spawned by another sub-step.
        sa.Column(
            "parent_question_id",
            sa.Integer(),
            sa.ForeignKey("research_agent_iteration_sub_step.id", ondelete="CASCADE"),
            nullable=True,
        ),
        sa.Column("iteration_nr", sa.Integer(), nullable=False),
        sa.Column("iteration_sub_step_nr", sa.Integer(), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column("sub_step_instructions", sa.String(), nullable=True),
        sa.Column(
            "sub_step_tool_id",
            sa.Integer(),
            sa.ForeignKey("tool.id"),
            nullable=True,
        ),
        sa.Column("reasoning", sa.String(), nullable=True),
        sa.Column("sub_answer", sa.String(), nullable=True),
        sa.Column("cited_doc_results", postgresql.JSONB(), nullable=True),
        sa.Column("claims", postgresql.JSONB(), nullable=True),
        sa.Column("generated_images", postgresql.JSONB(), nullable=True),
        sa.Column("additional_data", postgresql.JSONB(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        # Composite FK onto the unique (primary_question_id, iteration_nr) pair
        # above so deleting an iteration cascades to its sub-steps.
        sa.ForeignKeyConstraint(
            ["primary_question_id", "iteration_nr"],
            [
                "research_agent_iteration.primary_question_id",
                "research_agent_iteration.iteration_nr",
            ],
            ondelete="CASCADE",
        ),
    )
def downgrade() -> None:
    """Reverse the research-agent schema additions."""
    # Child table first: the sub-step table references the iteration table.
    for table_name in ("research_agent_iteration_sub_step", "research_agent_iteration"):
        op.drop_table(table_name)
    # Then strip the two columns that were added to chat_message.
    for column_name in ("research_plan", "research_type"):
        op.drop_column("chat_message", column_name)

View File

@@ -1,88 +0,0 @@
"""add_personal_access_token_table
Revision ID: 5e1c073d48a3
Revises: 09995b8811eb
Create Date: 2025-10-30 17:30:24.308521
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "5e1c073d48a3"
down_revision = "09995b8811eb"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the personal_access_token table plus its lookup indexes."""
    # Create personal_access_token table
    op.create_table(
        "personal_access_token",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        # Unique token digest; length 64 matches a SHA-256 hex digest.
        # NOTE(review): hashing scheme assumed from the length — confirm in app code.
        sa.Column("hashed_token", sa.String(length=64), nullable=False),
        # NOTE(review): presumably a truncated form shown in the UI — confirm.
        sa.Column("token_display", sa.String(), nullable=False),
        sa.Column(
            "user_id",
            postgresql.UUID(as_uuid=True),
            nullable=False,
        ),
        # NULL expires_at means the token never expires (no NOT NULL constraint).
        sa.Column(
            "expires_at",
            sa.DateTime(timezone=True),
            nullable=True,
        ),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column(
            "last_used_at",
            sa.DateTime(timezone=True),
            nullable=True,
        ),
        sa.Column(
            "is_revoked",
            sa.Boolean(),
            server_default=sa.text("false"),
            nullable=False,
        ),
        # Tokens die with their owner.
        sa.ForeignKeyConstraint(
            ["user_id"],
            ["user.id"],
            ondelete="CASCADE",
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("hashed_token"),
    )
    # Create indexes
    # Supports expiry sweeps.
    op.create_index(
        "ix_personal_access_token_expires_at",
        "personal_access_token",
        ["expires_at"],
        unique=False,
    )
    # Supports "list my tokens, newest first".
    op.create_index(
        "ix_pat_user_created",
        "personal_access_token",
        ["user_id", sa.text("created_at DESC")],
        unique=False,
    )
def downgrade() -> None:
    """Remove the personal_access_token table and its supporting indexes."""
    # Indexes go first, then the table that owns them.
    for index_name in ("ix_pat_user_created", "ix_personal_access_token_expires_at"):
        op.drop_index(index_name, table_name="personal_access_token")
    op.drop_table("personal_access_token")

View File

@@ -1,55 +0,0 @@
"""update_default_persona_prompt
Revision ID: 5e6f7a8b9c0d
Revises: 4f8a2b3c1d9e
Create Date: 2025-11-30 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "5e6f7a8b9c0d"
down_revision = "4f8a2b3c1d9e"
branch_labels = None
depends_on = None
# Persona row rewritten by this migration (the default assistant).
DEFAULT_PERSONA_ID = 0
# ruff: noqa: E501, W605 start
# Refreshed default system prompt. [[CURRENT_DATETIME]] and
# {citation_reminder_or_empty} are placeholders filled in at prompt-build time.
DEFAULT_SYSTEM_PROMPT = """
You are a highly capable, thoughtful, and precise assistant. Your goal is to deeply understand the user's intent, ask clarifying questions when needed, think step-by-step through complex problems, provide clear and accurate answers, and proactively anticipate helpful follow-up information. Always prioritize being truthful, nuanced, insightful, and efficient.
The current date is [[CURRENT_DATETIME]].{citation_reminder_or_empty}
# Response Style
You use different text styles, bolding, emojis (sparingly), block quotes, and other formatting to make your responses more readable and engaging.
You use proper Markdown and LaTeX to format your responses for math, scientific, and chemical formulas, symbols, etc.: '$$\\n[expression]\\n$$' for standalone cases and '\\( [expression] \\)' when inline.
For code you prefer to use Markdown and specify the language.
You can use horizontal rules (---) to separate sections of your responses.
You can use Markdown tables to format your responses for data, lists, and other structured information.
""".lstrip()
# ruff: noqa: E501, W605 end
def upgrade() -> None:
    """Overwrite the default persona's system prompt with the refreshed text."""
    bind = op.get_bind()
    params = {
        "system_prompt": DEFAULT_SYSTEM_PROMPT,
        "persona_id": DEFAULT_PERSONA_ID,
    }
    statement = sa.text(
        """
        UPDATE persona
        SET system_prompt = :system_prompt
        WHERE id = :persona_id
        """
    )
    bind.execute(statement, params)
def downgrade() -> None:
    """Intentional no-op.

    The previous prompt text was not recorded before the upgrade, so it
    cannot be restored; the refreshed prompt is a reasonable default to keep.
    """

View File

@@ -1,132 +0,0 @@
"""add file names to file connector config
Revision ID: 62c3a055a141
Revises: 3fc5d75723b3
Create Date: 2025-07-30 17:01:24.417551
"""
from alembic import op
import sqlalchemy as sa
import json
import os
import logging
# revision identifiers, used by Alembic.
revision = "62c3a055a141"
down_revision = "3fc5d75723b3"
branch_labels = None
depends_on = None
SKIP_FILE_NAME_MIGRATION = (
os.environ.get("SKIP_FILE_NAME_MIGRATION", "true").lower() == "true"
)
logger = logging.getLogger("alembic.runtime.migration")
def upgrade() -> None:
    """Backfill a ``file_names`` list into each FILE connector's config.

    Gated behind SKIP_FILE_NAME_MIGRATION (skipped by default) because it
    issues one file_record lookup per stored file, which can be slow on
    large installations.
    """
    if SKIP_FILE_NAME_MIGRATION:
        logger.info(
            "Skipping file name migration. Hint: set SKIP_FILE_NAME_MIGRATION=false to run this migration"
        )
        return
    logger.info("Running file name migration")
    # Get connection
    conn = op.get_bind()
    # Get all FILE connectors with their configs
    file_connectors = conn.execute(
        sa.text(
            """
            SELECT id, connector_specific_config
            FROM connector
            WHERE source = 'FILE'
            """
        )
    ).fetchall()
    for connector_id, config in file_connectors:
        # Parse config if it's a string (driver may return JSON as text)
        if isinstance(config, str):
            config = json.loads(config)
        # Get file_locations list (file IDs referenced by this connector)
        file_locations = config.get("file_locations", [])
        # Get display names for each file_id — one query per file; acceptable
        # for a one-off, opt-in migration
        file_names = []
        for file_id in file_locations:
            result = conn.execute(
                sa.text(
                    """
                    SELECT display_name
                    FROM file_record
                    WHERE file_id = :file_id
                    """
                ),
                {"file_id": file_id},
            ).fetchone()
            if result:
                file_names.append(result[0])
            else:
                # Fall back to the raw id so list lengths stay aligned
                file_names.append(file_id)  # Should not happen
        # Add file_names to config (copy first; don't mutate the fetched row)
        new_config = dict(config)
        new_config["file_names"] = file_names
        # Update the connector
        conn.execute(
            sa.text(
                """
                UPDATE connector
                SET connector_specific_config = :new_config
                WHERE id = :connector_id
                """
            ),
            {"connector_id": connector_id, "new_config": json.dumps(new_config)},
        )
def downgrade() -> None:
    """Strip the backfilled ``file_names`` key from every FILE connector config."""
    conn = op.get_bind()
    rows = conn.execute(
        sa.text(
            """
            SELECT id, connector_specific_config
            FROM connector
            WHERE source = 'FILE'
            """
        )
    ).fetchall()
    for connector_id, raw_config in rows:
        # Configs may arrive as JSON text depending on driver settings.
        config = json.loads(raw_config) if isinstance(raw_config, str) else raw_config
        if "file_names" not in config:
            continue
        # Rebuild the config without the backfilled key; everything else is kept.
        cleaned = {key: value for key, value in config.items() if key != "file_names"}
        conn.execute(
            sa.text(
                """
                UPDATE connector
                SET connector_specific_config = :new_config
                WHERE id = :connector_id
                """
            ),
            {
                "connector_id": connector_id,
                "new_config": json.dumps(cleaned),
            },
        )

View File

@@ -1,44 +0,0 @@
"""add_created_at_in_project_userfile
Revision ID: 6436661d5b65
Revises: c7e9f4a3b2d1
Create Date: 2025-11-24 11:50:24.536052
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "6436661d5b65"
down_revision = "c7e9f4a3b2d1"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add a created_at timestamp to project__user_file plus a listing index."""
    created_at_column = sa.Column(
        "created_at",
        sa.DateTime(timezone=True),
        server_default=sa.text("now()"),
        nullable=False,
    )
    op.add_column("project__user_file", created_at_column)
    # Composite index supports "newest files in a project first" queries.
    op.create_index(
        "ix_project__user_file_project_id_created_at",
        "project__user_file",
        ["project_id", sa.text("created_at DESC")],
    )
def downgrade() -> None:
    """Undo upgrade(): drop the listing index, then the created_at column."""
    table = "project__user_file"
    op.drop_index("ix_project__user_file_project_id_created_at", table_name=table)
    op.drop_column(table, "created_at")

View File

@@ -1,37 +0,0 @@
"""Add image input support to model config
Revision ID: 64bd5677aeb6
Revises: b30353be4eec
Create Date: 2025-09-28 15:48:12.003612
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "64bd5677aeb6"
down_revision = "b30353be4eec"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add supports_image_input to model_configuration; backfill NULL is_visible."""
    op.add_column(
        "model_configuration",
        sa.Column("supports_image_input", sa.Boolean(), nullable=True),
    )
    # is_visible was nullable when model visibility first shipped; normalize
    # any lingering NULLs to false so the column behaves as a plain boolean.
    bind = op.get_bind()
    bind.execute(
        sa.text(
            "UPDATE model_configuration SET is_visible = false WHERE is_visible IS NULL"
        )
    )
def downgrade() -> None:
    """Drop supports_image_input; the is_visible backfill is intentionally kept."""
    op.drop_column("model_configuration", "supports_image_input")

View File

@@ -1,37 +0,0 @@
"""add queries and is web fetch to iteration answer
Revision ID: 6f4f86aef280
Revises: 03d710ccf29c
Create Date: 2025-10-14 18:08:30.920123
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "6f4f86aef280"
down_revision = "03d710ccf29c"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add is_web_fetch and queries columns to research_agent_iteration_sub_step."""
    new_columns = (
        sa.Column("is_web_fetch", sa.Boolean(), nullable=True),
        sa.Column("queries", postgresql.JSONB(), nullable=True),
    )
    for column in new_columns:
        op.add_column("research_agent_iteration_sub_step", column)
def downgrade() -> None:
    """Drop the queries and is_web_fetch columns added in upgrade()."""
    for column_name in ("queries", "is_web_fetch"):
        op.drop_column("research_agent_iteration_sub_step", column_name)

View File

@@ -45,23 +45,8 @@ def upgrade() -> None:
def downgrade() -> None:
bind = op.get_bind()
inspector = sa.inspect(bind)
if "chat_session" in inspector.get_table_names():
chat_session_fks = {
fk.get("name") for fk in inspector.get_foreign_keys("chat_session")
}
if "chat_session_chat_folder_fk" in chat_session_fks:
op.drop_constraint(
"chat_session_chat_folder_fk", "chat_session", type_="foreignkey"
)
chat_session_columns = {
col["name"] for col in inspector.get_columns("chat_session")
}
if "folder_id" in chat_session_columns:
op.drop_column("chat_session", "folder_id")
if "chat_folder" in inspector.get_table_names():
op.drop_table("chat_folder")
op.drop_constraint(
"chat_session_chat_folder_fk", "chat_session", type_="foreignkey"
)
op.drop_column("chat_session", "folder_id")
op.drop_table("chat_folder")

View File

@@ -1,27 +0,0 @@
"""Add display_name to model_configuration
Revision ID: 7bd55f264e1b
Revises: e8f0d2a38171
Create Date: 2025-12-04
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "7bd55f264e1b"
down_revision = "e8f0d2a38171"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add a nullable display_name column to model_configuration."""
    display_name_column = sa.Column("display_name", sa.String(), nullable=True)
    op.add_column("model_configuration", display_name_column)
def downgrade() -> None:
    """Remove the display_name column added in upgrade()."""
    op.drop_column("model_configuration", "display_name")

View File

@@ -1,341 +0,0 @@
"""Migration 4: User file UUID primary key swap
Revision ID: 7cc3fcc116c1
Revises: 16c37a30adf2
Create Date: 2025-09-22 09:54:38.292952
This migration performs the critical UUID primary key swap on user_file table.
It updates all foreign key references to use UUIDs instead of integers.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as psql
import logging
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "7cc3fcc116c1"
down_revision = "16c37a30adf2"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Swap user_file primary key from integer to UUID.

    Assumes an earlier migration added user_file.new_id (UUID) and
    persona__user_file.user_file_id_uuid. Idempotent: returns early when
    new_id is already gone. Statement order below is load-bearing —
    constraints must be dropped before the columns they depend on.
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    # Verify we're in the expected state
    user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
    if "new_id" not in user_file_columns:
        logger.warning(
            "user_file.new_id not found - migration may have already been applied"
        )
        return
    logger.info("Starting UUID primary key swap...")
    # === Step 1: Update persona__user_file foreign key to UUID ===
    logger.info("Updating persona__user_file foreign key...")
    # Drop existing foreign key constraints
    op.execute(
        "ALTER TABLE persona__user_file DROP CONSTRAINT IF EXISTS persona__user_file_user_file_id_uuid_fkey"
    )
    op.execute(
        "ALTER TABLE persona__user_file DROP CONSTRAINT IF EXISTS persona__user_file_user_file_id_fkey"
    )
    # Create new foreign key to user_file.new_id
    op.create_foreign_key(
        "persona__user_file_user_file_id_fkey",
        "persona__user_file",
        "user_file",
        local_cols=["user_file_id_uuid"],
        remote_cols=["new_id"],
    )
    # Drop the old integer column and rename UUID column
    op.execute("ALTER TABLE persona__user_file DROP COLUMN IF EXISTS user_file_id")
    op.alter_column(
        "persona__user_file",
        "user_file_id_uuid",
        new_column_name="user_file_id",
        existing_type=psql.UUID(as_uuid=True),
        nullable=False,
    )
    # Recreate composite primary key
    op.execute(
        "ALTER TABLE persona__user_file DROP CONSTRAINT IF EXISTS persona__user_file_pkey"
    )
    op.execute(
        "ALTER TABLE persona__user_file ADD PRIMARY KEY (persona_id, user_file_id)"
    )
    logger.info("Updated persona__user_file to use UUID foreign key")
    # === Step 2: Perform the primary key swap on user_file ===
    logger.info("Swapping user_file primary key to UUID...")
    # Drop the primary key constraint
    op.execute("ALTER TABLE user_file DROP CONSTRAINT IF EXISTS user_file_pkey")
    # Drop the old id column and rename new_id to id
    op.execute("ALTER TABLE user_file DROP COLUMN IF EXISTS id")
    op.alter_column(
        "user_file",
        "new_id",
        new_column_name="id",
        existing_type=psql.UUID(as_uuid=True),
        nullable=False,
    )
    # Set default for new inserts
    op.alter_column(
        "user_file",
        "id",
        existing_type=psql.UUID(as_uuid=True),
        server_default=sa.text("gen_random_uuid()"),
    )
    # Create new primary key
    op.execute("ALTER TABLE user_file ADD PRIMARY KEY (id)")
    logger.info("Swapped user_file primary key to UUID")
    # === Step 3: Update foreign key constraints ===
    logger.info("Updating foreign key constraints...")
    # Recreate persona__user_file foreign key to point to user_file.id
    # Drop existing FK first to break dependency on the unique constraint
    op.execute(
        "ALTER TABLE persona__user_file DROP CONSTRAINT IF EXISTS persona__user_file_user_file_id_fkey"
    )
    # Drop the unique constraint on (formerly) new_id BEFORE recreating the FK,
    # so the FK will bind to the primary key instead of the unique index.
    op.execute("ALTER TABLE user_file DROP CONSTRAINT IF EXISTS uq_user_file_new_id")
    # Now recreate FK to the primary key column
    op.create_foreign_key(
        "persona__user_file_user_file_id_fkey",
        "persona__user_file",
        "user_file",
        local_cols=["user_file_id"],
        remote_cols=["id"],
    )
    # Add foreign keys for project__user_file
    # NOTE(review): this inspector was created before the DDL above; it is
    # only consulted for project__user_file, which steps 1-3 did not touch —
    # confirm reflection is not cached in a way that would make this stale.
    existing_fks = inspector.get_foreign_keys("project__user_file")
    has_user_file_fk = any(
        fk.get("referred_table") == "user_file"
        and fk.get("constrained_columns") == ["user_file_id"]
        for fk in existing_fks
    )
    if not has_user_file_fk:
        op.create_foreign_key(
            "fk_project__user_file_user_file_id",
            "project__user_file",
            "user_file",
            ["user_file_id"],
            ["id"],
        )
        logger.info("Added project__user_file -> user_file foreign key")
    has_project_fk = any(
        fk.get("referred_table") == "user_project"
        and fk.get("constrained_columns") == ["project_id"]
        for fk in existing_fks
    )
    if not has_project_fk:
        op.create_foreign_key(
            "fk_project__user_file_project_id",
            "project__user_file",
            "user_project",
            ["project_id"],
            ["id"],
        )
        logger.info("Added project__user_file -> user_project foreign key")
    # === Step 4: Mark files for document_id migration ===
    logger.info("Marking files for background document_id migration...")
    logger.info("Migration 4 (UUID primary key swap) completed successfully")
    logger.info(
        "NOTE: Background task will update document IDs in Vespa and search_doc"
    )
def downgrade() -> None:
    """Revert UUID primary key back to integer (data destructive!).

    Rebuilds sequence-backed integer IDs on user_file, maps the UUID foreign
    keys on persona__user_file / project__user_file back to the new integers,
    and restores the integer primary key plus the original constraints.
    Any external reference captured as a UUID after the upgrade is lost.
    """
    logger.error("CRITICAL: Downgrading UUID primary key swap is data destructive!")
    logger.error(
        "This will break all UUID-based references created after the migration."
    )
    logger.error("Only proceed if absolutely necessary and have backups.")
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    # Capture existing primary key definitions so we can restore them after swaps
    persona_pk = inspector.get_pk_constraint("persona__user_file") or {}
    persona_pk_name = persona_pk.get("name")
    persona_pk_cols = persona_pk.get("constrained_columns") or []
    project_pk = inspector.get_pk_constraint("project__user_file") or {}
    project_pk_name = project_pk.get("name")
    project_pk_cols = project_pk.get("constrained_columns") or []
    # Drop foreign keys that reference the UUID primary key
    op.drop_constraint(
        "persona__user_file_user_file_id_fkey",
        "persona__user_file",
        type_="foreignkey",
    )
    op.drop_constraint(
        "fk_project__user_file_user_file_id",
        "project__user_file",
        type_="foreignkey",
    )
    # Drop primary keys that rely on the UUID column so we can replace it
    if persona_pk_name:
        op.drop_constraint(persona_pk_name, "persona__user_file", type_="primary")
    if project_pk_name:
        op.drop_constraint(project_pk_name, "project__user_file", type_="primary")
    # Rebuild integer IDs on user_file using a sequence-backed column
    op.execute("CREATE SEQUENCE IF NOT EXISTS user_file_id_seq")
    op.add_column(
        "user_file",
        sa.Column(
            "id_int",
            sa.Integer(),
            server_default=sa.text("nextval('user_file_id_seq')"),
            nullable=False,
        ),
    )
    op.execute("ALTER SEQUENCE user_file_id_seq OWNED BY user_file.id_int")
    # Prepare integer foreign key columns on referencing tables
    op.add_column(
        "persona__user_file",
        sa.Column("user_file_id_int", sa.Integer(), nullable=True),
    )
    op.add_column(
        "project__user_file",
        sa.Column("user_file_id_int", sa.Integer(), nullable=True),
    )
    # Populate the new integer foreign key columns by mapping from the UUID IDs
    op.execute(
        """
        UPDATE persona__user_file AS p
        SET user_file_id_int = uf.id_int
        FROM user_file AS uf
        WHERE p.user_file_id = uf.id
        """
    )
    op.execute(
        """
        UPDATE project__user_file AS p
        SET user_file_id_int = uf.id_int
        FROM user_file AS uf
        WHERE p.user_file_id = uf.id
        """
    )
    # Backfill complete; tighten the columns to NOT NULL.
    op.alter_column(
        "persona__user_file",
        "user_file_id_int",
        existing_type=sa.Integer(),
        nullable=False,
    )
    op.alter_column(
        "project__user_file",
        "user_file_id_int",
        existing_type=sa.Integer(),
        nullable=False,
    )
    # Remove the UUID foreign key columns and rename the integer replacements
    op.drop_column("persona__user_file", "user_file_id")
    op.alter_column(
        "persona__user_file",
        "user_file_id_int",
        new_column_name="user_file_id",
        existing_type=sa.Integer(),
        nullable=False,
    )
    op.drop_column("project__user_file", "user_file_id")
    op.alter_column(
        "project__user_file",
        "user_file_id_int",
        new_column_name="user_file_id",
        existing_type=sa.Integer(),
        nullable=False,
    )
    # Swap the user_file primary key back to the integer column
    op.drop_constraint("user_file_pkey", "user_file", type_="primary")
    op.drop_column("user_file", "id")
    op.alter_column(
        "user_file",
        "id_int",
        new_column_name="id",
        existing_type=sa.Integer(),
    )
    op.alter_column(
        "user_file",
        "id",
        existing_type=sa.Integer(),
        nullable=False,
        server_default=sa.text("nextval('user_file_id_seq')"),
    )
    op.execute("ALTER SEQUENCE user_file_id_seq OWNED BY user_file.id")
    # Advance the sequence past the highest assigned id; the third setval
    # argument (is_called) is false when the table is empty so the first
    # nextval() returns 1.
    op.execute(
        """
        SELECT setval(
            'user_file_id_seq',
            GREATEST(COALESCE(MAX(id), 1), 1),
            MAX(id) IS NOT NULL
        )
        FROM user_file
        """
    )
    op.create_primary_key("user_file_pkey", "user_file", ["id"])
    # Restore primary keys on referencing tables
    if persona_pk_cols:
        op.create_primary_key(
            "persona__user_file_pkey", "persona__user_file", persona_pk_cols
        )
    if project_pk_cols:
        op.create_primary_key(
            "project__user_file_pkey",
            "project__user_file",
            project_pk_cols,
        )
    # Recreate foreign keys pointing at the integer primary key
    op.create_foreign_key(
        "persona__user_file_user_file_id_fkey",
        "persona__user_file",
        "user_file",
        ["user_file_id"],
        ["id"],
    )
    op.create_foreign_key(
        "fk_project__user_file_user_file_id",
        "project__user_file",
        "user_file",
        ["user_file_id"],
        ["id"],
    )

View File

@@ -1,249 +0,0 @@
"""add_mcp_server_and_connection_config_models
Revision ID: 7ed603b64d5a
Revises: b329d00a9ea6
Create Date: 2025-07-28 17:35:59.900680
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from onyx.db.enums import MCPAuthenticationType
# revision identifiers, used by Alembic.
revision = "7ed603b64d5a"
down_revision = "b329d00a9ea6"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create tables and columns for MCP Server support.

    Order matters: mcp_server is created without FKs first, then
    mcp_connection_config (which references it), then the circular
    back-reference is added, followed by access-control tables and
    cascade-behavior changes on existing FKs.
    """
    # 1. MCP Server main table (no FK constraints yet to avoid circular refs)
    op.create_table(
        "mcp_server",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("owner", sa.String(), nullable=False),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("description", sa.String(), nullable=True),
        sa.Column("server_url", sa.String(), nullable=False),
        # native_enum=False stores the enum as a VARCHAR with a CHECK rather
        # than a Postgres enum type.
        sa.Column(
            "auth_type",
            sa.Enum(
                MCPAuthenticationType,
                name="mcp_authentication_type",
                native_enum=False,
            ),
            nullable=False,
        ),
        # FK added in step 3 once mcp_connection_config exists.
        sa.Column("admin_connection_config_id", sa.Integer(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),  # type: ignore
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),  # type: ignore
            nullable=False,
        ),
    )
    # 2. MCP Connection Config table (can reference mcp_server now that it exists)
    op.create_table(
        "mcp_connection_config",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("mcp_server_id", sa.Integer(), nullable=True),
        sa.Column("user_email", sa.String(), nullable=False, default=""),
        # Encrypted/opaque credential blob; stored as raw bytes.
        sa.Column("config", sa.LargeBinary(), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),  # type: ignore
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),  # type: ignore
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["mcp_server_id"], ["mcp_server.id"], ondelete="CASCADE"
        ),
    )
    # Helpful indexes
    op.create_index(
        "ix_mcp_connection_config_server_user",
        "mcp_connection_config",
        ["mcp_server_id", "user_email"],
    )
    op.create_index(
        "ix_mcp_connection_config_user_email",
        "mcp_connection_config",
        ["user_email"],
    )
    # 3. Add the back-references from mcp_server to connection configs
    op.create_foreign_key(
        "mcp_server_admin_config_fk",
        "mcp_server",
        "mcp_connection_config",
        ["admin_connection_config_id"],
        ["id"],
        ondelete="SET NULL",
    )
    # 4. Association / access-control tables
    op.create_table(
        "mcp_server__user",
        sa.Column("mcp_server_id", sa.Integer(), primary_key=True),
        sa.Column("user_id", sa.UUID(), primary_key=True),
        sa.ForeignKeyConstraint(
            ["mcp_server_id"], ["mcp_server.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="CASCADE"),
    )
    op.create_table(
        "mcp_server__user_group",
        sa.Column("mcp_server_id", sa.Integer(), primary_key=True),
        sa.Column("user_group_id", sa.Integer(), primary_key=True),
        sa.ForeignKeyConstraint(
            ["mcp_server_id"], ["mcp_server.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["user_group_id"], ["user_group.id"]),
    )
    # 5. Update existing `tool` table allow tools to belong to an MCP server
    op.add_column(
        "tool",
        sa.Column("mcp_server_id", sa.Integer(), nullable=True),
    )
    # Add column for MCP tool input schema
    op.add_column(
        "tool",
        sa.Column("mcp_input_schema", postgresql.JSONB(), nullable=True),
    )
    op.create_foreign_key(
        "tool_mcp_server_fk",
        "tool",
        "mcp_server",
        ["mcp_server_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # 6. Update persona__tool foreign keys to cascade delete
    # This ensures that when a tool is deleted (including via MCP server deletion),
    # the corresponding persona__tool rows are also deleted
    op.drop_constraint(
        "persona__tool_tool_id_fkey", "persona__tool", type_="foreignkey"
    )
    op.drop_constraint(
        "persona__tool_persona_id_fkey", "persona__tool", type_="foreignkey"
    )
    op.create_foreign_key(
        "persona__tool_persona_id_fkey",
        "persona__tool",
        "persona",
        ["persona_id"],
        ["id"],
        ondelete="CASCADE",
    )
    op.create_foreign_key(
        "persona__tool_tool_id_fkey",
        "persona__tool",
        "tool",
        ["tool_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # 7. Update research_agent_iteration_sub_step foreign key to SET NULL on delete
    # This ensures that when a tool is deleted, the sub_step_tool_id is set to NULL
    # instead of causing a foreign key constraint violation
    op.drop_constraint(
        "research_agent_iteration_sub_step_sub_step_tool_id_fkey",
        "research_agent_iteration_sub_step",
        type_="foreignkey",
    )
    op.create_foreign_key(
        "research_agent_iteration_sub_step_sub_step_tool_id_fkey",
        "research_agent_iteration_sub_step",
        "tool",
        ["sub_step_tool_id"],
        ["id"],
        ondelete="SET NULL",
    )
def downgrade() -> None:
    """Drop all MCP-related tables / columns"""
    # # # 1. Drop FK & columns from tool
    # op.drop_constraint("tool_mcp_server_fk", "tool", type_="foreignkey")
    # Remove MCP-backed tools first so later drops cannot leave dangling rows.
    op.execute("DELETE FROM tool WHERE mcp_server_id IS NOT NULL")
    # Restore the original research_agent_iteration_sub_step FK (no SET NULL).
    op.drop_constraint(
        "research_agent_iteration_sub_step_sub_step_tool_id_fkey",
        "research_agent_iteration_sub_step",
        type_="foreignkey",
    )
    op.create_foreign_key(
        "research_agent_iteration_sub_step_sub_step_tool_id_fkey",
        "research_agent_iteration_sub_step",
        "tool",
        ["sub_step_tool_id"],
        ["id"],
    )
    # Restore original persona__tool foreign keys (without CASCADE)
    op.drop_constraint(
        "persona__tool_persona_id_fkey", "persona__tool", type_="foreignkey"
    )
    op.drop_constraint(
        "persona__tool_tool_id_fkey", "persona__tool", type_="foreignkey"
    )
    op.create_foreign_key(
        "persona__tool_persona_id_fkey",
        "persona__tool",
        "persona",
        ["persona_id"],
        ["id"],
    )
    op.create_foreign_key(
        "persona__tool_tool_id_fkey",
        "persona__tool",
        "tool",
        ["tool_id"],
        ["id"],
    )
    # NOTE(review): "tool_mcp_server_fk" is never dropped explicitly (see the
    # commented-out line above); this presumably relies on the database
    # dropping the dependent constraint together with the column — confirm.
    op.drop_column("tool", "mcp_input_schema")
    op.drop_column("tool", "mcp_server_id")
    # 2. Drop association tables
    op.drop_table("mcp_server__user_group")
    op.drop_table("mcp_server__user")
    # 3. Drop FK from mcp_server to connection configs
    op.drop_constraint("mcp_server_admin_config_fk", "mcp_server", type_="foreignkey")
    # 4. Drop connection config indexes & table
    op.drop_index(
        "ix_mcp_connection_config_user_email", table_name="mcp_connection_config"
    )
    op.drop_index(
        "ix_mcp_connection_config_server_user", table_name="mcp_connection_config"
    )
    op.drop_table("mcp_connection_config")
    # 5. Finally drop mcp_server table
    op.drop_table("mcp_server")

View File

@@ -1,55 +0,0 @@
"""update_default_system_prompt
Revision ID: 87c52ec39f84
Revises: 7bd55f264e1b
Create Date: 2025-12-05 15:54:06.002452
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "87c52ec39f84"
down_revision = "7bd55f264e1b"
branch_labels = None
depends_on = None
DEFAULT_PERSONA_ID = 0
# ruff: noqa: E501, W605 start
DEFAULT_SYSTEM_PROMPT = """
You are a highly capable, thoughtful, and precise assistant. Your goal is to deeply understand the user's intent, ask clarifying questions when needed, think step-by-step through complex problems, provide clear and accurate answers, and proactively anticipate helpful follow-up information. Always prioritize being truthful, nuanced, insightful, and efficient.
The current date is [[CURRENT_DATETIME]].[[CITATION_GUIDANCE]]
# Response Style
You use different text styles, bolding, emojis (sparingly), block quotes, and other formatting to make your responses more readable and engaging.
You use proper Markdown and LaTeX to format your responses for math, scientific, and chemical formulas, symbols, etc.: '$$\\n[expression]\\n$$' for standalone cases and '\\( [expression] \\)' when inline.
For code you prefer to use Markdown and specify the language.
You can use horizontal rules (---) to separate sections of your responses.
You can use Markdown tables to format your responses for data, lists, and other structured information.
""".lstrip()
# ruff: noqa: E501, W605 end
def upgrade() -> None:
    """Overwrite the default persona's system prompt with the new default text."""
    update_stmt = sa.text(
        """
UPDATE persona
SET system_prompt = :system_prompt
WHERE id = :persona_id
"""
    )
    # Bound parameters keep the (large, multi-line) prompt safely quoted.
    op.get_bind().execute(
        update_stmt,
        {"system_prompt": DEFAULT_SYSTEM_PROMPT, "persona_id": DEFAULT_PERSONA_ID},
    )
def downgrade() -> None:
    # We don't revert the system prompt on downgrade since we don't know
    # what the previous value was. The new prompt is a reasonable default.
    pass

View File

@@ -1,38 +0,0 @@
"""drop include citations
Revision ID: 8818cf73fa1a
Revises: 7ed603b64d5a
Create Date: 2025-09-02 19:43:50.060680
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "8818cf73fa1a"
down_revision = "7ed603b64d5a"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Drop the obsolete include_citations flag from prompts."""
    op.drop_column("prompt", "include_citations")
def downgrade() -> None:
    """Re-create include_citations and backfill it by prompt name."""
    op.add_column(
        "prompt",
        sa.Column(
            "include_citations",
            sa.BOOLEAN(),
            autoincrement=False,
            nullable=True,
        ),
    )
    # Set include_citations based on prompt name: FALSE for ImageGeneration, TRUE for others
    op.execute(
        sa.text(
            "UPDATE prompt SET include_citations = CASE WHEN name = 'ImageGeneration' THEN FALSE ELSE TRUE END"
        )
    )

View File

@@ -1,341 +0,0 @@
"""tag-fix
Revision ID: 90e3b9af7da4
Revises: 62c3a055a141
Create Date: 2025-08-01 20:58:14.607624
"""
import json
import logging
import os
from typing import cast
from typing import Generator
from alembic import op
import sqlalchemy as sa
from onyx.document_index.factory import get_default_document_index
from onyx.document_index.vespa_constants import DOCUMENT_ID_ENDPOINT
from onyx.db.search_settings import SearchSettings
from onyx.configs.app_configs import AUTH_TYPE
from onyx.configs.constants import AuthType
from onyx.document_index.vespa.shared_utils.utils import get_vespa_http_client
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "90e3b9af7da4"
down_revision = "62c3a055a141"
branch_labels = None
depends_on = None
# Opt-in flag for the expensive tag cleanup below; defaults to skipping it.
SKIP_TAG_FIX = os.environ.get("SKIP_TAG_FIX", "true").lower() == "true"
# override for cloud
if AUTH_TYPE == AuthType.CLOUD:
    SKIP_TAG_FIX = True
def set_is_list_for_known_tags() -> None:
    """
    Sets is_list to true for all tags that are known to be lists.

    Uses bound parameters instead of interpolating the values into the SQL
    text, and prepares the statement once outside the loop.
    """
    # (source, tag_key) pairs whose metadata values are always lists.
    LIST_METADATA: list[tuple[str, str]] = [
        ("CLICKUP", "tags"),
        ("CONFLUENCE", "labels"),
        ("DISCOURSE", "tags"),
        ("FRESHDESK", "emails"),
        ("GITHUB", "assignees"),
        ("GITHUB", "labels"),
        ("GURU", "tags"),
        ("GURU", "folders"),
        ("HUBSPOT", "associated_contact_ids"),
        ("HUBSPOT", "associated_company_ids"),
        ("HUBSPOT", "associated_deal_ids"),
        ("HUBSPOT", "associated_ticket_ids"),
        ("JIRA", "labels"),
        ("MEDIAWIKI", "categories"),
        ("ZENDESK", "labels"),
        ("ZENDESK", "content_tags"),
    ]
    bind = op.get_bind()
    # Hoisted out of the loop: the statement text never changes.
    stmt = sa.text(
        """
        UPDATE tag
        SET is_list = true
        WHERE tag_key = :key
        AND source = :source
        """
    )
    for source, key in LIST_METADATA:
        bind.execute(stmt, {"key": key, "source": source})
def set_is_list_for_list_tags() -> None:
    """
    Sets is_list to true for all tags which have multiple values for a given
    document, key, and source triplet. This only works if we remove old tags
    from the database.
    """
    bind = op.get_bind()
    # A (tag_key, source) pair is treated as a list tag as soon as any single
    # document carries more than one value for it.
    bind.execute(
        sa.text(
            """
            UPDATE tag
            SET is_list = true
            FROM (
                SELECT DISTINCT tag.tag_key, tag.source
                FROM tag
                JOIN document__tag ON tag.id = document__tag.tag_id
                GROUP BY tag.tag_key, tag.source, document__tag.document_id
                HAVING count(*) > 1
            ) AS list_tags
            WHERE tag.tag_key = list_tags.tag_key
            AND tag.source = list_tags.source
            """
        )
    )
def log_list_tags() -> None:
    """Log every (source, tag_key) pair currently flagged as a list tag."""
    rows = op.get_bind().execute(
        sa.text(
            """
            SELECT DISTINCT source, tag_key
            FROM tag
            WHERE is_list
            ORDER BY source, tag_key
            """
        )
    ).fetchall()
    formatted = [f" {source}: {key}" for source, key in rows]
    logger.info("List tags:\n" + "\n".join(formatted))
def remove_old_tags() -> None:
    """
    Removes old tags from the database.
    Previously, there was a bug where if a document got indexed with a tag and then
    the document got reindexed, the old tag would not be removed.
    This function removes those old tags by comparing it against the tags in vespa.
    """
    current_search_settings, future_search_settings = active_search_settings()
    document_index = get_default_document_index(
        current_search_settings, future_search_settings
    )
    # Get the index name
    if hasattr(document_index, "index_name"):
        index_name = document_index.index_name
    else:
        # Default index name if we can't get it from the document_index
        index_name = "danswer_index"
    # Hoisted: the connection does not change between rows.
    bind = op.get_bind()
    for batch in _get_batch_documents_with_multiple_tags():
        n_deleted = 0
        for document_id in batch:
            true_metadata = _get_vespa_metadata(document_id, index_name)
            tags = _get_document_tags(document_id)
            # identify document__tags to delete: any tag whose value no longer
            # matches what Vespa holds for this document
            to_delete: list[str] = []
            for tag_id, tag_key, tag_value in tags:
                true_val = true_metadata.get(tag_key, "")
                if (isinstance(true_val, list) and tag_value not in true_val) or (
                    isinstance(true_val, str) and tag_value != true_val
                ):
                    to_delete.append(str(tag_id))
            if not to_delete:
                continue
            # delete old document__tags. document_id is passed as a bound
            # parameter so ids containing quotes cannot break the statement;
            # tag ids are integers stringified above, so joining them is safe.
            result = bind.execute(
                sa.text(
                    f"""
                    DELETE FROM document__tag
                    WHERE document_id = :document_id
                    AND tag_id IN ({','.join(to_delete)})
                    """
                ),
                {"document_id": document_id},
            )
            n_deleted += result.rowcount
        logger.info(f"Processed {len(batch)} documents and deleted {n_deleted} tags")
def active_search_settings() -> tuple[SearchSettings, SearchSettings | None]:
    """Return the newest PRESENT search settings and, if any, the newest FUTURE ones.

    Raises:
        RuntimeError: if no PRESENT search_settings row exists.
    """

    def _latest_with_status(status: str) -> SearchSettings | None:
        # Fetch the most recent search_settings row with the given status.
        rows = (
            op.get_bind()
            .execute(
                sa.text(
                    """
                    SELECT * FROM search_settings
                    WHERE status = :status ORDER BY id DESC LIMIT 1
                    """
                ),
                {"status": status},
            )
            .fetchall()
        )
        return SearchSettings(**rows[0]._asdict()) if rows else None

    search_settings = _latest_with_status("PRESENT")
    search_settings_future = _latest_with_status("FUTURE")
    # A current (PRESENT) row is mandatory; a FUTURE row is optional.
    if not isinstance(search_settings, SearchSettings):
        raise RuntimeError(
            "current search settings is of type " + str(type(search_settings))
        )
    if (
        not isinstance(search_settings_future, SearchSettings)
        and search_settings_future is not None
    ):
        raise RuntimeError(
            "future search settings is of type " + str(type(search_settings_future))
        )
    return search_settings, search_settings_future
def _get_batch_documents_with_multiple_tags(
    batch_size: int = 128,
) -> Generator[list[str], None, None]:
    """
    Returns a list of document ids which contain a one to many tag.
    The document may either contain a list metadata value, or may contain leftover
    old tags from reindexing.

    Pages with keyset pagination on document_id; the pagination key is passed
    as a bound parameter so ids containing quotes cannot break the statement.
    (batch_size is an int and is safe to interpolate.)
    """
    bind = op.get_bind()
    last_document_id: str | None = None
    while True:
        offset_clause = (
            "AND document__tag.document_id > :last_document_id"
            if last_document_id is not None
            else ""
        )
        params = (
            {"last_document_id": last_document_id}
            if last_document_id is not None
            else {}
        )
        batch = bind.execute(
            sa.text(
                f"""
                SELECT DISTINCT document__tag.document_id
                FROM tag
                JOIN document__tag ON tag.id = document__tag.tag_id
                GROUP BY tag.tag_key, tag.source, document__tag.document_id
                HAVING count(*) > 1 {offset_clause}
                ORDER BY document__tag.document_id
                LIMIT {batch_size}
                """
            ),
            params,
        ).fetchall()
        if not batch:
            break
        doc_ids = [document_id for document_id, in batch]
        yield doc_ids
        # Resume after the last id of this batch on the next iteration.
        last_document_id = doc_ids[-1]
def _get_vespa_metadata(
    document_id: str, index_name: str
) -> dict[str, str | list[str]]:
    """Fetch the authoritative metadata for a document from its chunk-0 in Vespa.

    Raises:
        RuntimeError: if no chunk 0 exists for the document.
    """
    url = DOCUMENT_ID_ENDPOINT.format(index_name=index_name)
    # Document-Selector language
    # NOTE(review): document_id is interpolated into the selection expression;
    # an id containing a single quote would break the selector — confirm ids
    # are constrained upstream.
    selection = (
        f"{index_name}.document_id=='{document_id}' and {index_name}.chunk_id==0"
    )
    params: dict[str, str | int] = {
        "selection": selection,
        "wantedDocumentCount": 1,
        "fieldSet": f"{index_name}:metadata",
    }
    with get_vespa_http_client() as client:
        resp = client.get(url, params=params)
        resp.raise_for_status()
        docs = resp.json().get("documents", [])
        if not docs:
            raise RuntimeError(f"No chunk-0 found for document {document_id}")
        # for some reason, metadata is a string
        metadata = docs[0]["fields"]["metadata"]
        return json.loads(metadata)
def _get_document_tags(document_id: str) -> list[tuple[int, str, str]]:
    """Return (tag_id, tag_key, tag_value) rows for every tag on the document.

    The document id is passed as a bound parameter rather than interpolated
    into the SQL text, so ids containing quotes cannot break the query.
    """
    bind = op.get_bind()
    result = bind.execute(
        sa.text(
            """
            SELECT tag.id, tag.tag_key, tag.tag_value
            FROM tag
            JOIN document__tag ON tag.id = document__tag.tag_id
            WHERE document__tag.document_id = :document_id
            """
        ),
        {"document_id": document_id},
    ).fetchall()
    return cast(list[tuple[int, str, str]], result)
def upgrade() -> None:
    """Add tag.is_list, widen the tag uniqueness constraint to include it, and
    (unless SKIP_TAG_FIX) clean up stale tags left behind by reindexing."""
    op.add_column(
        "tag",
        sa.Column("is_list", sa.Boolean(), nullable=False, server_default="false"),
    )
    # is_list becomes part of tag identity, so it must join the unique constraint.
    op.drop_constraint(
        constraint_name="_tag_key_value_source_uc",
        table_name="tag",
        type_="unique",
    )
    op.create_unique_constraint(
        constraint_name="_tag_key_value_source_list_uc",
        table_name="tag",
        columns=["tag_key", "tag_value", "source", "is_list"],
    )
    set_is_list_for_known_tags()
    if SKIP_TAG_FIX:
        logger.warning(
            "Skipping removal of old tags. "
            "This can cause issues when using the knowledge graph, or "
            "when filtering for documents by tags."
        )
        log_list_tags()
        return
    remove_old_tags()
    set_is_list_for_list_tags()
    # debug
    log_list_tags()
def downgrade() -> None:
    # the migration adds and populates the is_list column, and removes old bugged tags
    # there isn't a point in adding back the bugged tags, so we just drop the column
    op.drop_constraint(
        constraint_name="_tag_key_value_source_list_uc",
        table_name="tag",
        type_="unique",
    )
    # Restore the original three-column uniqueness.
    op.create_unique_constraint(
        constraint_name="_tag_key_value_source_uc",
        table_name="tag",
        columns=["tag_key", "tag_value", "source"],
    )
    op.drop_column("tag", "is_list")

View File

@@ -1,45 +0,0 @@
"""mcp_tool_enabled
Revision ID: 96a5702df6aa
Revises: 40926a4dab77
Create Date: 2025-10-09 12:10:21.733097
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "96a5702df6aa"
down_revision = "40926a4dab77"
branch_labels = None
depends_on = None
DELETE_DISABLED_TOOLS_SQL = "DELETE FROM tool WHERE enabled = false"
def upgrade() -> None:
    """Add tool.enabled (backfilled true) plus a lookup index."""
    # server_default backfills all existing rows as enabled.
    op.add_column(
        "tool",
        sa.Column(
            "enabled",
            sa.Boolean(),
            nullable=False,
            server_default=sa.true(),
        ),
    )
    op.create_index(
        "ix_tool_mcp_server_enabled",
        "tool",
        ["mcp_server_id", "enabled"],
    )
    # Remove the server default so application controls defaulting
    op.alter_column("tool", "enabled", server_default=None)
def downgrade() -> None:
    """Drop tool.enabled; disabled tools cannot be represented, so delete them."""
    op.execute(DELETE_DISABLED_TOOLS_SQL)
    op.drop_index("ix_tool_mcp_server_enabled", table_name="tool")
    op.drop_column("tool", "enabled")

View File

@@ -1,266 +0,0 @@
"""Migration 1: User file schema additions
Revision ID: 9b66d3156fc6
Revises: b4ef3ae0bf6e
Create Date: 2025-09-22 09:42:06.086732
This migration adds new columns and tables without modifying existing data.
It is safe to run and can be easily rolled back.
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql as psql
import logging
logger = logging.getLogger("alembic.runtime.migration")
# revision identifiers, used by Alembic.
revision = "9b66d3156fc6"
down_revision = "b4ef3ae0bf6e"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add new columns and tables without modifying existing data.

    Every step is guarded by an inspector check so the migration is safe to
    re-run after a partial failure.
    """
    # Enable pgcrypto for UUID generation
    op.execute("CREATE EXTENSION IF NOT EXISTS pgcrypto")
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    # === USER_FILE: Add new columns ===
    logger.info("Adding new columns to user_file table...")
    user_file_columns = [col["name"] for col in inspector.get_columns("user_file")]
    # Check if ID is already UUID (in case of re-run after partial migration)
    id_is_uuid = any(
        col["name"] == "id" and "uuid" in str(col["type"]).lower()
        for col in inspector.get_columns("user_file")
    )
    # Add transitional UUID column only if ID is not already UUID
    if "new_id" not in user_file_columns and not id_is_uuid:
        op.add_column(
            "user_file",
            sa.Column(
                "new_id",
                psql.UUID(as_uuid=True),
                nullable=True,
                server_default=sa.text("gen_random_uuid()"),
            ),
        )
        op.create_unique_constraint("uq_user_file_new_id", "user_file", ["new_id"])
        logger.info("Added new_id column to user_file")
    # Add status column
    if "status" not in user_file_columns:
        op.add_column(
            "user_file",
            sa.Column(
                "status",
                sa.Enum(
                    "PROCESSING",
                    "COMPLETED",
                    "FAILED",
                    "CANCELED",
                    name="userfilestatus",
                    native_enum=False,
                ),
                nullable=False,
                server_default="PROCESSING",
            ),
        )
        logger.info("Added status column to user_file")
    # Add other tracking columns
    if "chunk_count" not in user_file_columns:
        op.add_column(
            "user_file", sa.Column("chunk_count", sa.Integer(), nullable=True)
        )
        logger.info("Added chunk_count column to user_file")
    if "last_accessed_at" not in user_file_columns:
        op.add_column(
            "user_file",
            sa.Column("last_accessed_at", sa.DateTime(timezone=True), nullable=True),
        )
        logger.info("Added last_accessed_at column to user_file")
    if "needs_project_sync" not in user_file_columns:
        op.add_column(
            "user_file",
            sa.Column(
                "needs_project_sync",
                sa.Boolean(),
                nullable=False,
                server_default=sa.text("false"),
            ),
        )
        logger.info("Added needs_project_sync column to user_file")
    if "last_project_sync_at" not in user_file_columns:
        op.add_column(
            "user_file",
            sa.Column(
                "last_project_sync_at", sa.DateTime(timezone=True), nullable=True
            ),
        )
        logger.info("Added last_project_sync_at column to user_file")
    if "document_id_migrated" not in user_file_columns:
        op.add_column(
            "user_file",
            sa.Column(
                "document_id_migrated",
                sa.Boolean(),
                nullable=False,
                server_default=sa.text("true"),
            ),
        )
        logger.info("Added document_id_migrated column to user_file")
    # === USER_FOLDER -> USER_PROJECT rename ===
    table_names = set(inspector.get_table_names())
    if "user_folder" in table_names:
        logger.info("Updating user_folder table...")
        # Make description nullable first
        op.alter_column("user_folder", "description", nullable=True)
        # Rename table if user_project doesn't exist
        if "user_project" not in table_names:
            op.execute("ALTER TABLE user_folder RENAME TO user_project")
            logger.info("Renamed user_folder to user_project")
    elif "user_project" in table_names:
        # If already renamed, ensure column nullability
        project_cols = [col["name"] for col in inspector.get_columns("user_project")]
        if "description" in project_cols:
            op.alter_column("user_project", "description", nullable=True)
    # Add instructions column to user_project
    inspector = sa.inspect(bind)  # Refresh after rename
    if "user_project" in inspector.get_table_names():
        project_columns = [col["name"] for col in inspector.get_columns("user_project")]
        if "instructions" not in project_columns:
            op.add_column(
                "user_project",
                sa.Column("instructions", sa.String(), nullable=True),
            )
            logger.info("Added instructions column to user_project")
    # === CHAT_SESSION: Add project_id ===
    chat_session_columns = [
        col["name"] for col in inspector.get_columns("chat_session")
    ]
    if "project_id" not in chat_session_columns:
        op.add_column(
            "chat_session",
            sa.Column("project_id", sa.Integer(), nullable=True),
        )
        logger.info("Added project_id column to chat_session")
    # === PERSONA__USER_FILE: Add UUID column ===
    persona_user_file_columns = [
        col["name"] for col in inspector.get_columns("persona__user_file")
    ]
    if "user_file_id_uuid" not in persona_user_file_columns:
        op.add_column(
            "persona__user_file",
            sa.Column("user_file_id_uuid", psql.UUID(as_uuid=True), nullable=True),
        )
        logger.info("Added user_file_id_uuid column to persona__user_file")
    # === PROJECT__USER_FILE: Create new table ===
    if "project__user_file" not in inspector.get_table_names():
        op.create_table(
            "project__user_file",
            sa.Column("project_id", sa.Integer(), nullable=False),
            sa.Column("user_file_id", psql.UUID(as_uuid=True), nullable=False),
            sa.PrimaryKeyConstraint("project_id", "user_file_id"),
        )
        logger.info("Created project__user_file table")
    # Only create the index if it doesn't exist
    existing_indexes = [
        ix["name"] for ix in inspector.get_indexes("project__user_file")
    ]
    if "idx_project__user_file_user_file_id" not in existing_indexes:
        op.create_index(
            "idx_project__user_file_user_file_id",
            "project__user_file",
            ["user_file_id"],
        )
        logger.info(
            "Created index idx_project__user_file_user_file_id on project__user_file"
        )
    logger.info("Migration 1 (schema additions) completed successfully")
def downgrade() -> None:
    """Remove added columns and tables.

    Each drop is guarded by an inspector check, mirroring the idempotent
    upgrade, so a partially-applied upgrade can still be rolled back.
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)
    logger.info("Starting downgrade of schema additions...")
    # Drop project__user_file table
    if "project__user_file" in inspector.get_table_names():
        # op.drop_index("idx_project__user_file_user_file_id", "project__user_file")
        op.drop_table("project__user_file")
        logger.info("Dropped project__user_file table")
    # Remove columns from persona__user_file
    if "persona__user_file" in inspector.get_table_names():
        columns = [col["name"] for col in inspector.get_columns("persona__user_file")]
        if "user_file_id_uuid" in columns:
            op.drop_column("persona__user_file", "user_file_id_uuid")
            logger.info("Dropped user_file_id_uuid from persona__user_file")
    # Remove columns from chat_session
    if "chat_session" in inspector.get_table_names():
        columns = [col["name"] for col in inspector.get_columns("chat_session")]
        if "project_id" in columns:
            op.drop_column("chat_session", "project_id")
            logger.info("Dropped project_id from chat_session")
    # Rename user_project back to user_folder and remove instructions
    if "user_project" in inspector.get_table_names():
        columns = [col["name"] for col in inspector.get_columns("user_project")]
        if "instructions" in columns:
            op.drop_column("user_project", "instructions")
        op.execute("ALTER TABLE user_project RENAME TO user_folder")
        op.alter_column("user_folder", "description", nullable=False)
        logger.info("Renamed user_project back to user_folder")
    # Remove columns from user_file
    if "user_file" in inspector.get_table_names():
        columns = [col["name"] for col in inspector.get_columns("user_file")]
        columns_to_drop = [
            "document_id_migrated",
            "last_project_sync_at",
            "needs_project_sync",
            "last_accessed_at",
            "chunk_count",
            "status",
        ]
        for col in columns_to_drop:
            if col in columns:
                op.drop_column("user_file", col)
                logger.info(f"Dropped {col} from user_file")
        if "new_id" in columns:
            op.drop_constraint("uq_user_file_new_id", "user_file", type_="unique")
            op.drop_column("user_file", "new_id")
            logger.info("Dropped new_id from user_file")
    # Drop enum type if no columns use it
    bind.execute(sa.text("DROP TYPE IF EXISTS userfilestatus"))
    logger.info("Downgrade completed successfully")

View File

@@ -1,97 +0,0 @@
"""add config to federated_connector
Revision ID: 9drpiiw74ljy
Revises: 2acdef638fc2
Create Date: 2025-11-03 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "9drpiiw74ljy"
down_revision = "2acdef638fc2"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add federated_connector.config (JSONB) and backfill Slack connectors
    from their first document-set entities, all in one bulk UPDATE."""
    connection = op.get_bind()
    # Check if column already exists in current schema
    result = connection.execute(
        sa.text(
            """
            SELECT column_name
            FROM information_schema.columns
            WHERE table_schema = current_schema()
            AND table_name = 'federated_connector'
            AND column_name = 'config'
            """
        )
    )
    column_exists = result.fetchone() is not None
    # Add config column with default empty object (only if it doesn't exist)
    if not column_exists:
        op.add_column(
            "federated_connector",
            sa.Column(
                "config", postgresql.JSONB(), nullable=False, server_default="{}"
            ),
        )
    # Data migration: Single bulk update for all Slack connectors
    connection.execute(
        sa.text(
            """
            WITH connector_configs AS (
                SELECT
                    fc.id as connector_id,
                    CASE
                        WHEN fcds.entities->'channels' IS NOT NULL
                        AND jsonb_typeof(fcds.entities->'channels') = 'array'
                        AND jsonb_array_length(fcds.entities->'channels') > 0
                        THEN
                            jsonb_build_object(
                                'channels', fcds.entities->'channels',
                                'search_all_channels', false
                            ) ||
                            CASE
                                WHEN fcds.entities->'include_dm' IS NOT NULL
                                THEN jsonb_build_object('include_dm', fcds.entities->'include_dm')
                                ELSE '{}'::jsonb
                            END
                        ELSE
                            jsonb_build_object('search_all_channels', true) ||
                            CASE
                                WHEN fcds.entities->'include_dm' IS NOT NULL
                                THEN jsonb_build_object('include_dm', fcds.entities->'include_dm')
                                ELSE '{}'::jsonb
                            END
                    END as config
                FROM federated_connector fc
                LEFT JOIN LATERAL (
                    SELECT entities
                    FROM federated_connector__document_set
                    WHERE federated_connector_id = fc.id
                    AND entities IS NOT NULL
                    ORDER BY id
                    LIMIT 1
                ) fcds ON true
                WHERE fc.source = 'FEDERATED_SLACK'
                AND fcds.entities IS NOT NULL
            )
            UPDATE federated_connector fc
            SET config = cc.config
            FROM connector_configs cc
            WHERE fc.id = cc.connector_id
            """
        )
    )
def downgrade() -> None:
    # Migrated Slack configs are discarded with the column; acceptable on downgrade.
    op.drop_column("federated_connector", "config")

View File

@@ -1,62 +0,0 @@
"""update_default_tool_descriptions
Revision ID: a01bf2971c5d
Revises: 87c52ec39f84
Create Date: 2025-12-16 15:21:25.656375
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "a01bf2971c5d"
down_revision = "18b5b2524446"
branch_labels = None
depends_on = None
# new tool descriptions (12/2025)
TOOL_DESCRIPTIONS = {
"SearchTool": "The Search Action allows the agent to search through connected knowledge to help build an answer.",
"ImageGenerationTool": (
"The Image Generation Action allows the agent to use DALL-E 3 or GPT-IMAGE-1 to generate images. "
"The action will be used when the user asks the agent to generate an image."
),
"WebSearchTool": (
"The Web Search Action allows the agent "
"to perform internet searches for up-to-date information."
),
"KnowledgeGraphTool": (
"The Knowledge Graph Search Action allows the agent to search the "
"Knowledge Graph for information. This tool can (for now) only be active in the KG Beta Agent, "
"and it requires the Knowledge Graph to be enabled."
),
"OktaProfileTool": (
"The Okta Profile Action allows the agent to fetch the current user's information from Okta. "
"This may include the user's name, email, phone number, address, and other details such as their "
"manager and direct reports."
),
}
def upgrade() -> None:
    """Refresh the descriptions of the built-in tools to the 12/2025 copy.

    Alembic already wraps each migration in a transaction, so the previous
    manual BEGIN/COMMIT/ROLLBACK issued via raw text() was both redundant and
    harmful: COMMIT would prematurely end the migration's own transaction.
    An exception here simply rolls back with the migration.
    """
    conn = op.get_bind()
    stmt = sa.text(
        "UPDATE tool SET description = :description WHERE in_code_tool_id = :tool_id"
    )
    for tool_id, description in TOOL_DESCRIPTIONS.items():
        conn.execute(stmt, {"description": description, "tool_id": tool_id})
def downgrade() -> None:
    # Descriptions are cosmetic and the previous text is not recorded, so
    # there is nothing to restore.
    pass

View File

@@ -1,61 +0,0 @@
"""add llm provider persona restrictions
Revision ID: a4f23d6b71c8
Revises: 5e1c073d48a3
Create Date: 2025-10-21 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "a4f23d6b71c8"
down_revision = "5e1c073d48a3"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the llm_provider <-> persona restriction association table."""
    op.create_table(
        "llm_provider__persona",
        sa.Column("llm_provider_id", sa.Integer(), nullable=False),
        sa.Column("persona_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["llm_provider_id"], ["llm_provider.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["persona_id"], ["persona.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("llm_provider_id", "persona_id"),
    )
    # Indexes for lookups from either side plus the composite access pattern.
    op.create_index(
        "ix_llm_provider__persona_llm_provider_id",
        "llm_provider__persona",
        ["llm_provider_id"],
    )
    op.create_index(
        "ix_llm_provider__persona_persona_id",
        "llm_provider__persona",
        ["persona_id"],
    )
    op.create_index(
        "ix_llm_provider__persona_composite",
        "llm_provider__persona",
        ["persona_id", "llm_provider_id"],
    )
def downgrade() -> None:
    """Drop the llm_provider <-> persona association table and its indexes."""
    op.drop_index(
        "ix_llm_provider__persona_composite",
        table_name="llm_provider__persona",
    )
    op.drop_index(
        "ix_llm_provider__persona_persona_id",
        table_name="llm_provider__persona",
    )
    op.drop_index(
        "ix_llm_provider__persona_llm_provider_id",
        table_name="llm_provider__persona",
    )
    op.drop_table("llm_provider__persona")

View File

@@ -1,417 +0,0 @@
"""New Chat History
Revision ID: a852cbe15577
Revises: 6436661d5b65
Create Date: 2025-11-08 15:16:37.781308
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "a852cbe15577"
down_revision = "6436661d5b65"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Restructure chat history: drop legacy agent tables, rework chat_message
    and tool_call, add tool_call__search_doc, and extend persona."""
    # 1. Drop old research/agent tables (CASCADE handles dependencies)
    op.execute("DROP TABLE IF EXISTS research_agent_iteration_sub_step CASCADE")
    op.execute("DROP TABLE IF EXISTS research_agent_iteration CASCADE")
    op.execute("DROP TABLE IF EXISTS agent__sub_query__search_doc CASCADE")
    op.execute("DROP TABLE IF EXISTS agent__sub_query CASCADE")
    op.execute("DROP TABLE IF EXISTS agent__sub_question CASCADE")
    # 2. ChatMessage table changes
    # Rename columns and add FKs
    op.alter_column(
        "chat_message", "parent_message", new_column_name="parent_message_id"
    )
    op.create_foreign_key(
        "fk_chat_message_parent_message_id",
        "chat_message",
        "chat_message",
        ["parent_message_id"],
        ["id"],
    )
    op.alter_column(
        "chat_message",
        "latest_child_message",
        new_column_name="latest_child_message_id",
    )
    op.create_foreign_key(
        "fk_chat_message_latest_child_message_id",
        "chat_message",
        "chat_message",
        ["latest_child_message_id"],
        ["id"],
    )
    # Add new column
    op.add_column(
        "chat_message", sa.Column("reasoning_tokens", sa.Text(), nullable=True)
    )
    # Drop old columns
    op.drop_column("chat_message", "rephrased_query")
    op.drop_column("chat_message", "alternate_assistant_id")
    op.drop_column("chat_message", "overridden_model")
    op.drop_column("chat_message", "is_agentic")
    op.drop_column("chat_message", "refined_answer_improvement")
    op.drop_column("chat_message", "research_type")
    op.drop_column("chat_message", "research_plan")
    op.drop_column("chat_message", "research_answer_purpose")
    # 3. ToolCall table changes
    # Drop the unique constraint first
    op.drop_constraint("uq_tool_call_message_id", "tool_call", type_="unique")
    # Delete orphaned tool_call rows (those without valid chat_message)
    op.execute(
        "DELETE FROM tool_call WHERE message_id NOT IN (SELECT id FROM chat_message)"
    )
    # Add chat_session_id as nullable first, populate, then make NOT NULL
    op.add_column(
        "tool_call",
        sa.Column("chat_session_id", postgresql.UUID(as_uuid=True), nullable=True),
    )
    # Populate chat_session_id from the related chat_message
    op.execute(
        """
        UPDATE tool_call
        SET chat_session_id = chat_message.chat_session_id
        FROM chat_message
        WHERE tool_call.message_id = chat_message.id
        """
    )
    # Now make it NOT NULL and add FK
    op.alter_column("tool_call", "chat_session_id", nullable=False)
    op.create_foreign_key(
        "fk_tool_call_chat_session_id",
        "tool_call",
        "chat_session",
        ["chat_session_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # Rename message_id and make nullable, recreate FK with CASCADE
    op.drop_constraint("tool_call_message_id_fkey", "tool_call", type_="foreignkey")
    op.alter_column(
        "tool_call",
        "message_id",
        new_column_name="parent_chat_message_id",
        nullable=True,
    )
    op.create_foreign_key(
        "fk_tool_call_parent_chat_message_id",
        "tool_call",
        "chat_message",
        ["parent_chat_message_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # Add parent_tool_call_id with FK (self-referential, for nested tool calls)
    op.add_column(
        "tool_call", sa.Column("parent_tool_call_id", sa.Integer(), nullable=True)
    )
    op.create_foreign_key(
        "fk_tool_call_parent_tool_call_id",
        "tool_call",
        "tool_call",
        ["parent_tool_call_id"],
        ["id"],
        ondelete="CASCADE",
    )
    # Add other new columns
    op.add_column(
        "tool_call",
        sa.Column("turn_number", sa.Integer(), nullable=False, server_default="0"),
    )
    op.add_column(
        "tool_call",
        sa.Column("tool_call_id", sa.String(), nullable=False, server_default=""),
    )
    op.add_column("tool_call", sa.Column("reasoning_tokens", sa.Text(), nullable=True))
    op.add_column(
        "tool_call",
        sa.Column("tool_call_tokens", sa.Integer(), nullable=False, server_default="0"),
    )
    op.add_column(
        "tool_call",
        sa.Column("generated_images", postgresql.JSONB(), nullable=True),
    )
    # Rename columns
    op.alter_column(
        "tool_call", "tool_arguments", new_column_name="tool_call_arguments"
    )
    op.alter_column("tool_call", "tool_result", new_column_name="tool_call_response")
    # Change tool_call_response type from JSONB to Text
    op.execute(
        """
        ALTER TABLE tool_call
        ALTER COLUMN tool_call_response TYPE TEXT
        USING tool_call_response::text
        """
    )
    # Drop old columns
    op.drop_column("tool_call", "tool_name")
    # 4. Create new association table
    op.create_table(
        "tool_call__search_doc",
        sa.Column("tool_call_id", sa.Integer(), nullable=False),
        sa.Column("search_doc_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(["tool_call_id"], ["tool_call.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(
            ["search_doc_id"], ["search_doc.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("tool_call_id", "search_doc_id"),
    )
    # 5. Persona table change
    op.add_column(
        "persona",
        sa.Column(
            "replace_base_system_prompt",
            sa.Boolean(),
            nullable=False,
            server_default="false",
        ),
    )
def downgrade() -> None:
    """Revert the chat_message / tool_call schema rework.

    Reverses, in dependency order: the persona flag, the new
    tool_call__search_doc association table, the tool_call column
    additions/renames, the chat_message column changes, and finally
    recreates the dropped agent sub-question/sub-query and research
    agent tables. Data removed by the upgrade is NOT restored.
    """
    # Reverse persona changes
    op.drop_column("persona", "replace_base_system_prompt")
    # Drop new association table
    op.drop_table("tool_call__search_doc")
    # Reverse ToolCall changes
    # tool_name cannot be recovered, so backfill with an empty string default
    op.add_column(
        "tool_call",
        sa.Column("tool_name", sa.String(), nullable=False, server_default=""),
    )
    # Change tool_call_response back to JSONB
    op.execute(
        """
        ALTER TABLE tool_call
        ALTER COLUMN tool_call_response TYPE JSONB
        USING tool_call_response::jsonb
        """
    )
    op.alter_column("tool_call", "tool_call_response", new_column_name="tool_result")
    op.alter_column(
        "tool_call", "tool_call_arguments", new_column_name="tool_arguments"
    )
    op.drop_column("tool_call", "generated_images")
    op.drop_column("tool_call", "tool_call_tokens")
    op.drop_column("tool_call", "reasoning_tokens")
    op.drop_column("tool_call", "tool_call_id")
    op.drop_column("tool_call", "turn_number")
    # FK constraints must be dropped before their columns
    op.drop_constraint(
        "fk_tool_call_parent_tool_call_id", "tool_call", type_="foreignkey"
    )
    op.drop_column("tool_call", "parent_tool_call_id")
    op.drop_constraint(
        "fk_tool_call_parent_chat_message_id", "tool_call", type_="foreignkey"
    )
    # NOTE: restoring nullable=False here fails if any rows have a NULL
    # parent_chat_message_id — acceptable for a downgrade path
    op.alter_column(
        "tool_call",
        "parent_chat_message_id",
        new_column_name="message_id",
        nullable=False,
    )
    op.create_foreign_key(
        "tool_call_message_id_fkey",
        "tool_call",
        "chat_message",
        ["message_id"],
        ["id"],
    )
    op.drop_constraint("fk_tool_call_chat_session_id", "tool_call", type_="foreignkey")
    op.drop_column("tool_call", "chat_session_id")
    op.create_unique_constraint("uq_tool_call_message_id", "tool_call", ["message_id"])
    # Reverse ChatMessage changes
    # Note: research_answer_purpose and research_type were originally String columns,
    # not Enum types (see migrations 5ae8240accb3 and f8a9b2c3d4e5)
    op.add_column(
        "chat_message",
        sa.Column("research_answer_purpose", sa.String(), nullable=True),
    )
    op.add_column(
        "chat_message", sa.Column("research_plan", postgresql.JSONB(), nullable=True)
    )
    op.add_column(
        "chat_message",
        sa.Column("research_type", sa.String(), nullable=True),
    )
    op.add_column(
        "chat_message",
        sa.Column("refined_answer_improvement", sa.Boolean(), nullable=True),
    )
    op.add_column(
        "chat_message",
        sa.Column("is_agentic", sa.Boolean(), nullable=False, server_default="false"),
    )
    op.add_column(
        "chat_message", sa.Column("overridden_model", sa.String(), nullable=True)
    )
    op.add_column(
        "chat_message", sa.Column("alternate_assistant_id", sa.Integer(), nullable=True)
    )
    op.add_column(
        "chat_message", sa.Column("rephrased_query", sa.Text(), nullable=True)
    )
    op.drop_column("chat_message", "reasoning_tokens")
    op.drop_constraint(
        "fk_chat_message_latest_child_message_id", "chat_message", type_="foreignkey"
    )
    op.alter_column(
        "chat_message",
        "latest_child_message_id",
        new_column_name="latest_child_message",
    )
    op.drop_constraint(
        "fk_chat_message_parent_message_id", "chat_message", type_="foreignkey"
    )
    op.alter_column(
        "chat_message", "parent_message_id", new_column_name="parent_message"
    )
    # Recreate agent sub question and sub query tables
    op.create_table(
        "agent__sub_question",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("primary_question_id", sa.Integer(), nullable=False),
        sa.Column("chat_session_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("sub_question", sa.Text(), nullable=False),
        sa.Column("level", sa.Integer(), nullable=False),
        sa.Column("level_question_num", sa.Integer(), nullable=False),
        sa.Column(
            "time_created",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column("sub_answer", sa.Text(), nullable=False),
        sa.Column("sub_question_doc_results", postgresql.JSONB(), nullable=False),
        sa.ForeignKeyConstraint(
            ["primary_question_id"], ["chat_message.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["chat_session_id"], ["chat_session.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "agent__sub_query",
        sa.Column("id", sa.Integer(), primary_key=True),
        sa.Column("parent_question_id", sa.Integer(), nullable=False),
        sa.Column("chat_session_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("sub_query", sa.Text(), nullable=False),
        sa.Column(
            "time_created",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["parent_question_id"], ["agent__sub_question.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["chat_session_id"], ["chat_session.id"]),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "agent__sub_query__search_doc",
        sa.Column("sub_query_id", sa.Integer(), nullable=False),
        sa.Column("search_doc_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["sub_query_id"], ["agent__sub_query.id"], ondelete="CASCADE"
        ),
        sa.ForeignKeyConstraint(["search_doc_id"], ["search_doc.id"]),
        sa.PrimaryKeyConstraint("sub_query_id", "search_doc_id"),
    )
    # Recreate research agent tables
    op.create_table(
        "research_agent_iteration",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("primary_question_id", sa.Integer(), nullable=False),
        sa.Column("iteration_nr", sa.Integer(), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column("purpose", sa.String(), nullable=True),
        sa.Column("reasoning", sa.String(), nullable=True),
        sa.ForeignKeyConstraint(
            ["primary_question_id"], ["chat_message.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "primary_question_id",
            "iteration_nr",
            name="_research_agent_iteration_unique_constraint",
        ),
    )
    op.create_table(
        "research_agent_iteration_sub_step",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("primary_question_id", sa.Integer(), nullable=False),
        sa.Column("iteration_nr", sa.Integer(), nullable=False),
        sa.Column("iteration_sub_step_nr", sa.Integer(), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.text("now()"),
            nullable=False,
        ),
        sa.Column("sub_step_instructions", sa.String(), nullable=True),
        sa.Column("sub_step_tool_id", sa.Integer(), nullable=True),
        sa.Column("reasoning", sa.String(), nullable=True),
        sa.Column("sub_answer", sa.String(), nullable=True),
        sa.Column("cited_doc_results", postgresql.JSONB(), nullable=False),
        sa.Column("claims", postgresql.JSONB(), nullable=True),
        sa.Column("is_web_fetch", sa.Boolean(), nullable=True),
        sa.Column("queries", postgresql.JSONB(), nullable=True),
        sa.Column("generated_images", postgresql.JSONB(), nullable=True),
        sa.Column("additional_data", postgresql.JSONB(), nullable=True),
        sa.Column("file_ids", postgresql.JSONB(), nullable=True),
        # Composite FK: sub-steps are keyed to a specific iteration row
        sa.ForeignKeyConstraint(
            ["primary_question_id", "iteration_nr"],
            [
                "research_agent_iteration.primary_question_id",
                "research_agent_iteration.iteration_nr",
            ],
            ondelete="CASCADE",
        ),
        sa.ForeignKeyConstraint(["sub_step_tool_id"], ["tool.id"], ondelete="SET NULL"),
        sa.PrimaryKeyConstraint("id"),
    )

View File

@@ -1,225 +0,0 @@
"""merge prompt into persona
Revision ID: abbfec3a5ac5
Revises: 8818cf73fa1a
Create Date: 2024-12-19 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "abbfec3a5ac5"
down_revision = "8818cf73fa1a"
branch_labels = None
depends_on = None
MAX_PROMPT_LENGTH = 5_000_000
def upgrade() -> None:
    """Merge the prompt table into persona.

    Copies each persona's first associated prompt into new persona
    columns, drops the prompt / persona__prompt tables, and removes
    chat_message.prompt_id. NOTE: Prompts without any Personas will
    just be lost.
    """
    # Step 1: Add new columns to persona table (only if they don't exist)
    # Check if columns exist before adding them (migration is re-runnable)
    connection = op.get_bind()
    inspector = sa.inspect(connection)
    existing_columns = [col["name"] for col in inspector.get_columns("persona")]
    if "system_prompt" not in existing_columns:
        op.add_column(
            "persona",
            sa.Column(
                "system_prompt", sa.String(length=MAX_PROMPT_LENGTH), nullable=True
            ),
        )
    if "task_prompt" not in existing_columns:
        op.add_column(
            "persona",
            sa.Column(
                "task_prompt", sa.String(length=MAX_PROMPT_LENGTH), nullable=True
            ),
        )
    if "datetime_aware" not in existing_columns:
        op.add_column(
            "persona",
            sa.Column(
                "datetime_aware", sa.Boolean(), nullable=False, server_default="true"
            ),
        )
    # Step 2: Migrate data from prompt table to persona table (only if tables exist)
    existing_tables = inspector.get_table_names()
    if "prompt" in existing_tables and "persona__prompt" in existing_tables:
        # For personas that have associated prompts, copy the prompt data
        op.execute(
            """
            UPDATE persona
            SET
                system_prompt = p.system_prompt,
                task_prompt = p.task_prompt,
                datetime_aware = p.datetime_aware
            FROM (
                -- Get the first prompt for each persona (in case there are multiple)
                SELECT DISTINCT ON (pp.persona_id)
                    pp.persona_id,
                    pr.system_prompt,
                    pr.task_prompt,
                    pr.datetime_aware
                FROM persona__prompt pp
                JOIN prompt pr ON pp.prompt_id = pr.id
            ) p
            WHERE persona.id = p.persona_id
            """
        )
    # Step 3: Update chat_message references
    # Since chat messages referenced prompt_id, we need to update them to use persona_id
    # This is complex as we need to map from prompt_id to persona_id
    # Check if chat_message has prompt_id column
    chat_message_columns = [
        col["name"] for col in inspector.get_columns("chat_message")
    ]
    if "prompt_id" in chat_message_columns:
        op.execute(
            """
            ALTER TABLE chat_message
            DROP CONSTRAINT IF EXISTS chat_message__prompt_fk
            """
        )
        op.drop_column("chat_message", "prompt_id")
    # Step 4: Handle personas without prompts - set default values if needed (always run this)
    op.execute(
        """
        UPDATE persona
        SET
            system_prompt = COALESCE(system_prompt, ''),
            task_prompt = COALESCE(task_prompt, '')
        WHERE system_prompt IS NULL OR task_prompt IS NULL
        """
    )
    # Step 5: Drop the persona__prompt association table (if it exists)
    if "persona__prompt" in existing_tables:
        op.drop_table("persona__prompt")
    # Step 6: Drop the prompt table (if it exists)
    if "prompt" in existing_tables:
        op.drop_table("prompt")
    # Step 7: Make system_prompt and task_prompt non-nullable after migration
    # (the columns are guaranteed to exist here: either pre-existing or added
    # in Step 1, and backfilled to '' in Step 4)
    op.alter_column(
        "persona",
        "system_prompt",
        existing_type=sa.String(length=MAX_PROMPT_LENGTH),
        nullable=False,
        server_default=None,
    )
    op.alter_column(
        "persona",
        "task_prompt",
        existing_type=sa.String(length=MAX_PROMPT_LENGTH),
        nullable=False,
        server_default=None,
    )
def downgrade() -> None:
    """Re-split persona prompt fields back into prompt / persona__prompt.

    Recreates the tables and best-effort rebuilds rows from persona data;
    prompts are re-linked by the synthesized 'Prompt for <name>' naming
    convention, so personas with duplicate names may be mis-linked.
    """
    # Step 1: Recreate the prompt table
    op.create_table(
        "prompt",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column("name", sa.String(), nullable=False),
        sa.Column("description", sa.String(), nullable=False),
        sa.Column("system_prompt", sa.String(length=MAX_PROMPT_LENGTH), nullable=False),
        sa.Column("task_prompt", sa.String(length=MAX_PROMPT_LENGTH), nullable=False),
        sa.Column(
            "datetime_aware", sa.Boolean(), nullable=False, server_default="true"
        ),
        sa.Column(
            "default_prompt", sa.Boolean(), nullable=False, server_default="false"
        ),
        sa.Column("deleted", sa.Boolean(), nullable=False, server_default="false"),
        sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    # Step 2: Recreate the persona__prompt association table
    op.create_table(
        "persona__prompt",
        sa.Column("persona_id", sa.Integer(), nullable=False),
        sa.Column("prompt_id", sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(
            ["persona_id"],
            ["persona.id"],
        ),
        sa.ForeignKeyConstraint(
            ["prompt_id"],
            ["prompt.id"],
        ),
        sa.PrimaryKeyConstraint("persona_id", "prompt_id"),
    )
    # Step 3: Migrate data back from persona to prompt table
    op.execute(
        """
        INSERT INTO prompt (
            name,
            description,
            system_prompt,
            task_prompt,
            datetime_aware,
            default_prompt,
            deleted,
            user_id
        )
        SELECT
            CONCAT('Prompt for ', name),
            description,
            system_prompt,
            task_prompt,
            datetime_aware,
            is_default_persona,
            deleted,
            user_id
        FROM persona
        WHERE system_prompt IS NOT NULL AND system_prompt != ''
        RETURNING id, name
        """
    )
    # Step 4: Re-establish persona__prompt relationships
    # (joins on the synthesized prompt name from Step 3)
    op.execute(
        """
        INSERT INTO persona__prompt (persona_id, prompt_id)
        SELECT
            p.id as persona_id,
            pr.id as prompt_id
        FROM persona p
        JOIN prompt pr ON pr.name = CONCAT('Prompt for ', p.name)
        WHERE p.system_prompt IS NOT NULL AND p.system_prompt != ''
        """
    )
    # Step 5: Add prompt_id column back to chat_message
    op.add_column("chat_message", sa.Column("prompt_id", sa.Integer(), nullable=True))
    # Step 6: Re-establish foreign key constraint
    op.create_foreign_key(
        "chat_message__prompt_fk", "chat_message", "prompt", ["prompt_id"], ["id"]
    )
    # Step 7: Remove columns from persona table
    op.drop_column("persona", "datetime_aware")
    op.drop_column("persona", "task_prompt")
    op.drop_column("persona", "system_prompt")

View File

@@ -1,123 +0,0 @@
"""add_mcp_auth_performer
Revision ID: b30353be4eec
Revises: 2b75d0a8ffcb
Create Date: 2025-09-13 14:58:08.413534
"""
from alembic import op
import sqlalchemy as sa
from onyx.db.enums import MCPAuthenticationPerformer, MCPTransport
# revision identifiers, used by Alembic.
revision = "b30353be4eec"
down_revision = "2b75d0a8ffcb"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add auth_performer and transport columns to mcp_server.

    Columns are added nullable first, backfilled from existing data via
    inference rules (OAUTH => PER_USER, etc.), then made non-nullable.
    """
    # Add nullable column first for backward compatibility
    op.add_column(
        "mcp_server",
        sa.Column(
            "auth_performer",
            sa.Enum(MCPAuthenticationPerformer, native_enum=False),
            nullable=True,
        ),
    )
    op.add_column(
        "mcp_server",
        sa.Column(
            "transport",
            sa.Enum(MCPTransport, native_enum=False),
            nullable=True,
        ),
    )
    # # Backfill values using existing data and inference rules
    bind = op.get_bind()
    # 1) OAUTH servers are always PER_USER
    bind.execute(
        sa.text(
            """
            UPDATE mcp_server
            SET auth_performer = 'PER_USER'
            WHERE auth_type = 'OAUTH'
            """
        )
    )
    # 2) If there is no admin connection config, mark as ADMIN (and not set yet)
    bind.execute(
        sa.text(
            """
            UPDATE mcp_server
            SET auth_performer = 'ADMIN'
            WHERE admin_connection_config_id IS NULL
              AND auth_performer IS NULL
            """
        )
    )
    # 3) If there exists any user-specific connection config (user_email != ''), mark as PER_USER
    bind.execute(
        sa.text(
            """
            UPDATE mcp_server AS ms
            SET auth_performer = 'PER_USER'
            FROM mcp_connection_config AS mcc
            WHERE mcc.mcp_server_id = ms.id
              AND COALESCE(mcc.user_email, '') <> ''
              AND ms.auth_performer IS NULL
            """
        )
    )
    # 4) Default any remaining nulls to ADMIN (covers API_TOKEN admin-managed and NONE)
    bind.execute(
        sa.text(
            """
            UPDATE mcp_server
            SET auth_performer = 'ADMIN'
            WHERE auth_performer IS NULL
            """
        )
    )
    # Finally, make the column non-nullable
    op.alter_column(
        "mcp_server",
        "auth_performer",
        existing_type=sa.Enum(MCPAuthenticationPerformer, native_enum=False),
        nullable=False,
    )
    # Backfill transport for existing rows to STREAMABLE_HTTP, then make non-nullable
    bind.execute(
        sa.text(
            """
            UPDATE mcp_server
            SET transport = 'STREAMABLE_HTTP'
            WHERE transport IS NULL
            """
        )
    )
    op.alter_column(
        "mcp_server",
        "transport",
        existing_type=sa.Enum(MCPTransport, native_enum=False),
        nullable=False,
    )
def downgrade() -> None:
    """Remove the auth_performer and transport columns (data is lost)."""
    op.drop_column("mcp_server", "transport")
    op.drop_column("mcp_server", "auth_performer")

View File

@@ -1,38 +0,0 @@
"""Adding assistant-specific user preferences
Revision ID: b329d00a9ea6
Revises: f9b8c7d6e5a4
Create Date: 2025-08-26 23:14:44.592985
"""
from alembic import op
import fastapi_users_db_sqlalchemy
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "b329d00a9ea6"
down_revision = "f9b8c7d6e5a4"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Create the assistant__user_specific_config table.

    Stores per-(assistant, user) preferences; currently only the list of
    tool ids the user has disabled for that assistant.
    """
    op.create_table(
        "assistant__user_specific_config",
        sa.Column("assistant_id", sa.Integer(), nullable=False),
        sa.Column(
            "user_id",
            fastapi_users_db_sqlalchemy.generics.GUID(),
            nullable=False,
        ),
        sa.Column("disabled_tool_ids", postgresql.ARRAY(sa.Integer()), nullable=False),
        sa.ForeignKeyConstraint(["assistant_id"], ["persona.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("assistant_id", "user_id"),
    )
def downgrade() -> None:
    """Drop the per-user assistant config table (all preferences are lost)."""
    op.drop_table("assistant__user_specific_config")

View File

@@ -1,27 +0,0 @@
"""add_user_oauth_token_to_slack_bot
Revision ID: b4ef3ae0bf6e
Revises: 505c488f6662
Create Date: 2025-08-26 17:47:41.788462
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "b4ef3ae0bf6e"
down_revision = "505c488f6662"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add an (encrypted binary) user OAuth token column to slack_bot."""
    # Add user_token column to slack_bot table
    op.add_column("slack_bot", sa.Column("user_token", sa.LargeBinary(), nullable=True))
def downgrade() -> None:
    """Drop the user_token column from slack_bot (stored tokens are lost)."""
    # Remove user_token column from slack_bot table
    op.drop_column("slack_bot", "user_token")

View File

@@ -1,33 +0,0 @@
"""Pause finished user file connectors
Revision ID: b558f51620b4
Revises: 90e3b9af7da4
Create Date: 2025-08-15 17:17:02.456704
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = "b558f51620b4"
down_revision = "90e3b9af7da4"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Pause all ACTIVE user-file connector credential pairs."""
    # Set all user file connector credential pairs with ACTIVE status to PAUSED
    # This ensures user files don't continue to run indexing tasks after processing
    op.execute(
        """
        UPDATE connector_credential_pair
        SET status = 'PAUSED'
        WHERE is_user_file = true
        AND status = 'ACTIVE'
        """
    )
def downgrade() -> None:
    """No-op: the original ACTIVE/PAUSED state cannot be reconstructed."""
    pass

View File

@@ -1,43 +0,0 @@
"""adjust prompt length
Revision ID: b7ec9b5b505f
Revises: abbfec3a5ac5
Create Date: 2025-09-10 18:51:15.629197
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "b7ec9b5b505f"
down_revision = "abbfec3a5ac5"
branch_labels = None
depends_on = None
MAX_PROMPT_LENGTH = 5_000_000
def upgrade() -> None:
    """Widen persona prompt columns from 8000 chars to MAX_PROMPT_LENGTH."""
    # NOTE: need to run this since the previous migration PREVIOUSLY set the length to 8000
    op.alter_column(
        "persona",
        "system_prompt",
        existing_type=sa.String(length=8000),
        type_=sa.String(length=MAX_PROMPT_LENGTH),
        existing_nullable=False,
    )
    op.alter_column(
        "persona",
        "task_prompt",
        existing_type=sa.String(length=8000),
        type_=sa.String(length=MAX_PROMPT_LENGTH),
        existing_nullable=False,
    )
def downgrade() -> None:
    """No-op: narrowing the columns back could truncate existing prompts."""
    # Downgrade not necessary
    pass

View File

@@ -1,147 +0,0 @@
"""migrate_agent_sub_questions_to_research_iterations
Revision ID: bd7c3bf8beba
Revises: f8a9b2c3d4e5
Create Date: 2025-08-18 11:33:27.098287
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "bd7c3bf8beba"
down_revision = "f8a9b2c3d4e5"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Migrate legacy agent sub-questions into research-iteration tables.

    Each agentic chat message gets one synthetic iteration (nr 1), with its
    sub-questions copied in as sub-steps; affected messages are then tagged
    research_type = 'LEGACY_AGENTIC'.
    """
    # Get connection to execute raw SQL
    connection = op.get_bind()
    # First, insert data into research_agent_iteration table
    # This creates one iteration record per primary_question_id using the earliest time_created
    connection.execute(
        sa.text(
            """
            INSERT INTO research_agent_iteration (primary_question_id, created_at, iteration_nr, purpose, reasoning)
            SELECT
                primary_question_id,
                MIN(time_created) as created_at,
                1 as iteration_nr,
                'Generating and researching subquestions' as purpose,
                '(No previous reasoning)' as reasoning
            FROM agent__sub_question
            JOIN chat_message on agent__sub_question.primary_question_id = chat_message.id
            WHERE primary_question_id IS NOT NULL
            AND chat_message.is_agentic = true
            GROUP BY primary_question_id
            ON CONFLICT DO NOTHING;
            """
        )
    )
    # Then, insert data into research_agent_iteration_sub_step table
    # This migrates each sub-question as a sub-step
    connection.execute(
        sa.text(
            """
            INSERT INTO research_agent_iteration_sub_step (
                primary_question_id,
                iteration_nr,
                iteration_sub_step_nr,
                created_at,
                sub_step_instructions,
                sub_step_tool_id,
                sub_answer,
                cited_doc_results
            )
            SELECT
                primary_question_id,
                1 as iteration_nr,
                level_question_num as iteration_sub_step_nr,
                time_created as created_at,
                sub_question as sub_step_instructions,
                1 as sub_step_tool_id,
                sub_answer,
                sub_question_doc_results as cited_doc_results
            FROM agent__sub_question
            JOIN chat_message on agent__sub_question.primary_question_id = chat_message.id
            WHERE chat_message.is_agentic = true
            AND primary_question_id IS NOT NULL
            ON CONFLICT DO NOTHING;
            """
        )
    )
    # Update chat_message records: set legacy agentic type and answer purpose for existing agentic messages
    # NOTE: this must run BEFORE the research_type backfill below, which
    # clears the research_type IS NULL condition this statement relies on
    connection.execute(
        sa.text(
            """
            UPDATE chat_message
            SET research_answer_purpose = 'ANSWER'
            WHERE is_agentic = true
            AND research_type IS NULL and
            message_type = 'ASSISTANT';
            """
        )
    )
    connection.execute(
        sa.text(
            """
            UPDATE chat_message
            SET research_type = 'LEGACY_AGENTIC'
            WHERE is_agentic = true
            AND research_type IS NULL;
            """
        )
    )
def downgrade() -> None:
    """Remove the migrated iteration rows and clear the legacy-agentic tags."""
    # Get connection to execute raw SQL
    connection = op.get_bind()
    # Note: This downgrade removes all research agent iteration data
    # There's no way to perfectly restore the original agent__sub_question data
    # if it was deleted after this migration
    # Delete all research_agent_iteration_sub_step records that were migrated
    connection.execute(
        sa.text(
            """
            DELETE FROM research_agent_iteration_sub_step
            USING chat_message
            WHERE research_agent_iteration_sub_step.primary_question_id = chat_message.id
            AND chat_message.research_type = 'LEGACY_AGENTIC';
            """
        )
    )
    # Delete all research_agent_iteration records that were migrated
    connection.execute(
        sa.text(
            """
            DELETE FROM research_agent_iteration
            USING chat_message
            WHERE research_agent_iteration.primary_question_id = chat_message.id
            AND chat_message.research_type = 'LEGACY_AGENTIC';
            """
        )
    )
    # Revert chat_message updates: clear research fields for legacy agentic messages
    connection.execute(
        sa.text(
            """
            UPDATE chat_message
            SET research_type = NULL,
                research_answer_purpose = NULL
            WHERE is_agentic = true
            AND research_type = 'LEGACY_AGENTIC'
            AND message_type = 'ASSISTANT';
            """
        )
    )

View File

@@ -1,73 +0,0 @@
"""add_python_tool
Revision ID: c7e9f4a3b2d1
Revises: 3c9a65f1207f
Create Date: 2025-11-08 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "c7e9f4a3b2d1"
down_revision = "3c9a65f1207f"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add PythonTool to built-in tools"""
    conn = op.get_bind()
    # Parameter binding via sa.text avoids any quoting issues in the strings
    conn.execute(
        sa.text(
            """
            INSERT INTO tool (name, display_name, description, in_code_tool_id, enabled)
            VALUES (:name, :display_name, :description, :in_code_tool_id, :enabled)
            """
        ),
        {
            "name": "PythonTool",
            # in the UI, call it `Code Interpreter` since this is a well known term for this tool
            "display_name": "Code Interpreter",
            "description": (
                "The Code Interpreter Action allows the assistant to execute "
                "Python code in a secure, isolated environment for data analysis, "
                "computation, visualization, and file processing."
            ),
            "in_code_tool_id": "PythonTool",
            "enabled": True,
        },
    )
    # needed to store files generated by the python tool
    op.add_column(
        "research_agent_iteration_sub_step",
        sa.Column(
            "file_ids",
            postgresql.JSONB(astext_type=sa.Text()),
            nullable=True,
        ),
    )
def downgrade() -> None:
    """Remove PythonTool from built-in tools"""
    conn = op.get_bind()
    conn.execute(
        sa.text(
            """
            DELETE FROM tool
            WHERE in_code_tool_id = :in_code_tool_id
            """
        ),
        {
            "in_code_tool_id": "PythonTool",
        },
    )
    # Also drop the column added by upgrade() for python-tool-generated files
    op.drop_column("research_agent_iteration_sub_step", "file_ids")

View File

@@ -1,72 +0,0 @@
"""personalization_user_info
Revision ID: c8a93a2af083
Revises: 6f4f86aef280
Create Date: 2025-10-14 15:59:03.577343
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "c8a93a2af083"
down_revision = "6f4f86aef280"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add personalization fields to user and create the memory table."""
    op.add_column(
        "user",
        sa.Column("personal_name", sa.String(), nullable=True),
    )
    op.add_column(
        "user",
        sa.Column("personal_role", sa.String(), nullable=True),
    )
    # Memories are opt-out: default to enabled for existing users
    op.add_column(
        "user",
        sa.Column(
            "use_memories",
            sa.Boolean(),
            nullable=False,
            server_default=sa.true(),
        ),
    )
    op.create_table(
        "memory",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column("memory_text", sa.Text(), nullable=False),
        # presumably references chat_session/chat_message — no FK is declared,
        # so stale ids are possible; TODO confirm against the ORM model
        sa.Column("conversation_id", postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column("message_id", sa.Integer(), nullable=True),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            server_default=sa.func.now(),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(["user_id"], ["user.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    # Memories are always fetched per-user
    op.create_index("ix_memory_user_id", "memory", ["user_id"])
def downgrade() -> None:
    """Drop the memory table and the personalization columns on user."""
    op.drop_index("ix_memory_user_id", table_name="memory")
    op.drop_table("memory")
    op.drop_column("user", "use_memories")
    op.drop_column("user", "personal_role")
    op.drop_column("user", "personal_name")

View File

@@ -1,152 +0,0 @@
"""seed_builtin_tools
Revision ID: d09fc20a3c66
Revises: b7ec9b5b505f
Create Date: 2025-09-09 19:32:16.824373
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "d09fc20a3c66"
down_revision = "b7ec9b5b505f"
branch_labels = None
depends_on = None
# Tool definitions - core tools that should always be seeded
# Names/in_code_tool_id are the same as the class names in the tool_implementations package
# Each entry maps onto a row in the `tool` table; `in_code_tool_id` is the
# stable key used for idempotent insert-or-update in upgrade().
BUILT_IN_TOOLS = [
    {
        "name": "SearchTool",
        "display_name": "Internal Search",
        "description": "The Search Action allows the Assistant to search through connected knowledge to help build an answer.",
        "in_code_tool_id": "SearchTool",
    },
    {
        "name": "ImageGenerationTool",
        "display_name": "Image Generation",
        "description": (
            "The Image Generation Action allows the assistant to use DALL-E 3 or GPT-IMAGE-1 to generate images. "
            "The action will be used when the user asks the assistant to generate an image."
        ),
        "in_code_tool_id": "ImageGenerationTool",
    },
    {
        # Historically named InternetSearchTool; upgrade() renames old rows
        "name": "WebSearchTool",
        "display_name": "Web Search",
        "description": (
            "The Web Search Action allows the assistant "
            "to perform internet searches for up-to-date information."
        ),
        "in_code_tool_id": "WebSearchTool",
    },
    {
        "name": "KnowledgeGraphTool",
        "display_name": "Knowledge Graph Search",
        "description": (
            "The Knowledge Graph Search Action allows the assistant to search the "
            "Knowledge Graph for information. This tool can (for now) only be active in the KG Beta Assistant, "
            "and it requires the Knowledge Graph to be enabled."
        ),
        "in_code_tool_id": "KnowledgeGraphTool",
    },
    {
        "name": "OktaProfileTool",
        "display_name": "Okta Profile",
        "description": (
            "The Okta Profile Action allows the assistant to fetch the current user's information from Okta. "
            "This may include the user's name, email, phone number, address, and other details such as their "
            "manager and direct reports."
        ),
        "in_code_tool_id": "OktaProfileTool",
    },
]
def upgrade() -> None:
    """Seed (or update in place) the built-in tool rows.

    For each entry in BUILT_IN_TOOLS: update the existing row keyed by
    in_code_tool_id if present, otherwise insert a new one. The historical
    InternetSearchTool row is renamed in place to WebSearchTool.

    NOTE: the previous version of this migration issued manual
    BEGIN/COMMIT/ROLLBACK statements on the connection. Alembic already
    runs each migration inside a transaction, so issuing BEGIN on the open
    connection was redundant (and COMMIT would have ended Alembic's
    transaction early). Exceptions now simply propagate and Alembic rolls
    the whole migration back.
    """
    conn = op.get_bind()
    # Get existing tools to check what already exists
    existing_tools = conn.execute(
        sa.text(
            "SELECT in_code_tool_id FROM tool WHERE in_code_tool_id IS NOT NULL"
        )
    ).fetchall()
    existing_tool_ids = {row[0] for row in existing_tools}
    # Insert or update built-in tools
    for tool in BUILT_IN_TOOLS:
        in_code_id = tool["in_code_tool_id"]
        # Handle historical rename: InternetSearchTool -> WebSearchTool
        if (
            in_code_id == "WebSearchTool"
            and "WebSearchTool" not in existing_tool_ids
            and "InternetSearchTool" in existing_tool_ids
        ):
            # Rename the existing InternetSearchTool row in place and update fields
            conn.execute(
                sa.text(
                    """
                    UPDATE tool
                    SET name = :name,
                        display_name = :display_name,
                        description = :description,
                        in_code_tool_id = :in_code_tool_id
                    WHERE in_code_tool_id = 'InternetSearchTool'
                    """
                ),
                tool,
            )
            # Keep the local view of existing ids in sync to avoid duplicate insert
            existing_tool_ids.discard("InternetSearchTool")
            existing_tool_ids.add("WebSearchTool")
            continue
        if in_code_id in existing_tool_ids:
            # Update existing tool
            conn.execute(
                sa.text(
                    """
                    UPDATE tool
                    SET name = :name,
                        display_name = :display_name,
                        description = :description
                    WHERE in_code_tool_id = :in_code_tool_id
                    """
                ),
                tool,
            )
        else:
            # Insert new tool
            conn.execute(
                sa.text(
                    """
                    INSERT INTO tool (name, display_name, description, in_code_tool_id)
                    VALUES (:name, :display_name, :description, :in_code_tool_id)
                    """
                ),
                tool,
            )
def downgrade() -> None:
    """Intentional no-op."""
    # We don't remove the tools on downgrade since it's totally fine to just
    # have them around. If we upgrade again, it will be a no-op.
    pass

View File

@@ -1,115 +0,0 @@
"""add status to mcp server and make auth fields nullable
Revision ID: e8f0d2a38171
Revises: ed9e44312505
Create Date: 2025-11-28 11:15:37.667340
"""
from alembic import op
import sqlalchemy as sa
from onyx.db.enums import ( # type: ignore[import-untyped]
MCPTransport,
MCPAuthenticationType,
MCPAuthenticationPerformer,
MCPServerStatus,
)
# revision identifiers, used by Alembic.
revision = "e8f0d2a38171"
down_revision = "ed9e44312505"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Make mcp_server auth/transport fields nullable and add a status column.

    Existing servers that already have an admin connection config are marked
    CONNECTED; everything else defaults to CREATED.
    """
    # Make auth fields nullable
    op.alter_column(
        "mcp_server",
        "transport",
        existing_type=sa.Enum(MCPTransport, name="mcp_transport", native_enum=False),
        nullable=True,
    )
    op.alter_column(
        "mcp_server",
        "auth_type",
        existing_type=sa.Enum(
            MCPAuthenticationType, name="mcp_authentication_type", native_enum=False
        ),
        nullable=True,
    )
    op.alter_column(
        "mcp_server",
        "auth_performer",
        existing_type=sa.Enum(
            MCPAuthenticationPerformer,
            name="mcp_authentication_performer",
            native_enum=False,
        ),
        nullable=True,
    )
    # Add status column with default
    op.add_column(
        "mcp_server",
        sa.Column(
            "status",
            sa.Enum(MCPServerStatus, name="mcp_server_status", native_enum=False),
            nullable=False,
            server_default="CREATED",
        ),
    )
    # For existing records, mark status as CONNECTED
    bind = op.get_bind()
    bind.execute(
        sa.text(
            """
            UPDATE mcp_server
            SET status = 'CONNECTED'
            WHERE status != 'CONNECTED'
            and admin_connection_config_id IS NOT NULL
            """
        )
    )
def downgrade() -> None:
    """Drop the status column and restore NOT NULL on the auth fields."""
    # Remove status column
    op.drop_column("mcp_server", "status")
    # Make auth fields non-nullable (set defaults first so ALTER cannot fail
    # on rows that were created while the fields were nullable)
    op.execute(
        "UPDATE mcp_server SET transport = 'STREAMABLE_HTTP' WHERE transport IS NULL"
    )
    op.execute("UPDATE mcp_server SET auth_type = 'NONE' WHERE auth_type IS NULL")
    op.execute(
        "UPDATE mcp_server SET auth_performer = 'ADMIN' WHERE auth_performer IS NULL"
    )
    op.alter_column(
        "mcp_server",
        "transport",
        existing_type=sa.Enum(MCPTransport, name="mcp_transport", native_enum=False),
        nullable=False,
    )
    op.alter_column(
        "mcp_server",
        "auth_type",
        existing_type=sa.Enum(
            MCPAuthenticationType, name="mcp_authentication_type", native_enum=False
        ),
        nullable=False,
    )
    op.alter_column(
        "mcp_server",
        "auth_performer",
        existing_type=sa.Enum(
            MCPAuthenticationPerformer,
            name="mcp_authentication_performer",
            native_enum=False,
        ),
        nullable=False,
    )

View File

@@ -1,34 +0,0 @@
"""Add icon_name field
Revision ID: ed9e44312505
Revises: 5e6f7a8b9c0d
Create Date: 2025-12-03 16:35:07.828393
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "ed9e44312505"
down_revision = "5e6f7a8b9c0d"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Replace persona's icon_shape/icon_color pair with a single icon_name."""
    # Add icon_name column
    op.add_column("persona", sa.Column("icon_name", sa.String(), nullable=True))
    # Remove old icon columns
    op.drop_column("persona", "icon_shape")
    op.drop_column("persona", "icon_color")
def downgrade() -> None:
    """Restore the old icon columns (previous values are not recoverable)."""
    # Re-add old icon columns
    op.add_column("persona", sa.Column("icon_color", sa.String(), nullable=True))
    op.add_column("persona", sa.Column("icon_shape", sa.Integer(), nullable=True))
    # Remove icon_name column
    op.drop_column("persona", "icon_name")

View File

@@ -1,30 +0,0 @@
"""add research_answer_purpose to chat_message
Revision ID: f8a9b2c3d4e5
Revises: 5ae8240accb3
Create Date: 2025-01-27 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "f8a9b2c3d4e5"
down_revision = "5ae8240accb3"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Add the nullable research_answer_purpose string to chat_message."""
    # Add research_answer_purpose column to chat_message table
    op.add_column(
        "chat_message",
        sa.Column("research_answer_purpose", sa.String(), nullable=True),
    )
def downgrade() -> None:
    """Drop the research_answer_purpose column added by upgrade()."""
    # Remove research_answer_purpose column from chat_message table
    op.drop_column("chat_message", "research_answer_purpose")

View File

@@ -1,69 +0,0 @@
"""remove foreign key constraints from research_agent_iteration_sub_step
Revision ID: f9b8c7d6e5a4
Revises: bd7c3bf8beba
Create Date: 2025-01-27 12:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = "f9b8c7d6e5a4"
down_revision = "bd7c3bf8beba"
branch_labels = None
depends_on = None
def upgrade() -> None:
    """Remove FK constraints (and the parent_question_id column) from
    research_agent_iteration_sub_step.

    Drops the self-referential parent_question_id FK plus its column, and
    drops the primary_question_id -> chat_message.id FK while keeping the
    primary_question_id column itself.
    """
    # Drop the existing foreign key constraint for parent_question_id
    op.drop_constraint(
        "research_agent_iteration_sub_step_parent_question_id_fkey",
        "research_agent_iteration_sub_step",
        type_="foreignkey",
    )

    # Drop the parent_question_id column entirely
    op.drop_column("research_agent_iteration_sub_step", "parent_question_id")

    # Drop the foreign key constraint for primary_question_id to chat_message.id
    # (keep the column as it's needed for the composite foreign key)
    op.drop_constraint(
        "research_agent_iteration_sub_step_primary_question_id_fkey",
        "research_agent_iteration_sub_step",
        type_="foreignkey",
    )
def downgrade() -> None:
    """Re-create the constraints and column removed by upgrade().

    NOTE: parent_question_id values were destroyed by upgrade(); the re-added
    column comes back all-NULL.
    """
    # Restore the foreign key constraint for primary_question_id to chat_message.id
    op.create_foreign_key(
        "research_agent_iteration_sub_step_primary_question_id_fkey",
        "research_agent_iteration_sub_step",
        "chat_message",
        ["primary_question_id"],
        ["id"],
        ondelete="CASCADE",
    )

    # Add back the parent_question_id column
    op.add_column(
        "research_agent_iteration_sub_step",
        sa.Column(
            "parent_question_id",
            sa.Integer(),
            nullable=True,
        ),
    )

    # Restore the foreign key constraint pointing to research_agent_iteration_sub_step.id
    op.create_foreign_key(
        "research_agent_iteration_sub_step_parent_question_id_fkey",
        "research_agent_iteration_sub_step",
        "research_agent_iteration_sub_step",
        ["parent_question_id"],
        ["id"],
        ondelete="CASCADE",
    )

View File

@@ -1,52 +0,0 @@
group "default" {
targets = ["backend", "model-server"]
}
variable "BACKEND_REPOSITORY" {
default = "onyxdotapp/onyx-backend"
}
variable "MODEL_SERVER_REPOSITORY" {
default = "onyxdotapp/onyx-model-server"
}
variable "INTEGRATION_REPOSITORY" {
default = "onyxdotapp/onyx-integration"
}
variable "TAG" {
default = "latest"
}
target "backend" {
context = "."
dockerfile = "Dockerfile"
cache-from = ["type=registry,ref=${BACKEND_REPOSITORY}:latest"]
cache-to = ["type=inline"]
tags = ["${BACKEND_REPOSITORY}:${TAG}"]
}
target "model-server" {
context = "."
dockerfile = "Dockerfile.model_server"
cache-from = ["type=registry,ref=${MODEL_SERVER_REPOSITORY}:latest"]
cache-to = ["type=inline"]
tags = ["${MODEL_SERVER_REPOSITORY}:${TAG}"]
}
target "integration" {
context = "."
dockerfile = "tests/integration/Dockerfile"
// Provide the base image via build context from the backend target
contexts = {
base = "target:backend"
}
tags = ["${INTEGRATION_REPOSITORY}:${TAG}"]
}

View File

@@ -1,17 +1,29 @@
from datetime import datetime
from functools import lru_cache
import jwt
import requests
from fastapi import Depends
from fastapi import HTTPException
from fastapi import Request
from fastapi import status
from jwt import decode as jwt_decode
from jwt import InvalidTokenError
from jwt import PyJWTError
from sqlalchemy import func
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from ee.onyx.configs.app_configs import JWT_PUBLIC_KEY_URL
from ee.onyx.configs.app_configs import SUPER_CLOUD_API_KEY
from ee.onyx.configs.app_configs import SUPER_USERS
from ee.onyx.db.saml import get_saml_account
from ee.onyx.server.seeding import get_seed_config
from ee.onyx.utils.secrets import extract_hashed_cookie
from onyx.auth.users import current_admin_user
from onyx.configs.app_configs import AUTH_TYPE
from onyx.configs.app_configs import USER_AUTH_SECRET
from onyx.configs.constants import AuthType
from onyx.db.models import User
from onyx.utils.logger import setup_logger
@@ -19,11 +31,75 @@ from onyx.utils.logger import setup_logger
logger = setup_logger()
@lru_cache()
def get_public_key() -> str | None:
    """Fetch and memoize the JWT-signing public key (PEM text).

    Returns:
        The PEM body served at JWT_PUBLIC_KEY_URL, or None when the URL is
        not configured. The value is cached for the process lifetime; callers
        invalidate it with ``get_public_key.cache_clear()`` after a token
        fails to verify (see verify_jwt_token).

    Raises:
        requests.HTTPError: if the key endpoint responds with an error status.
    """
    if JWT_PUBLIC_KEY_URL is None:
        logger.error("JWT_PUBLIC_KEY_URL is not set")
        return None

    # Bound the fetch so a hung key server cannot stall auth indefinitely
    # (requests.get has no default timeout). A raised exception is not
    # cached by lru_cache, so a transient failure is retried on next call.
    response = requests.get(JWT_PUBLIC_KEY_URL, timeout=30)
    response.raise_for_status()
    return response.text
async def verify_jwt_token(token: str, async_db_session: AsyncSession) -> User | None:
    """Validate an RS256-signed JWT and resolve it to a User by email claim.

    Returns the first User whose email matches the token's ``email`` claim
    (case-insensitively), or None when the key is unavailable, the token is
    invalid, or no matching user exists. On a failed decode the memoized
    public key is dropped so a rotated key is refetched next time.
    """
    try:
        pem = get_public_key()
        if pem is None:
            logger.error("Failed to retrieve public key")
            return None

        claims = jwt_decode(
            token,
            pem,
            algorithms=["RS256"],
            audience=None,
        )
        claimed_email = claims.get("email")
        if claimed_email:
            # Case-insensitive email match against the user table.
            rows = await async_db_session.execute(
                select(User).where(func.lower(User.email) == func.lower(claimed_email))
            )
            return rows.scalars().first()
    except InvalidTokenError:
        logger.error("Invalid JWT token")
        get_public_key.cache_clear()
    except PyJWTError as e:
        logger.error(f"JWT decoding error: {str(e)}")
        get_public_key.cache_clear()

    return None
def verify_auth_setting() -> None:
    """Log the configured auth type; every AuthType is accepted in the EE build."""
    # All the Auth flows are valid for EE version
    logger.notice(f"Using Auth Type: {AUTH_TYPE.value}")
async def optional_user_(
    request: Request,
    user: User | None,
    async_db_session: AsyncSession,
) -> User | None:
    """Resolve a user via EE fallbacks: SAML session cookie, then bearer JWT.

    Starts from the already-resolved ``user`` (may be None) and returns the
    best available identity, or None when no mechanism yields one.
    """
    # SAML deployments carry the session in a hashed cookie; when such a
    # cookie is present its account takes precedence over the passed-in user.
    if AUTH_TYPE == AuthType.SAML:
        hashed_cookie = extract_hashed_cookie(request)
        if hashed_cookie:
            account = await get_saml_account(
                cookie=hashed_cookie, async_db_session=async_db_session
            )
            user = account.user if account else None

    # Only consult the Authorization header when nothing above resolved a
    # user and JWT verification is configured.
    if user is not None or JWT_PUBLIC_KEY_URL is None:
        return user

    header_value = request.headers.get("Authorization")
    if header_value and header_value.startswith("Bearer "):
        bearer_token = header_value[len("Bearer ") :].strip()
        user = await verify_jwt_token(bearer_token, async_db_session)

    return user
def get_default_admin_user_emails_() -> list[str]:
seed_config = get_seed_config()
if seed_config and seed_config.admin_user_emails:

Some files were not shown because too many files have changed in this diff Show More