Compare commits
3052 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
154301b3ad | ||
|
|
e7b7ac9e94 | ||
|
|
e531eaa4ad | ||
|
|
23b03da714 | ||
|
|
22be93807d | ||
|
|
653ecdb494 | ||
|
|
15c426aefc | ||
|
|
2fa153ac34 | ||
|
|
62bd91a1e1 | ||
|
|
f7e0e790b6 | ||
|
|
c854c54c0c | ||
|
|
a14872666a | ||
|
|
e9f469b623 | ||
|
|
5a016860aa | ||
|
|
c773abb688 | ||
|
|
2561fcd78a | ||
|
|
28ed5e2af2 | ||
|
|
4c5c8e6aed | ||
|
|
f07e43cfe0 | ||
|
|
e46200868e | ||
|
|
147cab94a8 | ||
|
|
2fb956ccaf | ||
|
|
4dee0a4ba1 | ||
|
|
418a48ab39 | ||
|
|
9bc56f6e30 | ||
|
|
f0ca9cce35 | ||
|
|
746dbc632b | ||
|
|
462b05e612 | ||
|
|
946b347f44 | ||
|
|
b23e88f078 | ||
|
|
476fdd6040 | ||
|
|
fa8b3664cb | ||
|
|
8b687619b2 | ||
|
|
176ad07f1d | ||
|
|
7b5060c9be | ||
|
|
584daa9682 | ||
|
|
31f9501d8e | ||
|
|
c096929d8b | ||
|
|
01050f3103 | ||
|
|
45d57dd34d | ||
|
|
f4066e6e5a | ||
|
|
a2fbce698f | ||
|
|
fc784620e0 | ||
|
|
340042682a | ||
|
|
6dd2d2d6c1 | ||
|
|
7762cea10a | ||
|
|
610b741db2 | ||
|
|
814034d3d7 | ||
|
|
839672b82f | ||
|
|
03762c3634 | ||
|
|
59c76bcb1c | ||
|
|
1905f9a9ce | ||
|
|
ff93dc3c5c | ||
|
|
510c4a5188 | ||
|
|
c7d1a3c65d | ||
|
|
999b7b3cd8 | ||
|
|
f224d18d7f | ||
|
|
1bf5c770cf | ||
|
|
613bc85a13 | ||
|
|
b1abc97bda | ||
|
|
eec94c47dd | ||
|
|
b2632a005c | ||
|
|
706dad2687 | ||
|
|
e5be683a97 | ||
|
|
e7fcc809e7 | ||
|
|
26006c143e | ||
|
|
28afafaad7 | ||
|
|
dd1b3c98bf | ||
|
|
3919cd0306 | ||
|
|
7d615b4163 | ||
|
|
4828f34fec | ||
|
|
61c3fd4b5a | ||
|
|
816d60b22a | ||
|
|
310ca7cb23 | ||
|
|
fa397038ef | ||
|
|
dabc49567c | ||
|
|
d8dcba3552 | ||
|
|
f4efd479d5 | ||
|
|
a72c74dbf9 | ||
|
|
6c4dd85e6f | ||
|
|
9bfec5c7b3 | ||
|
|
fe2c2a8687 | ||
|
|
b23aa68254 | ||
|
|
0588cb34c6 | ||
|
|
b32e3ebd60 | ||
|
|
cc77ae3430 | ||
|
|
ac6eda063a | ||
|
|
f160d83ca9 | ||
|
|
13c7ad707e | ||
|
|
17dced03c0 | ||
|
|
39b1ba4691 | ||
|
|
8c7ad408a9 | ||
|
|
f0c4784b7b | ||
|
|
83fc68e27d | ||
|
|
be9a6c0254 | ||
|
|
1acdb77ad1 | ||
|
|
51e7a08118 | ||
|
|
aa0c4b0d1b | ||
|
|
80ee8fd2b2 | ||
|
|
291482cc16 | ||
|
|
154bc17778 | ||
|
|
cc5130c1af | ||
|
|
2d9f20ffb6 | ||
|
|
3e332eceae | ||
|
|
73b241c16f | ||
|
|
8ed68d9c19 | ||
|
|
b5e57a429a | ||
|
|
62fa0ffa57 | ||
|
|
481e9c1130 | ||
|
|
93652ce884 | ||
|
|
fa61163350 | ||
|
|
cb9eb6d238 | ||
|
|
41a93690a1 | ||
|
|
b309d34274 | ||
|
|
ca485b4a35 | ||
|
|
252336d95c | ||
|
|
1d620df625 | ||
|
|
e346b293f6 | ||
|
|
8b448d1c0b | ||
|
|
b628b3d9ab | ||
|
|
cef351bfd0 | ||
|
|
5adaf62975 | ||
|
|
c797283409 | ||
|
|
c4af638a17 | ||
|
|
4da0fd1ca3 | ||
|
|
bdd8549d3e | ||
|
|
24ff36d424 | ||
|
|
524ac11294 | ||
|
|
1747e0ad41 | ||
|
|
08d527366e | ||
|
|
70c6582a58 | ||
|
|
9720ad368e | ||
|
|
d3b4144c5b | ||
|
|
e6f366d373 | ||
|
|
4fbc7873ca | ||
|
|
f537429d2c | ||
|
|
f6f9813ef2 | ||
|
|
eccaa282e0 | ||
|
|
87db633b7d | ||
|
|
ab2bc11465 | ||
|
|
df145d63a9 | ||
|
|
8e10f27cc2 | ||
|
|
9f72debc17 | ||
|
|
1b9efee52c | ||
|
|
d5110de67b | ||
|
|
870c12af9b | ||
|
|
fb8b61c567 | ||
|
|
0f0ba43b7f | ||
|
|
6a4aaaec87 | ||
|
|
a5574eaacb | ||
|
|
247db95bad | ||
|
|
771d097309 | ||
|
|
157ecacb20 | ||
|
|
e0ab77cb24 | ||
|
|
7a36ed238c | ||
|
|
99d1713517 | ||
|
|
d102165028 | ||
|
|
199623b414 | ||
|
|
2679b19e32 | ||
|
|
0de5f17071 | ||
|
|
262a1a9544 | ||
|
|
4cb3c46f89 | ||
|
|
46d55030f0 | ||
|
|
9a72a2d5d5 | ||
|
|
c5a5968bf8 | ||
|
|
d50226b4dd | ||
|
|
817680565e | ||
|
|
84f46dc997 | ||
|
|
3c7eb64353 | ||
|
|
191e6f9714 | ||
|
|
e6bc1182a6 | ||
|
|
82d29e7a7d | ||
|
|
f33c2c205a | ||
|
|
7eacee26d0 | ||
|
|
631a95e018 | ||
|
|
f7be59e99c | ||
|
|
6bd623c020 | ||
|
|
a62852f384 | ||
|
|
508cbe16f8 | ||
|
|
dcb511023d | ||
|
|
4718203960 | ||
|
|
b3fc64049a | ||
|
|
0b8f3e01fb | ||
|
|
2a9166c9f8 | ||
|
|
43d20e2a32 | ||
|
|
6c0fa46784 | ||
|
|
de9c04474e | ||
|
|
c3184bd233 | ||
|
|
334b160012 | ||
|
|
04fcd788a3 | ||
|
|
a3fcd28c12 | ||
|
|
cb02809ffb | ||
|
|
34b254e2d8 | ||
|
|
d315522c5a | ||
|
|
a300a46707 | ||
|
|
c53d8c09c4 | ||
|
|
1c9890649d | ||
|
|
3cbf348585 | ||
|
|
2101242530 | ||
|
|
f718b0f96b | ||
|
|
ccf378f2c6 | ||
|
|
e3e809753c | ||
|
|
8b9aca27bf | ||
|
|
e6d34623b0 | ||
|
|
b69330ffca | ||
|
|
549c962248 | ||
|
|
54d17c9c72 | ||
|
|
5e57caee43 | ||
|
|
70ac13e536 | ||
|
|
eb79642863 | ||
|
|
b062e6840a | ||
|
|
987063fede | ||
|
|
5fd249c72d | ||
|
|
3c4c6d1c50 | ||
|
|
cf8ada5e8e | ||
|
|
e8666ee5ec | ||
|
|
e065e56f5c | ||
|
|
e09c1f0316 | ||
|
|
13aa72a51f | ||
|
|
38ed873de0 | ||
|
|
a9737a1541 | ||
|
|
7f002409e3 | ||
|
|
0ac667d99c | ||
|
|
4166b93e75 | ||
|
|
dda46f30c2 | ||
|
|
0dbf3b1652 | ||
|
|
1504533246 | ||
|
|
f2a3861410 | ||
|
|
a5eb7ea894 | ||
|
|
03ee9c14a3 | ||
|
|
75dee86375 | ||
|
|
ed661bf4a3 | ||
|
|
103681ef98 | ||
|
|
f73e1c4618 | ||
|
|
0cf1b830cf | ||
|
|
b4df56e0d8 | ||
|
|
4d5cc0033a | ||
|
|
a3153ee4cd | ||
|
|
5403e58681 | ||
|
|
311b79fe13 | ||
|
|
86c7496a70 | ||
|
|
abb0da491b | ||
|
|
97cb7c45a5 | ||
|
|
183645c637 | ||
|
|
12012232ac | ||
|
|
03a53cbed3 | ||
|
|
bfbd198202 | ||
|
|
ae171574e8 | ||
|
|
da38384be2 | ||
|
|
220cb3ae0a | ||
|
|
d47a3efe89 | ||
|
|
0c5ccea9d4 | ||
|
|
c091b8cd77 | ||
|
|
7d6e0c22ac | ||
|
|
c6be66e595 | ||
|
|
b422970369 | ||
|
|
580bdc511a | ||
|
|
191c0eec83 | ||
|
|
eeaaf5ce0d | ||
|
|
1dd9adf8d4 | ||
|
|
b7dae19756 | ||
|
|
3d224f8cff | ||
|
|
453df559b5 | ||
|
|
39e3a02b3e | ||
|
|
ea124f7fd4 | ||
|
|
d1c8cd2dc9 | ||
|
|
c12bef55e9 | ||
|
|
1bbea4b30f | ||
|
|
39aa1c4f7e | ||
|
|
32df9fda0f | ||
|
|
55c42ba526 | ||
|
|
cc218b1b40 | ||
|
|
fb9bbb66b0 | ||
|
|
ba3e49928d | ||
|
|
e8cac2823f | ||
|
|
b0ad2a6b7a | ||
|
|
a0aa0074f4 | ||
|
|
c4228855b5 | ||
|
|
0a82fdd784 | ||
|
|
fdf9f40aef | ||
|
|
75396dbd06 | ||
|
|
20c8991f55 | ||
|
|
d806c4719e | ||
|
|
978565e7f0 | ||
|
|
9fb3171d82 | ||
|
|
9923c6028e | ||
|
|
d690e0fa87 | ||
|
|
0fb6413368 | ||
|
|
a8b9e1f147 | ||
|
|
374a1f9771 | ||
|
|
97af43cadb | ||
|
|
2aead28c9b | ||
|
|
11b3a60675 | ||
|
|
ba768d46e6 | ||
|
|
7fe58d6bee | ||
|
|
8479b7c105 | ||
|
|
d994bc8f6a | ||
|
|
45590e3188 | ||
|
|
e0781677ad | ||
|
|
1b120712d4 | ||
|
|
8205c33176 | ||
|
|
b0bca2ac1f | ||
|
|
d0f62fa634 | ||
|
|
e7a1a06395 | ||
|
|
814024982e | ||
|
|
3dcf522224 | ||
|
|
23f14ebbb7 | ||
|
|
e322e46e4b | ||
|
|
57110d2bd7 | ||
|
|
fe05478336 | ||
|
|
b2c5063e0b | ||
|
|
9568ff12e8 | ||
|
|
7780fb3edd | ||
|
|
6747a0e9e2 | ||
|
|
b8da32c14e | ||
|
|
4f7236d72a | ||
|
|
91e30d3c19 | ||
|
|
4dc55fc43b | ||
|
|
0cf2ffd32d | ||
|
|
db35d045b8 | ||
|
|
a8dee4a95d | ||
|
|
5b59c7a18a | ||
|
|
78fc5a8184 | ||
|
|
dc097eaef9 | ||
|
|
4d43841ae3 | ||
|
|
8102ca24e5 | ||
|
|
38ad8deae2 | ||
|
|
1c1993fbd7 | ||
|
|
c46e13b8b1 | ||
|
|
8ba0342337 | ||
|
|
b322c7649b | ||
|
|
f1d97ac834 | ||
|
|
3a5580c725 | ||
|
|
d3fff154d4 | ||
|
|
3f51c35229 | ||
|
|
2a905c6c02 | ||
|
|
7a15c2646d | ||
|
|
6c83580283 | ||
|
|
6dc38f18cf | ||
|
|
50b66eff74 | ||
|
|
7257eb4ae6 | ||
|
|
8951123557 | ||
|
|
0151274659 | ||
|
|
53707a5083 | ||
|
|
7f09c36a92 | ||
|
|
6f827872fb | ||
|
|
9741bf15b5 | ||
|
|
8153287520 | ||
|
|
a6a2f0c191 | ||
|
|
3ec41e080f | ||
|
|
26ed125693 | ||
|
|
3bbce97329 | ||
|
|
8a51cc1656 | ||
|
|
3133d40081 | ||
|
|
df61546942 | ||
|
|
2f85f214ea | ||
|
|
3c004819ca | ||
|
|
9a936ef826 | ||
|
|
d2c86801b4 | ||
|
|
292941e1d0 | ||
|
|
5adc0d9d46 | ||
|
|
568ee857d5 | ||
|
|
885da29f28 | ||
|
|
05e58cffe8 | ||
|
|
bf7f176741 | ||
|
|
e1294cdc30 | ||
|
|
6373625206 | ||
|
|
47b7509d2e | ||
|
|
04e81ba20e | ||
|
|
5c4ce729f7 | ||
|
|
0d2c99a413 | ||
|
|
c327622a6c | ||
|
|
ccce3b24e1 | ||
|
|
e3c29277f3 | ||
|
|
f639ec70d4 | ||
|
|
b0b84b7eda | ||
|
|
a418574841 | ||
|
|
6d131f8cbb | ||
|
|
8cdb021b3d | ||
|
|
8e1654961a | ||
|
|
003598204e | ||
|
|
48ddc20026 | ||
|
|
361c9eeff4 | ||
|
|
b6169a48e6 | ||
|
|
29862435ab | ||
|
|
ec425163d3 | ||
|
|
991bbec53a | ||
|
|
e1fa60be99 | ||
|
|
465544cc2c | ||
|
|
4694ab8350 | ||
|
|
ae2532d40d | ||
|
|
2c251d0cef | ||
|
|
6bd64409e8 | ||
|
|
162e0ef378 | ||
|
|
0f349f4b8c | ||
|
|
1ec9c29df6 | ||
|
|
3d8cfbf99a | ||
|
|
d1a8bdc4e3 | ||
|
|
c9d165e65c | ||
|
|
ebb6837437 | ||
|
|
4cfc88ae95 | ||
|
|
c5c39f22e6 | ||
|
|
a39c38e1fd | ||
|
|
508879e3ae | ||
|
|
7c5633c9a1 | ||
|
|
b8a3699497 | ||
|
|
359dd8f41e | ||
|
|
645548e931 | ||
|
|
f237936b0c | ||
|
|
5f6a68d844 | ||
|
|
be83395f69 | ||
|
|
6342956cd6 | ||
|
|
86a92ead85 | ||
|
|
2a2bc40dfe | ||
|
|
707fdac2ce | ||
|
|
e64607eb43 | ||
|
|
da69f3acec | ||
|
|
f271e8fe9c | ||
|
|
f90398e29a | ||
|
|
241a90492e | ||
|
|
f76387ac6a | ||
|
|
15d56e8e7e | ||
|
|
21d0943aa4 | ||
|
|
0f85fce6ef | ||
|
|
4205b2a479 | ||
|
|
1cddc4be25 | ||
|
|
b9f658f2ae | ||
|
|
973f77cdc6 | ||
|
|
d9a0a8e09d | ||
|
|
0ab6417caa | ||
|
|
4a4423ed6b | ||
|
|
a85ffa270e | ||
|
|
15164fc0be | ||
|
|
cac6253d87 | ||
|
|
37b2cf4e13 | ||
|
|
1e72e474ec | ||
|
|
f3f5998e23 | ||
|
|
adfcb35ca6 | ||
|
|
066c70fca5 | ||
|
|
3d2d25cba5 | ||
|
|
630463257b | ||
|
|
b4fd7c300a | ||
|
|
eaa0bee9d4 | ||
|
|
dc808765aa | ||
|
|
c825ba4e4b | ||
|
|
6f2a01990f | ||
|
|
7fc3afd0ef | ||
|
|
aae1936620 | ||
|
|
a3c2c1aa3b | ||
|
|
8fa79aa394 | ||
|
|
a49d488ae1 | ||
|
|
224bee5c66 | ||
|
|
fd046a2d08 | ||
|
|
633a6ac032 | ||
|
|
854cc69414 | ||
|
|
e5dccf741a | ||
|
|
fa0ed11258 | ||
|
|
e579eed2f4 | ||
|
|
022048f577 | ||
|
|
5fa0826ee8 | ||
|
|
703b44558c | ||
|
|
bb39cb1e99 | ||
|
|
93807745cd | ||
|
|
0945eaedcd | ||
|
|
272ef9f8b8 | ||
|
|
ea354ce621 | ||
|
|
fd052149fe | ||
|
|
0e972646e8 | ||
|
|
4ef64ce18c | ||
|
|
9fe517dbf8 | ||
|
|
6dde7e0ba1 | ||
|
|
da2f790f25 | ||
|
|
c02d8200ac | ||
|
|
491012fa87 | ||
|
|
52fe8e8c0e | ||
|
|
7b9b9666cb | ||
|
|
3b22f0b7c0 | ||
|
|
cf87313f28 | ||
|
|
e02be47aae | ||
|
|
5736d7b110 | ||
|
|
9e3a77a559 | ||
|
|
e78e4e5cca | ||
|
|
311a451005 | ||
|
|
71e2e24112 | ||
|
|
18114c0a15 | ||
|
|
59b2755810 | ||
|
|
1aa39392ab | ||
|
|
7921bb21c7 | ||
|
|
d73a932773 | ||
|
|
1ca765476e | ||
|
|
e80a865def | ||
|
|
32ff0df6c9 | ||
|
|
f02f001266 | ||
|
|
55edc7129e | ||
|
|
43431c26e7 | ||
|
|
489c5a3d9c | ||
|
|
1db67ea9a2 | ||
|
|
7a2782e6fd | ||
|
|
a4483b7eb7 | ||
|
|
a26445e557 | ||
|
|
44893bc3c3 | ||
|
|
1d36a4ad6e | ||
|
|
4fa78fa892 | ||
|
|
6959329fd6 | ||
|
|
0771b998a5 | ||
|
|
78d816b435 | ||
|
|
654e566dcb | ||
|
|
596930174f | ||
|
|
b8ca6f5d76 | ||
|
|
91e17dc4ee | ||
|
|
6c935132a9 | ||
|
|
129c64a9a4 | ||
|
|
ad428f587b | ||
|
|
b5b6e1dc19 | ||
|
|
72128117a9 | ||
|
|
32440a9510 | ||
|
|
4293fe7edc | ||
|
|
ea72c1ae0f | ||
|
|
db52057030 | ||
|
|
e709d200aa | ||
|
|
4856cfbfc4 | ||
|
|
19aa15ffcd | ||
|
|
7c41408a1a | ||
|
|
8f67f29317 | ||
|
|
14c7fc0450 | ||
|
|
abd0cbe599 | ||
|
|
198955285e | ||
|
|
bc29046c06 | ||
|
|
7fea6e1ab0 | ||
|
|
791563e6ba | ||
|
|
5ff7935d91 | ||
|
|
d856651380 | ||
|
|
65a515a9db | ||
|
|
773b252555 | ||
|
|
7a94e74d6b | ||
|
|
13806a3f06 | ||
|
|
3cd099ee87 | ||
|
|
42411b5f33 | ||
|
|
84874c6039 | ||
|
|
0eff1bfaa9 | ||
|
|
5ef9d8dfc3 | ||
|
|
e2e572ccab | ||
|
|
6067ffe107 | ||
|
|
cbd2651a63 | ||
|
|
2e350ab607 | ||
|
|
ba4dc6d1eb | ||
|
|
a6e4f28830 | ||
|
|
4e37c361c4 | ||
|
|
7ac927ff02 | ||
|
|
7a174e621a | ||
|
|
9e592b2aca | ||
|
|
3158722a63 | ||
|
|
86dcfc10cf | ||
|
|
eea76802d4 | ||
|
|
0edf9635a1 | ||
|
|
18351e3850 | ||
|
|
aef0538adc | ||
|
|
9ec87614b2 | ||
|
|
97eebd1c97 | ||
|
|
5cd81bc6e8 | ||
|
|
dbaf563e88 | ||
|
|
5e30e10bf4 | ||
|
|
a5b0ced9a9 | ||
|
|
dbcc1db0b9 | ||
|
|
2db9af94fa | ||
|
|
94dc298181 | ||
|
|
cd85ae062e | ||
|
|
f2594134c7 | ||
|
|
be851ebcf1 | ||
|
|
9b13b140d5 | ||
|
|
fcd285ca0f | ||
|
|
0e7c12f340 | ||
|
|
836990b7d9 | ||
|
|
4141910ee2 | ||
|
|
1b035fd2f1 | ||
|
|
9fcd9503e7 | ||
|
|
a49008e02d | ||
|
|
66af5c7386 | ||
|
|
045778406f | ||
|
|
88fb1acc0a | ||
|
|
afcaf531ed | ||
|
|
aea474bf57 | ||
|
|
0e44d9340c | ||
|
|
153a8c635d | ||
|
|
49ca18ca6b | ||
|
|
9e8dc9f84a | ||
|
|
17da6d4ada | ||
|
|
354cda8c35 | ||
|
|
82206954e9 | ||
|
|
58cdb61ad7 | ||
|
|
f11899ddb5 | ||
|
|
c3a44dd54e | ||
|
|
4d7bf5271d | ||
|
|
c5ea4cb8f9 | ||
|
|
d490174544 | ||
|
|
13e05809a9 | ||
|
|
3a0e114715 | ||
|
|
1e6cf8d771 | ||
|
|
a30eb7f968 | ||
|
|
1b74772d07 | ||
|
|
b2c4e89402 | ||
|
|
f886434c59 | ||
|
|
38629afc89 | ||
|
|
5f9f766965 | ||
|
|
2801fde181 | ||
|
|
cb4641e4ad | ||
|
|
0ea9da95c8 | ||
|
|
08e756f27b | ||
|
|
8db764c426 | ||
|
|
3d0ce15fe8 | ||
|
|
70bf257e75 | ||
|
|
8c4a88a182 | ||
|
|
9a4e703d6a | ||
|
|
17ccc9dca9 | ||
|
|
1544aac663 | ||
|
|
12118192ef | ||
|
|
d9e6199835 | ||
|
|
c19a0df16e | ||
|
|
77877ec15f | ||
|
|
0d51a13cc7 | ||
|
|
88decc95aa | ||
|
|
f14d23d719 | ||
|
|
9665e6e765 | ||
|
|
5ec612d74f | ||
|
|
478902633f | ||
|
|
e36928e2e1 | ||
|
|
71c00cae2c | ||
|
|
5048593337 | ||
|
|
db6a3105be | ||
|
|
786f5f801f | ||
|
|
798cae28a9 | ||
|
|
d3edc0dc96 | ||
|
|
605322c917 | ||
|
|
cd41e9f140 | ||
|
|
0793ce47e2 | ||
|
|
94ca9ba902 | ||
|
|
eaec02bf6b | ||
|
|
5fae954ac5 | ||
|
|
2c6430313c | ||
|
|
bf15036ef8 | ||
|
|
7a657477de | ||
|
|
eab4b277a2 | ||
|
|
328d680f73 | ||
|
|
5257a24181 | ||
|
|
11a7bf7331 | ||
|
|
85b614f44a | ||
|
|
6bb66b2b93 | ||
|
|
8166f3a32f | ||
|
|
ca20828532 | ||
|
|
95d2f0f86f | ||
|
|
5fcf36ed8e | ||
|
|
a7a5a5ab67 | ||
|
|
d922d7454d | ||
|
|
742f9c1a70 | ||
|
|
75d189900c | ||
|
|
1fa288f11b | ||
|
|
b095d7150c | ||
|
|
861590b9ac | ||
|
|
b93f77a9f5 | ||
|
|
b915f1f674 | ||
|
|
a7ed4fe1dc | ||
|
|
02f7b88dab | ||
|
|
0efdf54819 | ||
|
|
8cdb9e943d | ||
|
|
e2adb3037a | ||
|
|
87f3b168c9 | ||
|
|
cad5b5642c | ||
|
|
f76e337d38 | ||
|
|
a92aa4e3f1 | ||
|
|
128ea578bd | ||
|
|
b5fbba6c81 | ||
|
|
8cba22937e | ||
|
|
2a5cffd64c | ||
|
|
31364dd9f8 | ||
|
|
4137821e53 | ||
|
|
e376314315 | ||
|
|
e73a710c06 | ||
|
|
2472cc949a | ||
|
|
62eb422934 | ||
|
|
9ff46757bb | ||
|
|
91eb37c240 | ||
|
|
8d06c3addf | ||
|
|
45ba949d96 | ||
|
|
1a6a37c003 | ||
|
|
6c0e6dce06 | ||
|
|
639bd5fb27 | ||
|
|
aa568e6290 | ||
|
|
1bf5802a68 | ||
|
|
4e8dd9281f | ||
|
|
9a68631d39 | ||
|
|
f35e6cdae8 | ||
|
|
3339629747 | ||
|
|
5d9c3ab462 | ||
|
|
8548e0fb12 | ||
|
|
82eb18bc00 | ||
|
|
e331e8fb76 | ||
|
|
61ae8eb88b | ||
|
|
37110ea262 | ||
|
|
63acb3a275 | ||
|
|
51086f39ca | ||
|
|
7801f9c330 | ||
|
|
f7e2c49154 | ||
|
|
f6e4d55f5e | ||
|
|
39e8d3b80c | ||
|
|
9d15453b66 | ||
|
|
64a608039b | ||
|
|
2cf6464780 | ||
|
|
2000fc31b5 | ||
|
|
1180f3e42b | ||
|
|
404695f1d8 | ||
|
|
ef90f07db8 | ||
|
|
510c70bdf7 | ||
|
|
9c25480c7b | ||
|
|
5c3a6fec32 | ||
|
|
5c4deff215 | ||
|
|
32a2ca95b6 | ||
|
|
41b4cc3bb3 | ||
|
|
e096c3114f | ||
|
|
8d3fcf1680 | ||
|
|
9bc243c3f7 | ||
|
|
8e7378b70f | ||
|
|
7b9e752301 | ||
|
|
65cb473d25 | ||
|
|
ec1a695e06 | ||
|
|
6c44ec558d | ||
|
|
8de5c1709b | ||
|
|
cfbc6b9150 | ||
|
|
6b55812739 | ||
|
|
cd5b8b3e6f | ||
|
|
e6587844d0 | ||
|
|
51a9021994 | ||
|
|
7076a69e3a | ||
|
|
3a36353b02 | ||
|
|
973a8dd8cc | ||
|
|
bf3e9c5b1e | ||
|
|
83620e127e | ||
|
|
6850dda108 | ||
|
|
f4829f7e72 | ||
|
|
39df7b22b9 | ||
|
|
2398c0ab33 | ||
|
|
68f8c2d3f8 | ||
|
|
cfa68c5500 | ||
|
|
ebae86f3e6 | ||
|
|
c2cdb48ff3 | ||
|
|
f7df8d2a38 | ||
|
|
76dfe027ab | ||
|
|
4059d7a7ec | ||
|
|
b6cf19a33f | ||
|
|
906b87022d | ||
|
|
43ab6dfc65 | ||
|
|
1ad29077e3 | ||
|
|
6519327244 | ||
|
|
263960752e | ||
|
|
df4491e568 | ||
|
|
ee0b29d9f0 | ||
|
|
c3b043eafa | ||
|
|
8f0999cc37 | ||
|
|
f96ff52506 | ||
|
|
48422e0a4b | ||
|
|
c018713676 | ||
|
|
0fc3dc6841 | ||
|
|
ecc9020452 | ||
|
|
b0eec07a01 | ||
|
|
a6e5273c7a | ||
|
|
5481ee9e35 | ||
|
|
87af59b65d | ||
|
|
6efdd66bbd | ||
|
|
6203a93325 | ||
|
|
109b6e8c3d | ||
|
|
df27def9cb | ||
|
|
91f05b8f32 | ||
|
|
5155f19c9b | ||
|
|
07b1101736 | ||
|
|
80dc491f9d | ||
|
|
e3eb8e909b | ||
|
|
e761c439d7 | ||
|
|
df41bad465 | ||
|
|
b828d3f84f | ||
|
|
a210e2f13a | ||
|
|
6c612d8eba | ||
|
|
9e3237da38 | ||
|
|
6ee044d106 | ||
|
|
067f8b5447 | ||
|
|
cffbe1a77b | ||
|
|
fdc8e07f86 | ||
|
|
00674ec0d5 | ||
|
|
9f39f0cdb8 | ||
|
|
050d38c0ab | ||
|
|
9afeb5e514 | ||
|
|
e04bb86171 | ||
|
|
f70449fd98 | ||
|
|
ec9925ed5a | ||
|
|
f95e17c047 | ||
|
|
cef3e122ff | ||
|
|
e9bd0f0bec | ||
|
|
5f1cb67ffc | ||
|
|
926f3ab99a | ||
|
|
f26d78dfe4 | ||
|
|
b32c8b372e | ||
|
|
cedf42d683 | ||
|
|
9ea9b7d87d | ||
|
|
872284b770 | ||
|
|
55390de070 | ||
|
|
65f23fb387 | ||
|
|
d7cf258a40 | ||
|
|
0dc0a9a4a3 | ||
|
|
bfe671d7b4 | ||
|
|
e43454d518 | ||
|
|
3e6757300c | ||
|
|
2d486d7b3a | ||
|
|
5ad3d282af | ||
|
|
8f2f3ca352 | ||
|
|
1eb1bb91ce | ||
|
|
d2cc9cc13b | ||
|
|
362324cb87 | ||
|
|
9024e2d382 | ||
|
|
2492d6f9d0 | ||
|
|
b1b2ca1987 | ||
|
|
716d516a60 | ||
|
|
b77c8932a6 | ||
|
|
83b53210ee | ||
|
|
34fe0e1fa4 | ||
|
|
4ba56f1377 | ||
|
|
38b0202365 | ||
|
|
56fa4f9677 | ||
|
|
f58e425b35 | ||
|
|
d95f104e03 | ||
|
|
3f9f10d479 | ||
|
|
e96c5b2416 | ||
|
|
ee2e7487e7 | ||
|
|
a3c7604374 | ||
|
|
1444104418 | ||
|
|
bf86512916 | ||
|
|
8d1ad031fa | ||
|
|
c38f8a90a6 | ||
|
|
fd3acc39d9 | ||
|
|
54e0f9b595 | ||
|
|
c8d9592ab8 | ||
|
|
25043dda7b | ||
|
|
13a1969150 | ||
|
|
e1c8d98bf2 | ||
|
|
9732a9b8b9 | ||
|
|
9e7de65990 | ||
|
|
1d4a6a72ed | ||
|
|
298f408d3f | ||
|
|
aa97159837 | ||
|
|
b9d6aec0e3 | ||
|
|
16652e7bf9 | ||
|
|
dcb46174ff | ||
|
|
aa8fb0464c | ||
|
|
388959a1fe | ||
|
|
a66d83c598 | ||
|
|
6acdf22e41 | ||
|
|
10f3f7cea5 | ||
|
|
16bfd6eafb | ||
|
|
83bb765bcc | ||
|
|
8c28f9b6a6 | ||
|
|
526e532e2d | ||
|
|
5657b6d917 | ||
|
|
dd5b9e23f5 | ||
|
|
da08becd93 | ||
|
|
4350d4c9a0 | ||
|
|
ace8214d4d | ||
|
|
1f0e79e934 | ||
|
|
b338849952 | ||
|
|
f4661912b0 | ||
|
|
985a8f31ae | ||
|
|
da31675f64 | ||
|
|
156bf8230a | ||
|
|
5266583e5b | ||
|
|
186e6c5bc9 | ||
|
|
c708dce033 | ||
|
|
02ded9b545 | ||
|
|
87e542af65 | ||
|
|
55d998ea99 | ||
|
|
d2389badef | ||
|
|
6bec267ef5 | ||
|
|
8e22b8959a | ||
|
|
2e6d1f6f8d | ||
|
|
34b97e41d9 | ||
|
|
7b2d0432e6 | ||
|
|
1a72a592b9 | ||
|
|
7c25d1dbfd | ||
|
|
8e5fac83fd | ||
|
|
736790473e | ||
|
|
276a286853 | ||
|
|
25a0c14be8 | ||
|
|
a75b9c5027 | ||
|
|
9ed98614fe | ||
|
|
d5ca195059 | ||
|
|
f26b7ada50 | ||
|
|
503c3011e1 | ||
|
|
376873b5ee | ||
|
|
15ba8d217f | ||
|
|
c355649759 | ||
|
|
4afabd4c55 | ||
|
|
0bc1bf9f3d | ||
|
|
1563a41864 | ||
|
|
963d0a6c9d | ||
|
|
f3dd26e499 | ||
|
|
b924156a15 | ||
|
|
4d3c84f1c6 | ||
|
|
7e3db2f1ff | ||
|
|
4135d8b1e6 | ||
|
|
dc730412de | ||
|
|
882996a875 | ||
|
|
fc56c418d6 | ||
|
|
654920b3b4 | ||
|
|
c3d4d957cd | ||
|
|
2eeb0b15e8 | ||
|
|
f97d4d9355 | ||
|
|
f3d0cbb03d | ||
|
|
6905eab2b1 | ||
|
|
c91ddae6ed | ||
|
|
a7ab08fa4c | ||
|
|
516c2e5e1d | ||
|
|
269918c09c | ||
|
|
46c285aa41 | ||
|
|
a93a13b172 | ||
|
|
ba58b8c8d9 | ||
|
|
3473924a29 | ||
|
|
6e39ed7404 | ||
|
|
e3bc6c0818 | ||
|
|
be659111a9 | ||
|
|
3fc85c59e0 | ||
|
|
72588db776 | ||
|
|
c0de25a817 | ||
|
|
ddd417598e | ||
|
|
7d1541c9ad | ||
|
|
0b2d2439c1 | ||
|
|
4f5d9b8222 | ||
|
|
59afafba0e | ||
|
|
f0a9080ef7 | ||
|
|
86c5204772 | ||
|
|
9b8fb62790 | ||
|
|
700af88bf7 | ||
|
|
b423d7291e | ||
|
|
ae97668280 | ||
|
|
9f74194f73 | ||
|
|
7154d1081f | ||
|
|
b29ad205df | ||
|
|
b2b0c64c48 | ||
|
|
0cfc745954 | ||
|
|
fc7d8a7a9c | ||
|
|
0b7477ea56 | ||
|
|
9285714345 | ||
|
|
4d67418b0d | ||
|
|
bdec0af791 | ||
|
|
9482c0a6b9 | ||
|
|
b0e7030939 | ||
|
|
dc82fd6051 | ||
|
|
6507159e34 | ||
|
|
3366d26d65 | ||
|
|
010f0091b9 | ||
|
|
cafd8741ca | ||
|
|
b8f5fd510a | ||
|
|
25699995fc | ||
|
|
ab44faeda3 | ||
|
|
c1668a4e4a | ||
|
|
469d787888 | ||
|
|
f58332e9b5 | ||
|
|
d8e1fef3f0 | ||
|
|
d646856b7c | ||
|
|
e38d653f4d | ||
|
|
179c1f66b7 | ||
|
|
6a699d8004 | ||
|
|
1f1605bdf9 | ||
|
|
c1cdc434a8 | ||
|
|
06941b932d | ||
|
|
c3cca93850 | ||
|
|
d55a5e695f | ||
|
|
397f71db6e | ||
|
|
b0785e9db0 | ||
|
|
0b1b84dbf4 | ||
|
|
fc815dc98e | ||
|
|
ffdb6829e1 | ||
|
|
1739958664 | ||
|
|
683fb34709 | ||
|
|
2306da0e84 | ||
|
|
fec4cce560 | ||
|
|
e74601443f | ||
|
|
f41a04b1a2 | ||
|
|
3bdd5f00b6 | ||
|
|
de146f363a | ||
|
|
83527a7533 | ||
|
|
026b21f779 | ||
|
|
dcffa4fa0a | ||
|
|
318f6f504f | ||
|
|
8e4c696583 | ||
|
|
7e52512d0e | ||
|
|
eb13189d07 | ||
|
|
026383e92d | ||
|
|
4b6269a4f0 | ||
|
|
3ffb30b544 | ||
|
|
5f06a35f4e | ||
|
|
280ea1aa9f | ||
|
|
ad9c4854a9 | ||
|
|
49c8334e40 | ||
|
|
f1dd42de9e | ||
|
|
4e31e1d3a8 | ||
|
|
00f686a733 | ||
|
|
890a917dec | ||
|
|
8d147c1774 | ||
|
|
c642076ec3 | ||
|
|
3b349a60f1 | ||
|
|
4ee6873ca5 | ||
|
|
fc4ecd3412 | ||
|
|
5d001dfd5a | ||
|
|
f95c6c4d3d | ||
|
|
4c66b1f65b | ||
|
|
088b9d7f25 | ||
|
|
dbdea2db10 | ||
|
|
2a2798fa84 | ||
|
|
fea516f98f | ||
|
|
fb15687388 | ||
|
|
fcfae4127e | ||
|
|
e3ba37ba15 | ||
|
|
99567a1102 | ||
|
|
065a850a94 | ||
|
|
b8070f1871 | ||
|
|
90e46a2696 | ||
|
|
7f0c605651 | ||
|
|
339a1957c8 | ||
|
|
813c911487 | ||
|
|
517e556552 | ||
|
|
4a4bc35cce | ||
|
|
ac66b49eca | ||
|
|
80707c42e8 | ||
|
|
48004024ee | ||
|
|
e3331dd508 | ||
|
|
0a79cf79a6 | ||
|
|
4c40236441 | ||
|
|
71987a383a | ||
|
|
ad78a78339 | ||
|
|
56a4a19163 | ||
|
|
324ec77d98 | ||
|
|
641df474fd | ||
|
|
da0cf10f91 | ||
|
|
477651e5d5 | ||
|
|
72de519430 | ||
|
|
29ad6691d8 | ||
|
|
5e533bdedc | ||
|
|
38c5911460 | ||
|
|
a8613e5d15 | ||
|
|
714a9ba241 | ||
|
|
45c2b02842 | ||
|
|
bf9315dbbe | ||
|
|
c403029d5d | ||
|
|
3f71428c29 | ||
|
|
9ce7527d58 | ||
|
|
e51847830a | ||
|
|
b1984a452e | ||
|
|
787c4ee073 | ||
|
|
3acb61b5ed | ||
|
|
ea29785a8a | ||
|
|
6603460c39 | ||
|
|
b53d5f3638 | ||
|
|
9c0d09c487 | ||
|
|
9573329d06 | ||
|
|
4bcfa84d75 | ||
|
|
76966135d3 | ||
|
|
8a7edbf3a7 | ||
|
|
a93d4f380b | ||
|
|
23ba41fa9d | ||
|
|
183c028e46 | ||
|
|
aa117cf917 | ||
|
|
b5d54e48d1 | ||
|
|
8511e35df8 | ||
|
|
6c189ea961 | ||
|
|
df5fac06b4 | ||
|
|
49de36cac8 | ||
|
|
a4d00be62f | ||
|
|
48f3b5e54f | ||
|
|
eaf5b5d59e | ||
|
|
e11b4c9449 | ||
|
|
e76d207718 | ||
|
|
b3e8201481 | ||
|
|
59a17d4a2a | ||
|
|
25fa4901c2 | ||
|
|
e48b8c9792 | ||
|
|
5e72d210d4 | ||
|
|
99cc2efb90 | ||
|
|
76bcf33f80 | ||
|
|
f58d745585 | ||
|
|
1e7e572d4a | ||
|
|
d2f99c36f5 | ||
|
|
7a3d9d765c | ||
|
|
12c552c987 | ||
|
|
0aaaca05a4 | ||
|
|
fd2fc35b48 | ||
|
|
469a0814f3 | ||
|
|
079759939a | ||
|
|
a2aac5a63a | ||
|
|
9e43f61366 | ||
|
|
ee582a8e52 | ||
|
|
f61ce886a0 | ||
|
|
04353a289c | ||
|
|
bd7022fb58 | ||
|
|
1ff86e237f | ||
|
|
eb714776ba | ||
|
|
210ace79d5 | ||
|
|
1836863066 | ||
|
|
4b24176d2c | ||
|
|
63cb4fbf3b | ||
|
|
160d158152 | ||
|
|
812a8082b8 | ||
|
|
07421d7f53 | ||
|
|
1c74bfd5ef | ||
|
|
fa896b3bf3 | ||
|
|
a48925f6bc | ||
|
|
4e4c683211 | ||
|
|
8ed5bddc02 | ||
|
|
432c942330 | ||
|
|
a4e5973573 | ||
|
|
fdaa3b1992 | ||
|
|
08af8a49aa | ||
|
|
1fc37a9349 | ||
|
|
c9b44eec52 | ||
|
|
54d44ce741 | ||
|
|
c3fa04fdd7 | ||
|
|
b89d9d090f | ||
|
|
8ec4a36826 | ||
|
|
2e870ad4d0 | ||
|
|
7dec980630 | ||
|
|
6c15129ce8 | ||
|
|
3982ed4c6f | ||
|
|
0c53ad0e16 | ||
|
|
ceadff78aa | ||
|
|
4d4cf896af | ||
|
|
0bc07e2de6 | ||
|
|
4d446b14ee | ||
|
|
78706a40c5 | ||
|
|
a1af70f7a9 | ||
|
|
d51f904826 | ||
|
|
077707b0a3 | ||
|
|
ce6af62c7d | ||
|
|
5d00cf652a | ||
|
|
399671488c | ||
|
|
6599d91660 | ||
|
|
101b4d700f | ||
|
|
d8b6b10870 | ||
|
|
1d0bb53f2a | ||
|
|
d6ad797769 | ||
|
|
9a511fd5fa | ||
|
|
3a8af3c24d | ||
|
|
af116dd7dc | ||
|
|
3afdd0fa1d | ||
|
|
43b7eb6e18 | ||
|
|
ed610dcd4e | ||
|
|
fd26270e78 | ||
|
|
3e30c61fb0 | ||
|
|
fe95031c6e | ||
|
|
5cbd79b525 | ||
|
|
d347058d6b | ||
|
|
8e9cd2566b | ||
|
|
1eaec5e4f6 | ||
|
|
7d3811f879 | ||
|
|
5553096bc4 | ||
|
|
1d443e1f7d | ||
|
|
7b3b6cc8be | ||
|
|
1dd5b810c2 | ||
|
|
f59739d2b0 | ||
|
|
e3813ab1af | ||
|
|
e424938e02 | ||
|
|
fceecffed7 | ||
|
|
25952f293c | ||
|
|
78df0a20ec | ||
|
|
af892428a5 | ||
|
|
e82e5e1da9 | ||
|
|
9ea081576b | ||
|
|
275cab7538 | ||
|
|
db962b2ba6 | ||
|
|
08de0fa42d | ||
|
|
05b9d4e9fd | ||
|
|
81d6ed3785 | ||
|
|
bc4fe52f8d | ||
|
|
5241557a74 | ||
|
|
c03f6fcc3a | ||
|
|
5696e951f2 | ||
|
|
19cb1c96e0 | ||
|
|
05a5995865 | ||
|
|
722a6881fd | ||
|
|
04f8bf0903 | ||
|
|
4d0b777f9f | ||
|
|
b7b3eb9d19 | ||
|
|
c550f6cf0d | ||
|
|
5fdae1259b | ||
|
|
18dfe2b883 | ||
|
|
6ce26b7b6d | ||
|
|
178d1546fe | ||
|
|
54c1bf6950 | ||
|
|
7927f4ca2b | ||
|
|
8f1a36c8e3 | ||
|
|
59ad2eb784 | ||
|
|
4d44f4324d | ||
|
|
a56e58f69b | ||
|
|
8d84c3b884 | ||
|
|
bcb60378c0 | ||
|
|
8ca507c01c | ||
|
|
32966f9259 | ||
|
|
92703bceb2 | ||
|
|
4699b07ca6 | ||
|
|
333cd3a694 | ||
|
|
f606ba6906 | ||
|
|
5d87dc8d32 | ||
|
|
2f77c6ba9c | ||
|
|
c2839794cf | ||
|
|
b736d09168 | ||
|
|
b17c6f4eb6 | ||
|
|
3eaa3424e1 | ||
|
|
44082846d5 | ||
|
|
646f40d664 | ||
|
|
a3b95f01de | ||
|
|
b0e66993fe | ||
|
|
bd3b8bff35 | ||
|
|
9872a6e82a | ||
|
|
f083fafcfd | ||
|
|
08fe2c27fd | ||
|
|
f674c786ba | ||
|
|
02c049653e | ||
|
|
65bc24530f | ||
|
|
8e85c2fd06 | ||
|
|
55b68df956 | ||
|
|
9a76c94e22 | ||
|
|
dee98f41d1 | ||
|
|
96ba42e411 | ||
|
|
eaf4b1b954 | ||
|
|
1ff62de3b6 | ||
|
|
03bcd02002 | ||
|
|
c2790d9181 | ||
|
|
864ad50880 | ||
|
|
c5fd964bf2 | ||
|
|
191537884e | ||
|
|
09a45baa5d | ||
|
|
c23c34583d | ||
|
|
b2ce138ea0 | ||
|
|
b8259e7794 | ||
|
|
9cea579c8e | ||
|
|
114f2b9092 | ||
|
|
963ad448f5 | ||
|
|
0f7f2a7b38 | ||
|
|
a770e08013 | ||
|
|
1c7a20be44 | ||
|
|
bc0ea343cc | ||
|
|
37718d1e71 | ||
|
|
af8404d627 | ||
|
|
5fe4cc6bab | ||
|
|
389721ba89 | ||
|
|
ca6f4f8977 | ||
|
|
6e0218c084 | ||
|
|
94d9304c0b | ||
|
|
94ada5969b | ||
|
|
39256dad09 | ||
|
|
291a32759a | ||
|
|
3790bd5753 | ||
|
|
0fd346181c | ||
|
|
5b01d0c196 | ||
|
|
08342b5b00 | ||
|
|
10bb8527bd | ||
|
|
a668204cdc | ||
|
|
7d04d66a0b | ||
|
|
4134917a45 | ||
|
|
a8ed759a06 | ||
|
|
61cd33284d | ||
|
|
46c95ebb97 | ||
|
|
9567bd9a49 | ||
|
|
d2d8778425 | ||
|
|
2f02a228cc | ||
|
|
e160339c7b | ||
|
|
60b7e6a081 | ||
|
|
0a3185f88d | ||
|
|
b26ae7d0a4 | ||
|
|
d5ffc6a476 | ||
|
|
e3faf09ab2 | ||
|
|
5ce9bbb304 | ||
|
|
e75bb0de27 | ||
|
|
6b496ae413 | ||
|
|
42d4d48362 | ||
|
|
ccab7f9119 | ||
|
|
a2c59e9934 | ||
|
|
641275ee79 | ||
|
|
3287e8b300 | ||
|
|
977a8a5774 | ||
|
|
1422d94fac | ||
|
|
a0be23b500 | ||
|
|
9696589e79 | ||
|
|
9975580497 | ||
|
|
9ff3cbe63f | ||
|
|
5bf78a31d9 | ||
|
|
a1ff097336 | ||
|
|
d4a3d6a0d6 | ||
|
|
dbdc031583 | ||
|
|
f8daa07a48 | ||
|
|
36c2024cb3 | ||
|
|
4665876698 | ||
|
|
ffef7b9cab | ||
|
|
3a197d56c0 | ||
|
|
19fc399ae1 | ||
|
|
40f1fd4ffd | ||
|
|
03f0c5aad6 | ||
|
|
11db1ecaed | ||
|
|
6ab7d37a08 | ||
|
|
c34291237f | ||
|
|
0343c09704 | ||
|
|
c2493fc535 | ||
|
|
396573055f | ||
|
|
37151ba926 | ||
|
|
f572350c20 | ||
|
|
c778e8bcac | ||
|
|
b173fe2dcb | ||
|
|
264895cd59 | ||
|
|
e123d22b8d | ||
|
|
663c720f2a | ||
|
|
449bf17692 | ||
|
|
0dbfde4c80 | ||
|
|
66bae2adb8 | ||
|
|
9b730058b4 | ||
|
|
d264220245 | ||
|
|
2bdbce2e40 | ||
|
|
c6ac9f1d2a | ||
|
|
c9c1ff1778 | ||
|
|
b538d57207 | ||
|
|
0cbf35dc77 | ||
|
|
f70bb2705d | ||
|
|
5edc773535 | ||
|
|
54691044d4 | ||
|
|
ae58c427a5 | ||
|
|
6b86baaa2f | ||
|
|
6d5251d1c6 | ||
|
|
7f25311d26 | ||
|
|
8bd4a3389f | ||
|
|
ad06b2a903 | ||
|
|
7de694c0cd | ||
|
|
62ba503b86 | ||
|
|
01d49a4b28 | ||
|
|
7dbc2c3af2 | ||
|
|
48d1164858 | ||
|
|
2eeabf8ae6 | ||
|
|
f463008362 | ||
|
|
9d4e7cb2b8 | ||
|
|
d60257ebbd | ||
|
|
dbcce86bb8 | ||
|
|
4ffc504150 | ||
|
|
c2b606a3fc | ||
|
|
6c769c5db9 | ||
|
|
8bf4a5eb7d | ||
|
|
525cc93d4a | ||
|
|
ae349159ce | ||
|
|
95944fa081 | ||
|
|
65d6f34878 | ||
|
|
1eb49350e9 | ||
|
|
622f4118c0 | ||
|
|
f7bcf43334 | ||
|
|
3b72d80979 | ||
|
|
331c0e04a5 | ||
|
|
e3d6cbd80f | ||
|
|
4dfdcd68d5 | ||
|
|
f9d3935269 | ||
|
|
5d61062b0e | ||
|
|
6679debf72 | ||
|
|
1ae7c0b59a | ||
|
|
fd9e034461 | ||
|
|
9e24e28341 | ||
|
|
070d8534c4 | ||
|
|
494a47aaa5 | ||
|
|
f32ae402d5 | ||
|
|
27146eb5cc | ||
|
|
a75d13f42f | ||
|
|
3490299f66 | ||
|
|
267bbbf77b | ||
|
|
07d76dc871 | ||
|
|
fbfaaf43c5 | ||
|
|
40cd7e962a | ||
|
|
da23673a44 | ||
|
|
d6edb1e944 | ||
|
|
39bb3f34d6 | ||
|
|
31971e7821 | ||
|
|
1dd762f0cf | ||
|
|
7481fae0df | ||
|
|
77d861f56f | ||
|
|
d5c07acdb5 | ||
|
|
9bb4deeb78 | ||
|
|
5803fcdb99 | ||
|
|
d01cbe44ae | ||
|
|
77f6770333 | ||
|
|
742e731e96 | ||
|
|
fe457a5368 | ||
|
|
c5a0ee7f6e | ||
|
|
cd36b423b6 | ||
|
|
d523166b61 | ||
|
|
587ed3c83c | ||
|
|
ded9ae733a | ||
|
|
a7a2dcc8d8 | ||
|
|
ee2cb9e625 | ||
|
|
0e9a8a27e5 | ||
|
|
bbbf4779df | ||
|
|
b4670b5526 | ||
|
|
47881db696 | ||
|
|
021f8ae80f | ||
|
|
ed6f86d4b1 | ||
|
|
a8abc9f9aa | ||
|
|
19274f7e69 | ||
|
|
3f082372fd | ||
|
|
15f0b11c0d | ||
|
|
0551b0bfd2 | ||
|
|
f5b5596306 | ||
|
|
5c9e8dce76 | ||
|
|
9016fbff68 | ||
|
|
c288e75407 | ||
|
|
a3fd6fcd3c | ||
|
|
dc4e592de9 | ||
|
|
ccffe4ced5 | ||
|
|
2a0597ff01 | ||
|
|
f072aded62 | ||
|
|
defc6b92d2 | ||
|
|
a8a1de9ad0 | ||
|
|
5690c43633 | ||
|
|
5dc9073108 | ||
|
|
99df080bd6 | ||
|
|
8e837fc73d | ||
|
|
71cf69bb16 | ||
|
|
94b25ec6e8 | ||
|
|
0b88d93e18 | ||
|
|
7dd95101c2 | ||
|
|
f1e28c2ab5 | ||
|
|
9e9bc49729 | ||
|
|
db74f2cf3f | ||
|
|
7b22ba8c54 | ||
|
|
1e8c9956cd | ||
|
|
e274d04d9b | ||
|
|
439cc55147 | ||
|
|
9ba1995030 | ||
|
|
662879bb5d | ||
|
|
ac6d683733 | ||
|
|
2a143e71e4 | ||
|
|
34014451ce | ||
|
|
aa353fbbaa | ||
|
|
f62290cb00 | ||
|
|
041fc2b330 | ||
|
|
0e2a00e006 | ||
|
|
c3b6a08ff3 | ||
|
|
50f387733a | ||
|
|
5caf9cc690 | ||
|
|
9917716cc9 | ||
|
|
48f51b5ec7 | ||
|
|
fa41ae11a4 | ||
|
|
f24346b4fa | ||
|
|
0735d3bc80 | ||
|
|
fd893c3af6 | ||
|
|
18f0228794 | ||
|
|
62d078355a | ||
|
|
73ff61620e | ||
|
|
5c988d8890 | ||
|
|
f4b6a2054e | ||
|
|
03f30b8a1d | ||
|
|
fcf38aae35 | ||
|
|
0d20f2e0b8 | ||
|
|
841c2301d6 | ||
|
|
9fe34ca226 | ||
|
|
bbd7f024aa | ||
|
|
bfa5410a52 | ||
|
|
f0adeaec7d | ||
|
|
0e41502bcc | ||
|
|
d6bf62743d | ||
|
|
8a309cf3d9 | ||
|
|
a41b7114ca | ||
|
|
05a74481fa | ||
|
|
d34ccadd2c | ||
|
|
69438035eb | ||
|
|
5086b45ce9 | ||
|
|
c251b04d15 | ||
|
|
229bed9955 | ||
|
|
aa59a70b88 | ||
|
|
c358831808 | ||
|
|
1c9a64855e | ||
|
|
3e22835049 | ||
|
|
d05e5a5dae | ||
|
|
1444374fea | ||
|
|
5918c9cd6f | ||
|
|
39f220fae1 | ||
|
|
35154887e3 | ||
|
|
e52b6553a5 | ||
|
|
cc2e6c7edc | ||
|
|
834bc39242 | ||
|
|
1e51f1699a | ||
|
|
3f696b4d12 | ||
|
|
a7453d16ef | ||
|
|
3b7ed88180 | ||
|
|
2fdfba208a | ||
|
|
224aa1b4e6 | ||
|
|
8a77c41519 | ||
|
|
9c3b93a8c7 | ||
|
|
0114d89993 | ||
|
|
3329be5c73 | ||
|
|
e6f77faf06 | ||
|
|
f962c45d8e | ||
|
|
c187af27e7 | ||
|
|
e5b392af09 | ||
|
|
4f940055be | ||
|
|
50f7de0f2d | ||
|
|
5fbde39c75 | ||
|
|
342509d136 | ||
|
|
c6ce78912c | ||
|
|
90041552b9 | ||
|
|
6ec0849a1b | ||
|
|
bd11ce2579 | ||
|
|
fd43b608e9 | ||
|
|
6be9cb974c | ||
|
|
ea8d15e36b | ||
|
|
54ac47d9af | ||
|
|
472ef88bdc | ||
|
|
50f3563a96 | ||
|
|
5fa9b9b66d | ||
|
|
86e601dbe4 | ||
|
|
2de4990543 | ||
|
|
242fbcd43e | ||
|
|
239f93e48d | ||
|
|
be465ec297 | ||
|
|
ae25dbbbc4 | ||
|
|
a324f4b84d | ||
|
|
b49619b389 | ||
|
|
ba4a36dfb3 | ||
|
|
e4116f6bb4 | ||
|
|
6380ad0657 | ||
|
|
c07f5dba03 | ||
|
|
7223f6fc3f | ||
|
|
094d4a8879 | ||
|
|
74fafac5fa | ||
|
|
e1b6690763 | ||
|
|
ab0a21ab0d | ||
|
|
ffc7289e90 | ||
|
|
79a394effe | ||
|
|
4229b9f873 | ||
|
|
c5aac9270c | ||
|
|
d75db73a43 | ||
|
|
2ae51bcf4e | ||
|
|
0af271b05f | ||
|
|
16aab0d661 | ||
|
|
0e28916f8a | ||
|
|
9bbf9a590c | ||
|
|
f46d45e786 | ||
|
|
d229cbd098 | ||
|
|
a670c49d14 | ||
|
|
45f6cd9368 | ||
|
|
52d7a928cc | ||
|
|
69b55db4a7 | ||
|
|
ade7c6cb6c | ||
|
|
906c539541 | ||
|
|
63761efca2 | ||
|
|
e8728dfa77 | ||
|
|
9018f6deae | ||
|
|
fd5ca5a215 | ||
|
|
6c08558512 | ||
|
|
685ae14467 | ||
|
|
ea662c4876 | ||
|
|
d407ebc4e9 | ||
|
|
a7c441ee58 | ||
|
|
3eb31be62e | ||
|
|
54a9d140b5 | ||
|
|
f5cfe8a537 | ||
|
|
0890de1869 | ||
|
|
52d15802d9 | ||
|
|
c463090edb | ||
|
|
942a3d6195 | ||
|
|
c74e5280e7 | ||
|
|
6e3852554a | ||
|
|
4f237b88ac | ||
|
|
3a1cb87a47 | ||
|
|
8fdb65eba5 | ||
|
|
329fb7d023 | ||
|
|
5373b9336c | ||
|
|
828261ea9c | ||
|
|
5185d19129 | ||
|
|
51b6c0a7de | ||
|
|
e389011b48 | ||
|
|
1dc9e47524 | ||
|
|
9ec159ddf8 | ||
|
|
0c4eb4eacf | ||
|
|
b554fddb01 | ||
|
|
43a4a98f6f | ||
|
|
64a7822cc8 | ||
|
|
4e2b28c687 | ||
|
|
e9b28c5a90 | ||
|
|
565c7412c2 | ||
|
|
2e9aa4ec17 | ||
|
|
aa7775225a | ||
|
|
0518fea2a2 | ||
|
|
1771cec811 | ||
|
|
3ed6252ad8 | ||
|
|
ec2602307d | ||
|
|
c32dd092d0 | ||
|
|
7a8a38ac6f | ||
|
|
ebd9e0863e | ||
|
|
47145ef4ac | ||
|
|
efca02904d | ||
|
|
76b2be8ba4 | ||
|
|
5bcd850add | ||
|
|
75208737ee | ||
|
|
a22d08394b | ||
|
|
1d5852fbb5 | ||
|
|
00e960b9ef | ||
|
|
7a53228341 | ||
|
|
a1cef450de | ||
|
|
5a09317c8d | ||
|
|
67592f3f45 | ||
|
|
5a73031d62 | ||
|
|
cf17576e1c | ||
|
|
85d32a4c70 | ||
|
|
22396111be | ||
|
|
8c507e5569 | ||
|
|
4400d50c30 | ||
|
|
5b60116d21 | ||
|
|
c6cd37d916 | ||
|
|
0681c6bb9e | ||
|
|
f5971b9d03 | ||
|
|
eb5655bbd4 | ||
|
|
6cee466f52 | ||
|
|
45df0b272d | ||
|
|
369ba5ac75 | ||
|
|
f41c565f95 | ||
|
|
a075debb05 | ||
|
|
771105a5b2 | ||
|
|
417b789b5e | ||
|
|
b30de3d1ae | ||
|
|
06be778e98 | ||
|
|
1160ea140b | ||
|
|
3f07358125 | ||
|
|
3b624eb466 | ||
|
|
52bb54d2d5 | ||
|
|
ebc268018b | ||
|
|
11d767633e | ||
|
|
86079074d2 | ||
|
|
2899d58ad7 | ||
|
|
50c25b65b6 | ||
|
|
321fb858dd | ||
|
|
f0ec3e03d1 | ||
|
|
a745d24fbe | ||
|
|
f7b53da898 | ||
|
|
4dfc9f604a | ||
|
|
4181454799 | ||
|
|
035cc0f79c | ||
|
|
38dedc2fb8 | ||
|
|
9ddee97107 | ||
|
|
8d514c3e30 | ||
|
|
9b1f206cc6 | ||
|
|
f3e7ee2f2f | ||
|
|
8e921ab521 | ||
|
|
da2f97c227 | ||
|
|
637cf77ed9 | ||
|
|
f7b5408275 | ||
|
|
617213eb1c | ||
|
|
4395be40f3 | ||
|
|
bea806c26c | ||
|
|
079c1a6bab | ||
|
|
97b975b527 | ||
|
|
833f749e4b | ||
|
|
29fd785614 | ||
|
|
235d4b43e9 | ||
|
|
2a9f5d4209 | ||
|
|
5ed9908881 | ||
|
|
84030e8f94 | ||
|
|
e20c801ff0 | ||
|
|
de5ddaac6d | ||
|
|
b93f8afda2 | ||
|
|
4b3a0918fe | ||
|
|
665de7df55 | ||
|
|
f74024db66 | ||
|
|
af428a58b9 | ||
|
|
a88b97c875 | ||
|
|
96492f6a1a | ||
|
|
051d6d3727 | ||
|
|
d131752419 | ||
|
|
3ae4d12f60 | ||
|
|
2ff1776379 | ||
|
|
9ab69b157b | ||
|
|
c896398fab | ||
|
|
2634659366 | ||
|
|
a10dbef2c8 | ||
|
|
b5bdf3cfd2 | ||
|
|
1e17cc6ec7 | ||
|
|
23240f958e | ||
|
|
e04dfc73c7 | ||
|
|
2b8c448be4 | ||
|
|
e586f2387e | ||
|
|
72aa39d67f | ||
|
|
27c4918395 | ||
|
|
e22452b26e | ||
|
|
32f68de36f | ||
|
|
414a9f0da5 | ||
|
|
6300875e19 | ||
|
|
37d72c3869 | ||
|
|
57ad0894ab | ||
|
|
82736c96b1 | ||
|
|
079f206044 | ||
|
|
3accef8c92 | ||
|
|
e33a37776f | ||
|
|
d2e88ffdc4 | ||
|
|
35a0a4e9a5 | ||
|
|
ab5ce6e774 | ||
|
|
6152afaed5 | ||
|
|
7b59aa32f9 | ||
|
|
50b83d7342 | ||
|
|
46add8e88a | ||
|
|
dba35bc5a4 | ||
|
|
808fdb02a7 | ||
|
|
cfefcb00cb | ||
|
|
6f030f720a | ||
|
|
eef37defb4 | ||
|
|
1799bfed3f | ||
|
|
92a52dfa08 | ||
|
|
51a321219d | ||
|
|
dcb3f794d3 | ||
|
|
fbb6b6f800 | ||
|
|
8ba4b0be36 | ||
|
|
891c149f1b | ||
|
|
6d6790d5e0 | ||
|
|
ecfbdf8256 | ||
|
|
c41a6c3899 | ||
|
|
919b6671a1 | ||
|
|
0d0c624e99 | ||
|
|
4a3a0e6496 | ||
|
|
1d69101f97 | ||
|
|
1bbac32d88 | ||
|
|
44b5310a6a | ||
|
|
46c7f02827 | ||
|
|
b72259d78f | ||
|
|
1fa9f4e731 | ||
|
|
c444643294 | ||
|
|
19bcce80fa | ||
|
|
cceae09fef | ||
|
|
2feba874ef | ||
|
|
b85bce8e09 | ||
|
|
81f1d644e1 | ||
|
|
ac8a3b4f96 | ||
|
|
3cdbff53eb | ||
|
|
a84034c552 | ||
|
|
8ee4969aa9 | ||
|
|
2588c3fa55 | ||
|
|
9c9a9cb521 | ||
|
|
fb7dc21135 | ||
|
|
f97a51b34d | ||
|
|
534a673c51 | ||
|
|
cb3ac4b136 | ||
|
|
da10502bdd | ||
|
|
6faca3e732 | ||
|
|
30acc6f493 | ||
|
|
a88da16edc | ||
|
|
7ca57e1fa7 | ||
|
|
b674a521f2 | ||
|
|
a3e78dd563 | ||
|
|
171b1e8c60 | ||
|
|
9c825956e8 | ||
|
|
7dc51c5e0f | ||
|
|
135e75b812 | ||
|
|
a3ebabfd4e | ||
|
|
cf56f7e5c0 | ||
|
|
6861c46ac6 | ||
|
|
de9c59d309 | ||
|
|
307ea39413 | ||
|
|
ecef9d7df6 | ||
|
|
817e3175f3 | ||
|
|
aeba895250 | ||
|
|
b0d3d485cf | ||
|
|
d56686fd21 | ||
|
|
1a0f643d87 | ||
|
|
8509479dfb | ||
|
|
ae6d343d19 | ||
|
|
302ebc0a72 | ||
|
|
38c9cf9a68 | ||
|
|
5d070f6a17 | ||
|
|
5c60d2887a | ||
|
|
5dd4b62bcc | ||
|
|
fa9d81e3b2 | ||
|
|
101e692e50 | ||
|
|
b98c31b184 | ||
|
|
057accfb96 | ||
|
|
573e667c34 | ||
|
|
646ba86de8 | ||
|
|
88c7a0fcb6 | ||
|
|
9265a56f04 | ||
|
|
120de819e7 | ||
|
|
81a7248a93 | ||
|
|
cde10a662c | ||
|
|
3022fdebc2 | ||
|
|
426dc2ab87 | ||
|
|
27c2a360f0 | ||
|
|
e9c9205544 | ||
|
|
2b7b9c2dc6 | ||
|
|
ec7d0c6abf | ||
|
|
8046f95b67 | ||
|
|
dc1071fff8 | ||
|
|
a1111033d9 | ||
|
|
1b3f16b3e1 | ||
|
|
72d1192499 | ||
|
|
f812952044 | ||
|
|
cfcc99d75f | ||
|
|
4672754b81 | ||
|
|
ab3f027d02 | ||
|
|
eb0e21e5d6 | ||
|
|
26154941ca | ||
|
|
0b8c7bae69 | ||
|
|
2ae6c7ed92 | ||
|
|
c6ce193256 | ||
|
|
bbfc754fa4 | ||
|
|
db731efb62 | ||
|
|
e08b98ec6a | ||
|
|
6415be9c74 | ||
|
|
e312913088 | ||
|
|
0fbc4a4664 | ||
|
|
eb97e4ef26 | ||
|
|
3aaa6078c0 | ||
|
|
64f5703461 | ||
|
|
b8de48c9e9 | ||
|
|
2744e787d1 | ||
|
|
71bb7ce1e9 | ||
|
|
8afb962739 | ||
|
|
83464b367f | ||
|
|
0b312f7a06 | ||
|
|
253f9f9501 | ||
|
|
b29e579042 | ||
|
|
484caf04aa | ||
|
|
0df92d8bda | ||
|
|
a29152aa76 | ||
|
|
3443c42947 | ||
|
|
cdd13594a3 | ||
|
|
125104320e | ||
|
|
634efb9d9d | ||
|
|
56f4ec3b61 | ||
|
|
dfe15fac32 | ||
|
|
063292db6d | ||
|
|
994de67f9e | ||
|
|
c324ed592a | ||
|
|
4684cf8ba0 | ||
|
|
5ce1fad118 | ||
|
|
fce559a8fa | ||
|
|
bed0f98beb | ||
|
|
2dda66390d | ||
|
|
7df7d8ffa0 | ||
|
|
942f2f51b7 | ||
|
|
e813440332 | ||
|
|
570bb8290f | ||
|
|
4d66eaf0a7 | ||
|
|
aba75a7d2c | ||
|
|
d3c6d7cfaa | ||
|
|
12ad61aaa8 | ||
|
|
2d7d731a76 | ||
|
|
e43b109291 | ||
|
|
dde4e97d8b | ||
|
|
434abaaca6 | ||
|
|
a7d2e72313 | ||
|
|
fab8f0a9c9 | ||
|
|
bd0d2a5cec | ||
|
|
6a6ccc9a9e | ||
|
|
81e88f6632 | ||
|
|
0622106f8e | ||
|
|
0c73035b3c | ||
|
|
661e907784 | ||
|
|
80cfd57dbd | ||
|
|
974f7901e6 | ||
|
|
d06526a97e | ||
|
|
58f6b061a8 | ||
|
|
2e5795945b | ||
|
|
fa1a71cc01 | ||
|
|
9d5e670344 | ||
|
|
ad962c2cf6 | ||
|
|
0627f29059 | ||
|
|
a7e4037449 | ||
|
|
afab839733 | ||
|
|
4b77548eb9 | ||
|
|
2812a8978a | ||
|
|
6676e0ced8 | ||
|
|
e4168a4c17 | ||
|
|
b0a55d63f0 | ||
|
|
abe265ac71 | ||
|
|
88ea8b0638 | ||
|
|
e8ca7f3c1b | ||
|
|
c0cef78ef2 | ||
|
|
81c4dc516b | ||
|
|
3e887deb3e | ||
|
|
22298f46a3 | ||
|
|
c54e8c80b6 | ||
|
|
9fe4dbdb3d | ||
|
|
06ba2f178b | ||
|
|
c4d32c72d1 | ||
|
|
421c24bf4b | ||
|
|
d5393e4563 | ||
|
|
adc90167d4 | ||
|
|
4adaeed3da | ||
|
|
46943a1cf7 | ||
|
|
76827d42f5 | ||
|
|
d70e20ae0a | ||
|
|
d95439e3f4 | ||
|
|
ead591d3b8 | ||
|
|
97728734a7 | ||
|
|
ba4d1d3c12 | ||
|
|
4c50ebfe0a | ||
|
|
837d3ab0e3 | ||
|
|
b3a6edb704 | ||
|
|
9047d0df4f | ||
|
|
cbad4396cd | ||
|
|
7d25bcb168 | ||
|
|
2dac53e9ca | ||
|
|
f6433544af | ||
|
|
0cc50531f8 | ||
|
|
fba7529281 | ||
|
|
b69a9ceb0f | ||
|
|
28c7e0d105 | ||
|
|
afb8ff716a | ||
|
|
2ae7417e10 | ||
|
|
90da49f873 | ||
|
|
1d4af26b5c | ||
|
|
1507f220e5 | ||
|
|
42088c44cb | ||
|
|
2bc8fccaf0 | ||
|
|
1ce0480684 | ||
|
|
16514050b9 | ||
|
|
3dea599b9a | ||
|
|
1c81e5a95e | ||
|
|
3fedc9231c | ||
|
|
1435cd3162 | ||
|
|
53f5729674 | ||
|
|
2345ff172d | ||
|
|
74c2c59c90 | ||
|
|
c1091ce812 | ||
|
|
1b2a29565f | ||
|
|
bc327c40d4 | ||
|
|
314bfb2313 | ||
|
|
589de63328 | ||
|
|
d57ed85d7e | ||
|
|
9299e772ba | ||
|
|
05ce85d9b1 | ||
|
|
95f90f601d | ||
|
|
66a4ac420b | ||
|
|
4bde6645d0 | ||
|
|
759d5c7257 | ||
|
|
e9f37e98d1 | ||
|
|
bf3f22c854 | ||
|
|
852ea84cd8 | ||
|
|
e15a5ab599 | ||
|
|
9063f7e6c4 | ||
|
|
2418095656 | ||
|
|
8fefe6e167 | ||
|
|
b8b0a77e88 | ||
|
|
dc23cc2716 | ||
|
|
dbe7ef65e2 | ||
|
|
6ed5d44258 | ||
|
|
52ac14ad06 | ||
|
|
dea29ff8c4 | ||
|
|
f3a808cb89 | ||
|
|
7748f0c7e1 | ||
|
|
5c88cedaf0 | ||
|
|
946e8dda65 | ||
|
|
aefe9cc23a | ||
|
|
89c095094f | ||
|
|
7edb987cbd | ||
|
|
a81b644a8f | ||
|
|
1630e2eb77 | ||
|
|
5e80587138 | ||
|
|
e1995a3ccb | ||
|
|
1a3cc64a7e | ||
|
|
9c1f0f8a33 | ||
|
|
92d7e33b7e | ||
|
|
eda0d639f0 | ||
|
|
c4ae61dd75 | ||
|
|
19146fec6a | ||
|
|
efea4ebb59 | ||
|
|
c0b9e8eb77 | ||
|
|
80d11e1057 | ||
|
|
72cb1c16cc | ||
|
|
d60967b4f7 | ||
|
|
6f53f1056a | ||
|
|
f8144a73c1 | ||
|
|
ea88b27593 | ||
|
|
e285f622bb | ||
|
|
0b26647d94 | ||
|
|
9463ab4491 | ||
|
|
cfc49c10ca | ||
|
|
8eb92e4291 | ||
|
|
3173328396 | ||
|
|
baee71dd8e | ||
|
|
52290f9ee5 | ||
|
|
ed85ecb1e1 | ||
|
|
1027059a6b | ||
|
|
cc90c1e86e | ||
|
|
72a7157509 | ||
|
|
24cafa2d30 | ||
|
|
8f0b7dc87c | ||
|
|
05857e85f0 | ||
|
|
cdd5515a98 | ||
|
|
8d97e37985 | ||
|
|
9467834c29 | ||
|
|
726bf4d333 | ||
|
|
4f7af0a10d | ||
|
|
f4c5b95ba2 | ||
|
|
66decbca84 | ||
|
|
b3c109f3a4 | ||
|
|
e9d1731781 | ||
|
|
a85a98f295 | ||
|
|
68d7dec23b | ||
|
|
a0c00d660e | ||
|
|
53c05bc561 | ||
|
|
c65a8fde0c | ||
|
|
c597cb6af6 | ||
|
|
fc521ecda9 | ||
|
|
cb6261e4d1 | ||
|
|
dfbeb553b3 | ||
|
|
27e85010d4 | ||
|
|
eed0db00fd | ||
|
|
f0487bea8a | ||
|
|
520a0d04ea | ||
|
|
85389aec68 | ||
|
|
9079067332 | ||
|
|
24b8a1b66a | ||
|
|
27beb46801 | ||
|
|
154e0cba47 | ||
|
|
b64749b4bb | ||
|
|
db43008813 | ||
|
|
8d8703bedf | ||
|
|
29a53a5747 | ||
|
|
dc9750ecb4 | ||
|
|
75d3dd2de0 | ||
|
|
57e0707850 | ||
|
|
9fe7a5797f | ||
|
|
869b89c78d | ||
|
|
86b2a64c70 | ||
|
|
ebcd75af48 | ||
|
|
0eb6a80d83 | ||
|
|
5b21853e8b | ||
|
|
8e5ce1e188 | ||
|
|
c068b152b0 | ||
|
|
642931bb0a | ||
|
|
cfdd2f0cf8 | ||
|
|
535f2b4de1 | ||
|
|
6b89e5cc48 | ||
|
|
21984afa1b | ||
|
|
7a84732682 | ||
|
|
bcc9f74607 | ||
|
|
5d9dd92625 | ||
|
|
d932960323 | ||
|
|
697bb47664 | ||
|
|
ebf4951597 | ||
|
|
cd4be34559 | ||
|
|
b9800ff6cc | ||
|
|
912db293d4 | ||
|
|
3ce6b30887 | ||
|
|
c1871da1de | ||
|
|
9d64e9bef4 | ||
|
|
eacae5f17a | ||
|
|
0f9269a1ed | ||
|
|
52f1fe85e5 | ||
|
|
33055b8db6 | ||
|
|
39e5649454 | ||
|
|
74f6682263 | ||
|
|
9e9f685eca | ||
|
|
6def8193cb | ||
|
|
24e301854c | ||
|
|
6e0e33dbd0 | ||
|
|
c0f4517c34 | ||
|
|
a84f10cba2 | ||
|
|
a3b2ce25ea | ||
|
|
8985926b9a | ||
|
|
216dbf6ba0 | ||
|
|
3c4afea5c7 | ||
|
|
cce52f012c | ||
|
|
72c484ea8d | ||
|
|
a7c3cbec55 | ||
|
|
3e6bccdc9c | ||
|
|
ab2c6a87cc | ||
|
|
ba736d264b | ||
|
|
f2b1d68ab1 | ||
|
|
c000f75593 | ||
|
|
f4f4d411aa | ||
|
|
580cf5d38f | ||
|
|
2a2adf2223 | ||
|
|
60514ac56d | ||
|
|
e98f303d9b | ||
|
|
3bf88a3da1 | ||
|
|
8844046b19 | ||
|
|
55aa045991 | ||
|
|
abfbb2616c | ||
|
|
074992dcd1 | ||
|
|
ba282d637e | ||
|
|
455d320d35 | ||
|
|
77717a780e | ||
|
|
4f7a8d58ca | ||
|
|
8dc30dd047 | ||
|
|
b0deeed937 | ||
|
|
3168ce7016 | ||
|
|
c4cccc1574 | ||
|
|
a8aa938f42 | ||
|
|
c79c10c96b | ||
|
|
889ab953e0 | ||
|
|
45f957475d | ||
|
|
e376e8e1f9 | ||
|
|
fb93226aff | ||
|
|
fb196139b2 | ||
|
|
bc48e99650 | ||
|
|
31edf0d315 | ||
|
|
7d7d343de1 | ||
|
|
dda0feb548 | ||
|
|
b4ba76aa71 | ||
|
|
7fef7b16f7 | ||
|
|
ad1765cd3f | ||
|
|
250d30d73a | ||
|
|
f586c19951 | ||
|
|
5682aa3a7e | ||
|
|
b849fcb493 | ||
|
|
aa26e45c22 | ||
|
|
6a0fb09610 | ||
|
|
ecb14b3621 | ||
|
|
f0ad526aa9 | ||
|
|
36a14600ee | ||
|
|
d7329c7719 | ||
|
|
916b28044d | ||
|
|
053604ec28 | ||
|
|
a4368cf26e | ||
|
|
c7f5d68a39 | ||
|
|
51f8c96a57 | ||
|
|
99f324286b | ||
|
|
fc4c567395 | ||
|
|
b990b259bc | ||
|
|
2ea8083ad7 | ||
|
|
81fea5665b | ||
|
|
60257635ad | ||
|
|
2d7dd8e8ce | ||
|
|
2e9cca376c | ||
|
|
bc2f382e64 | ||
|
|
18e289e8f7 | ||
|
|
cfc6734702 | ||
|
|
1b4a91ba7e | ||
|
|
c5a5e55afe | ||
|
|
a47942d7df | ||
|
|
cddaf61c99 | ||
|
|
fddb7251fb | ||
|
|
839a00127d | ||
|
|
bd7113aa74 | ||
|
|
badc632ee4 | ||
|
|
eba6884abb | ||
|
|
046e8d5094 | ||
|
|
4abd471a5f | ||
|
|
c4d7a143eb | ||
|
|
7d5aa46eef | ||
|
|
133af33014 | ||
|
|
dc06a07c71 | ||
|
|
29a2a171d5 | ||
|
|
6c6fc08a45 | ||
|
|
f08de8cc8f | ||
|
|
4ff7959334 | ||
|
|
23d1b72ab5 | ||
|
|
243a3faf77 | ||
|
|
8db8110dd7 | ||
|
|
e0437afb2c | ||
|
|
9dc2597540 | ||
|
|
929fc579ec | ||
|
|
b0c6779015 | ||
|
|
476c7f723f | ||
|
|
1927262b62 | ||
|
|
d472f2e30c | ||
|
|
9aed6523ea | ||
|
|
dde260e723 | ||
|
|
7a6890bd7f | ||
|
|
0c0b7e5fcc | ||
|
|
d2336cfb0f | ||
|
|
c538c60ce0 | ||
|
|
470fd64ab5 | ||
|
|
aed42edb3f | ||
|
|
6d73f49cdf | ||
|
|
89fc777f0d | ||
|
|
b8743cfb50 | ||
|
|
82c45debb7 | ||
|
|
2510ed7bfe | ||
|
|
6a87b73ced | ||
|
|
087aaa5a55 | ||
|
|
ab3b47f134 | ||
|
|
931d70fc50 | ||
|
|
1fe15236a0 | ||
|
|
a6d2dac406 | ||
|
|
ceb93cafef | ||
|
|
386d41028a | ||
|
|
e877871dc6 | ||
|
|
6e891d9a3b | ||
|
|
f8f19af8c5 | ||
|
|
60447289bd | ||
|
|
af7297b6ae | ||
|
|
2ef3e8b691 | ||
|
|
45edb9cc51 | ||
|
|
05b124f624 | ||
|
|
433bee7134 | ||
|
|
5b76a69bab | ||
|
|
8419973d5e | ||
|
|
a9a63ab5d9 | ||
|
|
c5f17b4e83 | ||
|
|
b0f24dd258 | ||
|
|
d4bd51a205 | ||
|
|
42f767c191 | ||
|
|
404c3f096e | ||
|
|
7c6b85d960 | ||
|
|
9837d09756 | ||
|
|
52b3ba40a3 | ||
|
|
aff09598db | ||
|
|
5ca310384a | ||
|
|
a70735b8a5 | ||
|
|
a87bbd2840 | ||
|
|
ca220d18a6 | ||
|
|
c32c9e1dcb | ||
|
|
b79edc225b | ||
|
|
f2f31d8dc8 | ||
|
|
ed5c6a61a5 | ||
|
|
4846befd4a | ||
|
|
095a00441c | ||
|
|
7f4155c443 | ||
|
|
2d82567de7 | ||
|
|
678116c6d6 | ||
|
|
ddc2cd5e0f | ||
|
|
e99cbd9e5c | ||
|
|
0cc6f29f7f | ||
|
|
f8557c78d5 | ||
|
|
daa7c0ca21 | ||
|
|
54e04c8262 | ||
|
|
72b9aaeba1 | ||
|
|
899f01d5c4 | ||
|
|
a091e8e84d | ||
|
|
043284f51e | ||
|
|
8f620b9146 | ||
|
|
705116c109 | ||
|
|
f2bb9c91de | ||
|
|
33db5135b1 | ||
|
|
f071497c0e | ||
|
|
350c9f7def | ||
|
|
144b836935 | ||
|
|
0a41a2456c | ||
|
|
010802012f | ||
|
|
83f090826e | ||
|
|
261b44d906 | ||
|
|
c46e30d001 | ||
|
|
be6de587c5 | ||
|
|
6dd090acc1 | ||
|
|
65c9bb9fe1 | ||
|
|
afae57b52b | ||
|
|
628838442d | ||
|
|
7f6c8aa205 | ||
|
|
32436121c7 | ||
|
|
cdc7d2ba47 | ||
|
|
5dc184eed9 | ||
|
|
83550cd0d1 | ||
|
|
8c6e6edd36 | ||
|
|
959c09dbbd | ||
|
|
3dcc8657ba | ||
|
|
3037206108 | ||
|
|
27a4d979f3 | ||
|
|
f480dd35ca | ||
|
|
53f2943b2c | ||
|
|
a59ba000c0 | ||
|
|
a3b242c4b7 | ||
|
|
156dbc21d0 | ||
|
|
a6238a22dd | ||
|
|
bcb016caa9 | ||
|
|
a2ca6a685f | ||
|
|
105a1c3078 | ||
|
|
0dd251915d | ||
|
|
9cf63f87cc | ||
|
|
c7bc2eff62 | ||
|
|
0547cac0bf | ||
|
|
b81516a50f | ||
|
|
fd5c6fcfd3 | ||
|
|
f0bd2393fc | ||
|
|
96a14c23c4 | ||
|
|
56df7374f7 | ||
|
|
a8cac58355 | ||
|
|
6041d6128d | ||
|
|
f328c357da | ||
|
|
df6110b447 | ||
|
|
1affa5a144 | ||
|
|
b9ea4baaa8 | ||
|
|
9213649580 | ||
|
|
99c8ef1341 | ||
|
|
eeac4c0767 | ||
|
|
89e21be09d | ||
|
|
5298d7584d | ||
|
|
589fc53030 | ||
|
|
713926e68c | ||
|
|
687b034b06 | ||
|
|
a4649fa9cc | ||
|
|
d05f20002a | ||
|
|
3763e96ffb | ||
|
|
d1bfb419ca | ||
|
|
f5e2712172 | ||
|
|
ae7a6c5d64 | ||
|
|
99388f4418 | ||
|
|
3852ed6756 | ||
|
|
4748bbbe08 | ||
|
|
91e0ca1263 | ||
|
|
54becc85d6 | ||
|
|
52cb278166 | ||
|
|
c869b5ce27 | ||
|
|
2d1d0c3c48 | ||
|
|
afad0f0f79 | ||
|
|
2b1a49db1a | ||
|
|
d800f3fcd7 | ||
|
|
b1a99c826d | ||
|
|
63c938e2f5 | ||
|
|
bde35b46b9 | ||
|
|
dd791b9527 | ||
|
|
00930ebecb | ||
|
|
0f4b35d179 | ||
|
|
f5dd44c4cb | ||
|
|
9579e8647b | ||
|
|
1964a9cf4d | ||
|
|
231c7116ea | ||
|
|
06ee10be1b | ||
|
|
58bb1b4939 | ||
|
|
4e387a79f7 | ||
|
|
ffec1cfde3 | ||
|
|
a7a497a386 | ||
|
|
6b44bde9c9 | ||
|
|
99536b7dac | ||
|
|
276f5d14c5 | ||
|
|
1174772958 | ||
|
|
e5b6901b57 | ||
|
|
97cd0224f1 | ||
|
|
154e9cc7fc | ||
|
|
ebf6f38523 | ||
|
|
8bb44b5f9e | ||
|
|
b644467196 | ||
|
|
d15f6fee1a | ||
|
|
389d83eeea | ||
|
|
2a0601f75a | ||
|
|
f4b3880252 | ||
|
|
6b98f459d4 | ||
|
|
f4dad4c5e5 | ||
|
|
1cf9e0ec24 | ||
|
|
54aae456c4 | ||
|
|
0048fe022f | ||
|
|
548dd6c027 | ||
|
|
1ee0095bfe | ||
|
|
3b019ca518 | ||
|
|
93bac5fbd9 | ||
|
|
1427f2966a | ||
|
|
5457d34006 | ||
|
|
ed131f91ef | ||
|
|
395f5d41a1 | ||
|
|
18f99b1d37 | ||
|
|
f46eb878cc | ||
|
|
4dcd2c792b | ||
|
|
bc44c8ed84 | ||
|
|
b6c1fb5058 | ||
|
|
50769af895 | ||
|
|
53cd8a49da | ||
|
|
5812fd798f | ||
|
|
dfdf417cc1 | ||
|
|
d480b87de1 | ||
|
|
7fe92c1555 | ||
|
|
a3077ea4a5 | ||
|
|
1d349ff03f | ||
|
|
55804caeac | ||
|
|
d2717fb8df | ||
|
|
7af18cf51f | ||
|
|
48aa0533a5 | ||
|
|
59c6c3f12c | ||
|
|
500529b410 | ||
|
|
6cd392a935 | ||
|
|
55dbc9f9f5 | ||
|
|
09dacef0de | ||
|
|
6afec69642 | ||
|
|
a197dc290d | ||
|
|
d63c24851a | ||
|
|
d4d6f2c190 | ||
|
|
c3076f9441 | ||
|
|
f8ad764332 | ||
|
|
77ebd68146 | ||
|
|
a90287b9c3 | ||
|
|
57b605fcd5 | ||
|
|
e082ae220f | ||
|
|
867a51cf61 | ||
|
|
56a5ad829f | ||
|
|
2c4acb372a | ||
|
|
ca6bcf4e85 | ||
|
|
e8ebc821d5 | ||
|
|
31120d2d54 | ||
|
|
bd4d81842c | ||
|
|
6ee369b979 | ||
|
|
a2021a42df | ||
|
|
0252667656 | ||
|
|
1f8f5cbd12 | ||
|
|
3b0d4aadb1 | ||
|
|
c619f8c713 | ||
|
|
b12e13efb1 | ||
|
|
b4d2176280 | ||
|
|
e3d44c9e7b | ||
|
|
6627505ce4 | ||
|
|
b7d220744c | ||
|
|
287ced5bc4 | ||
|
|
3f2d2ef10d | ||
|
|
3cd4c9852a | ||
|
|
1680a21830 | ||
|
|
dc2e89fb52 | ||
|
|
f4dec37c23 | ||
|
|
13d6d0ff34 | ||
|
|
0d6b927acf | ||
|
|
00ad453f0a | ||
|
|
57b4efcf7d | ||
|
|
f6f432f768 | ||
|
|
d17c2ea5e5 | ||
|
|
72118e7d6d | ||
|
|
f213666dab | ||
|
|
f69c31be64 | ||
|
|
4f53502fad | ||
|
|
4b6eceb339 | ||
|
|
12dfa0d5ee | ||
|
|
786d528abb | ||
|
|
d494260ae6 | ||
|
|
57f3644269 | ||
|
|
45a51916aa | ||
|
|
538a6bf923 | ||
|
|
9fb6b9f923 | ||
|
|
33bb35cb99 | ||
|
|
5127df4e6d | ||
|
|
27e40c35d8 | ||
|
|
04bd41bf69 | ||
|
|
502bd9299b | ||
|
|
f6ca24256a | ||
|
|
ed3bdf4172 | ||
|
|
b2ab5423df | ||
|
|
150bc7e799 | ||
|
|
bfbf35106e | ||
|
|
b0e50dad83 | ||
|
|
e9958df2c8 | ||
|
|
e321e482b6 | ||
|
|
2f755eeb8e | ||
|
|
e8aa977886 | ||
|
|
a9529f2a05 | ||
|
|
8ccf02a26d | ||
|
|
1bb9d6ce1f | ||
|
|
8ae1e4a24a | ||
|
|
2fe4514678 | ||
|
|
84ed5ec814 | ||
|
|
e9ed1521b3 | ||
|
|
0be53493fe | ||
|
|
947522e752 | ||
|
|
c1f918ab8c | ||
|
|
fc7a771432 | ||
|
|
6d88b824a3 | ||
|
|
724c9a8897 | ||
|
|
da55b46240 | ||
|
|
b8bd846744 | ||
|
|
60bb719a53 | ||
|
|
a6a0cb325b | ||
|
|
a33563be9a | ||
|
|
81ad90b951 | ||
|
|
9b8d8ab3f7 | ||
|
|
16dce88a75 | ||
|
|
42353d940f | ||
|
|
3af34c0d56 | ||
|
|
d0c21ee506 | ||
|
|
c0f3fb5007 | ||
|
|
bd0e525d66 | ||
|
|
d982f819dc | ||
|
|
43593b4cd2 | ||
|
|
ea63879599 | ||
|
|
77fdd56720 | ||
|
|
50ea481661 | ||
|
|
0e72372e68 | ||
|
|
c77275a35c | ||
|
|
a720affd93 | ||
|
|
5931dbede3 | ||
|
|
da4336cc49 | ||
|
|
3cd4db0a92 | ||
|
|
dccd9dd77c | ||
|
|
b28cac7083 | ||
|
|
e8e5c8c5f4 | ||
|
|
56d1ffa136 | ||
|
|
df1c465fd9 | ||
|
|
171dad1eca | ||
|
|
9c4dcd38a8 | ||
|
|
365cdfa333 | ||
|
|
1f4c00dc41 | ||
|
|
24222679fb | ||
|
|
abb718c57f | ||
|
|
5163e124d8 | ||
|
|
8738421e75 | ||
|
|
af84a3a8d9 | ||
|
|
4e43f9798d | ||
|
|
1acbb33677 | ||
|
|
11f4f20120 | ||
|
|
9603d36a1f | ||
|
|
714097851a | ||
|
|
c7a5cb52e3 | ||
|
|
a1ea94aeca | ||
|
|
4489af6ad9 | ||
|
|
0271774773 | ||
|
|
fd45e7e47f | ||
|
|
5b9c1fc668 | ||
|
|
b81b008a93 | ||
|
|
4cd0e63029 | ||
|
|
bdcc3ef48c | ||
|
|
8663a7e4bf | ||
|
|
432477341c | ||
|
|
d12408cd83 | ||
|
|
9c5bda758d | ||
|
|
ffdea36b18 | ||
|
|
643c7276b5 | ||
|
|
b14c09dc00 | ||
|
|
7d07e995ab | ||
|
|
6e786170e8 | ||
|
|
0cd0ff0443 | ||
|
|
3a9c99972c | ||
|
|
14b1e61576 | ||
|
|
8613e35c15 | ||
|
|
1395aa40eb | ||
|
|
66e37d80d6 | ||
|
|
2b0a4c433c | ||
|
|
f2db31781e | ||
|
|
0164db2139 | ||
|
|
a3b3287327 | ||
|
|
2410de0ba9 | ||
|
|
c166f7c636 | ||
|
|
294d7ec602 | ||
|
|
b91c3c8e75 | ||
|
|
ef4dcb7d9e | ||
|
|
8cc24df902 | ||
|
|
1de98873c1 | ||
|
|
d09b1b7a87 | ||
|
|
f18207f21c | ||
|
|
bb106eba01 | ||
|
|
6efdb74027 | ||
|
|
5fc04e09eb | ||
|
|
5041651c21 | ||
|
|
e5a2d41206 | ||
|
|
9bb73f10b7 | ||
|
|
4238a3bf22 | ||
|
|
3272c30b20 | ||
|
|
5a4c5eee83 | ||
|
|
d310d651b8 | ||
|
|
707c23fa83 | ||
|
|
3bb2af2d4f | ||
|
|
f6ba447f1b | ||
|
|
174f1c7dcb | ||
|
|
14737b7e31 | ||
|
|
d8689e2bf8 | ||
|
|
137ba0789b | ||
|
|
42a63c398b | ||
|
|
aac2216aef | ||
|
|
977ac3ae98 | ||
|
|
09bfc0549e | ||
|
|
c15bb9e9b4 | ||
|
|
5d9332f23c | ||
|
|
893d6ff40e | ||
|
|
20a652e0eb | ||
|
|
21389070fb | ||
|
|
3341a3be15 | ||
|
|
1c80cb2cd5 | ||
|
|
41e0d4cddc | ||
|
|
b71dfb8047 | ||
|
|
fbdb8e2bc8 | ||
|
|
7a22802e4a | ||
|
|
c7f637348a | ||
|
|
863b0ac2f1 | ||
|
|
85ec8c6904 | ||
|
|
4c067de5ed | ||
|
|
05358de800 | ||
|
|
7585bb00ea | ||
|
|
2b27624f42 | ||
|
|
7e28939a0c | ||
|
|
2df843fe5b | ||
|
|
e56b3e46d3 | ||
|
|
fdc819e1c8 | ||
|
|
2ab306e955 | ||
|
|
38ff118276 | ||
|
|
da09f50253 | ||
|
|
1778620fe2 | ||
|
|
bf5d121a9e | ||
|
|
9c5a6e2e99 | ||
|
|
6557d640f7 | ||
|
|
68a8293179 | ||
|
|
fe190658c1 | ||
|
|
308a26b9d8 | ||
|
|
9e634c783f | ||
|
|
57d31bcb11 | ||
|
|
7f62e84096 | ||
|
|
6244bbdfe4 | ||
|
|
fb0a916c50 | ||
|
|
4da37087b6 | ||
|
|
7f8517afc5 | ||
|
|
bfd66c421c | ||
|
|
a058710fed | ||
|
|
d9313d670a | ||
|
|
c000cd5345 | ||
|
|
7b5f8dd9ac | ||
|
|
e0216233e0 | ||
|
|
4fc884bfa9 | ||
|
|
0401ae7805 | ||
|
|
852d65e02a | ||
|
|
7f66b6bc9d | ||
|
|
f554001ccb | ||
|
|
a4467cf5bf | ||
|
|
3118bbaa51 | ||
|
|
541a37b8f1 | ||
|
|
a3849c4b55 | ||
|
|
fc75fb64d2 | ||
|
|
e800349ed5 | ||
|
|
19feb9e506 | ||
|
|
0c8297d5c6 | ||
|
|
4cdab2b98b | ||
|
|
b5d88f9aa7 | ||
|
|
23cd298de0 | ||
|
|
b335add0d5 | ||
|
|
304ae4fbc8 | ||
|
|
339917c131 | ||
|
|
7b13384fd4 | ||
|
|
7ef6eae97a | ||
|
|
6d7814fe24 | ||
|
|
67721a45ee | ||
|
|
73d34ea69b | ||
|
|
351236b16a | ||
|
|
ecb39e41c5 | ||
|
|
0f403ffd34 | ||
|
|
39cdbbaeb9 | ||
|
|
226a2245ba | ||
|
|
492ff1a7c9 | ||
|
|
c3ab3e5203 | ||
|
|
79b29f7913 | ||
|
|
b95fa0664c | ||
|
|
e23cc166f7 | ||
|
|
e8558d8f77 | ||
|
|
0f834a6b0d | ||
|
|
a60022c3cc | ||
|
|
ed01db408a | ||
|
|
7b1a1be74f | ||
|
|
0485134343 | ||
|
|
0c37be302c | ||
|
|
9a3a8d1464 | ||
|
|
8657e032a7 | ||
|
|
cd5e1ff4ed | ||
|
|
7bd59f0c84 | ||
|
|
19c6a26ef5 | ||
|
|
2e35b2f20d | ||
|
|
cd1a2746ce | ||
|
|
3a68f29c0f | ||
|
|
eb3669e0a8 | ||
|
|
1a824f0162 | ||
|
|
8642588694 | ||
|
|
43643fb079 | ||
|
|
6d1d890fd0 | ||
|
|
eab19b30cf | ||
|
|
e0f1330e3a | ||
|
|
873dd41460 | ||
|
|
697fdc235d | ||
|
|
0033c7cf96 | ||
|
|
bff0a3aad4 | ||
|
|
943b034c7d | ||
|
|
cacc9bc165 | ||
|
|
bf9bdded9f | ||
|
|
3a9a8dad3f | ||
|
|
dc649bf523 | ||
|
|
ad8484533d | ||
|
|
3854d43e8a | ||
|
|
5d1bd495a5 | ||
|
|
2478cf7bc3 | ||
|
|
15588fa82a | ||
|
|
813357cff6 | ||
|
|
97ad9e726a | ||
|
|
786c16cc6f | ||
|
|
1a59b8cecb | ||
|
|
a87b6333c3 | ||
|
|
899822a581 | ||
|
|
dd503767a2 | ||
|
|
9a8b76517c | ||
|
|
655a4a159a | ||
|
|
b046ee3e22 | ||
|
|
d55b1dd2f8 | ||
|
|
638dbf178e | ||
|
|
60a888c9d7 | ||
|
|
79287d5748 | ||
|
|
1befd10a1c | ||
|
|
1108e6c221 | ||
|
|
ea1600d6c3 | ||
|
|
601850005e | ||
|
|
4f45bd8136 | ||
|
|
1a75459004 | ||
|
|
f227d35238 | ||
|
|
148a1e5d32 | ||
|
|
314cb03693 | ||
|
|
aa175000e3 | ||
|
|
a5acaf0556 | ||
|
|
614cf7774f | ||
|
|
8f54ab3e3a | ||
|
|
2c210124e8 | ||
|
|
5c125a33df | ||
|
|
9ecad0972e | ||
|
|
8b485ba44f | ||
|
|
1c68c819ca | ||
|
|
dd308db17b | ||
|
|
4f70a5dff2 | ||
|
|
28e32df318 | ||
|
|
e9de9e728d | ||
|
|
f709c7723f | ||
|
|
3c3b5bf3d8 | ||
|
|
73007b49fa | ||
|
|
f74994a735 | ||
|
|
ef3c090360 | ||
|
|
7de6800c54 | ||
|
|
b002069284 | ||
|
|
a465da8a8e | ||
|
|
868262f625 | ||
|
|
b733a8738d | ||
|
|
55ad2f3931 | ||
|
|
fe6412e1d4 | ||
|
|
0039c5234e | ||
|
|
e2a45095b2 | ||
|
|
daefa99a9d | ||
|
|
8590709480 | ||
|
|
18d3ac8df5 | ||
|
|
06df084342 | ||
|
|
3602483f6c | ||
|
|
e0a4cafd46 | ||
|
|
43c5feb3cc | ||
|
|
df0801d6d9 | ||
|
|
4820218a11 | ||
|
|
c6e1bb55f9 | ||
|
|
4f398b86bb | ||
|
|
33bfd0eed8 | ||
|
|
a4d689e7c3 | ||
|
|
239e5f52e8 | ||
|
|
3ea2d9c0ae | ||
|
|
10e1201083 | ||
|
|
b4f25408c4 | ||
|
|
70284572f5 | ||
|
|
0b3847a8b1 | ||
|
|
cc2b0c2eb1 | ||
|
|
8bd0f3da21 | ||
|
|
af20587846 | ||
|
|
11637c5244 | ||
|
|
c501641c91 | ||
|
|
43eff865ff | ||
|
|
0f05910f63 | ||
|
|
52c520d2c3 | ||
|
|
28add39a51 | ||
|
|
bd850fb357 | ||
|
|
05019f7236 | ||
|
|
363008d37f | ||
|
|
04dd8367a1 | ||
|
|
7a084b6589 | ||
|
|
5c5239097d | ||
|
|
e9fd7f4554 | ||
|
|
b8e521164b | ||
|
|
1eb600f881 | ||
|
|
5c512ae7b4 | ||
|
|
18e9a2e4d8 | ||
|
|
a6f2de922c | ||
|
|
f75dc662ee | ||
|
|
5243e4a095 | ||
|
|
888da28e91 | ||
|
|
c548381590 | ||
|
|
f5a709b268 | ||
|
|
693796a68d | ||
|
|
6e9c34baf7 | ||
|
|
6f066357ed | ||
|
|
a38715f18a | ||
|
|
a15d92dbdc | ||
|
|
3b48cb5816 | ||
|
|
e196aefcd3 | ||
|
|
8a81678bb4 | ||
|
|
44b7198bdb | ||
|
|
11322e189f | ||
|
|
dd68ba9d61 | ||
|
|
193cfba9ec | ||
|
|
54e8421953 | ||
|
|
8053ff9eb8 | ||
|
|
31d0b8d336 | ||
|
|
ff2a81e41f | ||
|
|
23cbeeedc3 | ||
|
|
1bd58e1327 | ||
|
|
e87d25de85 | ||
|
|
818f4757ca | ||
|
|
cd4f72dd31 | ||
|
|
20c467e615 | ||
|
|
3f6fb1561b | ||
|
|
b739c94143 | ||
|
|
4e91c80bcc | ||
|
|
afe05b2a58 | ||
|
|
5cb4b64383 | ||
|
|
4618dea029 | ||
|
|
4cd0208bb9 | ||
|
|
e636bc5bd0 | ||
|
|
9530c695d3 | ||
|
|
67aa89c948 | ||
|
|
19879ef279 | ||
|
|
c3096d9574 | ||
|
|
244d3d10dc | ||
|
|
1643b9dd86 | ||
|
|
7b25812e10 | ||
|
|
8378e0d062 | ||
|
|
848f9c5eed | ||
|
|
487fd27912 | ||
|
|
b877e84f4e | ||
|
|
4a414bf5e9 | ||
|
|
81015bab3c | ||
|
|
60235a4078 | ||
|
|
731d42863c | ||
|
|
f3802bff04 | ||
|
|
5bac278be4 | ||
|
|
1dc2b4d9ef | ||
|
|
ebad6c3e93 | ||
|
|
387f140ba8 | ||
|
|
43d0e272ed | ||
|
|
9ddd26d952 | ||
|
|
102d8b08c1 | ||
|
|
9d57ecd6f3 | ||
|
|
e148068edd | ||
|
|
d84c869b14 | ||
|
|
a1ce164e7b | ||
|
|
5271c9f75c | ||
|
|
091dd5af70 | ||
|
|
766a4e8e8a | ||
|
|
8d4734cb8a | ||
|
|
fb1fcc573f | ||
|
|
0847b32e87 | ||
|
|
a773e169fc | ||
|
|
77034c4749 | ||
|
|
2e9b7fdad2 | ||
|
|
b519c0814f | ||
|
|
14d486f299 | ||
|
|
8090a7be24 | ||
|
|
40019892b8 | ||
|
|
ced44973b8 | ||
|
|
e41bcffcef | ||
|
|
0e653793eb | ||
|
|
d9504b6152 | ||
|
|
264720c71a | ||
|
|
a82c701087 | ||
|
|
c92b809883 | ||
|
|
0154199161 | ||
|
|
1f46c82ff7 | ||
|
|
d0eb91fcd4 | ||
|
|
56e35df84d | ||
|
|
4445abfa05 | ||
|
|
24b7d64efc | ||
|
|
6efc327819 | ||
|
|
ff123d50f2 | ||
|
|
964269dc46 | ||
|
|
1ad897782c | ||
|
|
23764caedf | ||
|
|
2c8c76afa6 | ||
|
|
d2fa38f081 | ||
|
|
92906ea2fa | ||
|
|
46d5faf59f | ||
|
|
b913af9f88 | ||
|
|
a986c6de2d | ||
|
|
d0ffe6c611 | ||
|
|
5ddbf5fb34 | ||
|
|
2a0d87a393 | ||
|
|
8c759676d9 | ||
|
|
8837b872af | ||
|
|
1ab02b706f | ||
|
|
61f72e6775 | ||
|
|
83edc1fbc7 | ||
|
|
e5f5d887e3 | ||
|
|
a466bbca38 | ||
|
|
2c11caf87e | ||
|
|
3643c8866e | ||
|
|
90c7420c34 | ||
|
|
8f778ee90f | ||
|
|
5f075c8554 | ||
|
|
dc87f37a9b | ||
|
|
a0e2f16a3b | ||
|
|
6c0c2a00d6 | ||
|
|
03333cc4c2 | ||
|
|
2d80d6962b | ||
|
|
7ce0625047 | ||
|
|
9afbfd609a | ||
|
|
e8a9e8e28c | ||
|
|
981675f365 | ||
|
|
8ccbb56f95 | ||
|
|
40957f7686 | ||
|
|
523c745150 | ||
|
|
718ee8dfa9 | ||
|
|
716fd96d56 | ||
|
|
5edf121d96 | ||
|
|
895c3d4246 | ||
|
|
8129c2319f | ||
|
|
b5edc12b28 | ||
|
|
ba59425e6a | ||
|
|
03e15fb70f | ||
|
|
f44f291bd8 | ||
|
|
f3aa40bc0b | ||
|
|
4080dc4b65 | ||
|
|
c6481d4668 | ||
|
|
755b2f234b | ||
|
|
3c721cb97d | ||
|
|
a4ebf69bc9 | ||
|
|
a221fdf09e | ||
|
|
10d4b3f4e0 | ||
|
|
de454381ab | ||
|
|
92f2df1e89 | ||
|
|
b0087fca49 | ||
|
|
7a24e2d325 | ||
|
|
6ca6ee3fb8 | ||
|
|
32ad4aeab3 | ||
|
|
b37d4a5c7e | ||
|
|
a6ca2e2bf0 | ||
|
|
f7d318d20c | ||
|
|
267f3ab051 | ||
|
|
f56879bde0 | ||
|
|
9ce935e989 | ||
|
|
26659c0c6c | ||
|
|
66cb12316a | ||
|
|
f354817322 | ||
|
|
25cd8339f8 | ||
|
|
d024e7fa51 | ||
|
|
5a972ffb9e | ||
|
|
4127184cbd | ||
|
|
651d75715c | ||
|
|
be191015b6 | ||
|
|
98d56e6858 | ||
|
|
41f7323b20 | ||
|
|
570f6b330c | ||
|
|
09b01543e2 | ||
|
|
b0f6d057d9 | ||
|
|
777b0d3036 | ||
|
|
59d92d4c9c | ||
|
|
92267396f3 | ||
|
|
356b233780 | ||
|
|
c47d049920 | ||
|
|
037357e04c | ||
|
|
f7ce83aba4 | ||
|
|
0c78aadf1e | ||
|
|
d81570c86c | ||
|
|
12ee329895 | ||
|
|
33eda227a5 | ||
|
|
462073276c | ||
|
|
9a266db745 | ||
|
|
9045eeb1d6 | ||
|
|
53098699ef | ||
|
|
72d4575e32 | ||
|
|
9867634b5a | ||
|
|
b14f54e866 | ||
|
|
4c562c3e12 | ||
|
|
6b4e556a7a | ||
|
|
e2d9533e79 | ||
|
|
4983247918 | ||
|
|
d6891a5628 | ||
|
|
72d178f3c3 | ||
|
|
331e2bd35c | ||
|
|
44878d6103 | ||
|
|
ff46a1b009 | ||
|
|
4caa2a5322 | ||
|
|
3df98b87a3 | ||
|
|
f1ecdc4d5a | ||
|
|
dc154687a5 | ||
|
|
e7c5a398c4 | ||
|
|
eeecf26893 | ||
|
|
a2efa357fa | ||
|
|
02190c5cbd | ||
|
|
19a60bc973 | ||
|
|
5e8a27b010 | ||
|
|
5d628a2748 | ||
|
|
87e33497ce | ||
|
|
944b813f45 | ||
|
|
f93deb6749 | ||
|
|
be8c4f8efa | ||
|
|
3f6843aa53 | ||
|
|
2c2f179e1d | ||
|
|
81110da8ea | ||
|
|
024992264f | ||
|
|
27c6ac6c8d | ||
|
|
0bc2f82ec0 | ||
|
|
db0997ffd4 | ||
|
|
2122adf99f | ||
|
|
6082959d17 | ||
|
|
358d8f83f9 | ||
|
|
da5865f3b4 | ||
|
|
b0b49f44f6 | ||
|
|
6fdfe760e8 | ||
|
|
080bef214b | ||
|
|
87e1754e3a | ||
|
|
87a8bc7359 | ||
|
|
7c63914e64 | ||
|
|
ba1ddacde3 | ||
|
|
8334076047 | ||
|
|
cf0a232001 | ||
|
|
143f2f5c58 | ||
|
|
7f08f0fa79 | ||
|
|
f46ab45e0e | ||
|
|
dbc4bed40f | ||
|
|
886b1cbc54 | ||
|
|
acd3be3a2a | ||
|
|
36c1306390 | ||
|
|
6af17c6455 | ||
|
|
c0b59ece4d | ||
|
|
87c8fdf5ac | ||
|
|
15f42fb269 | ||
|
|
aee8624338 | ||
|
|
c5e0c93ab4 | ||
|
|
5b7f0c1308 | ||
|
|
3091bb0e5e | ||
|
|
9471481e33 | ||
|
|
302b73ae73 | ||
|
|
1aefb8f7ab | ||
|
|
194093d95d | ||
|
|
17be901d1a | ||
|
|
ba3c07eba8 | ||
|
|
8fe376340a | ||
|
|
cfe657faf5 | ||
|
|
062af9bcda | ||
|
|
338232b173 | ||
|
|
3949515c8a | ||
|
|
c62b6e9842 | ||
|
|
031b3517dc | ||
|
|
8e603bd5db | ||
|
|
c3cb192486 | ||
|
|
c8a08f1115 | ||
|
|
670013baa0 | ||
|
|
98bef7b7cf | ||
|
|
daf372f7ec | ||
|
|
4087b94d97 | ||
|
|
6e0c582f1e | ||
|
|
3ffaa1ef7f | ||
|
|
0cce4a0c83 | ||
|
|
ba1ba89fa3 | ||
|
|
592b3e69a5 | ||
|
|
e139f88c0a | ||
|
|
2599f31115 | ||
|
|
5217437912 | ||
|
|
2d05134cb1 | ||
|
|
ac67247636 | ||
|
|
fe9936cf5b | ||
|
|
6956ed1e1c | ||
|
|
9eee7dca52 | ||
|
|
71c5c52c54 | ||
|
|
85d90865cc | ||
|
|
e614702d18 | ||
|
|
163952b914 | ||
|
|
882af35e03 | ||
|
|
e31921ba4d | ||
|
|
1c9342fc08 | ||
|
|
48b2773c8f | ||
|
|
ecf83d03c5 | ||
|
|
3ddb32e2c7 | ||
|
|
846efb3126 | ||
|
|
f759fae55f | ||
|
|
1ffb7f213d | ||
|
|
edf5c77dd6 | ||
|
|
fb01216ae2 | ||
|
|
e9b657ae4e | ||
|
|
28c6ba10f1 | ||
|
|
d4c014da1d | ||
|
|
4e52410673 | ||
|
|
7d6fd658f4 | ||
|
|
7d12de1c4a | ||
|
|
e572feff91 | ||
|
|
0f84a57545 | ||
|
|
49166d0209 |
31
.claude/commands/dedupe.md
Normal file
31
.claude/commands/dedupe.md
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
---
|
||||||
|
allowed-tools: Bash(gh issue view:*), Bash(gh search:*), Bash(gh issue list:*), Bash(gh api:*), Bash(gh issue comment:*)
|
||||||
|
description: Find duplicate GitHub issues
|
||||||
|
---
|
||||||
|
|
||||||
|
Find up to 3 likely duplicate issues for a given GitHub issue.
|
||||||
|
|
||||||
|
To do this, follow these steps precisely:
|
||||||
|
|
||||||
|
1. Use an agent to check if the Github issue (a) is closed, (b) does not need to be deduped (eg. because it is broad product feedback without a specific solution, or positive feedback), or (c) already has a duplicates comment that you made earlier. If so, do not proceed.
|
||||||
|
2. Use an agent to view a Github issue, and ask the agent to return a summary of the issue
|
||||||
|
3. Then, launch 5 parallel agents to search Github for duplicates of this issue, using diverse keywords and search approaches, using the summary from #1
|
||||||
|
4. Next, feed the results from #1 and #2 into another agent, so that it can filter out false positives, that are likely not actually duplicates of the original issue. If there are no duplicates remaining, do not proceed.
|
||||||
|
5. Finally, comment back on the issue with a list of up to three duplicate issues (or zero, if there are no likely duplicates)
|
||||||
|
|
||||||
|
Notes (be sure to tell this to your agents, too):
|
||||||
|
|
||||||
|
- Use `gh` to interact with Github, rather than web fetch
|
||||||
|
- Do not use other tools, beyond `gh` (eg. don't use other MCP servers, file edit, etc.)
|
||||||
|
- Make a todo list first
|
||||||
|
- For your comment, follow the following format precisely (assuming for this example that you found 3 suspected duplicates):
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Found 3 possible duplicate issues:
|
||||||
|
|
||||||
|
1. <link to issue>
|
||||||
|
2. <link to issue>
|
||||||
|
3. <link to issue>
|
||||||
|
|
||||||
|
---
|
||||||
@ -1,4 +1,20 @@
|
|||||||
{
|
{
|
||||||
"name": "jan",
|
"name": "Jan",
|
||||||
"image": "node:20"
|
"image": "mcr.microsoft.com/devcontainers/base:jammy",
|
||||||
|
"features": {
|
||||||
|
"ghcr.io/devcontainers/features/node:1": {
|
||||||
|
"version": "20"
|
||||||
|
},
|
||||||
|
"ghcr.io/devcontainers/features/rust:1": {},
|
||||||
|
"ghcr.io/devcontainers-extra/features/corepack:1": {}
|
||||||
|
},
|
||||||
|
|
||||||
|
"postCreateCommand": "./.devcontainer/postCreateCommand.sh",
|
||||||
|
|
||||||
|
// appimagekit requires fuse to package appimage, to use fuse in the container you need to enable it on the host
|
||||||
|
"runArgs": [
|
||||||
|
"--device", "/dev/fuse",
|
||||||
|
"--cap-add=SYS_ADMIN",
|
||||||
|
"--security-opt", "apparmor:unconfined"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
16
.devcontainer/postCreateCommand.sh
Executable file
16
.devcontainer/postCreateCommand.sh
Executable file
@ -0,0 +1,16 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
# install tauri prerequisites + xdg-utils for xdg-open + libfuse2 for using appimagekit
|
||||||
|
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -yqq libwebkit2gtk-4.1-dev \
|
||||||
|
build-essential \
|
||||||
|
curl \
|
||||||
|
wget \
|
||||||
|
file \
|
||||||
|
libxdo-dev \
|
||||||
|
libssl-dev \
|
||||||
|
libayatana-appindicator3-dev \
|
||||||
|
librsvg2-dev \
|
||||||
|
xdg-utils \
|
||||||
|
libfuse2
|
||||||
24
.github/ISSUE_TEMPLATE/1-bug-report.md
vendored
Normal file
24
.github/ISSUE_TEMPLATE/1-bug-report.md
vendored
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
---
|
||||||
|
name: 🐛 Bug Report
|
||||||
|
about: If something isn't working as expected 🤔
|
||||||
|
title: 'bug: '
|
||||||
|
type: Bug
|
||||||
|
---
|
||||||
|
|
||||||
|
**Version:** e.g. 0.5.x-xxx
|
||||||
|
|
||||||
|
## Describe the Bug
|
||||||
|
<!-- A clear & concise description of the bug -->
|
||||||
|
|
||||||
|
|
||||||
|
## Steps to Reproduce
|
||||||
|
1.
|
||||||
|
|
||||||
|
## Screenshots / Logs
|
||||||
|
<!-- You can find logs in: Setting -> General -> Data Folder -> App Logs -->
|
||||||
|
|
||||||
|
|
||||||
|
## Operating System
|
||||||
|
- [ ] MacOS
|
||||||
|
- [ ] Windows
|
||||||
|
- [ ] Linux
|
||||||
12
.github/ISSUE_TEMPLATE/2-feature-request.md
vendored
Normal file
12
.github/ISSUE_TEMPLATE/2-feature-request.md
vendored
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
---
|
||||||
|
name: 🚀 Feature Request
|
||||||
|
about: Suggest an idea for this project 😻!
|
||||||
|
title: 'idea: '
|
||||||
|
type: Idea
|
||||||
|
---
|
||||||
|
|
||||||
|
## Problem Statement
|
||||||
|
<!-- Describe the problem you're facing -->
|
||||||
|
|
||||||
|
## Feature Idea
|
||||||
|
<!-- Describe what you want instead. Examples are welcome! -->
|
||||||
27
.github/ISSUE_TEMPLATE/3-epic.md
vendored
Normal file
27
.github/ISSUE_TEMPLATE/3-epic.md
vendored
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
---
|
||||||
|
name: 🌟 Epic
|
||||||
|
about: User stories and specs
|
||||||
|
title: 'epic: '
|
||||||
|
type: Epic
|
||||||
|
---
|
||||||
|
|
||||||
|
## User Stories
|
||||||
|
|
||||||
|
- As a [user type], I can [do something] so that [outcome]
|
||||||
|
|
||||||
|
## Not in scope
|
||||||
|
|
||||||
|
-
|
||||||
|
|
||||||
|
## User Flows & Designs
|
||||||
|
|
||||||
|
- Key user flows
|
||||||
|
- Figma link
|
||||||
|
- Edge cases
|
||||||
|
- Error states
|
||||||
|
|
||||||
|
## Engineering Decisions
|
||||||
|
|
||||||
|
- **Technical Approach:** Brief outline of the solution.
|
||||||
|
- **Key Trade-offs:** What’s been considered/rejected and why.
|
||||||
|
- **Dependencies:** APIs, services, libraries, teams.
|
||||||
24
.github/ISSUE_TEMPLATE/4-goal.md
vendored
Normal file
24
.github/ISSUE_TEMPLATE/4-goal.md
vendored
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
---
|
||||||
|
name: 🎯 Goal
|
||||||
|
about: Roadmap goals for our users
|
||||||
|
title: 'goal: '
|
||||||
|
type: Goal
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🎯 Goal
|
||||||
|
<!-- Short description of our goal -->
|
||||||
|
|
||||||
|
## 📖 Context
|
||||||
|
<!-- Give a description of our current context -->
|
||||||
|
|
||||||
|
## ✅ Scope
|
||||||
|
<!-- High lever description of what we are going to deliver -->
|
||||||
|
|
||||||
|
## ❌ Out of Scope
|
||||||
|
<!-- What we are not targeting / delivering / discussing in this goal -->
|
||||||
|
|
||||||
|
## 🛠 Deliverables
|
||||||
|
<!-- What we are the tangible deliverables for this goal -->
|
||||||
|
|
||||||
|
## ❓Open questions
|
||||||
|
<!-- What are we not sure about and need to discuss more -->
|
||||||
37
.github/ISSUE_TEMPLATE/bug_report.md
vendored
37
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@ -1,37 +0,0 @@
|
|||||||
---
|
|
||||||
name: "🖋️ Report"
|
|
||||||
about: Create a report to help us improve Jan
|
|
||||||
title: 'bug: [DESCRIPTION]'
|
|
||||||
labels: 'type: bug'
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Describe the bug**
|
|
||||||
A clear and concise description of what the bug is.
|
|
||||||
|
|
||||||
**Steps to reproduce**
|
|
||||||
Steps to reproduce the behavior:
|
|
||||||
1. Go to '...'
|
|
||||||
2. Click on '....'
|
|
||||||
3. Scroll down to '....'
|
|
||||||
4. See error
|
|
||||||
|
|
||||||
**Expected behavior**
|
|
||||||
A clear and concise description of what you expected to happen.
|
|
||||||
|
|
||||||
**Screenshots**
|
|
||||||
If applicable, add screenshots to help explain your issue.
|
|
||||||
|
|
||||||
**Environment details**
|
|
||||||
- Operating System: [Specify your OS. e.g., MacOS Sonoma 14.2.1, Windows 11, Ubuntu 22, etc]
|
|
||||||
- Jan Version: [e.g., 0.4.xxx nightly or manual]
|
|
||||||
- Processor: [e.g., Apple M1, Intel Core i7, AMD Ryzen 5, etc]
|
|
||||||
- RAM: [e.g., 8GB, 16GB]
|
|
||||||
- Any additional relevant hardware specifics: [e.g., Graphics card, SSD/HDD]
|
|
||||||
|
|
||||||
**Logs**
|
|
||||||
If the cause of the error is not clear, kindly provide your usage logs: https://jan.ai/docs/troubleshooting#how-to-get-error-logs
|
|
||||||
|
|
||||||
**Additional context**
|
|
||||||
Add any other context or information that could be helpful in diagnosing the problem.
|
|
||||||
82
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
82
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
@ -1,82 +0,0 @@
|
|||||||
name: "\U0001F41B Bug Report"
|
|
||||||
description: "If something isn't working as expected \U0001F914"
|
|
||||||
labels: [ "type: bug" ]
|
|
||||||
title: 'bug: [DESCRIPTION]'
|
|
||||||
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: "Thanks for taking the time to fill out this bug report!"
|
|
||||||
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: "#"
|
|
||||||
description: "Please search [here](./?q=is%3Aissue) to see if an issue already exists for the bug you encountered"
|
|
||||||
options:
|
|
||||||
- label: "I have searched the existing issues"
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
attributes:
|
|
||||||
label: "Current behavior"
|
|
||||||
description: "A clear and concise description of what the bug is"
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
attributes:
|
|
||||||
label: "Minimum reproduction step"
|
|
||||||
description: |
|
|
||||||
Please list out steps to reproduce the behavior
|
|
||||||
placeholder: |
|
|
||||||
1. Go to '...'
|
|
||||||
2. Click on '....'
|
|
||||||
3. Scroll down to '....'
|
|
||||||
4. See error
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
attributes:
|
|
||||||
label: "Expected behavior"
|
|
||||||
description: "A clear and concise description of what you expected to happen"
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
attributes:
|
|
||||||
label: "Screenshots / Logs"
|
|
||||||
description: |
|
|
||||||
Kindly provide your screenshots / [usage logs](https://jan.ai/docs/troubleshooting#how-to-get-error-logs) that could be helpful in diagnosing the problem
|
|
||||||
**Tip:** You can attach images, recordings or log files by clicking this area to highlight it and then dragging files in
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: |
|
|
||||||
---
|
|
||||||
|
|
||||||
- type: input
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
attributes:
|
|
||||||
label: "Jan version"
|
|
||||||
description: "**Tip:** The version is located in the lower right conner of the Jan app"
|
|
||||||
placeholder: "e.g. 0.5.x-xxx nightly or stable"
|
|
||||||
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: "In which operating systems have you tested?"
|
|
||||||
options:
|
|
||||||
- label: macOS
|
|
||||||
- label: Windows
|
|
||||||
- label: Linux
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: "Environment details"
|
|
||||||
description: |
|
|
||||||
- Operating System: [Specify your OS details: e.g., MacOS Sonoma 14.2.1, Windows 11, Ubuntu 22, etc]
|
|
||||||
- Processor: [e.g., Apple M1, Intel Core i7, AMD Ryzen 5, etc]
|
|
||||||
- RAM: [e.g., 8GB, 16GB]
|
|
||||||
- Any additional relevant hardware specifics: [e.g., Graphics card, SSD/HDD]
|
|
||||||
8
.github/ISSUE_TEMPLATE/config.yml
vendored
8
.github/ISSUE_TEMPLATE/config.yml
vendored
@ -1,7 +1,5 @@
|
|||||||
## To encourage contributors to use issue templates, we don't allow blank issues
|
|
||||||
blank_issues_enabled: true
|
blank_issues_enabled: true
|
||||||
|
|
||||||
contact_links:
|
contact_links:
|
||||||
- name: "\u2753 Our GitHub Discussions page"
|
- name: Jan Discussions
|
||||||
url: "https://github.com/orgs/janhq/discussions/categories/q-a"
|
url: https://github.com/orgs/janhq/discussions/categories/q-a
|
||||||
about: "Please ask and answer questions here!"
|
about: Get help, discuss features & roadmap, and share your projects
|
||||||
|
|||||||
17
.github/ISSUE_TEMPLATE/documentation-request.md
vendored
17
.github/ISSUE_TEMPLATE/documentation-request.md
vendored
@ -1,17 +0,0 @@
|
|||||||
---
|
|
||||||
name: "📖 Documentation request"
|
|
||||||
about: Documentation requests
|
|
||||||
title: 'docs: TITLE'
|
|
||||||
labels: 'type: documentation'
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Pages**
|
|
||||||
- Page(s) that need to be done
|
|
||||||
|
|
||||||
**Success Criteria**
|
|
||||||
Content that should be covered
|
|
||||||
|
|
||||||
**Additional context**
|
|
||||||
Examples, reference pages, resources
|
|
||||||
25
.github/ISSUE_TEMPLATE/epic-request.md
vendored
25
.github/ISSUE_TEMPLATE/epic-request.md
vendored
@ -1,25 +0,0 @@
|
|||||||
---
|
|
||||||
name: "💥 Epic request"
|
|
||||||
about: Suggest an idea for this project
|
|
||||||
title: 'epic: [DESCRIPTION]'
|
|
||||||
labels: 'type: epic'
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## Motivation
|
|
||||||
-
|
|
||||||
|
|
||||||
## Specs
|
|
||||||
-
|
|
||||||
|
|
||||||
## Designs
|
|
||||||
[Figma](link)
|
|
||||||
|
|
||||||
## Tasklist
|
|
||||||
- [ ]
|
|
||||||
|
|
||||||
## Not in Scope
|
|
||||||
-
|
|
||||||
|
|
||||||
## Appendix
|
|
||||||
44
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
44
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
@ -1,44 +0,0 @@
|
|||||||
name: "\U0001F680 Feature Request"
|
|
||||||
description: "Suggest an idea for this project \U0001F63B!"
|
|
||||||
title: 'feat: [DESCRIPTION]'
|
|
||||||
labels: 'type: feature request'
|
|
||||||
body:
|
|
||||||
- type: markdown
|
|
||||||
attributes:
|
|
||||||
value: "Thanks for taking the time to fill out this form!"
|
|
||||||
|
|
||||||
- type: checkboxes
|
|
||||||
attributes:
|
|
||||||
label: "#"
|
|
||||||
description: "Please search [here](./?q=is%3Aissue) to see if an issue already exists for the feature you are requesting"
|
|
||||||
options:
|
|
||||||
- label: "I have searched the existing issues"
|
|
||||||
required: false
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
attributes:
|
|
||||||
label: "Is your feature request related to a problem? Please describe it"
|
|
||||||
description: "A clear and concise description of what the problem is"
|
|
||||||
placeholder: |
|
|
||||||
I'm always frustrated when ...
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
attributes:
|
|
||||||
label: "Describe the solution"
|
|
||||||
description: "Description of what you want to happen. Add any considered drawbacks"
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
attributes:
|
|
||||||
label: "Teachability, documentation, adoption, migration strategy"
|
|
||||||
description: "Explain how users will be able to use this and possibly write out something for the docs. Maybe a screenshot or design?"
|
|
||||||
|
|
||||||
- type: textarea
|
|
||||||
validations:
|
|
||||||
required: true
|
|
||||||
attributes:
|
|
||||||
label: "What is the motivation / use case for changing the behavior?"
|
|
||||||
description: "Describe the motivation or the concrete use case"
|
|
||||||
19
.github/dependabot.yaml
vendored
Normal file
19
.github/dependabot.yaml
vendored
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
# https://docs.github.com/en/code-security/dependabot/working-with-dependabot/dependabot-options-reference#package-ecosystem-
|
||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: 'cargo'
|
||||||
|
directory: 'src-tauri'
|
||||||
|
schedule:
|
||||||
|
interval: 'weekly'
|
||||||
|
open-pull-requests-limit: 0
|
||||||
|
- package-ecosystem: 'npm'
|
||||||
|
directories:
|
||||||
|
- '/'
|
||||||
|
- 'core'
|
||||||
|
- 'docs'
|
||||||
|
- 'extensions'
|
||||||
|
- 'extensions/*'
|
||||||
|
- 'web-app'
|
||||||
|
schedule:
|
||||||
|
interval: 'weekly'
|
||||||
|
open-pull-requests-limit: 0
|
||||||
BIN
.github/scripts/icon-beta.png
vendored
Normal file
BIN
.github/scripts/icon-beta.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 49 KiB |
BIN
.github/scripts/icon-nightly.png
vendored
Normal file
BIN
.github/scripts/icon-nightly.png
vendored
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 49 KiB |
55
.github/scripts/rename-app.sh
vendored
Normal file
55
.github/scripts/rename-app.sh
vendored
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Check if the correct number of arguments is provided
|
||||||
|
if [ "$#" -ne 2 ]; then
|
||||||
|
echo "Usage: $0 <path_to_json_input_file> <channel>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
INPUT_JSON_FILE="$1"
|
||||||
|
|
||||||
|
CHANNEL="$2"
|
||||||
|
|
||||||
|
if [ "$CHANNEL" == "nightly" ]; then
|
||||||
|
UPDATER="latest"
|
||||||
|
else
|
||||||
|
UPDATER="beta"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if the input file exists
|
||||||
|
if [ ! -f "$INPUT_JSON_FILE" ]; then
|
||||||
|
echo "Input file not found: $INPUT_JSON_FILE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Use jq to transform the content
|
||||||
|
jq --arg channel "$CHANNEL" --arg updater "$UPDATER" '
|
||||||
|
.name = "jan-\($channel)" |
|
||||||
|
.productName = "Jan-\($channel)" |
|
||||||
|
.build.appId = "jan-\($channel).ai.app" |
|
||||||
|
.build.productName = "Jan-\($channel)" |
|
||||||
|
.build.appId = "jan-\($channel).ai.app" |
|
||||||
|
.build.protocols[0].name = "Jan-\($channel)" |
|
||||||
|
.build.protocols[0].schemes = ["jan-\($channel)"] |
|
||||||
|
.build.artifactName = "jan-\($channel)-${os}-${arch}-${version}.${ext}" |
|
||||||
|
.build.publish[0].channel = $updater
|
||||||
|
' "$INPUT_JSON_FILE" > ./package.json.tmp
|
||||||
|
|
||||||
|
cat ./package.json.tmp
|
||||||
|
|
||||||
|
rm $INPUT_JSON_FILE
|
||||||
|
mv ./package.json.tmp $INPUT_JSON_FILE
|
||||||
|
|
||||||
|
# Update the layout file
|
||||||
|
LAYOUT_FILE_PATH="web/app/layout.tsx"
|
||||||
|
|
||||||
|
if [ ! -f "$LAYOUT_FILE_PATH" ]; then
|
||||||
|
echo "File does not exist: $LAYOUT_FILE_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Perform the replacements
|
||||||
|
sed -i -e "s#Jan#Jan-$CHANNEL#g" "$LAYOUT_FILE_PATH"
|
||||||
|
|
||||||
|
# Notify completion
|
||||||
|
echo "File has been updated: $LAYOUT_FILE_PATH"
|
||||||
63
.github/scripts/rename-tauri-app.sh
vendored
Normal file
63
.github/scripts/rename-tauri-app.sh
vendored
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# Check if the correct number of arguments is provided
|
||||||
|
if [ "$#" -ne 2 ]; then
|
||||||
|
echo "Usage: $0 <path_to_json_input_file> <channel>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
INPUT_JSON_FILE="$1"
|
||||||
|
|
||||||
|
CHANNEL="$2"
|
||||||
|
|
||||||
|
if [ "$CHANNEL" == "nightly" ]; then
|
||||||
|
UPDATER="latest"
|
||||||
|
else
|
||||||
|
UPDATER="beta"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Check if the input file exists
|
||||||
|
if [ ! -f "$INPUT_JSON_FILE" ]; then
|
||||||
|
echo "Input file not found: $INPUT_JSON_FILE"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Use jq to transform the content
|
||||||
|
jq --arg channel "$CHANNEL" --arg updater "$UPDATER" '
|
||||||
|
.productName = "Jan-\($channel)" |
|
||||||
|
.identifier = "jan-\($channel).ai.app"
|
||||||
|
' "$INPUT_JSON_FILE" > ./tauri.conf.json.tmp
|
||||||
|
|
||||||
|
cat ./tauri.conf.json.tmp
|
||||||
|
|
||||||
|
rm $INPUT_JSON_FILE
|
||||||
|
mv ./tauri.conf.json.tmp $INPUT_JSON_FILE
|
||||||
|
|
||||||
|
# Update Info.plist if it exists
|
||||||
|
INFO_PLIST_PATH="./src-tauri/Info.plist"
|
||||||
|
if [ -f "$INFO_PLIST_PATH" ]; then
|
||||||
|
echo "Updating Info.plist..."
|
||||||
|
|
||||||
|
# Replace jan.ai.app with jan-{channel}.ai.app
|
||||||
|
sed -i '' "s|jan\.ai\.app|jan-${CHANNEL}.ai.app|g" "$INFO_PLIST_PATH"
|
||||||
|
|
||||||
|
# Replace <string>jan</string> with <string>jan-{channel}</string>
|
||||||
|
sed -i '' "s|<string>jan</string>|<string>jan-${CHANNEL}</string>|g" "$INFO_PLIST_PATH"
|
||||||
|
|
||||||
|
echo "Info.plist updated"
|
||||||
|
|
||||||
|
cat ./src-tauri/Info.plist
|
||||||
|
fi
|
||||||
|
# Update the layout file
|
||||||
|
# LAYOUT_FILE_PATH="web/app/layout.tsx"
|
||||||
|
|
||||||
|
# if [ ! -f "$LAYOUT_FILE_PATH" ]; then
|
||||||
|
# echo "File does not exist: $LAYOUT_FILE_PATH"
|
||||||
|
# exit 1
|
||||||
|
# fi
|
||||||
|
|
||||||
|
# Perform the replacements
|
||||||
|
# sed -i -e "s#Jan#Jan-$CHANNEL#g" "$LAYOUT_FILE_PATH"
|
||||||
|
|
||||||
|
# Notify completion
|
||||||
|
# echo "File has been updated: $LAYOUT_FILE_PATH"
|
||||||
25
.github/scripts/rename-uninstaller.sh
vendored
Normal file
25
.github/scripts/rename-uninstaller.sh
vendored
Normal file
@ -0,0 +1,25 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# File path to be modified
|
||||||
|
FILE_PATH="electron/scripts/uninstaller.nsh"
|
||||||
|
|
||||||
|
# Check if the correct number of arguments is provided
|
||||||
|
if [ "$#" -ne 1 ]; then
|
||||||
|
echo "Usage: $0 <channel>"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
CHANNEL="$1"
|
||||||
|
|
||||||
|
# Check if the file exists
|
||||||
|
if [ ! -f "$FILE_PATH" ]; then
|
||||||
|
echo "File does not exist: $FILE_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Perform the replacements
|
||||||
|
sed -i -e "s#Jan#Jan-$CHANNEL#g" "$FILE_PATH"
|
||||||
|
sed -i -e "s#jan#jan-$CHANNEL#g" "$FILE_PATH"
|
||||||
|
|
||||||
|
# Notify completion
|
||||||
|
echo "File has been updated: $FILE_PATH"
|
||||||
18
.github/scripts/rename-workspace.sh
vendored
Normal file
18
.github/scripts/rename-workspace.sh
vendored
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# File path to be modified
|
||||||
|
FILE_PATH="$1"
|
||||||
|
|
||||||
|
CHANNEL="$2"
|
||||||
|
|
||||||
|
# Check if the file exists
|
||||||
|
if [ ! -f "$FILE_PATH" ]; then
|
||||||
|
echo "File does not exist: $FILE_PATH"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Perform the replacements
|
||||||
|
sed -i -e "s/yarn workspace jan/yarn workspace jan-$CHANNEL/g" "$FILE_PATH"
|
||||||
|
|
||||||
|
# Notify completion
|
||||||
|
echo "File has been updated: $FILE_PATH"
|
||||||
1
.github/workflows/auto-assign-author.yml
vendored
1
.github/workflows/auto-assign-author.yml
vendored
@ -6,6 +6,7 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
assign-author:
|
assign-author:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
|
||||||
permissions:
|
permissions:
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
steps:
|
steps:
|
||||||
|
|||||||
1
.github/workflows/auto-assign-milestone.yml
vendored
1
.github/workflows/auto-assign-milestone.yml
vendored
@ -7,6 +7,7 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
assign_milestone:
|
assign_milestone:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
|
if: ${{ github.event.pull_request.head.repo.full_name == github.repository }}
|
||||||
permissions:
|
permissions:
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
issues: write
|
issues: write
|
||||||
|
|||||||
@ -6,6 +6,7 @@ on:
|
|||||||
- opened
|
- opened
|
||||||
jobs:
|
jobs:
|
||||||
label_prs:
|
label_prs:
|
||||||
|
if: github.event.pull_request.head.repo.full_name == github.repository
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
pull-requests: write
|
pull-requests: write
|
||||||
|
|||||||
37
.github/workflows/autoqa-manual-trigger.yml
vendored
Normal file
37
.github/workflows/autoqa-manual-trigger.yml
vendored
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
name: Manual trigger AutoQA Test Runner
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
jan_app_url_windows:
|
||||||
|
description: 'URL to download Jan app for Windows (.exe)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.5-758_x64-setup.exe'
|
||||||
|
jan_app_url_ubuntu:
|
||||||
|
description: 'URL to download Jan app for Ubuntu (.deb)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.5-758_amd64.deb'
|
||||||
|
jan_app_url_macos:
|
||||||
|
description: 'URL to download Jan app for macOS (.dmg)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.5-758_universal.dmg'
|
||||||
|
is_nightly:
|
||||||
|
description: 'Is this a nightly build?'
|
||||||
|
required: true
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
call-autoqa-template:
|
||||||
|
uses: ./.github/workflows/autoqa-template.yml
|
||||||
|
with:
|
||||||
|
jan_app_windows_source: ${{ inputs.jan_app_url_windows }}
|
||||||
|
jan_app_ubuntu_source: ${{ inputs.jan_app_url_ubuntu }}
|
||||||
|
jan_app_macos_source: ${{ inputs.jan_app_url_macos }}
|
||||||
|
is_nightly: ${{ inputs.is_nightly }}
|
||||||
|
source_type: 'url'
|
||||||
|
secrets:
|
||||||
|
RP_TOKEN: ${{ secrets.RP_TOKEN }}
|
||||||
330
.github/workflows/autoqa-migration.yml
vendored
Normal file
330
.github/workflows/autoqa-migration.yml
vendored
Normal file
@ -0,0 +1,330 @@
|
|||||||
|
name: AutoQA Migration (Manual)
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
old_windows_installer:
|
||||||
|
description: 'Windows OLD installer URL or path (.exe)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
new_windows_installer:
|
||||||
|
description: 'Windows NEW installer URL or path (.exe)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
old_ubuntu_installer:
|
||||||
|
description: 'Ubuntu OLD installer URL or path (.deb)'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
new_ubuntu_installer:
|
||||||
|
description: 'Ubuntu NEW installer URL or path (.deb)'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
old_macos_installer:
|
||||||
|
description: 'macOS OLD installer URL or path (.dmg)'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
new_macos_installer:
|
||||||
|
description: 'macOS NEW installer URL or path (.dmg)'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
migration_test_case:
|
||||||
|
description: 'Specific migration test case key (leave empty to run all)'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
max_turns:
|
||||||
|
description: 'Maximum turns per test phase'
|
||||||
|
required: false
|
||||||
|
type: number
|
||||||
|
default: 65
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
migration-windows:
|
||||||
|
runs-on: windows-11-nvidia-gpu
|
||||||
|
timeout-minutes: 60
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python 3.13
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
|
||||||
|
- name: Clean existing Jan installations
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_cleanup.ps1 -IsNightly $false
|
||||||
|
|
||||||
|
- name: Download OLD and NEW installers
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
# Download OLD installer using existing script
|
||||||
|
.\autoqa\scripts\windows_download.ps1 `
|
||||||
|
-WorkflowInputUrl "${{ inputs.old_windows_installer }}" `
|
||||||
|
-WorkflowInputIsNightly "false" `
|
||||||
|
-RepoVariableUrl "" `
|
||||||
|
-RepoVariableIsNightly "" `
|
||||||
|
-DefaultUrl "" `
|
||||||
|
-DefaultIsNightly ""
|
||||||
|
|
||||||
|
$oldSrc = Join-Path $env:TEMP 'jan-installer.exe'
|
||||||
|
$oldOut = Join-Path $env:TEMP 'jan-old.exe'
|
||||||
|
Copy-Item -Path $oldSrc -Destination $oldOut -Force
|
||||||
|
|
||||||
|
# Download NEW installer using existing script
|
||||||
|
.\autoqa\scripts\windows_download.ps1 `
|
||||||
|
-WorkflowInputUrl "${{ inputs.new_windows_installer }}" `
|
||||||
|
-WorkflowInputIsNightly "false" `
|
||||||
|
-RepoVariableUrl "" `
|
||||||
|
-RepoVariableIsNightly "" `
|
||||||
|
-DefaultUrl "" `
|
||||||
|
-DefaultIsNightly ""
|
||||||
|
|
||||||
|
$newSrc = Join-Path $env:TEMP 'jan-installer.exe'
|
||||||
|
$newOut = Join-Path $env:TEMP 'jan-new.exe'
|
||||||
|
Copy-Item -Path $newSrc -Destination $newOut -Force
|
||||||
|
|
||||||
|
Write-Host "OLD installer: $oldOut"
|
||||||
|
Write-Host "NEW installer: $newOut"
|
||||||
|
echo "OLD_VERSION=$oldOut" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||||
|
echo "NEW_VERSION=$newOut" | Out-File -FilePath $env:GITHUB_ENV -Append
|
||||||
|
|
||||||
|
- name: Install Python dependencies
|
||||||
|
working-directory: autoqa
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
|
||||||
|
- name: Run migration tests (Windows)
|
||||||
|
working-directory: autoqa
|
||||||
|
shell: powershell
|
||||||
|
env:
|
||||||
|
RP_TOKEN: ${{ secrets.RP_TOKEN }}
|
||||||
|
ENABLE_REPORTPORTAL: 'true'
|
||||||
|
RP_ENDPOINT: 'https://reportportal.menlo.ai'
|
||||||
|
RP_PROJECT: 'default_personal'
|
||||||
|
run: |
|
||||||
|
$case = "${{ inputs.migration_test_case }}"
|
||||||
|
$caseArg = ""
|
||||||
|
if ($case -and $case.Trim() -ne "") { $caseArg = "--migration-test-case `"$case`"" }
|
||||||
|
python main.py --enable-migration-test --old-version "$env:OLD_VERSION" --new-version "$env:NEW_VERSION" --max-turns ${{ inputs.max_turns }} $caseArg
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: migration-recordings-${{ github.run_number }}-windows
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload trajectories
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: migration-trajectories-${{ github.run_number }}-windows
|
||||||
|
path: autoqa/trajectories/
|
||||||
|
|
||||||
|
- name: Cleanup after tests
|
||||||
|
if: always()
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_post_cleanup.ps1 -IsNightly $false
|
||||||
|
|
||||||
|
migration-ubuntu:
|
||||||
|
if: inputs.old_ubuntu_installer != '' && inputs.new_ubuntu_installer != ''
|
||||||
|
runs-on: ubuntu-22-04-nvidia-gpu
|
||||||
|
timeout-minutes: 60
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python 3.13
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
|
||||||
|
- name: Install system dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y \
|
||||||
|
x11-utils \
|
||||||
|
python3-tk \
|
||||||
|
python3-dev \
|
||||||
|
wmctrl \
|
||||||
|
xdotool \
|
||||||
|
libnss3-dev \
|
||||||
|
libgconf-2-4 \
|
||||||
|
libxss1 \
|
||||||
|
libasound2 \
|
||||||
|
libxtst6 \
|
||||||
|
libgtk-3-0 \
|
||||||
|
libgbm-dev \
|
||||||
|
libxshmfence1 \
|
||||||
|
libxrandr2 \
|
||||||
|
libpangocairo-1.0-0 \
|
||||||
|
libatk1.0-0 \
|
||||||
|
libcairo-gobject2 \
|
||||||
|
libgdk-pixbuf2.0-0 \
|
||||||
|
gnome-screenshot \
|
||||||
|
xvfb
|
||||||
|
|
||||||
|
- name: Setup script permissions
|
||||||
|
run: |
|
||||||
|
chmod +x autoqa/scripts/setup_permissions.sh || true
|
||||||
|
./autoqa/scripts/setup_permissions.sh || true
|
||||||
|
|
||||||
|
- name: Clean existing Jan installations
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/ubuntu_cleanup.sh
|
||||||
|
|
||||||
|
- name: Download OLD and NEW installers
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
# Download OLD installer using existing script
|
||||||
|
./autoqa/scripts/ubuntu_download.sh \
|
||||||
|
"${{ inputs.old_ubuntu_installer }}" \
|
||||||
|
"false" \
|
||||||
|
"" \
|
||||||
|
"" \
|
||||||
|
"" \
|
||||||
|
""
|
||||||
|
cp /tmp/jan-installer.deb /tmp/jan-old.deb
|
||||||
|
|
||||||
|
# Download NEW installer using existing script
|
||||||
|
./autoqa/scripts/ubuntu_download.sh \
|
||||||
|
"${{ inputs.new_ubuntu_installer }}" \
|
||||||
|
"false" \
|
||||||
|
"" \
|
||||||
|
"" \
|
||||||
|
"" \
|
||||||
|
""
|
||||||
|
cp /tmp/jan-installer.deb /tmp/jan-new.deb
|
||||||
|
|
||||||
|
echo "OLD_VERSION=/tmp/jan-old.deb" >> $GITHUB_ENV
|
||||||
|
echo "NEW_VERSION=/tmp/jan-new.deb" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Install Python dependencies
|
||||||
|
working-directory: autoqa
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
|
||||||
|
- name: Run migration tests (Ubuntu)
|
||||||
|
working-directory: autoqa
|
||||||
|
run: |
|
||||||
|
case="${{ inputs.migration_test_case }}"
|
||||||
|
caseArg=""
|
||||||
|
if [ -n "${case}" ]; then caseArg="--migration-test-case \"${case}\""; fi
|
||||||
|
xvfb-run -a python main.py --enable-migration-test --old-version "${OLD_VERSION}" --new-version "${NEW_VERSION}" --max-turns ${{ inputs.max_turns }} ${caseArg}
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: migration-recordings-${{ github.run_number }}-ubuntu
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload trajectories
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: migration-trajectories-${{ github.run_number }}-ubuntu
|
||||||
|
path: autoqa/trajectories/
|
||||||
|
|
||||||
|
- name: Cleanup after tests
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/ubuntu_post_cleanup.sh "false"
|
||||||
|
|
||||||
|
migration-macos:
|
||||||
|
if: inputs.old_macos_installer != '' && inputs.new_macos_installer != ''
|
||||||
|
runs-on: macos-selfhosted-15-arm64-cua
|
||||||
|
timeout-minutes: 60
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python 3.13
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
|
||||||
|
- name: Setup script permissions
|
||||||
|
run: |
|
||||||
|
chmod +x autoqa/scripts/setup_permissions.sh || true
|
||||||
|
./autoqa/scripts/setup_permissions.sh || true
|
||||||
|
|
||||||
|
- name: Clean existing Jan installations
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/macos_cleanup.sh
|
||||||
|
|
||||||
|
- name: Download OLD and NEW installers
|
||||||
|
run: |
|
||||||
|
set -e
|
||||||
|
# Download OLD installer using existing script
|
||||||
|
./autoqa/scripts/macos_download.sh \
|
||||||
|
"${{ inputs.old_macos_installer }}" \
|
||||||
|
"false" \
|
||||||
|
"" \
|
||||||
|
"" \
|
||||||
|
"" \
|
||||||
|
""
|
||||||
|
cp /tmp/jan-installer.dmg /tmp/jan-old.dmg
|
||||||
|
|
||||||
|
# Download NEW installer using existing script
|
||||||
|
./autoqa/scripts/macos_download.sh \
|
||||||
|
"${{ inputs.new_macos_installer }}" \
|
||||||
|
"false" \
|
||||||
|
"" \
|
||||||
|
"" \
|
||||||
|
"" \
|
||||||
|
""
|
||||||
|
cp /tmp/jan-installer.dmg /tmp/jan-new.dmg
|
||||||
|
|
||||||
|
echo "OLD_VERSION=/tmp/jan-old.dmg" >> $GITHUB_ENV
|
||||||
|
echo "NEW_VERSION=/tmp/jan-new.dmg" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Install Python dependencies
|
||||||
|
working-directory: autoqa
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
|
||||||
|
- name: Run migration tests (macOS)
|
||||||
|
working-directory: autoqa
|
||||||
|
run: |
|
||||||
|
case="${{ inputs.migration_test_case }}"
|
||||||
|
caseArg=""
|
||||||
|
if [ -n "${case}" ]; then caseArg="--migration-test-case \"${case}\""; fi
|
||||||
|
python main.py --enable-migration-test --old-version "${OLD_VERSION}" --new-version "${NEW_VERSION}" --max-turns ${{ inputs.max_turns }} ${caseArg}
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: migration-recordings-${{ github.run_number }}-macos
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload trajectories
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: migration-trajectories-${{ github.run_number }}-macos
|
||||||
|
path: autoqa/trajectories/
|
||||||
|
|
||||||
|
- name: Cleanup after tests
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/macos_post_cleanup.sh
|
||||||
|
|
||||||
|
|
||||||
121
.github/workflows/autoqa-reliability.yml
vendored
Normal file
121
.github/workflows/autoqa-reliability.yml
vendored
Normal file
@ -0,0 +1,121 @@
|
|||||||
|
name: AutoQA Reliability (Manual)
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
source_type:
|
||||||
|
description: 'App source type (url)'
|
||||||
|
required: true
|
||||||
|
type: choice
|
||||||
|
options: [url]
|
||||||
|
default: url
|
||||||
|
jan_app_windows_source:
|
||||||
|
description: 'Windows installer URL path (used when source_type=url or to select artifact)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'https://catalog.jan.ai/windows/Jan_0.6.8_x64-setup.exe'
|
||||||
|
jan_app_ubuntu_source:
|
||||||
|
description: 'Ubuntu .deb URL path'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.4-728_amd64.deb'
|
||||||
|
jan_app_macos_source:
|
||||||
|
description: 'macOS .dmg URL path'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.4-728_universal.dmg'
|
||||||
|
is_nightly:
|
||||||
|
description: 'Is the app a nightly build?'
|
||||||
|
required: true
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
reliability_phase:
|
||||||
|
description: 'Reliability phase'
|
||||||
|
required: true
|
||||||
|
type: choice
|
||||||
|
options: [development, deployment]
|
||||||
|
default: development
|
||||||
|
reliability_runs:
|
||||||
|
description: 'Custom runs (0 uses phase default)'
|
||||||
|
required: true
|
||||||
|
type: number
|
||||||
|
default: 0
|
||||||
|
reliability_test_path:
|
||||||
|
description: 'Test file path (relative to autoqa working directory)'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'tests/base/settings/app-data.txt'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
reliability-windows:
|
||||||
|
runs-on: windows-11-nvidia-gpu
|
||||||
|
timeout-minutes: 60
|
||||||
|
env:
|
||||||
|
DEFAULT_JAN_APP_URL: 'https://catalog.jan.ai/windows/Jan_0.6.8_x64-setup.exe'
|
||||||
|
DEFAULT_IS_NIGHTLY: 'false'
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python 3.13
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
|
||||||
|
- name: Clean existing Jan installations
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_cleanup.ps1 -IsNightly "${{ inputs.is_nightly }}"
|
||||||
|
|
||||||
|
- name: Download/Prepare Jan app
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_download.ps1 `
|
||||||
|
-WorkflowInputUrl "${{ inputs.jan_app_windows_source }}" `
|
||||||
|
-WorkflowInputIsNightly "${{ inputs.is_nightly }}" `
|
||||||
|
-RepoVariableUrl "${{ vars.JAN_APP_URL }}" `
|
||||||
|
-RepoVariableIsNightly "${{ vars.IS_NIGHTLY }}" `
|
||||||
|
-DefaultUrl "$env:DEFAULT_JAN_APP_URL" `
|
||||||
|
-DefaultIsNightly "$env:DEFAULT_IS_NIGHTLY"
|
||||||
|
|
||||||
|
- name: Install Jan app
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_install.ps1 -IsNightly "$env:IS_NIGHTLY"
|
||||||
|
|
||||||
|
- name: Install Python dependencies
|
||||||
|
working-directory: autoqa
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
|
||||||
|
- name: Run reliability tests
|
||||||
|
working-directory: autoqa
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
$runs = "${{ inputs.reliability_runs }}"
|
||||||
|
$runsArg = ""
|
||||||
|
if ([int]$runs -gt 0) { $runsArg = "--reliability-runs $runs" }
|
||||||
|
python main.py --enable-reliability-test --reliability-phase "${{ inputs.reliability_phase }}" --reliability-test-path "${{ inputs.reliability_test_path }}" $runsArg
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: reliability-recordings-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload trajectories
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: reliability-trajectories-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/trajectories/
|
||||||
|
|
||||||
|
- name: Cleanup after tests
|
||||||
|
if: always()
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_post_cleanup.ps1 -IsNightly "${{ inputs.is_nightly }}"
|
||||||
471
.github/workflows/autoqa-template.yml
vendored
Normal file
471
.github/workflows/autoqa-template.yml
vendored
Normal file
@ -0,0 +1,471 @@
|
|||||||
|
name: Auto QA Test Runner Template
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
jan_app_windows_source:
|
||||||
|
description: 'Windows app source - can be URL or local path'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
jan_app_ubuntu_source:
|
||||||
|
description: 'Ubuntu app source - can be URL or local path'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
jan_app_macos_source:
|
||||||
|
description: 'macOS app source - can be URL or local path'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
is_nightly:
|
||||||
|
description: 'Is this a nightly build?'
|
||||||
|
required: true
|
||||||
|
type: boolean
|
||||||
|
default: true
|
||||||
|
source_type:
|
||||||
|
description: 'Source type: url or local'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'url'
|
||||||
|
artifact_name_windows:
|
||||||
|
description: 'Windows artifact name (only needed for local)'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
artifact_name_ubuntu:
|
||||||
|
description: 'Ubuntu artifact name (only needed for local)'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
artifact_name_macos:
|
||||||
|
description: 'macOS artifact name (only needed for local)'
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
secrets:
|
||||||
|
RP_TOKEN:
|
||||||
|
description: 'ReportPortal API token'
|
||||||
|
required: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
windows:
|
||||||
|
runs-on: windows-11-nvidia-gpu
|
||||||
|
timeout-minutes: 60
|
||||||
|
|
||||||
|
env:
|
||||||
|
DEFAULT_JAN_APP_URL: 'https://catalog.jan.ai/windows/Jan-nightly_0.6.5-758_x64-setup.exe'
|
||||||
|
DEFAULT_IS_NIGHTLY: 'true'
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python 3.13
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
|
||||||
|
- name: Download artifact (if source_type is local)
|
||||||
|
if: inputs.source_type == 'local'
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.artifact_name_windows }}
|
||||||
|
path: ${{ runner.temp }}/windows-artifact
|
||||||
|
|
||||||
|
- name: Clean existing Jan installations
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_cleanup.ps1 -IsNightly "${{ inputs.is_nightly }}"
|
||||||
|
|
||||||
|
- name: Download/Prepare Jan app
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
if ("${{ inputs.source_type }}" -eq "local") {
|
||||||
|
# Find the exe file in the artifact
|
||||||
|
$exeFile = Get-ChildItem -Path "${{ runner.temp }}/windows-artifact" -Recurse -Filter "*.exe" | Select-Object -First 1
|
||||||
|
if ($exeFile) {
|
||||||
|
Write-Host "[SUCCESS] Found local installer: $($exeFile.FullName)"
|
||||||
|
Copy-Item -Path $exeFile.FullName -Destination "$env:TEMP\jan-installer.exe" -Force
|
||||||
|
Write-Host "[SUCCESS] Installer copied to: $env:TEMP\jan-installer.exe"
|
||||||
|
# Don't set JAN_APP_PATH here - let the install script set it to the correct installed app path
|
||||||
|
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $env:GITHUB_ENV
|
||||||
|
} else {
|
||||||
|
Write-Error "[FAILED] No .exe file found in artifact"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
# Use the existing download script for URLs
|
||||||
|
.\autoqa\scripts\windows_download.ps1 `
|
||||||
|
-WorkflowInputUrl "${{ inputs.jan_app_windows_source }}" `
|
||||||
|
-WorkflowInputIsNightly "${{ inputs.is_nightly }}" `
|
||||||
|
-RepoVariableUrl "${{ vars.JAN_APP_URL }}" `
|
||||||
|
-RepoVariableIsNightly "${{ vars.IS_NIGHTLY }}" `
|
||||||
|
-DefaultUrl "$env:DEFAULT_JAN_APP_URL" `
|
||||||
|
-DefaultIsNightly "$env:DEFAULT_IS_NIGHTLY"
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Install Jan app
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_install.ps1 -IsNightly "$env:IS_NIGHTLY"
|
||||||
|
|
||||||
|
- name: Install Python dependencies
|
||||||
|
working-directory: autoqa
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
|
||||||
|
- name: Run Auto QA Tests
|
||||||
|
working-directory: autoqa
|
||||||
|
shell: powershell
|
||||||
|
env:
|
||||||
|
RP_TOKEN: ${{ secrets.RP_TOKEN }}
|
||||||
|
ENABLE_REPORTPORTAL: 'true'
|
||||||
|
RP_ENDPOINT: 'https://reportportal.menlo.ai'
|
||||||
|
RP_PROJECT: 'default_personal'
|
||||||
|
MAX_TURNS: '50'
|
||||||
|
DELAY_BETWEEN_TESTS: '3'
|
||||||
|
LAUNCH_NAME: 'CI AutoQA Run Windows - ${{ github.run_number }} - ${{ github.ref_name }}'
|
||||||
|
run: |
|
||||||
|
.\scripts\run_tests.ps1 -JanAppPath "$env:JAN_APP_PATH" -ProcessName "$env:JAN_PROCESS_NAME" -RpToken "$env:RP_TOKEN"
|
||||||
|
|
||||||
|
- name: Collect Jan logs for artifact upload
|
||||||
|
if: always()
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
$logDirs = @(
|
||||||
|
"$env:APPDATA\Jan-nightly\data\logs",
|
||||||
|
"$env:APPDATA\Jan\data\logs"
|
||||||
|
)
|
||||||
|
$dest = "autoqa\jan-logs"
|
||||||
|
mkdir $dest -Force | Out-Null
|
||||||
|
foreach ($dir in $logDirs) {
|
||||||
|
if (Test-Path $dir) {
|
||||||
|
Copy-Item "$dir\*.log" $dest -Force -ErrorAction SilentlyContinue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-recordings-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload Jan logs
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-logs-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/jan-logs/
|
||||||
|
|
||||||
|
- name: Cleanup after tests
|
||||||
|
if: always()
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_post_cleanup.ps1 -IsNightly "${{ inputs.is_nightly }}"
|
||||||
|
|
||||||
|
ubuntu:
|
||||||
|
runs-on: ubuntu-22-04-nvidia-gpu
|
||||||
|
timeout-minutes: 60
|
||||||
|
|
||||||
|
env:
|
||||||
|
DEFAULT_JAN_APP_URL: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.4-728_amd64.deb'
|
||||||
|
DEFAULT_IS_NIGHTLY: 'true'
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python 3.13
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
|
||||||
|
- name: Download artifact (if source_type is local)
|
||||||
|
if: inputs.source_type == 'local'
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.artifact_name_ubuntu }}
|
||||||
|
path: ${{ runner.temp }}/ubuntu-artifact
|
||||||
|
|
||||||
|
- name: Install system dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y \
|
||||||
|
x11-utils \
|
||||||
|
python3-tk \
|
||||||
|
python3-dev \
|
||||||
|
wmctrl \
|
||||||
|
xdotool \
|
||||||
|
libnss3-dev \
|
||||||
|
libgconf-2-4 \
|
||||||
|
libxss1 \
|
||||||
|
libasound2 \
|
||||||
|
libxtst6 \
|
||||||
|
libgtk-3-0 \
|
||||||
|
libgbm-dev \
|
||||||
|
libxshmfence1 \
|
||||||
|
libxrandr2 \
|
||||||
|
libpangocairo-1.0-0 \
|
||||||
|
libatk1.0-0 \
|
||||||
|
libcairo-gobject2 \
|
||||||
|
libgdk-pixbuf2.0-0 \
|
||||||
|
gnome-screenshot
|
||||||
|
|
||||||
|
- name: Setup script permissions
|
||||||
|
run: |
|
||||||
|
chmod +x autoqa/scripts/setup_permissions.sh
|
||||||
|
./autoqa/scripts/setup_permissions.sh
|
||||||
|
|
||||||
|
- name: Clean existing Jan installations
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/ubuntu_cleanup.sh
|
||||||
|
|
||||||
|
- name: Download/Prepare Jan app
|
||||||
|
run: |
|
||||||
|
if [ "${{ inputs.source_type }}" = "local" ]; then
|
||||||
|
# Find the deb file in the artifact
|
||||||
|
DEB_FILE=$(find "${{ runner.temp }}/ubuntu-artifact" -name "*.deb" -type f | head -1)
|
||||||
|
if [ -n "$DEB_FILE" ]; then
|
||||||
|
echo "[SUCCESS] Found local installer: $DEB_FILE"
|
||||||
|
cp "$DEB_FILE" "/tmp/jan-installer.deb"
|
||||||
|
echo "[SUCCESS] Installer copied to: /tmp/jan-installer.deb"
|
||||||
|
echo "JAN_APP_PATH=/tmp/jan-installer.deb" >> $GITHUB_ENV
|
||||||
|
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $GITHUB_ENV
|
||||||
|
if [ "${{ inputs.is_nightly }}" = "true" ]; then
|
||||||
|
echo "JAN_PROCESS_NAME=Jan-nightly" >> $GITHUB_ENV
|
||||||
|
else
|
||||||
|
echo "JAN_PROCESS_NAME=Jan" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "[FAILED] No .deb file found in artifact"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# Use the existing download script for URLs
|
||||||
|
./autoqa/scripts/ubuntu_download.sh \
|
||||||
|
"${{ inputs.jan_app_ubuntu_source }}" \
|
||||||
|
"${{ inputs.is_nightly }}" \
|
||||||
|
"${{ vars.JAN_APP_URL_LINUX }}" \
|
||||||
|
"${{ vars.IS_NIGHTLY }}" \
|
||||||
|
"$DEFAULT_JAN_APP_URL" \
|
||||||
|
"$DEFAULT_IS_NIGHTLY"
|
||||||
|
|
||||||
|
# Set the correct environment variables for the test runner
|
||||||
|
echo "JAN_APP_PATH=/tmp/jan-installer.deb" >> $GITHUB_ENV
|
||||||
|
if [ "${{ inputs.is_nightly }}" = "true" ]; then
|
||||||
|
echo "JAN_PROCESS_NAME=Jan-nightly" >> $GITHUB_ENV
|
||||||
|
else
|
||||||
|
echo "JAN_PROCESS_NAME=Jan" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Install Jan app
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/ubuntu_install.sh "$IS_NIGHTLY"
|
||||||
|
|
||||||
|
- name: Install Python dependencies
|
||||||
|
working-directory: autoqa
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
|
||||||
|
- name: Run Auto QA Tests
|
||||||
|
working-directory: autoqa
|
||||||
|
env:
|
||||||
|
RP_TOKEN: ${{ secrets.RP_TOKEN }}
|
||||||
|
ENABLE_REPORTPORTAL: 'true'
|
||||||
|
RP_ENDPOINT: 'https://reportportal.menlo.ai'
|
||||||
|
RP_PROJECT: 'default_personal'
|
||||||
|
MAX_TURNS: '50'
|
||||||
|
DELAY_BETWEEN_TESTS: '3'
|
||||||
|
LAUNCH_NAME: 'CI AutoQA Run Ubuntu - ${{ github.run_number }} - ${{ github.ref_name }}'
|
||||||
|
run: |
|
||||||
|
./scripts/run_tests.sh "$JAN_APP_PATH" "$JAN_PROCESS_NAME" "$RP_TOKEN" "ubuntu"
|
||||||
|
|
||||||
|
- name: Collect Jan logs for artifact upload
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
mkdir -p autoqa/jan-logs
|
||||||
|
cp ~/.local/share/Jan-nightly/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
|
||||||
|
cp ~/.local/share/Jan/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-recordings-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload Jan logs
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-logs-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/jan-logs/
|
||||||
|
|
||||||
|
- name: Cleanup after tests
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/ubuntu_post_cleanup.sh "$IS_NIGHTLY"
|
||||||
|
|
||||||
|
macos:
|
||||||
|
runs-on: macos-selfhosted-15-arm64-cua
|
||||||
|
timeout-minutes: 60
|
||||||
|
|
||||||
|
env:
|
||||||
|
DEFAULT_JAN_APP_URL: 'https://delta.jan.ai/nightly/Jan-nightly_0.6.4-728_universal.dmg'
|
||||||
|
DEFAULT_IS_NIGHTLY: 'true'
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Setup Python 3.13
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.13'
|
||||||
|
|
||||||
|
- name: Download artifact (if source_type is local)
|
||||||
|
if: inputs.source_type == 'local'
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.artifact_name_macos }}
|
||||||
|
path: ${{ runner.temp }}/macos-artifact
|
||||||
|
|
||||||
|
- name: Setup script permissions
|
||||||
|
run: |
|
||||||
|
chmod +x autoqa/scripts/setup_permissions.sh
|
||||||
|
./autoqa/scripts/setup_permissions.sh
|
||||||
|
|
||||||
|
- name: Clean existing Jan installations
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/macos_cleanup.sh
|
||||||
|
|
||||||
|
- name: Download/Prepare Jan app
|
||||||
|
run: |
|
||||||
|
if [ "${{ inputs.source_type }}" = "local" ]; then
|
||||||
|
# Find the dmg file in the artifact
|
||||||
|
DMG_FILE=$(find "${{ runner.temp }}/macos-artifact" -name "*.dmg" -type f | head -1)
|
||||||
|
if [ -n "$DMG_FILE" ]; then
|
||||||
|
echo "[SUCCESS] Found local installer: $DMG_FILE"
|
||||||
|
cp "$DMG_FILE" "/tmp/jan-installer.dmg"
|
||||||
|
echo "[SUCCESS] Installer copied to: /tmp/jan-installer.dmg"
|
||||||
|
echo "JAN_APP_PATH=/tmp/jan-installer.dmg" >> $GITHUB_ENV
|
||||||
|
echo "IS_NIGHTLY=${{ inputs.is_nightly }}" >> $GITHUB_ENV
|
||||||
|
if [ "${{ inputs.is_nightly }}" = "true" ]; then
|
||||||
|
echo "PROCESS_NAME=Jan-nightly" >> $GITHUB_ENV
|
||||||
|
else
|
||||||
|
echo "PROCESS_NAME=Jan" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
echo "[FAILED] No .dmg file found in artifact"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# Use the existing download script for URLs
|
||||||
|
./autoqa/scripts/macos_download.sh \
|
||||||
|
"${{ inputs.jan_app_macos_source }}" \
|
||||||
|
"${{ inputs.is_nightly }}" \
|
||||||
|
"${{ vars.JAN_APP_URL }}" \
|
||||||
|
"${{ vars.IS_NIGHTLY }}" \
|
||||||
|
"$DEFAULT_JAN_APP_URL" \
|
||||||
|
"$DEFAULT_IS_NIGHTLY"
|
||||||
|
|
||||||
|
# Set the correct environment variables for the test runner
|
||||||
|
echo "JAN_APP_PATH=/tmp/jan-installer.dmg" >> $GITHUB_ENV
|
||||||
|
if [ "${{ inputs.is_nightly }}" = "true" ]; then
|
||||||
|
echo "PROCESS_NAME=Jan-nightly" >> $GITHUB_ENV
|
||||||
|
else
|
||||||
|
echo "PROCESS_NAME=Jan" >> $GITHUB_ENV
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Install Jan app
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/macos_install.sh
|
||||||
|
|
||||||
|
- name: Install system dependencies
|
||||||
|
run: |
|
||||||
|
echo "Installing system dependencies for macOS..."
|
||||||
|
|
||||||
|
# Check if Homebrew is available
|
||||||
|
if command -v brew >/dev/null 2>&1; then
|
||||||
|
echo "Homebrew is available"
|
||||||
|
|
||||||
|
# Install python-tk if not available
|
||||||
|
python3 -c "import tkinter" 2>/dev/null || {
|
||||||
|
echo "Installing python-tk via Homebrew..."
|
||||||
|
brew install python-tk || true
|
||||||
|
}
|
||||||
|
else
|
||||||
|
echo "Homebrew not available, checking if tkinter works..."
|
||||||
|
python3 -c "import tkinter" || {
|
||||||
|
echo "[WARNING] tkinter not available and Homebrew not found"
|
||||||
|
echo "This may cause issues with mouse control"
|
||||||
|
}
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "System dependencies check completed"
|
||||||
|
|
||||||
|
- name: Install Python dependencies
|
||||||
|
run: |
|
||||||
|
cd autoqa
|
||||||
|
echo "Installing Python dependencies..."
|
||||||
|
pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
echo "[SUCCESS] Python dependencies installed"
|
||||||
|
|
||||||
|
- name: Setup ReportPortal environment
|
||||||
|
run: |
|
||||||
|
echo "Setting up ReportPortal environment..."
|
||||||
|
echo "RP_TOKEN=${{ secrets.RP_TOKEN }}" >> $GITHUB_ENV
|
||||||
|
echo "ReportPortal environment configured"
|
||||||
|
|
||||||
|
- name: Run E2E tests
|
||||||
|
env:
|
||||||
|
RP_TOKEN: ${{ secrets.RP_TOKEN }}
|
||||||
|
ENABLE_REPORTPORTAL: 'true'
|
||||||
|
RP_ENDPOINT: 'https://reportportal.menlo.ai'
|
||||||
|
RP_PROJECT: 'default_personal'
|
||||||
|
MAX_TURNS: '50'
|
||||||
|
DELAY_BETWEEN_TESTS: '3'
|
||||||
|
LAUNCH_NAME: 'CI AutoQA Run Macos - ${{ github.run_number }} - ${{ github.ref_name }}'
|
||||||
|
run: |
|
||||||
|
cd autoqa
|
||||||
|
echo "Starting E2E test execution..."
|
||||||
|
|
||||||
|
echo "Environment variables:"
|
||||||
|
echo "JAN_APP_PATH: $JAN_APP_PATH"
|
||||||
|
echo "PROCESS_NAME: $PROCESS_NAME"
|
||||||
|
echo "IS_NIGHTLY: $IS_NIGHTLY"
|
||||||
|
|
||||||
|
./scripts/run_tests.sh "$JAN_APP_PATH" "$PROCESS_NAME" "$RP_TOKEN" "macos"
|
||||||
|
|
||||||
|
- name: Collect Jan logs for artifact upload
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
mkdir -p autoqa/jan-logs
|
||||||
|
cp ~/Library/Application\ Support/Jan-nightly/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
|
||||||
|
cp ~/Library/Application\ Support/Jan/data/logs/*.log autoqa/jan-logs/ 2>/dev/null || true
|
||||||
|
|
||||||
|
- name: Upload screen recordings
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-recordings-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/recordings/
|
||||||
|
|
||||||
|
- name: Upload Jan logs
|
||||||
|
if: always()
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ${{ inputs.is_nightly && 'jan-nightly' || 'jan' }}-logs-${{ github.run_number }}-${{ runner.os }}
|
||||||
|
path: autoqa/jan-logs/
|
||||||
|
|
||||||
|
- name: Cleanup after tests
|
||||||
|
if: always()
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/macos_post_cleanup.sh
|
||||||
31
.github/workflows/claude-issue-dedup.yml
vendored
Normal file
31
.github/workflows/claude-issue-dedup.yml
vendored
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
name: Claude Issue Dedupe
|
||||||
|
description: Automatically dedupe GitHub issues using Claude Code
|
||||||
|
on:
|
||||||
|
issues:
|
||||||
|
types: [opened]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
issue_number:
|
||||||
|
description: 'Issue number to process for duplicate detection'
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
claude-dedupe-issues:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 10
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
issues: write
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Run Claude Code dedupe
|
||||||
|
uses: anthropics/claude-code-base-action@beta
|
||||||
|
with:
|
||||||
|
prompt: "/dedupe ${{ github.repository }}/issues/${{ github.event.issue.number || inputs.issue_number }}"
|
||||||
|
anthropic_api_key: ${{ secrets.ANTHROPIC_API_KEY }}
|
||||||
|
claude_env: |
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
52
.github/workflows/clean-cloudflare-page-preview-url-and-r2.yml
vendored
Normal file
52
.github/workflows/clean-cloudflare-page-preview-url-and-r2.yml
vendored
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
name: "Clean old cloudflare pages preview urls and nightly build"
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 0 * * *" # every day at 00:00
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
clean-cloudflare-pages-preview-urls:
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
project: ["nitro", "docs"]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: '3.x'
|
||||||
|
- name: install requests
|
||||||
|
run: |
|
||||||
|
python3 -m pip install requests pytz tqdm
|
||||||
|
- name: Python Inline script
|
||||||
|
uses: jannekem/run-python-script-action@v1
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
import requests
|
||||||
|
from datetime import datetime, UTC
|
||||||
|
from pytz import timezone
|
||||||
|
from tqdm import tqdm
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
endpoint = "https://api.cloudflare.com/client/v4/accounts/${{ secrets.CLOUDFLARE_ACCOUNT_ID }}/pages/projects/${{ matrix.project }}/deployments"
|
||||||
|
expiration_days = 3
|
||||||
|
headers = {
|
||||||
|
"Content-Type": "application/json;charset=UTF-8",
|
||||||
|
"Authorization": "Bearer ${{ secrets.CLOUDFLARE_API_TOKEN }}"
|
||||||
|
}
|
||||||
|
utc_tz = timezone('UTC')
|
||||||
|
|
||||||
|
# Fetch the list of deployments
|
||||||
|
response = requests.get(endpoint, headers=headers)
|
||||||
|
deployments = response.json()
|
||||||
|
|
||||||
|
for deployment in tqdm(deployments['result']):
|
||||||
|
# Calculate the age of the deployment
|
||||||
|
created_on = datetime.strptime(deployment['created_on'], "%Y-%m-%dT%H:%M:%S.%fZ").replace(tzinfo=utc_tz)
|
||||||
|
if (datetime.now(UTC) - created_on).days > expiration_days:
|
||||||
|
# Delete the deployment
|
||||||
|
delete_response = requests.delete(f"{endpoint}/{deployment['id']}", headers=headers)
|
||||||
|
if delete_response.status_code == 200:
|
||||||
|
print(f"Deleted deployment: {deployment['id']}")
|
||||||
|
else:
|
||||||
|
print(f"Failed to delete deployment: {deployment['id']}")
|
||||||
|
|
||||||
16
.github/workflows/issues.yaml
vendored
Normal file
16
.github/workflows/issues.yaml
vendored
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
name: Adds all issues to project board
|
||||||
|
|
||||||
|
on:
|
||||||
|
issues:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
add-to-project:
|
||||||
|
name: Add issue to project
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/add-to-project@v1.0.2
|
||||||
|
with:
|
||||||
|
project-url: https://github.com/orgs/${{ vars.ORG_NAME }}/projects/${{ vars.JAN_PROJECT_NUMBER }}
|
||||||
|
github-token: ${{ secrets.AUTO_ADD_TICKET_PAT }}
|
||||||
145
.github/workflows/jan-astro-docs.yml
vendored
Normal file
145
.github/workflows/jan-astro-docs.yml
vendored
Normal file
@ -0,0 +1,145 @@
|
|||||||
|
name: Jan Astro Docs
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- dev
|
||||||
|
paths:
|
||||||
|
- 'website/**'
|
||||||
|
- '.github/workflows/jan-astro-docs.yml'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'website/**'
|
||||||
|
- '.github/workflows/jan-astro-docs.yml'
|
||||||
|
# Review gh actions docs if you want to further define triggers, paths, etc
|
||||||
|
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#on
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
update_cloud_spec:
|
||||||
|
description: 'Update Jan Server API specification'
|
||||||
|
required: false
|
||||||
|
default: 'false'
|
||||||
|
type: choice
|
||||||
|
options:
|
||||||
|
- 'true'
|
||||||
|
- 'false'
|
||||||
|
schedule:
|
||||||
|
# Run daily at 2 AM UTC to sync with Jan Server updates
|
||||||
|
- cron: '0 2 * * *'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
deploy:
|
||||||
|
name: Deploy to CloudFlare Pages
|
||||||
|
env:
|
||||||
|
CLOUDFLARE_PROJECT_NAME: astro-docs # docs.jan.ai
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
deployments: write
|
||||||
|
pull-requests: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- uses: oven-sh/setup-bun@v2
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Fill env vars
|
||||||
|
continue-on-error: true
|
||||||
|
working-directory: website
|
||||||
|
run: |
|
||||||
|
env_example_file=".env.example"
|
||||||
|
touch .env
|
||||||
|
while IFS= read -r line || [[ -n "$line" ]]; do
|
||||||
|
if [[ "$line" == *"="* ]]; then
|
||||||
|
var_name=$(echo $line | cut -d '=' -f 1)
|
||||||
|
echo $var_name
|
||||||
|
var_value="$(jq -r --arg key "$var_name" '.[$key]' <<< "$SECRETS")"
|
||||||
|
echo "$var_name=$var_value" >> .env
|
||||||
|
fi
|
||||||
|
done < "$env_example_file"
|
||||||
|
env:
|
||||||
|
SECRETS: '${{ toJson(secrets) }}'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
working-directory: website
|
||||||
|
run: bun install
|
||||||
|
|
||||||
|
- name: Update Jan Server API Spec (Scheduled/Manual)
|
||||||
|
if: github.event_name == 'schedule' || (github.event_name == 'workflow_dispatch' && github.event.inputs.update_cloud_spec == 'true')
|
||||||
|
working-directory: website
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
echo "📡 Updating Jan Server API specification..."
|
||||||
|
bun run generate:cloud-spec
|
||||||
|
|
||||||
|
# Check if the spec file was updated
|
||||||
|
if git diff --quiet public/openapi/cloud-openapi.json; then
|
||||||
|
echo "✅ No changes to API specification"
|
||||||
|
else
|
||||||
|
echo "📝 API specification updated"
|
||||||
|
# Commit the changes if this is a scheduled run on main branch
|
||||||
|
if [ "${{ github.event_name }}" = "schedule" ] && [ "${{ github.ref }}" = "refs/heads/dev" ]; then
|
||||||
|
git config --local user.email "github-actions[bot]@users.noreply.github.com"
|
||||||
|
git config --local user.name "github-actions[bot]"
|
||||||
|
git add public/openapi/cloud-openapi.json
|
||||||
|
git commit -m "chore: update Jan Server API specification [skip ci]"
|
||||||
|
git push
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
env:
|
||||||
|
JAN_SERVER_SPEC_URL: ${{ secrets.JAN_SERVER_SPEC_URL || 'https://api.jan.ai/api/swagger/doc.json' }}
|
||||||
|
JAN_SERVER_PROD_URL: ${{ secrets.JAN_SERVER_PROD_URL || 'https://api.jan.ai/v1' }}
|
||||||
|
- name: Build website
|
||||||
|
working-directory: website
|
||||||
|
run: |
|
||||||
|
# For PR and regular pushes, skip cloud spec generation in prebuild
|
||||||
|
# It will use the existing committed spec or fallback
|
||||||
|
if [ "${{ github.event_name }}" = "pull_request" ] || [ "${{ github.event_name }}" = "push" ]; then
|
||||||
|
echo "Using existing cloud spec for build"
|
||||||
|
export SKIP_CLOUD_SPEC_UPDATE=true
|
||||||
|
fi
|
||||||
|
bun run build
|
||||||
|
env:
|
||||||
|
SKIP_CLOUD_SPEC_UPDATE: ${{ github.event_name == 'pull_request' || github.event_name == 'push' }}
|
||||||
|
|
||||||
|
- name: copy redirects and headers
|
||||||
|
continue-on-error: true
|
||||||
|
working-directory: website
|
||||||
|
run: |
|
||||||
|
cp _redirects dist/_redirects
|
||||||
|
cp _headers dist/_headers
|
||||||
|
|
||||||
|
- name: Publish to Cloudflare Pages PR Preview and Staging
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
uses: cloudflare/pages-action@v1
|
||||||
|
with:
|
||||||
|
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
|
||||||
|
directory: ./website/dist
|
||||||
|
# Optional: Enable this if you want to have GitHub Deployments triggered
|
||||||
|
gitHubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
id: deployCloudflarePages
|
||||||
|
|
||||||
|
- uses: mshick/add-pr-comment@v2
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
with:
|
||||||
|
message: |
|
||||||
|
Preview URL Astro Docs: ${{ steps.deployCloudflarePages.outputs.url }}
|
||||||
|
|
||||||
|
- name: Publish to Cloudflare Pages Production
|
||||||
|
if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/dev')
|
||||||
|
uses: cloudflare/pages-action@v1
|
||||||
|
with:
|
||||||
|
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
|
||||||
|
directory: ./website/dist
|
||||||
|
branch: main
|
||||||
|
# Optional: Enable this if you want to have GitHub Deployments triggered
|
||||||
|
gitHubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||||
91
.github/workflows/jan-docs.yml
vendored
Normal file
91
.github/workflows/jan-docs.yml
vendored
Normal file
@ -0,0 +1,91 @@
|
|||||||
|
name: Jan Docs
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- dev
|
||||||
|
paths:
|
||||||
|
- 'docs/**'
|
||||||
|
- '.github/workflows/jan-docs.yml'
|
||||||
|
pull_request:
|
||||||
|
paths:
|
||||||
|
- 'docs/**'
|
||||||
|
- '.github/workflows/jan-docs.yml'
|
||||||
|
# Review gh actions docs if you want to further define triggers, paths, etc
|
||||||
|
# https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#on
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
deploy:
|
||||||
|
name: Deploy to CloudFlare Pages
|
||||||
|
env:
|
||||||
|
CLOUDFLARE_PROJECT_NAME: docs
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
deployments: write
|
||||||
|
pull-requests: write
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Fill env vars
|
||||||
|
working-directory: docs
|
||||||
|
run: |
|
||||||
|
env_example_file=".env.example"
|
||||||
|
touch .env
|
||||||
|
while IFS= read -r line || [[ -n "$line" ]]; do
|
||||||
|
if [[ "$line" == *"="* ]]; then
|
||||||
|
var_name=$(echo $line | cut -d '=' -f 1)
|
||||||
|
echo $var_name
|
||||||
|
var_value="$(jq -r --arg key "$var_name" '.[$key]' <<< "$SECRETS")"
|
||||||
|
echo "$var_name=$var_value" >> .env
|
||||||
|
fi
|
||||||
|
done < "$env_example_file"
|
||||||
|
env:
|
||||||
|
SECRETS: '${{ toJson(secrets) }}'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
working-directory: docs
|
||||||
|
run: yarn install
|
||||||
|
- name: Clean output directory
|
||||||
|
working-directory: docs
|
||||||
|
run: rm -rf out/* .next/*
|
||||||
|
- name: Build website
|
||||||
|
working-directory: docs
|
||||||
|
run: export NODE_ENV=production && yarn build && cp _redirects out/_redirects && cp _headers out/_headers
|
||||||
|
|
||||||
|
- name: Publish to Cloudflare Pages PR Preview and Staging
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
uses: cloudflare/pages-action@v1
|
||||||
|
with:
|
||||||
|
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
|
||||||
|
directory: ./docs/out
|
||||||
|
# Optional: Enable this if you want to have GitHub Deployments triggered
|
||||||
|
gitHubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
id: deployCloudflarePages
|
||||||
|
|
||||||
|
- uses: mshick/add-pr-comment@v2
|
||||||
|
if: github.event_name == 'pull_request'
|
||||||
|
with:
|
||||||
|
message: |
|
||||||
|
Preview URL: ${{ steps.deployCloudflarePages.outputs.url }}
|
||||||
|
|
||||||
|
- name: Publish to Cloudflare Pages Production
|
||||||
|
if: (github.event_name == 'push' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && github.ref == 'refs/heads/dev') || (github.event_name == 'workflow_dispatch' && startsWith(github.ref, 'refs/heads/release/'))
|
||||||
|
uses: cloudflare/pages-action@v1
|
||||||
|
with:
|
||||||
|
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
|
||||||
|
directory: ./docs/out
|
||||||
|
branch: main
|
||||||
|
# Optional: Enable this if you want to have GitHub Deployments triggered
|
||||||
|
gitHubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||||
156
.github/workflows/jan-electron-build-nightly.yml
vendored
156
.github/workflows/jan-electron-build-nightly.yml
vendored
@ -1,156 +0,0 @@
|
|||||||
name: Electron Builder - Nightly / Manual
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
public_provider:
|
|
||||||
type: choice
|
|
||||||
description: 'Public Provider'
|
|
||||||
options:
|
|
||||||
- none
|
|
||||||
- aws-s3
|
|
||||||
default: none
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
set-public-provider:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
outputs:
|
|
||||||
public_provider: ${{ steps.set-public-provider.outputs.public_provider }}
|
|
||||||
ref: ${{ steps.set-public-provider.outputs.ref }}
|
|
||||||
steps:
|
|
||||||
- name: Set public provider
|
|
||||||
id: set-public-provider
|
|
||||||
run: |
|
|
||||||
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
|
|
||||||
echo "::set-output name=public_provider::${{ github.event.inputs.public_provider }}"
|
|
||||||
echo "::set-output name=ref::${{ github.ref }}"
|
|
||||||
else
|
|
||||||
if [ "${{ github.event_name }}" == "schedule" ]; then
|
|
||||||
echo "::set-output name=public_provider::aws-s3"
|
|
||||||
echo "::set-output name=ref::refs/heads/dev"
|
|
||||||
elif [ "${{ github.event_name }}" == "push" ]; then
|
|
||||||
echo "::set-output name=public_provider::aws-s3"
|
|
||||||
echo "::set-output name=ref::${{ github.ref }}"
|
|
||||||
else
|
|
||||||
echo "::set-output name=public_provider::none"
|
|
||||||
echo "::set-output name=ref::${{ github.ref }}"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
# Job create Update app version based on latest release tag with build number and save to output
|
|
||||||
get-update-version:
|
|
||||||
uses: ./.github/workflows/template-get-update-version.yml
|
|
||||||
|
|
||||||
build-macos-x64:
|
|
||||||
uses: ./.github/workflows/template-build-macos-x64.yml
|
|
||||||
needs: [get-update-version, set-public-provider]
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
build-macos-arm64:
|
|
||||||
uses: ./.github/workflows/template-build-macos-arm64.yml
|
|
||||||
needs: [get-update-version, set-public-provider]
|
|
||||||
secrets: inherit
|
|
||||||
with:
|
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
build-windows-x64:
|
|
||||||
uses: ./.github/workflows/template-build-windows-x64.yml
|
|
||||||
secrets: inherit
|
|
||||||
needs: [get-update-version, set-public-provider]
|
|
||||||
with:
|
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
|
|
||||||
build-linux-x64:
|
|
||||||
uses: ./.github/workflows/template-build-linux-x64.yml
|
|
||||||
secrets: inherit
|
|
||||||
needs: [get-update-version, set-public-provider]
|
|
||||||
with:
|
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
|
||||||
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
combine-latest-mac-yml:
|
|
||||||
needs: [set-public-provider, build-macos-x64, build-macos-arm64]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
ref: ${{ needs.set-public-provider.outputs.ref }}
|
|
||||||
- name: Download mac-x64 artifacts
|
|
||||||
uses: actions/download-artifact@v3
|
|
||||||
with:
|
|
||||||
name: latest-mac-x64
|
|
||||||
path: ./latest-mac-x64
|
|
||||||
- name: Download mac-arm artifacts
|
|
||||||
uses: actions/download-artifact@v3
|
|
||||||
with:
|
|
||||||
name: latest-mac-arm64
|
|
||||||
path: ./latest-mac-arm64
|
|
||||||
|
|
||||||
- name: 'Merge latest-mac.yml'
|
|
||||||
# unfortunately electron-builder doesn't understand that we have two different releases for mac-x64 and mac-arm, so we need to manually merge the latest files
|
|
||||||
# see https://github.com/electron-userland/electron-builder/issues/5592
|
|
||||||
run: |
|
|
||||||
ls -la .
|
|
||||||
ls -la ./latest-mac-x64
|
|
||||||
ls -la ./latest-mac-arm64
|
|
||||||
ls -la ./electron
|
|
||||||
cp ./electron/merge-latest-ymls.js /tmp/merge-latest-ymls.js
|
|
||||||
npm install js-yaml --prefix /tmp
|
|
||||||
node /tmp/merge-latest-ymls.js ./latest-mac-x64/latest-mac.yml ./latest-mac-arm64/latest-mac.yml ./latest-mac.yml
|
|
||||||
cat ./latest-mac.yml
|
|
||||||
|
|
||||||
- name: Upload latest-mac.yml
|
|
||||||
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
|
|
||||||
run: |
|
|
||||||
aws s3 cp ./latest-mac.yml "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/latest/latest-mac.yml"
|
|
||||||
env:
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
|
||||||
AWS_EC2_METADATA_DISABLED: "true"
|
|
||||||
|
|
||||||
|
|
||||||
noti-discord-nightly-and-update-url-readme:
|
|
||||||
needs: [build-macos-x64, build-macos-arm64, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, combine-latest-mac-yml]
|
|
||||||
secrets: inherit
|
|
||||||
if: github.event_name == 'schedule'
|
|
||||||
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
|
||||||
with:
|
|
||||||
ref: refs/heads/dev
|
|
||||||
build_reason: Nightly
|
|
||||||
push_to_branch: dev
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
noti-discord-pre-release-and-update-url-readme:
|
|
||||||
needs: [build-macos-x64, build-macos-arm64, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, combine-latest-mac-yml]
|
|
||||||
secrets: inherit
|
|
||||||
if: github.event_name == 'push'
|
|
||||||
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
|
||||||
with:
|
|
||||||
ref: refs/heads/dev
|
|
||||||
build_reason: Pre-release
|
|
||||||
push_to_branch: dev
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
noti-discord-manual-and-update-url-readme:
|
|
||||||
needs: [build-macos-x64, build-macos-arm64, build-windows-x64, build-linux-x64, get-update-version, set-public-provider, combine-latest-mac-yml]
|
|
||||||
secrets: inherit
|
|
||||||
if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
|
|
||||||
uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
|
||||||
with:
|
|
||||||
ref: refs/heads/dev
|
|
||||||
build_reason: Manual
|
|
||||||
push_to_branch: dev
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
138
.github/workflows/jan-electron-build.yml
vendored
138
.github/workflows/jan-electron-build.yml
vendored
@ -1,138 +0,0 @@
|
|||||||
name: Electron Builder - Tag
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
tags: ["v[0-9]+.[0-9]+.[0-9]+"]
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
# Job create Update app version based on latest release tag with build number and save to output
|
|
||||||
get-update-version:
|
|
||||||
uses: ./.github/workflows/template-get-update-version.yml
|
|
||||||
|
|
||||||
create-draft-release:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
|
||||||
outputs:
|
|
||||||
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
|
||||||
version: ${{ steps.get_version.outputs.version }}
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
steps:
|
|
||||||
- name: Extract tag name without v prefix
|
|
||||||
id: get_version
|
|
||||||
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
|
|
||||||
env:
|
|
||||||
GITHUB_REF: ${{ github.ref }}
|
|
||||||
- name: Create Draft Release
|
|
||||||
id: create_release
|
|
||||||
uses: softprops/action-gh-release@v2
|
|
||||||
with:
|
|
||||||
tag_name: ${{ github.ref_name }}
|
|
||||||
token: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
name: "${{ env.VERSION }}"
|
|
||||||
draft: true
|
|
||||||
prerelease: false
|
|
||||||
|
|
||||||
build-macos-x64:
|
|
||||||
uses: ./.github/workflows/template-build-macos-x64.yml
|
|
||||||
secrets: inherit
|
|
||||||
needs: [get-update-version]
|
|
||||||
with:
|
|
||||||
ref: ${{ github.ref }}
|
|
||||||
public_provider: github
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
build-macos-arm64:
|
|
||||||
uses: ./.github/workflows/template-build-macos-arm64.yml
|
|
||||||
secrets: inherit
|
|
||||||
needs: [get-update-version]
|
|
||||||
with:
|
|
||||||
ref: ${{ github.ref }}
|
|
||||||
public_provider: github
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
build-windows-x64:
|
|
||||||
uses: ./.github/workflows/template-build-windows-x64.yml
|
|
||||||
secrets: inherit
|
|
||||||
needs: [get-update-version]
|
|
||||||
with:
|
|
||||||
ref: ${{ github.ref }}
|
|
||||||
public_provider: github
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
build-linux-x64:
|
|
||||||
uses: ./.github/workflows/template-build-linux-x64.yml
|
|
||||||
secrets: inherit
|
|
||||||
needs: [get-update-version]
|
|
||||||
with:
|
|
||||||
ref: ${{ github.ref }}
|
|
||||||
public_provider: github
|
|
||||||
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
|
||||||
|
|
||||||
combine-latest-mac-yml:
|
|
||||||
needs: [build-macos-x64, build-macos-arm64, create-draft-release]
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
|
|
||||||
- name: Download mac-x64 artifacts
|
|
||||||
uses: actions/download-artifact@v3
|
|
||||||
with:
|
|
||||||
name: latest-mac-x64
|
|
||||||
path: ./latest-mac-x64
|
|
||||||
- name: Download mac-arm artifacts
|
|
||||||
uses: actions/download-artifact@v3
|
|
||||||
with:
|
|
||||||
name: latest-mac-arm64
|
|
||||||
path: ./latest-mac-arm64
|
|
||||||
|
|
||||||
- name: 'Merge latest-mac.yml'
|
|
||||||
# unfortunately electron-builder doesn't understand that we have two different releases for mac-x64 and mac-arm, so we need to manually merge the latest files
|
|
||||||
# see https://github.com/electron-userland/electron-builder/issues/5592
|
|
||||||
run: |
|
|
||||||
ls -la .
|
|
||||||
ls -la ./latest-mac-x64
|
|
||||||
ls -la ./latest-mac-arm64
|
|
||||||
ls -la ./electron
|
|
||||||
cp ./electron/merge-latest-ymls.js /tmp/merge-latest-ymls.js
|
|
||||||
npm install js-yaml --prefix /tmp
|
|
||||||
node /tmp/merge-latest-ymls.js ./latest-mac-x64/latest-mac.yml ./latest-mac-arm64/latest-mac.yml ./latest-mac.yml
|
|
||||||
cat ./latest-mac.yml
|
|
||||||
|
|
||||||
- name: Yet Another Upload Release Asset Action
|
|
||||||
uses: shogo82148/actions-upload-release-asset@v1.7.2
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
with:
|
|
||||||
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
|
||||||
asset_path: ./latest-mac.yml
|
|
||||||
asset_name: latest-mac.yml
|
|
||||||
asset_content_type: text/yaml
|
|
||||||
overwrite: true
|
|
||||||
|
|
||||||
update_release_draft:
|
|
||||||
needs: [build-macos-x64, build-macos-arm64, build-windows-x64, build-linux-x64, combine-latest-mac-yml]
|
|
||||||
permissions:
|
|
||||||
# write permission is required to create a github release
|
|
||||||
contents: write
|
|
||||||
# write permission is required for autolabeler
|
|
||||||
# otherwise, read permission is required at least
|
|
||||||
pull-requests: write
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
# (Optional) GitHub Enterprise requires GHE_HOST variable set
|
|
||||||
#- name: Set GHE_HOST
|
|
||||||
# run: |
|
|
||||||
# echo "GHE_HOST=${GITHUB_SERVER_URL##https:\/\/}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
# Drafts your next Release notes as Pull Requests are merged into "master"
|
|
||||||
- uses: release-drafter/release-drafter@v5
|
|
||||||
# (Optional) specify config name to use, relative to .github/. Default: release-drafter.yml
|
|
||||||
# with:
|
|
||||||
# config-name: my-config.yml
|
|
||||||
# disable-autolabeler: true
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
321
.github/workflows/jan-electron-linter-and-test.yml
vendored
321
.github/workflows/jan-electron-linter-and-test.yml
vendored
@ -1,321 +0,0 @@
|
|||||||
name: Test - Linter & Playwright
|
|
||||||
on:
|
|
||||||
workflow_dispatch:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- dev
|
|
||||||
paths:
|
|
||||||
- 'electron/**'
|
|
||||||
- .github/workflows/jan-electron-linter-and-test.yml
|
|
||||||
- 'web/**'
|
|
||||||
- 'joi/**'
|
|
||||||
- 'package.json'
|
|
||||||
- 'node_modules/**'
|
|
||||||
- 'yarn.lock'
|
|
||||||
- 'core/**'
|
|
||||||
- 'extensions/**'
|
|
||||||
- '!README.md'
|
|
||||||
- 'Makefile'
|
|
||||||
|
|
||||||
pull_request:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- dev
|
|
||||||
- release/**
|
|
||||||
paths:
|
|
||||||
- 'electron/**'
|
|
||||||
- .github/workflows/jan-electron-linter-and-test.yml
|
|
||||||
- 'web/**'
|
|
||||||
- 'joi/**'
|
|
||||||
- 'package.json'
|
|
||||||
- 'node_modules/**'
|
|
||||||
- 'yarn.lock'
|
|
||||||
- 'Makefile'
|
|
||||||
- 'extensions/**'
|
|
||||||
- 'core/**'
|
|
||||||
- '!README.md'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
test-on-macos:
|
|
||||||
if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
|
|
||||||
runs-on: [self-hosted, macOS, macos-desktop]
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v3
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
- name: 'Cleanup cache'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
rm -rf ~/jan
|
|
||||||
make clean
|
|
||||||
|
|
||||||
- name: Get Commit Message for PR
|
|
||||||
if: github.event_name == 'pull_request'
|
|
||||||
run: |
|
|
||||||
echo "REPORT_PORTAL_DESCRIPTION=${{github.event.after}})" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Get Commit Message for push event
|
|
||||||
if: github.event_name == 'push'
|
|
||||||
run: |
|
|
||||||
echo "REPORT_PORTAL_DESCRIPTION=${{github.sha}})" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: 'Config report portal'
|
|
||||||
run: |
|
|
||||||
make update-playwright-config REPORT_PORTAL_URL=${{ secrets.REPORT_PORTAL_URL }} REPORT_PORTAL_API_KEY=${{ secrets.REPORT_PORTAL_API_KEY }} REPORT_PORTAL_PROJECT_NAME=${{ secrets.REPORT_PORTAL_PROJECT_NAME }} REPORT_PORTAL_LAUNCH_NAME="Jan App macos" REPORT_PORTAL_DESCRIPTION="${{env.REPORT_PORTAL_DESCRIPTION}}"
|
|
||||||
|
|
||||||
- name: Linter and test
|
|
||||||
run: |
|
|
||||||
npm config set registry ${{ secrets.NPM_PROXY }} --global
|
|
||||||
yarn config set registry ${{ secrets.NPM_PROXY }} --global
|
|
||||||
make test
|
|
||||||
env:
|
|
||||||
CSC_IDENTITY_AUTO_DISCOVERY: 'false'
|
|
||||||
TURBO_API: '${{ secrets.TURBO_API }}'
|
|
||||||
TURBO_TEAM: 'macos'
|
|
||||||
TURBO_TOKEN: '${{ secrets.TURBO_TOKEN }}'
|
|
||||||
|
|
||||||
test-on-macos-pr-target:
|
|
||||||
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository
|
|
||||||
runs-on: [self-hosted, macOS, macos-desktop]
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v3
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
- name: 'Cleanup cache'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
rm -rf ~/jan
|
|
||||||
make clean
|
|
||||||
|
|
||||||
- name: Linter and test
|
|
||||||
run: |
|
|
||||||
npm config set registry https://registry.npmjs.org --global
|
|
||||||
yarn config set registry https://registry.npmjs.org --global
|
|
||||||
make test
|
|
||||||
env:
|
|
||||||
CSC_IDENTITY_AUTO_DISCOVERY: 'false'
|
|
||||||
|
|
||||||
test-on-windows:
|
|
||||||
if: github.event_name == 'push'
|
|
||||||
strategy:
|
|
||||||
fail-fast: false
|
|
||||||
matrix:
|
|
||||||
antivirus-tools: ['mcafee', 'default-windows-security', 'bit-defender']
|
|
||||||
runs-on: windows-desktop-${{ matrix.antivirus-tools }}
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v3
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
# Clean cache, continue on error
|
|
||||||
- name: 'Cleanup cache'
|
|
||||||
shell: powershell
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
$path = "$Env:APPDATA\jan"
|
|
||||||
if (Test-Path $path) {
|
|
||||||
Remove-Item "\\?\$path" -Recurse -Force
|
|
||||||
} else {
|
|
||||||
Write-Output "Folder does not exist."
|
|
||||||
}
|
|
||||||
make clean
|
|
||||||
|
|
||||||
- name: Get Commit Message for push event
|
|
||||||
if: github.event_name == 'push'
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "REPORT_PORTAL_DESCRIPTION=${{github.sha}}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: 'Config report portal'
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
make update-playwright-config REPORT_PORTAL_URL=${{ secrets.REPORT_PORTAL_URL }} REPORT_PORTAL_API_KEY=${{ secrets.REPORT_PORTAL_API_KEY }} REPORT_PORTAL_PROJECT_NAME=${{ secrets.REPORT_PORTAL_PROJECT_NAME }} REPORT_PORTAL_LAUNCH_NAME="Jan App Windows ${{ matrix.antivirus-tools }}" REPORT_PORTAL_DESCRIPTION="${{env.REPORT_PORTAL_DESCRIPTION}}"
|
|
||||||
|
|
||||||
- name: Linter and test
|
|
||||||
shell: powershell
|
|
||||||
run: |
|
|
||||||
npm config set registry ${{ secrets.NPM_PROXY }} --global
|
|
||||||
yarn config set registry ${{ secrets.NPM_PROXY }} --global
|
|
||||||
make test
|
|
||||||
env:
|
|
||||||
TURBO_API: '${{ secrets.TURBO_API }}'
|
|
||||||
TURBO_TEAM: 'windows'
|
|
||||||
TURBO_TOKEN: '${{ secrets.TURBO_TOKEN }}'
|
|
||||||
test-on-windows-pr:
|
|
||||||
if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository)
|
|
||||||
runs-on: windows-desktop-default-windows-security
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v1
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
# Clean cache, continue on error
|
|
||||||
- name: 'Cleanup cache'
|
|
||||||
shell: powershell
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
$path = "$Env:APPDATA\jan"
|
|
||||||
if (Test-Path $path) {
|
|
||||||
Remove-Item "\\?\$path" -Recurse -Force
|
|
||||||
} else {
|
|
||||||
Write-Output "Folder does not exist."
|
|
||||||
}
|
|
||||||
make clean
|
|
||||||
|
|
||||||
- name: Get Commit Message for PR
|
|
||||||
if: github.event_name == 'pull_request'
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "REPORT_PORTAL_DESCRIPTION=${{github.event.after}}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: 'Config report portal'
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
make update-playwright-config REPORT_PORTAL_URL=${{ secrets.REPORT_PORTAL_URL }} REPORT_PORTAL_API_KEY=${{ secrets.REPORT_PORTAL_API_KEY }} REPORT_PORTAL_PROJECT_NAME=${{ secrets.REPORT_PORTAL_PROJECT_NAME }} REPORT_PORTAL_LAUNCH_NAME="Jan App Windows" REPORT_PORTAL_DESCRIPTION="${{env.REPORT_PORTAL_DESCRIPTION}}"
|
|
||||||
|
|
||||||
- name: Linter and test
|
|
||||||
shell: powershell
|
|
||||||
run: |
|
|
||||||
npm config set registry ${{ secrets.NPM_PROXY }} --global
|
|
||||||
yarn config set registry ${{ secrets.NPM_PROXY }} --global
|
|
||||||
make test
|
|
||||||
env:
|
|
||||||
TURBO_API: '${{ secrets.TURBO_API }}'
|
|
||||||
TURBO_TEAM: 'windows'
|
|
||||||
TURBO_TOKEN: '${{ secrets.TURBO_TOKEN }}'
|
|
||||||
|
|
||||||
test-on-windows-pr-target:
|
|
||||||
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository
|
|
||||||
runs-on: windows-desktop-default-windows-security
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v1
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
# Clean cache, continue on error
|
|
||||||
- name: 'Cleanup cache'
|
|
||||||
shell: powershell
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
$path = "$Env:APPDATA\jan"
|
|
||||||
if (Test-Path $path) {
|
|
||||||
Remove-Item "\\?\$path" -Recurse -Force
|
|
||||||
} else {
|
|
||||||
Write-Output "Folder does not exist."
|
|
||||||
}
|
|
||||||
make clean
|
|
||||||
|
|
||||||
- name: Linter and test
|
|
||||||
shell: powershell
|
|
||||||
run: |
|
|
||||||
npm config set registry https://registry.npmjs.org --global
|
|
||||||
yarn config set registry https://registry.npmjs.org --global
|
|
||||||
make test
|
|
||||||
|
|
||||||
test-on-ubuntu:
|
|
||||||
runs-on: [self-hosted, Linux, ubuntu-desktop]
|
|
||||||
if: (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository) || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v3
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
- name: 'Cleanup cache'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
rm -rf ~/jan
|
|
||||||
make clean
|
|
||||||
|
|
||||||
- name: Get Commit Message for PR
|
|
||||||
if: github.event_name == 'pull_request'
|
|
||||||
run: |
|
|
||||||
echo "REPORT_PORTAL_DESCRIPTION=${{github.event.after}}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: Get Commit Message for push event
|
|
||||||
if: github.event_name == 'push'
|
|
||||||
run: |
|
|
||||||
echo "REPORT_PORTAL_DESCRIPTION=${{github.sha}}" >> $GITHUB_ENV
|
|
||||||
|
|
||||||
- name: 'Config report portal'
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
make update-playwright-config REPORT_PORTAL_URL=${{ secrets.REPORT_PORTAL_URL }} REPORT_PORTAL_API_KEY=${{ secrets.REPORT_PORTAL_API_KEY }} REPORT_PORTAL_PROJECT_NAME=${{ secrets.REPORT_PORTAL_PROJECT_NAME }} REPORT_PORTAL_LAUNCH_NAME="Jan App Linux" REPORT_PORTAL_DESCRIPTION="${{env.REPORT_PORTAL_DESCRIPTION}}"
|
|
||||||
|
|
||||||
- name: Linter and test
|
|
||||||
run: |
|
|
||||||
export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
|
|
||||||
echo -e "Display ID: $DISPLAY"
|
|
||||||
npm config set registry ${{ secrets.NPM_PROXY }} --global
|
|
||||||
yarn config set registry ${{ secrets.NPM_PROXY }} --global
|
|
||||||
make test
|
|
||||||
env:
|
|
||||||
TURBO_API: '${{ secrets.TURBO_API }}'
|
|
||||||
TURBO_TEAM: 'linux'
|
|
||||||
TURBO_TOKEN: '${{ secrets.TURBO_TOKEN }}'
|
|
||||||
|
|
||||||
test-on-ubuntu-pr-target:
|
|
||||||
runs-on: [self-hosted, Linux, ubuntu-desktop]
|
|
||||||
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
fetch-depth: 0
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v3
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
- name: 'Cleanup cache'
|
|
||||||
continue-on-error: true
|
|
||||||
run: |
|
|
||||||
rm -rf ~/jan
|
|
||||||
make clean
|
|
||||||
|
|
||||||
- name: Linter and test
|
|
||||||
run: |
|
|
||||||
export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
|
|
||||||
echo -e "Display ID: $DISPLAY"
|
|
||||||
npm config set registry https://registry.npmjs.org --global
|
|
||||||
yarn config set registry https://registry.npmjs.org --global
|
|
||||||
make test
|
|
||||||
257
.github/workflows/jan-linter-and-test.yml
vendored
Normal file
257
.github/workflows/jan-linter-and-test.yml
vendored
Normal file
@ -0,0 +1,257 @@
|
|||||||
|
name: Linter & Test
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- dev
|
||||||
|
paths:
|
||||||
|
- .github/workflows/jan-linter-and-test.yml
|
||||||
|
- 'web/**'
|
||||||
|
- 'joi/**'
|
||||||
|
- 'package.json'
|
||||||
|
- 'node_modules/**'
|
||||||
|
- 'yarn.lock'
|
||||||
|
- 'core/**'
|
||||||
|
- 'extensions/**'
|
||||||
|
- '!README.md'
|
||||||
|
- 'Makefile'
|
||||||
|
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- dev
|
||||||
|
- release/**
|
||||||
|
paths:
|
||||||
|
- .github/workflows/jan-linter-and-test.yml
|
||||||
|
- 'web/**'
|
||||||
|
- 'joi/**'
|
||||||
|
- 'package.json'
|
||||||
|
- 'node_modules/**'
|
||||||
|
- 'yarn.lock'
|
||||||
|
- 'Makefile'
|
||||||
|
- 'extensions/**'
|
||||||
|
- 'core/**'
|
||||||
|
- 'src-tauri/**'
|
||||||
|
- 'web-app/**'
|
||||||
|
- '!README.md'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
base_branch_cov:
|
||||||
|
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
continue-on-error: true
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ github.base_ref }}
|
||||||
|
- name: Use Node.js 20.x
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: 'Cleanup cache'
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
rm -rf ~/jan
|
||||||
|
make clean
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
make lint
|
||||||
|
|
||||||
|
- name: Run test coverage
|
||||||
|
run: |
|
||||||
|
yarn test:coverage
|
||||||
|
|
||||||
|
- name: Upload code coverage for ref branch
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ref-lcov.info
|
||||||
|
path: coverage/lcov.info
|
||||||
|
|
||||||
|
test-on-macos:
|
||||||
|
runs-on: ${{ (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) && 'macos-latest' || 'macos-selfhosted-15-arm64' }}
|
||||||
|
if: github.event_name == 'pull_request' || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: 'Cleanup cache'
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
rm -rf ~/jan
|
||||||
|
make clean
|
||||||
|
|
||||||
|
- name: Linter and test
|
||||||
|
run: |
|
||||||
|
make test
|
||||||
|
env:
|
||||||
|
CSC_IDENTITY_AUTO_DISCOVERY: 'false'
|
||||||
|
|
||||||
|
test-on-windows:
|
||||||
|
if: github.event_name == 'push'
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
antivirus-tools: ['mcafee', 'default-windows-security', 'bit-defender']
|
||||||
|
runs-on: windows-desktop-${{ matrix.antivirus-tools }}
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
# Clean cache, continue on error
|
||||||
|
- name: 'Cleanup cache'
|
||||||
|
shell: powershell
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
$path = "$Env:APPDATA\jan"
|
||||||
|
if (Test-Path $path) {
|
||||||
|
Remove-Item "\\?\$path" -Recurse -Force
|
||||||
|
} else {
|
||||||
|
Write-Output "Folder does not exist."
|
||||||
|
}
|
||||||
|
make clean
|
||||||
|
|
||||||
|
- name: Linter and test
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
make test
|
||||||
|
|
||||||
|
test-on-windows-pr:
|
||||||
|
if: github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch'
|
||||||
|
runs-on: 'windows-latest'
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: install dependencies
|
||||||
|
run: |
|
||||||
|
choco install --yes --no-progress make
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: 'Cleanup cache'
|
||||||
|
shell: powershell
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
$path = "$Env:APPDATA\jan"
|
||||||
|
if (Test-Path $path) {
|
||||||
|
Remove-Item "\\?\$path" -Recurse -Force
|
||||||
|
} else {
|
||||||
|
Write-Output "Folder does not exist."
|
||||||
|
}
|
||||||
|
make clean
|
||||||
|
|
||||||
|
- name: Install WebView2 Runtime (Bootstrapper)
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
Invoke-WebRequest -Uri 'https://go.microsoft.com/fwlink/p/?LinkId=2124703' -OutFile 'setup.exe'
|
||||||
|
Start-Process -FilePath setup.exe -Verb RunAs -Wait
|
||||||
|
|
||||||
|
- name: Linter and test
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
make test
|
||||||
|
env:
|
||||||
|
NODE_OPTIONS: '--max-old-space-size=2048'
|
||||||
|
|
||||||
|
test-on-ubuntu:
|
||||||
|
runs-on: ${{ (github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name != github.repository) && 'ubuntu-latest' || 'ubuntu-latest' }}
|
||||||
|
if: github.event_name == 'pull_request' || github.event_name == 'push' || github.event_name == 'workflow_dispatch'
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install Tauri dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2 webkit2gtk-driver
|
||||||
|
|
||||||
|
- name: 'Cleanup cache'
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
rm -rf ~/jan
|
||||||
|
make clean
|
||||||
|
|
||||||
|
- name: Linter and test
|
||||||
|
run: |
|
||||||
|
export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
|
||||||
|
echo -e "Display ID: $DISPLAY"
|
||||||
|
make test
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@v4
|
||||||
|
if: always()
|
||||||
|
with:
|
||||||
|
name: playwright-report
|
||||||
|
path: electron/playwright-report/
|
||||||
|
retention-days: 2
|
||||||
|
|
||||||
|
coverage-check:
|
||||||
|
if: github.event_name == 'pull_request' && github.event.pull_request.head.repo.full_name == github.repository
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: base_branch_cov
|
||||||
|
continue-on-error: true
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
- name: 'Cleanup cache'
|
||||||
|
continue-on-error: true
|
||||||
|
run: |
|
||||||
|
rm -rf ~/jan
|
||||||
|
make clean
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
make lint
|
||||||
|
|
||||||
|
- name: Run test coverage
|
||||||
|
run: |
|
||||||
|
yarn test:coverage
|
||||||
|
|
||||||
|
- name: Download code coverage report from base branch
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
name: ref-lcov.info
|
||||||
|
- name: Generate Code Coverage report
|
||||||
|
id: code-coverage
|
||||||
|
uses: barecheck/code-coverage-action@v1
|
||||||
|
with:
|
||||||
|
github-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
lcov-file: './coverage/lcov.info'
|
||||||
|
base-lcov-file: './lcov.info'
|
||||||
|
send-summary-comment: true
|
||||||
|
show-annotations: 'warning'
|
||||||
60
.github/workflows/jan-server-web-ci-dev.yml
vendored
Normal file
60
.github/workflows/jan-server-web-ci-dev.yml
vendored
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
name: Jan Web Server build image and push to Harbor Registry
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- dev-web
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- dev-web
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-preview:
|
||||||
|
runs-on: [ubuntu-24-04-docker]
|
||||||
|
env:
|
||||||
|
MENLO_PLATFORM_BASE_URL: "https://api-dev.jan.ai/v1"
|
||||||
|
permissions:
|
||||||
|
pull-requests: write
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Checkout source repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Login to Harbor Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: registry.menlo.ai
|
||||||
|
username: ${{ secrets.HARBOR_USERNAME }}
|
||||||
|
password: ${{ secrets.HARBOR_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
(type -p wget >/dev/null || (sudo apt update && sudo apt install wget -y)) \
|
||||||
|
&& sudo mkdir -p -m 755 /etc/apt/keyrings \
|
||||||
|
&& out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \
|
||||||
|
&& cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
|
||||||
|
&& sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
|
||||||
|
&& sudo mkdir -p -m 755 /etc/apt/sources.list.d \
|
||||||
|
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
|
||||||
|
&& sudo apt update
|
||||||
|
sudo apt-get install -y jq gettext
|
||||||
|
|
||||||
|
- name: Set image tag
|
||||||
|
id: vars
|
||||||
|
run: |
|
||||||
|
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||||
|
IMAGE_TAG="web:preview-${{ github.sha }}"
|
||||||
|
else
|
||||||
|
IMAGE_TAG="web:dev-${{ github.sha }}"
|
||||||
|
fi
|
||||||
|
echo "IMAGE_TAG=${IMAGE_TAG}" >> $GITHUB_OUTPUT
|
||||||
|
echo "FULL_IMAGE=registry.menlo.ai/jan-server/${IMAGE_TAG}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Build docker image
|
||||||
|
run: |
|
||||||
|
docker build --build-arg MENLO_PLATFORM_BASE_URL=${{ env.MENLO_PLATFORM_BASE_URL }} -t ${{ steps.vars.outputs.FULL_IMAGE }} .
|
||||||
|
|
||||||
|
- name: Push docker image
|
||||||
|
if: github.event_name == 'push'
|
||||||
|
run: |
|
||||||
|
docker push ${{ steps.vars.outputs.FULL_IMAGE }}
|
||||||
58
.github/workflows/jan-server-web-ci-prod.yml
vendored
Normal file
58
.github/workflows/jan-server-web-ci-prod.yml
vendored
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
name: Jan Web Server deploy to production
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- prod-web
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
deployments: write
|
||||||
|
pull-requests: write
|
||||||
|
env:
|
||||||
|
MENLO_PLATFORM_BASE_URL: "https://api.jan.ai/v1"
|
||||||
|
GA_MEASUREMENT_ID: "G-YK53MX8M8M"
|
||||||
|
CLOUDFLARE_PROJECT_NAME: "jan-server-web"
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
# - name: Fill env vars
|
||||||
|
# run: |
|
||||||
|
# env_example_file=".env.example"
|
||||||
|
# touch .env
|
||||||
|
# while IFS= read -r line || [[ -n "$line" ]]; do
|
||||||
|
# if [[ "$line" == *"="* ]]; then
|
||||||
|
# var_name=$(echo $line | cut -d '=' -f 1)
|
||||||
|
# echo $var_name
|
||||||
|
# var_value="$(jq -r --arg key "$var_name" '.[$key]' <<< "$SECRETS")"
|
||||||
|
# echo "$var_name=$var_value" >> .env
|
||||||
|
# fi
|
||||||
|
# done < "$env_example_file"
|
||||||
|
# env:
|
||||||
|
# SECRETS: '${{ toJson(secrets) }}'
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: make config-yarn && yarn install && yarn build:core && make build-web-app
|
||||||
|
env:
|
||||||
|
MENLO_PLATFORM_BASE_URL: ${{ env.MENLO_PLATFORM_BASE_URL }}
|
||||||
|
GA_MEASUREMENT_ID: ${{ env.GA_MEASUREMENT_ID }}
|
||||||
|
|
||||||
|
- name: Publish to Cloudflare Pages Production
|
||||||
|
uses: cloudflare/pages-action@v1
|
||||||
|
with:
|
||||||
|
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
|
||||||
|
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
|
||||||
|
projectName: ${{ env.CLOUDFLARE_PROJECT_NAME }}
|
||||||
|
directory: ./web-app/dist-web
|
||||||
|
branch: main
|
||||||
|
# Optional: Enable this if you want to have GitHub Deployments triggered
|
||||||
|
gitHubToken: ${{ secrets.GITHUB_TOKEN }}
|
||||||
60
.github/workflows/jan-server-web-ci-stag.yml
vendored
Normal file
60
.github/workflows/jan-server-web-ci-stag.yml
vendored
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
name: Jan Web Server build image and push to Harbor Registry
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- stag-web
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- stag-web
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-and-preview:
|
||||||
|
runs-on: [ubuntu-24-04-docker]
|
||||||
|
env:
|
||||||
|
MENLO_PLATFORM_BASE_URL: "https://api-stag.jan.ai/v1"
|
||||||
|
permissions:
|
||||||
|
pull-requests: write
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Checkout source repo
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Login to Harbor Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: registry.menlo.ai
|
||||||
|
username: ${{ secrets.HARBOR_USERNAME }}
|
||||||
|
password: ${{ secrets.HARBOR_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
(type -p wget >/dev/null || (sudo apt update && sudo apt install wget -y)) \
|
||||||
|
&& sudo mkdir -p -m 755 /etc/apt/keyrings \
|
||||||
|
&& out=$(mktemp) && wget -nv -O$out https://cli.github.com/packages/githubcli-archive-keyring.gpg \
|
||||||
|
&& cat $out | sudo tee /etc/apt/keyrings/githubcli-archive-keyring.gpg > /dev/null \
|
||||||
|
&& sudo chmod go+r /etc/apt/keyrings/githubcli-archive-keyring.gpg \
|
||||||
|
&& sudo mkdir -p -m 755 /etc/apt/sources.list.d \
|
||||||
|
&& echo "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null \
|
||||||
|
&& sudo apt update
|
||||||
|
sudo apt-get install -y jq gettext
|
||||||
|
|
||||||
|
- name: Set image tag
|
||||||
|
id: vars
|
||||||
|
run: |
|
||||||
|
if [[ "${{ github.event_name }}" == "pull_request" ]]; then
|
||||||
|
IMAGE_TAG="web:preview-${{ github.sha }}"
|
||||||
|
else
|
||||||
|
IMAGE_TAG="web:stag-${{ github.sha }}"
|
||||||
|
fi
|
||||||
|
echo "IMAGE_TAG=${IMAGE_TAG}" >> $GITHUB_OUTPUT
|
||||||
|
echo "FULL_IMAGE=registry.menlo.ai/jan-server/${IMAGE_TAG}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Build docker image
|
||||||
|
run: |
|
||||||
|
docker build --build-arg MENLO_PLATFORM_BASE_URL=${{ env.MENLO_PLATFORM_BASE_URL }} -t ${{ steps.vars.outputs.FULL_IMAGE }} .
|
||||||
|
|
||||||
|
- name: Push docker image
|
||||||
|
if: github.event_name == 'push'
|
||||||
|
run: |
|
||||||
|
docker push ${{ steps.vars.outputs.FULL_IMAGE }}
|
||||||
156
.github/workflows/jan-tauri-build-beta.yml
vendored
Normal file
156
.github/workflows/jan-tauri-build-beta.yml
vendored
Normal file
@ -0,0 +1,156 @@
|
|||||||
|
name: Tauri Builder - Beta Build
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags: ["v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+-beta"]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Job create Update app version based on latest release tag with build number and save to output
|
||||||
|
get-update-version:
|
||||||
|
uses: ./.github/workflows/template-get-update-version.yml
|
||||||
|
create-draft-release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||||
|
outputs:
|
||||||
|
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||||
|
version: ${{ steps.get_version.outputs.version }}
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Extract tag name without v prefix
|
||||||
|
id: get_version
|
||||||
|
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
|
||||||
|
env:
|
||||||
|
GITHUB_REF: ${{ github.ref }}
|
||||||
|
- name: Create Draft Release
|
||||||
|
id: create_release
|
||||||
|
uses: softprops/action-gh-release@v2
|
||||||
|
with:
|
||||||
|
tag_name: ${{ github.ref_name }}
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
name: "${{ env.VERSION }}"
|
||||||
|
draft: true
|
||||||
|
prerelease: false
|
||||||
|
generate_release_notes: true
|
||||||
|
|
||||||
|
build-macos:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-macos.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, create-draft-release]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: github
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
channel: beta
|
||||||
|
cortex_api_port: "39271"
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
build-windows-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, create-draft-release]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: github
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
channel: beta
|
||||||
|
cortex_api_port: "39271"
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
build-linux-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, create-draft-release]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: github
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
channel: beta
|
||||||
|
cortex_api_port: "39271"
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
sync-temp-to-latest:
|
||||||
|
needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: create latest.json file
|
||||||
|
run: |
|
||||||
|
|
||||||
|
VERSION=${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
||||||
|
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
|
||||||
|
LINUX_URL="https://delta.jan.ai/beta/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
|
||||||
|
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
|
||||||
|
WINDOWS_URL="https://delta.jan.ai/beta/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
|
||||||
|
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
|
||||||
|
DARWIN_URL="https://delta.jan.ai/beta/Jan-beta_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
|
||||||
|
|
||||||
|
jq --arg version "$VERSION" \
|
||||||
|
--arg pub_date "$PUB_DATE" \
|
||||||
|
--arg linux_signature "$LINUX_SIGNATURE" \
|
||||||
|
--arg linux_url "$LINUX_URL" \
|
||||||
|
--arg windows_signature "$WINDOWS_SIGNATURE" \
|
||||||
|
--arg windows_url "$WINDOWS_URL" \
|
||||||
|
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_arm_url "$DARWIN_URL" \
|
||||||
|
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_amd_url "$DARWIN_URL" \
|
||||||
|
'.version = $version
|
||||||
|
| .pub_date = $pub_date
|
||||||
|
| .platforms["linux-x86_64"].signature = $linux_signature
|
||||||
|
| .platforms["linux-x86_64"].url = $linux_url
|
||||||
|
| .platforms["windows-x86_64"].signature = $windows_signature
|
||||||
|
| .platforms["windows-x86_64"].url = $windows_url
|
||||||
|
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
|
||||||
|
| .platforms["darwin-aarch64"].url = $darwin_arm_url
|
||||||
|
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
|
||||||
|
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
|
||||||
|
src-tauri/latest.json.template > latest.json
|
||||||
|
cat latest.json
|
||||||
|
- name: Sync temp to latest
|
||||||
|
run: |
|
||||||
|
# sync temp-beta to beta by copy files that are different or new
|
||||||
|
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/latest.json
|
||||||
|
aws s3 sync "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-beta/" "s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/beta/"
|
||||||
|
env:
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
|
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
||||||
|
AWS_EC2_METADATA_DISABLED: "true"
|
||||||
|
|
||||||
|
- name: Upload release assert if public provider is github
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
asset_path: ./latest.json
|
||||||
|
asset_name: latest.json
|
||||||
|
asset_content_type: text/json
|
||||||
|
|
||||||
|
noti-discord-and-update-url-readme:
|
||||||
|
needs: [build-macos, get-update-version, build-windows-x64, build-linux-x64, sync-temp-to-latest]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Set version to environment variable
|
||||||
|
run: |
|
||||||
|
VERSION=${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
VERSION="${VERSION#v}"
|
||||||
|
echo "VERSION=$VERSION" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
- name: Notify Discord
|
||||||
|
uses: Ilshidur/action-discord@master
|
||||||
|
with:
|
||||||
|
args: |
|
||||||
|
Jan-beta App version {{ VERSION }}, has been released, use the following links to download the app with faster speed or visit the Github release page for more information:
|
||||||
|
- Windows: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_x64-setup.exe
|
||||||
|
- macOS Universal: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_universal.dmg
|
||||||
|
- Linux Deb: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.deb
|
||||||
|
- Linux AppImage: https://delta.jan.ai/beta/Jan-beta_{{ VERSION }}_amd64.AppImage
|
||||||
|
env:
|
||||||
|
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK_JAN_BETA }}
|
||||||
20
.github/workflows/jan-tauri-build-flatpak.yaml
vendored
Normal file
20
.github/workflows/jan-tauri-build-flatpak.yaml
vendored
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
name: Tauri Builder Flatpak
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
version:
|
||||||
|
description: 'Version to build. For example: 0.6.8'
|
||||||
|
required: false
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
|
||||||
|
build-linux-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-linux-x64-flatpak.yml
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: none
|
||||||
|
channel: stable
|
||||||
|
new_version: ${{ inputs.version }}
|
||||||
|
disable_updater: true
|
||||||
46
.github/workflows/jan-tauri-build-nightly-external.yaml
vendored
Normal file
46
.github/workflows/jan-tauri-build-nightly-external.yaml
vendored
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
name: Tauri Builder - Nightly / External PRs
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- dev
|
||||||
|
paths:
|
||||||
|
- '.github/workflows/jan-tauri-build-nightly-external.yaml'
|
||||||
|
- '.github/workflows/template-tauri-build-*-external.yml'
|
||||||
|
- 'src-tauri/**'
|
||||||
|
- 'core/**'
|
||||||
|
- 'web-app/**'
|
||||||
|
- 'extensions/**'
|
||||||
|
- 'scripts/**'
|
||||||
|
- 'pre-install/**'
|
||||||
|
- 'Makefile'
|
||||||
|
- 'package.json'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
get-update-version:
|
||||||
|
uses: ./.github/workflows/template-get-update-version.yml
|
||||||
|
|
||||||
|
build-macos:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-macos-external.yml
|
||||||
|
needs: [get-update-version]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
channel: nightly
|
||||||
|
|
||||||
|
build-windows-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-windows-x64-external.yml
|
||||||
|
needs: [get-update-version]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
channel: nightly
|
||||||
|
|
||||||
|
build-linux-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-linux-x64-external.yml
|
||||||
|
needs: [get-update-version]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
channel: nightly
|
||||||
|
disable_updater: false
|
||||||
253
.github/workflows/jan-tauri-build-nightly.yaml
vendored
Normal file
253
.github/workflows/jan-tauri-build-nightly.yaml
vendored
Normal file
@ -0,0 +1,253 @@
|
|||||||
|
name: Tauri Builder - Nightly / Manual
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 20 * * 1,2,3' # At 8 PM UTC on Monday, Tuesday, and Wednesday which is 3 AM UTC+7 Tuesday, Wednesday, and Thursday
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
public_provider:
|
||||||
|
type: choice
|
||||||
|
description: 'Public Provider'
|
||||||
|
options:
|
||||||
|
- none
|
||||||
|
- aws-s3
|
||||||
|
default: none
|
||||||
|
disable_updater:
|
||||||
|
type: boolean
|
||||||
|
description: 'If true, builds both .deb and .appimage but disables auto-updater'
|
||||||
|
default: false
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- release/**
|
||||||
|
- dev
|
||||||
|
paths:
|
||||||
|
- '.github/workflows/jan-tauri-build-nightly.yaml'
|
||||||
|
- '.github/workflows/template-get-update-version.yml'
|
||||||
|
- '.github/workflows/template-tauri-build-macos.yml'
|
||||||
|
- '.github/workflows/template-tauri-build-windows-x64.yml'
|
||||||
|
- '.github/workflows/template-tauri-build-linux-x64.yml'
|
||||||
|
- '.github/workflows/template-noti-discord-and-update-url-readme.yml'
|
||||||
|
- 'src-tauri/**'
|
||||||
|
- 'core/**'
|
||||||
|
- 'web-app/**'
|
||||||
|
- 'extensions/**'
|
||||||
|
- 'scripts/**'
|
||||||
|
- 'pre-install/**'
|
||||||
|
- 'Makefile'
|
||||||
|
- 'package.json'
|
||||||
|
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
set-public-provider:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||||
|
outputs:
|
||||||
|
public_provider: ${{ steps.set-public-provider.outputs.public_provider }}
|
||||||
|
ref: ${{ steps.set-public-provider.outputs.ref }}
|
||||||
|
steps:
|
||||||
|
- name: Set public provider
|
||||||
|
id: set-public-provider
|
||||||
|
run: |
|
||||||
|
if [ "${{ github.event_name }}" == "workflow_dispatch" ]; then
|
||||||
|
echo "::set-output name=public_provider::${{ github.event.inputs.public_provider }}"
|
||||||
|
echo "::set-output name=ref::${{ github.ref }}"
|
||||||
|
else
|
||||||
|
if [ "${{ github.event_name }}" == "schedule" ]; then
|
||||||
|
echo "::set-output name=public_provider::aws-s3"
|
||||||
|
echo "::set-output name=ref::refs/heads/dev"
|
||||||
|
elif [ "${{ github.event_name }}" == "push" ]; then
|
||||||
|
echo "::set-output name=public_provider::aws-s3"
|
||||||
|
echo "::set-output name=ref::${{ github.ref }}"
|
||||||
|
elif [ "${{ github.event_name }}" == "pull_request_review" ]; then
|
||||||
|
echo "::set-output name=public_provider::none"
|
||||||
|
echo "::set-output name=ref::${{ github.ref }}"
|
||||||
|
else
|
||||||
|
echo "::set-output name=public_provider::none"
|
||||||
|
echo "::set-output name=ref::${{ github.ref }}"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
# Job create Update app version based on latest release tag with build number and save to output
|
||||||
|
get-update-version:
|
||||||
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||||
|
uses: ./.github/workflows/template-get-update-version.yml
|
||||||
|
|
||||||
|
build-macos:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-macos.yml
|
||||||
|
needs: [get-update-version, set-public-provider]
|
||||||
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||||
|
secrets: inherit
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.set-public-provider.outputs.ref }}
|
||||||
|
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
channel: nightly
|
||||||
|
cortex_api_port: '39261'
|
||||||
|
|
||||||
|
build-windows-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, set-public-provider]
|
||||||
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.set-public-provider.outputs.ref }}
|
||||||
|
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
channel: nightly
|
||||||
|
cortex_api_port: '39261'
|
||||||
|
build-linux-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, set-public-provider]
|
||||||
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||||
|
with:
|
||||||
|
ref: ${{ needs.set-public-provider.outputs.ref }}
|
||||||
|
public_provider: ${{ needs.set-public-provider.outputs.public_provider }}
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
channel: nightly
|
||||||
|
cortex_api_port: '39261'
|
||||||
|
disable_updater: ${{ github.event.inputs.disable_updater == 'true' }}
|
||||||
|
|
||||||
|
sync-temp-to-latest:
|
||||||
|
needs:
|
||||||
|
[
|
||||||
|
get-update-version,
|
||||||
|
set-public-provider,
|
||||||
|
build-windows-x64,
|
||||||
|
build-linux-x64,
|
||||||
|
build-macos,
|
||||||
|
]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
- name: create latest.json file
|
||||||
|
run: |
|
||||||
|
|
||||||
|
VERSION=${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
||||||
|
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
|
||||||
|
LINUX_URL="https://delta.jan.ai/nightly/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
|
||||||
|
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
|
||||||
|
WINDOWS_URL="https://delta.jan.ai/nightly/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
|
||||||
|
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
|
||||||
|
DARWIN_URL="https://delta.jan.ai/nightly/Jan-nightly_${{ needs.get-update-version.outputs.new_version }}.app.tar.gz"
|
||||||
|
|
||||||
|
jq --arg version "$VERSION" \
|
||||||
|
--arg pub_date "$PUB_DATE" \
|
||||||
|
--arg linux_signature "$LINUX_SIGNATURE" \
|
||||||
|
--arg linux_url "$LINUX_URL" \
|
||||||
|
--arg windows_signature "$WINDOWS_SIGNATURE" \
|
||||||
|
--arg windows_url "$WINDOWS_URL" \
|
||||||
|
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_arm_url "$DARWIN_URL" \
|
||||||
|
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_amd_url "$DARWIN_URL" \
|
||||||
|
'.version = $version
|
||||||
|
| .pub_date = $pub_date
|
||||||
|
| .platforms["linux-x86_64"].signature = $linux_signature
|
||||||
|
| .platforms["linux-x86_64"].url = $linux_url
|
||||||
|
| .platforms["windows-x86_64"].signature = $windows_signature
|
||||||
|
| .platforms["windows-x86_64"].url = $windows_url
|
||||||
|
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
|
||||||
|
| .platforms["darwin-aarch64"].url = $darwin_arm_url
|
||||||
|
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
|
||||||
|
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
|
||||||
|
src-tauri/latest.json.template > latest.json
|
||||||
|
cat latest.json
|
||||||
|
- name: Sync temp to latest
|
||||||
|
if: ${{ needs.set-public-provider.outputs.public_provider == 'aws-s3' }}
|
||||||
|
run: |
|
||||||
|
aws s3 cp ./latest.json s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/latest.json
|
||||||
|
aws s3 sync s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-nightly/ s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/nightly/
|
||||||
|
env:
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
|
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
||||||
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
|
|
||||||
|
# noti-discord-nightly-and-update-url-readme:
|
||||||
|
# needs:
|
||||||
|
# [
|
||||||
|
# build-macos,
|
||||||
|
# build-windows-x64,
|
||||||
|
# build-linux-x64,
|
||||||
|
# get-update-version,
|
||||||
|
# set-public-provider,
|
||||||
|
# sync-temp-to-latest,
|
||||||
|
# ]
|
||||||
|
# secrets: inherit
|
||||||
|
# if: github.event_name == 'schedule'
|
||||||
|
# uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
||||||
|
# with:
|
||||||
|
# ref: refs/heads/dev
|
||||||
|
# build_reason: Nightly
|
||||||
|
# push_to_branch: dev
|
||||||
|
# new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
|
# noti-discord-pre-release-and-update-url-readme:
|
||||||
|
# needs:
|
||||||
|
# [
|
||||||
|
# build-macos,
|
||||||
|
# build-windows-x64,
|
||||||
|
# build-linux-x64,
|
||||||
|
# get-update-version,
|
||||||
|
# set-public-provider,
|
||||||
|
# sync-temp-to-latest,
|
||||||
|
# ]
|
||||||
|
# secrets: inherit
|
||||||
|
# if: github.event_name == 'push'
|
||||||
|
# uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
||||||
|
# with:
|
||||||
|
# ref: refs/heads/dev
|
||||||
|
# build_reason: Pre-release
|
||||||
|
# push_to_branch: dev
|
||||||
|
# new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
|
# noti-discord-manual-and-update-url-readme:
|
||||||
|
# needs:
|
||||||
|
# [
|
||||||
|
# build-macos,
|
||||||
|
# build-windows-x64,
|
||||||
|
# build-linux-x64,
|
||||||
|
# get-update-version,
|
||||||
|
# set-public-provider,
|
||||||
|
# sync-temp-to-latest,
|
||||||
|
# ]
|
||||||
|
# secrets: inherit
|
||||||
|
# if: github.event_name == 'workflow_dispatch' && github.event.inputs.public_provider == 'aws-s3'
|
||||||
|
# uses: ./.github/workflows/template-noti-discord-and-update-url-readme.yml
|
||||||
|
# with:
|
||||||
|
# ref: refs/heads/dev
|
||||||
|
# build_reason: Manual
|
||||||
|
# push_to_branch: dev
|
||||||
|
# new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
|
||||||
|
comment-pr-build-url:
|
||||||
|
needs:
|
||||||
|
[
|
||||||
|
build-macos,
|
||||||
|
build-windows-x64,
|
||||||
|
build-linux-x64,
|
||||||
|
get-update-version,
|
||||||
|
set-public-provider,
|
||||||
|
sync-temp-to-latest,
|
||||||
|
]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'pull_request_review'
|
||||||
|
steps:
|
||||||
|
- name: Set up GitHub CLI
|
||||||
|
run: |
|
||||||
|
curl -sSL https://github.com/cli/cli/releases/download/v2.33.0/gh_2.33.0_linux_amd64.tar.gz | tar xz
|
||||||
|
sudo cp gh_2.33.0_linux_amd64/bin/gh /usr/local/bin/
|
||||||
|
|
||||||
|
- name: Comment build URL on PR
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
run: |
|
||||||
|
PR_URL=${{ github.event.pull_request.html_url }}
|
||||||
|
RUN_ID=${{ github.run_id }}
|
||||||
|
COMMENT="This is the build for this pull request. You can download it from the Artifacts section here: [Build URL](https://github.com/${{ github.repository }}/actions/runs/${RUN_ID})."
|
||||||
|
gh pr comment $PR_URL --body "$COMMENT"
|
||||||
122
.github/workflows/jan-tauri-build.yaml
vendored
Normal file
122
.github/workflows/jan-tauri-build.yaml
vendored
Normal file
@ -0,0 +1,122 @@
|
|||||||
|
name: Tauri Builder - Tag
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags: ["v[0-9]+.[0-9]+.[0-9]+"]
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Job create Update app version based on latest release tag with build number and save to output
|
||||||
|
get-update-version:
|
||||||
|
uses: ./.github/workflows/template-get-update-version.yml
|
||||||
|
|
||||||
|
create-draft-release:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||||
|
outputs:
|
||||||
|
upload_url: ${{ steps.create_release.outputs.upload_url }}
|
||||||
|
version: ${{ steps.get_version.outputs.version }}
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Extract tag name without v prefix
|
||||||
|
id: get_version
|
||||||
|
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
|
||||||
|
env:
|
||||||
|
GITHUB_REF: ${{ github.ref }}
|
||||||
|
- name: Create Draft Release
|
||||||
|
id: create_release
|
||||||
|
uses: softprops/action-gh-release@v2
|
||||||
|
with:
|
||||||
|
tag_name: ${{ github.ref_name }}
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
name: "${{ env.VERSION }}"
|
||||||
|
draft: true
|
||||||
|
prerelease: false
|
||||||
|
generate_release_notes: true
|
||||||
|
|
||||||
|
build-macos:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-macos.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, create-draft-release]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: github
|
||||||
|
channel: stable
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
build-windows-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-windows-x64.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, create-draft-release]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: github
|
||||||
|
channel: stable
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
build-linux-x64:
|
||||||
|
uses: ./.github/workflows/template-tauri-build-linux-x64.yml
|
||||||
|
secrets: inherit
|
||||||
|
needs: [get-update-version, create-draft-release]
|
||||||
|
with:
|
||||||
|
ref: ${{ github.ref }}
|
||||||
|
public_provider: github
|
||||||
|
channel: stable
|
||||||
|
new_version: ${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
|
||||||
|
sync-temp-to-latest:
|
||||||
|
needs: [create-draft-release, get-update-version, build-macos, build-windows-x64, build-linux-x64]
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: create latest.json file
|
||||||
|
run: |
|
||||||
|
|
||||||
|
VERSION=${{ needs.get-update-version.outputs.new_version }}
|
||||||
|
PUB_DATE=$(date -u +"%Y-%m-%dT%H:%M:%S.%3NZ")
|
||||||
|
LINUX_SIGNATURE="${{ needs.build-linux-x64.outputs.APPIMAGE_SIG }}"
|
||||||
|
LINUX_URL="https://github.com/janhq/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}"
|
||||||
|
WINDOWS_SIGNATURE="${{ needs.build-windows-x64.outputs.WIN_SIG }}"
|
||||||
|
WINDOWS_URL="https://github.com/janhq/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-windows-x64.outputs.FILE_NAME }}"
|
||||||
|
DARWIN_SIGNATURE="${{ needs.build-macos.outputs.MAC_UNIVERSAL_SIG }}"
|
||||||
|
DARWIN_URL="https://github.com/janhq/jan/releases/download/v${{ needs.get-update-version.outputs.new_version }}/${{ needs.build-macos.outputs.TAR_NAME }}"
|
||||||
|
|
||||||
|
jq --arg version "$VERSION" \
|
||||||
|
--arg pub_date "$PUB_DATE" \
|
||||||
|
--arg linux_signature "$LINUX_SIGNATURE" \
|
||||||
|
--arg linux_url "$LINUX_URL" \
|
||||||
|
--arg windows_signature "$WINDOWS_SIGNATURE" \
|
||||||
|
--arg windows_url "$WINDOWS_URL" \
|
||||||
|
--arg darwin_arm_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_arm_url "$DARWIN_URL" \
|
||||||
|
--arg darwin_amd_signature "$DARWIN_SIGNATURE" \
|
||||||
|
--arg darwin_amd_url "$DARWIN_URL" \
|
||||||
|
'.version = $version
|
||||||
|
| .pub_date = $pub_date
|
||||||
|
| .platforms["linux-x86_64"].signature = $linux_signature
|
||||||
|
| .platforms["linux-x86_64"].url = $linux_url
|
||||||
|
| .platforms["windows-x86_64"].signature = $windows_signature
|
||||||
|
| .platforms["windows-x86_64"].url = $windows_url
|
||||||
|
| .platforms["darwin-aarch64"].signature = $darwin_arm_signature
|
||||||
|
| .platforms["darwin-aarch64"].url = $darwin_arm_url
|
||||||
|
| .platforms["darwin-x86_64"].signature = $darwin_amd_signature
|
||||||
|
| .platforms["darwin-x86_64"].url = $darwin_amd_url' \
|
||||||
|
src-tauri/latest.json.template > latest.json
|
||||||
|
cat latest.json
|
||||||
|
|
||||||
|
- name: Upload release assert if public provider is github
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ needs.create-draft-release.outputs.upload_url }}
|
||||||
|
asset_path: ./latest.json
|
||||||
|
asset_name: latest.json
|
||||||
|
asset_content_type: text/json
|
||||||
127
.github/workflows/nightly-integrate-cortex-cpp.yml
vendored
127
.github/workflows/nightly-integrate-cortex-cpp.yml
vendored
@ -1,127 +0,0 @@
|
|||||||
name: Nightly Update cortex cpp
|
|
||||||
|
|
||||||
on:
|
|
||||||
schedule:
|
|
||||||
- cron: '30 19 * * 1-5' # At 01:30 on every day-of-week from Monday through Friday UTC +7
|
|
||||||
workflow_dispatch:
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
update-submodule:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
actions: write
|
|
||||||
|
|
||||||
outputs:
|
|
||||||
pr_number: ${{ steps.check-update.outputs.pr_number }}
|
|
||||||
pr_created: ${{ steps.check-update.outputs.pr_created }}
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
ref: dev
|
|
||||||
fetch-depth: 0
|
|
||||||
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
|
|
||||||
- name: Configure Git
|
|
||||||
run: |
|
|
||||||
git config --global user.name 'github-actions[bot]'
|
|
||||||
git config --global user.email 'github-actions[bot]@users.noreply.github.com'
|
|
||||||
|
|
||||||
- name: Update submodule to latest release
|
|
||||||
id: check-update
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
run: |
|
|
||||||
curl -s https://api.github.com/repos/janhq/cortex/releases > /tmp/github_api_releases.json
|
|
||||||
latest_prerelease_name=$(cat /tmp/github_api_releases.json | jq -r '.[] | select(.prerelease) | .name' | head -n 1)
|
|
||||||
|
|
||||||
get_asset_count() {
|
|
||||||
local version_name=$1
|
|
||||||
cat /tmp/github_api_releases.json | jq -r --arg version_name "$version_name" '.[] | select(.name == $version_name) | .assets | length'
|
|
||||||
}
|
|
||||||
|
|
||||||
cortex_cpp_version_file_path="extensions/inference-nitro-extension/bin/version.txt"
|
|
||||||
current_version_name=$(cat "$cortex_cpp_version_file_path" | head -n 1)
|
|
||||||
|
|
||||||
current_version_asset_count=$(get_asset_count "$current_version_name")
|
|
||||||
latest_prerelease_asset_count=$(get_asset_count "$latest_prerelease_name")
|
|
||||||
|
|
||||||
if [ "$current_version_name" = "$latest_prerelease_name" ]; then
|
|
||||||
echo "cortex cpp remote repo doesn't have update today, skip update cortex-cpp for today nightly build"
|
|
||||||
echo "::set-output name=pr_created::false"
|
|
||||||
exit 0
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "$current_version_asset_count" != "$latest_prerelease_asset_count" ]; then
|
|
||||||
echo "Latest prerelease version has different number of assets, somethink went wrong, skip update cortex-cpp for today nightly build"
|
|
||||||
echo "::set-output name=pr_created::false"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo $latest_prerelease_name > $cortex_cpp_version_file_path
|
|
||||||
echo "Updated version from $current_version_name to $latest_prerelease_name."
|
|
||||||
echo "::set-output name=pr_created::true"
|
|
||||||
|
|
||||||
git add -f $cortex_cpp_version_file_path
|
|
||||||
git commit -m "Update cortex cpp nightly to version $latest_prerelease_name"
|
|
||||||
branch_name="update-nightly-$(date +'%Y-%m-%d-%H-%M')"
|
|
||||||
git checkout -b $branch_name
|
|
||||||
git push origin $branch_name
|
|
||||||
|
|
||||||
pr_title="Update cortex cpp nightly to version $latest_prerelease_name"
|
|
||||||
pr_body="This PR updates the Update cortex cpp nightly to version $latest_prerelease_name"
|
|
||||||
|
|
||||||
gh pr create --title "$pr_title" --body "$pr_body" --head $branch_name --base dev --reviewer Van-QA
|
|
||||||
|
|
||||||
pr_number=$(gh pr list --head $branch_name --json number --jq '.[0].number')
|
|
||||||
echo "::set-output name=pr_number::$pr_number"
|
|
||||||
|
|
||||||
check-and-merge-pr:
|
|
||||||
needs: update-submodule
|
|
||||||
if: needs.update-submodule.outputs.pr_created == 'true'
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
pull-requests: write
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
submodules: recursive
|
|
||||||
fetch-depth: 0
|
|
||||||
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
|
|
||||||
- name: Wait for CI to pass
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
run: |
|
|
||||||
pr_number=${{ needs.update-submodule.outputs.pr_number }}
|
|
||||||
while true; do
|
|
||||||
ci_completed=$(gh pr checks $pr_number --json completedAt --jq '.[].completedAt')
|
|
||||||
if echo "$ci_completed" | grep -q "0001-01-01T00:00:00Z"; then
|
|
||||||
echo "CI is still running, waiting..."
|
|
||||||
sleep 60
|
|
||||||
else
|
|
||||||
echo "CI has completed, checking states..."
|
|
||||||
ci_states=$(gh pr checks $pr_number --json state --jq '.[].state')
|
|
||||||
if echo "$ci_states" | grep -vqE "SUCCESS|SKIPPED"; then
|
|
||||||
echo "CI failed, exiting..."
|
|
||||||
exit 1
|
|
||||||
else
|
|
||||||
echo "CI passed, merging PR..."
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
- name: Merge the PR
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
|
||||||
run: |
|
|
||||||
pr_number=${{ needs.update-submodule.outputs.pr_number }}
|
|
||||||
gh pr merge $pr_number --merge --admin
|
|
||||||
51
.github/workflows/publish-npm-core.yml
vendored
Normal file
51
.github/workflows/publish-npm-core.yml
vendored
Normal file
@ -0,0 +1,51 @@
|
|||||||
|
name: Publish core Package to npmjs
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags: ['v[0-9]+.[0-9]+.[0-9]+-core']
|
||||||
|
paths: ['core/**', '.github/workflows/publish-npm-core.yml']
|
||||||
|
workflow_dispatch:
|
||||||
|
jobs:
|
||||||
|
build-and-publish-plugins:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
fetch-depth: '0'
|
||||||
|
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Extract tag name without v prefix
|
||||||
|
id: get_version
|
||||||
|
run: echo "VERSION=${GITHUB_REF#refs/tags/v}" >> $GITHUB_ENV && echo "::set-output name=version::${GITHUB_REF#refs/tags/v}"
|
||||||
|
env:
|
||||||
|
GITHUB_REF: ${{ github.ref }}
|
||||||
|
|
||||||
|
- name: 'Get Semantic Version from tag'
|
||||||
|
if: github.event_name == 'push'
|
||||||
|
run: |
|
||||||
|
# Get the tag from the event
|
||||||
|
tag=${GITHUB_REF#refs/tags/v}
|
||||||
|
# remove the -core suffix
|
||||||
|
new_version=$(echo $tag | sed -n 's/-core//p')
|
||||||
|
echo $new_version
|
||||||
|
# Replace the old version with the new version in package.json
|
||||||
|
jq --arg version "$new_version" '.version = $version' core/package.json > /tmp/package.json && mv /tmp/package.json core/package.json
|
||||||
|
|
||||||
|
# Print the new version
|
||||||
|
echo "Updated package.json version to: $new_version"
|
||||||
|
cat core/package.json
|
||||||
|
|
||||||
|
# Setup .npmrc file to publish to npm
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: '20.x'
|
||||||
|
registry-url: 'https://registry.npmjs.org'
|
||||||
|
|
||||||
|
- run: cd core && corepack enable && corepack prepare yarn@4.5.3 --activate && yarn --version && yarn config set -H enableImmutableInstalls false && yarn install && yarn build
|
||||||
|
|
||||||
|
- run: cd core && yarn publish --access public
|
||||||
|
if: github.event_name == 'push'
|
||||||
|
env:
|
||||||
|
NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
|
||||||
39
.github/workflows/template-build-jan-server.yml
vendored
39
.github/workflows/template-build-jan-server.yml
vendored
@ -1,39 +0,0 @@
|
|||||||
name: build-jan-server
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
dockerfile_path:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
default: './Dockerfile'
|
|
||||||
docker_image_tag:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: 'ghcr.io/janhq/jan-server:dev-latest'
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
env:
|
|
||||||
REGISTRY: ghcr.io
|
|
||||||
IMAGE_NAME: janhq/jan-server
|
|
||||||
permissions:
|
|
||||||
packages: write
|
|
||||||
steps:
|
|
||||||
- name: Checkout
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Log in to the Container registry
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ${{ env.REGISTRY }}
|
|
||||||
username: ${{ github.actor }}
|
|
||||||
password: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
|
|
||||||
- name: Build and push Docker image
|
|
||||||
uses: docker/build-push-action@v3
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
file: ${{ inputs.dockerfile_path }}
|
|
||||||
push: true
|
|
||||||
tags: ${{ inputs.docker_image_tag }}
|
|
||||||
115
.github/workflows/template-build-linux-x64.yml
vendored
115
.github/workflows/template-build-linux-x64.yml
vendored
@ -1,115 +0,0 @@
|
|||||||
name: build-linux-x64
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
ref:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: 'refs/heads/main'
|
|
||||||
public_provider:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: none
|
|
||||||
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
|
||||||
new_version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: ''
|
|
||||||
aws_s3_prefix:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
default: '/latest/'
|
|
||||||
secrets:
|
|
||||||
DELTA_AWS_S3_BUCKET_NAME:
|
|
||||||
required: false
|
|
||||||
DELTA_AWS_ACCESS_KEY_ID:
|
|
||||||
required: false
|
|
||||||
DELTA_AWS_SECRET_ACCESS_KEY:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-linux-x64:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
environment: production
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
ref: ${{ inputs.ref }}
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v1
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
- name: Install jq
|
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
|
||||||
|
|
||||||
- name: Update app version base public_provider
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
run: |
|
|
||||||
echo "Version: ${{ inputs.new_version }}"
|
|
||||||
# Update the version in electron/package.json
|
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json web/package.json
|
|
||||||
jq '.build.publish = [{"provider": "generic", "url": "${{ secrets.CLOUDFLARE_R2_PUBLIC_URL }}", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "${{ inputs.aws_s3_prefix }}", "channel": "latest"}]' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
cat electron/package.json
|
|
||||||
|
|
||||||
- name: Update app version base on tag
|
|
||||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
|
|
||||||
run: |
|
|
||||||
if [[ ! "${VERSION_TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
|
||||||
echo "Error: Tag is not valid!"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json web/package.json
|
|
||||||
env:
|
|
||||||
VERSION_TAG: ${{ inputs.new_version }}
|
|
||||||
|
|
||||||
- name: Build and publish app to aws s3 r2 or github artifactory
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
run: |
|
|
||||||
# check public_provider is true or not
|
|
||||||
echo "public_provider is ${{ inputs.public_provider }}"
|
|
||||||
if [ "${{ inputs.public_provider }}" == "none" ]; then
|
|
||||||
make build
|
|
||||||
else
|
|
||||||
make build-and-publish
|
|
||||||
fi
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_EC2_METADATA_DISABLED: "true"
|
|
||||||
AWS_MAX_ATTEMPTS: "5"
|
|
||||||
|
|
||||||
- name: Build and publish app to github
|
|
||||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
|
|
||||||
run: |
|
|
||||||
make build-and-publish
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
ANALYTICS_ID: ${{ secrets.JAN_APP_UMAMI_PROJECT_API_KEY }}
|
|
||||||
ANALYTICS_HOST: ${{ secrets.JAN_APP_UMAMI_URL }}
|
|
||||||
|
|
||||||
- name: Upload Artifact .deb file
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: jan-linux-amd64-${{ inputs.new_version }}-deb
|
|
||||||
path: ./electron/dist/*.deb
|
|
||||||
|
|
||||||
- name: Upload Artifact .AppImage file
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
|
|
||||||
path: ./electron/dist/*.AppImage
|
|
||||||
160
.github/workflows/template-build-macos-arm64.yml
vendored
160
.github/workflows/template-build-macos-arm64.yml
vendored
@ -1,160 +0,0 @@
|
|||||||
name: build-macos
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
ref:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: 'refs/heads/main'
|
|
||||||
public_provider:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: none
|
|
||||||
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
|
||||||
new_version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: ''
|
|
||||||
aws_s3_prefix:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
default: '/latest/'
|
|
||||||
secrets:
|
|
||||||
DELTA_AWS_S3_BUCKET_NAME:
|
|
||||||
required: false
|
|
||||||
DELTA_AWS_ACCESS_KEY_ID:
|
|
||||||
required: false
|
|
||||||
DELTA_AWS_SECRET_ACCESS_KEY:
|
|
||||||
required: false
|
|
||||||
CODE_SIGN_P12_BASE64:
|
|
||||||
required: false
|
|
||||||
CODE_SIGN_P12_PASSWORD:
|
|
||||||
required: false
|
|
||||||
APPLE_ID:
|
|
||||||
required: false
|
|
||||||
APPLE_APP_SPECIFIC_PASSWORD:
|
|
||||||
required: false
|
|
||||||
DEVELOPER_ID:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-macos:
|
|
||||||
runs-on: macos-latest
|
|
||||||
environment: production
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
ref: ${{ inputs.ref }}
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v1
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
- name: Install jq
|
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
|
||||||
|
|
||||||
- name: Update app version based on latest release tag with build number
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
run: |
|
|
||||||
echo "Version: ${{ inputs.new_version }}"
|
|
||||||
# Update the version in electron/package.json
|
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
|
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json web/package.json
|
|
||||||
|
|
||||||
jq '.build.publish = [{"provider": "generic", "url": "${{ secrets.CLOUDFLARE_R2_PUBLIC_URL }}", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "${{ inputs.aws_s3_prefix }}", "channel": "latest"}]' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
|
|
||||||
jq --arg teamid "${{ secrets.APPLE_TEAM_ID }}" '.build.mac.notarize.teamId = $teamid' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
|
|
||||||
cat electron/package.json
|
|
||||||
|
|
||||||
- name: Update app version base on tag
|
|
||||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
|
|
||||||
run: |
|
|
||||||
if [[ ! "${VERSION_TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
|
||||||
echo "Error: Tag is not valid!"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json web/package.json
|
|
||||||
jq --arg teamid "${{ secrets.APPLE_TEAM_ID }}" '.build.mac.notarize.teamId = $teamid' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
cat electron/package.json
|
|
||||||
env:
|
|
||||||
VERSION_TAG: ${{ inputs.new_version }}
|
|
||||||
|
|
||||||
- name: Get Cer for code signing
|
|
||||||
run: base64 -d <<< "$CODE_SIGN_P12_BASE64" > /tmp/codesign.p12
|
|
||||||
shell: bash
|
|
||||||
env:
|
|
||||||
CODE_SIGN_P12_BASE64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
|
||||||
|
|
||||||
- uses: apple-actions/import-codesign-certs@v2
|
|
||||||
continue-on-error: true
|
|
||||||
with:
|
|
||||||
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
|
||||||
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
|
||||||
|
|
||||||
- name: Build and publish app to aws s3 r2 or github artifactory
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
run: |
|
|
||||||
# check public_provider is true or not
|
|
||||||
echo "public_provider is ${{ inputs.public_provider }}"
|
|
||||||
if [ "${{ inputs.public_provider }}" == "none" ]; then
|
|
||||||
make build
|
|
||||||
else
|
|
||||||
make build-and-publish
|
|
||||||
fi
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
CSC_LINK: "/tmp/codesign.p12"
|
|
||||||
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
|
||||||
CSC_IDENTITY_AUTO_DISCOVERY: "true"
|
|
||||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
|
||||||
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
|
|
||||||
APP_PATH: "."
|
|
||||||
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_DEFAULT_REGION: auto
|
|
||||||
AWS_EC2_METADATA_DISABLED: "true"
|
|
||||||
AWS_MAX_ATTEMPTS: "5"
|
|
||||||
|
|
||||||
- name: Build and publish app to github
|
|
||||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
|
|
||||||
run: |
|
|
||||||
make build-and-publish
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
CSC_LINK: "/tmp/codesign.p12"
|
|
||||||
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
|
||||||
CSC_IDENTITY_AUTO_DISCOVERY: "true"
|
|
||||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
|
||||||
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
|
|
||||||
APP_PATH: "."
|
|
||||||
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
|
|
||||||
ANALYTICS_ID: ${{ secrets.JAN_APP_UMAMI_PROJECT_API_KEY }}
|
|
||||||
ANALYTICS_HOST: ${{ secrets.JAN_APP_UMAMI_URL }}
|
|
||||||
|
|
||||||
- name: Upload Artifact
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: jan-mac-arm64-${{ inputs.new_version }}
|
|
||||||
path: ./electron/dist/jan-mac-arm64-${{ inputs.new_version }}.dmg
|
|
||||||
|
|
||||||
- name: Upload Artifact
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: latest-mac-arm64
|
|
||||||
path: ./electron/dist/latest-mac.yml
|
|
||||||
160
.github/workflows/template-build-macos-x64.yml
vendored
160
.github/workflows/template-build-macos-x64.yml
vendored
@ -1,160 +0,0 @@
|
|||||||
name: build-macos
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
ref:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: 'refs/heads/main'
|
|
||||||
public_provider:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: none
|
|
||||||
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
|
||||||
new_version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: ''
|
|
||||||
aws_s3_prefix:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
default: '/latest/'
|
|
||||||
secrets:
|
|
||||||
DELTA_AWS_S3_BUCKET_NAME:
|
|
||||||
required: false
|
|
||||||
DELTA_AWS_ACCESS_KEY_ID:
|
|
||||||
required: false
|
|
||||||
DELTA_AWS_SECRET_ACCESS_KEY:
|
|
||||||
required: false
|
|
||||||
CODE_SIGN_P12_BASE64:
|
|
||||||
required: false
|
|
||||||
CODE_SIGN_P12_PASSWORD:
|
|
||||||
required: false
|
|
||||||
APPLE_ID:
|
|
||||||
required: false
|
|
||||||
APPLE_APP_SPECIFIC_PASSWORD:
|
|
||||||
required: false
|
|
||||||
DEVELOPER_ID:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-macos:
|
|
||||||
runs-on: macos-13
|
|
||||||
environment: production
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
ref: ${{ inputs.ref }}
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v1
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
- name: Install jq
|
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
|
||||||
|
|
||||||
- name: Update app version based on latest release tag with build number
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
run: |
|
|
||||||
echo "Version: ${{ inputs.new_version }}"
|
|
||||||
# Update the version in electron/package.json
|
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
|
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json web/package.json
|
|
||||||
|
|
||||||
jq '.build.publish = [{"provider": "generic", "url": "${{ secrets.CLOUDFLARE_R2_PUBLIC_URL }}", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "${{ inputs.aws_s3_prefix }}", "channel": "latest"}]' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
|
|
||||||
jq --arg teamid "${{ secrets.APPLE_TEAM_ID }}" '.build.mac.notarize.teamId = $teamid' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
|
|
||||||
cat electron/package.json
|
|
||||||
|
|
||||||
- name: Update app version base on tag
|
|
||||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
|
|
||||||
run: |
|
|
||||||
if [[ ! "${VERSION_TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
|
||||||
echo "Error: Tag is not valid!"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json web/package.json
|
|
||||||
jq --arg teamid "${{ secrets.APPLE_TEAM_ID }}" '.build.mac.notarize.teamId = $teamid' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
cat electron/package.json
|
|
||||||
env:
|
|
||||||
VERSION_TAG: ${{ inputs.new_version }}
|
|
||||||
|
|
||||||
- name: Get Cer for code signing
|
|
||||||
run: base64 -d <<< "$CODE_SIGN_P12_BASE64" > /tmp/codesign.p12
|
|
||||||
shell: bash
|
|
||||||
env:
|
|
||||||
CODE_SIGN_P12_BASE64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
|
||||||
|
|
||||||
- uses: apple-actions/import-codesign-certs@v2
|
|
||||||
continue-on-error: true
|
|
||||||
with:
|
|
||||||
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
|
||||||
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
|
||||||
|
|
||||||
- name: Build and publish app to aws s3 r2 or github artifactory
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
run: |
|
|
||||||
# check public_provider is true or not
|
|
||||||
echo "public_provider is ${{ inputs.public_provider }}"
|
|
||||||
if [ "${{ inputs.public_provider }}" == "none" ]; then
|
|
||||||
make build
|
|
||||||
else
|
|
||||||
make build-and-publish
|
|
||||||
fi
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
CSC_LINK: "/tmp/codesign.p12"
|
|
||||||
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
|
||||||
CSC_IDENTITY_AUTO_DISCOVERY: "true"
|
|
||||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
|
||||||
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
|
|
||||||
APP_PATH: "."
|
|
||||||
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_DEFAULT_REGION: auto
|
|
||||||
AWS_EC2_METADATA_DISABLED: "true"
|
|
||||||
AWS_MAX_ATTEMPTS: "5"
|
|
||||||
|
|
||||||
- name: Build and publish app to github
|
|
||||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
|
|
||||||
run: |
|
|
||||||
make build-and-publish
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
CSC_LINK: "/tmp/codesign.p12"
|
|
||||||
CSC_KEY_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
|
||||||
CSC_IDENTITY_AUTO_DISCOVERY: "true"
|
|
||||||
APPLE_ID: ${{ secrets.APPLE_ID }}
|
|
||||||
APPLE_APP_SPECIFIC_PASSWORD: ${{ secrets.APPLE_APP_SPECIFIC_PASSWORD }}
|
|
||||||
APP_PATH: "."
|
|
||||||
DEVELOPER_ID: ${{ secrets.DEVELOPER_ID }}
|
|
||||||
ANALYTICS_ID: ${{ secrets.JAN_APP_UMAMI_PROJECT_API_KEY }}
|
|
||||||
ANALYTICS_HOST: ${{ secrets.JAN_APP_UMAMI_URL }}
|
|
||||||
|
|
||||||
- name: Upload Artifact
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: jan-mac-x64-${{ inputs.new_version }}
|
|
||||||
path: ./electron/dist/jan-mac-x64-${{ inputs.new_version }}.dmg
|
|
||||||
|
|
||||||
- name: Upload Artifact
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: latest-mac-x64
|
|
||||||
path: ./electron/dist/latest-mac.yml
|
|
||||||
143
.github/workflows/template-build-windows-x64.yml
vendored
143
.github/workflows/template-build-windows-x64.yml
vendored
@ -1,143 +0,0 @@
|
|||||||
name: build-windows-x64
|
|
||||||
on:
|
|
||||||
workflow_call:
|
|
||||||
inputs:
|
|
||||||
ref:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: 'refs/heads/main'
|
|
||||||
public_provider:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: none
|
|
||||||
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
|
||||||
new_version:
|
|
||||||
required: true
|
|
||||||
type: string
|
|
||||||
default: ''
|
|
||||||
aws_s3_prefix:
|
|
||||||
required: false
|
|
||||||
type: string
|
|
||||||
default: '/latest/'
|
|
||||||
secrets:
|
|
||||||
DELTA_AWS_S3_BUCKET_NAME:
|
|
||||||
required: false
|
|
||||||
DELTA_AWS_ACCESS_KEY_ID:
|
|
||||||
required: false
|
|
||||||
DELTA_AWS_SECRET_ACCESS_KEY:
|
|
||||||
required: false
|
|
||||||
AZURE_KEY_VAULT_URI:
|
|
||||||
required: false
|
|
||||||
AZURE_CLIENT_ID:
|
|
||||||
required: false
|
|
||||||
AZURE_TENANT_ID:
|
|
||||||
required: false
|
|
||||||
AZURE_CLIENT_SECRET:
|
|
||||||
required: false
|
|
||||||
AZURE_CERT_NAME:
|
|
||||||
required: false
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-windows-x64:
|
|
||||||
runs-on: windows-latest
|
|
||||||
permissions:
|
|
||||||
contents: write
|
|
||||||
steps:
|
|
||||||
- name: Getting the repo
|
|
||||||
uses: actions/checkout@v3
|
|
||||||
with:
|
|
||||||
ref: ${{ inputs.ref }}
|
|
||||||
|
|
||||||
- name: Installing node
|
|
||||||
uses: actions/setup-node@v1
|
|
||||||
with:
|
|
||||||
node-version: 20
|
|
||||||
|
|
||||||
- name: Install jq
|
|
||||||
uses: dcarbone/install-jq-action@v2.0.1
|
|
||||||
|
|
||||||
- name: Update app version base on tag
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
id: version_update
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
echo "Version: ${{ inputs.new_version }}"
|
|
||||||
# Update the version in electron/package.json
|
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
|
|
||||||
jq --arg version "${{ inputs.new_version }}" '.version = $version' web/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json web/package.json
|
|
||||||
|
|
||||||
jq '.build.publish = [{"provider": "generic", "url": "${{ secrets.CLOUDFLARE_R2_PUBLIC_URL }}", "channel": "latest"}, {"provider": "s3", "acl": null, "bucket": "${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}", "region": "${{ secrets.DELTA_AWS_REGION}}", "path": "${{ inputs.aws_s3_prefix }}", "channel": "latest"}]' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
|
|
||||||
jq '.build.win.sign = "./sign.js"' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
cat electron/package.json
|
|
||||||
|
|
||||||
- name: Update app version base on tag
|
|
||||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
|
|
||||||
shell: bash
|
|
||||||
run: |
|
|
||||||
if [[ ! "${VERSION_TAG}" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
|
|
||||||
echo "Error: Tag is not valid!"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
jq --arg version "${VERSION_TAG#v}" '.version = $version' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
jq --arg version "${VERSION_TAG#v}" '.version = $version' web/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json web/package.json
|
|
||||||
jq '.build.win.sign = "./sign.js"' electron/package.json > /tmp/package.json
|
|
||||||
mv /tmp/package.json electron/package.json
|
|
||||||
env:
|
|
||||||
VERSION_TAG: ${{ inputs.new_version }}
|
|
||||||
|
|
||||||
- name: Install AzureSignTool
|
|
||||||
run: |
|
|
||||||
dotnet tool install --global AzureSignTool
|
|
||||||
|
|
||||||
- name: Build and publish app to aws s3 r2 or github artifactory
|
|
||||||
shell: bash
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
run: |
|
|
||||||
# check public_provider is true or not
|
|
||||||
echo "public_provider is ${{ inputs.public_provider }}"
|
|
||||||
if [ "${{ inputs.public_provider }}" == "none" ]; then
|
|
||||||
make build
|
|
||||||
else
|
|
||||||
make build-and-publish
|
|
||||||
fi
|
|
||||||
env:
|
|
||||||
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
|
|
||||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
|
|
||||||
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
|
||||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
|
|
||||||
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
|
|
||||||
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
|
||||||
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
|
||||||
AWS_DEFAULT_REGION: auto
|
|
||||||
AWS_EC2_METADATA_DISABLED: "true"
|
|
||||||
AWS_MAX_ATTEMPTS: "5"
|
|
||||||
|
|
||||||
- name: Build app and publish app to github
|
|
||||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') && inputs.public_provider == 'github'
|
|
||||||
run: |
|
|
||||||
make build-and-publish
|
|
||||||
env:
|
|
||||||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
||||||
ANALYTICS_ID: ${{ secrets.JAN_APP_UMAMI_PROJECT_API_KEY }}
|
|
||||||
ANALYTICS_HOST: ${{ secrets.JAN_APP_UMAMI_URL }}
|
|
||||||
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
|
|
||||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
|
|
||||||
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
|
||||||
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
|
|
||||||
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
|
|
||||||
|
|
||||||
- name: Upload Artifact
|
|
||||||
if: inputs.public_provider != 'github'
|
|
||||||
uses: actions/upload-artifact@v2
|
|
||||||
with:
|
|
||||||
name: jan-win-x64-${{ inputs.new_version }}
|
|
||||||
path: ./electron/dist/*.exe
|
|
||||||
|
|
||||||
@ -9,7 +9,6 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
get-update-version:
|
get-update-version:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
environment: production
|
|
||||||
outputs:
|
outputs:
|
||||||
new_version: ${{ steps.version_update.outputs.new_version }}
|
new_version: ${{ steps.version_update.outputs.new_version }}
|
||||||
steps:
|
steps:
|
||||||
@ -46,7 +45,10 @@ jobs:
|
|||||||
|
|
||||||
if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then
|
if ${{ github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') }}; then
|
||||||
echo "Tag detected, set output follow tag"
|
echo "Tag detected, set output follow tag"
|
||||||
echo "::set-output name=new_version::${{ steps.tag.outputs.tag }}"
|
sanitized_tag="${{ steps.tag.outputs.tag }}"
|
||||||
|
# Remove the 'v' prefix if it exists
|
||||||
|
sanitized_tag="${sanitized_tag#v}"
|
||||||
|
echo "::set-output name=new_version::$sanitized_tag"
|
||||||
else
|
else
|
||||||
# Get the latest release tag from GitHub API
|
# Get the latest release tag from GitHub API
|
||||||
LATEST_TAG=$(get_latest_tag)
|
LATEST_TAG=$(get_latest_tag)
|
||||||
|
|||||||
@ -26,7 +26,6 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
noti-discord-and-update-url-readme:
|
noti-discord-and-update-url-readme:
|
||||||
environment: production
|
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-latest
|
||||||
permissions:
|
permissions:
|
||||||
contents: write
|
contents: write
|
||||||
@ -34,7 +33,7 @@ jobs:
|
|||||||
- name: Checkout code
|
- name: Checkout code
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v3
|
||||||
with:
|
with:
|
||||||
fetch-depth: "0"
|
fetch-depth: '0'
|
||||||
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
token: ${{ secrets.PAT_SERVICE_ACCOUNT }}
|
||||||
ref: ${{ inputs.ref }}
|
ref: ${{ inputs.ref }}
|
||||||
|
|
||||||
@ -47,11 +46,10 @@ jobs:
|
|||||||
with:
|
with:
|
||||||
args: |
|
args: |
|
||||||
Jan App ${{ inputs.build_reason }} build artifact version {{ VERSION }}:
|
Jan App ${{ inputs.build_reason }} build artifact version {{ VERSION }}:
|
||||||
- Windows: https://delta.jan.ai/latest/jan-win-x64-{{ VERSION }}.exe
|
- Windows: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_x64-setup.exe
|
||||||
- macOS Intel: https://delta.jan.ai/latest/jan-mac-x64-{{ VERSION }}.dmg
|
- macOS Universal: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_universal.dmg
|
||||||
- macOS Apple Silicon: https://delta.jan.ai/latest/jan-mac-arm64-{{ VERSION }}.dmg
|
- Linux Deb: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.deb
|
||||||
- Linux Deb: https://delta.jan.ai/latest/jan-linux-amd64-{{ VERSION }}.deb
|
- Linux AppImage: https://delta.jan.ai/nightly/Jan-nightly_{{ VERSION }}_amd64.AppImage
|
||||||
- Linux AppImage: https://delta.jan.ai/latest/jan-linux-x86_64-{{ VERSION }}.AppImage
|
|
||||||
- Github action run: https://github.com/janhq/jan/actions/runs/{{ GITHUB_RUN_ID }}
|
- Github action run: https://github.com/janhq/jan/actions/runs/{{ GITHUB_RUN_ID }}
|
||||||
env:
|
env:
|
||||||
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
|
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
|
||||||
138
.github/workflows/template-tauri-build-linux-x64-external.yml
vendored
Normal file
138
.github/workflows/template-tauri-build-linux-x64-external.yml
vendored
Normal file
@ -0,0 +1,138 @@
|
|||||||
|
name: tauri-build-linux-x64-external
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
ref:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'refs/heads/main'
|
||||||
|
new_version:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
channel:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'nightly'
|
||||||
|
description: 'The channel to use for this job'
|
||||||
|
disable_updater:
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
description: 'If true, builds both .deb and .appimage but disables auto-updater'
|
||||||
|
jobs:
|
||||||
|
build-linux-x64-external:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.ref }}
|
||||||
|
|
||||||
|
- name: Free Disk Space Before Build
|
||||||
|
run: |
|
||||||
|
echo "Disk space before cleanup:"
|
||||||
|
df -h
|
||||||
|
sudo rm -rf /usr/local/.ghcup
|
||||||
|
sudo rm -rf /opt/hostedtoolcache/CodeQL
|
||||||
|
sudo rm -rf /usr/local/lib/android/sdk/ndk
|
||||||
|
sudo rm -rf /usr/share/dotnet
|
||||||
|
sudo rm -rf /opt/ghc
|
||||||
|
sudo rm -rf /usr/local/share/boost
|
||||||
|
sudo apt-get clean
|
||||||
|
echo "Disk space after cleanup:"
|
||||||
|
df -h
|
||||||
|
|
||||||
|
- name: Replace Icons for Beta Build
|
||||||
|
if: inputs.channel != 'stable'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Install ctoml
|
||||||
|
run: |
|
||||||
|
cargo install ctoml
|
||||||
|
|
||||||
|
- name: Install Tauri dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2 libayatana-appindicator3-dev
|
||||||
|
|
||||||
|
- name: Update app version
|
||||||
|
run: |
|
||||||
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
||||||
|
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
|
||||||
|
fi
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
|
# Update tauri plugin versions
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Build app
|
||||||
|
run: |
|
||||||
|
make build
|
||||||
|
|
||||||
|
env:
|
||||||
|
RELEASE_CHANNEL: '${{ inputs.channel }}'
|
||||||
|
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
|
||||||
|
|
||||||
|
- name: Upload Artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-linux-amd64-${{ inputs.new_version }}-deb
|
||||||
|
path: ./src-tauri/target/release/bundle/deb/*.deb
|
||||||
|
|
||||||
|
- name: Upload Artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
|
||||||
|
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
|
||||||
185
.github/workflows/template-tauri-build-linux-x64-flatpak.yml
vendored
Normal file
185
.github/workflows/template-tauri-build-linux-x64-flatpak.yml
vendored
Normal file
@ -0,0 +1,185 @@
|
|||||||
|
name: tauri-build-linux-x64-flatpak
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
ref:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'refs/heads/main'
|
||||||
|
public_provider:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: none
|
||||||
|
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
||||||
|
new_version:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
cortex_api_port:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
upload_url:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
channel:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'nightly'
|
||||||
|
description: 'The channel to use for this job'
|
||||||
|
disable_updater:
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
description: 'If true, builds both .deb and .appimage but disables auto-updater'
|
||||||
|
secrets:
|
||||||
|
DELTA_AWS_S3_BUCKET_NAME:
|
||||||
|
required: false
|
||||||
|
DELTA_AWS_ACCESS_KEY_ID:
|
||||||
|
required: false
|
||||||
|
DELTA_AWS_SECRET_ACCESS_KEY:
|
||||||
|
required: false
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY:
|
||||||
|
required: false
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
||||||
|
required: false
|
||||||
|
jobs:
|
||||||
|
build-linux-x64:
|
||||||
|
runs-on: ubuntu-22.04
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.ref }}
|
||||||
|
|
||||||
|
- name: Free Disk Space Before Build
|
||||||
|
run: |
|
||||||
|
echo "Disk space before cleanup:"
|
||||||
|
df -h
|
||||||
|
sudo rm -rf /usr/local/.ghcup
|
||||||
|
sudo rm -rf /opt/hostedtoolcache/CodeQL
|
||||||
|
sudo rm -rf /usr/local/lib/android/sdk/ndk
|
||||||
|
sudo rm -rf /usr/share/dotnet
|
||||||
|
sudo rm -rf /opt/ghc
|
||||||
|
sudo rm -rf /usr/local/share/boost
|
||||||
|
sudo apt-get clean
|
||||||
|
echo "Disk space after cleanup:"
|
||||||
|
df -h
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Install ctoml
|
||||||
|
run: |
|
||||||
|
cargo install ctoml
|
||||||
|
|
||||||
|
- name: Install Tauri dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2
|
||||||
|
|
||||||
|
- name: Update app version base public_provider
|
||||||
|
run: |
|
||||||
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
|
# Update tauri.conf.json
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
||||||
|
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
|
||||||
|
fi
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
|
# Update tauri plugin versions
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
# Temporarily enable devtool on prod build
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
# Change app name for beta and nightly builds
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
|
||||||
|
cat ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
# Update Cargo.toml
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
echo "------------------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
|
cat ./package.json
|
||||||
|
fi
|
||||||
|
- name: Build app
|
||||||
|
run: |
|
||||||
|
make build
|
||||||
|
|
||||||
|
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1)
|
||||||
|
yarn tauri signer sign \
|
||||||
|
--private-key "$TAURI_SIGNING_PRIVATE_KEY" \
|
||||||
|
--password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \
|
||||||
|
"$APP_IMAGE"
|
||||||
|
|
||||||
|
env:
|
||||||
|
RELEASE_CHANNEL: '${{ inputs.channel }}'
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
||||||
|
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||||
|
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
|
||||||
|
# Publish app
|
||||||
|
|
||||||
|
## Artifacts, for dev and test
|
||||||
|
- name: Upload Artifact
|
||||||
|
if: inputs.public_provider != 'github'
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-deb
|
||||||
|
path: ./src-tauri/target/release/bundle/deb/*.deb
|
||||||
|
|
||||||
|
- name: Upload Artifact
|
||||||
|
if: inputs.public_provider != 'github'
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-linux-amd64-flatpak-${{ inputs.new_version }}-AppImage
|
||||||
|
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
|
||||||
264
.github/workflows/template-tauri-build-linux-x64.yml
vendored
Normal file
264
.github/workflows/template-tauri-build-linux-x64.yml
vendored
Normal file
@ -0,0 +1,264 @@
|
|||||||
|
name: tauri-build-linux-x64
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
ref:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'refs/heads/main'
|
||||||
|
public_provider:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: none
|
||||||
|
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
||||||
|
new_version:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
cortex_api_port:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
upload_url:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
channel:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'nightly'
|
||||||
|
description: 'The channel to use for this job'
|
||||||
|
disable_updater:
|
||||||
|
required: false
|
||||||
|
type: boolean
|
||||||
|
default: false
|
||||||
|
description: 'If true, builds both .deb and .appimage but disables auto-updater'
|
||||||
|
secrets:
|
||||||
|
DELTA_AWS_S3_BUCKET_NAME:
|
||||||
|
required: false
|
||||||
|
DELTA_AWS_ACCESS_KEY_ID:
|
||||||
|
required: false
|
||||||
|
DELTA_AWS_SECRET_ACCESS_KEY:
|
||||||
|
required: false
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY:
|
||||||
|
required: false
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
||||||
|
required: false
|
||||||
|
outputs:
|
||||||
|
DEB_SIG:
|
||||||
|
value: ${{ jobs.build-linux-x64.outputs.DEB_SIG }}
|
||||||
|
APPIMAGE_SIG:
|
||||||
|
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_SIG }}
|
||||||
|
APPIMAGE_FILE_NAME:
|
||||||
|
value: ${{ jobs.build-linux-x64.outputs.APPIMAGE_FILE_NAME }}
|
||||||
|
jobs:
|
||||||
|
build-linux-x64:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
outputs:
|
||||||
|
DEB_SIG: ${{ steps.packageinfo.outputs.DEB_SIG }}
|
||||||
|
APPIMAGE_SIG: ${{ steps.packageinfo.outputs.APPIMAGE_SIG }}
|
||||||
|
APPIMAGE_FILE_NAME: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.ref }}
|
||||||
|
|
||||||
|
- name: Free Disk Space Before Build
|
||||||
|
run: |
|
||||||
|
echo "Disk space before cleanup:"
|
||||||
|
df -h
|
||||||
|
sudo rm -rf /usr/local/.ghcup
|
||||||
|
sudo rm -rf /opt/hostedtoolcache/CodeQL
|
||||||
|
sudo rm -rf /usr/local/lib/android/sdk/ndk
|
||||||
|
sudo rm -rf /usr/share/dotnet
|
||||||
|
sudo rm -rf /opt/ghc
|
||||||
|
sudo rm -rf /usr/local/share/boost
|
||||||
|
sudo apt-get clean
|
||||||
|
echo "Disk space after cleanup:"
|
||||||
|
df -h
|
||||||
|
|
||||||
|
- name: Replace Icons for Beta Build
|
||||||
|
if: inputs.channel != 'stable'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Install ctoml
|
||||||
|
run: |
|
||||||
|
cargo install ctoml
|
||||||
|
|
||||||
|
- name: Install Tauri dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y libglib2.0-dev libatk1.0-dev libpango1.0-dev libgtk-3-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev librsvg2-dev libfuse2 libayatana-appindicator3-dev
|
||||||
|
|
||||||
|
- name: Update app version base public_provider
|
||||||
|
run: |
|
||||||
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
|
# Update tauri.conf.json
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
jq '.bundle.linux.deb.files = {"usr/bin/bun": "resources/bin/bun"}' ./src-tauri/tauri.linux.conf.json > /tmp/tauri.linux.conf.json
|
||||||
|
mv /tmp/tauri.linux.conf.json ./src-tauri/tauri.linux.conf.json
|
||||||
|
fi
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
|
# Update tauri plugin versions
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
# Temporarily enable devtool on prod build
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
# Change app name for beta and nightly builds
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
|
||||||
|
cat ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
# Update Cargo.toml
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
echo "------------------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
|
cat ./package.json
|
||||||
|
fi
|
||||||
|
- name: Build app
|
||||||
|
run: |
|
||||||
|
make build
|
||||||
|
|
||||||
|
APP_IMAGE=./src-tauri/target/release/bundle/appimage/$(ls ./src-tauri/target/release/bundle/appimage/ | grep AppImage | head -1)
|
||||||
|
yarn tauri signer sign \
|
||||||
|
--private-key "$TAURI_SIGNING_PRIVATE_KEY" \
|
||||||
|
--password "$TAURI_SIGNING_PRIVATE_KEY_PASSWORD" \
|
||||||
|
"$APP_IMAGE"
|
||||||
|
|
||||||
|
env:
|
||||||
|
RELEASE_CHANNEL: '${{ inputs.channel }}'
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
||||||
|
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||||
|
AUTO_UPDATER_DISABLED: ${{ inputs.disable_updater && 'true' || 'false' }}
|
||||||
|
# Publish app
|
||||||
|
|
||||||
|
## Artifacts, for dev and test
|
||||||
|
- name: Upload Artifact
|
||||||
|
if: inputs.public_provider != 'github'
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-linux-amd64-${{ inputs.new_version }}-deb
|
||||||
|
path: ./src-tauri/target/release/bundle/deb/*.deb
|
||||||
|
|
||||||
|
- name: Upload Artifact
|
||||||
|
if: inputs.public_provider != 'github'
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-linux-amd64-${{ inputs.new_version }}-AppImage
|
||||||
|
path: ./src-tauri/target/release/bundle/appimage/*.AppImage
|
||||||
|
|
||||||
|
## Set output filename for linux
|
||||||
|
- name: Set output filename for linux
|
||||||
|
id: packageinfo
|
||||||
|
run: |
|
||||||
|
cd ./src-tauri/target/release/bundle
|
||||||
|
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
DEB_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
|
||||||
|
APPIMAGE_FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
|
||||||
|
DEB_SIG=$(cat deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig)
|
||||||
|
APPIMAGE_SIG=$(cat appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig)
|
||||||
|
else
|
||||||
|
DEB_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.deb
|
||||||
|
APPIMAGE_FILE_NAME=Jan_${{ inputs.new_version }}_amd64.AppImage
|
||||||
|
DEB_SIG=$(cat deb/Jan_${{ inputs.new_version }}_amd64.deb.sig)
|
||||||
|
APPIMAGE_SIG=$(cat appimage/Jan_${{ inputs.new_version }}_amd64.AppImage.sig)
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "DEB_SIG=$DEB_SIG" >> $GITHUB_OUTPUT
|
||||||
|
echo "APPIMAGE_SIG=$APPIMAGE_SIG" >> $GITHUB_OUTPUT
|
||||||
|
echo "DEB_FILE_NAME=$DEB_FILE_NAME" >> $GITHUB_OUTPUT
|
||||||
|
echo "APPIMAGE_FILE_NAME=$APPIMAGE_FILE_NAME" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
## Upload to s3 for nightly and beta
|
||||||
|
- name: upload to aws s3 if public provider is aws
|
||||||
|
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
|
||||||
|
run: |
|
||||||
|
cd ./src-tauri/target/release/bundle
|
||||||
|
|
||||||
|
# Upload for tauri updater
|
||||||
|
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage
|
||||||
|
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb
|
||||||
|
aws s3 cp ./appimage/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.AppImage.sig
|
||||||
|
aws s3 cp ./deb/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_amd64.deb.sig
|
||||||
|
env:
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
|
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
||||||
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
|
|
||||||
|
- name: Upload release asset if public provider is github
|
||||||
|
if: inputs.public_provider == 'github'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/release/bundle/appimage/${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
|
||||||
|
asset_name: ${{ steps.packageinfo.outputs.APPIMAGE_FILE_NAME }}
|
||||||
|
asset_content_type: application/octet-stream
|
||||||
|
|
||||||
|
- name: Upload release asset if public provider is github
|
||||||
|
if: inputs.public_provider == 'github'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/release/bundle/deb/${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
|
||||||
|
asset_name: ${{ steps.packageinfo.outputs.DEB_FILE_NAME }}
|
||||||
|
asset_content_type: application/octet-stream
|
||||||
103
.github/workflows/template-tauri-build-macos-external.yml
vendored
Normal file
103
.github/workflows/template-tauri-build-macos-external.yml
vendored
Normal file
@ -0,0 +1,103 @@
|
|||||||
|
name: tauri-build-macos-external
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
ref:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'refs/heads/main'
|
||||||
|
new_version:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
channel:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'nightly'
|
||||||
|
description: 'The channel to use for this job'
|
||||||
|
jobs:
|
||||||
|
build-macos-external:
|
||||||
|
runs-on: macos-latest
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.ref }}
|
||||||
|
|
||||||
|
- name: Replace Icons for Beta Build
|
||||||
|
if: inputs.channel != 'stable'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Install ctoml
|
||||||
|
run: |
|
||||||
|
cargo install ctoml
|
||||||
|
|
||||||
|
- name: Update app version
|
||||||
|
run: |
|
||||||
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
|
# Update tauri plugin versions
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Build app
|
||||||
|
run: |
|
||||||
|
make build
|
||||||
|
env:
|
||||||
|
APP_PATH: '.'
|
||||||
|
|
||||||
|
- name: Upload Artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg
|
||||||
|
path: |
|
||||||
|
./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg
|
||||||
264
.github/workflows/template-tauri-build-macos.yml
vendored
Normal file
264
.github/workflows/template-tauri-build-macos.yml
vendored
Normal file
@ -0,0 +1,264 @@
|
|||||||
|
name: tauri-build-macos
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
ref:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'refs/heads/main'
|
||||||
|
public_provider:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: none
|
||||||
|
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
||||||
|
new_version:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
cortex_api_port:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
upload_url:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
channel:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'nightly'
|
||||||
|
description: 'The channel to use for this job'
|
||||||
|
secrets:
|
||||||
|
DELTA_AWS_S3_BUCKET_NAME:
|
||||||
|
required: false
|
||||||
|
DELTA_AWS_ACCESS_KEY_ID:
|
||||||
|
required: false
|
||||||
|
DELTA_AWS_SECRET_ACCESS_KEY:
|
||||||
|
required: false
|
||||||
|
CODE_SIGN_P12_BASE64:
|
||||||
|
required: false
|
||||||
|
CODE_SIGN_P12_PASSWORD:
|
||||||
|
required: false
|
||||||
|
APPLE_ID:
|
||||||
|
required: false
|
||||||
|
APPLE_APP_SPECIFIC_PASSWORD:
|
||||||
|
required: false
|
||||||
|
DEVELOPER_ID:
|
||||||
|
required: false
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY:
|
||||||
|
required: false
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
||||||
|
required: false
|
||||||
|
outputs:
|
||||||
|
MAC_UNIVERSAL_SIG:
|
||||||
|
value: ${{ jobs.build-macos.outputs.MAC_UNIVERSAL_SIG }}
|
||||||
|
FILE_NAME:
|
||||||
|
value: ${{ jobs.build-macos.outputs.FILE_NAME }}
|
||||||
|
DMG_NAME:
|
||||||
|
value: ${{ jobs.build-macos.outputs.DMG_NAME }}
|
||||||
|
TAR_NAME:
|
||||||
|
value: ${{ jobs.build-macos.outputs.TAR_NAME }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-macos:
|
||||||
|
runs-on: macos-latest
|
||||||
|
outputs:
|
||||||
|
MAC_UNIVERSAL_SIG: ${{ steps.metadata.outputs.MAC_UNIVERSAL_SIG }}
|
||||||
|
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
|
DMG_NAME: ${{ steps.metadata.outputs.DMG_NAME }}
|
||||||
|
TAR_NAME: ${{ steps.metadata.outputs.TAR_NAME }}
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.ref }}
|
||||||
|
- name: Replace Icons for Beta Build
|
||||||
|
if: inputs.channel != 'stable'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Install ctoml
|
||||||
|
run: |
|
||||||
|
cargo install ctoml
|
||||||
|
|
||||||
|
- name: Update app version based on latest release tag with build number
|
||||||
|
run: |
|
||||||
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
|
# Update tauri.conf.json
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
|
# Update tauri plugin versions
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
# Temporarily enable devtool on prod build
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
# Change app name for beta and nightly builds
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
|
||||||
|
cat ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
# Update Cargo.toml
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
echo "------------------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
|
cat ./package.json
|
||||||
|
fi
|
||||||
|
- name: Get key for notarize
|
||||||
|
run: base64 -d <<< "$NOTARIZE_P8_BASE64" > /tmp/notary-key.p8
|
||||||
|
shell: bash
|
||||||
|
env:
|
||||||
|
NOTARIZE_P8_BASE64: ${{ secrets.NOTARIZE_P8_BASE64 }}
|
||||||
|
|
||||||
|
- uses: apple-actions/import-codesign-certs@v2
|
||||||
|
continue-on-error: true
|
||||||
|
with:
|
||||||
|
p12-file-base64: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
||||||
|
p12-password: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Build app
|
||||||
|
run: |
|
||||||
|
make build
|
||||||
|
env:
|
||||||
|
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
APP_PATH: '.'
|
||||||
|
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
||||||
|
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
||||||
|
APPLE_CERTIFICATE: ${{ secrets.CODE_SIGN_P12_BASE64 }}
|
||||||
|
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.CODE_SIGN_P12_PASSWORD }}
|
||||||
|
APPLE_API_ISSUER: ${{ secrets.NOTARY_ISSUER }}
|
||||||
|
APPLE_API_KEY: ${{ secrets.NOTARY_KEY_ID }}
|
||||||
|
APPLE_API_KEY_PATH: /tmp/notary-key.p8
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||||
|
|
||||||
|
# Publish app
|
||||||
|
|
||||||
|
## Artifacts, for dev and test
|
||||||
|
- name: Upload Artifact
|
||||||
|
if: inputs.public_provider != 'github'
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.dmg
|
||||||
|
path: |
|
||||||
|
./src-tauri/target/universal-apple-darwin/release/bundle/dmg/*.dmg
|
||||||
|
|
||||||
|
## Set output filename for mac
|
||||||
|
- name: Set output filename for mac
|
||||||
|
run: |
|
||||||
|
cd ./src-tauri/target/universal-apple-darwin/release/bundle/macos
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
zip -r jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip Jan-${{ inputs.channel }}.app
|
||||||
|
FILE_NAME=jan-${{ inputs.channel }}-mac-universal-${{ inputs.new_version }}.zip
|
||||||
|
DMG_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
|
||||||
|
MAC_UNIVERSAL_SIG=$(cat Jan-${{ inputs.channel }}.app.tar.gz.sig)
|
||||||
|
TAR_NAME=Jan-${{ inputs.channel }}.app.tar.gz
|
||||||
|
else
|
||||||
|
zip -r jan-mac-universal-${{ inputs.new_version }}.zip Jan.app
|
||||||
|
FILE_NAME=jan-mac-universal-${{ inputs.new_version }}.zip
|
||||||
|
MAC_UNIVERSAL_SIG=$(cat Jan.app.tar.gz.sig)
|
||||||
|
DMG_NAME=Jan_${{ inputs.new_version }}_universal.dmg
|
||||||
|
TAR_NAME=Jan.app.tar.gz
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Export step outputs via $GITHUB_OUTPUT; the legacy `::set-output` workflow
# command is deprecated/disabled by GitHub Actions. This also matches the
# style already used by the linux build workflow in this repo.
echo "MAC_UNIVERSAL_SIG=$MAC_UNIVERSAL_SIG" >> $GITHUB_OUTPUT
echo "FILE_NAME=$FILE_NAME" >> $GITHUB_OUTPUT
echo "DMG_NAME=$DMG_NAME" >> $GITHUB_OUTPUT
echo "TAR_NAME=$TAR_NAME" >> $GITHUB_OUTPUT
|
||||||
|
id: metadata
|
||||||
|
|
||||||
|
## Upload to s3 for nightly and beta
|
||||||
|
- name: upload to aws s3 if public provider is aws
|
||||||
|
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
|
||||||
|
run: |
|
||||||
|
cd ./src-tauri/target/universal-apple-darwin/release/bundle
|
||||||
|
|
||||||
|
# Upload for tauri updater
|
||||||
|
aws s3 cp ./dmg/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}_universal.dmg
|
||||||
|
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz
|
||||||
|
aws s3 cp ./macos/Jan-${{ inputs.channel }}.app.tar.gz.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/Jan-${{ inputs.channel }}_${{ inputs.new_version }}.app.tar.gz.sig
|
||||||
|
env:
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
|
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
||||||
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
|
|
||||||
|
- name: Upload release asset if public provider is github
|
||||||
|
if: inputs.public_provider == 'github'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
|
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
|
asset_content_type: application/gzip
|
||||||
|
|
||||||
|
- name: Upload release asset if public provider is github
|
||||||
|
if: inputs.public_provider == 'github'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/dmg/${{ steps.metadata.outputs.DMG_NAME }}
|
||||||
|
asset_name: ${{ steps.metadata.outputs.DMG_NAME }}
|
||||||
|
asset_content_type: application/octet-stream
|
||||||
|
|
||||||
|
- name: Upload release asset if public provider is github
|
||||||
|
if: inputs.public_provider == 'github'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/universal-apple-darwin/release/bundle/macos/${{ steps.metadata.outputs.TAR_NAME }}
|
||||||
|
asset_name: ${{ steps.metadata.outputs.TAR_NAME }}
|
||||||
|
asset_content_type: application/gzip
|
||||||
156
.github/workflows/template-tauri-build-windows-x64-external.yml
vendored
Normal file
156
.github/workflows/template-tauri-build-windows-x64-external.yml
vendored
Normal file
@ -0,0 +1,156 @@
|
|||||||
|
name: tauri-build-windows-x64-external
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
ref:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'refs/heads/main'
|
||||||
|
new_version:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
channel:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'nightly'
|
||||||
|
description: 'The channel to use for this job'
|
||||||
|
jobs:
|
||||||
|
build-windows-x64-external:
|
||||||
|
runs-on: windows-latest
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.ref }}
|
||||||
|
|
||||||
|
- name: Replace Icons for Beta Build
|
||||||
|
if: inputs.channel != 'stable'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Install ctoml
|
||||||
|
run: |
|
||||||
|
cargo install ctoml
|
||||||
|
|
||||||
|
- name: Update app version
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
|
# Update tauri.conf.json
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = false' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
|
||||||
|
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
|
||||||
|
jq '.bundle.windows.signCommand = "echo External build - skipping signature: %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
|
||||||
|
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
|
# Update tauri plugin versions
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
generate_build_version() {
|
||||||
|
### Example
|
||||||
|
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
|
||||||
|
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
|
||||||
|
### input 0.5.6-1213 output will be 0.5.6 and 0.5.6.1213
|
||||||
|
local new_version="$1"
|
||||||
|
local base_version
|
||||||
|
local t_value
|
||||||
|
# Check if it has a "-"
|
||||||
|
if [[ "$new_version" == *-* ]]; then
|
||||||
|
base_version="${new_version%%-*}" # part before -
|
||||||
|
suffix="${new_version#*-}" # part after -
|
||||||
|
# Check if it is rcX-beta
|
||||||
|
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
|
||||||
|
t_value="${BASH_REMATCH[1]}"
|
||||||
|
else
|
||||||
|
t_value="$suffix"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
base_version="$new_version"
|
||||||
|
t_value="0"
|
||||||
|
fi
|
||||||
|
# Export two values
|
||||||
|
new_base_version="$base_version"
|
||||||
|
new_build_version="${base_version}.${t_value}"
|
||||||
|
}
|
||||||
|
generate_build_version ${{ inputs.new_version }}
|
||||||
|
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
# Update product name
|
||||||
|
jq --arg name "Jan-${{ inputs.channel }}" '.productName = $name' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
|
||||||
|
echo "---------tauri.conf.json---------"
|
||||||
|
cat ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
# Update Cargo.toml
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
echo "------------------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
|
cat ./package.json
|
||||||
|
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
else
|
||||||
|
sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
fi
|
||||||
|
echo "---------nsis.template---------"
|
||||||
|
cat ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
- name: Build app
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
make build
|
||||||
|
|
||||||
|
- name: Upload Artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-windows-${{ inputs.new_version }}
|
||||||
|
path: |
|
||||||
|
./src-tauri/target/release/bundle/nsis/*.exe
|
||||||
291
.github/workflows/template-tauri-build-windows-x64.yml
vendored
Normal file
291
.github/workflows/template-tauri-build-windows-x64.yml
vendored
Normal file
@ -0,0 +1,291 @@
|
|||||||
|
name: tauri-build-windows-x64
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
inputs:
|
||||||
|
ref:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'refs/heads/main'
|
||||||
|
public_provider:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: none
|
||||||
|
description: 'none: build only, github: build and publish to github, aws s3: build and publish to aws s3'
|
||||||
|
new_version:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
cortex_api_port:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
upload_url:
|
||||||
|
required: false
|
||||||
|
type: string
|
||||||
|
default: ''
|
||||||
|
channel:
|
||||||
|
required: true
|
||||||
|
type: string
|
||||||
|
default: 'nightly'
|
||||||
|
description: 'The channel to use for this job'
|
||||||
|
secrets:
|
||||||
|
DELTA_AWS_S3_BUCKET_NAME:
|
||||||
|
required: false
|
||||||
|
DELTA_AWS_ACCESS_KEY_ID:
|
||||||
|
required: false
|
||||||
|
DELTA_AWS_SECRET_ACCESS_KEY:
|
||||||
|
required: false
|
||||||
|
AZURE_KEY_VAULT_URI:
|
||||||
|
required: false
|
||||||
|
AZURE_CLIENT_ID:
|
||||||
|
required: false
|
||||||
|
AZURE_TENANT_ID:
|
||||||
|
required: false
|
||||||
|
AZURE_CLIENT_SECRET:
|
||||||
|
required: false
|
||||||
|
AZURE_CERT_NAME:
|
||||||
|
required: false
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY:
|
||||||
|
required: false
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD:
|
||||||
|
required: false
|
||||||
|
outputs:
|
||||||
|
WIN_SIG:
|
||||||
|
value: ${{ jobs.build-windows-x64.outputs.WIN_SIG }}
|
||||||
|
FILE_NAME:
|
||||||
|
value: ${{ jobs.build-windows-x64.outputs.FILE_NAME }}
|
||||||
|
MSI_FILE_NAME:
|
||||||
|
value: ${{ jobs.build-windows-x64.outputs.MSI_FILE_NAME }}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build-windows-x64:
|
||||||
|
runs-on: windows-latest
|
||||||
|
outputs:
|
||||||
|
WIN_SIG: ${{ steps.metadata.outputs.WIN_SIG }}
|
||||||
|
FILE_NAME: ${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
|
MSI_FILE_NAME: ${{ steps.metadata.outputs.MSI_FILE_NAME }}
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
steps:
|
||||||
|
- name: Getting the repo
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
ref: ${{ inputs.ref }}
|
||||||
|
|
||||||
|
- name: Replace Icons for Beta Build
|
||||||
|
if: inputs.channel != 'stable'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
cp .github/scripts/icon-${{ inputs.channel }}.png src-tauri/icons/icon.png
|
||||||
|
|
||||||
|
- name: Installing node
|
||||||
|
uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 20
|
||||||
|
|
||||||
|
- name: Install jq
|
||||||
|
uses: dcarbone/install-jq-action@v2.0.1
|
||||||
|
|
||||||
|
- name: Install ctoml
|
||||||
|
run: |
|
||||||
|
cargo install ctoml
|
||||||
|
|
||||||
|
- name: Update app version base on tag
|
||||||
|
id: version_update
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "Version: ${{ inputs.new_version }}"
|
||||||
|
# Update tauri.conf.json
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version | .bundle.createUpdaterArtifacts = true' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
jq '.bundle.windows.nsis.template = "tauri.bundle.windows.nsis.template"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
|
||||||
|
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' web-app/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json web-app/package.json
|
||||||
|
|
||||||
|
# Add sign commands to tauri.windows.conf.json
|
||||||
|
jq '.bundle.windows.signCommand = "powershell -ExecutionPolicy Bypass -File ./sign.ps1 %1"' ./src-tauri/tauri.windows.conf.json > /tmp/tauri.windows.conf.json
|
||||||
|
mv /tmp/tauri.windows.conf.json ./src-tauri/tauri.windows.conf.json
|
||||||
|
|
||||||
|
# Update tauri plugin versions
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-hardware/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/package.json
|
||||||
|
|
||||||
|
jq --arg version "${{ inputs.new_version }}" '.version = $version' ./src-tauri/plugins/tauri-plugin-llamacpp/package.json > /tmp/package.json
|
||||||
|
mv /tmp/package.json ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/package.json---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/package.json
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||||
|
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.version "${{ inputs.new_version }}"
|
||||||
|
echo "---------./src-tauri/Cargo.toml---------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
generate_build_version() {
|
||||||
|
### Example
|
||||||
|
### input 0.5.6 output will be 0.5.6 and 0.5.6.0
|
||||||
|
### input 0.5.6-rc2-beta output will be 0.5.6 and 0.5.6.2
|
||||||
|
### input 0.5.6-1213 output will be 0.5.6 and and 0.5.6.1213
|
||||||
|
local new_version="$1"
|
||||||
|
local base_version
|
||||||
|
local t_value
|
||||||
|
# Check if it has a "-"
|
||||||
|
if [[ "$new_version" == *-* ]]; then
|
||||||
|
base_version="${new_version%%-*}" # part before -
|
||||||
|
suffix="${new_version#*-}" # part after -
|
||||||
|
# Check if it is rcX-beta
|
||||||
|
if [[ "$suffix" =~ ^rc([0-9]+)-beta$ ]]; then
|
||||||
|
t_value="${BASH_REMATCH[1]}"
|
||||||
|
else
|
||||||
|
t_value="$suffix"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
base_version="$new_version"
|
||||||
|
t_value="0"
|
||||||
|
fi
|
||||||
|
# Export two values
|
||||||
|
new_base_version="$base_version"
|
||||||
|
new_build_version="${base_version}.${t_value}"
|
||||||
|
}
|
||||||
|
generate_build_version ${{ inputs.new_version }}
|
||||||
|
sed -i "s/jan_version/$new_base_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
sed -i "s/jan_build/$new_build_version/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
|
||||||
|
echo "---------tauri.windows.conf.json---------"
|
||||||
|
cat ./src-tauri/tauri.windows.conf.json
|
||||||
|
|
||||||
|
# Temporarily enable devtool on prod build
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
# Change app name for beta and nightly builds
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
# Update updater endpoint
|
||||||
|
jq '.plugins.updater.endpoints = ["https://delta.jan.ai/${{ inputs.channel }}/latest.json"]' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
# Update product name
|
||||||
|
jq --arg name "Jan-${{ inputs.channel }}" '.productName = $name' ./src-tauri/tauri.conf.json > /tmp/tauri.conf.json
|
||||||
|
mv /tmp/tauri.conf.json ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-tauri-app.sh
|
||||||
|
.github/scripts/rename-tauri-app.sh ./src-tauri/tauri.conf.json ${{ inputs.channel }}
|
||||||
|
|
||||||
|
echo "---------tauri.conf.json---------"
|
||||||
|
cat ./src-tauri/tauri.conf.json
|
||||||
|
|
||||||
|
# Update Cargo.toml
|
||||||
|
ctoml ./src-tauri/Cargo.toml package.name "Jan-${{ inputs.channel }}"
|
||||||
|
ctoml ./src-tauri/Cargo.toml dependencies.tauri.features[] "devtools"
|
||||||
|
echo "------------------"
|
||||||
|
cat ./src-tauri/Cargo.toml
|
||||||
|
|
||||||
|
chmod +x .github/scripts/rename-workspace.sh
|
||||||
|
.github/scripts/rename-workspace.sh ./package.json ${{ inputs.channel }}
|
||||||
|
cat ./package.json
|
||||||
|
sed -i "s/jan_productname/Jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
sed -i "s/jan_mainbinaryname/jan-${{ inputs.channel }}/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
else
|
||||||
|
sed -i "s/jan_productname/Jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
sed -i "s/jan_mainbinaryname/jan/g" ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
fi
|
||||||
|
echo "---------nsis.template---------"
|
||||||
|
cat ./src-tauri/tauri.bundle.windows.nsis.template
|
||||||
|
|
||||||
|
- name: Install AzureSignTool
|
||||||
|
run: |
|
||||||
|
dotnet tool install --global --version 6.0.0 AzureSignTool
|
||||||
|
|
||||||
|
- name: Build app
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
make build
|
||||||
|
env:
|
||||||
|
AZURE_KEY_VAULT_URI: ${{ secrets.AZURE_KEY_VAULT_URI }}
|
||||||
|
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
|
||||||
|
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
||||||
|
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_CLIENT_SECRET }}
|
||||||
|
AZURE_CERT_NAME: ${{ secrets.AZURE_CERT_NAME }}
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
|
AWS_DEFAULT_REGION: auto
|
||||||
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
|
AWS_MAX_ATTEMPTS: '5'
|
||||||
|
POSTHOG_KEY: ${{ secrets.POSTHOG_KEY }}
|
||||||
|
POSTHOG_HOST: ${{ secrets.POSTHOG_HOST }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||||
|
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||||
|
|
||||||
|
- name: Upload Artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-windows-exe-${{ inputs.new_version }}
|
||||||
|
path: |
|
||||||
|
./src-tauri/target/release/bundle/nsis/*.exe
|
||||||
|
- name: Upload Artifact
|
||||||
|
uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: jan-windows-msi-${{ inputs.new_version }}
|
||||||
|
path: |
|
||||||
|
./src-tauri/target/release/bundle/msi/*.msi
|
||||||
|
|
||||||
|
## Set output filename for windows
|
||||||
|
- name: Set output filename for windows
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
cd ./src-tauri/target/release/bundle/nsis
|
||||||
|
if [ "${{ inputs.channel }}" != "stable" ]; then
|
||||||
|
FILE_NAME=Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe
|
||||||
|
WIN_SIG=$(cat Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64-setup.exe.sig)
|
||||||
|
|
||||||
|
MSI_FILE="Jan-${{ inputs.channel }}_${{ inputs.new_version }}_x64_en-US.msi"
|
||||||
|
else
|
||||||
|
FILE_NAME=Jan_${{ inputs.new_version }}_x64-setup.exe
|
||||||
|
WIN_SIG=$(cat Jan_${{ inputs.new_version }}_x64-setup.exe.sig)
|
||||||
|
|
||||||
|
MSI_FILE="Jan_${{ inputs.new_version }}_x64_en-US.msi"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "::set-output name=WIN_SIG::$WIN_SIG"
|
||||||
|
echo "::set-output name=FILE_NAME::$FILE_NAME"
|
||||||
|
echo "::set-output name=MSI_FILE_NAME::$MSI_FILE"
|
||||||
|
id: metadata
|
||||||
|
|
||||||
|
## Upload to s3 for nightly and beta
|
||||||
|
- name: upload to aws s3 if public provider is aws
|
||||||
|
shell: bash
|
||||||
|
if: inputs.public_provider == 'aws-s3' || inputs.channel == 'beta'
|
||||||
|
run: |
|
||||||
|
cd ./src-tauri/target/release/bundle/nsis
|
||||||
|
|
||||||
|
# Upload for tauri updater
|
||||||
|
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }} s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
|
aws s3 cp ./${{ steps.metadata.outputs.FILE_NAME }}.sig s3://${{ secrets.DELTA_AWS_S3_BUCKET_NAME }}/temp-${{ inputs.channel }}/${{ steps.metadata.outputs.FILE_NAME }}.sig
|
||||||
|
env:
|
||||||
|
AWS_ACCESS_KEY_ID: ${{ secrets.DELTA_AWS_ACCESS_KEY_ID }}
|
||||||
|
AWS_SECRET_ACCESS_KEY: ${{ secrets.DELTA_AWS_SECRET_ACCESS_KEY }}
|
||||||
|
AWS_DEFAULT_REGION: ${{ secrets.DELTA_AWS_REGION }}
|
||||||
|
AWS_EC2_METADATA_DISABLED: 'true'
|
||||||
|
|
||||||
|
- name: Upload release assert if public provider is github
|
||||||
|
if: inputs.public_provider == 'github'
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
uses: actions/upload-release-asset@v1.0.1
|
||||||
|
with:
|
||||||
|
upload_url: ${{ inputs.upload_url }}
|
||||||
|
asset_path: ./src-tauri/target/release/bundle/nsis/${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
|
asset_name: ${{ steps.metadata.outputs.FILE_NAME }}
|
||||||
|
asset_content_type: application/octet-stream
|
||||||
82
.gitignore
vendored
82
.gitignore
vendored
@ -1,43 +1,65 @@
|
|||||||
.vscode
|
|
||||||
.idea
|
|
||||||
.env
|
.env
|
||||||
.idea
|
|
||||||
|
|
||||||
# Jan inference
|
|
||||||
error.log
|
error.log
|
||||||
node_modules
|
node_modules
|
||||||
*.tgz
|
*.tgz
|
||||||
!charts/server/charts/*.tgz
|
|
||||||
yarn.lock
|
|
||||||
dist
|
dist
|
||||||
build
|
build
|
||||||
.DS_Store
|
.DS_Store
|
||||||
electron/renderer
|
|
||||||
electron/models
|
|
||||||
electron/docs
|
|
||||||
electron/engines
|
|
||||||
electron/themes
|
|
||||||
electron/playwright-report
|
|
||||||
server/pre-install
|
|
||||||
package-lock.json
|
package-lock.json
|
||||||
|
coverage
|
||||||
*.log
|
*.log
|
||||||
core/lib/**
|
core/lib/**
|
||||||
|
.yarn
|
||||||
|
.yarnrc
|
||||||
|
*.tsbuildinfo
|
||||||
|
test_results.html
|
||||||
|
pre-install
|
||||||
|
|
||||||
# Nitro binary files
|
# docs
|
||||||
extensions/*-extension/bin/*/nitro
|
docs/yarn.lock
|
||||||
extensions/*-extension/bin/*/*.metal
|
src-tauri/resources/lib
|
||||||
extensions/*-extension/bin/*/*.exe
|
src-tauri/icons
|
||||||
extensions/*-extension/bin/*/*.dll
|
!src-tauri/icons/icon.png
|
||||||
extensions/*-extension/bin/*/*.exp
|
src-tauri/gen/apple
|
||||||
extensions/*-extension/bin/*/*.lib
|
src-tauri/gen/android
|
||||||
extensions/*-extension/bin/saved-*
|
src-tauri/resources/bin
|
||||||
extensions/*-extension/bin/*.tar.gz
|
|
||||||
extensions/*-extension/bin/vulkaninfoSDK.exe
|
|
||||||
extensions/*-extension/bin/vulkaninfo
|
|
||||||
|
|
||||||
|
# Helper tools
|
||||||
|
.opencode
|
||||||
|
OpenCode.md
|
||||||
|
Claude.md
|
||||||
|
archive/
|
||||||
|
.cache/
|
||||||
|
|
||||||
# Turborepo
|
# auto qa
|
||||||
.turbo
|
autoqa/trajectories
|
||||||
electron/test-data
|
autoqa/recordings
|
||||||
electron/test-results
|
autoqa/__pycache__
|
||||||
|
|
||||||
|
# Astro / Starlight specific
|
||||||
|
website/dist/
|
||||||
|
website/.astro/
|
||||||
|
website/src/content/config.ts.timestamp-*
|
||||||
|
|
||||||
|
# Nextra specific
|
||||||
|
docs/out/
|
||||||
|
docs/.next/
|
||||||
|
|
||||||
|
# General Node.js
|
||||||
|
**/node_modules
|
||||||
|
**/.env
|
||||||
|
**/.env.*
|
||||||
|
**/npm-debug.log*
|
||||||
|
**/yarn-debug.log*
|
||||||
|
**/yarn-error.log*
|
||||||
|
**/pnpm-debug.log*
|
||||||
|
|
||||||
|
## cargo
|
||||||
|
target
|
||||||
|
Cargo.lock
|
||||||
|
src-tauri/resources/
|
||||||
|
|
||||||
|
## test
|
||||||
|
test-data
|
||||||
|
llm-docs
|
||||||
|
.claude/agents
|
||||||
|
|||||||
@ -1 +1 @@
|
|||||||
npm run lint --fix
|
yarn lint --fix --quiet
|
||||||
3
.yarnrc.yml
Normal file
3
.yarnrc.yml
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
nmHoistingLimits: workspaces
|
||||||
|
nodeLinker: node-modules
|
||||||
|
checksumBehavior: update
|
||||||
248
CONTRIBUTING.md
248
CONTRIBUTING.md
@ -1,32 +1,252 @@
|
|||||||
# Contributing to jan
|
# Contributing to Jan
|
||||||
|
|
||||||
First off, thank you for considering contributing to jan. It's people like you that make jan such an amazing project.
|
First off, thank you for considering contributing to Jan. It's people like you that make Jan such an amazing project.
|
||||||
|
|
||||||
|
Jan is an AI assistant that can run 100% offline on your device. Think ChatGPT, but private, local, and under your complete control. If you're thinking about contributing, you're already awesome - let's make AI accessible to everyone, one commit at a time.
|
||||||
|
|
||||||
|
## Quick Links to Component Guides
|
||||||
|
|
||||||
|
- **[Web App](./web-app/CONTRIBUTING.md)** - React UI and logic
|
||||||
|
- **[Core SDK](./core/CONTRIBUTING.md)** - TypeScript SDK and extension system
|
||||||
|
- **[Extensions](./extensions/CONTRIBUTING.md)** - Supportive modules for the frontend
|
||||||
|
- **[Tauri Backend](./src-tauri/CONTRIBUTING.md)** - Rust native integration
|
||||||
|
- **[Tauri Plugins](./src-tauri/plugins/CONTRIBUTING.md)** - Hardware and system plugins
|
||||||
|
|
||||||
|
## How Jan Actually Works
|
||||||
|
|
||||||
|
Jan is a desktop app that runs local AI models. Here's how the components actually connect:
|
||||||
|
|
||||||
|
```
|
||||||
|
┌──────────────────────────────────────────────────────────┐
|
||||||
|
│ Web App (Frontend) │
|
||||||
|
│ (web-app/) │
|
||||||
|
│ • React UI │
|
||||||
|
│ • Chat Interface │
|
||||||
|
│ • Settings Pages │
|
||||||
|
│ • Model Hub │
|
||||||
|
└────────────┬─────────────────────────────┬───────────────┘
|
||||||
|
│ │
|
||||||
|
│ imports │ imports
|
||||||
|
▼ ▼
|
||||||
|
┌──────────────────────┐ ┌──────────────────────┐
|
||||||
|
│ Core SDK │ │ Extensions │
|
||||||
|
│ (core/) │ │ (extensions/) │
|
||||||
|
│ │ │ │
|
||||||
|
│ • TypeScript APIs │◄─────│ • Assistant Mgmt │
|
||||||
|
│ • Extension System │ uses │ • Conversations │
|
||||||
|
│ • Event Bus │ │ • Downloads │
|
||||||
|
│ • Type Definitions │ │ • LlamaCPP │
|
||||||
|
└──────────┬───────────┘ └───────────┬──────────┘
|
||||||
|
│ │
|
||||||
|
│ ┌──────────────────────┐ │
|
||||||
|
│ │ Web App │ │
|
||||||
|
│ └──────────┬───────────┘ │
|
||||||
|
│ │ │
|
||||||
|
└──────────────┼───────────────┘
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
Tauri IPC
|
||||||
|
(invoke commands)
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
┌───────────────────────────────────────────────────────────┐
|
||||||
|
│ Tauri Backend (Rust) │
|
||||||
|
│ (src-tauri/) │
|
||||||
|
│ │
|
||||||
|
│ • Window Management • File System Access │
|
||||||
|
│ • Process Control • System Integration │
|
||||||
|
│ • IPC Command Handler • Security & Permissions │
|
||||||
|
└───────────────────────────┬───────────────────────────────┘
|
||||||
|
│
|
||||||
|
│
|
||||||
|
▼
|
||||||
|
┌───────────────────────────────────────────────────────────┐
|
||||||
|
│ Tauri Plugins (Rust) │
|
||||||
|
│ (src-tauri/plugins/) │
|
||||||
|
│ │
|
||||||
|
│ ┌──────────────────┐ ┌──────────────────┐ │
|
||||||
|
│ │ Hardware Plugin │ │ LlamaCPP Plugin │ │
|
||||||
|
│ │ │ │ │ │
|
||||||
|
│ │ • CPU/GPU Info │ │ • Process Mgmt │ │
|
||||||
|
│ │ • Memory Stats │ │ • Model Loading │ │
|
||||||
|
│ │ • System Info │ │ • Inference │ │
|
||||||
|
│ └──────────────────┘ └──────────────────┘ │
|
||||||
|
└───────────────────────────────────────────────────────────┘
|
||||||
|
```
|
||||||
|
|
||||||
|
### The Communication Flow
|
||||||
|
|
||||||
|
1. **JavaScript Layer Relationships**:
|
||||||
|
- Web App imports Core SDK and Extensions as JavaScript modules
|
||||||
|
- Extensions use Core SDK for shared functionality
|
||||||
|
- All run in the browser/webview context
|
||||||
|
|
||||||
|
2. **All Three → Backend**: Through Tauri IPC
|
||||||
|
- **Web App** → Backend: `await invoke('app_command', data)`
|
||||||
|
- **Core SDK** → Backend: `await invoke('core_command', data)`
|
||||||
|
- **Extensions** → Backend: `await invoke('ext_command', data)`
|
||||||
|
- Each component can independently call backend commands
|
||||||
|
|
||||||
|
3. **Backend → Plugins**: Native Rust integration
|
||||||
|
- Backend loads plugins as Rust libraries
|
||||||
|
- Direct function calls, no IPC overhead
|
||||||
|
|
||||||
|
4. **Response Flow**:
|
||||||
|
- Plugin → Backend → IPC → Requester (Web App/Core/Extension) → UI updates
|
||||||
|
|
||||||
|
### Real-World Example: Loading a Model
|
||||||
|
|
||||||
|
Here's what actually happens when you click "Download Llama 3":
|
||||||
|
|
||||||
|
1. **Web App** (`web-app/`) - User clicks download button
|
||||||
|
2. **Extension** (`extensions/download-extension`) - Handles the download logic
|
||||||
|
3. **Tauri Backend** (`src-tauri/`) - Actually downloads the file to disk
|
||||||
|
4. **Extension** (`extensions/llamacpp-extension`) - Prepares model for loading
|
||||||
|
5. **Tauri Plugin** (`src-tauri/plugins/llamacpp`) - Starts llama.cpp process
|
||||||
|
6. **Hardware Plugin** (`src-tauri/plugins/hardware`) - Detects GPU, optimizes settings
|
||||||
|
7. **Model ready!** - User can start chatting
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
jan/
|
||||||
|
├── web-app/ # React frontend (what users see)
|
||||||
|
├── src-tauri/ # Rust backend (system integration)
|
||||||
|
│ ├── src/core/ # Core Tauri commands
|
||||||
|
│ └── plugins/ # Tauri plugins (hardware, llamacpp)
|
||||||
|
├── core/ # TypeScript SDK (API layer)
|
||||||
|
├── extensions/ # JavaScript extensions
|
||||||
|
│ ├── assistant-extension/
|
||||||
|
│ ├── conversational-extension/
|
||||||
|
│ ├── download-extension/
|
||||||
|
│ └── llamacpp-extension/
|
||||||
|
├── docs/ # Documentation website
|
||||||
|
├── website/ # Marketing website
|
||||||
|
├── autoqa/ # Automated testing
|
||||||
|
├── scripts/ # Build utilities
|
||||||
|
│
|
||||||
|
├── package.json # Root workspace configuration
|
||||||
|
├── Makefile # Build automation commands
|
||||||
|
├── LICENSE # Apache 2.0 license
|
||||||
|
└── README.md # Project overview
|
||||||
|
```
|
||||||
|
|
||||||
|
## Development Setup
|
||||||
|
|
||||||
|
### The Scenic Route (Build from Source)
|
||||||
|
|
||||||
|
**Prerequisites:**
|
||||||
|
- Node.js ≥ 20.0.0
|
||||||
|
- Yarn ≥ 1.22.0
|
||||||
|
- Rust (for Tauri)
|
||||||
|
- Make ≥ 3.81
|
||||||
|
|
||||||
|
**Option 1: The Easy Way (Make)**
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/janhq/jan
|
||||||
|
cd jan
|
||||||
|
make dev
|
||||||
|
```
|
||||||
|
|
||||||
## How Can I Contribute?
|
## How Can I Contribute?
|
||||||
|
|
||||||
### Reporting Bugs
|
### Reporting Bugs
|
||||||
|
|
||||||
- **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/janhq/jan/issues).
|
- **Ensure the bug was not already reported** by searching on GitHub under [Issues](https://github.com/janhq/jan/issues)
|
||||||
- If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/janhq/jan/issues/new).
|
- If you're unable to find an open issue addressing the problem, [open a new one](https://github.com/janhq/jan/issues/new)
|
||||||
|
- Include your system specs and error logs - it helps a ton
|
||||||
|
|
||||||
### Suggesting Enhancements
|
### Suggesting Enhancements
|
||||||
|
|
||||||
- Open a new issue with a clear title and description.
|
- Open a new issue with a clear title and description
|
||||||
|
- Explain why this enhancement would be useful
|
||||||
|
- Include mockups or examples if you can
|
||||||
|
|
||||||
### Your First Code Contribution
|
### Your First Code Contribution
|
||||||
|
|
||||||
- Fork the repo.
|
**Choose Your Adventure:**
|
||||||
- Create a new branch (`git checkout -b feature-name`).
|
- **Frontend UI and logic** → `web-app/`
|
||||||
- Commit your changes (`git commit -am 'Add some feature'`).
|
- **Shared API declarations** → `core/`
|
||||||
- Push to the branch (`git push origin feature-name`).
|
- **Backend system integration** → `src-tauri/`
|
||||||
- Open a new Pull Request.
|
- **Business logic features** → `extensions/`
|
||||||
|
- **Dedicated backend handler** → `src-tauri/plugins/`
|
||||||
|
|
||||||
## Styleguides
|
**The Process:**
|
||||||
|
1. Fork the repo
|
||||||
|
2. Create a new branch (`git checkout -b feature-name`)
|
||||||
|
3. Make your changes (and write tests!)
|
||||||
|
4. Commit your changes (`git commit -am 'Add some feature'`)
|
||||||
|
5. Push to the branch (`git push origin feature-name`)
|
||||||
|
6. Open a new Pull Request against `dev` branch
|
||||||
|
|
||||||
### Git Commit Messages
|
## Testing
|
||||||
|
|
||||||
- Use the present tense ("Add feature" not "Added feature").
|
```bash
|
||||||
|
yarn test # All tests
|
||||||
|
cd src-tauri && cargo test # Rust tests
|
||||||
|
cd autoqa && python main.py # End-to-end tests
|
||||||
|
```
|
||||||
|
|
||||||
|
## Code Standards
|
||||||
|
|
||||||
|
### TypeScript/JavaScript
|
||||||
|
- TypeScript required (we're not animals)
|
||||||
|
- ESLint + Prettier
|
||||||
|
- Functional React components
|
||||||
|
- Proper typing (no `any` - seriously!)
|
||||||
|
|
||||||
|
### Rust
|
||||||
|
- `cargo fmt` + `cargo clippy`
|
||||||
|
- `Result<T, E>` for error handling
|
||||||
|
- Document public APIs
|
||||||
|
|
||||||
|
## Git Conventions
|
||||||
|
|
||||||
|
### Branches
|
||||||
|
- `main` - stable releases
|
||||||
|
- `dev` - development (target this for PRs)
|
||||||
|
- `feature/*` - new features
|
||||||
|
- `fix/*` - bug fixes
|
||||||
|
|
||||||
|
### Commit Messages
|
||||||
|
- Use the present tense ("Add feature" not "Added feature")
|
||||||
|
- Be descriptive but concise
|
||||||
|
- Reference issues when applicable
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
```
|
||||||
|
feat: add support for Qwen models
|
||||||
|
fix: resolve memory leak in model loading
|
||||||
|
docs: update installation instructions
|
||||||
|
```
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
If things go sideways:
|
||||||
|
|
||||||
|
1. **Check our [troubleshooting docs](https://jan.ai/docs/troubleshooting)**
|
||||||
|
2. **Clear everything and start fresh:** `make clean` then `make dev`
|
||||||
|
3. **Copy your error logs and system specs**
|
||||||
|
4. **Ask for help in our [Discord](https://discord.gg/FTk2MvZwJH)** `#🆘|jan-help` channel
|
||||||
|
|
||||||
|
Common issues:
|
||||||
|
- **Build failures**: Check Node.js and Rust versions
|
||||||
|
- **Extension not loading**: Verify it's properly registered
|
||||||
|
- **Model not working**: Check hardware requirements and GPU drivers
|
||||||
|
|
||||||
|
## Getting Help
|
||||||
|
|
||||||
|
- [Documentation](https://jan.ai/docs) - The manual you should read
|
||||||
|
- [Discord Community](https://discord.gg/jan) - Where the community lives
|
||||||
|
- [GitHub Issues](https://github.com/janhq/jan/issues) - Report bugs here
|
||||||
|
- [GitHub Discussions](https://github.com/janhq/jan/discussions) - Ask questions
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
Apache 2.0 - Because sharing is caring. See [LICENSE](./LICENSE) for the legal stuff.
|
||||||
|
|
||||||
## Additional Notes
|
## Additional Notes
|
||||||
|
|
||||||
Thank you for contributing to jan!
|
We're building something pretty cool here - an AI assistant that respects your privacy and runs entirely on your machine. Every contribution, no matter how small, helps make AI more accessible to everyone.
|
||||||
|
|
||||||
|
Thanks for being part of the journey. Let's build the future of local AI together! 🚀
|
||||||
|
|||||||
50
Dockerfile
Normal file
50
Dockerfile
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
# Stage 1: Build stage with Node.js and Yarn v4
|
||||||
|
FROM node:20-alpine AS builder
|
||||||
|
|
||||||
|
ARG MENLO_PLATFORM_BASE_URL=https://api-dev.menlo.ai/v1
|
||||||
|
ENV MENLO_PLATFORM_BASE_URL=$MENLO_PLATFORM_BASE_URL
|
||||||
|
|
||||||
|
# Install build dependencies
|
||||||
|
RUN apk add --no-cache \
|
||||||
|
make \
|
||||||
|
g++ \
|
||||||
|
python3 \
|
||||||
|
py3-pip \
|
||||||
|
git
|
||||||
|
|
||||||
|
# Enable corepack and install Yarn 4
|
||||||
|
RUN corepack enable && corepack prepare yarn@4.5.3 --activate
|
||||||
|
|
||||||
|
# Verify Yarn version
|
||||||
|
RUN yarn --version
|
||||||
|
|
||||||
|
# Set working directory
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy source code
|
||||||
|
COPY ./extensions-web ./extensions-web
|
||||||
|
COPY ./web-app ./web-app
|
||||||
|
COPY ./Makefile ./Makefile
|
||||||
|
COPY ./.* /
|
||||||
|
COPY ./package.json ./package.json
|
||||||
|
COPY ./yarn.lock ./yarn.lock
|
||||||
|
COPY ./pre-install ./pre-install
|
||||||
|
COPY ./core ./core
|
||||||
|
|
||||||
|
# Build web application
|
||||||
|
RUN yarn install && yarn build:core && make build-web-app
|
||||||
|
|
||||||
|
# Stage 2: Production stage with Nginx
|
||||||
|
FROM nginx:alpine
|
||||||
|
|
||||||
|
# Copy static files from build stage
|
||||||
|
COPY --from=builder /app/web-app/dist-web /usr/share/nginx/html
|
||||||
|
|
||||||
|
# Copy custom nginx config
|
||||||
|
COPY nginx.conf /etc/nginx/conf.d/default.conf
|
||||||
|
|
||||||
|
# Expose port 80
|
||||||
|
EXPOSE 80
|
||||||
|
|
||||||
|
# Start nginx
|
||||||
|
CMD ["nginx", "-g", "daemon off;"]
|
||||||
BIN
JanBanner.png
Normal file
BIN
JanBanner.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 3.7 MiB |
667
LICENSE
667
LICENSE
@ -1,660 +1,19 @@
|
|||||||
# GNU AFFERO GENERAL PUBLIC LICENSE
|
Jan
|
||||||
|
|
||||||
Version 3, 19 November 2007
|
Copyright 2025 Menlo Research
|
||||||
|
|
||||||
Copyright (C) 2007 Free Software Foundation, Inc.
|
This product includes software developed by Menlo Research (https://menlo.ai).
|
||||||
<https://fsf.org/>
|
|
||||||
|
|
||||||
Everyone is permitted to copy and distribute verbatim copies of this
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
license document, but changing it is not allowed.
|
You may not use this file except in compliance with the License.
|
||||||
|
You may obtain a copy of the License at
|
||||||
|
|
||||||
## Preamble
|
http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
The GNU Affero General Public License is a free, copyleft license for
|
Unless required by applicable law or agreed to in writing, software
|
||||||
software and other kinds of works, specifically designed to ensure
|
distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
cooperation with the community in the case of network server software.
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
See the License for the specific language governing permissions and
|
||||||
|
limitations under the License.
|
||||||
|
|
||||||
The licenses for most software and other practical works are designed
|
Attribution is requested in user-facing documentation and materials, where appropriate.
|
||||||
to take away your freedom to share and change the works. By contrast,
|
|
||||||
our General Public Licenses are intended to guarantee your freedom to
|
|
||||||
share and change all versions of a program--to make sure it remains
|
|
||||||
free software for all its users.
|
|
||||||
|
|
||||||
When we speak of free software, we are referring to freedom, not
|
|
||||||
price. Our General Public Licenses are designed to make sure that you
|
|
||||||
have the freedom to distribute copies of free software (and charge for
|
|
||||||
them if you wish), that you receive source code or can get it if you
|
|
||||||
want it, that you can change the software or use pieces of it in new
|
|
||||||
free programs, and that you know you can do these things.
|
|
||||||
|
|
||||||
Developers that use our General Public Licenses protect your rights
|
|
||||||
with two steps: (1) assert copyright on the software, and (2) offer
|
|
||||||
you this License which gives you legal permission to copy, distribute
|
|
||||||
and/or modify the software.
|
|
||||||
|
|
||||||
A secondary benefit of defending all users' freedom is that
|
|
||||||
improvements made in alternate versions of the program, if they
|
|
||||||
receive widespread use, become available for other developers to
|
|
||||||
incorporate. Many developers of free software are heartened and
|
|
||||||
encouraged by the resulting cooperation. However, in the case of
|
|
||||||
software used on network servers, this result may fail to come about.
|
|
||||||
The GNU General Public License permits making a modified version and
|
|
||||||
letting the public access it on a server without ever releasing its
|
|
||||||
source code to the public.
|
|
||||||
|
|
||||||
The GNU Affero General Public License is designed specifically to
|
|
||||||
ensure that, in such cases, the modified source code becomes available
|
|
||||||
to the community. It requires the operator of a network server to
|
|
||||||
provide the source code of the modified version running there to the
|
|
||||||
users of that server. Therefore, public use of a modified version, on
|
|
||||||
a publicly accessible server, gives the public access to the source
|
|
||||||
code of the modified version.
|
|
||||||
|
|
||||||
An older license, called the Affero General Public License and
|
|
||||||
published by Affero, was designed to accomplish similar goals. This is
|
|
||||||
a different license, not a version of the Affero GPL, but Affero has
|
|
||||||
released a new version of the Affero GPL which permits relicensing
|
|
||||||
under this license.
|
|
||||||
|
|
||||||
The precise terms and conditions for copying, distribution and
|
|
||||||
modification follow.
|
|
||||||
|
|
||||||
## TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
### 0. Definitions.
|
|
||||||
|
|
||||||
"This License" refers to version 3 of the GNU Affero General Public
|
|
||||||
License.
|
|
||||||
|
|
||||||
"Copyright" also means copyright-like laws that apply to other kinds
|
|
||||||
of works, such as semiconductor masks.
|
|
||||||
|
|
||||||
"The Program" refers to any copyrightable work licensed under this
|
|
||||||
License. Each licensee is addressed as "you". "Licensees" and
|
|
||||||
"recipients" may be individuals or organizations.
|
|
||||||
|
|
||||||
To "modify" a work means to copy from or adapt all or part of the work
|
|
||||||
in a fashion requiring copyright permission, other than the making of
|
|
||||||
an exact copy. The resulting work is called a "modified version" of
|
|
||||||
the earlier work or a work "based on" the earlier work.
|
|
||||||
|
|
||||||
A "covered work" means either the unmodified Program or a work based
|
|
||||||
on the Program.
|
|
||||||
|
|
||||||
To "propagate" a work means to do anything with it that, without
|
|
||||||
permission, would make you directly or secondarily liable for
|
|
||||||
infringement under applicable copyright law, except executing it on a
|
|
||||||
computer or modifying a private copy. Propagation includes copying,
|
|
||||||
distribution (with or without modification), making available to the
|
|
||||||
public, and in some countries other activities as well.
|
|
||||||
|
|
||||||
To "convey" a work means any kind of propagation that enables other
|
|
||||||
parties to make or receive copies. Mere interaction with a user
|
|
||||||
through a computer network, with no transfer of a copy, is not
|
|
||||||
conveying.
|
|
||||||
|
|
||||||
An interactive user interface displays "Appropriate Legal Notices" to
|
|
||||||
the extent that it includes a convenient and prominently visible
|
|
||||||
feature that (1) displays an appropriate copyright notice, and (2)
|
|
||||||
tells the user that there is no warranty for the work (except to the
|
|
||||||
extent that warranties are provided), that licensees may convey the
|
|
||||||
work under this License, and how to view a copy of this License. If
|
|
||||||
the interface presents a list of user commands or options, such as a
|
|
||||||
menu, a prominent item in the list meets this criterion.
|
|
||||||
|
|
||||||
### 1. Source Code.
|
|
||||||
|
|
||||||
The "source code" for a work means the preferred form of the work for
|
|
||||||
making modifications to it. "Object code" means any non-source form of
|
|
||||||
a work.
|
|
||||||
|
|
||||||
A "Standard Interface" means an interface that either is an official
|
|
||||||
standard defined by a recognized standards body, or, in the case of
|
|
||||||
interfaces specified for a particular programming language, one that
|
|
||||||
is widely used among developers working in that language.
|
|
||||||
|
|
||||||
The "System Libraries" of an executable work include anything, other
|
|
||||||
than the work as a whole, that (a) is included in the normal form of
|
|
||||||
packaging a Major Component, but which is not part of that Major
|
|
||||||
Component, and (b) serves only to enable use of the work with that
|
|
||||||
Major Component, or to implement a Standard Interface for which an
|
|
||||||
implementation is available to the public in source code form. A
|
|
||||||
"Major Component", in this context, means a major essential component
|
|
||||||
(kernel, window system, and so on) of the specific operating system
|
|
||||||
(if any) on which the executable work runs, or a compiler used to
|
|
||||||
produce the work, or an object code interpreter used to run it.
|
|
||||||
|
|
||||||
The "Corresponding Source" for a work in object code form means all
|
|
||||||
the source code needed to generate, install, and (for an executable
|
|
||||||
work) run the object code and to modify the work, including scripts to
|
|
||||||
control those activities. However, it does not include the work's
|
|
||||||
System Libraries, or general-purpose tools or generally available free
|
|
||||||
programs which are used unmodified in performing those activities but
|
|
||||||
which are not part of the work. For example, Corresponding Source
|
|
||||||
includes interface definition files associated with source files for
|
|
||||||
the work, and the source code for shared libraries and dynamically
|
|
||||||
linked subprograms that the work is specifically designed to require,
|
|
||||||
such as by intimate data communication or control flow between those
|
|
||||||
subprograms and other parts of the work.
|
|
||||||
|
|
||||||
The Corresponding Source need not include anything that users can
|
|
||||||
regenerate automatically from other parts of the Corresponding Source.
|
|
||||||
|
|
||||||
The Corresponding Source for a work in source code form is that same
|
|
||||||
work.
|
|
||||||
|
|
||||||
### 2. Basic Permissions.
|
|
||||||
|
|
||||||
All rights granted under this License are granted for the term of
|
|
||||||
copyright on the Program, and are irrevocable provided the stated
|
|
||||||
conditions are met. This License explicitly affirms your unlimited
|
|
||||||
permission to run the unmodified Program. The output from running a
|
|
||||||
covered work is covered by this License only if the output, given its
|
|
||||||
content, constitutes a covered work. This License acknowledges your
|
|
||||||
rights of fair use or other equivalent, as provided by copyright law.
|
|
||||||
|
|
||||||
You may make, run and propagate covered works that you do not convey,
|
|
||||||
without conditions so long as your license otherwise remains in force.
|
|
||||||
You may convey covered works to others for the sole purpose of having
|
|
||||||
them make modifications exclusively for you, or provide you with
|
|
||||||
facilities for running those works, provided that you comply with the
|
|
||||||
terms of this License in conveying all material for which you do not
|
|
||||||
control copyright. Those thus making or running the covered works for
|
|
||||||
you must do so exclusively on your behalf, under your direction and
|
|
||||||
control, on terms that prohibit them from making any copies of your
|
|
||||||
copyrighted material outside their relationship with you.
|
|
||||||
|
|
||||||
Conveying under any other circumstances is permitted solely under the
|
|
||||||
conditions stated below. Sublicensing is not allowed; section 10 makes
|
|
||||||
it unnecessary.
|
|
||||||
|
|
||||||
### 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
|
||||||
|
|
||||||
No covered work shall be deemed part of an effective technological
|
|
||||||
measure under any applicable law fulfilling obligations under article
|
|
||||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
|
||||||
similar laws prohibiting or restricting circumvention of such
|
|
||||||
measures.
|
|
||||||
|
|
||||||
When you convey a covered work, you waive any legal power to forbid
|
|
||||||
circumvention of technological measures to the extent such
|
|
||||||
circumvention is effected by exercising rights under this License with
|
|
||||||
respect to the covered work, and you disclaim any intention to limit
|
|
||||||
operation or modification of the work as a means of enforcing, against
|
|
||||||
the work's users, your or third parties' legal rights to forbid
|
|
||||||
circumvention of technological measures.
|
|
||||||
|
|
||||||
### 4. Conveying Verbatim Copies.
|
|
||||||
|
|
||||||
You may convey verbatim copies of the Program's source code as you
|
|
||||||
receive it, in any medium, provided that you conspicuously and
|
|
||||||
appropriately publish on each copy an appropriate copyright notice;
|
|
||||||
keep intact all notices stating that this License and any
|
|
||||||
non-permissive terms added in accord with section 7 apply to the code;
|
|
||||||
keep intact all notices of the absence of any warranty; and give all
|
|
||||||
recipients a copy of this License along with the Program.
|
|
||||||
|
|
||||||
You may charge any price or no price for each copy that you convey,
|
|
||||||
and you may offer support or warranty protection for a fee.
|
|
||||||
|
|
||||||
### 5. Conveying Modified Source Versions.
|
|
||||||
|
|
||||||
You may convey a work based on the Program, or the modifications to
|
|
||||||
produce it from the Program, in the form of source code under the
|
|
||||||
terms of section 4, provided that you also meet all of these
|
|
||||||
conditions:
|
|
||||||
|
|
||||||
- a) The work must carry prominent notices stating that you modified
|
|
||||||
it, and giving a relevant date.
|
|
||||||
- b) The work must carry prominent notices stating that it is
|
|
||||||
released under this License and any conditions added under
|
|
||||||
section 7. This requirement modifies the requirement in section 4
|
|
||||||
to "keep intact all notices".
|
|
||||||
- c) You must license the entire work, as a whole, under this
|
|
||||||
License to anyone who comes into possession of a copy. This
|
|
||||||
License will therefore apply, along with any applicable section 7
|
|
||||||
additional terms, to the whole of the work, and all its parts,
|
|
||||||
regardless of how they are packaged. This License gives no
|
|
||||||
permission to license the work in any other way, but it does not
|
|
||||||
invalidate such permission if you have separately received it.
|
|
||||||
- d) If the work has interactive user interfaces, each must display
|
|
||||||
Appropriate Legal Notices; however, if the Program has interactive
|
|
||||||
interfaces that do not display Appropriate Legal Notices, your
|
|
||||||
work need not make them do so.
|
|
||||||
|
|
||||||
A compilation of a covered work with other separate and independent
|
|
||||||
works, which are not by their nature extensions of the covered work,
|
|
||||||
and which are not combined with it such as to form a larger program,
|
|
||||||
in or on a volume of a storage or distribution medium, is called an
|
|
||||||
"aggregate" if the compilation and its resulting copyright are not
|
|
||||||
used to limit the access or legal rights of the compilation's users
|
|
||||||
beyond what the individual works permit. Inclusion of a covered work
|
|
||||||
in an aggregate does not cause this License to apply to the other
|
|
||||||
parts of the aggregate.
|
|
||||||
|
|
||||||
### 6. Conveying Non-Source Forms.
|
|
||||||
|
|
||||||
You may convey a covered work in object code form under the terms of
|
|
||||||
sections 4 and 5, provided that you also convey the machine-readable
|
|
||||||
Corresponding Source under the terms of this License, in one of these
|
|
||||||
ways:
|
|
||||||
|
|
||||||
- a) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by the
|
|
||||||
Corresponding Source fixed on a durable physical medium
|
|
||||||
customarily used for software interchange.
|
|
||||||
- b) Convey the object code in, or embodied in, a physical product
|
|
||||||
(including a physical distribution medium), accompanied by a
|
|
||||||
written offer, valid for at least three years and valid for as
|
|
||||||
long as you offer spare parts or customer support for that product
|
|
||||||
model, to give anyone who possesses the object code either (1) a
|
|
||||||
copy of the Corresponding Source for all the software in the
|
|
||||||
product that is covered by this License, on a durable physical
|
|
||||||
medium customarily used for software interchange, for a price no
|
|
||||||
more than your reasonable cost of physically performing this
|
|
||||||
conveying of source, or (2) access to copy the Corresponding
|
|
||||||
Source from a network server at no charge.
|
|
||||||
- c) Convey individual copies of the object code with a copy of the
|
|
||||||
written offer to provide the Corresponding Source. This
|
|
||||||
alternative is allowed only occasionally and noncommercially, and
|
|
||||||
only if you received the object code with such an offer, in accord
|
|
||||||
with subsection 6b.
|
|
||||||
- d) Convey the object code by offering access from a designated
|
|
||||||
place (gratis or for a charge), and offer equivalent access to the
|
|
||||||
Corresponding Source in the same way through the same place at no
|
|
||||||
further charge. You need not require recipients to copy the
|
|
||||||
Corresponding Source along with the object code. If the place to
|
|
||||||
copy the object code is a network server, the Corresponding Source
|
|
||||||
may be on a different server (operated by you or a third party)
|
|
||||||
that supports equivalent copying facilities, provided you maintain
|
|
||||||
clear directions next to the object code saying where to find the
|
|
||||||
Corresponding Source. Regardless of what server hosts the
|
|
||||||
Corresponding Source, you remain obligated to ensure that it is
|
|
||||||
available for as long as needed to satisfy these requirements.
|
|
||||||
- e) Convey the object code using peer-to-peer transmission,
|
|
||||||
provided you inform other peers where the object code and
|
|
||||||
Corresponding Source of the work are being offered to the general
|
|
||||||
public at no charge under subsection 6d.
|
|
||||||
|
|
||||||
A separable portion of the object code, whose source code is excluded
|
|
||||||
from the Corresponding Source as a System Library, need not be
|
|
||||||
included in conveying the object code work.
|
|
||||||
|
|
||||||
A "User Product" is either (1) a "consumer product", which means any
|
|
||||||
tangible personal property which is normally used for personal,
|
|
||||||
family, or household purposes, or (2) anything designed or sold for
|
|
||||||
incorporation into a dwelling. In determining whether a product is a
|
|
||||||
consumer product, doubtful cases shall be resolved in favor of
|
|
||||||
coverage. For a particular product received by a particular user,
|
|
||||||
"normally used" refers to a typical or common use of that class of
|
|
||||||
product, regardless of the status of the particular user or of the way
|
|
||||||
in which the particular user actually uses, or expects or is expected
|
|
||||||
to use, the product. A product is a consumer product regardless of
|
|
||||||
whether the product has substantial commercial, industrial or
|
|
||||||
non-consumer uses, unless such uses represent the only significant
|
|
||||||
mode of use of the product.
|
|
||||||
|
|
||||||
"Installation Information" for a User Product means any methods,
|
|
||||||
procedures, authorization keys, or other information required to
|
|
||||||
install and execute modified versions of a covered work in that User
|
|
||||||
Product from a modified version of its Corresponding Source. The
|
|
||||||
information must suffice to ensure that the continued functioning of
|
|
||||||
the modified object code is in no case prevented or interfered with
|
|
||||||
solely because modification has been made.
|
|
||||||
|
|
||||||
If you convey an object code work under this section in, or with, or
|
|
||||||
specifically for use in, a User Product, and the conveying occurs as
|
|
||||||
part of a transaction in which the right of possession and use of the
|
|
||||||
User Product is transferred to the recipient in perpetuity or for a
|
|
||||||
fixed term (regardless of how the transaction is characterized), the
|
|
||||||
Corresponding Source conveyed under this section must be accompanied
|
|
||||||
by the Installation Information. But this requirement does not apply
|
|
||||||
if neither you nor any third party retains the ability to install
|
|
||||||
modified object code on the User Product (for example, the work has
|
|
||||||
been installed in ROM).
|
|
||||||
|
|
||||||
The requirement to provide Installation Information does not include a
|
|
||||||
requirement to continue to provide support service, warranty, or
|
|
||||||
updates for a work that has been modified or installed by the
|
|
||||||
recipient, or for the User Product in which it has been modified or
|
|
||||||
installed. Access to a network may be denied when the modification
|
|
||||||
itself materially and adversely affects the operation of the network
|
|
||||||
or violates the rules and protocols for communication across the
|
|
||||||
network.
|
|
||||||
|
|
||||||
Corresponding Source conveyed, and Installation Information provided,
|
|
||||||
in accord with this section must be in a format that is publicly
|
|
||||||
documented (and with an implementation available to the public in
|
|
||||||
source code form), and must require no special password or key for
|
|
||||||
unpacking, reading or copying.
|
|
||||||
|
|
||||||
### 7. Additional Terms.
|
|
||||||
|
|
||||||
"Additional permissions" are terms that supplement the terms of this
|
|
||||||
License by making exceptions from one or more of its conditions.
|
|
||||||
Additional permissions that are applicable to the entire Program shall
|
|
||||||
be treated as though they were included in this License, to the extent
|
|
||||||
that they are valid under applicable law. If additional permissions
|
|
||||||
apply only to part of the Program, that part may be used separately
|
|
||||||
under those permissions, but the entire Program remains governed by
|
|
||||||
this License without regard to the additional permissions.
|
|
||||||
|
|
||||||
When you convey a copy of a covered work, you may at your option
|
|
||||||
remove any additional permissions from that copy, or from any part of
|
|
||||||
it. (Additional permissions may be written to require their own
|
|
||||||
removal in certain cases when you modify the work.) You may place
|
|
||||||
additional permissions on material, added by you to a covered work,
|
|
||||||
for which you have or can give appropriate copyright permission.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, for material you
|
|
||||||
add to a covered work, you may (if authorized by the copyright holders
|
|
||||||
of that material) supplement the terms of this License with terms:
|
|
||||||
|
|
||||||
- a) Disclaiming warranty or limiting liability differently from the
|
|
||||||
terms of sections 15 and 16 of this License; or
|
|
||||||
- b) Requiring preservation of specified reasonable legal notices or
|
|
||||||
author attributions in that material or in the Appropriate Legal
|
|
||||||
Notices displayed by works containing it; or
|
|
||||||
- c) Prohibiting misrepresentation of the origin of that material,
|
|
||||||
or requiring that modified versions of such material be marked in
|
|
||||||
reasonable ways as different from the original version; or
|
|
||||||
- d) Limiting the use for publicity purposes of names of licensors
|
|
||||||
or authors of the material; or
|
|
||||||
- e) Declining to grant rights under trademark law for use of some
|
|
||||||
trade names, trademarks, or service marks; or
|
|
||||||
- f) Requiring indemnification of licensors and authors of that
|
|
||||||
material by anyone who conveys the material (or modified versions
|
|
||||||
of it) with contractual assumptions of liability to the recipient,
|
|
||||||
for any liability that these contractual assumptions directly
|
|
||||||
impose on those licensors and authors.
|
|
||||||
|
|
||||||
All other non-permissive additional terms are considered "further
|
|
||||||
restrictions" within the meaning of section 10. If the Program as you
|
|
||||||
received it, or any part of it, contains a notice stating that it is
|
|
||||||
governed by this License along with a term that is a further
|
|
||||||
restriction, you may remove that term. If a license document contains
|
|
||||||
a further restriction but permits relicensing or conveying under this
|
|
||||||
License, you may add to a covered work material governed by the terms
|
|
||||||
of that license document, provided that the further restriction does
|
|
||||||
not survive such relicensing or conveying.
|
|
||||||
|
|
||||||
If you add terms to a covered work in accord with this section, you
|
|
||||||
must place, in the relevant source files, a statement of the
|
|
||||||
additional terms that apply to those files, or a notice indicating
|
|
||||||
where to find the applicable terms.
|
|
||||||
|
|
||||||
Additional terms, permissive or non-permissive, may be stated in the
|
|
||||||
form of a separately written license, or stated as exceptions; the
|
|
||||||
above requirements apply either way.
|
|
||||||
|
|
||||||
### 8. Termination.
|
|
||||||
|
|
||||||
You may not propagate or modify a covered work except as expressly
|
|
||||||
provided under this License. Any attempt otherwise to propagate or
|
|
||||||
modify it is void, and will automatically terminate your rights under
|
|
||||||
this License (including any patent licenses granted under the third
|
|
||||||
paragraph of section 11).
|
|
||||||
|
|
||||||
However, if you cease all violation of this License, then your license
|
|
||||||
from a particular copyright holder is reinstated (a) provisionally,
|
|
||||||
unless and until the copyright holder explicitly and finally
|
|
||||||
terminates your license, and (b) permanently, if the copyright holder
|
|
||||||
fails to notify you of the violation by some reasonable means prior to
|
|
||||||
60 days after the cessation.
|
|
||||||
|
|
||||||
Moreover, your license from a particular copyright holder is
|
|
||||||
reinstated permanently if the copyright holder notifies you of the
|
|
||||||
violation by some reasonable means, this is the first time you have
|
|
||||||
received notice of violation of this License (for any work) from that
|
|
||||||
copyright holder, and you cure the violation prior to 30 days after
|
|
||||||
your receipt of the notice.
|
|
||||||
|
|
||||||
Termination of your rights under this section does not terminate the
|
|
||||||
licenses of parties who have received copies or rights from you under
|
|
||||||
this License. If your rights have been terminated and not permanently
|
|
||||||
reinstated, you do not qualify to receive new licenses for the same
|
|
||||||
material under section 10.
|
|
||||||
|
|
||||||
### 9. Acceptance Not Required for Having Copies.
|
|
||||||
|
|
||||||
You are not required to accept this License in order to receive or run
|
|
||||||
a copy of the Program. Ancillary propagation of a covered work
|
|
||||||
occurring solely as a consequence of using peer-to-peer transmission
|
|
||||||
to receive a copy likewise does not require acceptance. However,
|
|
||||||
nothing other than this License grants you permission to propagate or
|
|
||||||
modify any covered work. These actions infringe copyright if you do
|
|
||||||
not accept this License. Therefore, by modifying or propagating a
|
|
||||||
covered work, you indicate your acceptance of this License to do so.
|
|
||||||
|
|
||||||
### 10. Automatic Licensing of Downstream Recipients.
|
|
||||||
|
|
||||||
Each time you convey a covered work, the recipient automatically
|
|
||||||
receives a license from the original licensors, to run, modify and
|
|
||||||
propagate that work, subject to this License. You are not responsible
|
|
||||||
for enforcing compliance by third parties with this License.
|
|
||||||
|
|
||||||
An "entity transaction" is a transaction transferring control of an
|
|
||||||
organization, or substantially all assets of one, or subdividing an
|
|
||||||
organization, or merging organizations. If propagation of a covered
|
|
||||||
work results from an entity transaction, each party to that
|
|
||||||
transaction who receives a copy of the work also receives whatever
|
|
||||||
licenses to the work the party's predecessor in interest had or could
|
|
||||||
give under the previous paragraph, plus a right to possession of the
|
|
||||||
Corresponding Source of the work from the predecessor in interest, if
|
|
||||||
the predecessor has it or can get it with reasonable efforts.
|
|
||||||
|
|
||||||
You may not impose any further restrictions on the exercise of the
|
|
||||||
rights granted or affirmed under this License. For example, you may
|
|
||||||
not impose a license fee, royalty, or other charge for exercise of
|
|
||||||
rights granted under this License, and you may not initiate litigation
|
|
||||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
|
||||||
any patent claim is infringed by making, using, selling, offering for
|
|
||||||
sale, or importing the Program or any portion of it.
|
|
||||||
|
|
||||||
### 11. Patents.
|
|
||||||
|
|
||||||
A "contributor" is a copyright holder who authorizes use under this
|
|
||||||
License of the Program or a work on which the Program is based. The
|
|
||||||
work thus licensed is called the contributor's "contributor version".
|
|
||||||
|
|
||||||
A contributor's "essential patent claims" are all patent claims owned
|
|
||||||
or controlled by the contributor, whether already acquired or
|
|
||||||
hereafter acquired, that would be infringed by some manner, permitted
|
|
||||||
by this License, of making, using, or selling its contributor version,
|
|
||||||
but do not include claims that would be infringed only as a
|
|
||||||
consequence of further modification of the contributor version. For
|
|
||||||
purposes of this definition, "control" includes the right to grant
|
|
||||||
patent sublicenses in a manner consistent with the requirements of
|
|
||||||
this License.
|
|
||||||
|
|
||||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
|
||||||
patent license under the contributor's essential patent claims, to
|
|
||||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
|
||||||
propagate the contents of its contributor version.
|
|
||||||
|
|
||||||
In the following three paragraphs, a "patent license" is any express
|
|
||||||
agreement or commitment, however denominated, not to enforce a patent
|
|
||||||
(such as an express permission to practice a patent or covenant not to
|
|
||||||
sue for patent infringement). To "grant" such a patent license to a
|
|
||||||
party means to make such an agreement or commitment not to enforce a
|
|
||||||
patent against the party.
|
|
||||||
|
|
||||||
If you convey a covered work, knowingly relying on a patent license,
|
|
||||||
and the Corresponding Source of the work is not available for anyone
|
|
||||||
to copy, free of charge and under the terms of this License, through a
|
|
||||||
publicly available network server or other readily accessible means,
|
|
||||||
then you must either (1) cause the Corresponding Source to be so
|
|
||||||
available, or (2) arrange to deprive yourself of the benefit of the
|
|
||||||
patent license for this particular work, or (3) arrange, in a manner
|
|
||||||
consistent with the requirements of this License, to extend the patent
|
|
||||||
license to downstream recipients. "Knowingly relying" means you have
|
|
||||||
actual knowledge that, but for the patent license, your conveying the
|
|
||||||
covered work in a country, or your recipient's use of the covered work
|
|
||||||
in a country, would infringe one or more identifiable patents in that
|
|
||||||
country that you have reason to believe are valid.
|
|
||||||
|
|
||||||
If, pursuant to or in connection with a single transaction or
|
|
||||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
|
||||||
covered work, and grant a patent license to some of the parties
|
|
||||||
receiving the covered work authorizing them to use, propagate, modify
|
|
||||||
or convey a specific copy of the covered work, then the patent license
|
|
||||||
you grant is automatically extended to all recipients of the covered
|
|
||||||
work and works based on it.
|
|
||||||
|
|
||||||
A patent license is "discriminatory" if it does not include within the
|
|
||||||
scope of its coverage, prohibits the exercise of, or is conditioned on
|
|
||||||
the non-exercise of one or more of the rights that are specifically
|
|
||||||
granted under this License. You may not convey a covered work if you
|
|
||||||
are a party to an arrangement with a third party that is in the
|
|
||||||
business of distributing software, under which you make payment to the
|
|
||||||
third party based on the extent of your activity of conveying the
|
|
||||||
work, and under which the third party grants, to any of the parties
|
|
||||||
who would receive the covered work from you, a discriminatory patent
|
|
||||||
license (a) in connection with copies of the covered work conveyed by
|
|
||||||
you (or copies made from those copies), or (b) primarily for and in
|
|
||||||
connection with specific products or compilations that contain the
|
|
||||||
covered work, unless you entered into that arrangement, or that patent
|
|
||||||
license was granted, prior to 28 March 2007.
|
|
||||||
|
|
||||||
Nothing in this License shall be construed as excluding or limiting
|
|
||||||
any implied license or other defenses to infringement that may
|
|
||||||
otherwise be available to you under applicable patent law.
|
|
||||||
|
|
||||||
### 12. No Surrender of Others' Freedom.
|
|
||||||
|
|
||||||
If conditions are imposed on you (whether by court order, agreement or
|
|
||||||
otherwise) that contradict the conditions of this License, they do not
|
|
||||||
excuse you from the conditions of this License. If you cannot convey a
|
|
||||||
covered work so as to satisfy simultaneously your obligations under
|
|
||||||
this License and any other pertinent obligations, then as a
|
|
||||||
consequence you may not convey it at all. For example, if you agree to
|
|
||||||
terms that obligate you to collect a royalty for further conveying
|
|
||||||
from those to whom you convey the Program, the only way you could
|
|
||||||
satisfy both those terms and this License would be to refrain entirely
|
|
||||||
from conveying the Program.
|
|
||||||
|
|
||||||
### 13. Remote Network Interaction; Use with the GNU General Public License.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, if you modify the
|
|
||||||
Program, your modified version must prominently offer all users
|
|
||||||
interacting with it remotely through a computer network (if your
|
|
||||||
version supports such interaction) an opportunity to receive the
|
|
||||||
Corresponding Source of your version by providing access to the
|
|
||||||
Corresponding Source from a network server at no charge, through some
|
|
||||||
standard or customary means of facilitating copying of software. This
|
|
||||||
Corresponding Source shall include the Corresponding Source for any
|
|
||||||
work covered by version 3 of the GNU General Public License that is
|
|
||||||
incorporated pursuant to the following paragraph.
|
|
||||||
|
|
||||||
Notwithstanding any other provision of this License, you have
|
|
||||||
permission to link or combine any covered work with a work licensed
|
|
||||||
under version 3 of the GNU General Public License into a single
|
|
||||||
combined work, and to convey the resulting work. The terms of this
|
|
||||||
License will continue to apply to the part which is the covered work,
|
|
||||||
but the work with which it is combined will remain governed by version
|
|
||||||
3 of the GNU General Public License.
|
|
||||||
|
|
||||||
### 14. Revised Versions of this License.
|
|
||||||
|
|
||||||
The Free Software Foundation may publish revised and/or new versions
|
|
||||||
of the GNU Affero General Public License from time to time. Such new
|
|
||||||
versions will be similar in spirit to the present version, but may
|
|
||||||
differ in detail to address new problems or concerns.
|
|
||||||
|
|
||||||
Each version is given a distinguishing version number. If the Program
|
|
||||||
specifies that a certain numbered version of the GNU Affero General
|
|
||||||
Public License "or any later version" applies to it, you have the
|
|
||||||
option of following the terms and conditions either of that numbered
|
|
||||||
version or of any later version published by the Free Software
|
|
||||||
Foundation. If the Program does not specify a version number of the
|
|
||||||
GNU Affero General Public License, you may choose any version ever
|
|
||||||
published by the Free Software Foundation.
|
|
||||||
|
|
||||||
If the Program specifies that a proxy can decide which future versions
|
|
||||||
of the GNU Affero General Public License can be used, that proxy's
|
|
||||||
public statement of acceptance of a version permanently authorizes you
|
|
||||||
to choose that version for the Program.
|
|
||||||
|
|
||||||
Later license versions may give you additional or different
|
|
||||||
permissions. However, no additional obligations are imposed on any
|
|
||||||
author or copyright holder as a result of your choosing to follow a
|
|
||||||
later version.
|
|
||||||
|
|
||||||
### 15. Disclaimer of Warranty.
|
|
||||||
|
|
||||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
|
||||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
|
||||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT
|
|
||||||
WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT
|
|
||||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND
|
|
||||||
PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE
|
|
||||||
DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR
|
|
||||||
CORRECTION.
|
|
||||||
|
|
||||||
### 16. Limitation of Liability.
|
|
||||||
|
|
||||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
|
||||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR
|
|
||||||
CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
|
|
||||||
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES
|
|
||||||
ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT
|
|
||||||
NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR
|
|
||||||
LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM
|
|
||||||
TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER
|
|
||||||
PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
|
|
||||||
|
|
||||||
### 17. Interpretation of Sections 15 and 16.
|
|
||||||
|
|
||||||
If the disclaimer of warranty and limitation of liability provided
|
|
||||||
above cannot be given local legal effect according to their terms,
|
|
||||||
reviewing courts shall apply local law that most closely approximates
|
|
||||||
an absolute waiver of all civil liability in connection with the
|
|
||||||
Program, unless a warranty or assumption of liability accompanies a
|
|
||||||
copy of the Program in return for a fee.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
|
|
||||||
## How to Apply These Terms to Your New Programs
|
|
||||||
|
|
||||||
If you develop a new program, and you want it to be of the greatest
|
|
||||||
possible use to the public, the best way to achieve this is to make it
|
|
||||||
free software which everyone can redistribute and change under these
|
|
||||||
terms.
|
|
||||||
|
|
||||||
To do so, attach the following notices to the program. It is safest to
|
|
||||||
attach them to the start of each source file to most effectively state
|
|
||||||
the exclusion of warranty; and each file should have at least the
|
|
||||||
"copyright" line and a pointer to where the full notice is found.
|
|
||||||
|
|
||||||
<one line to give the program's name and a brief idea of what it does.>
|
|
||||||
Copyright (C) <year> <name of author>
|
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
|
||||||
it under the terms of the GNU Affero General Public License as
|
|
||||||
published by the Free Software Foundation, either version 3 of the
|
|
||||||
License, or (at your option) any later version.
|
|
||||||
|
|
||||||
This program is distributed in the hope that it will be useful,
|
|
||||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU Affero General Public License
|
|
||||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
Also add information on how to contact you by electronic and paper
|
|
||||||
mail.
|
|
||||||
|
|
||||||
If your software can interact with users remotely through a computer
|
|
||||||
network, you should also make sure that it provides a way for users to
|
|
||||||
get its source. For example, if your program is a web application, its
|
|
||||||
interface could display a "Source" link that leads users to an archive
|
|
||||||
of the code. There are many ways you could offer source, and different
|
|
||||||
solutions will be better for different programs; see section 13 for
|
|
||||||
the specific requirements.
|
|
||||||
|
|
||||||
You should also get your employer (if you work as a programmer) or
|
|
||||||
school, if any, to sign a "copyright disclaimer" for the program, if
|
|
||||||
necessary. For more information on this, and how to apply and follow
|
|
||||||
the GNU AGPL, see <https://www.gnu.org/licenses/>.
|
|
||||||
220
Makefile
220
Makefile
@ -10,119 +10,135 @@ REPORT_PORTAL_DESCRIPTION ?= "Jan App report"
|
|||||||
all:
|
all:
|
||||||
@echo "Specify a target to run"
|
@echo "Specify a target to run"
|
||||||
|
|
||||||
# Builds the UI kit
|
# Config yarn version
|
||||||
build-joi:
|
|
||||||
ifeq ($(OS),Windows_NT)
|
config-yarn:
|
||||||
cd joi && yarn config set network-timeout 300000 && yarn install && yarn build
|
corepack enable
|
||||||
else
|
corepack prepare yarn@4.5.3 --activate
|
||||||
cd joi && yarn install && yarn build
|
yarn --version
|
||||||
endif
|
yarn config set -H enableImmutableInstalls false
|
||||||
|
|
||||||
# Installs yarn dependencies and builds core and extensions
|
# Installs yarn dependencies and builds core and extensions
|
||||||
install-and-build: build-joi
|
install-and-build: config-yarn
|
||||||
ifeq ($(OS),Windows_NT)
|
ifeq ($(OS),Windows_NT)
|
||||||
yarn config set network-timeout 300000
|
echo "skip"
|
||||||
|
else ifeq ($(shell uname -s),Linux)
|
||||||
|
chmod +x src-tauri/build-utils/*
|
||||||
endif
|
endif
|
||||||
yarn global add turbo@1.13.2
|
|
||||||
yarn build:core
|
|
||||||
yarn build:server
|
|
||||||
yarn install
|
yarn install
|
||||||
yarn build:extensions
|
yarn build:tauri:plugin:api
|
||||||
|
yarn build:core
|
||||||
|
yarn build:extensions && yarn build:extensions-web
|
||||||
|
|
||||||
check-file-counts: install-and-build
|
# Install required Rust targets for macOS universal builds
|
||||||
ifeq ($(OS),Windows_NT)
|
install-rust-targets:
|
||||||
powershell -Command "if ((Get-ChildItem -Path pre-install -Filter *.tgz | Measure-Object | Select-Object -ExpandProperty Count) -ne (Get-ChildItem -Path extensions -Directory | Where-Object Name -like *-extension* | Measure-Object | Select-Object -ExpandProperty Count)) { Write-Host 'Number of .tgz files in pre-install does not match the number of subdirectories in extensions with package.json'; exit 1 } else { Write-Host 'Extension build successful' }"
|
ifeq ($(shell uname -s),Darwin)
|
||||||
|
@echo "Detected macOS, installing universal build targets..."
|
||||||
|
rustup target add x86_64-apple-darwin
|
||||||
|
rustup target add aarch64-apple-darwin
|
||||||
|
@echo "Rust targets installed successfully!"
|
||||||
else
|
else
|
||||||
@tgz_count=$$(find pre-install -type f -name "*.tgz" | wc -l); dir_count=$$(find extensions -mindepth 1 -maxdepth 1 -type d -exec test -e '{}/package.json' \; -print | wc -l); if [ $$tgz_count -ne $$dir_count ]; then echo "Number of .tgz files in pre-install ($$tgz_count) does not match the number of subdirectories in extension ($$dir_count)"; exit 1; else echo "Extension build successful"; fi
|
@echo "Not macOS; skipping Rust target installation."
|
||||||
endif
|
endif
|
||||||
|
|
||||||
dev: check-file-counts
|
# Install required Rust targets for Android builds
|
||||||
|
install-android-rust-targets:
|
||||||
|
@echo "Checking and installing Android Rust targets..."
|
||||||
|
@rustup target list --installed | grep -q "aarch64-linux-android" || rustup target add aarch64-linux-android
|
||||||
|
@rustup target list --installed | grep -q "armv7-linux-androideabi" || rustup target add armv7-linux-androideabi
|
||||||
|
@rustup target list --installed | grep -q "i686-linux-android" || rustup target add i686-linux-android
|
||||||
|
@rustup target list --installed | grep -q "x86_64-linux-android" || rustup target add x86_64-linux-android
|
||||||
|
@echo "Android Rust targets ready!"
|
||||||
|
|
||||||
|
# Install required Rust targets for iOS builds
|
||||||
|
install-ios-rust-targets:
|
||||||
|
@echo "Checking and installing iOS Rust targets..."
|
||||||
|
@rustup target list --installed | grep -q "aarch64-apple-ios" || rustup target add aarch64-apple-ios
|
||||||
|
@rustup target list --installed | grep -q "aarch64-apple-ios-sim" || rustup target add aarch64-apple-ios-sim
|
||||||
|
@rustup target list --installed | grep -q "x86_64-apple-ios" || rustup target add x86_64-apple-ios
|
||||||
|
@echo "iOS Rust targets ready!"
|
||||||
|
|
||||||
|
dev: install-and-build
|
||||||
|
yarn download:bin
|
||||||
yarn dev
|
yarn dev
|
||||||
|
|
||||||
# Linting
|
# Web application targets
|
||||||
lint: check-file-counts
|
install-web-app: config-yarn
|
||||||
yarn lint
|
yarn install
|
||||||
|
|
||||||
update-playwright-config:
|
dev-web-app: install-web-app
|
||||||
ifeq ($(OS),Windows_NT)
|
yarn build:core
|
||||||
echo -e "const RPconfig = {\n\
|
yarn dev:web-app
|
||||||
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
|
|
||||||
endpoint: '$(REPORT_PORTAL_URL)',\n\
|
|
||||||
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
|
|
||||||
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
|
|
||||||
attributes: [\n\
|
|
||||||
{\n\
|
|
||||||
key: 'key',\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
{\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
],\n\
|
|
||||||
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
|
|
||||||
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
|
|
||||||
sed -i "s/^ reporter: .*/ reporter: [['@reportportal\/agent-js-playwright', RPconfig]],/" electron/playwright.config.ts
|
|
||||||
|
|
||||||
else ifeq ($(shell uname -s),Linux)
|
build-web-app: install-web-app
|
||||||
echo "const RPconfig = {\n\
|
yarn build:core
|
||||||
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
|
yarn build:web-app
|
||||||
endpoint: '$(REPORT_PORTAL_URL)',\n\
|
|
||||||
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
|
serve-web-app:
|
||||||
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
|
yarn serve:web-app
|
||||||
attributes: [\n\
|
|
||||||
{\n\
|
build-serve-web-app: build-web-app
|
||||||
key: 'key',\n\
|
yarn serve:web-app
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
# Mobile
|
||||||
{\n\
|
dev-android: install-and-build install-android-rust-targets
|
||||||
value: 'value',\n\
|
@echo "Setting up Android development environment..."
|
||||||
},\n\
|
@if [ ! -d "src-tauri/gen/android" ]; then \
|
||||||
],\n\
|
echo "Android app not initialized. Initializing..."; \
|
||||||
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
|
yarn tauri android init; \
|
||||||
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
|
fi
|
||||||
sed -i "s/^ reporter: .*/ reporter: [['@reportportal\/agent-js-playwright', RPconfig]],/" electron/playwright.config.ts
|
@echo "Sourcing Android environment setup..."
|
||||||
|
@bash autoqa/scripts/setup-android-env.sh echo "Android environment ready"
|
||||||
|
@echo "Starting Android development server..."
|
||||||
|
yarn dev:android
|
||||||
|
|
||||||
|
dev-ios: install-and-build install-ios-rust-targets
|
||||||
|
@echo "Setting up iOS development environment..."
|
||||||
|
ifeq ($(shell uname -s),Darwin)
|
||||||
|
@if [ ! -d "src-tauri/gen/ios" ]; then \
|
||||||
|
echo "iOS app not initialized. Initializing..."; \
|
||||||
|
yarn tauri ios init; \
|
||||||
|
fi
|
||||||
|
@echo "Checking iOS development requirements..."
|
||||||
|
@xcrun --version > /dev/null 2>&1 || (echo "❌ Xcode command line tools not found. Install with: xcode-select --install" && exit 1)
|
||||||
|
@xcrun simctl list devices available | grep -q "iPhone\|iPad" || (echo "❌ No iOS simulators found. Install simulators through Xcode." && exit 1)
|
||||||
|
@echo "Starting iOS development server..."
|
||||||
|
yarn dev:ios
|
||||||
else
|
else
|
||||||
echo "const RPconfig = {\n\
|
@echo "❌ iOS development is only supported on macOS"
|
||||||
apiKey: '$(REPORT_PORTAL_API_KEY)',\n\
|
@exit 1
|
||||||
endpoint: '$(REPORT_PORTAL_URL)',\n\
|
|
||||||
project: '$(REPORT_PORTAL_PROJECT_NAME)',\n\
|
|
||||||
launch: '$(REPORT_PORTAL_LAUNCH_NAME)',\n\
|
|
||||||
attributes: [\n\
|
|
||||||
{\n\
|
|
||||||
key: 'key',\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
{\n\
|
|
||||||
value: 'value',\n\
|
|
||||||
},\n\
|
|
||||||
],\n\
|
|
||||||
description: '$(REPORT_PORTAL_DESCRIPTION)',\n\
|
|
||||||
}\n$$(cat electron/playwright.config.ts)" > electron/playwright.config.ts;
|
|
||||||
sed -i '' "s|^ reporter: .*| reporter: [['@reportportal\/agent-js-playwright', RPconfig]],|" electron/playwright.config.ts
|
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
# Linting
|
||||||
|
lint: install-and-build
|
||||||
|
yarn lint
|
||||||
|
|
||||||
# Testing
|
# Testing
|
||||||
test: lint
|
test: lint
|
||||||
yarn build:test
|
yarn download:bin
|
||||||
yarn test:unit
|
ifeq ($(OS),Windows_NT)
|
||||||
|
endif
|
||||||
yarn test
|
yarn test
|
||||||
|
yarn copy:assets:tauri
|
||||||
# Builds and publishes the app
|
yarn build:icon
|
||||||
build-and-publish: check-file-counts
|
cargo test --manifest-path src-tauri/Cargo.toml --no-default-features --features test-tauri -- --test-threads=1
|
||||||
yarn build:publish
|
cargo test --manifest-path src-tauri/plugins/tauri-plugin-hardware/Cargo.toml
|
||||||
|
cargo test --manifest-path src-tauri/plugins/tauri-plugin-llamacpp/Cargo.toml
|
||||||
|
cargo test --manifest-path src-tauri/utils/Cargo.toml
|
||||||
|
|
||||||
# Build
|
# Build
|
||||||
build: check-file-counts
|
build: install-and-build install-rust-targets
|
||||||
yarn build
|
yarn build
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
ifeq ($(OS),Windows_NT)
|
ifeq ($(OS),Windows_NT)
|
||||||
-powershell -Command "Get-ChildItem -Path . -Include node_modules, .next, dist, build, out, .turbo -Recurse -Directory | Remove-Item -Recurse -Force"
|
-powershell -Command "Get-ChildItem -Path . -Include node_modules, .next, dist, build, out, .turbo, .yarn -Recurse -Directory | Remove-Item -Recurse -Force"
|
||||||
-powershell -Command "Get-ChildItem -Path . -Include package-lock.json -Recurse -File | Remove-Item -Recurse -Force"
|
-powershell -Command "Get-ChildItem -Path . -Include package-lock.json, tsconfig.tsbuildinfo -Recurse -File | Remove-Item -Recurse -Force"
|
||||||
-powershell -Command "Get-ChildItem -Path . -Include yarn.lock -Recurse -File | Remove-Item -Recurse -Force"
|
|
||||||
-powershell -Command "Remove-Item -Recurse -Force ./pre-install/*.tgz"
|
-powershell -Command "Remove-Item -Recurse -Force ./pre-install/*.tgz"
|
||||||
-powershell -Command "Remove-Item -Recurse -Force ./extensions/*/*.tgz"
|
-powershell -Command "Remove-Item -Recurse -Force ./extensions/*/*.tgz"
|
||||||
-powershell -Command "Remove-Item -Recurse -Force ./electron/pre-install/*.tgz"
|
-powershell -Command "Remove-Item -Recurse -Force ./electron/pre-install/*.tgz"
|
||||||
|
-powershell -Command "Remove-Item -Recurse -Force ./src-tauri/resources"
|
||||||
|
-powershell -Command "Remove-Item -Recurse -Force ./src-tauri/target"
|
||||||
-powershell -Command "if (Test-Path \"$($env:USERPROFILE)\jan\extensions\") { Remove-Item -Path \"$($env:USERPROFILE)\jan\extensions\" -Recurse -Force }"
|
-powershell -Command "if (Test-Path \"$($env:USERPROFILE)\jan\extensions\") { Remove-Item -Path \"$($env:USERPROFILE)\jan\extensions\" -Recurse -Force }"
|
||||||
else ifeq ($(shell uname -s),Linux)
|
else ifeq ($(shell uname -s),Linux)
|
||||||
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
|
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
|
||||||
@ -131,25 +147,31 @@ else ifeq ($(shell uname -s),Linux)
|
|||||||
find . -name "build" -type d -exec rm -rf '{}' +
|
find . -name "build" -type d -exec rm -rf '{}' +
|
||||||
find . -name "out" -type d -exec rm -rf '{}' +
|
find . -name "out" -type d -exec rm -rf '{}' +
|
||||||
find . -name ".turbo" -type d -exec rm -rf '{}' +
|
find . -name ".turbo" -type d -exec rm -rf '{}' +
|
||||||
|
find . -name ".yarn" -type d -exec rm -rf '{}' +
|
||||||
find . -name "packake-lock.json" -type f -exec rm -rf '{}' +
|
find . -name "packake-lock.json" -type f -exec rm -rf '{}' +
|
||||||
find . -name "yarn.lock" -type f -exec rm -rf '{}' +
|
find . -name "package-lock.json" -type f -exec rm -rf '{}' +
|
||||||
rm -rf ./pre-install/*.tgz
|
rm -rf ./pre-install/*.tgz
|
||||||
rm -rf ./extensions/*/*.tgz
|
rm -rf ./extensions/*/*.tgz
|
||||||
rm -rf ./electron/pre-install/*.tgz
|
rm -rf ./electron/pre-install/*.tgz
|
||||||
|
rm -rf ./src-tauri/resources
|
||||||
|
rm -rf ./src-tauri/target
|
||||||
rm -rf "~/jan/extensions"
|
rm -rf "~/jan/extensions"
|
||||||
rm -rf "~/.cache/jan*"
|
rm -rf "~/.cache/jan*"
|
||||||
|
rm -rf "./.cache"
|
||||||
else
|
else
|
||||||
find . -name "node_modules" -type d -prune -exec rm -rf '{}' +
|
find . -name "node_modules" -type d -prune -exec rm -rfv '{}' +
|
||||||
find . -name ".next" -type d -exec rm -rf '{}' +
|
find . -name ".next" -type d -exec rm -rfv '{}' +
|
||||||
find . -name "dist" -type d -exec rm -rf '{}' +
|
find . -name "dist" -type d -exec rm -rfv '{}' +
|
||||||
find . -name "build" -type d -exec rm -rf '{}' +
|
find . -name "build" -type d -exec rm -rfv '{}' +
|
||||||
find . -name "out" -type d -exec rm -rf '{}' +
|
find . -name "out" -type d -exec rm -rfv '{}' +
|
||||||
find . -name ".turbo" -type d -exec rm -rf '{}' +
|
find . -name ".turbo" -type d -exec rm -rfv '{}' +
|
||||||
find . -name "packake-lock.json" -type f -exec rm -rf '{}' +
|
find . -name ".yarn" -type d -exec rm -rfv '{}' +
|
||||||
find . -name "yarn.lock" -type f -exec rm -rf '{}' +
|
find . -name "package-lock.json" -type f -exec rm -rfv '{}' +
|
||||||
rm -rf ./pre-install/*.tgz
|
rm -rfv ./pre-install/*.tgz
|
||||||
rm -rf ./extensions/*/*.tgz
|
rm -rfv ./extensions/*/*.tgz
|
||||||
rm -rf ./electron/pre-install/*.tgz
|
rm -rfv ./electron/pre-install/*.tgz
|
||||||
rm -rf ~/jan/extensions
|
rm -rfv ./src-tauri/resources
|
||||||
rm -rf ~/Library/Caches/jan*
|
rm -rfv ./src-tauri/target
|
||||||
|
rm -rfv ~/jan/extensions
|
||||||
|
rm -rfv ~/Library/Caches/jan*
|
||||||
endif
|
endif
|
||||||
|
|||||||
400
README.md
400
README.md
@ -1,6 +1,6 @@
|
|||||||
# Jan - Turn your computer into an AI computer
|
# Jan - Open-source ChatGPT replacement
|
||||||
|
|
||||||

|
<img width="2048" height="280" alt="github jan banner" src="https://github.com/user-attachments/assets/f3f87889-c133-433b-b250-236218150d3f" />
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
|
<!-- ALL-CONTRIBUTORS-BADGE:START - Do not remove or modify this section -->
|
||||||
@ -12,341 +12,135 @@
|
|||||||
</p>
|
</p>
|
||||||
|
|
||||||
<p align="center">
|
<p align="center">
|
||||||
<a href="https://jan.ai/guides">Getting Started</a>
|
<a href="https://www.jan.ai/docs/desktop">Getting Started</a>
|
||||||
- <a href="https://jan.ai/docs">Docs</a>
|
- <a href="https://discord.gg/Exe46xPMbK">Community</a>
|
||||||
- <a href="https://github.com/janhq/jan/releases">Changelog</a>
|
- <a href="https://jan.ai/changelog">Changelog</a>
|
||||||
- <a href="https://github.com/janhq/jan/issues">Bug reports</a>
|
- <a href="https://github.com/janhq/jan/issues">Bug reports</a>
|
||||||
- <a href="https://discord.gg/AsJ8krTT3N">Discord</a>
|
|
||||||
</p>
|
</p>
|
||||||
|
|
||||||
>[!Warning]
|
Jan is bringing the best of open-source AI in an easy-to-use product. Download and run LLMs with **full control** and **privacy**.
|
||||||
>**Jan is currently in Development**: Expect breaking changes and bugs!
|
|
||||||
|
|
||||||
Jan is an open-source ChatGPT alternative that runs 100% offline on your computer.
|
## Installation
|
||||||
|
|
||||||
**Jan runs on any hardware.** From PCs to multi-GPU clusters, Jan supports universal architectures:
|
The easiest way to get started is by downloading one of the following versions for your respective operating system:
|
||||||
|
|
||||||
- [x] NVIDIA GPUs (fast)
|
|
||||||
- [x] Apple M-series (fast)
|
|
||||||
- [x] Apple Intel
|
|
||||||
- [x] Linux Debian
|
|
||||||
- [x] Windows x64
|
|
||||||
|
|
||||||
## Download
|
|
||||||
|
|
||||||
<table>
|
<table>
|
||||||
<tr style="text-align:center">
|
<tr>
|
||||||
<td style="text-align:center"><b>Version Type</b></td>
|
<td><b>Platform</b></td>
|
||||||
<td style="text-align:center"><b>Windows</b></td>
|
<td><b>Download</b></td>
|
||||||
<td colspan="2" style="text-align:center"><b>MacOS</b></td>
|
|
||||||
<td colspan="2" style="text-align:center"><b>Linux</b></td>
|
|
||||||
</tr>
|
</tr>
|
||||||
<tr style="text-align:center">
|
<tr>
|
||||||
<td style="text-align:center"><b>Stable (Recommended)</b></td>
|
<td><b>Windows</b></td>
|
||||||
<td style="text-align:center">
|
<td><a href='https://app.jan.ai/download/latest/win-x64'>jan.exe</a></td>
|
||||||
<a href='https://app.jan.ai/download/latest/win-x64'>
|
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/windows.png' style="height:14px; width: 14px" />
|
|
||||||
<b>jan.exe</b>
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
<td style="text-align:center">
|
|
||||||
<a href='https://app.jan.ai/download/latest/mac-x64'>
|
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/mac.png' style="height:15px; width: 15px" />
|
|
||||||
<b>Intel</b>
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
<td style="text-align:center">
|
|
||||||
<a href='https://app.jan.ai/download/latest/mac-arm64'>
|
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/mac.png' style="height:15px; width: 15px" />
|
|
||||||
<b>M1/M2/M3/M4</b>
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
<td style="text-align:center">
|
|
||||||
<a href='https://app.jan.ai/download/latest/linux-amd64-deb'>
|
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/linux.png' style="height:14px; width: 14px" />
|
|
||||||
<b>jan.deb</b>
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
<td style="text-align:center">
|
|
||||||
<a href='https://app.jan.ai/download/latest/linux-amd64-appimage'>
|
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/linux.png' style="height:14px; width: 14px" />
|
|
||||||
<b>jan.AppImage</b>
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
</tr>
|
</tr>
|
||||||
<tr style="text-align:center">
|
<tr>
|
||||||
<td style="text-align:center"><b>Experimental (Nightly Build)</b></td>
|
<td><b>macOS</b></td>
|
||||||
<td style="text-align:center">
|
<td><a href='https://app.jan.ai/download/latest/mac-universal'>jan.dmg</a></td>
|
||||||
<a href='https://app.jan.ai/download/nightly/win-x64'>
|
</tr>
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/windows.png' style="height:14px; width: 14px" />
|
<tr>
|
||||||
<b>jan.exe</b>
|
<td><b>Linux (deb)</b></td>
|
||||||
</a>
|
<td><a href='https://app.jan.ai/download/latest/linux-amd64-deb'>jan.deb</a></td>
|
||||||
</td>
|
</tr>
|
||||||
<td style="text-align:center">
|
<tr>
|
||||||
<a href='https://app.jan.ai/download/nightly/mac-x64'>
|
<td><b>Linux (AppImage)</b></td>
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/mac.png' style="height:15px; width: 15px" />
|
<td><a href='https://app.jan.ai/download/latest/linux-amd64-appimage'>jan.AppImage</a></td>
|
||||||
<b>Intel</b>
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
<td style="text-align:center">
|
|
||||||
<a href='https://app.jan.ai/download/nightly/mac-arm64'>
|
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/mac.png' style="height:15px; width: 15px" />
|
|
||||||
<b>M1/M2/M3/M4</b>
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
<td style="text-align:center">
|
|
||||||
<a href='https://app.jan.ai/download/nightly/linux-amd64-deb'>
|
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/linux.png' style="height:14px; width: 14px" />
|
|
||||||
<b>jan.deb</b>
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
<td style="text-align:center">
|
|
||||||
<a href='https://app.jan.ai/download/nightly/linux-amd64-appimage'>
|
|
||||||
<img src='https://github.com/janhq/docs/blob/main/static/img/linux.png' style="height:14px; width: 14px" />
|
|
||||||
<b>jan.AppImage</b>
|
|
||||||
</a>
|
|
||||||
</td>
|
|
||||||
</tr>
|
</tr>
|
||||||
</table>
|
</table>
|
||||||
|
|
||||||
Download the latest version of Jan at https://jan.ai/ or visit the **[GitHub Releases](https://github.com/janhq/jan/releases)** to download any previous release.
|
|
||||||
|
|
||||||
## Demo
|
Download from [jan.ai](https://jan.ai/) or [GitHub Releases](https://github.com/janhq/jan/releases).
|
||||||
|
|
||||||

|
## Features
|
||||||
|
|
||||||
_Realtime Video: Jan v0.4.3-nightly on a Mac M1, 16GB Sonoma 14_
|
- **Local AI Models**: Download and run LLMs (Llama, Gemma, Qwen, GPT-oss etc.) from HuggingFace
|
||||||
|
- **Cloud Integration**: Connect to GPT models via OpenAI, Claude models via Anthropic, Mistral, Groq, and others
|
||||||
|
- **Custom Assistants**: Create specialized AI assistants for your tasks
|
||||||
|
- **OpenAI-Compatible API**: Local server at `localhost:1337` for other applications
|
||||||
|
- **Model Context Protocol**: MCP integration for agentic capabilities
|
||||||
|
- **Privacy First**: Everything runs locally when you want it to
|
||||||
|
|
||||||
## Quicklinks
|
## Build from Source
|
||||||
|
|
||||||
#### Jan
|
For those who enjoy the scenic route:
|
||||||
|
|
||||||
- [Jan website](https://jan.ai/)
|
### Prerequisites
|
||||||
- [Jan GitHub](https://github.com/janhq/jan)
|
|
||||||
- [User Guides](https://jan.ai/guides/)
|
|
||||||
- [Developer docs](https://jan.ai/developer/)
|
|
||||||
- [API reference](https://jan.ai/api-reference/)
|
|
||||||
- [Specs](https://jan.ai/docs/)
|
|
||||||
|
|
||||||
#### Nitro
|
- Node.js ≥ 20.0.0
|
||||||
|
- Yarn ≥ 1.22.0
|
||||||
|
- Make ≥ 3.81
|
||||||
|
- Rust (for Tauri)
|
||||||
|
|
||||||
Nitro is a high-efficiency C++ inference engine for edge computing. It is lightweight and embeddable, and can be used on its own within your own projects.
|
### Run with Make
|
||||||
|
|
||||||
- [Nitro Website](https://nitro.jan.ai)
|
```bash
|
||||||
- [Nitro GitHub](https://github.com/janhq/nitro)
|
git clone https://github.com/janhq/jan
|
||||||
- [Documentation](https://nitro.jan.ai/docs)
|
cd jan
|
||||||
- [API Reference](https://nitro.jan.ai/api-reference)
|
make dev
|
||||||
|
```
|
||||||
|
|
||||||
|
This handles everything: installs dependencies, builds core components, and launches the app.
|
||||||
|
|
||||||
|
**Available make targets:**
|
||||||
|
- `make dev` - Full development setup and launch
|
||||||
|
- `make build` - Production build
|
||||||
|
- `make test` - Run tests and linting
|
||||||
|
- `make clean` - Delete everything and start fresh
|
||||||
|
|
||||||
|
### Manual Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
yarn install
|
||||||
|
yarn build:tauri:plugin:api
|
||||||
|
yarn build:core
|
||||||
|
yarn build:extensions
|
||||||
|
yarn dev
|
||||||
|
```
|
||||||
|
|
||||||
|
## System Requirements
|
||||||
|
|
||||||
|
**Minimum specs for a decent experience:**
|
||||||
|
|
||||||
|
- **macOS**: 13.6+ (8GB RAM for 3B models, 16GB for 7B, 32GB for 13B)
|
||||||
|
- **Windows**: 10+ with GPU support for NVIDIA/AMD/Intel Arc
|
||||||
|
- **Linux**: Most distributions work, GPU acceleration available
|
||||||
|
|
||||||
|
For detailed compatibility, check our [installation guides](https://jan.ai/docs/desktop/mac).
|
||||||
|
|
||||||
## Troubleshooting
|
## Troubleshooting
|
||||||
|
|
||||||
As Jan is in development mode, you might get stuck on a broken build.
|
If things go sideways:
|
||||||
|
|
||||||
To reset your installation:
|
1. Check our [troubleshooting docs](https://jan.ai/docs/troubleshooting)
|
||||||
|
2. Copy your error logs and system specs
|
||||||
|
3. Ask for help in our [Discord](https://discord.gg/FTk2MvZwJH) `#🆘|jan-help` channel
|
||||||
|
|
||||||
1. Use the following commands to remove any dangling backend processes:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
ps aux | grep nitro
|
|
||||||
```
|
|
||||||
|
|
||||||
Look for processes like "nitro" and "nitro_arm_64," and kill them one by one with:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
kill -9 <PID>
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Remove Jan from your Applications folder and Cache folder**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
make clean
|
|
||||||
```
|
|
||||||
|
|
||||||
This will remove all build artifacts and cached files:
|
|
||||||
|
|
||||||
- Delete Jan extension from your `~/jan/extensions` folder
|
|
||||||
- Delete all `node_modules` in current folder
|
|
||||||
- Clear Application cache in `~/Library/Caches/jan`
|
|
||||||
|
|
||||||
## Requirements for running Jan
|
|
||||||
|
|
||||||
- MacOS: 13 or higher
|
|
||||||
- Windows:
|
|
||||||
- Windows 10 or higher
|
|
||||||
- To enable GPU support:
|
|
||||||
- Nvidia GPU with CUDA Toolkit 11.7 or higher
|
|
||||||
- Nvidia driver 470.63.01 or higher
|
|
||||||
- Linux:
|
|
||||||
- glibc 2.27 or higher (check with `ldd --version`)
|
|
||||||
- gcc 11, g++ 11, cpp 11 or higher, refer to this [link](https://jan.ai/guides/troubleshooting/gpu-not-used/#specific-requirements-for-linux) for more information
|
|
||||||
- To enable GPU support:
|
|
||||||
- Nvidia GPU with CUDA Toolkit 11.7 or higher
|
|
||||||
- Nvidia driver 470.63.01 or higher
|
|
||||||
|
|
||||||
## Contributing
|
## Contributing
|
||||||
|
|
||||||
Contributions are welcome! Please read the [CONTRIBUTING.md](CONTRIBUTING.md) file
|
Contributions welcome. See [CONTRIBUTING.md](CONTRIBUTING.md) for the full spiel.
|
||||||
|
|
||||||
### Pre-requisites
|
## Links
|
||||||
|
|
||||||
- node >= 20.0.0
|
- [Documentation](https://jan.ai/docs) - The manual you should read
|
||||||
- yarn >= 1.22.0
|
- [API Reference](https://jan.ai/api-reference) - For the technically inclined
|
||||||
- make >= 3.81
|
- [Changelog](https://jan.ai/changelog) - What we broke and fixed
|
||||||
|
- [Discord](https://discord.gg/FTk2MvZwJH) - Where the community lives
|
||||||
### Instructions
|
|
||||||
|
|
||||||
1. **Clone the repository and prepare:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
git clone https://github.com/janhq/jan
|
|
||||||
cd jan
|
|
||||||
git checkout -b DESIRED_BRANCH
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Run development and use Jan Desktop**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
make dev
|
|
||||||
```
|
|
||||||
|
|
||||||
This will start the development server and open the desktop app.
|
|
||||||
|
|
||||||
3. (Optional) **Run the API server without frontend**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
yarn dev:server
|
|
||||||
```
|
|
||||||
|
|
||||||
### For production build
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Do steps 1 and 2 in the previous section
|
|
||||||
# Build the app
|
|
||||||
make build
|
|
||||||
```
|
|
||||||
|
|
||||||
This will build the app MacOS m1/m2 for production (with code signing already done) and put the result in `dist` folder.
|
|
||||||
|
|
||||||
### Docker mode
|
|
||||||
|
|
||||||
- Supported OS: Linux, WSL2 Docker
|
|
||||||
- Pre-requisites:
|
|
||||||
|
|
||||||
- Docker Engine and Docker Compose are required to run Jan in Docker mode. Follow the [instructions](https://docs.docker.com/engine/install/ubuntu/) below to get started with Docker Engine on Ubuntu.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
curl -fsSL https://get.docker.com -o get-docker.sh
|
|
||||||
sudo sh ./get-docker.sh --dry-run
|
|
||||||
```
|
|
||||||
|
|
||||||
- If you intend to run Jan in GPU mode, you need to install `nvidia-driver` and `nvidia-docker2`. Follow the instruction [here](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html) for installation.
|
|
||||||
|
|
||||||
- Run Jan in Docker mode
|
|
||||||
> User can choose between `docker-compose.yml` with latest prebuilt docker image or `docker-compose-dev.yml` with local docker build
|
|
||||||
|
|
||||||
| Docker compose Profile | Description |
|
|
||||||
| ---------------------- | -------------------------------------------- |
|
|
||||||
| `cpu-fs` | Run Jan in CPU mode with default file system |
|
|
||||||
| `cpu-s3fs` | Run Jan in CPU mode with S3 file system |
|
|
||||||
| `gpu-fs` | Run Jan in GPU mode with default file system |
|
|
||||||
| `gpu-s3fs` | Run Jan in GPU mode with S3 file system |
|
|
||||||
|
|
||||||
| Environment Variable | Description |
|
|
||||||
| ----------------------- | ------------------------------------------------------------------------------------------------------- |
|
|
||||||
| `S3_BUCKET_NAME` | S3 bucket name - leave blank for default file system |
|
|
||||||
| `AWS_ACCESS_KEY_ID` | AWS access key ID - leave blank for default file system |
|
|
||||||
| `AWS_SECRET_ACCESS_KEY` | AWS secret access key - leave blank for default file system |
|
|
||||||
| `AWS_ENDPOINT` | AWS endpoint URL - leave blank for default file system |
|
|
||||||
| `AWS_REGION` | AWS region - leave blank for default file system |
|
|
||||||
| `API_BASE_URL` | Jan Server URL, please modify it as your public ip address or domain name default http://localhost:1377 |
|
|
||||||
|
|
||||||
- **Option 1**: Run Jan in CPU mode
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# cpu mode with default file system
|
|
||||||
docker compose --profile cpu-fs up -d
|
|
||||||
|
|
||||||
# cpu mode with S3 file system
|
|
||||||
docker compose --profile cpu-s3fs up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
- **Option 2**: Run Jan in GPU mode
|
|
||||||
|
|
||||||
- **Step 1**: Check CUDA compatibility with your NVIDIA driver by running `nvidia-smi` and check the CUDA version in the output
|
|
||||||
|
|
||||||
```bash
|
|
||||||
nvidia-smi
|
|
||||||
|
|
||||||
# Output
|
|
||||||
+---------------------------------------------------------------------------------------+
|
|
||||||
| NVIDIA-SMI 531.18 Driver Version: 531.18 CUDA Version: 12.1 |
|
|
||||||
|-----------------------------------------+----------------------+----------------------+
|
|
||||||
| GPU Name TCC/WDDM | Bus-Id Disp.A | Volatile Uncorr. ECC |
|
|
||||||
| Fan Temp Perf Pwr:Usage/Cap| Memory-Usage | GPU-Util Compute M. |
|
|
||||||
| | | MIG M. |
|
|
||||||
|=========================================+======================+======================|
|
|
||||||
| 0 NVIDIA GeForce RTX 4070 Ti WDDM | 00000000:01:00.0 On | N/A |
|
|
||||||
| 0% 44C P8 16W / 285W| 1481MiB / 12282MiB | 2% Default |
|
|
||||||
| | | N/A |
|
|
||||||
+-----------------------------------------+----------------------+----------------------+
|
|
||||||
| 1 NVIDIA GeForce GTX 1660 Ti WDDM | 00000000:02:00.0 Off | N/A |
|
|
||||||
| 0% 49C P8 14W / 120W| 0MiB / 6144MiB | 0% Default |
|
|
||||||
| | | N/A |
|
|
||||||
+-----------------------------------------+----------------------+----------------------+
|
|
||||||
| 2 NVIDIA GeForce GTX 1660 Ti WDDM | 00000000:05:00.0 Off | N/A |
|
|
||||||
| 29% 38C P8 11W / 120W| 0MiB / 6144MiB | 0% Default |
|
|
||||||
| | | N/A |
|
|
||||||
+-----------------------------------------+----------------------+----------------------+
|
|
||||||
|
|
||||||
+---------------------------------------------------------------------------------------+
|
|
||||||
| Processes: |
|
|
||||||
| GPU GI CI PID Type Process name GPU Memory |
|
|
||||||
| ID ID Usage |
|
|
||||||
|=======================================================================================|
|
|
||||||
```
|
|
||||||
|
|
||||||
- **Step 2**: Visit [NVIDIA NGC Catalog ](https://catalog.ngc.nvidia.com/orgs/nvidia/containers/cuda/tags) and find the smallest minor version of image tag that matches your CUDA version (e.g., 12.1 -> 12.1.0)
|
|
||||||
|
|
||||||
- **Step 3**: Update the `Dockerfile.gpu` line number 5 with the latest minor version of the image tag from step 2 (e.g. change `FROM nvidia/cuda:12.2.0-runtime-ubuntu22.04 AS base` to `FROM nvidia/cuda:12.1.0-runtime-ubuntu22.04 AS base`)
|
|
||||||
|
|
||||||
- **Step 4**: Run command to start Jan in GPU mode
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# GPU mode with default file system
|
|
||||||
docker compose --profile gpu-fs up -d
|
|
||||||
|
|
||||||
# GPU mode with S3 file system
|
|
||||||
docker compose --profile gpu-s3fs up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
This will start the web server and you can access Jan at `http://localhost:3000`.
|
|
||||||
|
|
||||||
> Note: RAG feature is not supported in Docker mode with s3fs yet.
|
|
||||||
|
|
||||||
## Acknowledgements
|
|
||||||
|
|
||||||
Jan builds on top of other open-source projects:
|
|
||||||
|
|
||||||
- [llama.cpp](https://github.com/ggerganov/llama.cpp)
|
|
||||||
- [LangChain](https://github.com/langchain-ai)
|
|
||||||
- [TensorRT](https://github.com/NVIDIA/TensorRT)
|
|
||||||
- [TensorRT-LLM](https://github.com/NVIDIA/TensorRT-LLM)
|
|
||||||
|
|
||||||
## Contact
|
## Contact
|
||||||
|
|
||||||
- Bugs & requests: file a GitHub ticket
|
- **Bugs**: [GitHub Issues](https://github.com/janhq/jan/issues)
|
||||||
- For discussion: join our Discord [here](https://discord.gg/FTk2MvZwJH)
|
- **Business**: hello@jan.ai
|
||||||
- For business inquiries: email hello@jan.ai
|
- **Jobs**: hr@jan.ai
|
||||||
- For jobs: please email hr@jan.ai
|
- **General Discussion**: [Discord](https://discord.gg/FTk2MvZwJH)
|
||||||
|
|
||||||
## Trust & Safety
|
|
||||||
|
|
||||||
Beware of scams.
|
|
||||||
|
|
||||||
- We will never ask you for personal info
|
|
||||||
- We are a free product; there's no paid version
|
|
||||||
- We don't have a token or ICO
|
|
||||||
- We are not actively fundraising or seeking donations
|
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|
||||||
Jan is free and open source, under the AGPLv3 license.
|
Apache 2.0 - Because sharing is caring.
|
||||||
|
|
||||||
|
## Acknowledgements
|
||||||
|
|
||||||
|
Built on the shoulders of giants:
|
||||||
|
|
||||||
|
- [Llama.cpp](https://github.com/ggerganov/llama.cpp)
|
||||||
|
- [Tauri](https://tauri.app/)
|
||||||
|
- [Scalar](https://github.com/scalar/scalar)
|
||||||
|
|||||||
319
autoqa/README.md
Normal file
319
autoqa/README.md
Normal file
@ -0,0 +1,319 @@
|
|||||||
|
# E2E Test Runner with ReportPortal Integration
|
||||||
|
|
||||||
|
🚀 An automated end-to-end test runner for Jan application with ReportPortal integration, screen recording, and comprehensive test monitoring.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- ✅ **Automated Jan App Testing**: Automatically starts/stops Jan application
|
||||||
|
- 🖥️ **Auto Computer Server**: Automatically starts computer server in background
|
||||||
|
- 📹 **Screen Recording**: Records test execution for debugging
|
||||||
|
- 📊 **ReportPortal Integration**: Optional test results upload to ReportPortal
|
||||||
|
- 🔄 **Turn Monitoring**: Prevents infinite loops with configurable turn limits
|
||||||
|
- 🎯 **Flexible Configuration**: Command-line arguments and environment variables
|
||||||
|
- 🌐 **Cross-platform**: Windows, macOS, and Linux support
|
||||||
|
- 📁 **Test Discovery**: Automatically scans test files from directory
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
- Python 3.8+
|
||||||
|
- Jan application installed
|
||||||
|
- Windows Sandbox (for computer provider)
|
||||||
|
- Computer server package installed
|
||||||
|
- Required Python packages (see requirements.txt)
|
||||||
|
|
||||||
|
## Installation
|
||||||
|
|
||||||
|
1. Clone the repository:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone <repository-url>
|
||||||
|
cd autoqa
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Install dependencies:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
## For Windows and Linux
|
||||||
|
pip install -r requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Ensure Jan application is installed in one of the default locations:
|
||||||
|
- Windows: `%LOCALAPPDATA%\Programs\jan\Jan.exe`
|
||||||
|
- macOS: `~/Applications/Jan.app/Contents/MacOS/Jan`
|
||||||
|
- Linux: `jan` (in PATH)
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
### Local Development (No ReportPortal)
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run all tests in ./tests directory (auto-starts computer server)
|
||||||
|
python main.py
|
||||||
|
|
||||||
|
# Run with custom test directory
|
||||||
|
python main.py --tests-dir "my_tests"
|
||||||
|
|
||||||
|
# Run with custom Jan app path
|
||||||
|
python main.py --jan-app-path "C:/Custom/Path/Jan.exe"
|
||||||
|
|
||||||
|
# Skip auto computer server start (if already running)
|
||||||
|
python main.py --skip-server-start
|
||||||
|
```
|
||||||
|
|
||||||
|
### With ReportPortal Integration
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Enable ReportPortal with token
|
||||||
|
python main.py --enable-reportportal --rp-token "YOUR_API_TOKEN"
|
||||||
|
|
||||||
|
# Full ReportPortal configuration
|
||||||
|
python main.py \
|
||||||
|
--enable-reportportal \
|
||||||
|
--rp-endpoint "https://reportportal.example.com" \
|
||||||
|
--rp-project "my_project" \
|
||||||
|
--rp-token "YOUR_API_TOKEN"
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### Command Line Arguments
|
||||||
|
|
||||||
|
| Argument | Environment Variable | Default | Description |
|
||||||
|
| ----------------------- | --------------------- | ------------------------------- | ------------------------------------------------- |
|
||||||
|
| **Computer Server** |
|
||||||
|
| `--skip-server-start` | `SKIP_SERVER_START` | `false` | Skip automatic computer server startup |
|
||||||
|
| **ReportPortal** |
|
||||||
|
| `--enable-reportportal` | `ENABLE_REPORTPORTAL` | `false` | Enable ReportPortal integration |
|
||||||
|
| `--rp-endpoint` | `RP_ENDPOINT` | `https://reportportal.menlo.ai` | ReportPortal endpoint URL |
|
||||||
|
| `--rp-project` | `RP_PROJECT` | `default_personal` | ReportPortal project name |
|
||||||
|
| `--rp-token` | `RP_TOKEN` | - | ReportPortal API token (required when RP enabled) |
|
||||||
|
| **Jan Application** |
|
||||||
|
| `--jan-app-path` | `JAN_APP_PATH` | _auto-detected_ | Path to Jan application executable |
|
||||||
|
| `--jan-process-name` | `JAN_PROCESS_NAME` | `Jan.exe` | Jan process name for monitoring |
|
||||||
|
| **Model Configuration** |
|
||||||
|
| `--model-name` | `MODEL_NAME` | `ByteDance-Seed/UI-TARS-1.5-7B` | AI model name |
|
||||||
|
| `--model-base-url` | `MODEL_BASE_URL` | `http://10.200.108.58:1234/v1` | Model API endpoint |
|
||||||
|
| `--model-provider` | `MODEL_PROVIDER` | `oaicompat` | Model provider type |
|
||||||
|
| `--model-loop` | `MODEL_LOOP` | `uitars` | Agent loop type |
|
||||||
|
| **Test Execution** |
|
||||||
|
| `--max-turns` | `MAX_TURNS` | `30` | Maximum turns per test |
|
||||||
|
| `--tests-dir` | `TESTS_DIR` | `tests` | Directory containing test files |
|
||||||
|
| `--delay-between-tests` | `DELAY_BETWEEN_TESTS` | `3` | Delay between tests (seconds) |
|
||||||
|
|
||||||
|
### Environment Variables
|
||||||
|
|
||||||
|
Create a `.env` file or set environment variables:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Computer Server
|
||||||
|
SKIP_SERVER_START=false
|
||||||
|
|
||||||
|
# ReportPortal Configuration
|
||||||
|
ENABLE_REPORTPORTAL=true
|
||||||
|
RP_ENDPOINT=https://reportportal.example.com
|
||||||
|
RP_PROJECT=my_project
|
||||||
|
RP_TOKEN=your_secret_token
|
||||||
|
|
||||||
|
# Jan Application
|
||||||
|
JAN_APP_PATH=C:\Custom\Path\Jan.exe
|
||||||
|
JAN_PROCESS_NAME=Jan.exe
|
||||||
|
|
||||||
|
# Model Configuration
|
||||||
|
MODEL_NAME=gpt-4
|
||||||
|
MODEL_BASE_URL=https://api.openai.com/v1
|
||||||
|
MODEL_PROVIDER=openai
|
||||||
|
MODEL_LOOP=uitars
|
||||||
|
|
||||||
|
# Test Settings
|
||||||
|
MAX_TURNS=50
|
||||||
|
TESTS_DIR=e2e_tests
|
||||||
|
DELAY_BETWEEN_TESTS=5
|
||||||
|
```
|
||||||
|
|
||||||
|
## Test Structure
|
||||||
|
|
||||||
|
### Test Files
|
||||||
|
|
||||||
|
- Test files should be `.txt` files containing test prompts
|
||||||
|
- Place test files in the `tests/` directory (or custom directory)
|
||||||
|
- Support nested directories for organization
|
||||||
|
|
||||||
|
Example test file (`tests/basic/login_test.txt`):
|
||||||
|
|
||||||
|
```
|
||||||
|
Test the login functionality of Jan application.
|
||||||
|
Navigate to login screen, enter valid credentials, and verify successful login.
|
||||||
|
```
|
||||||
|
|
||||||
|
### Directory Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
autoqa/
|
||||||
|
├── main.py # Main test runner
|
||||||
|
├── utils.py # Jan app utilities
|
||||||
|
├── test_runner.py # Test execution logic
|
||||||
|
├── screen_recorder.py # Screen recording functionality
|
||||||
|
├── reportportal_handler.py # ReportPortal integration
|
||||||
|
├── tests/ # Test files directory
|
||||||
|
│ ├── basic/
|
||||||
|
│ │ ├── login_test.txt
|
||||||
|
│ │ └── navigation_test.txt
|
||||||
|
│ └── advanced/
|
||||||
|
│ └── complex_workflow.txt
|
||||||
|
├── recordings/ # Screen recordings (auto-created)
|
||||||
|
├── trajectories/ # Agent trajectories (auto-created)
|
||||||
|
└── README.md
|
||||||
|
```
|
||||||
|
|
||||||
|
## Usage Examples
|
||||||
|
|
||||||
|
### Basic Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run all tests locally (auto-starts computer server)
|
||||||
|
python main.py
|
||||||
|
|
||||||
|
# Get help
|
||||||
|
python main.py --help
|
||||||
|
|
||||||
|
# Run without auto-starting computer server
|
||||||
|
python main.py --skip-server-start
|
||||||
|
```
|
||||||
|
|
||||||
|
### Advanced Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Custom configuration
|
||||||
|
python main.py \
|
||||||
|
--tests-dir "integration_tests" \
|
||||||
|
--max-turns 40 \
|
||||||
|
--delay-between-tests 10 \
|
||||||
|
--model-name "gpt-4"
|
||||||
|
|
||||||
|
# Environment + Arguments
|
||||||
|
ENABLE_REPORTPORTAL=true RP_TOKEN=secret python main.py --max-turns 50
|
||||||
|
|
||||||
|
# Different model provider
|
||||||
|
python main.py \
|
||||||
|
--model-provider "openai" \
|
||||||
|
--model-name "gpt-4" \
|
||||||
|
--model-base-url "https://api.openai.com/v1"
|
||||||
|
|
||||||
|
# External computer server (skip auto-start)
|
||||||
|
SKIP_SERVER_START=true python main.py
|
||||||
|
```
|
||||||
|
|
||||||
|
### CI/CD Usage
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# GitHub Actions / CI environment
|
||||||
|
ENABLE_REPORTPORTAL=true \
|
||||||
|
RP_TOKEN=${{ secrets.RP_TOKEN }} \
|
||||||
|
MODEL_NAME=production-model \
|
||||||
|
MAX_TURNS=40 \
|
||||||
|
SKIP_SERVER_START=false \
|
||||||
|
python main.py
|
||||||
|
```
|
||||||
|
|
||||||
|
## Computer Server Management
|
||||||
|
|
||||||
|
The test runner automatically manages the computer server:
|
||||||
|
|
||||||
|
### Automatic Server Management (Default)
|
||||||
|
|
||||||
|
- **Auto-start**: Computer server starts automatically in background thread
|
||||||
|
- **Auto-cleanup**: Server stops when main program exits (daemon thread)
|
||||||
|
- **Error handling**: Graceful fallback if server fails to start
|
||||||
|
|
||||||
|
### Manual Server Management
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# If you prefer to manage computer server manually:
|
||||||
|
python -m computer_server # In separate terminal
|
||||||
|
|
||||||
|
# Then run tests without auto-start:
|
||||||
|
python main.py --skip-server-start
|
||||||
|
```
|
||||||
|
|
||||||
|
### Server Logs
|
||||||
|
|
||||||
|
```
|
||||||
|
2025-07-15 15:30:45 - INFO - Starting computer server in background...
|
||||||
|
2025-07-15 15:30:45 - INFO - Calling computer_server.run_cli()...
|
||||||
|
2025-07-15 15:30:45 - INFO - Computer server thread started
|
||||||
|
2025-07-15 15:30:50 - INFO - Computer server is running successfully
|
||||||
|
```
|
||||||
|
|
||||||
|
## Output
|
||||||
|
|
||||||
|
### Local Development
|
||||||
|
|
||||||
|
- **Console logs**: Detailed execution information
|
||||||
|
- **Screen recordings**: Saved to `recordings/` directory as MP4 files
|
||||||
|
- **Trajectories**: Agent interaction data in `trajectories/` directory
|
||||||
|
- **Local results**: Test results logged to console
|
||||||
|
|
||||||
|
### ReportPortal Integration
|
||||||
|
|
||||||
|
When enabled, results are uploaded to ReportPortal including:
|
||||||
|
|
||||||
|
- Test execution status (PASSED/FAILED)
|
||||||
|
- Screen recordings as attachments
|
||||||
|
- Detailed turn-by-turn interaction logs
|
||||||
|
- Error messages and debugging information
|
||||||
|
|
||||||
|
## Troubleshooting
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
1. **Computer server startup failed**:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install required dependencies
|
||||||
|
pip install computer_server
|
||||||
|
|
||||||
|
# Check if computer_server is available
|
||||||
|
python -c "import computer_server; print('OK')"
|
||||||
|
|
||||||
|
# Use manual server if auto-start fails
|
||||||
|
python main.py --skip-server-start
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **Jan app not found**:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Specify custom path
|
||||||
|
python main.py --jan-app-path "D:/Apps/Jan/Jan.exe"
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **Windows dependencies missing**:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Install Windows-specific packages
|
||||||
|
pip install pywin32 psutil
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **ReportPortal connection failed**:
|
||||||
|
|
||||||
|
- Verify endpoint URL and token
|
||||||
|
- Check network connectivity
|
||||||
|
- Ensure project exists
|
||||||
|
|
||||||
|
5. **Screen recording issues**:
|
||||||
|
|
||||||
|
- Check disk space in `recordings/` directory
|
||||||
|
- Verify screen recording permissions
|
||||||
|
|
||||||
|
6. **Test timeouts**:
|
||||||
|
```bash
|
||||||
|
# Increase turn limit
|
||||||
|
python main.py --max-turns 50
|
||||||
|
```
|
||||||
|
|
||||||
|
### Debug Mode
|
||||||
|
|
||||||
|
Enable detailed logging by modifying the logging level in `main.py`:
|
||||||
|
|
||||||
|
```python
|
||||||
|
logging.basicConfig(level=logging.DEBUG)
|
||||||
|
```
|
||||||
264
autoqa/checklist.md
Normal file
264
autoqa/checklist.md
Normal file
@ -0,0 +1,264 @@
|
|||||||
|
# I. Before release
|
||||||
|
|
||||||
|
## A. Initial update / migration Data check
|
||||||
|
|
||||||
|
Before testing, set-up the following in the old version to make sure that we can see the data is properly migrated:
|
||||||
|
- [ ] Changing appearance / theme to something that is obviously different from default set-up
|
||||||
|
- [ ] Ensure there are a few chat threads
|
||||||
|
- [ ] Ensure there are a few favourites / star threads
|
||||||
|
- [ ] Ensure there are 2 model downloaded
|
||||||
|
- [ ] Ensure there are 2 imported models on the local provider (llama.cpp)
|
||||||
|
- [ ] Modify MCP servers list and add some ENV value to MCP servers
|
||||||
|
- [ ] Modify Local API Server
|
||||||
|
- [ ] HTTPS proxy config value
|
||||||
|
- [ ] Add 2 custom assistants to Jan
|
||||||
|
- [ ] Create a new chat with the custom assistant
|
||||||
|
- [ ] Change the `App Data` to some other folder
|
||||||
|
- [ ] Create a Custom Provider
|
||||||
|
- [ ] Disabled some model providers
|
||||||
|
- [NEW] Change llama.cpp setting of 2 models
|
||||||
|
#### Validate that the update does not corrupt existing user data or settings (before and after update show the same information):
|
||||||
|
- [ ] Threads
|
||||||
|
- [ ] Previously used model and assistants is shown correctly
|
||||||
|
- [ ] Can resume chat in threads with the previous context
|
||||||
|
- [ ] Assistants
|
||||||
|
- Settings:
|
||||||
|
- [ ] Appearance
|
||||||
|
- [ ] MCP Servers
|
||||||
|
- [ ] Local API Server
|
||||||
|
- [ ] HTTPS Proxy
|
||||||
|
- [ ] Custom Provider Set-up
|
||||||
|
|
||||||
|
#### In `Hub`:
|
||||||
|
- [ ] Can see model from HF listed properly
|
||||||
|
- [ ] Downloaded model will show `Use` instead of `Download`
|
||||||
|
- [ ] Toggling on `Downloaded` on the right corner show the correct list of downloaded models
|
||||||
|
|
||||||
|
#### In `Settings -> General`:
|
||||||
|
- [ ] Ensure the `App Data` path is the same
|
||||||
|
- [ ] Click Open Logs, App Log will show
|
||||||
|
|
||||||
|
#### In `Settings -> Model Providers`:
|
||||||
|
- [ ] Llama.cpp still listed downloaded models and user can chat with the models
|
||||||
|
- [ ] Llama.cpp still listed imported models and user can chat with the models
|
||||||
|
- [ ] Remote model still retain previously set up API keys and user can chat with model from the provider without having to re-enter API keys
|
||||||
|
- [ ] Enabled and Disabled Model Providers stay the same as before update
|
||||||
|
|
||||||
|
#### In `Settings -> Extensions`, check that following exists:
|
||||||
|
- [ ] Conversational
|
||||||
|
- [ ] Jan Assistant
|
||||||
|
- [ ] Download Manager
|
||||||
|
- [ ] llama.cpp Inference Engine
|
||||||
|
|
||||||
|
## B. `Settings`
|
||||||
|
|
||||||
|
#### In `General`:
|
||||||
|
- [ ] Ensure `Community` links work and point to the correct website
|
||||||
|
- [ ] Ensure the `Check for Updates` function detect the correct latest version
|
||||||
|
- [ ] [ENG] Create a folder with non-standard characters in its title (e.g. Chinese characters) => change the `App data` location to that folder => test that the model is still able to load and run properly.
|
||||||
|
#### In `Appearance`:
|
||||||
|
- [ ] Toggle between different `Theme` options to check that they change accordingly and that all elements of the UI are legible with the right contrast:
|
||||||
|
- [ ] Light
|
||||||
|
- [ ] Dark
|
||||||
|
- [ ] System (should follow your OS system settings)
|
||||||
|
- [ ] Change the following values => close the application => re-open the application => ensure that the change is persisted across session:
|
||||||
|
- [ ] Theme
|
||||||
|
- [ ] Font Size
|
||||||
|
- [ ] Window Background
|
||||||
|
- [ ] App Main View
|
||||||
|
- [ ] Primary
|
||||||
|
- [ ] Accent
|
||||||
|
- [ ] Destructive
|
||||||
|
- [ ] Chat Width
|
||||||
|
- [ ] Ensure that when this value is changed, there is no broken UI caused by it
|
||||||
|
- [ ] Code Block
|
||||||
|
- [ ] Show Line Numbers
|
||||||
|
- [ENG] Ensure that when clicking `Reset` in the `Appearance` section, it resets back to the default values
|
||||||
|
- [ENG] Ensure that when clicking `Reset` in the `Code Block` section, it resets back to the default values
|
||||||
|
|
||||||
|
#### In `Model Providers`:
|
||||||
|
|
||||||
|
In `Llama.cpp`:
|
||||||
|
- [ ] After downloading a model from hub, the model is listed with the correct name under `Models`
|
||||||
|
- [ ] Can import `gguf` model with no error
|
||||||
|
- [ ] Imported model will be listed with correct name under the `Models`
|
||||||
|
- [ ] Check that when clicking `delete` the model is removed from the list
|
||||||
|
- [ ] Deleted model doesn't appear in the selectable models section in chat input (even in old threads that use the model previously)
|
||||||
|
- [ ] Ensure that user can re-import deleted imported models
|
||||||
|
- [ ] Enable `Auto-Unload Old Models`, and ensure that only one model can run / start at a time. If there are two model running at the time of enable, both of them will be stopped.
|
||||||
|
- [ ] Disable `Auto-Unload Old Models`, and ensure that multiple models can run at the same time.
|
||||||
|
- [ ] Enable `Context Shift` and ensure that context can run for long without encountering memory error. Use the `banana test` by turn on fetch MCP => ask local model to fetch and summarize the history of banana (banana has a very long history on wiki it turns out). It should run out of context memory sufficiently fast if `Context Shift` is not enabled.
|
||||||
|
- [ ] Ensure that user can change the Jinja chat template of individual model and it doesn't affect the template of other model
|
||||||
|
- [ ] Ensure that there is a recommended `llama.cpp` for each system and that it works out of the box for users.
|
||||||
|
- [ ] [0.6.9] Take a `gguf` file and delete the `.gguf` extensions from the file name, import it into Jan and verify that it works.
|
||||||
|
|
||||||
|
In Remote Model Providers:
|
||||||
|
- [ ] Check that the following providers are present:
|
||||||
|
- [ ] OpenAI
|
||||||
|
- [ ] Anthropic
|
||||||
|
- [ ] Cohere
|
||||||
|
- [ ] OpenRouter
|
||||||
|
- [ ] Mistral
|
||||||
|
- [ ] Groq
|
||||||
|
- [ ] Gemini
|
||||||
|
- [ ] Hugging Face
|
||||||
|
- [ ] Models should appear as available on the selectable dropdown in chat input once some value is input in the API key field. (it could be the wrong API key)
|
||||||
|
- [ ] Once a valid API key is used, user can select a model from that provider and chat without any error.
|
||||||
|
- [ ] Delete a model and ensure that it doesn't show up in the `Modesl` list view or in the selectable dropdown in chat input.
|
||||||
|
- [ ] Ensure that a deleted model also not selectable or appear in old threads that used it.
|
||||||
|
- [ ] Adding of new model manually works and user can chat with the newly added model without error (you can add back the model you just delete for testing)
|
||||||
|
- [ ] [0.6.9] Make sure that Ollama set-up as a custom provider work with Jan
|
||||||
|
In Custom Providers:
|
||||||
|
- [ ] Ensure that user can create a new custom providers with the right baseURL and API key.
|
||||||
|
- [ ] Click `Refresh` should retrieve a list of available models from the Custom Providers.
|
||||||
|
- [ ] User can chat with the custom providers
|
||||||
|
- [ ] Ensure that Custom Providers can be deleted and won't reappear in a new session
|
||||||
|
|
||||||
|
In general:
|
||||||
|
- [ ] Disabled Model Provider should not show up as selectable in chat input of new thread and old thread alike (old threads' chat input should show `Select Model` instead of disabled model)
|
||||||
|
|
||||||
|
#### In `Shortcuts`:
|
||||||
|
|
||||||
|
Make sure the following shortcut key combo is visible and works:
|
||||||
|
- [ ] New chat
|
||||||
|
- [ ] Toggle Sidebar
|
||||||
|
- [ ] Zoom In
|
||||||
|
- [ ] Zoom Out
|
||||||
|
- [ ] Send Message
|
||||||
|
- [ ] New Line
|
||||||
|
- [ ] Navigation
|
||||||
|
|
||||||
|
#### In `Hardware`:
|
||||||
|
Ensure that the following section information show up for hardware
|
||||||
|
- [ ] Operating System
|
||||||
|
- [ ] CPU
|
||||||
|
- [ ] Memory
|
||||||
|
- [ ] GPU (If the machine has one)
|
||||||
|
- [ ] Enabling and Disabling GPUs and ensure that model still run correctly in both mode
|
||||||
|
- [ ] Enabling or Disabling GPU should not affect the UI of the application
|
||||||
|
|
||||||
|
#### In `MCP Servers`:
|
||||||
|
- [ ] Ensure that a user can create an MCP server successfully when entering the correct information
|
||||||
|
- [ ] Ensure that `Env` value is masked by `*` in the quick view.
|
||||||
|
- [ ] If an `Env` value is missing, there should be a error pop up.
|
||||||
|
- [ ] Ensure that deleted MCP server disappear from the `MCP Server` list without any error
|
||||||
|
- [ ] Ensure that before an MCP server is deleted, it disables itself first and won't appear on the tool list after deletion.
|
||||||
|
- [ ] Ensure that when the content of a MCP server is edited, it will be updated and reflected accordingly in the UI and when running it.
|
||||||
|
- [ ] Toggling enable and disabled of a MCP server work properly
|
||||||
|
- [ ] A disabled MCP should not appear in the available tool list in chat input
|
||||||
|
- [ ] A disabled MCP should not be callable even when force-prompted by the model (ensure there is no ghost MCP server)
|
||||||
|
- [ ] Ensure that enabled MCP server start automatically upon starting of the application
|
||||||
|
- [ ] An enabled MCP should show functions in the available tool list
|
||||||
|
- [ ] User can use a model and call different tool from multiple enabled MCP servers in the same thread
|
||||||
|
- [ ] If `Allow All MCP Tool Permissions` is disabled, in every new thread, before a tool is called, there should be a confirmation dialog pop up to confirm the action.
|
||||||
|
- [ ] When the user click `Deny`, the tool call will not be executed and return a message indicate so in the tool call result.
|
||||||
|
- [ ] When the user click `Allow Once` on the pop up, a confirmation dialog will appear again when the tool is called next time.
|
||||||
|
- [ ] When the user click `Always Allow` on the pop up, the tool will retain permission and won't ask for confirmation again. (this applied at an individual tool level, not at the MCP server level)
|
||||||
|
- [ ] If `Allow All MCP Tool Permissions` is enabled, in every new thread, there should not be any confirmation dialog pop up when a tool is called.
|
||||||
|
- [ ] When the pop-up appears, make sure that the `Tool Parameters` are also shown with detail in the pop-up.
|
||||||
|
- [ ] [0.6.9] Go to Enter JSON configuration when creating a new MCP => paste the JSON config inside => click `Save` => server works
|
||||||
|
- [ ] [0.6.9] If individual JSON config format is failed, the MCP server should not be activated
|
||||||
|
- [ ] [0.6.9] Make sure that MCP server can be used with streamable-http transport => connect to Smithery and test MCP server
|
||||||
|
|
||||||
|
#### In `Local API Server`:
|
||||||
|
- [ ] User can `Start Server` and chat with the default endpoint
|
||||||
|
- [ ] User should see the correct model name at `v1/models`
|
||||||
|
- [ ] User should be able to chat with it at `v1/chat/completions`
|
||||||
|
- [ ] `Open Logs` show the correct query log send to the server and return from the server
|
||||||
|
- [ ] Make sure that changing all the parameter in `Server Configuration` is reflected when `Start Server`
|
||||||
|
- [ ] [0.6.9] With the startup configuration, the last used model also starts automatically (users do not have to manually start a model before starting the server)
|
||||||
|
- [ ] [0.6.9] Make sure that you can send an image to a Local API Server and it also works (can set up Local API Server as a Custom Provider in Jan to test)
|
||||||
|
|
||||||
|
#### In `HTTPS Proxy`:
|
||||||
|
- [ ] Model download request goes through proxy endpoint
|
||||||
|
|
||||||
|
## C. Hub
|
||||||
|
- [ ] User can click `Download` to download a model
|
||||||
|
- [ ] User can cancel a model in the middle of downloading
|
||||||
|
- [ ] User can add a Hugging Face model detail to the list by pasting a model name / model url into the search bar and press enter
|
||||||
|
- [ ] Clicking on a listing will open up the model card information within Jan and render the HTML properly
|
||||||
|
- [ ] Clicking download work on the `Show variants` section
|
||||||
|
- [ ] Clicking download work inside the Model card HTML
|
||||||
|
- [ ] [0.6.9] Check that the model recommendation base on user hardware work as expected in the Model Hub
|
||||||
|
|
||||||
|
## D. Threads
|
||||||
|
|
||||||
|
#### In the left bar:
|
||||||
|
- [ ] User can delete an old thread, and it won't reappear even when app restart
|
||||||
|
- [ ] Change the title of the thread should update its last modification date and re-organise its position in the correct chronological order on the left bar.
|
||||||
|
- [ ] The title of a new thread is the first message from the user.
|
||||||
|
- [ ] Users can star / un-star threads accordingly
|
||||||
|
- [ ] Starred threads should move to `Favourite` section and other threads should stay in `Recent`
|
||||||
|
- [ ] Ensure that the search thread feature return accurate result based on thread titles and contents (including from both `Favourite` and `Recent`)
|
||||||
|
- [ ] `Delete All` should delete only threads in the `Recents` section
|
||||||
|
- [ ] `Unstar All` should un-star all of the `Favourites` threads and return them to `Recent`
|
||||||
|
|
||||||
|
#### In a thread:
|
||||||
|
- [ ] When `New Chat` is clicked, the assistant is set as the last selected assistant, the model selected is set as the last used model, and the user can immediately chat with the model.
|
||||||
|
- [ ] User can conduct multi-turn conversation in a single thread without loss of data (given that `Context Shift` is not enabled)
|
||||||
|
- [ ] User can change to a different model in the middle of a conversation in a thread and the model work.
|
||||||
|
- [ ] User can click on `Regenerate` button on a returned message from the model to get a new response base on the previous context.
|
||||||
|
- [ ] User can change `Assistant` in the middle of a conversation in a thread and the new assistant setting will be applied instead.
|
||||||
|
- [ ] The chat windows can render and show all the content of a selected threads (including scroll up and down on long threads)
|
||||||
|
- [ ] Old thread retained their setting as of the last update / usage
|
||||||
|
- [ ] Assistant option
|
||||||
|
- [ ] Model option (except if the model / model provider has been deleted or disabled)
|
||||||
|
- [ ] User can send message with different type of text content (e.g text, emoji, ...)
|
||||||
|
- [ ] When request model to generate a markdown table, the table is correctly formatted as returned from the model.
|
||||||
|
- [ ] When model generate code, ensure that the code snippets is properly formatted according to the `Appearance -> Code Block` setting.
|
||||||
|
- [ ] Users can edit their old message and and user can regenerate the answer based on the new message
|
||||||
|
- [ ] User can click `Copy` to copy the model response
|
||||||
|
- [ ] User can click `Delete` to delete either the user message or the model response.
|
||||||
|
- [ ] The token speed appear when a response from model is being generated and the final value is show under the response.
|
||||||
|
- [ ] Make sure that user when using IME keyboard to type Chinese and Japanese character and they press `Enter`, the `Send` button doesn't trigger automatically after each words.
|
||||||
|
- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a remote model
|
||||||
|
- [ ] [0.6.9] Attach an image to the chat input and see if you can chat with it using a local model
|
||||||
|
- [ ] [0.6.9] Check that you can paste an image to text box from your system clipboard (Copy - Paste)
|
||||||
|
- [ ] [0.6.9] Make sure that user can favourite a model in the model selection in chat input
|
||||||
|
|
||||||
|
## E. Assistants
|
||||||
|
- [ ] There is always at least one default Assistant which is Jan
|
||||||
|
- [ ] The default Jan assistant has `stream = True` by default
|
||||||
|
- [ ] User can create / edit a new assistant with different parameters and instructions choice.
|
||||||
|
- [ ] When the user deletes the default Assistant, the next Assistant in line will become the default Assistant and apply their settings to new chats accordingly.
|
||||||
|
- [ ] User can create / edit assistant from within a Chat windows (on the top left)
|
||||||
|
|
||||||
|
## F. After checking everything else
|
||||||
|
|
||||||
|
In `Settings -> General`:
|
||||||
|
- [ ] Change the location of the `App Data` to some other path that is not the default path
|
||||||
|
- [ ] Click on `Reset` button in `Other` to factory reset the app:
|
||||||
|
- [ ] All threads deleted
|
||||||
|
- [ ] All Assistant deleted except for default Jan Assistant
|
||||||
|
- [ ] `App Data` location is reset back to default path
|
||||||
|
- [ ] Appearance reset
|
||||||
|
- [ ] Model Providers information all reset
|
||||||
|
- [ ] Llama.cpp setting reset
|
||||||
|
- [ ] API keys cleared
|
||||||
|
- [ ] All Custom Providers deleted
|
||||||
|
- [ ] MCP Servers reset
|
||||||
|
- [ ] Local API Server reset
|
||||||
|
- [ ] HTTPS Proxy reset
|
||||||
|
- [ ] After closing the app, all models are unloaded properly
|
||||||
|
- [ ] Locate to the data folder using the `App Data` path information => delete the folder => reopen the app to check that all the folder is re-created with all the necessary data.
|
||||||
|
- [ ] Ensure that the uninstallation process removes the app successfully from the system.
|
||||||
|
## G. New App Installation
|
||||||
|
- [ ] Clean up by deleting all the left over folder created by Jan
|
||||||
|
- [ ] On MacOS
|
||||||
|
- [ ] `~/Library/Application Support/Jan`
|
||||||
|
- [ ] `~/Library/Caches/jan.ai.app`
|
||||||
|
- [ ] On Windows
|
||||||
|
- [ ] `C:\Users<Username>\AppData\Roaming\Jan\`
|
||||||
|
- [ ] `C:\Users<Username>\AppData\Local\jan.ai.app`
|
||||||
|
- [ ] On Linux
|
||||||
|
- [ ] `~/.cache/Jan`
|
||||||
|
- [ ] `~/.cache/jan.ai.app`
|
||||||
|
- [ ] `~/.local/share/Jan`
|
||||||
|
- [ ] `~/.local/share/jan.ai.app`
|
||||||
|
- [ ] Ensure that the fresh install of Jan launch
|
||||||
|
- [ ] Do some basic check to see that all function still behaved as expected. To be extra careful, you can go through the whole list again. However, it is more advisable to just check to make sure that all the core functionality like `Thread` and `Model Providers` work as intended.
|
||||||
|
|
||||||
|
# II. After release
|
||||||
|
- [ ] Check that the App Updater works and user can update to the latest release without any problem
|
||||||
|
- [ ] App restarts after the user finished an update
|
||||||
|
- [ ] Repeat section `A. Initial update / migration Data check` above to verify that update is done correctly on live version
|
||||||
514
autoqa/main.py
Normal file
514
autoqa/main.py
Normal file
@ -0,0 +1,514 @@
|
|||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import argparse
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import platform
|
||||||
|
from datetime import datetime
|
||||||
|
from computer import Computer
|
||||||
|
from reportportal_client import RPClient
|
||||||
|
from reportportal_client.helpers import timestamp
|
||||||
|
|
||||||
|
from utils import scan_test_files
|
||||||
|
from test_runner import run_single_test_with_timeout
|
||||||
|
|
||||||
|
# Configure logging: INFO level, timestamped messages, stream (console) output only.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# Platform detection flags, evaluated once at import time and used
# throughout this module to pick OS-specific paths and process names.
IS_WINDOWS = platform.system() == "Windows"
IS_LINUX = platform.system() == "Linux"
IS_MACOS = platform.system() == "Darwin"
|
||||||
|
|
||||||
|
def get_computer_config():
    """Return the platform-specific configuration dict for the Computer env.

    Returns:
        dict: ``{"os_type": ...}`` where the value is one of ``"windows"``,
        ``"linux"`` or ``"macos"``. Unknown platforms fall back to the
        Linux configuration, with a warning logged.
    """
    if IS_WINDOWS:
        os_type = "windows"
    elif IS_LINUX:
        os_type = "linux"
    elif IS_MACOS:
        os_type = "macos"
    else:
        # Unrecognised platform: mirror the Linux behaviour as a best-effort default.
        logger.warning(f"Unknown platform {platform.system()}, using Linux config as fallback")
        os_type = "linux"
    return {"os_type": os_type}
|
||||||
|
|
||||||
|
def get_default_jan_path():
    """Return the most likely path to the Jan executable for this OS.

    Probes a list of conventional install locations and returns the first
    one that exists on disk. If none exist, a sensible default candidate
    is returned anyway so callers always receive a usable path string.
    On unrecognised platforms, returns the bare name ``"jan"`` (resolved
    via PATH).
    """
    if IS_WINDOWS:
        # Common per-user and machine-wide install locations on Windows.
        candidates = [
            os.path.expanduser(r"~\AppData\Local\Programs\jan\Jan.exe"),
            os.path.join(os.environ.get('LOCALAPPDATA', ''), 'Programs', 'jan', 'Jan.exe'),
            os.path.join(os.environ.get('APPDATA', ''), 'jan', 'Jan.exe'),
            r"C:\Program Files\jan\Jan.exe",
            r"C:\Program Files (x86)\jan\Jan.exe"
        ]
        fallback = candidates[0]
    elif IS_LINUX:
        # Common Linux install locations.
        candidates = [
            "/usr/bin/Jan",
            "/usr/local/bin/Jan",
            os.path.expanduser("~/Applications/Jan/Jan"),
            "/opt/Jan/Jan"
        ]
        # Default to the nightly build path when nothing is found.
        fallback = "/usr/bin/Jan"
    elif IS_MACOS:
        # System-wide and per-user app bundle locations on macOS.
        candidates = [
            "/Applications/Jan.app/Contents/MacOS/Jan",
            os.path.expanduser("~/Applications/Jan.app/Contents/MacOS/Jan")
        ]
        fallback = candidates[0]
    else:
        # Unknown platform: rely on `jan` being resolvable via PATH.
        return "jan"

    for candidate in candidates:
        if os.path.exists(candidate):
            return candidate
    return fallback
|
||||||
|
|
||||||
|
def start_computer_server():
    """Start the computer server in a background daemon thread.

    Returns:
        threading.Thread | None: the running server thread on success, or
        ``None`` if the ``computer_server`` package cannot be imported or
        the server thread dies during startup.
    """
    try:
        logger.info("Starting computer server in background...")

        # Imported lazily so a missing package is reported as a soft failure
        # (return None) instead of breaking module import.
        import computer_server
        import sys

        # Thread target: run the server's CLI entry point with a scrubbed argv.
        def run_server():
            try:
                # Save original sys.argv to avoid argument conflicts: this
                # process's own CLI flags must not leak into computer_server.
                original_argv = sys.argv.copy()

                # Override sys.argv for computer_server to use default args
                sys.argv = ['computer_server']  # Reset to minimal args

                # Use the proper entry point (blocks until the server stops).
                logger.info("Calling computer_server.run_cli()...")
                computer_server.run_cli()
                logger.info("Computer server.run_cli() completed")
            except KeyboardInterrupt:
                logger.info("Computer server interrupted")
            except Exception as e:
                logger.error(f"Computer server error: {e}")
                import traceback
                logger.error(f"Traceback: {traceback.format_exc()}")
            finally:
                # Restore original sys.argv; best-effort, since original_argv
                # may be unbound if the copy above never ran.
                try:
                    sys.argv = original_argv
                except:
                    pass

        # Daemon thread: terminates automatically when the main program exits.
        server_thread = threading.Thread(target=run_server, daemon=True)
        server_thread.start()

        logger.info("Computer server thread started")

        # Give the server time to start up before checking its health.
        time.sleep(5)

        # If the thread is still alive after the grace period, assume the
        # server started successfully; a dead thread means startup failed.
        if server_thread.is_alive():
            logger.info("Computer server is running successfully")
            return server_thread
        else:
            logger.error("Computer server thread died unexpectedly")
            return None

    except ImportError as e:
        logger.error(f"Cannot import computer_server module: {e}")
        logger.error("Please install computer_server package")
        return None
    except Exception as e:
        logger.error(f"Error starting computer server: {e}")
        import traceback
        logger.error(f"Traceback: {traceback.format_exc()}")
        return None
|
||||||
|
|
||||||
|
def parse_arguments():
    """Parse command line arguments.

    Every option can also be supplied via an environment variable (named in
    each option's help text); the CLI flag wins because the env value is
    only used as the argparse default.

    Returns:
        argparse.Namespace: the parsed arguments.

    Exits (via ``parser.error``) when --enable-reportportal is set without
    an accompanying --rp-token / RP_TOKEN.
    """
    parser = argparse.ArgumentParser(
        description="E2E Test Runner with ReportPortal integration",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Run locally without ReportPortal
  python main.py

  # Run with ReportPortal integration
  python main.py --enable-reportportal --rp-token YOUR_TOKEN

  # Run with custom Jan app path
  python main.py --jan-app-path "C:/Custom/Path/Jan.exe"

  # Run with different model
  python main.py --model-name "gpt-4" --model-base-url "https://api.openai.com/v1"

  # Using environment variables
  ENABLE_REPORTPORTAL=true RP_TOKEN=xxx MODEL_NAME=gpt-4 python main.py
        """
    )

    # Auto-detected default Jan path (used in --jan-app-path's default/help).
    default_jan_path = get_default_jan_path()

    # Computer server arguments
    server_group = parser.add_argument_group('Computer Server Configuration')
    server_group.add_argument(
        '--skip-server-start',
        action='store_true',
        default=os.getenv('SKIP_SERVER_START', 'false').lower() == 'true',
        help='Skip automatic computer server startup (env: SKIP_SERVER_START, default: false)'
    )

    # ReportPortal arguments
    rp_group = parser.add_argument_group('ReportPortal Configuration')
    rp_group.add_argument(
        '--enable-reportportal',
        action='store_true',
        default=os.getenv('ENABLE_REPORTPORTAL', 'false').lower() == 'true',
        help='Enable ReportPortal integration (env: ENABLE_REPORTPORTAL, default: false)'
    )
    rp_group.add_argument(
        '--rp-endpoint',
        default=os.getenv('RP_ENDPOINT', 'https://reportportal.menlo.ai'),
        help='ReportPortal endpoint URL (env: RP_ENDPOINT, default: %(default)s)'
    )
    rp_group.add_argument(
        '--rp-project',
        default=os.getenv('RP_PROJECT', 'default_personal'),
        help='ReportPortal project name (env: RP_PROJECT, default: %(default)s)'
    )
    rp_group.add_argument(
        '--rp-token',
        default=os.getenv('RP_TOKEN'),
        help='ReportPortal API token (env: RP_TOKEN, required when --enable-reportportal is used)'
    )
    rp_group.add_argument(
        '--launch-name',
        default=os.getenv('LAUNCH_NAME'),
        help='Custom launch name for ReportPortal (env: LAUNCH_NAME, default: auto-generated with timestamp)'
    )

    # Jan app arguments
    jan_group = parser.add_argument_group('Jan Application Configuration')
    jan_group.add_argument(
        '--jan-app-path',
        default=os.getenv('JAN_APP_PATH', default_jan_path),
        help=f'Path to Jan application executable (env: JAN_APP_PATH, default: auto-detected or {default_jan_path})'
    )
    jan_group.add_argument(
        '--jan-process-name',
        # Process name differs per platform; Linux defaults to the nightly build name.
        default=os.getenv('JAN_PROCESS_NAME', 'Jan.exe' if IS_WINDOWS else ('Jan' if IS_MACOS else 'Jan-nightly')),
        help='Jan process name for monitoring (env: JAN_PROCESS_NAME, default: platform-specific)'
    )

    # Model/Agent arguments
    model_group = parser.add_argument_group('Model Configuration')
    model_group.add_argument(
        '--model-loop',
        default=os.getenv('MODEL_LOOP', 'uitars'),
        help='Agent loop type (env: MODEL_LOOP, default: %(default)s)'
    )
    model_group.add_argument(
        '--model-provider',
        default=os.getenv('MODEL_PROVIDER', 'oaicompat'),
        help='Model provider (env: MODEL_PROVIDER, default: %(default)s)'
    )
    model_group.add_argument(
        '--model-name',
        default=os.getenv('MODEL_NAME', 'ByteDance-Seed/UI-TARS-1.5-7B'),
        help='Model name (env: MODEL_NAME, default: %(default)s)'
    )
    model_group.add_argument(
        '--model-base-url',
        default=os.getenv('MODEL_BASE_URL', 'http://10.200.108.58:1234/v1'),
        help='Model base URL (env: MODEL_BASE_URL, default: %(default)s)'
    )

    # Test execution arguments
    test_group = parser.add_argument_group('Test Execution Configuration')
    test_group.add_argument(
        '--max-turns',
        type=int,
        default=int(os.getenv('MAX_TURNS', '30')),
        help='Maximum number of turns per test (env: MAX_TURNS, default: %(default)s)'
    )
    test_group.add_argument(
        '--tests-dir',
        default=os.getenv('TESTS_DIR', 'tests'),
        help='Directory containing test files (env: TESTS_DIR, default: %(default)s)'
    )
    test_group.add_argument(
        '--delay-between-tests',
        type=int,
        default=int(os.getenv('DELAY_BETWEEN_TESTS', '3')),
        help='Delay in seconds between tests (env: DELAY_BETWEEN_TESTS, default: %(default)s)'
    )

    args = parser.parse_args()

    # Validate ReportPortal token if ReportPortal is enabled; parser.error exits.
    if args.enable_reportportal and not args.rp_token:
        parser.error("--rp-token (or RP_TOKEN env var) is required when --enable-reportportal is used")

    return args
|
||||||
|
|
||||||
|
async def main():
    """
    Main function to scan and run all test files with optional ReportPortal integration.

    Flow: parse args -> (optionally) start the local computer server ->
    build the agent config -> initialize the Computer environment -> run
    every discovered test sequentially -> log a summary -> finish the
    ReportPortal launch (if started) -> exit(1) on any failure, exit(0)
    when all tests pass.
    """
    # Parse command line arguments
    args = parse_arguments()

    # Initialize final exit code (0 = success, 1 = any failure/interrupt)
    final_exit_code = 0

    # Start computer server if not skipped; abort early on startup failure.
    server_thread = None
    if not args.skip_server_start:
        server_thread = start_computer_server()
        if server_thread is None:
            logger.error("Failed to start computer server. Exiting...")
            return
    else:
        logger.info("Skipping computer server startup (assuming it's already running)")

    try:
        # Build agent config from arguments (passed to the test runner per test)
        agent_config = {
            "loop": args.model_loop,
            "model_provider": args.model_provider,
            "model_name": args.model_name,
            "model_base_url": args.model_base_url
        }

        # Log configuration
        logger.info("=== Configuration ===")
        logger.info(f"Computer server: {'STARTED' if server_thread else 'EXTERNAL'}")
        logger.info(f"Tests directory: {args.tests_dir}")
        logger.info(f"Max turns per test: {args.max_turns}")
        logger.info(f"Delay between tests: {args.delay_between_tests}s")
        logger.info(f"Jan app path: {args.jan_app_path}")
        logger.info(f"Jan app exists: {os.path.exists(args.jan_app_path)}")
        logger.info(f"Jan process name: {args.jan_process_name}")
        logger.info(f"Model: {args.model_name}")
        logger.info(f"Model URL: {args.model_base_url}")
        logger.info(f"Model provider: {args.model_provider}")
        logger.info(f"ReportPortal integration: {'ENABLED' if args.enable_reportportal else 'DISABLED'}")
        if args.enable_reportportal:
            logger.info(f"ReportPortal endpoint: {args.rp_endpoint}")
            logger.info(f"ReportPortal project: {args.rp_project}")
            logger.info(f"ReportPortal token: {'SET' if args.rp_token else 'NOT SET'}")
            logger.info(f"Launch name: {args.launch_name if args.launch_name else 'AUTO-GENERATED'}")
        logger.info("======================")

        # Scan all test files
        test_files = scan_test_files(args.tests_dir)

        if not test_files:
            logger.warning(f"No test files found in directory: {args.tests_dir}")
            return

        logger.info(f"Found {len(test_files)} test files")

        # Track test results for final exit code
        test_results = {"passed": 0, "failed": 0, "total": len(test_files)}

        # Initialize ReportPortal client only if enabled
        rp_client = None
        launch_id = None

        if args.enable_reportportal:
            try:
                rp_client = RPClient(
                    endpoint=args.rp_endpoint,
                    project=args.rp_project,
                    api_key=args.rp_token
                )

                # Start ReportPortal launch
                current_time = datetime.now().strftime("%Y%m%d_%H%M%S")

                # Use custom launch name if provided, otherwise generate default
                if args.launch_name:
                    launch_name = args.launch_name
                    logger.info(f"Using custom launch name: {launch_name}")
                else:
                    launch_name = f"E2E Test Run - {current_time}"
                    logger.info(f"Using auto-generated launch name: {launch_name}")

                launch_id = rp_client.start_launch(
                    name=launch_name,
                    start_time=timestamp(),
                    description=f"Automated E2E test run with {len(test_files)} test cases\n"
                                f"Model: {args.model_name}\n"
                                f"Max turns: {args.max_turns}"
                )

                logger.info(f"Started ReportPortal launch: {launch_name}")
            except Exception as e:
                # ReportPortal is optional: degrade to local-only mode on failure.
                logger.error(f"Failed to initialize ReportPortal: {e}")
                logger.warning("Continuing without ReportPortal integration...")
                rp_client = None
                launch_id = None
        else:
            logger.info("Running in local development mode - results will not be uploaded to ReportPortal")

        # Start computer environment
        logger.info("Initializing computer environment...")

        # Get platform-specific computer configuration
        computer_config = get_computer_config()
        logger.info(f"Using computer config: {computer_config}")

        computer = Computer(
            os_type=computer_config["os_type"],
            use_host_computer_server=True
        )
        await computer.run()
        logger.info("Computer environment ready")

        # Run each test sequentially with turn monitoring
        for i, test_data in enumerate(test_files, 1):
            logger.info(f"Running test {i}/{len(test_files)}: {test_data['path']}")

            try:
                # Pass all configs to test runner
                test_result = await run_single_test_with_timeout(
                    computer=computer,
                    test_data=test_data,
                    rp_client=rp_client,  # Can be None
                    launch_id=launch_id,  # Can be None
                    max_turns=args.max_turns,
                    jan_app_path=args.jan_app_path,
                    jan_process_name=args.jan_process_name,
                    agent_config=agent_config,
                    enable_reportportal=args.enable_reportportal
                )

                # Track test result - properly handle different return formats
                test_passed = False

                if test_result:
                    # Check different possible return formats
                    if isinstance(test_result, dict):
                        # Dictionary format: check 'success' key
                        test_passed = test_result.get('success', False)
                    elif isinstance(test_result, bool):
                        # Boolean format: direct boolean value
                        test_passed = test_result
                    elif hasattr(test_result, 'success'):
                        # Object format: check success attribute
                        test_passed = getattr(test_result, 'success', False)
                    else:
                        # Any truthy value is considered success
                        test_passed = bool(test_result)
                else:
                    test_passed = False

                # Update counters and log result
                if test_passed:
                    test_results["passed"] += 1
                    logger.info(f"[SUCCESS] Test {i} PASSED: {test_data['path']}")
                else:
                    test_results["failed"] += 1
                    logger.error(f"[FAILED] Test {i} FAILED: {test_data['path']}")

                # Debug log for troubleshooting
                logger.info(f"[INFO] Debug - Test result: type={type(test_result)}, value={test_result}, success_field={test_result.get('success', 'N/A') if isinstance(test_result, dict) else 'N/A'}, final_passed={test_passed}")

            except Exception as e:
                # A crashing test counts as failed; the run continues.
                test_results["failed"] += 1
                logger.error(f"[FAILED] Test {i} FAILED with exception: {test_data['path']} - {e}")

            # Add delay between tests (skipped after the last one)
            if i < len(test_files):
                logger.info(f"Waiting {args.delay_between_tests} seconds before next test...")
                await asyncio.sleep(args.delay_between_tests)

        # Log final test results summary
        logger.info("=" * 50)
        logger.info("TEST EXECUTION SUMMARY")
        logger.info("=" * 50)
        logger.info(f"Total tests: {test_results['total']}")
        logger.info(f"Passed: {test_results['passed']}")
        logger.info(f"Failed: {test_results['failed']}")
        logger.info(f"Success rate: {(test_results['passed']/test_results['total']*100):.1f}%")
        logger.info("=" * 50)

        if test_results["failed"] > 0:
            logger.error(f"[FAILED] Test execution completed with {test_results['failed']} failures!")
            final_exit_code = 1
        else:
            logger.info("[SUCCESS] All tests completed successfully!")
            final_exit_code = 0

    except KeyboardInterrupt:
        logger.info("Test execution interrupted by user")
        final_exit_code = 1
    except Exception as e:
        logger.error(f"Error in main execution: {e}")
        final_exit_code = 1
    finally:
        # Finish ReportPortal launch only if it was started.
        # NOTE(review): rp_client/launch_id are first assigned well inside the
        # try block; if an exception fires before that point (e.g. in
        # scan_test_files) they are unbound here and this condition raises
        # NameError — worth confirming/guarding.
        if args.enable_reportportal and rp_client and launch_id:
            try:
                rp_client.finish_launch(
                    launch_id=launch_id,
                    end_time=timestamp()
                )
                rp_client.session.close()
                logger.info("ReportPortal launch finished and session closed")
            except Exception as e:
                logger.error(f"Error finishing ReportPortal launch: {e}")

        # Note: daemon thread will automatically terminate when main program ends
        if server_thread:
            logger.info("Computer server will stop when main program exits (daemon thread)")

    # Exit with appropriate code based on test results
    logger.info(f"Exiting with code: {final_exit_code}")
    exit(final_exit_code)
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # Entry point: run the async test runner to completion.
    asyncio.run(main())
|
||||||
439
autoqa/reportportal_handler.py
Normal file
439
autoqa/reportportal_handler.py
Normal file
@ -0,0 +1,439 @@
|
|||||||
|
import os
|
||||||
|
import json
|
||||||
|
import mimetypes
|
||||||
|
import re
|
||||||
|
import logging
|
||||||
|
import glob
|
||||||
|
import platform
|
||||||
|
from reportportal_client.helpers import timestamp
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
def upload_turn_folder(client, test_item_id, turn_path, turn_name, force_fail=False):
    """
    Upload one turn folder's artifacts to ReportPortal as a nested STEP item.

    JSON files are logged pretty-printed at INFO level; PNG files are
    attached as screenshots. A warning is logged when the folder yields no
    uploadable content. The step finishes FAILED when ``force_fail`` is set
    or when any file failed to parse/attach, otherwise PASSED.
    """
    step_item_id = client.start_test_item(
        parent_item_id=test_item_id,
        name=turn_name,
        start_time=timestamp(),
        item_type="STEP"
    )

    uploaded_anything = False
    had_errors = False  # any per-file failure marks this step as FAILED

    for file_name in sorted(os.listdir(turn_path)):
        file_path = os.path.join(turn_path, file_name)

        if file_name.endswith(".json"):
            # Pretty-print JSON payloads into the step's log stream.
            try:
                with open(file_path, "r", encoding="utf-8") as handle:
                    payload = json.load(handle)
                client.log(
                    time=timestamp(),
                    level="INFO",
                    message=f"[{file_name}]\n{json.dumps(payload, indent=2)}",
                    item_id=step_item_id
                )
                uploaded_anything = True
            except Exception as e:
                client.log(
                    time=timestamp(),
                    level="ERROR",
                    message=f"[ERROR parsing {file_name}] {str(e)}",
                    item_id=step_item_id
                )
                had_errors = True

        elif file_name.endswith(".png"):
            # Attach screenshots as binary attachments on the step.
            try:
                with open(file_path, "rb") as image:
                    client.log(
                        time=timestamp(),
                        level="INFO",
                        message=f"Screenshot: {file_name}",
                        item_id=step_item_id,
                        attachment={
                            "name": file_name,
                            "data": image.read(),
                            "mime": mimetypes.guess_type(file_name)[0] or "image/png"
                        }
                    )
                uploaded_anything = True
            except Exception as e:
                client.log(
                    time=timestamp(),
                    level="ERROR",
                    message=f"[ERROR attaching {file_name}] {str(e)}",
                    item_id=step_item_id
                )
                had_errors = True

    if not uploaded_anything:
        client.log(
            time=timestamp(),
            level="WARNING",
            message="No data found in this turn.",
            item_id=step_item_id
        )

    # A forced failure (failed test case) always wins over per-file status.
    step_status = "FAILED" if (force_fail or had_errors) else "PASSED"

    client.finish_test_item(
        item_id=step_item_id,
        end_time=timestamp(),
        status=step_status
    )
|
||||||
|
|
||||||
|
def extract_test_result_from_trajectory(trajectory_dir):
    """
    Extract test result from the last turn's API response.

    Returns True only when the final assistant message contains a literal
    {"result": True} pattern; every other outcome — {"result": False},
    missing folders/files, malformed JSON, or an unexpected structure —
    yields False. Never raises.
    """
    if not trajectory_dir or not os.path.exists(trajectory_dir):
        logger.warning(f"Trajectory directory not found: {trajectory_dir}")
        return False

    try:
        # Collect turn_* subdirectories; lexicographic order picks the last turn.
        turns = sorted(
            entry for entry in os.listdir(trajectory_dir)
            if entry.startswith("turn_") and os.path.isdir(os.path.join(trajectory_dir, entry))
        )
        if not turns:
            logger.warning("No turn folders found")
            return False

        last_turn = turns[-1]
        last_turn_path = os.path.join(trajectory_dir, last_turn)
        logger.info(f"Checking result in last turn: {last_turn}")

        # Only api_call_*_response.json files hold model output.
        responses = sorted(
            entry for entry in os.listdir(last_turn_path)
            if entry.startswith("api_call_") and entry.endswith("_response.json")
        )
        if not responses:
            logger.warning("No API response files found in last turn")
            return False

        last_response_file = responses[-1]
        response_file_path = os.path.join(last_turn_path, last_response_file)
        logger.info(f"Checking response file: {last_response_file}")

        with open(response_file_path, 'r', encoding='utf-8') as f:
            data = json.load(f)

        # Navigate response -> choices[-1] -> message -> content.
        if 'response' in data and 'choices' in data['response'] and data['response']['choices']:
            final_choice = data['response']['choices'][-1]
            if 'message' in final_choice and 'content' in final_choice['message']:
                content = final_choice['message']['content']
                logger.info(f"Last response content: {content}")

                # The agent reports its verdict as a Python-literal dict, so we
                # match capitalized True/False rather than JSON true/false.
                if re.search(r'\{\s*"result"\s*:\s*True\s*\}', content):
                    logger.info(f"Found test result: True - PASSED")
                    return True
                if re.search(r'\{\s*"result"\s*:\s*False\s*\}', content):
                    logger.info(f"Found test result: False - FAILED")
                    return False
                logger.warning("No valid result pattern found in response content - marking as FAILED")
                return False

        logger.warning("Could not extract content from response structure")
        return False

    except Exception as e:
        logger.error(f"Error extracting test result: {e}")
        return False
|
||||||
|
|
||||||
|
def get_jan_log_paths(is_nightly=False):
    """
    Get Jan application log file paths based on OS and version (nightly vs regular).

    Returns a list of glob patterns pointing at the Jan log directory for the
    current platform, or an empty list when the OS is not recognized.
    """
    app_name = "Jan-nightly" if is_nightly else "Jan"
    system = platform.system().lower()

    if system == "windows":
        # Windows: %APPDATA%\Jan(-nightly)\data\logs\*.log
        base = os.path.expandvars("%APPDATA%")
        return [f"{base}\\{app_name}\\data\\logs\\*.log"]

    if system == "darwin":  # macOS
        # macOS: ~/Library/Application Support/Jan(-nightly)/data/logs/*.log
        base = os.path.expanduser("~")
        return [f"{base}/Library/Application Support/{app_name}/data/logs/*.log"]

    if system == "linux":
        # Linux: ~/.local/share/Jan(-nightly)/data/logs/*.log
        base = os.path.expanduser("~")
        return [f"{base}/.local/share/{app_name}/data/logs/*.log"]

    logger.warning(f"Unsupported OS: {system}")
    return []
|
||||||
|
|
||||||
|
def upload_jan_logs(client, test_item_id, is_nightly=False, max_log_files=5):
    """
    Upload Jan application log files to ReportPortal.

    Globs the platform-specific Jan log locations (from get_jan_log_paths),
    keeps the `max_log_files` most recently modified files, skips any file
    over 50MB, and attaches each as text/plain to the given test item.
    Per-file failures are logged (locally and to the RP item) and do not
    stop the remaining uploads; the function never raises.
    """
    log_patterns = get_jan_log_paths(is_nightly)
    app_type = "nightly" if is_nightly else "regular"

    logger.info(f"Looking for Jan {app_type} logs...")

    all_log_files = []
    for pattern in log_patterns:
        try:
            log_files = glob.glob(pattern)
            all_log_files.extend(log_files)
            logger.info(f"Found {len(log_files)} log files matching pattern: {pattern}")
        except Exception as e:
            logger.error(f"Error searching for logs with pattern {pattern}: {e}")

    if not all_log_files:
        logger.warning(f"No Jan {app_type} log files found")
        client.log(
            time=timestamp(),
            level="WARNING",
            message=f"[INFO] No Jan {app_type} application logs found",
            item_id=test_item_id
        )
        return

    # Sort by modification time (newest first) and limit to max_log_files
    try:
        all_log_files.sort(key=lambda x: os.path.getmtime(x), reverse=True)
        log_files_to_upload = all_log_files[:max_log_files]

        logger.info(f"Uploading {len(log_files_to_upload)} most recent Jan {app_type} log files")

        for i, log_file in enumerate(log_files_to_upload, 1):
            try:
                file_size = os.path.getsize(log_file)
                file_name = os.path.basename(log_file)

                # Check file size limit (50MB = 50 * 1024 * 1024 bytes)
                max_file_size = 50 * 1024 * 1024  # 50MB
                if file_size > max_file_size:
                    logger.warning(f"Log file {file_name} is too large ({file_size} bytes > {max_file_size} bytes), skipping upload")
                    client.log(
                        time=timestamp(),
                        level="WARNING",
                        message=f"[INFO] Log file {file_name} skipped (size: {file_size} bytes > 50MB limit)",
                        item_id=test_item_id
                    )
                    continue

                logger.info(f"Uploading log file {i}/{len(log_files_to_upload)}: {file_name} ({file_size} bytes)")

                # Read log file content (safe to read since we checked size).
                # errors='ignore' tolerates partially-written or mixed-encoding logs.
                with open(log_file, 'r', encoding='utf-8', errors='ignore') as f:
                    log_content = f.read()

                # Upload as text attachment
                client.log(
                    time=timestamp(),
                    level="INFO",
                    message=f"[INFO] Jan {app_type} application log: {file_name}",
                    item_id=test_item_id,
                    attachment={
                        "name": f"jan_{app_type}_log_{i}_{file_name}",
                        "data": log_content.encode('utf-8'),
                        "mime": "text/plain"
                    }
                )

                logger.info(f"Successfully uploaded log: {file_name}")

            except Exception as e:
                # A single bad file must not abort the remaining uploads.
                logger.error(f"Error uploading log file {log_file}: {e}")
                client.log(
                    time=timestamp(),
                    level="ERROR",
                    message=f"Failed to upload log file {os.path.basename(log_file)}: {str(e)}",
                    item_id=test_item_id
                )

        # Add summary log
        client.log(
            time=timestamp(),
            level="INFO",
            message=f"[INFO] Uploaded {len(log_files_to_upload)} Jan {app_type} log files (total available: {len(all_log_files)})",
            item_id=test_item_id
        )

    except Exception as e:
        logger.error(f"Error processing Jan logs: {e}")
        client.log(
            time=timestamp(),
            level="ERROR",
            message=f"Error processing Jan {app_type} logs: {str(e)}",
            item_id=test_item_id
        )
|
||||||
|
|
||||||
|
def upload_test_results_to_rp(client, launch_id, test_path, trajectory_dir, force_stopped=False, video_path=None, is_nightly=False):
    """
    Upload test results to ReportPortal with proper status based on test result.

    Flow: create a TEST item, log the overall verdict, attach the screen
    recording and Jan application logs, upload every turn_* folder, then
    finish the item with PASSED/FAILED. When the trajectory directory is
    missing, a test item is created and immediately finished as FAILED
    (early-return branch below). `force_stopped=True` forces FAILED
    regardless of the trajectory's recorded result.
    """
    if not trajectory_dir or not os.path.exists(trajectory_dir):
        logger.warning(f"Trajectory directory not found: {trajectory_dir}")
        # Turn "path/to/case.txt" into a flat RP-friendly item name.
        formatted_test_path = test_path.replace('\\', '/').replace('.txt', '').replace('/', '__')
        test_item_id = client.start_test_item(
            launch_id=launch_id,
            name=formatted_test_path,
            start_time=timestamp(),
            item_type="TEST",
            description=f"Test case from: {test_path}"
        )
        client.log(
            time=timestamp(),
            level="ERROR",
            message="[FAILED] TEST FAILED [FAILED]\nNo trajectory directory found",
            item_id=test_item_id
        )

        # Upload video if available
        if video_path and os.path.exists(video_path):
            try:
                with open(video_path, "rb") as video_file:
                    client.log(
                        time=timestamp(),
                        level="INFO",
                        message="Screen recording of test execution",
                        item_id=test_item_id,
                        attachment={
                            "name": f"test_recording_{formatted_test_path}.mp4",
                            # NOTE(review): attachment is named .mp4 but declared
                            # as "video/x-msvideo" (AVI) — confirm which container
                            # the recorder actually produces ("video/mp4" likely).
                            "data": video_file.read(),
                            "mime": "video/x-msvideo"
                        }
                    )
                logger.info(f"Uploaded video for failed test: {video_path}")
            except Exception as e:
                logger.error(f"Error uploading video: {e}")

        client.finish_test_item(
            item_id=test_item_id,
            end_time=timestamp(),
            status="FAILED"
        )
        return

    formatted_test_path = test_path.replace('\\', '/').replace('.txt', '').replace('/', '__')

    # Determine final status
    if force_stopped:
        final_status = "FAILED"
        status_message = "exceeded maximum turn limit (30 turns)"
    else:
        # Only an explicit {"result": True} in the last turn counts as PASSED.
        test_result = extract_test_result_from_trajectory(trajectory_dir)
        if test_result is True:
            final_status = "PASSED"
            status_message = "completed successfully with positive result"
        else:
            final_status = "FAILED"
            status_message = "no valid success result found"

    # Create test item
    test_item_id = client.start_test_item(
        launch_id=launch_id,
        name=formatted_test_path,
        start_time=timestamp(),
        item_type="TEST",
        description=f"Test case from: {test_path}"
    )

    try:
        turn_folders = [f for f in os.listdir(trajectory_dir)
                        if os.path.isdir(os.path.join(trajectory_dir, f)) and f.startswith("turn_")]

        # Add clear status log
        status_emoji = "[SUCCESS]" if final_status == "PASSED" else "[FAILED]"
        client.log(
            time=timestamp(),
            level="INFO" if final_status == "PASSED" else "ERROR",
            message=f"{status_emoji} TEST {final_status} {status_emoji}\nReason: {status_message}\nTotal turns: {len(turn_folders)}",
            item_id=test_item_id
        )

        # Upload screen recording video first
        if video_path and os.path.exists(video_path):
            logger.info(f"Attempting to upload video: {video_path}")
            logger.info(f"Video file size: {os.path.getsize(video_path)} bytes")
            try:
                with open(video_path, "rb") as video_file:
                    video_data = video_file.read()
                    logger.info(f"Read video data: {len(video_data)} bytes")
                    client.log(
                        time=timestamp(),
                        level="INFO",
                        message="[INFO] Screen recording of test execution",
                        item_id=test_item_id,
                        attachment={
                            "name": f"test_recording_{formatted_test_path}.mp4",
                            # NOTE(review): same .mp4-name / AVI-mime mismatch as
                            # the early-return branch above — confirm.
                            "data": video_data,
                            "mime": "video/x-msvideo"
                        }
                    )
                logger.info(f"Successfully uploaded screen recording: {video_path}")
            except Exception as e:
                # Video upload failure is non-fatal; the test result still stands.
                logger.error(f"Error uploading screen recording: {e}")
                client.log(
                    time=timestamp(),
                    level="WARNING",
                    message=f"Failed to upload screen recording: {str(e)}",
                    item_id=test_item_id
                )
        else:
            logger.warning(f"Video upload skipped - video_path: {video_path}, exists: {os.path.exists(video_path) if video_path else 'N/A'}")
            client.log(
                time=timestamp(),
                level="WARNING",
                message="No screen recording available for this test",
                item_id=test_item_id
            )

        # Upload Jan application logs
        logger.info("Uploading Jan application logs...")
        upload_jan_logs(client, test_item_id, is_nightly=is_nightly, max_log_files=5)

        # Upload all turn data with appropriate status
        # If test failed, mark all turns as failed
        force_fail_turns = (final_status == "FAILED")

        for turn_folder in sorted(turn_folders):
            turn_path = os.path.join(trajectory_dir, turn_folder)
            upload_turn_folder(client, test_item_id, turn_path, turn_folder, force_fail=force_fail_turns)

        # Finish with correct status
        client.finish_test_item(
            item_id=test_item_id,
            end_time=timestamp(),
            status=final_status
        )

        logger.info(f"Uploaded test results for {formatted_test_path}: {final_status}")

    except Exception as e:
        # Any unexpected error while uploading turns the whole item FAILED so
        # it is never left unfinished in ReportPortal.
        logger.error(f"Error uploading test results: {e}")
        client.finish_test_item(
            item_id=test_item_id,
            end_time=timestamp(),
            status="FAILED"
        )
|
||||||
18
autoqa/requirements.txt
Normal file
18
autoqa/requirements.txt
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
# Core dependencies
|
||||||
|
cua-computer[all]~=0.3.5
|
||||||
|
# NOTE: this pin and the git-pinned line below name the same package
# (cua-agent); pip rejects duplicate requirements — keep only one of the two.
cua-agent[all]~=0.3.0
cua-agent @ git+https://github.com/janhq/cua.git@compute-agent-0.3.0-patch#subdirectory=libs/python/agent
|
||||||
|
|
||||||
|
# ReportPortal integration
|
||||||
|
reportportal-client~=5.6.5
|
||||||
|
|
||||||
|
# Screen recording and automation
|
||||||
|
opencv-python~=4.10.0
|
||||||
|
numpy~=2.2.6
|
||||||
|
PyAutoGUI~=0.9.54
|
||||||
|
|
||||||
|
# System utilities
|
||||||
|
psutil~=7.0.0
|
||||||
|
|
||||||
|
# Server component
|
||||||
|
cua-computer-server~=0.1.19
|
||||||
84
autoqa/screen_recorder.py
Normal file
84
autoqa/screen_recorder.py
Normal file
@ -0,0 +1,84 @@
|
|||||||
|
import cv2
|
||||||
|
import numpy as np
|
||||||
|
import pyautogui
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class ScreenRecorder:
    """
    Background screen recorder: captures pyautogui screenshots on a daemon
    thread and writes them to a video file via OpenCV's VideoWriter.
    """

    def __init__(self, output_path, fps=10):
        self.output_path = output_path
        self.fps = fps
        # recording doubles as the run flag polled by the capture thread.
        self.recording = False
        # writer is created lazily inside _record_screen, not here.
        self.writer = None
        self.thread = None

    def start_recording(self):
        """Start screen recording on a background daemon thread (no-op if already running)."""
        if self.recording:
            logger.warning("Recording already in progress")
            return

        self.recording = True
        # daemon=True so a hung capture loop cannot block process exit.
        self.thread = threading.Thread(target=self._record_screen, daemon=True)
        self.thread.start()
        logger.info(f"Started screen recording: {self.output_path}")

    def stop_recording(self):
        """Stop screen recording and release the video writer (no-op if not running)."""
        if not self.recording:
            logger.warning("No recording in progress")
            return

        # Clearing the flag makes the capture loop exit on its next iteration.
        self.recording = False
        if self.thread:
            self.thread.join(timeout=5)
        if self.writer:
            # _record_screen's finally block also releases; the double release
            # appears intentional as a safety net — presumably harmless, confirm
            # VideoWriter.release() is idempotent for the OpenCV version in use.
            self.writer.release()
        logger.info(f"Stopped screen recording: {self.output_path}")

    def _record_screen(self):
        """Internal method to record screen"""
        try:
            # Get screen dimensions
            # NOTE(review): assumes pyautogui.screenshot() frames match
            # pyautogui.size(); on HiDPI/Retina displays these can differ,
            # which would produce a broken video — confirm on macOS runners.
            screen_size = pyautogui.size()

            # Try MP4 with H264 codec for better compatibility
            fourcc = cv2.VideoWriter_fourcc(*'mp4v')  # or 'H264'
            output_path_mp4 = self.output_path

            self.writer = cv2.VideoWriter(
                output_path_mp4,
                fourcc,
                self.fps,
                screen_size
            )

            while self.recording:
                try:
                    # Capture screen
                    screenshot = pyautogui.screenshot()

                    # Convert PIL image to numpy array
                    frame = np.array(screenshot)

                    # Convert RGB to BGR (OpenCV uses BGR)
                    frame = cv2.cvtColor(frame, cv2.COLOR_RGB2BGR)

                    # Write frame
                    self.writer.write(frame)

                    # Control FPS (approximate: ignores capture/encode time)
                    time.sleep(1.0 / self.fps)

                except Exception as e:
                    # A single failed frame ends the recording rather than
                    # spinning on a broken capture source.
                    logger.error(f"Error capturing frame: {e}")
                    break

        except Exception as e:
            logger.error(f"Error in screen recording: {e}")
        finally:
            if self.writer:
                self.writer.release()
|
||||||
116
autoqa/scripts/README.md
Normal file
116
autoqa/scripts/README.md
Normal file
@ -0,0 +1,116 @@
|
|||||||
|
# AutoQA Scripts
|
||||||
|
|
||||||
|
This directory contains platform-specific scripts used by the AutoQA GitHub Actions workflow. These scripts help maintain a cleaner and more maintainable workflow file by extracting complex inline scripts into separate files.
|
||||||
|
|
||||||
|
## Directory Structure
|
||||||
|
|
||||||
|
```text
|
||||||
|
autoqa/scripts/
|
||||||
|
├── setup_permissions.sh # Setup executable permissions for all scripts
|
||||||
|
├── windows_cleanup.ps1 # Windows: Clean existing Jan installations
|
||||||
|
├── windows_download.ps1 # Windows: Download Jan app installer
|
||||||
|
├── windows_install.ps1 # Windows: Install Jan app
|
||||||
|
├── windows_post_cleanup.ps1 # Windows: Post-test cleanup
|
||||||
|
├── run_tests.ps1 # Windows: Run AutoQA tests
|
||||||
|
├── ubuntu_cleanup.sh # Ubuntu: Clean existing Jan installations
|
||||||
|
├── ubuntu_download.sh # Ubuntu: Download Jan app (.deb)
|
||||||
|
├── ubuntu_install.sh # Ubuntu: Install Jan app
|
||||||
|
├── ubuntu_post_cleanup.sh # Ubuntu: Post-test cleanup
|
||||||
|
├── macos_cleanup.sh # macOS: Clean existing Jan installations
|
||||||
|
├── macos_download.sh # macOS: Download Jan app (.dmg)
|
||||||
|
├── macos_install.sh # macOS: Install Jan app
|
||||||
|
├── macos_post_cleanup.sh # macOS: Post-test cleanup
|
||||||
|
├── run_tests.sh # Unix: Run AutoQA tests (Ubuntu/macOS)
|
||||||
|
├── README.md # This file
|
||||||
|
└── PERMISSIONS.md # Permission setup documentation
|
||||||
|
```
|
||||||
|
|
||||||
|
## Script Functions
|
||||||
|
|
||||||
|
### Windows Scripts (.ps1)
|
||||||
|
|
||||||
|
- **windows_cleanup.ps1**: Removes existing Jan installations and kills running processes
|
||||||
|
- **windows_download.ps1**: Downloads Jan installer with priority-based URL selection
|
||||||
|
- **windows_install.ps1**: Installs Jan app and sets environment variables
|
||||||
|
- **windows_post_cleanup.ps1**: Comprehensive cleanup after tests including uninstallation
|
||||||
|
- **run_tests.ps1**: Runs the AutoQA Python tests with proper arguments
|
||||||
|
|
||||||
|
### Ubuntu Scripts (.sh)
|
||||||
|
|
||||||
|
- **ubuntu_cleanup.sh**: Removes existing Jan installations and kills running processes
|
||||||
|
- **ubuntu_download.sh**: Downloads Jan .deb package with priority-based URL selection
|
||||||
|
- **ubuntu_install.sh**: Installs Jan .deb package and sets environment variables
|
||||||
|
- **ubuntu_post_cleanup.sh**: Comprehensive cleanup after tests including package removal
|
||||||
|
|
||||||
|
### macOS Scripts (.sh)
|
||||||
|
|
||||||
|
- **macos_cleanup.sh**: Removes existing Jan installations and kills running processes
|
||||||
|
- **macos_download.sh**: Downloads Jan .dmg package with priority-based URL selection
|
||||||
|
- **macos_install.sh**: Mounts DMG, extracts .app, and installs to Applications
|
||||||
|
- **macos_post_cleanup.sh**: Comprehensive cleanup after tests
|
||||||
|
|
||||||
|
### Common Scripts
|
||||||
|
|
||||||
|
- **setup_permissions.sh**: Automatically sets executable permissions for all shell scripts
|
||||||
|
- **run_tests.sh**: Platform-agnostic test runner for Unix-based systems (Ubuntu/macOS)
|
||||||
|
|
||||||
|
## Usage in GitHub Actions
|
||||||
|
|
||||||
|
These scripts are called from the `.github/workflows/autoqa.yml` workflow file:
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# Setup permissions first (Ubuntu/macOS)
|
||||||
|
- name: Setup script permissions
|
||||||
|
run: |
|
||||||
|
chmod +x autoqa/scripts/setup_permissions.sh
|
||||||
|
./autoqa/scripts/setup_permissions.sh
|
||||||
|
|
||||||
|
# Then use scripts without chmod
|
||||||
|
- name: Clean existing Jan installations
|
||||||
|
run: |
|
||||||
|
./autoqa/scripts/ubuntu_cleanup.sh
|
||||||
|
|
||||||
|
# Windows example (no chmod needed)
|
||||||
|
- name: Clean existing Jan installations
|
||||||
|
shell: powershell
|
||||||
|
run: |
|
||||||
|
.\autoqa\scripts\windows_cleanup.ps1
|
||||||
|
```
|
||||||
|
|
||||||
|
## Benefits
|
||||||
|
|
||||||
|
1. **Maintainability**: Complex scripts are in separate files, easier to read and modify
|
||||||
|
2. **Reusability**: Scripts can be reused across different workflows or locally
|
||||||
|
3. **Testing**: Scripts can be tested independently
|
||||||
|
4. **Version Control**: Better diff tracking for script changes
|
||||||
|
5. **Platform Consistency**: Similar functionality across platforms in separate files
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
When modifying these scripts:
|
||||||
|
|
||||||
|
1. Test them locally on the respective platforms
|
||||||
|
2. Ensure proper error handling and exit codes
|
||||||
|
3. Follow platform-specific best practices
|
||||||
|
4. Update this README if new scripts are added
|
||||||
|
|
||||||
|
## Script Parameters
|
||||||
|
|
||||||
|
### Windows Scripts
|
||||||
|
|
||||||
|
- Most scripts accept `-IsNightly` parameter to handle nightly vs stable builds
|
||||||
|
- Download script accepts multiple URL sources with priority ordering
|
||||||
|
|
||||||
|
### Unix Scripts
|
||||||
|
|
||||||
|
- Most scripts accept positional parameters for nightly flag and URLs
|
||||||
|
- Scripts use `$1`, `$2`, etc. for parameter access
|
||||||
|
|
||||||
|
## Environment Variables
|
||||||
|
|
||||||
|
Scripts set these environment variables for subsequent workflow steps:
|
||||||
|
|
||||||
|
- `JAN_APP_URL`: The selected Jan app download URL
|
||||||
|
- `IS_NIGHTLY`: Boolean flag indicating if it's a nightly build
|
||||||
|
- `JAN_APP_PATH`: Path to the installed Jan executable
|
||||||
|
- `JAN_PROCESS_NAME`: Name of the Jan process for monitoring
|
||||||
34
autoqa/scripts/macos_cleanup.sh
Normal file
34
autoqa/scripts/macos_cleanup.sh
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
#!/bin/bash
# macOS cleanup script for Jan app
#
# Removes every visible trace of Jan (stable and nightly) before a test run:
# running processes, app bundles, and per-user data/cache/preference folders.

echo "Cleaning existing Jan installations..."

# Kill any running Jan processes (both regular and nightly)
# `|| true` keeps the script going when no matching process exists.
pkill -f "Jan" || true
pkill -f "jan" || true
pkill -f "Jan-nightly" || true
pkill -f "jan-nightly" || true

# Remove Jan app directories
rm -rf /Applications/Jan.app
rm -rf /Applications/Jan-nightly.app
rm -rf ~/Applications/Jan.app
rm -rf ~/Applications/Jan-nightly.app

# Remove Jan data folders (both regular and nightly)
rm -rf ~/Library/Application\ Support/Jan
rm -rf ~/Library/Application\ Support/Jan-nightly
rm -rf ~/Library/Application\ Support/jan.ai.app
rm -rf ~/Library/Application\ Support/jan-nightly.ai.app
rm -rf ~/Library/Preferences/jan.*
rm -rf ~/Library/Preferences/jan-nightly.*
rm -rf ~/Library/Caches/jan.*
rm -rf ~/Library/Caches/jan-nightly.*
rm -rf ~/Library/Caches/jan.ai.app
rm -rf ~/Library/Caches/jan-nightly.ai.app
rm -rf ~/Library/WebKit/jan.ai.app
rm -rf ~/Library/WebKit/jan-nightly.ai.app
rm -rf ~/Library/Saved\ Application\ State/jan.ai.app
rm -rf ~/Library/Saved\ Application\ State/jan-nightly.ai.app

echo "Jan cleanup completed"
|
||||||
49
autoqa/scripts/macos_download.sh
Normal file
49
autoqa/scripts/macos_download.sh
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
#!/bin/bash
# macOS download script for Jan app
#
# Resolves the Jan app download URL from three sources (priority order below),
# exports JAN_APP_URL / IS_NIGHTLY for later workflow steps via $GITHUB_ENV,
# then downloads the DMG to /tmp/jan-installer.dmg.

WORKFLOW_INPUT_URL="$1"
WORKFLOW_INPUT_IS_NIGHTLY="$2"
REPO_VARIABLE_URL="$3"
REPO_VARIABLE_IS_NIGHTLY="$4"
DEFAULT_URL="$5"
DEFAULT_IS_NIGHTLY="$6"

# Determine Jan app URL and nightly flag from multiple sources (priority order):
# 1. Workflow dispatch input (manual trigger)
# 2. Repository variable JAN_APP_URL
# 3. Default URL from env

JAN_APP_URL=""
IS_NIGHTLY="false"

if [ -n "$WORKFLOW_INPUT_URL" ]; then
    JAN_APP_URL="$WORKFLOW_INPUT_URL"
    IS_NIGHTLY="$WORKFLOW_INPUT_IS_NIGHTLY"
    echo "Using Jan app URL from workflow input: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
elif [ -n "$REPO_VARIABLE_URL" ]; then
    JAN_APP_URL="$REPO_VARIABLE_URL"
    IS_NIGHTLY="$REPO_VARIABLE_IS_NIGHTLY"
    echo "Using Jan app URL from repository variable: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
else
    JAN_APP_URL="$DEFAULT_URL"
    IS_NIGHTLY="$DEFAULT_IS_NIGHTLY"
    echo "Using default Jan app URL: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
fi

# Export for later steps ($GITHUB_ENV quoted in case the runner path has spaces)
echo "JAN_APP_URL=$JAN_APP_URL" >> "$GITHUB_ENV"
echo "IS_NIGHTLY=$IS_NIGHTLY" >> "$GITHUB_ENV"

echo "Downloading Jan app from: $JAN_APP_URL"
# --fail makes curl exit non-zero on HTTP errors (e.g. 404) instead of saving
# the server's error page as the installer; without it the file-exists check
# below would accept a broken download.
if ! curl --fail -L -o "/tmp/jan-installer.dmg" "$JAN_APP_URL"; then
    echo "[FAILED] Failed to download Jan app"
    exit 1
fi

if [ ! -f "/tmp/jan-installer.dmg" ]; then
    echo "[FAILED] Failed to download Jan app"
    exit 1
fi

echo "[SUCCESS] Successfully downloaded Jan app"
ls -la "/tmp/jan-installer.dmg"
|
||||||
91
autoqa/scripts/macos_install.sh
Normal file
91
autoqa/scripts/macos_install.sh
Normal file
@ -0,0 +1,91 @@
|
|||||||
|
#!/bin/bash
# macOS install script for Jan app
#
# Mounts the downloaded DMG, copies the .app bundle to /Applications,
# locates the main executable inside Contents/MacOS, exports its path and
# process name for the test step, then waits for Jan's first-run setup.

echo "Installing Jan app from DMG..."

# Mount the DMG
hdiutil attach "/tmp/jan-installer.dmg" -mountpoint "/tmp/jan-mount"

# Find the .app file in the mounted DMG
APP_FILE=$(find "/tmp/jan-mount" -name "*.app" -type d | head -1)

if [ -z "$APP_FILE" ]; then
    echo "[Failed] No .app file found in DMG"
    # Detach best-effort so a bad DMG doesn't leave a stale mountpoint.
    hdiutil detach "/tmp/jan-mount" || true
    exit 1
fi

echo "Found app file: $APP_FILE"

# Copy to Applications directory
cp -R "$APP_FILE" /Applications/

# Unmount the DMG
hdiutil detach "/tmp/jan-mount"

# Determine app name and executable path
APP_NAME=$(basename "$APP_FILE")

echo "App name: $APP_NAME"

# First, check what's actually in the MacOS folder
echo "Contents of MacOS folder:"
ls -la "/Applications/$APP_NAME/Contents/MacOS/"

# Find all executable files in MacOS folder
# (-perm +111 is BSD find syntax; valid on macOS, not on GNU find)
echo "Looking for executable files..."
find "/Applications/$APP_NAME/Contents/MacOS/" -type f -perm +111 -ls

# Try to find the main executable - it's usually the one with the same name as the app (without .app)
APP_BASE_NAME=$(basename "$APP_NAME" .app)
POTENTIAL_EXECUTABLES=(
    "/Applications/$APP_NAME/Contents/MacOS/$APP_BASE_NAME"
    "/Applications/$APP_NAME/Contents/MacOS/Jan"
    "/Applications/$APP_NAME/Contents/MacOS/Jan-nightly"
)

APP_PATH=""
for potential_exec in "${POTENTIAL_EXECUTABLES[@]}"; do
    echo "Checking: $potential_exec"
    if [ -f "$potential_exec" ] && [ -x "$potential_exec" ]; then
        APP_PATH="$potential_exec"
        echo "Found executable: $APP_PATH"
        break
    fi
done

# If still not found, get any executable file
if [ -z "$APP_PATH" ]; then
    echo "No predefined executable found, searching for any executable..."
    APP_PATH=$(find "/Applications/$APP_NAME/Contents/MacOS/" -type f -perm +111 | head -1)
fi

if [ -z "$APP_PATH" ]; then
    echo "[FAILED] No executable found in MacOS folder"
    ls -la "/Applications/$APP_NAME/Contents/MacOS/"
    exit 1
fi

PROCESS_NAME=$(basename "$APP_PATH")

echo "App installed at: /Applications/$APP_NAME"
echo "Executable path: $APP_PATH"
echo "Process name: $PROCESS_NAME"

# Export for test step
# NOTE(review): exports PROCESS_NAME, while the scripts README documents
# JAN_PROCESS_NAME — confirm which variable the workflow actually consumes.
echo "JAN_APP_PATH=$APP_PATH" >> $GITHUB_ENV
echo "PROCESS_NAME=$PROCESS_NAME" >> $GITHUB_ENV

echo "[INFO] Waiting for Jan app first initialization (120 seconds)..."
echo "This allows Jan to complete its initial setup and configuration"
sleep 120
echo "[SUCCESS] Initialization wait completed"

# Verify installation
if [ -f "$APP_PATH" ]; then
    echo "[SUCCESS] Jan app installed successfully"
    ls -la "/Applications/$APP_NAME"
else
    echo "[FAILED] Jan app installation failed - executable not found"
    exit 1
fi
|
||||||
38
autoqa/scripts/macos_post_cleanup.sh
Normal file
38
autoqa/scripts/macos_post_cleanup.sh
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
#!/bin/bash
# macOS post-test cleanup script
#
# Same teardown as macos_cleanup.sh (kill processes, remove bundles and
# per-user data), plus removal of the downloaded installer and mountpoint.

echo "Cleaning up after tests..."

# Kill any running Jan processes (both regular and nightly)
# `|| true` keeps the script going when no matching process exists.
pkill -f "Jan" || true
pkill -f "jan" || true
pkill -f "Jan-nightly" || true
pkill -f "jan-nightly" || true

# Remove Jan app directories
rm -rf /Applications/Jan.app
rm -rf /Applications/Jan-nightly.app
rm -rf ~/Applications/Jan.app
rm -rf ~/Applications/Jan-nightly.app

# Remove Jan data folders (both regular and nightly)
rm -rf ~/Library/Application\ Support/Jan
rm -rf ~/Library/Application\ Support/Jan-nightly
rm -rf ~/Library/Application\ Support/jan.ai.app
rm -rf ~/Library/Application\ Support/jan-nightly.ai.app
rm -rf ~/Library/Preferences/jan.*
rm -rf ~/Library/Preferences/jan-nightly.*
rm -rf ~/Library/Caches/jan.*
rm -rf ~/Library/Caches/jan-nightly.*
rm -rf ~/Library/Caches/jan.ai.app
rm -rf ~/Library/Caches/jan-nightly.ai.app
rm -rf ~/Library/WebKit/jan.ai.app
rm -rf ~/Library/WebKit/jan-nightly.ai.app
rm -rf ~/Library/Saved\ Application\ State/jan.ai.app
rm -rf ~/Library/Saved\ Application\ State/jan-nightly.ai.app

# Clean up downloaded installer
rm -f "/tmp/jan-installer.dmg"
rm -rf "/tmp/jan-mount"

echo "Cleanup completed"
|
||||||
31
autoqa/scripts/run_tests.ps1
Normal file
31
autoqa/scripts/run_tests.ps1
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
#!/usr/bin/env pwsh
# Windows test runner script
#
# Prints diagnostics about the working directory, then launches the
# AutoQA suite (main.py) with ReportPortal enabled.  The optional Jan
# app path / process name are forwarded only when they were supplied,
# mirroring the platform-agnostic run_tests.sh.

param(
    [string]$JanAppPath,
    [string]$ProcessName,
    [string]$RpToken
)

Write-Host "Starting Auto QA Tests..."

Write-Host "Jan app path: $JanAppPath"
Write-Host "Process name: $ProcessName"
Write-Host "Current working directory: $(Get-Location)"
Write-Host "Contents of current directory:"
Get-ChildItem
Write-Host "Contents of trajectories directory (if exists):"
if (Test-Path "trajectories") {
    Get-ChildItem "trajectories"
} else {
    Write-Host "trajectories directory not found"
}

# Assemble the argument list for main.py; the Jan-specific flags are
# appended only when their values were provided (a process name
# without an app path is ignored, as in the original branching).
$testArgs = @("main.py", "--enable-reportportal", "--rp-token", "$RpToken")
if ($JanAppPath) {
    $testArgs += @("--jan-app-path", "$JanAppPath")
    if ($ProcessName) {
        $testArgs += @("--jan-process-name", "$ProcessName")
    }
}

# Run the main test with proper arguments
python @testArgs
|
||||||
69
autoqa/scripts/run_tests.sh
Normal file
69
autoqa/scripts/run_tests.sh
Normal file
@ -0,0 +1,69 @@
|
|||||||
|
#!/bin/bash
# Common test runner script
#
# Usage: run_tests.sh <jan-app-path> <process-name> <rp-token> <platform>
#
# Performs platform-specific setup (X display selection on Ubuntu,
# app-bundle verification on macOS), then runs the AutoQA suite
# (main.py) with ReportPortal enabled, forwarding the optional Jan
# path / process-name arguments when provided.

JAN_APP_PATH="$1"
PROCESS_NAME="$2"
RP_TOKEN="$3"
PLATFORM="$4"

echo "Starting Auto QA Tests..."
echo "Platform: $PLATFORM"
echo "Jan app path: $JAN_APP_PATH"
echo "Process name: $PROCESS_NAME"

# Platform-specific setup
if [ "$PLATFORM" = "ubuntu" ]; then
    # Get the current display session: second column of the first
    # `w -h` entry.  NOTE(review): this assumes the first logged-in
    # session owns the display — confirm on multi-user runners.
    export DISPLAY=$(w -h | awk 'NR==1 {print $2}')
    echo "Display ID: $DISPLAY"

    # Verify display is working
    if [ -z "$DISPLAY" ]; then
        echo "No display session found, falling back to :0"
        export DISPLAY=:0
    fi

    echo "Using display: $DISPLAY"

    # Test display connection.  $DISPLAY is quoted so an unexpected
    # value cannot be word-split into extra xdpyinfo arguments.
    xdpyinfo -display "$DISPLAY" >/dev/null 2>&1 || {
        echo "Display $DISPLAY is not available"
        exit 1
    }

    # Make Jan executable if needed
    if [ -f "/usr/bin/Jan-nightly" ]; then
        sudo chmod +x /usr/bin/Jan-nightly
    fi
    if [ -f "/usr/bin/Jan" ]; then
        sudo chmod +x /usr/bin/Jan
    fi
fi

# macOS specific setup
if [ "$PLATFORM" = "macos" ]; then
    # Verify Jan app path
    if [ ! -f "$JAN_APP_PATH" ]; then
        echo "❌ Jan app not found at: $JAN_APP_PATH"
        echo "Available files in /Applications:"
        ls -la /Applications/ | grep -i jan || echo "No Jan apps found"
        exit 1
    fi
fi

# Change to autoqa directory to ensure correct working directory.
# Abort if the cd fails so tests never run from the wrong directory.
cd "$(dirname "$0")/.." || exit 1
echo "Current working directory: $(pwd)"
echo "Contents of current directory:"
ls -la
echo "Contents of trajectories directory (if exists):"
ls -la trajectories/ 2>/dev/null || echo "trajectories directory not found"

# Run the main test with proper arguments
if [ -n "$JAN_APP_PATH" ] && [ -n "$PROCESS_NAME" ]; then
    python main.py --enable-reportportal --rp-token "$RP_TOKEN" --jan-app-path "$JAN_APP_PATH" --jan-process-name "$PROCESS_NAME"
elif [ -n "$JAN_APP_PATH" ]; then
    python main.py --enable-reportportal --rp-token "$RP_TOKEN" --jan-app-path "$JAN_APP_PATH"
else
    python main.py --enable-reportportal --rp-token "$RP_TOKEN"
fi
|
||||||
80
autoqa/scripts/setup-android-env.sh
Executable file
80
autoqa/scripts/setup-android-env.sh
Executable file
@ -0,0 +1,80 @@
|
|||||||
|
#!/bin/bash

# Android Development Environment Setup for Jan
#
# Exports the toolchain environment (Java, Android SDK/NDK, Rust
# cross-compilation variables) needed to build Jan for Android on a
# macOS host, creates the helper symlinks some build tools expect,
# prints a short diagnostic summary, and finally exec's any command
# passed as arguments so it inherits this environment.

# Ensure rustup's Rust toolchain is used instead of Homebrew's
export PATH="$HOME/.cargo/bin:$PATH"

# Set JAVA_HOME for Android builds
export JAVA_HOME=/opt/homebrew/opt/openjdk@17/libexec/openjdk.jdk/Contents/Home
export PATH="/opt/homebrew/opt/openjdk@17/bin:$PATH"

export ANDROID_HOME="$HOME/Library/Android/sdk"
export ANDROID_NDK_ROOT="$HOME/Library/Android/sdk/ndk/29.0.14033849"
export NDK_HOME="$HOME/Library/Android/sdk/ndk/29.0.14033849"

# Single location of the NDK's prebuilt LLVM tools; referenced many
# times below, so keep it in one variable.
TOOLCHAIN_BIN="$NDK_HOME/toolchains/llvm/prebuilt/darwin-x86_64/bin"

# Add Android tools to PATH
export PATH=$PATH:$ANDROID_HOME/platform-tools:$ANDROID_HOME/tools:$ANDROID_HOME/cmdline-tools/latest/bin:$ANDROID_HOME/emulator:$TOOLCHAIN_BIN

# Set up CC and CXX for Android compilation
export CC_aarch64_linux_android="$TOOLCHAIN_BIN/aarch64-linux-android21-clang"
export CXX_aarch64_linux_android="$TOOLCHAIN_BIN/aarch64-linux-android21-clang++"
export AR_aarch64_linux_android="$TOOLCHAIN_BIN/llvm-ar"
export RANLIB_aarch64_linux_android="$TOOLCHAIN_BIN/llvm-ranlib"

# Additional environment variables for Rust cross-compilation
export CARGO_TARGET_AARCH64_LINUX_ANDROID_LINKER="$TOOLCHAIN_BIN/aarch64-linux-android21-clang"

# Only set global CC and AR for Android builds (when IS_ANDROID is set)
if [ "$IS_ANDROID" = "true" ]; then
    export CC="$TOOLCHAIN_BIN/aarch64-linux-android21-clang"
    export AR="$TOOLCHAIN_BIN/llvm-ar"
    echo "Global CC and AR set for Android build"
fi

# Create symlinks for Android tools if they don't exist.
# Link targets are quoted so a space anywhere in $HOME cannot split
# the ln arguments.
mkdir -p ~/.local/bin
if [ ! -f ~/.local/bin/aarch64-linux-android-ranlib ]; then
    ln -sf "$TOOLCHAIN_BIN/llvm-ranlib" ~/.local/bin/aarch64-linux-android-ranlib
fi
if [ ! -f ~/.local/bin/aarch64-linux-android-clang ]; then
    ln -sf "$TOOLCHAIN_BIN/aarch64-linux-android21-clang" ~/.local/bin/aarch64-linux-android-clang
fi
if [ ! -f ~/.local/bin/aarch64-linux-android-clang++ ]; then
    ln -sf "$TOOLCHAIN_BIN/aarch64-linux-android21-clang++" ~/.local/bin/aarch64-linux-android-clang++
fi

# Fix the broken clang symlinks by ensuring base clang is available
if [ ! -f ~/.local/bin/clang ]; then
    ln -sf "$TOOLCHAIN_BIN/clang" ~/.local/bin/clang
fi
if [ ! -f ~/.local/bin/clang++ ]; then
    ln -sf "$TOOLCHAIN_BIN/clang++" ~/.local/bin/clang++
fi

# Create symlinks for target-specific ar tools
if [ ! -f ~/.local/bin/aarch64-linux-android-ar ]; then
    ln -sf "$TOOLCHAIN_BIN/llvm-ar" ~/.local/bin/aarch64-linux-android-ar
fi
export PATH="$HOME/.local/bin:$PATH"

echo "Android environment configured:"
echo "ANDROID_HOME: $ANDROID_HOME"
echo "ANDROID_NDK_ROOT: $ANDROID_NDK_ROOT"
echo "PATH includes NDK toolchain: $(echo $PATH | grep -o "ndk.*bin" || echo "NOT FOUND")"

# Verify required tools
echo -e "\nChecking required tools:"
which adb && echo "✅ adb found" || echo "❌ adb not found"
which emulator && echo "✅ emulator found" || echo "❌ emulator not found"
which $CC_aarch64_linux_android && echo "✅ Android clang found" || echo "❌ Android clang not found"

# Show available AVDs
echo -e "\nAvailable Android Virtual Devices:"
emulator -list-avds 2>/dev/null || echo "No AVDs found"

# Execute the provided command
if [ "$1" ]; then
    echo -e "\nExecuting: $@"
    exec "$@"
fi
|
||||||
15
autoqa/scripts/setup_permissions.sh
Normal file
15
autoqa/scripts/setup_permissions.sh
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
#!/bin/bash
# Setup script permissions for AutoQA scripts
#
# Grants the executable bit to every *.sh file that sits in the same
# directory as this script, then prints the resulting permissions.

echo "Setting up permissions for AutoQA scripts..."

# Directory holding this script, resolved to an absolute path so the
# glob below works no matter where the caller invoked us from.
here="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"

# Flip the executable bit on every sibling shell script.
chmod +x "$here"/*.sh

echo "[SUCCESS] All shell scripts are now executable:"
ls -la "$here"/*.sh

echo "[SUCCESS] Permission setup completed"
|
||||||
22
autoqa/scripts/ubuntu_cleanup.sh
Normal file
22
autoqa/scripts/ubuntu_cleanup.sh
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
#!/bin/bash
# Ubuntu cleanup script for Jan app
#
# Removes the per-user configuration, data and cache directories left
# behind by previous Jan installs (regular and nightly), then stops
# any Jan processes that are still running.

echo "Cleaning existing Jan installations..."

# Per-user data folders for both the regular and nightly builds.
for dir in \
    "$HOME/.config/Jan" \
    "$HOME/.config/Jan-nightly" \
    "$HOME/.local/share/Jan" \
    "$HOME/.local/share/Jan-nightly" \
    "$HOME/.cache/jan" \
    "$HOME/.cache/jan-nightly" \
    "$HOME/.local/share/jan-nightly.ai.app" \
    "$HOME/.local/share/jan.ai.app"; do
    rm -rf "$dir"
done

# Stop any running Jan processes; pkill exits non-zero when nothing
# matches, so force success to keep the script going.
for pattern in "Jan" "jan" "Jan-nightly" "jan-nightly"; do
    pkill -f "$pattern" || true
done

echo "Jan cleanup completed"
|
||||||
57
autoqa/scripts/ubuntu_download.sh
Normal file
57
autoqa/scripts/ubuntu_download.sh
Normal file
@ -0,0 +1,57 @@
|
|||||||
|
#!/bin/bash
# Ubuntu download script for Jan app
#
# Usage: ubuntu_download.sh <workflow-url> <workflow-is-nightly> \
#                           <repo-var-url> <repo-var-is-nightly> \
#                           <default-url> <default-is-nightly>
#
# Resolves which Jan .deb to test (workflow input > repository
# variable > default), records JAN_APP_URL / IS_NIGHTLY in
# $GITHUB_ENV for later workflow steps, and downloads the package
# to /tmp/jan-installer.deb.

WORKFLOW_INPUT_URL="$1"
WORKFLOW_INPUT_IS_NIGHTLY="$2"
REPO_VARIABLE_URL="$3"
REPO_VARIABLE_IS_NIGHTLY="$4"
DEFAULT_URL="$5"
DEFAULT_IS_NIGHTLY="$6"

# Determine Jan app URL and nightly flag from multiple sources (priority order):
# 1. Workflow dispatch input (manual trigger)
# 2. Repository variable JAN_APP_URL_LINUX
# 3. Default URL from env

JAN_APP_URL=""
IS_NIGHTLY=false

if [ -n "$WORKFLOW_INPUT_URL" ]; then
    JAN_APP_URL="$WORKFLOW_INPUT_URL"
    IS_NIGHTLY="$WORKFLOW_INPUT_IS_NIGHTLY"
    echo "Using Jan app URL from workflow input: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
elif [ -n "$REPO_VARIABLE_URL" ]; then
    JAN_APP_URL="$REPO_VARIABLE_URL"
    IS_NIGHTLY="$REPO_VARIABLE_IS_NIGHTLY"
    echo "Using Jan app URL from repository variable: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
else
    JAN_APP_URL="$DEFAULT_URL"
    IS_NIGHTLY="$DEFAULT_IS_NIGHTLY"
    echo "Using default Jan app URL: $JAN_APP_URL"
    echo "Is nightly build: $IS_NIGHTLY"
fi

# Set environment variables for later steps.  $GITHUB_ENV is quoted:
# an unquoted, unset variable in a redirect aborts bash with an
# "ambiguous redirect" error instead of a clear failure.
echo "JAN_APP_URL=$JAN_APP_URL" >> "$GITHUB_ENV"
echo "IS_NIGHTLY=$IS_NIGHTLY" >> "$GITHUB_ENV"

echo "Downloading Jan app from: $JAN_APP_URL"

DOWNLOAD_PATH="/tmp/jan-installer.deb"

# Download the package
if ! wget "$JAN_APP_URL" -O "$DOWNLOAD_PATH"; then
    echo "Failed to download Jan app"
    exit 1
fi

if [ -f "$DOWNLOAD_PATH" ]; then
    FILE_SIZE=$(stat -c%s "$DOWNLOAD_PATH")
    echo "Downloaded Jan app successfully. Size: $FILE_SIZE bytes"
    echo "File saved to: $DOWNLOAD_PATH"
else
    echo "Downloaded file not found"
    exit 1
fi
|
||||||
39
autoqa/scripts/ubuntu_install.sh
Normal file
39
autoqa/scripts/ubuntu_install.sh
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
#!/bin/bash
# Ubuntu install script for Jan app
#
# Usage: ubuntu_install.sh <is-nightly>
#
# Installs the previously downloaded /tmp/jan-installer.deb, waits for
# Jan's first-run initialization, then verifies the expected binary
# location and exports JAN_APP_PATH / JAN_PROCESS_NAME via $GITHUB_ENV.

IS_NIGHTLY="$1"

INSTALLER_PATH="/tmp/jan-installer.deb"

echo "Installing Jan app..."
echo "Is nightly build: $IS_NIGHTLY"

# Fail fast if the download step did not produce the package.
if [ ! -f "$INSTALLER_PATH" ]; then
    echo "Installer not found at: $INSTALLER_PATH"
    exit 1
fi

# Install the .deb package (apt treats an argument containing "/" as
# a local file and resolves its dependencies)
sudo apt install "$INSTALLER_PATH" -y
sudo apt-get install -f -y

# Wait for installation to complete
sleep 10

echo "[INFO] Waiting for Jan app first initialization (120 seconds)..."
echo "This allows Jan to complete its initial setup and configuration"
sleep 120
echo "[SUCCESS] Initialization wait completed"

# Verify installation based on nightly flag
if [ "$IS_NIGHTLY" = "true" ]; then
    DEFAULT_JAN_PATH="/usr/bin/Jan-nightly"
    PROCESS_NAME="Jan-nightly"
else
    DEFAULT_JAN_PATH="/usr/bin/Jan"
    PROCESS_NAME="Jan"
fi

if [ -f "$DEFAULT_JAN_PATH" ]; then
    echo "Jan app installed successfully at: $DEFAULT_JAN_PATH"
    # Quote $GITHUB_ENV so an unset value cannot trigger an
    # "ambiguous redirect" error.
    echo "JAN_APP_PATH=$DEFAULT_JAN_PATH" >> "$GITHUB_ENV"
    echo "JAN_PROCESS_NAME=$PROCESS_NAME" >> "$GITHUB_ENV"
else
    echo "Jan app not found at expected location: $DEFAULT_JAN_PATH"
    echo "Will auto-detect during test run"
fi
|
||||||
44
autoqa/scripts/ubuntu_post_cleanup.sh
Normal file
44
autoqa/scripts/ubuntu_post_cleanup.sh
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
#!/bin/bash
# Ubuntu post-test cleanup script
#
# Stops running Jan processes, deletes per-user Jan data (regular and
# nightly), uninstalls the matching Debian package, and removes the
# downloaded installer.  The nightly flag ($1) selects which package
# name to uninstall.

IS_NIGHTLY="$1"

echo "Cleaning up after tests..."

# Stop any Jan processes that survived the tests; pkill exits
# non-zero when nothing matches, so force success.
for pattern in "Jan" "jan" "Jan-nightly" "jan-nightly"; do
    pkill -f "$pattern" || true
done

# Per-user data folders for both the regular and nightly builds.
for dir in \
    "$HOME/.config/Jan" \
    "$HOME/.config/Jan-nightly" \
    "$HOME/.local/share/Jan" \
    "$HOME/.local/share/Jan-nightly" \
    "$HOME/.cache/jan" \
    "$HOME/.cache/jan-nightly" \
    "$HOME/.local/share/jan-nightly.ai.app" \
    "$HOME/.local/share/jan.ai.app"; do
    rm -rf "$dir"
done

# Try to uninstall Jan app
if [ "$IS_NIGHTLY" = "true" ]; then
    PACKAGE_NAME="jan-nightly"
else
    PACKAGE_NAME="jan"
fi

echo "Attempting to uninstall package: $PACKAGE_NAME"

if dpkg -l | grep -q "$PACKAGE_NAME"; then
    echo "Found package $PACKAGE_NAME, uninstalling..."
    sudo dpkg -r "$PACKAGE_NAME" || true
    sudo apt-get autoremove -y || true
else
    echo "Package $PACKAGE_NAME not found in dpkg list"
fi

# Clean up downloaded installer
rm -f "/tmp/jan-installer.deb"

echo "Cleanup completed"
|
||||||
50
autoqa/scripts/windows_cleanup.ps1
Normal file
50
autoqa/scripts/windows_cleanup.ps1
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
#!/usr/bin/env pwsh
# Windows cleanup script for Jan app
#
# Deletes the per-user data folders of the regular and nightly Jan
# builds, stops any Jan processes that are still running, and clears
# the user's Jan extensions folder.

param(
    [string]$IsNightly = "false"
)

Write-Host "Cleaning existing Jan installations..."

# Data folders for both build flavours (roaming and local app data).
$dataFolders = @(
    "$env:APPDATA\Jan",
    "$env:APPDATA\Jan-nightly",
    "$env:LOCALAPPDATA\jan.ai.app",
    "$env:LOCALAPPDATA\jan-nightly.ai.app"
)

foreach ($folder in $dataFolders) {
    if (Test-Path $folder) {
        Write-Host "Removing $folder"
        Remove-Item -Path $folder -Recurse -Force -ErrorAction SilentlyContinue
    }
}

# Stop every running Jan process variant; errors are suppressed
# because the processes usually are not running.
foreach ($proc in @("Jan", "jan", "Jan-nightly", "jan-nightly")) {
    Get-Process -Name $proc -ErrorAction SilentlyContinue | Stop-Process -Force -ErrorAction SilentlyContinue
}

# Remove Jan extensions folder
$janExtensionsPath = "$env:USERPROFILE\jan\extensions"
if (Test-Path $janExtensionsPath) {
    Write-Host "Removing $janExtensionsPath"
    Remove-Item -Path $janExtensionsPath -Recurse -Force -ErrorAction SilentlyContinue
}

Write-Host "Jan cleanup completed"
|
||||||
63
autoqa/scripts/windows_download.ps1
Normal file
63
autoqa/scripts/windows_download.ps1
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
#!/usr/bin/env pwsh
# Windows download script for Jan app
#
# Resolves which Jan installer to test (workflow input > repository
# variable > default), records JAN_APP_URL / IS_NIGHTLY in
# $env:GITHUB_ENV for later workflow steps, and downloads the
# installer to $env:TEMP\jan-installer.exe.

param(
    [string]$WorkflowInputUrl = "",
    [string]$WorkflowInputIsNightly = "",
    [string]$RepoVariableUrl = "",
    [string]$RepoVariableIsNightly = "",
    [string]$DefaultUrl = "",
    [string]$DefaultIsNightly = ""
)

# Determine Jan app URL and nightly flag from multiple sources (priority order):
# 1. Workflow dispatch input (manual trigger)
# 2. Repository variable JAN_APP_URL
# 3. Default URL from env

$janAppUrl = ""
$isNightly = $false

if ($WorkflowInputUrl -ne "") {
    $janAppUrl = $WorkflowInputUrl
    $isNightly = [System.Convert]::ToBoolean($WorkflowInputIsNightly)
    Write-Host "Using Jan app URL from workflow input: $janAppUrl"
    Write-Host "Is nightly build: $isNightly"
}
elseif ($RepoVariableUrl -ne "") {
    $janAppUrl = $RepoVariableUrl
    $isNightly = [System.Convert]::ToBoolean($RepoVariableIsNightly)
    Write-Host "Using Jan app URL from repository variable: $janAppUrl"
    Write-Host "Is nightly build: $isNightly"
}
else {
    $janAppUrl = $DefaultUrl
    $isNightly = [System.Convert]::ToBoolean($DefaultIsNightly)
    Write-Host "Using default Jan app URL: $janAppUrl"
    Write-Host "Is nightly build: $isNightly"
}

# Set environment variables for later steps
Write-Output "JAN_APP_URL=$janAppUrl" >> $env:GITHUB_ENV
Write-Output "IS_NIGHTLY=$isNightly" >> $env:GITHUB_ENV

Write-Host "Downloading Jan app from: $janAppUrl"

$downloadPath = "$env:TEMP\jan-installer.exe"

try {
    # Use wget for better performance
    wget.exe "$janAppUrl" -O "$downloadPath"

    # A failing native command does not raise a PowerShell exception,
    # so check its exit code explicitly; otherwise a partial download
    # could slip through the Test-Path check below.
    if ($LASTEXITCODE -ne 0) {
        throw "wget exited with code $LASTEXITCODE"
    }

    if (Test-Path $downloadPath) {
        $fileSize = (Get-Item $downloadPath).Length
        Write-Host "Downloaded Jan app successfully. Size: $fileSize bytes"
        Write-Host "File saved to: $downloadPath"
    } else {
        throw "Downloaded file not found"
    }
}
catch {
    Write-Error "Failed to download Jan app: $_"
    exit 1
}
|
||||||
48
autoqa/scripts/windows_install.ps1
Normal file
48
autoqa/scripts/windows_install.ps1
Normal file
@ -0,0 +1,48 @@
|
|||||||
|
#!/usr/bin/env pwsh
# Windows install script for Jan app
#
# Runs the downloaded installer (silent mode first, interactive as a
# fallback), waits for Jan's first-run initialization, then verifies
# the expected install location and publishes JAN_APP_PATH /
# JAN_PROCESS_NAME through $env:GITHUB_ENV.

param(
    [string]$IsNightly = "false"
)

$installerPath = "$env:TEMP\jan-installer.exe"
$isNightly = [System.Convert]::ToBoolean($IsNightly)

Write-Host "Installing Jan app..."
Write-Host "Is nightly build: $isNightly"

# Try silent installation first
try {
    Start-Process -FilePath $installerPath -ArgumentList "/S" -Wait -NoNewWindow
    Write-Host "Jan app installed silently"
}
catch {
    Write-Host "Silent installation failed, trying normal installation..."
    Start-Process -FilePath $installerPath -Wait -NoNewWindow
}

# Wait a bit for installation to complete
Start-Sleep -Seconds 10

Write-Host "[INFO] Waiting for Jan app first initialization (120 seconds)..."
Write-Host "This allows Jan to complete its initial setup and configuration"
Start-Sleep -Seconds 120
Write-Host "[SUCCESS] Initialization wait completed"

# Derive the expected install location from the build flavour.
$flavor = if ($isNightly) { "jan-nightly" } else { "jan" }
$exeName = if ($isNightly) { "Jan-nightly.exe" } else { "Jan.exe" }
$defaultJanPath = "$env:LOCALAPPDATA\Programs\$flavor\$exeName"
$processName = $exeName

if (Test-Path $defaultJanPath) {
    Write-Host "Jan app installed successfully at: $defaultJanPath"
    Write-Output "JAN_APP_PATH=$defaultJanPath" >> $env:GITHUB_ENV
    Write-Output "JAN_PROCESS_NAME=$processName" >> $env:GITHUB_ENV
} else {
    Write-Warning "Jan app not found at expected location: $defaultJanPath"
    Write-Host "Will auto-detect during test run"
}
|
||||||
102
autoqa/scripts/windows_post_cleanup.ps1
Normal file
102
autoqa/scripts/windows_post_cleanup.ps1
Normal file
@ -0,0 +1,102 @@
|
|||||||
|
#!/usr/bin/env pwsh
# Windows post-test cleanup script
#
# Stops running Jan processes, deletes all per-user Jan data and
# install directories (regular and nightly), runs the uninstaller for
# the flavour selected by -IsNightly, and removes the downloaded
# installer.

param(
    [string]$IsNightly = "false"
)

Write-Host "Cleaning up after tests..."

# Stop every running Jan process variant; errors are suppressed
# because the processes usually are not running.
foreach ($proc in @("Jan", "jan", "Jan-nightly", "jan-nightly")) {
    Get-Process -Name $proc -ErrorAction SilentlyContinue | Stop-Process -Force -ErrorAction SilentlyContinue
}

# Data and installation folders for both build flavours.
$foldersToRemove = @(
    "$env:APPDATA\Jan",
    "$env:APPDATA\Jan-nightly",
    "$env:LOCALAPPDATA\jan.ai.app",
    "$env:LOCALAPPDATA\jan-nightly.ai.app",
    "$env:LOCALAPPDATA\Programs\Jan",
    "$env:LOCALAPPDATA\Programs\Jan-nightly"
)

foreach ($folder in $foldersToRemove) {
    if (Test-Path $folder) {
        Write-Host "Removing $folder"
        Remove-Item -Path $folder -Recurse -Force -ErrorAction SilentlyContinue
    }
}

# Remove Jan extensions folder
$janExtensionsPath = "$env:USERPROFILE\jan\extensions"
if (Test-Path $janExtensionsPath) {
    Write-Host "Removing $janExtensionsPath"
    Remove-Item -Path $janExtensionsPath -Recurse -Force -ErrorAction SilentlyContinue
}

# Try to uninstall Jan app silently
try {
    $isNightly = [System.Convert]::ToBoolean($IsNightly)

    # Determine uninstaller path based on nightly flag
    $flavor = if ($isNightly) { "jan-nightly" } else { "jan" }
    $installPath = "$env:LOCALAPPDATA\Programs\$flavor"
    $uninstallerPath = "$installPath\uninstall.exe"

    Write-Host "Looking for uninstaller at: $uninstallerPath"

    if (Test-Path $uninstallerPath) {
        Write-Host "Found uninstaller, attempting silent uninstall..."
        Start-Process -FilePath $uninstallerPath -ArgumentList "/S" -Wait -NoNewWindow -ErrorAction SilentlyContinue
        Write-Host "Uninstall completed"
    } else {
        Write-Host "No uninstaller found, attempting manual cleanup..."

        if (Test-Path $installPath) {
            Write-Host "Removing installation directory: $installPath"
            Remove-Item -Path $installPath -Recurse -Force -ErrorAction SilentlyContinue
        }
    }

    Write-Host "Jan app cleanup completed"
}
catch {
    Write-Warning "Failed to uninstall Jan app cleanly: $_"
    Write-Host "Manual cleanup may be required"
}

# Clean up downloaded installer
$installerPath = "$env:TEMP\jan-installer.exe"
if (Test-Path $installerPath) {
    Remove-Item -Path $installerPath -Force -ErrorAction SilentlyContinue
}

Write-Host "Cleanup completed"
|
||||||
322
autoqa/test_runner.py
Normal file
322
autoqa/test_runner.py
Normal file
@ -0,0 +1,322 @@
|
|||||||
|
import os
|
||||||
|
import asyncio
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
from pathlib import Path
|
||||||
|
# from computer import Computer
|
||||||
|
from agent import ComputerAgent, LLM
|
||||||
|
|
||||||
|
from utils import is_jan_running, force_close_jan, start_jan_app, get_latest_trajectory_folder
|
||||||
|
from screen_recorder import ScreenRecorder
|
||||||
|
from reportportal_handler import upload_test_results_to_rp
|
||||||
|
from reportportal_client.helpers import timestamp
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
async def run_single_test_with_timeout(computer, test_data, rp_client, launch_id, max_turns=30,
|
||||||
|
jan_app_path=None, jan_process_name="Jan.exe", agent_config=None,
|
||||||
|
enable_reportportal=False):
|
||||||
|
"""
|
||||||
|
Run a single test case with turn count monitoring, forced stop, and screen recording
|
||||||
|
Returns dict with test result: {"success": bool, "status": str, "message": str}
|
||||||
|
"""
|
||||||
|
path = test_data['path']
|
||||||
|
prompt = test_data['prompt']
|
||||||
|
|
||||||
|
# Detect if using nightly version based on process name
|
||||||
|
is_nightly = "nightly" in jan_process_name.lower() if jan_process_name else False
|
||||||
|
|
||||||
|
# Default agent config if not provided
|
||||||
|
if agent_config is None:
|
||||||
|
agent_config = {
|
||||||
|
"loop": "uitars",
|
||||||
|
"model_provider": "oaicompat",
|
||||||
|
"model_name": "ByteDance-Seed/UI-TARS-1.5-7B",
|
||||||
|
"model_base_url": "http://10.200.108.58:1234/v1"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Create trajectory_dir from path (remove .txt extension)
|
||||||
|
trajectory_name = str(Path(path).with_suffix(''))
|
||||||
|
trajectory_base_dir = os.path.abspath(f"trajectories/{trajectory_name.replace(os.sep, '/')}")
|
||||||
|
|
||||||
|
# Ensure trajectories directory exists
|
||||||
|
os.makedirs(os.path.dirname(trajectory_base_dir), exist_ok=True)
|
||||||
|
|
||||||
|
# Create recordings directory
|
||||||
|
recordings_dir = "recordings"
|
||||||
|
os.makedirs(recordings_dir, exist_ok=True)
|
||||||
|
|
||||||
|
# Create video filename
|
||||||
|
current_time = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||||
|
safe_test_name = trajectory_name.replace('/', '_').replace('\\', '_')
|
||||||
|
video_filename = f"{safe_test_name}_{current_time}.mp4"
|
||||||
|
video_path = os.path.abspath(os.path.join(recordings_dir, video_filename))
|
||||||
|
|
||||||
|
# Initialize result tracking
|
||||||
|
test_result_data = {
|
||||||
|
"success": False,
|
||||||
|
"status": "UNKNOWN",
|
||||||
|
"message": "Test execution incomplete",
|
||||||
|
"trajectory_dir": None,
|
||||||
|
"video_path": video_path
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.info(f"Starting test: {path}")
|
||||||
|
logger.info(f"Current working directory: {os.getcwd()}")
|
||||||
|
logger.info(f"Trajectory base directory: {trajectory_base_dir}")
|
||||||
|
logger.info(f"Screen recording will be saved to: {video_path}")
|
||||||
|
logger.info(f"Using model: {agent_config['model_name']} from {agent_config['model_base_url']}")
|
||||||
|
logger.info(f"ReportPortal upload: {'ENABLED' if enable_reportportal else 'DISABLED'}")
|
||||||
|
|
||||||
|
trajectory_dir = None
|
||||||
|
agent_task = None
|
||||||
|
monitor_stop_event = threading.Event()
|
||||||
|
force_stopped_due_to_turns = False # Track if test was force stopped
|
||||||
|
|
||||||
|
# Initialize screen recorder
|
||||||
|
recorder = ScreenRecorder(video_path, fps=10)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Step 1: Check and force close Jan app if running
|
||||||
|
if is_jan_running(jan_process_name):
|
||||||
|
logger.info("Jan application is running, force closing...")
|
||||||
|
force_close_jan(jan_process_name)
|
||||||
|
|
||||||
|
# Step 2: Start Jan app in maximized mode
|
||||||
|
if jan_app_path:
|
||||||
|
start_jan_app(jan_app_path)
|
||||||
|
else:
|
||||||
|
start_jan_app() # Use default path
|
||||||
|
|
||||||
|
# Step 3: Start screen recording
|
||||||
|
recorder.start_recording()
|
||||||
|
|
||||||
|
# Step 4: Create agent for this test using config
|
||||||
|
agent = ComputerAgent(
|
||||||
|
computer=computer,
|
||||||
|
loop=agent_config["loop"],
|
||||||
|
model=LLM(
|
||||||
|
provider=agent_config["model_provider"],
|
||||||
|
name=agent_config["model_name"],
|
||||||
|
provider_base_url=agent_config["model_base_url"]
|
||||||
|
),
|
||||||
|
trajectory_dir=trajectory_base_dir
|
||||||
|
)
|
||||||
|
|
||||||
|
# Step 5: Start monitoring thread
|
||||||
|
def monitor_thread():
|
||||||
|
nonlocal force_stopped_due_to_turns
|
||||||
|
while not monitor_stop_event.is_set():
|
||||||
|
try:
|
||||||
|
if os.path.exists(trajectory_base_dir):
|
||||||
|
folders = [f for f in os.listdir(trajectory_base_dir)
|
||||||
|
if os.path.isdir(os.path.join(trajectory_base_dir, f))]
|
||||||
|
|
||||||
|
if folders:
|
||||||
|
latest_folder = sorted(folders)[-1]
|
||||||
|
latest_folder_path = os.path.join(trajectory_base_dir, latest_folder)
|
||||||
|
|
||||||
|
if os.path.exists(latest_folder_path):
|
||||||
|
turn_folders = [f for f in os.listdir(latest_folder_path)
|
||||||
|
if os.path.isdir(os.path.join(latest_folder_path, f)) and f.startswith("turn_")]
|
||||||
|
|
||||||
|
turn_count = len(turn_folders)
|
||||||
|
logger.info(f"Current turn count: {turn_count}")
|
||||||
|
|
||||||
|
if turn_count >= max_turns:
|
||||||
|
logger.warning(f"Turn count exceeded {max_turns} for test {path}, forcing stop")
|
||||||
|
force_stopped_due_to_turns = True # Mark as force stopped
|
||||||
|
# Cancel the agent task
|
||||||
|
if agent_task and not agent_task.done():
|
||||||
|
agent_task.cancel()
|
||||||
|
monitor_stop_event.set()
|
||||||
|
return
|
||||||
|
|
||||||
|
# Check every 5 seconds
|
||||||
|
if not monitor_stop_event.wait(5):
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in monitor thread: {e}")
|
||||||
|
time.sleep(5)
|
||||||
|
|
||||||
|
# Start monitoring in background thread
|
||||||
|
monitor_thread_obj = threading.Thread(target=monitor_thread, daemon=True)
|
||||||
|
monitor_thread_obj.start()
|
||||||
|
|
||||||
|
# Step 6: Run the test with prompt
|
||||||
|
logger.info(f"Running test case: {path}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Create the agent task
|
||||||
|
async def run_agent():
|
||||||
|
async for result in agent.run(prompt):
|
||||||
|
if monitor_stop_event.is_set():
|
||||||
|
logger.warning(f"Test {path} stopped due to turn limit")
|
||||||
|
break
|
||||||
|
logger.info(f"Test result for {path}: {result}")
|
||||||
|
print(result)
|
||||||
|
|
||||||
|
agent_task = asyncio.create_task(run_agent())
|
||||||
|
|
||||||
|
# Wait for agent task to complete or timeout
|
||||||
|
try:
|
||||||
|
await asyncio.wait_for(agent_task, timeout=600) # 10 minute timeout as backup
|
||||||
|
if not monitor_stop_event.is_set():
|
||||||
|
logger.info(f"Successfully completed test execution: {path}")
|
||||||
|
else:
|
||||||
|
logger.warning(f"Test {path} was stopped due to turn limit")
|
||||||
|
|
||||||
|
except asyncio.TimeoutError:
|
||||||
|
logger.warning(f"Test {path} timed out after 10 minutes")
|
||||||
|
agent_task.cancel()
|
||||||
|
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
logger.warning(f"Test {path} was cancelled due to turn limit")
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# Stop monitoring
|
||||||
|
monitor_stop_event.set()
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error running test {path}: {e}")
|
||||||
|
monitor_stop_event.set()
|
||||||
|
# Update result data for exception case
|
||||||
|
test_result_data.update({
|
||||||
|
"success": False,
|
||||||
|
"status": "ERROR",
|
||||||
|
"message": f"Test execution failed with exception: {str(e)}",
|
||||||
|
"trajectory_dir": None
|
||||||
|
})
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# Step 7: Stop screen recording
|
||||||
|
try:
|
||||||
|
recorder.stop_recording()
|
||||||
|
logger.info(f"Screen recording saved to: {video_path}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error stopping screen recording: {e}")
|
||||||
|
|
||||||
|
# Step 8: Upload results to ReportPortal only if enabled
|
||||||
|
if enable_reportportal and rp_client and launch_id:
|
||||||
|
# Get trajectory folder first
|
||||||
|
trajectory_dir = get_latest_trajectory_folder(trajectory_base_dir)
|
||||||
|
|
||||||
|
try:
|
||||||
|
if trajectory_dir:
|
||||||
|
logger.info(f"Uploading results to ReportPortal for: {path}")
|
||||||
|
logger.info(f"Video path for upload: {video_path}")
|
||||||
|
logger.info(f"Video exists: {os.path.exists(video_path)}")
|
||||||
|
if os.path.exists(video_path):
|
||||||
|
logger.info(f"Video file size: {os.path.getsize(video_path)} bytes")
|
||||||
|
upload_test_results_to_rp(rp_client, launch_id, path, trajectory_dir, force_stopped_due_to_turns, video_path, is_nightly)
|
||||||
|
else:
|
||||||
|
logger.warning(f"Test completed but no trajectory found for: {path}")
|
||||||
|
# Handle case where test completed but no trajectory found
|
||||||
|
formatted_test_path = path.replace('\\', '/').replace('.txt', '').replace('/', '__')
|
||||||
|
test_item_id = rp_client.start_test_item(
|
||||||
|
launch_id=launch_id,
|
||||||
|
name=formatted_test_path,
|
||||||
|
start_time=timestamp(),
|
||||||
|
item_type="TEST"
|
||||||
|
)
|
||||||
|
rp_client.log(
|
||||||
|
time=timestamp(),
|
||||||
|
level="ERROR",
|
||||||
|
message="Test execution completed but no trajectory data found",
|
||||||
|
item_id=test_item_id
|
||||||
|
)
|
||||||
|
|
||||||
|
# Still upload video for failed test
|
||||||
|
if video_path and os.path.exists(video_path):
|
||||||
|
try:
|
||||||
|
with open(video_path, "rb") as video_file:
|
||||||
|
rp_client.log(
|
||||||
|
time=timestamp(),
|
||||||
|
level="INFO",
|
||||||
|
message="[INFO] Screen recording of failed test",
|
||||||
|
item_id=test_item_id,
|
||||||
|
attachment={
|
||||||
|
"name": f"failed_test_recording_{formatted_test_path}.mp4",
|
||||||
|
"data": video_file.read(),
|
||||||
|
"mime": "video/x-msvideo"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error uploading video for failed test: {e}")
|
||||||
|
|
||||||
|
rp_client.finish_test_item(
|
||||||
|
item_id=test_item_id,
|
||||||
|
end_time=timestamp(),
|
||||||
|
status="FAILED"
|
||||||
|
)
|
||||||
|
except Exception as upload_error:
|
||||||
|
logger.error(f"Error uploading results for {path}: {upload_error}")
|
||||||
|
else:
|
||||||
|
# For non-ReportPortal mode, still get trajectory for final results
|
||||||
|
trajectory_dir = get_latest_trajectory_folder(trajectory_base_dir)
|
||||||
|
|
||||||
|
# Always process results for consistency (both RP and local mode)
|
||||||
|
# trajectory_dir is already set above, no need to call get_latest_trajectory_folder again
|
||||||
|
if trajectory_dir:
|
||||||
|
# Extract test result for processing
|
||||||
|
from reportportal_handler import extract_test_result_from_trajectory
|
||||||
|
|
||||||
|
if force_stopped_due_to_turns:
|
||||||
|
final_status = "FAILED"
|
||||||
|
status_message = "exceeded maximum turn limit ({} turns)".format(max_turns)
|
||||||
|
test_result_data.update({
|
||||||
|
"success": False,
|
||||||
|
"status": final_status,
|
||||||
|
"message": status_message,
|
||||||
|
"trajectory_dir": trajectory_dir
|
||||||
|
})
|
||||||
|
else:
|
||||||
|
test_result = extract_test_result_from_trajectory(trajectory_dir)
|
||||||
|
if test_result is True:
|
||||||
|
final_status = "PASSED"
|
||||||
|
status_message = "completed successfully with positive result"
|
||||||
|
test_result_data.update({
|
||||||
|
"success": True,
|
||||||
|
"status": final_status,
|
||||||
|
"message": status_message,
|
||||||
|
"trajectory_dir": trajectory_dir
|
||||||
|
})
|
||||||
|
else:
|
||||||
|
final_status = "FAILED"
|
||||||
|
status_message = "no valid success result found"
|
||||||
|
test_result_data.update({
|
||||||
|
"success": False,
|
||||||
|
"status": final_status,
|
||||||
|
"message": status_message,
|
||||||
|
"trajectory_dir": trajectory_dir
|
||||||
|
})
|
||||||
|
|
||||||
|
if not enable_reportportal:
|
||||||
|
# Local development mode - log results
|
||||||
|
logger.info(f"[INFO] LOCAL RESULT: {path} - {final_status} ({status_message})")
|
||||||
|
logger.info(f"[INFO] Video saved: {video_path}")
|
||||||
|
logger.info(f"[INFO] Trajectory: {trajectory_dir}")
|
||||||
|
else:
|
||||||
|
final_status = "FAILED"
|
||||||
|
status_message = "no trajectory found"
|
||||||
|
test_result_data.update({
|
||||||
|
"success": False,
|
||||||
|
"status": final_status,
|
||||||
|
"message": status_message,
|
||||||
|
"trajectory_dir": None
|
||||||
|
})
|
||||||
|
|
||||||
|
if not enable_reportportal:
|
||||||
|
logger.warning(f"[INFO] LOCAL RESULT: {path} - {final_status} ({status_message})")
|
||||||
|
|
||||||
|
# Step 9: Always force close Jan app after test completion
|
||||||
|
logger.info(f"Cleaning up after test: {path}")
|
||||||
|
force_close_jan(jan_process_name)
|
||||||
|
|
||||||
|
# Return test result
|
||||||
|
return test_result_data
|
||||||
17
autoqa/tests/new-user/1-user-start-chatting.txt
Normal file
17
autoqa/tests/new-user/1-user-start-chatting.txt
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
prompt = """
|
||||||
|
You are going to test the Jan application by downloading and chatting with a model (qwen3-0.6B).
|
||||||
|
|
||||||
|
Step-by-step instructions:
|
||||||
|
1. Given the Jan application is already opened.
|
||||||
|
2. In the **bottom-left corner**, click the **Hub** menu item.
|
||||||
|
3. Scroll through the model list or use the search bar to find **qwen3-0.6B**.
|
||||||
|
4. Click **Use** on the qwen3-0.6B model.
|
||||||
|
5. Wait for the model to finish downloading and become ready.
|
||||||
|
6. Once redirected to the chat screen, type any message into the input box (e.g. `Hello World`).
|
||||||
|
7. Press **Enter** to send the message.
|
||||||
|
8. Wait for the model's response.
|
||||||
|
|
||||||
|
If the model responds correctly, return: {"result": True}, otherwise return: {"result": False}.
|
||||||
|
|
||||||
|
In all your responses, use only plain ASCII characters. Do NOT use Unicode symbols
|
||||||
|
"""
|
||||||
343
autoqa/utils.py
Normal file
343
autoqa/utils.py
Normal file
@ -0,0 +1,343 @@
|
|||||||
|
import os
|
||||||
|
import logging
|
||||||
|
import subprocess
|
||||||
|
import psutil
|
||||||
|
import time
|
||||||
|
import pyautogui
|
||||||
|
import platform
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Cross-platform window management
|
||||||
|
IS_LINUX = platform.system() == "Linux"
|
||||||
|
IS_WINDOWS = platform.system() == "Windows"
|
||||||
|
IS_MACOS = platform.system() == "Darwin"
|
||||||
|
|
||||||
|
if IS_WINDOWS:
|
||||||
|
try:
|
||||||
|
import pygetwindow as gw
|
||||||
|
except ImportError:
|
||||||
|
gw = None
|
||||||
|
logger.warning("pygetwindow not available on this system")
|
||||||
|
|
||||||
|
def is_jan_running(jan_process_name="Jan.exe"):
    """Return True when a live process name contains ``jan_process_name``.

    The match is case-insensitive. Processes that vanish, deny access,
    or are zombies while being inspected are silently skipped.
    """
    target = jan_process_name.lower()
    for proc in psutil.process_iter(['pid', 'name']):
        try:
            name = proc.info['name']
            if name and target in name.lower():
                return True
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            continue
    return False
|
||||||
|
|
||||||
|
def force_close_jan(jan_process_name="Jan.exe"):
    """Kill every running process whose name contains ``jan_process_name``.

    After killing at least one process, sleeps three seconds so the
    processes can fully terminate before the caller continues.
    """
    logger.info("Checking for running Jan processes...")
    target = jan_process_name.lower()
    closed_any = False

    for proc in psutil.process_iter(['pid', 'name']):
        try:
            name = proc.info['name']
            if name and target in name.lower():
                logger.info(f"Force closing Jan process (PID: {proc.info['pid']})")
                proc.kill()
                closed_any = True
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            continue

    if not closed_any:
        logger.info("No Jan processes found running")
        return

    logger.info("Waiting for Jan processes to terminate...")
    time.sleep(3)  # Wait for processes to fully terminate
|
||||||
|
|
||||||
|
def find_jan_window_linux():
    """Locate the Jan window on Linux using ``wmctrl -l``.

    Returns:
        str | None: the X window id (first column of the matching
        ``wmctrl -l`` output line), or ``None`` when no window matches
        or wmctrl is missing / fails / times out.
    """
    try:
        result = subprocess.run(['wmctrl', '-l'], capture_output=True, text=True, timeout=10)
        if result.returncode == 0:
            for line in result.stdout.split('\n'):
                # Case-insensitive containment also covers the literal
                # "Jan"; the original extra exact-case check was redundant.
                if 'jan' in line.lower():
                    # Extract window ID (first column); the line is
                    # non-empty here because it matched above.
                    window_id = line.split()[0]
                    logger.info(f"Found Jan window with ID: {window_id}")
                    return window_id
    except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError) as e:
        logger.warning(f"wmctrl command failed: {e}")
    return None
|
||||||
|
|
||||||
|
def maximize_jan_window_linux():
    """Maximize the Jan window on Linux.

    First attempts wmctrl on the window id found by
    ``find_jan_window_linux``; if that fails, falls back to xdotool
    (activate the window, then send Alt+F10). Returns True when an
    attempt appears to succeed, False otherwise.
    """
    win_id = find_jan_window_linux()
    if win_id:
        try:
            # Ask the window manager to set both maximize hints at once.
            subprocess.run(
                ['wmctrl', '-i', '-r', win_id, '-b', 'add,maximized_vert,maximized_horz'],
                timeout=5,
            )
            logger.info("Jan window maximized using wmctrl")
            return True
        except (subprocess.TimeoutExpired, subprocess.SubprocessError) as exc:
            logger.warning(f"Failed to maximize with wmctrl: {exc}")

    # Fallback: locate the window with xdotool and send the common
    # Alt+F10 "maximize" shortcut.
    try:
        search = subprocess.run(
            ['xdotool', 'search', '--name', 'Jan'],
            capture_output=True, text=True, timeout=5,
        )
        found = search.stdout.strip()
        if search.returncode == 0 and found:
            win_id = found.splitlines()[0]
            subprocess.run(['xdotool', 'windowactivate', win_id], timeout=5)
            subprocess.run(['xdotool', 'key', 'alt+F10'], timeout=5)
            logger.info("Jan window maximized using xdotool")
            return True
    except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError) as exc:
        logger.warning(f"xdotool command failed: {exc}")

    return False
|
||||||
|
|
||||||
|
def find_jan_window_macos():
    """Find the Jan process on macOS via AppleScript (System Events).

    Returns:
        str | None: the process name reported by System Events when a
        process whose name contains "Jan" exists, otherwise None.
    """
    try:
        # AppleScript: ask System Events for processes whose name
        # contains "Jan"; returns "" when none exist.
        script = '''
        tell application "System Events"
            set janApps to (every process whose name contains "Jan")
            if length of janApps > 0 then
                return name of first item of janApps
            else
                return ""
            end if
        end tell
        '''
        result = subprocess.run(['osascript', '-e', script],
                                capture_output=True, text=True, timeout=10)
        # osascript prints the script's return value on stdout; an empty
        # string therefore means "no matching process".
        if result.returncode == 0 and result.stdout.strip():
            app_name = result.stdout.strip()
            logger.info(f"Found Jan app: {app_name}")
            return app_name
    except (subprocess.TimeoutExpired, FileNotFoundError, subprocess.SubprocessError) as e:
        logger.warning(f"AppleScript command failed: {e}")
    return None
|
||||||
|
|
||||||
|
def maximize_jan_window_macos():
    """Maximize (full-screen) the Jan window on macOS.

    Tries AppleScript first (sets window 1's AXFullScreen accessibility
    attribute); on failure falls back to the Cmd+Ctrl+F full-screen
    hotkey. Returns True when an attempt appears to succeed, False
    otherwise.
    """
    app_name = find_jan_window_macos()
    if app_name:
        try:
            # AppleScript: bring the app frontmost and flip its first
            # window into native full-screen via AXFullScreen.
            script = f'''
            tell application "System Events"
                tell process "{app_name}"
                    set frontmost to true
                    tell window 1
                        set value of attribute "AXFullScreen" to true
                    end tell
                end tell
            end tell
            '''
            result = subprocess.run(['osascript', '-e', script], timeout=10)
            if result.returncode == 0:
                logger.info("Jan window maximized using AppleScript")
                return True
        except (subprocess.TimeoutExpired, subprocess.SubprocessError) as e:
            logger.warning(f"Failed to maximize with AppleScript: {e}")

    # Fallback: send the Cmd+Ctrl+F full-screen hotkey via pyautogui.
    # NOTE(review): success is assumed here — pyautogui cannot confirm
    # that the window actually changed state.
    try:
        logger.info("Trying Cmd+Ctrl+F hotkey to maximize")
        pyautogui.hotkey('cmd', 'ctrl', 'f')
        time.sleep(1)
        logger.info("Attempted to maximize using Cmd+Ctrl+F")
        return True
    except Exception as e:
        logger.warning(f"Hotkey maximize failed: {e}")

    return False
|
||||||
|
|
||||||
|
def maximize_jan_window():
    """Find and maximize the Jan window (cross-platform dispatcher).

    Linux and macOS delegate to their platform helpers and return their
    result directly. Windows uses pygetwindow when available; when that
    path does not return, a generic Alt+Space, 'x' window-menu sequence
    is attempted. Any exception in those paths falls through to a
    last-resort platform-specific hotkey.

    Returns:
        bool: True when some maximize attempt was issued without error
        (actual success of the hotkey paths is not verified).
    """
    try:
        # Wait a bit for window to appear
        time.sleep(2)

        if IS_LINUX:
            return maximize_jan_window_linux()

        elif IS_MACOS:
            return maximize_jan_window_macos()

        elif IS_WINDOWS and gw:
            # Method 1: Try to find window by title containing "Jan"
            windows = gw.getWindowsWithTitle("Jan")
            if windows:
                jan_window = windows[0]
                logger.info(f"Found Jan window: {jan_window.title}")
                jan_window.maximize()
                logger.info("Jan window maximized using pygetwindow")
                return True

        # Method 2: Alt+Space opens the window's system menu, then 'x'
        # selects "Maximize". Reached on Windows when pygetwindow is
        # missing or found no Jan window.
        logger.info("Trying Alt+Space+X hotkey to maximize")
        pyautogui.hotkey('alt', 'space')
        time.sleep(0.5)
        pyautogui.press('x')
        logger.info("Attempted to maximize using Alt+Space+X")
        return True

    except Exception as e:
        logger.warning(f"Could not maximize Jan window: {e}")

        # Method 3: platform-specific maximize hotkey as a last resort.
        try:
            if IS_WINDOWS:
                logger.info("Trying Windows+Up arrow to maximize")
                pyautogui.hotkey('win', 'up')
            elif IS_LINUX:
                logger.info("Trying Alt+F10 to maximize")
                pyautogui.hotkey('alt', 'F10')
            elif IS_MACOS:
                logger.info("Trying macOS specific maximize")
                pyautogui.hotkey('cmd', 'tab')  # Switch to Jan if it's running
                time.sleep(0.5)
            return True
        except Exception as e2:
            logger.warning(f"All maximize methods failed: {e2}")
            return False
|
||||||
|
|
||||||
|
def _default_jan_app_path():
    """Return the platform-specific default location of the Jan executable.

    Raises:
        NotImplementedError: on unsupported platforms.
    """
    if IS_WINDOWS:
        return os.path.expanduser(r"~\AppData\Local\Programs\jan\Jan.exe")
    if IS_LINUX:
        return "/usr/bin/Jan"
    if IS_MACOS:
        return "/Applications/Jan.app/Contents/MacOS/Jan"  # Default macOS path
    raise NotImplementedError(f"Platform {platform.system()} not supported")


def _launch_jan_process(jan_app_path):
    """Spawn the Jan process for the current platform (does not wait).

    Raises:
        NotImplementedError: on unsupported platforms.
    """
    if IS_WINDOWS:
        subprocess.Popen([jan_app_path], shell=True)
    elif IS_LINUX:
        # Pass the current environment through (keeps DISPLAY etc.).
        env = os.environ.copy()
        subprocess.Popen([jan_app_path], env=env)
    elif IS_MACOS:
        # On macOS, 'open' launches .app bundles properly (registers with
        # the window server / Dock); prefer it whenever we can derive the
        # bundle path from the given executable path.
        if jan_app_path.endswith('.app/Contents/MacOS/Jan'):
            app_bundle = jan_app_path.replace('/Contents/MacOS/Jan', '')
            subprocess.Popen(['open', app_bundle])
        elif jan_app_path.endswith('.app'):
            subprocess.Popen(['open', jan_app_path])
        elif '/Contents/MacOS/' in jan_app_path:
            app_bundle = jan_app_path.split('/Contents/MacOS/')[0]
            subprocess.Popen(['open', app_bundle])
        else:
            # Fallback: try to execute directly
            subprocess.Popen([jan_app_path])
    else:
        raise NotImplementedError(f"Platform {platform.system()} not supported")


def start_jan_app(jan_app_path=None):
    """Start the Jan application and try to maximize its window.

    Args:
        jan_app_path: path to the Jan executable; when None a
            platform-specific default is used.

    Raises:
        FileNotFoundError: when the executable does not exist.
        NotImplementedError: on unsupported platforms.
        Exception: any launch error is logged and re-raised.
    """
    if jan_app_path is None:
        jan_app_path = _default_jan_app_path()

    logger.info(f"Starting Jan application from: {jan_app_path}")

    if not os.path.exists(jan_app_path):
        logger.error(f"Jan executable not found at: {jan_app_path}")
        raise FileNotFoundError(f"Jan app not found at {jan_app_path}")

    try:
        _launch_jan_process(jan_app_path)
        logger.info("Jan application started")

        # Wait for app to fully load before touching its window.
        logger.info("Waiting for Jan application to initialize...")
        time.sleep(5)

        if maximize_jan_window():
            logger.info("Jan application maximized successfully")
        else:
            logger.warning("Could not maximize Jan application window")

        # Generous settling time: the UI keeps loading after the window
        # appears, and the test agent needs a stable screen.
        time.sleep(10)
        logger.info("Jan application should be ready, waiting for additional setup...")
        time.sleep(10)  # Additional wait to ensure everything is ready

    except Exception as e:
        logger.error(f"Error starting Jan application: {e}")
        raise
|
||||||
|
|
||||||
|
def scan_test_files(tests_dir="tests"):
    """Scan ``tests_dir`` recursively for ``.txt`` test definitions.

    Args:
        tests_dir: directory to scan (default "tests").

    Returns:
        list[dict]: one ``{'path': <relative path str>, 'prompt': <file
        content, stripped>}`` entry per readable .txt file; an empty
        list when the directory does not exist. Unreadable files are
        logged and skipped.
    """
    test_files = []
    tests_path = Path(tests_dir)

    if not tests_path.exists():
        logger.error(f"Tests directory {tests_dir} does not exist!")
        return test_files

    # Sort the matches: rglob's order is filesystem-dependent, and a
    # deterministic discovery order gives reproducible test runs.
    for txt_file in sorted(tests_path.rglob("*.txt")):
        try:
            # Read file content
            with open(txt_file, 'r', encoding='utf-8') as f:
                content = f.read().strip()

            # Path relative to the tests root identifies the test case.
            relative_path = txt_file.relative_to(tests_path)

            test_files.append({
                'path': str(relative_path),
                'prompt': content
            })
            logger.info(f"Found test file: {relative_path}")
        except Exception as e:
            logger.error(f"Error reading file {txt_file}: {e}")

    return test_files
|
||||||
|
|
||||||
|
def get_latest_trajectory_folder(trajectory_base_path):
    """Return the full path of the newest trajectory sub-folder, or None.

    "Newest" is decided lexicographically by folder name, which is valid
    because trajectory folders are named with sortable timestamps
    (e.g. 20250715_100443).
    """
    if not os.path.exists(trajectory_base_path):
        logger.warning(f"Trajectory base path not found: {trajectory_base_path}")
        return None

    subfolders = [
        entry for entry in os.listdir(trajectory_base_path)
        if os.path.isdir(os.path.join(trajectory_base_path, entry))
    ]

    if not subfolders:
        logger.warning(f"No trajectory folders found in: {trajectory_base_path}")
        return None

    # Lexicographic maximum == newest timestamp-named folder.
    latest_folder = max(subfolders)
    full_path = os.path.join(trajectory_base_path, latest_folder)
    logger.info(f"Found latest trajectory folder: {full_path}")
    return full_path
|
||||||
71
core/CONTRIBUTING.md
Normal file
71
core/CONTRIBUTING.md
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
# Contributing to Jan Core
|
||||||
|
|
||||||
|
[← Back to Main Contributing Guide](../CONTRIBUTING.md)
|
||||||
|
|
||||||
|
TypeScript SDK providing extension system, APIs, and type definitions for all Jan components.
|
||||||
|
|
||||||
|
## Key Directories
|
||||||
|
|
||||||
|
- **`/src/browser`** - Core APIs (events, extensions, file system)
|
||||||
|
- **`/src/browser/extensions`** - Built-in extensions (assistant, inference, conversational)
|
||||||
|
- **`/src/types`** - TypeScript type definitions
|
||||||
|
- **`/src/test`** - Testing utilities
|
||||||
|
|
||||||
|
## Development
|
||||||
|
|
||||||
|
### Key Principles
|
||||||
|
|
||||||
|
1. **Platform Agnostic** - Works everywhere (browser, Node.js)
|
||||||
|
2. **Extension-Based** - New features = new extensions
|
||||||
|
3. **Type Everything** - TypeScript required
|
||||||
|
4. **Event-Driven** - Components communicate via events
|
||||||
|
|
||||||
|
### Building & Testing
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Build the SDK
|
||||||
|
yarn build
|
||||||
|
|
||||||
|
# Run tests
|
||||||
|
yarn test
|
||||||
|
|
||||||
|
# Watch mode
|
||||||
|
yarn test:watch
|
||||||
|
```
|
||||||
|
|
||||||
|
### Event System
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Emit events
|
||||||
|
events.emit('model:loaded', { modelId: 'llama-3' })
|
||||||
|
|
||||||
|
// Listen for events
|
||||||
|
events.on('model:loaded', (data) => {
|
||||||
|
console.log('Model loaded:', data.modelId)
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
describe('MyFeature', () => {
|
||||||
|
it('should do something', () => {
|
||||||
|
const result = doSomething()
|
||||||
|
expect(result).toBe('expected')
|
||||||
|
})
|
||||||
|
})
|
||||||
|
```
|
||||||
|
|
||||||
|
## Best Practices
|
||||||
|
|
||||||
|
- Keep it simple
|
||||||
|
- Use TypeScript fully (no `any`)
|
||||||
|
- Write tests for critical features
|
||||||
|
- Follow existing patterns
|
||||||
|
- Export new modules in index files
|
||||||
|
|
||||||
|
## Dependencies
|
||||||
|
|
||||||
|
- **TypeScript** - Type safety
|
||||||
|
- **Rolldown** - Bundling
|
||||||
|
- **Vitest** - Testing
|
||||||
@ -8,10 +8,7 @@
|
|||||||
|
|
||||||
```js
|
```js
|
||||||
// Web / extension runtime
|
// Web / extension runtime
|
||||||
import * as core from "@janhq/core";
|
import * as core from '@janhq/core'
|
||||||
|
|
||||||
// Node runtime
|
|
||||||
import * as node from "@janhq/core/node";
|
|
||||||
```
|
```
|
||||||
|
|
||||||
## Build an Extension
|
## Build an Extension
|
||||||
@ -19,26 +16,27 @@ import * as node from "@janhq/core/node";
|
|||||||
1. Download an extension template, for example, [https://github.com/janhq/extension-template](https://github.com/janhq/extension-template).
|
1. Download an extension template, for example, [https://github.com/janhq/extension-template](https://github.com/janhq/extension-template).
|
||||||
|
|
||||||
2. Update the source code:
|
2. Update the source code:
|
||||||
|
|
||||||
1. Open `index.ts` in your code editor.
|
1. Open `index.ts` in your code editor.
|
||||||
2. Rename the extension class from `SampleExtension` to your preferred extension name.
|
2. Rename the extension class from `SampleExtension` to your preferred extension name.
|
||||||
3. Import modules from the core package.
|
3. Import modules from the core package.
|
||||||
```ts
|
```ts
|
||||||
import * as core from "@janhq/core";
|
import * as core from '@janhq/core'
|
||||||
```
|
```
|
||||||
4. In the `onLoad()` method, add your code:
|
4. In the `onLoad()` method, add your code:
|
||||||
|
|
||||||
```ts
|
```ts
|
||||||
// Example of listening to app events and providing customized inference logic:
|
// Example of listening to app events and providing customized inference logic:
|
||||||
import * as core from "@janhq/core";
|
import * as core from '@janhq/core'
|
||||||
|
|
||||||
export default class MyExtension extends BaseExtension {
|
export default class MyExtension extends BaseExtension {
|
||||||
// On extension load
|
// On extension load
|
||||||
onLoad() {
|
onLoad() {
|
||||||
core.events.on(MessageEvent.OnMessageSent, (data) => MyExtension.inference(data, this));
|
core.events.on(MessageEvent.OnMessageSent, (data) => MyExtension.inference(data, this))
|
||||||
}
|
}
|
||||||
|
|
||||||
// Customized inference logic
|
// Customized inference logic
|
||||||
private static inference(incomingMessage: MessageRequestData) {
|
private static inference(incomingMessage: MessageRequestData) {
|
||||||
|
|
||||||
// Prepare customized message content
|
// Prepare customized message content
|
||||||
const content: ThreadContent = {
|
const content: ThreadContent = {
|
||||||
type: ContentType.Text,
|
type: ContentType.Text,
|
||||||
@ -46,16 +44,17 @@ import * as node from "@janhq/core/node";
|
|||||||
value: "I'm Jan Assistant!",
|
value: "I'm Jan Assistant!",
|
||||||
annotations: [],
|
annotations: [],
|
||||||
},
|
},
|
||||||
};
|
}
|
||||||
|
|
||||||
// Modify message and send out
|
// Modify message and send out
|
||||||
const outGoingMessage: ThreadMessage = {
|
const outGoingMessage: ThreadMessage = {
|
||||||
...incomingMessage,
|
...incomingMessage,
|
||||||
content
|
content,
|
||||||
};
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
3. Build the extension:
|
3. Build the extension:
|
||||||
1. Navigate to the extension directory.
|
1. Navigate to the extension directory.
|
||||||
2. Install dependencies.
|
2. Install dependencies.
|
||||||
|
|||||||
@ -1,7 +0,0 @@
|
|||||||
module.exports = {
|
|
||||||
preset: 'ts-jest',
|
|
||||||
testEnvironment: 'node',
|
|
||||||
moduleNameMapper: {
|
|
||||||
'@/(.*)': '<rootDir>/src/$1',
|
|
||||||
},
|
|
||||||
}
|
|
||||||
@ -1,64 +1,53 @@
|
|||||||
{
|
{
|
||||||
"name": "@janhq/core",
|
"name": "@janhq/core",
|
||||||
"version": "0.1.10",
|
"version": "0.1.10",
|
||||||
"description": "Jan app core lib",
|
"description": "Core library for the Jan AI application framework",
|
||||||
"keywords": [
|
"keywords": [
|
||||||
"jan",
|
"jan",
|
||||||
"core"
|
"core"
|
||||||
],
|
],
|
||||||
"homepage": "https://jan.ai",
|
"homepage": "https://jan.ai",
|
||||||
"license": "AGPL-3.0",
|
"license": "AGPL-3.0",
|
||||||
"main": "dist/core.es5.js",
|
"main": "dist/index.js",
|
||||||
"module": "dist/core.cjs.js",
|
|
||||||
"typings": "dist/types/index.d.ts",
|
"typings": "dist/types/index.d.ts",
|
||||||
"files": [
|
"files": [
|
||||||
"dist",
|
"dist",
|
||||||
"types"
|
"types"
|
||||||
],
|
],
|
||||||
"author": "Jan <service@jan.ai>",
|
"author": "Jan <service@jan.ai>",
|
||||||
"exports": {
|
|
||||||
".": "./dist/core.es5.js",
|
|
||||||
"./node": "./dist/node/index.cjs.js"
|
|
||||||
},
|
|
||||||
"typesVersions": {
|
|
||||||
"*": {
|
|
||||||
".": [
|
|
||||||
"./dist/core.es5.js.map",
|
|
||||||
"./dist/types/index.d.ts"
|
|
||||||
],
|
|
||||||
"node": [
|
|
||||||
"./dist/node/index.cjs.js.map",
|
|
||||||
"./dist/types/node/index.d.ts"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'",
|
"lint": "tslint --project tsconfig.json -t codeFrame 'src/**/*.ts' 'test/**/*.ts'",
|
||||||
"test": "jest",
|
"test": "vitest run",
|
||||||
|
"test:watch": "vitest",
|
||||||
|
"test:ui": "vitest --ui",
|
||||||
|
"test:coverage": "vitest run --coverage",
|
||||||
"prebuild": "rimraf dist",
|
"prebuild": "rimraf dist",
|
||||||
"build": "tsc --module commonjs && rollup -c rollup.config.ts",
|
"build": "tsc -p . && rolldown -c rolldown.config.mjs"
|
||||||
"start": "rollup -c rollup.config.ts -w"
|
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@rollup/plugin-replace": "^5.0.5",
|
"@npmcli/arborist": "^7.1.0",
|
||||||
"@types/jest": "^29.5.12",
|
"@types/node": "^22.10.0",
|
||||||
"@types/node": "^20.11.4",
|
"@types/react": "19.1.2",
|
||||||
|
"@vitest/coverage-v8": "^2.1.8",
|
||||||
|
"@vitest/ui": "^2.1.8",
|
||||||
"eslint": "8.57.0",
|
"eslint": "8.57.0",
|
||||||
"eslint-plugin-jest": "^27.9.0",
|
"happy-dom": "^20.0.0",
|
||||||
"jest": "^29.7.0",
|
"pacote": "^21.0.0",
|
||||||
"rimraf": "^3.0.2",
|
"react": "19.0.0",
|
||||||
"rollup": "^2.38.5",
|
"request": "^2.88.2",
|
||||||
"rollup-plugin-commonjs": "^9.1.8",
|
"request-progress": "^3.0.0",
|
||||||
"rollup-plugin-json": "^3.1.0",
|
"rimraf": "^6.0.1",
|
||||||
"rollup-plugin-node-resolve": "^5.2.0",
|
"rolldown": "1.0.0-beta.1",
|
||||||
"rollup-plugin-sourcemaps": "^0.6.3",
|
|
||||||
"rollup-plugin-typescript2": "^0.36.0",
|
|
||||||
"ts-jest": "^29.1.2",
|
|
||||||
"tslib": "^2.6.2",
|
"tslib": "^2.6.2",
|
||||||
"typescript": "^5.3.3"
|
"typescript": "^5.8.3",
|
||||||
|
"vitest": "^2.1.8"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"rxjs": "^7.8.1",
|
"rxjs": "^7.8.1",
|
||||||
"ulidx": "^2.3.0"
|
"ulidx": "^2.3.0"
|
||||||
}
|
},
|
||||||
|
"peerDependencies": {
|
||||||
|
"react": "19.0.0"
|
||||||
|
},
|
||||||
|
"packageManager": "yarn@4.5.3"
|
||||||
}
|
}
|
||||||
|
|||||||
19
core/rolldown.config.mjs
Normal file
19
core/rolldown.config.mjs
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
// Rolldown build configuration for the browser (ESM) bundle of @janhq/core.
import { defineConfig } from 'rolldown'
import pkgJson from './package.json' with { type: 'json' }

export default defineConfig([
  {
    input: 'src/index.ts',
    output: {
      format: 'esm',
      file: 'dist/index.js',
      sourcemap: true,
    },
    platform: 'browser',
    // React and Node's 'path' are supplied by the host app; keep them external.
    external: ['path', 'react', 'react-dom', 'react/jsx-runtime'],
    define: {
      // NOTE(review): pkgJson.node is not a standard package.json field and
      // may be undefined here, producing "@janhq/core/undefined" — confirm
      // the intended value of the NODE constant.
      NODE: JSON.stringify(`${pkgJson.name}/${pkgJson.node}`),
      VERSION: JSON.stringify(pkgJson.version),
    },
  },
])
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
x
Reference in New Issue
Block a user