Compare commits
862 Commits
sdk
...
feature/ag
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a25b173f98 | ||
|
|
d7944a7051 | ||
|
|
67917ea7d3 | ||
|
|
e3287115c5 | ||
|
|
855c1f1b07 | ||
|
|
dc815664c4 | ||
|
|
e08bbb142a | ||
|
|
58e0b166cb | ||
|
|
2a678bb017 | ||
|
|
5664456b77 | ||
|
|
3685b7e57e | ||
|
|
989d5f73b1 | ||
|
|
4f84073cb5 | ||
|
|
c190295c34 | ||
|
|
60875644a1 | ||
|
|
113b09ad01 | ||
|
|
2605d0e671 | ||
|
|
d232b91d31 | ||
|
|
c65db31fd9 | ||
|
|
99871805bd | ||
|
|
e8ef39adad | ||
|
|
466b9217b5 | ||
|
|
c9a7f519b9 | ||
|
|
96ae532879 | ||
|
|
eda08d5b0f | ||
|
|
7c12b58bb5 | ||
|
|
5446c89bc0 | ||
|
|
2d0251e585 | ||
|
|
f41710c892 | ||
|
|
df3f79f282 | ||
|
|
f8df692865 | ||
|
|
0c6d3b188d | ||
|
|
e7a38863ab | ||
|
|
720e0fcdab | ||
|
|
bf8ff84522 | ||
|
|
5a9510238e | ||
|
|
7b3c74179b | ||
|
|
cd70fa4c32 | ||
|
|
83133ced6a | ||
|
|
6c5179a179 | ||
|
|
e33ab39b85 | ||
|
|
9567bcec1b | ||
|
|
550b16dc0b | ||
|
|
5d8331b7f7 | ||
|
|
e35b643e51 | ||
|
|
bc6a92677b | ||
|
|
f52072e6ec | ||
|
|
9c43c43a46 | ||
|
|
0430e0f930 | ||
|
|
b945243d1a | ||
|
|
d8484a8b26 | ||
|
|
3c27499795 | ||
|
|
7c772e873d | ||
|
|
db2fab245e | ||
|
|
a9c9917f1a | ||
|
|
23e2e9e9cc | ||
|
|
2369e92460 | ||
|
|
a53b15f2a3 | ||
|
|
72eb8b1eb6 | ||
|
|
4db54f3b83 | ||
|
|
24eb27f005 | ||
|
|
009d76ea35 | ||
|
|
6e8a425eb1 | ||
|
|
66188d791b | ||
|
|
015ff02d71 | ||
|
|
10bfaf5415 | ||
|
|
e3e0b85e0c | ||
|
|
ad0632892e | ||
|
|
f26791ba39 | ||
|
|
2fbaaebf44 | ||
|
|
edb916338c | ||
|
|
f7e947d37d | ||
|
|
a9e3d1ed75 | ||
|
|
ce97827c42 | ||
|
|
3efec07338 | ||
|
|
68f401bfa3 | ||
|
|
1ea525feaa | ||
|
|
57c4a7527e | ||
|
|
5aa9c045e1 | ||
|
|
6f1900f3bb | ||
|
|
bc62de795e | ||
|
|
c62ca4b183 | ||
|
|
876e5bc683 | ||
|
|
b99f3b73cd | ||
|
|
7eecf29449 | ||
|
|
1d331d7810 | ||
|
|
68414678d8 | ||
|
|
2f6b9dac26 | ||
|
|
d1812d875b | ||
|
|
723dea100f | ||
|
|
c4419ed31f | ||
|
|
754ab86e51 | ||
|
|
04dab532cd | ||
|
|
add01ebc68 | ||
|
|
1cc9a1a30b | ||
|
|
92a1de7500 | ||
|
|
a6fedcff80 | ||
|
|
55eb999305 | ||
|
|
377b7b12ce | ||
|
|
ba2906a42e | ||
|
|
ee27f14be0 | ||
|
|
46c8be63a7 | ||
|
|
7ba66c419a | ||
|
|
340775a593 | ||
|
|
35d2ec8a44 | ||
|
|
2983b9950f | ||
|
|
dbf08a6cf8 | ||
|
|
28f31be36f | ||
|
|
3ec4db0225 | ||
|
|
f5688e077a | ||
|
|
2464d255d5 | ||
|
|
586d950b8c | ||
|
|
e7469388cc | ||
|
|
ab6ca8e16a | ||
|
|
02413a4fac | ||
|
|
05b8dd9ad8 | ||
|
|
29c9419a6e | ||
|
|
90e61989a4 | ||
|
|
b1f9f90fec | ||
|
|
b40849f672 | ||
|
|
44560c8da8 | ||
|
|
46fd01c264 | ||
|
|
100695c262 | ||
|
|
54b5a4ae55 | ||
|
|
ffb252962b | ||
|
|
ae31270e63 | ||
|
|
9b2b54d585 | ||
|
|
e1ccc583a3 | ||
|
|
7750e33f82 | ||
|
|
d2c4741f0b | ||
|
|
c79c4f6bde | ||
|
|
3849d0d1a9 | ||
|
|
8bd71ccd5e | ||
|
|
b731f7fb64 | ||
|
|
cd554f77f3 | ||
|
|
8c977c51ca | ||
|
|
a3252f9671 | ||
|
|
9bc945f76f | ||
|
|
f6b4dfffb6 | ||
|
|
68955c29cb | ||
|
|
97e4d036dc | ||
|
|
0f49f54c29 | ||
|
|
828e13adbb | ||
|
|
e6f0067728 | ||
|
|
5c473eb9cc | ||
|
|
2adf34fbaf | ||
|
|
05dd760388 | ||
|
|
2cf4864078 | ||
|
|
df4c92672f | ||
|
|
5b173315f9 | ||
|
|
c85ea7d8fa | ||
|
|
113154702f | ||
|
|
33ae46f76a | ||
|
|
27272680a2 | ||
|
|
b1621f6b34 | ||
|
|
2c65033c0a | ||
|
|
dcfbaa9243 | ||
|
|
accef65ede | ||
|
|
50755d8ba3 | ||
|
|
47b6509f70 | ||
|
|
89f3fdc05f | ||
|
|
03f8b73627 | ||
|
|
2e6e9635c3 | ||
|
|
6a312e3fdd | ||
|
|
79dbbdf6b4 | ||
|
|
0e8961efe3 | ||
|
|
fc2be42418 | ||
|
|
ab4336cfd7 | ||
|
|
20d3b5288c | ||
|
|
63a29d3a4a | ||
|
|
31856d9895 | ||
|
|
f51dcf23d6 | ||
|
|
6ecaeb4fde | ||
|
|
1883c9666e | ||
|
|
4b4cf76641 | ||
|
|
0016b4bd72 | ||
|
|
495bbecc01 | ||
|
|
e6af7e9885 | ||
|
|
5318cccc5f | ||
|
|
99739575d4 | ||
|
|
b8ff331ccc | ||
|
|
9e63f3f7c6 | ||
|
|
6f9069a4fb | ||
|
|
a18ab7f1e9 | ||
|
|
05162ca350 | ||
|
|
be0371fb11 | ||
|
|
fa3329abf2 | ||
|
|
e830fade06 | ||
|
|
ac392dcb96 | ||
|
|
e662b2f393 | ||
|
|
00a5fdf491 | ||
|
|
63bc71da13 | ||
|
|
7fff9579c0 | ||
|
|
737beb11f6 | ||
|
|
f55af7da4c | ||
|
|
80461a78b0 | ||
|
|
40d194672b | ||
|
|
d63341ea06 | ||
|
|
df8c8dc93b | ||
|
|
dd3a140cb1 | ||
|
|
1b006599cf | ||
|
|
44aa3cc9b5 | ||
|
|
b88b24e231 | ||
|
|
890c31ba74 | ||
|
|
6dc9a11a89 | ||
|
|
ce2842d365 | ||
|
|
7d1096dbd8 | ||
|
|
95722802dc | ||
|
|
3047dae703 | ||
|
|
95cad7bdd9 | ||
|
|
4e22f13007 | ||
|
|
04611b0ae2 | ||
|
|
a00f1ab549 | ||
|
|
446b37793b | ||
|
|
b2b98643d8 | ||
|
|
b83eeeb131 | ||
|
|
e8d727c07a | ||
|
|
bb8109f67d | ||
|
|
e28fa26c43 | ||
|
|
639fc3793a | ||
|
|
2aaae5265a | ||
|
|
baa4c1fd25 | ||
|
|
479797361e | ||
|
|
0a9f1d2a27 | ||
|
|
5e103770fd | ||
|
|
e012a29b5e | ||
|
|
5d759f810c | ||
|
|
eb1f3a0ced | ||
|
|
29e8210782 | ||
|
|
45ca9405d3 | ||
|
|
e6f02bf8f7 | ||
|
|
57e75e3614 | ||
|
|
89ab67e067 | ||
|
|
e9d851e4d3 | ||
|
|
c675d0feee | ||
|
|
1859c0505e | ||
|
|
f15251096c | ||
|
|
115c599fd8 | ||
|
|
3121c08ee8 | ||
|
|
ef28b01286 | ||
|
|
a5bac39196 | ||
|
|
9f640b24b3 | ||
|
|
f48750c22c | ||
|
|
7a96e94491 | ||
|
|
22a32af750 | ||
|
|
dd423f2e7b | ||
|
|
12dec676db | ||
|
|
75e7556bfa | ||
|
|
504f1a8e97 | ||
|
|
e4a2af6ae7 | ||
|
|
fefa88fc2a | ||
|
|
ed8a7ee8a5 | ||
|
|
1771797453 | ||
|
|
46179f5c83 | ||
|
|
db6fc661a6 | ||
|
|
beb3a9f60a | ||
|
|
c088ab7a79 | ||
|
|
aab2b8fdbc | ||
|
|
b1e7a717af | ||
|
|
25e38bfc98 | ||
|
|
279c7324c4 | ||
|
|
1c90303914 | ||
|
|
6ab6502742 | ||
|
|
b79c029f21 | ||
|
|
020268fe67 | ||
|
|
176b1c9d20 | ||
|
|
5ab2efa0c0 | ||
|
|
88320488a7 | ||
|
|
2091abeea2 | ||
|
|
480f5c1a9a | ||
|
|
8e0db2705f | ||
|
|
1be9cdae67 | ||
|
|
e1a91a7e53 | ||
|
|
b952e3183f | ||
|
|
26ae0bf207 | ||
|
|
42cfd69463 | ||
|
|
7694b68e06 | ||
|
|
28e39c57bd | ||
|
|
2fa0a57d2b | ||
|
|
c9f3e1bdab | ||
|
|
2ba56b8c59 | ||
|
|
fb074c8c32 | ||
|
|
9fc082d1e6 | ||
|
|
dfda2f7d5d | ||
|
|
0c04802560 | ||
|
|
5146689158 | ||
|
|
e7fa94c3d3 | ||
|
|
db0695126f | ||
|
|
eec5cf6b65 | ||
|
|
a9569d0ed9 | ||
|
|
88d9388be2 | ||
|
|
93c72ecea5 | ||
|
|
b5b0ac50bd | ||
|
|
4d2afdb1a9 | ||
|
|
39a177bd70 | ||
|
|
34fb6ac837 | ||
|
|
f868a454d9 | ||
|
|
24c6cd235b | ||
|
|
47855dc78b | ||
|
|
751ceab04e | ||
|
|
dbbc42c5fd | ||
|
|
27416efb6d | ||
|
|
21dd08544b | ||
|
|
ae88f7d181 | ||
|
|
9981ee7601 | ||
|
|
66b018a355 | ||
|
|
b6c48d0f98 | ||
|
|
097d77f7b3 | ||
|
|
ed1bc6c215 | ||
|
|
c552fdfc0f | ||
|
|
4006dba9f1 | ||
|
|
7a0586684b | ||
|
|
8f34d1c555 | ||
|
|
571db5c0ee | ||
|
|
9059855f2b | ||
|
|
e423678995 | ||
|
|
ece5577f26 | ||
|
|
f373abdd14 | ||
|
|
4defec194f | ||
|
|
5270a6781f | ||
|
|
fa93e195cb | ||
|
|
72898d897c | ||
|
|
c6ee65b654 | ||
|
|
4d7694de24 | ||
|
|
a083f25b6c | ||
|
|
befa9eb16d | ||
|
|
a278c630bb | ||
|
|
6a8d8babce | ||
|
|
76eb0f1775 | ||
|
|
0abe08f243 | ||
|
|
f692ebbbb9 | ||
|
|
c174b65465 | ||
|
|
015131f198 | ||
|
|
a730543c76 | ||
|
|
c704626a39 | ||
|
|
7ef25a3816 | ||
|
|
b43ad93c54 | ||
|
|
7850681ce1 | ||
|
|
846189b15b | ||
|
|
46a893a8b6 | ||
|
|
657aac0d68 | ||
|
|
30885cee01 | ||
|
|
9237984782 | ||
|
|
c289629a28 | ||
|
|
806196f572 | ||
|
|
0e598660b4 | ||
|
|
058bfe0737 | ||
|
|
81932c8cff | ||
|
|
bd7adafee0 | ||
|
|
faf0c2b816 | ||
|
|
419d4986f6 | ||
|
|
9f1a9a7d9c | ||
|
|
a3e7e7c6c9 | ||
|
|
94a5075b6d | ||
|
|
7c32404b69 | ||
|
|
d0c2dc53fe | ||
|
|
0e8530172c | ||
|
|
4427aeac54 | ||
|
|
93640bb08e | ||
|
|
512ed71fc3 | ||
|
|
0cfc43c444 | ||
|
|
ecd0edc29e | ||
|
|
6168a006f4 | ||
|
|
82ba5dad1b | ||
|
|
972ee8e42e | ||
|
|
7cd3f285ad | ||
|
|
89e327383e | ||
|
|
290a15bbd9 | ||
|
|
1dd21f1f76 | ||
|
|
46b3f83ce2 | ||
|
|
5c153c9e21 | ||
|
|
bca75a3ea4 | ||
|
|
0bc6f972b2 | ||
|
|
36cc9cc1ec | ||
|
|
20f6a5e797 | ||
|
|
ccbb68aa0c | ||
|
|
08003c59b6 | ||
|
|
dafa638558 | ||
|
|
75e5250509 | ||
|
|
0ed6eb7029 | ||
|
|
63e26b6050 | ||
|
|
949f1c648a | ||
|
|
3e7578d670 | ||
|
|
6f07ec2597 | ||
|
|
e65c0a0d1d | ||
|
|
be217b5354 | ||
|
|
bfe3029d31 | ||
|
|
6abdc39fe5 | ||
|
|
bf55367f4d | ||
|
|
9480758310 | ||
|
|
25b33fb031 | ||
|
|
10ede0d21c | ||
|
|
698bdd619f | ||
|
|
5cef6874f6 | ||
|
|
6d42ae2629 | ||
|
|
a3b94816f9 | ||
|
|
e0b47feb8b | ||
|
|
8aecec0b9a | ||
|
|
078bf41029 | ||
|
|
2754302fb7 | ||
|
|
dfb7658c3e | ||
|
|
a743785faf | ||
|
|
e4782dee68 | ||
|
|
64315df85f | ||
|
|
2a1fd16849 | ||
|
|
21e31d540e | ||
|
|
370c38ec76 | ||
|
|
854044229c | ||
|
|
69baa44a3a | ||
|
|
419e3f7f2b | ||
|
|
a9373d9779 | ||
|
|
1a0536d212 | ||
|
|
099b77cf9b | ||
|
|
c3d17bf847 | ||
|
|
e04b93a51a | ||
|
|
b36b62c68e | ||
|
|
ab465a755e | ||
|
|
c6f19db1ec | ||
|
|
019142efc9 | ||
|
|
a535fc17c3 | ||
|
|
0fbb18b315 | ||
|
|
3eb0093d2a | ||
|
|
d159dde2ca | ||
|
|
729a510c5b | ||
|
|
196561fed2 | ||
|
|
8f0bdcd172 | ||
|
|
fffc7f4098 | ||
|
|
c7a2e7ada1 | ||
|
|
95611e9c4b | ||
|
|
62fc6afd8a | ||
|
|
0f5cec0a60 | ||
|
|
d235ebaac9 | ||
|
|
6def083b4f | ||
|
|
87322744d4 | ||
|
|
f2a02b392e | ||
|
|
e6cedc257e | ||
|
|
1b5cf2d272 | ||
|
|
f76e822381 | ||
|
|
a2b1968d6e | ||
|
|
398eb13a7f | ||
|
|
956c8a8e03 | ||
|
|
6aba166c82 | ||
|
|
fd7c7ea6b7 | ||
|
|
d85e621bb3 | ||
|
|
822dd5e100 | ||
|
|
25801f374c | ||
|
|
8fd2d0b35c | ||
|
|
c16d8a1da1 | ||
|
|
ab1fdf69c8 | ||
|
|
dd196c0e11 | ||
|
|
0e506f5716 | ||
|
|
0a98ccff0c | ||
|
|
0c188f6d10 | ||
|
|
8009dd691b | ||
|
|
13d0e9914b | ||
|
|
9da49be44d | ||
|
|
00f7fa507b | ||
|
|
2c255b6dfe | ||
|
|
6e2cf8bb3f | ||
|
|
68ed1c80ce | ||
|
|
e0d23f4436 | ||
|
|
509f8a5353 | ||
|
|
b0c0cd7fda | ||
|
|
133dfd5063 | ||
|
|
e6abf4e33b | ||
|
|
07104b18f5 | ||
|
|
f39b85abf2 | ||
|
|
c6c97491ac | ||
|
|
355452cdb3 | ||
|
|
da3720c7a9 | ||
|
|
e92d4ff147 | ||
|
|
bb514d6216 | ||
|
|
3f380fa0da | ||
|
|
5aefb707fa | ||
|
|
4afd3c2322 | ||
|
|
b8eb8a90a5 | ||
|
|
4d6cb091cc | ||
|
|
2c12af5af8 | ||
|
|
bd4d89fc21 | ||
|
|
9487529992 | ||
|
|
fa347fd49d | ||
|
|
8f7072d7e9 | ||
|
|
412c5d68cc | ||
|
|
e06b068033 | ||
|
|
6234391229 | ||
|
|
2568bfde5e | ||
|
|
fd7c2fbe93 | ||
|
|
206c185a3b | ||
|
|
7689cbbe0d | ||
|
|
b57a9351b3 | ||
|
|
f0ae9e21ae | ||
|
|
9510c92288 | ||
|
|
755f3f05d8 | ||
|
|
5d8114b475 | ||
|
|
0ccbb52c1f | ||
|
|
85b39ecf99 | ||
|
|
230838c22b | ||
|
|
a7bfcdcb01 | ||
|
|
47ff630c55 | ||
|
|
70dc53bda7 | ||
|
|
0b8a142de0 | ||
|
|
7e1b433c17 | ||
|
|
800b0763e4 | ||
|
|
30aabe255b | ||
|
|
9b14d714ca | ||
|
|
8a38666105 | ||
|
|
ec878defab | ||
|
|
1786b70e14 | ||
|
|
7f525fa7dc | ||
|
|
e08d93b2aa | ||
|
|
3a5ee4a296 | ||
|
|
7b8a0114f5 | ||
|
|
003d110948 | ||
|
|
8b89e03999 | ||
|
|
9eff920989 | ||
|
|
711c82472c | ||
|
|
156bf02d21 | ||
|
|
932b53d92d | ||
|
|
2693b9a42d | ||
|
|
e9166c4a7d | ||
|
|
2bc64920dd | ||
|
|
aee5500833 | ||
|
|
6b336b7b2f | ||
|
|
f07992c091 | ||
|
|
3c0e77241d | ||
|
|
87461c7f72 | ||
|
|
a67f2b4976 | ||
|
|
8594781780 | ||
|
|
313e415ee9 | ||
|
|
c13d8f3699 | ||
|
|
e41f8f1d0f | ||
|
|
b2c8907635 | ||
|
|
05f4df1a30 | ||
|
|
35fe06a892 | ||
|
|
75ff541aec | ||
|
|
cd933ce6e4 | ||
|
|
0b93988450 | ||
|
|
056cab23e0 | ||
|
|
6bc8027644 | ||
|
|
3b9298ed2b | ||
|
|
12a323f691 | ||
|
|
9c4c211233 | ||
|
|
74ba68ff2c | ||
|
|
7273b37c16 | ||
|
|
0d4ebffc0e | ||
|
|
352b2fb4e7 | ||
|
|
6e6ef57303 | ||
|
|
cc1f14e5e9 | ||
|
|
1c419d5c65 | ||
|
|
71b83245b4 | ||
|
|
2b88555028 | ||
|
|
f021ad9b0a | ||
|
|
8884f64b4e | ||
|
|
dd790dceb5 | ||
|
|
b80e41503f | ||
|
|
8dfc5052e9 | ||
|
|
7f28fc17ca | ||
|
|
2c308ccd35 | ||
|
|
4d6dd44e10 | ||
|
|
b6992e32a5 | ||
|
|
231859303d | ||
|
|
1acdd67fd9 | ||
|
|
bec63a9471 | ||
|
|
44e856e8dc | ||
|
|
3bab7678b7 | ||
|
|
61f68d9e1b | ||
|
|
94f1562ec5 | ||
|
|
46412acd13 | ||
|
|
e7426ea365 | ||
|
|
665eef68b9 | ||
|
|
7c63d4012f | ||
|
|
92be4e774e | ||
|
|
2395502e60 | ||
|
|
9f3902b48d | ||
|
|
6e76bcb77e | ||
|
|
e05a95dc2d | ||
|
|
86d61d698a | ||
|
|
8ce6535a7e | ||
|
|
65ca038eee | ||
|
|
f41f5ebebd | ||
|
|
9cf62f03fa | ||
|
|
f770d5072e | ||
|
|
5698b830ed | ||
|
|
bcc76dd60a | ||
|
|
70d4a0c022 | ||
|
|
8cfd994170 | ||
|
|
22d8d08355 | ||
|
|
641e829e3f | ||
|
|
f9edff8bf4 | ||
|
|
33e6be1ca6 | ||
|
|
e25c50a467 | ||
|
|
f8441ab42e | ||
|
|
4589d4b3f5 | ||
|
|
9cf720e040 | ||
|
|
cf793f7f49 | ||
|
|
2b3fddfe89 | ||
|
|
e148f143ea | ||
|
|
d202cb731d | ||
|
|
299d9998ad | ||
|
|
fba1484e2e | ||
|
|
4ab7300376 | ||
|
|
18cc5e0ee8 | ||
|
|
af0cda5dbf | ||
|
|
a730a3719b | ||
|
|
3b669193f6 | ||
|
|
c782bab296 | ||
|
|
b14646ebd9 | ||
|
|
7441de5fd9 | ||
|
|
f5360cb8d4 | ||
|
|
22cd2e3337 | ||
|
|
7e9d453a2c | ||
|
|
a4338b0d03 | ||
|
|
a35baca580 | ||
|
|
66b0108c51 | ||
|
|
2021431e2f | ||
|
|
ab836c6922 | ||
|
|
405b3be496 | ||
|
|
4a27128a1c | ||
|
|
c74bdc97ca | ||
|
|
ddd5e4c76d | ||
|
|
5e6a7e134f | ||
|
|
41bc519855 | ||
|
|
53d82618d9 | ||
|
|
57f548c6c0 | ||
|
|
8d83f64aba | ||
|
|
9162697117 | ||
|
|
47b19e3211 | ||
|
|
590f6d4c19 | ||
|
|
53108e816f | ||
|
|
3ac71e2f7f | ||
|
|
f4fadd366e | ||
|
|
cc38dab76f | ||
|
|
c8be701f0e | ||
|
|
417befb2be | ||
|
|
a0ce7f38e7 | ||
|
|
962e3d8e56 | ||
|
|
3a3df96996 | ||
|
|
2ffa632796 | ||
|
|
3c6c0b253d | ||
|
|
5f40fd6038 | ||
|
|
8e2dc8b3ee | ||
|
|
a02b531e47 | ||
|
|
a4cb2708cc | ||
|
|
973284607d | ||
|
|
28fd2f0314 | ||
|
|
9715873007 | ||
|
|
18a20407f6 | ||
|
|
1a396cfc7b | ||
|
|
e604c914d1 | ||
|
|
a310c160a5 | ||
|
|
45d50b12fd | ||
|
|
e87182264a | ||
|
|
a089d544a5 | ||
|
|
b6fe0be1b2 | ||
|
|
ba325b1581 | ||
|
|
1f47abf195 | ||
|
|
750f35bc36 | ||
|
|
c99d9d95c5 | ||
|
|
4d402b2600 | ||
|
|
64fb002168 | ||
|
|
1308b5bcf3 | ||
|
|
dc3dc4a1f0 | ||
|
|
99bb55af73 | ||
|
|
4a285225db | ||
|
|
d986bd2a6c | ||
|
|
8665342edf | ||
|
|
2e7c3bf789 | ||
|
|
31ea0fe3fe | ||
|
|
e0c9f8a5aa | ||
|
|
a17ec4221b | ||
|
|
328beaba35 | ||
|
|
efbbaa5741 | ||
|
|
14be2fa344 | ||
|
|
f3ccad192c | ||
|
|
8410929e86 | ||
|
|
093a5d4ddf | ||
|
|
88028412bd | ||
|
|
11c93231aa | ||
|
|
5366b4c873 | ||
|
|
171e0ed312 | ||
|
|
a5b1b4e103 | ||
|
|
f50ddb436f | ||
|
|
0b4b091580 | ||
|
|
2f6d7ac128 | ||
|
|
6b990e1cee | ||
|
|
ddeed65994 | ||
|
|
d87748fda1 | ||
|
|
50f0ead113 | ||
|
|
4e3075aaba | ||
|
|
87d6684ca7 | ||
|
|
3bd7596873 | ||
|
|
39964bf077 | ||
|
|
089199e7c2 | ||
|
|
7b41b295b7 | ||
|
|
d7bc7a2d38 | ||
|
|
eae75c13bb | ||
|
|
fab13db4b4 | ||
|
|
69d5f521a5 | ||
|
|
c0a55142b5 | ||
|
|
513fb3428a | ||
|
|
9a0ae549f6 | ||
|
|
4410d7f195 | ||
|
|
92aa70182d | ||
|
|
90f5864f1e | ||
|
|
d44de670cd | ||
|
|
cb63025078 | ||
|
|
685e865b42 | ||
|
|
e47f126bd5 | ||
|
|
ea6f70e3c5 | ||
|
|
0469aab433 | ||
|
|
ad13b5eb4e | ||
|
|
7324a4973f | ||
|
|
8bc93d23b2 | ||
|
|
c708b685e1 | ||
|
|
65009e2f69 | ||
|
|
cbde91744f | ||
|
|
4c8a92bb0c | ||
|
|
11a2e96d06 | ||
|
|
095c5e4f95 | ||
|
|
069db28fb6 | ||
|
|
2e747d3ece | ||
|
|
147e24204b | ||
|
|
6580153f29 | ||
|
|
13c50e428f | ||
|
|
8403ccd3da | ||
|
|
c988bca958 | ||
|
|
e92bd61545 | ||
|
|
e84e8edb29 | ||
|
|
8215e0221a | ||
|
|
a4ef7205ca | ||
|
|
4b44d6fb83 | ||
|
|
ba8df96e41 | ||
|
|
0e2fc07881 | ||
|
|
0ae3e83ce4 | ||
|
|
f4b573379d | ||
|
|
862ca375ee | ||
|
|
5c578c0328 | ||
|
|
530de6741b | ||
|
|
5f7ff460fb | ||
|
|
3b3e1e37b9 | ||
|
|
5f40d9400c | ||
|
|
fcdc642acb | ||
|
|
46f594ab71 | ||
|
|
e8684cbb9d | ||
|
|
a36ab71600 | ||
|
|
35c1ff9014 | ||
|
|
e4ce05f94d | ||
|
|
38a624fecf | ||
|
|
fd96859883 | ||
|
|
94d22ed1aa | ||
|
|
3f4caed922 | ||
|
|
09303ab2fb | ||
|
|
df1ac8e1e2 | ||
|
|
7a55c91349 | ||
|
|
c491dfdd3a | ||
|
|
b5da076e2c | ||
|
|
18cd6c81a3 | ||
|
|
d9cc21f761 | ||
|
|
40b19c5e67 | ||
|
|
06207145af | ||
|
|
b195e3435f | ||
|
|
34b4577c0b | ||
|
|
8034e5bbcb | ||
|
|
df7a30bd14 | ||
|
|
d9dfacaaf4 | ||
|
|
d43767b945 | ||
|
|
cb36754c46 | ||
|
|
7e18aafe20 | ||
|
|
7a31d09356 | ||
|
|
f7b079b1b4 | ||
|
|
72ffedead7 | ||
|
|
cf3a501562 | ||
|
|
7becdc3034 | ||
|
|
f0d599781d | ||
|
|
3386105048 | ||
|
|
3b8fb70db1 | ||
|
|
c3ae146580 | ||
|
|
0d079f0d89 | ||
|
|
9f5a90ee9c | ||
|
|
a5307fd8cc | ||
|
|
180589144a | ||
|
|
d9c1867bd7 | ||
|
|
da37d649ec | ||
|
|
4204b4af90 | ||
|
|
941650f668 | ||
|
|
9c0c6c1bd6 | ||
|
|
bd0ddafcd0 | ||
|
|
19f5e92a74 | ||
|
|
3202c38061 | ||
|
|
e35a8c942b | ||
|
|
31811eb91e | ||
|
|
b9316a4112 | ||
|
|
b7abd878ac | ||
|
|
38c2c47789 | ||
|
|
c03778ec8b | ||
|
|
29b0850a94 | ||
|
|
712fde46eb | ||
|
|
c2e79ca5a7 | ||
|
|
c3a52b3989 | ||
|
|
7213d82f1b | ||
|
|
5bcad69cf7 | ||
|
|
c9a487fa4d | ||
|
|
3804a46f3b | ||
|
|
52c0bb5302 | ||
|
|
8aa19e6420 | ||
|
|
4d1c7a3884 | ||
|
|
25f2c057b7 | ||
|
|
010be05920 | ||
|
|
4c465850a2 | ||
|
|
8313dfaeb9 | ||
|
|
873f2b2814 | ||
|
|
e53c90f8f0 | ||
|
|
9499ea8ca9 | ||
|
|
f6c09109ba | ||
|
|
273b5768c4 | ||
|
|
ee13cf7dd9 | ||
|
|
fecbae761e | ||
|
|
e0ee89bdd9 | ||
|
|
833c1f22a3 | ||
|
|
6fed6c8d30 | ||
|
|
94cdaf5314 | ||
|
|
f83ae27352 | ||
|
|
6badf047c3 | ||
|
|
47de9ad15f | ||
|
|
09b91cc663 | ||
|
|
ded16549f7 | ||
|
|
c89e47577b | ||
|
|
bb50beb7ab | ||
|
|
e4cd4d64d7 | ||
|
|
5675fc51a0 | ||
|
|
c7438c4aff | ||
|
|
4a6a3da36c | ||
|
|
a657c332b1 | ||
|
|
cc9cd3fc14 | ||
|
|
234258a077 | ||
|
|
13cda80ee6 | ||
|
|
f6e142baf5 | ||
|
|
ddf1f9bcd5 | ||
|
|
aa950669f6 | ||
|
|
dacd5d3e6b | ||
|
|
e76ccba2f7 | ||
|
|
3933819d53 | ||
|
|
99019c2b1f | ||
|
|
4bf5eb398b | ||
|
|
dbfbac62c0 | ||
|
|
7685293da4 | ||
|
|
ee9c328606 | ||
|
|
cb7790ccba | ||
|
|
6556fcc531 | ||
|
|
178391e7b2 | ||
|
|
18922a1c6d | ||
|
|
5e9e26fa67 | ||
|
|
f5430f9151 | ||
|
|
4dfdf2f92f | ||
|
|
e4d283cc99 | ||
|
|
8ee64d22b3 | ||
|
|
10e3e80042 | ||
|
|
f77a208e2c | ||
|
|
9366dbb96e | ||
|
|
550b17552b | ||
|
|
bec307d0e9 | ||
|
|
93c751f6eb |
5
.claude/settings.json
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
{
|
||||||
|
"attribution": {
|
||||||
|
"commit": ""
|
||||||
|
}
|
||||||
|
}
|
||||||
81
.github/actions/setup-build/action.yml
vendored
Normal file
@@ -0,0 +1,81 @@
|
|||||||
|
name: Setup Build Environment
|
||||||
|
description: Common build environment setup steps
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
nodejs-version:
|
||||||
|
description: Node.js version
|
||||||
|
required: true
|
||||||
|
setup-python:
|
||||||
|
description: Set up Python
|
||||||
|
required: false
|
||||||
|
default: "false"
|
||||||
|
setup-docker:
|
||||||
|
description: Set up Docker QEMU and Buildx
|
||||||
|
required: false
|
||||||
|
default: "true"
|
||||||
|
setup-sccache:
|
||||||
|
description: Configure sccache for GitHub Actions
|
||||||
|
required: false
|
||||||
|
default: "true"
|
||||||
|
free-space:
|
||||||
|
description: Remove unnecessary packages to free disk space
|
||||||
|
required: false
|
||||||
|
default: "true"
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: composite
|
||||||
|
steps:
|
||||||
|
- name: Free disk space
|
||||||
|
if: inputs.free-space == 'true'
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
sudo apt-get remove --purge -y azure-cli || true
|
||||||
|
sudo apt-get remove --purge -y firefox || true
|
||||||
|
sudo apt-get remove --purge -y ghc-* || true
|
||||||
|
sudo apt-get remove --purge -y google-cloud-sdk || true
|
||||||
|
sudo apt-get remove --purge -y google-chrome-stable || true
|
||||||
|
sudo apt-get remove --purge -y powershell || true
|
||||||
|
sudo apt-get remove --purge -y php* || true
|
||||||
|
sudo apt-get remove --purge -y ruby* || true
|
||||||
|
sudo apt-get remove --purge -y mono-* || true
|
||||||
|
sudo apt-get autoremove -y
|
||||||
|
sudo apt-get clean
|
||||||
|
sudo rm -rf /usr/lib/jvm
|
||||||
|
sudo rm -rf /usr/local/.ghcup
|
||||||
|
sudo rm -rf /usr/local/lib/android
|
||||||
|
sudo rm -rf /usr/share/dotnet
|
||||||
|
sudo rm -rf /usr/share/swift
|
||||||
|
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||||
|
|
||||||
|
# BuildJet runners lack /opt/hostedtoolcache, which setup-python and setup-qemu expect
|
||||||
|
- name: Ensure hostedtoolcache exists
|
||||||
|
shell: bash
|
||||||
|
run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
if: inputs.setup-python == 'true'
|
||||||
|
uses: actions/setup-python@v5
|
||||||
|
with:
|
||||||
|
python-version: "3.x"
|
||||||
|
|
||||||
|
- uses: actions/setup-node@v4
|
||||||
|
with:
|
||||||
|
node-version: ${{ inputs.nodejs-version }}
|
||||||
|
cache: npm
|
||||||
|
cache-dependency-path: "**/package-lock.json"
|
||||||
|
|
||||||
|
- name: Set up Docker QEMU
|
||||||
|
if: inputs.setup-docker == 'true'
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
if: inputs.setup-docker == 'true'
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: Configure sccache
|
||||||
|
if: inputs.setup-sccache == 'true'
|
||||||
|
uses: actions/github-script@v7
|
||||||
|
with:
|
||||||
|
script: |
|
||||||
|
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
|
||||||
|
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
|
||||||
88
.github/workflows/start-cli.yaml
vendored
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
name: start-cli
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
environment:
|
||||||
|
type: choice
|
||||||
|
description: Environment
|
||||||
|
options:
|
||||||
|
- NONE
|
||||||
|
- dev
|
||||||
|
- unstable
|
||||||
|
- dev-unstable
|
||||||
|
runner:
|
||||||
|
type: choice
|
||||||
|
description: Runner
|
||||||
|
options:
|
||||||
|
- standard
|
||||||
|
- fast
|
||||||
|
arch:
|
||||||
|
type: choice
|
||||||
|
description: Architecture
|
||||||
|
options:
|
||||||
|
- ALL
|
||||||
|
- x86_64
|
||||||
|
- x86_64-apple
|
||||||
|
- aarch64
|
||||||
|
- aarch64-apple
|
||||||
|
- riscv64
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
- next/*
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
- next/*
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
NODEJS_VERSION: "24.11.0"
|
||||||
|
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
compile:
|
||||||
|
name: Build Debian Package
|
||||||
|
if: github.event.pull_request.draft != true
|
||||||
|
strategy:
|
||||||
|
fail-fast: true
|
||||||
|
matrix:
|
||||||
|
triple: >-
|
||||||
|
${{
|
||||||
|
fromJson('{
|
||||||
|
"x86_64": ["x86_64-unknown-linux-musl"],
|
||||||
|
"x86_64-apple": ["x86_64-apple-darwin"],
|
||||||
|
"aarch64": ["aarch64-unknown-linux-musl"],
|
||||||
|
"x86_64-apple": ["aarch64-apple-darwin"],
|
||||||
|
"riscv64": ["riscv64gc-unknown-linux-musl"],
|
||||||
|
"ALL": ["x86_64-unknown-linux-musl", "x86_64-apple-darwin", "aarch64-unknown-linux-musl", "aarch64-apple-darwin", "riscv64gc-unknown-linux-musl"]
|
||||||
|
}')[github.event.inputs.platform || 'ALL']
|
||||||
|
}}
|
||||||
|
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
||||||
|
steps:
|
||||||
|
- name: Mount tmpfs
|
||||||
|
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||||
|
run: sudo mount -t tmpfs tmpfs .
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: recursive
|
||||||
|
- uses: ./.github/actions/setup-build
|
||||||
|
with:
|
||||||
|
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||||
|
|
||||||
|
- name: Make
|
||||||
|
run: TARGET=${{ matrix.triple }} make cli
|
||||||
|
env:
|
||||||
|
PLATFORM: ${{ matrix.arch }}
|
||||||
|
SCCACHE_GHA_ENABLED: on
|
||||||
|
SCCACHE_GHA_VERSION: 0
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: start-cli_${{ matrix.triple }}
|
||||||
|
path: core/target/${{ matrix.triple }}/release/start-cli
|
||||||
173
.github/workflows/start-registry.yaml
vendored
Normal file
@@ -0,0 +1,173 @@
|
|||||||
|
name: start-registry
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
environment:
|
||||||
|
type: choice
|
||||||
|
description: Environment
|
||||||
|
options:
|
||||||
|
- NONE
|
||||||
|
- dev
|
||||||
|
- unstable
|
||||||
|
- dev-unstable
|
||||||
|
runner:
|
||||||
|
type: choice
|
||||||
|
description: Runner
|
||||||
|
options:
|
||||||
|
- standard
|
||||||
|
- fast
|
||||||
|
arch:
|
||||||
|
type: choice
|
||||||
|
description: Architecture
|
||||||
|
options:
|
||||||
|
- ALL
|
||||||
|
- x86_64
|
||||||
|
- aarch64
|
||||||
|
- riscv64
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
- next/*
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
- next/*
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
NODEJS_VERSION: "24.11.0"
|
||||||
|
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
compile:
|
||||||
|
name: Build Debian Package
|
||||||
|
if: github.event.pull_request.draft != true
|
||||||
|
strategy:
|
||||||
|
fail-fast: true
|
||||||
|
matrix:
|
||||||
|
arch: >-
|
||||||
|
${{
|
||||||
|
fromJson('{
|
||||||
|
"x86_64": ["x86_64"],
|
||||||
|
"aarch64": ["aarch64"],
|
||||||
|
"riscv64": ["riscv64"],
|
||||||
|
"ALL": ["x86_64", "aarch64", "riscv64"]
|
||||||
|
}')[github.event.inputs.platform || 'ALL']
|
||||||
|
}}
|
||||||
|
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
||||||
|
steps:
|
||||||
|
- name: Mount tmpfs
|
||||||
|
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||||
|
run: sudo mount -t tmpfs tmpfs .
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: recursive
|
||||||
|
- uses: ./.github/actions/setup-build
|
||||||
|
with:
|
||||||
|
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||||
|
|
||||||
|
- name: Make
|
||||||
|
run: make registry-deb
|
||||||
|
env:
|
||||||
|
PLATFORM: ${{ matrix.arch }}
|
||||||
|
SCCACHE_GHA_ENABLED: on
|
||||||
|
SCCACHE_GHA_VERSION: 0
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: start-registry_${{ matrix.arch }}.deb
|
||||||
|
path: results/start-registry-*_${{ matrix.arch }}.deb
|
||||||
|
|
||||||
|
create-image:
|
||||||
|
name: Create Docker Image
|
||||||
|
needs: [compile]
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
packages: write
|
||||||
|
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
||||||
|
steps:
|
||||||
|
- name: Cleaning up unnecessary files
|
||||||
|
run: |
|
||||||
|
sudo apt-get remove --purge -y google-chrome-stable firefox mono-devel
|
||||||
|
sudo apt-get autoremove -y
|
||||||
|
sudo apt-get clean
|
||||||
|
|
||||||
|
- run: |
|
||||||
|
sudo mount -t tmpfs tmpfs .
|
||||||
|
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||||
|
|
||||||
|
- name: Set up docker QEMU
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
|
||||||
|
- name: "Login to GitHub Container Registry"
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ghcr.io
|
||||||
|
username: ${{github.actor}}
|
||||||
|
password: ${{secrets.GITHUB_TOKEN}}
|
||||||
|
|
||||||
|
- name: Docker meta
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: ghcr.io/Start9Labs/startos-registry
|
||||||
|
tags: |
|
||||||
|
type=raw,value=${{ github.ref_name }}
|
||||||
|
|
||||||
|
- name: Download debian package
|
||||||
|
uses: actions/download-artifact@v4
|
||||||
|
with:
|
||||||
|
pattern: start-registry_*.deb
|
||||||
|
|
||||||
|
- name: Map matrix.arch to docker platform
|
||||||
|
run: |
|
||||||
|
platforms=""
|
||||||
|
for deb in *.deb; do
|
||||||
|
filename=$(basename "$deb" .deb)
|
||||||
|
arch="${filename#*_}"
|
||||||
|
case "$arch" in
|
||||||
|
x86_64)
|
||||||
|
platform="linux/amd64"
|
||||||
|
;;
|
||||||
|
aarch64)
|
||||||
|
platform="linux/arm64"
|
||||||
|
;;
|
||||||
|
riscv64)
|
||||||
|
platform="linux/riscv64"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
echo "Unknown architecture: $arch" >&2
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
if [ -z "$platforms" ]; then
|
||||||
|
platforms="$platform"
|
||||||
|
else
|
||||||
|
platforms="$platforms,$platform"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
echo "DOCKER_PLATFORM=$platforms" >> "$GITHUB_ENV"
|
||||||
|
|
||||||
|
- run: |
|
||||||
|
cat | docker buildx build --platform "$DOCKER_PLATFORM" --push -t ${{ steps.meta.outputs.tags }} -f - . << 'EOF'
|
||||||
|
FROM debian:trixie
|
||||||
|
|
||||||
|
ADD *.deb .
|
||||||
|
|
||||||
|
RUN apt-get install -y ./*_$(uname -m).deb && rm *.deb
|
||||||
|
|
||||||
|
VOLUME /var/lib/startos
|
||||||
|
|
||||||
|
ENV RUST_LOG=startos=debug
|
||||||
|
|
||||||
|
ENTRYPOINT ["start-registryd"]
|
||||||
|
|
||||||
|
EOF
|
||||||
84
.github/workflows/start-tunnel.yaml
vendored
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
name: start-tunnel
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_call:
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
environment:
|
||||||
|
type: choice
|
||||||
|
description: Environment
|
||||||
|
options:
|
||||||
|
- NONE
|
||||||
|
- dev
|
||||||
|
- unstable
|
||||||
|
- dev-unstable
|
||||||
|
runner:
|
||||||
|
type: choice
|
||||||
|
description: Runner
|
||||||
|
options:
|
||||||
|
- standard
|
||||||
|
- fast
|
||||||
|
arch:
|
||||||
|
type: choice
|
||||||
|
description: Architecture
|
||||||
|
options:
|
||||||
|
- ALL
|
||||||
|
- x86_64
|
||||||
|
- aarch64
|
||||||
|
- riscv64
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
- next/*
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
- next/*
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
NODEJS_VERSION: "24.11.0"
|
||||||
|
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
compile:
|
||||||
|
name: Build Debian Package
|
||||||
|
if: github.event.pull_request.draft != true
|
||||||
|
strategy:
|
||||||
|
fail-fast: true
|
||||||
|
matrix:
|
||||||
|
arch: >-
|
||||||
|
${{
|
||||||
|
fromJson('{
|
||||||
|
"x86_64": ["x86_64"],
|
||||||
|
"aarch64": ["aarch64"],
|
||||||
|
"riscv64": ["riscv64"],
|
||||||
|
"ALL": ["x86_64", "aarch64", "riscv64"]
|
||||||
|
}')[github.event.inputs.platform || 'ALL']
|
||||||
|
}}
|
||||||
|
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
||||||
|
steps:
|
||||||
|
- name: Mount tmpfs
|
||||||
|
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||||
|
run: sudo mount -t tmpfs tmpfs .
|
||||||
|
- uses: actions/checkout@v4
|
||||||
|
with:
|
||||||
|
submodules: recursive
|
||||||
|
- uses: ./.github/actions/setup-build
|
||||||
|
with:
|
||||||
|
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||||
|
|
||||||
|
- name: Make
|
||||||
|
run: make tunnel-deb
|
||||||
|
env:
|
||||||
|
PLATFORM: ${{ matrix.arch }}
|
||||||
|
SCCACHE_GHA_ENABLED: on
|
||||||
|
SCCACHE_GHA_VERSION: 0
|
||||||
|
|
||||||
|
- uses: actions/upload-artifact@v4
|
||||||
|
with:
|
||||||
|
name: start-tunnel_${{ matrix.arch }}.deb
|
||||||
|
path: results/start-tunnel-*_${{ matrix.arch }}.deb
|
||||||
183
.github/workflows/startos-iso.yaml
vendored
@@ -12,9 +12,6 @@ on:
|
|||||||
- dev
|
- dev
|
||||||
- unstable
|
- unstable
|
||||||
- dev-unstable
|
- dev-unstable
|
||||||
- docker
|
|
||||||
- dev-docker
|
|
||||||
- dev-unstable-docker
|
|
||||||
runner:
|
runner:
|
||||||
type: choice
|
type: choice
|
||||||
description: Runner
|
description: Runner
|
||||||
@@ -30,7 +27,8 @@ on:
|
|||||||
- x86_64-nonfree
|
- x86_64-nonfree
|
||||||
- aarch64
|
- aarch64
|
||||||
- aarch64-nonfree
|
- aarch64-nonfree
|
||||||
- raspberrypi
|
# - raspberrypi
|
||||||
|
- riscv64
|
||||||
deploy:
|
deploy:
|
||||||
type: choice
|
type: choice
|
||||||
description: Deploy
|
description: Deploy
|
||||||
@@ -47,13 +45,18 @@ on:
|
|||||||
- master
|
- master
|
||||||
- next/*
|
- next/*
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
env:
|
env:
|
||||||
NODEJS_VERSION: "18.15.0"
|
NODEJS_VERSION: "24.11.0"
|
||||||
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
|
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
compile:
|
compile:
|
||||||
name: Compile Base Binaries
|
name: Compile Base Binaries
|
||||||
|
if: github.event.pull_request.draft != true
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: true
|
fail-fast: true
|
||||||
matrix:
|
matrix:
|
||||||
@@ -65,33 +68,47 @@ jobs:
|
|||||||
"aarch64": ["aarch64"],
|
"aarch64": ["aarch64"],
|
||||||
"aarch64-nonfree": ["aarch64"],
|
"aarch64-nonfree": ["aarch64"],
|
||||||
"raspberrypi": ["aarch64"],
|
"raspberrypi": ["aarch64"],
|
||||||
"ALL": ["x86_64", "aarch64"]
|
"riscv64": ["riscv64"],
|
||||||
|
"ALL": ["x86_64", "aarch64", "riscv64"]
|
||||||
}')[github.event.inputs.platform || 'ALL']
|
}')[github.event.inputs.platform || 'ALL']
|
||||||
}}
|
}}
|
||||||
runs-on: ${{ fromJson('["ubuntu-22.04", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
runs-on: >-
|
||||||
|
${{
|
||||||
|
fromJson(
|
||||||
|
format(
|
||||||
|
'["{0}", "{1}"]',
|
||||||
|
fromJson('{
|
||||||
|
"x86_64": "ubuntu-latest",
|
||||||
|
"aarch64": "ubuntu-24.04-arm",
|
||||||
|
"riscv64": "ubuntu-latest"
|
||||||
|
}')[matrix.arch],
|
||||||
|
fromJson('{
|
||||||
|
"x86_64": "buildjet-32vcpu-ubuntu-2204",
|
||||||
|
"aarch64": "buildjet-32vcpu-ubuntu-2204-arm",
|
||||||
|
"riscv64": "buildjet-32vcpu-ubuntu-2204"
|
||||||
|
}')[matrix.arch]
|
||||||
|
)
|
||||||
|
)[github.event.inputs.runner == 'fast']
|
||||||
|
}}
|
||||||
steps:
|
steps:
|
||||||
- run: |
|
- name: Mount tmpfs
|
||||||
sudo mount -t tmpfs tmpfs .
|
|
||||||
if: ${{ github.event.inputs.runner == 'fast' }}
|
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||||
|
run: sudo mount -t tmpfs tmpfs .
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
|
- uses: ./.github/actions/setup-build
|
||||||
- uses: actions/setup-node@v3
|
|
||||||
with:
|
with:
|
||||||
node-version: ${{ env.NODEJS_VERSION }}
|
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||||
|
setup-python: "true"
|
||||||
- name: Set up QEMU
|
|
||||||
uses: docker/setup-qemu-action@v2
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v2
|
|
||||||
|
|
||||||
- name: Make
|
- name: Make
|
||||||
run: make ARCH=${{ matrix.arch }} compiled-${{ matrix.arch }}.tar
|
run: make ARCH=${{ matrix.arch }} compiled-${{ matrix.arch }}.tar
|
||||||
|
env:
|
||||||
|
SCCACHE_GHA_ENABLED: on
|
||||||
|
SCCACHE_GHA_VERSION: 0
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: compiled-${{ matrix.arch }}.tar
|
name: compiled-${{ matrix.arch }}.tar
|
||||||
path: compiled-${{ matrix.arch }}.tar
|
path: compiled-${{ matrix.arch }}.tar
|
||||||
@@ -101,13 +118,14 @@ jobs:
|
|||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
|
# TODO: re-add "raspberrypi" to the platform list below
|
||||||
platform: >-
|
platform: >-
|
||||||
${{
|
${{
|
||||||
fromJson(
|
fromJson(
|
||||||
format(
|
format(
|
||||||
'[
|
'[
|
||||||
["{0}"],
|
["{0}"],
|
||||||
["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "raspberrypi"]
|
["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "riscv64"]
|
||||||
]',
|
]',
|
||||||
github.event.inputs.platform || 'ALL'
|
github.event.inputs.platform || 'ALL'
|
||||||
)
|
)
|
||||||
@@ -117,13 +135,22 @@ jobs:
|
|||||||
${{
|
${{
|
||||||
fromJson(
|
fromJson(
|
||||||
format(
|
format(
|
||||||
'["ubuntu-22.04", "{0}"]',
|
'["{0}", "{1}"]',
|
||||||
|
fromJson('{
|
||||||
|
"x86_64": "ubuntu-latest",
|
||||||
|
"x86_64-nonfree": "ubuntu-latest",
|
||||||
|
"aarch64": "ubuntu-24.04-arm",
|
||||||
|
"aarch64-nonfree": "ubuntu-24.04-arm",
|
||||||
|
"raspberrypi": "ubuntu-24.04-arm",
|
||||||
|
"riscv64": "ubuntu-24.04-arm",
|
||||||
|
}')[matrix.platform],
|
||||||
fromJson('{
|
fromJson('{
|
||||||
"x86_64": "buildjet-8vcpu-ubuntu-2204",
|
"x86_64": "buildjet-8vcpu-ubuntu-2204",
|
||||||
"x86_64-nonfree": "buildjet-8vcpu-ubuntu-2204",
|
"x86_64-nonfree": "buildjet-8vcpu-ubuntu-2204",
|
||||||
"aarch64": "buildjet-8vcpu-ubuntu-2204-arm",
|
"aarch64": "buildjet-8vcpu-ubuntu-2204-arm",
|
||||||
"aarch64-nonfree": "buildjet-8vcpu-ubuntu-2204-arm",
|
"aarch64-nonfree": "buildjet-8vcpu-ubuntu-2204-arm",
|
||||||
"raspberrypi": "buildjet-8vcpu-ubuntu-2204-arm",
|
"raspberrypi": "buildjet-8vcpu-ubuntu-2204-arm",
|
||||||
|
"riscv64": "buildjet-8vcpu-ubuntu-2204",
|
||||||
}')[matrix.platform]
|
}')[matrix.platform]
|
||||||
)
|
)
|
||||||
)[github.event.inputs.runner == 'fast']
|
)[github.event.inputs.runner == 'fast']
|
||||||
@@ -137,32 +164,44 @@ jobs:
|
|||||||
"aarch64": "aarch64",
|
"aarch64": "aarch64",
|
||||||
"aarch64-nonfree": "aarch64",
|
"aarch64-nonfree": "aarch64",
|
||||||
"raspberrypi": "aarch64",
|
"raspberrypi": "aarch64",
|
||||||
|
"riscv64": "riscv64",
|
||||||
}')[matrix.platform]
|
}')[matrix.platform]
|
||||||
}}
|
}}
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- name: Free space
|
||||||
|
run: |
|
||||||
|
sudo apt-get remove --purge -y azure-cli || true
|
||||||
|
sudo apt-get remove --purge -y firefox || true
|
||||||
|
sudo apt-get remove --purge -y ghc-* || true
|
||||||
|
sudo apt-get remove --purge -y google-cloud-sdk || true
|
||||||
|
sudo apt-get remove --purge -y google-chrome-stable || true
|
||||||
|
sudo apt-get remove --purge -y powershell || true
|
||||||
|
sudo apt-get remove --purge -y php* || true
|
||||||
|
sudo apt-get remove --purge -y ruby* || true
|
||||||
|
sudo apt-get remove --purge -y mono-* || true
|
||||||
|
sudo apt-get autoremove -y
|
||||||
|
sudo apt-get clean
|
||||||
|
sudo rm -rf /usr/lib/jvm # All JDKs
|
||||||
|
sudo rm -rf /usr/local/.ghcup # Haskell toolchain
|
||||||
|
sudo rm -rf /usr/local/lib/android # Android SDK/NDK, emulator
|
||||||
|
sudo rm -rf /usr/share/dotnet # .NET SDKs
|
||||||
|
sudo rm -rf /usr/share/swift # Swift toolchain (if present)
|
||||||
|
sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
|
||||||
|
if: ${{ github.event.inputs.runner != 'fast' }}
|
||||||
|
|
||||||
|
# BuildJet runners lack /opt/hostedtoolcache, which setup-qemu expects
|
||||||
|
- name: Ensure hostedtoolcache exists
|
||||||
|
run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
|
||||||
|
|
||||||
|
- name: Set up docker QEMU
|
||||||
|
uses: docker/setup-qemu-action@v3
|
||||||
|
|
||||||
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
|
|
||||||
- name: Install dependencies
|
|
||||||
run: |
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y qemu-user-static
|
|
||||||
wget https://deb.debian.org/debian/pool/main/d/debspawn/debspawn_0.6.2-1_all.deb
|
|
||||||
sha256sum ./debspawn_0.6.2-1_all.deb | grep 37ef27458cb1e35e8bce4d4f639b06b4b3866fc0b9191ec6b9bd157afd06a817
|
|
||||||
sudo apt-get install -y ./debspawn_0.6.2-1_all.deb
|
|
||||||
|
|
||||||
- name: Configure debspawn
|
|
||||||
run: |
|
|
||||||
sudo mkdir -p /etc/debspawn/
|
|
||||||
echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml
|
|
||||||
sudo mkdir -p /var/tmp/debspawn
|
|
||||||
|
|
||||||
- run: sudo mount -t tmpfs tmpfs /var/tmp/debspawn
|
|
||||||
if: ${{ github.event.inputs.runner == 'fast' && (matrix.platform == 'x86_64' || matrix.platform == 'x86_64-nonfree') }}
|
|
||||||
|
|
||||||
- name: Download compiled artifacts
|
- name: Download compiled artifacts
|
||||||
uses: actions/download-artifact@v3
|
uses: actions/download-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: compiled-${{ env.ARCH }}.tar
|
name: compiled-${{ env.ARCH }}.tar
|
||||||
|
|
||||||
@@ -171,9 +210,26 @@ jobs:
|
|||||||
|
|
||||||
- name: Prevent rebuild of compiled artifacts
|
- name: Prevent rebuild of compiled artifacts
|
||||||
run: |
|
run: |
|
||||||
|
mkdir -p web/node_modules
|
||||||
mkdir -p web/dist/raw
|
mkdir -p web/dist/raw
|
||||||
|
mkdir -p core/bindings
|
||||||
|
mkdir -p sdk/base/lib/osBindings
|
||||||
|
mkdir -p container-runtime/node_modules
|
||||||
|
mkdir -p container-runtime/dist
|
||||||
|
mkdir -p container-runtime/dist/node_modules
|
||||||
|
mkdir -p sdk/dist
|
||||||
|
mkdir -p sdk/baseDist
|
||||||
|
mkdir -p patch-db/client/node_modules
|
||||||
|
mkdir -p patch-db/client/dist
|
||||||
|
mkdir -p web/.angular
|
||||||
|
mkdir -p web/dist/raw/ui
|
||||||
|
mkdir -p web/dist/raw/setup-wizard
|
||||||
|
mkdir -p web/dist/static/ui
|
||||||
|
mkdir -p web/dist/static/setup-wizard
|
||||||
PLATFORM=${{ matrix.platform }} make -t compiled-${{ env.ARCH }}.tar
|
PLATFORM=${{ matrix.platform }} make -t compiled-${{ env.ARCH }}.tar
|
||||||
|
|
||||||
|
- run: git status
|
||||||
|
|
||||||
- name: Run iso build
|
- name: Run iso build
|
||||||
run: PLATFORM=${{ matrix.platform }} make iso
|
run: PLATFORM=${{ matrix.platform }} make iso
|
||||||
if: ${{ matrix.platform != 'raspberrypi' }}
|
if: ${{ matrix.platform != 'raspberrypi' }}
|
||||||
@@ -182,56 +238,19 @@ jobs:
|
|||||||
run: PLATFORM=${{ matrix.platform }} make img
|
run: PLATFORM=${{ matrix.platform }} make img
|
||||||
if: ${{ matrix.platform == 'raspberrypi' }}
|
if: ${{ matrix.platform == 'raspberrypi' }}
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: ${{ matrix.platform }}.squashfs
|
name: ${{ matrix.platform }}.squashfs
|
||||||
path: results/*.squashfs
|
path: results/*.squashfs
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: ${{ matrix.platform }}.iso
|
name: ${{ matrix.platform }}.iso
|
||||||
path: results/*.iso
|
path: results/*.iso
|
||||||
if: ${{ matrix.platform != 'raspberrypi' }}
|
if: ${{ matrix.platform != 'raspberrypi' }}
|
||||||
|
|
||||||
- uses: actions/upload-artifact@v3
|
- uses: actions/upload-artifact@v4
|
||||||
with:
|
with:
|
||||||
name: ${{ matrix.platform }}.img
|
name: ${{ matrix.platform }}.img
|
||||||
path: results/*.img
|
path: results/*.img
|
||||||
if: ${{ matrix.platform == 'raspberrypi' }}
|
if: ${{ matrix.platform == 'raspberrypi' }}
|
||||||
|
|
||||||
- name: Upload OTA to registry
|
|
||||||
run: >-
|
|
||||||
PLATFORM=${{ matrix.platform }} make upload-ota TARGET="${{
|
|
||||||
fromJson('{
|
|
||||||
"alpha": "alpha-registry-x.start9.com",
|
|
||||||
"beta": "beta-registry.start9.com",
|
|
||||||
}')[github.event.inputs.deploy]
|
|
||||||
}}" KEY="${{
|
|
||||||
fromJson(
|
|
||||||
format('{{
|
|
||||||
"alpha": "{0}",
|
|
||||||
"beta": "{1}",
|
|
||||||
}}', secrets.ALPHA_INDEX_KEY, secrets.BETA_INDEX_KEY)
|
|
||||||
)[github.event.inputs.deploy]
|
|
||||||
}}"
|
|
||||||
if: ${{ github.event.inputs.deploy != '' && github.event.inputs.deploy != 'NONE' }}
|
|
||||||
|
|
||||||
index:
|
|
||||||
if: ${{ github.event.inputs.deploy != '' && github.event.inputs.deploy != 'NONE' }}
|
|
||||||
needs: [image]
|
|
||||||
runs-on: ubuntu-22.04
|
|
||||||
steps:
|
|
||||||
- run: >-
|
|
||||||
curl "https://${{
|
|
||||||
fromJson('{
|
|
||||||
"alpha": "alpha-registry-x.start9.com",
|
|
||||||
"beta": "beta-registry.start9.com",
|
|
||||||
}')[github.event.inputs.deploy]
|
|
||||||
}}:8443/resync.cgi?key=${{
|
|
||||||
fromJson(
|
|
||||||
format('{{
|
|
||||||
"alpha": "{0}",
|
|
||||||
"beta": "{1}",
|
|
||||||
}}', secrets.ALPHA_INDEX_KEY, secrets.BETA_INDEX_KEY)
|
|
||||||
)[github.event.inputs.deploy]
|
|
||||||
}}"
|
|
||||||
|
|||||||
19
.github/workflows/test.yaml
vendored
@@ -10,22 +10,29 @@ on:
|
|||||||
- master
|
- master
|
||||||
- next/*
|
- next/*
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
env:
|
env:
|
||||||
NODEJS_VERSION: "18.15.0"
|
NODEJS_VERSION: "24.11.0"
|
||||||
ENVIRONMENT: dev-unstable
|
ENVIRONMENT: dev-unstable
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
test:
|
test:
|
||||||
name: Run Automated Tests
|
name: Run Automated Tests
|
||||||
runs-on: ubuntu-22.04
|
if: github.event.pull_request.draft != true
|
||||||
|
runs-on: ubuntu-latest
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v4
|
||||||
with:
|
with:
|
||||||
submodules: recursive
|
submodules: recursive
|
||||||
|
- uses: ./.github/actions/setup-build
|
||||||
- uses: actions/setup-node@v3
|
|
||||||
with:
|
with:
|
||||||
node-version: ${{ env.NODEJS_VERSION }}
|
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||||
|
free-space: "false"
|
||||||
|
setup-docker: "false"
|
||||||
|
setup-sccache: "false"
|
||||||
|
|
||||||
- name: Build And Run Tests
|
- name: Build And Run Tests
|
||||||
run: make test
|
run: make test
|
||||||
|
|||||||
29
.gitignore
vendored
@@ -1,31 +1,24 @@
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
.idea
|
.idea
|
||||||
system-images/binfmt/binfmt.tar
|
*.img
|
||||||
system-images/compat/compat.tar
|
*.img.gz
|
||||||
system-images/util/util.tar
|
*.img.xz
|
||||||
/*.img
|
*.zip
|
||||||
/*.img.gz
|
|
||||||
/*.img.xz
|
|
||||||
/*-raspios-bullseye-arm64-lite.img
|
|
||||||
/*-raspios-bullseye-arm64-lite.zip
|
|
||||||
/product_key.txt
|
/product_key.txt
|
||||||
/*_product_key.txt
|
/*_product_key.txt
|
||||||
.vscode/settings.json
|
.vscode/settings.json
|
||||||
deploy_web.sh
|
deploy_web.sh
|
||||||
deploy_web.sh
|
|
||||||
secrets.db
|
secrets.db
|
||||||
.vscode/
|
.vscode/
|
||||||
/cargo-deps/**/*
|
/build/env/*.txt
|
||||||
/PLATFORM.txt
|
*.deb
|
||||||
/ENVIRONMENT.txt
|
|
||||||
/GIT_HASH.txt
|
|
||||||
/VERSION.txt
|
|
||||||
/eos-*.tar.gz
|
|
||||||
/*.deb
|
|
||||||
/target
|
/target
|
||||||
/*.squashfs
|
*.squashfs
|
||||||
/results
|
/results
|
||||||
/dpkg-workdir
|
/dpkg-workdir
|
||||||
/compiled.tar
|
/compiled.tar
|
||||||
/compiled-*.tar
|
/compiled-*.tar
|
||||||
/firmware
|
/build/lib/firmware
|
||||||
|
tmp
|
||||||
|
web/.i18n-checked
|
||||||
|
agents/USER.md
|
||||||
|
|||||||
146
CLAUDE.md
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
# CLAUDE.md
|
||||||
|
|
||||||
|
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
|
||||||
|
StartOS is an open-source Linux distribution for running personal servers. It manages discovery, installation, network configuration, backups, and health monitoring of self-hosted services.
|
||||||
|
|
||||||
|
**Tech Stack:**
|
||||||
|
- Backend: Rust (async/Tokio, Axum web framework)
|
||||||
|
- Frontend: Angular 20 + TypeScript + TaigaUI
|
||||||
|
- Container runtime: Node.js/TypeScript with LXC
|
||||||
|
- Database/State: Patch-DB (git submodule) - storage layer with reactive frontend sync
|
||||||
|
- API: JSON-RPC via rpc-toolkit (see `agents/rpc-toolkit.md`)
|
||||||
|
- Auth: Password + session cookie, public/private key signatures, local authcookie (see `core/src/middleware/auth/`)
|
||||||
|
|
||||||
|
## Build & Development
|
||||||
|
|
||||||
|
See [CONTRIBUTING.md](CONTRIBUTING.md) for:
|
||||||
|
- Environment setup and requirements
|
||||||
|
- Build commands and make targets
|
||||||
|
- Testing and formatting commands
|
||||||
|
- Environment variables
|
||||||
|
|
||||||
|
**Quick reference:**
|
||||||
|
```bash
|
||||||
|
. ./devmode.sh # Enable dev mode
|
||||||
|
make update-startbox REMOTE=start9@<ip> # Fastest iteration (binary + UI)
|
||||||
|
make test-core # Run Rust tests
|
||||||
|
```
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
### Core (`/core`)
|
||||||
|
The Rust backend daemon. Main binaries:
|
||||||
|
- `startbox` - Main daemon (runs as `startd`)
|
||||||
|
- `start-cli` - CLI interface
|
||||||
|
- `start-container` - Runs inside LXC containers; communicates with host and manages subcontainers
|
||||||
|
- `registrybox` - Registry daemon
|
||||||
|
- `tunnelbox` - VPN/tunnel daemon
|
||||||
|
|
||||||
|
**Key modules:**
|
||||||
|
- `src/context/` - Context types (RpcContext, CliContext, InitContext, DiagnosticContext)
|
||||||
|
- `src/service/` - Service lifecycle management with actor pattern (`service_actor.rs`)
|
||||||
|
- `src/db/model/` - Patch-DB models (`public.rs` synced to frontend, `private.rs` backend-only)
|
||||||
|
- `src/net/` - Networking (DNS, ACME, WiFi, Tor via Arti, WireGuard)
|
||||||
|
- `src/s9pk/` - S9PK package format (merkle archive)
|
||||||
|
- `src/registry/` - Package registry management
|
||||||
|
|
||||||
|
**RPC Pattern:** See `agents/rpc-toolkit.md`
|
||||||
|
|
||||||
|
### Web (`/web`)
|
||||||
|
Angular projects sharing common code:
|
||||||
|
- `projects/ui/` - Main admin interface
|
||||||
|
- `projects/setup-wizard/` - Initial setup
|
||||||
|
- `projects/start-tunnel/` - VPN management UI
|
||||||
|
- `projects/shared/` - Common library (API clients, components)
|
||||||
|
- `projects/marketplace/` - Service discovery
|
||||||
|
|
||||||
|
**Development:**
|
||||||
|
```bash
|
||||||
|
cd web
|
||||||
|
npm ci
|
||||||
|
npm run start:ui # Dev server with mocks
|
||||||
|
npm run build:ui # Production build
|
||||||
|
npm run check # Type check all projects
|
||||||
|
```
|
||||||
|
|
||||||
|
### Container Runtime (`/container-runtime`)
|
||||||
|
Node.js runtime that manages service containers via RPC. See `RPCSpec.md` for protocol.
|
||||||
|
|
||||||
|
**Container Architecture:**
|
||||||
|
```
|
||||||
|
LXC Container (uniform base for all services)
|
||||||
|
└── systemd
|
||||||
|
└── container-runtime.service
|
||||||
|
└── Loads /usr/lib/startos/package/index.js (from s9pk javascript.squashfs)
|
||||||
|
└── Package JS launches subcontainers (from images in s9pk)
|
||||||
|
```
|
||||||
|
|
||||||
|
The container runtime communicates with the host via JSON-RPC over Unix socket. Package JavaScript must export functions conforming to the `ABI` type defined in `sdk/base/lib/types.ts`.
|
||||||
|
|
||||||
|
**`/media/startos/` directory (mounted by host into container):**
|
||||||
|
|
||||||
|
| Path | Description |
|
||||||
|
|------|-------------|
|
||||||
|
| `volumes/<name>/` | Package data volumes (id-mapped, persistent) |
|
||||||
|
| `assets/` | Read-only assets from s9pk `assets.squashfs` |
|
||||||
|
| `images/<name>/` | Container images (squashfs, used for subcontainers) |
|
||||||
|
| `images/<name>.env` | Environment variables for image |
|
||||||
|
| `images/<name>.json` | Image metadata |
|
||||||
|
| `backup/` | Backup mount point (mounted during backup operations) |
|
||||||
|
| `rpc/service.sock` | RPC socket (container runtime listens here) |
|
||||||
|
| `rpc/host.sock` | Host RPC socket (for effects callbacks to host) |
|
||||||
|
|
||||||
|
**S9PK Structure:** See `agents/s9pk-structure.md`
|
||||||
|
|
||||||
|
### SDK (`/sdk`)
|
||||||
|
TypeScript SDK for packaging services (`@start9labs/start-sdk`).
|
||||||
|
|
||||||
|
- `base/` - Core types, ABI definitions, effects interface (`@start9labs/start-sdk-base`)
|
||||||
|
- `package/` - Full SDK for package developers, re-exports base
|
||||||
|
|
||||||
|
### Patch-DB (`/patch-db`)
|
||||||
|
Git submodule providing diff-based state synchronization. Changes to `db/model/public.rs` automatically sync to the frontend.
|
||||||
|
|
||||||
|
**Key patterns:**
|
||||||
|
- `db.peek().await` - Get a read-only snapshot of the database state
|
||||||
|
- `db.mutate(|db| { ... }).await` - Apply mutations atomically, returns `MutateResult`
|
||||||
|
- `#[derive(HasModel)]` - Derive macro for types stored in the database, generates typed accessors
|
||||||
|
|
||||||
|
**Generated accessor types** (from `HasModel` derive):
|
||||||
|
- `as_field()` - Immutable reference: `&Model<T>`
|
||||||
|
- `as_field_mut()` - Mutable reference: `&mut Model<T>`
|
||||||
|
- `into_field()` - Owned value: `Model<T>`
|
||||||
|
|
||||||
|
**`Model<T>` APIs** (from `db/prelude.rs`):
|
||||||
|
- `.de()` - Deserialize to `T`
|
||||||
|
- `.ser(&value)` - Serialize from `T`
|
||||||
|
- `.mutate(|v| ...)` - Deserialize, mutate, reserialize
|
||||||
|
- For maps: `.keys()`, `.as_idx(&key)`, `.as_idx_mut(&key)`, `.insert()`, `.remove()`, `.contains_key()`
|
||||||
|
|
||||||
|
## Supplementary Documentation
|
||||||
|
|
||||||
|
The `agents/` directory contains detailed documentation for AI assistants:
|
||||||
|
|
||||||
|
- `TODO.md` - Pending tasks for AI agents (check this first, remove items when completed)
|
||||||
|
- `USER.md` - Current user identifier (gitignored, see below)
|
||||||
|
- `rpc-toolkit.md` - JSON-RPC patterns and handler configuration
|
||||||
|
- `core-rust-patterns.md` - Common utilities and patterns for Rust code in `/core` (guard pattern, mount guards, etc.)
|
||||||
|
- `s9pk-structure.md` - S9PK package format structure
|
||||||
|
- `i18n-patterns.md` - Internationalization key conventions and usage in `/core`
|
||||||
|
|
||||||
|
### Session Startup
|
||||||
|
|
||||||
|
On startup:
|
||||||
|
|
||||||
|
1. **Check for `agents/USER.md`** - If it doesn't exist, prompt the user for their name/identifier and create it. This file is gitignored since it varies per developer.
|
||||||
|
|
||||||
|
2. **Check `agents/TODO.md` for relevant tasks** - Show TODOs that either:
|
||||||
|
- Have no `@username` tag (relevant to everyone)
|
||||||
|
- Are tagged with the current user's identifier
|
||||||
|
|
||||||
|
Skip TODOs tagged with a different user.
|
||||||
|
|
||||||
|
3. **Ask "What would you like to do today?"** - Offer options for each relevant TODO item, plus "Something else" for other requests.
|
||||||
269
CONTRIBUTING.md
@@ -1,7 +1,6 @@
|
|||||||
# Contributing to StartOS
|
# Contributing to StartOS
|
||||||
|
|
||||||
This guide is for contributing to the StartOS. If you are interested in packaging a service for StartOS, visit the [service packaging guide](https://docs.start9.com/latest/developer-docs/). If you are interested in promoting, providing technical support, creating tutorials, or helping in other ways, please visit the [Start9 website](https://start9.com/contribute).
|
This guide is for contributing to the StartOS. If you are interested in packaging a service for StartOS, visit the [service packaging guide](https://docs.start9.com/latest/packaging-guide/). If you are interested in promoting, providing technical support, creating tutorials, or helping in other ways, please visit the [Start9 website](https://start9.com/contribute).
|
||||||
|
|
||||||
|
|
||||||
## Collaboration
|
## Collaboration
|
||||||
|
|
||||||
@@ -12,108 +11,190 @@ This guide is for contributing to the StartOS. If you are interested in packagin
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
/
|
/
|
||||||
├── assets/
|
├── assets/ # Screenshots for README
|
||||||
├── core/
|
├── build/ # Auxiliary files and scripts for deployed images
|
||||||
├── build/
|
├── container-runtime/ # Node.js program managing package containers
|
||||||
├── debian/
|
├── core/ # Rust backend: API, daemon (startd), CLI (start-cli)
|
||||||
├── web/
|
├── debian/ # Debian package maintainer scripts
|
||||||
├── image-recipe/
|
├── image-recipe/ # Scripts for building StartOS images
|
||||||
├── patch-db
|
├── patch-db/ # (submodule) Diff-based data store for frontend sync
|
||||||
└── system-images/
|
├── sdk/ # TypeScript SDK for building StartOS packages
|
||||||
```
|
└── web/ # Web UIs (Angular)
|
||||||
#### assets
|
|
||||||
screenshots for the StartOS README
|
|
||||||
|
|
||||||
#### core
|
|
||||||
An API, daemon (startd), CLI (start-cli), and SDK (start-sdk) that together provide the core functionality of StartOS.
|
|
||||||
|
|
||||||
#### build
|
|
||||||
Auxiliary files and scripts to include in deployed StartOS images
|
|
||||||
|
|
||||||
#### debian
|
|
||||||
Maintainer scripts for the StartOS Debian package
|
|
||||||
|
|
||||||
#### web
|
|
||||||
Web UIs served under various conditions and used to interact with StartOS APIs.
|
|
||||||
|
|
||||||
#### image-recipe
|
|
||||||
Scripts for building StartOS images
|
|
||||||
|
|
||||||
#### patch-db (submodule)
|
|
||||||
A diff based data store used to synchronize data between the web interfaces and server.
|
|
||||||
|
|
||||||
#### system-images
|
|
||||||
Docker images that assist with creating backups.
|
|
||||||
|
|
||||||
## Environment Setup
|
|
||||||
|
|
||||||
#### Clone the StartOS repository
|
|
||||||
```sh
|
|
||||||
git clone https://github.com/Start9Labs/start-os.git
|
|
||||||
cd start-os
|
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Load the PatchDB submodule
|
See component READMEs for details:
|
||||||
```sh
|
|
||||||
git submodule update --init --recursive
|
|
||||||
```
|
|
||||||
|
|
||||||
#### Continue to your project of interest for additional instructions:
|
|
||||||
- [`core`](core/README.md)
|
- [`core`](core/README.md)
|
||||||
- [`web-interfaces`](web-interfaces/README.md)
|
- [`web`](web/README.md)
|
||||||
- [`build`](build/README.md)
|
- [`build`](build/README.md)
|
||||||
- [`patch-db`](https://github.com/Start9Labs/patch-db)
|
- [`patch-db`](https://github.com/Start9Labs/patch-db)
|
||||||
|
|
||||||
|
## Environment Setup
|
||||||
|
|
||||||
|
```sh
|
||||||
|
git clone https://github.com/Start9Labs/start-os.git --recurse-submodules
|
||||||
|
cd start-os
|
||||||
|
```
|
||||||
|
|
||||||
|
### Development Mode
|
||||||
|
|
||||||
|
For faster iteration during development:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
. ./devmode.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
This sets `ENVIRONMENT=dev` and `GIT_BRANCH_AS_HASH=1` to prevent rebuilds on every commit.
|
||||||
|
|
||||||
## Building
|
## Building
|
||||||
This project uses [GNU Make](https://www.gnu.org/software/make/) to build its components. To build any specific component, simply run `make <TARGET>` replacing `<TARGET>` with the name of the target you'd like to build
|
|
||||||
|
All builds can be performed on any operating system that can run Docker.
|
||||||
|
|
||||||
|
This project uses [GNU Make](https://www.gnu.org/software/make/) to build its components.
|
||||||
|
|
||||||
### Requirements
|
### Requirements
|
||||||
|
|
||||||
- [GNU Make](https://www.gnu.org/software/make/)
|
- [GNU Make](https://www.gnu.org/software/make/)
|
||||||
- [Docker](https://docs.docker.com/get-docker/)
|
- [Docker](https://docs.docker.com/get-docker/) or [Podman](https://podman.io/)
|
||||||
- [NodeJS v18.15.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
|
- [NodeJS v20.16.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
|
||||||
- [sed](https://www.gnu.org/software/sed/)
|
- [Rust](https://rustup.rs/) (nightly for formatting)
|
||||||
- [grep](https://www.gnu.org/software/grep/)
|
- [sed](https://www.gnu.org/software/sed/), [grep](https://www.gnu.org/software/grep/), [awk](https://www.gnu.org/software/gawk/)
|
||||||
- [awk](https://www.gnu.org/software/gawk/)
|
|
||||||
- [jq](https://jqlang.github.io/jq/)
|
- [jq](https://jqlang.github.io/jq/)
|
||||||
- [gzip](https://www.gnu.org/software/gzip/)
|
- [gzip](https://www.gnu.org/software/gzip/), [brotli](https://github.com/google/brotli)
|
||||||
- [brotli](https://github.com/google/brotli)
|
|
||||||
|
|
||||||
### Environment variables
|
### Environment Variables
|
||||||
- `PLATFORM`: which platform you would like to build for. Must be one of `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `raspberrypi`
|
|
||||||
- NOTE: `nonfree` images are for including `nonfree` firmware packages in the built ISO
|
| Variable | Description |
|
||||||
- `ENVIRONMENT`: a hyphen separated set of feature flags to enable
|
|----------|-------------|
|
||||||
- `dev`: enables password ssh (INSECURE!) and does not compress frontends
|
| `PLATFORM` | Target platform: `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `riscv64`, `raspberrypi` |
|
||||||
- `unstable`: enables assertions that will cause errors on unexpected inconsistencies that are undesirable in production use either for performance or reliability reasons
|
| `ENVIRONMENT` | Hyphen-separated feature flags (see below) |
|
||||||
- `docker`: use `docker` instead of `podman`
|
| `PROFILE` | Build profile: `release` (default) or `dev` |
|
||||||
- `GIT_BRANCH_AS_HASH`: set to `1` to use the current git branch name as the git hash so that the project does not need to be rebuilt on each commit
|
| `GIT_BRANCH_AS_HASH` | Set to `1` to use git branch name as version hash (avoids rebuilds) |
|
||||||
|
|
||||||
|
**ENVIRONMENT flags:**
|
||||||
|
- `dev` - Enables password SSH before setup, skips frontend compression
|
||||||
|
- `unstable` - Enables assertions and debugging with performance penalty
|
||||||
|
- `console` - Enables tokio-console for async debugging
|
||||||
|
|
||||||
|
**Platform notes:**
|
||||||
|
- `-nonfree` variants include proprietary firmware and drivers
|
||||||
|
- `raspberrypi` includes non-free components by necessity
|
||||||
|
- Platform is remembered between builds if not specified
|
||||||
|
|
||||||
|
### Make Targets
|
||||||
|
|
||||||
|
#### Building
|
||||||
|
|
||||||
|
| Target | Description |
|
||||||
|
|--------|-------------|
|
||||||
|
| `iso` | Create full `.iso` image (not for raspberrypi) |
|
||||||
|
| `img` | Create full `.img` image (raspberrypi only) |
|
||||||
|
| `deb` | Build Debian package |
|
||||||
|
| `all` | Build all Rust binaries |
|
||||||
|
| `uis` | Build all web UIs |
|
||||||
|
| `ui` | Build main UI only |
|
||||||
|
| `ts-bindings` | Generate TypeScript bindings from Rust types |
|
||||||
|
|
||||||
|
#### Deploying to Device
|
||||||
|
|
||||||
|
For devices on the same network:
|
||||||
|
|
||||||
|
| Target | Description |
|
||||||
|
|--------|-------------|
|
||||||
|
| `update-startbox REMOTE=start9@<ip>` | Deploy binary + UI only (fastest) |
|
||||||
|
| `update-deb REMOTE=start9@<ip>` | Deploy full Debian package |
|
||||||
|
| `update REMOTE=start9@<ip>` | OTA-style update |
|
||||||
|
| `reflash REMOTE=start9@<ip>` | Reflash as if using live ISO |
|
||||||
|
| `update-overlay REMOTE=start9@<ip>` | Deploy to in-memory overlay (reverts on reboot) |
|
||||||
|
|
||||||
|
For devices on different networks (uses [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)):
|
||||||
|
|
||||||
|
| Target | Description |
|
||||||
|
|--------|-------------|
|
||||||
|
| `wormhole` | Send startbox binary |
|
||||||
|
| `wormhole-deb` | Send Debian package |
|
||||||
|
| `wormhole-squashfs` | Send squashfs image |
|
||||||
|
|
||||||
|
#### Other
|
||||||
|
|
||||||
|
| Target | Description |
|
||||||
|
|--------|-------------|
|
||||||
|
| `format` | Run code formatting (Rust nightly required) |
|
||||||
|
| `test` | Run all automated tests |
|
||||||
|
| `test-core` | Run Rust tests |
|
||||||
|
| `test-sdk` | Run SDK tests |
|
||||||
|
| `test-container-runtime` | Run container runtime tests |
|
||||||
|
| `clean` | Delete all compiled artifacts |
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make test # All tests
|
||||||
|
make test-core # Rust tests (via ./core/run-tests.sh)
|
||||||
|
make test-sdk # SDK tests
|
||||||
|
make test-container-runtime # Container runtime tests
|
||||||
|
|
||||||
|
# Run specific Rust test
|
||||||
|
cd core && cargo test <test_name> --features=test
|
||||||
|
```
|
||||||
|
|
||||||
|
## Code Formatting
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Rust (requires nightly)
|
||||||
|
make format
|
||||||
|
|
||||||
|
# TypeScript/HTML/SCSS (web)
|
||||||
|
cd web && npm run format
|
||||||
|
```
|
||||||
|
|
||||||
|
## Code Style Guidelines
|
||||||
|
|
||||||
|
### Formatting
|
||||||
|
|
||||||
|
Run the formatters before committing. Configuration is handled by `rustfmt.toml` (Rust) and prettier configs (TypeScript).
|
||||||
|
|
||||||
|
### Documentation & Comments
|
||||||
|
|
||||||
|
**Rust:**
|
||||||
|
- Add doc comments (`///`) to public APIs, structs, and non-obvious functions
|
||||||
|
- Use `//` comments sparingly for complex logic that isn't self-evident
|
||||||
|
- Prefer self-documenting code (clear naming, small functions) over comments
|
||||||
|
|
||||||
|
**TypeScript:**
|
||||||
|
- Document exported functions and complex types with JSDoc
|
||||||
|
- Keep comments focused on "why" rather than "what"
|
||||||
|
|
||||||
|
**General:**
|
||||||
|
- Don't add comments that just restate the code
|
||||||
|
- Update or remove comments when code changes
|
||||||
|
- TODOs should include context: `// TODO(username): reason`
|
||||||
|
|
||||||
|
### Commit Messages
|
||||||
|
|
||||||
|
Use [Conventional Commits](https://www.conventionalcommits.org/):
|
||||||
|
|
||||||
|
```
|
||||||
|
<type>(<scope>): <description>
|
||||||
|
|
||||||
|
[optional body]
|
||||||
|
|
||||||
|
[optional footer]
|
||||||
|
```
|
||||||
|
|
||||||
|
**Types:**
|
||||||
|
- `feat` - New feature
|
||||||
|
- `fix` - Bug fix
|
||||||
|
- `docs` - Documentation only
|
||||||
|
- `style` - Formatting, no code change
|
||||||
|
- `refactor` - Code change that neither fixes a bug nor adds a feature
|
||||||
|
- `test` - Adding or updating tests
|
||||||
|
- `chore` - Build process, dependencies, etc.
|
||||||
|
|
||||||
|
**Examples:**
|
||||||
|
```
|
||||||
|
feat(web): add dark mode toggle
|
||||||
|
fix(core): resolve race condition in service startup
|
||||||
|
docs: update CONTRIBUTING.md with style guidelines
|
||||||
|
refactor(sdk): simplify package validation logic
|
||||||
|
```
|
||||||
|
|
||||||
### Useful Make Targets
|
|
||||||
- `iso`: Create a full `.iso` image
|
|
||||||
- Only possible from Debian
|
|
||||||
- Not available for `PLATFORM=raspberrypi`
|
|
||||||
- Additional Requirements:
|
|
||||||
- [debspawn](https://github.com/lkhq/debspawn)
|
|
||||||
- `img`: Create a full `.img` image
|
|
||||||
- Only possible from Debian
|
|
||||||
- Only available for `PLATFORM=raspberrypi`
|
|
||||||
- Additional Requirements:
|
|
||||||
- [debspawn](https://github.com/lkhq/debspawn)
|
|
||||||
- `format`: Run automatic code formatting for the project
|
|
||||||
- Additional Requirements:
|
|
||||||
- [rust](https://rustup.rs/)
|
|
||||||
- `test`: Run automated tests for the project
|
|
||||||
- Additional Requirements:
|
|
||||||
- [rust](https://rustup.rs/)
|
|
||||||
- `update`: Deploy the current working project to a device over ssh as if through an over-the-air update
|
|
||||||
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
|
|
||||||
- `reflash`: Deploy the current working project to a device over ssh as if using a live `iso` image to reflash it
|
|
||||||
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
|
|
||||||
- `update-overlay`: Deploy the current working project to a device over ssh to the in-memory overlay without restarting it
|
|
||||||
- WARNING: changes will be reverted after the device is rebooted
|
|
||||||
- WARNING: changes to `init` will not take effect as the device is already initialized
|
|
||||||
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
|
|
||||||
- `wormhole`: Deploy the `startbox` to a device using [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
|
|
||||||
- When the build is complete, it will emit a command to paste into the shell of the device to upgrade it
|
|
||||||
- Additional Requirements:
|
|
||||||
- [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
|
|
||||||
- `clean`: Delete all compiled artifacts
|
|
||||||
134
DEVELOPMENT.md
Normal file
@@ -0,0 +1,134 @@
|
|||||||
|
# Setting up your development environment on Debian/Ubuntu
|
||||||
|
|
||||||
|
A step-by-step guide
|
||||||
|
|
||||||
|
> This is the only officially supported build environment.
|
||||||
|
> MacOS has limited build capabilities and Windows requires [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install)
|
||||||
|
|
||||||
|
## Installing dependencies
|
||||||
|
|
||||||
|
Run the following commands one at a time
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y ca-certificates curl gpg build-essential
|
||||||
|
curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
|
||||||
|
echo "deb [arch=$(dpkg-architecture -q DEB_HOST_ARCH) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian bookworm stable" | sudo tee /etc/apt/sources.list.d/docker.list
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y sed grep gawk jq gzip brotli containerd.io docker-ce docker-ce-cli docker-compose-plugin qemu-user-static binfmt-support squashfs-tools git debspawn rsync b3sum
|
||||||
|
sudo mkdir -p /etc/debspawn/
|
||||||
|
echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml
|
||||||
|
sudo usermod -aG docker $USER
|
||||||
|
sudo su $USER
|
||||||
|
docker run --privileged --rm tonistiigi/binfmt --install all
|
||||||
|
docker buildx create --use
|
||||||
|
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # proceed with default installation
|
||||||
|
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash
|
||||||
|
source ~/.bashrc
|
||||||
|
nvm install 24
|
||||||
|
nvm use 24
|
||||||
|
nvm alias default 24 # this prevents your machine from reverting back to another version
|
||||||
|
```
|
||||||
|
|
||||||
|
## Cloning the repository
|
||||||
|
|
||||||
|
```sh
|
||||||
|
git clone --recursive https://github.com/Start9Labs/start-os.git --branch next/major
|
||||||
|
cd start-os
|
||||||
|
```
|
||||||
|
|
||||||
|
## Building an ISO
|
||||||
|
|
||||||
|
```sh
|
||||||
|
PLATFORM=$(uname -m) ENVIRONMENT=dev make iso
|
||||||
|
```
|
||||||
|
|
||||||
|
This will build an ISO for your current architecture. If you are building to run on an architecture other than the one you are currently on, replace `$(uname -m)` with the correct platform for the device (one of `aarch64`, `aarch64-nonfree`, `x86_64`, `x86_64-nonfree`, `raspberrypi`)
|
||||||
|
|
||||||
|
## Creating a VM
|
||||||
|
|
||||||
|
### Install virt-manager
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y virt-manager
|
||||||
|
sudo usermod -aG libvirt $USER
|
||||||
|
sudo su $USER
|
||||||
|
```
|
||||||
|
|
||||||
|
### Launch virt-manager
|
||||||
|
|
||||||
|
```sh
|
||||||
|
virt-manager
|
||||||
|
```
|
||||||
|
|
||||||
|
### Create new virtual machine
|
||||||
|
|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|
|
||||||
|
#### Make sure to set "Target Path" to the path to your results directory in start-os
|
||||||
|
|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|

|
||||||
|
|
||||||
|
## Updating a VM
|
||||||
|
|
||||||
|
The fastest way to update a VM to your latest code depends on what you changed:
|
||||||
|
|
||||||
|
### UI or startd:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
PLATFORM=$(uname -m) ENVIRONMENT=dev make update-startbox REMOTE=start9@<VM IP>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Container runtime or debian dependencies:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
PLATFORM=$(uname -m) ENVIRONMENT=dev make update-deb REMOTE=start9@<VM IP>
|
||||||
|
```
|
||||||
|
|
||||||
|
### Image recipe:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
PLATFORM=$(uname -m) ENVIRONMENT=dev make update-squashfs REMOTE=start9@<VM IP>
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
If the device you are building for is not available via ssh, it is also possible to use `magic-wormhole` to send the relevant files.
|
||||||
|
|
||||||
|
### Prerequisites:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y magic-wormhole
|
||||||
|
```
|
||||||
|
|
||||||
|
As before, the fastest way to update a VM to your latest code depends on what you changed. Each of the following commands will return a command to paste into the shell of the device you would like to upgrade.
|
||||||
|
|
||||||
|
### UI or startd:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole
|
||||||
|
```
|
||||||
|
|
||||||
|
### Container runtime or debian dependencies:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole-deb
|
||||||
|
```
|
||||||
|
|
||||||
|
### Image recipe:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole-squashfs
|
||||||
|
```
|
||||||
389
Makefile
@@ -1,32 +1,44 @@
|
|||||||
PLATFORM_FILE := $(shell ./check-platform.sh)
|
ls-files = $(shell git ls-files --cached --others --exclude-standard $1)
|
||||||
ENVIRONMENT_FILE := $(shell ./check-environment.sh)
|
PROFILE = release
|
||||||
GIT_HASH_FILE := $(shell ./check-git-hash.sh)
|
|
||||||
VERSION_FILE := $(shell ./check-version.sh)
|
PLATFORM_FILE := $(shell ./build/env/check-platform.sh)
|
||||||
BASENAME := $(shell ./basename.sh)
|
ENVIRONMENT_FILE := $(shell ./build/env/check-environment.sh)
|
||||||
PLATFORM := $(shell if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi)
|
GIT_HASH_FILE := $(shell ./build/env/check-git-hash.sh)
|
||||||
|
VERSION_FILE := $(shell ./build/env/check-version.sh)
|
||||||
|
BASENAME := $(shell PROJECT=startos ./build/env/basename.sh)
|
||||||
|
PLATFORM := $(shell if [ -f $(PLATFORM_FILE) ]; then cat $(PLATFORM_FILE); else echo unknown; fi)
|
||||||
ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi)
|
ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi)
|
||||||
|
RUST_ARCH := $(shell if [ "$(ARCH)" = "riscv64" ]; then echo riscv64gc; else echo $(ARCH); fi)
|
||||||
|
REGISTRY_BASENAME := $(shell PROJECT=start-registry PLATFORM=$(ARCH) ./build/env/basename.sh)
|
||||||
|
TUNNEL_BASENAME := $(shell PROJECT=start-tunnel PLATFORM=$(ARCH) ./build/env/basename.sh)
|
||||||
IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi)
|
IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi)
|
||||||
BINS := core/target/$(ARCH)-unknown-linux-gnu/release/startbox core/target/aarch64-unknown-linux-musl/release/container-init core/target/x86_64-unknown-linux-musl/release/container-init
|
WEB_UIS := web/dist/raw/ui/index.html web/dist/raw/setup-wizard/index.html
|
||||||
WEB_UIS := web/dist/raw/ui web/dist/raw/setup-wizard web/dist/raw/diagnostic-ui web/dist/raw/install-wizard
|
COMPRESSED_WEB_UIS := web/dist/static/ui/index.html web/dist/static/setup-wizard/index.html
|
||||||
FIRMWARE_ROMS := ./firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json)
|
FIRMWARE_ROMS := build/lib/firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./build/lib/firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json)
|
||||||
BUILD_SRC := $(shell git ls-files build) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS)
|
BUILD_SRC := $(call ls-files, build/lib) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS)
|
||||||
DEBIAN_SRC := $(shell git ls-files debian/)
|
IMAGE_RECIPE_SRC := $(call ls-files, build/image-recipe/)
|
||||||
IMAGE_RECIPE_SRC := $(shell git ls-files image-recipe/)
|
STARTD_SRC := core/startd.service $(BUILD_SRC)
|
||||||
STARTD_SRC := core/startos/startd.service $(BUILD_SRC)
|
CORE_SRC := $(call ls-files, core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE)
|
||||||
COMPAT_SRC := $(shell git ls-files system-images/compat/)
|
WEB_SHARED_SRC := $(call ls-files, web/projects/shared) $(call ls-files, web/projects/marketplace) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist/index.js sdk/baseDist/package.json web/patchdb-ui-seed.json sdk/dist/package.json
|
||||||
UTILS_SRC := $(shell git ls-files system-images/utils/)
|
WEB_UI_SRC := $(call ls-files, web/projects/ui)
|
||||||
BINFMT_SRC := $(shell git ls-files system-images/binfmt/)
|
WEB_SETUP_WIZARD_SRC := $(call ls-files, web/projects/setup-wizard)
|
||||||
CORE_SRC := $(shell git ls-files core) $(shell git ls-files --recurse-submodules patch-db) web/dist/static web/patchdb-ui-seed.json $(GIT_HASH_FILE)
|
WEB_START_TUNNEL_SRC := $(call ls-files, web/projects/start-tunnel)
|
||||||
WEB_SHARED_SRC := $(shell git ls-files web/projects/shared) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules web/config.json patch-db/client/dist web/patchdb-ui-seed.json
|
|
||||||
WEB_UI_SRC := $(shell git ls-files web/projects/ui)
|
|
||||||
WEB_SETUP_WIZARD_SRC := $(shell git ls-files web/projects/setup-wizard)
|
|
||||||
WEB_DIAGNOSTIC_UI_SRC := $(shell git ls-files web/projects/diagnostic-ui)
|
|
||||||
WEB_INSTALL_WIZARD_SRC := $(shell git ls-files web/projects/install-wizard)
|
|
||||||
PATCH_DB_CLIENT_SRC := $(shell git ls-files --recurse-submodules patch-db/client)
|
PATCH_DB_CLIENT_SRC := $(shell git ls-files --recurse-submodules patch-db/client)
|
||||||
GZIP_BIN := $(shell which pigz || which gzip)
|
GZIP_BIN := $(shell which pigz || which gzip)
|
||||||
TAR_BIN := $(shell which gtar || which tar)
|
TAR_BIN := $(shell which gtar || which tar)
|
||||||
COMPILED_TARGETS := $(BINS) system-images/compat/docker-images/$(ARCH).tar system-images/utils/docker-images/$(ARCH).tar system-images/binfmt/docker-images/$(ARCH).tar
|
COMPILED_TARGETS := core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container container-runtime/rootfs.$(ARCH).squashfs
|
||||||
ALL_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo cargo-deps/aarch64-unknown-linux-gnu/release/pi-beep; fi) $(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then echo cargo-deps/$(ARCH)-unknown-linux-gnu/release/tokio-console; fi') $(PLATFORM_FILE)
|
STARTOS_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) target/$(RUST_ARCH)-unknown-linux-musl/release/startos-backup-fs $(PLATFORM_FILE) \
|
||||||
|
$(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then \
|
||||||
|
echo target/aarch64-unknown-linux-musl/release/pi-beep; \
|
||||||
|
fi) \
|
||||||
|
$(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then \
|
||||||
|
echo target/$(RUST_ARCH)-unknown-linux-musl/release/flamegraph; \
|
||||||
|
fi') \
|
||||||
|
$(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)console($$|-) ]]; then \
|
||||||
|
echo target/$(RUST_ARCH)-unknown-linux-musl/release/tokio-console; \
|
||||||
|
fi')
|
||||||
|
REGISTRY_TARGETS := core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox core/start-registryd.service
|
||||||
|
TUNNEL_TARGETS := core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox core/start-tunneld.service
|
||||||
|
|
||||||
ifeq ($(REMOTE),)
|
ifeq ($(REMOTE),)
|
||||||
mkdir = mkdir -p $1
|
mkdir = mkdir -p $1
|
||||||
@@ -49,19 +61,18 @@ endif
|
|||||||
|
|
||||||
.DELETE_ON_ERROR:
|
.DELETE_ON_ERROR:
|
||||||
|
|
||||||
.PHONY: all metadata install clean format sdk snapshots uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole test
|
.PHONY: all metadata install clean format install-cli cli uis ui reflash deb $(IMAGE_TYPE) squashfs wormhole wormhole-deb test test-core test-sdk test-container-runtime registry install-registry tunnel install-tunnel ts-bindings
|
||||||
|
|
||||||
all: $(ALL_TARGETS)
|
all: $(STARTOS_TARGETS)
|
||||||
|
|
||||||
|
touch:
|
||||||
|
touch $(STARTOS_TARGETS)
|
||||||
|
|
||||||
metadata: $(VERSION_FILE) $(PLATFORM_FILE) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE)
|
metadata: $(VERSION_FILE) $(PLATFORM_FILE) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE)
|
||||||
|
|
||||||
sudo:
|
|
||||||
sudo true
|
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
rm -f system-images/**/*.tar
|
|
||||||
rm -rf system-images/compat/target
|
|
||||||
rm -rf core/target
|
rm -rf core/target
|
||||||
|
rm -rf core/bindings
|
||||||
rm -rf web/.angular
|
rm -rf web/.angular
|
||||||
rm -f web/config.json
|
rm -f web/config.json
|
||||||
rm -rf web/node_modules
|
rm -rf web/node_modules
|
||||||
@@ -69,157 +80,280 @@ clean:
|
|||||||
rm -rf patch-db/client/node_modules
|
rm -rf patch-db/client/node_modules
|
||||||
rm -rf patch-db/client/dist
|
rm -rf patch-db/client/dist
|
||||||
rm -rf patch-db/target
|
rm -rf patch-db/target
|
||||||
rm -rf cargo-deps
|
rm -rf target
|
||||||
rm -rf dpkg-workdir
|
rm -rf dpkg-workdir
|
||||||
rm -rf image-recipe/deb
|
rm -rf image-recipe/deb
|
||||||
rm -rf results
|
rm -rf results
|
||||||
rm -rf build/lib/firmware
|
rm -rf build/lib/firmware
|
||||||
rm -f ENVIRONMENT.txt
|
rm -rf container-runtime/dist
|
||||||
rm -f PLATFORM.txt
|
rm -rf container-runtime/node_modules
|
||||||
rm -f GIT_HASH.txt
|
rm -f container-runtime/*.squashfs
|
||||||
rm -f VERSION.txt
|
(cd sdk && make clean)
|
||||||
|
rm -f env/*.txt
|
||||||
|
|
||||||
format:
|
format:
|
||||||
cd core && cargo +nightly fmt
|
cd core && cargo +nightly fmt
|
||||||
|
|
||||||
test: $(CORE_SRC) $(ENVIRONMENT_FILE)
|
test: | test-core test-sdk test-container-runtime
|
||||||
cd core && cargo build && cargo test
|
|
||||||
|
|
||||||
sdk:
|
test-core: $(CORE_SRC) $(ENVIRONMENT_FILE)
|
||||||
cd core && ./install-sdk.sh
|
./core/run-tests.sh
|
||||||
|
|
||||||
|
test-sdk: $(call ls-files, sdk) sdk/base/lib/osBindings/index.ts
|
||||||
|
cd sdk && make test
|
||||||
|
|
||||||
|
test-container-runtime: container-runtime/node_modules/.package-lock.json $(call ls-files, container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
|
||||||
|
cd container-runtime && npm test
|
||||||
|
|
||||||
|
install-cli: $(GIT_HASH_FILE)
|
||||||
|
./core/build/build-cli.sh --install
|
||||||
|
|
||||||
|
cli: $(GIT_HASH_FILE)
|
||||||
|
./core/build/build-cli.sh
|
||||||
|
|
||||||
|
registry: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox
|
||||||
|
|
||||||
|
install-registry: $(REGISTRY_TARGETS)
|
||||||
|
$(call mkdir,$(DESTDIR)/usr/bin)
|
||||||
|
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox,$(DESTDIR)/usr/bin/start-registrybox)
|
||||||
|
$(call ln,/usr/bin/start-registrybox,$(DESTDIR)/usr/bin/start-registryd)
|
||||||
|
$(call ln,/usr/bin/start-registrybox,$(DESTDIR)/usr/bin/start-registry)
|
||||||
|
|
||||||
|
$(call mkdir,$(DESTDIR)/lib/systemd/system)
|
||||||
|
$(call cp,core/start-registryd.service,$(DESTDIR)/lib/systemd/system/start-registryd.service)
|
||||||
|
|
||||||
|
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox: $(CORE_SRC) $(ENVIRONMENT_FILE)
|
||||||
|
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-registrybox.sh
|
||||||
|
|
||||||
|
tunnel: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox
|
||||||
|
|
||||||
|
install-tunnel: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox core/start-tunneld.service
|
||||||
|
$(call mkdir,$(DESTDIR)/usr/bin)
|
||||||
|
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox,$(DESTDIR)/usr/bin/start-tunnelbox)
|
||||||
|
$(call ln,/usr/bin/start-tunnelbox,$(DESTDIR)/usr/bin/start-tunneld)
|
||||||
|
$(call ln,/usr/bin/start-tunnelbox,$(DESTDIR)/usr/bin/start-tunnel)
|
||||||
|
|
||||||
|
$(call mkdir,$(DESTDIR)/lib/systemd/system)
|
||||||
|
$(call cp,core/start-tunneld.service,$(DESTDIR)/lib/systemd/system/start-tunneld.service)
|
||||||
|
|
||||||
|
$(call mkdir,$(DESTDIR)/usr/lib/startos/scripts)
|
||||||
|
$(call cp,build/lib/scripts/forward-port,$(DESTDIR)/usr/lib/startos/scripts/forward-port)
|
||||||
|
|
||||||
|
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox: $(CORE_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) web/dist/static/start-tunnel/index.html
|
||||||
|
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-tunnelbox.sh
|
||||||
|
|
||||||
deb: results/$(BASENAME).deb
|
deb: results/$(BASENAME).deb
|
||||||
|
|
||||||
debian/control: build/lib/depends build/lib/conflicts
|
results/$(BASENAME).deb: debian/dpkg-build.sh $(call ls-files,debian/startos) $(STARTOS_TARGETS)
|
||||||
./debuild/control.sh
|
PLATFORM=$(PLATFORM) REQUIRES=debian ./build/os-compat/run-compat.sh ./debian/dpkg-build.sh
|
||||||
|
|
||||||
results/$(BASENAME).deb: dpkg-build.sh $(DEBIAN_SRC) $(VERSION_FILE) $(PLATFORM_FILE) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE)
|
registry-deb: results/$(REGISTRY_BASENAME).deb
|
||||||
PLATFORM=$(PLATFORM) ./dpkg-build.sh
|
|
||||||
|
results/$(REGISTRY_BASENAME).deb: debian/dpkg-build.sh $(call ls-files,debian/start-registry) $(REGISTRY_TARGETS)
|
||||||
|
PROJECT=start-registry PLATFORM=$(ARCH) REQUIRES=debian ./build/os-compat/run-compat.sh ./debian/dpkg-build.sh
|
||||||
|
|
||||||
|
tunnel-deb: results/$(TUNNEL_BASENAME).deb
|
||||||
|
|
||||||
|
results/$(TUNNEL_BASENAME).deb: debian/dpkg-build.sh $(call ls-files,debian/start-tunnel) $(TUNNEL_TARGETS) build/lib/scripts/forward-port
|
||||||
|
PROJECT=start-tunnel PLATFORM=$(ARCH) REQUIRES=debian DEPENDS=wireguard-tools,iptables,conntrack ./build/os-compat/run-compat.sh ./debian/dpkg-build.sh
|
||||||
|
|
||||||
$(IMAGE_TYPE): results/$(BASENAME).$(IMAGE_TYPE)
|
$(IMAGE_TYPE): results/$(BASENAME).$(IMAGE_TYPE)
|
||||||
|
|
||||||
squashfs: results/$(BASENAME).squashfs
|
squashfs: results/$(BASENAME).squashfs
|
||||||
|
|
||||||
results/$(BASENAME).$(IMAGE_TYPE) results/$(BASENAME).squashfs: $(IMAGE_RECIPE_SRC) results/$(BASENAME).deb
|
results/$(BASENAME).$(IMAGE_TYPE) results/$(BASENAME).squashfs: $(IMAGE_RECIPE_SRC) results/$(BASENAME).deb
|
||||||
./image-recipe/run-local-build.sh "results/$(BASENAME).deb"
|
ARCH=$(ARCH) ./build/image-recipe/run-local-build.sh "results/$(BASENAME).deb"
|
||||||
|
|
||||||
# For creating os images. DO NOT USE
|
# For creating os images. DO NOT USE
|
||||||
install: $(ALL_TARGETS)
|
install: $(STARTOS_TARGETS)
|
||||||
$(call mkdir,$(DESTDIR)/usr/bin)
|
$(call mkdir,$(DESTDIR)/usr/bin)
|
||||||
$(call cp,core/target/$(ARCH)-unknown-linux-gnu/release/startbox,$(DESTDIR)/usr/bin/startbox)
|
$(call mkdir,$(DESTDIR)/usr/sbin)
|
||||||
|
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox,$(DESTDIR)/usr/bin/startbox)
|
||||||
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/startd)
|
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/startd)
|
||||||
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-cli)
|
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-cli)
|
||||||
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-sdk)
|
if [ "$(PLATFORM)" = "raspberrypi" ]; then $(call cp,target/aarch64-unknown-linux-musl/release/pi-beep,$(DESTDIR)/usr/bin/pi-beep); fi
|
||||||
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-deno)
|
if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then \
|
||||||
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/avahi-alias)
|
$(call cp,target/$(RUST_ARCH)-unknown-linux-musl/release/flamegraph,$(DESTDIR)/usr/bin/flamegraph); \
|
||||||
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/embassy-cli)
|
fi
|
||||||
if [ "$(PLATFORM)" = "raspberrypi" ]; then $(call cp,cargo-deps/aarch64-unknown-linux-gnu/release/pi-beep,$(DESTDIR)/usr/bin/pi-beep); fi
|
if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)console($$|-) ]]'; then \
|
||||||
if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then $(call cp,cargo-deps/$(ARCH)-unknown-linux-gnu/release/tokio-console,$(DESTDIR)/usr/bin/tokio-console); fi
|
$(call cp,target/$(RUST_ARCH)-unknown-linux-musl/release/tokio-console,$(DESTDIR)/usr/bin/tokio-console); \
|
||||||
|
fi
|
||||||
|
$(call cp,target/$(RUST_ARCH)-unknown-linux-musl/release/startos-backup-fs,$(DESTDIR)/usr/bin/startos-backup-fs)
|
||||||
|
$(call ln,/usr/bin/startos-backup-fs,$(DESTDIR)/usr/sbin/mount.backup-fs)
|
||||||
|
|
||||||
$(call mkdir,$(DESTDIR)/lib/systemd/system)
|
$(call mkdir,$(DESTDIR)/lib/systemd/system)
|
||||||
$(call cp,core/startos/startd.service,$(DESTDIR)/lib/systemd/system/startd.service)
|
$(call cp,core/startd.service,$(DESTDIR)/lib/systemd/system/startd.service)
|
||||||
|
|
||||||
$(call mkdir,$(DESTDIR)/usr/lib)
|
$(call mkdir,$(DESTDIR)/usr/lib)
|
||||||
$(call rm,$(DESTDIR)/usr/lib/startos)
|
$(call rm,$(DESTDIR)/usr/lib/startos)
|
||||||
$(call cp,build/lib,$(DESTDIR)/usr/lib/startos)
|
$(call cp,build/lib,$(DESTDIR)/usr/lib/startos)
|
||||||
|
$(call mkdir,$(DESTDIR)/usr/lib/startos/container-runtime)
|
||||||
|
$(call cp,container-runtime/rootfs.$(ARCH).squashfs,$(DESTDIR)/usr/lib/startos/container-runtime/rootfs.squashfs)
|
||||||
|
|
||||||
$(call cp,PLATFORM.txt,$(DESTDIR)/usr/lib/startos/PLATFORM.txt)
|
$(call cp,build/env/PLATFORM.txt,$(DESTDIR)/usr/lib/startos/PLATFORM.txt)
|
||||||
$(call cp,ENVIRONMENT.txt,$(DESTDIR)/usr/lib/startos/ENVIRONMENT.txt)
|
$(call cp,build/env/ENVIRONMENT.txt,$(DESTDIR)/usr/lib/startos/ENVIRONMENT.txt)
|
||||||
$(call cp,GIT_HASH.txt,$(DESTDIR)/usr/lib/startos/GIT_HASH.txt)
|
$(call cp,build/env/GIT_HASH.txt,$(DESTDIR)/usr/lib/startos/GIT_HASH.txt)
|
||||||
$(call cp,VERSION.txt,$(DESTDIR)/usr/lib/startos/VERSION.txt)
|
$(call cp,build/env/VERSION.txt,$(DESTDIR)/usr/lib/startos/VERSION.txt)
|
||||||
|
|
||||||
$(call mkdir,$(DESTDIR)/usr/lib/startos/container)
|
update-overlay: $(STARTOS_TARGETS)
|
||||||
$(call cp,core/target/aarch64-unknown-linux-musl/release/container-init,$(DESTDIR)/usr/lib/startos/container/container-init.arm64)
|
|
||||||
$(call cp,core/target/x86_64-unknown-linux-musl/release/container-init,$(DESTDIR)/usr/lib/startos/container/container-init.amd64)
|
|
||||||
|
|
||||||
$(call mkdir,$(DESTDIR)/usr/lib/startos/system-images)
|
|
||||||
$(call cp,system-images/compat/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/compat.tar)
|
|
||||||
$(call cp,system-images/utils/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/utils.tar)
|
|
||||||
$(call cp,system-images/binfmt/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/binfmt.tar)
|
|
||||||
|
|
||||||
$(call cp,firmware/$(PLATFORM),$(DESTDIR)/usr/lib/startos/firmware)
|
|
||||||
|
|
||||||
update-overlay: $(ALL_TARGETS)
|
|
||||||
@echo "\033[33m!!! THIS WILL ONLY REFLASH YOUR DEVICE IN MEMORY !!!\033[0m"
|
@echo "\033[33m!!! THIS WILL ONLY REFLASH YOUR DEVICE IN MEMORY !!!\033[0m"
|
||||||
@echo "\033[33mALL CHANGES WILL BE REVERTED IF YOU RESTART THE DEVICE\033[0m"
|
@echo "\033[33mALL CHANGES WILL BE REVERTED IF YOU RESTART THE DEVICE\033[0m"
|
||||||
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||||
@if [ "`ssh $(REMOTE) 'cat /usr/lib/startos/VERSION.txt'`" != "`cat ./VERSION.txt`" ]; then >&2 echo "StartOS requires migrations: update-overlay is unavailable." && false; fi
|
@if [ "`ssh $(REMOTE) 'cat /usr/lib/startos/VERSION.txt'`" != "`cat $(VERSION_FILE)`" ]; then >&2 echo "StartOS requires migrations: update-overlay is unavailable." && false; fi
|
||||||
$(call ssh,"sudo systemctl stop startd")
|
$(call ssh,"sudo systemctl stop startd")
|
||||||
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) PLATFORM=$(PLATFORM)
|
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) PLATFORM=$(PLATFORM)
|
||||||
$(call ssh,"sudo systemctl start startd")
|
$(call ssh,"sudo systemctl start startd")
|
||||||
|
|
||||||
wormhole: core/target/$(ARCH)-unknown-linux-gnu/release/startbox
|
wormhole: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox
|
||||||
@wormhole send core/target/$(ARCH)-unknown-linux-gnu/release/startbox 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade \"cd /usr/bin && rm startbox && wormhole receive --accept-file %s && chmod +x startbox\"\n", $$3 }'
|
@echo "Paste the following command into the shell of your StartOS server:"
|
||||||
|
@echo
|
||||||
|
@wormhole send core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade \"cd /usr/bin && rm startbox && wormhole receive --accept-file %s && chmod +x startbox\"\n", $$3 }'
|
||||||
|
|
||||||
update: $(ALL_TARGETS)
|
wormhole-deb: results/$(BASENAME).deb
|
||||||
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
@echo "Paste the following command into the shell of your StartOS server:"
|
||||||
$(call ssh,"sudo rsync -a --delete --force --info=progress2 /media/embassy/embassyfs/current/ /media/embassy/next/")
|
@echo
|
||||||
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/embassy/next PLATFORM=$(PLATFORM)
|
@wormhole send results/$(BASENAME).deb 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade '"'"'cd $$(mktemp -d) && wormhole receive --accept-file %s && apt-get install -y --reinstall ./$(BASENAME).deb'"'"'\n", $$3 }'
|
||||||
$(call ssh,'sudo NO_SYNC=1 /media/embassy/next/usr/lib/startos/scripts/chroot-and-upgrade "apt-get install -y $(shell cat ./build/lib/depends)"')
|
|
||||||
|
|
||||||
emulate-reflash: $(ALL_TARGETS)
|
wormhole-squashfs: results/$(BASENAME).squashfs
|
||||||
|
$(eval SQFS_SUM := $(shell b3sum results/$(BASENAME).squashfs | head -c 32))
|
||||||
|
$(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}'))
|
||||||
|
@echo "Paste the following command into the shell of your StartOS server:"
|
||||||
|
@echo
|
||||||
|
@wormhole send results/$(BASENAME).squashfs 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo sh -c '"'"'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE) && /usr/lib/startos/scripts/prune-boot && cd /media/startos/images && wormhole receive --accept-file %s && CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/upgrade ./$(BASENAME).squashfs'"'"'\n", $$3 }'
|
||||||
|
|
||||||
|
update: $(STARTOS_TARGETS)
|
||||||
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||||
$(call ssh,"sudo rsync -a --delete --force --info=progress2 /media/embassy/embassyfs/current/ /media/embassy/next/")
|
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
|
||||||
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/embassy/next PLATFORM=$(PLATFORM)
|
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM)
|
||||||
$(call ssh,"sudo touch /media/embassy/config/upgrade && sudo rm -f /media/embassy/config/disk.guid && sudo sync && sudo reboot")
|
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"')
|
||||||
|
|
||||||
|
update-startbox: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox # only update binary (faster than full update)
|
||||||
|
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||||
|
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
|
||||||
|
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox,/media/startos/next/usr/bin/startbox)
|
||||||
|
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync true')
|
||||||
|
|
||||||
|
update-deb: results/$(BASENAME).deb # better than update, but only available from debian
|
||||||
|
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||||
|
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
|
||||||
|
$(call mkdir,/media/startos/next/tmp/startos-deb)
|
||||||
|
$(call cp,results/$(BASENAME).deb,/media/startos/next/tmp/startos-deb/$(BASENAME).deb)
|
||||||
|
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y --reinstall /tmp/startos-deb/$(BASENAME).deb"')
|
||||||
|
|
||||||
|
update-squashfs: results/$(BASENAME).squashfs
|
||||||
|
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||||
|
$(eval SQFS_SUM := $(shell b3sum results/$(BASENAME).squashfs))
|
||||||
|
$(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}'))
|
||||||
|
$(call ssh,'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE)')
|
||||||
|
$(call ssh,'/usr/lib/startos/scripts/prune-boot')
|
||||||
|
$(call cp,results/$(BASENAME).squashfs,/media/startos/images/next.rootfs)
|
||||||
|
$(call ssh,'sudo CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/upgrade /media/startos/images/next.rootfs')
|
||||||
|
|
||||||
|
emulate-reflash: $(STARTOS_TARGETS)
|
||||||
|
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||||
|
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
|
||||||
|
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM)
|
||||||
|
$(call ssh,'sudo rm -f /media/startos/config/disk.guid /media/startos/config/overlay/etc/hostname')
|
||||||
|
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"')
|
||||||
|
|
||||||
upload-ota: results/$(BASENAME).squashfs
|
upload-ota: results/$(BASENAME).squashfs
|
||||||
TARGET=$(TARGET) KEY=$(KEY) ./upload-ota.sh
|
TARGET=$(TARGET) KEY=$(KEY) ./build/upload-ota.sh
|
||||||
|
|
||||||
build/lib/depends build/lib/conflicts: build/dpkg-deps/*
|
container-runtime/debian.$(ARCH).squashfs: ./container-runtime/download-base-image.sh
|
||||||
build/dpkg-deps/generate.sh
|
ARCH=$(ARCH) ./container-runtime/download-base-image.sh
|
||||||
|
|
||||||
$(FIRMWARE_ROMS): build/lib/firmware.json download-firmware.sh $(PLATFORM_FILE)
|
container-runtime/package-lock.json: sdk/dist/package.json
|
||||||
./download-firmware.sh $(PLATFORM)
|
npm --prefix container-runtime i
|
||||||
|
touch container-runtime/package-lock.json
|
||||||
|
|
||||||
system-images/compat/docker-images/$(ARCH).tar: $(COMPAT_SRC) core/Cargo.lock
|
container-runtime/node_modules/.package-lock.json: container-runtime/package-lock.json
|
||||||
cd system-images/compat && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar
|
npm --prefix container-runtime ci
|
||||||
|
touch container-runtime/node_modules/.package-lock.json
|
||||||
|
|
||||||
system-images/utils/docker-images/$(ARCH).tar: $(UTILS_SRC)
|
ts-bindings: core/bindings/index.ts
|
||||||
cd system-images/utils && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar
|
mkdir -p sdk/base/lib/osBindings
|
||||||
|
rsync -ac --delete core/bindings/ sdk/base/lib/osBindings/
|
||||||
|
|
||||||
system-images/binfmt/docker-images/$(ARCH).tar: $(BINFMT_SRC)
|
core/bindings/index.ts: $(call ls-files, core) $(ENVIRONMENT_FILE)
|
||||||
cd system-images/binfmt && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar
|
rm -rf core/bindings
|
||||||
|
./core/build/build-ts.sh
|
||||||
|
ls core/bindings/*.ts | sed 's/core\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' | grep -v '"./index"' | tee core/bindings/index.ts
|
||||||
|
npm --prefix sdk exec -- prettier --config ./sdk/base/package.json -w ./core/bindings/*.ts
|
||||||
|
touch core/bindings/index.ts
|
||||||
|
|
||||||
snapshots: core/snapshot-creator/Cargo.toml
|
sdk/dist/package.json sdk/baseDist/package.json: $(call ls-files, sdk) sdk/base/lib/osBindings/index.ts
|
||||||
cd core/ && ARCH=aarch64 ./build-v8-snapshot.sh
|
(cd sdk && make bundle)
|
||||||
cd core/ && ARCH=x86_64 ./build-v8-snapshot.sh
|
touch sdk/dist/package.json
|
||||||
|
touch sdk/baseDist/package.json
|
||||||
|
|
||||||
$(BINS): $(CORE_SRC) $(ENVIRONMENT_FILE)
|
# TODO: make container-runtime its own makefile?
|
||||||
cd core && ARCH=$(ARCH) ./build-prod.sh
|
container-runtime/dist/index.js: container-runtime/node_modules/.package-lock.json $(call ls-files, container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
|
||||||
touch $(BINS)
|
npm --prefix container-runtime run build
|
||||||
|
|
||||||
web/node_modules: web/package.json
|
container-runtime/dist/node_modules/.package-lock.json container-runtime/dist/package.json container-runtime/dist/package-lock.json: container-runtime/package.json container-runtime/package-lock.json sdk/dist/package.json container-runtime/install-dist-deps.sh
|
||||||
|
./container-runtime/install-dist-deps.sh
|
||||||
|
touch container-runtime/dist/node_modules/.package-lock.json
|
||||||
|
|
||||||
|
container-runtime/rootfs.$(ARCH).squashfs: container-runtime/debian.$(ARCH).squashfs container-runtime/container-runtime.service container-runtime/update-image.sh container-runtime/update-image-local.sh container-runtime/deb-install.sh container-runtime/dist/index.js container-runtime/dist/node_modules/.package-lock.json core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container
|
||||||
|
ARCH=$(ARCH) ./container-runtime/update-image-local.sh
|
||||||
|
|
||||||
|
build/lib/depends build/lib/conflicts: $(ENVIRONMENT_FILE) $(PLATFORM_FILE) $(shell ls build/dpkg-deps/*)
|
||||||
|
PLATFORM=$(PLATFORM) ARCH=$(ARCH) build/dpkg-deps/generate.sh
|
||||||
|
|
||||||
|
$(FIRMWARE_ROMS): build/lib/firmware.json ./build/download-firmware.sh $(PLATFORM_FILE)
|
||||||
|
./build/download-firmware.sh $(PLATFORM)
|
||||||
|
|
||||||
|
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox: $(CORE_SRC) $(COMPRESSED_WEB_UIS) web/patchdb-ui-seed.json $(ENVIRONMENT_FILE)
|
||||||
|
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-startbox.sh
|
||||||
|
touch core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox
|
||||||
|
|
||||||
|
core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container: $(CORE_SRC) $(ENVIRONMENT_FILE)
|
||||||
|
ARCH=$(ARCH) ./core/build/build-start-container.sh
|
||||||
|
touch core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container
|
||||||
|
|
||||||
|
web/package-lock.json: web/package.json sdk/baseDist/package.json
|
||||||
|
npm --prefix web i
|
||||||
|
touch web/package-lock.json
|
||||||
|
|
||||||
|
web/node_modules/.package-lock.json: web/package-lock.json
|
||||||
npm --prefix web ci
|
npm --prefix web ci
|
||||||
|
touch web/node_modules/.package-lock.json
|
||||||
|
|
||||||
web/dist/raw/ui: $(WEB_UI_SRC) $(WEB_SHARED_SRC)
|
web/.angular/.updated: patch-db/client/dist/index.js sdk/baseDist/package.json web/node_modules/.package-lock.json
|
||||||
|
rm -rf web/.angular
|
||||||
|
mkdir -p web/.angular
|
||||||
|
touch web/.angular/.updated
|
||||||
|
|
||||||
|
web/.i18n-checked: $(WEB_SHARED_SRC) $(WEB_UI_SRC) $(WEB_SETUP_WIZARD_SRC) $(WEB_START_TUNNEL_SRC)
|
||||||
|
npm --prefix web run check:i18n
|
||||||
|
touch web/.i18n-checked
|
||||||
|
|
||||||
|
web/dist/raw/ui/index.html: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
|
||||||
npm --prefix web run build:ui
|
npm --prefix web run build:ui
|
||||||
|
touch web/dist/raw/ui/index.html
|
||||||
|
|
||||||
web/dist/raw/setup-wizard: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC)
|
web/dist/raw/setup-wizard/index.html: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
|
||||||
npm --prefix web run build:setup
|
npm --prefix web run build:setup
|
||||||
|
touch web/dist/raw/setup-wizard/index.html
|
||||||
|
|
||||||
web/dist/raw/diagnostic-ui: $(WEB_DIAGNOSTIC_UI_SRC) $(WEB_SHARED_SRC)
|
web/dist/raw/start-tunnel/index.html: $(WEB_START_TUNNEL_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
|
||||||
npm --prefix web run build:dui
|
npm --prefix web run build:tunnel
|
||||||
|
touch web/dist/raw/start-tunnel/index.html
|
||||||
|
|
||||||
web/dist/raw/install-wizard: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_SRC)
|
web/dist/static/%/index.html: web/dist/raw/%/index.html
|
||||||
npm --prefix web run build:install-wiz
|
./web/compress-uis.sh $*
|
||||||
|
|
||||||
web/dist/static: $(WEB_UIS) $(ENVIRONMENT_FILE)
|
web/config.json: $(GIT_HASH_FILE) $(ENVIRONMENT_FILE) web/config-sample.json web/update-config.sh
|
||||||
./compress-uis.sh
|
./web/update-config.sh
|
||||||
|
|
||||||
web/config.json: $(GIT_HASH_FILE) web/config-sample.json
|
patch-db/client/node_modules/.package-lock.json: patch-db/client/package.json
|
||||||
jq '.useMocks = false' web/config-sample.json | jq '.gitHash = "$(shell cat GIT_HASH.txt)"' > web/config.json
|
|
||||||
|
|
||||||
web/patchdb-ui-seed.json: web/package.json
|
|
||||||
jq '."ack-welcome" = $(shell jq '.version' web/package.json)' web/patchdb-ui-seed.json > ui-seed.tmp
|
|
||||||
mv ui-seed.tmp web/patchdb-ui-seed.json
|
|
||||||
|
|
||||||
patch-db/client/node_modules: patch-db/client/package.json
|
|
||||||
npm --prefix patch-db/client ci
|
npm --prefix patch-db/client ci
|
||||||
|
touch patch-db/client/node_modules/.package-lock.json
|
||||||
|
|
||||||
patch-db/client/dist: $(PATCH_DB_CLIENT_SRC) patch-db/client/node_modules
|
patch-db/client/dist/index.js: $(PATCH_DB_CLIENT_SRC) patch-db/client/node_modules/.package-lock.json
|
||||||
! test -d patch-db/client/dist || rm -rf patch-db/client/dist
|
rm -rf patch-db/client/dist
|
||||||
npm --prefix web run build:deps
|
npm --prefix patch-db/client run build
|
||||||
|
touch patch-db/client/dist/index.js
|
||||||
|
|
||||||
# used by github actions
|
# used by github actions
|
||||||
compiled-$(ARCH).tar: $(COMPILED_TARGETS) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE)
|
compiled-$(ARCH).tar: $(COMPILED_TARGETS) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE)
|
||||||
@@ -231,8 +365,17 @@ uis: $(WEB_UIS)
|
|||||||
# this is a convenience step to build the UI
|
# this is a convenience step to build the UI
|
||||||
ui: web/dist/raw/ui
|
ui: web/dist/raw/ui
|
||||||
|
|
||||||
cargo-deps/aarch64-unknown-linux-gnu/release/pi-beep:
|
target/aarch64-unknown-linux-musl/release/pi-beep: ./build/build-cargo-dep.sh
|
||||||
ARCH=aarch64 ./build-cargo-dep.sh pi-beep
|
ARCH=aarch64 ./build/build-cargo-dep.sh pi-beep
|
||||||
|
|
||||||
cargo-deps/$(ARCH)-unknown-linux-gnu/release/tokio-console:
|
target/$(RUST_ARCH)-unknown-linux-musl/release/tokio-console: ./build/build-cargo-dep.sh
|
||||||
ARCH=$(ARCH) ./build-cargo-dep.sh tokio-console
|
ARCH=$(ARCH) ./build/build-cargo-dep.sh tokio-console
|
||||||
|
touch $@
|
||||||
|
|
||||||
|
target/$(RUST_ARCH)-unknown-linux-musl/release/startos-backup-fs: ./build/build-cargo-dep.sh
|
||||||
|
ARCH=$(ARCH) ./build/build-cargo-dep.sh --git https://github.com/Start9Labs/start-fs.git startos-backup-fs
|
||||||
|
touch $@
|
||||||
|
|
||||||
|
target/$(RUST_ARCH)-unknown-linux-musl/release/flamegraph: ./build/build-cargo-dep.sh
|
||||||
|
ARCH=$(ARCH) ./build/build-cargo-dep.sh flamegraph
|
||||||
|
touch $@
|
||||||
|
|||||||
9
agents/TODO.md
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
# AI Agent TODOs
|
||||||
|
|
||||||
|
Pending tasks for AI agents. Remove items when completed.
|
||||||
|
|
||||||
|
## Unreviewed CLAUDE.md Sections
|
||||||
|
|
||||||
|
- [ ] Architecture - Web (`/web`) - @MattDHill
|
||||||
|
|
||||||
|
|
||||||
201
agents/VERSION_BUMP.md
Normal file
@@ -0,0 +1,201 @@
|
|||||||
|
# StartOS Version Bump Guide
|
||||||
|
|
||||||
|
This document explains how to bump the StartOS version across the entire codebase.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
When bumping from version `X.Y.Z-alpha.N` to `X.Y.Z-alpha.N+1`, you need to update files in multiple locations across the repository. The `// VERSION_BUMP` comment markers indicate where changes are needed.
|
||||||
|
|
||||||
|
## Files to Update
|
||||||
|
|
||||||
|
### 1. Core Rust Crate Version
|
||||||
|
|
||||||
|
**File: `core/Cargo.toml`**
|
||||||
|
|
||||||
|
Update the version string (line ~18):
|
||||||
|
|
||||||
|
```toml
|
||||||
|
version = "0.4.0-alpha.15" # VERSION_BUMP
|
||||||
|
```
|
||||||
|
|
||||||
|
**File: `core/Cargo.lock`**
|
||||||
|
|
||||||
|
This file is auto-generated. After updating `Cargo.toml`, run:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cd core
|
||||||
|
cargo check
|
||||||
|
```
|
||||||
|
|
||||||
|
This will update the version in `Cargo.lock` automatically.
|
||||||
|
|
||||||
|
### 2. Create New Version Migration Module
|
||||||
|
|
||||||
|
**File: `core/src/version/vX_Y_Z_alpha_N+1.rs`**
|
||||||
|
|
||||||
|
Create a new version file by copying the previous version and updating:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use exver::{PreReleaseSegment, VersionRange};
|
||||||
|
|
||||||
|
use super::v0_3_5::V0_3_0_COMPAT;
|
||||||
|
use super::{VersionT, v0_4_0_alpha_14}; // Update to previous version
|
||||||
|
use crate::prelude::*;
|
||||||
|
|
||||||
|
lazy_static::lazy_static! {
|
||||||
|
static ref V0_4_0_alpha_15: exver::Version = exver::Version::new(
|
||||||
|
[0, 4, 0],
|
||||||
|
[PreReleaseSegment::String("alpha".into()), 15.into()] // Update number
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Copy, Debug, Default)]
|
||||||
|
pub struct Version;
|
||||||
|
|
||||||
|
impl VersionT for Version {
|
||||||
|
type Previous = v0_4_0_alpha_14::Version; // Update to previous version
|
||||||
|
type PreUpRes = ();
|
||||||
|
|
||||||
|
async fn pre_up(self) -> Result<Self::PreUpRes, Error> {
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
fn semver(self) -> exver::Version {
|
||||||
|
V0_4_0_alpha_15.clone() // Update version name
|
||||||
|
}
|
||||||
|
fn compat(self) -> &'static VersionRange {
|
||||||
|
&V0_3_0_COMPAT
|
||||||
|
}
|
||||||
|
#[instrument(skip_all)]
|
||||||
|
fn up(self, _db: &mut Value, _: Self::PreUpRes) -> Result<Value, Error> {
|
||||||
|
// Add migration logic here if needed
|
||||||
|
Ok(Value::Null)
|
||||||
|
}
|
||||||
|
fn down(self, _db: &mut Value) -> Result<(), Error> {
|
||||||
|
// Add rollback logic here if needed
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3. Update Version Module Registry
|
||||||
|
|
||||||
|
**File: `core/src/version/mod.rs`**
|
||||||
|
|
||||||
|
Make changes in **5 locations**:
|
||||||
|
|
||||||
|
#### Location 1: Module Declaration (~line 57)
|
||||||
|
|
||||||
|
Add the new module after the previous version:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
mod v0_4_0_alpha_14;
|
||||||
|
mod v0_4_0_alpha_15; // Add this
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Location 2: Current Type Alias (~line 59)
|
||||||
|
|
||||||
|
Update the `Current` type and move the `// VERSION_BUMP` comment:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub type Current = v0_4_0_alpha_15::Version; // VERSION_BUMP
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Location 3: Version Enum (~line 175)
|
||||||
|
|
||||||
|
Remove `// VERSION_BUMP` from the previous version, add new variant, add comment:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
V0_4_0_alpha_14(Wrapper<v0_4_0_alpha_14::Version>),
|
||||||
|
V0_4_0_alpha_15(Wrapper<v0_4_0_alpha_15::Version>), // VERSION_BUMP
|
||||||
|
Other(exver::Version),
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Location 4: as_version_t() Match (~line 233)
|
||||||
|
|
||||||
|
Remove `// VERSION_BUMP`, add new match arm, add comment:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
Self::V0_4_0_alpha_14(v) => DynVersion(Box::new(v.0)),
|
||||||
|
Self::V0_4_0_alpha_15(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP
|
||||||
|
Self::Other(v) => {
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Location 5: as_exver() Match (~line 284, inside #[cfg(test)])
|
||||||
|
|
||||||
|
Remove `// VERSION_BUMP`, add new match arm, add comment:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
Version::V0_4_0_alpha_14(Wrapper(x)) => x.semver(),
|
||||||
|
Version::V0_4_0_alpha_15(Wrapper(x)) => x.semver(), // VERSION_BUMP
|
||||||
|
Version::Other(x) => x.clone(),
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4. SDK TypeScript Version
|
||||||
|
|
||||||
|
**File: `sdk/package/lib/StartSdk.ts`**
|
||||||
|
|
||||||
|
Update the OSVersion constant (~line 64):
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
export const OSVersion = testTypeVersion("0.4.0-alpha.15");
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5. Web UI Package Version
|
||||||
|
|
||||||
|
**File: `web/package.json`**
|
||||||
|
|
||||||
|
Update the version field:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "startos-ui",
|
||||||
|
"version": "0.4.0-alpha.15",
|
||||||
|
...
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**File: `web/package-lock.json`**
|
||||||
|
|
||||||
|
This file is auto-generated, but it's faster to update manually. Find all instances of "startos-ui" and update the version field.
|
||||||
|
|
||||||
|
## Verification Step
|
||||||
|
|
||||||
|
```
|
||||||
|
make
|
||||||
|
```
|
||||||
|
|
||||||
|
## VERSION_BUMP Comment Pattern
|
||||||
|
|
||||||
|
The `// VERSION_BUMP` comment serves as a marker for where to make changes next time:
|
||||||
|
|
||||||
|
- Always **remove** it from the old location
|
||||||
|
- **Add** the new version entry
|
||||||
|
- **Move** the comment to mark the new location
|
||||||
|
|
||||||
|
This pattern helps you quickly find all the places that need updating in the next version bump.
|
||||||
|
|
||||||
|
## Summary Checklist
|
||||||
|
|
||||||
|
- [ ] Update `core/Cargo.toml` version
|
||||||
|
- [ ] Create new `core/src/version/vX_Y_Z_alpha_N+1.rs` file
|
||||||
|
- [ ] Update `core/src/version/mod.rs` in 5 locations
|
||||||
|
- [ ] Run `cargo check` to update `core/Cargo.lock`
|
||||||
|
- [ ] Update `sdk/package/lib/StartSdk.ts` OSVersion
|
||||||
|
- [ ] Update `web/package.json` and `web/package-lock.json` version
|
||||||
|
- [ ] Verify all changes compile/build successfully
|
||||||
|
|
||||||
|
## Migration Logic
|
||||||
|
|
||||||
|
The `up()` and `down()` methods in the version file handle database migrations:
|
||||||
|
|
||||||
|
- **up()**: Migrates the database from the previous version to this version
|
||||||
|
- **down()**: Rolls back from this version to the previous version
|
||||||
|
- **pre_up()**: Runs before migration, useful for pre-migration checks or data gathering
|
||||||
|
|
||||||
|
If no migration is needed, return `Ok(Value::Null)` for `up()` and `Ok(())` for `down()`.
|
||||||
|
|
||||||
|
For complex migrations, you may need to:
|
||||||
|
|
||||||
|
1. Update `type PreUpRes` to pass data between `pre_up()` and `up()`
|
||||||
|
2. Implement database transformations in the `up()` method
|
||||||
|
3. Implement reverse transformations in `down()` for rollback support
|
||||||
249
agents/core-rust-patterns.md
Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
# Utilities & Patterns
|
||||||
|
|
||||||
|
This document covers common utilities and patterns used throughout the StartOS codebase.
|
||||||
|
|
||||||
|
## Util Module (`core/src/util/`)
|
||||||
|
|
||||||
|
The `util` module contains reusable utilities. Key submodules:
|
||||||
|
|
||||||
|
| Module | Purpose |
|
||||||
|
|--------|---------|
|
||||||
|
| `actor/` | Actor pattern implementation for concurrent state management |
|
||||||
|
| `collections/` | Custom collection types |
|
||||||
|
| `crypto.rs` | Cryptographic utilities (encryption, hashing) |
|
||||||
|
| `future.rs` | Future/async utilities |
|
||||||
|
| `io.rs` | File I/O helpers (create_file, canonicalize, etc.) |
|
||||||
|
| `iter.rs` | Iterator extensions |
|
||||||
|
| `net.rs` | Network utilities |
|
||||||
|
| `rpc.rs` | RPC helpers |
|
||||||
|
| `rpc_client.rs` | RPC client utilities |
|
||||||
|
| `serde.rs` | Serialization helpers (Base64, display/fromstr, etc.) |
|
||||||
|
| `sync.rs` | Synchronization primitives (SyncMutex, etc.) |
|
||||||
|
|
||||||
|
## Command Invocation (`Invoke` trait)
|
||||||
|
|
||||||
|
The `Invoke` trait provides a clean way to run external commands with error handling:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::Invoke;
|
||||||
|
|
||||||
|
// Simple invocation
|
||||||
|
tokio::process::Command::new("ls")
|
||||||
|
.arg("-la")
|
||||||
|
.invoke(ErrorKind::Filesystem)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// With timeout
|
||||||
|
tokio::process::Command::new("slow-command")
|
||||||
|
.timeout(Some(Duration::from_secs(30)))
|
||||||
|
.invoke(ErrorKind::Timeout)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// With input
|
||||||
|
let mut input = Cursor::new(b"input data");
|
||||||
|
tokio::process::Command::new("cat")
|
||||||
|
.input(Some(&mut input))
|
||||||
|
.invoke(ErrorKind::Filesystem)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Piped commands
|
||||||
|
tokio::process::Command::new("cat")
|
||||||
|
.arg("file.txt")
|
||||||
|
.pipe(&mut tokio::process::Command::new("grep").arg("pattern"))
|
||||||
|
.invoke(ErrorKind::Filesystem)
|
||||||
|
.await?;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Guard Pattern
|
||||||
|
|
||||||
|
Guards ensure cleanup happens when they go out of scope.
|
||||||
|
|
||||||
|
### `GeneralGuard` / `GeneralBoxedGuard`
|
||||||
|
|
||||||
|
For arbitrary cleanup actions:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::GeneralGuard;
|
||||||
|
|
||||||
|
let guard = GeneralGuard::new(|| {
|
||||||
|
println!("Cleanup runs on drop");
|
||||||
|
});
|
||||||
|
|
||||||
|
// Do work...
|
||||||
|
|
||||||
|
// Explicit drop with action
|
||||||
|
guard.drop();
|
||||||
|
|
||||||
|
// Or skip the action
|
||||||
|
// guard.drop_without_action();
|
||||||
|
```
|
||||||
|
|
||||||
|
### `FileLock`
|
||||||
|
|
||||||
|
File-based locking with automatic unlock:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::FileLock;
|
||||||
|
|
||||||
|
let lock = FileLock::new("/path/to/lockfile", true).await?; // blocking=true
|
||||||
|
// Lock held until dropped or explicitly unlocked
|
||||||
|
lock.unlock().await?;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Mount Guard Pattern (`core/src/disk/mount/guard.rs`)
|
||||||
|
|
||||||
|
RAII guards for filesystem mounts. Ensures filesystems are unmounted when guards are dropped.
|
||||||
|
|
||||||
|
### `MountGuard`
|
||||||
|
|
||||||
|
Basic mount guard:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::disk::mount::guard::MountGuard;
|
||||||
|
use crate::disk::mount::filesystem::{MountType, ReadOnly};
|
||||||
|
|
||||||
|
let guard = MountGuard::mount(&filesystem, "/mnt/target", ReadOnly).await?;
|
||||||
|
|
||||||
|
// Use the mounted filesystem at guard.path()
|
||||||
|
do_something(guard.path()).await?;
|
||||||
|
|
||||||
|
// Explicit unmount (or auto-unmounts on drop)
|
||||||
|
guard.unmount(false).await?; // false = don't delete mountpoint
|
||||||
|
```
|
||||||
|
|
||||||
|
### `TmpMountGuard`
|
||||||
|
|
||||||
|
Reference-counted temporary mount (mounts to `/media/startos/tmp/`):
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::disk::mount::guard::TmpMountGuard;
|
||||||
|
use crate::disk::mount::filesystem::ReadOnly;
|
||||||
|
|
||||||
|
// Multiple clones share the same mount
|
||||||
|
let guard1 = TmpMountGuard::mount(&filesystem, ReadOnly).await?;
|
||||||
|
let guard2 = guard1.clone();
|
||||||
|
|
||||||
|
// Mount stays alive while any guard exists
|
||||||
|
// Auto-unmounts when last guard is dropped
|
||||||
|
```
|
||||||
|
|
||||||
|
### `GenericMountGuard` trait
|
||||||
|
|
||||||
|
All mount guards implement this trait:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub trait GenericMountGuard: std::fmt::Debug + Send + Sync + 'static {
|
||||||
|
fn path(&self) -> &Path;
|
||||||
|
fn unmount(self) -> impl Future<Output = Result<(), Error>> + Send;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `SubPath`
|
||||||
|
|
||||||
|
Wraps a mount guard to point to a subdirectory:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::disk::mount::guard::SubPath;
|
||||||
|
|
||||||
|
let mount = TmpMountGuard::mount(&filesystem, ReadOnly).await?;
|
||||||
|
let subdir = SubPath::new(mount, "data/subdir");
|
||||||
|
|
||||||
|
// subdir.path() returns the full path including subdirectory
|
||||||
|
```
|
||||||
|
|
||||||
|
## FileSystem Implementations (`core/src/disk/mount/filesystem/`)
|
||||||
|
|
||||||
|
Various filesystem types that can be mounted:
|
||||||
|
|
||||||
|
| Type | Description |
|
||||||
|
|------|-------------|
|
||||||
|
| `bind.rs` | Bind mounts |
|
||||||
|
| `block_dev.rs` | Block device mounts |
|
||||||
|
| `cifs.rs` | CIFS/SMB network shares |
|
||||||
|
| `ecryptfs.rs` | Encrypted filesystem |
|
||||||
|
| `efivarfs.rs` | EFI variables |
|
||||||
|
| `httpdirfs.rs` | HTTP directory as filesystem |
|
||||||
|
| `idmapped.rs` | ID-mapped mounts |
|
||||||
|
| `label.rs` | Mount by label |
|
||||||
|
| `loop_dev.rs` | Loop device mounts |
|
||||||
|
| `overlayfs.rs` | Overlay filesystem |
|
||||||
|
|
||||||
|
## Other Useful Utilities
|
||||||
|
|
||||||
|
### `Apply` / `ApplyRef` traits
|
||||||
|
|
||||||
|
Fluent method chaining:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::Apply;
|
||||||
|
|
||||||
|
let result = some_value
|
||||||
|
.apply(|v| transform(v))
|
||||||
|
.apply(|v| another_transform(v));
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Container<T>`
|
||||||
|
|
||||||
|
Async-safe optional container:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::Container;
|
||||||
|
|
||||||
|
let container = Container::new(None);
|
||||||
|
container.set(value).await;
|
||||||
|
let taken = container.take().await;
|
||||||
|
```
|
||||||
|
|
||||||
|
### `HashWriter<H, W>`
|
||||||
|
|
||||||
|
Write data while computing hash:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::HashWriter;
|
||||||
|
use sha2::Sha256;
|
||||||
|
|
||||||
|
let writer = HashWriter::new(Sha256::new(), file);
|
||||||
|
// Write data...
|
||||||
|
let (hasher, file) = writer.finish();
|
||||||
|
let hash = hasher.finalize();
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Never` type
|
||||||
|
|
||||||
|
Uninhabited type for impossible cases:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::Never;
|
||||||
|
|
||||||
|
fn impossible() -> Never {
|
||||||
|
// This function can never return
|
||||||
|
}
|
||||||
|
|
||||||
|
let never: Never = impossible();
|
||||||
|
never.absurd::<String>() // Can convert to any type
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MaybeOwned<'a, T>`
|
||||||
|
|
||||||
|
Either borrowed or owned data:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::MaybeOwned;
|
||||||
|
|
||||||
|
fn accept_either(data: MaybeOwned<'_, String>) {
|
||||||
|
// Use &*data to access the value
|
||||||
|
}
|
||||||
|
|
||||||
|
accept_either(MaybeOwned::from(&existing_string));
|
||||||
|
accept_either(MaybeOwned::from(owned_string));
|
||||||
|
```
|
||||||
|
|
||||||
|
### `new_guid()`
|
||||||
|
|
||||||
|
Generate a random GUID:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::new_guid;
|
||||||
|
|
||||||
|
let guid = new_guid(); // Returns InternedString
|
||||||
|
```
|
||||||
100
agents/i18n-patterns.md
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
# i18n Patterns in `core/`
|
||||||
|
|
||||||
|
## Library & Setup
|
||||||
|
|
||||||
|
**Crate:** [`rust-i18n`](https://crates.io/crates/rust-i18n) v3.1.5 (`core/Cargo.toml`)
|
||||||
|
|
||||||
|
**Initialization** (`core/src/lib.rs:3`):
|
||||||
|
```rust
|
||||||
|
rust_i18n::i18n!("locales", fallback = ["en_US"]);
|
||||||
|
```
|
||||||
|
This macro scans `core/locales/` at compile time and embeds all translations as constants.
|
||||||
|
|
||||||
|
**Prelude re-export** (`core/src/prelude.rs:4`):
|
||||||
|
```rust
|
||||||
|
pub use rust_i18n::t;
|
||||||
|
```
|
||||||
|
Most modules import `t!` via the prelude.
|
||||||
|
|
||||||
|
## Translation File
|
||||||
|
|
||||||
|
**Location:** `core/locales/i18n.yaml`
|
||||||
|
**Format:** YAML v2 (~755 keys)
|
||||||
|
|
||||||
|
**Supported languages:** `en_US`, `de_DE`, `es_ES`, `fr_FR`, `pl_PL`
|
||||||
|
|
||||||
|
**Entry structure:**
|
||||||
|
```yaml
|
||||||
|
namespace.sub.key-name:
|
||||||
|
en_US: "English text with %{param}"
|
||||||
|
de_DE: "German text with %{param}"
|
||||||
|
# ...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Using `t!()`
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// Simple key
|
||||||
|
t!("error.unknown")
|
||||||
|
|
||||||
|
// With parameter interpolation (%{name} in YAML)
|
||||||
|
t!("bins.deprecated.renamed", old = old_name, new = new_name)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Naming Conventions
|
||||||
|
|
||||||
|
Keys use **dot-separated hierarchical namespaces** with **kebab-case** for multi-word segments:
|
||||||
|
|
||||||
|
```
|
||||||
|
<module>.<submodule>.<descriptive-name>
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
- `error.incorrect-password` — error kind label
|
||||||
|
- `bins.start-init.updating-firmware` — startup phase message
|
||||||
|
- `backup.bulk.complete-title` — backup notification title
|
||||||
|
- `help.arg.acme-contact` — CLI help text for an argument
|
||||||
|
- `context.diagnostic.starting-diagnostic-ui` — diagnostic context status
|
||||||
|
|
||||||
|
### Top-Level Namespaces
|
||||||
|
|
||||||
|
| Namespace | Purpose |
|
||||||
|
|-----------|---------|
|
||||||
|
| `error.*` | `ErrorKind` display strings (see `src/error.rs`) |
|
||||||
|
| `bins.*` | CLI binary messages (deprecated, start-init, startd, etc.) |
|
||||||
|
| `init.*` | Initialization phase labels |
|
||||||
|
| `setup.*` | First-run setup messages |
|
||||||
|
| `context.*` | Context startup messages (diagnostic, setup, CLI) |
|
||||||
|
| `service.*` | Service lifecycle messages |
|
||||||
|
| `backup.*` | Backup/restore operation messages |
|
||||||
|
| `registry.*` | Package registry messages |
|
||||||
|
| `net.*` | Network-related messages |
|
||||||
|
| `middleware.*` | Request middleware messages (auth, etc.) |
|
||||||
|
| `disk.*` | Disk operation messages |
|
||||||
|
| `lxc.*` | Container management messages |
|
||||||
|
| `system.*` | System monitoring/metrics messages |
|
||||||
|
| `notifications.*` | User-facing notification messages |
|
||||||
|
| `update.*` | OS update messages |
|
||||||
|
| `util.*` | Utility messages (TUI, RPC) |
|
||||||
|
| `ssh.*` | SSH operation messages |
|
||||||
|
| `shutdown.*` | Shutdown-related messages |
|
||||||
|
| `logs.*` | Log-related messages |
|
||||||
|
| `auth.*` | Authentication messages |
|
||||||
|
| `help.*` | CLI help text (`help.arg.<arg-name>`) |
|
||||||
|
| `about.*` | CLI command descriptions |
|
||||||
|
|
||||||
|
## Locale Selection
|
||||||
|
|
||||||
|
`core/src/bins/mod.rs:15-36` — `set_locale_from_env()`:
|
||||||
|
|
||||||
|
1. Reads `LANG` environment variable
|
||||||
|
2. Strips `.UTF-8` suffix
|
||||||
|
3. Exact-matches against available locales, falls back to language-prefix match (e.g. `en_GB` matches `en_US`)
|
||||||
|
|
||||||
|
## Adding New Keys
|
||||||
|
|
||||||
|
1. Add the key to `core/locales/i18n.yaml` with all 5 language translations
|
||||||
|
2. Use the `t!("your.key.name")` macro in Rust code
|
||||||
|
3. Follow existing namespace conventions — match the module path where the key is used
|
||||||
|
4. Use kebab-case for multi-word segments
|
||||||
|
5. Translations are validated at compile time
|
||||||
226
agents/rpc-toolkit.md
Normal file
@@ -0,0 +1,226 @@
|
|||||||
|
# rpc-toolkit
|
||||||
|
|
||||||
|
StartOS uses [rpc-toolkit](https://github.com/Start9Labs/rpc-toolkit) for its JSON-RPC API. This document covers the patterns used in this codebase.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
The API is JSON-RPC (not REST). All endpoints are RPC methods organized in a hierarchical command structure.
|
||||||
|
|
||||||
|
## Handler Functions
|
||||||
|
|
||||||
|
There are four types of handler functions, chosen based on the function's characteristics:
|
||||||
|
|
||||||
|
### `from_fn_async` - Async handlers
|
||||||
|
For standard async functions. Most handlers use this.
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub async fn my_handler(ctx: RpcContext, params: MyParams) -> Result<MyResponse, Error> {
|
||||||
|
// Can use .await
|
||||||
|
}
|
||||||
|
|
||||||
|
from_fn_async(my_handler)
|
||||||
|
```
|
||||||
|
|
||||||
|
### `from_fn_async_local` - Non-thread-safe async handlers
|
||||||
|
For async functions that are not `Send` (cannot be safely moved between threads). Use when working with non-thread-safe types.
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub async fn cli_download(ctx: CliContext, params: Params) -> Result<(), Error> {
|
||||||
|
// Non-Send async operations
|
||||||
|
}
|
||||||
|
|
||||||
|
from_fn_async_local(cli_download)
|
||||||
|
```
|
||||||
|
|
||||||
|
### `from_fn_blocking` - Sync blocking handlers
|
||||||
|
For synchronous functions that perform blocking I/O or long computations.
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub fn query_dns(ctx: RpcContext, params: DnsParams) -> Result<DnsResponse, Error> {
|
||||||
|
// Blocking operations (file I/O, DNS lookup, etc.)
|
||||||
|
}
|
||||||
|
|
||||||
|
from_fn_blocking(query_dns)
|
||||||
|
```
|
||||||
|
|
||||||
|
### `from_fn` - Sync non-blocking handlers
|
||||||
|
For pure functions or quick synchronous operations with no I/O.
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub fn echo(ctx: RpcContext, params: EchoParams) -> Result<String, Error> {
|
||||||
|
Ok(params.message)
|
||||||
|
}
|
||||||
|
|
||||||
|
from_fn(echo)
|
||||||
|
```
|
||||||
|
|
||||||
|
## ParentHandler
|
||||||
|
|
||||||
|
Groups related RPC methods into a hierarchy:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async};
|
||||||
|
|
||||||
|
pub fn my_api<C: Context>() -> ParentHandler<C> {
|
||||||
|
ParentHandler::new()
|
||||||
|
.subcommand("list", from_fn_async(list_handler).with_call_remote::<CliContext>())
|
||||||
|
.subcommand("create", from_fn_async(create_handler).with_call_remote::<CliContext>())
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Handler Extensions
|
||||||
|
|
||||||
|
Chain methods to configure handler behavior.
|
||||||
|
|
||||||
|
**Ordering rules:**
|
||||||
|
1. `with_about()` must come AFTER other CLI modifiers (`no_display()`, `with_custom_display_fn()`, etc.)
|
||||||
|
2. `with_call_remote()` must be the LAST adapter in the chain
|
||||||
|
|
||||||
|
| Method | Purpose |
|
||||||
|
|--------|---------|
|
||||||
|
| `.with_metadata("key", Value)` | Attach metadata for middleware |
|
||||||
|
| `.no_cli()` | RPC-only, not available via CLI |
|
||||||
|
| `.no_display()` | No CLI output |
|
||||||
|
| `.with_display_serializable()` | Default JSON/YAML output for CLI |
|
||||||
|
| `.with_custom_display_fn(\|_, res\| ...)` | Custom CLI output formatting |
|
||||||
|
| `.with_about("about.description")` | Add help text (i18n key) - **after CLI modifiers** |
|
||||||
|
| `.with_call_remote::<CliContext>()` | Enable CLI to call remotely - **must be last** |
|
||||||
|
|
||||||
|
### Correct ordering example:
|
||||||
|
```rust
|
||||||
|
from_fn_async(my_handler)
|
||||||
|
.with_metadata("sync_db", Value::Bool(true)) // metadata early
|
||||||
|
.no_display() // CLI modifier
|
||||||
|
.with_about("about.my-handler") // after CLI modifiers
|
||||||
|
.with_call_remote::<CliContext>() // always last
|
||||||
|
```
|
||||||
|
|
||||||
|
## Metadata by Middleware
|
||||||
|
|
||||||
|
Metadata tags are processed by different middleware. Group them logically:
|
||||||
|
|
||||||
|
### Auth Middleware (`middleware/auth/mod.rs`)
|
||||||
|
|
||||||
|
| Metadata | Default | Description |
|
||||||
|
|----------|---------|-------------|
|
||||||
|
| `authenticated` | `true` | Whether endpoint requires authentication. Set to `false` for public endpoints. |
|
||||||
|
|
||||||
|
### Session Auth Middleware (`middleware/auth/session.rs`)
|
||||||
|
|
||||||
|
| Metadata | Default | Description |
|
||||||
|
|----------|---------|-------------|
|
||||||
|
| `login` | `false` | Special handling for login endpoints (rate limiting, cookie setting) |
|
||||||
|
| `get_session` | `false` | Inject session ID into params as `__Auth_session` |
|
||||||
|
|
||||||
|
### Signature Auth Middleware (`middleware/auth/signature.rs`)
|
||||||
|
|
||||||
|
| Metadata | Default | Description |
|
||||||
|
|----------|---------|-------------|
|
||||||
|
| `get_signer` | `false` | Inject signer public key into params as `__Auth_signer` |
|
||||||
|
|
||||||
|
### Registry Auth (extends Signature Auth)
|
||||||
|
|
||||||
|
| Metadata | Default | Description |
|
||||||
|
|----------|---------|-------------|
|
||||||
|
| `admin` | `false` | Require admin privileges (signer must be in admin list) |
|
||||||
|
| `get_device_info` | `false` | Inject device info header for hardware filtering |
|
||||||
|
|
||||||
|
### Database Middleware (`middleware/db.rs`)
|
||||||
|
|
||||||
|
| Metadata | Default | Description |
|
||||||
|
|----------|---------|-------------|
|
||||||
|
| `sync_db` | `false` | Sync database after mutation, add `X-Patch-Sequence` header |
|
||||||
|
|
||||||
|
## Context Types
|
||||||
|
|
||||||
|
Different contexts for different execution environments:
|
||||||
|
|
||||||
|
- `RpcContext` - Web/RPC requests with full service access
|
||||||
|
- `CliContext` - CLI operations, calls remote RPC
|
||||||
|
- `InitContext` - During system initialization
|
||||||
|
- `DiagnosticContext` - Diagnostic/recovery mode
|
||||||
|
- `RegistryContext` - Registry daemon context
|
||||||
|
- `EffectContext` - Service effects context (container-to-host calls)
|
||||||
|
|
||||||
|
## Parameter Structs
|
||||||
|
|
||||||
|
Parameters use derive macros for JSON-RPC, CLI parsing, and TypeScript generation:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[derive(Deserialize, Serialize, Parser, TS)]
|
||||||
|
#[serde(rename_all = "camelCase")] // JSON-RPC uses camelCase
|
||||||
|
#[command(rename_all = "kebab-case")] // CLI uses kebab-case
|
||||||
|
#[ts(export)] // Generate TypeScript types
|
||||||
|
pub struct MyParams {
|
||||||
|
pub package_id: PackageId,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Middleware Injection
|
||||||
|
|
||||||
|
Auth middleware can inject values into params using special field names:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
#[derive(Deserialize, Serialize, Parser, TS)]
|
||||||
|
pub struct MyParams {
|
||||||
|
#[ts(skip)]
|
||||||
|
#[serde(rename = "__Auth_session")] // Injected by session auth
|
||||||
|
session: InternedString,
|
||||||
|
|
||||||
|
#[ts(skip)]
|
||||||
|
#[serde(rename = "__Auth_signer")] // Injected by signature auth
|
||||||
|
signer: AnyVerifyingKey,
|
||||||
|
|
||||||
|
#[ts(skip)]
|
||||||
|
#[serde(rename = "__Auth_userAgent")] // Injected during login
|
||||||
|
user_agent: Option<String>,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Common Patterns
|
||||||
|
|
||||||
|
### Adding a New RPC Endpoint
|
||||||
|
|
||||||
|
1. Define params struct with `Deserialize, Serialize, Parser, TS`
|
||||||
|
2. Choose handler type based on sync/async and thread-safety
|
||||||
|
3. Write handler function taking `(Context, Params) -> Result<Response, Error>`
|
||||||
|
4. Add to parent handler with appropriate extensions (display modifiers before `with_about`)
|
||||||
|
5. TypeScript types auto-generated via `make ts-bindings`
|
||||||
|
|
||||||
|
### Public (Unauthenticated) Endpoint
|
||||||
|
|
||||||
|
```rust
|
||||||
|
from_fn_async(get_info)
|
||||||
|
.with_metadata("authenticated", Value::Bool(false))
|
||||||
|
.with_display_serializable()
|
||||||
|
.with_about("about.get-info")
|
||||||
|
.with_call_remote::<CliContext>() // last
|
||||||
|
```
|
||||||
|
|
||||||
|
### Mutating Endpoint with DB Sync
|
||||||
|
|
||||||
|
```rust
|
||||||
|
from_fn_async(update_config)
|
||||||
|
.with_metadata("sync_db", Value::Bool(true))
|
||||||
|
.no_display()
|
||||||
|
.with_about("about.update-config")
|
||||||
|
.with_call_remote::<CliContext>() // last
|
||||||
|
```
|
||||||
|
|
||||||
|
### Session-Aware Endpoint
|
||||||
|
|
||||||
|
```rust
|
||||||
|
from_fn_async(logout)
|
||||||
|
.with_metadata("get_session", Value::Bool(true))
|
||||||
|
.no_display()
|
||||||
|
.with_about("about.logout")
|
||||||
|
.with_call_remote::<CliContext>() // last
|
||||||
|
```
|
||||||
|
|
||||||
|
## File Locations
|
||||||
|
|
||||||
|
- Handler definitions: Throughout `core/src/` modules
|
||||||
|
- Main API tree: `core/src/lib.rs` (`main_api()`, `server()`, `package()`)
|
||||||
|
- Auth middleware: `core/src/middleware/auth/`
|
||||||
|
- DB middleware: `core/src/middleware/db.rs`
|
||||||
|
- Context types: `core/src/context/`
|
||||||
122
agents/s9pk-structure.md
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
# S9PK Package Format
|
||||||
|
|
||||||
|
S9PK is the package format for StartOS services. Version 2 uses a merkle archive structure for efficient downloading and cryptographic verification.
|
||||||
|
|
||||||
|
## File Format
|
||||||
|
|
||||||
|
S9PK files begin with a 3-byte header: `0x3b 0x3b 0x02` (magic bytes + version 2).
|
||||||
|
|
||||||
|
The archive is cryptographically signed using Ed25519 with prehashed content (SHA-512 over blake3 merkle root hash).
|
||||||
|
|
||||||
|
## Archive Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
/
|
||||||
|
├── manifest.json # Package metadata (required)
|
||||||
|
├── icon.<ext> # Package icon - any image/* format (required)
|
||||||
|
├── LICENSE.md # License text (required)
|
||||||
|
├── dependencies/ # Dependency metadata (optional)
|
||||||
|
│ └── <package-id>/
|
||||||
|
│ ├── metadata.json # DependencyMetadata
|
||||||
|
│ └── icon.<ext> # Dependency icon
|
||||||
|
├── javascript.squashfs # Package JavaScript code (required)
|
||||||
|
├── assets.squashfs # Static assets (optional, legacy: assets/ directory)
|
||||||
|
└── images/ # Container images by architecture
|
||||||
|
└── <arch>/ # e.g., x86_64, aarch64, riscv64
|
||||||
|
├── <image-id>.squashfs # Container filesystem
|
||||||
|
├── <image-id>.json # Image metadata
|
||||||
|
└── <image-id>.env # Environment variables
|
||||||
|
```
|
||||||
|
|
||||||
|
## Components
|
||||||
|
|
||||||
|
### manifest.json
|
||||||
|
|
||||||
|
The package manifest contains all metadata:
|
||||||
|
|
||||||
|
| Field | Type | Description |
|
||||||
|
|-------|------|-------------|
|
||||||
|
| `id` | string | Package identifier (e.g., `bitcoind`) |
|
||||||
|
| `title` | string | Display name |
|
||||||
|
| `version` | string | Extended version string |
|
||||||
|
| `satisfies` | string[] | Version ranges this version satisfies |
|
||||||
|
| `releaseNotes` | string/object | Release notes (localized) |
|
||||||
|
| `canMigrateTo` | string | Version range for forward migration |
|
||||||
|
| `canMigrateFrom` | string | Version range for backward migration |
|
||||||
|
| `license` | string | License type |
|
||||||
|
| `wrapperRepo` | string | StartOS wrapper repository URL |
|
||||||
|
| `upstreamRepo` | string | Upstream project URL |
|
||||||
|
| `supportSite` | string | Support site URL |
|
||||||
|
| `marketingSite` | string | Marketing site URL |
|
||||||
|
| `donationUrl` | string? | Optional donation URL |
|
||||||
|
| `docsUrl` | string? | Optional documentation URL |
|
||||||
|
| `description` | object | Short and long descriptions (localized) |
|
||||||
|
| `images` | object | Image configurations by image ID |
|
||||||
|
| `volumes` | string[] | Volume IDs for persistent data |
|
||||||
|
| `alerts` | object | User alerts for lifecycle events |
|
||||||
|
| `dependencies` | object | Package dependencies |
|
||||||
|
| `hardwareRequirements` | object | Hardware requirements (arch, RAM, devices) |
|
||||||
|
| `hardwareAcceleration` | boolean | Whether package uses hardware acceleration |
|
||||||
|
| `gitHash` | string? | Git commit hash |
|
||||||
|
| `osVersion` | string | Minimum StartOS version |
|
||||||
|
| `sdkVersion` | string? | SDK version used to build |
|
||||||
|
|
||||||
|
### javascript.squashfs
|
||||||
|
|
||||||
|
Contains the package JavaScript that implements the `ABI` interface from `@start9labs/start-sdk-base`. This code runs in the container runtime and manages the package lifecycle.
|
||||||
|
|
||||||
|
The squashfs is mounted at `/usr/lib/startos/package/` and the runtime loads `index.js`.
|
||||||
|
|
||||||
|
### images/
|
||||||
|
|
||||||
|
Container images organized by architecture:
|
||||||
|
|
||||||
|
- **`<image-id>.squashfs`** - Container root filesystem
|
||||||
|
- **`<image-id>.json`** - Image metadata (entrypoint, user, workdir, etc.)
|
||||||
|
- **`<image-id>.env`** - Environment variables for the container
|
||||||
|
|
||||||
|
Images are built from Docker/Podman and converted to squashfs. The `ImageConfig` in manifest specifies:
|
||||||
|
- `arch` - Supported architectures
|
||||||
|
- `emulateMissingAs` - Fallback architecture for emulation
|
||||||
|
- `nvidiaContainer` - Whether to enable NVIDIA container support
|
||||||
|
|
||||||
|
### assets.squashfs
|
||||||
|
|
||||||
|
Static assets accessible to the package, mounted read-only at `/media/startos/assets/` in the container.
|
||||||
|
|
||||||
|
### dependencies/
|
||||||
|
|
||||||
|
Metadata for dependencies displayed in the UI:
|
||||||
|
- `metadata.json` - Just title for now
|
||||||
|
- `icon.<ext>` - Icon for the dependency
|
||||||
|
|
||||||
|
## Merkle Archive
|
||||||
|
|
||||||
|
The S9PK uses a merkle tree structure where each file and directory has a blake3 hash. This enables:
|
||||||
|
|
||||||
|
1. **Partial downloads** - Download and verify individual files
|
||||||
|
2. **Integrity verification** - Verify any subset of the archive
|
||||||
|
3. **Efficient updates** - Only download changed portions
|
||||||
|
4. **DOS protection** - Size limits enforced before downloading content
|
||||||
|
|
||||||
|
Files are sorted by priority for streaming (manifest first, then icon, license, dependencies, javascript, assets, images).
|
||||||
|
|
||||||
|
## Building S9PK
|
||||||
|
|
||||||
|
Use `start-cli s9pk pack` to build packages:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
start-cli s9pk pack <manifest-path> -o <output.s9pk>
|
||||||
|
```
|
||||||
|
|
||||||
|
Images can be sourced from:
|
||||||
|
- Docker/Podman build (`--docker-build`)
|
||||||
|
- Existing Docker tag (`--docker-tag`)
|
||||||
|
- Pre-built squashfs files
|
||||||
|
|
||||||
|
## Related Code
|
||||||
|
|
||||||
|
- `core/src/s9pk/v2/mod.rs` - S9pk struct and serialization
|
||||||
|
- `core/src/s9pk/v2/manifest.rs` - Manifest types
|
||||||
|
- `core/src/s9pk/v2/pack.rs` - Packing logic
|
||||||
|
- `core/src/s9pk/merkle_archive/` - Merkle archive implementation
|
||||||
BIN
assets/create-vm/step-1.png
Normal file
|
After Width: | Height: | Size: 27 KiB |
BIN
assets/create-vm/step-10.png
Normal file
|
After Width: | Height: | Size: 44 KiB |
BIN
assets/create-vm/step-11.png
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
assets/create-vm/step-12.png
Normal file
|
After Width: | Height: | Size: 48 KiB |
BIN
assets/create-vm/step-2.png
Normal file
|
After Width: | Height: | Size: 46 KiB |
BIN
assets/create-vm/step-3.png
Normal file
|
After Width: | Height: | Size: 46 KiB |
BIN
assets/create-vm/step-4.png
Normal file
|
After Width: | Height: | Size: 50 KiB |
BIN
assets/create-vm/step-5.png
Normal file
|
After Width: | Height: | Size: 64 KiB |
BIN
assets/create-vm/step-6.png
Normal file
|
After Width: | Height: | Size: 52 KiB |
BIN
assets/create-vm/step-7.png
Normal file
|
After Width: | Height: | Size: 64 KiB |
BIN
assets/create-vm/step-8.png
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
assets/create-vm/step-9.png
Normal file
|
After Width: | Height: | Size: 44 KiB |
@@ -1,25 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
set -e
|
|
||||||
shopt -s expand_aliases
|
|
||||||
|
|
||||||
if [ "$0" != "./build-cargo-dep.sh" ]; then
|
|
||||||
>&2 echo "Must be run from start-os directory"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
USE_TTY=
|
|
||||||
if tty -s; then
|
|
||||||
USE_TTY="-it"
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -z "$ARCH" ]; then
|
|
||||||
ARCH=$(uname -m)
|
|
||||||
fi
|
|
||||||
|
|
||||||
mkdir -p cargo-deps
|
|
||||||
alias 'rust-arm64-builder'='docker run $USE_TTY --rm -v "$HOME/.cargo/registry":/usr/local/cargo/registry -v "$(pwd)"/cargo-deps:/home/rust/src -P start9/rust-arm-cross:aarch64'
|
|
||||||
|
|
||||||
rust-arm64-builder cargo install "$1" --target-dir /home/rust/src --target=$ARCH-unknown-linux-gnu
|
|
||||||
sudo chown -R $USER cargo-deps
|
|
||||||
sudo chown -R $USER ~/.cargo
|
|
||||||
4
build/.gitignore
vendored
@@ -1,2 +1,2 @@
|
|||||||
lib/depends
|
/lib/depends
|
||||||
lib/conflicts
|
/lib/conflicts
|
||||||
107
build/README.md
@@ -1,107 +0,0 @@
|
|||||||
# Building StartOS
|
|
||||||
|
|
||||||
⚠️ The commands given assume a Debian or Ubuntu-based environment. _Building in
|
|
||||||
a VM is NOT yet supported_ ⚠️
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
1. Install dependencies
|
|
||||||
|
|
||||||
- Avahi
|
|
||||||
- `sudo apt install -y avahi-daemon`
|
|
||||||
- Installed by default on most Debian systems - https://avahi.org
|
|
||||||
- Build Essentials (needed to run `make`)
|
|
||||||
- `sudo apt install -y build-essential`
|
|
||||||
- Docker
|
|
||||||
- `curl -fsSL https://get.docker.com | sh`
|
|
||||||
- https://docs.docker.com/get-docker
|
|
||||||
- Add your user to the docker group: `sudo usermod -a -G docker $USER`
|
|
||||||
- Reload user environment `exec sudo su -l $USER`
|
|
||||||
- Prepare Docker environment
|
|
||||||
- Setup buildx (https://docs.docker.com/buildx/working-with-buildx/)
|
|
||||||
- Create a builder: `docker buildx create --use`
|
|
||||||
- Add multi-arch build ability:
|
|
||||||
`docker run --rm --privileged linuxkit/binfmt:v0.8`
|
|
||||||
- Node Version 12+
|
|
||||||
- snap: `sudo snap install node`
|
|
||||||
- [nvm](https://github.com/nvm-sh/nvm#installing-and-updating):
|
|
||||||
`nvm install --lts`
|
|
||||||
- https://nodejs.org/en/docs
|
|
||||||
- NPM Version 7+
|
|
||||||
- apt: `sudo apt install -y npm`
|
|
||||||
- [nvm](https://github.com/nvm-sh/nvm#installing-and-updating):
|
|
||||||
`nvm install --lts`
|
|
||||||
- https://docs.npmjs.com/downloading-and-installing-node-js-and-npm
|
|
||||||
- jq
|
|
||||||
- `sudo apt install -y jq`
|
|
||||||
- https://stedolan.github.io/jq
|
|
||||||
- yq
|
|
||||||
- snap: `sudo snap install yq`
|
|
||||||
- binaries: https://github.com/mikefarah/yq/releases/
|
|
||||||
- https://mikefarah.gitbook.io/yq
|
|
||||||
|
|
||||||
2. Clone the latest repo with required submodules
|
|
||||||
> :information_source: You can check latest available version
|
|
||||||
> [here](https://github.com/Start9Labs/start-os/releases)
|
|
||||||
```
|
|
||||||
git clone --recursive https://github.com/Start9Labs/start-os.git --branch latest
|
|
||||||
```
|
|
||||||
|
|
||||||
## Build Raspberry Pi Image
|
|
||||||
|
|
||||||
```
|
|
||||||
cd start-os
|
|
||||||
make embassyos-raspi.img ARCH=aarch64
|
|
||||||
```
|
|
||||||
|
|
||||||
## Flash
|
|
||||||
|
|
||||||
Flash the resulting `embassyos-raspi.img` to your SD Card
|
|
||||||
|
|
||||||
We recommend [Balena Etcher](https://www.balena.io/etcher/)
|
|
||||||
|
|
||||||
## Setup
|
|
||||||
|
|
||||||
Visit http://start.local from any web browser - We recommend
|
|
||||||
[Firefox](https://www.mozilla.org/firefox/browsers)
|
|
||||||
|
|
||||||
Enter your product key. This is generated during the build process and can be
|
|
||||||
found in `product_key.txt`, located in the root directory.
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
1. I just flashed my SD card, fired up StartOS, bootup sounds and all, but my
|
|
||||||
browser is saying "Unable to connect" with start.local.
|
|
||||||
|
|
||||||
- Try doing a hard refresh on your browser, or opening the url in a
|
|
||||||
private/incognito window. If you've run an instance of StartOS before,
|
|
||||||
sometimes you can have a stale cache that will block you from navigating to
|
|
||||||
the page.
|
|
||||||
|
|
||||||
2. Flashing the image isn't working with balenaEtcher. I'm getting
|
|
||||||
`Cannot read property 'message' of null` when I try.
|
|
||||||
|
|
||||||
- The latest versions of Balena may not flash properly. This version here:
|
|
||||||
https://github.com/balena-io/etcher/releases/tag/v1.5.122 should work
|
|
||||||
properly.
|
|
||||||
|
|
||||||
3. Startup isn't working properly and I'm curious as to why. How can I view logs
|
|
||||||
regarding startup for debugging?
|
|
||||||
|
|
||||||
- Find the IP of your device
|
|
||||||
- Run `nc <ip> 8080` and it will print the logs
|
|
||||||
|
|
||||||
4. I need to ssh into my server to fix something, but I cannot get to the
|
|
||||||
console to add ssh keys normally.
|
|
||||||
|
|
||||||
- During the Build step, instead of running just
|
|
||||||
`make embassyos-raspi.img ARCH=aarch64` run
|
|
||||||
`ENVIRONMENT=dev make embassyos-raspi.img ARCH=aarch64`. Flash like normal,
|
|
||||||
and insert into your server. Boot up StartOS, then on another computer on
|
|
||||||
the same network, ssh into the server with the username `start9` and password
|
|
||||||
`embassy`.
|
|
||||||
|
|
||||||
5. I need to reset my password, how can I do that?
|
|
||||||
|
|
||||||
- You will need to reflash your device. Select "Use Existing Drive" once you are
|
|
||||||
in setup, and it will prompt you to set a new password.
|
|
||||||
|
|||||||
@@ -1,76 +0,0 @@
|
|||||||
# Release Process
|
|
||||||
|
|
||||||
## `embassyos_0.3.x-1_amd64.deb`
|
|
||||||
|
|
||||||
- Description: debian package for x86_64 - intended to be installed on pureos
|
|
||||||
- Destination: GitHub Release Tag
|
|
||||||
- Requires: N/A
|
|
||||||
- Build steps:
|
|
||||||
- Clone `https://github.com/Start9Labs/embassy-os-deb` at `master`
|
|
||||||
- Run `make TAG=master` from that folder
|
|
||||||
- Artifact: `./embassyos_0.3.x-1_amd64.deb`
|
|
||||||
|
|
||||||
## `eos-<version>-<git hash>-<date>_amd64.iso`
|
|
||||||
|
|
||||||
- Description: live usb image for x86_64
|
|
||||||
- Destination: GitHub Release Tag
|
|
||||||
- Requires: `embassyos_0.3.x-1_amd64.deb`
|
|
||||||
- Build steps:
|
|
||||||
- Clone `https://github.com/Start9Labs/eos-image-recipes` at `master`
|
|
||||||
- Copy `embassyos_0.3.x-1_amd64.deb` to
|
|
||||||
`overlays/vendor/root/embassyos_0.3.x-1_amd64.deb`
|
|
||||||
- Run `./run-local-build.sh byzantium` from that folder
|
|
||||||
- Artifact: `./results/eos-<version>-<git hash>-<date>_amd64.iso`
|
|
||||||
|
|
||||||
## `eos.x86_64.squashfs`
|
|
||||||
|
|
||||||
- Description: compressed embassyOS x86_64 filesystem image
|
|
||||||
- Destination: GitHub Release Tag, Registry @
|
|
||||||
`resources/eos/<version>/eos.x86_64.squashfs`
|
|
||||||
- Requires: `eos-<version>-<git hash>-<date>_amd64.iso`
|
|
||||||
- Build steps:
|
|
||||||
- From `https://github.com/Start9Labs/eos-image-recipes` at `master`
|
|
||||||
- `./extract-squashfs.sh results/eos-<version>-<git hash>-<date>_amd64.iso`
|
|
||||||
- Artifact: `./results/eos.x86_64.squashfs`
|
|
||||||
|
|
||||||
## `eos.raspberrypi.squashfs`
|
|
||||||
|
|
||||||
- Description: compressed embassyOS raspberrypi filesystem image
|
|
||||||
- Destination: GitHub Release Tag, Registry @
|
|
||||||
`resources/eos/<version>/eos.raspberrypi.squashfs`
|
|
||||||
- Requires: N/A
|
|
||||||
- Build steps:
|
|
||||||
- Clone `https://github.com/Start9Labs/embassy-os` at `master`
|
|
||||||
- `make embassyos-raspi.img`
|
|
||||||
- flash `embassyos-raspi.img` to raspberry pi
|
|
||||||
- boot raspberry pi with ethernet
|
|
||||||
- wait for chime
|
|
||||||
- you can watch logs using `nc <ip> 8080`
|
|
||||||
- unplug raspberry pi, put sd card back in build machine
|
|
||||||
- `./build/raspberry-pi/rip-image.sh`
|
|
||||||
- Artifact: `./eos.raspberrypi.squashfs`
|
|
||||||
|
|
||||||
## `lite-upgrade.img`
|
|
||||||
|
|
||||||
- Description: update image for users coming from 0.3.2.1 and before
|
|
||||||
- Destination: Registry @ `resources/eos/<version>/eos.img`
|
|
||||||
- Requires: `eos.raspberrypi.squashfs`
|
|
||||||
- Build steps:
|
|
||||||
- From `https://github.com/Start9Labs/embassy-os` at `master`
|
|
||||||
- `make lite-upgrade.img`
|
|
||||||
- Artifact `./lite-upgrade.img`
|
|
||||||
|
|
||||||
## `eos-<version>-<git hash>-<date>_raspberrypi.tar.gz`
|
|
||||||
|
|
||||||
- Description: pre-initialized raspberrypi image
|
|
||||||
- Destination: GitHub Release Tag (as tar.gz)
|
|
||||||
- Requires: `eos.raspberrypi.squashfs`
|
|
||||||
- Build steps:
|
|
||||||
- From `https://github.com/Start9Labs/embassy-os` at `master`
|
|
||||||
- `make eos_raspberrypi.img`
|
|
||||||
- `tar --format=posix -cS -f- eos-<version>-<git hash>-<date>_raspberrypi.img | gzip > eos-<version>-<git hash>-<date>_raspberrypi.tar.gz`
|
|
||||||
- Artifact `./eos-<version>-<git hash>-<date>_raspberrypi.tar.gz`
|
|
||||||
|
|
||||||
## `embassy-sdk`
|
|
||||||
|
|
||||||
- Build and deploy to all registries
|
|
||||||
26
build/build-cargo-dep.sh
Executable file
@@ -0,0 +1,26 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
cd "$(dirname "${BASH_SOURCE[0]}")/.."
|
||||||
|
|
||||||
|
set -e
|
||||||
|
shopt -s expand_aliases
|
||||||
|
|
||||||
|
if [ -z "$ARCH" ]; then
|
||||||
|
ARCH=$(uname -m)
|
||||||
|
fi
|
||||||
|
|
||||||
|
RUST_ARCH="$ARCH"
|
||||||
|
if [ "$ARCH" = "riscv64" ]; then
|
||||||
|
RUST_ARCH="riscv64gc"
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p target
|
||||||
|
|
||||||
|
source core/build/builder-alias.sh
|
||||||
|
|
||||||
|
RUSTFLAGS="-C target-feature=+crt-static"
|
||||||
|
|
||||||
|
rust-zig-builder cargo-zigbuild install $* --target-dir /workdir/target/ --target=$RUST_ARCH-unknown-linux-musl
|
||||||
|
if [ "$(ls -nd "target/$RUST_ARCH-unknown-linux-musl/release/${!#}" | awk '{ print $3 }')" != "$UID" ]; then
|
||||||
|
rust-zig-builder sh -c "chown -R $UID:$UID target && chown -R $UID:$UID /usr/local/cargo"
|
||||||
|
fi
|
||||||
@@ -11,12 +11,13 @@ if [ -z "$PLATFORM" ]; then
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
rm -rf ./firmware/$PLATFORM
|
rm -rf ./lib/firmware/$PLATFORM
|
||||||
mkdir -p ./firmware/$PLATFORM
|
mkdir -p ./lib/firmware/$PLATFORM
|
||||||
|
|
||||||
cd ./firmware/$PLATFORM
|
cd ./lib/firmware/$PLATFORM
|
||||||
|
|
||||||
mapfile -t firmwares <<< "$(jq -c ".[] | select(.platform[] | contains(\"$PLATFORM\"))" ../../build/lib/firmware.json)"
|
firmwares=()
|
||||||
|
while IFS= read -r line; do firmwares+=("$line"); done < <(jq -c ".[] | select(.platform[] | contains(\"$PLATFORM\"))" ../../firmware.json)
|
||||||
for firmware in "${firmwares[@]}"; do
|
for firmware in "${firmwares[@]}"; do
|
||||||
if [ -n "$firmware" ]; then
|
if [ -n "$firmware" ]; then
|
||||||
id=$(echo "$firmware" | jq --raw-output '.id')
|
id=$(echo "$firmware" | jq --raw-output '.id')
|
||||||
@@ -1,48 +1,60 @@
|
|||||||
avahi-daemon
|
avahi-daemon
|
||||||
avahi-utils
|
avahi-utils
|
||||||
|
b3sum
|
||||||
bash-completion
|
bash-completion
|
||||||
beep
|
beep
|
||||||
|
binfmt-support
|
||||||
bmon
|
bmon
|
||||||
btrfs-progs
|
btrfs-progs
|
||||||
ca-certificates
|
ca-certificates
|
||||||
cifs-utils
|
cifs-utils
|
||||||
|
conntrack
|
||||||
cryptsetup
|
cryptsetup
|
||||||
curl
|
curl
|
||||||
dmidecode
|
dmidecode
|
||||||
|
dnsutils
|
||||||
dosfstools
|
dosfstools
|
||||||
e2fsprogs
|
e2fsprogs
|
||||||
ecryptfs-utils
|
ecryptfs-utils
|
||||||
|
equivs
|
||||||
exfatprogs
|
exfatprogs
|
||||||
flashrom
|
flashrom
|
||||||
|
fuse3
|
||||||
grub-common
|
grub-common
|
||||||
|
grub-efi
|
||||||
htop
|
htop
|
||||||
httpdirfs
|
httpdirfs
|
||||||
iotop
|
iotop
|
||||||
|
iptables
|
||||||
iw
|
iw
|
||||||
jq
|
jq
|
||||||
libavahi-client3
|
|
||||||
libyajl2
|
libyajl2
|
||||||
linux-cpupower
|
linux-cpupower
|
||||||
lm-sensors
|
lm-sensors
|
||||||
lshw
|
lshw
|
||||||
lvm2
|
lvm2
|
||||||
|
lxc
|
||||||
magic-wormhole
|
magic-wormhole
|
||||||
man-db
|
man-db
|
||||||
ncdu
|
ncdu
|
||||||
net-tools
|
net-tools
|
||||||
network-manager
|
network-manager
|
||||||
|
nfs-common
|
||||||
nvme-cli
|
nvme-cli
|
||||||
nyx
|
nyx
|
||||||
openssh-server
|
openssh-server
|
||||||
podman
|
podman
|
||||||
postgresql
|
|
||||||
psmisc
|
psmisc
|
||||||
qemu-guest-agent
|
qemu-guest-agent
|
||||||
|
qemu-user-static
|
||||||
|
rfkill
|
||||||
rsync
|
rsync
|
||||||
samba-common-bin
|
samba-common-bin
|
||||||
smartmontools
|
smartmontools
|
||||||
|
socat
|
||||||
sqlite3
|
sqlite3
|
||||||
squashfs-tools
|
squashfs-tools
|
||||||
|
squashfs-tools-ng
|
||||||
sudo
|
sudo
|
||||||
systemd
|
systemd
|
||||||
systemd-resolved
|
systemd-resolved
|
||||||
@@ -51,4 +63,5 @@ systemd-timesyncd
|
|||||||
tor
|
tor
|
||||||
util-linux
|
util-linux
|
||||||
vim
|
vim
|
||||||
|
wireguard-tools
|
||||||
wireless-tools
|
wireless-tools
|
||||||
|
|||||||
@@ -1,5 +0,0 @@
|
|||||||
+ containerd.io
|
|
||||||
+ docker-ce
|
|
||||||
+ docker-ce-cli
|
|
||||||
+ docker-compose-plugin
|
|
||||||
- podman
|
|
||||||
@@ -5,11 +5,18 @@ set -e
|
|||||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
|
||||||
IFS="-" read -ra FEATURES <<< "$ENVIRONMENT"
|
IFS="-" read -ra FEATURES <<< "$ENVIRONMENT"
|
||||||
|
FEATURES+=("${ARCH}")
|
||||||
|
if [ "$ARCH" != "$PLATFORM" ]; then
|
||||||
|
FEATURES+=("${PLATFORM}")
|
||||||
|
fi
|
||||||
|
if [[ "$PLATFORM" =~ -nonfree$ ]]; then
|
||||||
|
FEATURES+=("nonfree")
|
||||||
|
fi
|
||||||
|
|
||||||
feature_file_checker='
|
feature_file_checker='
|
||||||
/^#/ { next }
|
/^#/ { next }
|
||||||
/^\+ [a-z0-9]+$/ { next }
|
/^\+ [a-z0-9.-]+$/ { next }
|
||||||
/^- [a-z0-9]+$/ { next }
|
/^- [a-z0-9.-]+$/ { next }
|
||||||
{ exit 1 }
|
{ exit 1 }
|
||||||
'
|
'
|
||||||
|
|
||||||
@@ -30,8 +37,8 @@ for type in conflicts depends; do
|
|||||||
for feature in ${FEATURES[@]}; do
|
for feature in ${FEATURES[@]}; do
|
||||||
file="$feature.$type"
|
file="$feature.$type"
|
||||||
if [ -f $file ]; then
|
if [ -f $file ]; then
|
||||||
if grep "^- $pkg$" $file; then
|
if grep "^- $pkg$" $file > /dev/null; then
|
||||||
SKIP=1
|
SKIP=yes
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
|
|||||||
10
build/dpkg-deps/nonfree.depends
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
+ firmware-amd-graphics
|
||||||
|
+ firmware-atheros
|
||||||
|
+ firmware-brcm80211
|
||||||
|
+ firmware-iwlwifi
|
||||||
|
+ firmware-libertas
|
||||||
|
+ firmware-misc-nonfree
|
||||||
|
+ firmware-realtek
|
||||||
|
+ nvidia-container-toolkit
|
||||||
|
# + nvidia-driver
|
||||||
|
# + nvidia-kernel-dkms
|
||||||
10
build/dpkg-deps/raspberrypi.depends
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
- grub-efi
|
||||||
|
+ parted
|
||||||
|
+ raspberrypi-net-mods
|
||||||
|
+ raspberrypi-sys-mods
|
||||||
|
+ raspi-config
|
||||||
|
+ raspi-firmware
|
||||||
|
+ raspi-utils
|
||||||
|
+ rpi-eeprom
|
||||||
|
+ rpi-update
|
||||||
|
+ rpi.gpio-common
|
||||||
@@ -1,2 +1,3 @@
|
|||||||
+ gdb
|
+ gdb
|
||||||
+ heaptrack
|
+ heaptrack
|
||||||
|
+ linux-perf
|
||||||
1
build/dpkg-deps/x86_64.depends
Normal file
@@ -0,0 +1 @@
|
|||||||
|
+ grub-pc-bin
|
||||||
4
basename.sh → build/env/basename.sh
vendored
@@ -1,5 +1,7 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
|
PROJECT=${PROJECT:-"startos"}
|
||||||
|
|
||||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
|
||||||
PLATFORM="$(if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi)"
|
PLATFORM="$(if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi)"
|
||||||
@@ -16,4 +18,4 @@ if [ -n "$STARTOS_ENV" ]; then
|
|||||||
VERSION_FULL="$VERSION_FULL~${STARTOS_ENV}"
|
VERSION_FULL="$VERSION_FULL~${STARTOS_ENV}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -n "startos-${VERSION_FULL}_${PLATFORM}"
|
echo -n "${PROJECT}-${VERSION_FULL}_${PLATFORM}"
|
||||||
@@ -1,8 +1,10 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
|
||||||
if ! [ -f ./ENVIRONMENT.txt ] || [ "$(cat ./ENVIRONMENT.txt)" != "$ENVIRONMENT" ]; then
|
if ! [ -f ./ENVIRONMENT.txt ] || [ "$(cat ./ENVIRONMENT.txt)" != "$ENVIRONMENT" ]; then
|
||||||
>&2 echo "Updating ENVIRONMENT.txt to \"$ENVIRONMENT\""
|
>&2 echo "Updating ENVIRONMENT.txt to \"$ENVIRONMENT\""
|
||||||
echo -n "$ENVIRONMENT" > ./ENVIRONMENT.txt
|
echo -n "$ENVIRONMENT" > ./ENVIRONMENT.txt
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -n ./ENVIRONMENT.txt
|
echo -n ./build/env/ENVIRONMENT.txt
|
||||||
16
build/env/check-git-hash.sh
vendored
Executable file
@@ -0,0 +1,16 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
|
||||||
|
if [ "$GIT_BRANCH_AS_HASH" != 1 ]; then
|
||||||
|
GIT_HASH="$(git rev-parse HEAD)$(if ! git diff-index --quiet HEAD --; then echo '-modified'; fi)"
|
||||||
|
else
|
||||||
|
GIT_HASH="@$(git rev-parse --abbrev-ref HEAD)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! [ -f ./GIT_HASH.txt ] || [ "$(cat ./GIT_HASH.txt)" != "$GIT_HASH" ]; then
|
||||||
|
>&2 echo Git hash changed from "$([ -f ./GIT_HASH.txt ] && cat ./GIT_HASH.txt)" to "$GIT_HASH"
|
||||||
|
echo -n "$GIT_HASH" > ./GIT_HASH.txt
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo -n ./build/env/GIT_HASH.txt
|
||||||
@@ -1,8 +1,10 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
|
||||||
if ! [ -f ./PLATFORM.txt ] || [ "$(cat ./PLATFORM.txt)" != "$PLATFORM" ] && [ -n "$PLATFORM" ]; then
|
if ! [ -f ./PLATFORM.txt ] || [ "$(cat ./PLATFORM.txt)" != "$PLATFORM" ] && [ -n "$PLATFORM" ]; then
|
||||||
>&2 echo "Updating PLATFORM.txt to \"$PLATFORM\""
|
>&2 echo "Updating PLATFORM.txt to \"$PLATFORM\""
|
||||||
echo -n "$PLATFORM" > ./PLATFORM.txt
|
echo -n "$PLATFORM" > ./PLATFORM.txt
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -n ./PLATFORM.txt
|
echo -n ./build/env/PLATFORM.txt
|
||||||
@@ -1,6 +1,8 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
FE_VERSION="$(cat web/package.json | grep '"version"' | sed 's/[ \t]*"version":[ \t]*"\([^"]*\)",/\1/')"
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
|
||||||
|
FE_VERSION="$(cat ../../web/package.json | grep '"version"' | sed 's/[ \t]*"version":[ \t]*"\([^"]*\)",/\1/')"
|
||||||
|
|
||||||
# TODO: Validate other version sources - backend/Cargo.toml, backend/src/version/mod.rs
|
# TODO: Validate other version sources - backend/Cargo.toml, backend/src/version/mod.rs
|
||||||
|
|
||||||
@@ -10,4 +12,4 @@ if ! [ -f ./VERSION.txt ] || [ "$(cat ./VERSION.txt)" != "$VERSION" ]; then
|
|||||||
echo -n "$VERSION" > ./VERSION.txt
|
echo -n "$VERSION" > ./VERSION.txt
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo -n ./VERSION.txt
|
echo -n ./build/env/VERSION.txt
|
||||||
35
build/image-recipe/Dockerfile
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
ARG SUITE=trixie
|
||||||
|
|
||||||
|
FROM debian:${SUITE}
|
||||||
|
|
||||||
|
ENV DEBIAN_FRONTEND=noninteractive
|
||||||
|
RUN apt-get update && \
|
||||||
|
apt-get install -yq \
|
||||||
|
live-build \
|
||||||
|
procps \
|
||||||
|
binfmt-support \
|
||||||
|
qemu-utils \
|
||||||
|
qemu-user-static \
|
||||||
|
xorriso \
|
||||||
|
isolinux \
|
||||||
|
ca-certificates \
|
||||||
|
curl \
|
||||||
|
wget \
|
||||||
|
gpg \
|
||||||
|
git \
|
||||||
|
fdisk \
|
||||||
|
dosfstools \
|
||||||
|
e2fsprogs \
|
||||||
|
squashfs-tools \
|
||||||
|
rsync \
|
||||||
|
b3sum \
|
||||||
|
dpkg-dev
|
||||||
|
|
||||||
|
|
||||||
|
COPY binary_grub-efi.patch /root/binary_grub-efi.patch
|
||||||
|
RUN patch /usr/lib/live/build/binary_grub-efi < /root/binary_grub-efi.patch && rm /root/binary_grub-efi.patch
|
||||||
|
|
||||||
|
RUN echo 'retry_connrefused = on' > /etc/wgetrc && \
|
||||||
|
echo 'tries = 100' >> /etc/wgetrc
|
||||||
|
|
||||||
|
WORKDIR /root
|
||||||
@@ -8,13 +8,9 @@ official StartOS images, you can use the `run-local-build.sh` helper script:
|
|||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Prerequisites
|
# Prerequisites
|
||||||
sudo apt-get install -y debspawn
|
sudo apt-get install -y debspawn binfmt-support
|
||||||
sudo mkdir -p /etc/debspawn/ && echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml
|
sudo mkdir -p /etc/debspawn/ && echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml
|
||||||
|
|
||||||
# Get dpkg
|
|
||||||
mkdir -p overlays/startos/root
|
|
||||||
wget -O overlays/startos/root/startos_0.3.x-1_amd64.deb <dpkg_url>
|
|
||||||
|
|
||||||
# Build image
|
# Build image
|
||||||
./run-local-build.sh
|
./run-local-build.sh
|
||||||
```
|
```
|
||||||
47
build/image-recipe/binary_grub-efi.patch
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
--- /usr/lib/live/build/binary_grub-efi 2024-05-25 05:22:52.000000000 -0600
|
||||||
|
+++ binary_grub-efi 2025-10-16 13:04:32.338740922 -0600
|
||||||
|
@@ -54,6 +54,8 @@
|
||||||
|
armhf)
|
||||||
|
Check_package chroot /usr/lib/grub/arm-efi/configfile.mod grub-efi-arm-bin
|
||||||
|
;;
|
||||||
|
+ riscv64)
|
||||||
|
+ Check_package chroot /usr/lib/grub/riscv64-efi/configfile.mod grub-efi-riscv64-bin
|
||||||
|
esac
|
||||||
|
Check_package chroot /usr/bin/grub-mkimage grub-common
|
||||||
|
Check_package chroot /usr/bin/mcopy mtools
|
||||||
|
@@ -136,7 +138,7 @@
|
||||||
|
esac
|
||||||
|
|
||||||
|
# Cleanup files that we generate
|
||||||
|
-rm -rf binary/boot/efi.img binary/boot/grub/i386-efi/ binary/boot/grub/x86_64-efi binary/boot/grub/arm64-efi binary/boot/grub/arm-efi
|
||||||
|
+rm -rf binary/boot/efi.img binary/boot/grub/i386-efi/ binary/boot/grub/x86_64-efi binary/boot/grub/arm64-efi binary/boot/grub/arm-efi binary/boot/grub/riscv64-efi
|
||||||
|
|
||||||
|
# This is workaround till both efi-image and grub-cpmodules are put into a binary package
|
||||||
|
case "${LB_BUILD_WITH_CHROOT}" in
|
||||||
|
@@ -243,6 +245,10 @@
|
||||||
|
gen_efi_boot_img "arm-efi" "arm" "debian-live/arm"
|
||||||
|
PATH="\${PRE_EFI_IMAGE_PATH}"
|
||||||
|
;;
|
||||||
|
+ riscv64)
|
||||||
|
+ gen_efi_boot_img "riscv64-efi" "riscv64" "debian-live/riscv64"
|
||||||
|
+ PATH="\${PRE_EFI_IMAGE_PATH}"
|
||||||
|
+ ;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
|
||||||
|
@@ -324,6 +330,7 @@
|
||||||
|
rm -f chroot/grub-efi-temp/bootnetx64.efi
|
||||||
|
rm -f chroot/grub-efi-temp/bootnetaa64.efi
|
||||||
|
rm -f chroot/grub-efi-temp/bootnetarm.efi
|
||||||
|
+rm -f chroot/grub-efi-temp/bootnetriscv64.efi
|
||||||
|
|
||||||
|
mkdir -p binary
|
||||||
|
cp -a chroot/grub-efi-temp/* binary/
|
||||||
|
@@ -331,6 +338,7 @@
|
||||||
|
rm -rf chroot/grub-efi-temp-i386-efi
|
||||||
|
rm -rf chroot/grub-efi-temp-arm64-efi
|
||||||
|
rm -rf chroot/grub-efi-temp-arm-efi
|
||||||
|
+rm -rf chroot/grub-efi-temp-riscv64-efi
|
||||||
|
rm -rf chroot/grub-efi-temp-cfg
|
||||||
|
rm -rf chroot/grub-efi-temp
|
||||||
|
|
||||||
449
build/image-recipe/build.sh
Executable file
@@ -0,0 +1,449 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
MAX_IMG_LEN=$((4 * 1024 * 1024 * 1024)) # 4GB
|
||||||
|
|
||||||
|
echo "==== StartOS Image Build ===="
|
||||||
|
|
||||||
|
echo "Building for architecture: $IB_TARGET_ARCH"
|
||||||
|
|
||||||
|
SOURCE_DIR="$(realpath $(dirname "${BASH_SOURCE[0]}"))"
|
||||||
|
|
||||||
|
base_dir="$(pwd -P)"
|
||||||
|
prep_results_dir="$base_dir/images-prep"
|
||||||
|
RESULTS_DIR="$base_dir/results"
|
||||||
|
echo "Saving results in: $RESULTS_DIR"
|
||||||
|
|
||||||
|
DEB_PATH="$base_dir/$1"
|
||||||
|
|
||||||
|
VERSION="$(dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xvf - ./usr/lib/startos/VERSION.txt)"
|
||||||
|
GIT_HASH="$(dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xvf - ./usr/lib/startos/GIT_HASH.txt)"
|
||||||
|
if [[ "$GIT_HASH" =~ ^@ ]]; then
|
||||||
|
GIT_HASH="unknown"
|
||||||
|
else
|
||||||
|
GIT_HASH="$(echo -n "$GIT_HASH" | head -c 7)"
|
||||||
|
fi
|
||||||
|
IB_OS_ENV="$(dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xvf - ./usr/lib/startos/ENVIRONMENT.txt)"
|
||||||
|
IB_TARGET_PLATFORM="$(dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xvf - ./usr/lib/startos/PLATFORM.txt)"
|
||||||
|
|
||||||
|
VERSION_FULL="${VERSION}-${GIT_HASH}"
|
||||||
|
if [ -n "$IB_OS_ENV" ]; then
|
||||||
|
VERSION_FULL="$VERSION_FULL~${IB_OS_ENV}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
IMAGE_BASENAME=startos-${VERSION_FULL}_${IB_TARGET_PLATFORM}
|
||||||
|
|
||||||
|
BOOTLOADERS=grub-efi
|
||||||
|
if [ "$IB_TARGET_PLATFORM" = "x86_64" ] || [ "$IB_TARGET_PLATFORM" = "x86_64-nonfree" ]; then
|
||||||
|
IB_TARGET_ARCH=amd64
|
||||||
|
QEMU_ARCH=x86_64
|
||||||
|
BOOTLOADERS=grub-efi,syslinux
|
||||||
|
elif [ "$IB_TARGET_PLATFORM" = "aarch64" ] || [ "$IB_TARGET_PLATFORM" = "aarch64-nonfree" ] || [ "$IB_TARGET_PLATFORM" = "raspberrypi" ] || [ "$IB_TARGET_PLATFORM" = "rockchip64" ]; then
|
||||||
|
IB_TARGET_ARCH=arm64
|
||||||
|
QEMU_ARCH=aarch64
|
||||||
|
elif [ "$IB_TARGET_PLATFORM" = "riscv64" ]; then
|
||||||
|
IB_TARGET_ARCH=riscv64
|
||||||
|
QEMU_ARCH=riscv64
|
||||||
|
else
|
||||||
|
IB_TARGET_ARCH="$IB_TARGET_PLATFORM"
|
||||||
|
QEMU_ARCH="$IB_TARGET_PLATFORM"
|
||||||
|
fi
|
||||||
|
|
||||||
|
QEMU_ARGS=()
|
||||||
|
if [ "$QEMU_ARCH" != $(uname -m) ]; then
|
||||||
|
QEMU_ARGS+=(--bootstrap-qemu-arch ${IB_TARGET_ARCH})
|
||||||
|
QEMU_ARGS+=(--bootstrap-qemu-static /usr/bin/qemu-${QEMU_ARCH}-static)
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p $prep_results_dir
|
||||||
|
|
||||||
|
cd $prep_results_dir
|
||||||
|
|
||||||
|
NON_FREE=
|
||||||
|
if [[ "${IB_TARGET_PLATFORM}" =~ -nonfree$ ]] || [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
|
NON_FREE=1
|
||||||
|
fi
|
||||||
|
IMAGE_TYPE=iso
|
||||||
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ] || [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then
|
||||||
|
IMAGE_TYPE=img
|
||||||
|
fi
|
||||||
|
|
||||||
|
ARCHIVE_AREAS="main contrib"
|
||||||
|
if [ "$NON_FREE" = 1 ]; then
|
||||||
|
if [ "$IB_SUITE" = "bullseye" ]; then
|
||||||
|
ARCHIVE_AREAS="$ARCHIVE_AREAS non-free"
|
||||||
|
else
|
||||||
|
ARCHIVE_AREAS="$ARCHIVE_AREAS non-free non-free-firmware"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
PLATFORM_CONFIG_EXTRAS=()
|
||||||
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
|
PLATFORM_CONFIG_EXTRAS+=( --firmware-binary false )
|
||||||
|
PLATFORM_CONFIG_EXTRAS+=( --firmware-chroot false )
|
||||||
|
RPI_KERNEL_VERSION=6.12.47+rpt
|
||||||
|
PLATFORM_CONFIG_EXTRAS+=( --linux-packages linux-image-$RPI_KERNEL_VERSION )
|
||||||
|
PLATFORM_CONFIG_EXTRAS+=( --linux-flavours "rpi-v8 rpi-2712" )
|
||||||
|
elif [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then
|
||||||
|
PLATFORM_CONFIG_EXTRAS+=( --linux-flavours rockchip64 )
|
||||||
|
elif [ "${IB_TARGET_ARCH}" = "riscv64" ]; then
|
||||||
|
PLATFORM_CONFIG_EXTRAS+=( --uefi-secure-boot=disable )
|
||||||
|
fi
|
||||||
|
|
||||||
|
|
||||||
|
cat > /etc/wgetrc << EOF
|
||||||
|
retry_connrefused = on
|
||||||
|
tries = 100
|
||||||
|
EOF
|
||||||
|
lb config \
|
||||||
|
--iso-application "StartOS v${VERSION_FULL} ${IB_TARGET_ARCH}" \
|
||||||
|
--iso-volume "StartOS v${VERSION} ${IB_TARGET_ARCH}" \
|
||||||
|
--iso-preparer "START9 LABS; HTTPS://START9.COM" \
|
||||||
|
--iso-publisher "START9 LABS; HTTPS://START9.COM" \
|
||||||
|
--backports true \
|
||||||
|
--bootappend-live "boot=live noautologin" \
|
||||||
|
--bootloaders $BOOTLOADERS \
|
||||||
|
--cache false \
|
||||||
|
--mirror-bootstrap "https://deb.debian.org/debian/" \
|
||||||
|
--mirror-chroot "https://deb.debian.org/debian/" \
|
||||||
|
--mirror-chroot-security "https://security.debian.org/debian-security" \
|
||||||
|
-d ${IB_SUITE} \
|
||||||
|
-a ${IB_TARGET_ARCH} \
|
||||||
|
${QEMU_ARGS[@]} \
|
||||||
|
--archive-areas "${ARCHIVE_AREAS}" \
|
||||||
|
${PLATFORM_CONFIG_EXTRAS[@]}
|
||||||
|
|
||||||
|
# Overlays
|
||||||
|
|
||||||
|
mkdir -p config/packages.chroot/
|
||||||
|
cp $RESULTS_DIR/$IMAGE_BASENAME.deb config/packages.chroot/
|
||||||
|
dpkg-name config/packages.chroot/*.deb
|
||||||
|
|
||||||
|
mkdir -p config/includes.chroot/etc
|
||||||
|
echo start > config/includes.chroot/etc/hostname
|
||||||
|
cat > config/includes.chroot/etc/hosts << EOT
|
||||||
|
127.0.0.1 localhost start
|
||||||
|
::1 localhost start ip6-localhost ip6-loopback
|
||||||
|
ff02::1 ip6-allnodes
|
||||||
|
ff02::2 ip6-allrouters
|
||||||
|
EOT
|
||||||
|
|
||||||
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
|
mkdir -p config/includes.chroot
|
||||||
|
git clone --depth=1 --branch=stable https://github.com/raspberrypi/rpi-firmware.git config/includes.chroot/boot
|
||||||
|
rm -rf config/includes.chroot/boot/.git config/includes.chroot/boot/modules
|
||||||
|
rsync -rLp $SOURCE_DIR/raspberrypi/squashfs/ config/includes.chroot/
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Bootloaders
|
||||||
|
|
||||||
|
rm -rf config/bootloaders
|
||||||
|
cp -r /usr/share/live/build/bootloaders config/bootloaders
|
||||||
|
|
||||||
|
cat > config/bootloaders/syslinux/syslinux.cfg << EOF
|
||||||
|
include menu.cfg
|
||||||
|
default vesamenu.c32
|
||||||
|
prompt 0
|
||||||
|
timeout 50
|
||||||
|
EOF
|
||||||
|
|
||||||
|
cat > config/bootloaders/isolinux/isolinux.cfg << EOF
|
||||||
|
include menu.cfg
|
||||||
|
default vesamenu.c32
|
||||||
|
prompt 0
|
||||||
|
timeout 50
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Extract splash.png from the deb package
|
||||||
|
dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xf - ./usr/lib/startos/splash.png > /tmp/splash.png
|
||||||
|
cp /tmp/splash.png config/bootloaders/syslinux_common/splash.png
|
||||||
|
cp /tmp/splash.png config/bootloaders/isolinux/splash.png
|
||||||
|
cp /tmp/splash.png config/bootloaders/grub-pc/splash.png
|
||||||
|
rm /tmp/splash.png
|
||||||
|
|
||||||
|
sed -i -e '2i set timeout=5' config/bootloaders/grub-pc/config.cfg
|
||||||
|
|
||||||
|
# Archives
|
||||||
|
|
||||||
|
mkdir -p config/archives
|
||||||
|
|
||||||
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
|
curl -fsSL https://archive.raspberrypi.com/debian/raspberrypi.gpg.key | gpg --dearmor -o config/archives/raspi.key
|
||||||
|
echo "deb [arch=${IB_TARGET_ARCH} signed-by=/etc/apt/trusted.gpg.d/raspi.key.gpg] https://archive.raspberrypi.com/debian/ ${IB_SUITE} main" > config/archives/raspi.list
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then
|
||||||
|
curl -fsSL https://apt.armbian.com/armbian.key | gpg --dearmor -o config/archives/armbian.key
|
||||||
|
echo "deb https://apt.armbian.com/ ${IB_SUITE} main" > config/archives/armbian.list
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$NON_FREE" = 1 ]; then
|
||||||
|
curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o config/archives/nvidia-container-toolkit.key
|
||||||
|
curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list \
|
||||||
|
| sed 's#deb https://#deb [signed-by=/etc/apt/trusted.gpg.d/nvidia-container-toolkit.key.gpg] https://#g' \
|
||||||
|
> config/archives/nvidia-container-toolkit.list
|
||||||
|
fi
|
||||||
|
|
||||||
|
cat > config/archives/backports.pref <<-EOF
|
||||||
|
Package: linux-image-*
|
||||||
|
Pin: release n=${IB_SUITE}-backports
|
||||||
|
Pin-Priority: 500
|
||||||
|
|
||||||
|
Package: linux-headers-*
|
||||||
|
Pin: release n=${IB_SUITE}-backports
|
||||||
|
Pin-Priority: 500
|
||||||
|
|
||||||
|
Package: *nvidia*
|
||||||
|
Pin: release n=${IB_SUITE}-backports
|
||||||
|
Pin-Priority: 500
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# Hooks
|
||||||
|
|
||||||
|
cat > config/hooks/normal/9000-install-startos.hook.chroot << EOF
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [ "${NON_FREE}" = "1" ] && [ "${IB_TARGET_PLATFORM}" != "raspberrypi" ]; then
|
||||||
|
# install a specific NVIDIA driver version
|
||||||
|
|
||||||
|
# ---------------- configuration ----------------
|
||||||
|
NVIDIA_DRIVER_VERSION="\${NVIDIA_DRIVER_VERSION:-580.119.02}"
|
||||||
|
|
||||||
|
BASE_URL="https://download.nvidia.com/XFree86/Linux-${QEMU_ARCH}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Using NVIDIA driver: \${NVIDIA_DRIVER_VERSION}" >&2
|
||||||
|
|
||||||
|
# ---------------- kernel version ----------------
|
||||||
|
|
||||||
|
# Determine target kernel version from newest /boot/vmlinuz-* in the chroot.
|
||||||
|
KVER="\$(
|
||||||
|
ls -1t /boot/vmlinuz-* 2>/dev/null \
|
||||||
|
| head -n1 \
|
||||||
|
| sed 's|.*/vmlinuz-||'
|
||||||
|
)"
|
||||||
|
|
||||||
|
if [ -z "\${KVER}" ]; then
|
||||||
|
echo "[nvidia-hook] ERROR: no /boot/vmlinuz-* found; cannot determine kernel version" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Target kernel version: \${KVER}" >&2
|
||||||
|
|
||||||
|
# Ensure kernel headers are present
|
||||||
|
TEMP_APT_DEPS=(build-essential)
|
||||||
|
if [ ! -e "/lib/modules/\${KVER}/build" ]; then
|
||||||
|
TEMP_APT_DEPS+=(linux-headers-\${KVER})
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Installing build dependencies" >&2
|
||||||
|
|
||||||
|
/usr/lib/startos/scripts/install-equivs <<-EOF
|
||||||
|
Package: nvidia-depends
|
||||||
|
Version: \${NVIDIA_DRIVER_VERSION}
|
||||||
|
Section: unknown
|
||||||
|
Priority: optional
|
||||||
|
Depends: \${dep_list="\$(IFS=', '; echo "\${TEMP_APT_DEPS[*]}")"}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# ---------------- download and run installer ----------------
|
||||||
|
|
||||||
|
RUN_NAME="NVIDIA-Linux-${QEMU_ARCH}-\${NVIDIA_DRIVER_VERSION}.run"
|
||||||
|
RUN_PATH="/root/\${RUN_NAME}"
|
||||||
|
RUN_URL="\${BASE_URL}/\${NVIDIA_DRIVER_VERSION}/\${RUN_NAME}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Downloading \${RUN_URL}" >&2
|
||||||
|
wget -O "\${RUN_PATH}" "\${RUN_URL}"
|
||||||
|
chmod +x "\${RUN_PATH}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Running NVIDIA installer for kernel \${KVER}" >&2
|
||||||
|
|
||||||
|
sh "\${RUN_PATH}" \
|
||||||
|
--silent \
|
||||||
|
--kernel-name="\${KVER}" \
|
||||||
|
--no-x-check \
|
||||||
|
--no-nouveau-check \
|
||||||
|
--no-runlevel-check
|
||||||
|
|
||||||
|
# Rebuild module metadata
|
||||||
|
echo "[nvidia-hook] Running depmod for \${KVER}" >&2
|
||||||
|
depmod -a "\${KVER}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] NVIDIA \${NVIDIA_DRIVER_VERSION} installation complete for kernel \${KVER}" >&2
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Removing build dependencies..." >&2
|
||||||
|
apt-get purge -y nvidia-depends
|
||||||
|
apt-get autoremove -y
|
||||||
|
echo "[nvidia-hook] Removed build dependencies." >&2
|
||||||
|
fi
|
||||||
|
|
||||||
|
cp /etc/resolv.conf /etc/resolv.conf.bak
|
||||||
|
|
||||||
|
if [ "${IB_SUITE}" = trixie ] && [ "${IB_TARGET_ARCH}" != riscv64 ]; then
|
||||||
|
echo 'deb https://deb.debian.org/debian/ bookworm main' > /etc/apt/sources.list.d/bookworm.list
|
||||||
|
apt-get update
|
||||||
|
apt-get install -y postgresql-15
|
||||||
|
rm /etc/apt/sources.list.d/bookworm.list
|
||||||
|
apt-get update
|
||||||
|
systemctl mask postgresql
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
|
ln -sf /usr/bin/pi-beep /usr/local/bin/beep
|
||||||
|
KERNEL_VERSION=${RPI_KERNEL_VERSION} sh /boot/config.sh > /boot/config.txt
|
||||||
|
mkinitramfs -c gzip -o /boot/initrd.img-${RPI_KERNEL_VERSION}-rpi-v8 ${RPI_KERNEL_VERSION}-rpi-v8
|
||||||
|
mkinitramfs -c gzip -o /boot/initrd.img-${RPI_KERNEL_VERSION}-rpi-2712 ${RPI_KERNEL_VERSION}-rpi-2712
|
||||||
|
fi
|
||||||
|
|
||||||
|
useradd --shell /bin/bash -G startos -m start9
|
||||||
|
echo start9:embassy | chpasswd
|
||||||
|
usermod -aG sudo start9
|
||||||
|
usermod -aG systemd-journal start9
|
||||||
|
|
||||||
|
echo "start9 ALL=(ALL:ALL) NOPASSWD: ALL" | sudo tee "/etc/sudoers.d/010_start9-nopasswd"
|
||||||
|
|
||||||
|
if [ "${IB_TARGET_PLATFORM}" != "raspberrypi" ]; then
|
||||||
|
/usr/lib/startos/scripts/enable-kiosk
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! [[ "${IB_OS_ENV}" =~ (^|-)dev($|-) ]]; then
|
||||||
|
passwd -l start9
|
||||||
|
fi
|
||||||
|
|
||||||
|
EOF
|
||||||
|
|
||||||
|
SOURCE_DATE_EPOCH="${SOURCE_DATE_EPOCH:-$(date '+%s')}"
|
||||||
|
|
||||||
|
if lb bootstrap; then
|
||||||
|
true
|
||||||
|
else
|
||||||
|
EXIT=$?
|
||||||
|
cat ./chroot/debootstrap/debootstrap.log
|
||||||
|
exit $EXIT
|
||||||
|
fi
|
||||||
|
lb chroot
|
||||||
|
lb installer
|
||||||
|
lb binary_chroot
|
||||||
|
lb chroot_prep install all mode-apt-install-binary mode-archives-chroot
|
||||||
|
mv chroot/chroot/etc/resolv.conf.bak chroot/chroot/etc/resolv.conf
|
||||||
|
lb binary_rootfs
|
||||||
|
|
||||||
|
cp $prep_results_dir/binary/live/filesystem.squashfs $RESULTS_DIR/$IMAGE_BASENAME.squashfs
|
||||||
|
|
||||||
|
if [ "${IMAGE_TYPE}" = iso ]; then
|
||||||
|
|
||||||
|
lb binary_manifest
|
||||||
|
lb binary_package-lists
|
||||||
|
lb binary_linux-image
|
||||||
|
lb binary_memtest
|
||||||
|
lb binary_grub-legacy
|
||||||
|
lb binary_grub-pc
|
||||||
|
lb binary_grub_cfg
|
||||||
|
lb binary_syslinux
|
||||||
|
lb binary_disk
|
||||||
|
lb binary_loadlin
|
||||||
|
lb binary_win32-loader
|
||||||
|
lb binary_includes
|
||||||
|
lb binary_grub-efi
|
||||||
|
lb binary_hooks
|
||||||
|
lb binary_checksums
|
||||||
|
find binary -newermt "$(date -d@${SOURCE_DATE_EPOCH} '+%Y-%m-%d %H:%M:%S')" -printf "%y %p\n" -exec touch '{}' -d@${SOURCE_DATE_EPOCH} --no-dereference ';' > binary.modified_timestamps
|
||||||
|
lb binary_iso
|
||||||
|
lb binary_onie
|
||||||
|
lb binary_netboot
|
||||||
|
lb binary_tar
|
||||||
|
lb binary_hdd
|
||||||
|
lb binary_zsync
|
||||||
|
lb chroot_prep remove all mode-archives-chroot
|
||||||
|
lb source
|
||||||
|
|
||||||
|
mv $prep_results_dir/live-image-${IB_TARGET_ARCH}.hybrid.iso $RESULTS_DIR/$IMAGE_BASENAME.iso
|
||||||
|
|
||||||
|
elif [ "${IMAGE_TYPE}" = img ]; then
|
||||||
|
|
||||||
|
SECTOR_LEN=512
|
||||||
|
BOOT_START=$((1024 * 1024)) # 1MiB
|
||||||
|
BOOT_LEN=$((512 * 1024 * 1024)) # 512MiB
|
||||||
|
BOOT_END=$((BOOT_START + BOOT_LEN - 1))
|
||||||
|
ROOT_START=$((BOOT_END + 1))
|
||||||
|
ROOT_LEN=$((MAX_IMG_LEN - ROOT_START))
|
||||||
|
ROOT_END=$((MAX_IMG_LEN - 1))
|
||||||
|
|
||||||
|
TARGET_NAME=$prep_results_dir/${IMAGE_BASENAME}.img
|
||||||
|
truncate -s $MAX_IMG_LEN $TARGET_NAME
|
||||||
|
|
||||||
|
sfdisk $TARGET_NAME <<-EOF
|
||||||
|
label: dos
|
||||||
|
label-id: 0xcb15ae4d
|
||||||
|
unit: sectors
|
||||||
|
sector-size: 512
|
||||||
|
|
||||||
|
${TARGET_NAME}1 : start=$((BOOT_START / SECTOR_LEN)), size=$((BOOT_LEN / SECTOR_LEN)), type=c, bootable
|
||||||
|
${TARGET_NAME}2 : start=$((ROOT_START / SECTOR_LEN)), size=$((ROOT_LEN / SECTOR_LEN)), type=83
|
||||||
|
EOF
|
||||||
|
|
||||||
|
BOOT_DEV=$(losetup --show -f --offset $BOOT_START --sizelimit $BOOT_LEN $TARGET_NAME)
|
||||||
|
ROOT_DEV=$(losetup --show -f --offset $ROOT_START --sizelimit $ROOT_LEN $TARGET_NAME)
|
||||||
|
|
||||||
|
mkfs.vfat -F32 $BOOT_DEV
|
||||||
|
mkfs.ext4 $ROOT_DEV
|
||||||
|
|
||||||
|
TMPDIR=$(mktemp -d)
|
||||||
|
|
||||||
|
mkdir -p $TMPDIR/boot $TMPDIR/root
|
||||||
|
mount $ROOT_DEV $TMPDIR/root
|
||||||
|
mount $BOOT_DEV $TMPDIR/boot
|
||||||
|
unsquashfs -n -f -d $TMPDIR $prep_results_dir/binary/live/filesystem.squashfs boot
|
||||||
|
|
||||||
|
mkdir $TMPDIR/root/images $TMPDIR/root/config
|
||||||
|
B3SUM=$(b3sum $prep_results_dir/binary/live/filesystem.squashfs | head -c 16)
|
||||||
|
cp $prep_results_dir/binary/live/filesystem.squashfs $TMPDIR/root/images/$B3SUM.rootfs
|
||||||
|
ln -rsf $TMPDIR/root/images/$B3SUM.rootfs $TMPDIR/root/config/current.rootfs
|
||||||
|
|
||||||
|
mkdir -p $TMPDIR/next $TMPDIR/lower $TMPDIR/root/config/work $TMPDIR/root/config/overlay
|
||||||
|
mount $TMPDIR/root/config/current.rootfs $TMPDIR/lower
|
||||||
|
|
||||||
|
mount -t overlay -o lowerdir=$TMPDIR/lower,workdir=$TMPDIR/root/config/work,upperdir=$TMPDIR/root/config/overlay overlay $TMPDIR/next
|
||||||
|
|
||||||
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
|
sed -i 's| boot=startos| boot=startos init=/usr/lib/startos/scripts/init_resize\.sh|' $TMPDIR/boot/cmdline.txt
|
||||||
|
rsync -a $SOURCE_DIR/raspberrypi/img/ $TMPDIR/next/
|
||||||
|
fi
|
||||||
|
|
||||||
|
umount $TMPDIR/next
|
||||||
|
umount $TMPDIR/lower
|
||||||
|
|
||||||
|
umount $TMPDIR/boot
|
||||||
|
umount $TMPDIR/root
|
||||||
|
|
||||||
|
|
||||||
|
e2fsck -fy $ROOT_DEV
|
||||||
|
resize2fs -M $ROOT_DEV
|
||||||
|
|
||||||
|
BLOCK_COUNT=$(dumpe2fs -h $ROOT_DEV | awk '/^Block count:/ { print $3 }')
|
||||||
|
BLOCK_SIZE=$(dumpe2fs -h $ROOT_DEV | awk '/^Block size:/ { print $3 }')
|
||||||
|
ROOT_LEN=$((BLOCK_COUNT * BLOCK_SIZE))
|
||||||
|
|
||||||
|
losetup -d $ROOT_DEV
|
||||||
|
losetup -d $BOOT_DEV
|
||||||
|
|
||||||
|
# Recreate partition 2 with the new size using sfdisk
|
||||||
|
sfdisk $TARGET_NAME <<-EOF
|
||||||
|
label: dos
|
||||||
|
label-id: 0xcb15ae4d
|
||||||
|
unit: sectors
|
||||||
|
sector-size: 512
|
||||||
|
|
||||||
|
${TARGET_NAME}1 : start=$((BOOT_START / SECTOR_LEN)), size=$((BOOT_LEN / SECTOR_LEN)), type=c, bootable
|
||||||
|
${TARGET_NAME}2 : start=$((ROOT_START / SECTOR_LEN)), size=$((ROOT_LEN / SECTOR_LEN)), type=83
|
||||||
|
EOF
|
||||||
|
|
||||||
|
TARGET_SIZE=$((ROOT_START + ROOT_LEN))
|
||||||
|
truncate -s $TARGET_SIZE $TARGET_NAME
|
||||||
|
|
||||||
|
mv $TARGET_NAME $RESULTS_DIR/$IMAGE_BASENAME.img
|
||||||
|
|
||||||
|
fi
|
||||||
|
|
||||||
|
chown $IB_UID:$IB_UID $RESULTS_DIR/$IMAGE_BASENAME.*
|
||||||
2
build/image-recipe/raspberrypi/img/etc/fstab
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
/dev/mmcblk0p1 /boot vfat umask=0077 0 2
|
||||||
|
/dev/mmcblk0p2 / ext4 defaults 0 1
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
get_variables () {
|
get_variables () {
|
||||||
ROOT_PART_DEV=$(findmnt / -o source -n)
|
ROOT_PART_DEV=$(findmnt /media/startos/root -o source -n)
|
||||||
ROOT_PART_NAME=$(echo "$ROOT_PART_DEV" | cut -d "/" -f 3)
|
ROOT_PART_NAME=$(echo "$ROOT_PART_DEV" | cut -d "/" -f 3)
|
||||||
ROOT_DEV_NAME=$(echo /sys/block/*/"${ROOT_PART_NAME}" | cut -d "/" -f 4)
|
ROOT_DEV_NAME=$(echo /sys/block/*/"${ROOT_PART_NAME}" | cut -d "/" -f 4)
|
||||||
ROOT_DEV="/dev/${ROOT_DEV_NAME}"
|
ROOT_DEV="/dev/${ROOT_DEV_NAME}"
|
||||||
@@ -89,12 +89,12 @@ main () {
|
|||||||
|
|
||||||
resize2fs $ROOT_PART_DEV
|
resize2fs $ROOT_PART_DEV
|
||||||
|
|
||||||
if ! systemd-machine-id-setup; then
|
if ! systemd-machine-id-setup --root=/media/startos/config/overlay/; then
|
||||||
FAIL_REASON="systemd-machine-id-setup failed"
|
FAIL_REASON="systemd-machine-id-setup failed"
|
||||||
return 1
|
return 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if ! ssh-keygen -A; then
|
if ! (mkdir -p /media/startos/config/overlay/etc/ssh && ssh-keygen -A -f /media/startos/config/overlay/); then
|
||||||
FAIL_REASON="ssh host key generation failed"
|
FAIL_REASON="ssh host key generation failed"
|
||||||
return 1
|
return 1
|
||||||
fi
|
fi
|
||||||
@@ -104,9 +104,6 @@ main () {
|
|||||||
return 0
|
return 0
|
||||||
}
|
}
|
||||||
|
|
||||||
mount -t proc proc /proc
|
|
||||||
mount -t sysfs sys /sys
|
|
||||||
mount -t tmpfs tmp /run
|
|
||||||
mkdir -p /run/systemd
|
mkdir -p /run/systemd
|
||||||
mount /boot
|
mount /boot
|
||||||
mount / -o remount,ro
|
mount / -o remount,ro
|
||||||
@@ -114,7 +111,7 @@ mount / -o remount,ro
|
|||||||
beep
|
beep
|
||||||
|
|
||||||
if main; then
|
if main; then
|
||||||
sed -i 's| init=/usr/lib/startos/scripts/init_resize\.sh| boot=embassy|' /boot/cmdline.txt
|
sed -i 's| init=/usr/lib/startos/scripts/init_resize\.sh||' /boot/cmdline.txt
|
||||||
echo "Resized root filesystem. Rebooting in 5 seconds..."
|
echo "Resized root filesystem. Rebooting in 5 seconds..."
|
||||||
sleep 5
|
sleep 5
|
||||||
else
|
else
|
||||||
@@ -1 +1 @@
|
|||||||
usb-storage.quirks=152d:0562:u,14cd:121c:u,0781:cfcb:u console=serial0,115200 console=tty1 root=PARTUUID=cb15ae4d-02 rootfstype=ext4 fsck.repair=yes rootwait cgroup_enable=cpuset cgroup_memory=1 cgroup_enable=memory quiet boot=embassy
|
usb-storage.quirks=152d:0562:u,14cd:121c:u,0781:cfcb:u console=serial0,115200 console=tty1 root=PARTUUID=cb15ae4d-02 rootfstype=ext4 fsck.repair=yes rootwait cgroup_enable=cpuset cgroup_memory=1 cgroup_enable=memory boot=startos
|
||||||
46
build/image-recipe/raspberrypi/squashfs/boot/config.sh
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
cat << EOF
|
||||||
|
|
||||||
|
# Enable audio (loads snd_bcm2835)
|
||||||
|
dtparam=audio=on
|
||||||
|
|
||||||
|
# Automatically load overlays for detected cameras
|
||||||
|
camera_auto_detect=1
|
||||||
|
|
||||||
|
# Automatically load overlays for detected DSI displays
|
||||||
|
display_auto_detect=1
|
||||||
|
|
||||||
|
# Enable DRM VC4 V3D driver
|
||||||
|
dtoverlay=vc4-kms-v3d
|
||||||
|
max_framebuffers=2
|
||||||
|
|
||||||
|
# Run in 64-bit mode
|
||||||
|
arm_64bit=1
|
||||||
|
|
||||||
|
# Disable compensation for displays with overscan
|
||||||
|
disable_overscan=1
|
||||||
|
|
||||||
|
[cm4]
|
||||||
|
# Enable host mode on the 2711 built-in XHCI USB controller.
|
||||||
|
# This line should be removed if the legacy DWC2 controller is required
|
||||||
|
# (e.g. for USB device mode) or if USB support is not required.
|
||||||
|
otg_mode=1
|
||||||
|
|
||||||
|
[all]
|
||||||
|
|
||||||
|
[pi4]
|
||||||
|
# Run as fast as firmware / board allows
|
||||||
|
arm_boost=1
|
||||||
|
kernel=vmlinuz-${KERNEL_VERSION}-rpi-v8
|
||||||
|
initramfs initrd.img-${KERNEL_VERSION}-rpi-v8 followkernel
|
||||||
|
|
||||||
|
[pi5]
|
||||||
|
kernel=vmlinuz-${KERNEL_VERSION}-rpi-2712
|
||||||
|
initramfs initrd.img-${KERNEL_VERSION}-rpi-2712 followkernel
|
||||||
|
|
||||||
|
[all]
|
||||||
|
gpu_mem=16
|
||||||
|
dtoverlay=pwm-2chan,disable-bt
|
||||||
|
|
||||||
|
EOF
|
||||||
@@ -83,4 +83,5 @@ arm_boost=1
|
|||||||
[all]
|
[all]
|
||||||
gpu_mem=16
|
gpu_mem=16
|
||||||
dtoverlay=pwm-2chan,disable-bt
|
dtoverlay=pwm-2chan,disable-bt
|
||||||
initramfs initrd.img-6.1.21-v8+
|
|
||||||
|
auto_initramfs=1
|
||||||
35
build/image-recipe/run-local-build.sh
Executable file
@@ -0,0 +1,35 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
cd "$(dirname "${BASH_SOURCE[0]}")/../.."
|
||||||
|
|
||||||
|
BASEDIR="$(pwd -P)"
|
||||||
|
|
||||||
|
SUITE=trixie
|
||||||
|
|
||||||
|
USE_TTY=
|
||||||
|
if tty -s; then
|
||||||
|
USE_TTY="-it"
|
||||||
|
fi
|
||||||
|
|
||||||
|
dockerfile_hash=$(sha256sum ${BASEDIR}/build/image-recipe/Dockerfile | head -c 7)
|
||||||
|
|
||||||
|
docker_img_name="start9/build-iso:${SUITE}-${dockerfile_hash}"
|
||||||
|
|
||||||
|
platform=linux/${ARCH}
|
||||||
|
case $ARCH in
|
||||||
|
x86_64)
|
||||||
|
platform=linux/amd64;;
|
||||||
|
aarch64)
|
||||||
|
platform=linux/arm64;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
if ! docker run --rm --platform=$platform "${docker_img_name}" true 2> /dev/null; then
|
||||||
|
docker buildx build --load --platform=$platform --build-arg=SUITE=${SUITE} -t "${docker_img_name}" ./build/image-recipe
|
||||||
|
fi
|
||||||
|
|
||||||
|
docker run $USE_TTY --rm --platform=$platform --privileged -v "$(pwd)/build/image-recipe:/root/image-recipe" -v "$(pwd)/results:/root/results" \
|
||||||
|
-e IB_SUITE="$SUITE" \
|
||||||
|
-e IB_UID="$UID" \
|
||||||
|
-e IB_INCLUDE \
|
||||||
|
"${docker_img_name}" /root/image-recipe/build.sh $@
|
||||||
@@ -1,13 +1,13 @@
|
|||||||
[
|
[
|
||||||
{
|
{
|
||||||
"id": "pureboot-librem_mini_v2-basic_usb_autoboot_blob_jail-Release-28.3",
|
"id": "pureboot-librem_mini_v2-basic_usb_autoboot_blob_jail-Release-29",
|
||||||
"platform": ["x86_64"],
|
"platform": ["x86_64"],
|
||||||
"system-product-name": "librem_mini_v2",
|
"system-product-name": "librem_mini_v2",
|
||||||
"bios-version": {
|
"bios-version": {
|
||||||
"semver-prefix": "PureBoot-Release-",
|
"semver-prefix": "PureBoot-Release-",
|
||||||
"semver-range": "<28.3"
|
"semver-range": "<29"
|
||||||
},
|
},
|
||||||
"url": "https://source.puri.sm/firmware/releases/-/raw/98418b5b8e9edc2bd1243ad7052a062f79e2b88e/librem_mini_v2/custom/pureboot-librem_mini_v2-basic_usb_autoboot_blob_jail-Release-28.3.rom.gz",
|
"url": "https://source.puri.sm/firmware/releases/-/raw/75631ad6dcf7e6ee73e06a517ac7dc4e017518b7/librem_mini_v2/custom/pureboot-librem_mini_v2-basic_usb_autoboot_blob_jail-Release-29.rom.gz",
|
||||||
"shasum": "5019bcf53f7493c7aa74f8ef680d18b5fc26ec156c705a841433aaa2fdef8f35"
|
"shasum": "96ec04f21b1cfe8e28d9a2418f1ff533efe21f9bbbbf16e162f7c814761b068b"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|||||||
51
build/lib/grub-theme/theme.txt
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
desktop-image: "../splash.png"
|
||||||
|
title-color: "#ffffff"
|
||||||
|
title-font: "Unifont Regular 16"
|
||||||
|
title-text: "StartOS Boot Menu with GRUB"
|
||||||
|
message-font: "Unifont Regular 16"
|
||||||
|
terminal-font: "Unifont Regular 16"
|
||||||
|
|
||||||
|
#help bar at the bottom
|
||||||
|
+ label {
|
||||||
|
top = 100%-50
|
||||||
|
left = 0
|
||||||
|
width = 100%
|
||||||
|
height = 20
|
||||||
|
text = "@KEYMAP_SHORT@"
|
||||||
|
align = "center"
|
||||||
|
color = "#ffffff"
|
||||||
|
font = "Unifont Regular 16"
|
||||||
|
}
|
||||||
|
|
||||||
|
#boot menu
|
||||||
|
+ boot_menu {
|
||||||
|
left = 10%
|
||||||
|
width = 80%
|
||||||
|
top = 52%
|
||||||
|
height = 48%-80
|
||||||
|
item_color = "#a8a8a8"
|
||||||
|
item_font = "Unifont Regular 16"
|
||||||
|
selected_item_color= "#ffffff"
|
||||||
|
selected_item_font = "Unifont Regular 16"
|
||||||
|
item_height = 16
|
||||||
|
item_padding = 0
|
||||||
|
item_spacing = 4
|
||||||
|
icon_width = 0
|
||||||
|
icon_heigh = 0
|
||||||
|
item_icon_space = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
#progress bar
|
||||||
|
+ progress_bar {
|
||||||
|
id = "__timeout__"
|
||||||
|
left = 15%
|
||||||
|
top = 100%-80
|
||||||
|
height = 16
|
||||||
|
width = 70%
|
||||||
|
font = "Unifont Regular 16"
|
||||||
|
text_color = "#000000"
|
||||||
|
fg_color = "#ffffff"
|
||||||
|
bg_color = "#a8a8a8"
|
||||||
|
border_color = "#ffffff"
|
||||||
|
text = "@TIMEOUT_NOTIFICATION_LONG@"
|
||||||
|
}
|
||||||
147
build/lib/motd
@@ -1,34 +1,123 @@
|
|||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
printf "\n"
|
|
||||||
printf "Welcome to\n"
|
|
||||||
cat << "ASCII"
|
|
||||||
|
|
||||||
███████
|
parse_essential_db_info() {
|
||||||
█ █ █
|
DB_DUMP="/tmp/startos_db.json"
|
||||||
█ █ █ █
|
|
||||||
█ █ █ █
|
|
||||||
█ █ █ █
|
|
||||||
█ █ █ █
|
|
||||||
█ █
|
|
||||||
███████
|
|
||||||
|
|
||||||
_____ __ ___ __ __
|
if command -v start-cli >/dev/null 2>&1; then
|
||||||
(_ | /\ |__) | / \(_
|
timeout 30 start-cli db dump > "$DB_DUMP" 2>/dev/null || return 1
|
||||||
__) | / \| \ | \__/__)
|
else
|
||||||
ASCII
|
return 1
|
||||||
printf " v$(cat /usr/lib/startos/VERSION.txt)\n\n"
|
fi
|
||||||
printf " %s (%s %s)\n" "$(uname -o)" "$(uname -r)" "$(uname -m)"
|
|
||||||
printf " Git Hash: $(cat /usr/lib/startos/GIT_HASH.txt)"
|
if command -v jq >/dev/null 2>&1 && [ -f "$DB_DUMP" ]; then
|
||||||
if [ -n "$(cat /usr/lib/startos/ENVIRONMENT.txt)" ]; then
|
HOSTNAME=$(jq -r '.value.serverInfo.hostname // "unknown"' "$DB_DUMP" 2>/dev/null)
|
||||||
printf " ~ $(cat /usr/lib/startos/ENVIRONMENT.txt)\n"
|
VERSION=$(jq -r '.value.serverInfo.version // "unknown"' "$DB_DUMP" 2>/dev/null)
|
||||||
else
|
RAM_BYTES=$(jq -r '.value.serverInfo.ram // 0' "$DB_DUMP" 2>/dev/null)
|
||||||
printf "\n"
|
WAN_IP=$(jq -r '.value.serverInfo.network.gateways[].ipInfo.wanIp // "unknown"' "$DB_DUMP" 2>/dev/null | head -1)
|
||||||
|
NTP_SYNCED=$(jq -r '.value.serverInfo.ntpSynced // false' "$DB_DUMP" 2>/dev/null)
|
||||||
|
|
||||||
|
if [ "$RAM_BYTES" != "0" ] && [ "$RAM_BYTES" != "null" ]; then
|
||||||
|
RAM_GB=$(echo "scale=1; $RAM_BYTES / 1073741824" | bc 2>/dev/null || echo "unknown")
|
||||||
|
else
|
||||||
|
RAM_GB="unknown"
|
||||||
|
fi
|
||||||
|
|
||||||
|
RUNNING_SERVICES=$(jq -r '[.value.packageData[] | select(.statusInfo.started != null)] | length' "$DB_DUMP" 2>/dev/null)
|
||||||
|
TOTAL_SERVICES=$(jq -r '.value.packageData | length' "$DB_DUMP" 2>/dev/null)
|
||||||
|
|
||||||
|
rm -f "$DB_DUMP"
|
||||||
|
return 0
|
||||||
|
else
|
||||||
|
rm -f "$DB_DUMP" 2>/dev/null
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
DB_INFO_AVAILABLE=0
|
||||||
|
if parse_essential_db_info; then
|
||||||
|
DB_INFO_AVAILABLE=1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
printf "\n"
|
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$VERSION" != "unknown" ]; then
|
||||||
printf " * Documentation: https://docs.start9.com\n"
|
version_display="v$VERSION"
|
||||||
printf " * Management: https://%s.local\n" "$(hostname)"
|
else
|
||||||
printf " * Support: https://start9.com/contact\n"
|
version_display="v$(cat /usr/lib/startos/VERSION.txt 2>/dev/null || echo 'unknown')"
|
||||||
printf " * Source Code: https://github.com/Start9Labs/start-os\n"
|
fi
|
||||||
printf " * License: MIT\n"
|
|
||||||
printf "\n"
|
printf "\n\033[1;37m ▄▄▀▀▀▀▀▄▄\033[0m\n"
|
||||||
|
printf "\033[1;37m ▄▀ ▄ ▀▄ ▄▄▄▄▄ ▄▄▄▄▄▄▄ ▄ ▄▄▄▄▄ ▄▄▄▄▄▄▄ \033[1;31m▄██████▄ ▄██████\033[0m\n"
|
||||||
|
printf "\033[1;37m █ █ █ █ █ █ █ █ █ ▀▄ █ \033[1;31m██ ██ ██ \033[0m\n"
|
||||||
|
printf "\033[1;37m█ █ █ █ ▀▄▄▄▄ █ █ █ █ ▄▄▄▀ █ \033[1;31m██ ██ ▀█████▄\033[0m\n"
|
||||||
|
printf "\033[1;37m█ █ █ █ █ █ █ █ █ ▀▄ █ \033[1;31m██ ██ ██\033[0m\n"
|
||||||
|
printf "\033[1;37m █ █ █ █ ▄▄▄▄▄▀ █ █ █ █ ▀▄ █ \033[1;31m▀██████▀ ██████▀\033[0m\n"
|
||||||
|
printf "\033[1;37m █ █\033[0m\n"
|
||||||
|
printf "\033[1;37m ▀▀▄▄▄▀▀ $version_display\033[0m\n\n"
|
||||||
|
|
||||||
|
uptime_str=$(uptime | awk -F'up ' '{print $2}' | awk -F',' '{print $1}' | sed 's/^ *//')
|
||||||
|
|
||||||
|
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$RAM_GB" != "unknown" ]; then
|
||||||
|
memory_used=$(free -m | awk 'NR==2{printf "%.0fMB", $3}')
|
||||||
|
memory_display="$memory_used / ${RAM_GB}GB"
|
||||||
|
else
|
||||||
|
memory_display=$(free -m | awk 'NR==2{printf "%.0fMB / %.0fMB", $3, $2}')
|
||||||
|
fi
|
||||||
|
|
||||||
|
root_usage=$(df -h / | awk 'NR==2{printf "%s (%s free)", $5, $4}')
|
||||||
|
|
||||||
|
if [ -d "/media/startos/data/package-data" ]; then
|
||||||
|
data_usage=$(df -h /media/startos/data/package-data | awk 'NR==2{printf "%s (%s free)", $5, $4}')
|
||||||
|
else
|
||||||
|
data_usage="N/A"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$DB_INFO_AVAILABLE" -eq 1 ]; then
|
||||||
|
services_text="$RUNNING_SERVICES/$TOTAL_SERVICES running"
|
||||||
|
else
|
||||||
|
services_text="Unknown"
|
||||||
|
fi
|
||||||
|
|
||||||
|
local_ip=$(ip route get 1.1.1.1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if($i=="src") print $(i+1)}' | head -1)
|
||||||
|
if [ -z "$local_ip" ]; then local_ip="N/A"; fi
|
||||||
|
|
||||||
|
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$WAN_IP" != "unknown" ]; then
|
||||||
|
wan_ip="$WAN_IP"
|
||||||
|
else
|
||||||
|
wan_ip="N/A"
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf " \033[1;37m┌─ SYSTEM STATUS ───────────────────────────────────────────────────┐\033[0m\n"
|
||||||
|
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Uptime:" "$uptime_str" "Memory:" "$memory_display"
|
||||||
|
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Root:" "$root_usage" "Data:" "$data_usage"
|
||||||
|
|
||||||
|
if [ "$DB_INFO_AVAILABLE" -eq 1 ]; then
|
||||||
|
if [ "$RUNNING_SERVICES" -eq "$TOTAL_SERVICES" ] && [ "$TOTAL_SERVICES" -gt 0 ]; then
|
||||||
|
printf " \033[1;37m│\033[0m %-8s \033[0;32m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
|
||||||
|
elif [ "$RUNNING_SERVICES" -gt 0 ]; then
|
||||||
|
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
|
||||||
|
else
|
||||||
|
printf " \033[1;37m│\033[0m %-8s \033[0;31m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
printf " \033[1;37m│\033[0m %-8s \033[0;37m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$NTP_SYNCED" = "true" ]; then
|
||||||
|
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;32m%-23s\033[0m \033[1;37m│\033[0m\n" "Local:" "$local_ip" "NTP:" "Synced"
|
||||||
|
elif [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$NTP_SYNCED" = "false" ]; then
|
||||||
|
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;31m%-23s\033[0m \033[1;37m│\033[0m\n" "Local:" "$local_ip" "NTP:" "Not Synced"
|
||||||
|
else
|
||||||
|
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;37m%-23s\033[0m \033[1;37m│\033[0m\n" "Local:" "$local_ip" "NTP:" "Unknown"
|
||||||
|
fi
|
||||||
|
|
||||||
|
printf " \033[1;37m└───────────────────────────────────────────────────────────────────┘\033[0m"
|
||||||
|
|
||||||
|
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$HOSTNAME" != "unknown" ]; then
|
||||||
|
web_url="https://$HOSTNAME.local"
|
||||||
|
else
|
||||||
|
web_url="https://$(hostname).local"
|
||||||
|
fi
|
||||||
|
printf "\n \033[1;37m┌──────────────────────────────────────────────────── QUICK ACCESS ─┐\033[0m\n"
|
||||||
|
printf " \033[1;37m│\033[0m Web Interface: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "$web_url"
|
||||||
|
printf " \033[1;37m│\033[0m Documentation: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://staging.docs.start9.com"
|
||||||
|
printf " \033[1;37m│\033[0m Support: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://start9.com/contact"
|
||||||
|
printf " \033[1;37m└───────────────────────────────────────────────────────────────────┘\033[0m\n\n"
|
||||||
|
|||||||
@@ -4,6 +4,3 @@ set -e
|
|||||||
|
|
||||||
curl -fsSL https://deb.torproject.org/torproject.org/A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89.asc | gpg --dearmor -o- > /usr/share/keyrings/tor-archive-keyring.gpg
|
curl -fsSL https://deb.torproject.org/torproject.org/A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89.asc | gpg --dearmor -o- > /usr/share/keyrings/tor-archive-keyring.gpg
|
||||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/tor-archive-keyring.gpg] https://deb.torproject.org/torproject.org bullseye main" > /etc/apt/sources.list.d/tor.list
|
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/tor-archive-keyring.gpg] https://deb.torproject.org/torproject.org bullseye main" > /etc/apt/sources.list.d/tor.list
|
||||||
|
|
||||||
curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o- > /usr/share/keyrings/docker-archive-keyring.gpg
|
|
||||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian bullseye stable" > /etc/apt/sources.list.d/docker.list
|
|
||||||
|
|||||||
@@ -1,8 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
|
|
||||||
if cat /sys/class/drm/*/status | grep -qw connected; then
|
|
||||||
exit 0
|
|
||||||
else
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
@@ -1,46 +1,116 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
|
SOURCE_DIR="$(dirname $(realpath "${BASH_SOURCE[0]}"))"
|
||||||
|
|
||||||
if [ "$UID" -ne 0 ]; then
|
if [ "$UID" -ne 0 ]; then
|
||||||
>&2 echo 'Must be run as root'
|
>&2 echo 'Must be run as root'
|
||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
POSITIONAL_ARGS=()
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case $1 in
|
||||||
|
--no-sync)
|
||||||
|
NO_SYNC=1
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
--create)
|
||||||
|
ONLY_CREATE=1
|
||||||
|
shift
|
||||||
|
;;
|
||||||
|
-*|--*)
|
||||||
|
echo "Unknown option $1"
|
||||||
|
exit 1
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
POSITIONAL_ARGS+=("$1") # save positional arg
|
||||||
|
shift # past argument
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
|
||||||
|
|
||||||
if [ -z "$NO_SYNC" ]; then
|
if [ -z "$NO_SYNC" ]; then
|
||||||
echo 'Syncing...'
|
echo 'Syncing...'
|
||||||
rsync -a --delete --force --info=progress2 /media/embassy/embassyfs/current/ /media/embassy/next
|
umount -R /media/startos/next 2> /dev/null
|
||||||
|
umount /media/startos/upper 2> /dev/null
|
||||||
|
rm -rf /media/startos/upper /media/startos/next
|
||||||
|
mkdir /media/startos/upper
|
||||||
|
mount -t tmpfs tmpfs /media/startos/upper
|
||||||
|
mkdir -p /media/startos/upper/data /media/startos/upper/work /media/startos/next
|
||||||
|
mount -t overlay \
|
||||||
|
-olowerdir=/media/startos/current,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \
|
||||||
|
overlay /media/startos/next
|
||||||
fi
|
fi
|
||||||
|
|
||||||
mkdir -p /media/embassy/next/run
|
if [ -n "$ONLY_CREATE" ]; then
|
||||||
mkdir -p /media/embassy/next/dev
|
exit 0
|
||||||
mkdir -p /media/embassy/next/sys
|
fi
|
||||||
mkdir -p /media/embassy/next/proc
|
|
||||||
mkdir -p /media/embassy/next/boot
|
mkdir -p /media/startos/next/run
|
||||||
mount --bind /run /media/embassy/next/run
|
mkdir -p /media/startos/next/dev
|
||||||
mount --bind /dev /media/embassy/next/dev
|
mkdir -p /media/startos/next/sys
|
||||||
mount --bind /sys /media/embassy/next/sys
|
mkdir -p /media/startos/next/proc
|
||||||
mount --bind /proc /media/embassy/next/proc
|
mkdir -p /media/startos/next/boot
|
||||||
mount --bind /boot /media/embassy/next/boot
|
mkdir -p /media/startos/next/media/startos/root
|
||||||
|
mount --bind /run /media/startos/next/run
|
||||||
|
mount --bind /tmp /media/startos/next/tmp
|
||||||
|
mount --bind /dev /media/startos/next/dev
|
||||||
|
mount --bind /sys /media/startos/next/sys
|
||||||
|
mount --bind /proc /media/startos/next/proc
|
||||||
|
mount --bind /boot /media/startos/next/boot
|
||||||
|
mount --bind /media/startos/root /media/startos/next/media/startos/root
|
||||||
|
|
||||||
|
if mountpoint /sys/firmware/efi/efivars 2>&1 > /dev/null; then
|
||||||
|
mount --bind /sys/firmware/efi/efivars /media/startos/next/sys/firmware/efi/efivars
|
||||||
|
fi
|
||||||
|
|
||||||
if [ -z "$*" ]; then
|
if [ -z "$*" ]; then
|
||||||
chroot /media/embassy/next
|
chroot /media/startos/next
|
||||||
CHROOT_RES=$?
|
CHROOT_RES=$?
|
||||||
else
|
else
|
||||||
chroot /media/embassy/next "$SHELL" -c "$*"
|
chroot /media/startos/next "$SHELL" -c "$*"
|
||||||
CHROOT_RES=$?
|
CHROOT_RES=$?
|
||||||
fi
|
fi
|
||||||
|
|
||||||
umount /media/embassy/next/run
|
if mountpoint /media/startos/next/sys/firmware/efi/efivars 2>&1 > /dev/null; then
|
||||||
umount /media/embassy/next/dev
|
umount /media/startos/next/sys/firmware/efi/efivars
|
||||||
umount /media/embassy/next/sys
|
fi
|
||||||
umount /media/embassy/next/proc
|
|
||||||
umount /media/embassy/next/boot
|
umount /media/startos/next/run
|
||||||
|
umount /media/startos/next/tmp
|
||||||
|
umount /media/startos/next/dev
|
||||||
|
umount /media/startos/next/sys
|
||||||
|
umount /media/startos/next/proc
|
||||||
|
umount /media/startos/next/boot
|
||||||
|
umount /media/startos/next/media/startos/root
|
||||||
|
|
||||||
if [ "$CHROOT_RES" -eq 0 ]; then
|
if [ "$CHROOT_RES" -eq 0 ]; then
|
||||||
|
|
||||||
|
if [ -h /media/startos/config/current.rootfs ] && [ -e /media/startos/config/current.rootfs ]; then
|
||||||
|
${SOURCE_DIR}/prune-images $(du -s --bytes /media/startos/next | awk '{print $1}')
|
||||||
|
fi
|
||||||
|
|
||||||
echo 'Upgrading...'
|
echo 'Upgrading...'
|
||||||
|
|
||||||
touch /media/embassy/config/upgrade
|
rm -f /media/startos/images/next.squashfs
|
||||||
|
if ! time mksquashfs /media/startos/next /media/startos/images/next.squashfs -b 4096 -comp gzip; then
|
||||||
|
umount -l /media/startos/next
|
||||||
|
umount -l /media/startos/upper
|
||||||
|
rm -rf /media/startos/upper /media/startos/next
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
hash=$(b3sum /media/startos/images/next.squashfs | head -c 32)
|
||||||
|
mv /media/startos/images/next.squashfs /media/startos/images/${hash}.rootfs
|
||||||
|
ln -rsf /media/startos/images/${hash}.rootfs /media/startos/config/current.rootfs
|
||||||
|
|
||||||
sync
|
sync
|
||||||
|
|
||||||
reboot
|
reboot
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
umount /media/startos/next
|
||||||
|
umount /media/startos/upper
|
||||||
|
rm -rf /media/startos/upper /media/startos/next
|
||||||
@@ -1 +0,0 @@
|
|||||||
start-cli net dhcp update $interface
|
|
||||||
@@ -1,98 +0,0 @@
|
|||||||
# Local filesystem mounting -*- shell-script -*-
|
|
||||||
|
|
||||||
#
|
|
||||||
# This script overrides local_mount_root() in /scripts/local
|
|
||||||
# and mounts root as a read-only filesystem with a temporary (rw)
|
|
||||||
# overlay filesystem.
|
|
||||||
#
|
|
||||||
|
|
||||||
. /scripts/local
|
|
||||||
|
|
||||||
local_mount_root()
|
|
||||||
{
|
|
||||||
echo 'using embassy initramfs module'
|
|
||||||
|
|
||||||
local_top
|
|
||||||
local_device_setup "${ROOT}" "root file system"
|
|
||||||
ROOT="${DEV}"
|
|
||||||
|
|
||||||
# Get the root filesystem type if not set
|
|
||||||
if [ -z "${ROOTFSTYPE}" ]; then
|
|
||||||
FSTYPE=$(get_fstype "${ROOT}")
|
|
||||||
else
|
|
||||||
FSTYPE=${ROOTFSTYPE}
|
|
||||||
fi
|
|
||||||
|
|
||||||
local_premount
|
|
||||||
|
|
||||||
# CHANGES TO THE ORIGINAL FUNCTION BEGIN HERE
|
|
||||||
# N.B. this code still lacks error checking
|
|
||||||
|
|
||||||
modprobe ${FSTYPE}
|
|
||||||
checkfs ${ROOT} root "${FSTYPE}"
|
|
||||||
|
|
||||||
ROOTFLAGS="$(echo "${ROOTFLAGS}" | sed 's/subvol=\(next\|current\)//' | sed 's/^-o *$//')"
|
|
||||||
|
|
||||||
if [ "${FSTYPE}" != "unknown" ]; then
|
|
||||||
mount -t ${FSTYPE} ${ROOTFLAGS} ${ROOT} ${rootmnt}
|
|
||||||
else
|
|
||||||
mount ${ROOTFLAGS} ${ROOT} ${rootmnt}
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo 'mounting embassyfs'
|
|
||||||
|
|
||||||
mkdir /embassyfs
|
|
||||||
|
|
||||||
mount --move ${rootmnt} /embassyfs
|
|
||||||
|
|
||||||
if ! [ -d /embassyfs/current ] && [ -d /embassyfs/prev ]; then
|
|
||||||
mv /embassyfs/prev /embassyfs/current
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! [ -d /embassyfs/current ]; then
|
|
||||||
mkdir /embassyfs/current
|
|
||||||
for FILE in $(ls /embassyfs); do
|
|
||||||
if [ "$FILE" != current ]; then
|
|
||||||
mv /embassyfs/$FILE /embassyfs/current/
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
|
|
||||||
mkdir -p /embassyfs/config
|
|
||||||
|
|
||||||
if [ -f /embassyfs/config/upgrade ] && [ -d /embassyfs/next ]; then
|
|
||||||
mv /embassyfs/current /embassyfs/prev
|
|
||||||
mv /embassyfs/next /embassyfs/current
|
|
||||||
rm /embassyfs/config/upgrade
|
|
||||||
fi
|
|
||||||
|
|
||||||
if ! [ -d /embassyfs/next ]; then
|
|
||||||
if [ -d /embassyfs/prev ]; then
|
|
||||||
mv /embassyfs/prev /embassyfs/next
|
|
||||||
else
|
|
||||||
mkdir /embassyfs/next
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
mkdir /lower /upper
|
|
||||||
|
|
||||||
mount -r --bind /embassyfs/current /lower
|
|
||||||
|
|
||||||
modprobe overlay || insmod "/lower/lib/modules/$(uname -r)/kernel/fs/overlayfs/overlay.ko"
|
|
||||||
|
|
||||||
# Mount a tmpfs for the overlay in /upper
|
|
||||||
mount -t tmpfs tmpfs /upper
|
|
||||||
mkdir /upper/data /upper/work
|
|
||||||
|
|
||||||
# Mount the final overlay-root in $rootmnt
|
|
||||||
mount -t overlay \
|
|
||||||
-olowerdir=/lower,upperdir=/upper/data,workdir=/upper/work \
|
|
||||||
overlay ${rootmnt}
|
|
||||||
|
|
||||||
mkdir -p ${rootmnt}/media/embassy/config
|
|
||||||
mount --bind /embassyfs/config ${rootmnt}/media/embassy/config
|
|
||||||
mkdir -p ${rootmnt}/media/embassy/next
|
|
||||||
mount --bind /embassyfs/next ${rootmnt}/media/embassy/next
|
|
||||||
mkdir -p ${rootmnt}/media/embassy/embassyfs
|
|
||||||
mount -r --bind /embassyfs ${rootmnt}/media/embassy/embassyfs
|
|
||||||
}
|
|
||||||
@@ -4,7 +4,7 @@ set -e
|
|||||||
|
|
||||||
# install dependencies
|
# install dependencies
|
||||||
/usr/bin/apt update
|
/usr/bin/apt update
|
||||||
/usr/bin/apt install --no-install-recommends -y xserver-xorg x11-xserver-utils xinit firefox-esr matchbox-window-manager libnss3-tools
|
/usr/bin/apt install --no-install-recommends -y xserver-xorg x11-xserver-utils xinit firefox-esr matchbox-window-manager libnss3-tools p11-kit-modules
|
||||||
|
|
||||||
#Change a default preference set by stock debian firefox-esr
|
#Change a default preference set by stock debian firefox-esr
|
||||||
sed -i 's|^pref("extensions.update.enabled", true);$|pref("extensions.update.enabled", false);|' /etc/firefox-esr/firefox-esr.js
|
sed -i 's|^pref("extensions.update.enabled", true);$|pref("extensions.update.enabled", false);|' /etc/firefox-esr/firefox-esr.js
|
||||||
@@ -14,14 +14,8 @@ if ! id kiosk; then
|
|||||||
useradd -s /bin/bash --create-home kiosk
|
useradd -s /bin/bash --create-home kiosk
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# create kiosk script
|
mkdir /home/kiosk/fx-profile
|
||||||
cat > /home/kiosk/kiosk.sh << 'EOF'
|
cat >> /home/kiosk/fx-profile/prefs.js << EOF
|
||||||
#!/bin/sh
|
|
||||||
PROFILE=$(mktemp -d)
|
|
||||||
if [ -f /usr/local/share/ca-certificates/startos-root-ca.crt ]; then
|
|
||||||
certutil -A -n "StartOS Local Root CA" -t "TCu,Cuw,Tuw" -i /usr/local/share/ca-certificates/startos-root-ca.crt -d $PROFILE
|
|
||||||
fi
|
|
||||||
cat >> $PROFILE/prefs.js << EOT
|
|
||||||
user_pref("app.normandy.api_url", "");
|
user_pref("app.normandy.api_url", "");
|
||||||
user_pref("app.normandy.enabled", false);
|
user_pref("app.normandy.enabled", false);
|
||||||
user_pref("app.shield.optoutstudies.enabled", false);
|
user_pref("app.shield.optoutstudies.enabled", false);
|
||||||
@@ -33,7 +27,6 @@ user_pref("browser.crashReports.unsubmittedCheck.autoSubmit2", false);
|
|||||||
user_pref("browser.newtabpage.activity-stream.feeds.asrouterfeed", false);
|
user_pref("browser.newtabpage.activity-stream.feeds.asrouterfeed", false);
|
||||||
user_pref("browser.newtabpage.activity-stream.feeds.topsites", false);
|
user_pref("browser.newtabpage.activity-stream.feeds.topsites", false);
|
||||||
user_pref("browser.newtabpage.activity-stream.showSponsoredTopSites", false);
|
user_pref("browser.newtabpage.activity-stream.showSponsoredTopSites", false);
|
||||||
user_pref("browser.onboarding.enabled", false);
|
|
||||||
user_pref("browser.ping-centre.telemetry", false);
|
user_pref("browser.ping-centre.telemetry", false);
|
||||||
user_pref("browser.pocket.enabled", false);
|
user_pref("browser.pocket.enabled", false);
|
||||||
user_pref("browser.safebrowsing.blockedURIs.enabled", false);
|
user_pref("browser.safebrowsing.blockedURIs.enabled", false);
|
||||||
@@ -49,7 +42,7 @@ user_pref("browser.startup.homepage_override.mstone", "ignore");
|
|||||||
user_pref("browser.theme.content-theme", 0);
|
user_pref("browser.theme.content-theme", 0);
|
||||||
user_pref("browser.theme.toolbar-theme", 0);
|
user_pref("browser.theme.toolbar-theme", 0);
|
||||||
user_pref("browser.urlbar.groupLabels.enabled", false);
|
user_pref("browser.urlbar.groupLabels.enabled", false);
|
||||||
user_pref("browser.urlbar.suggest.searches" false);
|
user_pref("browser.urlbar.suggest.searches", false);
|
||||||
user_pref("datareporting.policy.firstRunURL", "");
|
user_pref("datareporting.policy.firstRunURL", "");
|
||||||
user_pref("datareporting.healthreport.service.enabled", false);
|
user_pref("datareporting.healthreport.service.enabled", false);
|
||||||
user_pref("datareporting.healthreport.uploadEnabled", false);
|
user_pref("datareporting.healthreport.uploadEnabled", false);
|
||||||
@@ -58,10 +51,9 @@ user_pref("dom.securecontext.allowlist_onions", true);
|
|||||||
user_pref("dom.securecontext.whitelist_onions", true);
|
user_pref("dom.securecontext.whitelist_onions", true);
|
||||||
user_pref("experiments.enabled", false);
|
user_pref("experiments.enabled", false);
|
||||||
user_pref("experiments.activeExperiment", false);
|
user_pref("experiments.activeExperiment", false);
|
||||||
user_pref("experiments.supported", false);
|
|
||||||
user_pref("extensions.activeThemeID", "firefox-compact-dark@mozilla.org");
|
user_pref("extensions.activeThemeID", "firefox-compact-dark@mozilla.org");
|
||||||
user_pref("extensions.blocklist.enabled", false);
|
user_pref("extensions.blocklist.enabled", false);
|
||||||
user_pref("extensions.getAddons.cache.enabled", false);
|
user_pref("extensions.htmlaboutaddons.recommendations.enabled", false);
|
||||||
user_pref("extensions.pocket.enabled", false);
|
user_pref("extensions.pocket.enabled", false);
|
||||||
user_pref("extensions.update.enabled", false);
|
user_pref("extensions.update.enabled", false);
|
||||||
user_pref("extensions.shield-recipe-client.enabled", false);
|
user_pref("extensions.shield-recipe-client.enabled", false);
|
||||||
@@ -72,9 +64,15 @@ user_pref("messaging-system.rsexperimentloader.enabled", false);
|
|||||||
user_pref("network.allow-experiments", false);
|
user_pref("network.allow-experiments", false);
|
||||||
user_pref("network.captive-portal-service.enabled", false);
|
user_pref("network.captive-portal-service.enabled", false);
|
||||||
user_pref("network.connectivity-service.enabled", false);
|
user_pref("network.connectivity-service.enabled", false);
|
||||||
user_pref("network.proxy.autoconfig_url", "file:///usr/lib/startos/proxy.pac");
|
user_pref("network.proxy.socks", "10.0.3.1");
|
||||||
|
user_pref("network.proxy.socks_port", 9050);
|
||||||
|
user_pref("network.proxy.socks_version", 5);
|
||||||
user_pref("network.proxy.socks_remote_dns", true);
|
user_pref("network.proxy.socks_remote_dns", true);
|
||||||
user_pref("network.proxy.type", 2);
|
user_pref("network.proxy.type", 1);
|
||||||
|
user_pref("privacy.resistFingerprinting", true);
|
||||||
|
//Enable letterboxing if we want the window size sent to the server to snap to common resolutions:
|
||||||
|
//user_pref("privacy.resistFingerprinting.letterboxing", true);
|
||||||
|
user_pref("privacy.trackingprotection.enabled", true);
|
||||||
user_pref("signon.rememberSignons", false);
|
user_pref("signon.rememberSignons", false);
|
||||||
user_pref("toolkit.telemetry.archive.enabled", false);
|
user_pref("toolkit.telemetry.archive.enabled", false);
|
||||||
user_pref("toolkit.telemetry.bhrPing.enabled", false);
|
user_pref("toolkit.telemetry.bhrPing.enabled", false);
|
||||||
@@ -87,22 +85,31 @@ user_pref("toolkit.telemetry.shutdownPingSender.enabled", false);
|
|||||||
user_pref("toolkit.telemetry.unified", false);
|
user_pref("toolkit.telemetry.unified", false);
|
||||||
user_pref("toolkit.telemetry.updatePing.enabled", false);
|
user_pref("toolkit.telemetry.updatePing.enabled", false);
|
||||||
user_pref("toolkit.telemetry.cachedClientID", "");
|
user_pref("toolkit.telemetry.cachedClientID", "");
|
||||||
EOT
|
//Blocking automatic Mozilla CDN server requests
|
||||||
|
user_pref("extensions.getAddons.showPane", false);
|
||||||
|
user_pref("extensions.getAddons.cache.enabled", false);
|
||||||
|
//user_pref("services.settings.server", ""); // Remote settings server (HSTS preload updates and Cerfiticate Revocation Lists are fetched)
|
||||||
|
user_pref("browser.aboutHomeSnippets.updateUrl", "");
|
||||||
|
user_pref("browser.newtabpage.activity-stream.feeds.snippets", false);
|
||||||
|
user_pref("browser.newtabpage.activity-stream.feeds.section.topstories", false);
|
||||||
|
user_pref("browser.newtabpage.activity-stream.feeds.system.topstories", false);
|
||||||
|
user_pref("browser.newtabpage.activity-stream.feeds.discoverystreamfeed", false);
|
||||||
|
user_pref("browser.safebrowsing.provider.mozilla.updateURL", "");
|
||||||
|
user_pref("browser.safebrowsing.provider.mozilla.gethashURL", "");
|
||||||
|
EOF
|
||||||
|
|
||||||
|
ln -sf /usr/lib/$(uname -m)-linux-gnu/pkcs11/p11-kit-trust.so /usr/lib/firefox-esr/libnssckbi.so
|
||||||
|
|
||||||
|
# create kiosk script
|
||||||
|
cat > /home/kiosk/kiosk.sh << 'EOF'
|
||||||
|
#!/bin/sh
|
||||||
while ! curl "http://localhost" > /dev/null; do
|
while ! curl "http://localhost" > /dev/null; do
|
||||||
sleep 1
|
sleep 1
|
||||||
done
|
done
|
||||||
while ! /usr/lib/startos/scripts/check-monitor; do
|
|
||||||
sleep 15
|
|
||||||
done
|
|
||||||
(
|
|
||||||
while /usr/lib/startos/scripts/check-monitor; do
|
|
||||||
sleep 15
|
|
||||||
done
|
|
||||||
killall firefox-esr
|
|
||||||
) &
|
|
||||||
matchbox-window-manager -use_titlebar no &
|
matchbox-window-manager -use_titlebar no &
|
||||||
firefox-esr http://localhost --profile $PROFILE
|
cp -r /home/kiosk/fx-profile /home/kiosk/fx-profile-tmp
|
||||||
rm -rf $PROFILE
|
firefox-esr http://localhost --profile /home/kiosk/fx-profile-tmp
|
||||||
|
rm -rf /home/kiosk/fx-profile-tmp
|
||||||
EOF
|
EOF
|
||||||
chmod +x /home/kiosk/kiosk.sh
|
chmod +x /home/kiosk/kiosk.sh
|
||||||
|
|
||||||
@@ -116,6 +123,8 @@ fi
|
|||||||
EOF
|
EOF
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
chown -R kiosk:kiosk /home/kiosk
|
||||||
|
|
||||||
# enable autologin
|
# enable autologin
|
||||||
mkdir -p /etc/systemd/system/getty@tty1.service.d
|
mkdir -p /etc/systemd/system/getty@tty1.service.d
|
||||||
cat > /etc/systemd/system/getty@tty1.service.d/autologin.conf << 'EOF'
|
cat > /etc/systemd/system/getty@tty1.service.d/autologin.conf << 'EOF'
|
||||||
|
|||||||
55
build/lib/scripts/forward-port
Executable file
@@ -0,0 +1,55 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
if [ -z "$sip" ] || [ -z "$dip" ] || [ -z "$dprefix" ] || [ -z "$sport" ] || [ -z "$dport" ]; then
|
||||||
|
>&2 echo 'missing required env var'
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
NAME="F$(echo "$sip:$sport -> $dip/$dprefix:$dport" | sha256sum | head -c 15)"
|
||||||
|
|
||||||
|
for kind in INPUT FORWARD ACCEPT; do
|
||||||
|
if ! iptables -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
|
||||||
|
iptables -N "${NAME}_${kind}" 2> /dev/null
|
||||||
|
iptables -A $kind -j "${NAME}_${kind}"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
for kind in PREROUTING INPUT OUTPUT POSTROUTING; do
|
||||||
|
if ! iptables -t nat -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
|
||||||
|
iptables -t nat -N "${NAME}_${kind}" 2> /dev/null
|
||||||
|
iptables -t nat -A $kind -j "${NAME}_${kind}"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
err=0
|
||||||
|
trap 'err=1' ERR
|
||||||
|
|
||||||
|
for kind in INPUT FORWARD ACCEPT; do
|
||||||
|
iptables -F "${NAME}_${kind}" 2> /dev/null
|
||||||
|
done
|
||||||
|
for kind in PREROUTING INPUT OUTPUT POSTROUTING; do
|
||||||
|
iptables -t nat -F "${NAME}_${kind}" 2> /dev/null
|
||||||
|
done
|
||||||
|
if [ "$UNDO" = 1 ]; then
|
||||||
|
conntrack -D -p tcp -d $sip --dport $sport || true # conntrack returns exit 1 if no connections are active
|
||||||
|
conntrack -D -p udp -d $sip --dport $sport || true # conntrack returns exit 1 if no connections are active
|
||||||
|
exit $err
|
||||||
|
fi
|
||||||
|
|
||||||
|
# DNAT: rewrite destination for incoming packets (external traffic)
|
||||||
|
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
|
||||||
|
# DNAT: rewrite destination for locally-originated packets (hairpin from host itself)
|
||||||
|
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
|
||||||
|
# MASQUERADE: rewrite source for all forwarded traffic to the destination
|
||||||
|
# This ensures responses are routed back through the host regardless of source IP
|
||||||
|
iptables -t nat -A ${NAME}_POSTROUTING -d "$dip" -p tcp --dport "$dport" -j MASQUERADE
|
||||||
|
iptables -t nat -A ${NAME}_POSTROUTING -d "$dip" -p udp --dport "$dport" -j MASQUERADE
|
||||||
|
|
||||||
|
# Allow new connections to be forwarded to the destination
|
||||||
|
iptables -A ${NAME}_FORWARD -d $dip -p tcp --dport $dport -m state --state NEW -j ACCEPT
|
||||||
|
iptables -A ${NAME}_FORWARD -d $dip -p udp --dport $dport -m state --state NEW -j ACCEPT
|
||||||
|
|
||||||
|
exit $err
|
||||||
105
build/lib/scripts/gather-debug-info
Executable file
@@ -0,0 +1,105 @@
|
|||||||
|
#!/bin/bash

# gather-debug-info — collect StartOS system diagnostics into a single text
# file and optionally send it to a Start9 technician via magic-wormhole.

# Define the output file (created in the caller's working directory)
OUTPUT_FILE="system_debug_info.txt"

# Check if the script is run as root, if not, restart with sudo
if [ "$(id -u)" -ne 0 ]; then
    exec sudo bash "$0" "$@"
fi

# Create or clear the output file and add a header
echo "===================================================================" > "$OUTPUT_FILE"
echo " StartOS System Debug Information " >> "$OUTPUT_FILE"
echo "===================================================================" >> "$OUTPUT_FILE"
echo "Generated on: $(date)" >> "$OUTPUT_FILE"
echo "" >> "$OUTPUT_FILE"

# Function to check if a command exists
command_exists() {
    command -v "$1" >/dev/null 2>&1
}

# Function to run a command if it exists and append its output to the file
# with headers; commands that are not installed are recorded as SKIPPED.
run_command() {
    local CMD="$1"
    local DESC="$2"
    local CMD_NAME="${CMD%% *}" # Extract the command name (first word)

    if command_exists "$CMD_NAME"; then
        echo "===================================================================" >> "$OUTPUT_FILE"
        echo "COMMAND: $CMD" >> "$OUTPUT_FILE"
        echo "DESCRIPTION: $DESC" >> "$OUTPUT_FILE"
        echo "===================================================================" >> "$OUTPUT_FILE"
        echo "" >> "$OUTPUT_FILE"
        # eval is acceptable here: $CMD only ever comes from the hard-coded
        # invocations below, never from user input.
        eval "$CMD" >> "$OUTPUT_FILE" 2>&1
        echo "" >> "$OUTPUT_FILE"
    else
        echo "===================================================================" >> "$OUTPUT_FILE"
        echo "COMMAND: $CMD" >> "$OUTPUT_FILE"
        echo "DESCRIPTION: $DESC" >> "$OUTPUT_FILE"
        echo "===================================================================" >> "$OUTPUT_FILE"
        echo "SKIPPED: Command not found" >> "$OUTPUT_FILE"
        echo "" >> "$OUTPUT_FILE"
    fi
}

# Collecting basic system information
run_command "start-cli --version; start-cli git-info" "StartOS CLI version and Git information"
run_command "hostname" "Hostname of the system"
run_command "uname -a" "Kernel version and system architecture"

# Services Info
run_command "start-cli lxc stats" "All Running Services"

# Collecting CPU information
run_command "lscpu" "CPU architecture information"
run_command "cat /proc/cpuinfo" "Detailed CPU information"

# Collecting memory information
run_command "free -h" "Available and used memory"
run_command "cat /proc/meminfo" "Detailed memory information"

# Collecting storage information
run_command "lsblk" "List of block devices"
run_command "df -h" "Disk space usage"
run_command "fdisk -l" "Detailed disk partition information"

# Collecting network information
run_command "ip a" "Network interfaces and IP addresses"
run_command "ip route" "Routing table"
run_command "netstat -i" "Network interface statistics"

# Collecting RAID information (if applicable)
run_command "cat /proc/mdstat" "List of RAID devices (if applicable)"

# Collecting virtualization information
run_command "egrep -c '(vmx|svm)' /proc/cpuinfo" "Check if CPU supports virtualization"
run_command "systemd-detect-virt" "Check if the system is running inside a virtual machine"

# Final message
echo "===================================================================" >> "$OUTPUT_FILE"
echo " End of StartOS System Debug Information " >> "$OUTPUT_FILE"
echo "===================================================================" >> "$OUTPUT_FILE"

# Prompt user to send the log file to a Start9 Technician
echo "System debug information has been collected in $OUTPUT_FILE."
echo ""
echo "Would you like to send this log file to a Start9 Technician? (yes/no)"
# BUG FIX: -r prevents backslashes in the answer from being interpreted
read -r SEND_LOG

if [[ "$SEND_LOG" == "yes" || "$SEND_LOG" == "y" ]]; then
    if command -v wormhole >/dev/null 2>&1; then
        echo ""
        echo "==================================================================="
        echo " Running wormhole to send the file. Please follow the "
        echo " instructions and provide the code to the Start9 support team. "
        echo "==================================================================="
        wormhole send "$OUTPUT_FILE"
        echo "==================================================================="
    else
        echo "Error: wormhole command not found."
    fi
else
    echo "Log file not sent. You can manually share $OUTPUT_FILE with the Start9 support team if needed."
fi
|
||||||
@@ -3,8 +3,8 @@
|
|||||||
ARGS=
|
ARGS=
|
||||||
|
|
||||||
for ARG in $@; do
|
for ARG in $@; do
|
||||||
if [ -d "/media/embassy/embassyfs" ] && [ "$ARG" = "/" ]; then
|
if [ -d "/media/startos/root" ] && [ "$ARG" = "/" ]; then
|
||||||
ARG=/media/embassy/embassyfs
|
ARG=/media/startos/root
|
||||||
fi
|
fi
|
||||||
ARGS="$ARGS $ARG"
|
ARGS="$ARGS $ARG"
|
||||||
done
|
done
|
||||||
|
|||||||
20
build/lib/scripts/install-equivs
Executable file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash

# install-equivs — build and install a placeholder Debian package from an
# equivs control file supplied on stdin.
#
# Reads:  an equivs control file on stdin.
# Exits:  0 on success; the failing step's status otherwise.

export DEBIAN_FRONTEND=noninteractive
export DEBCONF_NONINTERACTIVE_SEEN=true

TMP_DIR=$(mktemp -d)

# Build and install inside a subshell so a failure cannot leave us cd'd into
# $TMP_DIR; set -e aborts the subshell on the first failing command.
(
    set -e
    cd "$TMP_DIR"

    # The control file arrives on stdin
    cat > control.equivs
    equivs-build control.equivs
    # stdin was consumed by `cat`; feed apt-get /dev/null so it cannot hang
    # waiting for interactive confirmation.
    apt-get install -y ./*.deb < /dev/null
)
# BUG FIX: the subshell's exit status was previously discarded — the script
# reported "Install complete." and exited 0 even when the build/install failed.
status=$?

rm -rf "$TMP_DIR"

if [ "$status" -ne 0 ]; then
    echo Install failed. >&2
    exit "$status"
fi

echo Install complete. >&2
exit 0
|
||||||
35
build/lib/scripts/prune-boot
Executable file
@@ -0,0 +1,35 @@
|
|||||||
|
#!/bin/bash

# prune-boot — delete kernel-related files in /boot (vmlinuz, initrd.img,
# System.map, config) whose version is older than the running kernel.

set -e

if [ "$UID" -ne 0 ]; then
    >&2 echo 'Must be run as root'
    exit 1
fi

# Get the current kernel version
current_kernel=$(uname -r)

echo "Current kernel: $current_kernel"
echo "Searching for old kernel files in /boot..."

cd /boot || { echo "/boot directory not found!"; exit 1; }

for file in vmlinuz-* initrd.img-* System.map-* config-*; do
    # BUG FIX: with nullglob unset, an unmatched glob stays literal
    # (e.g. "System.map-*"); skip such names instead of version-comparing them.
    [ -e "$file" ] || continue
    # Extract version from filename (first digit onward)
    version=$(echo "$file" | sed -E 's/^[^0-9]*([0-9][^ ]*).*/\1/')
    # Skip if file matches current kernel version
    if [[ "$file" == *"$current_kernel"* ]]; then
        continue
    fi
    # Compare versions, delete if less than current
    if dpkg --compare-versions "$version" lt "$current_kernel"; then
        echo "Deleting $file (version $version is older than $current_kernel)"
        # Already running as root — the previous `sudo` here was redundant
        rm -f "$file"
    fi
done
# (An unused `current_base` variable from the original was removed.)

echo "Old kernel files deleted."
|
||||||
53
build/lib/scripts/prune-images
Executable file
@@ -0,0 +1,53 @@
|
|||||||
|
#!/bin/bash

# prune-images — free disk space under /media/startos/images by deleting the
# oldest rootfs/squashfs images (never the currently-booted one) until at
# least <SPACE NEEDED> + MARGIN bytes are available.
#
# usage: prune-images <SPACE NEEDED>   (bytes)
# env:   MARGIN — extra bytes to keep free on top (default 1 GiB)

if [ "$UID" -ne 0 ]; then
    >&2 echo 'Must be run as root'
    exit 1
fi

POSITIONAL_ARGS=()

while [[ $# -gt 0 ]]; do
    case $1 in
        -*|--*)
            echo "Unknown option $1"
            exit 1
            ;;
        *)
            POSITIONAL_ARGS+=("$1") # save positional arg
            shift # past argument
            ;;
    esac
done

set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters

needed=$1

# ROBUSTNESS FIX: require a whole number of bytes. A non-numeric argument
# previously surfaced as a cryptic bash arithmetic error below (the old check
# only rejected an *empty* argument).
if ! [[ "$needed" =~ ^[0-9]+$ ]]; then
    >&2 echo "usage: $0 <SPACE NEEDED>"
    exit 1
fi

# Safety margin (default 1 GiB = 1073741824 bytes) kept free on top
MARGIN=${MARGIN:-1073741824}
target=$((needed + MARGIN))

# Only prune when current.rootfs is a live symlink — otherwise we cannot tell
# which image is in use and might delete the booted rootfs.
if [ -h /media/startos/config/current.rootfs ] && [ -e /media/startos/config/current.rootfs ]; then
    echo 'Pruning...'
    current="$(readlink -f /media/startos/config/current.rootfs)"
    while [[ "$(df -B1 --output=avail --sync /media/startos/images | tail -n1)" -lt "$target" ]]; do
        # Oldest image that is not the one currently in use
        to_prune="$(ls -t1 /media/startos/images/*.rootfs /media/startos/images/*.squashfs 2> /dev/null | grep -v "$current" | tail -n1)"
        if [ -e "$to_prune" ]; then
            echo " Pruning $to_prune"
            rm -rf "$to_prune"
            sync
        else
            >&2 echo "Not enough space and nothing to prune!"
            exit 1
        fi
    done
    echo 'done.'
else
    >&2 echo 'No current.rootfs, not safe to prune'
    exit 1
fi
|
||||||
115
build/lib/scripts/startos-initramfs-module
Executable file
@@ -0,0 +1,115 @@
|
|||||||
|
# Local filesystem mounting -*- shell-script -*-

#
# This script overrides local_mount_root() in /scripts/local
# and mounts root as a read-only filesystem with a temporary (rw)
# overlay filesystem.
#
# Sourced inside the initramfs; relies on the standard initramfs environment:
# ROOT, ROOTFLAGS, ROOTFSTYPE, DEV, rootmnt, and the helper functions
# provided by /scripts/local (local_top, local_device_setup, ...).

. /scripts/local

local_mount_root()
{
    echo 'using startos initramfs module'

    local_top
    local_device_setup "${ROOT}" "root file system"
    ROOT="${DEV}"

    # Get the root filesystem type if not set
    if [ -z "${ROOTFSTYPE}" ]; then
        FSTYPE=$(get_fstype "${ROOT}")
    else
        FSTYPE=${ROOTFSTYPE}
    fi

    local_premount

    # CHANGES TO THE ORIGINAL FUNCTION BEGIN HERE
    # N.B. this code still lacks error checking

    modprobe ${FSTYPE}
    checkfs ${ROOT} root "${FSTYPE}"

    # The backing StartOS partition is mounted at /startos, then the selected
    # image is layered read-only under an overlayfs that becomes the real root.
    echo 'mounting startos'
    mkdir /startos

    # Strip any subvol=next/current option left over from the legacy layout;
    # if that leaves a bare '-o', drop it entirely.
    ROOTFLAGS="$(echo "${ROOTFLAGS}" | sed 's/subvol=\(next\|current\)//' | sed 's/^-o *$//')"

    if [ "${FSTYPE}" != "unknown" ]; then
        mount -t ${FSTYPE} ${ROOTFLAGS} ${ROOT} /startos
    else
        mount ${ROOTFLAGS} ${ROOT} /startos
    fi

    if [ -d /startos/images ]; then
        # Modern layout: /startos/images holds versioned rootfs images.
        # Boot whatever current.rootfs points to; fall back to the newest image.
        if [ -h /startos/config/current.rootfs ] && [ -e /startos/config/current.rootfs ]; then
            image=$(readlink -f /startos/config/current.rootfs)
        else
            image="$(ls -t1 /startos/images/*.rootfs | head -n1)"
        fi
        if ! [ -f "$image" ]; then
            >&2 echo "image $image not available to boot"
            exit 1
        fi
    else
        # Legacy layout (next/current/prev directories): migrate it to the
        # image layout in place, preserving host-specific /etc state.
        if [ -f /startos/config/upgrade ] && [ -d /startos/next ]; then
            oldroot=/startos/next
        elif [ -d /startos/current ]; then
            oldroot=/startos/current
        elif [ -d /startos/prev ]; then
            oldroot=/startos/prev
        else
            >&2 echo no StartOS filesystem found
            exit 1
        fi

        # Carry machine identity and SSH host keys into the persistent overlay
        mkdir -p /startos/config/overlay/etc
        mv $oldroot/etc/fstab /startos/config/overlay/etc/fstab
        mv $oldroot/etc/machine-id /startos/config/overlay/etc/machine-id
        mv $oldroot/etc/ssh /startos/config/overlay/etc/ssh

        mkdir -p /startos/images
        mv $oldroot /startos/images/legacy.rootfs

        rm -rf /startos/next /startos/current /startos/prev

        # NOTE(review): the migrated root was just named legacy.rootfs, but
        # this symlink targets old.squashfs — the readlink below then yields a
        # path that does not exist unless old.squashfs is created elsewhere.
        # Looks like a name mismatch; confirm against the migration design.
        ln -rsf /startos/images/old.squashfs /startos/config/current.rootfs
        image=$(readlink -f /startos/config/current.rootfs)
    fi

    mkdir /lower /upper

    # The image may be a directory (migrated legacy root) or a squashfs file
    # (normal case, loop-mounted read-only).
    if [ -d "$image" ]; then
        mount -r --bind $image /lower
    elif [ -f "$image" ]; then
        modprobe loop
        modprobe squashfs
        mount -r $image /lower
    else
        >&2 echo "not a regular file or directory: $image"
        exit 1
    fi

    # If the initramfs lacks the overlay module, borrow it from the image itself
    modprobe overlay || insmod "/lower/lib/modules/$(uname -r)/kernel/fs/overlayfs/overlay.ko"

    # Mount a tmpfs for the overlay in /upper
    mount -t tmpfs tmpfs /upper
    mkdir /upper/data /upper/work

    mkdir -p /startos/config/overlay

    # Mount the final overlay-root in $rootmnt.
    # lowerdir layering: /startos/config/overlay (persistent config) takes
    # precedence over /lower (the read-only image); writes go to the tmpfs.
    mount -t overlay \
        -olowerdir=/startos/config/overlay:/lower,upperdir=/upper/data,workdir=/upper/work \
        overlay ${rootmnt}

    # Expose the backing stores inside the booted system under /media/startos
    mkdir -p ${rootmnt}/media/startos/config
    mount --bind /startos/config ${rootmnt}/media/startos/config
    mkdir -p ${rootmnt}/media/startos/images
    mount --bind /startos/images ${rootmnt}/media/startos/images
    mkdir -p ${rootmnt}/media/startos/root
    mount -r --bind /startos ${rootmnt}/media/startos/root
    mkdir -p ${rootmnt}/media/startos/current
    mount -r --bind /lower ${rootmnt}/media/startos/current
}
|
||||||
64
build/lib/scripts/tor-check
Executable file
@@ -0,0 +1,64 @@
|
|||||||
|
#!/bin/bash

# tor-check — probe a list of .onion URLs through each configured Tor SOCKS5
# proxy and print a colored pass/fail report per URL.

# --- Config ---
# Colors (using printf to ensure compatibility)
GRAY=$(printf '\033[90m')
GREEN=$(printf '\033[32m')
RED=$(printf '\033[31m')
NC=$(printf '\033[0m') # No Color

# Proxies to test — "label|host:port", split on the first '|' below
proxies=(
    "Host Tor|127.0.1.1:9050"
    "Startd Tor|10.0.3.1:9050"
)

# Default URLs — "name|url" pairs
onion_list=(
    "The Tor Project|http://2gzyxa5ihm7nsggfxnu52rck2vv4rvmdlkiu3zzui5du4xyclen53wid.onion"
    "Start9|http://privacy34kn4ez3y3nijweec6w4g54i3g54sdv7r5mr6soma3w4begyd.onion"
    "Mempool|http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion"
    "DuckDuckGo|https://duckduckgogg42xjoc72x3sjasowoarfbgcmvfimaftt6twagswzczad.onion"
    "Brave Search|https://search.brave4u7jddbv7cyviptqjc7jusxh72uik7zt6adtckl5f4nwy2v72qd.onion"
)

# Load custom list: extra "name|url" entries from ~/.startos/tor-check.list,
# '#'-prefixed lines ignored; the whole chain is a no-op when the file is absent.
[ -f ~/.startos/tor-check.list ] && readarray -t custom_list < <(grep -v '^#' ~/.startos/tor-check.list) && onion_list+=("${custom_list[@]}")

# --- Functions ---
# Gray horizontal separator
print_line() { printf "${GRAY}────────────────────────────────────────${NC}\n"; }

# --- Main ---
echo "Testing Onion Connections..."

for proxy_info in "${proxies[@]}"; do
    # Split "label|host:port" on the first '|'
    proxy_name="${proxy_info%%|*}"
    proxy_addr="${proxy_info#*|}"

    print_line
    printf "${GRAY}Proxy: %s (%s)${NC}\n" "$proxy_name" "$proxy_addr"

    for data in "${onion_list[@]}"; do
        name="${data%%|*}"
        url="${data#*|}"

        # Capture verbose output + http code.
        # --no-progress-meter: Suppresses the "0 0 0" stats but keeps -v output
        output=$(curl -v --no-progress-meter --max-time 15 --socks5-hostname "$proxy_addr" "$url" 2>&1)
        exit_code=$?

        if [ $exit_code -eq 0 ]; then
            printf " ${GREEN}[pass]${NC} %s (%s)\n" "$name" "$url"
        else
            printf " ${RED}[fail]${NC} %s (%s)\n" "$name" "$url"
            printf " ${RED}↳ Curl Error %s${NC}\n" "$exit_code"

            # Print the last 4 lines of verbose log to show the specific handshake error
            # We look for lines starting with '*' or '>' or '<' to filter out junk if any remains
            echo "$output" | tail -n 4 | sed "s/^/ ${GRAY}/"
        fi
    done
done
print_line
# Reset color just in case
printf "${NC}"
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
fail=$(printf " [\033[31m fail \033[0m]")
|
|
||||||
pass=$(printf " [\033[32m pass \033[0m]")
|
|
||||||
|
|
||||||
onion_list=(
|
|
||||||
"Start9|http://privacy34kn4ez3y3nijweec6w4g54i3g54sdv7r5mr6soma3w4begyd.onion"
|
|
||||||
"Mempool|http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion"
|
|
||||||
"DuckDuckGo|https://duckduckgogg42xjoc72x3sjasowoarfbgcmvfimaftt6twagswzczad.onion"
|
|
||||||
"Brave Search|https://search.brave4u7jddbv7cyviptqjc7jusxh72uik7zt6adtckl5f4nwy2v72qd.onion"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Check if ~/.startos/tor-check.list exists and read its contents if available
|
|
||||||
if [ -f ~/.startos/tor-check.list ]; then
|
|
||||||
while IFS= read -r line; do
|
|
||||||
# Check if the line starts with a #
|
|
||||||
if [[ ! "$line" =~ ^# ]]; then
|
|
||||||
onion_list+=("$line")
|
|
||||||
fi
|
|
||||||
done < ~/.startos/tor-check.list
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo "Testing connection to Onion Pages ..."
|
|
||||||
|
|
||||||
for data in "${onion_list[@]}"; do
|
|
||||||
name="${data%%|*}"
|
|
||||||
url="${data#*|}"
|
|
||||||
if curl --socks5-hostname localhost:9050 "$url" > /dev/null 2>&1; then
|
|
||||||
echo " ${pass}: $name ($url) "
|
|
||||||
else
|
|
||||||
echo " ${fail}: $name ($url) "
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
|
|
||||||
echo
|
|
||||||
echo "Done."
|
|
||||||
82
build/lib/scripts/upgrade
Executable file
@@ -0,0 +1,82 @@
|
|||||||
|
#!/bin/bash

# upgrade — apply a StartOS system image (squashfs) as the new root filesystem.
#
# usage: upgrade <SQUASHFS> [CHECKSUM]
#   SQUASHFS  path to the new system image (consumed: moved into images/)
#   CHECKSUM  optional expected b3sum prefix (first 32 hex chars); mismatch
#             aborts with exit 2.

set -e

SOURCE_DIR="$(dirname $(realpath "${BASH_SOURCE[0]}"))"

if [ "$UID" -ne 0 ]; then
    >&2 echo 'Must be run as root'
    exit 1
fi

if ! [ -f "$1" ]; then
    >&2 echo "usage: $0 <SQUASHFS>"
    exit 1
fi

echo 'Upgrading...'

# First 32 hex chars of the image's BLAKE3 hash; also used as the image's
# on-disk name below.
hash=$(b3sum $1 | head -c 32)
# BUG FIX: previously compared against the (usually unset) $CHECKSUM
# environment variable instead of the checksum argument itself.
if [ -n "$2" ] && [ "$hash" != "$2" ]; then
    >&2 echo 'Checksum mismatch'
    exit 2
fi

# Extract the new /boot contents (kernel, initrd, ...) onto the live system
unsquashfs -f -d / $1 boot

# Tear down any mounts left over from a previous (interrupted) upgrade
umount -R /media/startos/next 2> /dev/null || true
umount /media/startos/upper 2> /dev/null || true
umount /media/startos/lower 2> /dev/null || true

# Build a writable overlay view of the new image so we can chroot into it
mkdir -p /media/startos/upper
mount -t tmpfs tmpfs /media/startos/upper
mkdir -p /media/startos/lower /media/startos/upper/data /media/startos/upper/work /media/startos/next
mount $1 /media/startos/lower
mount -t overlay \
    -olowerdir=/media/startos/lower,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \
    overlay /media/startos/next

# Bind the live system's kernel interfaces and /boot into the chroot
mkdir -p /media/startos/next/run
mkdir -p /media/startos/next/tmp   # FIX: was bind-mounted below without being created
mkdir -p /media/startos/next/dev
mkdir -p /media/startos/next/sys
mkdir -p /media/startos/next/proc
mkdir -p /media/startos/next/boot
mkdir -p /media/startos/next/media/startos/root
mount --bind /run /media/startos/next/run
mount --bind /tmp /media/startos/next/tmp
mount --bind /dev /media/startos/next/dev
mount --bind /sys /media/startos/next/sys
mount --bind /proc /media/startos/next/proc
mount --bind /boot /media/startos/next/boot
mount --bind /media/startos/root /media/startos/next/media/startos/root

# FIX: redirect order — '2>&1 > /dev/null' sent stderr to the terminal;
# both streams should be discarded when probing mountpoints.
if mountpoint /boot/efi > /dev/null 2>&1; then
    mkdir -p /media/startos/next/boot/efi
    mount --bind /boot/efi /media/startos/next/boot/efi
fi

if mountpoint /sys/firmware/efi/efivars > /dev/null 2>&1; then
    mount --bind /sys/firmware/efi/efivars /media/startos/next/sys/firmware/efi/efivars
fi

# Re-install the bootloader from inside the new image (heredoc is quoted, so
# everything expands inside the chroot, not here)
chroot /media/startos/next bash -e << "EOF"

if [ -f /boot/grub/grub.cfg ]; then
    grub-install /dev/$(eval $(lsblk -o MOUNTPOINT,PKNAME -P | grep 'MOUNTPOINT="/media/startos/root"') && echo $PKNAME)
    update-grub
fi

EOF

sync

umount -Rl /media/startos/next
umount /media/startos/upper
umount /media/startos/lower

# Archive the image under its hash and point current.rootfs at it for the
# initramfs to pick up on next boot
mv $1 /media/startos/images/${hash}.rootfs
ln -rsf /media/startos/images/${hash}.rootfs /media/startos/config/current.rootfs

sync

echo 'System upgrade complete. Reboot to apply changes...'
|
||||||
555
build/lib/scripts/wireguard-vps-proxy-setup
Executable file
@@ -0,0 +1,555 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# Wireguard VPS Proxy Setup
|
||||||
|
# =============================================================================
|
||||||
|
#
|
||||||
|
# This script automates the setup of a WireGuard VPN server on a remote VPS
|
||||||
|
# for StartOS Clearnet functionality. It handles:
|
||||||
|
#
|
||||||
|
# 1. SSH key-based authentication setup
|
||||||
|
# 2. Root access configuration (if needed)
|
||||||
|
# 3. WireGuard server installation
|
||||||
|
# 4. Configuration file generation and import
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# wireguard-vps-proxy-setup [-h] [-i IP] [-u USERNAME] [-p PORT] [-k SSH_KEY]
|
||||||
|
#
|
||||||
|
# Options:
|
||||||
|
# -h Show help message
|
||||||
|
# -i VPS IP address
|
||||||
|
# -u SSH username (default: root)
|
||||||
|
# -p SSH port (default: 22)
|
||||||
|
# -k Path to custom SSH private key
|
||||||
|
#
|
||||||
|
# Example:
|
||||||
|
# wireguard-vps-proxy-setup -i 110.18.1.1 -u debian
|
||||||
|
#
|
||||||
|
# Note: This script requires root privileges and will auto-elevate if needed.
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Colors for better output (ANSI escape sequences; rendered via echo -e / printf %b)
RED='\033[0;31m'
GREEN='\033[0;32m'
BLUE='\033[1;34m'
YELLOW='\033[1;33m'
NC='\033[0;37m' # No Color
# NOTE(review): '\033[0;37m' selects light gray rather than resetting all
# attributes ('\033[0m') — presumably intentional for this UI; confirm.

# --- Constants ---
# WireGuard server installer fetched by download_install_script
readonly WIREGUARD_INSTALL_URL="https://raw.githubusercontent.com/start9labs/wireguard-vps-proxy-setup/master/wireguard-install.sh"
# Key pair used for passwordless SSH to the VPS (owned by the start9 user)
readonly SSH_KEY_DIR="/home/start9/.ssh"
readonly SSH_KEY_NAME="id_ed25519"
readonly SSH_PRIVATE_KEY="$SSH_KEY_DIR/$SSH_KEY_NAME"
readonly SSH_PUBLIC_KEY="$SSH_PRIVATE_KEY.pub"

# Store original arguments (so check_root can re-exec under sudo with the
# exact same argument vector)
SCRIPT_ARGS=("$@")
|
||||||
|
|
||||||
|
# --- Functions ---
|
||||||
|
|
||||||
|
# Function to ensure the script runs with root privileges, auto-elevating via
# sudo if needed, then handing ownership of the SSH key directory back to the
# start9 user (keys created while running as root would otherwise be unusable).
check_root() {
    if [[ "$EUID" -ne 0 ]]; then
        exec sudo "$0" "${SCRIPT_ARGS[@]}"
    fi
    # FIX: we are root past the exec above, so `sudo` was redundant here; and
    # skip quietly if the key directory has not been created yet instead of
    # letting chown print an error.
    if [ -d "$SSH_KEY_DIR" ]; then
        chown -R start9:startos "$SSH_KEY_DIR"
    fi
}
|
||||||
|
|
||||||
|
# Render the script's title banner in color on stdout.
print_banner() {
    local bar="================================================"
    printf '%b\n' "${BLUE}"
    printf '%s\n' "$bar"
    printf '%b\n' " ${NC}Wireguard VPS Proxy Setup${BLUE} "
    printf '%s\n' "$bar"
    printf '%b\n' "${NC}"
}
|
||||||
|
|
||||||
|
# Print the command-line usage summary to stdout.
# (Unquoted heredoc delimiter so $0 and $SSH_PRIVATE_KEY expand as before.)
print_usage() {
    cat <<EOF
Usage: $0 [-h] [-i IP] [-u USERNAME] [-p PORT] [-k SSH_KEY]
Options:
 -h Show this help message
 -i VPS IP address
 -u SSH username (default: root)
 -p SSH port (default: 22)
 -k Path to the custom SSH private key (optional)
 If no key is provided, the default key '$SSH_PRIVATE_KEY' will be used.
EOF
}
|
||||||
|
|
||||||
|
# Print the closing success banner once the WireGuard config is installed.
# Reads the global VPS_IP populated during argument parsing.
display_end_message() {
    local rule="${BLUE}------------------------------------------------------------------${NC}"
    printf '%b\n' "\n$rule"
    printf '%b\n' "${GREEN}Wireguard VPS Proxy server setup complete!${NC}"
    printf '%b\n' "$rule"
    printf '%b\n' "\n${GREEN}Clearnet functionality has been enabled via VPS (${VPS_IP})${NC}"
    printf '%b\n' "\n${YELLOW}Next steps:${NC}"
    printf '%b\n' "Visit https://docs.start9.com to complete the Clearnet setup"
    printf '%b\n' "\n$rule"
}
|
||||||
|
|
||||||
|
# Function to validate an IP address.
# $1: candidate string. Returns 0 for a valid IPv4 dotted quad (each octet
# 0-255) or an IPv6 address; returns 1 otherwise.
#
# FIX: the original IPv6 alternation list contained exact duplicates and many
# patterns fully subsumed by broader ones (e.g. "{5}(:h){1,2}" within
# "{1,5}(:h){1,2}"); the list below accepts exactly the same set of strings
# with the redundancy removed.
# NOTE(review): these patterns still over-accept some malformed IPv6 forms
# (group counts around '::' can exceed 8 total) — confirm whether stricter
# validation is required.
validate_ip() {
    local ip=$1
    # IPv4 validation
    if [[ $ip =~ ^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$ ]]; then
        # Additional IPv4 validation to ensure each octet is <= 255
        local IFS='.'
        read -ra ADDR <<< "$ip"
        for i in "${ADDR[@]}"; do
            if [ "$i" -gt 255 ]; then
                return 1
            fi
        done
        return 0
    # IPv6 validation: full 8-group form, '::' compressions at every
    # position, leading/trailing '::', and bare '::'
    elif [[ $ip =~ ^([0-9a-fA-F]{1,4}:){7}[0-9a-fA-F]{1,4}$ ]] || \
         [[ $ip =~ ^([0-9a-fA-F]{1,4}:){1}(:[0-9a-fA-F]{1,4}){1,6}$ ]] || \
         [[ $ip =~ ^([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}$ ]] || \
         [[ $ip =~ ^([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}$ ]] || \
         [[ $ip =~ ^([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}$ ]] || \
         [[ $ip =~ ^([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}$ ]] || \
         [[ $ip =~ ^([0-9a-fA-F]{1,4}:){1,6}(:[0-9a-fA-F]{1,4}){1,1}$ ]] || \
         [[ $ip =~ ^([0-9a-fA-F]{1,4}:){1,7}:$ ]] || \
         [[ $ip =~ ^::([0-9a-fA-F]{1,4}:){0,7}[0-9a-fA-F]{1,4}$ ]] || \
         [[ $ip =~ ^[0-9a-fA-F]{1,4}::([0-9a-fA-F]{1,4}:){0,6}[0-9a-fA-F]{1,4}$ ]] || \
         [[ $ip =~ ^::$ ]]; then
        return 0
    else
        return 1
    fi
}
|
||||||
|
|
||||||
|
# Function for configuring SSH key authentication on the remote server.
# Edits the VPS's /etc/ssh/sshd_config over the already-established SSH
# session (uncommenting/forcing PubkeyAuthentication, PermitRootLogin, and
# AuthorizedKeysFile), then reloads sshd.
# Uses the globals SSH_PRIVATE_KEY, SSH_PORT, SSH_USER, VPS_IP.
# NOTE(review): this sets 'PermitRootLogin yes' on the VPS — a security-
# relevant relaxation; confirm it is acceptable for the target host.
# The remote command block below is a single-quoted literal: every $ and sed
# expression expands on the REMOTE side, not locally.
configure_ssh_key_auth() {
    echo -e "${BLUE}Configuring SSH key authentication on remote server...${NC}"

    ssh -i "$SSH_PRIVATE_KEY" -o StrictHostKeyChecking=no -p "$SSH_PORT" "$SSH_USER@$VPS_IP" '
    # Check if PubkeyAuthentication is commented out
    if grep -q "^#PubkeyAuthentication" /etc/ssh/sshd_config; then
        sed -i "s/^#PubkeyAuthentication.*/PubkeyAuthentication yes/" /etc/ssh/sshd_config
    # Check if PubkeyAuthentication exists but is not enabled
    elif grep -q "^PubkeyAuthentication" /etc/ssh/sshd_config; then
        sed -i "s/^PubkeyAuthentication.*/PubkeyAuthentication yes/" /etc/ssh/sshd_config
    # Add PubkeyAuthentication if it doesnt exist
    else
        echo "PubkeyAuthentication yes" >> /etc/ssh/sshd_config
    fi

    # Enable root login
    if grep -q "^#PermitRootLogin" /etc/ssh/sshd_config; then
        sed -i "s/^#PermitRootLogin.*/PermitRootLogin yes/" /etc/ssh/sshd_config
    elif grep -q "^PermitRootLogin" /etc/ssh/sshd_config; then
        sed -i "s/^PermitRootLogin.*/PermitRootLogin yes/" /etc/ssh/sshd_config
    else
        echo "PermitRootLogin yes" >> /etc/ssh/sshd_config
    fi

    # Configure AuthorizedKeysFile if needed
    if grep -q "^#AuthorizedKeysFile" /etc/ssh/sshd_config; then
        sed -i "s/^#AuthorizedKeysFile.*/AuthorizedKeysFile .ssh\/authorized_keys .ssh\/authorized_keys2/" /etc/ssh/sshd_config
    elif ! grep -q "^AuthorizedKeysFile" /etc/ssh/sshd_config; then
        echo "AuthorizedKeysFile .ssh/authorized_keys .ssh/authorized_keys2" >> /etc/ssh/sshd_config
    fi

    # Reload SSH service
    systemctl reload sshd
    '
}
|
||||||
|
|
||||||
|
# Locate the newest WireGuard client config (*.conf) in the remote user's home
# directory and download it into the current directory.
# Sets the global CONFIG_NAME on success; returns 1 on any failure.
handle_startos_connection() {
    echo -e "${BLUE}Fetching the WireGuard configuration file...${NC}"

    # Newest .conf on the VPS, if any (empty string when none exist)
    config_file=$(ssh -i "$SSH_PRIVATE_KEY" -o StrictHostKeyChecking=no -p "$SSH_PORT" "$SSH_USER@$VPS_IP" 'ls -t ~/*.conf 2>/dev/null | head -n 1')

    if [ -z "$config_file" ]; then
        echo -e "${RED}Error: No WireGuard configuration file found on the remote server.${NC}"
        return 1
    fi

    CONFIG_NAME=$(basename "$config_file")

    # Copy it down; note scp uses -P (capital) for the port
    if scp -i "$SSH_PRIVATE_KEY" -o StrictHostKeyChecking=no -P "$SSH_PORT" "$SSH_USER@$VPS_IP":~/"$CONFIG_NAME" ./; then
        echo -e "${GREEN}WireGuard configuration file '$CONFIG_NAME' downloaded successfully.${NC}"
        return 0
    fi

    echo -e "${RED}Error: Failed to download the WireGuard configuration file.${NC}"
    return 1
}
|
||||||
|
|
||||||
|
# Function to import a WireGuard configuration into NetworkManager.
# $1: path of the downloaded .conf file. The file is deleted after a
# successful import, after a failed import, and when the user declines the
# override prompt; it is kept only when nmcli is missing entirely.
# NOTE(review): the duplicate check scans only *active* connections
# ('nmcli connection show --active'), so an inactive connection with the same
# name is not detected and the import below may then fail — confirm intended.
import_wireguard_config() {
    local config_name="$1"
    if [ -z "$config_name" ]; then
        echo -e "${RED}Error: Configuration file name is missing.${NC}"
        return 1
    fi

    local connection_name=$(basename "$config_name" .conf) #Extract base name without extension

    # Check if the connection with same name already exists
    if nmcli connection show --active | grep -q "^${connection_name}\s"; then
        read -r -p "A connection with the name '$connection_name' already exists. Do you want to override it? (y/N): " answer
        if [[ "$answer" =~ ^[Yy]$ ]]; then
            # Replace the existing connection before importing
            nmcli connection delete "$connection_name"
            if [ $? -ne 0 ]; then
                echo -e "${RED}Error: Failed to delete existing connection '$connection_name'.${NC}"
                return 1
            fi
            # Import if user chose to override or if connection did not exist
            if ! nmcli connection import type wireguard file "$config_name"; then
                echo -e "${RED}Error: Failed to import the WireGuard configuration using NetworkManager.${NC}"
                rm -f "$config_name"
                return 1
            fi
            echo -e "${GREEN}WireGuard configuration '$config_name' has been imported to NetworkManager.${NC}"
            rm -f "$config_name"
            display_end_message
        else
            # User declined the override: keep the existing connection
            echo -e "${BLUE}Skipping import of the WireGuard configuration.${NC}"
            rm -f "$config_name"
            return 0
        fi
    else
        # Import if connection did not exist
        # (nmcli availability is checked here because this branch is also the
        # first nmcli use when no duplicate was found)
        if command -v nmcli &>/dev/null; then
            if ! nmcli connection import type wireguard file "$config_name"; then
                echo -e "${RED}Error: Failed to import the WireGuard configuration using NetworkManager.${NC}"
                rm -f "$config_name"
                return 1
            fi
            echo -e "${GREEN}WireGuard configuration '$config_name' has been imported to NetworkManager.${NC}"
            rm -f "$config_name"
            display_end_message
        else
            # No NetworkManager: leave the file in place for manual import
            echo -e "${YELLOW}Warning: NetworkManager 'nmcli' not found. Configuration file '$config_name' saved in current directory.${NC}"
            echo -e "${YELLOW}Import the configuration to your StartOS manually by going to NetworkManager or using wg-quick up <config> command${NC}"
        fi
    fi
    return 0
}
|
||||||
|
|
||||||
|
# Function to download the install script
|
||||||
|
# Download the latest WireGuard install script from $WIREGUARD_INSTALL_URL
# into ./wireguard-install.sh and make it executable.
#
# Returns 0 on success, 1 if the download or chmod fails.
download_install_script() {
    echo -e "${BLUE}Downloading latest WireGuard install script...${NC}"

    # -sS: quiet but still show errors; -f: fail on HTTP errors so a 404
    # page is not saved as the script.
    if ! curl -sSf "$WIREGUARD_INSTALL_URL" -o wireguard-install.sh; then
        echo -e "${RED}Failed to download WireGuard installation script.${NC}"
        return 1
    fi

    # Test the command directly instead of inspecting $? afterwards.
    if ! chmod +x wireguard-install.sh; then
        echo -e "${RED}Failed to chmod +x wireguard install script.${NC}"
        return 1
    fi

    echo -e "${GREEN}WireGuard install script downloaded successfully!${NC}"
    return 0
}
|
||||||
|
|
||||||
|
# Function to install WireGuard
|
||||||
|
# Install WireGuard on the remote VPS by running the previously uploaded
# install script over an interactive SSH session, then verify the server
# configuration was actually created.
#
# Uses globals: SSH_PRIVATE_KEY, SSH_PORT, SSH_USER, VPS_IP, color vars.
# Returns 0 on success, 1 on any failure.
install_wireguard() {
    echo -e "\n${BLUE}Installing WireGuard...${NC}"

    # The script must have been downloaded locally (and uploaded) first.
    # (Fixed user-facing grammar: "Did it fail", not "Did it failed".)
    if [ ! -f "wireguard-install.sh" ]; then
        echo -e "${RED}WireGuard install script is missing. Did it fail to download?${NC}"
        return 1
    fi

    # -t allocates a TTY so the remote installer can prompt interactively;
    # STARTOS_HOSTNAME is exported for the installer's use.
    if ! ssh -o ConnectTimeout=60 -i "$SSH_PRIVATE_KEY" -o StrictHostKeyChecking=no -p "$SSH_PORT" -t "$SSH_USER@$VPS_IP" "bash -c 'export TERM=xterm-256color; export STARTOS_HOSTNAME=clearnet; bash ~/wireguard-install.sh'"; then
        echo -e "${RED}WireGuard installation failed on remote server.${NC}"
        return 1
    fi

    # The installer removes wg0.conf when it uninstalls, so its presence
    # is the definitive success signal.
    if ! ssh -q -o BatchMode=yes -o ConnectTimeout=5 -i "$SSH_PRIVATE_KEY" -o StrictHostKeyChecking=no -p "$SSH_PORT" "$SSH_USER@$VPS_IP" "test -f /etc/wireguard/wg0.conf"; then
        echo -e "\n${RED}WireGuard installation failed because /etc/wireguard/wg0.conf is missing, which means the script removed it.${NC}"
        return 1
    fi

    echo -e "\n${GREEN}WireGuard installation completed successfully!${NC}"
    return 0
}
|
||||||
|
|
||||||
|
# Function to enable root login via SSH
|
||||||
|
# Configure the remote VPS (reached over SSH as a non-root sudo user) to
# allow key-only root SSH login: sets PermitRootLogin prohibit-password,
# disables password authentication, and copies the user's authorized_keys
# to /root/.ssh so the operator can re-run the script as root.
#
# Uses globals: SSH_PRIVATE_KEY, SSH_PORT, SSH_USER, VPS_IP, color vars.
# Returns 0 on success, 1 on failure.
enable_root_login() {
    echo -e "${BLUE}Checking and configuring root SSH access...${NC}"

    # Try to modify sshd config using sudo. The whole remote script runs in
    # one single-quoted SSH command; ANSI color codes are written literally
    # because the local ${RED}/${NC} variables do not exist remotely.
    if ! ssh -i "$SSH_PRIVATE_KEY" -o StrictHostKeyChecking=no -p "$SSH_PORT" "$SSH_USER@$VPS_IP" '
        # Check if we can use sudo without password
        if ! sudo -n true 2>/dev/null; then
            echo -e "\033[1;33mNOTE: You may be prompted for your sudo password.\033[0m"
        fi

        # Check if user is in sudo group
        if ! groups | grep -q sudo; then
            echo -e "\033[1;31mError: Your user is not in the sudo group. Root access cannot be configured.\033[0m"
            exit 1
        fi

        # Backup sshd config before editing
        sudo cp /etc/ssh/sshd_config /etc/ssh/sshd_config.bak

        # Enable root login with SSH keys only (prohibit-password)
        if sudo grep -q "^PermitRootLogin" /etc/ssh/sshd_config; then
            sudo sed -i "s/^PermitRootLogin.*/PermitRootLogin prohibit-password/" /etc/ssh/sshd_config
        else
            echo "PermitRootLogin prohibit-password" | sudo tee -a /etc/ssh/sshd_config
        fi

        # Ensure password authentication is disabled
        if sudo grep -q "^PasswordAuthentication" /etc/ssh/sshd_config; then
            sudo sed -i "s/^PasswordAuthentication.*/PasswordAuthentication no/" /etc/ssh/sshd_config
        else
            echo "PasswordAuthentication no" | sudo tee -a /etc/ssh/sshd_config
        fi

        # Set up root SSH directory and keys (reuse the current user'"'"'s
        # authorized_keys so the same key works for root)
        echo -e "\033[1;33mSetting up root SSH access...\033[0m"
        sudo mkdir -p /root/.ssh
        sudo cp ~/.ssh/authorized_keys /root/.ssh/
        sudo chown -R root:root /root/.ssh
        sudo chmod 700 /root/.ssh
        sudo chmod 600 /root/.ssh/authorized_keys

        # Reload SSH service
        # NOTE(review): some Debian/Ubuntu releases name the unit "ssh"
        # rather than "sshd" -- confirm against the target distro.
        sudo systemctl reload sshd

        # Verify the changes took effect
        if ! sudo grep -q "^PermitRootLogin prohibit-password" /etc/ssh/sshd_config; then
            echo -e "\033[1;31mError: Failed to verify root login configuration.\033[0m"
            exit 1
        fi

        # Test root SSH access
        if ! sudo -n true 2>/dev/null; then
            echo -e "\033[1;33mNOTE: Please try to log in as root now using your SSH key.\033[0m"
            echo -e "\033[1;33mIf successful, run this script again without the -u parameter.\033[0m"
        else
            echo -e "\033[1;32mRoot SSH access has been configured successfully!\033[0m"
        fi
    '; then
        echo -e "${RED}Failed to configure root SSH access.${NC}"
        return 1
    fi

    echo -e "${GREEN}Root SSH access has been configured successfully!${NC}"
    echo -e "${YELLOW}Please try to log in as root now using your SSH key. If successful, run this script again without the -u parameter.${NC}"
    return 0
}
|
||||||
|
|
||||||
|
# --- Main Script ---
# Flow: parse options, resolve SSH keys, confirm the VPS address, set up
# key-based SSH (enabling root access first if connecting as non-root),
# then download/upload/run the WireGuard installer and import the
# resulting client configuration locally.

# Initialize variables. SSH_PRIVATE_KEY / SSH_PUBLIC_KEY defaults are
# assumed to be set earlier in the script; -k overrides them below.
VPS_IP=""
SSH_USER="root"
SSH_PORT="22"
CUSTOM_SSH_KEY=""
CONFIG_NAME=""

# Check if the script is run as root before anything else
check_root

# Print banner
print_banner

# Parse command line arguments: -h help, -i IP, -u user, -p port, -k key
while getopts "hi:u:p:k:" opt; do
    case $opt in
        h)
            print_usage
            exit 0
            ;;
        i)
            VPS_IP=$OPTARG
            ;;
        u)
            SSH_USER=$OPTARG
            ;;
        p)
            SSH_PORT=$OPTARG
            ;;
        k)
            CUSTOM_SSH_KEY=$OPTARG
            ;;
        \?)
            echo "Invalid option: -$OPTARG" >&2
            print_usage
            exit 1
            ;;
    esac
done

# Check if custom SSH key is passed and update the private key variable.
# The public key is assumed to live next to the private key as "<key>.pub".
if [ -n "$CUSTOM_SSH_KEY" ]; then
    if [ ! -f "$CUSTOM_SSH_KEY" ]; then
        echo -e "${RED}Custom SSH key '$CUSTOM_SSH_KEY' not found.${NC}"
        exit 1
    fi
    SSH_PRIVATE_KEY="$CUSTOM_SSH_KEY"
    SSH_PUBLIC_KEY="$CUSTOM_SSH_KEY.pub"
else
    # Use default StartOS SSH key
    if [ ! -f "$SSH_PRIVATE_KEY" ]; then
        echo -e "${RED}No SSH key found at default location '$SSH_PRIVATE_KEY'. Please ensure StartOS SSH keys are properly configured.${NC}"
        exit 1
    fi
fi

if [ ! -f "$SSH_PUBLIC_KEY" ]; then
    echo -e "${RED}Public key '$SSH_PUBLIC_KEY' not found. Please ensure both private and public keys exist.${NC}"
    exit 1
fi

# If VPS_IP is not provided via command line, ask for it
if [ -z "$VPS_IP" ]; then
    while true; do
        echo -n "Please enter your VPS IP address: "
        # NOTE(review): read without -r interprets backslashes; harmless
        # for IP addresses but worth confirming.
        read VPS_IP
        if validate_ip "$VPS_IP"; then
            break
        else
            echo -e "${RED}Invalid IP address format. Please try again.${NC}"
        fi
    done
fi

# Confirm SSH connection details
echo -e "\n${GREEN}Connection details:${NC}"
echo "VPS IP: $VPS_IP"
echo "SSH User: $SSH_USER"
echo "SSH Port: $SSH_PORT"

echo -e "\n${GREEN}Proceeding with SSH key-based authentication...${NC}\n"

# Copy SSH public key to the remote server (may prompt for the password)
if ! ssh-copy-id -i "$SSH_PUBLIC_KEY" -o StrictHostKeyChecking=no -p "$SSH_PORT" "$SSH_USER@$VPS_IP"; then
    echo -e "${RED}Failed to copy SSH key to the remote server. Please ensure you have correct credentials.${NC}"
    exit 1
fi

echo -e "${GREEN}SSH key-based authentication configured successfully!${NC}"

# Test SSH connection using key-based authentication. BatchMode disables
# password prompts so this fails fast if the key was not accepted.
echo -e "\nTesting SSH connection with key-based authentication..."
if ! ssh -q -o BatchMode=yes -o ConnectTimeout=5 -i "$SSH_PRIVATE_KEY" -o StrictHostKeyChecking=no -p "$SSH_PORT" "$SSH_USER@$VPS_IP" 'exit'; then
    echo -e "${RED}SSH connection test failed. Please check your credentials and try again.${NC}"
    exit 1
fi

# If we're connecting as a non-root user, set up root access first and
# exit; the user re-runs the script as root afterwards.
if [ "$SSH_USER" != "root" ]; then
    echo -e "\n${YELLOW}You are connecting as a non-root user. This script needs to enable root SSH access.${NC}"
    echo -e "${YELLOW}This is a one-time setup that will allow direct root login for WireGuard installation.${NC}"
    echo -n -e "${YELLOW}Would you like to proceed? (y/N): ${NC}"
    read -r answer

    if [[ "$answer" =~ ^[Yy]$ ]]; then
        if enable_root_login; then
            echo -e "\n${BLUE}------------------------------------------------------------------${NC}"
            echo -e "${GREEN}Root SSH access has been configured successfully!${NC}"
            echo -e "${YELLOW}Please run this script again without the -u parameter to continue setup.${NC}"
            echo -e "${BLUE}------------------------------------------------------------------${NC}"
            exit 0
        else
            echo -e "${RED}Failed to configure root SSH access. Please check your sudo privileges and try again.${NC}"
            exit 1
        fi
    else
        # User declined automatic setup: print manual instructions.
        echo -e "\n${BLUE}------------------------------------------------------------------${NC}"
        echo -e "${YELLOW}To manually configure SSH for root access:${NC}"
        echo -e "\n ${YELLOW}1. Connect to your VPS and edit sshd_config:${NC}"
        echo "    sudo nano /etc/ssh/sshd_config"
        echo -e "\n ${YELLOW}2. Find and uncomment or add these lines:${NC}"
        echo "    PubkeyAuthentication yes"
        echo "    PermitRootLogin yes"
        echo "    AuthorizedKeysFile .ssh/authorized_keys .ssh/authorized_keys2"
        echo -e "\n ${YELLOW}3. Restart the SSH service:${NC}"
        echo "    sudo systemctl restart sshd"
        echo -e "\n ${YELLOW}4. Copy your SSH key to root user:${NC}"
        echo "    sudo mkdir -p /root/.ssh"
        echo "    sudo cp ~/.ssh/authorized_keys /root/.ssh/"
        echo "    sudo chown -R root:root /root/.ssh"
        echo "    sudo chmod 700 /root/.ssh"
        echo "    sudo chmod 600 /root/.ssh/authorized_keys"
        echo -e "${BLUE}------------------------------------------------------------------${NC}"
        echo -e "\n${YELLOW}After completing these steps, run this script again without the -u parameter.${NC}"
        exit 1
    fi
fi

# Check if root login is permitted when connecting as root
if [ "$SSH_USER" = "root" ]; then
    # Check for both "yes" and "prohibit-password" as valid root login settings
    if ! ssh -q -o BatchMode=yes -o ConnectTimeout=5 -i "$SSH_PRIVATE_KEY" -o StrictHostKeyChecking=no -p "$SSH_PORT" "$SSH_USER@$VPS_IP" 'grep -q "^PermitRootLogin.*\(yes\|prohibit-password\)" /etc/ssh/sshd_config'; then
        echo -e "\n${RED}Root SSH login is not enabled on your VPS.${NC}"
        echo -e "\n${YELLOW}Would you like this script to automatically enable root SSH access? (y/N):${NC} "
        read -r answer

        if [[ "$answer" =~ ^[Yy]$ ]]; then
            # NOTE(review): configure_ssh_key_auth is defined elsewhere in
            # this script -- verify it covers the root-login case.
            configure_ssh_key_auth
        else
            echo -e "\n${BLUE}------------------------------------------------------------------${NC}"
            echo -e "${YELLOW}To manually configure SSH for root access:${NC}"
            echo -e "\n ${YELLOW}1. Connect to your VPS and edit sshd_config:${NC}"
            echo "    sudo nano /etc/ssh/sshd_config"
            echo -e "\n ${YELLOW}2. Find and uncomment or add these lines:${NC}"
            echo "    PubkeyAuthentication yes"
            echo "    PermitRootLogin prohibit-password"
            echo "    AuthorizedKeysFile .ssh/authorized_keys .ssh/authorized_keys2"
            echo -e "\n ${YELLOW}3. Restart the SSH service:${NC}"
            echo "    sudo systemctl restart sshd"
            echo -e "${BLUE}------------------------------------------------------------------${NC}"
            echo -e "\n${YELLOW}Please enable root SSH access and run this script again.${NC}"
            exit 1
        fi
    fi
fi

echo -e "${GREEN}SSH connection successful with key-based authentication!${NC}"

# Download the WireGuard install script locally
if ! download_install_script; then
    echo -e "${RED}Failed to download the latest install script. Exiting...${NC}"
    exit 1
fi

# Upload the install script to the remote server
if ! scp -i "$SSH_PRIVATE_KEY" -o StrictHostKeyChecking=no -P "$SSH_PORT" wireguard-install.sh "$SSH_USER@$VPS_IP":~/; then
    echo -e "${RED}Failed to upload WireGuard install script to the remote server.${NC}"
    exit 1
fi

# Install WireGuard on remote server using the downloaded script
if ! install_wireguard; then
    echo -e "${RED}WireGuard installation failed.${NC}"
    exit 1
fi

# Remove the local install script (best-effort cleanup)
rm wireguard-install.sh >/dev/null 2>&1

# Handle the StartOS config (download)
# NOTE(review): CONFIG_NAME is presumably set by handle_startos_connection
# (defined elsewhere) -- verify before relying on it below.
if ! handle_startos_connection; then
    echo -e "${RED}StartOS configuration download failed!${NC}"
    exit 1
fi

# Import the configuration (non-fatal: the file remains for manual import)
if ! import_wireguard_config "$CONFIG_NAME"; then
    echo -e "${RED}StartOS configuration import failed or skipped!${NC}"
fi
|
||||||
|
Before Width: | Height: | Size: 9.6 KiB After Width: | Height: | Size: 9.6 KiB |
25
build/os-compat/buildenv.Dockerfile
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
# Build environment image for StartOS: Debian trixie plus the toolchain
# needed to build OS artifacts on hosts that lack native Linux/Debian
# tooling (used via build/os-compat/run-compat.sh).
FROM debian:trixie

RUN apt-get update && \
    apt-get install -y \
    ca-certificates \
    curl \
    gpg \
    build-essential \
    sed \
    grep \
    gawk \
    jq \
    gzip \
    brotli \
    squashfs-tools \
    git \
    rsync \
    b3sum \
    sudo \
    nodejs

# The repo is bind-mounted at /root/start-os with host-side ownership;
# mark it safe so git commands work inside the container.
RUN git config --global --add safe.directory /root/start-os

RUN mkdir -p /root/start-os
WORKDIR /root/start-os
|
||||||
30
build/os-compat/run-compat.sh
Executable file
@@ -0,0 +1,30 @@
|
|||||||
|
#!/bin/bash

# Run the given command inside the start9/build-env Docker container when
# the host cannot run it natively (FORCE_COMPAT=1, REQUIRES=linux on a
# non-Linux host, or REQUIRES=debian without dpkg); otherwise exec it
# directly on the host.

pwd=$(pwd)

cd "$(dirname "${BASH_SOURCE[0]}")/../.."

set -e

# Original working directory relative to the repo root (after the cd above,
# $(pwd) is the repo root, so stripping it leaves e.g. "/container-runtime").
rel_pwd="${pwd#"$(pwd)"}"

COMPAT_ARCH=$(uname -m)

platform=linux/$COMPAT_ARCH

# Map uname arch names to Docker platform names.
case $COMPAT_ARCH in
	x86_64)
		platform=linux/amd64;;
	aarch64)
		platform=linux/arm64;;
esac

if [ "$FORCE_COMPAT" = 1 ] || ( [ "$REQUIRES" = "linux" ] && [ "$(uname -s)" != "Linux" ] ) || ( [ "$REQUIRES" = "debian" ] && ! which dpkg > /dev/null ); then
	# Allocate a TTY only when we actually have one.
	if tty -s; then
		USE_TTY="-it"
	fi

	# "$@" is quoted so arguments containing spaces survive intact
	# (previously unquoted, which word-split every argument).
	docker run $USE_TTY --platform=$platform -eARCH -eENVIRONMENT -ePLATFORM -eGIT_BRANCH_AS_HASH -ePROJECT -eDEPENDS -eCONFLICTS -w "/root/start-os${rel_pwd}" --rm -v "$(pwd):/root/start-os" start9/build-env "$@"
else
	exec "$@"
fi
|
||||||
@@ -1,87 +0,0 @@
|
|||||||
#!/bin/bash

# Build a flashable StartOS image for Raspberry Pi from
# startos.raspberrypi.squashfs: create a partitioned image file, format
# boot (vfat) and root (ext4) partitions, unpack the squashfs into them,
# install first-boot resize hooks, and verify the embedded version
# metadata matches the repo (unless ALLOW_VERSION_MISMATCH=1).

set -e

# Print the partition device name for disk $1, partition number $2.
# Devices whose name ends in a digit (e.g. /dev/loop0) need a "p"
# separator before the partition number.
function partition_for () {
	if [[ "$1" =~ [0-9]+$ ]]; then
		echo "$1p$2"
	else
		echo "$1$2"
	fi
}

VERSION=$(cat VERSION.txt)
ENVIRONMENT=$(cat ENVIRONMENT.txt)
GIT_HASH=$(cat GIT_HASH.txt | head -c 7)
DATE=$(date +%Y%m%d)

# Last sector (512-byte sectors) of the root partition; also determines
# the image size below.
ROOT_PART_END=7217792

VERSION_FULL="$VERSION-$GIT_HASH"

# Non-release environments are suffixed as "~env" in the version string.
if [ -n "$ENVIRONMENT" ]; then
	VERSION_FULL="$VERSION_FULL~$ENVIRONMENT"
fi

TARGET_NAME=startos-${VERSION_FULL}-${DATE}_raspberrypi.img
TARGET_SIZE=$[($ROOT_PART_END+1)*512]

rm -f $TARGET_NAME
truncate -s $TARGET_SIZE $TARGET_NAME
# Scripted fdisk session: new DOS label with a fixed disk id (0xcb15ae4d),
# partition 1 = FAT32 boot (sectors 2048-526335), partition 2 = root up to
# ROOT_PART_END, bootable flag on partition 1, then write.
(
	echo o
	echo x
	echo i
	echo "0xcb15ae4d"
	echo r
	echo n
	echo p
	echo 1
	echo 2048
	echo 526335
	echo t
	echo c
	echo n
	echo p
	echo 2
	echo 526336
	echo $ROOT_PART_END
	echo a
	echo 1
	echo w
) | fdisk $TARGET_NAME
OUTPUT_DEVICE=$(sudo losetup --show -fP $TARGET_NAME)
sudo mkfs.ext4 `partition_for ${OUTPUT_DEVICE} 2`
sudo mkfs.vfat `partition_for ${OUTPUT_DEVICE} 1`

TMPDIR=$(mktemp -d)

sudo mount `partition_for ${OUTPUT_DEVICE} 2` $TMPDIR
sudo mkdir $TMPDIR/boot
sudo mount `partition_for ${OUTPUT_DEVICE} 1` $TMPDIR/boot
sudo unsquashfs -f -d $TMPDIR startos.raspberrypi.squashfs
# Capture the version metadata baked into the squashfs for the checks below.
REAL_GIT_HASH=$(cat $TMPDIR/usr/lib/startos/GIT_HASH.txt)
REAL_VERSION=$(cat $TMPDIR/usr/lib/startos/VERSION.txt)
REAL_ENVIRONMENT=$(cat $TMPDIR/usr/lib/startos/ENVIRONMENT.txt)
# First boot runs init_resize.sh instead, which grows the root partition
# to fill the SD card.
sudo sed -i 's| boot=embassy| init=/usr/lib/startos/scripts/init_resize\.sh|' $TMPDIR/boot/cmdline.txt
sudo cp ./build/raspberrypi/fstab $TMPDIR/etc/
sudo cp ./build/raspberrypi/init_resize.sh $TMPDIR/usr/lib/startos/scripts/init_resize.sh
sudo umount $TMPDIR/boot
sudo umount $TMPDIR
sudo losetup -d $OUTPUT_DEVICE

# Refuse to ship an image whose embedded metadata disagrees with the repo.
if [ "$ALLOW_VERSION_MISMATCH" != 1 ]; then
	if [ "$(cat GIT_HASH.txt)" != "$REAL_GIT_HASH" ]; then
		>&2 echo "startos.raspberrypi.squashfs GIT_HASH.txt mismatch"
		>&2 echo "expected $REAL_GIT_HASH (dpkg) found $(cat GIT_HASH.txt) (repo)"
		exit 1
	fi
	if [ "$(cat VERSION.txt)" != "$REAL_VERSION" ]; then
		>&2 echo "startos.raspberrypi.squashfs VERSION.txt mismatch"
		exit 1
	fi
	if [ "$(cat ENVIRONMENT.txt)" != "$REAL_ENVIRONMENT" ]; then
		>&2 echo "startos.raspberrypi.squashfs ENVIRONMENT.txt mismatch"
		exit 1
	fi
fi
|
|
||||||
142
build/upload-ota.sh
Executable file
@@ -0,0 +1,142 @@
|
|||||||
|
#!/bin/bash

# Publish a StartOS release: download CI artifacts (squashfs images, ISOs,
# start-tunnel debs, start-cli binaries), register the version with the
# alpha registry, upload assets to GitHub releases and S3, index them,
# GPG-sign everything, and print release-notes snippets (download links
# and checksums) to stdout.
#
# Environment: VERSION (required); RUN_ID / ST_RUN_ID / CLI_RUN_ID select
# the GitHub Actions runs to pull artifacts from; SKIP_DL, SKIP_CLEAN,
# SKIP_UL (1 = skip uploads, 2 = stop after registering), SKIP_INDEX.

if [ -z "$VERSION" ]; then
	>&2 echo '$VERSION required'
	exit 2
fi

set -e

if [ "$SKIP_DL" != "1" ]; then
	if [ "$SKIP_CLEAN" != "1" ]; then
		rm -rf ~/Downloads/v$VERSION
		mkdir ~/Downloads/v$VERSION
		cd ~/Downloads/v$VERSION
	fi

	if [ -n "$RUN_ID" ]; then
		# Retry each download until the artifact is available.
		for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do
			while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.squashfs -D $(pwd); do sleep 1; done
		done
		for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do
			while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.iso -D $(pwd); do sleep 1; done
		done
	fi

	if [ -n "$ST_RUN_ID" ]; then
		for arch in aarch64 riscv64 x86_64; do
			while ! gh run download -R Start9Labs/start-os $ST_RUN_ID -n start-tunnel_$arch.deb -D $(pwd); do sleep 1; done
		done
	fi

	if [ -n "$CLI_RUN_ID" ]; then
		for arch in aarch64 riscv64 x86_64; do
			for os in linux macos; do
				pair=${arch}-${os}
				# Map arch/os pair to the Rust target triple; there is no
				# riscv64 macOS build.
				if [ "${pair}" = "riscv64-linux" ]; then
					target=riscv64gc-unknown-linux-musl
				elif [ "${pair}" = "riscv64-macos" ]; then
					continue
				elif [ "${os}" = "linux" ]; then
					target="${arch}-unknown-linux-musl"
				elif [ "${os}" = "macos" ]; then
					target="${arch}-apple-darwin"
				fi
				while ! gh run download -R Start9Labs/start-os $CLI_RUN_ID -n start-cli_$target -D $(pwd); do sleep 1; done
				mv start-cli "start-cli_${pair}"
			done
		done
	fi
else
	cd ~/Downloads/v$VERSION
fi

# Register the OS version with the registry before uploading assets.
start-cli --registry=https://alpha-registry-x.start9.com registry os version add $VERSION "v$VERSION" '' ">=0.3.5 <=$VERSION"

if [ "$SKIP_UL" = "2" ]; then
	exit 2
elif [ "$SKIP_UL" != "1" ]; then
	# Debs and CLI binaries go to the GitHub release; images/ISOs to S3.
	for file in *.deb start-cli_*; do
		gh release upload -R Start9Labs/start-os v$VERSION $file
	done
	for file in *.iso *.squashfs; do
		s3cmd put -P $file s3://startos-images/v$VERSION/$file
	done
fi

if [ "$SKIP_INDEX" != "1" ]; then
	for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do
		for file in *_$arch.squashfs *_$arch.iso; do
			start-cli --registry=https://alpha-registry-x.start9.com registry os asset add --platform=$arch --version=$VERSION $file https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$file
		done
	done
fi

# Detached ASCII-armored signatures for every shipped artifact.
for file in *.iso *.squashfs *.deb start-cli_*; do
	gpg -u 7CFFDA41CA66056A --detach-sign --armor -o "${file}.asc" "$file"
done

gpg --export -a 7CFFDA41CA66056A > dr-bonez.key.asc
tar -czvf signatures.tar.gz *.asc

gh release upload -R Start9Labs/start-os v$VERSION signatures.tar.gz

# Unquoted heredoc: $VERSION and the $(ls ...) substitutions expand into
# the markdown download links.
cat << EOF
# ISO Downloads

- [x86_64/AMD64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64-nonfree.iso))
- [x86_64/AMD64-slim (FOSS-only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64.iso) "Without proprietary software or drivers")
- [aarch64/ARM64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64-nonfree.iso))
- [aarch64/ARM64-slim (FOSS-Only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64.iso) "Without proprietary software or drivers")
- [RISCV64 (RVA23)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_riscv64.iso))

EOF
# Quoted heredocs ('EOF') below emit the markdown literally; the checksum
# commands print their output between the code fences.
cat << 'EOF'
# StartOS Checksums

## SHA-256
```
EOF
sha256sum *.iso *.squashfs
cat << 'EOF'
```

## BLAKE-3
```
EOF
b3sum *.iso *.squashfs
cat << 'EOF'
```

# Start-Tunnel Checksums

## SHA-256
```
EOF
sha256sum start-tunnel*.deb
cat << 'EOF'
```

## BLAKE-3
```
EOF
b3sum start-tunnel*.deb
cat << 'EOF'
```

# start-cli Checksums

## SHA-256
```
EOF
sha256sum start-cli_*
cat << 'EOF'
```

## BLAKE-3
```
EOF
b3sum start-cli_*
cat << 'EOF'
```
EOF
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
#!/bin/bash

# Compute the build's git identity -- either "@<branch>" when
# GIT_BRANCH_AS_HASH=1, or the full describe hash (with "-modified" when
# the tree is dirty) -- refresh ./GIT_HASH.txt only when its contents
# would change, and print the file path.

if [ "$GIT_BRANCH_AS_HASH" = 1 ]; then
	GIT_HASH="@$(git rev-parse --abbrev-ref HEAD)"
else
	GIT_HASH="$(git describe --always --abbrev=40 --dirty=-modified)"
fi

# Skip the write when the file already holds this value, keeping its
# mtime stable so dependent build steps are not retriggered.
if ! [ -f ./GIT_HASH.txt ] || [ "$(cat ./GIT_HASH.txt)" != "$GIT_HASH" ]; then
	echo -n "$GIT_HASH" > ./GIT_HASH.txt
fi

echo -n ./GIT_HASH.txt
|
|
||||||
8
container-runtime/.gitignore
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
node_modules/
|
||||||
|
dist/
|
||||||
|
bundle.js
|
||||||
|
startInit.js
|
||||||
|
service/
|
||||||
|
service.js
|
||||||
|
*.squashfs
|
||||||
|
/tmp
|
||||||
146
container-runtime/RPCSpec.md
Normal file
@@ -0,0 +1,146 @@
|
|||||||
|
# Container RPC Server Specification
|
||||||
|
|
||||||
|
The container runtime exposes a JSON-RPC server over a Unix socket at `/media/startos/rpc/service.sock`.
|
||||||
|
|
||||||
|
## Methods
|
||||||
|
|
||||||
|
### init
|
||||||
|
|
||||||
|
Initialize the runtime and system.
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
```ts
|
||||||
|
{
|
||||||
|
id: string,
|
||||||
|
kind: "install" | "update" | "restore" | null,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`null`
|
||||||
|
|
||||||
|
### exit
|
||||||
|
|
||||||
|
Shut down the runtime and optionally run exit hooks for a target version.
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
```ts
|
||||||
|
{
|
||||||
|
id: string,
|
||||||
|
target: string | null, // ExtendedVersion or VersionRange
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`null`
|
||||||
|
|
||||||
|
### start
|
||||||
|
|
||||||
|
Run main method if not already running.
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
None
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`null`
|
||||||
|
|
||||||
|
### stop
|
||||||
|
|
||||||
|
Stop main method by sending SIGTERM to child processes, and SIGKILL after timeout.
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
None
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`null`
|
||||||
|
|
||||||
|
### execute
|
||||||
|
|
||||||
|
Run a specific package procedure.
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
```ts
|
||||||
|
{
|
||||||
|
id: string, // event ID
|
||||||
|
procedure: string, // JSON path (e.g., "/backup/create", "/actions/{name}/run")
|
||||||
|
input: any,
|
||||||
|
timeout: number | null,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`any`
|
||||||
|
|
||||||
|
### sandbox
|
||||||
|
|
||||||
|
Run a specific package procedure in sandbox mode. Same interface as `execute`.
|
||||||
|
|
||||||
|
UNIMPLEMENTED: this feature is planned but does not exist
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
```ts
|
||||||
|
{
|
||||||
|
id: string,
|
||||||
|
procedure: string,
|
||||||
|
input: any,
|
||||||
|
timeout: number | null,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`any`
|
||||||
|
|
||||||
|
### callback
|
||||||
|
|
||||||
|
Handle a callback from an effect.
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
```ts
|
||||||
|
{
|
||||||
|
id: number,
|
||||||
|
args: any[],
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`null` (no response sent)
|
||||||
|
|
||||||
|
### eval
|
||||||
|
|
||||||
|
Evaluate a script in the runtime context. Used for debugging.
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
```ts
|
||||||
|
{
|
||||||
|
script: string,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`any`
|
||||||
|
|
||||||
|
## Procedures
|
||||||
|
|
||||||
|
The `execute` and `sandbox` methods route to procedures based on the `procedure` path:
|
||||||
|
|
||||||
|
| Procedure | Description |
|
||||||
|
|-----------|-------------|
|
||||||
|
| `/backup/create` | Create a backup |
|
||||||
|
| `/actions/{name}/getInput` | Get input spec for an action |
|
||||||
|
| `/actions/{name}/run` | Run an action with input |
|
||||||
6
container-runtime/container-runtime-failure.service
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
# Invoked via OnFailure= from container-runtime.service: rebuilds the
# container so the runtime can come back up from a clean state.
[Unit]
Description=StartOS Container Runtime Failure Handler

[Service]
Type=oneshot
ExecStart=/usr/bin/start-container rebuild
|
||||||
12
container-runtime/container-runtime.service
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
[Unit]
Description=StartOS Container Runtime
# On failure, trigger a container rebuild instead of restarting in place.
OnFailure=container-runtime-failure.service

[Service]
Type=simple
Environment=RUST_LOG=startos=debug
ExecStart=/usr/bin/node --experimental-detect-module --trace-warnings --unhandled-rejections=warn /usr/lib/startos/init/index.js
# Recovery is handled by the OnFailure unit, not by systemd restarts.
Restart=no

[Install]
WantedBy=multi-user.target
|
||||||
16
container-runtime/deb-install.sh
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
#!/bin/bash

# Post-install setup for the container runtime: install runtime
# dependencies and configure systemd-resolved/journald inside the
# container.

set -e

apt-get update
apt-get install -y curl rsync qemu-user-static nodejs

# Free port 53 (DNS is provided by the host at 10.0.3.1 below) and make
# the journal persistent, compressed, and capped at 1G.
sed -i '/\(^\|#\)DNSStubListener=/c\DNSStubListener=no' /etc/systemd/resolved.conf
sed -i '/\(^\|#\)Storage=/c\Storage=persistent' /etc/systemd/journald.conf
sed -i '/\(^\|#\)Compress=/c\Compress=yes' /etc/systemd/journald.conf
sed -i '/\(^\|#\)SystemMaxUse=/c\SystemMaxUse=1G' /etc/systemd/journald.conf

systemctl enable container-runtime.service

# Resolve DNS via the container host.
# NOTE(review): 10.0.3.1 looks like an LXC-style bridge gateway -- confirm
# against the host's network configuration.
echo "nameserver 10.0.3.1" > /etc/resolv.conf
|
||||||
22
container-runtime/download-base-image.sh
Executable file
@@ -0,0 +1,22 @@
|
|||||||
|
#!/bin/bash
cd "$(dirname "${BASH_SOURCE[0]}")"
set -e

# Download the Debian rootfs squashfs for $ARCH (default: host arch) from
# images.linuxcontainers.org and verify it against the published
# SHA256SUMS.

DISTRO=debian
VERSION=trixie
ARCH=${ARCH:-$(uname -m)}
FLAVOR=default

# linuxcontainers.org indexes by Debian arch names (amd64/arm64); other
# arch strings (e.g. riscv64) pass through unchanged.
_ARCH=$ARCH
if [ "$_ARCH" = "x86_64" ]; then
	_ARCH=amd64
elif [ "$_ARCH" = "aarch64" ]; then
	_ARCH=arm64
fi

# index-system maps "distro;version;arch;flavor;...;path" -- take the
# first match and keep only the trailing path field.
BASE_URL="https://images.linuxcontainers.org$(curl -fsSL https://images.linuxcontainers.org/meta/1.0/index-system | grep "^$DISTRO;$VERSION;$_ARCH;$FLAVOR;" | head -n1 | sed 's/^.*;//g')"
OUTPUT_FILE="debian.${ARCH}.squashfs"

echo "Downloading ${BASE_URL}/rootfs.squashfs to $OUTPUT_FILE"
curl -fsSL "${BASE_URL}/rootfs.squashfs" > "$OUTPUT_FILE"
# Rewrite the checksum line to reference our local filename, then verify.
curl -fsSL "$BASE_URL/SHA256SUMS" | grep 'rootfs\.squashfs' | awk '{print $1" '"$OUTPUT_FILE"'"}' | shasum -a 256 -c
|
||||||
10
container-runtime/install-dist-deps.sh
Executable file
@@ -0,0 +1,10 @@
|
|||||||
|
#!/bin/bash
# Copy package manifests into ./dist with `file:` paths rewritten one
# level up (dist is nested one directory deeper than the sources), then
# install production dependencies there.

# Fail fast before changing directories — previously `set -e` ran after
# `cd`, so a failed cd was ignored and files were written to the caller's
# working directory.
set -e

cd "$(dirname "${BASH_SOURCE[0]}")"

# Rewrite relative file: dependency paths (e.g. file:../sdk -> file:../../sdk).
# sed reads the file directly; the former `cat | sed` pipeline was redundant.
sed 's/file:\.\([.\/]\)/file:..\/.\1/g' ./package.json > ./dist/package.json
sed 's/"\.\([.\/]\)/"..\/.\1/g' ./package-lock.json > ./dist/package-lock.json

npm --prefix dist ci --omit=dev
|
||||||
8
container-runtime/jest.config.js
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
/** @type {import('ts-jest').JestConfigWithTsJest} */
// Jest configuration: run TypeScript tests under node via ts-jest,
// rooted at ./src so compiled output in ./dist is never picked up.
module.exports = {
  preset: "ts-jest",
  // Only mock modules explicitly requested with jest.mock().
  automock: false,
  testEnvironment: "node",
  rootDir: "./src/",
  // Ignore compiled duplicates of the test files.
  modulePathIgnorePatterns: ["./dist/"],
}
|
||||||
6832
container-runtime/package-lock.json
generated
Normal file
47
container-runtime/package.json
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
{
|
||||||
|
"name": "container-runtime",
|
||||||
|
"version": "0.0.0",
|
||||||
|
"description": "We want to be the sdk intermediary for the system",
|
||||||
|
"module": "./index.js",
|
||||||
|
"scripts": {
|
||||||
|
"check": "tsc --noEmit",
|
||||||
|
"build": "prettier . '!tmp/**' --write && rm -rf dist && tsc",
|
||||||
|
"tsc": "rm -rf dist; tsc",
|
||||||
|
"test": "jest -c ./jest.config.js"
|
||||||
|
},
|
||||||
|
"author": "",
|
||||||
|
"prettier": {
|
||||||
|
"trailingComma": "all",
|
||||||
|
"tabWidth": 2,
|
||||||
|
"semi": false,
|
||||||
|
"singleQuote": false
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@iarna/toml": "^2.2.5",
|
||||||
|
"@noble/curves": "^1.4.0",
|
||||||
|
"@noble/hashes": "^1.4.0",
|
||||||
|
"@start9labs/start-sdk": "file:../sdk/dist",
|
||||||
|
"esbuild-plugin-resolve": "^2.0.0",
|
||||||
|
"filebrowser": "^1.0.0",
|
||||||
|
"isomorphic-fetch": "^3.0.0",
|
||||||
|
"jsonpath": "^1.1.1",
|
||||||
|
"lodash.merge": "^4.6.2",
|
||||||
|
"mime": "^4.0.7",
|
||||||
|
"node-fetch": "^3.1.0",
|
||||||
|
"ts-matches": "^6.3.2",
|
||||||
|
"tslib": "^2.5.3",
|
||||||
|
"typescript": "^5.1.3",
|
||||||
|
"yaml": "^2.3.1"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@swc/cli": "^0.1.62",
|
||||||
|
"@swc/core": "^1.3.65",
|
||||||
|
"@types/jest": "^29.5.12",
|
||||||
|
"@types/jsonpath": "^0.2.4",
|
||||||
|
"@types/node": "^20.11.13",
|
||||||
|
"jest": "^29.7.0",
|
||||||
|
"prettier": "^3.2.5",
|
||||||
|
"ts-jest": "^29.2.3",
|
||||||
|
"typescript": ">5.2"
|
||||||
|
}
|
||||||
|
}
|
||||||
332
container-runtime/src/Adapters/EffectCreator.ts
Normal file
@@ -0,0 +1,332 @@
|
|||||||
|
import {
|
||||||
|
ExtendedVersion,
|
||||||
|
types as T,
|
||||||
|
utils,
|
||||||
|
VersionRange,
|
||||||
|
} from "@start9labs/start-sdk"
|
||||||
|
import * as net from "net"
|
||||||
|
import { object, string, number, literals, some, unknown } from "ts-matches"
|
||||||
|
import { Effects } from "../Models/Effects"
|
||||||
|
|
||||||
|
import { CallbackHolder } from "../Models/CallbackHolder"
|
||||||
|
import { asError } from "@start9labs/start-sdk/base/lib/util"
|
||||||
|
// Shape of a JSON-RPC error object as sent by the host over the unix
// socket: `data` may be a bare string or a { details, debug } object.
const matchRpcError = object({
  error: object({
    code: number,
    message: string,
    data: some(
      string,
      object({
        details: string,
        debug: string.nullable().optional(),
      }),
    )
      .nullable()
      .optional(),
  }),
})
const testRpcError = matchRpcError.test
// A successful JSON-RPC response only needs a `result` key.
const testRpcResult = object({
  result: unknown,
}).test
type RpcError = typeof matchRpcError._TYPE

// Unix socket on which the host (StartOS) listens for effect calls.
const SOCKET_PATH = "/media/startos/rpc/host.sock"
// Monotonic request id shared by every effects object in this process.
let hostSystemId = 0

// Per-procedure context threaded through makeEffects: which host event
// spawned this call, the callback registry (if any), and a retry hook.
export type EffectContext = {
  eventId: string | null
  callbacks?: CallbackHolder
  constRetry?: () => void
}

/**
 * Returns a function that performs one JSON-RPC round trip to the host
 * socket: open connection, write a single newline-terminated request,
 * buffer response chunks until a newline arrives, then resolve with
 * `result` or reject with a formatted error.
 */
const rpcRoundFor =
  (eventId: string | null) =>
  <K extends T.EffectMethod | "clearCallbacks">(
    method: K,
    params: Record<string, unknown>,
  ) => {
    const id = hostSystemId++
    const client = net.createConnection({ path: SOCKET_PATH }, () => {
      // Requests are newline-delimited JSON; eventId is omitted (not
      // null) when absent so the host treats it as unset.
      client.write(
        JSON.stringify({
          id,
          method,
          params: { ...params, eventId: eventId ?? undefined },
        }) + "\n",
      )
    })
    let bufs: Buffer[] = []
    return new Promise((resolve, reject) => {
      client.on("data", (data) => {
        try {
          bufs.push(data)
          // Byte 10 is '\n' — only parse once a full line has arrived.
          if (data.reduce((acc, x) => acc || x == 10, false)) {
            const res: unknown = JSON.parse(
              Buffer.concat(bufs).toString().split("\n")[0],
            )
            if (testRpcError(res)) {
              // Build a human-readable message from message/details/debug
              // and log each piece before rejecting.
              let message = res.error.message
              console.error(
                "Error in host RPC:",
                utils.asError({ method, params, error: res.error }),
              )
              if (string.test(res.error.data)) {
                message += ": " + res.error.data
                console.error(`Details: ${res.error.data}`)
              } else {
                if (res.error.data?.details) {
                  message += ": " + res.error.data.details
                  console.error(`Details: ${res.error.data.details}`)
                }
                if (res.error.data?.debug) {
                  message += "\n" + res.error.data.debug
                  console.error(`Debug: ${res.error.data.debug}`)
                }
              }
              reject(new Error(`${message}@${method}`))
            } else if (testRpcResult(res)) {
              resolve(res.result)
            } else {
              reject(new Error(`malformed response ${JSON.stringify(res)}`))
            }
          }
        } catch (error) {
          reject(error)
        }
        // NOTE(review): end() runs on *every* data event, including
        // partial chunks before the newline — the request was already
        // fully written, so this only half-closes the write side, but
        // confirm the host tolerates it for multi-chunk responses.
        client.end()
      })
      client.on("error", (error) => {
        reject(error)
      })
    })
  }
|
||||||
|
|
||||||
|
/**
 * Builds the `Effects` object handed to package code: every method is a
 * thin proxy that forwards its options to the host over the RPC socket
 * (see rpcRoundFor), translating camelCase method names to the host's
 * kebab-case RPC method names. Methods that accept callbacks register
 * them in the context's CallbackHolder (when in context) and send the
 * resulting numeric handle instead.
 */
export function makeEffects(context: EffectContext): Effects {
  const rpcRound = rpcRoundFor(context.eventId)
  const self: Effects = {
    eventId: context.eventId,
    // Child effects share the context but scope callbacks to a sub-holder.
    child: (name) =>
      makeEffects({ ...context, callbacks: context.callbacks?.child(name) }),
    constRetry: context.constRetry,
    // "In context" means callbacks can still be registered for this object.
    isInContext: !!context.callbacks,
    onLeaveContext:
      context.callbacks?.onLeaveContext?.bind(context.callbacks) ||
      (() => {
        console.warn(
          "no context for this effects object",
          new Error().stack?.replace(/^Error/, ""),
        )
      }),
    clearCallbacks(...[options]: Parameters<T.Effects["clearCallbacks"]>) {
      return rpcRound("clear-callbacks", {
        ...options,
      }) as ReturnType<T.Effects["clearCallbacks"]>
    },
    action: {
      clear(...[options]: Parameters<T.Effects["action"]["clear"]>) {
        return rpcRound("action.clear", {
          ...options,
        }) as ReturnType<T.Effects["action"]["clear"]>
      },
      export(...[options]: Parameters<T.Effects["action"]["export"]>) {
        return rpcRound("action.export", {
          ...options,
        }) as ReturnType<T.Effects["action"]["export"]>
      },
      getInput(...[options]: Parameters<T.Effects["action"]["getInput"]>) {
        return rpcRound("action.get-input", {
          ...options,
        }) as ReturnType<T.Effects["action"]["getInput"]>
      },
      createTask(...[options]: Parameters<T.Effects["action"]["createTask"]>) {
        return rpcRound("action.create-task", {
          ...options,
        }) as ReturnType<T.Effects["action"]["createTask"]>
      },
      run(...[options]: Parameters<T.Effects["action"]["run"]>) {
        return rpcRound("action.run", {
          ...options,
        }) as ReturnType<T.Effects["action"]["run"]>
      },
      clearTasks(...[options]: Parameters<T.Effects["action"]["clearTasks"]>) {
        return rpcRound("action.clear-tasks", {
          ...options,
        }) as ReturnType<T.Effects["action"]["clearTasks"]>
      },
    },
    bind(...[options]: Parameters<T.Effects["bind"]>) {
      return rpcRound("bind", {
        ...options,
        // Caller stack forwarded so the host can attribute the binding.
        stack: new Error().stack,
      }) as ReturnType<T.Effects["bind"]>
    },
    clearBindings(...[options]: Parameters<T.Effects["clearBindings"]>) {
      return rpcRound("clear-bindings", { ...options }) as ReturnType<
        T.Effects["clearBindings"]
      >
    },
    clearServiceInterfaces(
      ...[options]: Parameters<T.Effects["clearServiceInterfaces"]>
    ) {
      return rpcRound("clear-service-interfaces", { ...options }) as ReturnType<
        T.Effects["clearServiceInterfaces"]
      >
    },
    getInstalledPackages(...[]: Parameters<T.Effects["getInstalledPackages"]>) {
      return rpcRound("get-installed-packages", {}) as ReturnType<
        T.Effects["getInstalledPackages"]
      >
    },
    getServiceManifest(
      ...[options]: Parameters<T.Effects["getServiceManifest"]>
    ) {
      return rpcRound("get-service-manifest", options) as ReturnType<
        T.Effects["getServiceManifest"]
      >
    },
    subcontainer: {
      createFs(options: { imageId: string; name: string }) {
        return rpcRound("subcontainer.create-fs", options) as ReturnType<
          T.Effects["subcontainer"]["createFs"]
        >
      },
      destroyFs(options: { guid: string }): Promise<null> {
        return rpcRound("subcontainer.destroy-fs", options) as ReturnType<
          T.Effects["subcontainer"]["destroyFs"]
        >
      },
    },
    exportServiceInterface: ((
      ...[options]: Parameters<Effects["exportServiceInterface"]>
    ) => {
      return rpcRound("export-service-interface", options) as ReturnType<
        T.Effects["exportServiceInterface"]
      >
    }) as Effects["exportServiceInterface"],
    getContainerIp(...[options]: Parameters<T.Effects["getContainerIp"]>) {
      return rpcRound("get-container-ip", options) as ReturnType<
        T.Effects["getContainerIp"]
      >
    },
    getOsIp(...[]: Parameters<T.Effects["getOsIp"]>) {
      return rpcRound("get-os-ip", {}) as ReturnType<T.Effects["getOsIp"]>
    },
    getHostInfo: ((...[allOptions]: Parameters<T.Effects["getHostInfo"]>) => {
      // Replace the function callback with its registered numeric handle
      // (or null when out of context) before crossing the socket.
      const options = {
        ...allOptions,
        callback: context.callbacks?.addCallback(allOptions.callback) || null,
      }
      return rpcRound("get-host-info", options) as ReturnType<
        T.Effects["getHostInfo"]
      > as any
    }) as Effects["getHostInfo"],
    getServiceInterface(
      ...[options]: Parameters<T.Effects["getServiceInterface"]>
    ) {
      return rpcRound("get-service-interface", {
        ...options,
        callback: context.callbacks?.addCallback(options.callback) || null,
      }) as ReturnType<T.Effects["getServiceInterface"]>
    },

    getServicePortForward(
      ...[options]: Parameters<T.Effects["getServicePortForward"]>
    ) {
      return rpcRound("get-service-port-forward", options) as ReturnType<
        T.Effects["getServicePortForward"]
      >
    },
    getSslCertificate(options: Parameters<T.Effects["getSslCertificate"]>[0]) {
      return rpcRound("get-ssl-certificate", options) as ReturnType<
        T.Effects["getSslCertificate"]
      >
    },
    getSslKey(options: Parameters<T.Effects["getSslKey"]>[0]) {
      return rpcRound("get-ssl-key", options) as ReturnType<
        T.Effects["getSslKey"]
      >
    },
    getSystemSmtp(...[options]: Parameters<T.Effects["getSystemSmtp"]>) {
      return rpcRound("get-system-smtp", {
        ...options,
        callback: context.callbacks?.addCallback(options.callback) || null,
      }) as ReturnType<T.Effects["getSystemSmtp"]>
    },
    listServiceInterfaces(
      ...[options]: Parameters<T.Effects["listServiceInterfaces"]>
    ) {
      return rpcRound("list-service-interfaces", {
        ...options,
        callback: context.callbacks?.addCallback(options.callback) || null,
      }) as ReturnType<T.Effects["listServiceInterfaces"]>
    },
    mount(...[options]: Parameters<T.Effects["mount"]>) {
      return rpcRound("mount", options) as ReturnType<T.Effects["mount"]>
    },
    restart(...[]: Parameters<T.Effects["restart"]>) {
      console.log("Restarting service...")
      return rpcRound("restart", {}) as ReturnType<T.Effects["restart"]>
    },
    setDependencies(
      dependencies: Parameters<T.Effects["setDependencies"]>[0],
    ): ReturnType<T.Effects["setDependencies"]> {
      return rpcRound("set-dependencies", dependencies) as ReturnType<
        T.Effects["setDependencies"]
      >
    },
    checkDependencies(
      options: Parameters<T.Effects["checkDependencies"]>[0],
    ): ReturnType<T.Effects["checkDependencies"]> {
      return rpcRound("check-dependencies", options) as ReturnType<
        T.Effects["checkDependencies"]
      >
    },
    getDependencies(): ReturnType<T.Effects["getDependencies"]> {
      return rpcRound("get-dependencies", {}) as ReturnType<
        T.Effects["getDependencies"]
      >
    },
    setHealth(...[options]: Parameters<T.Effects["setHealth"]>) {
      return rpcRound("set-health", options) as ReturnType<
        T.Effects["setHealth"]
      >
    },

    getStatus(...[o]: Parameters<T.Effects["getStatus"]>) {
      return rpcRound("get-status", o) as ReturnType<T.Effects["getStatus"]>
    },
    /// DEPRECATED
    // NOTE(review): the cast below references T.Effects["setHealth"], not
    // a setMainStatus type — looks like a copy-paste; harmless at runtime
    // (it's only a cast) but worth confirming against the SDK types.
    setMainStatus(o: { status: "running" | "stopped" }): Promise<null> {
      return rpcRound("set-main-status", o) as ReturnType<
        T.Effects["setHealth"]
      >
    },

    shutdown(...[]: Parameters<T.Effects["shutdown"]>) {
      return rpcRound("shutdown", {}) as ReturnType<T.Effects["shutdown"]>
    },
    getDataVersion() {
      return rpcRound("get-data-version", {}) as ReturnType<
        T.Effects["getDataVersion"]
      >
    },
    setDataVersion(...[options]: Parameters<T.Effects["setDataVersion"]>) {
      return rpcRound("set-data-version", options) as ReturnType<
        T.Effects["setDataVersion"]
      >
    },
  }
  // When a callback context exists, wire this object to neuter itself
  // (drop retry hook, mark out-of-context, replace the hook with a warner)
  // once the host signals the context has ended.
  if (context.callbacks?.onLeaveContext)
    self.onLeaveContext(() => {
      self.constRetry = undefined
      self.isInContext = false
      self.onLeaveContext = () => {
        console.warn(
          "this effects object is already out of context",
          new Error().stack?.replace(/^Error/, ""),
        )
      }
    })
  return self
}
|
||||||
473
container-runtime/src/Adapters/RpcListener.ts
Normal file
@@ -0,0 +1,473 @@
|
|||||||
|
// @ts-check
|
||||||
|
|
||||||
|
import * as net from "net"
|
||||||
|
import {
|
||||||
|
object,
|
||||||
|
some,
|
||||||
|
string,
|
||||||
|
literal,
|
||||||
|
array,
|
||||||
|
number,
|
||||||
|
matches,
|
||||||
|
any,
|
||||||
|
shape,
|
||||||
|
anyOf,
|
||||||
|
literals,
|
||||||
|
} from "ts-matches"
|
||||||
|
|
||||||
|
import {
|
||||||
|
ExtendedVersion,
|
||||||
|
types as T,
|
||||||
|
utils,
|
||||||
|
VersionRange,
|
||||||
|
} from "@start9labs/start-sdk"
|
||||||
|
import * as fs from "fs"
|
||||||
|
|
||||||
|
import { CallbackHolder } from "../Models/CallbackHolder"
|
||||||
|
import { AllGetDependencies } from "../Interfaces/AllGetDependencies"
|
||||||
|
import { jsonPath, unNestPath } from "../Models/JsonPath"
|
||||||
|
import { System } from "../Interfaces/System"
|
||||||
|
import { makeEffects } from "./EffectCreator"
|
||||||
|
type MaybePromise<T> = T | Promise<T>
// Either a success ({ result }) or a JSON-RPC error envelope.
export const matchRpcResult = anyOf(
  object({ result: any }),
  object({
    error: object({
      code: number,
      message: string,
      data: object({
        details: string.optional(),
        debug: any.optional(),
      })
        .nullable()
        .optional(),
    }),
  }),
)

export type RpcResult = typeof matchRpcResult._TYPE
// `null` means "notification — write nothing back on the socket".
type SocketResponse = ({ jsonrpc: "2.0"; id: IdType } & RpcResult) | null

// Unix socket this service listens on for commands *from* the host
// (the mirror of host.sock used by EffectCreator).
const SOCKET_PARENT = "/media/startos/rpc"
const SOCKET_PATH = "/media/startos/rpc/service.sock"
const jsonrpc = "2.0" as const

const isResult = object({ result: any }).test

// JSON-RPC ids may be a string, a number, or null.
const idType = some(string, number, literal(null))
type IdType = null | string | number | undefined
// Request shapes accepted on the socket, one matcher per method.
const runType = object({
  id: idType.optional(),
  method: literal("execute"),
  params: object({
    id: string,
    procedure: string,
    input: any,
    timeout: number.nullable().optional(),
  }),
})
const sandboxRunType = object({
  id: idType.optional(),
  method: literal("sandbox"),
  params: object({
    id: string,
    procedure: string,
    input: any,
    timeout: number.nullable().optional(),
  }),
})
const callbackType = object({
  method: literal("callback"),
  params: object({
    id: number,
    args: array,
  }),
})
const initType = object({
  id: idType.optional(),
  method: literal("init"),
  params: object({
    id: string,
    kind: literals("install", "update", "restore").nullable(),
  }),
})
const startType = object({
  id: idType.optional(),
  method: literal("start"),
})
const stopType = object({
  id: idType.optional(),
  method: literal("stop"),
})
const exitType = object({
  id: idType.optional(),
  method: literal("exit"),
  params: object({
    id: string,
    target: string.nullable(),
  }),
})
const evalType = object({
  id: idType.optional(),
  method: literal("eval"),
  params: object({
    script: string,
  }),
})

const jsonParse = (x: string) => JSON.parse(x)

// Wraps a pending RpcResult into a full JSON-RPC 2.0 response:
// normalizes a missing/undefined result to null (so the envelope always
// has `result` or `error`) and converts rejections into error envelopes.
const handleRpc = (id: IdType, result: Promise<RpcResult>) =>
  result
    .then((result) => {
      return {
        jsonrpc,
        id,
        ...result,
      }
    })
    .then((x) => {
      if (
        ("result" in x && x.result === undefined) ||
        !("error" in x || "result" in x)
      )
        (x as any).result = null
      return x
    })
    .catch((error) => ({
      jsonrpc,
      id,
      error: {
        code: 0,
        message: typeof error,
        data: { details: "" + error, debug: error?.stack },
      },
    }))

const hasId = object({ id: idType }).test
||||||
|
/**
 * JSON-RPC server for the service side of the runtime: listens on a
 * unix socket for newline-delimited requests from the host (init/start/
 * stop/exit/execute/sandbox/callback/eval), dispatches them to the
 * loaded System, and writes newline-delimited responses back.
 */
export class RpcListener {
  // Set by "exit"; checked after each response is flushed so the
  // process terminates only once the reply has been written.
  shouldExit = false
  unixSocketServer = net.createServer(async (server) => {})
  // Populated by "init"; all other methods require it (see `system`).
  private _system: System | undefined
  private callbacks: CallbackHolder | undefined

  constructor(readonly getDependencies: AllGetDependencies) {
    if (!fs.existsSync(SOCKET_PARENT)) {
      fs.mkdirSync(SOCKET_PARENT, { recursive: true })
    }
    // Remove a stale socket file from a previous run before binding.
    if (fs.existsSync(SOCKET_PATH)) fs.rmSync(SOCKET_PATH, { force: true })

    this.unixSocketServer.listen(SOCKET_PATH)

    console.log("Listening on %s", SOCKET_PATH)

    this.unixSocketServer.on("connection", (s) => {
      // Track the most recent request id on this connection so errors
      // thrown outside the handler can still be attributed.
      let id: IdType = null
      const captureId = <X>(x: X) => {
        if (hasId(x)) id = x.id
        return x
      }
      // Pass-through logger used at both ends of the pipeline below.
      const logData =
        (location: string) =>
        <X>(x: X) => {
          console.log({
            location,
            stringified: JSON.stringify(x),
            type: typeof x,
            id,
          })
          return x
        }
      // Converts any thrown value into a JSON-RPC error envelope.
      const mapError = (error: any): SocketResponse => ({
        jsonrpc,
        id,
        error: {
          message: typeof error,
          data: {
            details: error?.message ?? String(error),
            debug: error?.stack,
          },
          code: 1,
        },
      })
      // null responses (notifications) are not written back.
      const writeDataToSocket = (x: SocketResponse) => {
        if (x != null) {
          return new Promise((resolve) =>
            s.write(JSON.stringify(x) + "\n", resolve),
          )
        }
      }
      // NOTE(review): assumes each "data" event carries whole lines —
      // a request split across TCP chunks would fail JSON.parse; confirm
      // the host always writes one full line per send.
      s.on("data", (a) =>
        Promise.resolve(a)
          .then((b) => b.toString())
          .then((buf) => {
            for (let s of buf.split("\n")) {
              if (s)
                Promise.resolve(s)
                  .then(logData("dataIn"))
                  .then(jsonParse)
                  .then(captureId)
                  .then((x) => this.dealWithInput(x))
                  .catch(mapError)
                  .then(logData("response"))
                  .then(writeDataToSocket)
                  .then((_) => {
                    if (this.shouldExit) {
                      process.exit(0)
                    }
                  })
                  .catch((e) => {
                    console.error(`Major error in socket handling: ${e}`)
                    console.debug(`Data in: ${a.toString()}`)
                  })
            }
          }),
      )
    })
  }

  // Accessor that enforces "init must run first".
  private get system() {
    if (!this._system) throw new Error("System not initialized")
    return this._system
  }

  // Fire-and-forget invocation of a host-registered callback handle.
  callCallback(callback: number, args: any[]): void {
    if (this.callbacks) {
      this.callbacks
        .callCallback(callback, args)
        .catch((error) =>
          console.error(`callback ${callback} failed`, utils.asError(error)),
        )
    } else {
      console.warn(
        `callback ${callback} ignored because system is not initialized`,
      )
    }
  }

  // Routes one parsed request to the matching handler and returns the
  // socket response (or null for notifications).
  private dealWithInput(input: unknown): MaybePromise<SocketResponse> {
    return matches(input)
      .when(runType, async ({ id, params }) => {
        const system = this.system
        const procedure = jsonPath.unsafeCast(params.procedure)
        const { input, timeout, id: eventId } = params
        const result = this.getResult(
          procedure,
          system,
          eventId,
          timeout,
          input,
        )

        return handleRpc(id, result)
      })
      // "sandbox" is handled identically to "execute" here; any
      // sandboxing presumably happens host-side — TODO confirm.
      .when(sandboxRunType, async ({ id, params }) => {
        const system = this.system
        const procedure = jsonPath.unsafeCast(params.procedure)
        const { input, timeout, id: eventId } = params
        const result = this.getResult(
          procedure,
          system,
          eventId,
          timeout,
          input,
        )

        return handleRpc(id, result)
      })
      // Callbacks are notifications: no response is written.
      .when(callbackType, async ({ params: { id, args } }) => {
        this.callCallback(id, args)
        return null
      })
      .when(startType, async ({ id }) => {
        // Reuse the existing "main" callback child if present.
        const callbacks =
          this.callbacks?.getChild("main") || this.callbacks?.child("main")
        const effects = makeEffects({
          eventId: null,
          callbacks,
        })
        return handleRpc(
          id,
          this.system.start(effects).then((result) => ({ result })),
        )
      })
      .when(stopType, async ({ id }) => {
        return handleRpc(
          id,
          this.system.stop().then((result) => {
            this.callbacks?.removeChild("main")

            return { result }
          }),
        )
      })
      .when(exitType, async ({ id, params }) => {
        return handleRpc(
          id,
          (async () => {
            if (this._system) {
              // `target` may be an exact version or a version range.
              let target = null
              if (params.target)
                try {
                  target = ExtendedVersion.parse(params.target)
                } catch (_) {
                  target = VersionRange.parse(params.target).normalize()
                }
              await this._system.exit(
                makeEffects({
                  eventId: params.id,
                }),
                target,
              )
              this.shouldExit = true
            }
          })().then((result) => ({ result })),
        )
      })
      .when(initType, async ({ id, params }) => {
        return handleRpc(
          id,
          (async () => {
            // Idempotent: a second init is a no-op.
            if (!this._system) {
              const system = await this.getDependencies.system()
              this.callbacks = new CallbackHolder(
                makeEffects({
                  eventId: params.id,
                }),
              )
              const callbacks = this.callbacks.child("init")
              console.error("Initializing...")
              await system.init(
                makeEffects({
                  eventId: params.id,
                  callbacks,
                }),
                params.kind,
              )
              console.error("Initialization complete.")
              this._system = system
            }
          })().then((result) => ({ result })),
        )
      })
      .when(evalType, async ({ id, params }) => {
        return handleRpc(
          id,
          (async () => {
            // SECURITY: executes arbitrary script text from the socket
            // with access to `require` — acceptable only because the
            // socket is host-controlled; do not expose further.
            const result = await new Function(
              `return (async () => { return (${params.script}) }).call(this)`,
            ).call({
              listener: this,
              require: require,
            })
            return {
              jsonrpc,
              id,
              // Only JSON-representable values are echoed back.
              result: ![
                "string",
                "number",
                "boolean",
                "null",
                "object",
              ].includes(typeof result)
                ? null
                : result,
            }
          })(),
        )
      })
      // Well-formed request with an unknown method: JSON-RPC -32601.
      .when(
        shape({ id: idType.optional(), method: string }),
        ({ id, method }) => ({
          jsonrpc,
          id,
          error: {
            code: -32601,
            message: `Method not found`,
            data: {
              details: method,
            },
          },
        }),
      )

      // Anything else: invalid params, -32602.
      .defaultToLazy(() => {
        console.warn(
          `Couldn't parse the following input ${JSON.stringify(input)}`,
        )
        return {
          jsonrpc,
          id: (input as any)?.id,
          error: {
            code: -32602,
            message: "invalid params",
            data: {
              details: JSON.stringify(input),
            },
          },
        }
      })
  }
  // Maps an "execute"/"sandbox" procedure path onto the System API,
  // normalizing the outcome into { result } / { error } shape.
  private getResult(
    procedure: typeof jsonPath._TYPE,
    system: System,
    eventId: string,
    timeout: number | null | undefined,
    input: any,
  ) {
    const ensureResultTypeShape = (
      result: void | T.ActionInput | T.ActionResult | null,
    ): { result: any } => {
      return { result }
    }
    const callbacks = this.callbacks?.child(procedure)
    const effects = makeEffects({
      eventId,
      callbacks,
    })

    return (async () => {
      switch (procedure) {
        case "/backup/create":
          return system.createBackup(effects, timeout || null)
        default:
          // Paths like /actions/<id>/getInput and /actions/<id>/run.
          const procedures = unNestPath(procedure)
          switch (true) {
            case procedures[1] === "actions" && procedures[3] === "getInput":
              return system.getActionInput(
                effects,
                procedures[2],
                timeout || null,
              )
            case procedures[1] === "actions" && procedures[3] === "run":
              return system.runAction(
                effects,
                procedures[2],
                input.input,
                timeout || null,
              )
          }
        // NOTE(review): unmatched procedures fall through and resolve
        // as { result: undefined } → normalized to null by handleRpc.
      }
    })().then(ensureResultTypeShape, (error) =>
      matches(error)
        .when(
          object({
            error: string,
            code: number.defaultTo(0),
          }),
          (error) => ({
            error: {
              code: error.code,
              message: error.error,
            },
          }),
        )
        .defaultToLazy(() => ({
          error: {
            code: 0,
            message: String(error),
          },
        })),
    )
  }
}
|
||||||
@@ -0,0 +1,178 @@
|
|||||||
|
import * as fs from "fs/promises"
|
||||||
|
import * as cp from "child_process"
|
||||||
|
import { SubContainer, types as T } from "@start9labs/start-sdk"
|
||||||
|
import { promisify } from "util"
|
||||||
|
import { DockerProcedure, VolumeId } from "../../../Models/DockerProcedure"
|
||||||
|
import { Volume } from "./matchVolume"
|
||||||
|
import {
|
||||||
|
CommandOptions,
|
||||||
|
ExecOptions,
|
||||||
|
SubContainerOwned,
|
||||||
|
} from "@start9labs/start-sdk/package/lib/util/SubContainer"
|
||||||
|
import { Mounts } from "@start9labs/start-sdk/package/lib/mainFn/Mounts"
|
||||||
|
import { Manifest } from "@start9labs/start-sdk/base/lib/osBindings"
|
||||||
|
import { BackupEffects } from "@start9labs/start-sdk/package/lib/backup/Backups"
|
||||||
|
import { Drop } from "@start9labs/start-sdk/package/lib/util"
|
||||||
|
import { SDKManifest } from "@start9labs/start-sdk/base/lib/types"
|
||||||
|
export const exec = promisify(cp.exec)
|
||||||
|
export const execFile = promisify(cp.execFile)
|
||||||
|
|
||||||
|
export class DockerProcedureContainer extends Drop {
|
||||||
|
private constructor(
|
||||||
|
private readonly subcontainer: SubContainer<SDKManifest>,
|
||||||
|
) {
|
||||||
|
super()
|
||||||
|
}
|
||||||
|
|
||||||
|
static async of(
|
||||||
|
effects: T.Effects,
|
||||||
|
packageId: string,
|
||||||
|
data: DockerProcedure,
|
||||||
|
volumes: { [id: VolumeId]: Volume },
|
||||||
|
name: string,
|
||||||
|
options: { subcontainer?: SubContainer<SDKManifest> } = {},
|
||||||
|
) {
|
||||||
|
const subcontainer =
|
||||||
|
options?.subcontainer ??
|
||||||
|
(await DockerProcedureContainer.createSubContainer(
|
||||||
|
effects,
|
||||||
|
packageId,
|
||||||
|
data,
|
||||||
|
volumes,
|
||||||
|
name,
|
||||||
|
))
|
||||||
|
return new DockerProcedureContainer(subcontainer)
|
||||||
|
}
|
||||||
|
static async createSubContainer(
|
||||||
|
effects: T.Effects,
|
||||||
|
packageId: string,
|
||||||
|
data: DockerProcedure,
|
||||||
|
volumes: { [id: VolumeId]: Volume },
|
||||||
|
name: string,
|
||||||
|
) {
|
||||||
|
const subcontainer = await SubContainerOwned.of(
|
||||||
|
effects as BackupEffects,
|
||||||
|
{ imageId: data.image },
|
||||||
|
null,
|
||||||
|
name,
|
||||||
|
)
|
||||||
|
|
||||||
|
if (data.mounts) {
|
||||||
|
const mounts = data.mounts
|
||||||
|
for (const mount in mounts) {
|
||||||
|
const path = mounts[mount].startsWith("/")
|
||||||
|
? `${subcontainer.rootfs}${mounts[mount]}`
|
||||||
|
: `${subcontainer.rootfs}/${mounts[mount]}`
|
||||||
|
await fs.mkdir(path, { recursive: true })
|
||||||
|
const volumeMount = volumes[mount]
|
||||||
|
if (volumeMount.type === "data") {
|
||||||
|
await subcontainer.mount(
|
||||||
|
Mounts.of().mountVolume({
|
||||||
|
volumeId: mount,
|
||||||
|
subpath: null,
|
||||||
|
mountpoint: mounts[mount],
|
||||||
|
readonly: false,
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
} else if (volumeMount.type === "assets") {
|
||||||
|
await subcontainer.mount(
|
||||||
|
Mounts.of().mountAssets({
|
||||||
|
subpath: mount,
|
||||||
|
mountpoint: mounts[mount],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
} else if (volumeMount.type === "certificate") {
|
||||||
|
const hostnames = [
|
||||||
|
`${packageId}.embassy`,
|
||||||
|
...new Set(
|
||||||
|
Object.values(
|
||||||
|
(
|
||||||
|
await effects.getHostInfo({
|
||||||
|
hostId: volumeMount["interface-id"],
|
||||||
|
})
|
||||||
|
)?.hostnameInfo || {},
|
||||||
|
)
|
||||||
|
.flatMap((h) => h)
|
||||||
|
.flatMap((h) => (h.kind === "onion" ? [h.hostname.value] : [])),
|
||||||
|
).values(),
|
||||||
|
]
|
||||||
|
const certChain = await effects.getSslCertificate({
|
||||||
|
hostnames,
|
||||||
|
})
|
||||||
|
const key = await effects.getSslKey({
|
||||||
|
hostnames,
|
||||||
|
})
|
||||||
|
await fs.writeFile(
|
||||||
|
`${path}/${volumeMount["interface-id"]}.cert.pem`,
|
||||||
|
certChain.join("\n"),
|
||||||
|
)
|
||||||
|
await fs.writeFile(
|
||||||
|
`${path}/${volumeMount["interface-id"]}.key.pem`,
|
||||||
|
key,
|
||||||
|
)
|
||||||
|
} else if (volumeMount.type === "pointer") {
|
||||||
|
await effects.mount({
|
||||||
|
location: path,
|
||||||
|
target: {
|
||||||
|
packageId: volumeMount["package-id"],
|
||||||
|
subpath: volumeMount.path,
|
||||||
|
readonly: volumeMount.readonly,
|
||||||
|
volumeId: volumeMount["volume-id"],
|
||||||
|
idmap: [],
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} else if (volumeMount.type === "backup") {
|
||||||
|
await subcontainer.mount(
|
||||||
|
Mounts.of().mountBackups({
|
||||||
|
subpath: null,
|
||||||
|
mountpoint: mounts[mount],
|
||||||
|
}),
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return subcontainer
|
||||||
|
}
|
||||||
|
|
||||||
|
async exec(
|
||||||
|
commands: string[],
|
||||||
|
options?: CommandOptions & ExecOptions,
|
||||||
|
timeoutMs?: number | null,
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
return await this.subcontainer.exec(commands, options, timeoutMs)
|
||||||
|
} finally {
|
||||||
|
await this.subcontainer.destroy?.()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async execFail(
|
||||||
|
commands: string[],
|
||||||
|
timeoutMs: number | null,
|
||||||
|
options?: CommandOptions & ExecOptions,
|
||||||
|
) {
|
||||||
|
try {
|
||||||
|
const res = await this.subcontainer.exec(commands, options, timeoutMs)
|
||||||
|
if (res.exitCode !== 0) {
|
||||||
|
const codeOrSignal =
|
||||||
|
res.exitCode !== null
|
||||||
|
? `code ${res.exitCode}`
|
||||||
|
: `signal ${res.exitSignal}`
|
||||||
|
throw new Error(
|
||||||
|
`Process exited with ${codeOrSignal}: ${res.stderr.toString()}`,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
return res
|
||||||
|
} finally {
|
||||||
|
await this.subcontainer.destroy?.()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// async spawn(commands: string[]): Promise<cp.ChildProcess> {
|
||||||
|
// return await this.subcontainer.spawn(commands)
|
||||||
|
// }
|
||||||
|
|
||||||
|
onDrop(): void {
|
||||||
|
this.subcontainer.destroy?.()
|
||||||
|
}
|
||||||
|
}
|
||||||