mirror of
https://github.com/Start9Labs/start-os.git
synced 2026-03-26 18:31:52 +00:00
Compare commits
431 Commits
next/minor
...
fix/servic
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
d601c19646 | ||
|
|
58e0b166cb | ||
|
|
2a678bb017 | ||
|
|
5664456b77 | ||
|
|
3685b7e57e | ||
|
|
989d5f73b1 | ||
|
|
4f84073cb5 | ||
|
|
c190295c34 | ||
|
|
60875644a1 | ||
|
|
113b09ad01 | ||
|
|
2605d0e671 | ||
|
|
d232b91d31 | ||
|
|
c65db31fd9 | ||
|
|
99871805bd | ||
|
|
e8ef39adad | ||
|
|
466b9217b5 | ||
|
|
c9a7f519b9 | ||
|
|
96ae532879 | ||
|
|
eda08d5b0f | ||
|
|
7c12b58bb5 | ||
|
|
5446c89bc0 | ||
|
|
2d0251e585 | ||
|
|
f41710c892 | ||
|
|
df3f79f282 | ||
|
|
f8df692865 | ||
|
|
0c6d3b188d | ||
|
|
e7a38863ab | ||
|
|
720e0fcdab | ||
|
|
bf8ff84522 | ||
|
|
5a9510238e | ||
|
|
7b3c74179b | ||
|
|
cd70fa4c32 | ||
|
|
83133ced6a | ||
|
|
6c5179a179 | ||
|
|
e33ab39b85 | ||
|
|
9567bcec1b | ||
|
|
550b16dc0b | ||
|
|
5d8331b7f7 | ||
|
|
e35b643e51 | ||
|
|
bc6a92677b | ||
|
|
f52072e6ec | ||
|
|
9c43c43a46 | ||
|
|
0430e0f930 | ||
|
|
b945243d1a | ||
|
|
d8484a8b26 | ||
|
|
3c27499795 | ||
|
|
7c772e873d | ||
|
|
db2fab245e | ||
|
|
a9c9917f1a | ||
|
|
23e2e9e9cc | ||
|
|
2369e92460 | ||
|
|
a53b15f2a3 | ||
|
|
72eb8b1eb6 | ||
|
|
4db54f3b83 | ||
|
|
24eb27f005 | ||
|
|
009d76ea35 | ||
|
|
6e8a425eb1 | ||
|
|
66188d791b | ||
|
|
015ff02d71 | ||
|
|
10bfaf5415 | ||
|
|
e3e0b85e0c | ||
|
|
ad0632892e | ||
|
|
f26791ba39 | ||
|
|
2fbaaebf44 | ||
|
|
edb916338c | ||
|
|
f7e947d37d | ||
|
|
a9e3d1ed75 | ||
|
|
ce97827c42 | ||
|
|
3efec07338 | ||
|
|
68f401bfa3 | ||
|
|
1ea525feaa | ||
|
|
57c4a7527e | ||
|
|
5aa9c045e1 | ||
|
|
6f1900f3bb | ||
|
|
bc62de795e | ||
|
|
c62ca4b183 | ||
|
|
876e5bc683 | ||
|
|
b99f3b73cd | ||
|
|
7eecf29449 | ||
|
|
1d331d7810 | ||
|
|
68414678d8 | ||
|
|
2f6b9dac26 | ||
|
|
d1812d875b | ||
|
|
723dea100f | ||
|
|
c4419ed31f | ||
|
|
754ab86e51 | ||
|
|
04dab532cd | ||
|
|
add01ebc68 | ||
|
|
1cc9a1a30b | ||
|
|
92a1de7500 | ||
|
|
a6fedcff80 | ||
|
|
55eb999305 | ||
|
|
377b7b12ce | ||
|
|
ba2906a42e | ||
|
|
ee27f14be0 | ||
|
|
46c8be63a7 | ||
|
|
7ba66c419a | ||
|
|
340775a593 | ||
|
|
35d2ec8a44 | ||
|
|
2983b9950f | ||
|
|
dbf08a6cf8 | ||
|
|
28f31be36f | ||
|
|
3ec4db0225 | ||
|
|
f5688e077a | ||
|
|
2464d255d5 | ||
|
|
586d950b8c | ||
|
|
e7469388cc | ||
|
|
ab6ca8e16a | ||
|
|
02413a4fac | ||
|
|
05b8dd9ad8 | ||
|
|
29c9419a6e | ||
|
|
90e61989a4 | ||
|
|
b1f9f90fec | ||
|
|
b40849f672 | ||
|
|
44560c8da8 | ||
|
|
46fd01c264 | ||
|
|
100695c262 | ||
|
|
54b5a4ae55 | ||
|
|
ffb252962b | ||
|
|
ae31270e63 | ||
|
|
9b2b54d585 | ||
|
|
e1ccc583a3 | ||
|
|
7750e33f82 | ||
|
|
d2c4741f0b | ||
|
|
c79c4f6bde | ||
|
|
3849d0d1a9 | ||
|
|
8bd71ccd5e | ||
|
|
b731f7fb64 | ||
|
|
cd554f77f3 | ||
|
|
8c977c51ca | ||
|
|
a3252f9671 | ||
|
|
9bc945f76f | ||
|
|
f6b4dfffb6 | ||
|
|
68955c29cb | ||
|
|
97e4d036dc | ||
|
|
0f49f54c29 | ||
|
|
828e13adbb | ||
|
|
e6f0067728 | ||
|
|
5c473eb9cc | ||
|
|
2adf34fbaf | ||
|
|
05dd760388 | ||
|
|
2cf4864078 | ||
|
|
df4c92672f | ||
|
|
5b173315f9 | ||
|
|
c85ea7d8fa | ||
|
|
113154702f | ||
|
|
33ae46f76a | ||
|
|
27272680a2 | ||
|
|
b1621f6b34 | ||
|
|
2c65033c0a | ||
|
|
dcfbaa9243 | ||
|
|
accef65ede | ||
|
|
50755d8ba3 | ||
|
|
47b6509f70 | ||
|
|
89f3fdc05f | ||
|
|
03f8b73627 | ||
|
|
2e6e9635c3 | ||
|
|
6a312e3fdd | ||
|
|
0e8961efe3 | ||
|
|
fc2be42418 | ||
|
|
ab4336cfd7 | ||
|
|
63a29d3a4a | ||
|
|
31856d9895 | ||
|
|
f51dcf23d6 | ||
|
|
1883c9666e | ||
|
|
4b4cf76641 | ||
|
|
495bbecc01 | ||
|
|
e6af7e9885 | ||
|
|
182b8c2283 | ||
|
|
5318cccc5f | ||
|
|
99739575d4 | ||
|
|
6f9069a4fb | ||
|
|
a18ab7f1e9 | ||
|
|
be0371fb11 | ||
|
|
fa3329abf2 | ||
|
|
e830fade06 | ||
|
|
ac392dcb96 | ||
|
|
00a5fdf491 | ||
|
|
7fff9579c0 | ||
|
|
1b006599cf | ||
|
|
ce2842d365 | ||
|
|
7d1096dbd8 | ||
|
|
95722802dc | ||
|
|
95cad7bdd9 | ||
|
|
b2b98643d8 | ||
|
|
bb8109f67d | ||
|
|
e6f02bf8f7 | ||
|
|
57e75e3614 | ||
|
|
89ab67e067 | ||
|
|
115c599fd8 | ||
|
|
3121c08ee8 | ||
|
|
a5bac39196 | ||
|
|
9f640b24b3 | ||
|
|
75e7556bfa | ||
|
|
beb3a9f60a | ||
|
|
dfda2f7d5d | ||
|
|
a77ebd3b55 | ||
|
|
00114287e5 | ||
|
|
a9569d0ed9 | ||
|
|
88d9388be2 | ||
|
|
93c72ecea5 | ||
|
|
b5b0ac50bd | ||
|
|
4d2afdb1a9 | ||
|
|
39a177bd70 | ||
|
|
34fb6ac837 | ||
|
|
f868a454d9 | ||
|
|
751ceab04e | ||
|
|
b6c48d0f98 | ||
|
|
097d77f7b3 | ||
|
|
7a0586684b | ||
|
|
8f34d1c555 | ||
|
|
5270a6781f | ||
|
|
fa93e195cb | ||
|
|
befa9eb16d | ||
|
|
a278c630bb | ||
|
|
76eb0f1775 | ||
|
|
0abe08f243 | ||
|
|
015131f198 | ||
|
|
a730543c76 | ||
|
|
b43ad93c54 | ||
|
|
7850681ce1 | ||
|
|
846189b15b | ||
|
|
657aac0d68 | ||
|
|
81932c8cff | ||
|
|
20f6a5e797 | ||
|
|
949f1c648a | ||
|
|
d159dde2ca | ||
|
|
729a510c5b | ||
|
|
fffc7f4098 | ||
|
|
c7a2e7ada1 | ||
|
|
a2b1968d6e | ||
|
|
398eb13a7f | ||
|
|
956c8a8e03 | ||
|
|
6aba166c82 | ||
|
|
fd7c7ea6b7 | ||
|
|
d85e621bb3 | ||
|
|
25801f374c | ||
|
|
8fd2d0b35c | ||
|
|
dd196c0e11 | ||
|
|
6e2cf8bb3f | ||
|
|
b8eb8a90a5 | ||
|
|
bd4d89fc21 | ||
|
|
6234391229 | ||
|
|
206c185a3b | ||
|
|
7689cbbe0d | ||
|
|
b57a9351b3 | ||
|
|
f0ae9e21ae | ||
|
|
9510c92288 | ||
|
|
755f3f05d8 | ||
|
|
5d8114b475 | ||
|
|
85b39ecf99 | ||
|
|
230838c22b | ||
|
|
a7bfcdcb01 | ||
|
|
47ff630c55 | ||
|
|
70dc53bda7 | ||
|
|
7e1b433c17 | ||
|
|
ec878defab | ||
|
|
1786b70e14 | ||
|
|
7f525fa7dc | ||
|
|
8b89e03999 | ||
|
|
2693b9a42d | ||
|
|
6b336b7b2f | ||
|
|
3c0e77241d | ||
|
|
87461c7f72 | ||
|
|
a67f2b4976 | ||
|
|
8594781780 | ||
|
|
b2c8907635 | ||
|
|
05f4df1a30 | ||
|
|
35fe06a892 | ||
|
|
cd933ce6e4 | ||
|
|
0b93988450 | ||
|
|
12a323f691 | ||
|
|
9c4c211233 | ||
|
|
74ba68ff2c | ||
|
|
7273b37c16 | ||
|
|
0d4ebffc0e | ||
|
|
352b2fb4e7 | ||
|
|
6e6ef57303 | ||
|
|
b80e41503f | ||
|
|
7f28fc17ca | ||
|
|
70d4a0c022 | ||
|
|
8cfd994170 | ||
|
|
641e829e3f | ||
|
|
d202cb731d | ||
|
|
4ab7300376 | ||
|
|
18cc5e0ee8 | ||
|
|
af0cda5dbf | ||
|
|
a730a3719b | ||
|
|
3b669193f6 | ||
|
|
22cd2e3337 | ||
|
|
7e9d453a2c | ||
|
|
a4338b0d03 | ||
|
|
2021431e2f | ||
|
|
5e6a7e134f | ||
|
|
f4fadd366e | ||
|
|
a5b1b4e103 | ||
|
|
7b41b295b7 | ||
|
|
69d5f521a5 | ||
|
|
c0a55142b5 | ||
|
|
513fb3428a | ||
|
|
9a0ae549f6 | ||
|
|
4410d7f195 | ||
|
|
92aa70182d | ||
|
|
90f5864f1e | ||
|
|
e47f126bd5 | ||
|
|
ea6f70e3c5 | ||
|
|
0469aab433 | ||
|
|
ad13b5eb4e | ||
|
|
7324a4973f | ||
|
|
8bc93d23b2 | ||
|
|
c708b685e1 | ||
|
|
cbde91744f | ||
|
|
147e24204b | ||
|
|
13c50e428f | ||
|
|
8403ccd3da | ||
|
|
e92bd61545 | ||
|
|
8215e0221a | ||
|
|
4b44d6fb83 | ||
|
|
0ae3e83ce4 | ||
|
|
f4b573379d | ||
|
|
862ca375ee | ||
|
|
530de6741b | ||
|
|
35c1ff9014 | ||
|
|
3f4caed922 | ||
|
|
09303ab2fb | ||
|
|
df1ac8e1e2 | ||
|
|
7a55c91349 | ||
|
|
c491dfdd3a | ||
|
|
d9cc21f761 | ||
|
|
06207145af | ||
|
|
b195e3435f | ||
|
|
34b4577c0b | ||
|
|
8034e5bbcb | ||
|
|
df7a30bd14 | ||
|
|
d9dfacaaf4 | ||
|
|
d43767b945 | ||
|
|
cb36754c46 | ||
|
|
7e18aafe20 | ||
|
|
f7b079b1b4 | ||
|
|
72ffedead7 | ||
|
|
cf3a501562 | ||
|
|
7becdc3034 | ||
|
|
f0d599781d | ||
|
|
3386105048 | ||
|
|
3b8fb70db1 | ||
|
|
c3ae146580 | ||
|
|
0d079f0d89 | ||
|
|
9f5a90ee9c | ||
|
|
a5307fd8cc | ||
|
|
180589144a | ||
|
|
d9c1867bd7 | ||
|
|
da37d649ec | ||
|
|
4204b4af90 | ||
|
|
941650f668 | ||
|
|
9c0c6c1bd6 | ||
|
|
bd0ddafcd0 | ||
|
|
19f5e92a74 | ||
|
|
3202c38061 | ||
|
|
e35a8c942b | ||
|
|
31811eb91e | ||
|
|
b9316a4112 | ||
|
|
b7abd878ac | ||
|
|
38c2c47789 | ||
|
|
c03778ec8b | ||
|
|
29b0850a94 | ||
|
|
712fde46eb | ||
|
|
c2e79ca5a7 | ||
|
|
c3a52b3989 | ||
|
|
7213d82f1b | ||
|
|
5bcad69cf7 | ||
|
|
c9a487fa4d | ||
|
|
3804a46f3b | ||
|
|
52c0bb5302 | ||
|
|
8aa19e6420 | ||
|
|
4d1c7a3884 | ||
|
|
25f2c057b7 | ||
|
|
010be05920 | ||
|
|
4c465850a2 | ||
|
|
8313dfaeb9 | ||
|
|
873f2b2814 | ||
|
|
e53c90f8f0 | ||
|
|
9499ea8ca9 | ||
|
|
f6c09109ba | ||
|
|
273b5768c4 | ||
|
|
ee13cf7dd9 | ||
|
|
fecbae761e | ||
|
|
e0ee89bdd9 | ||
|
|
833c1f22a3 | ||
|
|
6fed6c8d30 | ||
|
|
94cdaf5314 | ||
|
|
f83ae27352 | ||
|
|
6badf047c3 | ||
|
|
47de9ad15f | ||
|
|
09b91cc663 | ||
|
|
ded16549f7 | ||
|
|
c89e47577b | ||
|
|
bb50beb7ab | ||
|
|
e4cd4d64d7 | ||
|
|
5675fc51a0 | ||
|
|
c7438c4aff | ||
|
|
4a6a3da36c | ||
|
|
a657c332b1 | ||
|
|
cc9cd3fc14 | ||
|
|
234258a077 | ||
|
|
13cda80ee6 | ||
|
|
f6e142baf5 | ||
|
|
ddf1f9bcd5 | ||
|
|
aa950669f6 | ||
|
|
dacd5d3e6b | ||
|
|
e76ccba2f7 | ||
|
|
3933819d53 | ||
|
|
99019c2b1f | ||
|
|
4bf5eb398b | ||
|
|
dbfbac62c0 | ||
|
|
7685293da4 | ||
|
|
ee9c328606 | ||
|
|
cb7790ccba | ||
|
|
6556fcc531 | ||
|
|
178391e7b2 | ||
|
|
18922a1c6d | ||
|
|
5e9e26fa67 | ||
|
|
f5430f9151 | ||
|
|
4dfdf2f92f | ||
|
|
e4d283cc99 | ||
|
|
8ee64d22b3 | ||
|
|
10e3e80042 | ||
|
|
f77a208e2c | ||
|
|
9366dbb96e | ||
|
|
550b17552b | ||
|
|
bec307d0e9 | ||
|
|
93c751f6eb |
81
.github/actions/setup-build/action.yml
vendored
Normal file
81
.github/actions/setup-build/action.yml
vendored
Normal file
@@ -0,0 +1,81 @@
|
||||
name: Setup Build Environment
|
||||
description: Common build environment setup steps
|
||||
|
||||
inputs:
|
||||
nodejs-version:
|
||||
description: Node.js version
|
||||
required: true
|
||||
setup-python:
|
||||
description: Set up Python
|
||||
required: false
|
||||
default: "false"
|
||||
setup-docker:
|
||||
description: Set up Docker QEMU and Buildx
|
||||
required: false
|
||||
default: "true"
|
||||
setup-sccache:
|
||||
description: Configure sccache for GitHub Actions
|
||||
required: false
|
||||
default: "true"
|
||||
free-space:
|
||||
description: Remove unnecessary packages to free disk space
|
||||
required: false
|
||||
default: "true"
|
||||
|
||||
runs:
|
||||
using: composite
|
||||
steps:
|
||||
- name: Free disk space
|
||||
if: inputs.free-space == 'true'
|
||||
shell: bash
|
||||
run: |
|
||||
sudo apt-get remove --purge -y azure-cli || true
|
||||
sudo apt-get remove --purge -y firefox || true
|
||||
sudo apt-get remove --purge -y ghc-* || true
|
||||
sudo apt-get remove --purge -y google-cloud-sdk || true
|
||||
sudo apt-get remove --purge -y google-chrome-stable || true
|
||||
sudo apt-get remove --purge -y powershell || true
|
||||
sudo apt-get remove --purge -y php* || true
|
||||
sudo apt-get remove --purge -y ruby* || true
|
||||
sudo apt-get remove --purge -y mono-* || true
|
||||
sudo apt-get autoremove -y
|
||||
sudo apt-get clean
|
||||
sudo rm -rf /usr/lib/jvm
|
||||
sudo rm -rf /usr/local/.ghcup
|
||||
sudo rm -rf /usr/local/lib/android
|
||||
sudo rm -rf /usr/share/dotnet
|
||||
sudo rm -rf /usr/share/swift
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
|
||||
|
||||
# BuildJet runners lack /opt/hostedtoolcache, which setup-python and setup-qemu expect
|
||||
- name: Ensure hostedtoolcache exists
|
||||
shell: bash
|
||||
run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
|
||||
|
||||
- name: Set up Python
|
||||
if: inputs.setup-python == 'true'
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ inputs.nodejs-version }}
|
||||
cache: npm
|
||||
cache-dependency-path: "**/package-lock.json"
|
||||
|
||||
- name: Set up Docker QEMU
|
||||
if: inputs.setup-docker == 'true'
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
if: inputs.setup-docker == 'true'
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Configure sccache
|
||||
if: inputs.setup-sccache == 'true'
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
|
||||
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
|
||||
88
.github/workflows/start-cli.yaml
vendored
Normal file
88
.github/workflows/start-cli.yaml
vendored
Normal file
@@ -0,0 +1,88 @@
|
||||
name: start-cli
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
environment:
|
||||
type: choice
|
||||
description: Environment
|
||||
options:
|
||||
- NONE
|
||||
- dev
|
||||
- unstable
|
||||
- dev-unstable
|
||||
runner:
|
||||
type: choice
|
||||
description: Runner
|
||||
options:
|
||||
- standard
|
||||
- fast
|
||||
arch:
|
||||
type: choice
|
||||
description: Architecture
|
||||
options:
|
||||
- ALL
|
||||
- x86_64
|
||||
- x86_64-apple
|
||||
- aarch64
|
||||
- aarch64-apple
|
||||
- riscv64
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- next/*
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- next/*
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
NODEJS_VERSION: "24.11.0"
|
||||
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
|
||||
|
||||
jobs:
|
||||
compile:
|
||||
name: Build Debian Package
|
||||
if: github.event.pull_request.draft != true
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
triple: >-
|
||||
${{
|
||||
fromJson('{
|
||||
"x86_64": ["x86_64-unknown-linux-musl"],
|
||||
"x86_64-apple": ["x86_64-apple-darwin"],
|
||||
"aarch64": ["aarch64-unknown-linux-musl"],
|
||||
"aarch64-apple": ["aarch64-apple-darwin"],
|
||||
"riscv64": ["riscv64gc-unknown-linux-musl"],
|
||||
"ALL": ["x86_64-unknown-linux-musl", "x86_64-apple-darwin", "aarch64-unknown-linux-musl", "aarch64-apple-darwin", "riscv64gc-unknown-linux-musl"]
|
||||
}')[github.event.inputs.arch || 'ALL']
|
||||
}}
|
||||
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
||||
steps:
|
||||
- name: Mount tmpfs
|
||||
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||
run: sudo mount -t tmpfs tmpfs .
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: ./.github/actions/setup-build
|
||||
with:
|
||||
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||
|
||||
- name: Make
|
||||
run: TARGET=${{ matrix.triple }} make cli
|
||||
env:
|
||||
PLATFORM: ${{ matrix.triple }}
|
||||
SCCACHE_GHA_ENABLED: on
|
||||
SCCACHE_GHA_VERSION: 0
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: start-cli_${{ matrix.triple }}
|
||||
path: core/target/${{ matrix.triple }}/release/start-cli
|
||||
173
.github/workflows/start-registry.yaml
vendored
Normal file
173
.github/workflows/start-registry.yaml
vendored
Normal file
@@ -0,0 +1,173 @@
|
||||
name: start-registry
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
environment:
|
||||
type: choice
|
||||
description: Environment
|
||||
options:
|
||||
- NONE
|
||||
- dev
|
||||
- unstable
|
||||
- dev-unstable
|
||||
runner:
|
||||
type: choice
|
||||
description: Runner
|
||||
options:
|
||||
- standard
|
||||
- fast
|
||||
arch:
|
||||
type: choice
|
||||
description: Architecture
|
||||
options:
|
||||
- ALL
|
||||
- x86_64
|
||||
- aarch64
|
||||
- riscv64
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- next/*
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- next/*
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
NODEJS_VERSION: "24.11.0"
|
||||
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
|
||||
|
||||
jobs:
|
||||
compile:
|
||||
name: Build Debian Package
|
||||
if: github.event.pull_request.draft != true
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
arch: >-
|
||||
${{
|
||||
fromJson('{
|
||||
"x86_64": ["x86_64"],
|
||||
"aarch64": ["aarch64"],
|
||||
"riscv64": ["riscv64"],
|
||||
"ALL": ["x86_64", "aarch64", "riscv64"]
|
||||
}')[github.event.inputs.arch || 'ALL']
|
||||
}}
|
||||
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
||||
steps:
|
||||
- name: Mount tmpfs
|
||||
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||
run: sudo mount -t tmpfs tmpfs .
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: ./.github/actions/setup-build
|
||||
with:
|
||||
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||
|
||||
- name: Make
|
||||
run: make registry-deb
|
||||
env:
|
||||
PLATFORM: ${{ matrix.arch }}
|
||||
SCCACHE_GHA_ENABLED: on
|
||||
SCCACHE_GHA_VERSION: 0
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: start-registry_${{ matrix.arch }}.deb
|
||||
path: results/start-registry-*_${{ matrix.arch }}.deb
|
||||
|
||||
create-image:
|
||||
name: Create Docker Image
|
||||
needs: [compile]
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
||||
steps:
|
||||
- name: Cleaning up unnecessary files
|
||||
run: |
|
||||
sudo apt-get remove --purge -y google-chrome-stable firefox mono-devel
|
||||
sudo apt-get autoremove -y
|
||||
sudo apt-get clean
|
||||
|
||||
- run: |
|
||||
sudo mount -t tmpfs tmpfs .
|
||||
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||
|
||||
- name: Set up docker QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: "Login to GitHub Container Registry"
|
||||
uses: docker/login-action@v3
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{github.actor}}
|
||||
password: ${{secrets.GITHUB_TOKEN}}
|
||||
|
||||
- name: Docker meta
|
||||
id: meta
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
images: ghcr.io/start9labs/startos-registry
|
||||
tags: |
|
||||
type=raw,value=${{ github.ref_name }}
|
||||
|
||||
- name: Download debian package
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
pattern: start-registry_*.deb
merge-multiple: true
|
||||
|
||||
- name: Map matrix.arch to docker platform
|
||||
run: |
|
||||
platforms=""
|
||||
for deb in *.deb; do
|
||||
filename=$(basename "$deb" .deb)
|
||||
arch="${filename#*_}"
|
||||
case "$arch" in
|
||||
x86_64)
|
||||
platform="linux/amd64"
|
||||
;;
|
||||
aarch64)
|
||||
platform="linux/arm64"
|
||||
;;
|
||||
riscv64)
|
||||
platform="linux/riscv64"
|
||||
;;
|
||||
*)
|
||||
echo "Unknown architecture: $arch" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
if [ -z "$platforms" ]; then
|
||||
platforms="$platform"
|
||||
else
|
||||
platforms="$platforms,$platform"
|
||||
fi
|
||||
done
|
||||
echo "DOCKER_PLATFORM=$platforms" >> "$GITHUB_ENV"
|
||||
|
||||
- run: |
|
||||
cat | docker buildx build --platform "$DOCKER_PLATFORM" --push -t ${{ steps.meta.outputs.tags }} -f - . << 'EOF'
|
||||
FROM debian:trixie
|
||||
|
||||
ADD *.deb ./
|
||||
|
||||
RUN apt-get install -y ./*_$(uname -m).deb && rm *.deb
|
||||
|
||||
VOLUME /var/lib/startos
|
||||
|
||||
ENV RUST_LOG=startos=debug
|
||||
|
||||
ENTRYPOINT ["start-registryd"]
|
||||
|
||||
EOF
|
||||
84
.github/workflows/start-tunnel.yaml
vendored
Normal file
84
.github/workflows/start-tunnel.yaml
vendored
Normal file
@@ -0,0 +1,84 @@
|
||||
name: start-tunnel
|
||||
|
||||
on:
|
||||
workflow_call:
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
environment:
|
||||
type: choice
|
||||
description: Environment
|
||||
options:
|
||||
- NONE
|
||||
- dev
|
||||
- unstable
|
||||
- dev-unstable
|
||||
runner:
|
||||
type: choice
|
||||
description: Runner
|
||||
options:
|
||||
- standard
|
||||
- fast
|
||||
arch:
|
||||
type: choice
|
||||
description: Architecture
|
||||
options:
|
||||
- ALL
|
||||
- x86_64
|
||||
- aarch64
|
||||
- riscv64
|
||||
push:
|
||||
branches:
|
||||
- master
|
||||
- next/*
|
||||
pull_request:
|
||||
branches:
|
||||
- master
|
||||
- next/*
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
NODEJS_VERSION: "24.11.0"
|
||||
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
|
||||
|
||||
jobs:
|
||||
compile:
|
||||
name: Build Debian Package
|
||||
if: github.event.pull_request.draft != true
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
arch: >-
|
||||
${{
|
||||
fromJson('{
|
||||
"x86_64": ["x86_64"],
|
||||
"aarch64": ["aarch64"],
|
||||
"riscv64": ["riscv64"],
|
||||
"ALL": ["x86_64", "aarch64", "riscv64"]
|
||||
}')[github.event.inputs.arch || 'ALL']
|
||||
}}
|
||||
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
||||
steps:
|
||||
- name: Mount tmpfs
|
||||
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||
run: sudo mount -t tmpfs tmpfs .
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
- uses: ./.github/actions/setup-build
|
||||
with:
|
||||
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||
|
||||
- name: Make
|
||||
run: make tunnel-deb
|
||||
env:
|
||||
PLATFORM: ${{ matrix.arch }}
|
||||
SCCACHE_GHA_ENABLED: on
|
||||
SCCACHE_GHA_VERSION: 0
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: start-tunnel_${{ matrix.arch }}.deb
|
||||
path: results/start-tunnel-*_${{ matrix.arch }}.deb
|
||||
165
.github/workflows/startos-iso.yaml
vendored
165
.github/workflows/startos-iso.yaml
vendored
@@ -27,7 +27,8 @@ on:
|
||||
- x86_64-nonfree
|
||||
- aarch64
|
||||
- aarch64-nonfree
|
||||
- raspberrypi
|
||||
# - raspberrypi
|
||||
- riscv64
|
||||
deploy:
|
||||
type: choice
|
||||
description: Deploy
|
||||
@@ -44,13 +45,18 @@ on:
|
||||
- master
|
||||
- next/*
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
NODEJS_VERSION: "20.16.0"
|
||||
NODEJS_VERSION: "24.11.0"
|
||||
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
|
||||
|
||||
jobs:
|
||||
compile:
|
||||
name: Compile Base Binaries
|
||||
if: github.event.pull_request.draft != true
|
||||
strategy:
|
||||
fail-fast: true
|
||||
matrix:
|
||||
@@ -62,39 +68,45 @@ jobs:
|
||||
"aarch64": ["aarch64"],
|
||||
"aarch64-nonfree": ["aarch64"],
|
||||
"raspberrypi": ["aarch64"],
|
||||
"ALL": ["x86_64", "aarch64"]
|
||||
"riscv64": ["riscv64"],
|
||||
"ALL": ["x86_64", "aarch64", "riscv64"]
|
||||
}')[github.event.inputs.platform || 'ALL']
|
||||
}}
|
||||
runs-on: ${{ fromJson('["ubuntu-22.04", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
|
||||
runs-on: >-
|
||||
${{
|
||||
fromJson(
|
||||
format(
|
||||
'["{0}", "{1}"]',
|
||||
fromJson('{
|
||||
"x86_64": "ubuntu-latest",
|
||||
"aarch64": "ubuntu-24.04-arm",
|
||||
"riscv64": "ubuntu-latest"
|
||||
}')[matrix.arch],
|
||||
fromJson('{
|
||||
"x86_64": "buildjet-32vcpu-ubuntu-2204",
|
||||
"aarch64": "buildjet-32vcpu-ubuntu-2204-arm",
|
||||
"riscv64": "buildjet-32vcpu-ubuntu-2204"
|
||||
}')[matrix.arch]
|
||||
)
|
||||
)[github.event.inputs.runner == 'fast']
|
||||
}}
|
||||
steps:
|
||||
- run: |
|
||||
sudo mount -t tmpfs tmpfs .
|
||||
- name: Mount tmpfs
|
||||
if: ${{ github.event.inputs.runner == 'fast' }}
|
||||
|
||||
run: sudo mount -t tmpfs tmpfs .
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
- uses: ./.github/actions/setup-build
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: ${{ env.NODEJS_VERSION }}
|
||||
|
||||
- name: Set up docker QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- name: Set up system dependencies
|
||||
run: sudo apt-get update && sudo apt-get install -y qemu-user-static systemd-container squashfuse
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||
setup-python: "true"
|
||||
|
||||
- name: Make
|
||||
run: make ARCH=${{ matrix.arch }} compiled-${{ matrix.arch }}.tar
|
||||
env:
|
||||
SCCACHE_GHA_ENABLED: on
|
||||
SCCACHE_GHA_VERSION: 0
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
@@ -106,13 +118,14 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
# TODO: re-add "raspberrypi" to the platform list below
|
||||
platform: >-
|
||||
${{
|
||||
fromJson(
|
||||
format(
|
||||
'[
|
||||
["{0}"],
|
||||
["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "raspberrypi"]
|
||||
["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "riscv64"]
|
||||
]',
|
||||
github.event.inputs.platform || 'ALL'
|
||||
)
|
||||
@@ -122,13 +135,22 @@ jobs:
|
||||
${{
|
||||
fromJson(
|
||||
format(
|
||||
'["ubuntu-22.04", "{0}"]',
|
||||
'["{0}", "{1}"]',
|
||||
fromJson('{
|
||||
"x86_64": "ubuntu-latest",
|
||||
"x86_64-nonfree": "ubuntu-latest",
|
||||
"aarch64": "ubuntu-24.04-arm",
|
||||
"aarch64-nonfree": "ubuntu-24.04-arm",
|
||||
"raspberrypi": "ubuntu-24.04-arm",
|
||||
"riscv64": "ubuntu-24.04-arm",
|
||||
}')[matrix.platform],
|
||||
fromJson('{
|
||||
"x86_64": "buildjet-8vcpu-ubuntu-2204",
|
||||
"x86_64-nonfree": "buildjet-8vcpu-ubuntu-2204",
|
||||
"aarch64": "buildjet-8vcpu-ubuntu-2204-arm",
|
||||
"aarch64-nonfree": "buildjet-8vcpu-ubuntu-2204-arm",
|
||||
"raspberrypi": "buildjet-8vcpu-ubuntu-2204-arm",
|
||||
"riscv64": "buildjet-8vcpu-ubuntu-2204",
|
||||
}')[matrix.platform]
|
||||
)
|
||||
)[github.event.inputs.runner == 'fast']
|
||||
@@ -142,39 +164,42 @@ jobs:
|
||||
"aarch64": "aarch64",
|
||||
"aarch64-nonfree": "aarch64",
|
||||
"raspberrypi": "aarch64",
|
||||
"riscv64": "riscv64",
|
||||
}')[matrix.platform]
|
||||
}}
|
||||
steps:
|
||||
- name: Free space
|
||||
run: rm -rf /opt/hostedtoolcache*
|
||||
run: |
|
||||
sudo apt-get remove --purge -y azure-cli || true
|
||||
sudo apt-get remove --purge -y firefox || true
|
||||
sudo apt-get remove --purge -y ghc-* || true
|
||||
sudo apt-get remove --purge -y google-cloud-sdk || true
|
||||
sudo apt-get remove --purge -y google-chrome-stable || true
|
||||
sudo apt-get remove --purge -y powershell || true
|
||||
sudo apt-get remove --purge -y php* || true
|
||||
sudo apt-get remove --purge -y ruby* || true
|
||||
sudo apt-get remove --purge -y mono-* || true
|
||||
sudo apt-get autoremove -y
|
||||
sudo apt-get clean
|
||||
sudo rm -rf /usr/lib/jvm # All JDKs
|
||||
sudo rm -rf /usr/local/.ghcup # Haskell toolchain
|
||||
sudo rm -rf /usr/local/lib/android # Android SDK/NDK, emulator
|
||||
sudo rm -rf /usr/share/dotnet # .NET SDKs
|
||||
sudo rm -rf /usr/share/swift # Swift toolchain (if present)
|
||||
sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
|
||||
if: ${{ github.event.inputs.runner != 'fast' }}
|
||||
|
||||
# BuildJet runners lack /opt/hostedtoolcache, which setup-qemu expects
|
||||
- name: Ensure hostedtoolcache exists
|
||||
run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
|
||||
|
||||
- name: Set up docker QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.x"
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y qemu-user-static
|
||||
wget https://deb.debian.org/debian/pool/main/d/debspawn/debspawn_0.6.2-1_all.deb
|
||||
sha256sum ./debspawn_0.6.2-1_all.deb | grep 37ef27458cb1e35e8bce4d4f639b06b4b3866fc0b9191ec6b9bd157afd06a817
|
||||
sudo apt-get install -y ./debspawn_0.6.2-1_all.deb
|
||||
|
||||
- name: Configure debspawn
|
||||
run: |
|
||||
sudo mkdir -p /etc/debspawn/
|
||||
echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml
|
||||
sudo mkdir -p /var/tmp/debspawn
|
||||
|
||||
- run: sudo mount -t tmpfs tmpfs /var/tmp/debspawn
|
||||
if: ${{ github.event.inputs.runner == 'fast' && (matrix.platform == 'x86_64' || matrix.platform == 'x86_64-nonfree') }}
|
||||
|
||||
- name: Download compiled artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
with:
|
||||
@@ -187,22 +212,19 @@ jobs:
|
||||
run: |
|
||||
mkdir -p web/node_modules
|
||||
mkdir -p web/dist/raw
|
||||
mkdir -p core/startos/bindings
|
||||
mkdir -p core/bindings
|
||||
mkdir -p sdk/base/lib/osBindings
|
||||
mkdir -p container-runtime/node_modules
|
||||
mkdir -p container-runtime/dist
|
||||
mkdir -p container-runtime/dist/node_modules
|
||||
mkdir -p core/startos/bindings
|
||||
mkdir -p sdk/dist
|
||||
mkdir -p sdk/baseDist
|
||||
mkdir -p patch-db/client/node_modules
|
||||
mkdir -p patch-db/client/dist
|
||||
mkdir -p web/.angular
|
||||
mkdir -p web/dist/raw/ui
|
||||
mkdir -p web/dist/raw/install-wizard
|
||||
mkdir -p web/dist/raw/setup-wizard
|
||||
mkdir -p web/dist/static/ui
|
||||
mkdir -p web/dist/static/install-wizard
|
||||
mkdir -p web/dist/static/setup-wizard
|
||||
PLATFORM=${{ matrix.platform }} make -t compiled-${{ env.ARCH }}.tar
|
||||
|
||||
@@ -232,40 +254,3 @@ jobs:
|
||||
name: ${{ matrix.platform }}.img
|
||||
path: results/*.img
|
||||
if: ${{ matrix.platform == 'raspberrypi' }}
|
||||
|
||||
- name: Upload OTA to registry
|
||||
run: >-
|
||||
PLATFORM=${{ matrix.platform }} make upload-ota TARGET="${{
|
||||
fromJson('{
|
||||
"alpha": "alpha-registry-x.start9.com",
|
||||
"beta": "beta-registry.start9.com",
|
||||
}')[github.event.inputs.deploy]
|
||||
}}" KEY="${{
|
||||
fromJson(
|
||||
format('{{
|
||||
"alpha": "{0}",
|
||||
"beta": "{1}",
|
||||
}}', secrets.ALPHA_INDEX_KEY, secrets.BETA_INDEX_KEY)
|
||||
)[github.event.inputs.deploy]
|
||||
}}"
|
||||
if: ${{ github.event.inputs.deploy != '' && github.event.inputs.deploy != 'NONE' }}
|
||||
|
||||
index:
|
||||
if: ${{ github.event.inputs.deploy != '' && github.event.inputs.deploy != 'NONE' }}
|
||||
needs: [image]
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- run: >-
|
||||
curl "https://${{
|
||||
fromJson('{
|
||||
"alpha": "alpha-registry-x.start9.com",
|
||||
"beta": "beta-registry.start9.com",
|
||||
}')[github.event.inputs.deploy]
|
||||
}}:8443/resync.cgi?key=${{
|
||||
fromJson(
|
||||
format('{{
|
||||
"alpha": "{0}",
|
||||
"beta": "{1}",
|
||||
}}', secrets.ALPHA_INDEX_KEY, secrets.BETA_INDEX_KEY)
|
||||
)[github.event.inputs.deploy]
|
||||
}}"
|
||||
|
||||
17
.github/workflows/test.yaml
vendored
17
.github/workflows/test.yaml
vendored
@@ -10,22 +10,29 @@ on:
|
||||
- master
|
||||
- next/*
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
env:
|
||||
NODEJS_VERSION: "20.16.0"
|
||||
NODEJS_VERSION: "24.11.0"
|
||||
ENVIRONMENT: dev-unstable
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Run Automated Tests
|
||||
runs-on: ubuntu-22.04
|
||||
if: github.event.pull_request.draft != true
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
- uses: ./.github/actions/setup-build
|
||||
with:
|
||||
node-version: ${{ env.NODEJS_VERSION }}
|
||||
nodejs-version: ${{ env.NODEJS_VERSION }}
|
||||
free-space: "false"
|
||||
setup-docker: "false"
|
||||
setup-sccache: "false"
|
||||
|
||||
- name: Build And Run Tests
|
||||
run: make test
|
||||
|
||||
27
.gitignore
vendored
27
.gitignore
vendored
@@ -1,31 +1,22 @@
|
||||
.DS_Store
|
||||
.idea
|
||||
system-images/binfmt/binfmt.tar
|
||||
system-images/compat/compat.tar
|
||||
system-images/util/util.tar
|
||||
/*.img
|
||||
/*.img.gz
|
||||
/*.img.xz
|
||||
/*-raspios-bullseye-arm64-lite.img
|
||||
/*-raspios-bullseye-arm64-lite.zip
|
||||
*.img
|
||||
*.img.gz
|
||||
*.img.xz
|
||||
*.zip
|
||||
/product_key.txt
|
||||
/*_product_key.txt
|
||||
.vscode/settings.json
|
||||
deploy_web.sh
|
||||
deploy_web.sh
|
||||
secrets.db
|
||||
.vscode/
|
||||
/cargo-deps/**/*
|
||||
/PLATFORM.txt
|
||||
/ENVIRONMENT.txt
|
||||
/GIT_HASH.txt
|
||||
/VERSION.txt
|
||||
/*.deb
|
||||
/build/env/*.txt
|
||||
*.deb
|
||||
/target
|
||||
/*.squashfs
|
||||
*.squashfs
|
||||
/results
|
||||
/dpkg-workdir
|
||||
/compiled.tar
|
||||
/compiled-*.tar
|
||||
/firmware
|
||||
/tmp
|
||||
/build/lib/firmware
|
||||
tmp
|
||||
@@ -1,7 +1,6 @@
|
||||
# Contributing to StartOS
|
||||
|
||||
This guide is for contributing to the StartOS. If you are interested in packaging a service for StartOS, visit the [service packaging guide](https://docs.start9.com/latest/developer-docs/). If you are interested in promoting, providing technical support, creating tutorials, or helping in other ways, please visit the [Start9 website](https://start9.com/contribute).
|
||||
|
||||
This guide is for contributing to the StartOS. If you are interested in packaging a service for StartOS, visit the [service packaging guide](https://docs.start9.com/latest/packaging-guide/). If you are interested in promoting, providing technical support, creating tutorials, or helping in other ways, please visit the [Start9 website](https://start9.com/contribute).
|
||||
|
||||
## Collaboration
|
||||
|
||||
@@ -13,64 +12,77 @@ This guide is for contributing to the StartOS. If you are interested in packagin
|
||||
```bash
|
||||
/
|
||||
├── assets/
|
||||
├── container-runtime/
|
||||
├── core/
|
||||
├── build/
|
||||
├── debian/
|
||||
├── web/
|
||||
├── image-recipe/
|
||||
├── patch-db
|
||||
└── system-images/
|
||||
└── sdk/
|
||||
```
|
||||
|
||||
#### assets
|
||||
|
||||
screenshots for the StartOS README
|
||||
|
||||
#### container-runtime
|
||||
|
||||
A NodeJS program that dynamically loads maintainer scripts and communicates with the OS to manage packages
|
||||
|
||||
#### core
|
||||
An API, daemon (startd), CLI (start-cli), and SDK (start-sdk) that together provide the core functionality of StartOS.
|
||||
|
||||
An API, daemon (startd), and CLI (start-cli) that together provide the core functionality of StartOS.
|
||||
|
||||
#### build
|
||||
|
||||
Auxiliary files and scripts to include in deployed StartOS images
|
||||
|
||||
#### debian
|
||||
|
||||
Maintainer scripts for the StartOS Debian package
|
||||
|
||||
#### web
|
||||
|
||||
Web UIs served under various conditions and used to interact with StartOS APIs.
|
||||
|
||||
#### image-recipe
|
||||
|
||||
Scripts for building StartOS images
|
||||
|
||||
#### patch-db (submodule)
|
||||
|
||||
A diff based data store used to synchronize data between the web interfaces and server.
|
||||
|
||||
#### system-images
|
||||
Docker images that assist with creating backups.
|
||||
#### sdk
|
||||
|
||||
A typescript sdk for building start-os packages
|
||||
|
||||
## Environment Setup
|
||||
|
||||
#### Clone the StartOS repository
|
||||
|
||||
```sh
|
||||
git clone https://github.com/Start9Labs/start-os.git
|
||||
git clone https://github.com/Start9Labs/start-os.git --recurse-submodules
|
||||
cd start-os
|
||||
```
|
||||
|
||||
#### Load the PatchDB submodule
|
||||
```sh
|
||||
git submodule update --init --recursive
|
||||
```
|
||||
|
||||
#### Continue to your project of interest for additional instructions:
|
||||
|
||||
- [`core`](core/README.md)
|
||||
- [`web-interfaces`](web-interfaces/README.md)
|
||||
- [`build`](build/README.md)
|
||||
- [`patch-db`](https://github.com/Start9Labs/patch-db)
|
||||
|
||||
## Building
|
||||
|
||||
This project uses [GNU Make](https://www.gnu.org/software/make/) to build its components. To build any specific component, simply run `make <TARGET>` replacing `<TARGET>` with the name of the target you'd like to build
|
||||
|
||||
### Requirements
|
||||
|
||||
- [GNU Make](https://www.gnu.org/software/make/)
|
||||
- [Docker](https://docs.docker.com/get-docker/)
|
||||
- [NodeJS v18.15.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
|
||||
- [NodeJS v20.16.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
|
||||
- [sed](https://www.gnu.org/software/sed/)
|
||||
- [grep](https://www.gnu.org/software/grep/)
|
||||
- [awk](https://www.gnu.org/software/gawk/)
|
||||
@@ -79,41 +91,43 @@ This project uses [GNU Make](https://www.gnu.org/software/make/) to build its co
|
||||
- [brotli](https://github.com/google/brotli)
|
||||
|
||||
### Environment variables
|
||||
|
||||
- `PLATFORM`: which platform you would like to build for. Must be one of `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `raspberrypi`
|
||||
- NOTE: `nonfree` images are for including `nonfree` firmware packages in the built ISO
|
||||
- NOTE: `nonfree` images are for including `nonfree` firmware packages in the built ISO
|
||||
- `ENVIRONMENT`: a hyphen separated set of feature flags to enable
|
||||
- `dev`: enables password ssh (INSECURE!) and does not compress frontends
|
||||
- `unstable`: enables assertions that will cause errors on unexpected inconsistencies that are undesirable in production use either for performance or reliability reasons
|
||||
- `docker`: use `docker` instead of `podman`
|
||||
- `dev`: enables password ssh (INSECURE!) and does not compress frontends
|
||||
- `unstable`: enables assertions that will cause errors on unexpected inconsistencies that are undesirable in production use either for performance or reliability reasons
|
||||
- `docker`: use `docker` instead of `podman`
|
||||
- `GIT_BRANCH_AS_HASH`: set to `1` to use the current git branch name as the git hash so that the project does not need to be rebuilt on each commit
|
||||
|
||||
### Useful Make Targets
|
||||
|
||||
- `iso`: Create a full `.iso` image
|
||||
- Only possible from Debian
|
||||
- Not available for `PLATFORM=raspberrypi`
|
||||
- Additional Requirements:
|
||||
- [debspawn](https://github.com/lkhq/debspawn)
|
||||
- Only possible from Debian
|
||||
- Not available for `PLATFORM=raspberrypi`
|
||||
- Additional Requirements:
|
||||
- [debspawn](https://github.com/lkhq/debspawn)
|
||||
- `img`: Create a full `.img` image
|
||||
- Only possible from Debian
|
||||
- Only available for `PLATFORM=raspberrypi`
|
||||
- Additional Requirements:
|
||||
- [debspawn](https://github.com/lkhq/debspawn)
|
||||
- Only possible from Debian
|
||||
- Only available for `PLATFORM=raspberrypi`
|
||||
- Additional Requirements:
|
||||
- [debspawn](https://github.com/lkhq/debspawn)
|
||||
- `format`: Run automatic code formatting for the project
|
||||
- Additional Requirements:
|
||||
- [rust](https://rustup.rs/)
|
||||
- Additional Requirements:
|
||||
- [rust](https://rustup.rs/)
|
||||
- `test`: Run automated tests for the project
|
||||
- Additional Requirements:
|
||||
- [rust](https://rustup.rs/)
|
||||
- Additional Requirements:
|
||||
- [rust](https://rustup.rs/)
|
||||
- `update`: Deploy the current working project to a device over ssh as if through an over-the-air update
|
||||
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
|
||||
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
|
||||
- `reflash`: Deploy the current working project to a device over ssh as if using a live `iso` image to reflash it
|
||||
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
|
||||
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
|
||||
- `update-overlay`: Deploy the current working project to a device over ssh to the in-memory overlay without restarting it
|
||||
- WARNING: changes will be reverted after the device is rebooted
|
||||
- WARNING: changes to `init` will not take effect as the device is already initialized
|
||||
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
|
||||
- WARNING: changes will be reverted after the device is rebooted
|
||||
- WARNING: changes to `init` will not take effect as the device is already initialized
|
||||
- Requires an argument `REMOTE` which is the ssh address of the device, i.e. `start9@192.168.122.2`
|
||||
- `wormhole`: Deploy the `startbox` to a device using [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
|
||||
- When the build it complete will emit a command to paste into the shell of the device to upgrade it
|
||||
- Additional Requirements:
|
||||
- [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
|
||||
- `clean`: Delete all compiled artifacts
|
||||
- When the build it complete will emit a command to paste into the shell of the device to upgrade it
|
||||
- Additional Requirements:
|
||||
- [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
|
||||
- `clean`: Delete all compiled artifacts
|
||||
|
||||
@@ -25,15 +25,15 @@ docker buildx create --use
|
||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # proceed with default installation
|
||||
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash
|
||||
source ~/.bashrc
|
||||
nvm install 20
|
||||
nvm use 20
|
||||
nvm alias default 20 # this prevents your machine from reverting back to another version
|
||||
nvm install 24
|
||||
nvm use 24
|
||||
nvm alias default 24 # this prevents your machine from reverting back to another version
|
||||
```
|
||||
|
||||
## Cloning the repository
|
||||
|
||||
```sh
|
||||
git clone --recursive https://github.com/Start9Labs/start-os.git --branch next/minor
|
||||
git clone --recursive https://github.com/Start9Labs/start-os.git --branch next/major
|
||||
cd start-os
|
||||
```
|
||||
|
||||
|
||||
307
Makefile
307
Makefile
@@ -1,32 +1,44 @@
|
||||
PLATFORM_FILE := $(shell ./check-platform.sh)
|
||||
ENVIRONMENT_FILE := $(shell ./check-environment.sh)
|
||||
GIT_HASH_FILE := $(shell ./check-git-hash.sh)
|
||||
VERSION_FILE := $(shell ./check-version.sh)
|
||||
BASENAME := $(shell ./basename.sh)
|
||||
PLATFORM := $(shell if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi)
|
||||
ls-files = $(shell git ls-files --cached --others --exclude-standard $1)
|
||||
PROFILE = release
|
||||
|
||||
PLATFORM_FILE := $(shell ./build/env/check-platform.sh)
|
||||
ENVIRONMENT_FILE := $(shell ./build/env/check-environment.sh)
|
||||
GIT_HASH_FILE := $(shell ./build/env/check-git-hash.sh)
|
||||
VERSION_FILE := $(shell ./build/env/check-version.sh)
|
||||
BASENAME := $(shell PROJECT=startos ./build/env/basename.sh)
|
||||
PLATFORM := $(shell if [ -f $(PLATFORM_FILE) ]; then cat $(PLATFORM_FILE); else echo unknown; fi)
|
||||
ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi)
|
||||
RUST_ARCH := $(shell if [ "$(ARCH)" = "riscv64" ]; then echo riscv64gc; else echo $(ARCH); fi)
|
||||
REGISTRY_BASENAME := $(shell PROJECT=start-registry PLATFORM=$(ARCH) ./build/env/basename.sh)
|
||||
TUNNEL_BASENAME := $(shell PROJECT=start-tunnel PLATFORM=$(ARCH) ./build/env/basename.sh)
|
||||
IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi)
|
||||
WEB_UIS := web/dist/raw/ui/index.html web/dist/raw/setup-wizard/index.html web/dist/raw/install-wizard/index.html
|
||||
COMPRESSED_WEB_UIS := web/dist/static/ui/index.html web/dist/static/setup-wizard/index.html web/dist/static/install-wizard/index.html
|
||||
FIRMWARE_ROMS := ./firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json)
|
||||
BUILD_SRC := $(shell git ls-files build) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS)
|
||||
DEBIAN_SRC := $(shell git ls-files debian/)
|
||||
IMAGE_RECIPE_SRC := $(shell git ls-files image-recipe/)
|
||||
STARTD_SRC := core/startos/startd.service $(BUILD_SRC)
|
||||
COMPAT_SRC := $(shell git ls-files system-images/compat/)
|
||||
UTILS_SRC := $(shell git ls-files system-images/utils/)
|
||||
BINFMT_SRC := $(shell git ls-files system-images/binfmt/)
|
||||
CORE_SRC := $(shell git ls-files core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE)
|
||||
WEB_SHARED_SRC := $(shell git ls-files web/projects/shared) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist/index.js sdk/baseDist/package.json web/patchdb-ui-seed.json sdk/dist/package.json
|
||||
WEB_UI_SRC := $(shell git ls-files web/projects/ui)
|
||||
WEB_SETUP_WIZARD_SRC := $(shell git ls-files web/projects/setup-wizard)
|
||||
WEB_INSTALL_WIZARD_SRC := $(shell git ls-files web/projects/install-wizard)
|
||||
WEB_UIS := web/dist/raw/ui/index.html web/dist/raw/setup-wizard/index.html
|
||||
COMPRESSED_WEB_UIS := web/dist/static/ui/index.html web/dist/static/setup-wizard/index.html
|
||||
FIRMWARE_ROMS := build/lib/firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./build/lib/firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json)
|
||||
BUILD_SRC := $(call ls-files, build/lib) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS)
|
||||
IMAGE_RECIPE_SRC := $(call ls-files, build/image-recipe/)
|
||||
STARTD_SRC := core/startd.service $(BUILD_SRC)
|
||||
CORE_SRC := $(call ls-files, core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE)
|
||||
WEB_SHARED_SRC := $(call ls-files, web/projects/shared) $(call ls-files, web/projects/marketplace) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist/index.js sdk/baseDist/package.json web/patchdb-ui-seed.json sdk/dist/package.json
|
||||
WEB_UI_SRC := $(call ls-files, web/projects/ui)
|
||||
WEB_SETUP_WIZARD_SRC := $(call ls-files, web/projects/setup-wizard)
|
||||
WEB_START_TUNNEL_SRC := $(call ls-files, web/projects/start-tunnel)
|
||||
PATCH_DB_CLIENT_SRC := $(shell git ls-files --recurse-submodules patch-db/client)
|
||||
GZIP_BIN := $(shell which pigz || which gzip)
|
||||
TAR_BIN := $(shell which gtar || which tar)
|
||||
COMPILED_TARGETS := core/target/$(ARCH)-unknown-linux-musl/release/startbox core/target/$(ARCH)-unknown-linux-musl/release/containerbox system-images/compat/docker-images/$(ARCH).tar system-images/utils/docker-images/$(ARCH).tar system-images/binfmt/docker-images/$(ARCH).tar container-runtime/rootfs.$(ARCH).squashfs
|
||||
ALL_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) cargo-deps/$(ARCH)-unknown-linux-musl/release/startos-backup-fs $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo cargo-deps/aarch64-unknown-linux-musl/release/pi-beep; fi) $(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then echo cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console; fi') $(PLATFORM_FILE)
|
||||
REBUILD_TYPES = 1
|
||||
COMPILED_TARGETS := core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container container-runtime/rootfs.$(ARCH).squashfs
|
||||
STARTOS_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) target/$(RUST_ARCH)-unknown-linux-musl/release/startos-backup-fs $(PLATFORM_FILE) \
|
||||
$(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then \
|
||||
echo target/aarch64-unknown-linux-musl/release/pi-beep; \
|
||||
fi) \
|
||||
$(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then \
|
||||
echo target/$(RUST_ARCH)-unknown-linux-musl/release/flamegraph; \
|
||||
fi') \
|
||||
$(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)console($$|-) ]]; then \
|
||||
echo target/$(RUST_ARCH)-unknown-linux-musl/release/tokio-console; \
|
||||
fi')
|
||||
REGISTRY_TARGETS := core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox core/start-registryd.service
|
||||
TUNNEL_TARGETS := core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox core/start-tunneld.service
|
||||
|
||||
ifeq ($(REMOTE),)
|
||||
mkdir = mkdir -p $1
|
||||
@@ -49,23 +61,18 @@ endif
|
||||
|
||||
.DELETE_ON_ERROR:
|
||||
|
||||
.PHONY: all metadata install clean format cli uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole wormhole-deb test test-core test-sdk test-container-runtime registry
|
||||
.PHONY: all metadata install clean format install-cli cli uis ui reflash deb $(IMAGE_TYPE) squashfs wormhole wormhole-deb test test-core test-sdk test-container-runtime registry install-registry tunnel install-tunnel ts-bindings
|
||||
|
||||
all: $(ALL_TARGETS)
|
||||
all: $(STARTOS_TARGETS)
|
||||
|
||||
touch:
|
||||
touch $(ALL_TARGETS)
|
||||
touch $(STARTOS_TARGETS)
|
||||
|
||||
metadata: $(VERSION_FILE) $(PLATFORM_FILE) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE)
|
||||
|
||||
sudo:
|
||||
sudo true
|
||||
|
||||
clean:
|
||||
rm -f system-images/**/*.tar
|
||||
rm -rf system-images/compat/target
|
||||
rm -rf core/target
|
||||
rm -rf core/startos/bindings
|
||||
rm -rf core/bindings
|
||||
rm -rf web/.angular
|
||||
rm -f web/config.json
|
||||
rm -rf web/node_modules
|
||||
@@ -73,7 +80,7 @@ clean:
|
||||
rm -rf patch-db/client/node_modules
|
||||
rm -rf patch-db/client/dist
|
||||
rm -rf patch-db/target
|
||||
rm -rf cargo-deps
|
||||
rm -rf target
|
||||
rm -rf dpkg-workdir
|
||||
rm -rf image-recipe/deb
|
||||
rm -rf results
|
||||
@@ -81,14 +88,8 @@ clean:
|
||||
rm -rf container-runtime/dist
|
||||
rm -rf container-runtime/node_modules
|
||||
rm -f container-runtime/*.squashfs
|
||||
if [ -d container-runtime/tmp/combined ] && mountpoint container-runtime/tmp/combined; then sudo umount container-runtime/tmp/combined; fi
|
||||
if [ -d container-runtime/tmp/lower ] && mountpoint container-runtime/tmp/lower; then sudo umount container-runtime/tmp/lower; fi
|
||||
rm -rf container-runtime/tmp
|
||||
(cd sdk && make clean)
|
||||
rm -f ENVIRONMENT.txt
|
||||
rm -f PLATFORM.txt
|
||||
rm -f GIT_HASH.txt
|
||||
rm -f VERSION.txt
|
||||
rm -f env/*.txt
|
||||
|
||||
format:
|
||||
cd core && cargo +nightly fmt
|
||||
@@ -98,48 +99,90 @@ test: | test-core test-sdk test-container-runtime
|
||||
test-core: $(CORE_SRC) $(ENVIRONMENT_FILE)
|
||||
./core/run-tests.sh
|
||||
|
||||
test-sdk: $(shell git ls-files sdk) sdk/base/lib/osBindings/index.ts
|
||||
test-sdk: $(call ls-files, sdk) sdk/base/lib/osBindings/index.ts
|
||||
cd sdk && make test
|
||||
|
||||
test-container-runtime: container-runtime/node_modules/.package-lock.json $(shell git ls-files container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
|
||||
test-container-runtime: container-runtime/node_modules/.package-lock.json $(call ls-files, container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
|
||||
cd container-runtime && npm test
|
||||
|
||||
cli:
|
||||
cd core && ./install-cli.sh
|
||||
install-cli: $(GIT_HASH_FILE)
|
||||
./core/build/build-cli.sh --install
|
||||
|
||||
registry:
|
||||
cd core && ./build-registrybox.sh
|
||||
cli: $(GIT_HASH_FILE)
|
||||
./core/build/build-cli.sh
|
||||
|
||||
registry: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox
|
||||
|
||||
install-registry: $(REGISTRY_TARGETS)
|
||||
$(call mkdir,$(DESTDIR)/usr/bin)
|
||||
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox,$(DESTDIR)/usr/bin/start-registrybox)
|
||||
$(call ln,/usr/bin/start-registrybox,$(DESTDIR)/usr/bin/start-registryd)
|
||||
$(call ln,/usr/bin/start-registrybox,$(DESTDIR)/usr/bin/start-registry)
|
||||
|
||||
$(call mkdir,$(DESTDIR)/lib/systemd/system)
|
||||
$(call cp,core/start-registryd.service,$(DESTDIR)/lib/systemd/system/start-registryd.service)
|
||||
|
||||
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox: $(CORE_SRC) $(ENVIRONMENT_FILE)
|
||||
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-registrybox.sh
|
||||
|
||||
tunnel: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox
|
||||
|
||||
install-tunnel: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox core/start-tunneld.service
|
||||
$(call mkdir,$(DESTDIR)/usr/bin)
|
||||
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox,$(DESTDIR)/usr/bin/start-tunnelbox)
|
||||
$(call ln,/usr/bin/start-tunnelbox,$(DESTDIR)/usr/bin/start-tunneld)
|
||||
$(call ln,/usr/bin/start-tunnelbox,$(DESTDIR)/usr/bin/start-tunnel)
|
||||
|
||||
$(call mkdir,$(DESTDIR)/lib/systemd/system)
|
||||
$(call cp,core/start-tunneld.service,$(DESTDIR)/lib/systemd/system/start-tunneld.service)
|
||||
|
||||
$(call mkdir,$(DESTDIR)/usr/lib/startos/scripts)
|
||||
$(call cp,build/lib/scripts/forward-port,$(DESTDIR)/usr/lib/startos/scripts/forward-port)
|
||||
|
||||
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox: $(CORE_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) web/dist/static/start-tunnel/index.html
|
||||
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-tunnelbox.sh
|
||||
|
||||
deb: results/$(BASENAME).deb
|
||||
|
||||
debian/control: build/lib/depends build/lib/conflicts
|
||||
./debuild/control.sh
|
||||
results/$(BASENAME).deb: debian/dpkg-build.sh $(call ls-files,debian/startos) $(STARTOS_TARGETS)
|
||||
PLATFORM=$(PLATFORM) REQUIRES=debian ./build/os-compat/run-compat.sh ./debian/dpkg-build.sh
|
||||
|
||||
results/$(BASENAME).deb: dpkg-build.sh $(DEBIAN_SRC) $(ALL_TARGETS)
|
||||
PLATFORM=$(PLATFORM) ./dpkg-build.sh
|
||||
registry-deb: results/$(REGISTRY_BASENAME).deb
|
||||
|
||||
results/$(REGISTRY_BASENAME).deb: debian/dpkg-build.sh $(call ls-files,debian/start-registry) $(REGISTRY_TARGETS)
|
||||
PROJECT=start-registry PLATFORM=$(ARCH) REQUIRES=debian ./build/os-compat/run-compat.sh ./debian/dpkg-build.sh
|
||||
|
||||
tunnel-deb: results/$(TUNNEL_BASENAME).deb
|
||||
|
||||
results/$(TUNNEL_BASENAME).deb: debian/dpkg-build.sh $(call ls-files,debian/start-tunnel) $(TUNNEL_TARGETS) build/lib/scripts/forward-port
|
||||
PROJECT=start-tunnel PLATFORM=$(ARCH) REQUIRES=debian DEPENDS=wireguard-tools,iptables,conntrack ./build/os-compat/run-compat.sh ./debian/dpkg-build.sh
|
||||
|
||||
$(IMAGE_TYPE): results/$(BASENAME).$(IMAGE_TYPE)
|
||||
|
||||
squashfs: results/$(BASENAME).squashfs
|
||||
|
||||
results/$(BASENAME).$(IMAGE_TYPE) results/$(BASENAME).squashfs: $(IMAGE_RECIPE_SRC) results/$(BASENAME).deb
|
||||
./image-recipe/run-local-build.sh "results/$(BASENAME).deb"
|
||||
ARCH=$(ARCH) ./build/image-recipe/run-local-build.sh "results/$(BASENAME).deb"
|
||||
|
||||
# For creating os images. DO NOT USE
|
||||
install: $(ALL_TARGETS)
|
||||
install: $(STARTOS_TARGETS)
|
||||
$(call mkdir,$(DESTDIR)/usr/bin)
|
||||
$(call mkdir,$(DESTDIR)/usr/sbin)
|
||||
$(call cp,core/target/$(ARCH)-unknown-linux-musl/release/startbox,$(DESTDIR)/usr/bin/startbox)
|
||||
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox,$(DESTDIR)/usr/bin/startbox)
|
||||
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/startd)
|
||||
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-cli)
|
||||
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-sdk)
|
||||
if [ "$(PLATFORM)" = "raspberrypi" ]; then $(call cp,cargo-deps/aarch64-unknown-linux-musl/release/pi-beep,$(DESTDIR)/usr/bin/pi-beep); fi
|
||||
if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then $(call cp,cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console,$(DESTDIR)/usr/bin/tokio-console); fi
|
||||
$(call cp,cargo-deps/$(ARCH)-unknown-linux-musl/release/startos-backup-fs,$(DESTDIR)/usr/bin/startos-backup-fs)
|
||||
if [ "$(PLATFORM)" = "raspberrypi" ]; then $(call cp,target/aarch64-unknown-linux-musl/release/pi-beep,$(DESTDIR)/usr/bin/pi-beep); fi
|
||||
if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then \
|
||||
$(call cp,target/$(RUST_ARCH)-unknown-linux-musl/release/flamegraph,$(DESTDIR)/usr/bin/flamegraph); \
|
||||
fi
|
||||
if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)console($$|-) ]]'; then \
|
||||
$(call cp,target/$(RUST_ARCH)-unknown-linux-musl/release/tokio-console,$(DESTDIR)/usr/bin/tokio-console); \
|
||||
fi
|
||||
$(call cp,target/$(RUST_ARCH)-unknown-linux-musl/release/startos-backup-fs,$(DESTDIR)/usr/bin/startos-backup-fs)
|
||||
$(call ln,/usr/bin/startos-backup-fs,$(DESTDIR)/usr/sbin/mount.backup-fs)
|
||||
|
||||
$(call mkdir,$(DESTDIR)/lib/systemd/system)
|
||||
$(call cp,core/startos/startd.service,$(DESTDIR)/lib/systemd/system/startd.service)
|
||||
$(call cp,core/startd.service,$(DESTDIR)/lib/systemd/system/startd.service)
|
||||
|
||||
$(call mkdir,$(DESTDIR)/usr/lib)
|
||||
$(call rm,$(DESTDIR)/usr/lib/startos)
|
||||
@@ -147,30 +190,24 @@ install: $(ALL_TARGETS)
|
||||
$(call mkdir,$(DESTDIR)/usr/lib/startos/container-runtime)
|
||||
$(call cp,container-runtime/rootfs.$(ARCH).squashfs,$(DESTDIR)/usr/lib/startos/container-runtime/rootfs.squashfs)
|
||||
|
||||
$(call cp,PLATFORM.txt,$(DESTDIR)/usr/lib/startos/PLATFORM.txt)
|
||||
$(call cp,ENVIRONMENT.txt,$(DESTDIR)/usr/lib/startos/ENVIRONMENT.txt)
|
||||
$(call cp,GIT_HASH.txt,$(DESTDIR)/usr/lib/startos/GIT_HASH.txt)
|
||||
$(call cp,VERSION.txt,$(DESTDIR)/usr/lib/startos/VERSION.txt)
|
||||
$(call cp,build/env/PLATFORM.txt,$(DESTDIR)/usr/lib/startos/PLATFORM.txt)
|
||||
$(call cp,build/env/ENVIRONMENT.txt,$(DESTDIR)/usr/lib/startos/ENVIRONMENT.txt)
|
||||
$(call cp,build/env/GIT_HASH.txt,$(DESTDIR)/usr/lib/startos/GIT_HASH.txt)
|
||||
$(call cp,build/env/VERSION.txt,$(DESTDIR)/usr/lib/startos/VERSION.txt)
|
||||
|
||||
$(call mkdir,$(DESTDIR)/usr/lib/startos/system-images)
|
||||
$(call cp,system-images/compat/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/compat.tar)
|
||||
$(call cp,system-images/utils/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/utils.tar)
|
||||
|
||||
$(call cp,firmware/$(PLATFORM),$(DESTDIR)/usr/lib/startos/firmware)
|
||||
|
||||
update-overlay: $(ALL_TARGETS)
|
||||
update-overlay: $(STARTOS_TARGETS)
|
||||
@echo "\033[33m!!! THIS WILL ONLY REFLASH YOUR DEVICE IN MEMORY !!!\033[0m"
|
||||
@echo "\033[33mALL CHANGES WILL BE REVERTED IF YOU RESTART THE DEVICE\033[0m"
|
||||
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||
@if [ "`ssh $(REMOTE) 'cat /usr/lib/startos/VERSION.txt'`" != "`cat ./VERSION.txt`" ]; then >&2 echo "StartOS requires migrations: update-overlay is unavailable." && false; fi
|
||||
@if [ "`ssh $(REMOTE) 'cat /usr/lib/startos/VERSION.txt'`" != "`cat $(VERSION_FILE)`" ]; then >&2 echo "StartOS requires migrations: update-overlay is unavailable." && false; fi
|
||||
$(call ssh,"sudo systemctl stop startd")
|
||||
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) PLATFORM=$(PLATFORM)
|
||||
$(call ssh,"sudo systemctl start startd")
|
||||
|
||||
wormhole: core/target/$(ARCH)-unknown-linux-musl/release/startbox
|
||||
wormhole: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox
|
||||
@echo "Paste the following command into the shell of your StartOS server:"
|
||||
@echo
|
||||
@wormhole send core/target/$(ARCH)-unknown-linux-musl/release/startbox 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade \"cd /usr/bin && rm startbox && wormhole receive --accept-file %s && chmod +x startbox\"\n", $$3 }'
|
||||
@wormhole send core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade \"cd /usr/bin && rm startbox && wormhole receive --accept-file %s && chmod +x startbox\"\n", $$3 }'
|
||||
|
||||
wormhole-deb: results/$(BASENAME).deb
|
||||
@echo "Paste the following command into the shell of your StartOS server:"
|
||||
@@ -182,18 +219,18 @@ wormhole-squashfs: results/$(BASENAME).squashfs
|
||||
$(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}'))
|
||||
@echo "Paste the following command into the shell of your StartOS server:"
|
||||
@echo
|
||||
@wormhole send results/$(BASENAME).squashfs 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo sh -c '"'"'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE) && cd /media/startos/images && wormhole receive --accept-file %s && mv $(BASENAME).squashfs $(SQFS_SUM).rootfs && ln -rsf ./$(SQFS_SUM).rootfs ../config/current.rootfs && sync && reboot'"'"'\n", $$3 }'
|
||||
@wormhole send results/$(BASENAME).squashfs 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo sh -c '"'"'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE) && /usr/lib/startos/scripts/prune-boot && cd /media/startos/images && wormhole receive --accept-file %s && CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/upgrade ./$(BASENAME).squashfs'"'"'\n", $$3 }'
|
||||
|
||||
update: $(ALL_TARGETS)
|
||||
update: $(STARTOS_TARGETS)
|
||||
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
|
||||
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM)
|
||||
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"')
|
||||
|
||||
update-startbox: core/target/$(ARCH)-unknown-linux-musl/release/startbox # only update binary (faster than full update)
|
||||
update-startbox: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox # only update binary (faster than full update)
|
||||
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
|
||||
$(call cp,core/target/$(ARCH)-unknown-linux-musl/release/startbox,/media/startos/next/usr/bin/startbox)
|
||||
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox,/media/startos/next/usr/bin/startbox)
|
||||
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync true')
|
||||
|
||||
update-deb: results/$(BASENAME).deb # better than update, but only available from debian
|
||||
@@ -208,11 +245,11 @@ update-squashfs: results/$(BASENAME).squashfs
|
||||
$(eval SQFS_SUM := $(shell b3sum results/$(BASENAME).squashfs))
|
||||
$(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}'))
|
||||
$(call ssh,'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE)')
|
||||
$(call cp,results/$(BASENAME).squashfs,/media/startos/images/$(SQFS_SUM).rootfs)
|
||||
$(call ssh,'sudo ln -rsf /media/startos/images/$(SQFS_SUM).rootfs /media/startos/config/current.rootfs')
|
||||
$(call ssh,'sudo reboot')
|
||||
$(call ssh,'/usr/lib/startos/scripts/prune-boot')
|
||||
$(call cp,results/$(BASENAME).squashfs,/media/startos/images/next.rootfs)
|
||||
$(call ssh,'sudo CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/upgrade /media/startos/images/next.rootfs')
|
||||
|
||||
emulate-reflash: $(ALL_TARGETS)
|
||||
emulate-reflash: $(STARTOS_TARGETS)
|
||||
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
|
||||
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM)
|
||||
@@ -220,67 +257,65 @@ emulate-reflash: $(ALL_TARGETS)
|
||||
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"')
|
||||
|
||||
upload-ota: results/$(BASENAME).squashfs
|
||||
TARGET=$(TARGET) KEY=$(KEY) ./upload-ota.sh
|
||||
TARGET=$(TARGET) KEY=$(KEY) ./build/upload-ota.sh
|
||||
|
||||
container-runtime/debian.$(ARCH).squashfs:
|
||||
container-runtime/debian.$(ARCH).squashfs: ./container-runtime/download-base-image.sh
|
||||
ARCH=$(ARCH) ./container-runtime/download-base-image.sh
|
||||
|
||||
container-runtime/node_modules/.package-lock.json: container-runtime/package.json container-runtime/package-lock.json sdk/dist/package.json
|
||||
container-runtime/package-lock.json: sdk/dist/package.json
|
||||
npm --prefix container-runtime i
|
||||
touch container-runtime/package-lock.json
|
||||
|
||||
container-runtime/node_modules/.package-lock.json: container-runtime/package-lock.json
|
||||
npm --prefix container-runtime ci
|
||||
touch container-runtime/node_modules/.package-lock.json
|
||||
|
||||
sdk/base/lib/osBindings/index.ts: $(shell if [ "$(REBUILD_TYPES)" -ne 0 ]; then echo core/startos/bindings/index.ts; fi)
|
||||
ts-bindings: core/bindings/index.ts
|
||||
mkdir -p sdk/base/lib/osBindings
|
||||
rsync -ac --delete core/startos/bindings/ sdk/base/lib/osBindings/
|
||||
touch sdk/base/lib/osBindings/index.ts
|
||||
rsync -ac --delete core/bindings/ sdk/base/lib/osBindings/
|
||||
|
||||
core/startos/bindings/index.ts: $(shell git ls-files core) $(ENVIRONMENT_FILE)
|
||||
rm -rf core/startos/bindings
|
||||
./core/build-ts.sh
|
||||
ls core/startos/bindings/*.ts | sed 's/core\/startos\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' | grep -v '"./index"' | tee core/startos/bindings/index.ts
|
||||
npm --prefix sdk exec -- prettier --config ./sdk/base/package.json -w ./core/startos/bindings/*.ts
|
||||
touch core/startos/bindings/index.ts
|
||||
core/bindings/index.ts: $(call ls-files, core) $(ENVIRONMENT_FILE)
|
||||
rm -rf core/bindings
|
||||
./core/build/build-ts.sh
|
||||
ls core/bindings/*.ts | sed 's/core\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' | grep -v '"./index"' | tee core/bindings/index.ts
|
||||
npm --prefix sdk exec -- prettier --config ./sdk/base/package.json -w ./core/bindings/*.ts
|
||||
touch core/bindings/index.ts
|
||||
|
||||
sdk/dist/package.json sdk/baseDist/package.json: $(shell git ls-files sdk) sdk/base/lib/osBindings/index.ts
|
||||
sdk/dist/package.json sdk/baseDist/package.json: $(call ls-files, sdk) sdk/base/lib/osBindings/index.ts
|
||||
(cd sdk && make bundle)
|
||||
touch sdk/dist/package.json
|
||||
touch sdk/baseDist/package.json
|
||||
|
||||
# TODO: make container-runtime its own makefile?
|
||||
container-runtime/dist/index.js: container-runtime/node_modules/.package-lock.json $(shell git ls-files container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
|
||||
container-runtime/dist/index.js: container-runtime/node_modules/.package-lock.json $(call ls-files, container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
|
||||
npm --prefix container-runtime run build
|
||||
|
||||
container-runtime/dist/node_modules/.package-lock.json container-runtime/dist/package.json container-runtime/dist/package-lock.json: container-runtime/package.json container-runtime/package-lock.json sdk/dist/package.json container-runtime/install-dist-deps.sh
|
||||
./container-runtime/install-dist-deps.sh
|
||||
touch container-runtime/dist/node_modules/.package-lock.json
|
||||
|
||||
container-runtime/rootfs.$(ARCH).squashfs: container-runtime/debian.$(ARCH).squashfs container-runtime/container-runtime.service container-runtime/update-image.sh container-runtime/deb-install.sh container-runtime/dist/index.js container-runtime/dist/node_modules/.package-lock.json core/target/$(ARCH)-unknown-linux-musl/release/containerbox | sudo
|
||||
ARCH=$(ARCH) ./container-runtime/update-image.sh
|
||||
container-runtime/rootfs.$(ARCH).squashfs: container-runtime/debian.$(ARCH).squashfs container-runtime/container-runtime.service container-runtime/update-image.sh container-runtime/update-image-local.sh container-runtime/deb-install.sh container-runtime/dist/index.js container-runtime/dist/node_modules/.package-lock.json core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container
|
||||
ARCH=$(ARCH) ./container-runtime/update-image-local.sh
|
||||
|
||||
build/lib/depends build/lib/conflicts: build/dpkg-deps/*
|
||||
build/dpkg-deps/generate.sh
|
||||
build/lib/depends build/lib/conflicts: $(ENVIRONMENT_FILE) $(PLATFORM_FILE) $(shell ls build/dpkg-deps/*)
|
||||
PLATFORM=$(PLATFORM) ARCH=$(ARCH) build/dpkg-deps/generate.sh
|
||||
|
||||
$(FIRMWARE_ROMS): build/lib/firmware.json download-firmware.sh $(PLATFORM_FILE)
|
||||
./download-firmware.sh $(PLATFORM)
|
||||
$(FIRMWARE_ROMS): build/lib/firmware.json ./build/download-firmware.sh $(PLATFORM_FILE)
|
||||
./build/download-firmware.sh $(PLATFORM)
|
||||
|
||||
system-images/compat/docker-images/$(ARCH).tar: $(COMPAT_SRC)
|
||||
cd system-images/compat && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar
|
||||
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox: $(CORE_SRC) $(COMPRESSED_WEB_UIS) web/patchdb-ui-seed.json $(ENVIRONMENT_FILE)
|
||||
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-startbox.sh
|
||||
touch core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox
|
||||
|
||||
system-images/utils/docker-images/$(ARCH).tar: $(UTILS_SRC)
|
||||
cd system-images/utils && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar
|
||||
core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container: $(CORE_SRC) $(ENVIRONMENT_FILE)
|
||||
ARCH=$(ARCH) ./core/build/build-start-container.sh
|
||||
touch core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container
|
||||
|
||||
system-images/binfmt/docker-images/$(ARCH).tar: $(BINFMT_SRC)
|
||||
cd system-images/binfmt && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar
|
||||
web/package-lock.json: web/package.json sdk/baseDist/package.json
|
||||
npm --prefix web i
|
||||
touch web/package-lock.json
|
||||
|
||||
core/target/$(ARCH)-unknown-linux-musl/release/startbox: $(CORE_SRC) $(COMPRESSED_WEB_UIS) web/patchdb-ui-seed.json $(ENVIRONMENT_FILE)
|
||||
ARCH=$(ARCH) ./core/build-startbox.sh
|
||||
touch core/target/$(ARCH)-unknown-linux-musl/release/startbox
|
||||
|
||||
core/target/$(ARCH)-unknown-linux-musl/release/containerbox: $(CORE_SRC) $(ENVIRONMENT_FILE)
|
||||
ARCH=$(ARCH) ./core/build-containerbox.sh
|
||||
touch core/target/$(ARCH)-unknown-linux-musl/release/containerbox
|
||||
|
||||
web/node_modules/.package-lock.json: web/package.json sdk/baseDist/package.json
|
||||
web/node_modules/.package-lock.json: web/package-lock.json
|
||||
npm --prefix web ci
|
||||
touch web/node_modules/.package-lock.json
|
||||
|
||||
@@ -289,23 +324,27 @@ web/.angular/.updated: patch-db/client/dist/index.js sdk/baseDist/package.json w
|
||||
mkdir -p web/.angular
|
||||
touch web/.angular/.updated
|
||||
|
||||
web/dist/raw/ui/index.html: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
|
||||
web/.i18n-checked: $(WEB_SHARED_SRC) $(WEB_UI_SRC) $(WEB_SETUP_WIZARD_SRC) $(WEB_START_TUNNEL_SRC)
|
||||
npm --prefix web run check:i18n
|
||||
touch web/.i18n-checked
|
||||
|
||||
web/dist/raw/ui/index.html: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
|
||||
npm --prefix web run build:ui
|
||||
touch web/dist/raw/ui/index.html
|
||||
|
||||
web/dist/raw/setup-wizard/index.html: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
|
||||
web/dist/raw/setup-wizard/index.html: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
|
||||
npm --prefix web run build:setup
|
||||
touch web/dist/raw/setup-wizard/index.html
|
||||
|
||||
web/dist/raw/install-wizard/index.html: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
|
||||
npm --prefix web run build:install-wiz
|
||||
touch web/dist/raw/install-wizard/index.html
|
||||
web/dist/raw/start-tunnel/index.html: $(WEB_START_TUNNEL_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
|
||||
npm --prefix web run build:tunnel
|
||||
touch web/dist/raw/start-tunnel/index.html
|
||||
|
||||
$(COMPRESSED_WEB_UIS): $(WEB_UIS) $(ENVIRONMENT_FILE)
|
||||
./compress-uis.sh
|
||||
web/dist/static/%/index.html: web/dist/raw/%/index.html
|
||||
./web/compress-uis.sh $*
|
||||
|
||||
web/config.json: $(GIT_HASH_FILE) web/config-sample.json
|
||||
jq '.useMocks = false' web/config-sample.json | jq '.gitHash = "$(shell cat GIT_HASH.txt)"' > web/config.json
|
||||
web/config.json: $(GIT_HASH_FILE) $(ENVIRONMENT_FILE) web/config-sample.json web/update-config.sh
|
||||
./web/update-config.sh
|
||||
|
||||
patch-db/client/node_modules/.package-lock.json: patch-db/client/package.json
|
||||
npm --prefix patch-db/client ci
|
||||
@@ -326,11 +365,17 @@ uis: $(WEB_UIS)
|
||||
# this is a convenience step to build the UI
|
||||
ui: web/dist/raw/ui
|
||||
|
||||
cargo-deps/aarch64-unknown-linux-musl/release/pi-beep:
|
||||
ARCH=aarch64 ./build-cargo-dep.sh pi-beep
|
||||
target/aarch64-unknown-linux-musl/release/pi-beep: ./build/build-cargo-dep.sh
|
||||
ARCH=aarch64 ./build/build-cargo-dep.sh pi-beep
|
||||
|
||||
cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console:
|
||||
ARCH=$(ARCH) PREINSTALL="apk add musl-dev pkgconfig" ./build-cargo-dep.sh tokio-console
|
||||
target/$(RUST_ARCH)-unknown-linux-musl/release/tokio-console: ./build/build-cargo-dep.sh
|
||||
ARCH=$(ARCH) ./build/build-cargo-dep.sh tokio-console
|
||||
touch $@
|
||||
|
||||
cargo-deps/$(ARCH)-unknown-linux-musl/release/startos-backup-fs:
|
||||
ARCH=$(ARCH) PREINSTALL="apk add fuse3 fuse3-dev fuse3-static musl-dev pkgconfig" ./build-cargo-dep.sh --git https://github.com/Start9Labs/start-fs.git startos-backup-fs
|
||||
target/$(RUST_ARCH)-unknown-linux-musl/release/startos-backup-fs: ./build/build-cargo-dep.sh
|
||||
ARCH=$(ARCH) ./build/build-cargo-dep.sh --git https://github.com/Start9Labs/start-fs.git startos-backup-fs
|
||||
touch $@
|
||||
|
||||
target/$(RUST_ARCH)-unknown-linux-musl/release/flamegraph: ./build/build-cargo-dep.sh
|
||||
ARCH=$(ARCH) ./build/build-cargo-dep.sh flamegraph
|
||||
touch $@
|
||||
|
||||
@@ -13,9 +13,6 @@
|
||||
<a href="https://twitter.com/start9labs">
|
||||
<img alt="X (formerly Twitter) Follow" src="https://img.shields.io/twitter/follow/start9labs">
|
||||
</a>
|
||||
<a href="https://mastodon.start9labs.com">
|
||||
<img src="https://img.shields.io/mastodon/follow/000000001?domain=https%3A%2F%2Fmastodon.start9labs.com&label=Follow&style=social">
|
||||
</a>
|
||||
<a href="https://matrix.to/#/#community:matrix.start9labs.com">
|
||||
<img alt="Static Badge" src="https://img.shields.io/badge/community-matrix-yellow?logo=matrix">
|
||||
</a>
|
||||
|
||||
201
agents/VERSION_BUMP.md
Normal file
201
agents/VERSION_BUMP.md
Normal file
@@ -0,0 +1,201 @@
|
||||
# StartOS Version Bump Guide
|
||||
|
||||
This document explains how to bump the StartOS version across the entire codebase.
|
||||
|
||||
## Overview
|
||||
|
||||
When bumping from version `X.Y.Z-alpha.N` to `X.Y.Z-alpha.N+1`, you need to update files in multiple locations across the repository. The `// VERSION_BUMP` comment markers indicate where changes are needed.
|
||||
|
||||
## Files to Update
|
||||
|
||||
### 1. Core Rust Crate Version
|
||||
|
||||
**File: `core/Cargo.toml`**
|
||||
|
||||
Update the version string (line ~18):
|
||||
|
||||
```toml
|
||||
version = "0.4.0-alpha.15" # VERSION_BUMP
|
||||
```
|
||||
|
||||
**File: `core/Cargo.lock`**
|
||||
|
||||
This file is auto-generated. After updating `Cargo.toml`, run:
|
||||
|
||||
```bash
|
||||
cd core
|
||||
cargo check
|
||||
```
|
||||
|
||||
This will update the version in `Cargo.lock` automatically.
|
||||
|
||||
### 2. Create New Version Migration Module
|
||||
|
||||
**File: `core/src/version/vX_Y_Z_alpha_N+1.rs`**
|
||||
|
||||
Create a new version file by copying the previous version and updating:
|
||||
|
||||
```rust
|
||||
use exver::{PreReleaseSegment, VersionRange};
|
||||
|
||||
use super::v0_3_5::V0_3_0_COMPAT;
|
||||
use super::{VersionT, v0_4_0_alpha_14}; // Update to previous version
|
||||
use crate::prelude::*;
|
||||
|
||||
lazy_static::lazy_static! {
|
||||
static ref V0_4_0_alpha_15: exver::Version = exver::Version::new(
|
||||
[0, 4, 0],
|
||||
[PreReleaseSegment::String("alpha".into()), 15.into()] // Update number
|
||||
);
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Default)]
|
||||
pub struct Version;
|
||||
|
||||
impl VersionT for Version {
|
||||
type Previous = v0_4_0_alpha_14::Version; // Update to previous version
|
||||
type PreUpRes = ();
|
||||
|
||||
async fn pre_up(self) -> Result<Self::PreUpRes, Error> {
|
||||
Ok(())
|
||||
}
|
||||
fn semver(self) -> exver::Version {
|
||||
V0_4_0_alpha_15.clone() // Update version name
|
||||
}
|
||||
fn compat(self) -> &'static VersionRange {
|
||||
&V0_3_0_COMPAT
|
||||
}
|
||||
#[instrument(skip_all)]
|
||||
fn up(self, _db: &mut Value, _: Self::PreUpRes) -> Result<Value, Error> {
|
||||
// Add migration logic here if needed
|
||||
Ok(Value::Null)
|
||||
}
|
||||
fn down(self, _db: &mut Value) -> Result<(), Error> {
|
||||
// Add rollback logic here if needed
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### 3. Update Version Module Registry
|
||||
|
||||
**File: `core/src/version/mod.rs`**
|
||||
|
||||
Make changes in **5 locations**:
|
||||
|
||||
#### Location 1: Module Declaration (~line 57)
|
||||
|
||||
Add the new module after the previous version:
|
||||
|
||||
```rust
|
||||
mod v0_4_0_alpha_14;
|
||||
mod v0_4_0_alpha_15; // Add this
|
||||
```
|
||||
|
||||
#### Location 2: Current Type Alias (~line 59)
|
||||
|
||||
Update the `Current` type and move the `// VERSION_BUMP` comment:
|
||||
|
||||
```rust
|
||||
pub type Current = v0_4_0_alpha_15::Version; // VERSION_BUMP
|
||||
```
|
||||
|
||||
#### Location 3: Version Enum (~line 175)
|
||||
|
||||
Remove `// VERSION_BUMP` from the previous version, add new variant, add comment:
|
||||
|
||||
```rust
|
||||
V0_4_0_alpha_14(Wrapper<v0_4_0_alpha_14::Version>),
|
||||
V0_4_0_alpha_15(Wrapper<v0_4_0_alpha_15::Version>), // VERSION_BUMP
|
||||
Other(exver::Version),
|
||||
```
|
||||
|
||||
#### Location 4: as_version_t() Match (~line 233)
|
||||
|
||||
Remove `// VERSION_BUMP`, add new match arm, add comment:
|
||||
|
||||
```rust
|
||||
Self::V0_4_0_alpha_14(v) => DynVersion(Box::new(v.0)),
|
||||
Self::V0_4_0_alpha_15(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP
|
||||
Self::Other(v) => {
|
||||
```
|
||||
|
||||
#### Location 5: as_exver() Match (~line 284, inside #[cfg(test)])
|
||||
|
||||
Remove `// VERSION_BUMP`, add new match arm, add comment:
|
||||
|
||||
```rust
|
||||
Version::V0_4_0_alpha_14(Wrapper(x)) => x.semver(),
|
||||
Version::V0_4_0_alpha_15(Wrapper(x)) => x.semver(), // VERSION_BUMP
|
||||
Version::Other(x) => x.clone(),
|
||||
```
|
||||
|
||||
### 4. SDK TypeScript Version
|
||||
|
||||
**File: `sdk/package/lib/StartSdk.ts`**
|
||||
|
||||
Update the OSVersion constant (~line 64):
|
||||
|
||||
```typescript
|
||||
export const OSVersion = testTypeVersion("0.4.0-alpha.15");
|
||||
```
|
||||
|
||||
### 5. Web UI Package Version
|
||||
|
||||
**File: `web/package.json`**
|
||||
|
||||
Update the version field:
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "startos-ui",
|
||||
"version": "0.4.0-alpha.15",
|
||||
...
|
||||
}
|
||||
```
|
||||
|
||||
**File: `web/package-lock.json`**
|
||||
|
||||
This file is auto-generated, but it's faster to update manually. Find all instances of "startos-ui" and update the version field.
|
||||
|
||||
## Verification Step
|
||||
|
||||
```
|
||||
make
|
||||
```
|
||||
|
||||
## VERSION_BUMP Comment Pattern
|
||||
|
||||
The `// VERSION_BUMP` comment serves as a marker for where to make changes next time:
|
||||
|
||||
- Always **remove** it from the old location
|
||||
- **Add** the new version entry
|
||||
- **Move** the comment to mark the new location
|
||||
|
||||
This pattern helps you quickly find all the places that need updating in the next version bump.
|
||||
|
||||
## Summary Checklist
|
||||
|
||||
- [ ] Update `core/Cargo.toml` version
|
||||
- [ ] Create new `core/src/version/vX_Y_Z_alpha_N+1.rs` file
|
||||
- [ ] Update `core/src/version/mod.rs` in 5 locations
|
||||
- [ ] Run `cargo check` to update `core/Cargo.lock`
|
||||
- [ ] Update `sdk/package/lib/StartSdk.ts` OSVersion
|
||||
- [ ] Update `web/package.json` and `web/package-lock.json` version
|
||||
- [ ] Verify all changes compile/build successfully
|
||||
|
||||
## Migration Logic
|
||||
|
||||
The `up()` and `down()` methods in the version file handle database migrations:
|
||||
|
||||
- **up()**: Migrates the database from the previous version to this version
|
||||
- **down()**: Rolls back from this version to the previous version
|
||||
- **pre_up()**: Runs before migration, useful for pre-migration checks or data gathering
|
||||
|
||||
If no migration is needed, return `Ok(Value::Null)` for `up()` and `Ok(())` for `down()`.
|
||||
|
||||
For complex migrations, you may need to:
|
||||
|
||||
1. Update `type PreUpRes` to pass data between `pre_up()` and `up()`
|
||||
2. Implement database transformations in the `up()` method
|
||||
3. Implement reverse transformations in `down()` for rollback support
|
||||
@@ -1,34 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
shopt -s expand_aliases
|
||||
|
||||
if [ "$0" != "./build-cargo-dep.sh" ]; then
|
||||
>&2 echo "Must be run from start-os directory"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
USE_TTY=
|
||||
if tty -s; then
|
||||
USE_TTY="-it"
|
||||
fi
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
DOCKER_PLATFORM="linux/${ARCH}"
|
||||
if [ "$ARCH" = aarch64 ] || [ "$ARCH" = arm64 ]; then
|
||||
DOCKER_PLATFORM="linux/arm64"
|
||||
elif [ "$ARCH" = x86_64 ]; then
|
||||
DOCKER_PLATFORM="linux/amd64"
|
||||
fi
|
||||
|
||||
mkdir -p cargo-deps
|
||||
alias 'rust-musl-builder'='docker run $USE_TTY --platform=${DOCKER_PLATFORM} --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$(pwd)"/cargo-deps:/home/rust/src -w /home/rust/src -P rust:alpine'
|
||||
|
||||
PREINSTALL=${PREINSTALL:-true}
|
||||
|
||||
rust-musl-builder sh -c "$PREINSTALL && cargo install $* --target-dir /home/rust/src --target=$ARCH-unknown-linux-musl"
|
||||
sudo chown -R $USER cargo-deps
|
||||
sudo chown -R $USER ~/.cargo
|
||||
26
build/build-cargo-dep.sh
Executable file
26
build/build-cargo-dep.sh
Executable file
@@ -0,0 +1,26 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")/.."
|
||||
|
||||
set -e
|
||||
shopt -s expand_aliases
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
RUST_ARCH="$ARCH"
|
||||
if [ "$ARCH" = "riscv64" ]; then
|
||||
RUST_ARCH="riscv64gc"
|
||||
fi
|
||||
|
||||
mkdir -p target
|
||||
|
||||
source core/build/builder-alias.sh
|
||||
|
||||
RUSTFLAGS="-C target-feature=+crt-static"
|
||||
|
||||
rust-zig-builder cargo-zigbuild install $* --target-dir /workdir/target/ --target=$RUST_ARCH-unknown-linux-musl
|
||||
if [ "$(ls -nd "target/$RUST_ARCH-unknown-linux-musl/release/${!#}" | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-zig-builder sh -c "chown -R $UID:$UID target && chown -R $UID:$UID /usr/local/cargo"
|
||||
fi
|
||||
@@ -11,13 +11,13 @@ if [ -z "$PLATFORM" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
rm -rf ./firmware/$PLATFORM
|
||||
mkdir -p ./firmware/$PLATFORM
|
||||
rm -rf ./lib/firmware/$PLATFORM
|
||||
mkdir -p ./lib/firmware/$PLATFORM
|
||||
|
||||
cd ./firmware/$PLATFORM
|
||||
cd ./lib/firmware/$PLATFORM
|
||||
|
||||
firmwares=()
|
||||
while IFS= read -r line; do firmwares+=("$line"); done < <(jq -c ".[] | select(.platform[] | contains(\"$PLATFORM\"))" ../../build/lib/firmware.json)
|
||||
while IFS= read -r line; do firmwares+=("$line"); done < <(jq -c ".[] | select(.platform[] | contains(\"$PLATFORM\"))" ../../firmware.json)
|
||||
for firmware in "${firmwares[@]}"; do
|
||||
if [ -n "$firmware" ]; then
|
||||
id=$(echo "$firmware" | jq --raw-output '.id')
|
||||
@@ -3,22 +3,25 @@ avahi-utils
|
||||
b3sum
|
||||
bash-completion
|
||||
beep
|
||||
binfmt-support
|
||||
bmon
|
||||
btrfs-progs
|
||||
ca-certificates
|
||||
cifs-utils
|
||||
conntrack
|
||||
cryptsetup
|
||||
curl
|
||||
dnsutils
|
||||
dmidecode
|
||||
dnsutils
|
||||
dosfstools
|
||||
e2fsprogs
|
||||
ecryptfs-utils
|
||||
equivs
|
||||
exfatprogs
|
||||
flashrom
|
||||
fuse3
|
||||
grub-common
|
||||
grub-efi
|
||||
htop
|
||||
httpdirfs
|
||||
iotop
|
||||
@@ -36,13 +39,15 @@ man-db
|
||||
ncdu
|
||||
net-tools
|
||||
network-manager
|
||||
nfs-common
|
||||
nvme-cli
|
||||
nyx
|
||||
openssh-server
|
||||
podman
|
||||
postgresql
|
||||
psmisc
|
||||
qemu-guest-agent
|
||||
qemu-user-static
|
||||
rfkill
|
||||
rsync
|
||||
samba-common-bin
|
||||
smartmontools
|
||||
|
||||
@@ -5,11 +5,18 @@ set -e
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
IFS="-" read -ra FEATURES <<< "$ENVIRONMENT"
|
||||
FEATURES+=("${ARCH}")
|
||||
if [ "$ARCH" != "$PLATFORM" ]; then
|
||||
FEATURES+=("${PLATFORM}")
|
||||
fi
|
||||
if [[ "$PLATFORM" =~ -nonfree$ ]]; then
|
||||
FEATURES+=("nonfree")
|
||||
fi
|
||||
|
||||
feature_file_checker='
|
||||
/^#/ { next }
|
||||
/^\+ [a-z0-9]+$/ { next }
|
||||
/^- [a-z0-9]+$/ { next }
|
||||
/^\+ [a-z0-9.-]+$/ { next }
|
||||
/^- [a-z0-9.-]+$/ { next }
|
||||
{ exit 1 }
|
||||
'
|
||||
|
||||
@@ -30,8 +37,8 @@ for type in conflicts depends; do
|
||||
for feature in ${FEATURES[@]}; do
|
||||
file="$feature.$type"
|
||||
if [ -f $file ]; then
|
||||
if grep "^- $pkg$" $file; then
|
||||
SKIP=1
|
||||
if grep "^- $pkg$" $file > /dev/null; then
|
||||
SKIP=yes
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
10
build/dpkg-deps/nonfree.depends
Normal file
10
build/dpkg-deps/nonfree.depends
Normal file
@@ -0,0 +1,10 @@
|
||||
+ firmware-amd-graphics
|
||||
+ firmware-atheros
|
||||
+ firmware-brcm80211
|
||||
+ firmware-iwlwifi
|
||||
+ firmware-libertas
|
||||
+ firmware-misc-nonfree
|
||||
+ firmware-realtek
|
||||
+ nvidia-container-toolkit
|
||||
# + nvidia-driver
|
||||
# + nvidia-kernel-dkms
|
||||
10
build/dpkg-deps/raspberrypi.depends
Normal file
10
build/dpkg-deps/raspberrypi.depends
Normal file
@@ -0,0 +1,10 @@
|
||||
- grub-efi
|
||||
+ parted
|
||||
+ raspberrypi-net-mods
|
||||
+ raspberrypi-sys-mods
|
||||
+ raspi-config
|
||||
+ raspi-firmware
|
||||
+ raspi-utils
|
||||
+ rpi-eeprom
|
||||
+ rpi-update
|
||||
+ rpi.gpio-common
|
||||
@@ -1,2 +1,3 @@
|
||||
+ gdb
|
||||
+ heaptrack
|
||||
+ heaptrack
|
||||
+ linux-perf
|
||||
1
build/dpkg-deps/x86_64.depends
Normal file
1
build/dpkg-deps/x86_64.depends
Normal file
@@ -0,0 +1 @@
|
||||
+ grub-pc-bin
|
||||
4
basename.sh → build/env/basename.sh
vendored
4
basename.sh → build/env/basename.sh
vendored
@@ -1,5 +1,7 @@
|
||||
#!/bin/bash
|
||||
|
||||
PROJECT=${PROJECT:-"startos"}
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
PLATFORM="$(if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi)"
|
||||
@@ -16,4 +18,4 @@ if [ -n "$STARTOS_ENV" ]; then
|
||||
VERSION_FULL="$VERSION_FULL~${STARTOS_ENV}"
|
||||
fi
|
||||
|
||||
echo -n "startos-${VERSION_FULL}_${PLATFORM}"
|
||||
echo -n "${PROJECT}-${VERSION_FULL}_${PLATFORM}"
|
||||
@@ -1,8 +1,10 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
if ! [ -f ./ENVIRONMENT.txt ] || [ "$(cat ./ENVIRONMENT.txt)" != "$ENVIRONMENT" ]; then
|
||||
>&2 echo "Updating ENVIRONMENT.txt to \"$ENVIRONMENT\""
|
||||
echo -n "$ENVIRONMENT" > ./ENVIRONMENT.txt
|
||||
fi
|
||||
|
||||
echo -n ./ENVIRONMENT.txt
|
||||
echo -n ./build/env/ENVIRONMENT.txt
|
||||
@@ -1,5 +1,7 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
if [ "$GIT_BRANCH_AS_HASH" != 1 ]; then
|
||||
GIT_HASH="$(git rev-parse HEAD)$(if ! git diff-index --quiet HEAD --; then echo '-modified'; fi)"
|
||||
else
|
||||
@@ -7,7 +9,8 @@ else
|
||||
fi
|
||||
|
||||
if ! [ -f ./GIT_HASH.txt ] || [ "$(cat ./GIT_HASH.txt)" != "$GIT_HASH" ]; then
|
||||
>&2 echo Git hash changed from "$([ -f ./GIT_HASH.txt ] && cat ./GIT_HASH.txt)" to "$GIT_HASH"
|
||||
echo -n "$GIT_HASH" > ./GIT_HASH.txt
|
||||
fi
|
||||
|
||||
echo -n ./GIT_HASH.txt
|
||||
echo -n ./build/env/GIT_HASH.txt
|
||||
@@ -1,8 +1,10 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
if ! [ -f ./PLATFORM.txt ] || [ "$(cat ./PLATFORM.txt)" != "$PLATFORM" ] && [ -n "$PLATFORM" ]; then
|
||||
>&2 echo "Updating PLATFORM.txt to \"$PLATFORM\""
|
||||
echo -n "$PLATFORM" > ./PLATFORM.txt
|
||||
fi
|
||||
|
||||
echo -n ./PLATFORM.txt
|
||||
echo -n ./build/env/PLATFORM.txt
|
||||
@@ -1,6 +1,8 @@
|
||||
#!/bin/bash
|
||||
|
||||
FE_VERSION="$(cat web/package.json | grep '"version"' | sed 's/[ \t]*"version":[ \t]*"\([^"]*\)",/\1/')"
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
FE_VERSION="$(cat ../../web/package.json | grep '"version"' | sed 's/[ \t]*"version":[ \t]*"\([^"]*\)",/\1/')"
|
||||
|
||||
# TODO: Validate other version sources - backend/Cargo.toml, backend/src/version/mod.rs
|
||||
|
||||
@@ -10,4 +12,4 @@ if ! [ -f ./VERSION.txt ] || [ "$(cat ./VERSION.txt)" != "$VERSION" ]; then
|
||||
echo -n "$VERSION" > ./VERSION.txt
|
||||
fi
|
||||
|
||||
echo -n ./VERSION.txt
|
||||
echo -n ./build/env/VERSION.txt
|
||||
35
build/image-recipe/Dockerfile
Normal file
35
build/image-recipe/Dockerfile
Normal file
@@ -0,0 +1,35 @@
|
||||
ARG SUITE=trixie
|
||||
|
||||
FROM debian:${SUITE}
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
RUN apt-get update && \
|
||||
apt-get install -yq \
|
||||
live-build \
|
||||
procps \
|
||||
binfmt-support \
|
||||
qemu-utils \
|
||||
qemu-user-static \
|
||||
xorriso \
|
||||
isolinux \
|
||||
ca-certificates \
|
||||
curl \
|
||||
wget \
|
||||
gpg \
|
||||
git \
|
||||
fdisk \
|
||||
dosfstools \
|
||||
e2fsprogs \
|
||||
squashfs-tools \
|
||||
rsync \
|
||||
b3sum \
|
||||
dpkg-dev
|
||||
|
||||
|
||||
COPY binary_grub-efi.patch /root/binary_grub-efi.patch
|
||||
RUN patch /usr/lib/live/build/binary_grub-efi < /root/binary_grub-efi.patch && rm /root/binary_grub-efi.patch
|
||||
|
||||
RUN echo 'retry_connrefused = on' > /etc/wgetrc && \
|
||||
echo 'tries = 100' >> /etc/wgetrc
|
||||
|
||||
WORKDIR /root
|
||||
47
build/image-recipe/binary_grub-efi.patch
Normal file
47
build/image-recipe/binary_grub-efi.patch
Normal file
@@ -0,0 +1,47 @@
|
||||
--- /usr/lib/live/build/binary_grub-efi 2024-05-25 05:22:52.000000000 -0600
|
||||
+++ binary_grub-efi 2025-10-16 13:04:32.338740922 -0600
|
||||
@@ -54,6 +54,8 @@
|
||||
armhf)
|
||||
Check_package chroot /usr/lib/grub/arm-efi/configfile.mod grub-efi-arm-bin
|
||||
;;
|
||||
+ riscv64)
|
||||
+ Check_package chroot /usr/lib/grub/riscv64-efi/configfile.mod grub-efi-riscv64-bin
|
||||
esac
|
||||
Check_package chroot /usr/bin/grub-mkimage grub-common
|
||||
Check_package chroot /usr/bin/mcopy mtools
|
||||
@@ -136,7 +138,7 @@
|
||||
esac
|
||||
|
||||
# Cleanup files that we generate
|
||||
-rm -rf binary/boot/efi.img binary/boot/grub/i386-efi/ binary/boot/grub/x86_64-efi binary/boot/grub/arm64-efi binary/boot/grub/arm-efi
|
||||
+rm -rf binary/boot/efi.img binary/boot/grub/i386-efi/ binary/boot/grub/x86_64-efi binary/boot/grub/arm64-efi binary/boot/grub/arm-efi binary/boot/grub/riscv64-efi
|
||||
|
||||
# This is workaround till both efi-image and grub-cpmodules are put into a binary package
|
||||
case "${LB_BUILD_WITH_CHROOT}" in
|
||||
@@ -243,6 +245,10 @@
|
||||
gen_efi_boot_img "arm-efi" "arm" "debian-live/arm"
|
||||
PATH="\${PRE_EFI_IMAGE_PATH}"
|
||||
;;
|
||||
+ riscv64)
|
||||
+ gen_efi_boot_img "riscv64-efi" "riscv64" "debian-live/riscv64"
|
||||
+ PATH="\${PRE_EFI_IMAGE_PATH}"
|
||||
+ ;;
|
||||
esac
|
||||
|
||||
|
||||
@@ -324,6 +330,7 @@
|
||||
rm -f chroot/grub-efi-temp/bootnetx64.efi
|
||||
rm -f chroot/grub-efi-temp/bootnetaa64.efi
|
||||
rm -f chroot/grub-efi-temp/bootnetarm.efi
|
||||
+rm -f chroot/grub-efi-temp/bootnetriscv64.efi
|
||||
|
||||
mkdir -p binary
|
||||
cp -a chroot/grub-efi-temp/* binary/
|
||||
@@ -331,6 +338,7 @@
|
||||
rm -rf chroot/grub-efi-temp-i386-efi
|
||||
rm -rf chroot/grub-efi-temp-arm64-efi
|
||||
rm -rf chroot/grub-efi-temp-arm-efi
|
||||
+rm -rf chroot/grub-efi-temp-riscv64-efi
|
||||
rm -rf chroot/grub-efi-temp-cfg
|
||||
rm -rf chroot/grub-efi-temp
|
||||
|
||||
449
build/image-recipe/build.sh
Executable file
449
build/image-recipe/build.sh
Executable file
@@ -0,0 +1,449 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
MAX_IMG_LEN=$((4 * 1024 * 1024 * 1024)) # 4GB
|
||||
|
||||
echo "==== StartOS Image Build ===="
|
||||
|
||||
echo "Building for architecture: $IB_TARGET_ARCH"
|
||||
|
||||
SOURCE_DIR="$(realpath $(dirname "${BASH_SOURCE[0]}"))"
|
||||
|
||||
base_dir="$(pwd -P)"
|
||||
prep_results_dir="$base_dir/images-prep"
|
||||
RESULTS_DIR="$base_dir/results"
|
||||
echo "Saving results in: $RESULTS_DIR"
|
||||
|
||||
DEB_PATH="$base_dir/$1"
|
||||
|
||||
VERSION="$(dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xvf - ./usr/lib/startos/VERSION.txt)"
|
||||
GIT_HASH="$(dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xvf - ./usr/lib/startos/GIT_HASH.txt)"
|
||||
if [[ "$GIT_HASH" =~ ^@ ]]; then
|
||||
GIT_HASH="unknown"
|
||||
else
|
||||
GIT_HASH="$(echo -n "$GIT_HASH" | head -c 7)"
|
||||
fi
|
||||
IB_OS_ENV="$(dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xvf - ./usr/lib/startos/ENVIRONMENT.txt)"
|
||||
IB_TARGET_PLATFORM="$(dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xvf - ./usr/lib/startos/PLATFORM.txt)"
|
||||
|
||||
VERSION_FULL="${VERSION}-${GIT_HASH}"
|
||||
if [ -n "$IB_OS_ENV" ]; then
|
||||
VERSION_FULL="$VERSION_FULL~${IB_OS_ENV}"
|
||||
fi
|
||||
|
||||
IMAGE_BASENAME=startos-${VERSION_FULL}_${IB_TARGET_PLATFORM}
|
||||
|
||||
BOOTLOADERS=grub-efi
|
||||
if [ "$IB_TARGET_PLATFORM" = "x86_64" ] || [ "$IB_TARGET_PLATFORM" = "x86_64-nonfree" ]; then
|
||||
IB_TARGET_ARCH=amd64
|
||||
QEMU_ARCH=x86_64
|
||||
BOOTLOADERS=grub-efi,syslinux
|
||||
elif [ "$IB_TARGET_PLATFORM" = "aarch64" ] || [ "$IB_TARGET_PLATFORM" = "aarch64-nonfree" ] || [ "$IB_TARGET_PLATFORM" = "raspberrypi" ] || [ "$IB_TARGET_PLATFORM" = "rockchip64" ]; then
|
||||
IB_TARGET_ARCH=arm64
|
||||
QEMU_ARCH=aarch64
|
||||
elif [ "$IB_TARGET_PLATFORM" = "riscv64" ]; then
|
||||
IB_TARGET_ARCH=riscv64
|
||||
QEMU_ARCH=riscv64
|
||||
else
|
||||
IB_TARGET_ARCH="$IB_TARGET_PLATFORM"
|
||||
QEMU_ARCH="$IB_TARGET_PLATFORM"
|
||||
fi
|
||||
|
||||
QEMU_ARGS=()
|
||||
if [ "$QEMU_ARCH" != $(uname -m) ]; then
|
||||
QEMU_ARGS+=(--bootstrap-qemu-arch ${IB_TARGET_ARCH})
|
||||
QEMU_ARGS+=(--bootstrap-qemu-static /usr/bin/qemu-${QEMU_ARCH}-static)
|
||||
fi
|
||||
|
||||
mkdir -p $prep_results_dir
|
||||
|
||||
cd $prep_results_dir
|
||||
|
||||
NON_FREE=
|
||||
if [[ "${IB_TARGET_PLATFORM}" =~ -nonfree$ ]] || [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||
NON_FREE=1
|
||||
fi
|
||||
IMAGE_TYPE=iso
|
||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ] || [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then
|
||||
IMAGE_TYPE=img
|
||||
fi
|
||||
|
||||
ARCHIVE_AREAS="main contrib"
|
||||
if [ "$NON_FREE" = 1 ]; then
|
||||
if [ "$IB_SUITE" = "bullseye" ]; then
|
||||
ARCHIVE_AREAS="$ARCHIVE_AREAS non-free"
|
||||
else
|
||||
ARCHIVE_AREAS="$ARCHIVE_AREAS non-free non-free-firmware"
|
||||
fi
|
||||
fi
|
||||
|
||||
PLATFORM_CONFIG_EXTRAS=()
|
||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||
PLATFORM_CONFIG_EXTRAS+=( --firmware-binary false )
|
||||
PLATFORM_CONFIG_EXTRAS+=( --firmware-chroot false )
|
||||
RPI_KERNEL_VERSION=6.12.47+rpt
|
||||
PLATFORM_CONFIG_EXTRAS+=( --linux-packages linux-image-$RPI_KERNEL_VERSION )
|
||||
PLATFORM_CONFIG_EXTRAS+=( --linux-flavours "rpi-v8 rpi-2712" )
|
||||
elif [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then
|
||||
PLATFORM_CONFIG_EXTRAS+=( --linux-flavours rockchip64 )
|
||||
elif [ "${IB_TARGET_ARCH}" = "riscv64" ]; then
|
||||
PLATFORM_CONFIG_EXTRAS+=( --uefi-secure-boot=disable )
|
||||
fi
|
||||
|
||||
|
||||
cat > /etc/wgetrc << EOF
|
||||
retry_connrefused = on
|
||||
tries = 100
|
||||
EOF
|
||||
lb config \
|
||||
--iso-application "StartOS v${VERSION_FULL} ${IB_TARGET_ARCH}" \
|
||||
--iso-volume "StartOS v${VERSION} ${IB_TARGET_ARCH}" \
|
||||
--iso-preparer "START9 LABS; HTTPS://START9.COM" \
|
||||
--iso-publisher "START9 LABS; HTTPS://START9.COM" \
|
||||
--backports true \
|
||||
--bootappend-live "boot=live noautologin" \
|
||||
--bootloaders $BOOTLOADERS \
|
||||
--cache false \
|
||||
--mirror-bootstrap "https://deb.debian.org/debian/" \
|
||||
--mirror-chroot "https://deb.debian.org/debian/" \
|
||||
--mirror-chroot-security "https://security.debian.org/debian-security" \
|
||||
-d ${IB_SUITE} \
|
||||
-a ${IB_TARGET_ARCH} \
|
||||
${QEMU_ARGS[@]} \
|
||||
--archive-areas "${ARCHIVE_AREAS}" \
|
||||
${PLATFORM_CONFIG_EXTRAS[@]}
|
||||
|
||||
# Overlays
|
||||
|
||||
mkdir -p config/packages.chroot/
|
||||
cp $RESULTS_DIR/$IMAGE_BASENAME.deb config/packages.chroot/
|
||||
dpkg-name config/packages.chroot/*.deb
|
||||
|
||||
mkdir -p config/includes.chroot/etc
|
||||
echo start > config/includes.chroot/etc/hostname
|
||||
cat > config/includes.chroot/etc/hosts << EOT
|
||||
127.0.0.1 localhost start
|
||||
::1 localhost start ip6-localhost ip6-loopback
|
||||
ff02::1 ip6-allnodes
|
||||
ff02::2 ip6-allrouters
|
||||
EOT
|
||||
|
||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||
mkdir -p config/includes.chroot
|
||||
git clone --depth=1 --branch=stable https://github.com/raspberrypi/rpi-firmware.git config/includes.chroot/boot
|
||||
rm -rf config/includes.chroot/boot/.git config/includes.chroot/boot/modules
|
||||
rsync -rLp $SOURCE_DIR/raspberrypi/squashfs/ config/includes.chroot/
|
||||
fi
|
||||
|
||||
# Bootloaders
|
||||
|
||||
rm -rf config/bootloaders
|
||||
cp -r /usr/share/live/build/bootloaders config/bootloaders
|
||||
|
||||
cat > config/bootloaders/syslinux/syslinux.cfg << EOF
|
||||
include menu.cfg
|
||||
default vesamenu.c32
|
||||
prompt 0
|
||||
timeout 50
|
||||
EOF
|
||||
|
||||
cat > config/bootloaders/isolinux/isolinux.cfg << EOF
|
||||
include menu.cfg
|
||||
default vesamenu.c32
|
||||
prompt 0
|
||||
timeout 50
|
||||
EOF
|
||||
|
||||
# Extract splash.png from the deb package
|
||||
dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xf - ./usr/lib/startos/splash.png > /tmp/splash.png
|
||||
cp /tmp/splash.png config/bootloaders/syslinux_common/splash.png
|
||||
cp /tmp/splash.png config/bootloaders/isolinux/splash.png
|
||||
cp /tmp/splash.png config/bootloaders/grub-pc/splash.png
|
||||
rm /tmp/splash.png
|
||||
|
||||
sed -i -e '2i set timeout=5' config/bootloaders/grub-pc/config.cfg
|
||||
|
||||
# Archives
|
||||
|
||||
mkdir -p config/archives
|
||||
|
||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||
curl -fsSL https://archive.raspberrypi.com/debian/raspberrypi.gpg.key | gpg --dearmor -o config/archives/raspi.key
|
||||
echo "deb [arch=${IB_TARGET_ARCH} signed-by=/etc/apt/trusted.gpg.d/raspi.key.gpg] https://archive.raspberrypi.com/debian/ ${IB_SUITE} main" > config/archives/raspi.list
|
||||
fi
|
||||
|
||||
if [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then
|
||||
curl -fsSL https://apt.armbian.com/armbian.key | gpg --dearmor -o config/archives/armbian.key
|
||||
echo "deb https://apt.armbian.com/ ${IB_SUITE} main" > config/archives/armbian.list
|
||||
fi
|
||||
|
||||
if [ "$NON_FREE" = 1 ]; then
|
||||
curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o config/archives/nvidia-container-toolkit.key
|
||||
curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list \
|
||||
| sed 's#deb https://#deb [signed-by=/etc/apt/trusted.gpg.d/nvidia-container-toolkit.key.gpg] https://#g' \
|
||||
> config/archives/nvidia-container-toolkit.list
|
||||
fi
|
||||
|
||||
cat > config/archives/backports.pref <<-EOF
|
||||
Package: linux-image-*
|
||||
Pin: release n=${IB_SUITE}-backports
|
||||
Pin-Priority: 500
|
||||
|
||||
Package: linux-headers-*
|
||||
Pin: release n=${IB_SUITE}-backports
|
||||
Pin-Priority: 500
|
||||
|
||||
Package: *nvidia*
|
||||
Pin: release n=${IB_SUITE}-backports
|
||||
Pin-Priority: 500
|
||||
EOF
|
||||
|
||||
# Hooks
|
||||
|
||||
cat > config/hooks/normal/9000-install-startos.hook.chroot << EOF
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
if [ "${NON_FREE}" = "1" ] && [ "${IB_TARGET_PLATFORM}" != "raspberrypi" ]; then
|
||||
# install a specific NVIDIA driver version
|
||||
|
||||
# ---------------- configuration ----------------
|
||||
NVIDIA_DRIVER_VERSION="\${NVIDIA_DRIVER_VERSION:-580.119.02}"
|
||||
|
||||
BASE_URL="https://download.nvidia.com/XFree86/Linux-${QEMU_ARCH}"
|
||||
|
||||
echo "[nvidia-hook] Using NVIDIA driver: \${NVIDIA_DRIVER_VERSION}" >&2
|
||||
|
||||
# ---------------- kernel version ----------------
|
||||
|
||||
# Determine target kernel version from newest /boot/vmlinuz-* in the chroot.
|
||||
KVER="\$(
|
||||
ls -1t /boot/vmlinuz-* 2>/dev/null \
|
||||
| head -n1 \
|
||||
| sed 's|.*/vmlinuz-||'
|
||||
)"
|
||||
|
||||
if [ -z "\${KVER}" ]; then
|
||||
echo "[nvidia-hook] ERROR: no /boot/vmlinuz-* found; cannot determine kernel version" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "[nvidia-hook] Target kernel version: \${KVER}" >&2
|
||||
|
||||
# Ensure kernel headers are present
|
||||
TEMP_APT_DEPS=(build-essential)
|
||||
if [ ! -e "/lib/modules/\${KVER}/build" ]; then
|
||||
TEMP_APT_DEPS+=(linux-headers-\${KVER})
|
||||
fi
|
||||
|
||||
echo "[nvidia-hook] Installing build dependencies" >&2
|
||||
|
||||
/usr/lib/startos/scripts/install-equivs <<-EOF
|
||||
Package: nvidia-depends
|
||||
Version: \${NVIDIA_DRIVER_VERSION}
|
||||
Section: unknown
|
||||
Priority: optional
|
||||
Depends: \${dep_list="\$(IFS=', '; echo "\${TEMP_APT_DEPS[*]}")"}
|
||||
EOF
|
||||
|
||||
# ---------------- download and run installer ----------------
|
||||
|
||||
RUN_NAME="NVIDIA-Linux-${QEMU_ARCH}-\${NVIDIA_DRIVER_VERSION}.run"
|
||||
RUN_PATH="/root/\${RUN_NAME}"
|
||||
RUN_URL="\${BASE_URL}/\${NVIDIA_DRIVER_VERSION}/\${RUN_NAME}"
|
||||
|
||||
echo "[nvidia-hook] Downloading \${RUN_URL}" >&2
|
||||
wget -O "\${RUN_PATH}" "\${RUN_URL}"
|
||||
chmod +x "\${RUN_PATH}"
|
||||
|
||||
echo "[nvidia-hook] Running NVIDIA installer for kernel \${KVER}" >&2
|
||||
|
||||
sh "\${RUN_PATH}" \
|
||||
--silent \
|
||||
--kernel-name="\${KVER}" \
|
||||
--no-x-check \
|
||||
--no-nouveau-check \
|
||||
--no-runlevel-check
|
||||
|
||||
# Rebuild module metadata
|
||||
echo "[nvidia-hook] Running depmod for \${KVER}" >&2
|
||||
depmod -a "\${KVER}"
|
||||
|
||||
echo "[nvidia-hook] NVIDIA \${NVIDIA_DRIVER_VERSION} installation complete for kernel \${KVER}" >&2
|
||||
|
||||
echo "[nvidia-hook] Removing build dependencies..." >&2
|
||||
apt-get purge -y nvidia-depends
|
||||
apt-get autoremove -y
|
||||
echo "[nvidia-hook] Removed build dependencies." >&2
|
||||
fi
|
||||
|
||||
cp /etc/resolv.conf /etc/resolv.conf.bak
|
||||
|
||||
if [ "${IB_SUITE}" = trixie ] && [ "${IB_TARGET_ARCH}" != riscv64 ]; then
|
||||
echo 'deb https://deb.debian.org/debian/ bookworm main' > /etc/apt/sources.list.d/bookworm.list
|
||||
apt-get update
|
||||
apt-get install -y postgresql-15
|
||||
rm /etc/apt/sources.list.d/bookworm.list
|
||||
apt-get update
|
||||
systemctl mask postgresql
|
||||
fi
|
||||
|
||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||
ln -sf /usr/bin/pi-beep /usr/local/bin/beep
|
||||
KERNEL_VERSION=${RPI_KERNEL_VERSION} sh /boot/config.sh > /boot/config.txt
|
||||
mkinitramfs -c gzip -o /boot/initrd.img-${RPI_KERNEL_VERSION}-rpi-v8 ${RPI_KERNEL_VERSION}-rpi-v8
|
||||
mkinitramfs -c gzip -o /boot/initrd.img-${RPI_KERNEL_VERSION}-rpi-2712 ${RPI_KERNEL_VERSION}-rpi-2712
|
||||
fi
|
||||
|
||||
useradd --shell /bin/bash -G startos -m start9
|
||||
echo start9:embassy | chpasswd
|
||||
usermod -aG sudo start9
|
||||
usermod -aG systemd-journal start9
|
||||
|
||||
echo "start9 ALL=(ALL:ALL) NOPASSWD: ALL" | sudo tee "/etc/sudoers.d/010_start9-nopasswd"
|
||||
|
||||
if [ "${IB_TARGET_PLATFORM}" != "raspberrypi" ]; then
|
||||
/usr/lib/startos/scripts/enable-kiosk
|
||||
fi
|
||||
|
||||
if ! [[ "${IB_OS_ENV}" =~ (^|-)dev($|-) ]]; then
|
||||
passwd -l start9
|
||||
fi
|
||||
|
||||
EOF
|
||||
|
||||
SOURCE_DATE_EPOCH="${SOURCE_DATE_EPOCH:-$(date '+%s')}"
|
||||
|
||||
if lb bootstrap; then
|
||||
true
|
||||
else
|
||||
EXIT=$?
|
||||
cat ./chroot/debootstrap/debootstrap.log
|
||||
exit $EXIT
|
||||
fi
|
||||
lb chroot
|
||||
lb installer
|
||||
lb binary_chroot
|
||||
lb chroot_prep install all mode-apt-install-binary mode-archives-chroot
|
||||
mv chroot/chroot/etc/resolv.conf.bak chroot/chroot/etc/resolv.conf
|
||||
lb binary_rootfs
|
||||
|
||||
cp $prep_results_dir/binary/live/filesystem.squashfs $RESULTS_DIR/$IMAGE_BASENAME.squashfs
|
||||
|
||||
if [ "${IMAGE_TYPE}" = iso ]; then
|
||||
|
||||
lb binary_manifest
|
||||
lb binary_package-lists
|
||||
lb binary_linux-image
|
||||
lb binary_memtest
|
||||
lb binary_grub-legacy
|
||||
lb binary_grub-pc
|
||||
lb binary_grub_cfg
|
||||
lb binary_syslinux
|
||||
lb binary_disk
|
||||
lb binary_loadlin
|
||||
lb binary_win32-loader
|
||||
lb binary_includes
|
||||
lb binary_grub-efi
|
||||
lb binary_hooks
|
||||
lb binary_checksums
|
||||
find binary -newermt "$(date -d@${SOURCE_DATE_EPOCH} '+%Y-%m-%d %H:%M:%S')" -printf "%y %p\n" -exec touch '{}' -d@${SOURCE_DATE_EPOCH} --no-dereference ';' > binary.modified_timestamps
|
||||
lb binary_iso
|
||||
lb binary_onie
|
||||
lb binary_netboot
|
||||
lb binary_tar
|
||||
lb binary_hdd
|
||||
lb binary_zsync
|
||||
lb chroot_prep remove all mode-archives-chroot
|
||||
lb source
|
||||
|
||||
mv $prep_results_dir/live-image-${IB_TARGET_ARCH}.hybrid.iso $RESULTS_DIR/$IMAGE_BASENAME.iso
|
||||
|
||||
elif [ "${IMAGE_TYPE}" = img ]; then
|
||||
|
||||
SECTOR_LEN=512
|
||||
BOOT_START=$((1024 * 1024)) # 1MiB
|
||||
BOOT_LEN=$((512 * 1024 * 1024)) # 512MiB
|
||||
BOOT_END=$((BOOT_START + BOOT_LEN - 1))
|
||||
ROOT_START=$((BOOT_END + 1))
|
||||
ROOT_LEN=$((MAX_IMG_LEN - ROOT_START))
|
||||
ROOT_END=$((MAX_IMG_LEN - 1))
|
||||
|
||||
TARGET_NAME=$prep_results_dir/${IMAGE_BASENAME}.img
|
||||
truncate -s $MAX_IMG_LEN $TARGET_NAME
|
||||
|
||||
sfdisk $TARGET_NAME <<-EOF
|
||||
label: dos
|
||||
label-id: 0xcb15ae4d
|
||||
unit: sectors
|
||||
sector-size: 512
|
||||
|
||||
${TARGET_NAME}1 : start=$((BOOT_START / SECTOR_LEN)), size=$((BOOT_LEN / SECTOR_LEN)), type=c, bootable
|
||||
${TARGET_NAME}2 : start=$((ROOT_START / SECTOR_LEN)), size=$((ROOT_LEN / SECTOR_LEN)), type=83
|
||||
EOF
|
||||
|
||||
BOOT_DEV=$(losetup --show -f --offset $BOOT_START --sizelimit $BOOT_LEN $TARGET_NAME)
|
||||
ROOT_DEV=$(losetup --show -f --offset $ROOT_START --sizelimit $ROOT_LEN $TARGET_NAME)
|
||||
|
||||
mkfs.vfat -F32 $BOOT_DEV
|
||||
mkfs.ext4 $ROOT_DEV
|
||||
|
||||
TMPDIR=$(mktemp -d)
|
||||
|
||||
mkdir -p $TMPDIR/boot $TMPDIR/root
|
||||
mount $ROOT_DEV $TMPDIR/root
|
||||
mount $BOOT_DEV $TMPDIR/boot
|
||||
unsquashfs -n -f -d $TMPDIR $prep_results_dir/binary/live/filesystem.squashfs boot
|
||||
|
||||
mkdir $TMPDIR/root/images $TMPDIR/root/config
|
||||
B3SUM=$(b3sum $prep_results_dir/binary/live/filesystem.squashfs | head -c 16)
|
||||
cp $prep_results_dir/binary/live/filesystem.squashfs $TMPDIR/root/images/$B3SUM.rootfs
|
||||
ln -rsf $TMPDIR/root/images/$B3SUM.rootfs $TMPDIR/root/config/current.rootfs
|
||||
|
||||
mkdir -p $TMPDIR/next $TMPDIR/lower $TMPDIR/root/config/work $TMPDIR/root/config/overlay
|
||||
mount $TMPDIR/root/config/current.rootfs $TMPDIR/lower
|
||||
|
||||
mount -t overlay -o lowerdir=$TMPDIR/lower,workdir=$TMPDIR/root/config/work,upperdir=$TMPDIR/root/config/overlay overlay $TMPDIR/next
|
||||
|
||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||
sed -i 's| boot=startos| boot=startos init=/usr/lib/startos/scripts/init_resize\.sh|' $TMPDIR/boot/cmdline.txt
|
||||
rsync -a $SOURCE_DIR/raspberrypi/img/ $TMPDIR/next/
|
||||
fi
|
||||
|
||||
umount $TMPDIR/next
|
||||
umount $TMPDIR/lower
|
||||
|
||||
umount $TMPDIR/boot
|
||||
umount $TMPDIR/root
|
||||
|
||||
|
||||
e2fsck -fy $ROOT_DEV
|
||||
resize2fs -M $ROOT_DEV
|
||||
|
||||
BLOCK_COUNT=$(dumpe2fs -h $ROOT_DEV | awk '/^Block count:/ { print $3 }')
|
||||
BLOCK_SIZE=$(dumpe2fs -h $ROOT_DEV | awk '/^Block size:/ { print $3 }')
|
||||
ROOT_LEN=$((BLOCK_COUNT * BLOCK_SIZE))
|
||||
|
||||
losetup -d $ROOT_DEV
|
||||
losetup -d $BOOT_DEV
|
||||
|
||||
# Recreate partition 2 with the new size using sfdisk
|
||||
sfdisk $TARGET_NAME <<-EOF
|
||||
label: dos
|
||||
label-id: 0xcb15ae4d
|
||||
unit: sectors
|
||||
sector-size: 512
|
||||
|
||||
${TARGET_NAME}1 : start=$((BOOT_START / SECTOR_LEN)), size=$((BOOT_LEN / SECTOR_LEN)), type=c, bootable
|
||||
${TARGET_NAME}2 : start=$((ROOT_START / SECTOR_LEN)), size=$((ROOT_LEN / SECTOR_LEN)), type=83
|
||||
EOF
|
||||
|
||||
TARGET_SIZE=$((ROOT_START + ROOT_LEN))
|
||||
truncate -s $TARGET_SIZE $TARGET_NAME
|
||||
|
||||
mv $TARGET_NAME $RESULTS_DIR/$IMAGE_BASENAME.img
|
||||
|
||||
fi
|
||||
|
||||
chown $IB_UID:$IB_UID $RESULTS_DIR/$IMAGE_BASENAME.*
|
||||
@@ -1 +1 @@
|
||||
usb-storage.quirks=152d:0562:u,14cd:121c:u,0781:cfcb:u console=serial0,115200 console=tty1 root=PARTUUID=cb15ae4d-02 rootfstype=ext4 fsck.repair=yes rootwait cgroup_enable=cpuset cgroup_memory=1 cgroup_enable=memory quiet boot=startos
|
||||
usb-storage.quirks=152d:0562:u,14cd:121c:u,0781:cfcb:u console=serial0,115200 console=tty1 root=PARTUUID=cb15ae4d-02 rootfstype=ext4 fsck.repair=yes rootwait cgroup_enable=cpuset cgroup_memory=1 cgroup_enable=memory boot=startos
|
||||
46
build/image-recipe/raspberrypi/squashfs/boot/config.sh
Normal file
46
build/image-recipe/raspberrypi/squashfs/boot/config.sh
Normal file
@@ -0,0 +1,46 @@
|
||||
#!/bin/sh
|
||||
|
||||
cat << EOF
|
||||
|
||||
# Enable audio (loads snd_bcm2835)
|
||||
dtparam=audio=on
|
||||
|
||||
# Automatically load overlays for detected cameras
|
||||
camera_auto_detect=1
|
||||
|
||||
# Automatically load overlays for detected DSI displays
|
||||
display_auto_detect=1
|
||||
|
||||
# Enable DRM VC4 V3D driver
|
||||
dtoverlay=vc4-kms-v3d
|
||||
max_framebuffers=2
|
||||
|
||||
# Run in 64-bit mode
|
||||
arm_64bit=1
|
||||
|
||||
# Disable compensation for displays with overscan
|
||||
disable_overscan=1
|
||||
|
||||
[cm4]
|
||||
# Enable host mode on the 2711 built-in XHCI USB controller.
|
||||
# This line should be removed if the legacy DWC2 controller is required
|
||||
# (e.g. for USB device mode) or if USB support is not required.
|
||||
otg_mode=1
|
||||
|
||||
[all]
|
||||
|
||||
[pi4]
|
||||
# Run as fast as firmware / board allows
|
||||
arm_boost=1
|
||||
kernel=vmlinuz-${KERNEL_VERSION}-rpi-v8
|
||||
initramfs initrd.img-${KERNEL_VERSION}-rpi-v8 followkernel
|
||||
|
||||
[pi5]
|
||||
kernel=vmlinuz-${KERNEL_VERSION}-rpi-2712
|
||||
initramfs initrd.img-${KERNEL_VERSION}-rpi-2712 followkernel
|
||||
|
||||
[all]
|
||||
gpu_mem=16
|
||||
dtoverlay=pwm-2chan,disable-bt
|
||||
|
||||
EOF
|
||||
35
build/image-recipe/run-local-build.sh
Executable file
35
build/image-recipe/run-local-build.sh
Executable file
@@ -0,0 +1,35 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")/../.."
|
||||
|
||||
BASEDIR="$(pwd -P)"
|
||||
|
||||
SUITE=trixie
|
||||
|
||||
USE_TTY=
|
||||
if tty -s; then
|
||||
USE_TTY="-it"
|
||||
fi
|
||||
|
||||
dockerfile_hash=$(sha256sum ${BASEDIR}/build/image-recipe/Dockerfile | head -c 7)
|
||||
|
||||
docker_img_name="start9/build-iso:${SUITE}-${dockerfile_hash}"
|
||||
|
||||
platform=linux/${ARCH}
|
||||
case $ARCH in
|
||||
x86_64)
|
||||
platform=linux/amd64;;
|
||||
aarch64)
|
||||
platform=linux/arm64;;
|
||||
esac
|
||||
|
||||
if ! docker run --rm --platform=$platform "${docker_img_name}" true 2> /dev/null; then
|
||||
docker buildx build --load --platform=$platform --build-arg=SUITE=${SUITE} -t "${docker_img_name}" ./build/image-recipe
|
||||
fi
|
||||
|
||||
docker run $USE_TTY --rm --platform=$platform --privileged -v "$(pwd)/build/image-recipe:/root/image-recipe" -v "$(pwd)/results:/root/results" \
|
||||
-e IB_SUITE="$SUITE" \
|
||||
-e IB_UID="$UID" \
|
||||
-e IB_INCLUDE \
|
||||
"${docker_img_name}" /root/image-recipe/build.sh $@
|
||||
51
build/lib/grub-theme/theme.txt
Normal file
51
build/lib/grub-theme/theme.txt
Normal file
@@ -0,0 +1,51 @@
|
||||
desktop-image: "../splash.png"
|
||||
title-color: "#ffffff"
|
||||
title-font: "Unifont Regular 16"
|
||||
title-text: "StartOS Boot Menu with GRUB"
|
||||
message-font: "Unifont Regular 16"
|
||||
terminal-font: "Unifont Regular 16"
|
||||
|
||||
#help bar at the bottom
|
||||
+ label {
|
||||
top = 100%-50
|
||||
left = 0
|
||||
width = 100%
|
||||
height = 20
|
||||
text = "@KEYMAP_SHORT@"
|
||||
align = "center"
|
||||
color = "#ffffff"
|
||||
font = "Unifont Regular 16"
|
||||
}
|
||||
|
||||
#boot menu
|
||||
+ boot_menu {
|
||||
left = 10%
|
||||
width = 80%
|
||||
top = 52%
|
||||
height = 48%-80
|
||||
item_color = "#a8a8a8"
|
||||
item_font = "Unifont Regular 16"
|
||||
selected_item_color= "#ffffff"
|
||||
selected_item_font = "Unifont Regular 16"
|
||||
item_height = 16
|
||||
item_padding = 0
|
||||
item_spacing = 4
|
||||
icon_width = 0
|
||||
icon_heigh = 0
|
||||
item_icon_space = 0
|
||||
}
|
||||
|
||||
#progress bar
|
||||
+ progress_bar {
|
||||
id = "__timeout__"
|
||||
left = 15%
|
||||
top = 100%-80
|
||||
height = 16
|
||||
width = 70%
|
||||
font = "Unifont Regular 16"
|
||||
text_color = "#000000"
|
||||
fg_color = "#ffffff"
|
||||
bg_color = "#a8a8a8"
|
||||
border_color = "#ffffff"
|
||||
text = "@TIMEOUT_NOTIFICATION_LONG@"
|
||||
}
|
||||
147
build/lib/motd
147
build/lib/motd
@@ -1,34 +1,123 @@
|
||||
#!/bin/sh
|
||||
printf "\n"
|
||||
printf "Welcome to\n"
|
||||
cat << "ASCII"
|
||||
|
||||
███████
|
||||
█ █ █
|
||||
█ █ █ █
|
||||
█ █ █ █
|
||||
█ █ █ █
|
||||
█ █ █ █
|
||||
█ █
|
||||
███████
|
||||
parse_essential_db_info() {
|
||||
DB_DUMP="/tmp/startos_db.json"
|
||||
|
||||
_____ __ ___ __ __
|
||||
(_ | /\ |__) | / \(_
|
||||
__) | / \| \ | \__/__)
|
||||
ASCII
|
||||
printf " v$(cat /usr/lib/startos/VERSION.txt)\n\n"
|
||||
printf " %s (%s %s)\n" "$(uname -o)" "$(uname -r)" "$(uname -m)"
|
||||
printf " Git Hash: $(cat /usr/lib/startos/GIT_HASH.txt)"
|
||||
if [ -n "$(cat /usr/lib/startos/ENVIRONMENT.txt)" ]; then
|
||||
printf " ~ $(cat /usr/lib/startos/ENVIRONMENT.txt)\n"
|
||||
else
|
||||
printf "\n"
|
||||
if command -v start-cli >/dev/null 2>&1; then
|
||||
timeout 30 start-cli db dump > "$DB_DUMP" 2>/dev/null || return 1
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
|
||||
if command -v jq >/dev/null 2>&1 && [ -f "$DB_DUMP" ]; then
|
||||
HOSTNAME=$(jq -r '.value.serverInfo.hostname // "unknown"' "$DB_DUMP" 2>/dev/null)
|
||||
VERSION=$(jq -r '.value.serverInfo.version // "unknown"' "$DB_DUMP" 2>/dev/null)
|
||||
RAM_BYTES=$(jq -r '.value.serverInfo.ram // 0' "$DB_DUMP" 2>/dev/null)
|
||||
WAN_IP=$(jq -r '.value.serverInfo.network.gateways[].ipInfo.wanIp // "unknown"' "$DB_DUMP" 2>/dev/null | head -1)
|
||||
NTP_SYNCED=$(jq -r '.value.serverInfo.ntpSynced // false' "$DB_DUMP" 2>/dev/null)
|
||||
|
||||
if [ "$RAM_BYTES" != "0" ] && [ "$RAM_BYTES" != "null" ]; then
|
||||
RAM_GB=$(echo "scale=1; $RAM_BYTES / 1073741824" | bc 2>/dev/null || echo "unknown")
|
||||
else
|
||||
RAM_GB="unknown"
|
||||
fi
|
||||
|
||||
RUNNING_SERVICES=$(jq -r '[.value.packageData[] | select(.statusInfo.started != null)] | length' "$DB_DUMP" 2>/dev/null)
|
||||
TOTAL_SERVICES=$(jq -r '.value.packageData | length' "$DB_DUMP" 2>/dev/null)
|
||||
|
||||
rm -f "$DB_DUMP"
|
||||
return 0
|
||||
else
|
||||
rm -f "$DB_DUMP" 2>/dev/null
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
DB_INFO_AVAILABLE=0
|
||||
if parse_essential_db_info; then
|
||||
DB_INFO_AVAILABLE=1
|
||||
fi
|
||||
|
||||
printf "\n"
|
||||
printf " * Documentation: https://docs.start9.com\n"
|
||||
printf " * Management: https://%s.local\n" "$(hostname)"
|
||||
printf " * Support: https://start9.com/contact\n"
|
||||
printf " * Source Code: https://github.com/Start9Labs/start-os\n"
|
||||
printf " * License: MIT\n"
|
||||
printf "\n"
|
||||
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$VERSION" != "unknown" ]; then
|
||||
version_display="v$VERSION"
|
||||
else
|
||||
version_display="v$(cat /usr/lib/startos/VERSION.txt 2>/dev/null || echo 'unknown')"
|
||||
fi
|
||||
|
||||
printf "\n\033[1;37m ▄▄▀▀▀▀▀▄▄\033[0m\n"
|
||||
printf "\033[1;37m ▄▀ ▄ ▀▄ ▄▄▄▄▄ ▄▄▄▄▄▄▄ ▄ ▄▄▄▄▄ ▄▄▄▄▄▄▄ \033[1;31m▄██████▄ ▄██████\033[0m\n"
|
||||
printf "\033[1;37m █ █ █ █ █ █ █ █ █ ▀▄ █ \033[1;31m██ ██ ██ \033[0m\n"
|
||||
printf "\033[1;37m█ █ █ █ ▀▄▄▄▄ █ █ █ █ ▄▄▄▀ █ \033[1;31m██ ██ ▀█████▄\033[0m\n"
|
||||
printf "\033[1;37m█ █ █ █ █ █ █ █ █ ▀▄ █ \033[1;31m██ ██ ██\033[0m\n"
|
||||
printf "\033[1;37m █ █ █ █ ▄▄▄▄▄▀ █ █ █ █ ▀▄ █ \033[1;31m▀██████▀ ██████▀\033[0m\n"
|
||||
printf "\033[1;37m █ █\033[0m\n"
|
||||
printf "\033[1;37m ▀▀▄▄▄▀▀ $version_display\033[0m\n\n"
|
||||
|
||||
uptime_str=$(uptime | awk -F'up ' '{print $2}' | awk -F',' '{print $1}' | sed 's/^ *//')
|
||||
|
||||
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$RAM_GB" != "unknown" ]; then
|
||||
memory_used=$(free -m | awk 'NR==2{printf "%.0fMB", $3}')
|
||||
memory_display="$memory_used / ${RAM_GB}GB"
|
||||
else
|
||||
memory_display=$(free -m | awk 'NR==2{printf "%.0fMB / %.0fMB", $3, $2}')
|
||||
fi
|
||||
|
||||
root_usage=$(df -h / | awk 'NR==2{printf "%s (%s free)", $5, $4}')
|
||||
|
||||
if [ -d "/media/startos/data/package-data" ]; then
|
||||
data_usage=$(df -h /media/startos/data/package-data | awk 'NR==2{printf "%s (%s free)", $5, $4}')
|
||||
else
|
||||
data_usage="N/A"
|
||||
fi
|
||||
|
||||
if [ "$DB_INFO_AVAILABLE" -eq 1 ]; then
|
||||
services_text="$RUNNING_SERVICES/$TOTAL_SERVICES running"
|
||||
else
|
||||
services_text="Unknown"
|
||||
fi
|
||||
|
||||
local_ip=$(ip route get 1.1.1.1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if($i=="src") print $(i+1)}' | head -1)
|
||||
if [ -z "$local_ip" ]; then local_ip="N/A"; fi
|
||||
|
||||
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$WAN_IP" != "unknown" ]; then
|
||||
wan_ip="$WAN_IP"
|
||||
else
|
||||
wan_ip="N/A"
|
||||
fi
|
||||
|
||||
printf " \033[1;37m┌─ SYSTEM STATUS ───────────────────────────────────────────────────┐\033[0m\n"
|
||||
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Uptime:" "$uptime_str" "Memory:" "$memory_display"
|
||||
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Root:" "$root_usage" "Data:" "$data_usage"
|
||||
|
||||
if [ "$DB_INFO_AVAILABLE" -eq 1 ]; then
|
||||
if [ "$RUNNING_SERVICES" -eq "$TOTAL_SERVICES" ] && [ "$TOTAL_SERVICES" -gt 0 ]; then
|
||||
printf " \033[1;37m│\033[0m %-8s \033[0;32m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
|
||||
elif [ "$RUNNING_SERVICES" -gt 0 ]; then
|
||||
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
|
||||
else
|
||||
printf " \033[1;37m│\033[0m %-8s \033[0;31m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
|
||||
fi
|
||||
else
|
||||
printf " \033[1;37m│\033[0m %-8s \033[0;37m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
|
||||
fi
|
||||
|
||||
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$NTP_SYNCED" = "true" ]; then
|
||||
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;32m%-23s\033[0m \033[1;37m│\033[0m\n" "Local:" "$local_ip" "NTP:" "Synced"
|
||||
elif [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$NTP_SYNCED" = "false" ]; then
|
||||
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;31m%-23s\033[0m \033[1;37m│\033[0m\n" "Local:" "$local_ip" "NTP:" "Not Synced"
|
||||
else
|
||||
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;37m%-23s\033[0m \033[1;37m│\033[0m\n" "Local:" "$local_ip" "NTP:" "Unknown"
|
||||
fi
|
||||
|
||||
printf " \033[1;37m└───────────────────────────────────────────────────────────────────┘\033[0m"
|
||||
|
||||
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$HOSTNAME" != "unknown" ]; then
|
||||
web_url="https://$HOSTNAME.local"
|
||||
else
|
||||
web_url="https://$(hostname).local"
|
||||
fi
|
||||
printf "\n \033[1;37m┌──────────────────────────────────────────────────── QUICK ACCESS ─┐\033[0m\n"
|
||||
printf " \033[1;37m│\033[0m Web Interface: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "$web_url"
|
||||
printf " \033[1;37m│\033[0m Documentation: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://staging.docs.start9.com"
|
||||
printf " \033[1;37m│\033[0m Support: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://start9.com/contact"
|
||||
printf " \033[1;37m└───────────────────────────────────────────────────────────────────┘\033[0m\n\n"
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
#!/bin/sh
|
||||
|
||||
|
||||
if cat /sys/class/drm/*/status | grep -qw connected; then
|
||||
exit 0
|
||||
else
|
||||
exit 1
|
||||
fi
|
||||
@@ -1,6 +1,6 @@
|
||||
#!/bin/bash
|
||||
|
||||
SOURCE_DIR="$(dirname "${BASH_SOURCE[0]}")"
|
||||
SOURCE_DIR="$(dirname $(realpath "${BASH_SOURCE[0]}"))"
|
||||
|
||||
if [ "$UID" -ne 0 ]; then
|
||||
>&2 echo 'Must be run as root'
|
||||
@@ -10,24 +10,24 @@ fi
|
||||
POSITIONAL_ARGS=()
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case $1 in
|
||||
--no-sync)
|
||||
NO_SYNC=1
|
||||
shift
|
||||
;;
|
||||
--create)
|
||||
ONLY_CREATE=1
|
||||
shift
|
||||
;;
|
||||
-*|--*)
|
||||
echo "Unknown option $1"
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
POSITIONAL_ARGS+=("$1") # save positional arg
|
||||
shift # past argument
|
||||
;;
|
||||
esac
|
||||
case $1 in
|
||||
--no-sync)
|
||||
NO_SYNC=1
|
||||
shift
|
||||
;;
|
||||
--create)
|
||||
ONLY_CREATE=1
|
||||
shift
|
||||
;;
|
||||
-*|--*)
|
||||
echo "Unknown option $1"
|
||||
exit 1
|
||||
;;
|
||||
*)
|
||||
POSITIONAL_ARGS+=("$1") # save positional arg
|
||||
shift # past argument
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
|
||||
@@ -35,7 +35,7 @@ set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
|
||||
if [ -z "$NO_SYNC" ]; then
|
||||
echo 'Syncing...'
|
||||
umount -R /media/startos/next 2> /dev/null
|
||||
umount -R /media/startos/upper 2> /dev/null
|
||||
umount /media/startos/upper 2> /dev/null
|
||||
rm -rf /media/startos/upper /media/startos/next
|
||||
mkdir /media/startos/upper
|
||||
mount -t tmpfs tmpfs /media/startos/upper
|
||||
@@ -43,8 +43,6 @@ if [ -z "$NO_SYNC" ]; then
|
||||
mount -t overlay \
|
||||
-olowerdir=/media/startos/current,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \
|
||||
overlay /media/startos/next
|
||||
mkdir -p /media/startos/next/media/startos/root
|
||||
mount --bind /media/startos/root /media/startos/next/media/startos/root
|
||||
fi
|
||||
|
||||
if [ -n "$ONLY_CREATE" ]; then
|
||||
@@ -56,12 +54,18 @@ mkdir -p /media/startos/next/dev
|
||||
mkdir -p /media/startos/next/sys
|
||||
mkdir -p /media/startos/next/proc
|
||||
mkdir -p /media/startos/next/boot
|
||||
mkdir -p /media/startos/next/media/startos/root
|
||||
mount --bind /run /media/startos/next/run
|
||||
mount --bind /tmp /media/startos/next/tmp
|
||||
mount --bind /dev /media/startos/next/dev
|
||||
mount --bind /sys /media/startos/next/sys
|
||||
mount --bind /proc /media/startos/next/proc
|
||||
mount --bind /boot /media/startos/next/boot
|
||||
mount --bind /media/startos/root /media/startos/next/media/startos/root
|
||||
|
||||
if mountpoint /sys/firmware/efi/efivars 2>&1 > /dev/null; then
|
||||
mount --bind /sys/firmware/efi/efivars /media/startos/next/sys/firmware/efi/efivars
|
||||
fi
|
||||
|
||||
if [ -z "$*" ]; then
|
||||
chroot /media/startos/next
|
||||
@@ -71,6 +75,10 @@ else
|
||||
CHROOT_RES=$?
|
||||
fi
|
||||
|
||||
if mountpoint /media/startos/next/sys/firmware/efi/efivars 2>&1 > /dev/null; then
|
||||
umount /media/startos/next/sys/firmware/efi/efivars
|
||||
fi
|
||||
|
||||
umount /media/startos/next/run
|
||||
umount /media/startos/next/tmp
|
||||
umount /media/startos/next/dev
|
||||
@@ -87,11 +95,12 @@ if [ "$CHROOT_RES" -eq 0 ]; then
|
||||
|
||||
echo 'Upgrading...'
|
||||
|
||||
rm -f /media/startos/images/next.squashfs
|
||||
if ! time mksquashfs /media/startos/next /media/startos/images/next.squashfs -b 4096 -comp gzip; then
|
||||
umount -R /media/startos/next
|
||||
umount -R /media/startos/upper
|
||||
rm -rf /media/startos/upper /media/startos/next
|
||||
exit 1
|
||||
umount -l /media/startos/next
|
||||
umount -l /media/startos/upper
|
||||
rm -rf /media/startos/upper /media/startos/next
|
||||
exit 1
|
||||
fi
|
||||
hash=$(b3sum /media/startos/images/next.squashfs | head -c 32)
|
||||
mv /media/startos/images/next.squashfs /media/startos/images/${hash}.rootfs
|
||||
@@ -103,5 +112,5 @@ if [ "$CHROOT_RES" -eq 0 ]; then
|
||||
fi
|
||||
|
||||
umount -R /media/startos/next
|
||||
umount -R /media/startos/upper
|
||||
umount /media/startos/upper
|
||||
rm -rf /media/startos/upper /media/startos/next
|
||||
@@ -27,7 +27,6 @@ user_pref("browser.crashReports.unsubmittedCheck.autoSubmit2", false);
|
||||
user_pref("browser.newtabpage.activity-stream.feeds.asrouterfeed", false);
|
||||
user_pref("browser.newtabpage.activity-stream.feeds.topsites", false);
|
||||
user_pref("browser.newtabpage.activity-stream.showSponsoredTopSites", false);
|
||||
user_pref("browser.onboarding.enabled", false);
|
||||
user_pref("browser.ping-centre.telemetry", false);
|
||||
user_pref("browser.pocket.enabled", false);
|
||||
user_pref("browser.safebrowsing.blockedURIs.enabled", false);
|
||||
@@ -43,7 +42,7 @@ user_pref("browser.startup.homepage_override.mstone", "ignore");
|
||||
user_pref("browser.theme.content-theme", 0);
|
||||
user_pref("browser.theme.toolbar-theme", 0);
|
||||
user_pref("browser.urlbar.groupLabels.enabled", false);
|
||||
user_pref("browser.urlbar.suggest.searches" false);
|
||||
user_pref("browser.urlbar.suggest.searches", false);
|
||||
user_pref("datareporting.policy.firstRunURL", "");
|
||||
user_pref("datareporting.healthreport.service.enabled", false);
|
||||
user_pref("datareporting.healthreport.uploadEnabled", false);
|
||||
@@ -52,10 +51,9 @@ user_pref("dom.securecontext.allowlist_onions", true);
|
||||
user_pref("dom.securecontext.whitelist_onions", true);
|
||||
user_pref("experiments.enabled", false);
|
||||
user_pref("experiments.activeExperiment", false);
|
||||
user_pref("experiments.supported", false);
|
||||
user_pref("extensions.activeThemeID", "firefox-compact-dark@mozilla.org");
|
||||
user_pref("extensions.blocklist.enabled", false);
|
||||
user_pref("extensions.getAddons.cache.enabled", false);
|
||||
user_pref("extensions.htmlaboutaddons.recommendations.enabled", false);
|
||||
user_pref("extensions.pocket.enabled", false);
|
||||
user_pref("extensions.update.enabled", false);
|
||||
user_pref("extensions.shield-recipe-client.enabled", false);
|
||||
@@ -66,9 +64,15 @@ user_pref("messaging-system.rsexperimentloader.enabled", false);
|
||||
user_pref("network.allow-experiments", false);
|
||||
user_pref("network.captive-portal-service.enabled", false);
|
||||
user_pref("network.connectivity-service.enabled", false);
|
||||
user_pref("network.proxy.autoconfig_url", "file:///usr/lib/startos/proxy.pac");
|
||||
user_pref("network.proxy.socks", "10.0.3.1");
|
||||
user_pref("network.proxy.socks_port", 9050);
|
||||
user_pref("network.proxy.socks_version", 5);
|
||||
user_pref("network.proxy.socks_remote_dns", true);
|
||||
user_pref("network.proxy.type", 2);
|
||||
user_pref("network.proxy.type", 1);
|
||||
user_pref("privacy.resistFingerprinting", true);
|
||||
//Enable letterboxing if we want the window size sent to the server to snap to common resolutions:
|
||||
//user_pref("privacy.resistFingerprinting.letterboxing", true);
|
||||
user_pref("privacy.trackingprotection.enabled", true);
|
||||
user_pref("signon.rememberSignons", false);
|
||||
user_pref("toolkit.telemetry.archive.enabled", false);
|
||||
user_pref("toolkit.telemetry.bhrPing.enabled", false);
|
||||
@@ -81,6 +85,17 @@ user_pref("toolkit.telemetry.shutdownPingSender.enabled", false);
|
||||
user_pref("toolkit.telemetry.unified", false);
|
||||
user_pref("toolkit.telemetry.updatePing.enabled", false);
|
||||
user_pref("toolkit.telemetry.cachedClientID", "");
|
||||
//Blocking automatic Mozilla CDN server requests
|
||||
user_pref("extensions.getAddons.showPane", false);
|
||||
user_pref("extensions.getAddons.cache.enabled", false);
|
||||
//user_pref("services.settings.server", ""); // Remote settings server (HSTS preload updates and Cerfiticate Revocation Lists are fetched)
|
||||
user_pref("browser.aboutHomeSnippets.updateUrl", "");
|
||||
user_pref("browser.newtabpage.activity-stream.feeds.snippets", false);
|
||||
user_pref("browser.newtabpage.activity-stream.feeds.section.topstories", false);
|
||||
user_pref("browser.newtabpage.activity-stream.feeds.system.topstories", false);
|
||||
user_pref("browser.newtabpage.activity-stream.feeds.discoverystreamfeed", false);
|
||||
user_pref("browser.safebrowsing.provider.mozilla.updateURL", "");
|
||||
user_pref("browser.safebrowsing.provider.mozilla.gethashURL", "");
|
||||
EOF
|
||||
|
||||
ln -sf /usr/lib/$(uname -m)-linux-gnu/pkcs11/p11-kit-trust.so /usr/lib/firefox-esr/libnssckbi.so
|
||||
@@ -91,15 +106,6 @@ cat > /home/kiosk/kiosk.sh << 'EOF'
|
||||
while ! curl "http://localhost" > /dev/null; do
|
||||
sleep 1
|
||||
done
|
||||
while ! /usr/lib/startos/scripts/check-monitor; do
|
||||
sleep 15
|
||||
done
|
||||
(
|
||||
while /usr/lib/startos/scripts/check-monitor; do
|
||||
sleep 15
|
||||
done
|
||||
killall firefox-esr
|
||||
) &
|
||||
matchbox-window-manager -use_titlebar no &
|
||||
cp -r /home/kiosk/fx-profile /home/kiosk/fx-profile-tmp
|
||||
firefox-esr http://localhost --profile /home/kiosk/fx-profile-tmp
|
||||
|
||||
55
build/lib/scripts/forward-port
Executable file
55
build/lib/scripts/forward-port
Executable file
@@ -0,0 +1,55 @@
|
||||
#!/bin/bash
|
||||
|
||||
if [ -z "$sip" ] || [ -z "$dip" ] || [ -z "$dprefix" ] || [ -z "$sport" ] || [ -z "$dport" ]; then
|
||||
>&2 echo 'missing required env var'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
NAME="F$(echo "$sip:$sport -> $dip/$dprefix:$dport" | sha256sum | head -c 15)"
|
||||
|
||||
for kind in INPUT FORWARD ACCEPT; do
|
||||
if ! iptables -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
|
||||
iptables -N "${NAME}_${kind}" 2> /dev/null
|
||||
iptables -A $kind -j "${NAME}_${kind}"
|
||||
fi
|
||||
done
|
||||
for kind in PREROUTING INPUT OUTPUT POSTROUTING; do
|
||||
if ! iptables -t nat -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
|
||||
iptables -t nat -N "${NAME}_${kind}" 2> /dev/null
|
||||
iptables -t nat -A $kind -j "${NAME}_${kind}"
|
||||
fi
|
||||
done
|
||||
|
||||
err=0
|
||||
trap 'err=1' ERR
|
||||
|
||||
for kind in INPUT FORWARD ACCEPT; do
|
||||
iptables -F "${NAME}_${kind}" 2> /dev/null
|
||||
done
|
||||
for kind in PREROUTING INPUT OUTPUT POSTROUTING; do
|
||||
iptables -t nat -F "${NAME}_${kind}" 2> /dev/null
|
||||
done
|
||||
if [ "$UNDO" = 1 ]; then
|
||||
conntrack -D -p tcp -d $sip --dport $sport || true # conntrack returns exit 1 if no connections are active
|
||||
conntrack -D -p udp -d $sip --dport $sport || true # conntrack returns exit 1 if no connections are active
|
||||
exit $err
|
||||
fi
|
||||
|
||||
# DNAT: rewrite destination for incoming packets (external traffic)
|
||||
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||
|
||||
# DNAT: rewrite destination for locally-originated packets (hairpin from host itself)
|
||||
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||
|
||||
# MASQUERADE: rewrite source for all forwarded traffic to the destination
|
||||
# This ensures responses are routed back through the host regardless of source IP
|
||||
iptables -t nat -A ${NAME}_POSTROUTING -d "$dip" -p tcp --dport "$dport" -j MASQUERADE
|
||||
iptables -t nat -A ${NAME}_POSTROUTING -d "$dip" -p udp --dport "$dport" -j MASQUERADE
|
||||
|
||||
# Allow new connections to be forwarded to the destination
|
||||
iptables -A ${NAME}_FORWARD -d $dip -p tcp --dport $dport -m state --state NEW -j ACCEPT
|
||||
iptables -A ${NAME}_FORWARD -d $dip -p udp --dport $dport -m state --state NEW -j ACCEPT
|
||||
|
||||
exit $err
|
||||
20
build/lib/scripts/install-equivs
Executable file
20
build/lib/scripts/install-equivs
Executable file
@@ -0,0 +1,20 @@
|
||||
#!/bin/bash
|
||||
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
export DEBCONF_NONINTERACTIVE_SEEN=true
|
||||
|
||||
TMP_DIR=$(mktemp -d)
|
||||
|
||||
(
|
||||
set -e
|
||||
cd $TMP_DIR
|
||||
|
||||
cat > control.equivs
|
||||
equivs-build control.equivs
|
||||
apt-get install -y ./*.deb < /dev/null
|
||||
)
|
||||
|
||||
rm -rf $TMP_DIR
|
||||
|
||||
echo Install complete. >&2
|
||||
exit 0
|
||||
35
build/lib/scripts/prune-boot
Executable file
35
build/lib/scripts/prune-boot
Executable file
@@ -0,0 +1,35 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
if [ "$UID" -ne 0 ]; then
|
||||
>&2 echo 'Must be run as root'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Get the current kernel version
|
||||
current_kernel=$(uname -r)
|
||||
|
||||
echo "Current kernel: $current_kernel"
|
||||
echo "Searching for old kernel files in /boot..."
|
||||
|
||||
# Extract base kernel version (without possible suffixes)
|
||||
current_base=$(echo "$current_kernel" | sed 's/-.*//')
|
||||
|
||||
cd /boot || { echo "/boot directory not found!"; exit 1; }
|
||||
|
||||
for file in vmlinuz-* initrd.img-* System.map-* config-*; do
|
||||
# Extract version from filename
|
||||
version=$(echo "$file" | sed -E 's/^[^0-9]*([0-9][^ ]*).*/\1/')
|
||||
# Skip if file matches current kernel version
|
||||
if [[ "$file" == *"$current_kernel"* ]]; then
|
||||
continue
|
||||
fi
|
||||
# Compare versions, delete if less than current
|
||||
if dpkg --compare-versions "$version" lt "$current_kernel"; then
|
||||
echo "Deleting $file (version $version is older than $current_kernel)"
|
||||
sudo rm -f "$file"
|
||||
fi
|
||||
done
|
||||
|
||||
echo "Old kernel files deleted."
|
||||
@@ -29,10 +29,13 @@ if [ -z "$needed" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
MARGIN=${MARGIN:-1073741824}
|
||||
target=$((needed + MARGIN))
|
||||
|
||||
if [ -h /media/startos/config/current.rootfs ] && [ -e /media/startos/config/current.rootfs ]; then
|
||||
echo 'Pruning...'
|
||||
current="$(readlink -f /media/startos/config/current.rootfs)"
|
||||
while [[ "$(df -B1 --output=avail --sync /media/startos/images | tail -n1)" -lt "$needed" ]]; do
|
||||
while [[ "$(df -B1 --output=avail --sync /media/startos/images | tail -n1)" -lt "$target" ]]; do
|
||||
to_prune="$(ls -t1 /media/startos/images/*.rootfs /media/startos/images/*.squashfs 2> /dev/null | grep -v "$current" | tail -n1)"
|
||||
if [ -e "$to_prune" ]; then
|
||||
echo " Pruning $to_prune"
|
||||
|
||||
@@ -83,6 +83,7 @@ local_mount_root()
|
||||
if [ -d "$image" ]; then
|
||||
mount -r --bind $image /lower
|
||||
elif [ -f "$image" ]; then
|
||||
modprobe loop
|
||||
modprobe squashfs
|
||||
mount -r $image /lower
|
||||
else
|
||||
|
||||
@@ -1,36 +1,64 @@
|
||||
#!/bin/bash
|
||||
|
||||
fail=$(printf " [\033[31m fail \033[0m]")
|
||||
pass=$(printf " [\033[32m pass \033[0m]")
|
||||
# --- Config ---
|
||||
# Colors (using printf to ensure compatibility)
|
||||
GRAY=$(printf '\033[90m')
|
||||
GREEN=$(printf '\033[32m')
|
||||
RED=$(printf '\033[31m')
|
||||
NC=$(printf '\033[0m') # No Color
|
||||
|
||||
# Proxies to test
|
||||
proxies=(
|
||||
"Host Tor|127.0.1.1:9050"
|
||||
"Startd Tor|10.0.3.1:9050"
|
||||
)
|
||||
|
||||
# Default URLs
|
||||
onion_list=(
|
||||
"The Tor Project|http://2gzyxa5ihm7nsggfxnu52rck2vv4rvmdlkiu3zzui5du4xyclen53wid.onion"
|
||||
"Start9|http://privacy34kn4ez3y3nijweec6w4g54i3g54sdv7r5mr6soma3w4begyd.onion"
|
||||
"Mempool|http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion"
|
||||
"DuckDuckGo|https://duckduckgogg42xjoc72x3sjasowoarfbgcmvfimaftt6twagswzczad.onion"
|
||||
"Brave Search|https://search.brave4u7jddbv7cyviptqjc7jusxh72uik7zt6adtckl5f4nwy2v72qd.onion"
|
||||
)
|
||||
|
||||
# Check if ~/.startos/tor-check.list exists and read its contents if available
|
||||
if [ -f ~/.startos/tor-check.list ]; then
|
||||
while IFS= read -r line; do
|
||||
# Check if the line starts with a #
|
||||
if [[ ! "$line" =~ ^# ]]; then
|
||||
onion_list+=("$line")
|
||||
# Load custom list
|
||||
[ -f ~/.startos/tor-check.list ] && readarray -t custom_list < <(grep -v '^#' ~/.startos/tor-check.list) && onion_list+=("${custom_list[@]}")
|
||||
|
||||
# --- Functions ---
|
||||
print_line() { printf "${GRAY}────────────────────────────────────────${NC}\n"; }
|
||||
|
||||
# --- Main ---
|
||||
echo "Testing Onion Connections..."
|
||||
|
||||
for proxy_info in "${proxies[@]}"; do
|
||||
proxy_name="${proxy_info%%|*}"
|
||||
proxy_addr="${proxy_info#*|}"
|
||||
|
||||
print_line
|
||||
printf "${GRAY}Proxy: %s (%s)${NC}\n" "$proxy_name" "$proxy_addr"
|
||||
|
||||
for data in "${onion_list[@]}"; do
|
||||
name="${data%%|*}"
|
||||
url="${data#*|}"
|
||||
|
||||
# Capture verbose output + http code.
|
||||
# --no-progress-meter: Suppresses the "0 0 0" stats but keeps -v output
|
||||
output=$(curl -v --no-progress-meter --max-time 15 --socks5-hostname "$proxy_addr" "$url" 2>&1)
|
||||
exit_code=$?
|
||||
|
||||
if [ $exit_code -eq 0 ]; then
|
||||
printf " ${GREEN}[pass]${NC} %s (%s)\n" "$name" "$url"
|
||||
else
|
||||
printf " ${RED}[fail]${NC} %s (%s)\n" "$name" "$url"
|
||||
printf " ${RED}↳ Curl Error %s${NC}\n" "$exit_code"
|
||||
|
||||
# Print the last 4 lines of verbose log to show the specific handshake error
|
||||
# We look for lines starting with '*' or '>' or '<' to filter out junk if any remains
|
||||
echo "$output" | tail -n 4 | sed "s/^/ ${GRAY}/"
|
||||
fi
|
||||
done < ~/.startos/tor-check.list
|
||||
fi
|
||||
|
||||
echo "Testing connection to Onion Pages ..."
|
||||
|
||||
for data in "${onion_list[@]}"; do
|
||||
name="${data%%|*}"
|
||||
url="${data#*|}"
|
||||
if curl --socks5-hostname localhost:9050 "$url" > /dev/null 2>&1; then
|
||||
echo " ${pass}: $name ($url) "
|
||||
else
|
||||
echo " ${fail}: $name ($url) "
|
||||
fi
|
||||
done
|
||||
done
|
||||
|
||||
echo
|
||||
echo "Done."
|
||||
print_line
|
||||
# Reset color just in case
|
||||
printf "${NC}"
|
||||
|
||||
82
build/lib/scripts/upgrade
Executable file
82
build/lib/scripts/upgrade
Executable file
@@ -0,0 +1,82 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
SOURCE_DIR="$(dirname $(realpath "${BASH_SOURCE[0]}"))"
|
||||
|
||||
if [ "$UID" -ne 0 ]; then
|
||||
>&2 echo 'Must be run as root'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [ -f "$1" ]; then
|
||||
>&2 echo "usage: $0 <SQUASHFS>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo 'Upgrading...'
|
||||
|
||||
hash=$(b3sum $1 | head -c 32)
|
||||
if [ -n "$2" ] && [ "$hash" != "$CHECKSUM" ]; then
|
||||
>&2 echo 'Checksum mismatch'
|
||||
exit 2
|
||||
fi
|
||||
|
||||
unsquashfs -f -d / $1 boot
|
||||
|
||||
umount -R /media/startos/next 2> /dev/null || true
|
||||
umount /media/startos/upper 2> /dev/null || true
|
||||
umount /media/startos/lower 2> /dev/null || true
|
||||
|
||||
mkdir -p /media/startos/upper
|
||||
mount -t tmpfs tmpfs /media/startos/upper
|
||||
mkdir -p /media/startos/lower /media/startos/upper/data /media/startos/upper/work /media/startos/next
|
||||
mount $1 /media/startos/lower
|
||||
mount -t overlay \
|
||||
-olowerdir=/media/startos/lower,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \
|
||||
overlay /media/startos/next
|
||||
|
||||
mkdir -p /media/startos/next/run
|
||||
mkdir -p /media/startos/next/dev
|
||||
mkdir -p /media/startos/next/sys
|
||||
mkdir -p /media/startos/next/proc
|
||||
mkdir -p /media/startos/next/boot
|
||||
mkdir -p /media/startos/next/media/startos/root
|
||||
mount --bind /run /media/startos/next/run
|
||||
mount --bind /tmp /media/startos/next/tmp
|
||||
mount --bind /dev /media/startos/next/dev
|
||||
mount --bind /sys /media/startos/next/sys
|
||||
mount --bind /proc /media/startos/next/proc
|
||||
mount --bind /boot /media/startos/next/boot
|
||||
mount --bind /media/startos/root /media/startos/next/media/startos/root
|
||||
|
||||
if mountpoint /boot/efi 2>&1 > /dev/null; then
|
||||
mkdir -p /media/startos/next/boot/efi
|
||||
mount --bind /boot/efi /media/startos/next/boot/efi
|
||||
fi
|
||||
|
||||
if mountpoint /sys/firmware/efi/efivars 2>&1 > /dev/null; then
|
||||
mount --bind /sys/firmware/efi/efivars /media/startos/next/sys/firmware/efi/efivars
|
||||
fi
|
||||
|
||||
chroot /media/startos/next bash -e << "EOF"
|
||||
|
||||
if [ -f /boot/grub/grub.cfg ]; then
|
||||
grub-install /dev/$(eval $(lsblk -o MOUNTPOINT,PKNAME -P | grep 'MOUNTPOINT="/media/startos/root"') && echo $PKNAME)
|
||||
update-grub
|
||||
fi
|
||||
|
||||
EOF
|
||||
|
||||
sync
|
||||
|
||||
umount -Rl /media/startos/next
|
||||
umount /media/startos/upper
|
||||
umount /media/startos/lower
|
||||
|
||||
mv $1 /media/startos/images/${hash}.rootfs
|
||||
ln -rsf /media/startos/images/${hash}.rootfs /media/startos/config/current.rootfs
|
||||
|
||||
sync
|
||||
|
||||
echo 'System upgrade complete. Reboot to apply changes...'
|
||||
|
Before Width: | Height: | Size: 9.6 KiB After Width: | Height: | Size: 9.6 KiB |
25
build/os-compat/buildenv.Dockerfile
Normal file
25
build/os-compat/buildenv.Dockerfile
Normal file
@@ -0,0 +1,25 @@
|
||||
FROM debian:trixie
|
||||
|
||||
RUN apt-get update && \
|
||||
apt-get install -y \
|
||||
ca-certificates \
|
||||
curl \
|
||||
gpg \
|
||||
build-essential \
|
||||
sed \
|
||||
grep \
|
||||
gawk \
|
||||
jq \
|
||||
gzip \
|
||||
brotli \
|
||||
squashfs-tools \
|
||||
git \
|
||||
rsync \
|
||||
b3sum \
|
||||
sudo \
|
||||
nodejs
|
||||
|
||||
RUN git config --global --add safe.directory /root/start-os
|
||||
|
||||
RUN mkdir -p /root/start-os
|
||||
WORKDIR /root/start-os
|
||||
30
build/os-compat/run-compat.sh
Executable file
30
build/os-compat/run-compat.sh
Executable file
@@ -0,0 +1,30 @@
|
||||
#!/bin/bash
|
||||
|
||||
pwd=$(pwd)
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")/../.."
|
||||
|
||||
set -e
|
||||
|
||||
rel_pwd="${pwd#"$(pwd)"}"
|
||||
|
||||
COMPAT_ARCH=$(uname -m)
|
||||
|
||||
platform=linux/$COMPAT_ARCH
|
||||
|
||||
case $COMPAT_ARCH in
|
||||
x86_64)
|
||||
platform=linux/amd64;;
|
||||
aarch64)
|
||||
platform=linux/arm64;;
|
||||
esac
|
||||
|
||||
if [ "$FORCE_COMPAT" = 1 ] || ( [ "$REQUIRES" = "linux" ] && [ "$(uname -s)" != "Linux" ] ) || ( [ "$REQUIRES" = "debian" ] && ! which dpkg > /dev/null ); then
|
||||
if tty -s; then
|
||||
USE_TTY="-it"
|
||||
fi
|
||||
|
||||
docker run $USE_TTY --platform=$platform -eARCH -eENVIRONMENT -ePLATFORM -eGIT_BRANCH_AS_HASH -ePROJECT -eDEPENDS -eCONFLICTS -w "/root/start-os${rel_pwd}" --rm -v "$(pwd):/root/start-os" start9/build-env $@
|
||||
else
|
||||
exec $@
|
||||
fi
|
||||
@@ -1,87 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
function partition_for () {
|
||||
if [[ "$1" =~ [0-9]+$ ]]; then
|
||||
echo "$1p$2"
|
||||
else
|
||||
echo "$1$2"
|
||||
fi
|
||||
}
|
||||
|
||||
VERSION=$(cat VERSION.txt)
|
||||
ENVIRONMENT=$(cat ENVIRONMENT.txt)
|
||||
GIT_HASH=$(cat GIT_HASH.txt | head -c 7)
|
||||
DATE=$(date +%Y%m%d)
|
||||
|
||||
ROOT_PART_END=7217792
|
||||
|
||||
VERSION_FULL="$VERSION-$GIT_HASH"
|
||||
|
||||
if [ -n "$ENVIRONMENT" ]; then
|
||||
VERSION_FULL="$VERSION_FULL~$ENVIRONMENT"
|
||||
fi
|
||||
|
||||
TARGET_NAME=startos-${VERSION_FULL}-${DATE}_raspberrypi.img
|
||||
TARGET_SIZE=$[($ROOT_PART_END+1)*512]
|
||||
|
||||
rm -f $TARGET_NAME
|
||||
truncate -s $TARGET_SIZE $TARGET_NAME
|
||||
(
|
||||
echo o
|
||||
echo x
|
||||
echo i
|
||||
echo "0xcb15ae4d"
|
||||
echo r
|
||||
echo n
|
||||
echo p
|
||||
echo 1
|
||||
echo 2048
|
||||
echo 526335
|
||||
echo t
|
||||
echo c
|
||||
echo n
|
||||
echo p
|
||||
echo 2
|
||||
echo 526336
|
||||
echo $ROOT_PART_END
|
||||
echo a
|
||||
echo 1
|
||||
echo w
|
||||
) | fdisk $TARGET_NAME
|
||||
OUTPUT_DEVICE=$(sudo losetup --show -fP $TARGET_NAME)
|
||||
sudo mkfs.ext4 `partition_for ${OUTPUT_DEVICE} 2`
|
||||
sudo mkfs.vfat `partition_for ${OUTPUT_DEVICE} 1`
|
||||
|
||||
TMPDIR=$(mktemp -d)
|
||||
|
||||
sudo mount `partition_for ${OUTPUT_DEVICE} 2` $TMPDIR
|
||||
sudo mkdir $TMPDIR/boot
|
||||
sudo mount `partition_for ${OUTPUT_DEVICE} 1` $TMPDIR/boot
|
||||
sudo unsquashfs -f -d $TMPDIR startos.raspberrypi.squashfs
|
||||
REAL_GIT_HASH=$(cat $TMPDIR/usr/lib/startos/GIT_HASH.txt)
|
||||
REAL_VERSION=$(cat $TMPDIR/usr/lib/startos/VERSION.txt)
|
||||
REAL_ENVIRONMENT=$(cat $TMPDIR/usr/lib/startos/ENVIRONMENT.txt)
|
||||
sudo sed -i 's| boot=startos| init=/usr/lib/startos/scripts/init_resize\.sh|' $TMPDIR/boot/cmdline.txt
|
||||
sudo cp ./build/raspberrypi/fstab $TMPDIR/etc/
|
||||
sudo cp ./build/raspberrypi/init_resize.sh $TMPDIR/usr/lib/startos/scripts/init_resize.sh
|
||||
sudo umount $TMPDIR/boot
|
||||
sudo umount $TMPDIR
|
||||
sudo losetup -d $OUTPUT_DEVICE
|
||||
|
||||
if [ "$ALLOW_VERSION_MISMATCH" != 1 ]; then
|
||||
if [ "$(cat GIT_HASH.txt)" != "$REAL_GIT_HASH" ]; then
|
||||
>&2 echo "startos.raspberrypi.squashfs GIT_HASH.txt mismatch"
|
||||
>&2 echo "expected $REAL_GIT_HASH (dpkg) found $(cat GIT_HASH.txt) (repo)"
|
||||
exit 1
|
||||
fi
|
||||
if [ "$(cat VERSION.txt)" != "$REAL_VERSION" ]; then
|
||||
>&2 echo "startos.raspberrypi.squashfs VERSION.txt mismatch"
|
||||
exit 1
|
||||
fi
|
||||
if [ "$(cat ENVIRONMENT.txt)" != "$REAL_ENVIRONMENT" ]; then
|
||||
>&2 echo "startos.raspberrypi.squashfs ENVIRONMENT.txt mismatch"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
142
build/upload-ota.sh
Executable file
142
build/upload-ota.sh
Executable file
@@ -0,0 +1,142 @@
|
||||
#!/bin/bash
|
||||
|
||||
if [ -z "$VERSION" ]; then
|
||||
>&2 echo '$VERSION required'
|
||||
exit 2
|
||||
fi
|
||||
|
||||
set -e
|
||||
|
||||
if [ "$SKIP_DL" != "1" ]; then
|
||||
if [ "$SKIP_CLEAN" != "1" ]; then
|
||||
rm -rf ~/Downloads/v$VERSION
|
||||
mkdir ~/Downloads/v$VERSION
|
||||
cd ~/Downloads/v$VERSION
|
||||
fi
|
||||
|
||||
if [ -n "$RUN_ID" ]; then
|
||||
for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do
|
||||
while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.squashfs -D $(pwd); do sleep 1; done
|
||||
done
|
||||
for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do
|
||||
while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.iso -D $(pwd); do sleep 1; done
|
||||
done
|
||||
fi
|
||||
|
||||
if [ -n "$ST_RUN_ID" ]; then
|
||||
for arch in aarch64 riscv64 x86_64; do
|
||||
while ! gh run download -R Start9Labs/start-os $ST_RUN_ID -n start-tunnel_$arch.deb -D $(pwd); do sleep 1; done
|
||||
done
|
||||
fi
|
||||
|
||||
if [ -n "$CLI_RUN_ID" ]; then
|
||||
for arch in aarch64 riscv64 x86_64; do
|
||||
for os in linux macos; do
|
||||
pair=${arch}-${os}
|
||||
if [ "${pair}" = "riscv64-linux" ]; then
|
||||
target=riscv64gc-unknown-linux-musl
|
||||
elif [ "${pair}" = "riscv64-macos" ]; then
|
||||
continue
|
||||
elif [ "${os}" = "linux" ]; then
|
||||
target="${arch}-unknown-linux-musl"
|
||||
elif [ "${os}" = "macos" ]; then
|
||||
target="${arch}-apple-darwin"
|
||||
fi
|
||||
while ! gh run download -R Start9Labs/start-os $CLI_RUN_ID -n start-cli_$target -D $(pwd); do sleep 1; done
|
||||
mv start-cli "start-cli_${pair}"
|
||||
done
|
||||
done
|
||||
fi
|
||||
else
|
||||
cd ~/Downloads/v$VERSION
|
||||
fi
|
||||
|
||||
start-cli --registry=https://alpha-registry-x.start9.com registry os version add $VERSION "v$VERSION" '' ">=0.3.5 <=$VERSION"
|
||||
|
||||
if [ "$SKIP_UL" = "2" ]; then
|
||||
exit 2
|
||||
elif [ "$SKIP_UL" != "1" ]; then
|
||||
for file in *.deb start-cli_*; do
|
||||
gh release upload -R Start9Labs/start-os v$VERSION $file
|
||||
done
|
||||
for file in *.iso *.squashfs; do
|
||||
s3cmd put -P $file s3://startos-images/v$VERSION/$file
|
||||
done
|
||||
fi
|
||||
|
||||
if [ "$SKIP_INDEX" != "1" ]; then
|
||||
for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do
|
||||
for file in *_$arch.squashfs *_$arch.iso; do
|
||||
start-cli --registry=https://alpha-registry-x.start9.com registry os asset add --platform=$arch --version=$VERSION $file https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$file
|
||||
done
|
||||
done
|
||||
fi
|
||||
|
||||
for file in *.iso *.squashfs *.deb start-cli_*; do
|
||||
gpg -u 7CFFDA41CA66056A --detach-sign --armor -o "${file}.asc" "$file"
|
||||
done
|
||||
|
||||
gpg --export -a 7CFFDA41CA66056A > dr-bonez.key.asc
|
||||
tar -czvf signatures.tar.gz *.asc
|
||||
|
||||
gh release upload -R Start9Labs/start-os v$VERSION signatures.tar.gz
|
||||
|
||||
cat << EOF
|
||||
# ISO Downloads
|
||||
|
||||
- [x86_64/AMD64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64-nonfree.iso))
|
||||
- [x86_64/AMD64-slim (FOSS-only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64.iso) "Without proprietary software or drivers")
|
||||
- [aarch64/ARM64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64-nonfree.iso))
|
||||
- [aarch64/ARM64-slim (FOSS-Only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64.iso) "Without proprietary software or drivers")
|
||||
- [RISCV64 (RVA23)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_riscv64.iso))
|
||||
|
||||
EOF
|
||||
cat << 'EOF'
|
||||
# StartOS Checksums
|
||||
|
||||
## SHA-256
|
||||
```
|
||||
EOF
|
||||
sha256sum *.iso *.squashfs
|
||||
cat << 'EOF'
|
||||
```
|
||||
|
||||
## BLAKE-3
|
||||
```
|
||||
EOF
|
||||
b3sum *.iso *.squashfs
|
||||
cat << 'EOF'
|
||||
```
|
||||
|
||||
# Start-Tunnel Checksums
|
||||
|
||||
## SHA-256
|
||||
```
|
||||
EOF
|
||||
sha256sum start-tunnel*.deb
|
||||
cat << 'EOF'
|
||||
```
|
||||
|
||||
## BLAKE-3
|
||||
```
|
||||
EOF
|
||||
b3sum start-tunnel*.deb
|
||||
cat << 'EOF'
|
||||
```
|
||||
|
||||
# start-cli Checksums
|
||||
|
||||
## SHA-256
|
||||
```
|
||||
EOF
|
||||
sha256sum start-cli_*
|
||||
cat << 'EOF'
|
||||
```
|
||||
|
||||
## BLAKE-3
|
||||
```
|
||||
EOF
|
||||
b3sum start-cli_*
|
||||
cat << 'EOF'
|
||||
```
|
||||
EOF
|
||||
6
container-runtime/container-runtime-failure.service
Normal file
6
container-runtime/container-runtime-failure.service
Normal file
@@ -0,0 +1,6 @@
|
||||
[Unit]
|
||||
Description=StartOS Container Runtime Failure Handler
|
||||
|
||||
[Service]
|
||||
Type=oneshot
|
||||
ExecStart=/usr/bin/start-container rebuild
|
||||
@@ -1,11 +1,12 @@
|
||||
[Unit]
|
||||
Description=StartOS Container Runtime
|
||||
OnFailure=container-runtime-failure.service
|
||||
|
||||
[Service]
|
||||
Type=simple
|
||||
ExecStart=/usr/bin/node --experimental-detect-module --unhandled-rejections=warn /usr/lib/startos/init/index.js
|
||||
Restart=always
|
||||
RestartSec=3
|
||||
Environment=RUST_LOG=startos=debug
|
||||
ExecStart=/usr/bin/node --experimental-detect-module --trace-warnings --unhandled-rejections=warn /usr/lib/startos/init/index.js
|
||||
Restart=no
|
||||
|
||||
[Install]
|
||||
WantedBy=multi-user.target
|
||||
@@ -2,17 +2,10 @@
|
||||
|
||||
set -e
|
||||
|
||||
mkdir -p /run/systemd/resolve
|
||||
echo "nameserver 8.8.8.8" > /run/systemd/resolve/stub-resolv.conf
|
||||
|
||||
apt-get update
|
||||
apt-get install -y curl rsync qemu-user-static
|
||||
|
||||
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
|
||||
source ~/.bashrc
|
||||
nvm install 20
|
||||
ln -s $(which node) /usr/bin/node
|
||||
apt-get install -y curl rsync qemu-user-static nodejs
|
||||
|
||||
sed -i '/\(^\|#\)DNSStubListener=/c\DNSStubListener=no' /etc/systemd/resolved.conf
|
||||
sed -i '/\(^\|#\)Storage=/c\Storage=persistent' /etc/systemd/journald.conf
|
||||
sed -i '/\(^\|#\)Compress=/c\Compress=yes' /etc/systemd/journald.conf
|
||||
sed -i '/\(^\|#\)SystemMaxUse=/c\SystemMaxUse=1G' /etc/systemd/journald.conf
|
||||
@@ -20,4 +13,4 @@ sed -i '/\(^\|#\)ForwardToSyslog=/c\ForwardToSyslog=no' /etc/systemd/journald.co
|
||||
|
||||
systemctl enable container-runtime.service
|
||||
|
||||
rm -rf /run/systemd
|
||||
echo "nameserver 10.0.3.1" > /etc/resolv.conf
|
||||
@@ -1,11 +1,9 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
set -e
|
||||
|
||||
DISTRO=debian
|
||||
VERSION=bookworm
|
||||
VERSION=trixie
|
||||
ARCH=${ARCH:-$(uname -m)}
|
||||
FLAVOR=default
|
||||
|
||||
@@ -16,8 +14,9 @@ elif [ "$_ARCH" = "aarch64" ]; then
|
||||
_ARCH=arm64
|
||||
fi
|
||||
|
||||
URL="https://images.linuxcontainers.org/$(curl -fsSL https://images.linuxcontainers.org/meta/1.0/index-system | grep "^$DISTRO;$VERSION;$_ARCH;$FLAVOR;" | head -n1 | sed 's/^.*;//g')/rootfs.squashfs"
|
||||
BASE_URL="https://images.linuxcontainers.org$(curl -fsSL https://images.linuxcontainers.org/meta/1.0/index-system | grep "^$DISTRO;$VERSION;$_ARCH;$FLAVOR;" | head -n1 | sed 's/^.*;//g')"
|
||||
OUTPUT_FILE="debian.${ARCH}.squashfs"
|
||||
|
||||
echo "Downloading $URL to debian.${ARCH}.squashfs"
|
||||
|
||||
curl -fsSL "$URL" > debian.${ARCH}.squashfs
|
||||
echo "Downloading ${BASE_URL}/rootfs.squashfs to $OUTPUT_FILE"
|
||||
curl -fsSL "${BASE_URL}/rootfs.squashfs" > "$OUTPUT_FILE"
|
||||
curl -fsSL "$BASE_URL/SHA256SUMS" | grep 'rootfs\.squashfs' | awk '{print $1" '"$OUTPUT_FILE"'"}' | shasum -a 256 -c
|
||||
@@ -1,28 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
IMAGE=$1
|
||||
|
||||
if [ -z "$IMAGE" ]; then
|
||||
>&2 echo "usage: $0 <image id>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if ! [ -d "/media/images/$IMAGE" ]; then
|
||||
>&2 echo "image does not exist"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
container=$(mktemp -d)
|
||||
mkdir -p $container/rootfs $container/upper $container/work
|
||||
mount -t overlay -olowerdir=/media/images/$IMAGE,upperdir=$container/upper,workdir=$container/work overlay $container/rootfs
|
||||
|
||||
rootfs=$container/rootfs
|
||||
|
||||
for special in dev sys proc run; do
|
||||
mkdir -p $rootfs/$special
|
||||
mount --bind /$special $rootfs/$special
|
||||
done
|
||||
|
||||
echo $rootfs
|
||||
6091
container-runtime/package-lock.json
generated
6091
container-runtime/package-lock.json
generated
File diff suppressed because it is too large
Load Diff
@@ -26,8 +26,9 @@
|
||||
"isomorphic-fetch": "^3.0.0",
|
||||
"jsonpath": "^1.1.1",
|
||||
"lodash.merge": "^4.6.2",
|
||||
"mime": "^4.0.7",
|
||||
"node-fetch": "^3.1.0",
|
||||
"ts-matches": "^5.5.1",
|
||||
"ts-matches": "^6.3.2",
|
||||
"tslib": "^2.5.3",
|
||||
"typescript": "^5.1.3",
|
||||
"yaml": "^2.3.1"
|
||||
|
||||
@@ -1,12 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
|
||||
rootfs=$1
|
||||
if [ -z "$rootfs" ]; then
|
||||
>&2 echo "usage: $0 <container rootfs path>"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
umount --recursive $rootfs
|
||||
rm -rf $rootfs/..
|
||||
@@ -1,4 +1,9 @@
|
||||
import { types as T, utils } from "@start9labs/start-sdk"
|
||||
import {
|
||||
ExtendedVersion,
|
||||
types as T,
|
||||
utils,
|
||||
VersionRange,
|
||||
} from "@start9labs/start-sdk"
|
||||
import * as net from "net"
|
||||
import { object, string, number, literals, some, unknown } from "ts-matches"
|
||||
import { Effects } from "../Models/Effects"
|
||||
@@ -6,23 +11,19 @@ import { Effects } from "../Models/Effects"
|
||||
import { CallbackHolder } from "../Models/CallbackHolder"
|
||||
import { asError } from "@start9labs/start-sdk/base/lib/util"
|
||||
const matchRpcError = object({
|
||||
error: object(
|
||||
{
|
||||
code: number,
|
||||
message: string,
|
||||
data: some(
|
||||
string,
|
||||
object(
|
||||
{
|
||||
details: string,
|
||||
debug: string,
|
||||
},
|
||||
["debug"],
|
||||
),
|
||||
),
|
||||
},
|
||||
["data"],
|
||||
),
|
||||
error: object({
|
||||
code: number,
|
||||
message: string,
|
||||
data: some(
|
||||
string,
|
||||
object({
|
||||
details: string,
|
||||
debug: string.nullable().optional(),
|
||||
}),
|
||||
)
|
||||
.nullable()
|
||||
.optional(),
|
||||
}),
|
||||
})
|
||||
const testRpcError = matchRpcError.test
|
||||
const testRpcResult = object({
|
||||
@@ -34,13 +35,13 @@ const SOCKET_PATH = "/media/startos/rpc/host.sock"
|
||||
let hostSystemId = 0
|
||||
|
||||
export type EffectContext = {
|
||||
procedureId: string | null
|
||||
eventId: string | null
|
||||
callbacks?: CallbackHolder
|
||||
constRetry?: () => void
|
||||
}
|
||||
|
||||
const rpcRoundFor =
|
||||
(procedureId: string | null) =>
|
||||
(eventId: string | null) =>
|
||||
<K extends T.EffectMethod | "clearCallbacks">(
|
||||
method: K,
|
||||
params: Record<string, unknown>,
|
||||
@@ -51,7 +52,7 @@ const rpcRoundFor =
|
||||
JSON.stringify({
|
||||
id,
|
||||
method,
|
||||
params: { ...params, procedureId: procedureId || undefined },
|
||||
params: { ...params, eventId: eventId ?? undefined },
|
||||
}) + "\n",
|
||||
)
|
||||
})
|
||||
@@ -102,9 +103,21 @@ const rpcRoundFor =
|
||||
}
|
||||
|
||||
export function makeEffects(context: EffectContext): Effects {
|
||||
const rpcRound = rpcRoundFor(context.procedureId)
|
||||
const rpcRound = rpcRoundFor(context.eventId)
|
||||
const self: Effects = {
|
||||
eventId: context.eventId,
|
||||
child: (name) =>
|
||||
makeEffects({ ...context, callbacks: context.callbacks?.child(name) }),
|
||||
constRetry: context.constRetry,
|
||||
isInContext: !!context.callbacks,
|
||||
onLeaveContext:
|
||||
context.callbacks?.onLeaveContext?.bind(context.callbacks) ||
|
||||
(() => {
|
||||
console.warn(
|
||||
"no context for this effects object",
|
||||
new Error().stack?.replace(/^Error/, ""),
|
||||
)
|
||||
}),
|
||||
clearCallbacks(...[options]: Parameters<T.Effects["clearCallbacks"]>) {
|
||||
return rpcRound("clear-callbacks", {
|
||||
...options,
|
||||
@@ -126,22 +139,20 @@ export function makeEffects(context: EffectContext): Effects {
|
||||
...options,
|
||||
}) as ReturnType<T.Effects["action"]["getInput"]>
|
||||
},
|
||||
request(...[options]: Parameters<T.Effects["action"]["request"]>) {
|
||||
return rpcRound("action.request", {
|
||||
createTask(...[options]: Parameters<T.Effects["action"]["createTask"]>) {
|
||||
return rpcRound("action.create-task", {
|
||||
...options,
|
||||
}) as ReturnType<T.Effects["action"]["request"]>
|
||||
}) as ReturnType<T.Effects["action"]["createTask"]>
|
||||
},
|
||||
run(...[options]: Parameters<T.Effects["action"]["run"]>) {
|
||||
return rpcRound("action.run", {
|
||||
...options,
|
||||
}) as ReturnType<T.Effects["action"]["run"]>
|
||||
},
|
||||
clearRequests(
|
||||
...[options]: Parameters<T.Effects["action"]["clearRequests"]>
|
||||
) {
|
||||
return rpcRound("action.clear-requests", {
|
||||
clearTasks(...[options]: Parameters<T.Effects["action"]["clearTasks"]>) {
|
||||
return rpcRound("action.clear-tasks", {
|
||||
...options,
|
||||
}) as ReturnType<T.Effects["action"]["clearRequests"]>
|
||||
}) as ReturnType<T.Effects["action"]["clearTasks"]>
|
||||
},
|
||||
},
|
||||
bind(...[options]: Parameters<T.Effects["bind"]>) {
|
||||
@@ -167,6 +178,13 @@ export function makeEffects(context: EffectContext): Effects {
|
||||
T.Effects["getInstalledPackages"]
|
||||
>
|
||||
},
|
||||
getServiceManifest(
|
||||
...[options]: Parameters<T.Effects["getServiceManifest"]>
|
||||
) {
|
||||
return rpcRound("get-service-manifest", options) as ReturnType<
|
||||
T.Effects["getServiceManifest"]
|
||||
>
|
||||
},
|
||||
subcontainer: {
|
||||
createFs(options: { imageId: string; name: string }) {
|
||||
return rpcRound("subcontainer.create-fs", options) as ReturnType<
|
||||
@@ -186,13 +204,6 @@ export function makeEffects(context: EffectContext): Effects {
|
||||
T.Effects["exportServiceInterface"]
|
||||
>
|
||||
}) as Effects["exportServiceInterface"],
|
||||
exposeForDependents(
|
||||
...[options]: Parameters<T.Effects["exposeForDependents"]>
|
||||
) {
|
||||
return rpcRound("expose-for-dependents", options) as ReturnType<
|
||||
T.Effects["exposeForDependents"]
|
||||
>
|
||||
},
|
||||
getContainerIp(...[options]: Parameters<T.Effects["getContainerIp"]>) {
|
||||
return rpcRound("get-container-ip", options) as ReturnType<
|
||||
T.Effects["getContainerIp"]
|
||||
@@ -254,6 +265,7 @@ export function makeEffects(context: EffectContext): Effects {
|
||||
return rpcRound("mount", options) as ReturnType<T.Effects["mount"]>
|
||||
},
|
||||
restart(...[]: Parameters<T.Effects["restart"]>) {
|
||||
console.log("Restarting service...")
|
||||
return rpcRound("restart", {}) as ReturnType<T.Effects["restart"]>
|
||||
},
|
||||
setDependencies(
|
||||
@@ -284,6 +296,7 @@ export function makeEffects(context: EffectContext): Effects {
|
||||
getStatus(...[o]: Parameters<T.Effects["getStatus"]>) {
|
||||
return rpcRound("get-status", o) as ReturnType<T.Effects["getStatus"]>
|
||||
},
|
||||
/// DEPRECATED
|
||||
setMainStatus(o: { status: "running" | "stopped" }): Promise<null> {
|
||||
return rpcRound("set-main-status", o) as ReturnType<
|
||||
T.Effects["setHealth"]
|
||||
@@ -293,15 +306,6 @@ export function makeEffects(context: EffectContext): Effects {
|
||||
shutdown(...[]: Parameters<T.Effects["shutdown"]>) {
|
||||
return rpcRound("shutdown", {}) as ReturnType<T.Effects["shutdown"]>
|
||||
},
|
||||
store: {
|
||||
get: async (options: any) =>
|
||||
rpcRound("store.get", {
|
||||
...options,
|
||||
callback: context.callbacks?.addCallback(options.callback) || null,
|
||||
}) as any,
|
||||
set: async (options: any) =>
|
||||
rpcRound("store.set", options) as ReturnType<T.Effects["store"]["set"]>,
|
||||
} as T.Effects["store"],
|
||||
getDataVersion() {
|
||||
return rpcRound("get-data-version", {}) as ReturnType<
|
||||
T.Effects["getDataVersion"]
|
||||
@@ -313,5 +317,16 @@ export function makeEffects(context: EffectContext): Effects {
|
||||
>
|
||||
},
|
||||
}
|
||||
if (context.callbacks?.onLeaveContext)
|
||||
self.onLeaveContext(() => {
|
||||
self.constRetry = undefined
|
||||
self.isInContext = false
|
||||
self.onLeaveContext = () => {
|
||||
console.warn(
|
||||
"this effects object is already out of context",
|
||||
new Error().stack?.replace(/^Error/, ""),
|
||||
)
|
||||
}
|
||||
})
|
||||
return self
|
||||
}
|
||||
|
||||
@@ -12,9 +12,15 @@ import {
|
||||
any,
|
||||
shape,
|
||||
anyOf,
|
||||
literals,
|
||||
} from "ts-matches"
|
||||
|
||||
import { types as T, utils } from "@start9labs/start-sdk"
|
||||
import {
|
||||
ExtendedVersion,
|
||||
types as T,
|
||||
utils,
|
||||
VersionRange,
|
||||
} from "@start9labs/start-sdk"
|
||||
import * as fs from "fs"
|
||||
|
||||
import { CallbackHolder } from "../Models/CallbackHolder"
|
||||
@@ -26,20 +32,16 @@ type MaybePromise<T> = T | Promise<T>
|
||||
export const matchRpcResult = anyOf(
|
||||
object({ result: any }),
|
||||
object({
|
||||
error: object(
|
||||
{
|
||||
code: number,
|
||||
message: string,
|
||||
data: object(
|
||||
{
|
||||
details: string,
|
||||
debug: any,
|
||||
},
|
||||
["details", "debug"],
|
||||
),
|
||||
},
|
||||
["data"],
|
||||
),
|
||||
error: object({
|
||||
code: number,
|
||||
message: string,
|
||||
data: object({
|
||||
details: string.optional(),
|
||||
debug: any.optional(),
|
||||
})
|
||||
.nullable()
|
||||
.optional(),
|
||||
}),
|
||||
}),
|
||||
)
|
||||
|
||||
@@ -54,38 +56,26 @@ const isResult = object({ result: any }).test
|
||||
|
||||
const idType = some(string, number, literal(null))
|
||||
type IdType = null | string | number | undefined
|
||||
const runType = object(
|
||||
{
|
||||
id: idType,
|
||||
method: literal("execute"),
|
||||
params: object(
|
||||
{
|
||||
id: string,
|
||||
procedure: string,
|
||||
input: any,
|
||||
timeout: number,
|
||||
},
|
||||
["timeout"],
|
||||
),
|
||||
},
|
||||
["id"],
|
||||
)
|
||||
const sandboxRunType = object(
|
||||
{
|
||||
id: idType,
|
||||
method: literal("sandbox"),
|
||||
params: object(
|
||||
{
|
||||
id: string,
|
||||
procedure: string,
|
||||
input: any,
|
||||
timeout: number,
|
||||
},
|
||||
["timeout"],
|
||||
),
|
||||
},
|
||||
["id"],
|
||||
)
|
||||
const runType = object({
|
||||
id: idType.optional(),
|
||||
method: literal("execute"),
|
||||
params: object({
|
||||
id: string,
|
||||
procedure: string,
|
||||
input: any,
|
||||
timeout: number.nullable().optional(),
|
||||
}),
|
||||
})
|
||||
const sandboxRunType = object({
|
||||
id: idType.optional(),
|
||||
method: literal("sandbox"),
|
||||
params: object({
|
||||
id: string,
|
||||
procedure: string,
|
||||
input: any,
|
||||
timeout: number.nullable().optional(),
|
||||
}),
|
||||
})
|
||||
const callbackType = object({
|
||||
method: literal("callback"),
|
||||
params: object({
|
||||
@@ -93,44 +83,37 @@ const callbackType = object({
|
||||
args: array,
|
||||
}),
|
||||
})
|
||||
const initType = object(
|
||||
{
|
||||
id: idType,
|
||||
method: literal("init"),
|
||||
},
|
||||
["id"],
|
||||
)
|
||||
const startType = object(
|
||||
{
|
||||
id: idType,
|
||||
method: literal("start"),
|
||||
},
|
||||
["id"],
|
||||
)
|
||||
const stopType = object(
|
||||
{
|
||||
id: idType,
|
||||
method: literal("stop"),
|
||||
},
|
||||
["id"],
|
||||
)
|
||||
const exitType = object(
|
||||
{
|
||||
id: idType,
|
||||
method: literal("exit"),
|
||||
},
|
||||
["id"],
|
||||
)
|
||||
const evalType = object(
|
||||
{
|
||||
id: idType,
|
||||
method: literal("eval"),
|
||||
params: object({
|
||||
script: string,
|
||||
}),
|
||||
},
|
||||
["id"],
|
||||
)
|
||||
const initType = object({
|
||||
id: idType.optional(),
|
||||
method: literal("init"),
|
||||
params: object({
|
||||
id: string,
|
||||
kind: literals("install", "update", "restore").nullable(),
|
||||
}),
|
||||
})
|
||||
const startType = object({
|
||||
id: idType.optional(),
|
||||
method: literal("start"),
|
||||
})
|
||||
const stopType = object({
|
||||
id: idType.optional(),
|
||||
method: literal("stop"),
|
||||
})
|
||||
const exitType = object({
|
||||
id: idType.optional(),
|
||||
method: literal("exit"),
|
||||
params: object({
|
||||
id: string,
|
||||
target: string.nullable(),
|
||||
}),
|
||||
})
|
||||
const evalType = object({
|
||||
id: idType.optional(),
|
||||
method: literal("eval"),
|
||||
params: object({
|
||||
script: string,
|
||||
}),
|
||||
})
|
||||
|
||||
const jsonParse = (x: string) => JSON.parse(x)
|
||||
|
||||
@@ -163,6 +146,7 @@ const handleRpc = (id: IdType, result: Promise<RpcResult>) =>
|
||||
|
||||
const hasId = object({ id: idType }).test
|
||||
export class RpcListener {
|
||||
shouldExit = false
|
||||
unixSocketServer = net.createServer(async (server) => {})
|
||||
private _system: System | undefined
|
||||
private callbacks: CallbackHolder | undefined
|
||||
@@ -171,8 +155,12 @@ export class RpcListener {
|
||||
if (!fs.existsSync(SOCKET_PARENT)) {
|
||||
fs.mkdirSync(SOCKET_PARENT, { recursive: true })
|
||||
}
|
||||
if (fs.existsSync(SOCKET_PATH)) fs.rmSync(SOCKET_PATH, { force: true })
|
||||
|
||||
this.unixSocketServer.listen(SOCKET_PATH)
|
||||
|
||||
console.log("Listening on %s", SOCKET_PATH)
|
||||
|
||||
this.unixSocketServer.on("connection", (s) => {
|
||||
let id: IdType = null
|
||||
const captureId = <X>(x: X) => {
|
||||
@@ -223,6 +211,11 @@ export class RpcListener {
|
||||
.catch(mapError)
|
||||
.then(logData("response"))
|
||||
.then(writeDataToSocket)
|
||||
.then((_) => {
|
||||
if (this.shouldExit) {
|
||||
process.exit(0)
|
||||
}
|
||||
})
|
||||
.catch((e) => {
|
||||
console.error(`Major error in socket handling: ${e}`)
|
||||
console.debug(`Data in: ${a.toString()}`)
|
||||
@@ -238,21 +231,6 @@ export class RpcListener {
|
||||
return this._system
|
||||
}
|
||||
|
||||
private callbackHolders: Map<string, CallbackHolder> = new Map()
|
||||
private removeCallbackHolderFor(procedure: string) {
|
||||
const prev = this.callbackHolders.get(procedure)
|
||||
if (prev) {
|
||||
this.callbackHolders.delete(procedure)
|
||||
this.callbacks?.removeChild(prev)
|
||||
}
|
||||
}
|
||||
private callbackHolderFor(procedure: string): CallbackHolder {
|
||||
this.removeCallbackHolderFor(procedure)
|
||||
const callbackHolder = this.callbacks!.child()
|
||||
this.callbackHolders.set(procedure, callbackHolder)
|
||||
return callbackHolder
|
||||
}
|
||||
|
||||
callCallback(callback: number, args: any[]): void {
|
||||
if (this.callbacks) {
|
||||
this.callbacks
|
||||
@@ -272,11 +250,11 @@ export class RpcListener {
|
||||
.when(runType, async ({ id, params }) => {
|
||||
const system = this.system
|
||||
const procedure = jsonPath.unsafeCast(params.procedure)
|
||||
const { input, timeout, id: procedureId } = params
|
||||
const { input, timeout, id: eventId } = params
|
||||
const result = this.getResult(
|
||||
procedure,
|
||||
system,
|
||||
procedureId,
|
||||
eventId,
|
||||
timeout,
|
||||
input,
|
||||
)
|
||||
@@ -286,11 +264,11 @@ export class RpcListener {
|
||||
.when(sandboxRunType, async ({ id, params }) => {
|
||||
const system = this.system
|
||||
const procedure = jsonPath.unsafeCast(params.procedure)
|
||||
const { input, timeout, id: procedureId } = params
|
||||
const { input, timeout, id: eventId } = params
|
||||
const result = this.getResult(
|
||||
procedure,
|
||||
system,
|
||||
procedureId,
|
||||
eventId,
|
||||
timeout,
|
||||
input,
|
||||
)
|
||||
@@ -302,9 +280,10 @@ export class RpcListener {
|
||||
return null
|
||||
})
|
||||
.when(startType, async ({ id }) => {
|
||||
const callbacks = this.callbackHolderFor("main")
|
||||
const callbacks =
|
||||
this.callbacks?.getChild("main") || this.callbacks?.child("main")
|
||||
const effects = makeEffects({
|
||||
procedureId: null,
|
||||
eventId: null,
|
||||
callbacks,
|
||||
})
|
||||
return handleRpc(
|
||||
@@ -313,21 +292,39 @@ export class RpcListener {
|
||||
)
|
||||
})
|
||||
.when(stopType, async ({ id }) => {
|
||||
this.removeCallbackHolderFor("main")
|
||||
return handleRpc(
|
||||
id,
|
||||
this.system.stop().then((result) => ({ result })),
|
||||
this.system.stop().then((result) => {
|
||||
this.callbacks?.removeChild("main")
|
||||
|
||||
return { result }
|
||||
}),
|
||||
)
|
||||
})
|
||||
.when(exitType, async ({ id }) => {
|
||||
.when(exitType, async ({ id, params }) => {
|
||||
return handleRpc(
|
||||
id,
|
||||
(async () => {
|
||||
if (this._system) await this._system.exit()
|
||||
if (this._system) {
|
||||
let target = null
|
||||
if (params.target)
|
||||
try {
|
||||
target = ExtendedVersion.parse(params.target)
|
||||
} catch (_) {
|
||||
target = VersionRange.parse(params.target).normalize()
|
||||
}
|
||||
await this._system.exit(
|
||||
makeEffects({
|
||||
eventId: params.id,
|
||||
}),
|
||||
target,
|
||||
)
|
||||
this.shouldExit = true
|
||||
}
|
||||
})().then((result) => ({ result })),
|
||||
)
|
||||
})
|
||||
.when(initType, async ({ id }) => {
|
||||
.when(initType, async ({ id, params }) => {
|
||||
return handleRpc(
|
||||
id,
|
||||
(async () => {
|
||||
@@ -335,16 +332,19 @@ export class RpcListener {
|
||||
const system = await this.getDependencies.system()
|
||||
this.callbacks = new CallbackHolder(
|
||||
makeEffects({
|
||||
procedureId: null,
|
||||
eventId: params.id,
|
||||
}),
|
||||
)
|
||||
const callbacks = this.callbackHolderFor("containerInit")
|
||||
await system.containerInit(
|
||||
const callbacks = this.callbacks.child("init")
|
||||
console.error("Initializing...")
|
||||
await system.init(
|
||||
makeEffects({
|
||||
procedureId: null,
|
||||
eventId: params.id,
|
||||
callbacks,
|
||||
}),
|
||||
params.kind,
|
||||
)
|
||||
console.error("Initialization complete.")
|
||||
this._system = system
|
||||
}
|
||||
})().then((result) => ({ result })),
|
||||
@@ -377,7 +377,7 @@ export class RpcListener {
|
||||
)
|
||||
})
|
||||
.when(
|
||||
shape({ id: idType, method: string }, ["id"]),
|
||||
shape({ id: idType.optional(), method: string }),
|
||||
({ id, method }) => ({
|
||||
jsonrpc,
|
||||
id,
|
||||
@@ -411,8 +411,8 @@ export class RpcListener {
|
||||
private getResult(
|
||||
procedure: typeof jsonPath._TYPE,
|
||||
system: System,
|
||||
procedureId: string,
|
||||
timeout: number | undefined,
|
||||
eventId: string,
|
||||
timeout: number | null | undefined,
|
||||
input: any,
|
||||
) {
|
||||
const ensureResultTypeShape = (
|
||||
@@ -420,9 +420,9 @@ export class RpcListener {
|
||||
): { result: any } => {
|
||||
return { result }
|
||||
}
|
||||
const callbacks = this.callbackHolderFor(procedure)
|
||||
const callbacks = this.callbacks?.child(procedure)
|
||||
const effects = makeEffects({
|
||||
procedureId,
|
||||
eventId,
|
||||
callbacks,
|
||||
})
|
||||
|
||||
@@ -430,16 +430,6 @@ export class RpcListener {
|
||||
switch (procedure) {
|
||||
case "/backup/create":
|
||||
return system.createBackup(effects, timeout || null)
|
||||
case "/backup/restore":
|
||||
return system.restoreBackup(effects, timeout || null)
|
||||
case "/packageInit":
|
||||
return system.packageInit(effects, timeout || null)
|
||||
case "/packageUninit":
|
||||
return system.packageUninit(
|
||||
effects,
|
||||
string.optional().unsafeCast(input),
|
||||
timeout || null,
|
||||
)
|
||||
default:
|
||||
const procedures = unNestPath(procedure)
|
||||
switch (true) {
|
||||
@@ -461,14 +451,10 @@ export class RpcListener {
|
||||
})().then(ensureResultTypeShape, (error) =>
|
||||
matches(error)
|
||||
.when(
|
||||
object(
|
||||
{
|
||||
error: string,
|
||||
code: number,
|
||||
},
|
||||
["code"],
|
||||
{ code: 0 },
|
||||
),
|
||||
object({
|
||||
error: string,
|
||||
code: number.defaultTo(0),
|
||||
}),
|
||||
(error) => ({
|
||||
error: {
|
||||
code: error.code,
|
||||
|
||||
@@ -7,13 +7,22 @@ import { Volume } from "./matchVolume"
|
||||
import {
|
||||
CommandOptions,
|
||||
ExecOptions,
|
||||
ExecSpawnable,
|
||||
SubContainerOwned,
|
||||
} from "@start9labs/start-sdk/package/lib/util/SubContainer"
|
||||
import { Mounts } from "@start9labs/start-sdk/package/lib/mainFn/Mounts"
|
||||
import { Manifest } from "@start9labs/start-sdk/base/lib/osBindings"
|
||||
import { BackupEffects } from "@start9labs/start-sdk/package/lib/backup/Backups"
|
||||
import { Drop } from "@start9labs/start-sdk/package/lib/util"
|
||||
import { SDKManifest } from "@start9labs/start-sdk/base/lib/types"
|
||||
export const exec = promisify(cp.exec)
|
||||
export const execFile = promisify(cp.execFile)
|
||||
|
||||
export class DockerProcedureContainer {
|
||||
private constructor(private readonly subcontainer: ExecSpawnable) {}
|
||||
export class DockerProcedureContainer extends Drop {
|
||||
private constructor(
|
||||
private readonly subcontainer: SubContainer<SDKManifest>,
|
||||
) {
|
||||
super()
|
||||
}
|
||||
|
||||
static async of(
|
||||
effects: T.Effects,
|
||||
@@ -21,7 +30,7 @@ export class DockerProcedureContainer {
|
||||
data: DockerProcedure,
|
||||
volumes: { [id: VolumeId]: Volume },
|
||||
name: string,
|
||||
options: { subcontainer?: ExecSpawnable } = {},
|
||||
options: { subcontainer?: SubContainer<SDKManifest> } = {},
|
||||
) {
|
||||
const subcontainer =
|
||||
options?.subcontainer ??
|
||||
@@ -41,9 +50,10 @@ export class DockerProcedureContainer {
|
||||
volumes: { [id: VolumeId]: Volume },
|
||||
name: string,
|
||||
) {
|
||||
const subcontainer = await SubContainer.of(
|
||||
effects,
|
||||
const subcontainer = await SubContainerOwned.of(
|
||||
effects as BackupEffects,
|
||||
{ imageId: data.image },
|
||||
null,
|
||||
name,
|
||||
)
|
||||
|
||||
@@ -57,13 +67,19 @@ export class DockerProcedureContainer {
|
||||
const volumeMount = volumes[mount]
|
||||
if (volumeMount.type === "data") {
|
||||
await subcontainer.mount(
|
||||
{ type: "volume", id: mount, subpath: null, readonly: false },
|
||||
mounts[mount],
|
||||
Mounts.of().mountVolume({
|
||||
volumeId: mount,
|
||||
subpath: null,
|
||||
mountpoint: mounts[mount],
|
||||
readonly: false,
|
||||
}),
|
||||
)
|
||||
} else if (volumeMount.type === "assets") {
|
||||
await subcontainer.mount(
|
||||
{ type: "assets", subpath: mount },
|
||||
mounts[mount],
|
||||
Mounts.of().mountAssets({
|
||||
subpath: mount,
|
||||
mountpoint: mounts[mount],
|
||||
}),
|
||||
)
|
||||
} else if (volumeMount.type === "certificate") {
|
||||
const hostnames = [
|
||||
@@ -95,21 +111,22 @@ export class DockerProcedureContainer {
|
||||
key,
|
||||
)
|
||||
} else if (volumeMount.type === "pointer") {
|
||||
await effects
|
||||
.mount({
|
||||
location: path,
|
||||
target: {
|
||||
packageId: volumeMount["package-id"],
|
||||
subpath: volumeMount.path,
|
||||
readonly: volumeMount.readonly,
|
||||
volumeId: volumeMount["volume-id"],
|
||||
},
|
||||
})
|
||||
.catch(console.warn)
|
||||
await effects.mount({
|
||||
location: path,
|
||||
target: {
|
||||
packageId: volumeMount["package-id"],
|
||||
subpath: volumeMount.path,
|
||||
readonly: volumeMount.readonly,
|
||||
volumeId: volumeMount["volume-id"],
|
||||
idmap: [],
|
||||
},
|
||||
})
|
||||
} else if (volumeMount.type === "backup") {
|
||||
await subcontainer.mount(
|
||||
{ type: "backup", subpath: null },
|
||||
mounts[mount],
|
||||
Mounts.of().mountBackups({
|
||||
subpath: null,
|
||||
mountpoint: mounts[mount],
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -151,7 +168,11 @@ export class DockerProcedureContainer {
|
||||
}
|
||||
}
|
||||
|
||||
async spawn(commands: string[]): Promise<cp.ChildProcess> {
|
||||
return await this.subcontainer.spawn(commands)
|
||||
// async spawn(commands: string[]): Promise<cp.ChildProcess> {
|
||||
// return await this.subcontainer.spawn(commands)
|
||||
// }
|
||||
|
||||
onDrop(): void {
|
||||
this.subcontainer.destroy?.()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,17 +6,23 @@ import { Daemon } from "@start9labs/start-sdk/package/lib/mainFn/Daemon"
|
||||
import { Effects } from "../../../Models/Effects"
|
||||
import { off } from "node:process"
|
||||
import { CommandController } from "@start9labs/start-sdk/package/lib/mainFn/CommandController"
|
||||
import { SDKManifest } from "@start9labs/start-sdk/base/lib/types"
|
||||
import { SubContainerRc } from "@start9labs/start-sdk/package/lib/util/SubContainer"
|
||||
|
||||
const EMBASSY_HEALTH_INTERVAL = 15 * 1000
|
||||
const EMBASSY_PROPERTIES_LOOP = 30 * 1000
|
||||
/**
|
||||
* We wanted something to represent what the main loop is doing, and
|
||||
* in this case it used to run the properties, health, and the docker/ js main.
|
||||
* Also, this has an ability to clean itself up too if need be.
|
||||
*/
|
||||
export class MainLoop {
|
||||
private subcontainerRc?: SubContainerRc<SDKManifest>
|
||||
get mainSubContainerHandle() {
|
||||
return this.mainEvent?.daemon?.subContainerHandle
|
||||
this.subcontainerRc =
|
||||
this.subcontainerRc ??
|
||||
this.mainEvent?.daemon?.subcontainerRc() ??
|
||||
undefined
|
||||
return this.subcontainerRc
|
||||
}
|
||||
private healthLoops?: {
|
||||
name: string
|
||||
@@ -24,7 +30,7 @@ export class MainLoop {
|
||||
}[]
|
||||
|
||||
private mainEvent?: {
|
||||
daemon: Daemon
|
||||
daemon: Daemon<SDKManifest>
|
||||
}
|
||||
|
||||
private constructor(
|
||||
@@ -55,28 +61,20 @@ export class MainLoop {
|
||||
if (jsMain) {
|
||||
throw new Error("Unreachable")
|
||||
}
|
||||
const daemon = new Daemon(async () => {
|
||||
const subcontainer = await DockerProcedureContainer.createSubContainer(
|
||||
effects,
|
||||
this.system.manifest.id,
|
||||
this.system.manifest.main,
|
||||
this.system.manifest.volumes,
|
||||
`Main - ${currentCommand.join(" ")}`,
|
||||
)
|
||||
return CommandController.of()(
|
||||
this.effects,
|
||||
subcontainer,
|
||||
currentCommand,
|
||||
{
|
||||
runAsInit: true,
|
||||
env: {
|
||||
TINI_SUBREAPER: "true",
|
||||
},
|
||||
sigtermTimeout: utils.inMs(
|
||||
this.system.manifest.main["sigterm-timeout"],
|
||||
),
|
||||
},
|
||||
)
|
||||
const subcontainer = await DockerProcedureContainer.createSubContainer(
|
||||
effects,
|
||||
this.system.manifest.id,
|
||||
this.system.manifest.main,
|
||||
this.system.manifest.volumes,
|
||||
`Main - ${currentCommand.join(" ")}`,
|
||||
)
|
||||
const daemon = await Daemon.of()(this.effects, subcontainer, {
|
||||
command: currentCommand,
|
||||
runAsInit: true,
|
||||
env: {
|
||||
TINI_SUBREAPER: "true",
|
||||
},
|
||||
sigtermTimeout: utils.inMs(this.system.manifest.main["sigterm-timeout"]),
|
||||
})
|
||||
|
||||
daemon.start()
|
||||
@@ -121,6 +119,7 @@ export class MainLoop {
|
||||
? {
|
||||
preferredExternalPort: lanConf.external,
|
||||
alpn: { specified: ["http/1.1"] },
|
||||
addXForwardedHeaders: false,
|
||||
}
|
||||
: null,
|
||||
})
|
||||
@@ -134,7 +133,7 @@ export class MainLoop {
|
||||
delete this.mainEvent
|
||||
delete this.healthLoops
|
||||
await main?.daemon
|
||||
.stop()
|
||||
.term()
|
||||
.catch((e: unknown) => console.error(`Main loop error`, utils.asError(e)))
|
||||
this.effects.setMainStatus({ status: "stopped" })
|
||||
if (healthLoops) healthLoops.forEach((x) => clearInterval(x.interval))
|
||||
|
||||
@@ -0,0 +1,153 @@
|
||||
export default {
|
||||
nodes: {
|
||||
type: "list",
|
||||
subtype: "union",
|
||||
name: "Lightning Nodes",
|
||||
description: "List of Lightning Network node instances to manage",
|
||||
range: "[1,*)",
|
||||
default: ["lnd"],
|
||||
spec: {
|
||||
type: "string",
|
||||
"display-as": "{{name}}",
|
||||
"unique-by": "name",
|
||||
name: "Node Implementation",
|
||||
tag: {
|
||||
id: "type",
|
||||
name: "Type",
|
||||
description:
|
||||
"- LND: Lightning Network Daemon from Lightning Labs\n- CLN: Core Lightning from Blockstream\n",
|
||||
"variant-names": {
|
||||
lnd: "Lightning Network Daemon (LND)",
|
||||
"c-lightning": "Core Lightning (CLN)",
|
||||
},
|
||||
},
|
||||
default: "lnd",
|
||||
variants: {
|
||||
lnd: {
|
||||
name: {
|
||||
type: "string",
|
||||
name: "Node Name",
|
||||
description: "Name of this node in the list",
|
||||
default: "StartOS LND",
|
||||
nullable: false,
|
||||
},
|
||||
"connection-settings": {
|
||||
type: "union",
|
||||
name: "Connection Settings",
|
||||
description: "The Lightning Network Daemon node to connect to.",
|
||||
tag: {
|
||||
id: "type",
|
||||
name: "Type",
|
||||
description:
|
||||
"- Internal: The Lightning Network Daemon service installed to your StartOS server.\n- External: A Lightning Network Daemon instance running on a remote device (advanced).\n",
|
||||
"variant-names": {
|
||||
internal: "Internal",
|
||||
external: "External",
|
||||
},
|
||||
},
|
||||
default: "internal",
|
||||
variants: {
|
||||
internal: {},
|
||||
external: {
|
||||
address: {
|
||||
type: "string",
|
||||
name: "Public Address",
|
||||
description:
|
||||
"The public address of your LND REST server\nNOTE: RTL does not support a .onion URL here\n",
|
||||
nullable: false,
|
||||
},
|
||||
"rest-port": {
|
||||
type: "number",
|
||||
name: "REST Port",
|
||||
description:
|
||||
"The port that your Lightning Network Daemon REST server is bound to",
|
||||
nullable: false,
|
||||
range: "[0,65535]",
|
||||
integral: true,
|
||||
default: 8080,
|
||||
},
|
||||
macaroon: {
|
||||
type: "string",
|
||||
name: "Macaroon",
|
||||
description:
|
||||
'Your admin.macaroon file, Base64URL encoded. This is the same as the value after "macaroon=" in your lndconnect URL.',
|
||||
nullable: false,
|
||||
masked: true,
|
||||
pattern: "[=A-Za-z0-9_-]+",
|
||||
"pattern-description":
|
||||
"Macaroon must be encoded in Base64URL format (only A-Z, a-z, 0-9, _, - and = allowed)",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
"c-lightning": {
|
||||
name: {
|
||||
type: "string",
|
||||
name: "Node Name",
|
||||
description: "Name of this node in the list",
|
||||
default: "StartOS CLN",
|
||||
nullable: false,
|
||||
},
|
||||
"connection-settings": {
|
||||
type: "union",
|
||||
name: "Connection Settings",
|
||||
description: "The Core Lightning (CLN) node to connect to.",
|
||||
tag: {
|
||||
id: "type",
|
||||
name: "Type",
|
||||
description:
|
||||
"- Internal: The Core Lightning (CLN) service installed to your StartOS server.\n- External: A Core Lightning (CLN) instance running on a remote device (advanced).\n",
|
||||
"variant-names": {
|
||||
internal: "Internal",
|
||||
external: "External",
|
||||
},
|
||||
},
|
||||
default: "internal",
|
||||
variants: {
|
||||
internal: {},
|
||||
external: {
|
||||
address: {
|
||||
type: "string",
|
||||
name: "Public Address",
|
||||
description:
|
||||
"The public address of your CLNRest server\nNOTE: RTL does not support a .onion URL here\n",
|
||||
nullable: false,
|
||||
},
|
||||
"rest-port": {
|
||||
type: "number",
|
||||
name: "CLNRest Port",
|
||||
description: "The port that your CLNRest server is bound to",
|
||||
nullable: false,
|
||||
range: "[0,65535]",
|
||||
integral: true,
|
||||
default: 3010,
|
||||
},
|
||||
macaroon: {
|
||||
type: "string",
|
||||
name: "Rune",
|
||||
description:
|
||||
"Your CLNRest unrestricted Rune, Base64URL encoded.",
|
||||
nullable: false,
|
||||
masked: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
password: {
|
||||
type: "string",
|
||||
name: "Password",
|
||||
description: "The password for your Ride the Lightning dashboard",
|
||||
nullable: false,
|
||||
copyable: true,
|
||||
masked: true,
|
||||
default: {
|
||||
charset: "a-z,A-Z,0-9",
|
||||
len: 22,
|
||||
},
|
||||
},
|
||||
}
|
||||
@@ -1,5 +1,30 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`transformConfigSpec transformConfigSpec(RTL) 1`] = `
|
||||
{
|
||||
"password": {
|
||||
"default": {
|
||||
"charset": "a-z,A-Z,0-9",
|
||||
"len": 22,
|
||||
},
|
||||
"description": "The password for your Ride the Lightning dashboard",
|
||||
"disabled": false,
|
||||
"generate": null,
|
||||
"immutable": false,
|
||||
"inputmode": "text",
|
||||
"masked": true,
|
||||
"maxLength": null,
|
||||
"minLength": null,
|
||||
"name": "Password",
|
||||
"patterns": [],
|
||||
"placeholder": null,
|
||||
"required": true,
|
||||
"type": "text",
|
||||
"warning": null,
|
||||
},
|
||||
}
|
||||
`;
|
||||
|
||||
exports[`transformConfigSpec transformConfigSpec(bitcoind) 1`] = `
|
||||
{
|
||||
"advanced": {
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import {
|
||||
ExtendedVersion,
|
||||
FileHelper,
|
||||
getDataVersion,
|
||||
overlaps,
|
||||
types as T,
|
||||
utils,
|
||||
VersionRange,
|
||||
@@ -47,6 +50,7 @@ import {
|
||||
transformOldConfigToNew,
|
||||
} from "./transformConfigSpec"
|
||||
import { partialDiff } from "@start9labs/start-sdk/base/lib/util"
|
||||
import { Volume } from "@start9labs/start-sdk/package/lib/util/Volume"
|
||||
|
||||
type Optional<A> = A | undefined | null
|
||||
function todo(): never {
|
||||
@@ -55,8 +59,21 @@ function todo(): never {
|
||||
|
||||
const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json"
|
||||
export const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js"
|
||||
const EMBASSY_POINTER_PATH_PREFIX = "/embassyConfig" as utils.StorePath
|
||||
const EMBASSY_DEPENDS_ON_PATH_PREFIX = "/embassyDependsOn" as utils.StorePath
|
||||
|
||||
const configFile = FileHelper.json(
|
||||
{
|
||||
base: new Volume("embassy"),
|
||||
subpath: "config.json",
|
||||
},
|
||||
matches.any,
|
||||
)
|
||||
const dependsOnFile = FileHelper.json(
|
||||
{
|
||||
base: new Volume("embassy"),
|
||||
subpath: "dependsOn.json",
|
||||
},
|
||||
dictionary([string, array(string)]),
|
||||
)
|
||||
|
||||
const matchResult = object({
|
||||
result: any,
|
||||
@@ -94,47 +111,48 @@ const fromReturnType = <A>(a: U.ResultType<A>): A => {
|
||||
return assertNever(a)
|
||||
}
|
||||
|
||||
const matchSetResult = object(
|
||||
{
|
||||
"depends-on": dictionary([string, array(string)]),
|
||||
dependsOn: dictionary([string, array(string)]),
|
||||
signal: literals(
|
||||
"SIGTERM",
|
||||
"SIGHUP",
|
||||
"SIGINT",
|
||||
"SIGQUIT",
|
||||
"SIGILL",
|
||||
"SIGTRAP",
|
||||
"SIGABRT",
|
||||
"SIGBUS",
|
||||
"SIGFPE",
|
||||
"SIGKILL",
|
||||
"SIGUSR1",
|
||||
"SIGSEGV",
|
||||
"SIGUSR2",
|
||||
"SIGPIPE",
|
||||
"SIGALRM",
|
||||
"SIGSTKFLT",
|
||||
"SIGCHLD",
|
||||
"SIGCONT",
|
||||
"SIGSTOP",
|
||||
"SIGTSTP",
|
||||
"SIGTTIN",
|
||||
"SIGTTOU",
|
||||
"SIGURG",
|
||||
"SIGXCPU",
|
||||
"SIGXFSZ",
|
||||
"SIGVTALRM",
|
||||
"SIGPROF",
|
||||
"SIGWINCH",
|
||||
"SIGIO",
|
||||
"SIGPWR",
|
||||
"SIGSYS",
|
||||
"SIGINFO",
|
||||
),
|
||||
},
|
||||
["depends-on", "dependsOn"],
|
||||
)
|
||||
const matchSetResult = object({
|
||||
"depends-on": dictionary([string, array(string)])
|
||||
.nullable()
|
||||
.optional(),
|
||||
dependsOn: dictionary([string, array(string)])
|
||||
.nullable()
|
||||
.optional(),
|
||||
signal: literals(
|
||||
"SIGTERM",
|
||||
"SIGHUP",
|
||||
"SIGINT",
|
||||
"SIGQUIT",
|
||||
"SIGILL",
|
||||
"SIGTRAP",
|
||||
"SIGABRT",
|
||||
"SIGBUS",
|
||||
"SIGFPE",
|
||||
"SIGKILL",
|
||||
"SIGUSR1",
|
||||
"SIGSEGV",
|
||||
"SIGUSR2",
|
||||
"SIGPIPE",
|
||||
"SIGALRM",
|
||||
"SIGSTKFLT",
|
||||
"SIGCHLD",
|
||||
"SIGCONT",
|
||||
"SIGSTOP",
|
||||
"SIGTSTP",
|
||||
"SIGTTIN",
|
||||
"SIGTTOU",
|
||||
"SIGURG",
|
||||
"SIGXCPU",
|
||||
"SIGXFSZ",
|
||||
"SIGVTALRM",
|
||||
"SIGPROF",
|
||||
"SIGWINCH",
|
||||
"SIGIO",
|
||||
"SIGPWR",
|
||||
"SIGSYS",
|
||||
"SIGINFO",
|
||||
),
|
||||
})
|
||||
|
||||
type OldGetConfigRes = {
|
||||
config?: null | Record<string, unknown>
|
||||
@@ -174,14 +192,14 @@ export type PackagePropertiesV2 = {
|
||||
}
|
||||
export type PackagePropertyString = {
|
||||
type: "string"
|
||||
description?: string
|
||||
description?: string | null
|
||||
value: string
|
||||
/** Let's the ui make this copyable button */
|
||||
copyable?: boolean
|
||||
copyable?: boolean | null
|
||||
/** Let the ui create a qr for this field */
|
||||
qr?: boolean
|
||||
qr?: boolean | null
|
||||
/** Hiding the value unless toggled off for field */
|
||||
masked?: boolean
|
||||
masked?: boolean | null
|
||||
}
|
||||
export type PackagePropertyObject = {
|
||||
value: PackagePropertiesV2
|
||||
@@ -225,17 +243,14 @@ const matchPackagePropertyObject: Parser<unknown, PackagePropertyObject> =
|
||||
})
|
||||
|
||||
const matchPackagePropertyString: Parser<unknown, PackagePropertyString> =
|
||||
object(
|
||||
{
|
||||
type: literal("string"),
|
||||
description: string,
|
||||
value: string,
|
||||
copyable: boolean,
|
||||
qr: boolean,
|
||||
masked: boolean,
|
||||
},
|
||||
["copyable", "description", "qr", "masked"],
|
||||
)
|
||||
object({
|
||||
type: literal("string"),
|
||||
description: string.nullable().optional(),
|
||||
value: string,
|
||||
copyable: boolean.nullable().optional(),
|
||||
qr: boolean.nullable().optional(),
|
||||
masked: boolean.nullable().optional(),
|
||||
})
|
||||
setMatchPackageProperties(
|
||||
dictionary([
|
||||
string,
|
||||
@@ -273,8 +288,8 @@ function convertProperties(
|
||||
}
|
||||
}
|
||||
|
||||
const DEFAULT_REGISTRY = "https://registry.start9.com"
|
||||
export class SystemForEmbassy implements System {
|
||||
private version: ExtendedVersion
|
||||
currentRunning: MainLoop | undefined
|
||||
static async of(manifestLocation: string = MANIFEST_LOCATION) {
|
||||
const moduleCode = await import(EMBASSY_JS_LOCATION)
|
||||
@@ -296,11 +311,41 @@ export class SystemForEmbassy implements System {
|
||||
constructor(
|
||||
readonly manifest: Manifest,
|
||||
readonly moduleCode: Partial<U.ExpectedExports>,
|
||||
) {}
|
||||
) {
|
||||
this.version = ExtendedVersion.parseEmver(manifest.version)
|
||||
if (
|
||||
this.manifest.id === "bitcoind" &&
|
||||
this.manifest.title.toLowerCase().includes("knots")
|
||||
)
|
||||
this.version.flavor = "knots"
|
||||
|
||||
async containerInit(effects: Effects): Promise<void> {
|
||||
if (
|
||||
this.manifest.id === "lnd" ||
|
||||
this.manifest.id === "ride-the-lightning" ||
|
||||
this.manifest.id === "datum"
|
||||
) {
|
||||
this.version.upstream.prerelease = ["beta"]
|
||||
} else if (
|
||||
this.manifest.id === "lightning-terminal" ||
|
||||
this.manifest.id === "robosats"
|
||||
) {
|
||||
this.version.upstream.prerelease = ["alpha"]
|
||||
}
|
||||
|
||||
if (this.manifest.id === "nostr") {
|
||||
this.manifest.id = "nostr-rs-relay"
|
||||
}
|
||||
}
|
||||
|
||||
async init(
|
||||
effects: Effects,
|
||||
kind: "install" | "update" | "restore" | null,
|
||||
): Promise<void> {
|
||||
if (kind === "restore") {
|
||||
await this.restoreBackup(effects, null)
|
||||
}
|
||||
for (let depId in this.manifest.dependencies) {
|
||||
if (this.manifest.dependencies[depId].config) {
|
||||
if (this.manifest.dependencies[depId]?.config) {
|
||||
await this.dependenciesAutoconfig(effects, depId, null)
|
||||
}
|
||||
}
|
||||
@@ -308,6 +353,9 @@ export class SystemForEmbassy implements System {
|
||||
await this.exportActions(effects)
|
||||
await this.exportNetwork(effects)
|
||||
await this.containerSetDependencies(effects)
|
||||
if (kind === "install" || kind === "update") {
|
||||
await this.packageInit(effects, null)
|
||||
}
|
||||
}
|
||||
async containerSetDependencies(effects: T.Effects) {
|
||||
const oldDeps: Record<string, string[]> = Object.fromEntries(
|
||||
@@ -347,16 +395,23 @@ export class SystemForEmbassy implements System {
|
||||
}
|
||||
|
||||
async packageInit(effects: Effects, timeoutMs: number | null): Promise<void> {
|
||||
const previousVersion = await effects.getDataVersion()
|
||||
const previousVersion = await getDataVersion(effects)
|
||||
if (previousVersion) {
|
||||
if (
|
||||
(await this.migration(effects, { from: previousVersion }, timeoutMs))
|
||||
.configured
|
||||
) {
|
||||
await effects.action.clearRequests({ only: ["needs-config"] })
|
||||
const migrationRes = await this.migration(
|
||||
effects,
|
||||
{ from: previousVersion },
|
||||
timeoutMs,
|
||||
)
|
||||
if (migrationRes) {
|
||||
if (migrationRes.configured)
|
||||
await effects.action.clearTasks({ only: ["needs-config"] })
|
||||
await configFile.write(
|
||||
effects,
|
||||
await this.getConfig(effects, timeoutMs),
|
||||
)
|
||||
}
|
||||
} else if (this.manifest.config) {
|
||||
await effects.action.request({
|
||||
await effects.action.createTask({
|
||||
packageId: this.manifest.id,
|
||||
actionId: "config",
|
||||
severity: "critical",
|
||||
@@ -364,9 +419,11 @@ export class SystemForEmbassy implements System {
|
||||
reason: "This service must be configured before it can be run",
|
||||
})
|
||||
}
|
||||
|
||||
await effects.setDataVersion({
|
||||
version: ExtendedVersion.parseEmver(this.manifest.version).toString(),
|
||||
version: this.version.toString(),
|
||||
})
|
||||
// @FullMetal: package hacks go here
|
||||
}
|
||||
async exportNetwork(effects: Effects) {
|
||||
for (const [id, interfaceValue] of Object.entries(
|
||||
@@ -403,6 +460,7 @@ export class SystemForEmbassy implements System {
|
||||
addSsl = {
|
||||
preferredExternalPort: lanPortNum,
|
||||
alpn: { specified: [] },
|
||||
addXForwardedHeaders: false,
|
||||
}
|
||||
}
|
||||
return [
|
||||
@@ -441,7 +499,7 @@ export class SystemForEmbassy implements System {
|
||||
masked: false,
|
||||
path: "",
|
||||
schemeOverride: null,
|
||||
search: {},
|
||||
query: {},
|
||||
username: null,
|
||||
}),
|
||||
])
|
||||
@@ -456,13 +514,18 @@ export class SystemForEmbassy implements System {
|
||||
): Promise<T.ActionInput | null> {
|
||||
if (actionId === "config") {
|
||||
const config = await this.getConfig(effects, timeoutMs)
|
||||
return { spec: config.spec, value: config.config }
|
||||
return {
|
||||
eventId: effects.eventId!,
|
||||
spec: config.spec,
|
||||
value: config.config,
|
||||
}
|
||||
} else if (actionId === "properties") {
|
||||
return null
|
||||
} else {
|
||||
const oldSpec = this.manifest.actions?.[actionId]?.["input-spec"]
|
||||
if (!oldSpec) return null
|
||||
return {
|
||||
eventId: effects.eventId!,
|
||||
spec: transformConfigSpec(oldSpec as OldConfigSpec),
|
||||
value: null,
|
||||
}
|
||||
@@ -543,14 +606,14 @@ export class SystemForEmbassy implements System {
|
||||
}
|
||||
await effects.action.clear({ except: Object.keys(actions) })
|
||||
}
|
||||
async packageUninit(
|
||||
async uninit(
|
||||
effects: Effects,
|
||||
nextVersion: Optional<string>,
|
||||
timeoutMs: number | null,
|
||||
target: ExtendedVersion | VersionRange | null,
|
||||
timeoutMs?: number | null,
|
||||
): Promise<void> {
|
||||
await this.currentRunning?.clean({ timeout: timeoutMs ?? undefined })
|
||||
if (nextVersion) {
|
||||
await this.migration(effects, { to: nextVersion }, timeoutMs)
|
||||
if (target) {
|
||||
await this.migration(effects, { to: target }, timeoutMs ?? null)
|
||||
}
|
||||
await effects.setMainStatus({ status: "stopped" })
|
||||
}
|
||||
@@ -577,11 +640,21 @@ export class SystemForEmbassy implements System {
|
||||
const moduleCode = await this.moduleCode
|
||||
await moduleCode.createBackup?.(polyfillEffects(effects, this.manifest))
|
||||
}
|
||||
const dataVersion = await effects.getDataVersion()
|
||||
if (dataVersion)
|
||||
await fs.writeFile("/media/startos/backup/dataVersion.txt", dataVersion, {
|
||||
encoding: "utf-8",
|
||||
})
|
||||
}
|
||||
async restoreBackup(
|
||||
effects: Effects,
|
||||
timeoutMs: number | null,
|
||||
): Promise<void> {
|
||||
const store = await fs
|
||||
.readFile("/media/startos/backup/store.json", {
|
||||
encoding: "utf-8",
|
||||
})
|
||||
.catch((_) => null)
|
||||
const restoreBackup = this.manifest.backup.restore
|
||||
if (restoreBackup.type === "docker") {
|
||||
const commands = [restoreBackup.entrypoint, ...restoreBackup.args]
|
||||
@@ -600,6 +673,13 @@ export class SystemForEmbassy implements System {
|
||||
const moduleCode = await this.moduleCode
|
||||
await moduleCode.restoreBackup?.(polyfillEffects(effects, this.manifest))
|
||||
}
|
||||
|
||||
const dataVersion = await fs
|
||||
.readFile("/media/startos/backup/dataVersion.txt", {
|
||||
encoding: "utf-8",
|
||||
})
|
||||
.catch((_) => null)
|
||||
if (dataVersion) await effects.setDataVersion({ version: dataVersion })
|
||||
}
|
||||
async getConfig(effects: Effects, timeoutMs: number | null) {
|
||||
return this.getConfigUncleaned(effects, timeoutMs).then(convertToNewConfig)
|
||||
@@ -649,10 +729,7 @@ export class SystemForEmbassy implements System {
|
||||
structuredClone(newConfigWithoutPointers as Record<string, unknown>),
|
||||
)
|
||||
await updateConfig(effects, this.manifest, spec, newConfig)
|
||||
await effects.store.set({
|
||||
path: EMBASSY_POINTER_PATH_PREFIX,
|
||||
value: newConfig,
|
||||
})
|
||||
await configFile.write(effects, newConfig)
|
||||
const setConfigValue = this.manifest.config?.set
|
||||
if (!setConfigValue) return
|
||||
if (setConfigValue.type === "docker") {
|
||||
@@ -706,15 +783,11 @@ export class SystemForEmbassy implements System {
|
||||
rawDepends: { [x: string]: readonly string[] },
|
||||
configuring: boolean,
|
||||
) {
|
||||
const storedDependsOn = (await effects.store.get({
|
||||
packageId: this.manifest.id,
|
||||
path: EMBASSY_DEPENDS_ON_PATH_PREFIX,
|
||||
})) as Record<string, readonly string[]>
|
||||
|
||||
const storedDependsOn = await dependsOnFile.read().once()
|
||||
const requiredDeps = {
|
||||
...Object.fromEntries(
|
||||
Object.entries(this.manifest.dependencies || {})
|
||||
?.filter((x) => x[1].requirement.type === "required")
|
||||
Object.entries(this.manifest.dependencies ?? {})
|
||||
.filter(([k, v]) => v?.requirement.type === "required")
|
||||
.map((x) => [x[0], []]) || [],
|
||||
),
|
||||
}
|
||||
@@ -728,10 +801,7 @@ export class SystemForEmbassy implements System {
|
||||
? storedDependsOn
|
||||
: requiredDeps
|
||||
|
||||
await effects.store.set({
|
||||
path: EMBASSY_DEPENDS_ON_PATH_PREFIX,
|
||||
value: dependsOn,
|
||||
})
|
||||
await dependsOnFile.write(effects, dependsOn)
|
||||
|
||||
await effects.setDependencies({
|
||||
dependencies: Object.entries(dependsOn).flatMap(
|
||||
@@ -755,31 +825,33 @@ export class SystemForEmbassy implements System {
|
||||
|
||||
async migration(
|
||||
effects: Effects,
|
||||
version: { from: string } | { to: string },
|
||||
version:
|
||||
| { from: VersionRange | ExtendedVersion }
|
||||
| { to: VersionRange | ExtendedVersion },
|
||||
timeoutMs: number | null,
|
||||
): Promise<{ configured: boolean }> {
|
||||
): Promise<{ configured: boolean } | null> {
|
||||
let migration
|
||||
let args: [string, ...string[]]
|
||||
if ("from" in version) {
|
||||
args = [version.from, "from"]
|
||||
const fromExver = ExtendedVersion.parse(version.from)
|
||||
if (overlaps(this.version, version.from)) return null
|
||||
args = [version.from.toString(), "from"]
|
||||
if (!this.manifest.migrations) return { configured: true }
|
||||
migration = Object.entries(this.manifest.migrations.from)
|
||||
.map(
|
||||
([version, procedure]) =>
|
||||
[VersionRange.parseEmver(version), procedure] as const,
|
||||
)
|
||||
.find(([versionEmver, _]) => versionEmver.satisfiedBy(fromExver))
|
||||
.find(([versionEmver, _]) => overlaps(versionEmver, version.from))
|
||||
} else {
|
||||
args = [version.to, "to"]
|
||||
const toExver = ExtendedVersion.parse(version.to)
|
||||
if (overlaps(this.version, version.to)) return null
|
||||
args = [version.to.toString(), "to"]
|
||||
if (!this.manifest.migrations) return { configured: true }
|
||||
migration = Object.entries(this.manifest.migrations.to)
|
||||
.map(
|
||||
([version, procedure]) =>
|
||||
[VersionRange.parseEmver(version), procedure] as const,
|
||||
)
|
||||
.find(([versionEmver, _]) => versionEmver.satisfiedBy(toExver))
|
||||
.find(([versionEmver, _]) => overlaps(versionEmver, version.to))
|
||||
}
|
||||
|
||||
if (migration) {
|
||||
@@ -815,13 +887,12 @@ export class SystemForEmbassy implements System {
|
||||
})) as any
|
||||
}
|
||||
}
|
||||
return { configured: true }
|
||||
return null
|
||||
}
|
||||
async properties(
|
||||
effects: Effects,
|
||||
timeoutMs: number | null,
|
||||
): Promise<PropertiesReturn> {
|
||||
// TODO BLU-J set the properties ever so often
|
||||
const setConfigValue = this.manifest.properties
|
||||
if (!setConfigValue) throw new Error("There is no properties")
|
||||
if (setConfigValue.type === "docker") {
|
||||
@@ -969,43 +1040,52 @@ export class SystemForEmbassy implements System {
|
||||
timeoutMs: number | null,
|
||||
): Promise<void> {
|
||||
// TODO: docker
|
||||
const oldConfig = (await effects.store.get({
|
||||
packageId: id,
|
||||
path: EMBASSY_POINTER_PATH_PREFIX,
|
||||
callback: () => {
|
||||
this.dependenciesAutoconfig(effects, id, timeoutMs)
|
||||
},
|
||||
})) as U.Config
|
||||
if (!oldConfig) return
|
||||
const moduleCode = await this.moduleCode
|
||||
const method = moduleCode?.dependencies?.[id]?.autoConfigure
|
||||
if (!method) return
|
||||
const newConfig = (await method(
|
||||
polyfillEffects(effects, this.manifest),
|
||||
JSON.parse(JSON.stringify(oldConfig)),
|
||||
).then((x) => {
|
||||
if ("result" in x) return x.result
|
||||
if ("error" in x) throw new Error("Error getting config: " + x.error)
|
||||
throw new Error("Error getting config: " + x["error-code"][1])
|
||||
})) as any
|
||||
const diff = partialDiff(oldConfig, newConfig)
|
||||
if (diff) {
|
||||
await effects.action.request({
|
||||
actionId: "config",
|
||||
await effects.mount({
|
||||
location: `/media/embassy/${id}`,
|
||||
target: {
|
||||
packageId: id,
|
||||
replayId: `${id}/config`,
|
||||
severity: "important",
|
||||
reason: `Configure this dependency for the needs of ${this.manifest.title}`,
|
||||
input: {
|
||||
kind: "partial",
|
||||
value: diff.diff,
|
||||
},
|
||||
when: {
|
||||
condition: "input-not-matches",
|
||||
once: false,
|
||||
},
|
||||
volumeId: "embassy",
|
||||
subpath: null,
|
||||
readonly: true,
|
||||
idmap: [],
|
||||
},
|
||||
})
|
||||
configFile
|
||||
.withPath(`/media/embassy/${id}/config.json`)
|
||||
.read()
|
||||
.onChange(effects, async (oldConfig: U.Config) => {
|
||||
if (!oldConfig) return { cancel: false }
|
||||
const moduleCode = await this.moduleCode
|
||||
const method = moduleCode?.dependencies?.[id]?.autoConfigure
|
||||
if (!method) return { cancel: true }
|
||||
const newConfig = (await method(
|
||||
polyfillEffects(effects, this.manifest),
|
||||
JSON.parse(JSON.stringify(oldConfig)),
|
||||
).then((x) => {
|
||||
if ("result" in x) return x.result
|
||||
if ("error" in x) throw new Error("Error getting config: " + x.error)
|
||||
throw new Error("Error getting config: " + x["error-code"][1])
|
||||
})) as any
|
||||
const diff = partialDiff(oldConfig, newConfig)
|
||||
if (diff) {
|
||||
await effects.action.createTask({
|
||||
actionId: "config",
|
||||
packageId: id,
|
||||
replayId: `${id}/config`,
|
||||
severity: "important",
|
||||
reason: `Configure this dependency for the needs of ${this.manifest.title}`,
|
||||
input: {
|
||||
kind: "partial",
|
||||
value: diff.diff,
|
||||
},
|
||||
when: {
|
||||
condition: "input-not-matches",
|
||||
once: false,
|
||||
},
|
||||
})
|
||||
}
|
||||
return { cancel: false }
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1107,11 +1187,21 @@ async function updateConfig(
|
||||
) {
|
||||
if (specValue.target === "config") {
|
||||
const jp = require("jsonpath")
|
||||
const remoteConfig = await effects.store.get({
|
||||
packageId: specValue["package-id"],
|
||||
callback: () => effects.restart(),
|
||||
path: EMBASSY_POINTER_PATH_PREFIX,
|
||||
const depId = specValue["package-id"]
|
||||
await effects.mount({
|
||||
location: `/media/embassy/${depId}`,
|
||||
target: {
|
||||
packageId: depId,
|
||||
volumeId: "embassy",
|
||||
subpath: null,
|
||||
readonly: true,
|
||||
idmap: [],
|
||||
},
|
||||
})
|
||||
const remoteConfig = configFile
|
||||
.withPath(`/media/embassy/${depId}/config.json`)
|
||||
.read()
|
||||
.once()
|
||||
console.debug(remoteConfig)
|
||||
const configValue = specValue.multi
|
||||
? jp.query(remoteConfig, specValue.selector)
|
||||
@@ -1152,14 +1242,14 @@ async function updateConfig(
|
||||
const url: string =
|
||||
filled === null || filled.addressInfo === null
|
||||
? ""
|
||||
: catchFn(() =>
|
||||
utils.hostnameInfoToAddress(
|
||||
specValue.target === "lan-address"
|
||||
? filled.addressInfo!.localHostnames[0] ||
|
||||
filled.addressInfo!.onionHostnames[0]
|
||||
: filled.addressInfo!.onionHostnames[0] ||
|
||||
filled.addressInfo!.localHostnames[0],
|
||||
),
|
||||
: catchFn(
|
||||
() =>
|
||||
(specValue.target === "lan-address"
|
||||
? filled.addressInfo!.filter({ kind: "mdns" }) ||
|
||||
filled.addressInfo!.onion
|
||||
: filled.addressInfo!.onion ||
|
||||
filled.addressInfo!.filter({ kind: "mdns" })
|
||||
).hostnames[0].hostname.value,
|
||||
) || ""
|
||||
mutConfigValue[key] = url
|
||||
}
|
||||
|
||||
@@ -14,123 +14,113 @@ import {
|
||||
import { matchVolume } from "./matchVolume"
|
||||
import { matchDockerProcedure } from "../../../Models/DockerProcedure"
|
||||
|
||||
const matchJsProcedure = object(
|
||||
{
|
||||
type: literal("script"),
|
||||
args: array(unknown),
|
||||
},
|
||||
["args"],
|
||||
{
|
||||
args: [],
|
||||
},
|
||||
)
|
||||
const matchJsProcedure = object({
|
||||
type: literal("script"),
|
||||
args: array(unknown).nullable().optional().defaultTo([]),
|
||||
})
|
||||
|
||||
const matchProcedure = some(matchDockerProcedure, matchJsProcedure)
|
||||
export type Procedure = typeof matchProcedure._TYPE
|
||||
|
||||
const matchAction = object(
|
||||
{
|
||||
name: string,
|
||||
description: string,
|
||||
warning: string,
|
||||
implementation: matchProcedure,
|
||||
"allowed-statuses": array(literals("running", "stopped")),
|
||||
"input-spec": unknown,
|
||||
},
|
||||
["warning", "input-spec", "input-spec"],
|
||||
)
|
||||
export const matchManifest = object(
|
||||
{
|
||||
id: string,
|
||||
title: string,
|
||||
version: string,
|
||||
main: matchDockerProcedure,
|
||||
assets: object(
|
||||
{
|
||||
assets: string,
|
||||
scripts: string,
|
||||
},
|
||||
["assets", "scripts"],
|
||||
const matchAction = object({
|
||||
name: string,
|
||||
description: string,
|
||||
warning: string.nullable().optional(),
|
||||
implementation: matchProcedure,
|
||||
"allowed-statuses": array(literals("running", "stopped")),
|
||||
"input-spec": unknown.nullable().optional(),
|
||||
})
|
||||
export const matchManifest = object({
|
||||
id: string,
|
||||
title: string,
|
||||
version: string,
|
||||
main: matchDockerProcedure,
|
||||
assets: object({
|
||||
assets: string.nullable().optional(),
|
||||
scripts: string.nullable().optional(),
|
||||
})
|
||||
.nullable()
|
||||
.optional(),
|
||||
"health-checks": dictionary([
|
||||
string,
|
||||
every(
|
||||
matchProcedure,
|
||||
object({
|
||||
name: string,
|
||||
["success-message"]: string.nullable().optional(),
|
||||
}),
|
||||
),
|
||||
"health-checks": dictionary([
|
||||
string,
|
||||
every(
|
||||
matchProcedure,
|
||||
object(
|
||||
{
|
||||
name: string,
|
||||
["success-message"]: string,
|
||||
},
|
||||
["success-message"],
|
||||
),
|
||||
),
|
||||
]),
|
||||
config: object({
|
||||
get: matchProcedure,
|
||||
set: matchProcedure,
|
||||
]),
|
||||
config: object({
|
||||
get: matchProcedure,
|
||||
set: matchProcedure,
|
||||
})
|
||||
.nullable()
|
||||
.optional(),
|
||||
properties: matchProcedure.nullable().optional(),
|
||||
volumes: dictionary([string, matchVolume]),
|
||||
interfaces: dictionary([
|
||||
string,
|
||||
object({
|
||||
name: string,
|
||||
description: string,
|
||||
"tor-config": object({
|
||||
"port-mapping": dictionary([string, string]),
|
||||
})
|
||||
.nullable()
|
||||
.optional(),
|
||||
"lan-config": dictionary([
|
||||
string,
|
||||
object({
|
||||
ssl: boolean,
|
||||
internal: number,
|
||||
}),
|
||||
])
|
||||
.nullable()
|
||||
.optional(),
|
||||
ui: boolean,
|
||||
protocols: array(string),
|
||||
}),
|
||||
properties: matchProcedure,
|
||||
volumes: dictionary([string, matchVolume]),
|
||||
interfaces: dictionary([
|
||||
string,
|
||||
object(
|
||||
{
|
||||
name: string,
|
||||
description: string,
|
||||
"tor-config": object({
|
||||
"port-mapping": dictionary([string, string]),
|
||||
}),
|
||||
"lan-config": dictionary([
|
||||
string,
|
||||
object({
|
||||
ssl: boolean,
|
||||
internal: number,
|
||||
}),
|
||||
]),
|
||||
ui: boolean,
|
||||
protocols: array(string),
|
||||
},
|
||||
["lan-config", "tor-config"],
|
||||
]),
|
||||
backup: object({
|
||||
create: matchProcedure,
|
||||
restore: matchProcedure,
|
||||
}),
|
||||
migrations: object({
|
||||
to: dictionary([string, matchProcedure]),
|
||||
from: dictionary([string, matchProcedure]),
|
||||
})
|
||||
.nullable()
|
||||
.optional(),
|
||||
dependencies: dictionary([
|
||||
string,
|
||||
object({
|
||||
version: string,
|
||||
requirement: some(
|
||||
object({
|
||||
type: literal("opt-in"),
|
||||
how: string,
|
||||
}),
|
||||
object({
|
||||
type: literal("opt-out"),
|
||||
how: string,
|
||||
}),
|
||||
object({
|
||||
type: literal("required"),
|
||||
}),
|
||||
),
|
||||
]),
|
||||
backup: object({
|
||||
create: matchProcedure,
|
||||
restore: matchProcedure,
|
||||
}),
|
||||
migrations: object({
|
||||
to: dictionary([string, matchProcedure]),
|
||||
from: dictionary([string, matchProcedure]),
|
||||
}),
|
||||
dependencies: dictionary([
|
||||
string,
|
||||
object(
|
||||
{
|
||||
version: string,
|
||||
requirement: some(
|
||||
object({
|
||||
type: literal("opt-in"),
|
||||
how: string,
|
||||
}),
|
||||
object({
|
||||
type: literal("opt-out"),
|
||||
how: string,
|
||||
}),
|
||||
object({
|
||||
type: literal("required"),
|
||||
}),
|
||||
),
|
||||
description: string,
|
||||
config: object({
|
||||
check: matchProcedure,
|
||||
"auto-configure": matchProcedure,
|
||||
}),
|
||||
},
|
||||
["description", "config"],
|
||||
),
|
||||
]),
|
||||
description: string.nullable().optional(),
|
||||
config: object({
|
||||
check: matchProcedure,
|
||||
"auto-configure": matchProcedure,
|
||||
})
|
||||
.nullable()
|
||||
.optional(),
|
||||
})
|
||||
.nullable()
|
||||
.optional(),
|
||||
]),
|
||||
|
||||
actions: dictionary([string, matchAction]),
|
||||
},
|
||||
["config", "actions", "properties", "migrations", "dependencies"],
|
||||
)
|
||||
actions: dictionary([string, matchAction]),
|
||||
})
|
||||
export type Manifest = typeof matchManifest._TYPE
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
import { object, literal, string, boolean, some } from "ts-matches"
|
||||
|
||||
const matchDataVolume = object(
|
||||
{
|
||||
type: literal("data"),
|
||||
readonly: boolean,
|
||||
},
|
||||
["readonly"],
|
||||
)
|
||||
const matchDataVolume = object({
|
||||
type: literal("data"),
|
||||
readonly: boolean.optional(),
|
||||
})
|
||||
const matchAssetVolume = object({
|
||||
type: literal("assets"),
|
||||
})
|
||||
|
||||
@@ -135,12 +135,9 @@ export const polyfillEffects = (
|
||||
[input.command, ...(input.args || [])].join(" "),
|
||||
)
|
||||
const daemon = promiseSubcontainer.then((subcontainer) =>
|
||||
daemons.runCommand()(
|
||||
effects,
|
||||
subcontainer,
|
||||
[input.command, ...(input.args || [])],
|
||||
{},
|
||||
),
|
||||
daemons.runCommand()(effects, subcontainer, {
|
||||
command: [input.command, ...(input.args || [])],
|
||||
}),
|
||||
)
|
||||
return {
|
||||
wait: () =>
|
||||
@@ -169,12 +166,12 @@ export const polyfillEffects = (
|
||||
{ imageId: manifest.main.image },
|
||||
commands,
|
||||
{
|
||||
mounts: Mounts.of().addVolume(
|
||||
input.volumeId,
|
||||
null,
|
||||
"/drive",
|
||||
false,
|
||||
),
|
||||
mounts: Mounts.of().mountVolume({
|
||||
volumeId: input.volumeId,
|
||||
subpath: null,
|
||||
mountpoint: "/drive",
|
||||
readonly: false,
|
||||
}),
|
||||
},
|
||||
commands.join(" "),
|
||||
)
|
||||
@@ -206,12 +203,12 @@ export const polyfillEffects = (
|
||||
{ imageId: manifest.main.image },
|
||||
commands,
|
||||
{
|
||||
mounts: Mounts.of().addVolume(
|
||||
input.volumeId,
|
||||
null,
|
||||
"/drive",
|
||||
false,
|
||||
),
|
||||
mounts: Mounts.of().mountVolume({
|
||||
volumeId: input.volumeId,
|
||||
subpath: null,
|
||||
mountpoint: "/drive",
|
||||
readonly: false,
|
||||
}),
|
||||
},
|
||||
commands.join(" "),
|
||||
)
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
import { matchOldConfigSpec, transformConfigSpec } from "./transformConfigSpec"
|
||||
import fixtureEmbasyPagesConfig from "./__fixtures__/embasyPagesConfig"
|
||||
import {
|
||||
matchOldConfigSpec,
|
||||
matchOldValueSpecList,
|
||||
transformConfigSpec,
|
||||
} from "./transformConfigSpec"
|
||||
import fixtureEmbassyPagesConfig from "./__fixtures__/embassyPagesConfig"
|
||||
import fixtureRTLConfig from "./__fixtures__/rtlConfig"
|
||||
import searNXG from "./__fixtures__/searNXG"
|
||||
import bitcoind from "./__fixtures__/bitcoind"
|
||||
import nostr from "./__fixtures__/nostr"
|
||||
@@ -8,14 +13,25 @@ import nostrConfig2 from "./__fixtures__/nostrConfig2"
|
||||
describe("transformConfigSpec", () => {
|
||||
test("matchOldConfigSpec(embassyPages.homepage.variants[web-page])", () => {
|
||||
matchOldConfigSpec.unsafeCast(
|
||||
fixtureEmbasyPagesConfig.homepage.variants["web-page"],
|
||||
fixtureEmbassyPagesConfig.homepage.variants["web-page"],
|
||||
)
|
||||
})
|
||||
test("matchOldConfigSpec(embassyPages)", () => {
|
||||
matchOldConfigSpec.unsafeCast(fixtureEmbasyPagesConfig)
|
||||
matchOldConfigSpec.unsafeCast(fixtureEmbassyPagesConfig)
|
||||
})
|
||||
test("transformConfigSpec(embassyPages)", () => {
|
||||
const spec = matchOldConfigSpec.unsafeCast(fixtureEmbasyPagesConfig)
|
||||
const spec = matchOldConfigSpec.unsafeCast(fixtureEmbassyPagesConfig)
|
||||
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
||||
})
|
||||
|
||||
test("matchOldConfigSpec(RTL.nodes)", () => {
|
||||
matchOldValueSpecList.unsafeCast(fixtureRTLConfig.nodes)
|
||||
})
|
||||
test("matchOldConfigSpec(RTL)", () => {
|
||||
matchOldConfigSpec.unsafeCast(fixtureRTLConfig)
|
||||
})
|
||||
test("transformConfigSpec(RTL)", () => {
|
||||
const spec = matchOldConfigSpec.unsafeCast(fixtureRTLConfig)
|
||||
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
||||
})
|
||||
|
||||
|
||||
@@ -47,6 +47,7 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): IST.InputSpec {
|
||||
immutable: false,
|
||||
}
|
||||
} else if (oldVal.type === "list") {
|
||||
if (isUnionList(oldVal)) return inputSpec
|
||||
newVal = getListSpec(oldVal)
|
||||
} else if (oldVal.type === "number") {
|
||||
const range = Range.from(oldVal.range)
|
||||
@@ -177,15 +178,17 @@ export function transformOldConfigToNew(
|
||||
}
|
||||
}
|
||||
|
||||
if (isList(val) && isObjectList(val)) {
|
||||
if (isList(val)) {
|
||||
if (!config[key]) return obj
|
||||
|
||||
newVal = (config[key] as object[]).map((obj) =>
|
||||
transformOldConfigToNew(
|
||||
matchOldConfigSpec.unsafeCast(val.spec.spec),
|
||||
obj,
|
||||
),
|
||||
)
|
||||
if (isObjectList(val)) {
|
||||
newVal = (config[key] as object[]).map((obj) =>
|
||||
transformOldConfigToNew(
|
||||
matchOldConfigSpec.unsafeCast(val.spec.spec),
|
||||
obj,
|
||||
),
|
||||
)
|
||||
} else if (isUnionList(val)) return obj
|
||||
}
|
||||
|
||||
if (isPointer(val)) {
|
||||
@@ -203,6 +206,7 @@ export function transformNewConfigToOld(
|
||||
spec: OldConfigSpec,
|
||||
config: Record<string, any>,
|
||||
): Record<string, any> {
|
||||
if (!config) return config
|
||||
return Object.entries(spec).reduce((obj, [key, val]) => {
|
||||
let newVal = config[key]
|
||||
|
||||
@@ -223,13 +227,15 @@ export function transformNewConfigToOld(
|
||||
}
|
||||
}
|
||||
|
||||
if (isList(val) && isObjectList(val)) {
|
||||
newVal = (config[key] as object[]).map((obj) =>
|
||||
transformNewConfigToOld(
|
||||
matchOldConfigSpec.unsafeCast(val.spec.spec),
|
||||
obj,
|
||||
),
|
||||
)
|
||||
if (isList(val)) {
|
||||
if (isObjectList(val)) {
|
||||
newVal = (config[key] as object[]).map((obj) =>
|
||||
transformNewConfigToOld(
|
||||
matchOldConfigSpec.unsafeCast(val.spec.spec),
|
||||
obj,
|
||||
),
|
||||
)
|
||||
} else if (isUnionList(val)) return obj
|
||||
}
|
||||
|
||||
return {
|
||||
@@ -375,15 +381,17 @@ function isNumberList(
|
||||
): val is OldValueSpecList & { subtype: "number" } {
|
||||
return val.subtype === "number"
|
||||
}
|
||||
|
||||
function isObjectList(
|
||||
val: OldValueSpecList,
|
||||
): val is OldValueSpecList & { subtype: "object" } {
|
||||
if (["union"].includes(val.subtype)) {
|
||||
throw new Error("Invalid list subtype. enum, string, and object permitted.")
|
||||
}
|
||||
return val.subtype === "object"
|
||||
}
|
||||
function isUnionList(
|
||||
val: OldValueSpecList,
|
||||
): val is OldValueSpecList & { subtype: "union" } {
|
||||
return val.subtype === "union"
|
||||
}
|
||||
|
||||
export type OldConfigSpec = Record<string, OldValueSpec>
|
||||
const [_matchOldConfigSpec, setMatchOldConfigSpec] = deferred<unknown>()
|
||||
export const matchOldConfigSpec = _matchOldConfigSpec as Parser<
|
||||
@@ -396,100 +404,71 @@ export const matchOldDefaultString = anyOf(
|
||||
)
|
||||
type OldDefaultString = typeof matchOldDefaultString._TYPE
|
||||
|
||||
export const matchOldValueSpecString = object(
|
||||
{
|
||||
type: literals("string"),
|
||||
name: string,
|
||||
masked: boolean,
|
||||
copyable: boolean,
|
||||
nullable: boolean,
|
||||
placeholder: string,
|
||||
pattern: string,
|
||||
"pattern-description": string,
|
||||
default: matchOldDefaultString,
|
||||
textarea: boolean,
|
||||
description: string,
|
||||
warning: string,
|
||||
},
|
||||
[
|
||||
"masked",
|
||||
"copyable",
|
||||
"nullable",
|
||||
"placeholder",
|
||||
"pattern",
|
||||
"pattern-description",
|
||||
"default",
|
||||
"textarea",
|
||||
"description",
|
||||
"warning",
|
||||
],
|
||||
)
|
||||
export const matchOldValueSpecString = object({
|
||||
type: literals("string"),
|
||||
name: string,
|
||||
masked: boolean.nullable().optional(),
|
||||
copyable: boolean.nullable().optional(),
|
||||
nullable: boolean.nullable().optional(),
|
||||
placeholder: string.nullable().optional(),
|
||||
pattern: string.nullable().optional(),
|
||||
"pattern-description": string.nullable().optional(),
|
||||
default: matchOldDefaultString.nullable().optional(),
|
||||
textarea: boolean.nullable().optional(),
|
||||
description: string.nullable().optional(),
|
||||
warning: string.nullable().optional(),
|
||||
})
|
||||
|
||||
export const matchOldValueSpecNumber = object(
|
||||
{
|
||||
type: literals("number"),
|
||||
nullable: boolean,
|
||||
name: string,
|
||||
range: string,
|
||||
integral: boolean,
|
||||
default: number,
|
||||
description: string,
|
||||
warning: string,
|
||||
units: string,
|
||||
placeholder: anyOf(number, string),
|
||||
},
|
||||
["default", "description", "warning", "units", "placeholder"],
|
||||
)
|
||||
export const matchOldValueSpecNumber = object({
|
||||
type: literals("number"),
|
||||
nullable: boolean,
|
||||
name: string,
|
||||
range: string,
|
||||
integral: boolean,
|
||||
default: number.nullable().optional(),
|
||||
description: string.nullable().optional(),
|
||||
warning: string.nullable().optional(),
|
||||
units: string.nullable().optional(),
|
||||
placeholder: anyOf(number, string).nullable().optional(),
|
||||
})
|
||||
type OldValueSpecNumber = typeof matchOldValueSpecNumber._TYPE
|
||||
|
||||
export const matchOldValueSpecBoolean = object(
|
||||
{
|
||||
type: literals("boolean"),
|
||||
default: boolean,
|
||||
name: string,
|
||||
description: string,
|
||||
warning: string,
|
||||
},
|
||||
["description", "warning"],
|
||||
)
|
||||
export const matchOldValueSpecBoolean = object({
|
||||
type: literals("boolean"),
|
||||
default: boolean,
|
||||
name: string,
|
||||
description: string.nullable().optional(),
|
||||
warning: string.nullable().optional(),
|
||||
})
|
||||
type OldValueSpecBoolean = typeof matchOldValueSpecBoolean._TYPE
|
||||
|
||||
const matchOldValueSpecObject = object(
|
||||
{
|
||||
type: literals("object"),
|
||||
spec: _matchOldConfigSpec,
|
||||
name: string,
|
||||
description: string,
|
||||
warning: string,
|
||||
},
|
||||
["description", "warning"],
|
||||
)
|
||||
const matchOldValueSpecObject = object({
|
||||
type: literals("object"),
|
||||
spec: _matchOldConfigSpec,
|
||||
name: string,
|
||||
description: string.nullable().optional(),
|
||||
warning: string.nullable().optional(),
|
||||
})
|
||||
type OldValueSpecObject = typeof matchOldValueSpecObject._TYPE
|
||||
|
||||
const matchOldValueSpecEnum = object(
|
||||
{
|
||||
values: array(string),
|
||||
"value-names": dictionary([string, string]),
|
||||
type: literals("enum"),
|
||||
default: string,
|
||||
name: string,
|
||||
description: string,
|
||||
warning: string,
|
||||
},
|
||||
["description", "warning"],
|
||||
)
|
||||
const matchOldValueSpecEnum = object({
|
||||
values: array(string),
|
||||
"value-names": dictionary([string, string]),
|
||||
type: literals("enum"),
|
||||
default: string,
|
||||
name: string,
|
||||
description: string.nullable().optional(),
|
||||
warning: string.nullable().optional(),
|
||||
})
|
||||
type OldValueSpecEnum = typeof matchOldValueSpecEnum._TYPE
|
||||
|
||||
const matchOldUnionTagSpec = object(
|
||||
{
|
||||
id: string, // The name of the field containing one of the union variants
|
||||
"variant-names": dictionary([string, string]), // The name of each variant
|
||||
name: string,
|
||||
description: string,
|
||||
warning: string,
|
||||
},
|
||||
["description", "warning"],
|
||||
)
|
||||
const matchOldUnionTagSpec = object({
|
||||
id: string, // The name of the field containing one of the union variants
|
||||
"variant-names": dictionary([string, string]), // The name of each variant
|
||||
name: string,
|
||||
description: string.nullable().optional(),
|
||||
warning: string.nullable().optional(),
|
||||
})
|
||||
const matchOldValueSpecUnion = object({
|
||||
type: literals("union"),
|
||||
tag: matchOldUnionTagSpec,
|
||||
@@ -514,57 +493,51 @@ setOldUniqueBy(
|
||||
),
|
||||
)
|
||||
|
||||
const matchOldListValueSpecObject = object(
|
||||
{
|
||||
spec: _matchOldConfigSpec, // this is a mapped type of the config object at this level, replacing the object's values with specs on those values
|
||||
"unique-by": matchOldUniqueBy, // indicates whether duplicates can be permitted in the list
|
||||
"display-as": string, // this should be a handlebars template which can make use of the entire config which corresponds to 'spec'
|
||||
},
|
||||
["display-as", "unique-by"],
|
||||
)
|
||||
const matchOldListValueSpecString = object(
|
||||
{
|
||||
masked: boolean,
|
||||
copyable: boolean,
|
||||
pattern: string,
|
||||
"pattern-description": string,
|
||||
placeholder: string,
|
||||
},
|
||||
["pattern", "pattern-description", "placeholder", "copyable", "masked"],
|
||||
)
|
||||
const matchOldListValueSpecObject = object({
|
||||
spec: _matchOldConfigSpec, // this is a mapped type of the config object at this level, replacing the object's values with specs on those values
|
||||
"unique-by": matchOldUniqueBy.nullable().optional(), // indicates whether duplicates can be permitted in the list
|
||||
"display-as": string.nullable().optional(), // this should be a handlebars template which can make use of the entire config which corresponds to 'spec'
|
||||
})
|
||||
const matchOldListValueSpecUnion = object({
|
||||
"unique-by": matchOldUniqueBy.nullable().optional(),
|
||||
"display-as": string.nullable().optional(),
|
||||
tag: matchOldUnionTagSpec,
|
||||
variants: dictionary([string, _matchOldConfigSpec]),
|
||||
})
|
||||
const matchOldListValueSpecString = object({
|
||||
masked: boolean.nullable().optional(),
|
||||
copyable: boolean.nullable().optional(),
|
||||
pattern: string.nullable().optional(),
|
||||
"pattern-description": string.nullable().optional(),
|
||||
placeholder: string.nullable().optional(),
|
||||
})
|
||||
|
||||
const matchOldListValueSpecEnum = object({
|
||||
values: array(string),
|
||||
"value-names": dictionary([string, string]),
|
||||
})
|
||||
const matchOldListValueSpecNumber = object(
|
||||
{
|
||||
range: string,
|
||||
integral: boolean,
|
||||
units: string,
|
||||
placeholder: anyOf(number, string),
|
||||
},
|
||||
["units", "placeholder"],
|
||||
)
|
||||
const matchOldListValueSpecNumber = object({
|
||||
range: string,
|
||||
integral: boolean,
|
||||
units: string.nullable().optional(),
|
||||
placeholder: anyOf(number, string).nullable().optional(),
|
||||
})
|
||||
|
||||
// represents a spec for a list
|
||||
const matchOldValueSpecList = every(
|
||||
object(
|
||||
{
|
||||
type: literals("list"),
|
||||
range: string, // '[0,1]' (inclusive) OR '[0,*)' (right unbounded), normal math rules
|
||||
default: anyOf(
|
||||
array(string),
|
||||
array(number),
|
||||
array(matchOldDefaultString),
|
||||
array(object),
|
||||
),
|
||||
name: string,
|
||||
description: string,
|
||||
warning: string,
|
||||
},
|
||||
["description", "warning"],
|
||||
),
|
||||
export const matchOldValueSpecList = every(
|
||||
object({
|
||||
type: literals("list"),
|
||||
range: string, // '[0,1]' (inclusive) OR '[0,*)' (right unbounded), normal math rules
|
||||
default: anyOf(
|
||||
array(string),
|
||||
array(number),
|
||||
array(matchOldDefaultString),
|
||||
array(object),
|
||||
),
|
||||
name: string,
|
||||
description: string.nullable().optional(),
|
||||
warning: string.nullable().optional(),
|
||||
}),
|
||||
anyOf(
|
||||
object({
|
||||
subtype: literals("string"),
|
||||
@@ -582,6 +555,10 @@ const matchOldValueSpecList = every(
|
||||
subtype: literals("number"),
|
||||
spec: matchOldListValueSpecNumber,
|
||||
}),
|
||||
object({
|
||||
subtype: literals("union"),
|
||||
spec: matchOldListValueSpecUnion,
|
||||
}),
|
||||
),
|
||||
)
|
||||
type OldValueSpecList = typeof matchOldValueSpecList._TYPE
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { System } from "../../Interfaces/System"
|
||||
import { Effects } from "../../Models/Effects"
|
||||
import { T, utils } from "@start9labs/start-sdk"
|
||||
import { Optional } from "ts-matches/lib/parsers/interfaces"
|
||||
import { ExtendedVersion, T, utils, VersionRange } from "@start9labs/start-sdk"
|
||||
|
||||
export const STARTOS_JS_LOCATION = "/usr/lib/startos/package/index.js"
|
||||
|
||||
@@ -11,6 +10,7 @@ type RunningMain = {
|
||||
|
||||
export class SystemForStartOs implements System {
|
||||
private runningMain: RunningMain | undefined
|
||||
private starting: boolean = false
|
||||
|
||||
static of() {
|
||||
return new SystemForStartOs(require(STARTOS_JS_LOCATION))
|
||||
@@ -19,22 +19,23 @@ export class SystemForStartOs implements System {
|
||||
constructor(readonly abi: T.ABI) {
|
||||
this
|
||||
}
|
||||
async containerInit(effects: Effects): Promise<void> {
|
||||
return void (await this.abi.containerInit({ effects }))
|
||||
}
|
||||
async packageInit(
|
||||
|
||||
async init(
|
||||
effects: Effects,
|
||||
kind: "install" | "update" | "restore" | null,
|
||||
): Promise<void> {
|
||||
return void (await this.abi.init({ effects, kind }))
|
||||
}
|
||||
|
||||
async exit(
|
||||
effects: Effects,
|
||||
target: ExtendedVersion | VersionRange | null,
|
||||
timeoutMs: number | null = null,
|
||||
): Promise<void> {
|
||||
return void (await this.abi.packageInit({ effects }))
|
||||
}
|
||||
async packageUninit(
|
||||
effects: Effects,
|
||||
nextVersion: Optional<string> = null,
|
||||
timeoutMs: number | null = null,
|
||||
): Promise<void> {
|
||||
return void (await this.abi.packageUninit({ effects, nextVersion }))
|
||||
await this.stop()
|
||||
return void (await this.abi.uninit({ effects, target }))
|
||||
}
|
||||
|
||||
async createBackup(
|
||||
effects: T.Effects,
|
||||
timeoutMs: number | null,
|
||||
@@ -43,14 +44,6 @@ export class SystemForStartOs implements System {
|
||||
effects,
|
||||
}))
|
||||
}
|
||||
async restoreBackup(
|
||||
effects: T.Effects,
|
||||
timeoutMs: number | null,
|
||||
): Promise<void> {
|
||||
return void (await this.abi.restoreBackup({
|
||||
effects,
|
||||
}))
|
||||
}
|
||||
getActionInput(
|
||||
effects: Effects,
|
||||
id: string,
|
||||
@@ -71,28 +64,27 @@ export class SystemForStartOs implements System {
|
||||
return action.run({ effects, input })
|
||||
}
|
||||
|
||||
async exit(): Promise<void> {}
|
||||
|
||||
async start(effects: Effects): Promise<void> {
|
||||
if (this.runningMain) return
|
||||
effects.constRetry = utils.once(() => effects.restart())
|
||||
let mainOnTerm: () => Promise<void> | undefined
|
||||
const started = async (onTerm: () => Promise<void>) => {
|
||||
await effects.setMainStatus({ status: "running" })
|
||||
mainOnTerm = onTerm
|
||||
return null
|
||||
}
|
||||
const daemons = await (
|
||||
await this.abi.main({
|
||||
effects,
|
||||
started,
|
||||
try {
|
||||
if (this.runningMain || this.starting) return
|
||||
this.starting = true
|
||||
effects.constRetry = utils.once(() => {
|
||||
console.debug(".const() triggered")
|
||||
effects.restart()
|
||||
})
|
||||
).build()
|
||||
this.runningMain = {
|
||||
stop: async () => {
|
||||
if (mainOnTerm) await mainOnTerm()
|
||||
await daemons.term()
|
||||
},
|
||||
let mainOnTerm: () => Promise<void> | undefined
|
||||
const daemons = await (
|
||||
await this.abi.main({
|
||||
effects,
|
||||
})
|
||||
).build()
|
||||
this.runningMain = {
|
||||
stop: async () => {
|
||||
await daemons.term()
|
||||
},
|
||||
}
|
||||
} finally {
|
||||
this.starting = false
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { types as T } from "@start9labs/start-sdk"
|
||||
import {
|
||||
ExtendedVersion,
|
||||
types as T,
|
||||
VersionRange,
|
||||
} from "@start9labs/start-sdk"
|
||||
import { Effects } from "../Models/Effects"
|
||||
import { CallbackHolder } from "../Models/CallbackHolder"
|
||||
import { Optional } from "ts-matches/lib/parsers/interfaces"
|
||||
|
||||
export type Procedure =
|
||||
| "/packageInit"
|
||||
| "/packageUninit"
|
||||
| "/backup/create"
|
||||
| "/backup/restore"
|
||||
| `/actions/${string}/getInput`
|
||||
| `/actions/${string}/run`
|
||||
|
||||
@@ -15,20 +15,15 @@ export type ExecuteResult =
|
||||
| { ok: unknown }
|
||||
| { err: { code: number; message: string } }
|
||||
export type System = {
|
||||
containerInit(effects: T.Effects): Promise<void>
|
||||
init(
|
||||
effects: T.Effects,
|
||||
kind: "install" | "update" | "restore" | null,
|
||||
): Promise<void>
|
||||
|
||||
start(effects: T.Effects): Promise<void>
|
||||
stop(): Promise<void>
|
||||
|
||||
packageInit(effects: Effects, timeoutMs: number | null): Promise<void>
|
||||
packageUninit(
|
||||
effects: Effects,
|
||||
nextVersion: Optional<string>,
|
||||
timeoutMs: number | null,
|
||||
): Promise<void>
|
||||
|
||||
createBackup(effects: T.Effects, timeoutMs: number | null): Promise<void>
|
||||
restoreBackup(effects: T.Effects, timeoutMs: number | null): Promise<void>
|
||||
runAction(
|
||||
effects: Effects,
|
||||
actionId: string,
|
||||
@@ -41,7 +36,10 @@ export type System = {
|
||||
timeoutMs: number | null,
|
||||
): Promise<T.ActionInput | null>
|
||||
|
||||
exit(): Promise<void>
|
||||
exit(
|
||||
effects: Effects,
|
||||
target: ExtendedVersion | VersionRange | null,
|
||||
): Promise<void>
|
||||
}
|
||||
|
||||
export type RunningMain = {
|
||||
|
||||
@@ -14,7 +14,8 @@ export class CallbackHolder {
|
||||
constructor(private effects?: T.Effects) {}
|
||||
|
||||
private callbacks = new Map<number, Function>()
|
||||
private children: WeakRef<CallbackHolder>[] = []
|
||||
private onLeaveContextCallbacks: Function[] = []
|
||||
private children: Map<string, CallbackHolder> = new Map()
|
||||
private newId() {
|
||||
return CallbackIdCell.inc++
|
||||
}
|
||||
@@ -32,23 +33,30 @@ export class CallbackHolder {
|
||||
})
|
||||
return id
|
||||
}
|
||||
child(): CallbackHolder {
|
||||
const child = new CallbackHolder()
|
||||
this.children.push(new WeakRef(child))
|
||||
child(name: string): CallbackHolder {
|
||||
this.removeChild(name)
|
||||
const child = new CallbackHolder(this.effects)
|
||||
this.children.set(name, child)
|
||||
return child
|
||||
}
|
||||
removeChild(child: CallbackHolder) {
|
||||
this.children = this.children.filter((c) => {
|
||||
const ref = c.deref()
|
||||
return ref && ref !== child
|
||||
})
|
||||
|
||||
getChild(name: string): CallbackHolder | null {
|
||||
return this.children.get(name) || null
|
||||
}
|
||||
|
||||
removeChild(name: string) {
|
||||
const child = this.children.get(name)
|
||||
if (child) {
|
||||
child.leaveContext()
|
||||
this.children.delete(name)
|
||||
}
|
||||
}
|
||||
private getCallback(index: number): Function | undefined {
|
||||
let callback = this.callbacks.get(index)
|
||||
if (callback) this.callbacks.delete(index)
|
||||
else {
|
||||
for (let i = 0; i < this.children.length; i++) {
|
||||
callback = this.children[i].deref()?.getCallback(index)
|
||||
for (let [_, child] of this.children) {
|
||||
callback = child.getCallback(index)
|
||||
if (callback) return callback
|
||||
}
|
||||
}
|
||||
@@ -57,6 +65,25 @@ export class CallbackHolder {
|
||||
callCallback(index: number, args: any[]): Promise<unknown> {
|
||||
const callback = this.getCallback(index)
|
||||
if (!callback) return Promise.resolve()
|
||||
return Promise.resolve().then(() => callback(...args))
|
||||
return Promise.resolve()
|
||||
.then(() => callback(...args))
|
||||
.catch((e) => console.error("callback failed", e))
|
||||
}
|
||||
onLeaveContext(fn: Function) {
|
||||
this.onLeaveContextCallbacks.push(fn)
|
||||
}
|
||||
leaveContext() {
|
||||
for (let [_, child] of this.children) {
|
||||
child.leaveContext()
|
||||
}
|
||||
this.children = new Map()
|
||||
for (let fn of this.onLeaveContextCallbacks) {
|
||||
try {
|
||||
fn()
|
||||
} catch (e) {
|
||||
console.warn(e)
|
||||
}
|
||||
}
|
||||
this.onLeaveContextCallbacks = []
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,31 +17,25 @@ const Path = string
|
||||
|
||||
export type VolumeId = string
|
||||
export type Path = string
|
||||
export const matchDockerProcedure = object(
|
||||
{
|
||||
type: literal("docker"),
|
||||
image: string,
|
||||
system: boolean,
|
||||
entrypoint: string,
|
||||
args: array(string),
|
||||
mounts: dictionary([VolumeId, Path]),
|
||||
"io-format": literals(
|
||||
"json",
|
||||
"json-pretty",
|
||||
"yaml",
|
||||
"cbor",
|
||||
"toml",
|
||||
"toml-pretty",
|
||||
),
|
||||
"sigterm-timeout": some(number, matchDuration),
|
||||
inject: boolean,
|
||||
},
|
||||
["io-format", "sigterm-timeout", "system", "args", "inject", "mounts"],
|
||||
{
|
||||
"sigterm-timeout": 30,
|
||||
inject: false,
|
||||
args: [],
|
||||
},
|
||||
)
|
||||
export const matchDockerProcedure = object({
|
||||
type: literal("docker"),
|
||||
image: string,
|
||||
system: boolean.optional(),
|
||||
entrypoint: string,
|
||||
args: array(string).defaultTo([]),
|
||||
mounts: dictionary([VolumeId, Path]).optional(),
|
||||
"io-format": literals(
|
||||
"json",
|
||||
"json-pretty",
|
||||
"yaml",
|
||||
"cbor",
|
||||
"toml",
|
||||
"toml-pretty",
|
||||
)
|
||||
.nullable()
|
||||
.optional(),
|
||||
"sigterm-timeout": some(number, matchDuration).onMismatch(30),
|
||||
inject: boolean.defaultTo(false),
|
||||
})
|
||||
|
||||
export type DockerProcedure = typeof matchDockerProcedure._TYPE
|
||||
|
||||
@@ -7,6 +7,12 @@ const getDependencies: AllGetDependencies = {
|
||||
system: getSystem,
|
||||
}
|
||||
|
||||
for (let s of ["SIGTERM", "SIGINT", "SIGHUP"]) {
|
||||
process.on(s, (s) => {
|
||||
console.log(`Caught ${s}`)
|
||||
})
|
||||
}
|
||||
|
||||
new RpcListener(getDependencies)
|
||||
|
||||
/**
|
||||
|
||||
21
container-runtime/update-image-local.sh
Executable file
21
container-runtime/update-image-local.sh
Executable file
@@ -0,0 +1,21 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")/.."
|
||||
|
||||
USE_TTY=
|
||||
if tty -s; then
|
||||
USE_TTY="-it"
|
||||
fi
|
||||
|
||||
DOCKER_PLATFORM=linux/${ARCH}
|
||||
case $ARCH in
|
||||
x86_64)
|
||||
DOCKER_PLATFORM=linux/amd64;;
|
||||
aarch64)
|
||||
DOCKER_PLATFORM=linux/arm64;;
|
||||
esac
|
||||
|
||||
docker run --rm $USE_TTY --platform=$DOCKER_PLATFORM -eARCH --privileged -v "$(pwd):/root/start-os" start9/build-env /root/start-os/container-runtime/update-image.sh
|
||||
if [ "$(ls -nd "rootfs.${ARCH}.squashfs" | awk '{ print $3 }')" != "$UID" ]; then
|
||||
docker run --rm $USE_TTY -v "$(pwd):/root/start-os" start9/build-env chown -R $UID:$UID /root/start-os/container-runtime
|
||||
fi
|
||||
@@ -4,48 +4,39 @@ cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
set -e
|
||||
|
||||
if mountpoint tmp/combined; then sudo umount -R tmp/combined; fi
|
||||
if mountpoint tmp/lower; then sudo umount tmp/lower; fi
|
||||
sudo rm -rf tmp
|
||||
mkdir -p tmp/lower tmp/upper tmp/work tmp/combined
|
||||
if which squashfuse > /dev/null; then
|
||||
sudo squashfuse debian.${ARCH}.squashfs tmp/lower
|
||||
else
|
||||
sudo mount debian.${ARCH}.squashfs tmp/lower
|
||||
fi
|
||||
sudo mount -t overlay -olowerdir=tmp/lower,upperdir=tmp/upper,workdir=tmp/work overlay tmp/combined
|
||||
|
||||
QEMU=
|
||||
if [ "$ARCH" != "$(uname -m)" ]; then
|
||||
QEMU=/usr/bin/qemu-${ARCH}-static
|
||||
if ! which qemu-$ARCH-static > /dev/null; then
|
||||
>&2 echo qemu-user-static is required for cross-platform builds
|
||||
sudo umount tmp/combined
|
||||
sudo umount tmp/lower
|
||||
sudo rm -rf tmp
|
||||
exit 1
|
||||
fi
|
||||
sudo cp $(which qemu-$ARCH-static) tmp/combined${QEMU}
|
||||
RUST_ARCH="$ARCH"
|
||||
if [ "$ARCH" = "riscv64" ]; then
|
||||
RUST_ARCH="riscv64gc"
|
||||
fi
|
||||
|
||||
sudo mkdir -p tmp/combined/usr/lib/startos/
|
||||
sudo rsync -a --copy-unsafe-links dist/ tmp/combined/usr/lib/startos/init/
|
||||
sudo chown -R 0:0 tmp/combined/usr/lib/startos/
|
||||
sudo cp container-runtime.service tmp/combined/lib/systemd/system/container-runtime.service
|
||||
sudo chown 0:0 tmp/combined/lib/systemd/system/container-runtime.service
|
||||
sudo cp ../core/target/$ARCH-unknown-linux-musl/release/containerbox tmp/combined/usr/bin/start-cli
|
||||
sudo chown 0:0 tmp/combined/usr/bin/start-cli
|
||||
echo container-runtime | sha256sum | head -c 32 | cat - <(echo) | sudo tee tmp/combined/etc/machine-id
|
||||
cat deb-install.sh | sudo systemd-nspawn --console=pipe -D tmp/combined $QEMU /bin/bash
|
||||
sudo truncate -s 0 tmp/combined/etc/machine-id
|
||||
mount -t tmpfs tmpfs /tmp
|
||||
mkdir -p /tmp/lower /tmp/upper /tmp/work /tmp/combined
|
||||
mount -o loop debian.${ARCH}.squashfs /tmp/lower
|
||||
mount -t overlay -olowerdir=/tmp/lower,upperdir=/tmp/upper,workdir=/tmp/work overlay /tmp/combined
|
||||
|
||||
if [ -n "$QEMU" ]; then
|
||||
sudo rm tmp/combined${QEMU}
|
||||
fi
|
||||
mkdir -p /tmp/combined/usr/lib/startos/
|
||||
rsync -a --copy-unsafe-links --info=progress2 dist/ /tmp/combined/usr/lib/startos/init/
|
||||
chown -R 0:0 /tmp/combined/usr/lib/startos/
|
||||
cp container-runtime.service /tmp/combined/lib/systemd/system/container-runtime.service
|
||||
chown 0:0 /tmp/combined/lib/systemd/system/container-runtime.service
|
||||
cp container-runtime-failure.service /tmp/combined/lib/systemd/system/container-runtime-failure.service
|
||||
chown 0:0 /tmp/combined/lib/systemd/system/container-runtime-failure.service
|
||||
cp ../core/target/${RUST_ARCH}-unknown-linux-musl/release/start-container /tmp/combined/usr/bin/start-container
|
||||
echo -e '#!/bin/bash\nexec start-container "$@"' > /tmp/combined/usr/bin/start-cli # TODO: remove
|
||||
chmod +x /tmp/combined/usr/bin/start-cli
|
||||
chown 0:0 /tmp/combined/usr/bin/start-container
|
||||
echo container-runtime | sha256sum | head -c 32 | cat - <(echo) > /tmp/combined/etc/machine-id
|
||||
rm -f /tmp/combined/etc/resolv.conf
|
||||
cp /etc/resolv.conf /tmp/combined/etc/resolv.conf
|
||||
for fs in proc sys dev; do
|
||||
mount --bind /$fs /tmp/combined/$fs
|
||||
done
|
||||
cat deb-install.sh | chroot /tmp/combined /bin/bash
|
||||
for fs in proc sys dev; do
|
||||
umount /tmp/combined/$fs
|
||||
done
|
||||
truncate -s 0 /tmp/combined/etc/machine-id
|
||||
|
||||
rm -f rootfs.${ARCH}.squashfs
|
||||
mkdir -p ../build/lib/container-runtime
|
||||
sudo mksquashfs tmp/combined rootfs.${ARCH}.squashfs
|
||||
sudo umount tmp/combined
|
||||
sudo umount tmp/lower
|
||||
sudo rm -rf tmp
|
||||
mksquashfs /tmp/combined rootfs.${ARCH}.squashfs
|
||||
2
core/.gitignore
vendored
2
core/.gitignore
vendored
@@ -8,4 +8,4 @@ secrets.db
|
||||
.env
|
||||
.editorconfig
|
||||
proptest-regressions/**/*
|
||||
/startos/bindings/*
|
||||
/bindings/*
|
||||
7496
core/Cargo.lock
generated
7496
core/Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
293
core/Cargo.toml
293
core/Cargo.toml
@@ -1,3 +1,292 @@
|
||||
[workspace]
|
||||
[package]
|
||||
authors = ["Aiden McClelland <me@drbonez.dev>"]
|
||||
description = "The core of StartOS"
|
||||
documentation = "https://docs.rs/start-os"
|
||||
edition = "2024"
|
||||
keywords = [
|
||||
"bitcoin",
|
||||
"full-node",
|
||||
"lightning",
|
||||
"privacy",
|
||||
"raspberry-pi",
|
||||
"self-hosted",
|
||||
]
|
||||
license = "MIT"
|
||||
name = "start-os"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/Start9Labs/start-os"
|
||||
version = "0.4.0-alpha.19" # VERSION_BUMP
|
||||
|
||||
members = ["helpers", "models", "startos"]
|
||||
[lib]
|
||||
name = "startos"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "startbox"
|
||||
path = "src/main/startbox.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "start-cli"
|
||||
path = "src/main/start-cli.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "start-container"
|
||||
path = "src/main/start-container.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "registrybox"
|
||||
path = "src/main/registrybox.rs"
|
||||
|
||||
[[bin]]
|
||||
name = "tunnelbox"
|
||||
path = "src/main/tunnelbox.rs"
|
||||
|
||||
[features]
|
||||
arti = [
|
||||
"arti-client",
|
||||
"safelog",
|
||||
"tor-cell",
|
||||
"tor-hscrypto",
|
||||
"tor-hsservice",
|
||||
"tor-keymgr",
|
||||
"tor-llcrypto",
|
||||
"tor-proto",
|
||||
"tor-rtcompat",
|
||||
]
|
||||
beta = []
|
||||
console = ["console-subscriber", "tokio/tracing"]
|
||||
default = []
|
||||
dev = []
|
||||
test = []
|
||||
unstable = ["backtrace-on-stack-overflow"]
|
||||
|
||||
[dependencies]
|
||||
aes = { version = "0.7.5", features = ["ctr"] }
|
||||
arti-client = { version = "0.33", features = [
|
||||
"compression",
|
||||
"ephemeral-keystore",
|
||||
"experimental-api",
|
||||
"onion-service-client",
|
||||
"onion-service-service",
|
||||
"rustls",
|
||||
"static",
|
||||
"tokio",
|
||||
], default-features = false, git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
||||
async-acme = { version = "0.6.0", git = "https://github.com/dr-bonez/async-acme.git", features = [
|
||||
"use_rustls",
|
||||
"use_tokio",
|
||||
] }
|
||||
async-compression = { version = "0.4.32", features = [
|
||||
"brotli",
|
||||
"gzip",
|
||||
"tokio",
|
||||
"zstd",
|
||||
] }
|
||||
async-stream = "0.3.5"
|
||||
async-trait = "0.1.74"
|
||||
axum = { version = "0.8.4", features = ["ws", "http2"] }
|
||||
backtrace-on-stack-overflow = { version = "0.3.0", optional = true }
|
||||
base32 = "0.5.0"
|
||||
base64 = "0.22.1"
|
||||
base64ct = "1.6.0"
|
||||
basic-cookies = "0.1.4"
|
||||
blake3 = { version = "1.5.0", features = ["mmap", "rayon"] }
|
||||
bytes = "1"
|
||||
chrono = { version = "0.4.31", features = ["serde"] }
|
||||
clap = { version = "4.4.12", features = ["string"] }
|
||||
color-eyre = "0.6.2"
|
||||
console = "0.16.2"
|
||||
console-subscriber = { version = "0.5.0", optional = true }
|
||||
const_format = "0.2.34"
|
||||
cookie = "0.18.0"
|
||||
cookie_store = "0.22.0"
|
||||
curve25519-dalek = "4.1.3"
|
||||
der = { version = "0.7.9", features = ["derive", "pem"] }
|
||||
digest = "0.10.7"
|
||||
divrem = "1.0.0"
|
||||
dns-lookup = "3.0.1"
|
||||
ed25519 = { version = "2.2.3", features = ["alloc", "pem", "pkcs8"] }
|
||||
ed25519-dalek = { version = "2.2.0", features = [
|
||||
"digest",
|
||||
"hazmat",
|
||||
"pkcs8",
|
||||
"rand_core",
|
||||
"serde",
|
||||
"zeroize",
|
||||
] }
|
||||
ed25519-dalek-v1 = { package = "ed25519-dalek", version = "1" }
|
||||
exver = { version = "0.2.0", git = "https://github.com/Start9Labs/exver-rs.git", features = [
|
||||
"serde",
|
||||
] }
|
||||
fd-lock-rs = "0.1.4"
|
||||
form_urlencoded = "1.2.1"
|
||||
futures = "0.3.28"
|
||||
gpt = "4.1.0"
|
||||
hex = "0.4.3"
|
||||
hickory-server = { version = "0.25.2", features = ["resolver"] }
|
||||
hmac = "0.12.1"
|
||||
http = "1.0.0"
|
||||
http-body-util = "0.1"
|
||||
hyper = { version = "1.5", features = ["http1", "http2", "server"] }
|
||||
hyper-util = { version = "0.1.10", features = [
|
||||
"http1",
|
||||
"http2",
|
||||
"server",
|
||||
"server-auto",
|
||||
"server-graceful",
|
||||
"service",
|
||||
"tokio",
|
||||
] }
|
||||
id-pool = { version = "0.2.2", default-features = false, features = [
|
||||
"serde",
|
||||
"u16",
|
||||
] }
|
||||
iddqd = "0.3.14"
|
||||
imbl = { version = "6", features = ["serde", "small-chunks"] }
|
||||
imbl-value = { version = "0.4.3", features = ["ts-rs"] }
|
||||
include_dir = { version = "0.7.3", features = ["metadata"] }
|
||||
indexmap = { version = "2.0.2", features = ["serde"] }
|
||||
indicatif = { version = "0.18.3", features = ["tokio"] }
|
||||
inotify = "0.11.0"
|
||||
integer-encoding = { version = "4.0.0", features = ["tokio_async"] }
|
||||
ipnet = { version = "2.8.0", features = ["serde"] }
|
||||
isocountry = "0.3.2"
|
||||
itertools = "0.14.0"
|
||||
jaq-core = "0.10.1"
|
||||
jaq-std = "0.10.0"
|
||||
josekit = "0.10.3"
|
||||
jsonpath_lib = { git = "https://github.com/Start9Labs/jsonpath.git" }
|
||||
lazy_async_pool = "0.3.3"
|
||||
lazy_format = "2.0"
|
||||
lazy_static = "1.4.0"
|
||||
lettre = { version = "0.11.18", default-features = false, features = [
|
||||
"aws-lc-rs",
|
||||
"builder",
|
||||
"hostname",
|
||||
"pool",
|
||||
"rustls-platform-verifier",
|
||||
"smtp-transport",
|
||||
"tokio1-rustls",
|
||||
] }
|
||||
libc = "0.2.149"
|
||||
log = "0.4.20"
|
||||
mbrman = "0.6.0"
|
||||
miette = { version = "7.6.0", features = ["fancy"] }
|
||||
mio = "1"
|
||||
new_mime_guess = "4"
|
||||
nix = { version = "0.30.1", features = [
|
||||
"fs",
|
||||
"hostname",
|
||||
"mount",
|
||||
"net",
|
||||
"process",
|
||||
"sched",
|
||||
"signal",
|
||||
"user",
|
||||
] }
|
||||
nom = "8.0.0"
|
||||
num = "0.4.1"
|
||||
num_cpus = "1.16.0"
|
||||
num_enum = "0.7.0"
|
||||
once_cell = "1.19.0"
|
||||
openssh-keys = "0.6.2"
|
||||
openssl = { version = "0.10.57", features = ["vendored"] }
|
||||
p256 = { version = "0.13.2", features = ["pem"] }
|
||||
patch-db = { version = "*", path = "../patch-db/patch-db", features = [
|
||||
"trace",
|
||||
] }
|
||||
pbkdf2 = "0.12.2"
|
||||
pin-project = "1.1.3"
|
||||
pkcs8 = { version = "0.10.2", features = ["std"] }
|
||||
prettytable-rs = "0.10.0"
|
||||
proptest = "1.3.1"
|
||||
proptest-derive = "0.7.0"
|
||||
qrcode = "0.14.1"
|
||||
r3bl_tui = "0.7.6"
|
||||
rand = "0.9.2"
|
||||
regex = "1.10.2"
|
||||
reqwest = { version = "0.12.25", features = [
|
||||
"json",
|
||||
"socks",
|
||||
"stream",
|
||||
"http2",
|
||||
] }
|
||||
reqwest_cookie_store = "0.9.0"
|
||||
rpassword = "7.2.0"
|
||||
rust-argon2 = "3.0.0"
|
||||
rust-i18n = "3.1.5"
|
||||
rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git" }
|
||||
safelog = { version = "0.4.8", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
||||
semver = { version = "1.0.20", features = ["serde"] }
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
serde_cbor = { package = "ciborium", version = "0.2.1" }
|
||||
serde_json = "1.0"
|
||||
serde_toml = { package = "toml", version = "0.9.9+spec-1.0.0" }
|
||||
serde_yaml = { package = "serde_yml", version = "0.0.12" }
|
||||
sha-crypt = "0.5.0"
|
||||
sha2 = "0.10.2"
|
||||
signal-hook = "0.3.17"
|
||||
socket2 = { version = "0.6.0", features = ["all"] }
|
||||
socks5-impl = { version = "0.7.2", features = ["client", "server"] }
|
||||
sqlx = { version = "0.8.6", features = [
|
||||
"postgres",
|
||||
"runtime-tokio-rustls",
|
||||
], default-features = false }
|
||||
sscanf = "0.4.1"
|
||||
ssh-key = { version = "0.6.2", features = ["ed25519"] }
|
||||
tar = "0.4.40"
|
||||
termion = "4.0.5"
|
||||
textwrap = "0.16.1"
|
||||
thiserror = "2.0.12"
|
||||
tokio = { version = "1.38.1", features = ["full"] }
|
||||
tokio-rustls = "0.26.4"
|
||||
tokio-stream = { version = "0.1.14", features = ["io-util", "net", "sync"] }
|
||||
tokio-tar = { git = "https://github.com/dr-bonez/tokio-tar.git" }
|
||||
tokio-tungstenite = { version = "0.26.2", features = ["native-tls", "url"] }
|
||||
tokio-util = { version = "0.7.9", features = ["io"] }
|
||||
tor-cell = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
||||
tor-hscrypto = { version = "0.33", features = [
|
||||
"full",
|
||||
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
||||
tor-hsservice = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
||||
tor-keymgr = { version = "0.33", features = [
|
||||
"ephemeral-keystore",
|
||||
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
||||
tor-llcrypto = { version = "0.33", features = [
|
||||
"full",
|
||||
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
||||
tor-proto = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
||||
tor-rtcompat = { version = "0.33", features = [
|
||||
"rustls",
|
||||
"tokio",
|
||||
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
||||
torut = "0.2.1"
|
||||
tower-service = "0.3.3"
|
||||
tracing = "0.1.39"
|
||||
tracing-error = "0.2.0"
|
||||
tracing-journald = "0.3.0"
|
||||
tracing-subscriber = { version = "=0.3.19", features = ["env-filter"] }
|
||||
ts-rs = "9.0.1"
|
||||
typed-builder = "0.23.2"
|
||||
url = { version = "2.4.1", features = ["serde"] }
|
||||
uuid = { version = "1.4.1", features = ["v4"] }
|
||||
visit-rs = "0.1.1"
|
||||
x25519-dalek = { version = "2.0.1", features = ["static_secrets"] }
|
||||
zbus = "5.1.1"
|
||||
hashing-serializer = "0.1.1"
|
||||
|
||||
[target.'cfg(target_os = "linux")'.dependencies]
|
||||
procfs = "0.18.0"
|
||||
pty-process = "0.5.1"
|
||||
|
||||
[profile.test]
|
||||
opt-level = 3
|
||||
|
||||
[profile.dev]
|
||||
opt-level = 3
|
||||
|
||||
[profile.dev.package.backtrace]
|
||||
opt-level = 3
|
||||
|
||||
[profile.dev.package.sqlx-macros]
|
||||
opt-level = 3
|
||||
|
||||
2
core/Cross.toml
Normal file
2
core/Cross.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[build]
|
||||
pre-build = ["apt-get update && apt-get install -y rsync"]
|
||||
@@ -14,7 +14,7 @@
|
||||
## Artifacts
|
||||
|
||||
The StartOS backend is packed into a single binary `startbox` that is symlinked under
|
||||
several different names for different behaviour:
|
||||
several different names for different behavior:
|
||||
|
||||
- `startd`: This is the main daemon of StartOS
|
||||
- `start-cli`: This is a CLI tool that will allow you to issue commands to
|
||||
|
||||
@@ -1,54 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
set -ea
|
||||
shopt -s expand_aliases
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
if [ "$ARCH" = "arm64" ]; then
|
||||
ARCH="aarch64"
|
||||
fi
|
||||
|
||||
if [ -z "$KERNEL_NAME" ]; then
|
||||
KERNEL_NAME=$(uname -s)
|
||||
fi
|
||||
|
||||
if [ -z "$TARGET" ]; then
|
||||
if [ "$KERNEL_NAME" = "Linux" ]; then
|
||||
TARGET="$ARCH-unknown-linux-musl"
|
||||
elif [ "$KERNEL_NAME" = "Darwin" ]; then
|
||||
TARGET="$ARCH-apple-darwin"
|
||||
else
|
||||
>&2 echo "unknown kernel $KERNEL_NAME"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
USE_TTY=
|
||||
if tty -s; then
|
||||
USE_TTY="-it"
|
||||
fi
|
||||
|
||||
cd ..
|
||||
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
|
||||
RUSTFLAGS=""
|
||||
|
||||
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
|
||||
RUSTFLAGS="--cfg tokio_unstable"
|
||||
fi
|
||||
|
||||
if which zig > /dev/null && [ "$ENFORCE_USE_DOCKER" != 1 ]; do
|
||||
echo "FEATURES=\"$FEATURES\""
|
||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||
RUSTFLAGS=$RUSTFLAGS sh -c "cd core && cargo zigbuild --release --no-default-features --features cli,$FEATURES --locked --bin start-cli --target=$TARGET"
|
||||
else
|
||||
alias 'rust-zig-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/cargo-zigbuild'
|
||||
RUSTFLAGS=$RUSTFLAGS rust-zig-builder sh -c "cd core && cargo zigbuild --release --no-default-features --features cli,$FEATURES --locked --bin start-cli --target=$TARGET"
|
||||
|
||||
if [ "$(ls -nd core/target/$TARGET/release/start-cli | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-zig-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
|
||||
fi
|
||||
fi
|
||||
@@ -1,36 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
set -ea
|
||||
shopt -s expand_aliases
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "arm64" ]; then
|
||||
ARCH="aarch64"
|
||||
fi
|
||||
|
||||
USE_TTY=
|
||||
if tty -s; then
|
||||
USE_TTY="-it"
|
||||
fi
|
||||
|
||||
cd ..
|
||||
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
|
||||
RUSTFLAGS=""
|
||||
|
||||
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
|
||||
RUSTFLAGS="--cfg tokio_unstable"
|
||||
fi
|
||||
|
||||
alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl'
|
||||
|
||||
echo "FEATURES=\"$FEATURES\""
|
||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||
rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features container-runtime,$FEATURES --locked --bin containerbox --target=$ARCH-unknown-linux-musl"
|
||||
if [ "$(ls -nd core/target/$ARCH-unknown-linux-musl/release/containerbox | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-musl-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
|
||||
fi
|
||||
@@ -1,36 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
set -ea
|
||||
shopt -s expand_aliases
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "arm64" ]; then
|
||||
ARCH="aarch64"
|
||||
fi
|
||||
|
||||
USE_TTY=
|
||||
if tty -s; then
|
||||
USE_TTY="-it"
|
||||
fi
|
||||
|
||||
cd ..
|
||||
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
|
||||
RUSTFLAGS=""
|
||||
|
||||
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
|
||||
RUSTFLAGS="--cfg tokio_unstable"
|
||||
fi
|
||||
|
||||
alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl'
|
||||
|
||||
echo "FEATURES=\"$FEATURES\""
|
||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||
rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features cli,registry,$FEATURES --locked --bin registrybox --target=$ARCH-unknown-linux-musl"
|
||||
if [ "$(ls -nd core/target/$ARCH-unknown-linux-musl/release/registrybox | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-musl-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
|
||||
fi
|
||||
@@ -1,36 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
set -ea
|
||||
shopt -s expand_aliases
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "arm64" ]; then
|
||||
ARCH="aarch64"
|
||||
fi
|
||||
|
||||
USE_TTY=
|
||||
if tty -s; then
|
||||
USE_TTY="-it"
|
||||
fi
|
||||
|
||||
cd ..
|
||||
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
|
||||
RUSTFLAGS=""
|
||||
|
||||
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
|
||||
RUSTFLAGS="--cfg tokio_unstable"
|
||||
fi
|
||||
|
||||
alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl'
|
||||
|
||||
echo "FEATURES=\"$FEATURES\""
|
||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||
rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features cli,daemon,$FEATURES --locked --bin startbox --target=$ARCH-unknown-linux-musl"
|
||||
if [ "$(ls -nd core/target/$ARCH-unknown-linux-musl/release/startbox | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-musl-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
|
||||
fi
|
||||
@@ -1,36 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
set -ea
|
||||
shopt -s expand_aliases
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "arm64" ]; then
|
||||
ARCH="aarch64"
|
||||
fi
|
||||
|
||||
USE_TTY=
|
||||
if tty -s; then
|
||||
USE_TTY="-it"
|
||||
fi
|
||||
|
||||
cd ..
|
||||
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
|
||||
RUSTFLAGS=""
|
||||
|
||||
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
|
||||
RUSTFLAGS="--cfg tokio_unstable"
|
||||
fi
|
||||
|
||||
alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl'
|
||||
|
||||
echo "FEATURES=\"$FEATURES\""
|
||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||
rust-musl-builder sh -c "cd core && cargo test --release --features=test,$FEATURES 'export_bindings_' && chown \$UID:\$UID startos/bindings"
|
||||
if [ "$(ls -nd core/startos/bindings | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-musl-builder sh -c "cd core && chown -R $UID:$UID startos/bindings && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
|
||||
fi
|
||||
79
core/build/build-cli.sh
Executable file
79
core/build/build-cli.sh
Executable file
@@ -0,0 +1,79 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
source ./builder-alias.sh
|
||||
|
||||
set -ea
|
||||
|
||||
INSTALL=false
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case $1 in
|
||||
--install)
|
||||
INSTALL=true
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
>&2 echo "Unknown option: $1"
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
shopt -s expand_aliases
|
||||
|
||||
PROFILE=${PROFILE:-release}
|
||||
if [ "${PROFILE}" = "release" ]; then
|
||||
BUILD_FLAGS="--release"
|
||||
else
|
||||
if [ "$PROFILE" != "debug" ]; then
|
||||
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
|
||||
PROFILE=debug
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "${ARCH:-}" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "arm64" ]; then
|
||||
ARCH="aarch64"
|
||||
fi
|
||||
|
||||
RUST_ARCH="$ARCH"
|
||||
if [ "$ARCH" = "riscv64" ]; then
|
||||
RUST_ARCH="riscv64gc"
|
||||
fi
|
||||
|
||||
if [ -z "${KERNEL_NAME:-}" ]; then
|
||||
KERNEL_NAME=$(uname -s)
|
||||
fi
|
||||
|
||||
if [ -z "${TARGET:-}" ]; then
|
||||
if [ "$KERNEL_NAME" = "Linux" ]; then
|
||||
TARGET="$RUST_ARCH-unknown-linux-musl"
|
||||
elif [ "$KERNEL_NAME" = "Darwin" ]; then
|
||||
TARGET="$RUST_ARCH-apple-darwin"
|
||||
else
|
||||
>&2 echo "unknown kernel $KERNEL_NAME"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
cd ../..
|
||||
FEATURES="$(echo "${ENVIRONMENT:-}" | sed 's/-/,/g')"
|
||||
RUSTFLAGS=""
|
||||
if [[ "${ENVIRONMENT:-}" =~ (^|-)console($|-) ]]; then
|
||||
RUSTFLAGS="--cfg tokio_unstable"
|
||||
fi
|
||||
|
||||
echo "FEATURES=\"$FEATURES\""
|
||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin start-cli --target=$TARGET
|
||||
if [ "$(ls -nd "core/target/$TARGET/$PROFILE/start-cli" | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-zig-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /usr/local/cargo"
|
||||
fi
|
||||
|
||||
if [ "$INSTALL" = "true" ]; then
|
||||
cp "core/target/$TARGET/$PROFILE/start-cli" ~/.cargo/bin/start-cli
|
||||
fi
|
||||
46
core/build/build-registrybox.sh
Executable file
46
core/build/build-registrybox.sh
Executable file
@@ -0,0 +1,46 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
source ./builder-alias.sh
|
||||
|
||||
set -ea
|
||||
shopt -s expand_aliases
|
||||
|
||||
PROFILE=${PROFILE:-release}
|
||||
if [ "${PROFILE}" = "release" ]; then
|
||||
BUILD_FLAGS="--release"
|
||||
else
|
||||
if [ "$PROFILE" != "debug"]; then
|
||||
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
|
||||
PROFILE=debug
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "arm64" ]; then
|
||||
ARCH="aarch64"
|
||||
fi
|
||||
|
||||
RUST_ARCH="$ARCH"
|
||||
if [ "$ARCH" = "riscv64" ]; then
|
||||
RUST_ARCH="riscv64gc"
|
||||
fi
|
||||
|
||||
cd ../..
|
||||
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
|
||||
RUSTFLAGS=""
|
||||
|
||||
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
|
||||
RUSTFLAGS="--cfg tokio_unstable"
|
||||
fi
|
||||
|
||||
echo "FEATURES=\"$FEATURES\""
|
||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin registrybox --target=$RUST_ARCH-unknown-linux-musl
|
||||
if [ "$(ls -nd "core/target/$RUST_ARCH-unknown-linux-musl/$PROFILE/registrybox" | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID /usr/local/cargo"
|
||||
fi
|
||||
46
core/build/build-start-container.sh
Executable file
46
core/build/build-start-container.sh
Executable file
@@ -0,0 +1,46 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
source ./builder-alias.sh
|
||||
|
||||
set -ea
|
||||
shopt -s expand_aliases
|
||||
|
||||
PROFILE=${PROFILE:-release}
|
||||
if [ "${PROFILE}" = "release" ]; then
|
||||
BUILD_FLAGS="--release"
|
||||
else
|
||||
if [ "$PROFILE" != "debug"]; then
|
||||
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
|
||||
PROFILE=debug
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "arm64" ]; then
|
||||
ARCH="aarch64"
|
||||
fi
|
||||
|
||||
RUST_ARCH="$ARCH"
|
||||
if [ "$ARCH" = "riscv64" ]; then
|
||||
RUST_ARCH="riscv64gc"
|
||||
fi
|
||||
|
||||
cd ../..
|
||||
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
|
||||
RUSTFLAGS=""
|
||||
|
||||
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
|
||||
RUSTFLAGS="--cfg tokio_unstable"
|
||||
fi
|
||||
|
||||
echo "FEATURES=\"$FEATURES\""
|
||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin start-container --target=$RUST_ARCH-unknown-linux-musl
|
||||
if [ "$(ls -nd "core/target/$RUST_ARCH-unknown-linux-musl/$PROFILE/start-container" | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID /usr/local/cargo"
|
||||
fi
|
||||
46
core/build/build-startbox.sh
Executable file
46
core/build/build-startbox.sh
Executable file
@@ -0,0 +1,46 @@
|
||||
#!/bin/bash
|
||||
|
||||
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||
|
||||
source ./builder-alias.sh
|
||||
|
||||
set -ea
|
||||
shopt -s expand_aliases
|
||||
|
||||
PROFILE=${PROFILE:-release}
|
||||
if [ "${PROFILE}" = "release" ]; then
|
||||
BUILD_FLAGS="--release"
|
||||
else
|
||||
if [ "$PROFILE" != "debug"]; then
|
||||
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
|
||||
PROFILE=debug
|
||||
fi
|
||||
fi
|
||||
|
||||
if [ -z "$ARCH" ]; then
|
||||
ARCH=$(uname -m)
|
||||
fi
|
||||
|
||||
if [ "$ARCH" = "arm64" ]; then
|
||||
ARCH="aarch64"
|
||||
fi
|
||||
|
||||
RUST_ARCH="$ARCH"
|
||||
if [ "$ARCH" = "riscv64" ]; then
|
||||
RUST_ARCH="riscv64gc"
|
||||
fi
|
||||
|
||||
cd ../..
|
||||
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
|
||||
RUSTFLAGS=""
|
||||
|
||||
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
|
||||
RUSTFLAGS="--cfg tokio_unstable"
|
||||
fi
|
||||
|
||||
echo "FEATURES=\"$FEATURES\""
|
||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin startbox --target=$RUST_ARCH-unknown-linux-musl
|
||||
if [ "$(ls -nd "core/target/$RUST_ARCH-unknown-linux-musl/$PROFILE/startbox" | awk '{ print $3 }')" != "$UID" ]; then
|
||||
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID /usr/local/cargo"
|
||||
fi
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user