Compare commits: `feature/nv` ... `next/major` (184 commits)
| SHA1 |
|---|
| 9bc0fbd5b1 |
| b7f7202e25 |
| 0719c227ee |
| 621da47990 |
| 9fa81a0c9d |
| 2dac2bb2b3 |
| 58f1dc5025 |
| cc89023bbd |
| 7e35ad57e7 |
| 010e439d1d |
| cdbb512cca |
| bb2e69299e |
| fd0dc9a5b8 |
| e2e88f774e |
| 4bebcafdde |
| 2bb1463f4f |
| f20ece44a1 |
| 9fddcb957f |
| fd502cfb99 |
| ee95eef395 |
| aaa43ce6af |
| e0f27281d1 |
| ecc4703ae7 |
| d478911311 |
| 23fe6fb663 |
| 186925065d |
| 53dff95365 |
| 7f6abf2a80 |
| 19fa1cb4e3 |
| 521f61c647 |
| 3d45234aae |
| f60a1a9ed0 |
| 2aa910a3e8 |
| 8d1e11e158 |
| b7e4df44bf |
| 25aa140174 |
| 7ffb462355 |
| 6ed0afc75f |
| cb7618cb34 |
| 456c5d6725 |
| bdfa918a33 |
| 8b65490d0e |
| c9a93f0a33 |
| f5bfbe0465 |
| 8bccffcb5c |
| 9ff65497a8 |
| 7335e52ab3 |
| b54f10af55 |
| 0549c7c0ef |
| 2a8d8c7154 |
| b9f2446cee |
| 03d7d5f123 |
| 2fd674eca8 |
| 0e9c90f2c0 |
| bca2e4d630 |
| f41fc75024 |
| 56cb3861bc |
| 2999d22d2a |
| bb745c43cc |
| de9a7e4189 |
| 8fbcf44dec |
| 97b3b548c0 |
| 6c72a22178 |
| d7c394ef33 |
| 96dcd126db |
| e4b0f56fa7 |
| 3ef99eca87 |
| f64c543747 |
| 292a914307 |
| 9a58568053 |
| 34e01d4223 |
| 427c38f23b |
| d669aa9afb |
| bcdeabfe85 |
| b1b7d2fa70 |
| 476b10c413 |
| 6e56682c11 |
| 9ed6c1263c |
| 5cf70dc8f5 |
| 1358937fa9 |
| 0e9d4f5d53 |
| 59550d6f5e |
| 68c3d87c5e |
| 24c1f47886 |
| 1b9fcaad2b |
| 900d86ab83 |
| c1a328e5ca |
| 2903b949ea |
| 8ac8dae6fd |
| 0e8dd82910 |
| 873922d9e3 |
| c9ce2c57b3 |
| 6c9cbebe9c |
| dd9837b9b2 |
| 7313693a9e |
| 66a606c14e |
| 7352602f58 |
| 4ab51c4570 |
| 1c1ae11241 |
| cc6a134a32 |
| 3ae24e63e2 |
| 8562e1e19d |
| 90d8d39adf |
| 9f7bc74a1e |
| 65e1c9e5d8 |
| 5a6b2a5588 |
| e86b06c2cd |
| 7b8bb92d60 |
| ebb7916ecd |
| b5ac0b5200 |
| a90b96cddd |
| d1b80cffb8 |
| ae5fe88a40 |
| fc4b887b71 |
| a81b1aa5a6 |
| d8663cd3ae |
| 9f36bc5b5d |
| e2804f9b88 |
| 3cf9dbc6d2 |
| 0fa069126b |
| 50004da782 |
| 517bf80fc8 |
| 6091314981 |
| c485edfa12 |
| fd54e9ca91 |
| d1444b1175 |
| 3024db2654 |
| dba1cb93c1 |
| d12b278a84 |
| 0070a8e692 |
| efc12691bd |
| effcec7e2e |
| 10a5bc0280 |
| 90b73dd320 |
| 324f9d17cd |
| a782cb270b |
| c59c619e12 |
| 00eecf3704 |
| b67e554e76 |
| 36b8fda6db |
| d2f12a7efc |
| 8dd50eb9c0 |
| 73c6696873 |
| 2586f841b8 |
| ccf6fa34b1 |
| 9546fc9ce0 |
| 3441d4d6d6 |
| 7b05a7c585 |
| 76de6be7de |
| c52fcf5087 |
| be921b7865 |
| 43e514f9ee |
| 8ef4ef4895 |
| 36bf55c133 |
| f56262b845 |
| 5316d6ea68 |
| ea8a7c0a57 |
| 68ae365897 |
| ba71f205dd |
| 95a519cbe8 |
| efd90d3bdf |
| a4bae73592 |
| 8b89f016ad |
| 3320391fcc |
| 26a68afdef |
| 0260c1532d |
| b6262c8e13 |
| ba740a9ee2 |
| f2142f0bb3 |
| 86ca23c093 |
| 463b6ca4ef |
| 58e0b166cb |
| 2a678bb017 |
| 5664456b77 |
| 3685b7e57e |
| 989d5f73b1 |
| 4f84073cb5 |
| c190295c34 |
| 60875644a1 |
| 113b09ad01 |
| 2605d0e671 |
| d232b91d31 |
| c65db31fd9 |
| 99871805bd |
`.claude/settings.json` (new file, 1 line)

```diff
@@ -0,0 +1 @@
+{}
```
`.github/actions/setup-build/action.yml` (new file, 81 lines)

```diff
@@ -0,0 +1,81 @@
+name: Setup Build Environment
+description: Common build environment setup steps
+
+inputs:
+  nodejs-version:
+    description: Node.js version
+    required: true
+  setup-python:
+    description: Set up Python
+    required: false
+    default: "false"
+  setup-docker:
+    description: Set up Docker QEMU and Buildx
+    required: false
+    default: "true"
+  setup-sccache:
+    description: Configure sccache for GitHub Actions
+    required: false
+    default: "true"
+  free-space:
+    description: Remove unnecessary packages to free disk space
+    required: false
+    default: "true"
+
+runs:
+  using: composite
+  steps:
+    - name: Free disk space
+      if: inputs.free-space == 'true'
+      shell: bash
+      run: |
+        sudo apt-get remove --purge -y azure-cli || true
+        sudo apt-get remove --purge -y firefox || true
+        sudo apt-get remove --purge -y ghc-* || true
+        sudo apt-get remove --purge -y google-cloud-sdk || true
+        sudo apt-get remove --purge -y google-chrome-stable || true
+        sudo apt-get remove --purge -y powershell || true
+        sudo apt-get remove --purge -y php* || true
+        sudo apt-get remove --purge -y ruby* || true
+        sudo apt-get remove --purge -y mono-* || true
+        sudo apt-get autoremove -y
+        sudo apt-get clean
+        sudo rm -rf /usr/lib/jvm
+        sudo rm -rf /usr/local/.ghcup
+        sudo rm -rf /usr/local/lib/android
+        sudo rm -rf /usr/share/dotnet
+        sudo rm -rf /usr/share/swift
+        sudo rm -rf "$AGENT_TOOLSDIRECTORY"
+
+    # BuildJet runners lack /opt/hostedtoolcache, which setup-python and setup-qemu expect
+    - name: Ensure hostedtoolcache exists
+      shell: bash
+      run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
+
+    - name: Set up Python
+      if: inputs.setup-python == 'true'
+      uses: actions/setup-python@v6
+      with:
+        python-version: "3.x"
+
+    - uses: actions/setup-node@v6
+      with:
+        node-version: ${{ inputs.nodejs-version }}
+        cache: npm
+        cache-dependency-path: "**/package-lock.json"
+
+    - name: Set up Docker QEMU
+      if: inputs.setup-docker == 'true'
+      uses: docker/setup-qemu-action@v4
+
+    - name: Set up Docker Buildx
+      if: inputs.setup-docker == 'true'
+      uses: docker/setup-buildx-action@v4
+
+    - name: Configure sccache
+      if: inputs.setup-sccache == 'true'
+      uses: actions/github-script@v8
+      with:
+        script: |
+          core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
+          core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
```
`.github/workflows/start-cli.yaml` (52 lines changed)

```diff
@@ -37,6 +37,10 @@ on:
       - master
       - next/*
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 env:
   NODEJS_VERSION: "24.11.0"
   ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
@@ -44,6 +48,7 @@ env:
 jobs:
   compile:
     name: Build Debian Package
+    if: github.event.pull_request.draft != true
     strategy:
       fail-fast: true
       matrix:
@@ -60,50 +65,15 @@ jobs:
         }}
     runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
     steps:
-      - name: Cleaning up unnecessary files
-        run: |
-          sudo apt-get remove --purge -y mono-* \
-            ghc* cabal-install* \
-            dotnet* \
-            php* \
-            ruby* \
-            mysql-* \
-            postgresql-* \
-            azure-cli \
-            powershell \
-            google-cloud-sdk \
-            msodbcsql* mssql-tools* \
-            imagemagick* \
-            libgl1-mesa-dri \
-            google-chrome-stable \
-            firefox
-          sudo apt-get autoremove -y
-          sudo apt-get clean
-
-      - run: |
-          sudo mount -t tmpfs tmpfs .
+      - name: Mount tmpfs
         if: ${{ github.event.inputs.runner == 'fast' }}
-      - uses: actions/checkout@v4
+        run: sudo mount -t tmpfs tmpfs .
+      - uses: actions/checkout@v6
         with:
           submodules: recursive
-      - uses: actions/setup-node@v4
+      - uses: ./.github/actions/setup-build
         with:
-          node-version: ${{ env.NODEJS_VERSION }}
-
-      - name: Set up docker QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Configure sccache
-        uses: actions/github-script@v7
-        with:
-          script: |
-            core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
-            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
+          nodejs-version: ${{ env.NODEJS_VERSION }}
 
       - name: Make
         run: TARGET=${{ matrix.triple }} make cli
@@ -112,7 +82,7 @@ jobs:
           SCCACHE_GHA_ENABLED: on
           SCCACHE_GHA_VERSION: 0
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v7
        with:
          name: start-cli_${{ matrix.triple }}
          path: core/target/${{ matrix.triple }}/release/start-cli
```
`.github/workflows/start-registry.yaml` (66 lines changed)

```diff
@@ -1,4 +1,4 @@
-name: Start-Registry
+name: start-registry
 
 on:
   workflow_call:
@@ -35,6 +35,10 @@ on:
       - master
       - next/*
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 env:
   NODEJS_VERSION: "24.11.0"
   ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
@@ -42,6 +46,7 @@ env:
 jobs:
   compile:
     name: Build Debian Package
+    if: github.event.pull_request.draft != true
     strategy:
       fail-fast: true
       matrix:
@@ -56,50 +61,15 @@ jobs:
         }}
     runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
     steps:
-      - name: Cleaning up unnecessary files
-        run: |
-          sudo apt-get remove --purge -y mono-* \
-            ghc* cabal-install* \
-            dotnet* \
-            php* \
-            ruby* \
-            mysql-* \
-            postgresql-* \
-            azure-cli \
-            powershell \
-            google-cloud-sdk \
-            msodbcsql* mssql-tools* \
-            imagemagick* \
-            libgl1-mesa-dri \
-            google-chrome-stable \
-            firefox
-          sudo apt-get autoremove -y
-          sudo apt-get clean
-
-      - run: |
-          sudo mount -t tmpfs tmpfs .
+      - name: Mount tmpfs
         if: ${{ github.event.inputs.runner == 'fast' }}
-      - uses: actions/checkout@v4
+        run: sudo mount -t tmpfs tmpfs .
+      - uses: actions/checkout@v6
         with:
           submodules: recursive
-      - uses: actions/setup-node@v4
+      - uses: ./.github/actions/setup-build
         with:
-          node-version: ${{ env.NODEJS_VERSION }}
-
-      - name: Set up docker QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Configure sccache
-        uses: actions/github-script@v7
-        with:
-          script: |
-            core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
-            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
+          nodejs-version: ${{ env.NODEJS_VERSION }}
 
       - name: Make
         run: make registry-deb
@@ -108,7 +78,7 @@ jobs:
           SCCACHE_GHA_ENABLED: on
           SCCACHE_GHA_VERSION: 0
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v7
         with:
           name: start-registry_${{ matrix.arch }}.deb
           path: results/start-registry-*_${{ matrix.arch }}.deb
@@ -132,13 +102,13 @@ jobs:
         if: ${{ github.event.inputs.runner == 'fast' }}
 
       - name: Set up docker QEMU
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@v4
 
       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@v4
 
       - name: "Login to GitHub Container Registry"
-        uses: docker/login-action@v3
+        uses: docker/login-action@v4
         with:
           registry: ghcr.io
           username: ${{github.actor}}
@@ -146,14 +116,14 @@ jobs:
 
       - name: Docker meta
         id: meta
-        uses: docker/metadata-action@v5
+        uses: docker/metadata-action@v6
         with:
           images: ghcr.io/Start9Labs/startos-registry
           tags: |
            type=raw,value=${{ github.ref_name }}
 
       - name: Download debian package
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v8
         with:
           pattern: start-registry_*.deb
 
@@ -192,7 +162,7 @@ jobs:
 
           ADD *.deb .
 
-          RUN apt-get install -y ./*_$(uname -m).deb && rm *.deb
+          RUN apt-get update && apt-get install -y ./*_$(uname -m).deb && rm -rf *.deb /var/lib/apt/lists/*
 
           VOLUME /var/lib/startos
 
```
`.github/workflows/start-tunnel.yaml` (54 lines changed)

```diff
@@ -1,4 +1,4 @@
-name: Start-Tunnel
+name: start-tunnel
 
 on:
   workflow_call:
@@ -35,6 +35,10 @@ on:
       - master
       - next/*
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 env:
   NODEJS_VERSION: "24.11.0"
   ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
@@ -42,6 +46,7 @@ env:
 jobs:
   compile:
     name: Build Debian Package
+    if: github.event.pull_request.draft != true
     strategy:
       fail-fast: true
       matrix:
@@ -56,50 +61,15 @@ jobs:
         }}
     runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
     steps:
-      - name: Cleaning up unnecessary files
-        run: |
-          sudo apt-get remove --purge -y mono-* \
-            ghc* cabal-install* \
-            dotnet* \
-            php* \
-            ruby* \
-            mysql-* \
-            postgresql-* \
-            azure-cli \
-            powershell \
-            google-cloud-sdk \
-            msodbcsql* mssql-tools* \
-            imagemagick* \
-            libgl1-mesa-dri \
-            google-chrome-stable \
-            firefox
-          sudo apt-get autoremove -y
-          sudo apt-get clean
-
-      - run: |
-          sudo mount -t tmpfs tmpfs .
+      - name: Mount tmpfs
         if: ${{ github.event.inputs.runner == 'fast' }}
-      - uses: actions/checkout@v4
+        run: sudo mount -t tmpfs tmpfs .
+      - uses: actions/checkout@v6
         with:
           submodules: recursive
-      - uses: actions/setup-node@v4
+      - uses: ./.github/actions/setup-build
         with:
-          node-version: ${{ env.NODEJS_VERSION }}
-
-      - name: Set up docker QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Configure sccache
-        uses: actions/github-script@v7
-        with:
-          script: |
-            core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
-            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
+          nodejs-version: ${{ env.NODEJS_VERSION }}
 
       - name: Make
         run: make tunnel-deb
@@ -108,7 +78,7 @@ jobs:
           SCCACHE_GHA_ENABLED: on
           SCCACHE_GHA_VERSION: 0
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v7
         with:
           name: start-tunnel_${{ matrix.arch }}.deb
           path: results/start-tunnel-*_${{ matrix.arch }}.deb
```
`.github/workflows/startos-iso.yaml` (201 lines changed)

```diff
@@ -25,10 +25,13 @@ on:
           - ALL
           - x86_64
           - x86_64-nonfree
+          - x86_64-nvidia
           - aarch64
           - aarch64-nonfree
-          - raspberrypi
+          - aarch64-nvidia
+          # - raspberrypi
           - riscv64
+          - riscv64-nonfree
       deploy:
         type: choice
         description: Deploy
@@ -45,6 +48,10 @@ on:
       - master
       - next/*
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 env:
   NODEJS_VERSION: "24.11.0"
   ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
@@ -52,6 +59,7 @@ env:
 jobs:
   compile:
     name: Compile Base Binaries
+    if: github.event.pull_request.draft != true
     strategy:
       fail-fast: true
       matrix:
@@ -60,10 +68,13 @@ jobs:
           fromJson('{
             "x86_64": ["x86_64"],
             "x86_64-nonfree": ["x86_64"],
+            "x86_64-nvidia": ["x86_64"],
             "aarch64": ["aarch64"],
             "aarch64-nonfree": ["aarch64"],
+            "aarch64-nvidia": ["aarch64"],
             "raspberrypi": ["aarch64"],
             "riscv64": ["riscv64"],
+            "riscv64-nonfree": ["riscv64"],
             "ALL": ["x86_64", "aarch64", "riscv64"]
           }')[github.event.inputs.platform || 'ALL']
         }}
@@ -86,54 +97,16 @@ jobs:
         )[github.event.inputs.runner == 'fast']
       }}
     steps:
-      - name: Cleaning up unnecessary files
-        run: |
-          sudo apt-get remove --purge -y azure-cli || true
-          sudo apt-get remove --purge -y firefox || true
-          sudo apt-get remove --purge -y ghc-* || true
-          sudo apt-get remove --purge -y google-cloud-sdk || true
-          sudo apt-get remove --purge -y google-chrome-stable || true
-          sudo apt-get remove --purge -y powershell || true
-          sudo apt-get remove --purge -y php* || true
-          sudo apt-get remove --purge -y ruby* || true
-          sudo apt-get remove --purge -y mono-* || true
-          sudo apt-get autoremove -y
-          sudo apt-get clean
-          sudo rm -rf /usr/lib/jvm # All JDKs
-          sudo rm -rf /usr/local/.ghcup # Haskell toolchain
-          sudo rm -rf /usr/local/lib/android # Android SDK/NDK, emulator
-          sudo rm -rf /usr/share/dotnet # .NET SDKs
-          sudo rm -rf /usr/share/swift # Swift toolchain (if present)
-          sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
-      - run: |
-          sudo mount -t tmpfs tmpfs .
+      - name: Mount tmpfs
         if: ${{ github.event.inputs.runner == 'fast' }}
-      - uses: actions/checkout@v4
+        run: sudo mount -t tmpfs tmpfs .
+      - uses: actions/checkout@v6
         with:
           submodules: recursive
-      - name: Set up Python
-        uses: actions/setup-python@v5
+      - uses: ./.github/actions/setup-build
         with:
-          python-version: "3.x"
-      - uses: actions/setup-node@v4
-        with:
-          node-version: ${{ env.NODEJS_VERSION }}
+          nodejs-version: ${{ env.NODEJS_VERSION }}
+          setup-python: "true"
 
-      - name: Set up docker QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Configure sccache
-        uses: actions/github-script@v7
-        with:
-          script: |
-            core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
-            core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
-
       - name: Make
         run: make ARCH=${{ matrix.arch }} compiled-${{ matrix.arch }}.tar
@@ -141,7 +114,7 @@ jobs:
           SCCACHE_GHA_ENABLED: on
           SCCACHE_GHA_VERSION: 0
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v7
         with:
           name: compiled-${{ matrix.arch }}.tar
           path: compiled-${{ matrix.arch }}.tar
@@ -157,7 +130,7 @@ jobs:
           format(
             '[
               ["{0}"],
-              ["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "riscv64", "raspberrypi"]
+              ["x86_64", "x86_64-nonfree", "x86_64-nvidia", "aarch64", "aarch64-nonfree", "aarch64-nvidia", "raspberrypi", "riscv64", "riscv64-nonfree"]
             ]',
             github.event.inputs.platform || 'ALL'
           )
@@ -171,18 +144,24 @@ jobs:
           fromJson('{
             "x86_64": "ubuntu-latest",
             "x86_64-nonfree": "ubuntu-latest",
+            "x86_64-nvidia": "ubuntu-latest",
             "aarch64": "ubuntu-24.04-arm",
             "aarch64-nonfree": "ubuntu-24.04-arm",
+            "aarch64-nvidia": "ubuntu-24.04-arm",
             "raspberrypi": "ubuntu-24.04-arm",
             "riscv64": "ubuntu-24.04-arm",
+            "riscv64-nonfree": "ubuntu-24.04-arm",
           }')[matrix.platform],
           fromJson('{
             "x86_64": "buildjet-8vcpu-ubuntu-2204",
             "x86_64-nonfree": "buildjet-8vcpu-ubuntu-2204",
+            "x86_64-nvidia": "buildjet-8vcpu-ubuntu-2204",
             "aarch64": "buildjet-8vcpu-ubuntu-2204-arm",
             "aarch64-nonfree": "buildjet-8vcpu-ubuntu-2204-arm",
+            "aarch64-nvidia": "buildjet-8vcpu-ubuntu-2204-arm",
             "raspberrypi": "buildjet-8vcpu-ubuntu-2204-arm",
             "riscv64": "buildjet-8vcpu-ubuntu-2204",
+            "riscv64-nonfree": "buildjet-8vcpu-ubuntu-2204",
           }')[matrix.platform]
         )
       )[github.event.inputs.runner == 'fast']
@@ -193,10 +172,13 @@ jobs:
         fromJson('{
           "x86_64": "x86_64",
           "x86_64-nonfree": "x86_64",
+          "x86_64-nvidia": "x86_64",
           "aarch64": "aarch64",
           "aarch64-nonfree": "aarch64",
+          "aarch64-nvidia": "aarch64",
           "raspberrypi": "aarch64",
           "riscv64": "riscv64",
+          "riscv64-nonfree": "riscv64",
         }')[matrix.platform]
       }}
     steps:
@@ -221,15 +203,19 @@ jobs:
           sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
         if: ${{ github.event.inputs.runner != 'fast' }}
 
-      - name: Set up docker QEMU
-        uses: docker/setup-qemu-action@v3
+      # BuildJet runners lack /opt/hostedtoolcache, which setup-qemu expects
+      - name: Ensure hostedtoolcache exists
+        run: sudo mkdir -p /opt/hostedtoolcache && sudo chown $USER:$USER /opt/hostedtoolcache
 
-      - uses: actions/checkout@v4
+      - name: Set up docker QEMU
+        uses: docker/setup-qemu-action@v4
+
+      - uses: actions/checkout@v6
         with:
           submodules: recursive
 
       - name: Download compiled artifacts
-        uses: actions/download-artifact@v4
+        uses: actions/download-artifact@v8
         with:
           name: compiled-${{ env.ARCH }}.tar
 
@@ -251,10 +237,8 @@ jobs:
           mkdir -p patch-db/client/dist
           mkdir -p web/.angular
           mkdir -p web/dist/raw/ui
-          mkdir -p web/dist/raw/install-wizard
           mkdir -p web/dist/raw/setup-wizard
           mkdir -p web/dist/static/ui
-          mkdir -p web/dist/static/install-wizard
           mkdir -p web/dist/static/setup-wizard
           PLATFORM=${{ matrix.platform }} make -t compiled-${{ env.ARCH }}.tar
 
@@ -268,19 +252,124 @@ jobs:
         run: PLATFORM=${{ matrix.platform }} make img
         if: ${{ matrix.platform == 'raspberrypi' }}
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v7
         with:
           name: ${{ matrix.platform }}.squashfs
           path: results/*.squashfs
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v7
         with:
           name: ${{ matrix.platform }}.iso
           path: results/*.iso
         if: ${{ matrix.platform != 'raspberrypi' }}
 
-      - uses: actions/upload-artifact@v4
+      - uses: actions/upload-artifact@v7
         with:
           name: ${{ matrix.platform }}.img
           path: results/*.img
         if: ${{ matrix.platform == 'raspberrypi' }}
+
+  deploy:
+    name: Deploy
+    needs: [image]
+    if: github.event_name == 'workflow_dispatch' && github.event.inputs.deploy != 'NONE'
+    runs-on: ubuntu-latest
+    env:
+      REGISTRY: >-
+        ${{
+          fromJson('{
+            "alpha": "https://alpha-registry-x.start9.com",
+            "beta": "https://beta-registry.start9.com"
+          }')[github.event.inputs.deploy]
+        }}
+      S3_BUCKET: s3://startos-images
+      S3_CDN: https://startos-images.nyc3.cdn.digitaloceanspaces.com
+    steps:
+      - uses: actions/checkout@v6
+        with:
+          sparse-checkout: web/package.json
+
+      - name: Determine version
+        id: version
+        run: |
+          VERSION=$(sed -n 's/.*"version": *"\([^"]*\)".*/\1/p' web/package.json | head -1)
+          echo "version=$VERSION" >> "$GITHUB_OUTPUT"
+          echo "Version: $VERSION"
+
+      - name: Download squashfs artifacts
+        uses: actions/download-artifact@v8
+        with:
+          pattern: "*.squashfs"
+          path: artifacts/
+          merge-multiple: true
+
+      - name: Download ISO artifacts
+        uses: actions/download-artifact@v8
+        with:
+          pattern: "*.iso"
+          path: artifacts/
+          merge-multiple: true
+
+      - name: Install start-cli
+        run: |
+          ARCH=$(uname -m)
+          OS=$(uname -s | tr '[:upper:]' '[:lower:]')
+          ASSET_NAME="start-cli_${ARCH}-${OS}"
+          DOWNLOAD_URL=$(curl -fsS \
+            -H "Authorization: token ${{ github.token }}" \
+            https://api.github.com/repos/Start9Labs/start-os/releases \
+            | jq -r '[.[].assets[] | select(.name=="'"$ASSET_NAME"'")] | first | .browser_download_url')
+          curl -fsSL \
+            -H "Authorization: token ${{ github.token }}" \
+            -H "Accept: application/octet-stream" \
+            "$DOWNLOAD_URL" -o /tmp/start-cli
+          sudo install -m 755 /tmp/start-cli /usr/local/bin/start-cli
+          echo "start-cli: $(start-cli --version)"
+
+      - name: Configure S3
+        run: |
+          sudo apt-get install -y -qq s3cmd > /dev/null
+          cat > ~/.s3cfg <<EOF
+          [default]
+          access_key = ${{ secrets.S3_ACCESS_KEY }}
+          secret_key = ${{ secrets.S3_SECRET_KEY }}
+          host_base = nyc3.digitaloceanspaces.com
+          host_bucket = %(bucket)s.nyc3.digitaloceanspaces.com
+          use_https = True
+          EOF
+
+      - name: Set up developer key
+        run: |
+          mkdir -p ~/.startos
+          printf '%s' "${{ secrets.DEV_KEY }}" > ~/.startos/developer.key.pem
+
+      - name: Upload to S3
+        run: |
+          VERSION="${{ steps.version.outputs.version }}"
+          cd artifacts
+          for file in *.iso *.squashfs; do
+            [ -f "$file" ] || continue
+            echo "Uploading $file..."
+            s3cmd put -P "$file" "${{ env.S3_BUCKET }}/v${VERSION}/$file"
+          done
+
+      - name: Register OS version
+        run: |
+          VERSION="${{ steps.version.outputs.version }}"
+          start-cli --registry="${{ env.REGISTRY }}" registry os version add \
+            "$VERSION" "v${VERSION}" '' ">=0.3.5 <=${VERSION}"
+
+      - name: Index assets in registry
+        run: |
+          VERSION="${{ steps.version.outputs.version }}"
+          cd artifacts
+          for file in *.squashfs *.iso; do
+            [ -f "$file" ] || continue
+            PLATFORM=$(echo "$file" | sed 's/.*_\([^.]*\)\.\(squashfs\|iso\)$/\1/')
+            echo "Indexing $file for platform $PLATFORM..."
+            start-cli --registry="${{ env.REGISTRY }}" registry os asset add \
+              --platform="$PLATFORM" \
+              --version="$VERSION" \
+              "$file" \
+              "${{ env.S3_CDN }}/v${VERSION}/$file"
+          done
```
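The new `Index assets in registry` step derives each artifact's platform from its filename with the `sed` capture shown above. A minimal sketch of that extraction, run against a made-up artifact name (the real filenames are produced by the `make` targets earlier in the workflow):

```bash
# Sketch: extract the platform suffix the same way the workflow's
# "Index assets in registry" step does. The filename below is a
# hypothetical example; real artifacts land in results/ during the build.
file="startos-0.4.0_x86_64-nvidia.iso"

# Capture everything between the last '_' and the .squashfs/.iso extension.
PLATFORM=$(echo "$file" | sed 's/.*_\([^.]*\)\.\(squashfs\|iso\)$/\1/')

echo "$PLATFORM"   # prints: x86_64-nvidia
```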
`.github/workflows/test.yaml` (15 lines changed)

```diff
@@ -10,6 +10,10 @@ on:
       - master
       - next/*
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
 env:
   NODEJS_VERSION: "24.11.0"
   ENVIRONMENT: dev-unstable
@@ -17,15 +21,18 @@ env:
 jobs:
   test:
     name: Run Automated Tests
+    if: github.event.pull_request.draft != true
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@v6
        with:
          submodules: recursive
-      - uses: actions/setup-node@v4
+      - uses: ./.github/actions/setup-build
        with:
-          node-version: ${{ env.NODEJS_VERSION }}
+          nodejs-version: ${{ env.NODEJS_VERSION }}
+          free-space: "false"
+          setup-docker: "false"
+          setup-sccache: "false"
 
       - name: Build And Run Tests
        run: make test
```
`.gitignore` (6 lines changed)

```diff
@@ -19,4 +19,8 @@ secrets.db
 /compiled.tar
 /compiled-*.tar
 /build/lib/firmware
 tmp
+web/.i18n-checked
+docs/USER.md
+*.s9pk
+/build/lib/migration-images
```
`ARCHITECTURE.md` (new file, 101 lines)

````diff
@@ -0,0 +1,101 @@
+# Architecture
+
+StartOS is an open-source Linux distribution for running personal servers. It manages discovery, installation, network configuration, backups, and health monitoring of self-hosted services.
+
+## Tech Stack
+
+- Backend: Rust (async/Tokio, Axum web framework)
+- Frontend: Angular 21 + TypeScript + Taiga UI 5
+- Container runtime: Node.js/TypeScript with LXC
+- Database/State: Patch-DB (git submodule) - storage layer with reactive frontend sync
+- API: JSON-RPC via rpc-toolkit (see `core/rpc-toolkit.md`)
+- Auth: Password + session cookie, public/private key signatures, local authcookie (see `core/src/middleware/auth/`)
+
+## Project Structure
+
+```bash
+/
+├── assets/             # Screenshots for README
+├── build/              # Auxiliary files and scripts for deployed images
+├── container-runtime/  # Node.js program managing package containers
+├── core/               # Rust backend: API, daemon (startd), CLI (start-cli)
+├── debian/             # Debian package maintainer scripts
+├── image-recipe/       # Scripts for building StartOS images
+├── patch-db/           # (submodule) Diff-based data store for frontend sync
+├── sdk/                # TypeScript SDK for building StartOS packages
+└── web/                # Web UIs (Angular)
+```
+
+## Components
+
+- **`core/`** — Rust backend daemon. Produces a single binary `startbox` that is symlinked as `startd` (main daemon), `start-cli` (CLI), `start-container` (runs inside LXC containers), `registrybox` (package registry), and `tunnelbox` (VPN/tunnel). Handles all backend logic: RPC API, service lifecycle, networking (DNS, ACME, WiFi, Tor, WireGuard), backups, and database state management. See [core/ARCHITECTURE.md](core/ARCHITECTURE.md).
+
+- **`web/`** — Angular 21 + TypeScript workspace using Taiga UI 5. Contains three applications (admin UI, setup wizard, VPN management) and two shared libraries (common components/services, marketplace). Communicates with the backend exclusively via JSON-RPC. See [web/ARCHITECTURE.md](web/ARCHITECTURE.md).
+
+- **`container-runtime/`** — Node.js runtime that runs inside each service's LXC container. Loads the service's JavaScript from its S9PK package and manages subcontainers. Communicates with the host daemon via JSON-RPC over Unix socket. See [container-runtime/CLAUDE.md](container-runtime/CLAUDE.md).
+
+- **`sdk/`** — TypeScript SDK for packaging services for StartOS (`@start9labs/start-sdk`). Split into `base/` (core types, ABI definitions, effects interface, consumed by web as `@start9labs/start-sdk-base`) and `package/` (full SDK for service developers, consumed by container-runtime as `@start9labs/start-sdk`).
+
+- **`patch-db/`** — Git submodule providing diff-based state synchronization. Uses CBOR encoding. Backend mutations produce diffs that are pushed to the frontend via WebSocket, enabling reactive UI updates without polling. See [patch-db repo](https://github.com/Start9Labs/patch-db).
+
+## Build Pipeline
+
+Components have a strict dependency chain. Changes flow in one direction:
+
+```
+Rust (core/)
+  → cargo test exports ts-rs types to core/bindings/
+  → rsync copies to sdk/base/lib/osBindings/
+  → SDK build produces baseDist/ and dist/
+  → web/ consumes baseDist/ (via @start9labs/start-sdk-base)
+  → container-runtime/ consumes dist/ (via @start9labs/start-sdk)
+```
+
+Key make targets along this chain:
+
+| Step | Command | What it does |
+|---|---|---|
+| 1 | `cargo check -p start-os` | Verify Rust compiles |
+| 2 | `make ts-bindings` | Export ts-rs types → rsync to SDK |
+| 3 | `cd sdk && make baseDist dist` | Build SDK packages |
+| 4 | `cd web && npm run check` | Type-check Angular projects |
+| 5 | `cd container-runtime && npm run check` | Type-check runtime |
+
+**Important**: Editing `sdk/base/lib/osBindings/*.ts` alone is NOT sufficient — you must rebuild the SDK bundle (step 3) before web/container-runtime can see the changes.
+
+## Cross-Layer Verification
+
+When making changes across multiple layers (Rust, SDK, web, container-runtime), verify in this order:
+
+1. **Rust**: `cargo check -p start-os` — verifies core compiles
+2. **TS bindings**: `make ts-bindings` — regenerates TypeScript types from Rust `#[ts(export)]` structs
+   - Runs `./core/build/build-ts.sh` to export ts-rs types to `core/bindings/`
+   - Syncs `core/bindings/` → `sdk/base/lib/osBindings/` via rsync
+   - If you manually edit files in `sdk/base/lib/osBindings/`, you must still rebuild the SDK (step 3)
+3. **SDK bundle**: `cd sdk && make baseDist dist` — compiles SDK source into packages
+   - `baseDist/` is consumed by `/web` (via `@start9labs/start-sdk-base`)
+   - `dist/` is consumed by `/container-runtime` (via `@start9labs/start-sdk`)
+   - Web and container-runtime reference the **built** SDK, not source files
+4. **Web type check**: `cd web && npm run check` — type-checks all Angular projects
+5. **Container runtime type check**: `cd container-runtime && npm run check` — type-checks the runtime
+
+## Data Flow: Backend to Frontend
+
+StartOS uses Patch-DB for reactive state synchronization:
+
+1. The backend mutates state via `db.mutate()`, producing CBOR diffs
+2. Diffs are pushed to the frontend over a persistent WebSocket connection
+3. The frontend applies diffs to its local state copy and notifies observers
+4. Components watch specific database paths via `PatchDB.watch$()`, receiving updates reactively
+
+This means the UI is always eventually consistent with the backend — after any mutating API call, the frontend waits for the corresponding PatchDB diff before resolving, so the UI reflects the result immediately.
+
+## Further Reading
+
+- [core/ARCHITECTURE.md](core/ARCHITECTURE.md) — Rust backend architecture
+- [web/ARCHITECTURE.md](web/ARCHITECTURE.md) — Angular frontend architecture
+- [container-runtime/CLAUDE.md](container-runtime/CLAUDE.md) — Container runtime details
+- [core/rpc-toolkit.md](core/rpc-toolkit.md) — JSON-RPC handler patterns
+- [core/s9pk-structure.md](core/s9pk-structure.md) — S9PK package format
+- [docs/exver.md](docs/exver.md) — Extended versioning format
+- [docs/VERSION_BUMP.md](docs/VERSION_BUMP.md) — Version bumping guide
````
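The cross-layer verification order the new ARCHITECTURE.md prescribes can be run end to end as a short shell sequence. A minimal sketch, using only the commands the document itself lists:

```bash
# Cross-layer verification, in the order ARCHITECTURE.md prescribes.
# Run from the repository root; each step feeds the next.

cargo check -p start-os                   # 1. verify the Rust core compiles
make ts-bindings                          # 2. export ts-rs types and rsync them into sdk/base/lib/osBindings/
(cd sdk && make baseDist dist)            # 3. rebuild the SDK bundles consumed by web/ and container-runtime/
(cd web && npm run check)                 # 4. type-check the Angular projects
(cd container-runtime && npm run check)   # 5. type-check the container runtime
```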
`CLAUDE.md` (new file, 59 lines)

````diff
@@ -0,0 +1,59 @@
+# CLAUDE.md
+
+This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+## Architecture
+
+See [ARCHITECTURE.md](ARCHITECTURE.md) for the full system architecture, component map, build pipeline, and cross-layer verification order.
+
+Each major component has its own `CLAUDE.md` with detailed guidance: `core/`, `web/`, `container-runtime/`, `sdk/`.
+
+## Build & Development
+
+See [CONTRIBUTING.md](CONTRIBUTING.md) for:
+
+- Environment setup and requirements
+- Build commands and make targets
+- Testing and formatting commands
+- Environment variables
+
+**Quick reference:**
+
+```bash
+. ./devmode.sh                            # Enable dev mode
+make update-startbox REMOTE=start9@<ip>   # Fastest iteration (binary + UI)
+make test-core                            # Run Rust tests
+```
+
+## Operating Rules
+
+- Always verify cross-layer changes using the order described in [ARCHITECTURE.md](ARCHITECTURE.md#cross-layer-verification)
+- Check component-level CLAUDE.md files for component-specific conventions. ALWAYS read it before operating on that component.
+- Follow existing patterns before inventing new ones
+- Always use `make` recipes when they exist for testing builds rather than manually invoking build commands
+- **Commit signing:** Never push unsigned commits. Before pushing, check all unpushed commits for signatures with `git log --show-signature @{upstream}..HEAD`. If any are unsigned, prompt the user to sign them with `git rebase --exec 'git commit --amend -S --no-edit' @{upstream}`.
+
+## Supplementary Documentation
+
+The `docs/` directory contains cross-cutting documentation for AI assistants:
+
+- `TODO.md` - Pending tasks for AI agents (check this first, remove items when completed)
+- `USER.md` - Current user identifier (gitignored, see below)
+- `exver.md` - Extended versioning format (used across core, sdk, and web)
+- `VERSION_BUMP.md` - Guide for bumping the StartOS version across the codebase
+
+Component-specific docs live alongside their code (e.g., `core/rpc-toolkit.md`, `core/i18n-patterns.md`).
+
+### Session Startup
+
+On startup:
+
+1. **Check for `docs/USER.md`** - If it doesn't exist, prompt the user for their name/identifier and create it. This file is gitignored since it varies per developer.
+
+2. **Check `docs/TODO.md` for relevant tasks** - Show TODOs that either:
+   - Have no `@username` tag (relevant to everyone)
+   - Are tagged with the current user's identifier
+
+   Skip TODOs tagged with a different user.
+
+3. **Ask "What would you like to do today?"** - Offer options for each relevant TODO item, plus "Something else" for other requests.
````
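The commit-signing rule added in CLAUDE.md reduces to two git invocations. A small sketch of the check-then-fix flow it describes:

```bash
# Inspect every unpushed commit for a signature before pushing.
git log --show-signature @{upstream}..HEAD

# If any commit in that range is unsigned, re-sign the whole range in place.
# This rewrites the unpushed commits, so run it before anyone else pulls them.
git rebase --exec 'git commit --amend -S --no-edit' @{upstream}
```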
317
CONTRIBUTING.md
@@ -1,133 +1,240 @@
|
|||||||
# Contributing to StartOS
|
# Contributing to StartOS
|
||||||
|
|
||||||
This guide is for contributing to the StartOS. If you are interested in packaging a service for StartOS, visit the [service packaging guide](https://docs.start9.com/latest/packaging-guide/). If you are interested in promoting, providing technical support, creating tutorials, or helping in other ways, please visit the [Start9 website](https://start9.com/contribute).
|
This guide is for contributing to the StartOS. If you are interested in packaging a service for StartOS, visit the [service packaging guide](https://github.com/Start9Labs/ai-service-packaging). If you are interested in promoting, providing technical support, creating tutorials, or helping in other ways, please visit the [Start9 website](https://start9.com/contribute).
|
||||||
|
|
||||||
## Collaboration
|
## Collaboration
|
||||||
|
|
||||||
- [Matrix](https://matrix.to/#/#community-dev:matrix.start9labs.com)
|
- [Matrix](https://matrix.to/#/#dev-startos:matrix.start9labs.com)
|
||||||
- [Telegram](https://t.me/start9_labs/47471)
|
|
||||||
|
|
||||||
## Project Structure
|
For project structure and system architecture, see [ARCHITECTURE.md](ARCHITECTURE.md).
|
||||||
|
|
||||||
```bash
|
|
||||||
/
|
|
||||||
├── assets/
|
|
||||||
├── container-runtime/
|
|
||||||
├── core/
|
|
||||||
├── build/
|
|
||||||
├── debian/
|
|
||||||
├── web/
|
|
||||||
├── image-recipe/
|
|
||||||
├── patch-db
|
|
||||||
└── sdk/
|
|
||||||
```
|
|
||||||
|
|
||||||
#### assets
|
|
||||||
|
|
||||||
screenshots for the StartOS README
|
|
||||||
|
|
||||||
#### container-runtime
|
|
||||||
|
|
||||||
A NodeJS program that dynamically loads maintainer scripts and communicates with the OS to manage packages
|
|
||||||
|
|
||||||
#### core
|
|
||||||
|
|
||||||
An API, daemon (startd), and CLI (start-cli) that together provide the core functionality of StartOS.
|
|
||||||
|
|
||||||
#### build
|
|
||||||
|
|
||||||
Auxiliary files and scripts to include in deployed StartOS images
|
|
||||||
|
|
||||||
#### debian
|
|
||||||
|
|
||||||
Maintainer scripts for the StartOS Debian package
|
|
||||||
|
|
||||||
#### web
|
|
||||||
|
|
||||||
Web UIs served under various conditions and used to interact with StartOS APIs.
|
|
||||||
|
|
||||||
#### image-recipe
|
|
||||||
|
|
||||||
Scripts for building StartOS images
|
|
||||||
|
|
||||||
#### patch-db (submodule)
|
|
||||||
|
|
||||||
A diff based data store used to synchronize data between the web interfaces and server.
|
|
||||||
|
|
||||||
#### sdk
|
|
||||||
|
|
||||||
A typescript sdk for building start-os packages
|
|
||||||
|
|
||||||
## Environment Setup
|
## Environment Setup
|
||||||
|
|
||||||
#### Clone the StartOS repository
|
### Installing Dependencies (Debian/Ubuntu)
|
||||||
|
|
||||||
|
> Debian/Ubuntu is the only officially supported build environment.
|
||||||
|
> MacOS has limited build capabilities and Windows requires [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install).
|
||||||
|
|
||||||
```sh
|
```sh
|
||||||
git clone https://github.com/Start9Labs/start-os.git --recurse-submodules
|
sudo apt update
|
||||||
|
sudo apt install -y ca-certificates curl gpg build-essential
|
||||||
|
curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
|
||||||
|
echo "deb [arch=$(dpkg-architecture -q DEB_HOST_ARCH) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian bookworm stable" | sudo tee /etc/apt/sources.list.d/docker.list
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y sed grep gawk jq gzip brotli containerd.io docker-ce docker-ce-cli docker-compose-plugin qemu-user-static binfmt-support squashfs-tools git debspawn rsync b3sum
|
||||||
|
sudo mkdir -p /etc/debspawn/
|
||||||
|
echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml
|
||||||
|
sudo usermod -aG docker $USER
|
||||||
|
sudo su $USER
|
||||||
|
docker run --privileged --rm tonistiigi/binfmt --install all
|
||||||
|
docker buildx create --use
|
||||||
|
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # proceed with default installation
|
||||||
|
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash
|
||||||
|
source ~/.bashrc
|
||||||
|
nvm install 24
|
||||||
|
nvm use 24
|
||||||
|
nvm alias default 24 # this prevents your machine from reverting back to another version
|
||||||
|
```
|
||||||
|
|
||||||
|
### Cloning the Repository
|
||||||
|
|
||||||
|
```sh
|
||||||
|
git clone --recursive https://github.com/Start9Labs/start-os.git --branch next/major
|
||||||
cd start-os
|
cd start-os
|
||||||
```
|
```
|
||||||
|
|
||||||
#### Continue to your project of interest for additional instructions:
|
### Development Mode
|
||||||
|
|
||||||
- [`core`](core/README.md)
|
For faster iteration during development:
|
||||||
- [`web-interfaces`](web-interfaces/README.md)
|
|
||||||
- [`build`](build/README.md)
|
```sh
|
||||||
- [`patch-db`](https://github.com/Start9Labs/patch-db)
|
. ./devmode.sh
|
||||||
|
```
|
||||||
|
|
||||||
|
This sets `ENVIRONMENT=dev` and `GIT_BRANCH_AS_HASH=1` to prevent rebuilds on every commit.
|
||||||
|
|
||||||
## Building
|
## Building
|
||||||
|
|
||||||
This project uses [GNU Make](https://www.gnu.org/software/make/) to build its components. To build any specific component, simply run `make <TARGET>` replacing `<TARGET>` with the name of the target you'd like to build
|
All builds can be performed on any operating system that can run Docker.
|
||||||
|
|
||||||
|
This project uses [GNU Make](https://www.gnu.org/software/make/) to build its components.
|
||||||
|
|
||||||
### Requirements
|
### Requirements
|
||||||
|
|
||||||
- [GNU Make](https://www.gnu.org/software/make/)
|
- [GNU Make](https://www.gnu.org/software/make/)
|
||||||
- [Docker](https://docs.docker.com/get-docker/)
|
- [Docker](https://docs.docker.com/get-docker/) or [Podman](https://podman.io/)
|
||||||
- [NodeJS v20.16.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
|
- [NodeJS v20.16.0](https://docs.npmjs.com/downloading-and-installing-node-js-and-npm)
|
||||||
- [sed](https://www.gnu.org/software/sed/)
|
- [Rust](https://rustup.rs/) (nightly for formatting)
|
||||||
- [grep](https://www.gnu.org/software/grep/)
|
- [sed](https://www.gnu.org/software/sed/), [grep](https://www.gnu.org/software/grep/), [awk](https://www.gnu.org/software/gawk/)
|
||||||
- [awk](https://www.gnu.org/software/gawk/)
|
|
||||||
- [jq](https://jqlang.github.io/jq/)
|
- [jq](https://jqlang.github.io/jq/)
|
||||||
- [gzip](https://www.gnu.org/software/gzip/)
|
- [gzip](https://www.gnu.org/software/gzip/), [brotli](https://github.com/google/brotli)
|
||||||
- [brotli](https://github.com/google/brotli)
|
|
||||||
|
|
||||||
### Environment variables
|
### Environment Variables
|
||||||
|
|
||||||
- `PLATFORM`: which platform you would like to build for. Must be one of `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `raspberrypi`
|
| Variable | Description |
|
||||||
- NOTE: `nonfree` images are for including `nonfree` firmware packages in the built ISO
|
| -------------------- | --------------------------------------------------------------------------------------------------- |
|
||||||
- `ENVIRONMENT`: a hyphen separated set of feature flags to enable
|
| `PLATFORM` | Target platform: `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `riscv64`, `raspberrypi` |
|
||||||
- `dev`: enables password ssh (INSECURE!) and does not compress frontends
|
| `ENVIRONMENT` | Hyphen-separated feature flags (see below) |
|
||||||
- `unstable`: enables assertions that will cause errors on unexpected inconsistencies that are undesirable in production use either for performance or reliability reasons
|
| `PROFILE` | Build profile: `release` (default) or `dev` |
|
||||||
- `docker`: use `docker` instead of `podman`
|
| `GIT_BRANCH_AS_HASH` | Set to `1` to use git branch name as version hash (avoids rebuilds) |
|
||||||
- `GIT_BRANCH_AS_HASH`: set to `1` to use the current git branch name as the git hash so that the project does not need to be rebuilt on each commit
|
|
||||||
|
|
||||||
### Useful Make Targets
|
**ENVIRONMENT flags:**
|
||||||
|
|
||||||
- `iso`: Create a full `.iso` image
|
- `dev` - Enables password SSH before setup, skips frontend compression
|
||||||
- Only possible from Debian
|
- `unstable` - Enables assertions and debugging with performance penalty
|
||||||
- Not available for `PLATFORM=raspberrypi`
|
- `console` - Enables tokio-console for async debugging
|
||||||
- Additional Requirements:
|
|
||||||
- [debspawn](https://github.com/lkhq/debspawn)
|
**Platform notes:**
|
||||||
- `img`: Create a full `.img` image
|
|
||||||
- Only possible from Debian
|
- `-nonfree` variants include proprietary firmware and drivers
|
||||||
- Only available for `PLATFORM=raspberrypi`
|
- `raspberrypi` includes non-free components by necessity
|
||||||
- Additional Requirements:
|
- Platform is remembered between builds if not specified
|
||||||
- [debspawn](https://github.com/lkhq/debspawn)
|
|
||||||
- `format`: Run automatic code formatting for the project
|
### Make Targets
|
||||||
- Additional Requirements:
|
|
||||||
- [rust](https://rustup.rs/)
|
#### Building
|
||||||
- `test`: Run automated tests for the project
|
|
||||||
- Additional Requirements:
|
| Target | Description |
|
||||||
- [rust](https://rustup.rs/)
|
| ------------- | ---------------------------------------------- |
|
||||||
- `update`: Deploy the current working project to a device over ssh as if through an over-the-air update
|
| `iso` | Create full `.iso` image (not for raspberrypi) |
|
||||||
- Requires an argument `REMOTE`, which is the SSH address of the device, e.g. `start9@192.168.122.2`
|
| `img` | Create full `.img` image (raspberrypi only) |
|
||||||
- `reflash`: Deploy the current working project to a device over ssh as if using a live `iso` image to reflash it
|
| `deb` | Build Debian package |
|
||||||
- Requires an argument `REMOTE`, which is the SSH address of the device, e.g. `start9@192.168.122.2`
|
| `all` | Build all Rust binaries |
|
||||||
- `update-overlay`: Deploy the current working project to a device over ssh to the in-memory overlay without restarting it
|
| `uis` | Build all web UIs |
|
||||||
- WARNING: changes will be reverted after the device is rebooted
|
| `ui` | Build main UI only |
|
||||||
- WARNING: changes to `init` will not take effect as the device is already initialized
|
| `ts-bindings` | Generate TypeScript bindings from Rust types |
|
||||||
- Requires an argument `REMOTE`, which is the SSH address of the device, e.g. `start9@192.168.122.2`
|
|
||||||
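For instance, to build only the Debian package or only the main UI using the targets listed above:

```sh
# Build just the Debian package for the currently selected platform
PLATFORM=x86_64 make deb

# Rebuild only the main web UI
make ui
```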
- `wormhole`: Deploy the `startbox` to a device using [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
|
#### Deploying to Device
|
||||||
- When the build is complete, it will emit a command to paste into the shell of the device to upgrade it
|
|
||||||
- Additional Requirements:
|
For devices on the same network:
|
||||||
- [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)
|
|
||||||
- `clean`: Delete all compiled artifacts
|
| Target | Description |
|
||||||
|
| ------------------------------------ | ----------------------------------------------- |
|
||||||
|
| `update-startbox REMOTE=start9@<ip>` | Deploy binary + UI only (fastest) |
|
||||||
|
| `update-deb REMOTE=start9@<ip>` | Deploy full Debian package |
|
||||||
|
| `update REMOTE=start9@<ip>` | OTA-style update |
|
||||||
|
| `reflash REMOTE=start9@<ip>` | Reflash as if using live ISO |
|
||||||
|
| `update-overlay REMOTE=start9@<ip>` | Deploy to in-memory overlay (reverts on reboot) |
|
||||||
|
|
||||||
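For example, to push a rebuilt binary and UI to a VM reachable at `192.168.122.2` (the address is just a sample):

```sh
# Fastest iteration loop: deploy the startbox binary and UI over SSH
PLATFORM=x86_64 ENVIRONMENT=dev make update-startbox REMOTE=start9@192.168.122.2
```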
|
For devices on different networks (uses [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)):
|
||||||
|
|
||||||
|
| Target | Description |
|
||||||
|
| ------------------- | -------------------- |
|
||||||
|
| `wormhole` | Send startbox binary |
|
||||||
|
| `wormhole-deb` | Send Debian package |
|
||||||
|
| `wormhole-squashfs` | Send squashfs image |
|
||||||
|
|
||||||
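A typical wormhole deployment looks like this; when the build finishes, the target prints a command to paste into the device's shell:

```sh
# Build and send the startbox binary via magic-wormhole
PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole
```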
|
### Creating a VM
|
||||||
|
|
||||||
|
Install virt-manager:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
sudo apt update
|
||||||
|
sudo apt install -y virt-manager
|
||||||
|
sudo usermod -aG libvirt $USER
|
||||||
|
sudo su $USER
|
||||||
|
virt-manager
|
||||||
|
```
|
||||||
|
|
||||||
|
Follow the screenshot walkthrough in [`assets/create-vm/`](assets/create-vm/) to create a new virtual machine. Key steps:
|
||||||
|
|
||||||
|
1. Create a new virtual machine
|
||||||
|
2. Browse for the ISO — create a storage pool pointing to your `results/` directory
|
||||||
|
3. Select "Generic or unknown OS"
|
||||||
|
4. Set memory and CPUs
|
||||||
|
5. Create a disk and name the VM
|
||||||
|
|
||||||
|
Build an ISO first:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
PLATFORM=$(uname -m) ENVIRONMENT=dev make iso
|
||||||
|
```
|
||||||
|
|
||||||
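If you prefer the command line to the virt-manager GUI, an equivalent VM can be created with `virt-install` once the ISO exists (the VM name, sizes, and ISO filename below are placeholders):

```sh
# Sketch only: boot the freshly built ISO in a new libvirt VM.
# Adjust the ISO path to match what `make iso` placed in results/.
virt-install \
  --name startos-dev \
  --memory 4096 \
  --vcpus 2 \
  --disk size=32 \
  --cdrom results/startos-dev.iso \
  --os-variant generic
```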
|
#### Other
|
||||||
|
|
||||||
|
| Target | Description |
|
||||||
|
| ------------------------ | ------------------------------------------- |
|
||||||
|
| `format` | Run code formatting (Rust nightly required) |
|
||||||
|
| `test` | Run all automated tests |
|
||||||
|
| `test-core` | Run Rust tests |
|
||||||
|
| `test-sdk` | Run SDK tests |
|
||||||
|
| `test-container-runtime` | Run container runtime tests |
|
||||||
|
| `clean` | Delete all compiled artifacts |
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
```bash
|
||||||
|
make test # All tests
|
||||||
|
make test-core # Rust tests (via ./core/run-tests.sh)
|
||||||
|
make test-sdk # SDK tests
|
||||||
|
make test-container-runtime # Container runtime tests
|
||||||
|
|
||||||
|
# Run specific Rust test
|
||||||
|
cd core && cargo test <test_name> --features=test
|
||||||
|
```
|
||||||
|
|
||||||
|
## Code Formatting
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Rust (requires nightly)
|
||||||
|
make format
|
||||||
|
|
||||||
|
# TypeScript/HTML/SCSS (web)
|
||||||
|
cd web && npm run format
|
||||||
|
```
|
||||||
|
|
||||||
|
## Code Style Guidelines
|
||||||
|
|
||||||
|
### Formatting
|
||||||
|
|
||||||
|
Run the formatters before committing. Configuration is handled by `rustfmt.toml` (Rust) and prettier configs (TypeScript).
|
||||||
|
|
||||||
|
### Documentation & Comments
|
||||||
|
|
||||||
|
**Rust:**
|
||||||
|
|
||||||
|
- Add doc comments (`///`) to public APIs, structs, and non-obvious functions
|
||||||
|
- Use `//` comments sparingly for complex logic that isn't self-evident
|
||||||
|
- Prefer self-documenting code (clear naming, small functions) over comments
|
||||||
|
|
||||||
|
**TypeScript:**
|
||||||
|
|
||||||
|
- Document exported functions and complex types with JSDoc
|
||||||
|
- Keep comments focused on "why" rather than "what"
|
||||||
|
|
||||||
|
**General:**
|
||||||
|
|
||||||
|
- Don't add comments that just restate the code
|
||||||
|
- Update or remove comments when code changes
|
||||||
|
- TODOs should include context: `// TODO(username): reason`
|
||||||
|
|
||||||
|
### Commit Messages
|
||||||
|
|
||||||
|
Use [Conventional Commits](https://www.conventionalcommits.org/):
|
||||||
|
|
||||||
|
```
|
||||||
|
<type>(<scope>): <description>
|
||||||
|
|
||||||
|
[optional body]
|
||||||
|
|
||||||
|
[optional footer]
|
||||||
|
```
|
||||||
|
|
||||||
|
**Types:**
|
||||||
|
|
||||||
|
- `feat` - New feature
|
||||||
|
- `fix` - Bug fix
|
||||||
|
- `docs` - Documentation only
|
||||||
|
- `style` - Formatting, no code change
|
||||||
|
- `refactor` - Code change that neither fixes a bug nor adds a feature
|
||||||
|
- `test` - Adding or updating tests
|
||||||
|
- `chore` - Build process, dependencies, etc.
|
||||||
|
|
||||||
|
**Examples:**
|
||||||
|
|
||||||
|
```
|
||||||
|
feat(web): add dark mode toggle
|
||||||
|
fix(core): resolve race condition in service startup
|
||||||
|
docs: update CONTRIBUTING.md with style guidelines
|
||||||
|
refactor(sdk): simplify package validation logic
|
||||||
|
```
|
||||||
|
|||||||
134
DEVELOPMENT.md
@@ -1,134 +0,0 @@
|
|||||||
# Setting up your development environment on Debian/Ubuntu
|
|
||||||
|
|
||||||
A step-by-step guide
|
|
||||||
|
|
||||||
> This is the only officially supported build environment.
|
|
||||||
> macOS has limited build capabilities, and Windows requires [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install).
|
|
||||||
|
|
||||||
## Installing dependencies
|
|
||||||
|
|
||||||
Run the following commands one at a time:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
sudo apt update
|
|
||||||
sudo apt install -y ca-certificates curl gpg build-essential
|
|
||||||
curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
|
|
||||||
echo "deb [arch=$(dpkg-architecture -q DEB_HOST_ARCH) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian bookworm stable" | sudo tee /etc/apt/sources.list.d/docker.list
|
|
||||||
sudo apt update
|
|
||||||
sudo apt install -y sed grep gawk jq gzip brotli containerd.io docker-ce docker-ce-cli docker-compose-plugin qemu-user-static binfmt-support squashfs-tools git debspawn rsync b3sum
|
|
||||||
sudo mkdir -p /etc/debspawn/
|
|
||||||
echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml
|
|
||||||
sudo usermod -aG docker $USER
|
|
||||||
sudo su $USER
|
|
||||||
docker run --privileged --rm tonistiigi/binfmt --install all
|
|
||||||
docker buildx create --use
|
|
||||||
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # proceed with default installation
|
|
||||||
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash
|
|
||||||
source ~/.bashrc
|
|
||||||
nvm install 24
|
|
||||||
nvm use 24
|
|
||||||
nvm alias default 24 # this prevents your machine from reverting to another version
|
|
||||||
```
|
|
||||||
|
|
||||||
## Cloning the repository
|
|
||||||
|
|
||||||
```sh
|
|
||||||
git clone --recursive https://github.com/Start9Labs/start-os.git --branch next/major
|
|
||||||
cd start-os
|
|
||||||
```
|
|
||||||
|
|
||||||
## Building an ISO
|
|
||||||
|
|
||||||
```sh
|
|
||||||
PLATFORM=$(uname -m) ENVIRONMENT=dev make iso
|
|
||||||
```
|
|
||||||
|
|
||||||
This will build an ISO for your current architecture. If you are targeting a different architecture, replace `$(uname -m)` with the correct platform for the device (one of `aarch64`, `aarch64-nonfree`, `x86_64`, `x86_64-nonfree`, `raspberrypi`).
|
|
||||||
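For example, to build an `aarch64` ISO from an `x86_64` build machine:

```sh
PLATFORM=aarch64 ENVIRONMENT=dev make iso
```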
|
|
||||||
## Creating a VM
|
|
||||||
|
|
||||||
### Install virt-manager
|
|
||||||
|
|
||||||
```sh
|
|
||||||
sudo apt update
|
|
||||||
sudo apt install -y virt-manager
|
|
||||||
sudo usermod -aG libvirt $USER
|
|
||||||
sudo su $USER
|
|
||||||
```
|
|
||||||
|
|
||||||
### Launch virt-manager
|
|
||||||
|
|
||||||
```sh
|
|
||||||
virt-manager
|
|
||||||
```
|
|
||||||
|
|
||||||
### Create new virtual machine
|
|
||||||
|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||
|
|
||||||
#### Make sure to set "Target Path" to the path to your results directory in start-os
|
|
||||||
|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||

|
|
||||||
|
|
||||||
## Updating a VM
|
|
||||||
|
|
||||||
The fastest way to update a VM to your latest code depends on what you changed:
|
|
||||||
|
|
||||||
### UI or startd:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
PLATFORM=$(uname -m) ENVIRONMENT=dev make update-startbox REMOTE=start9@<VM IP>
|
|
||||||
```
|
|
||||||
|
|
||||||
### Container runtime or debian dependencies:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
PLATFORM=$(uname -m) ENVIRONMENT=dev make update-deb REMOTE=start9@<VM IP>
|
|
||||||
```
|
|
||||||
|
|
||||||
### Image recipe:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
PLATFORM=$(uname -m) ENVIRONMENT=dev make update-squashfs REMOTE=start9@<VM IP>
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
If the device you are building for is not available via ssh, it is also possible to use `magic-wormhole` to send the relevant files.
|
|
||||||
|
|
||||||
### Prerequisites:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
sudo apt update
|
|
||||||
sudo apt install -y magic-wormhole
|
|
||||||
```
|
|
||||||
|
|
||||||
As before, the fastest way to update a VM to your latest code depends on what you changed. Each of the following commands will return a command to paste into the shell of the device you would like to upgrade.
|
|
||||||
|
|
||||||
### UI or startd:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole
|
|
||||||
```
|
|
||||||
|
|
||||||
### Container runtime or debian dependencies:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole-deb
|
|
||||||
```
|
|
||||||
|
|
||||||
### Image recipe:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole-squashfs
|
|
||||||
```
|
|
||||||
56
Makefile
@@ -7,22 +7,21 @@ GIT_HASH_FILE := $(shell ./build/env/check-git-hash.sh)
|
|||||||
VERSION_FILE := $(shell ./build/env/check-version.sh)
|
VERSION_FILE := $(shell ./build/env/check-version.sh)
|
||||||
BASENAME := $(shell PROJECT=startos ./build/env/basename.sh)
|
BASENAME := $(shell PROJECT=startos ./build/env/basename.sh)
|
||||||
PLATFORM := $(shell if [ -f $(PLATFORM_FILE) ]; then cat $(PLATFORM_FILE); else echo unknown; fi)
|
PLATFORM := $(shell if [ -f $(PLATFORM_FILE) ]; then cat $(PLATFORM_FILE); else echo unknown; fi)
|
||||||
ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi)
|
ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; elif [ "$(PLATFORM)" = "rockchip64" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g; s/-nvidia$$//g'; fi)
|
||||||
RUST_ARCH := $(shell if [ "$(ARCH)" = "riscv64" ]; then echo riscv64gc; else echo $(ARCH); fi)
|
RUST_ARCH := $(shell if [ "$(ARCH)" = "riscv64" ]; then echo riscv64gc; else echo $(ARCH); fi)
|
||||||
REGISTRY_BASENAME := $(shell PROJECT=start-registry PLATFORM=$(ARCH) ./build/env/basename.sh)
|
REGISTRY_BASENAME := $(shell PROJECT=start-registry PLATFORM=$(ARCH) ./build/env/basename.sh)
|
||||||
TUNNEL_BASENAME := $(shell PROJECT=start-tunnel PLATFORM=$(ARCH) ./build/env/basename.sh)
|
TUNNEL_BASENAME := $(shell PROJECT=start-tunnel PLATFORM=$(ARCH) ./build/env/basename.sh)
|
||||||
IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi)
|
IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi)
|
||||||
WEB_UIS := web/dist/raw/ui/index.html web/dist/raw/setup-wizard/index.html web/dist/raw/install-wizard/index.html
|
WEB_UIS := web/dist/raw/ui/index.html web/dist/raw/setup-wizard/index.html
|
||||||
COMPRESSED_WEB_UIS := web/dist/static/ui/index.html web/dist/static/setup-wizard/index.html web/dist/static/install-wizard/index.html
|
COMPRESSED_WEB_UIS := web/dist/static/ui/index.html web/dist/static/setup-wizard/index.html
|
||||||
FIRMWARE_ROMS := build/lib/firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./build/lib/firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json)
|
FIRMWARE_ROMS := build/lib/firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./build/lib/firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json)
|
||||||
BUILD_SRC := $(call ls-files, build/lib) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS)
|
BUILD_SRC := $(call ls-files, build/lib) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS) build/lib/migration-images/.done
|
||||||
IMAGE_RECIPE_SRC := $(call ls-files, build/image-recipe/)
|
IMAGE_RECIPE_SRC := $(call ls-files, build/image-recipe/)
|
||||||
STARTD_SRC := core/startd.service $(BUILD_SRC)
|
STARTD_SRC := core/startd.service $(BUILD_SRC)
|
||||||
CORE_SRC := $(call ls-files, core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE)
|
CORE_SRC := $(call ls-files, core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE)
|
||||||
WEB_SHARED_SRC := $(call ls-files, web/projects/shared) $(call ls-files, web/projects/marketplace) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist/index.js sdk/baseDist/package.json web/patchdb-ui-seed.json sdk/dist/package.json
|
WEB_SHARED_SRC := $(call ls-files, web/projects/shared) $(call ls-files, web/projects/marketplace) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist/index.js sdk/baseDist/package.json web/patchdb-ui-seed.json sdk/dist/package.json
|
||||||
WEB_UI_SRC := $(call ls-files, web/projects/ui)
|
WEB_UI_SRC := $(call ls-files, web/projects/ui)
|
||||||
WEB_SETUP_WIZARD_SRC := $(call ls-files, web/projects/setup-wizard)
|
WEB_SETUP_WIZARD_SRC := $(call ls-files, web/projects/setup-wizard)
|
||||||
WEB_INSTALL_WIZARD_SRC := $(call ls-files, web/projects/install-wizard)
|
|
||||||
WEB_START_TUNNEL_SRC := $(call ls-files, web/projects/start-tunnel)
|
WEB_START_TUNNEL_SRC := $(call ls-files, web/projects/start-tunnel)
|
||||||
PATCH_DB_CLIENT_SRC := $(shell git ls-files --recurse-submodules patch-db/client)
|
PATCH_DB_CLIENT_SRC := $(shell git ls-files --recurse-submodules patch-db/client)
|
||||||
GZIP_BIN := $(shell which pigz || which gzip)
|
GZIP_BIN := $(shell which pigz || which gzip)
|
||||||
@@ -90,6 +89,7 @@ clean:
|
|||||||
rm -rf container-runtime/node_modules
|
rm -rf container-runtime/node_modules
|
||||||
rm -f container-runtime/*.squashfs
|
rm -f container-runtime/*.squashfs
|
||||||
(cd sdk && make clean)
|
(cd sdk && make clean)
|
||||||
|
rm -rf build/lib/migration-images
|
||||||
rm -f env/*.txt
|
rm -f env/*.txt
|
||||||
|
|
||||||
format:
|
format:
|
||||||
@@ -106,6 +106,10 @@ test-sdk: $(call ls-files, sdk) sdk/base/lib/osBindings/index.ts
|
|||||||
test-container-runtime: container-runtime/node_modules/.package-lock.json $(call ls-files, container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
|
test-container-runtime: container-runtime/node_modules/.package-lock.json $(call ls-files, container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
|
||||||
cd container-runtime && npm test
|
cd container-runtime && npm test
|
||||||
|
|
||||||
|
build/lib/migration-images/.done: build/save-migration-images.sh
|
||||||
|
ARCH=$(ARCH) ./build/save-migration-images.sh build/lib/migration-images
|
||||||
|
touch $@
|
||||||
|
|
||||||
install-cli: $(GIT_HASH_FILE)
|
install-cli: $(GIT_HASH_FILE)
|
||||||
./core/build/build-cli.sh --install
|
./core/build/build-cli.sh --install
|
||||||
|
|
||||||
@@ -140,6 +144,11 @@ install-tunnel: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox
|
|||||||
$(call mkdir,$(DESTDIR)/usr/lib/startos/scripts)
|
$(call mkdir,$(DESTDIR)/usr/lib/startos/scripts)
|
||||||
$(call cp,build/lib/scripts/forward-port,$(DESTDIR)/usr/lib/startos/scripts/forward-port)
|
$(call cp,build/lib/scripts/forward-port,$(DESTDIR)/usr/lib/startos/scripts/forward-port)
|
||||||
|
|
||||||
|
$(call mkdir,$(DESTDIR)/etc/apt/sources.list.d)
|
||||||
|
$(call cp,apt/start9.list,$(DESTDIR)/etc/apt/sources.list.d/start9.list)
|
||||||
|
$(call mkdir,$(DESTDIR)/usr/share/keyrings)
|
||||||
|
$(call cp,apt/start9.gpg,$(DESTDIR)/usr/share/keyrings/start9.gpg)
|
||||||
|
|
||||||
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox: $(CORE_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) web/dist/static/start-tunnel/index.html
|
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox: $(CORE_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) web/dist/static/start-tunnel/index.html
|
||||||
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-tunnelbox.sh
|
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-tunnelbox.sh
|
||||||
|
|
||||||
@@ -151,7 +160,7 @@ results/$(BASENAME).deb: debian/dpkg-build.sh $(call ls-files,debian/startos) $(
|
|||||||
registry-deb: results/$(REGISTRY_BASENAME).deb
|
registry-deb: results/$(REGISTRY_BASENAME).deb
|
||||||
|
|
||||||
results/$(REGISTRY_BASENAME).deb: debian/dpkg-build.sh $(call ls-files,debian/start-registry) $(REGISTRY_TARGETS)
|
results/$(REGISTRY_BASENAME).deb: debian/dpkg-build.sh $(call ls-files,debian/start-registry) $(REGISTRY_TARGETS)
|
||||||
PROJECT=start-registry PLATFORM=$(ARCH) REQUIRES=debian ./build/os-compat/run-compat.sh ./debian/dpkg-build.sh
|
PROJECT=start-registry PLATFORM=$(ARCH) REQUIRES=debian DEPENDS=ca-certificates ./build/os-compat/run-compat.sh ./debian/dpkg-build.sh
|
||||||
|
|
||||||
tunnel-deb: results/$(TUNNEL_BASENAME).deb
|
tunnel-deb: results/$(TUNNEL_BASENAME).deb
|
||||||
|
|
||||||
@@ -184,6 +193,9 @@ install: $(STARTOS_TARGETS)
|
|||||||
|
|
||||||
$(call mkdir,$(DESTDIR)/lib/systemd/system)
|
$(call mkdir,$(DESTDIR)/lib/systemd/system)
|
||||||
$(call cp,core/startd.service,$(DESTDIR)/lib/systemd/system/startd.service)
|
$(call cp,core/startd.service,$(DESTDIR)/lib/systemd/system/startd.service)
|
||||||
|
if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then \
|
||||||
|
sed -i '/^Environment=/a Environment=RUST_BACKTRACE=full' $(DESTDIR)/lib/systemd/system/startd.service; \
|
||||||
|
fi
|
||||||
|
|
||||||
$(call mkdir,$(DESTDIR)/usr/lib)
|
$(call mkdir,$(DESTDIR)/usr/lib)
|
||||||
$(call rm,$(DESTDIR)/usr/lib/startos)
|
$(call rm,$(DESTDIR)/usr/lib/startos)
|
||||||
@@ -237,16 +249,16 @@ update-startbox: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox
|
|||||||
update-deb: results/$(BASENAME).deb # better than update, but only available from debian
|
update-deb: results/$(BASENAME).deb # better than update, but only available from debian
|
||||||
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||||
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
|
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
|
||||||
$(call mkdir,/media/startos/next/tmp/startos-deb)
|
$(call mkdir,/media/startos/next/var/tmp/startos-deb)
|
||||||
$(call cp,results/$(BASENAME).deb,/media/startos/next/tmp/startos-deb/$(BASENAME).deb)
|
$(call cp,results/$(BASENAME).deb,/media/startos/next/var/tmp/startos-deb/$(BASENAME).deb)
|
||||||
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y --reinstall /tmp/startos-deb/$(BASENAME).deb"')
|
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y --reinstall /var/tmp/startos-deb/$(BASENAME).deb"')
|
||||||
|
|
||||||
update-squashfs: results/$(BASENAME).squashfs
|
update-squashfs: results/$(BASENAME).squashfs
|
||||||
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
|
||||||
$(eval SQFS_SUM := $(shell b3sum results/$(BASENAME).squashfs))
|
$(eval SQFS_SUM := $(shell b3sum results/$(BASENAME).squashfs | head -c 32))
|
||||||
$(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}'))
|
$(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}'))
|
||||||
$(call ssh,'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE)')
|
$(call ssh,'sudo /usr/lib/startos/scripts/prune-images $(SQFS_SIZE)')
|
||||||
$(call ssh,'/usr/lib/startos/scripts/prune-boot')
|
$(call ssh,'sudo /usr/lib/startos/scripts/prune-boot')
|
||||||
$(call cp,results/$(BASENAME).squashfs,/media/startos/images/next.rootfs)
|
$(call cp,results/$(BASENAME).squashfs,/media/startos/images/next.rootfs)
|
||||||
$(call ssh,'sudo CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/upgrade /media/startos/images/next.rootfs')
|
$(call ssh,'sudo CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/upgrade /media/startos/images/next.rootfs')
|
||||||
|
|
||||||
@@ -279,7 +291,11 @@ core/bindings/index.ts: $(call ls-files, core) $(ENVIRONMENT_FILE)
|
|||||||
rm -rf core/bindings
|
rm -rf core/bindings
|
||||||
./core/build/build-ts.sh
|
./core/build/build-ts.sh
|
||||||
ls core/bindings/*.ts | sed 's/core\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' | grep -v '"./index"' | tee core/bindings/index.ts
|
ls core/bindings/*.ts | sed 's/core\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' | grep -v '"./index"' | tee core/bindings/index.ts
|
||||||
npm --prefix sdk exec -- prettier --config ./sdk/base/package.json -w ./core/bindings/*.ts
|
if [ -d core/bindings/tunnel ]; then \
|
||||||
|
ls core/bindings/tunnel/*.ts | sed 's/core\/bindings\/tunnel\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' | grep -v '"./index"' > core/bindings/tunnel/index.ts; \
|
||||||
|
echo 'export * as Tunnel from "./tunnel";' >> core/bindings/index.ts; \
|
||||||
|
fi
|
||||||
|
npm --prefix sdk/base exec -- prettier --config=./sdk/base/package.json -w './core/bindings/**/*.ts'
|
||||||
touch core/bindings/index.ts
|
touch core/bindings/index.ts
|
||||||
|
|
||||||
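The `sed` expression above rewrites each generated binding file into a re-export line. For a hypothetical binding named `ServerInfo.ts`, the transformation would look like this:

```sh
# Hypothetical file name, shown only to illustrate the sed rewrite
echo 'core/bindings/ServerInfo.ts' \
  | sed 's/core\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g'
# -> export { ServerInfo } from "./ServerInfo";
```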
sdk/dist/package.json sdk/baseDist/package.json: $(call ls-files, sdk) sdk/base/lib/osBindings/index.ts
|
sdk/dist/package.json sdk/baseDist/package.json: $(call ls-files, sdk) sdk/base/lib/osBindings/index.ts
|
||||||
@@ -325,19 +341,19 @@ web/.angular/.updated: patch-db/client/dist/index.js sdk/baseDist/package.json w
|
|||||||
mkdir -p web/.angular
|
mkdir -p web/.angular
|
||||||
touch web/.angular/.updated
|
touch web/.angular/.updated
|
||||||
|
|
||||||
web/dist/raw/ui/index.html: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
|
web/.i18n-checked: $(WEB_SHARED_SRC) $(WEB_UI_SRC) $(WEB_SETUP_WIZARD_SRC) $(WEB_START_TUNNEL_SRC)
|
||||||
|
npm --prefix web run check:i18n
|
||||||
|
touch web/.i18n-checked
|
||||||
|
|
||||||
|
web/dist/raw/ui/index.html: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
|
||||||
npm --prefix web run build:ui
|
npm --prefix web run build:ui
|
||||||
touch web/dist/raw/ui/index.html
|
touch web/dist/raw/ui/index.html
|
||||||
|
|
||||||
web/dist/raw/setup-wizard/index.html: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
|
web/dist/raw/setup-wizard/index.html: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
|
||||||
npm --prefix web run build:setup
|
npm --prefix web run build:setup
|
||||||
touch web/dist/raw/setup-wizard/index.html
|
touch web/dist/raw/setup-wizard/index.html
|
||||||
|
|
||||||
web/dist/raw/install-wizard/index.html: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
|
web/dist/raw/start-tunnel/index.html: $(WEB_START_TUNNEL_SRC) $(WEB_SHARED_SRC) web/.angular/.updated web/.i18n-checked
|
||||||
npm --prefix web run build:install
|
|
||||||
touch web/dist/raw/install-wizard/index.html
|
|
||||||
|
|
||||||
web/dist/raw/start-tunnel/index.html: $(WEB_START_TUNNEL_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
|
|
||||||
npm --prefix web run build:tunnel
|
npm --prefix web run build:tunnel
|
||||||
touch web/dist/raw/start-tunnel/index.html
|
touch web/dist/raw/start-tunnel/index.html
|
||||||
|
|
||||||
|
|||||||
88
README.md
@@ -7,76 +7,64 @@
|
|||||||
<a href="https://github.com/Start9Labs/start-os/actions/workflows/startos-iso.yaml">
|
<a href="https://github.com/Start9Labs/start-os/actions/workflows/startos-iso.yaml">
|
||||||
<img src="https://github.com/Start9Labs/start-os/actions/workflows/startos-iso.yaml/badge.svg">
|
<img src="https://github.com/Start9Labs/start-os/actions/workflows/startos-iso.yaml/badge.svg">
|
||||||
</a>
|
</a>
|
||||||
<a href="https://heyapollo.com/product/startos">
|
<a href="https://heyapollo.com/product/startos">
|
||||||
<img alt="Static Badge" src="https://img.shields.io/badge/apollo-review%20%E2%AD%90%E2%AD%90%E2%AD%90%E2%AD%90%E2%AD%90%20-slateblue">
|
<img alt="Static Badge" src="https://img.shields.io/badge/apollo-review%20%E2%AD%90%E2%AD%90%E2%AD%90%E2%AD%90%E2%AD%90%20-slateblue">
|
||||||
</a>
|
</a>
|
||||||
<a href="https://twitter.com/start9labs">
|
<a href="https://twitter.com/start9labs">
|
||||||
<img alt="X (formerly Twitter) Follow" src="https://img.shields.io/twitter/follow/start9labs">
|
<img alt="X (formerly Twitter) Follow" src="https://img.shields.io/twitter/follow/start9labs">
|
||||||
</a>
|
</a>
|
||||||
<a href="https://matrix.to/#/#community:matrix.start9labs.com">
|
|
||||||
<img alt="Static Badge" src="https://img.shields.io/badge/community-matrix-yellow?logo=matrix">
|
|
||||||
</a>
|
|
||||||
<a href="https://t.me/start9_labs">
|
|
||||||
<img alt="Static Badge" src="https://img.shields.io/badge/community-telegram-blue?logo=telegram">
|
|
||||||
</a>
|
|
||||||
<a href="https://docs.start9.com">
|
<a href="https://docs.start9.com">
|
||||||
<img alt="Static Badge" src="https://img.shields.io/badge/docs-orange?label=%F0%9F%91%A4%20support">
|
<img alt="Static Badge" src="https://img.shields.io/badge/docs-orange?label=%F0%9F%91%A4%20support">
|
||||||
</a>
|
</a>
|
||||||
<a href="https://matrix.to/#/#community-dev:matrix.start9labs.com">
|
<a href="https://matrix.to/#/#dev-startos:matrix.start9labs.com">
|
||||||
<img alt="Static Badge" src="https://img.shields.io/badge/developer-matrix-darkcyan?logo=matrix">
|
<img alt="Static Badge" src="https://img.shields.io/badge/developer-matrix-darkcyan?logo=matrix">
|
||||||
</a>
|
</a>
|
||||||
<a href="https://start9.com">
|
<a href="https://start9.com">
|
||||||
<img alt="Website" src="https://img.shields.io/website?up_message=online&down_message=offline&url=https%3A%2F%2Fstart9.com&logo=website&label=%F0%9F%8C%90%20website">
|
<img alt="Website" src="https://img.shields.io/website?up_message=online&down_message=offline&url=https%3A%2F%2Fstart9.com&logo=website&label=%F0%9F%8C%90%20website">
|
||||||
</a>
|
</a>
|
||||||
</div>
|
</div>
|
||||||
<br />
|
|
||||||
<div align="center">
|
|
||||||
<h3>
|
|
||||||
Welcome to the era of Sovereign Computing
|
|
||||||
</h3>
|
|
||||||
<p>
|
|
||||||
StartOS is an open source Linux distribution optimized for running a personal server. It facilitates the discovery, installation, network configuration, service configuration, data backup, dependency management, and health monitoring of self-hosted software services.
|
|
||||||
</p>
|
|
||||||
</div>
|
|
||||||
<br />
|
|
||||||
<p align="center">
|
|
||||||
<img src="assets/StartOS.png" alt="StartOS" width="85%">
|
|
||||||
</p>
|
|
||||||
<br />
|
|
||||||
|
|
||||||
## Running StartOS
|
## What is StartOS?
|
||||||
> [!WARNING]
|
|
||||||
> StartOS is in beta. It lacks features. It doesn't always work perfectly. Start9 servers are not plug and play. Using them properly requires some effort and patience. Please do not use StartOS or purchase a server if you are unable or unwilling to follow instructions and learn new concepts.
|
|
||||||
|
|
||||||
### 💰 Buy a Start9 server
|
StartOS is an open-source Linux distribution for running a personal server. It handles discovery, installation, network configuration, data backup, dependency management, and health monitoring of self-hosted services.
|
||||||
This is the most convenient option. Simply [buy a server](https://store.start9.com) from Start9 and plug it in.
|
|
||||||
|
|
||||||
### 👷 Build your own server
|
**Tech stack:** Rust backend (Tokio/Axum), Angular frontend, Node.js container runtime with LXC, and a custom diff-based database ([Patch-DB](https://github.com/Start9Labs/patch-db)) for reactive state synchronization.
|
||||||
This option is easier than you might imagine, and there are 4 reasons why you might prefer it:
|
|
||||||
1. You already have hardware
|
|
||||||
1. You want to save on shipping costs
|
|
||||||
1. You prefer not to divulge your physical address
|
|
||||||
1. You just like building things
|
|
||||||
|
|
||||||
To pursue this option, follow one of our [DIY guides](https://start9.com/latest/diy).
|
Services run in isolated LXC containers, packaged as [S9PKs](https://github.com/Start9Labs/start-os/blob/master/core/s9pk-structure.md) — a signed, merkle-archived format that supports partial downloads and cryptographic verification.
|
||||||
|
|
||||||
## ❤️ Contributing
|
## What can you do with it?
|
||||||
There are multiple ways to contribute: work directly on StartOS, package a service for the marketplace, or help with documentation and guides. To learn more about contributing, see [here](https://start9.com/contribute/).
|
|
||||||
|
|
||||||
To report security issues, please email our security team - security@start9.com.
|
StartOS lets you self-host services that would otherwise depend on third-party cloud providers — giving you full ownership of your data and infrastructure.
|
||||||
|
|
||||||
## 🌎 Marketplace
|
Browse available services on the [Start9 Marketplace](https://marketplace.start9.com/), including:
|
||||||
There are dozens of services available for StartOS, and new ones are being added all the time. Check out the full list of available services [here](https://marketplace.start9.com/marketplace). To read more about the Marketplace ecosystem, check out this [blog post](https://blog.start9.com/start9-marketplace-strategy/)
|
|
||||||
|
|
||||||
## 🖥️ User Interface Screenshots
|
- **Bitcoin & Lightning** — Run a full Bitcoin node, Lightning node, BTCPay Server, and other payment infrastructure
|
||||||
|
- **Communication** — Self-host Matrix, SimpleX, or other messaging platforms
|
||||||
|
- **Cloud Storage** — Run Nextcloud, Vaultwarden, and other productivity tools
|
||||||
|
|
||||||
<p align="center">
|
Services are added by the community. If a service you want isn't available, you can [package it yourself](https://github.com/Start9Labs/ai-service-packaging/).
|
||||||
<img src="assets/registry.png" alt="StartOS Marketplace" width="49%">
|
|
||||||
<img src="assets/community.png" alt="StartOS Community Registry" width="49%">
|
## Getting StartOS
|
||||||
<img src="assets/c-lightning.png" alt="StartOS NextCloud Service" width="49%">
|
|
||||||
<img src="assets/btcpay.png" alt="StartOS BTCPay Service" width="49%">
|
### Buy a Start9 server
|
||||||
<img src="assets/nextcloud.png" alt="StartOS System Settings" width="49%">
|
|
||||||
<img src="assets/system.png" alt="StartOS System Settings" width="49%">
|
The easiest path. [Buy a server](https://store.start9.com) from Start9 and plug it in.
|
||||||
<img src="assets/welcome.png" alt="StartOS System Settings" width="49%">
|
|
||||||
<img src="assets/logs.png" alt="StartOS System Settings" width="49%">
|
### Build your own
|
||||||
</p>
|
|
||||||
|
Follow the [install guide](https://docs.start9.com/start-os/installing.html) to install StartOS on your own hardware. Reasons to go this route:
|
||||||
|
|
||||||
|
1. You already have compatible hardware
|
||||||
|
2. You want to save on shipping costs
|
||||||
|
3. You prefer not to share your physical address
|
||||||
|
4. You enjoy building things
|
||||||
|
|
||||||
|
### Build from source
|
||||||
|
|
||||||
|
See [CONTRIBUTING.md](CONTRIBUTING.md) for environment setup, build instructions, and development workflow.
|
||||||
|
|
||||||
|
## Contributing
|
||||||
|
|
||||||
|
There are multiple ways to contribute: work directly on StartOS, package a service for the marketplace, or help with documentation and guides. See [CONTRIBUTING.md](CONTRIBUTING.md) or visit [start9.com/contribute](https://start9.com/contribute/).
|
||||||
|
|
||||||
|
To report security issues, email [security@start9.com](mailto:security@start9.com).
|
||||||
|
|||||||
@@ -1,95 +0,0 @@
|
|||||||
# StartTunnel
|
|
||||||
|
|
||||||
A self-hosted WireGuard VPN optimized for creating VLANs and reverse tunneling to personal servers.
|
|
||||||
|
|
||||||
You can think of StartTunnel as a "virtual router in the cloud".
|
|
||||||
|
|
||||||
Use it for private remote access to self-hosted services running on a personal server, or to expose self-hosted services to the public Internet without revealing the host server's IP address.
|
|
||||||
|
|
||||||
## Features
|
|
||||||
|
|
||||||
- **Create Subnets**: Each subnet creates a private, virtual local area network (VLAN), similar to the LAN created by a home router.
|
|
||||||
|
|
||||||
- **Add Devices**: When you add a device (server, phone, laptop) to a subnet, it receives a LAN IP address on that subnet as well as a unique WireGuard config that must be copied, downloaded, or scanned into the device.
|
|
||||||
|
|
||||||
- **Forward Ports**: Forwarding a port creates a "reverse tunnel", exposing a specific port on a specific device to the public Internet.
|
|
||||||
|
|
||||||
## Installation
|
|
||||||
|
|
||||||
1. Rent a low-cost VPS. For most use cases, the cheapest option should be enough.
|
|
||||||
|
|
||||||
- It must have a dedicated public IP address.
|
|
||||||
- For compute (CPU), memory (RAM), and storage (disk), choose the minimum spec.
|
|
||||||
- For transfer (bandwidth), it depends on (1) your use case and (2) your home Internet's _upload_ speed. Even if you intend to serve large files or stream content from your server, there is no reason to pay for speeds that exceed your home Internet's upload speed.
|
|
||||||
|
|
||||||
1. Provision the VPS with the latest version of Debian.
|
|
||||||
|
|
||||||
1. Access the VPS via SSH.
|
|
||||||
|
|
||||||
1. Run the StartTunnel install script:
|
|
||||||
|
|
||||||
curl -fsSL https://start9labs.github.io/start-tunnel | sh
|
|
||||||
|
|
||||||
1. [Initialize the web interface](#web-interface) (recommended)
|
|
||||||
|
|
||||||
## Updating
|
|
||||||
|
|
||||||
Simply re-run the install command:
|
|
||||||
|
|
||||||
```sh
|
|
||||||
curl -fsSL https://start9labs.github.io/start-tunnel | sh
|
|
||||||
```
|
|
||||||
|
|
||||||
## CLI
|
|
||||||
|
|
||||||
By default, StartTunnel is managed via the `start-tunnel` command line interface, which is self-documented.
|
|
||||||
|
|
||||||
```
|
|
||||||
start-tunnel --help
|
|
||||||
```
|
|
||||||
|
|
||||||
## Web Interface
|
|
||||||
|
|
||||||
Enable the web interface (recommended in most cases) to access your StartTunnel from the browser or via API.
|
|
||||||
|
|
||||||
1. Initialize the web interface.
|
|
||||||
|
|
||||||
start-tunnel web init
|
|
||||||
|
|
||||||
1. If your VPS has multiple public IP addresses, you will be prompted to select the IP address at which to host the web interface.
|
|
||||||
|
|
||||||
1. When prompted, enter the port at which to host the web interface. The default is 8443, and we recommend using it. If you change the default, choose an uncommon port to avoid future conflicts.
|
|
||||||
|
|
||||||
1. To access your StartTunnel web interface securely over HTTPS, you need an SSL certificate. When prompted, select whether to autogenerate a certificate or provide your own. _This is only for accessing your StartTunnel web interface_.
|
|
||||||
|
|
||||||
1. You will receive a success message with 3 pieces of information:
|
|
||||||
|
|
||||||
- **<https://IP:port>**: the URL where you can reach your personal web interface.
|
|
||||||
- **Password**: an autogenerated password for your interface. If you lose/forget it, you can reset it using the start-tunnel CLI.
|
|
||||||
- **Root Certificate Authority**: the Root CA of your StartTunnel instance.
|
|
||||||
|
|
||||||
1. If you autogenerated your SSL certificate, visiting the `https://IP:port` URL in the browser will warn you that the website is insecure. This is expected. You have two options for getting past this warning:
|
|
||||||
- Option 1 (recommended): [Trust your StartTunnel Root CA on your connecting device](#trusting-your-starttunnel-root-ca).
|
|
||||||
- Option 2: bypass the warning in the browser, creating a one-time security exception.
|
|
||||||
|
|
||||||
### Trusting your StartTunnel Root CA
|
|
||||||
|
|
||||||
1. Copy the contents of your Root CA (starting with -----BEGIN CERTIFICATE----- and ending with -----END CERTIFICATE-----).
|
|
||||||
|
|
||||||
2. Open a text editor:
|
|
||||||
|
|
||||||
- Linux: gedit, nano, or any editor
|
|
||||||
- Mac: TextEdit
|
|
||||||
- Windows: Notepad
|
|
||||||
|
|
||||||
3. Paste the contents of your Root CA.
|
|
||||||
|
|
||||||
4. Save the file with a `.crt` extension (e.g. `start-tunnel.crt`), making sure it saves as plain text, not rich text.
|
|
||||||
|
|
||||||
5. Trust the Root CA on your client device(s):
|
|
||||||
|
|
||||||
- [Linux](https://staging.docs.start9.com/device-guides/linux/ca.html)
|
|
||||||
- [Mac](https://staging.docs.start9.com/device-guides/mac/ca.html)
|
|
||||||
- [Windows](https://staging.docs.start9.com/device-guides/windows/ca.html)
|
|
||||||
- [Android/Graphene](https://staging.docs.start9.com/device-guides/android/ca.html)
|
|
||||||
- [iOS](https://staging.docs.start9.com/device-guides/ios/ca.html)
|
|
||||||
BIN
apt/start9.gpg
Normal file
1
apt/start9.list
Normal file
@@ -0,0 +1 @@
|
|||||||
|
deb [arch=amd64,arm64,riscv64 signed-by=/usr/share/keyrings/start9.gpg] https://start9-debs.nyc3.cdn.digitaloceanspaces.com stable main
|
||||||
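The Makefile `install` rule earlier in this diff copies these two files into place. On a stock Debian system, the equivalent manual steps would be roughly:

```sh
# Manual sketch of what the install rule does for the apt source
sudo cp apt/start9.gpg /usr/share/keyrings/start9.gpg
sudo cp apt/start9.list /etc/apt/sources.list.d/start9.list
sudo apt update
```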
|
Before Width: | Height: | Size: 2.1 MiB |
|
Before Width: | Height: | Size: 396 KiB |
|
Before Width: | Height: | Size: 402 KiB |
|
Before Width: | Height: | Size: 591 KiB |
BIN
assets/logs.png
|
Before Width: | Height: | Size: 1.6 MiB |
|
Before Width: | Height: | Size: 319 KiB |
|
Before Width: | Height: | Size: 521 KiB |
|
Before Width: | Height: | Size: 331 KiB |
|
Before Width: | Height: | Size: 402 KiB |
0
build/README.md
Normal file
139
build/apt/publish-deb.sh
Executable file
@@ -0,0 +1,139 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
#
|
||||||
|
# Publish .deb files to an S3-hosted apt repository.
|
||||||
|
#
|
||||||
|
# Usage: publish-deb.sh <deb-file-or-directory> [<deb-file-or-directory> ...]
|
||||||
|
#
|
||||||
|
# Environment variables:
|
||||||
|
# GPG_PRIVATE_KEY - Armored GPG private key (imported if set)
|
||||||
|
# GPG_KEY_ID - GPG key ID for signing
|
||||||
|
# S3_ACCESS_KEY - S3 access key
|
||||||
|
# S3_SECRET_KEY - S3 secret key
|
||||||
|
# S3_ENDPOINT - S3 endpoint (default: https://nyc3.digitaloceanspaces.com)
|
||||||
|
# S3_BUCKET - S3 bucket name (default: start9-debs)
|
||||||
|
# SUITE - Apt suite name (default: stable)
|
||||||
|
# COMPONENT - Apt component name (default: main)
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
if [ $# -eq 0 ]; then
|
||||||
|
echo "Usage: $0 <deb-file-or-directory> [...]" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
BUCKET="${S3_BUCKET:-start9-debs}"
|
||||||
|
ENDPOINT="${S3_ENDPOINT:-https://nyc3.digitaloceanspaces.com}"
|
||||||
|
GPG_KEY_ID="${GPG_KEY_ID:-5259ADFC2D63C217}"
|
||||||
|
SUITE="${SUITE:-stable}"
|
||||||
|
COMPONENT="${COMPONENT:-main}"
|
||||||
|
REPO_DIR="$(mktemp -d)"
|
||||||
|
|
||||||
|
cleanup() {
|
||||||
|
rm -rf "$REPO_DIR"
|
||||||
|
}
|
||||||
|
trap cleanup EXIT
|
||||||
|
|
||||||
|
# Import GPG key if provided
|
||||||
|
if [ -n "$GPG_PRIVATE_KEY" ]; then
|
||||||
|
echo "$GPG_PRIVATE_KEY" | gpg --batch --import 2>/dev/null
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Configure s3cmd
|
||||||
|
if [ -n "$S3_ACCESS_KEY" ] && [ -n "$S3_SECRET_KEY" ]; then
|
||||||
|
S3CMD_CONFIG="$(mktemp)"
|
||||||
|
cat > "$S3CMD_CONFIG" <<EOF
|
||||||
|
[default]
|
||||||
|
access_key = ${S3_ACCESS_KEY}
|
||||||
|
secret_key = ${S3_SECRET_KEY}
|
||||||
|
host_base = $(echo "$ENDPOINT" | sed 's|https://||')
|
||||||
|
host_bucket = %(bucket)s.$(echo "$ENDPOINT" | sed 's|https://||')
|
||||||
|
use_https = True
|
||||||
|
EOF
|
||||||
|
s3() {
|
||||||
|
s3cmd -c "$S3CMD_CONFIG" "$@"
|
||||||
|
}
|
||||||
|
else
|
||||||
|
# Fall back to default ~/.s3cfg
|
||||||
|
S3CMD_CONFIG=""
|
||||||
|
s3() {
|
||||||
|
s3cmd "$@"
|
||||||
|
}
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Sync existing repo from S3
|
||||||
|
echo "Syncing existing repo from s3://${BUCKET}/ ..."
|
||||||
|
s3 sync --no-mime-magic "s3://${BUCKET}/" "$REPO_DIR/" 2>/dev/null || true
|
||||||
|
|
||||||
|
# Collect all .deb files from arguments
|
||||||
|
DEB_FILES=()
|
||||||
|
for arg in "$@"; do
|
||||||
|
if [ -d "$arg" ]; then
|
||||||
|
while IFS= read -r -d '' f; do
|
||||||
|
DEB_FILES+=("$f")
|
||||||
|
done < <(find "$arg" -name '*.deb' -print0)
|
||||||
|
elif [ -f "$arg" ]; then
|
||||||
|
DEB_FILES+=("$arg")
|
||||||
|
else
|
||||||
|
echo "Warning: $arg is not a file or directory, skipping" >&2
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
if [ ${#DEB_FILES[@]} -eq 0 ]; then
|
||||||
|
echo "No .deb files found" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Copy each deb to the pool, renaming to standard format
|
||||||
|
for deb in "${DEB_FILES[@]}"; do
|
||||||
|
PKG_NAME="$(dpkg-deb --field "$deb" Package)"
|
||||||
|
POOL_DIR="$REPO_DIR/pool/${COMPONENT}/${PKG_NAME:0:1}/${PKG_NAME}"
|
||||||
|
mkdir -p "$POOL_DIR"
|
||||||
|
cp "$deb" "$POOL_DIR/"
|
||||||
|
dpkg-name -o "$POOL_DIR/$(basename "$deb")" 2>/dev/null || true
|
||||||
|
echo "Added: $(basename "$deb") -> pool/${COMPONENT}/${PKG_NAME:0:1}/${PKG_NAME}/"
|
||||||
|
done
|
||||||
|
|
||||||
|
# Generate Packages indices for each architecture
|
||||||
|
for arch in amd64 arm64 riscv64; do
|
||||||
|
BINARY_DIR="$REPO_DIR/dists/${SUITE}/${COMPONENT}/binary-${arch}"
|
||||||
|
mkdir -p "$BINARY_DIR"
|
||||||
|
(
|
||||||
|
cd "$REPO_DIR"
|
||||||
|
dpkg-scanpackages --multiversion --arch "$arch" pool/ > "$BINARY_DIR/Packages"
|
||||||
|
gzip -k -f "$BINARY_DIR/Packages"
|
||||||
|
)
|
||||||
|
echo "Generated Packages index for ${arch}"
|
||||||
|
done
|
||||||
|
|
||||||
|
# Generate Release file
|
||||||
|
(
|
||||||
|
cd "$REPO_DIR/dists/${SUITE}"
|
||||||
|
apt-ftparchive release \
|
||||||
|
-o "APT::FTPArchive::Release::Origin=Start9" \
|
||||||
|
-o "APT::FTPArchive::Release::Label=Start9" \
|
||||||
|
-o "APT::FTPArchive::Release::Suite=${SUITE}" \
|
||||||
|
-o "APT::FTPArchive::Release::Codename=${SUITE}" \
|
||||||
|
-o "APT::FTPArchive::Release::Architectures=amd64 arm64 riscv64" \
|
||||||
|
-o "APT::FTPArchive::Release::Components=${COMPONENT}" \
|
||||||
|
. > Release
|
||||||
|
)
|
||||||
|
echo "Generated Release file"
|
||||||
|
|
||||||
|
# Sign if GPG key is available
|
||||||
|
if [ -n "$GPG_KEY_ID" ]; then
|
||||||
|
(
|
||||||
|
cd "$REPO_DIR/dists/${SUITE}"
|
||||||
|
gpg --default-key "$GPG_KEY_ID" --batch --yes --detach-sign -o Release.gpg Release
|
||||||
|
gpg --default-key "$GPG_KEY_ID" --batch --yes --clearsign -o InRelease Release
|
||||||
|
)
|
||||||
|
echo "Signed Release file with key ${GPG_KEY_ID}"
|
||||||
|
else
|
||||||
|
echo "Warning: GPG_KEY_ID not set, Release file is unsigned" >&2
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Upload to S3
|
||||||
|
echo "Uploading to s3://${BUCKET}/ ..."
|
||||||
|
s3 sync --acl-public --no-mime-magic "$REPO_DIR/" "s3://${BUCKET}/"
|
||||||
|
|
||||||
|
[ -n "$S3CMD_CONFIG" ] && rm -f "$S3CMD_CONFIG"
|
||||||
|
echo "Done."
|
||||||
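Based on the script's header comment, a typical invocation (the credentials are placeholders) would look like:

```sh
# Publish all .deb files under results/ to the default bucket,
# signing the Release file with the given key
GPG_KEY_ID=<key-id> \
S3_ACCESS_KEY=<access-key> \
S3_SECRET_KEY=<secret-key> \
./build/apt/publish-deb.sh results/
```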
@@ -3,6 +3,7 @@ avahi-utils
|
|||||||
b3sum
|
b3sum
|
||||||
bash-completion
|
bash-completion
|
||||||
beep
|
beep
|
||||||
|
binfmt-support
|
||||||
bmon
|
bmon
|
||||||
btrfs-progs
|
btrfs-progs
|
||||||
ca-certificates
|
ca-certificates
|
||||||
@@ -10,11 +11,13 @@ cifs-utils
|
|||||||
conntrack
|
conntrack
|
||||||
cryptsetup
|
cryptsetup
|
||||||
curl
|
curl
|
||||||
|
dkms
|
||||||
dmidecode
|
dmidecode
|
||||||
dnsutils
|
dnsutils
|
||||||
dosfstools
|
dosfstools
|
||||||
e2fsprogs
|
e2fsprogs
|
||||||
ecryptfs-utils
|
ecryptfs-utils
|
||||||
|
equivs
|
||||||
exfatprogs
|
exfatprogs
|
||||||
flashrom
|
flashrom
|
||||||
fuse3
|
fuse3
|
||||||
@@ -34,6 +37,7 @@ lvm2
|
|||||||
lxc
|
lxc
|
||||||
magic-wormhole
|
magic-wormhole
|
||||||
man-db
|
man-db
|
||||||
|
mokutil
|
||||||
ncdu
|
ncdu
|
||||||
net-tools
|
net-tools
|
||||||
network-manager
|
network-manager
|
||||||
@@ -44,6 +48,7 @@ openssh-server
|
|||||||
podman
|
podman
|
||||||
psmisc
|
psmisc
|
||||||
qemu-guest-agent
|
qemu-guest-agent
|
||||||
|
qemu-user-static
|
||||||
rfkill
|
rfkill
|
||||||
rsync
|
rsync
|
||||||
samba-common-bin
|
samba-common-bin
|
||||||
@@ -52,6 +57,7 @@ socat
|
|||||||
sqlite3
|
sqlite3
|
||||||
squashfs-tools
|
squashfs-tools
|
||||||
squashfs-tools-ng
|
squashfs-tools-ng
|
||||||
|
ssl-cert
|
||||||
sudo
|
sudo
|
||||||
systemd
|
systemd
|
||||||
systemd-resolved
|
systemd-resolved
|
||||||
|
|||||||
1
build/dpkg-deps/dev.depends
Normal file
@@ -0,0 +1 @@
|
|||||||
|
+ nmap
|
||||||
@@ -9,6 +9,13 @@ FEATURES+=("${ARCH}")
|
|||||||
if [ "$ARCH" != "$PLATFORM" ]; then
|
if [ "$ARCH" != "$PLATFORM" ]; then
|
||||||
FEATURES+=("${PLATFORM}")
|
FEATURES+=("${PLATFORM}")
|
||||||
fi
|
fi
|
||||||
|
if [[ "$PLATFORM" =~ -nonfree$ ]]; then
|
||||||
|
FEATURES+=("nonfree")
|
||||||
|
fi
|
||||||
|
if [[ "$PLATFORM" =~ -nvidia$ ]]; then
|
||||||
|
FEATURES+=("nonfree")
|
||||||
|
FEATURES+=("nvidia")
|
||||||
|
fi
|
||||||
|
|
||||||
feature_file_checker='
|
feature_file_checker='
|
||||||
/^#/ { next }
|
/^#/ { next }
|
||||||
|
|||||||
7
build/dpkg-deps/nonfree.depends
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
+ firmware-amd-graphics
|
||||||
|
+ firmware-atheros
|
||||||
|
+ firmware-brcm80211
|
||||||
|
+ firmware-iwlwifi
|
||||||
|
+ firmware-libertas
|
||||||
|
+ firmware-misc-nonfree
|
||||||
|
+ firmware-realtek
|
||||||
1
build/dpkg-deps/nvidia.depends
Normal file
@@ -0,0 +1 @@
|
|||||||
|
+ nvidia-container-toolkit
|
||||||
@@ -1,5 +1,6 @@
|
|||||||
- grub-efi
|
+ gdisk
|
||||||
+ parted
|
+ parted
|
||||||
|
+ u-boot-rpi
|
||||||
+ raspberrypi-net-mods
|
+ raspberrypi-net-mods
|
||||||
+ raspberrypi-sys-mods
|
+ raspberrypi-sys-mods
|
||||||
+ raspi-config
|
+ raspi-config
|
||||||
|
|||||||
@@ -23,6 +23,8 @@ RUN apt-get update && \
|
|||||||
squashfs-tools \
|
squashfs-tools \
|
||||||
rsync \
|
rsync \
|
||||||
b3sum \
|
b3sum \
|
||||||
|
btrfs-progs \
|
||||||
|
gdisk \
|
||||||
dpkg-dev
|
dpkg-dev
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,7 +1,6 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
MAX_IMG_LEN=$((4 * 1024 * 1024 * 1024)) # 4GB
|
|
||||||
|
|
||||||
echo "==== StartOS Image Build ===="
|
echo "==== StartOS Image Build ===="
|
||||||
|
|
||||||
@@ -34,14 +33,14 @@ fi
|
|||||||
IMAGE_BASENAME=startos-${VERSION_FULL}_${IB_TARGET_PLATFORM}
|
IMAGE_BASENAME=startos-${VERSION_FULL}_${IB_TARGET_PLATFORM}
|
||||||
|
|
||||||
BOOTLOADERS=grub-efi
|
BOOTLOADERS=grub-efi
|
||||||
if [ "$IB_TARGET_PLATFORM" = "x86_64" ] || [ "$IB_TARGET_PLATFORM" = "x86_64-nonfree" ]; then
|
if [ "$IB_TARGET_PLATFORM" = "x86_64" ] || [ "$IB_TARGET_PLATFORM" = "x86_64-nonfree" ] || [ "$IB_TARGET_PLATFORM" = "x86_64-nvidia" ]; then
|
||||||
IB_TARGET_ARCH=amd64
|
IB_TARGET_ARCH=amd64
|
||||||
QEMU_ARCH=x86_64
|
QEMU_ARCH=x86_64
|
||||||
BOOTLOADERS=grub-efi,syslinux
|
BOOTLOADERS=grub-efi,syslinux
|
||||||
elif [ "$IB_TARGET_PLATFORM" = "aarch64" ] || [ "$IB_TARGET_PLATFORM" = "aarch64-nonfree" ] || [ "$IB_TARGET_PLATFORM" = "raspberrypi" ] || [ "$IB_TARGET_PLATFORM" = "rockchip64" ]; then
|
elif [ "$IB_TARGET_PLATFORM" = "aarch64" ] || [ "$IB_TARGET_PLATFORM" = "aarch64-nonfree" ] || [ "$IB_TARGET_PLATFORM" = "aarch64-nvidia" ] || [ "$IB_TARGET_PLATFORM" = "raspberrypi" ] || [ "$IB_TARGET_PLATFORM" = "rockchip64" ]; then
|
||||||
IB_TARGET_ARCH=arm64
|
IB_TARGET_ARCH=arm64
|
||||||
QEMU_ARCH=aarch64
|
QEMU_ARCH=aarch64
|
||||||
elif [ "$IB_TARGET_PLATFORM" = "riscv64" ]; then
|
elif [ "$IB_TARGET_PLATFORM" = "riscv64" ] || [ "$IB_TARGET_PLATFORM" = "riscv64-nonfree" ]; then
|
||||||
IB_TARGET_ARCH=riscv64
|
IB_TARGET_ARCH=riscv64
|
||||||
QEMU_ARCH=riscv64
|
QEMU_ARCH=riscv64
|
||||||
else
|
else
|
||||||
@@ -60,9 +59,13 @@ mkdir -p $prep_results_dir
|
|||||||
cd $prep_results_dir
|
cd $prep_results_dir
|
||||||
|
|
||||||
NON_FREE=
|
NON_FREE=
|
||||||
if [[ "${IB_TARGET_PLATFORM}" =~ -nonfree$ ]] || [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
if [[ "${IB_TARGET_PLATFORM}" =~ -nonfree$ ]] || [[ "${IB_TARGET_PLATFORM}" =~ -nvidia$ ]] || [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
NON_FREE=1
|
NON_FREE=1
|
||||||
fi
|
fi
|
||||||
|
NVIDIA=
|
||||||
|
if [[ "${IB_TARGET_PLATFORM}" =~ -nvidia$ ]]; then
|
||||||
|
NVIDIA=1
|
||||||
|
fi
|
||||||
IMAGE_TYPE=iso
|
IMAGE_TYPE=iso
|
||||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ] || [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ] || [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then
|
||||||
IMAGE_TYPE=img
|
IMAGE_TYPE=img
|
||||||
@@ -73,7 +76,7 @@ if [ "$NON_FREE" = 1 ]; then
|
|||||||
if [ "$IB_SUITE" = "bullseye" ]; then
|
if [ "$IB_SUITE" = "bullseye" ]; then
|
||||||
ARCHIVE_AREAS="$ARCHIVE_AREAS non-free"
|
ARCHIVE_AREAS="$ARCHIVE_AREAS non-free"
|
||||||
else
|
else
|
||||||
ARCHIVE_AREAS="$ARCHIVE_AREAS non-free-firmware"
|
ARCHIVE_AREAS="$ARCHIVE_AREAS non-free non-free-firmware"
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -101,7 +104,7 @@ lb config \
|
|||||||
--iso-preparer "START9 LABS; HTTPS://START9.COM" \
|
--iso-preparer "START9 LABS; HTTPS://START9.COM" \
|
||||||
--iso-publisher "START9 LABS; HTTPS://START9.COM" \
|
--iso-publisher "START9 LABS; HTTPS://START9.COM" \
|
||||||
--backports true \
|
--backports true \
|
||||||
--bootappend-live "boot=live noautologin" \
|
--bootappend-live "boot=live noautologin console=tty0" \
|
||||||
--bootloaders $BOOTLOADERS \
|
--bootloaders $BOOTLOADERS \
|
||||||
--cache false \
|
--cache false \
|
||||||
--mirror-bootstrap "https://deb.debian.org/debian/" \
|
--mirror-bootstrap "https://deb.debian.org/debian/" \
|
||||||
@@ -128,6 +131,15 @@ ff02::1 ip6-allnodes
|
|||||||
ff02::2 ip6-allrouters
|
ff02::2 ip6-allrouters
|
||||||
EOT
|
EOT
|
||||||
|
|
||||||
|
if [[ "${IB_OS_ENV}" =~ (^|-)dev($|-) ]]; then
|
||||||
|
mkdir -p config/includes.chroot/etc/ssh/sshd_config.d
|
||||||
|
echo "PasswordAuthentication yes" > config/includes.chroot/etc/ssh/sshd_config.d/dev-password-auth.conf
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Installer marker file (used by installed GRUB to detect the live USB)
|
||||||
|
mkdir -p config/includes.binary
|
||||||
|
touch config/includes.binary/.startos-installer
|
||||||
|
|
||||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
mkdir -p config/includes.chroot
|
mkdir -p config/includes.chroot
|
||||||
git clone --depth=1 --branch=stable https://github.com/raspberrypi/rpi-firmware.git config/includes.chroot/boot
|
git clone --depth=1 --branch=stable https://github.com/raspberrypi/rpi-firmware.git config/includes.chroot/boot
|
||||||
@@ -154,9 +166,12 @@ prompt 0
|
|||||||
timeout 50
|
timeout 50
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
cp $SOURCE_DIR/splash.png config/bootloaders/syslinux_common/splash.png
|
# Extract splash.png from the deb package
|
||||||
cp $SOURCE_DIR/splash.png config/bootloaders/isolinux/splash.png
|
dpkg-deb --fsys-tarfile $DEB_PATH | tar --to-stdout -xf - ./usr/lib/startos/splash.png > /tmp/splash.png
|
||||||
cp $SOURCE_DIR/splash.png config/bootloaders/grub-pc/splash.png
|
cp /tmp/splash.png config/bootloaders/syslinux_common/splash.png
|
||||||
|
cp /tmp/splash.png config/bootloaders/isolinux/splash.png
|
||||||
|
cp /tmp/splash.png config/bootloaders/grub-pc/splash.png
|
||||||
|
rm /tmp/splash.png
|
||||||
|
|
||||||
sed -i -e '2i set timeout=5' config/bootloaders/grub-pc/config.cfg
|
sed -i -e '2i set timeout=5' config/bootloaders/grub-pc/config.cfg
|
||||||
|
|
||||||
@@ -165,7 +180,13 @@ sed -i -e '2i set timeout=5' config/bootloaders/grub-pc/config.cfg
|
|||||||
mkdir -p config/archives
|
mkdir -p config/archives
|
||||||
|
|
||||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
curl -fsSL https://archive.raspberrypi.com/debian/raspberrypi.gpg.key | gpg --dearmor -o config/archives/raspi.key
|
# Fetch the keyring package (not the old raspberrypi.gpg.key, which has
|
||||||
|
# SHA1-only binding signatures that sqv on Trixie rejects).
|
||||||
|
KEYRING_DEB=$(mktemp)
|
||||||
|
curl -fsSL -o "$KEYRING_DEB" https://archive.raspberrypi.com/debian/pool/main/r/raspberrypi-archive-keyring/raspberrypi-archive-keyring_2025.1+rpt1_all.deb
|
||||||
|
dpkg-deb -x "$KEYRING_DEB" "$KEYRING_DEB.d"
|
||||||
|
cp "$KEYRING_DEB.d/usr/share/keyrings/raspberrypi-archive-keyring.gpg" config/archives/raspi.key
|
||||||
|
rm -rf "$KEYRING_DEB" "$KEYRING_DEB.d"
|
||||||
echo "deb [arch=${IB_TARGET_ARCH} signed-by=/etc/apt/trusted.gpg.d/raspi.key.gpg] https://archive.raspberrypi.com/debian/ ${IB_SUITE} main" > config/archives/raspi.list
|
echo "deb [arch=${IB_TARGET_ARCH} signed-by=/etc/apt/trusted.gpg.d/raspi.key.gpg] https://archive.raspberrypi.com/debian/ ${IB_SUITE} main" > config/archives/raspi.list
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@@ -174,40 +195,151 @@ if [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then
|
|||||||
echo "deb https://apt.armbian.com/ ${IB_SUITE} main" > config/archives/armbian.list
|
echo "deb https://apt.armbian.com/ ${IB_SUITE} main" > config/archives/armbian.list
|
||||||
fi
|
fi
|
||||||
|
|
||||||
cat > config/archives/backports.pref <<- EOF
|
if [ "$NVIDIA" = 1 ]; then
|
||||||
|
curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o config/archives/nvidia-container-toolkit.key
|
||||||
|
curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list \
|
||||||
|
| sed 's#deb https://#deb [signed-by=/etc/apt/trusted.gpg.d/nvidia-container-toolkit.key.gpg] https://#g' \
|
||||||
|
> config/archives/nvidia-container-toolkit.list
|
||||||
|
fi
|
||||||
|
|
||||||
|
cat > config/archives/backports.pref <<-EOF
|
||||||
Package: linux-image-*
|
Package: linux-image-*
|
||||||
Pin: release n=${IB_SUITE}-backports
|
Pin: release n=${IB_SUITE}-backports
|
||||||
Pin-Priority: 500
|
Pin-Priority: 500
|
||||||
|
|
||||||
|
Package: linux-headers-*
|
||||||
|
Pin: release n=${IB_SUITE}-backports
|
||||||
|
Pin-Priority: 500
|
||||||
|
|
||||||
|
Package: *nvidia*
|
||||||
|
Pin: release n=${IB_SUITE}-backports
|
||||||
|
Pin-Priority: 500
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
# Dependencies
|
# Hooks
|
||||||
|
|
||||||
## Firmware
|
|
||||||
if [ "$NON_FREE" = 1 ]; then
|
|
||||||
echo 'firmware-iwlwifi firmware-misc-nonfree firmware-brcm80211 firmware-realtek firmware-atheros firmware-libertas firmware-amd-graphics' > config/package-lists/nonfree.list.chroot
|
|
||||||
fi
|
|
||||||
|
|
||||||
cat > config/hooks/normal/9000-install-startos.hook.chroot << EOF
#!/bin/bash
set -e
|
if [ "${IB_TARGET_PLATFORM}" != "raspberrypi" ]; then
|
||||||
|
/usr/lib/startos/scripts/enable-kiosk
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "${NVIDIA}" = "1" ]; then
|
||||||
|
# install a specific NVIDIA driver version
|
||||||
|
|
||||||
|
# ---------------- configuration ----------------
|
||||||
|
NVIDIA_DRIVER_VERSION="\${NVIDIA_DRIVER_VERSION:-580.126.09}"
|
||||||
|
|
||||||
|
BASE_URL="https://download.nvidia.com/XFree86/Linux-${QEMU_ARCH}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Using NVIDIA driver: \${NVIDIA_DRIVER_VERSION}" >&2
|
||||||
|
|
||||||
|
# ---------------- kernel version ----------------
|
||||||
|
|
||||||
|
# Determine target kernel version from newest /boot/vmlinuz-* in the chroot.
|
||||||
|
KVER="\$(
|
||||||
|
ls -1t /boot/vmlinuz-* 2>/dev/null \
|
||||||
|
| head -n1 \
|
||||||
|
| sed 's|.*/vmlinuz-||'
|
||||||
|
)"
|
||||||
|
|
||||||
|
if [ -z "\${KVER}" ]; then
|
||||||
|
echo "[nvidia-hook] ERROR: no /boot/vmlinuz-* found; cannot determine kernel version" >&2
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Target kernel version: \${KVER}" >&2
|
||||||
|
|
||||||
|
# Ensure kernel headers are present
|
||||||
|
TEMP_APT_DEPS=(build-essential pkg-config)
|
||||||
|
if [ ! -e "/lib/modules/\${KVER}/build" ]; then
|
||||||
|
TEMP_APT_DEPS+=(linux-headers-\${KVER})
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Installing build dependencies" >&2
|
||||||
|
|
||||||
|
/usr/lib/startos/scripts/install-equivs <<-EOF
|
||||||
|
Package: nvidia-depends
|
||||||
|
Version: \${NVIDIA_DRIVER_VERSION}
|
||||||
|
Section: unknown
|
||||||
|
Priority: optional
|
||||||
|
Depends: \${dep_list="\$(IFS=', '; echo "\${TEMP_APT_DEPS[*]}")"}
|
||||||
|
EOF
|
||||||
|
|
||||||
|
# ---------------- download and run installer ----------------
|
||||||
|
|
||||||
|
RUN_NAME="NVIDIA-Linux-${QEMU_ARCH}-\${NVIDIA_DRIVER_VERSION}.run"
|
||||||
|
RUN_PATH="/root/\${RUN_NAME}"
|
||||||
|
RUN_URL="\${BASE_URL}/\${NVIDIA_DRIVER_VERSION}/\${RUN_NAME}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Downloading \${RUN_URL}" >&2
|
||||||
|
wget -O "\${RUN_PATH}" "\${RUN_URL}"
|
||||||
|
chmod +x "\${RUN_PATH}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Running NVIDIA installer for kernel \${KVER}" >&2
|
||||||
|
|
||||||
|
if ! sh "\${RUN_PATH}" \
|
||||||
|
--silent \
|
||||||
|
--kernel-name="\${KVER}" \
|
||||||
|
--no-x-check \
|
||||||
|
--no-nouveau-check \
|
||||||
|
--no-runlevel-check; then
|
||||||
|
cat /var/log/nvidia-installer.log
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Rebuild module metadata
|
||||||
|
echo "[nvidia-hook] Running depmod for \${KVER}" >&2
|
||||||
|
depmod -a "\${KVER}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] NVIDIA \${NVIDIA_DRIVER_VERSION} installation complete for kernel \${KVER}" >&2
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Removing .run installer..." >&2
|
||||||
|
rm -f "\${RUN_PATH}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Blacklisting nouveau..." >&2
|
||||||
|
echo "blacklist nouveau" > /etc/modprobe.d/blacklist-nouveau.conf
|
||||||
|
echo "options nouveau modeset=0" >> /etc/modprobe.d/blacklist-nouveau.conf
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Rebuilding initramfs..." >&2
|
||||||
|
update-initramfs -u -k "\${KVER}"
|
||||||
|
|
||||||
|
echo "[nvidia-hook] Removing build dependencies..." >&2
|
||||||
|
apt-get purge -y nvidia-depends
|
||||||
|
apt-get autoremove -y
|
||||||
|
echo "[nvidia-hook] Removed build dependencies." >&2
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Install linux-kbuild for sign-file (Secure Boot module signing)
|
||||||
|
KVER_ALL="\$(ls -1t /boot/vmlinuz-* 2>/dev/null | head -n1 | sed 's|.*/vmlinuz-||')"
|
||||||
|
if [ -n "\${KVER_ALL}" ]; then
|
||||||
|
KBUILD_VER="\$(echo "\${KVER_ALL}" | grep -oP '^\d+\.\d+')"
|
||||||
|
if [ -n "\${KBUILD_VER}" ]; then
|
||||||
|
echo "[build] Installing linux-kbuild-\${KBUILD_VER} for Secure Boot support" >&2
|
||||||
|
apt-get install -y "linux-kbuild-\${KBUILD_VER}" || echo "[build] WARNING: linux-kbuild-\${KBUILD_VER} not available" >&2
|
||||||
|
fi
|
||||||
|
fi
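# Example of the version extraction above (illustrative only): for
# KVER_ALL="6.12.9+bpo-amd64", grep -oP '^\d+\.\d+' yields KBUILD_VER="6.12",
# so the package requested would be linux-kbuild-6.12.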
|
||||||
|
|
||||||
cp /etc/resolv.conf /etc/resolv.conf.bak

if [ "${IB_SUITE}" = trixie ] && [ "${IB_TARGET_ARCH}" != riscv64 ]; then
echo 'deb https://deb.debian.org/debian/ bookworm main' > /etc/apt/sources.list.d/bookworm.list
apt-get update
apt-get install -y postgresql-15
rm /etc/apt/sources.list.d/bookworm.list
apt-get update
systemctl mask postgresql
fi
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
ln -sf /usr/bin/pi-beep /usr/local/bin/beep
|
ln -sf /usr/bin/pi-beep /usr/local/bin/beep
|
||||||
KERNEL_VERSION=${RPI_KERNEL_VERSION} sh /boot/config.sh > /boot/config.txt
|
sh /boot/firmware/config.sh > /boot/firmware/config.txt
|
||||||
mkinitramfs -c gzip -o initrd.img-${RPI_KERNEL_VERSION}-rpi-v8 ${RPI_KERNEL_VERSION}-rpi-v8
|
mkinitramfs -c gzip -o /boot/initrd.img-${RPI_KERNEL_VERSION}-rpi-v8 ${RPI_KERNEL_VERSION}-rpi-v8
|
||||||
mkinitramfs -c gzip -o initrd.img-${RPI_KERNEL_VERSION}-rpi-2712 ${RPI_KERNEL_VERSION}-rpi-2712
|
mkinitramfs -c gzip -o /boot/initrd.img-${RPI_KERNEL_VERSION}-rpi-2712 ${RPI_KERNEL_VERSION}-rpi-2712
|
||||||
|
cp /usr/lib/u-boot/rpi_arm64/u-boot.bin /boot/firmware/u-boot.bin
|
||||||
fi
|
fi
|
||||||
|
|
||||||
useradd --shell /bin/bash -G startos -m start9
|
useradd --shell /bin/bash -G startos -m start9
|
||||||
@@ -217,13 +349,15 @@ usermod -aG systemd-journal start9
|
|||||||
|
|
||||||
echo "start9 ALL=(ALL:ALL) NOPASSWD: ALL" | sudo tee "/etc/sudoers.d/010_start9-nopasswd"
|
echo "start9 ALL=(ALL:ALL) NOPASSWD: ALL" | sudo tee "/etc/sudoers.d/010_start9-nopasswd"
|
||||||
|
|
||||||
if [ "${IB_TARGET_PLATFORM}" != "raspberrypi" ]; then
|
if ! [[ "${IB_OS_ENV}" =~ (^|-)dev($|-) ]]; then
|
||||||
/usr/lib/startos/scripts/enable-kiosk
|
passwd -l start9
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if ! [[ "${IB_OS_ENV}" =~ (^|-)dev($|-) ]]; then
|
mkdir -p /media/startos
|
||||||
passwd -l start9
|
chmod 750 /media/startos
|
||||||
fi
|
chown root:startos /media/startos
|
||||||
|
|
||||||
|
start-cli --registry=https://alpha-registry-x.start9.com registry package download tor -d /usr/lib/startos/tor_${QEMU_ARCH}.s9pk -a "${QEMU_ARCH}"
|
||||||
|
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
@@ -277,38 +411,85 @@ if [ "${IMAGE_TYPE}" = iso ]; then
|
|||||||
elif [ "${IMAGE_TYPE}" = img ]; then
|
elif [ "${IMAGE_TYPE}" = img ]; then
|
||||||
|
|
||||||
SECTOR_LEN=512
|
SECTOR_LEN=512
|
||||||
BOOT_START=$((1024 * 1024)) # 1MiB
|
FW_START=$((1024 * 1024)) # 1MiB (sector 2048) — Pi-specific
|
||||||
BOOT_LEN=$((512 * 1024 * 1024)) # 512MiB
|
FW_LEN=$((128 * 1024 * 1024)) # 128MiB (Pi firmware + U-Boot + DTBs)
|
||||||
|
FW_END=$((FW_START + FW_LEN - 1))
|
||||||
|
ESP_START=$((FW_END + 1)) # 100MB EFI System Partition (matches os_install)
|
||||||
|
ESP_LEN=$((100 * 1024 * 1024))
|
||||||
|
ESP_END=$((ESP_START + ESP_LEN - 1))
|
||||||
|
BOOT_START=$((ESP_END + 1)) # 2GB /boot (matches os_install)
|
||||||
|
BOOT_LEN=$((2 * 1024 * 1024 * 1024))
|
||||||
BOOT_END=$((BOOT_START + BOOT_LEN - 1))
|
BOOT_END=$((BOOT_START + BOOT_LEN - 1))
|
||||||
ROOT_START=$((BOOT_END + 1))
|
ROOT_START=$((BOOT_END + 1))
|
||||||
ROOT_LEN=$((MAX_IMG_LEN - ROOT_START))
|
|
||||||
ROOT_END=$((MAX_IMG_LEN - 1))
|
# Size root partition to fit the squashfs + 256MB overhead for btrfs
|
||||||
|
# metadata and config overlay, avoiding the need for btrfs resize
|
||||||
|
SQUASHFS_SIZE=$(stat -c %s $prep_results_dir/binary/live/filesystem.squashfs)
|
||||||
|
ROOT_LEN=$(( SQUASHFS_SIZE + 256 * 1024 * 1024 ))
|
||||||
|
# Align to sector boundary
|
||||||
|
ROOT_LEN=$(( (ROOT_LEN + SECTOR_LEN - 1) / SECTOR_LEN * SECTOR_LEN ))
|
||||||
|
|
||||||
|
# Total image: partitions + GPT backup header (34 sectors)
|
||||||
|
IMG_LEN=$((ROOT_START + ROOT_LEN + 34 * SECTOR_LEN))
|
||||||
|
|
||||||
|
# Fixed GPT partition UUIDs (deterministic, based on old MBR disk ID cb15ae4d)
|
||||||
|
FW_UUID=cb15ae4d-0001-4000-8000-000000000001
|
||||||
|
ESP_UUID=cb15ae4d-0002-4000-8000-000000000002
|
||||||
|
BOOT_UUID=cb15ae4d-0003-4000-8000-000000000003
|
||||||
|
ROOT_UUID=cb15ae4d-0004-4000-8000-000000000004
|
||||||
|
|
||||||
TARGET_NAME=$prep_results_dir/${IMAGE_BASENAME}.img
|
TARGET_NAME=$prep_results_dir/${IMAGE_BASENAME}.img
|
||||||
truncate -s $MAX_IMG_LEN $TARGET_NAME
|
truncate -s $IMG_LEN $TARGET_NAME
|
||||||
|
|
||||||
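# Worked example of the sizing above (illustrative numbers only): with a 900 MiB
# squashfs (SQUASHFS_SIZE = 943718400):
#   ROOT_LEN   = 943718400 + 268435456 = 1212153856 bytes (already 512-aligned)
#   ROOT_START = 1MiB + 128MiB + 100MiB + 2GiB = 2387607552 bytes
#   IMG_LEN    = 2387607552 + 1212153856 + 34*512 = 3599778816 bytes (~3.35 GiB)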
sfdisk $TARGET_NAME <<-EOF
label: dos
label-id: 0xcb15ae4d
unit: sectors
sector-size: 512
${TARGET_NAME}1 : start=$((BOOT_START / SECTOR_LEN)), size=$((BOOT_LEN / SECTOR_LEN)), type=c, bootable
${TARGET_NAME}2 : start=$((ROOT_START / SECTOR_LEN)), size=$((ROOT_LEN / SECTOR_LEN)), type=83

label: gpt
${TARGET_NAME}1 : start=$((FW_START / SECTOR_LEN)), size=$((FW_LEN / SECTOR_LEN)), type=EBD0A0A2-B9E5-4433-87C0-68B6B72699C7, uuid=${FW_UUID}, name="firmware"
${TARGET_NAME}2 : start=$((ESP_START / SECTOR_LEN)), size=$((ESP_LEN / SECTOR_LEN)), type=C12A7328-F81F-11D2-BA4B-00A0C93EC93B, uuid=${ESP_UUID}, name="efi"
${TARGET_NAME}3 : start=$((BOOT_START / SECTOR_LEN)), size=$((BOOT_LEN / SECTOR_LEN)), type=0FC63DAF-8483-4772-8E79-3D69D8477DE4, uuid=${BOOT_UUID}, name="boot"
${TARGET_NAME}4 : start=$((ROOT_START / SECTOR_LEN)), size=$((ROOT_LEN / SECTOR_LEN)), type=B921B045-1DF0-41C3-AF44-4C6F280D3FAE, uuid=${ROOT_UUID}, name="root"
EOF
BOOT_DEV=$(losetup --show -f --offset $BOOT_START --sizelimit $BOOT_LEN $TARGET_NAME)
ROOT_DEV=$(losetup --show -f --offset $ROOT_START --sizelimit $ROOT_LEN $TARGET_NAME)
mkfs.vfat -F32 $BOOT_DEV
mkfs.ext4 $ROOT_DEV

# Create named loop device nodes (high minor numbers to avoid conflicts)
# and detach any stale ones from previous failed builds
FW_DEV=/dev/startos-loop-fw
ESP_DEV=/dev/startos-loop-esp
BOOT_DEV=/dev/startos-loop-boot
ROOT_DEV=/dev/startos-loop-root
for dev in $FW_DEV:200 $ESP_DEV:201 $BOOT_DEV:202 $ROOT_DEV:203; do
name=${dev%:*}
minor=${dev#*:}
[ -e $name ] || mknod $name b 7 $minor
losetup -d $name 2>/dev/null || true
done

losetup $FW_DEV --offset $FW_START --sizelimit $FW_LEN $TARGET_NAME
losetup $ESP_DEV --offset $ESP_START --sizelimit $ESP_LEN $TARGET_NAME
losetup $BOOT_DEV --offset $BOOT_START --sizelimit $BOOT_LEN $TARGET_NAME
losetup $ROOT_DEV --offset $ROOT_START --sizelimit $ROOT_LEN $TARGET_NAME

mkfs.vfat -F32 -n firmware $FW_DEV
mkfs.vfat -F32 -n efi $ESP_DEV
mkfs.vfat -F32 -n boot $BOOT_DEV
mkfs.btrfs -f -L rootfs $ROOT_DEV

TMPDIR=$(mktemp -d)
# Extract boot files from squashfs to staging area
BOOT_STAGING=$(mktemp -d)
unsquashfs -n -f -d $BOOT_STAGING $prep_results_dir/binary/live/filesystem.squashfs boot

mkdir -p $TMPDIR/boot $TMPDIR/root
mount $ROOT_DEV $TMPDIR/root
mount $BOOT_DEV $TMPDIR/boot
unsquashfs -n -f -d $TMPDIR $prep_results_dir/binary/live/filesystem.squashfs boot

# Mount partitions (nested: firmware and efi inside boot)
mkdir -p $TMPDIR/boot/firmware $TMPDIR/boot/efi
mount $FW_DEV $TMPDIR/boot/firmware
mount $ESP_DEV $TMPDIR/boot/efi
mount $ROOT_DEV $TMPDIR/root

# Copy boot files — nested mounts route firmware/* to the firmware partition
cp -a $BOOT_STAGING/boot/. $TMPDIR/boot/
rm -rf $BOOT_STAGING

mkdir $TMPDIR/root/images $TMPDIR/root/config
B3SUM=$(b3sum $prep_results_dir/binary/live/filesystem.squashfs | head -c 16)
@@ -321,43 +502,49 @@ elif [ "${IMAGE_TYPE}" = img ]; then
|
|||||||
mount -t overlay -o lowerdir=$TMPDIR/lower,workdir=$TMPDIR/root/config/work,upperdir=$TMPDIR/root/config/overlay overlay $TMPDIR/next
|
mount -t overlay -o lowerdir=$TMPDIR/lower,workdir=$TMPDIR/root/config/work,upperdir=$TMPDIR/root/config/overlay overlay $TMPDIR/next
|
||||||
|
|
||||||
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then
|
||||||
sed -i 's| boot=startos| boot=startos init=/usr/lib/startos/scripts/init_resize\.sh|' $TMPDIR/boot/cmdline.txt
|
|
||||||
rsync -a $SOURCE_DIR/raspberrypi/img/ $TMPDIR/next/
|
rsync -a $SOURCE_DIR/raspberrypi/img/ $TMPDIR/next/
|
||||||
|
|
||||||
|
# Install GRUB: ESP at /boot/efi (Part 2), /boot (Part 3)
|
||||||
|
mkdir -p $TMPDIR/next/boot \
|
||||||
|
$TMPDIR/next/dev $TMPDIR/next/proc $TMPDIR/next/sys $TMPDIR/next/media/startos/root
|
||||||
|
mount --rbind $TMPDIR/boot $TMPDIR/next/boot
|
||||||
|
mount --bind /dev $TMPDIR/next/dev
|
||||||
|
mount -t proc proc $TMPDIR/next/proc
|
||||||
|
mount -t sysfs sysfs $TMPDIR/next/sys
|
||||||
|
mount --bind $TMPDIR/root $TMPDIR/next/media/startos/root
|
||||||
|
|
||||||
|
chroot $TMPDIR/next grub-install --target=arm64-efi --removable --efi-directory=/boot/efi --boot-directory=/boot --no-nvram
|
||||||
|
chroot $TMPDIR/next update-grub
|
||||||
|
|
||||||
|
umount $TMPDIR/next/media/startos/root
|
||||||
|
umount $TMPDIR/next/sys
|
||||||
|
umount $TMPDIR/next/proc
|
||||||
|
umount $TMPDIR/next/dev
|
||||||
|
umount -l $TMPDIR/next/boot
|
||||||
|
|
||||||
|
# Fix root= in grub.cfg: update-grub sees loop devices, but the
|
||||||
|
# real device uses a fixed GPT PARTUUID for root (Part 4).
|
||||||
|
sed -i "s|root=[^ ]*|root=PARTUUID=${ROOT_UUID}|g" $TMPDIR/boot/grub/grub.cfg
|
||||||
|
|
||||||
|
# Inject first-boot resize script into GRUB config
|
||||||
|
sed -i 's| boot=startos| boot=startos init=/usr/lib/startos/scripts/init_resize\.sh|' $TMPDIR/boot/grub/grub.cfg
|
||||||
fi
|
fi
|
||||||
|
|
||||||
umount $TMPDIR/next
|
umount $TMPDIR/next
|
||||||
umount $TMPDIR/lower
|
umount $TMPDIR/lower
|
||||||
|
|
||||||
|
umount $TMPDIR/boot/firmware
|
||||||
|
umount $TMPDIR/boot/efi
|
||||||
umount $TMPDIR/boot
|
umount $TMPDIR/boot
|
||||||
umount $TMPDIR/root
|
umount $TMPDIR/root
|
||||||
|
|
||||||
|
|
||||||
e2fsck -fy $ROOT_DEV
|
|
||||||
resize2fs -M $ROOT_DEV
|
|
||||||
|
|
||||||
BLOCK_COUNT=$(dumpe2fs -h $ROOT_DEV | awk '/^Block count:/ { print $3 }')
|
|
||||||
BLOCK_SIZE=$(dumpe2fs -h $ROOT_DEV | awk '/^Block size:/ { print $3 }')
|
|
||||||
ROOT_LEN=$((BLOCK_COUNT * BLOCK_SIZE))
|
|
||||||
|
|
||||||
losetup -d $ROOT_DEV
|
losetup -d $ROOT_DEV
|
||||||
losetup -d $BOOT_DEV
|
losetup -d $BOOT_DEV
|
||||||
|
losetup -d $ESP_DEV
|
||||||
# Recreate partition 2 with the new size using sfdisk
|
losetup -d $FW_DEV
|
||||||
sfdisk $TARGET_NAME <<-EOF
|
|
||||||
label: dos
|
|
||||||
label-id: 0xcb15ae4d
|
|
||||||
unit: sectors
|
|
||||||
sector-size: 512
|
|
||||||
|
|
||||||
${TARGET_NAME}1 : start=$((BOOT_START / SECTOR_LEN)), size=$((BOOT_LEN / SECTOR_LEN)), type=c, bootable
|
|
||||||
${TARGET_NAME}2 : start=$((ROOT_START / SECTOR_LEN)), size=$((ROOT_LEN / SECTOR_LEN)), type=83
|
|
||||||
EOF
|
|
||||||
|
|
||||||
TARGET_SIZE=$((ROOT_START + ROOT_LEN))
|
|
||||||
truncate -s $TARGET_SIZE $TARGET_NAME
|
|
||||||
|
|
||||||
mv $TARGET_NAME $RESULTS_DIR/$IMAGE_BASENAME.img
|
mv $TARGET_NAME $RESULTS_DIR/$IMAGE_BASENAME.img
|
||||||
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
chown $IB_UID:$IB_UID $RESULTS_DIR/$IMAGE_BASENAME.*
|
chown $IB_UID:$IB_UID $RESULTS_DIR/$IMAGE_BASENAME.*
|
||||||
|
|||||||
@@ -1,2 +1,4 @@
|
|||||||
/dev/mmcblk0p1 /boot vfat umask=0077 0 2
|
PARTUUID=cb15ae4d-0001-4000-8000-000000000001 /boot/firmware vfat umask=0077 0 2
|
||||||
/dev/mmcblk0p2 / ext4 defaults 0 1
|
PARTUUID=cb15ae4d-0002-4000-8000-000000000002 /boot/efi vfat umask=0077 0 1
|
||||||
|
PARTUUID=cb15ae4d-0003-4000-8000-000000000003 /boot vfat umask=0077 0 2
|
||||||
|
PARTUUID=cb15ae4d-0004-4000-8000-000000000004 / btrfs defaults 0 1
|
||||||
|
|||||||
@@ -12,15 +12,16 @@ get_variables () {
|
|||||||
BOOT_DEV_NAME=$(echo /sys/block/*/"${BOOT_PART_NAME}" | cut -d "/" -f 4)
|
BOOT_DEV_NAME=$(echo /sys/block/*/"${BOOT_PART_NAME}" | cut -d "/" -f 4)
|
||||||
BOOT_PART_NUM=$(cat "/sys/block/${BOOT_DEV_NAME}/${BOOT_PART_NAME}/partition")
|
BOOT_PART_NUM=$(cat "/sys/block/${BOOT_DEV_NAME}/${BOOT_PART_NAME}/partition")
|
||||||
|
|
||||||
OLD_DISKID=$(fdisk -l "$ROOT_DEV" | sed -n 's/Disk identifier: 0x\([^ ]*\)/\1/p')
|
|
||||||
|
|
||||||
ROOT_DEV_SIZE=$(cat "/sys/block/${ROOT_DEV_NAME}/size")

if [ "$ROOT_DEV_SIZE" -le 67108864 ]; then
TARGET_END=$((ROOT_DEV_SIZE - 1))
else
TARGET_END=$((33554432 - 1))
DATA_PART_START=33554432
DATA_PART_END=$((ROOT_DEV_SIZE - 1))
fi

# GPT backup header/entries occupy last 33 sectors
USABLE_END=$((ROOT_DEV_SIZE - 34))

if [ "$USABLE_END" -le 67108864 ]; then
TARGET_END=$USABLE_END
else
TARGET_END=$((33554432 - 1))
DATA_PART_START=33554432
DATA_PART_END=$USABLE_END
fi
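# Worked example of the sizing above (illustrative only): on a 64 GB SD card,
# ROOT_DEV_SIZE is roughly 125000000 sectors, so USABLE_END = 124999966. That is
# greater than 67108864 (the 32 GiB mark in 512-byte sectors), so the root
# partition is capped at TARGET_END = 33554431 (the 16 GiB mark) and the data
# partition spans sectors 33554432 through 124999966.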
|
|
||||||
PARTITION_TABLE=$(parted -m "$ROOT_DEV" unit s print | tr -d 's')
|
PARTITION_TABLE=$(parted -m "$ROOT_DEV" unit s print | tr -d 's')
|
||||||
@@ -57,37 +58,30 @@ check_variables () {
|
|||||||
main () {
|
main () {
|
||||||
get_variables
|
get_variables
|
||||||
|
|
||||||
|
# Fix GPT backup header first — the image was built with a tight root
|
||||||
|
# partition, so the backup GPT is not at the end of the SD card. parted
|
||||||
|
# will prompt interactively if this isn't fixed before we use it.
|
||||||
|
sgdisk -e "$ROOT_DEV" 2>/dev/null || true
|
||||||
|
|
||||||
if ! check_variables; then
|
if ! check_variables; then
|
||||||
return 1
|
return 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# if [ "$ROOT_PART_END" -eq "$TARGET_END" ]; then
|
|
||||||
# reboot_pi
|
|
||||||
# fi
|
|
||||||
|
|
||||||
if ! echo Yes | parted -m --align=optimal "$ROOT_DEV" ---pretend-input-tty u s resizepart "$ROOT_PART_NUM" "$TARGET_END" ; then
|
if ! echo Yes | parted -m --align=optimal "$ROOT_DEV" ---pretend-input-tty u s resizepart "$ROOT_PART_NUM" "$TARGET_END" ; then
|
||||||
FAIL_REASON="Root partition resize failed"
|
FAIL_REASON="Root partition resize failed"
|
||||||
return 1
|
return 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ -n "$DATA_PART_START" ]; then
|
if [ -n "$DATA_PART_START" ]; then
|
||||||
if ! parted -ms --align=optimal "$ROOT_DEV" u s mkpart primary "$DATA_PART_START" "$DATA_PART_END"; then
|
if ! parted -ms --align=optimal "$ROOT_DEV" u s mkpart data "$DATA_PART_START" "$DATA_PART_END"; then
|
||||||
FAIL_REASON="Data partition creation failed"
|
FAIL_REASON="Data partition creation failed"
|
||||||
return 1
|
return 1
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
(
|
|
||||||
echo x
|
|
||||||
echo i
|
|
||||||
echo "0xcb15ae4d"
|
|
||||||
echo r
|
|
||||||
echo w
|
|
||||||
) | fdisk $ROOT_DEV
|
|
||||||
|
|
||||||
mount / -o remount,rw
|
mount / -o remount,rw
|
||||||
|
|
||||||
resize2fs $ROOT_PART_DEV
|
btrfs filesystem resize max /media/startos/root
|
||||||
|
|
||||||
if ! systemd-machine-id-setup --root=/media/startos/config/overlay/; then
|
if ! systemd-machine-id-setup --root=/media/startos/config/overlay/; then
|
||||||
FAIL_REASON="systemd-machine-id-setup failed"
|
FAIL_REASON="systemd-machine-id-setup failed"
|
||||||
@@ -111,7 +105,7 @@ mount / -o remount,ro
|
|||||||
beep
|
beep
|
||||||
|
|
||||||
if main; then
|
if main; then
|
||||||
sed -i 's| init=/usr/lib/startos/scripts/init_resize\.sh||' /boot/cmdline.txt
|
sed -i 's| init=/usr/lib/startos/scripts/init_resize\.sh||' /boot/grub/grub.cfg
|
||||||
echo "Resized root filesystem. Rebooting in 5 seconds..."
|
echo "Resized root filesystem. Rebooting in 5 seconds..."
|
||||||
sleep 5
|
sleep 5
|
||||||
else
|
else
|
||||||
|
|||||||
@@ -1 +0,0 @@
|
|||||||
usb-storage.quirks=152d:0562:u,14cd:121c:u,0781:cfcb:u console=serial0,115200 console=tty1 root=PARTUUID=cb15ae4d-02 rootfstype=ext4 fsck.repair=yes rootwait cgroup_enable=cpuset cgroup_memory=1 cgroup_enable=memory quiet boot=startos
|
|
||||||
@@ -27,20 +27,18 @@ disable_overscan=1
|
|||||||
# (e.g. for USB device mode) or if USB support is not required.
|
# (e.g. for USB device mode) or if USB support is not required.
|
||||||
otg_mode=1
|
otg_mode=1
|
||||||
|
|
||||||
[all]
|
|
||||||
|
|
||||||
[pi4]
|
[pi4]
|
||||||
# Run as fast as firmware / board allows
|
# Run as fast as firmware / board allows
|
||||||
arm_boost=1
|
arm_boost=1
|
||||||
kernel=vmlinuz-${KERNEL_VERSION}-rpi-v8
|
|
||||||
initramfs initrd.img-${KERNEL_VERSION}-rpi-v8 followkernel
|
|
||||||
|
|
||||||
[pi5]
|
|
||||||
kernel=vmlinuz-${KERNEL_VERSION}-rpi-2712
|
|
||||||
initramfs initrd.img-${KERNEL_VERSION}-rpi-2712 followkernel
|
|
||||||
|
|
||||||
[all]
|
[all]
|
||||||
gpu_mem=16
|
gpu_mem=16
|
||||||
dtoverlay=pwm-2chan,disable-bt
|
dtoverlay=pwm-2chan,disable-bt
|
||||||
|
|
||||||
EOF
|
# Enable UART for U-Boot and serial console
|
||||||
|
enable_uart=1
|
||||||
|
|
||||||
|
# Load U-Boot as the bootloader (GRUB is chainloaded from U-Boot)
|
||||||
|
kernel=u-boot.bin
|
||||||
|
|
||||||
|
EOF
|
||||||
@@ -84,4 +84,8 @@ arm_boost=1
|
|||||||
gpu_mem=16
|
gpu_mem=16
|
||||||
dtoverlay=pwm-2chan,disable-bt
|
dtoverlay=pwm-2chan,disable-bt
|
||||||
|
|
||||||
auto_initramfs=1
|
# Enable UART for U-Boot and serial console
|
||||||
|
enable_uart=1
|
||||||
|
|
||||||
|
# Load U-Boot as the bootloader (GRUB is chainloaded from U-Boot)
|
||||||
|
kernel=u-boot.bin
|
||||||
@@ -0,0 +1,4 @@
|
|||||||
|
# Raspberry Pi-specific GRUB overrides
|
||||||
|
# Overrides GRUB_CMDLINE_LINUX from /etc/default/grub with Pi-specific
|
||||||
|
# console devices and hardware quirks.
|
||||||
|
GRUB_CMDLINE_LINUX="boot=startos console=serial0,115200 console=tty1 usb-storage.quirks=152d:0562:u,14cd:121c:u,0781:cfcb:u cgroup_enable=cpuset cgroup_memory=1 cgroup_enable=memory"
|
||||||
@@ -1,6 +1,3 @@
|
|||||||
os-partitions:
|
|
||||||
boot: /dev/mmcblk0p1
|
|
||||||
root: /dev/mmcblk0p2
|
|
||||||
ethernet-interface: end0
|
ethernet-interface: end0
|
||||||
wifi-interface: wlan0
|
wifi-interface: wlan0
|
||||||
disable-encryption: true
|
disable-encryption: true
|
||||||
|
|||||||
51
build/lib/grub-theme/theme.txt
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
desktop-image: "../splash.png"
|
||||||
|
title-color: "#ffffff"
|
||||||
|
title-font: "Unifont Regular 16"
|
||||||
|
title-text: "StartOS Boot Menu with GRUB"
|
||||||
|
message-font: "Unifont Regular 16"
|
||||||
|
terminal-font: "Unifont Regular 16"
|
||||||
|
|
||||||
|
#help bar at the bottom
|
||||||
|
+ label {
|
||||||
|
top = 100%-50
|
||||||
|
left = 0
|
||||||
|
width = 100%
|
||||||
|
height = 20
|
||||||
|
text = "@KEYMAP_SHORT@"
|
||||||
|
align = "center"
|
||||||
|
color = "#ffffff"
|
||||||
|
font = "Unifont Regular 16"
|
||||||
|
}
|
||||||
|
|
||||||
|
#boot menu
|
||||||
|
+ boot_menu {
|
||||||
|
left = 10%
|
||||||
|
width = 80%
|
||||||
|
top = 52%
|
||||||
|
height = 48%-80
|
||||||
|
item_color = "#a8a8a8"
|
||||||
|
item_font = "Unifont Regular 16"
|
||||||
|
selected_item_color= "#ffffff"
|
||||||
|
selected_item_font = "Unifont Regular 16"
|
||||||
|
item_height = 16
|
||||||
|
item_padding = 0
|
||||||
|
item_spacing = 4
|
||||||
|
icon_width = 0
|
||||||
|
icon_height = 0
|
||||||
|
item_icon_space = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
#progress bar
|
||||||
|
+ progress_bar {
|
||||||
|
id = "__timeout__"
|
||||||
|
left = 15%
|
||||||
|
top = 100%-80
|
||||||
|
height = 16
|
||||||
|
width = 70%
|
||||||
|
font = "Unifont Regular 16"
|
||||||
|
text_color = "#000000"
|
||||||
|
fg_color = "#ffffff"
|
||||||
|
bg_color = "#a8a8a8"
|
||||||
|
border_color = "#ffffff"
|
||||||
|
text = "@TIMEOUT_NOTIFICATION_LONG@"
|
||||||
|
}
|
||||||
@@ -4,7 +4,7 @@ parse_essential_db_info() {
|
|||||||
DB_DUMP="/tmp/startos_db.json"
|
DB_DUMP="/tmp/startos_db.json"
|
||||||
|
|
||||||
if command -v start-cli >/dev/null 2>&1; then
|
if command -v start-cli >/dev/null 2>&1; then
|
||||||
start-cli db dump > "$DB_DUMP" 2>/dev/null || return 1
|
timeout 30 start-cli db dump > "$DB_DUMP" 2>/dev/null || return 1
|
||||||
else
|
else
|
||||||
return 1
|
return 1
|
||||||
fi
|
fi
|
||||||
@@ -118,6 +118,6 @@ else
|
|||||||
fi
|
fi
|
||||||
printf "\n \033[1;37m┌──────────────────────────────────────────────────── QUICK ACCESS ─┐\033[0m\n"
|
printf "\n \033[1;37m┌──────────────────────────────────────────────────── QUICK ACCESS ─┐\033[0m\n"
|
||||||
printf " \033[1;37m│\033[0m Web Interface: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "$web_url"
|
printf " \033[1;37m│\033[0m Web Interface: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "$web_url"
|
||||||
printf " \033[1;37m│\033[0m Documentation: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://staging.docs.start9.com"
|
printf " \033[1;37m│\033[0m Documentation: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://docs.start9.com"
|
||||||
printf " \033[1;37m│\033[0m Support: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://start9.com/contact"
|
printf " \033[1;37m│\033[0m Support: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://start9.com/contact"
|
||||||
printf " \033[1;37m└───────────────────────────────────────────────────────────────────┘\033[0m\n\n"
|
printf " \033[1;37m└───────────────────────────────────────────────────────────────────┘\033[0m\n\n"
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
|
|||||||
|
|
||||||
if [ -z "$NO_SYNC" ]; then
|
if [ -z "$NO_SYNC" ]; then
|
||||||
echo 'Syncing...'
|
echo 'Syncing...'
|
||||||
umount -R /media/startos/next 2> /dev/null
|
umount -l /media/startos/next 2> /dev/null
|
||||||
umount /media/startos/upper 2> /dev/null
|
umount /media/startos/upper 2> /dev/null
|
||||||
rm -rf /media/startos/upper /media/startos/next
|
rm -rf /media/startos/upper /media/startos/next
|
||||||
mkdir /media/startos/upper
|
mkdir /media/startos/upper
|
||||||
@@ -55,16 +55,16 @@ mkdir -p /media/startos/next/sys
|
|||||||
mkdir -p /media/startos/next/proc
|
mkdir -p /media/startos/next/proc
|
||||||
mkdir -p /media/startos/next/boot
|
mkdir -p /media/startos/next/boot
|
||||||
mkdir -p /media/startos/next/media/startos/root
|
mkdir -p /media/startos/next/media/startos/root
|
||||||
mount --bind /run /media/startos/next/run
|
mount -t tmpfs tmpfs /media/startos/next/run
|
||||||
mount --bind /tmp /media/startos/next/tmp
|
mount -t tmpfs tmpfs /media/startos/next/tmp
|
||||||
mount --bind /dev /media/startos/next/dev
|
mount --bind /dev /media/startos/next/dev
|
||||||
mount --bind /sys /media/startos/next/sys
|
mount -t sysfs sysfs /media/startos/next/sys
|
||||||
mount --bind /proc /media/startos/next/proc
|
mount -t proc proc /media/startos/next/proc
|
||||||
mount --bind /boot /media/startos/next/boot
|
mount --bind /boot /media/startos/next/boot
|
||||||
mount --bind /media/startos/root /media/startos/next/media/startos/root
|
mount --bind /media/startos/root /media/startos/next/media/startos/root
|
||||||
|
|
||||||
if mountpoint /sys/firmware/efi/efivars 2>&1 > /dev/null; then
|
if mountpoint /sys/firmware/efi/efivars 2>&1 > /dev/null; then
|
||||||
mount --bind /sys/firmware/efi/efivars /media/startos/next/sys/firmware/efi/efivars
|
mount -t efivarfs efivarfs /media/startos/next/sys/firmware/efi/efivars
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [ -z "$*" ]; then
|
if [ -z "$*" ]; then
|
||||||
@@ -79,13 +79,13 @@ if mountpoint /media/startos/next/sys/firmware/efi/efivars 2>&1 > /dev/null; the
|
|||||||
umount /media/startos/next/sys/firmware/efi/efivars
|
umount /media/startos/next/sys/firmware/efi/efivars
|
||||||
fi
|
fi
|
||||||
|
|
||||||
umount /media/startos/next/run
|
umount -l /media/startos/next/run
|
||||||
umount /media/startos/next/tmp
|
umount -l /media/startos/next/tmp
|
||||||
umount /media/startos/next/dev
|
umount -l /media/startos/next/dev
|
||||||
umount /media/startos/next/sys
|
umount -l /media/startos/next/sys
|
||||||
umount /media/startos/next/proc
|
umount -l /media/startos/next/proc
|
||||||
umount /media/startos/next/boot
|
umount -l /media/startos/next/boot
|
||||||
umount /media/startos/next/media/startos/root
|
umount -l /media/startos/next/media/startos/root
|
||||||
|
|
||||||
if [ "$CHROOT_RES" -eq 0 ]; then
|
if [ "$CHROOT_RES" -eq 0 ]; then
|
||||||
|
|
||||||
@@ -111,6 +111,6 @@ if [ "$CHROOT_RES" -eq 0 ]; then
|
|||||||
reboot
|
reboot
|
||||||
fi
|
fi
|
||||||
|
|
||||||
umount -R /media/startos/next
|
umount -l /media/startos/next
|
||||||
umount /media/startos/upper
|
umount -l /media/startos/upper
|
||||||
rm -rf /media/startos/upper /media/startos/next
|
rm -rf /media/startos/upper /media/startos/next
|
||||||
@@ -5,7 +5,7 @@ if [ -z "$sip" ] || [ -z "$dip" ] || [ -z "$dprefix" ] || [ -z "$sport" ] || [ -
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
NAME="F$(echo "$sip:$sport -> $dip/$dprefix:$dport" | sha256sum | head -c 15)"
|
NAME="F$(echo "$sip:$sport -> $dip/$dprefix:$dport ${src_subnet:-any}" | sha256sum | head -c 15)"
|
||||||
|
|
||||||
for kind in INPUT FORWARD ACCEPT; do
|
for kind in INPUT FORWARD ACCEPT; do
|
||||||
if ! iptables -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
|
if ! iptables -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
|
||||||
@@ -13,7 +13,7 @@ for kind in INPUT FORWARD ACCEPT; do
|
|||||||
iptables -A $kind -j "${NAME}_${kind}"
|
iptables -A $kind -j "${NAME}_${kind}"
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
for kind in PREROUTING INPUT OUTPUT POSTROUTING; do
|
for kind in PREROUTING OUTPUT POSTROUTING; do
|
||||||
if ! iptables -t nat -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
|
if ! iptables -t nat -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
|
||||||
iptables -t nat -N "${NAME}_${kind}" 2> /dev/null
|
iptables -t nat -N "${NAME}_${kind}" 2> /dev/null
|
||||||
iptables -t nat -A $kind -j "${NAME}_${kind}"
|
iptables -t nat -A $kind -j "${NAME}_${kind}"
|
||||||
@@ -26,7 +26,7 @@ trap 'err=1' ERR
|
|||||||
for kind in INPUT FORWARD ACCEPT; do
|
for kind in INPUT FORWARD ACCEPT; do
|
||||||
iptables -F "${NAME}_${kind}" 2> /dev/null
|
iptables -F "${NAME}_${kind}" 2> /dev/null
|
||||||
done
|
done
|
||||||
for kind in PREROUTING INPUT OUTPUT POSTROUTING; do
|
for kind in PREROUTING OUTPUT POSTROUTING; do
|
||||||
iptables -t nat -F "${NAME}_${kind}" 2> /dev/null
|
iptables -t nat -F "${NAME}_${kind}" 2> /dev/null
|
||||||
done
|
done
|
||||||
if [ "$UNDO" = 1 ]; then
|
if [ "$UNDO" = 1 ]; then
|
||||||
@@ -36,20 +36,37 @@ if [ "$UNDO" = 1 ]; then
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
# DNAT: rewrite destination for incoming packets (external traffic)
|
# DNAT: rewrite destination for incoming packets (external traffic)
|
||||||
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
# When src_subnet is set, only forward traffic from that subnet (private forwards)
|
||||||
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
if [ -n "$src_subnet" ]; then
|
||||||
|
iptables -t nat -A ${NAME}_PREROUTING -s "$src_subnet" -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
iptables -t nat -A ${NAME}_PREROUTING -s "$src_subnet" -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
# Also allow containers on the bridge subnet to reach this forward
|
||||||
|
if [ -n "$bridge_subnet" ]; then
|
||||||
|
iptables -t nat -A ${NAME}_PREROUTING -s "$bridge_subnet" -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
iptables -t nat -A ${NAME}_PREROUTING -s "$bridge_subnet" -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
fi
|
||||||
|
|
||||||
# DNAT: rewrite destination for locally-originated packets (hairpin from host itself)
|
# DNAT: rewrite destination for locally-originated packets (hairpin from host itself)
|
||||||
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
|
||||||
|
|
||||||
# MASQUERADE: rewrite source for all forwarded traffic to the destination
|
|
||||||
# This ensures responses are routed back through the host regardless of source IP
|
|
||||||
iptables -t nat -A ${NAME}_POSTROUTING -d "$dip" -p tcp --dport "$dport" -j MASQUERADE
|
|
||||||
iptables -t nat -A ${NAME}_POSTROUTING -d "$dip" -p udp --dport "$dport" -j MASQUERADE
|
|
||||||
|
|
||||||
# Allow new connections to be forwarded to the destination
|
# Allow new connections to be forwarded to the destination
|
||||||
iptables -A ${NAME}_FORWARD -d $dip -p tcp --dport $dport -m state --state NEW -j ACCEPT
|
iptables -A ${NAME}_FORWARD -d $dip -p tcp --dport $dport -m state --state NEW -j ACCEPT
|
||||||
iptables -A ${NAME}_FORWARD -d $dip -p udp --dport $dport -m state --state NEW -j ACCEPT
|
iptables -A ${NAME}_FORWARD -d $dip -p udp --dport $dport -m state --state NEW -j ACCEPT
|
||||||
|
|
||||||
exit $err
|
# NAT hairpin: masquerade traffic from the bridge subnet or host to the DNAT
|
||||||
|
# target, so replies route back through the host for proper NAT reversal.
|
||||||
|
# Container-to-container hairpin (source is on the bridge subnet)
|
||||||
|
if [ -n "$bridge_subnet" ]; then
|
||||||
|
iptables -t nat -A ${NAME}_POSTROUTING -s "$bridge_subnet" -d "$dip" -p tcp --dport "$dport" -j MASQUERADE
|
||||||
|
iptables -t nat -A ${NAME}_POSTROUTING -s "$bridge_subnet" -d "$dip" -p udp --dport "$dport" -j MASQUERADE
|
||||||
|
fi
|
||||||
|
# Host-to-container hairpin (host connects to its own gateway IP, source is sip)
|
||||||
|
iptables -t nat -A ${NAME}_POSTROUTING -s "$sip" -d "$dip" -p tcp --dport "$dport" -j MASQUERADE
|
||||||
|
iptables -t nat -A ${NAME}_POSTROUTING -s "$sip" -d "$dip" -p udp --dport "$dport" -j MASQUERADE
|
||||||
|
|
||||||
|
exit $err
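# Hypothetical invocation of this forwarding script (illustrative values only;
# in practice StartOS sets these variables itself):
#   sip=10.0.0.1 sport=8080 dip=172.18.0.5 dprefix=32 dport=80 \
#   src_subnet=192.168.1.0/24 bridge_subnet=172.18.0.0/16 <this-script>
# DNATs 10.0.0.1:8080 to 172.18.0.5:80 for clients in 192.168.1.0/24 (and for
# containers on the bridge subnet), with MASQUERADE so replies hairpin back
# through the host.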
|
||||||
|
|||||||
20
build/lib/scripts/install-equivs
Executable file
@@ -0,0 +1,20 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
export DEBIAN_FRONTEND=noninteractive
|
||||||
|
export DEBCONF_NONINTERACTIVE_SEEN=true
|
||||||
|
|
||||||
|
TMP_DIR=$(mktemp -d)
|
||||||
|
|
||||||
|
(
|
||||||
|
set -e
|
||||||
|
cd $TMP_DIR
|
||||||
|
|
||||||
|
cat > control.equivs
|
||||||
|
equivs-build control.equivs
|
||||||
|
apt-get install -y ./*.deb < /dev/null
|
||||||
|
)
|
||||||
|
|
||||||
|
rm -rf $TMP_DIR
|
||||||
|
|
||||||
|
echo Install complete. >&2
|
||||||
|
exit 0
|
||||||
@@ -29,10 +29,13 @@ if [ -z "$needed" ]; then
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
MARGIN=${MARGIN:-1073741824}
|
||||||
|
target=$((needed + MARGIN))
|
||||||
|
|
||||||
if [ -h /media/startos/config/current.rootfs ] && [ -e /media/startos/config/current.rootfs ]; then
|
if [ -h /media/startos/config/current.rootfs ] && [ -e /media/startos/config/current.rootfs ]; then
|
||||||
echo 'Pruning...'
|
echo 'Pruning...'
|
||||||
current="$(readlink -f /media/startos/config/current.rootfs)"
|
current="$(readlink -f /media/startos/config/current.rootfs)"
|
||||||
while [[ "$(df -B1 --output=avail --sync /media/startos/images | tail -n1)" -lt "$needed" ]]; do
|
while [[ "$(df -B1 --output=avail --sync /media/startos/images | tail -n1)" -lt "$target" ]]; do
|
||||||
to_prune="$(ls -t1 /media/startos/images/*.rootfs /media/startos/images/*.squashfs 2> /dev/null | grep -v "$current" | tail -n1)"
|
to_prune="$(ls -t1 /media/startos/images/*.rootfs /media/startos/images/*.squashfs 2> /dev/null | grep -v "$current" | tail -n1)"
|
||||||
if [ -e "$to_prune" ]; then
|
if [ -e "$to_prune" ]; then
|
||||||
echo " Pruning $to_prune"
|
echo " Pruning $to_prune"
|
||||||
|
|||||||
76
build/lib/scripts/sign-unsigned-modules
Executable file
@@ -0,0 +1,76 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# sign-unsigned-modules [--source <dir> --dest <dir>] [--sign-file <path>]
|
||||||
|
# [--mok-key <path>] [--mok-pub <path>]
|
||||||
|
#
|
||||||
|
# Signs all unsigned kernel modules using the DKMS MOK key.
|
||||||
|
#
|
||||||
|
# Default (install) mode:
|
||||||
|
# Run inside a chroot. Finds and signs unsigned modules in /lib/modules in-place.
|
||||||
|
# sign-file and MOK key are auto-detected from standard paths.
|
||||||
|
#
|
||||||
|
# Overlay mode (--source/--dest):
|
||||||
|
# Finds unsigned modules in <source>, copies to <dest>, signs the copies.
|
||||||
|
# Clears old signed modules in <dest> first. Used during upgrades where the
|
||||||
|
# overlay upper is tmpfs and writes would be lost.
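#
# Illustrative invocations (the overlay-mode paths match how the upgrade script
# calls this tool):
#   sign-unsigned-modules
#   sign-unsigned-modules --source /media/startos/lower --dest /media/startos/config/overlay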
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
SOURCE=""
|
||||||
|
DEST=""
|
||||||
|
SIGN_FILE=""
|
||||||
|
MOK_KEY="/var/lib/dkms/mok.key"
|
||||||
|
MOK_PUB="/var/lib/dkms/mok.pub"
|
||||||
|
|
||||||
|
while [[ $# -gt 0 ]]; do
|
||||||
|
case $1 in
|
||||||
|
--source) SOURCE="$2"; shift 2;;
|
||||||
|
--dest) DEST="$2"; shift 2;;
|
||||||
|
--sign-file) SIGN_FILE="$2"; shift 2;;
|
||||||
|
--mok-key) MOK_KEY="$2"; shift 2;;
|
||||||
|
--mok-pub) MOK_PUB="$2"; shift 2;;
|
||||||
|
*) echo "Unknown option: $1" >&2; exit 1;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
|
||||||
|
# Auto-detect sign-file if not specified
|
||||||
|
if [ -z "$SIGN_FILE" ]; then
|
||||||
|
SIGN_FILE="$(ls -1 /usr/lib/linux-kbuild-*/scripts/sign-file 2>/dev/null | head -1)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "$SIGN_FILE" ] || [ ! -x "$SIGN_FILE" ]; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ ! -f "$MOK_KEY" ] || [ ! -f "$MOK_PUB" ]; then
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
COUNT=0
|
||||||
|
|
||||||
|
if [ -n "$SOURCE" ] && [ -n "$DEST" ]; then
|
||||||
|
# Overlay mode: find unsigned in source, copy to dest, sign in dest
|
||||||
|
rm -rf "${DEST}"/lib/modules
|
||||||
|
|
||||||
|
for ko in $(find "${SOURCE}"/lib/modules -name '*.ko' 2>/dev/null); do
|
||||||
|
if ! modinfo "$ko" 2>/dev/null | grep -q '^sig_id:'; then
|
||||||
|
rel_path="${ko#${SOURCE}}"
|
||||||
|
mkdir -p "${DEST}$(dirname "$rel_path")"
|
||||||
|
cp "$ko" "${DEST}${rel_path}"
|
||||||
|
"$SIGN_FILE" sha256 "$MOK_KEY" "$MOK_PUB" "${DEST}${rel_path}"
|
||||||
|
COUNT=$((COUNT + 1))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
else
|
||||||
|
# In-place mode: sign modules directly
|
||||||
|
for ko in $(find /lib/modules -name '*.ko' 2>/dev/null); do
|
||||||
|
if ! modinfo "$ko" 2>/dev/null | grep -q '^sig_id:'; then
|
||||||
|
"$SIGN_FILE" sha256 "$MOK_KEY" "$MOK_PUB" "$ko"
|
||||||
|
COUNT=$((COUNT + 1))
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ $COUNT -gt 0 ]; then
|
||||||
|
echo "[sign-modules] Signed $COUNT unsigned kernel modules"
|
||||||
|
fi
|
||||||
@@ -104,6 +104,7 @@ local_mount_root()
|
|||||||
-olowerdir=/startos/config/overlay:/lower,upperdir=/upper/data,workdir=/upper/work \
|
-olowerdir=/startos/config/overlay:/lower,upperdir=/upper/data,workdir=/upper/work \
|
||||||
overlay ${rootmnt}
|
overlay ${rootmnt}
|
||||||
|
|
||||||
|
mkdir -m 750 -p ${rootmnt}/media/startos
|
||||||
mkdir -p ${rootmnt}/media/startos/config
|
mkdir -p ${rootmnt}/media/startos/config
|
||||||
mount --bind /startos/config ${rootmnt}/media/startos/config
|
mount --bind /startos/config ${rootmnt}/media/startos/config
|
||||||
mkdir -p ${rootmnt}/media/startos/images
|
mkdir -p ${rootmnt}/media/startos/images
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ fi
|
|||||||
|
|
||||||
unsquashfs -f -d / $1 boot
|
unsquashfs -f -d / $1 boot
|
||||||
|
|
||||||
umount -R /media/startos/next 2> /dev/null || true
|
umount -l /media/startos/next 2> /dev/null || true
|
||||||
umount /media/startos/upper 2> /dev/null || true
|
umount /media/startos/upper 2> /dev/null || true
|
||||||
umount /media/startos/lower 2> /dev/null || true
|
umount /media/startos/lower 2> /dev/null || true
|
||||||
|
|
||||||
@@ -45,18 +45,13 @@ mkdir -p /media/startos/next/media/startos/root
|
|||||||
mount --bind /run /media/startos/next/run
|
mount --bind /run /media/startos/next/run
|
||||||
mount --bind /tmp /media/startos/next/tmp
|
mount --bind /tmp /media/startos/next/tmp
|
||||||
mount --bind /dev /media/startos/next/dev
|
mount --bind /dev /media/startos/next/dev
|
||||||
mount --bind /sys /media/startos/next/sys
|
mount -t sysfs sysfs /media/startos/next/sys
|
||||||
mount --bind /proc /media/startos/next/proc
|
mount -t proc proc /media/startos/next/proc
|
||||||
mount --bind /boot /media/startos/next/boot
|
mount --rbind /boot /media/startos/next/boot
|
||||||
mount --bind /media/startos/root /media/startos/next/media/startos/root
|
mount --bind /media/startos/root /media/startos/next/media/startos/root
|
||||||
|
|
||||||
if mountpoint /boot/efi 2>&1 > /dev/null; then
|
|
||||||
mkdir -p /media/startos/next/boot/efi
|
|
||||||
mount --bind /boot/efi /media/startos/next/boot/efi
|
|
||||||
fi
|
|
||||||
|
|
||||||
if mountpoint /sys/firmware/efi/efivars 2>&1 > /dev/null; then
|
if mountpoint /sys/firmware/efi/efivars 2>&1 > /dev/null; then
|
||||||
mount --bind /sys/firmware/efi/efivars /media/startos/next/sys/firmware/efi/efivars
|
mount -t efivarfs efivarfs /media/startos/next/sys/firmware/efi/efivars
|
||||||
fi
|
fi
|
||||||
|
|
||||||
chroot /media/startos/next bash -e << "EOF"
|
chroot /media/startos/next bash -e << "EOF"
|
||||||
@@ -68,9 +63,18 @@ fi
|
|||||||
|
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
|
# Sign unsigned kernel modules for Secure Boot
|
||||||
|
SIGN_FILE="$(ls -1 /media/startos/next/usr/lib/linux-kbuild-*/scripts/sign-file 2>/dev/null | head -1)"
|
||||||
|
/media/startos/next/usr/lib/startos/scripts/sign-unsigned-modules \
|
||||||
|
--source /media/startos/lower \
|
||||||
|
--dest /media/startos/config/overlay \
|
||||||
|
--sign-file "$SIGN_FILE" \
|
||||||
|
--mok-key /media/startos/config/overlay/var/lib/dkms/mok.key \
|
||||||
|
--mok-pub /media/startos/config/overlay/var/lib/dkms/mok.pub
|
||||||
|
|
||||||
sync
|
sync
|
||||||
|
|
||||||
umount -Rl /media/startos/next
|
umount -l /media/startos/next
|
||||||
umount /media/startos/upper
|
umount /media/startos/upper
|
||||||
umount /media/startos/lower
|
umount /media/startos/lower
|
||||||
|
|
||||||
|
|||||||
|
Before Width: | Height: | Size: 9.6 KiB After Width: | Height: | Size: 9.6 KiB |
367
build/manage-release.sh
Executable file
@@ -0,0 +1,367 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
REPO="Start9Labs/start-os"
|
||||||
|
REGISTRY="https://alpha-registry-x.start9.com"
|
||||||
|
S3_BUCKET="s3://startos-images"
|
||||||
|
S3_CDN="https://startos-images.nyc3.cdn.digitaloceanspaces.com"
|
||||||
|
START9_GPG_KEY="2D63C217"
|
||||||
|
|
||||||
|
ARCHES="aarch64 aarch64-nonfree aarch64-nvidia riscv64 riscv64-nonfree x86_64 x86_64-nonfree x86_64-nvidia"
|
||||||
|
CLI_ARCHES="aarch64 riscv64 x86_64"
|
||||||
|
|
||||||
|
parse_run_id() {
|
||||||
|
local val="$1"
|
||||||
|
if [[ "$val" =~ /actions/runs/([0-9]+) ]]; then
|
||||||
|
echo "${BASH_REMATCH[1]}"
|
||||||
|
else
|
||||||
|
echo "$val"
|
||||||
|
fi
|
||||||
|
}
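# Example (illustrative): both of the following print "1234567890":
#   parse_run_id "https://github.com/Start9Labs/start-os/actions/runs/1234567890"
#   parse_run_id "1234567890"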
|
||||||
|
|
||||||
|
require_version() {
|
||||||
|
if [ -z "${VERSION:-}" ]; then
|
||||||
|
read -rp "VERSION: " VERSION
|
||||||
|
if [ -z "$VERSION" ]; then
|
||||||
|
>&2 echo '$VERSION required'
|
||||||
|
exit 2
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
release_dir() {
|
||||||
|
echo "$HOME/Downloads/v$VERSION"
|
||||||
|
}
|
||||||
|
|
||||||
|
ensure_release_dir() {
|
||||||
|
local dir
|
||||||
|
dir=$(release_dir)
|
||||||
|
if [ "$CLEAN" = "1" ]; then
|
||||||
|
rm -rf "$dir"
|
||||||
|
fi
|
||||||
|
mkdir -p "$dir"
|
||||||
|
cd "$dir"
|
||||||
|
}
|
||||||
|
|
||||||
|
enter_release_dir() {
|
||||||
|
local dir
|
||||||
|
dir=$(release_dir)
|
||||||
|
if [ ! -d "$dir" ]; then
|
||||||
|
>&2 echo "Release directory $dir does not exist. Run 'download' or 'pull' first."
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
cd "$dir"
|
||||||
|
}
|
||||||
|
|
||||||
|
cli_target_for() {
|
||||||
|
local arch=$1 os=$2
|
||||||
|
local pair="${arch}-${os}"
|
||||||
|
if [ "$pair" = "riscv64-linux" ]; then
|
||||||
|
echo "riscv64gc-unknown-linux-musl"
|
||||||
|
elif [ "$pair" = "riscv64-macos" ]; then
|
||||||
|
return 1
|
||||||
|
elif [ "$os" = "linux" ]; then
|
||||||
|
echo "${arch}-unknown-linux-musl"
|
||||||
|
elif [ "$os" = "macos" ]; then
|
||||||
|
echo "${arch}-apple-darwin"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
release_files() {
|
||||||
|
for file in *.iso *.squashfs *.deb; do
|
||||||
|
[ -f "$file" ] && echo "$file"
|
||||||
|
done
|
||||||
|
for file in start-cli_*; do
|
||||||
|
[[ "$file" == *.asc ]] && continue
|
||||||
|
[ -f "$file" ] && echo "$file"
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
resolve_gh_user() {
|
||||||
|
GH_USER=${GH_USER:-$(gh api user -q .login 2>/dev/null || true)}
|
||||||
|
GH_GPG_KEY=$(git config user.signingkey 2>/dev/null || true)
|
||||||
|
}
|
||||||
|
|
||||||
|
# --- Subcommands ---
|
||||||
|
|
||||||
|
cmd_download() {
|
||||||
|
require_version
|
||||||
|
|
||||||
|
if [ -z "${RUN_ID:-}" ]; then
|
||||||
|
read -rp "RUN_ID (OS images, leave blank to skip): " RUN_ID
|
||||||
|
fi
|
||||||
|
RUN_ID=$(parse_run_id "${RUN_ID:-}")
|
||||||
|
|
||||||
|
if [ -z "${ST_RUN_ID:-}" ]; then
|
||||||
|
read -rp "ST_RUN_ID (start-tunnel, leave blank to skip): " ST_RUN_ID
|
||||||
|
fi
|
||||||
|
ST_RUN_ID=$(parse_run_id "${ST_RUN_ID:-}")
|
||||||
|
|
||||||
|
if [ -z "${CLI_RUN_ID:-}" ]; then
|
||||||
|
read -rp "CLI_RUN_ID (start-cli, leave blank to skip): " CLI_RUN_ID
|
||||||
|
fi
|
||||||
|
CLI_RUN_ID=$(parse_run_id "${CLI_RUN_ID:-}")
|
||||||
|
|
||||||
|
ensure_release_dir
|
||||||
|
|
||||||
|
if [ -n "$RUN_ID" ]; then
|
||||||
|
for arch in $ARCHES; do
|
||||||
|
while ! gh run download -R $REPO "$RUN_ID" -n "$arch.squashfs" -D "$(pwd)"; do sleep 1; done
|
||||||
|
done
|
||||||
|
for arch in $ARCHES; do
|
||||||
|
while ! gh run download -R $REPO "$RUN_ID" -n "$arch.iso" -D "$(pwd)"; do sleep 1; done
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -n "$ST_RUN_ID" ]; then
|
||||||
|
for arch in $CLI_ARCHES; do
|
||||||
|
while ! gh run download -R $REPO "$ST_RUN_ID" -n "start-tunnel_$arch.deb" -D "$(pwd)"; do sleep 1; done
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -n "$CLI_RUN_ID" ]; then
|
||||||
|
for arch in $CLI_ARCHES; do
|
||||||
|
for os in linux macos; do
|
||||||
|
local target
|
||||||
|
target=$(cli_target_for "$arch" "$os") || continue
|
||||||
|
while ! gh run download -R $REPO "$CLI_RUN_ID" -n "start-cli_$target" -D "$(pwd)"; do sleep 1; done
|
||||||
|
mv start-cli "start-cli_${arch}-${os}"
|
||||||
|
done
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd_pull() {
|
||||||
|
require_version
|
||||||
|
ensure_release_dir
|
||||||
|
|
||||||
|
echo "Downloading release assets from tag v$VERSION..."
|
||||||
|
|
||||||
|
# Download debs and CLI binaries from the GH release
|
||||||
|
for file in $(gh release view -R $REPO "v$VERSION" --json assets -q '.assets[].name' | grep -E '\.(deb)$|^start-cli_'); do
|
||||||
|
gh release download -R $REPO "v$VERSION" -p "$file" -D "$(pwd)" --clobber
|
||||||
|
done
|
||||||
|
|
||||||
|
# Download ISOs and squashfs from S3 CDN
|
||||||
|
for arch in $ARCHES; do
|
||||||
|
for ext in squashfs iso; do
|
||||||
|
# Get the actual filename from the GH release asset list or body
|
||||||
|
local filename
|
||||||
|
filename=$(gh release view -R $REPO "v$VERSION" --json assets -q ".assets[].name" | grep "_${arch}\\.${ext}$" || true)
|
||||||
|
if [ -z "$filename" ]; then
|
||||||
|
filename=$(gh release view -R $REPO "v$VERSION" --json body -q .body | grep -oP "[^ ]*_${arch}\\.${ext}" | head -1 || true)
|
||||||
|
fi
|
||||||
|
if [ -n "$filename" ]; then
|
||||||
|
echo "Downloading $filename from S3..."
|
||||||
|
curl -fSL -o "$filename" "$S3_CDN/v$VERSION/$filename"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd_register() {
|
||||||
|
require_version
|
||||||
|
enter_release_dir
|
||||||
|
start-cli --registry=$REGISTRY registry os version add "$VERSION" "v$VERSION" '' ">=0.3.5 <=$VERSION"
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd_upload() {
|
||||||
|
require_version
|
||||||
|
enter_release_dir
|
||||||
|
|
||||||
|
for file in $(release_files); do
|
||||||
|
case "$file" in
|
||||||
|
*.iso|*.squashfs)
|
||||||
|
s3cmd put -P "$file" "$S3_BUCKET/v$VERSION/$file"
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
gh release upload -R $REPO "v$VERSION" "$file"
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd_index() {
|
||||||
|
require_version
|
||||||
|
enter_release_dir
|
||||||
|
|
||||||
|
for arch in $ARCHES; do
|
||||||
|
for file in *_"$arch".squashfs *_"$arch".iso; do
|
||||||
|
start-cli --registry=$REGISTRY registry os asset add --platform="$arch" --version="$VERSION" "$file" "$S3_CDN/v$VERSION/$file"
|
||||||
|
done
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd_sign() {
|
||||||
|
require_version
|
||||||
|
enter_release_dir
|
||||||
|
resolve_gh_user
|
||||||
|
|
||||||
|
mkdir -p signatures
|
||||||
|
|
||||||
|
for file in $(release_files); do
|
||||||
|
gpg -u $START9_GPG_KEY --detach-sign --armor -o "signatures/${file}.start9.asc" "$file"
|
||||||
|
if [ -n "$GH_USER" ] && [ -n "$GH_GPG_KEY" ]; then
|
||||||
|
gpg -u "$GH_GPG_KEY" --detach-sign --armor -o "signatures/${file}.${GH_USER}.asc" "$file"
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
|
||||||
|
gpg --export -a $START9_GPG_KEY > signatures/start9.key.asc
|
||||||
|
if [ -n "$GH_USER" ] && [ -n "$GH_GPG_KEY" ]; then
|
||||||
|
gpg --export -a "$GH_GPG_KEY" > "signatures/${GH_USER}.key.asc"
|
||||||
|
else
|
||||||
|
>&2 echo 'Warning: could not determine GitHub user or GPG signing key, skipping personal signature'
|
||||||
|
fi
|
||||||
|
tar -czvf signatures.tar.gz -C signatures .
|
||||||
|
|
||||||
|
gh release upload -R $REPO "v$VERSION" signatures.tar.gz --clobber
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd_cosign() {
|
||||||
|
require_version
|
||||||
|
enter_release_dir
|
||||||
|
resolve_gh_user
|
||||||
|
|
||||||
|
if [ -z "$GH_USER" ] || [ -z "$GH_GPG_KEY" ]; then
|
||||||
|
>&2 echo 'Error: could not determine GitHub user or GPG signing key'
|
||||||
|
>&2 echo "Set GH_USER and/or configure git user.signingkey"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Downloading existing signatures..."
|
||||||
|
gh release download -R $REPO "v$VERSION" -p "signatures.tar.gz" -D "$(pwd)" --clobber
|
||||||
|
mkdir -p signatures
|
||||||
|
tar -xzf signatures.tar.gz -C signatures
|
||||||
|
|
||||||
|
echo "Adding personal signatures as $GH_USER..."
|
||||||
|
for file in $(release_files); do
|
||||||
|
gpg -u "$GH_GPG_KEY" --detach-sign --armor -o "signatures/${file}.${GH_USER}.asc" "$file"
|
||||||
|
done
|
||||||
|
|
||||||
|
gpg --export -a "$GH_GPG_KEY" > "signatures/${GH_USER}.key.asc"
|
||||||
|
|
||||||
|
echo "Re-packing signatures..."
|
||||||
|
tar -czvf signatures.tar.gz -C signatures .
|
||||||
|
|
||||||
|
gh release upload -R $REPO "v$VERSION" signatures.tar.gz --clobber
|
||||||
|
echo "Done. Personal signatures for $GH_USER added to v$VERSION."
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd_notes() {
|
||||||
|
require_version
|
||||||
|
enter_release_dir
|
||||||
|
|
||||||
|
cat << EOF
|
||||||
|
# ISO Downloads
|
||||||
|
|
||||||
|
- [x86_64/AMD64]($S3_CDN/v$VERSION/$(ls *_x86_64-nonfree.iso))
|
||||||
|
- [x86_64/AMD64 + NVIDIA]($S3_CDN/v$VERSION/$(ls *_x86_64-nvidia.iso))
|
||||||
|
- [x86_64/AMD64-slim (FOSS-only)]($S3_CDN/v$VERSION/$(ls *_x86_64.iso) "Without proprietary software or drivers")
|
||||||
|
- [aarch64/ARM64]($S3_CDN/v$VERSION/$(ls *_aarch64-nonfree.iso))
|
||||||
|
- [aarch64/ARM64 + NVIDIA]($S3_CDN/v$VERSION/$(ls *_aarch64-nvidia.iso))
|
||||||
|
- [aarch64/ARM64-slim (FOSS-only)]($S3_CDN/v$VERSION/$(ls *_aarch64.iso) "Without proprietary software or drivers")
|
||||||
|
- [RISCV64 (RVA23)]($S3_CDN/v$VERSION/$(ls *_riscv64-nonfree.iso))
|
||||||
|
- [RISCV64 (RVA23)-slim (FOSS-only)]($S3_CDN/v$VERSION/$(ls *_riscv64.iso) "Without proprietary software or drivers")
|
||||||
|
|
||||||
|
EOF
|
||||||
|
cat << 'EOF'
|
||||||
|
# StartOS Checksums
|
||||||
|
|
||||||
|
## SHA-256
|
||||||
|
```
|
||||||
|
EOF
|
||||||
|
sha256sum *.iso *.squashfs
|
||||||
|
cat << 'EOF'
|
||||||
|
```
|
||||||
|
|
||||||
|
## BLAKE-3
|
||||||
|
```
|
||||||
|
EOF
|
||||||
|
b3sum *.iso *.squashfs
|
||||||
|
cat << 'EOF'
|
||||||
|
```
|
||||||
|
|
||||||
|
# Start-Tunnel Checksums
|
||||||
|
|
||||||
|
## SHA-256
|
||||||
|
```
|
||||||
|
EOF
|
||||||
|
sha256sum start-tunnel*.deb
|
||||||
|
cat << 'EOF'
|
||||||
|
```
|
||||||
|
|
||||||
|
## BLAKE-3
|
||||||
|
```
|
||||||
|
EOF
|
||||||
|
b3sum start-tunnel*.deb
|
||||||
|
cat << 'EOF'
|
||||||
|
```
|
||||||
|
|
||||||
|
# start-cli Checksums
|
||||||
|
|
||||||
|
## SHA-256
|
||||||
|
```
|
||||||
|
EOF
|
||||||
|
release_files | grep '^start-cli_' | xargs sha256sum
|
||||||
|
cat << 'EOF'
|
||||||
|
```
|
||||||
|
|
||||||
|
## BLAKE-3
|
||||||
|
```
|
||||||
|
EOF
|
||||||
|
release_files | grep '^start-cli_' | xargs b3sum
|
||||||
|
cat << 'EOF'
|
||||||
|
```
|
||||||
|
EOF
|
||||||
|
}

cmd_full_release() {
  cmd_download
  cmd_register
  cmd_upload
  cmd_index
  cmd_sign
  cmd_notes
}

usage() {
  cat << 'EOF'
Usage: manage-release.sh <subcommand>

Subcommands:
  download      Download artifacts from GitHub Actions runs
                Requires: RUN_ID, ST_RUN_ID, CLI_RUN_ID (any combination)
  pull          Download an existing release from the GH tag and S3
  register      Register the version in the Start9 registry
  upload        Upload artifacts to GitHub Releases and S3
  index         Add assets to the registry index
  sign          Sign all artifacts with Start9 org key (+ personal key if available)
                and upload signatures.tar.gz
  cosign        Add personal GPG signature to an existing release's signatures
                (requires 'pull' first so you can verify assets before signing)
  notes         Print release notes with download links and checksums
  full-release  Run: download → register → upload → index → sign → notes

Environment variables:
  VERSION (required)  Release version
  RUN_ID              GitHub Actions run ID for OS images (download subcommand)
  ST_RUN_ID           GitHub Actions run ID for start-tunnel (download subcommand)
  CLI_RUN_ID          GitHub Actions run ID for start-cli (download subcommand)
  GH_USER             Override GitHub username (default: autodetected via gh cli)
  CLEAN               Set to 1 to wipe and recreate the release directory
EOF
}

case "${1:-}" in
  download) cmd_download ;;
  pull) cmd_pull ;;
  register) cmd_register ;;
  upload) cmd_upload ;;
  index) cmd_index ;;
  sign) cmd_sign ;;
  cosign) cmd_cosign ;;
  notes) cmd_notes ;;
  full-release) cmd_full_release ;;
  *) usage; exit 1 ;;
esac
@@ -1,87 +0,0 @@
#!/bin/bash

set -e

function partition_for () {
  if [[ "$1" =~ [0-9]+$ ]]; then
    echo "$1p$2"
  else
    echo "$1$2"
  fi
}

VERSION=$(cat VERSION.txt)
ENVIRONMENT=$(cat ENVIRONMENT.txt)
GIT_HASH=$(cat GIT_HASH.txt | head -c 7)
DATE=$(date +%Y%m%d)

ROOT_PART_END=7217792

VERSION_FULL="$VERSION-$GIT_HASH"

if [ -n "$ENVIRONMENT" ]; then
  VERSION_FULL="$VERSION_FULL~$ENVIRONMENT"
fi

TARGET_NAME=startos-${VERSION_FULL}-${DATE}_raspberrypi.img
TARGET_SIZE=$[($ROOT_PART_END+1)*512]

rm -f $TARGET_NAME
truncate -s $TARGET_SIZE $TARGET_NAME
(
  echo o
  echo x
  echo i
  echo "0xcb15ae4d"
  echo r
  echo n
  echo p
  echo 1
  echo 2048
  echo 526335
  echo t
  echo c
  echo n
  echo p
  echo 2
  echo 526336
  echo $ROOT_PART_END
  echo a
  echo 1
  echo w
) | fdisk $TARGET_NAME
OUTPUT_DEVICE=$(sudo losetup --show -fP $TARGET_NAME)
sudo mkfs.ext4 `partition_for ${OUTPUT_DEVICE} 2`
sudo mkfs.vfat `partition_for ${OUTPUT_DEVICE} 1`

TMPDIR=$(mktemp -d)

sudo mount `partition_for ${OUTPUT_DEVICE} 2` $TMPDIR
sudo mkdir $TMPDIR/boot
sudo mount `partition_for ${OUTPUT_DEVICE} 1` $TMPDIR/boot
sudo unsquashfs -f -d $TMPDIR startos.raspberrypi.squashfs
REAL_GIT_HASH=$(cat $TMPDIR/usr/lib/startos/GIT_HASH.txt)
REAL_VERSION=$(cat $TMPDIR/usr/lib/startos/VERSION.txt)
REAL_ENVIRONMENT=$(cat $TMPDIR/usr/lib/startos/ENVIRONMENT.txt)
sudo sed -i 's| boot=startos| init=/usr/lib/startos/scripts/init_resize\.sh|' $TMPDIR/boot/cmdline.txt
sudo cp ./build/raspberrypi/fstab $TMPDIR/etc/
sudo cp ./build/raspberrypi/init_resize.sh $TMPDIR/usr/lib/startos/scripts/init_resize.sh
sudo umount $TMPDIR/boot
sudo umount $TMPDIR
sudo losetup -d $OUTPUT_DEVICE

if [ "$ALLOW_VERSION_MISMATCH" != 1 ]; then
  if [ "$(cat GIT_HASH.txt)" != "$REAL_GIT_HASH" ]; then
    >&2 echo "startos.raspberrypi.squashfs GIT_HASH.txt mismatch"
    >&2 echo "expected $REAL_GIT_HASH (dpkg) found $(cat GIT_HASH.txt) (repo)"
    exit 1
  fi
  if [ "$(cat VERSION.txt)" != "$REAL_VERSION" ]; then
    >&2 echo "startos.raspberrypi.squashfs VERSION.txt mismatch"
    exit 1
  fi
  if [ "$(cat ENVIRONMENT.txt)" != "$REAL_ENVIRONMENT" ]; then
    >&2 echo "startos.raspberrypi.squashfs ENVIRONMENT.txt mismatch"
    exit 1
  fi
fi
33
build/save-migration-images.sh
Executable file
@@ -0,0 +1,33 @@
#!/bin/bash
# Save Docker images needed by the 0.3.6-alpha.0 migration as tarballs
# so they can be bundled into the OS and loaded without internet access.
set -e

ARCH="${ARCH:-x86_64}"
DESTDIR="${1:-build/lib/migration-images}"

if [ "$ARCH" = "x86_64" ]; then
  DOCKER_PLATFORM="linux/amd64"
elif [ "$ARCH" = "aarch64" ]; then
  DOCKER_PLATFORM="linux/arm64"
else
  DOCKER_PLATFORM="linux/$ARCH"
fi

IMAGES=("start9/compat:latest" "start9/utils:latest" "tonistiigi/binfmt:latest")

mkdir -p "$DESTDIR"

for IMAGE in "${IMAGES[@]}"; do
  FILENAME=$(echo "$IMAGE" | sed 's|/|_|g; s/:/_/g').tar
  if [ -f "$DESTDIR/$FILENAME" ]; then
    echo "Skipping $IMAGE (already saved)"
    continue
  fi
  echo "Pulling $IMAGE for $DOCKER_PLATFORM..."
  docker pull --platform "$DOCKER_PLATFORM" "$IMAGE"
  echo "Saving $IMAGE to $DESTDIR/$FILENAME..."
  docker save "$IMAGE" -o "$DESTDIR/$FILENAME"
done

echo "Migration images saved to $DESTDIR"
@@ -1,142 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
|
|
||||||
if [ -z "$VERSION" ]; then
|
|
||||||
>&2 echo '$VERSION required'
|
|
||||||
exit 2
|
|
||||||
fi
|
|
||||||
|
|
||||||
set -e
|
|
||||||
|
|
||||||
if [ "$SKIP_DL" != "1" ]; then
|
|
||||||
if [ "$SKIP_CLEAN" != "1" ]; then
|
|
||||||
rm -rf ~/Downloads/v$VERSION
|
|
||||||
mkdir ~/Downloads/v$VERSION
|
|
||||||
cd ~/Downloads/v$VERSION
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -n "$RUN_ID" ]; then
|
|
||||||
for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree raspberrypi; do
|
|
||||||
while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.squashfs -D $(pwd); do sleep 1; done
|
|
||||||
done
|
|
||||||
for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do
|
|
||||||
while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.iso -D $(pwd); do sleep 1; done
|
|
||||||
done
|
|
||||||
while ! gh run download -R Start9Labs/start-os $RUN_ID -n raspberrypi.img -D $(pwd); do sleep 1; done
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -n "$ST_RUN_ID" ]; then
|
|
||||||
for arch in aarch64 riscv64 x86_64; do
|
|
||||||
while ! gh run download -R Start9Labs/start-os $ST_RUN_ID -n start-tunnel_$arch.deb -D $(pwd); do sleep 1; done
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ -n "$CLI_RUN_ID" ]; then
|
|
||||||
for arch in aarch64 riscv64 x86_64; do
|
|
||||||
for os in linux macos; do
|
|
||||||
pair=${arch}-${os}
|
|
||||||
if [ "${pair}" = "riscv64-linux" ]; then
|
|
||||||
target=riscv64gc-unknown-linux-musl
|
|
||||||
elif [ "${pair}" = "riscv64-macos" ]; then
|
|
||||||
continue
|
|
||||||
elif [ "${os}" = "linux" ]; then
|
|
||||||
target="${arch}-unknown-linux-musl"
|
|
||||||
elif [ "${os}" = "macos" ]; then
|
|
||||||
target="${arch}-apple-darwin"
|
|
||||||
fi
|
|
||||||
while ! gh run download -R Start9Labs/start-os $CLI_RUN_ID -n start-cli_$target -D $(pwd); do sleep 1; done
|
|
||||||
mv start-cli "start-cli_${pair}"
|
|
||||||
done
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
cd ~/Downloads/v$VERSION
|
|
||||||
fi
|
|
||||||
|
|
||||||
start-cli --registry=https://alpha-registry-x.start9.com registry os version add $VERSION "v$VERSION" '' ">=0.3.5 <=$VERSION"
|
|
||||||
|
|
||||||
if [ "$SKIP_UL" = "2" ]; then
|
|
||||||
exit 2
|
|
||||||
elif [ "$SKIP_UL" != "1" ]; then
|
|
||||||
for file in *.squashfs *.iso *.deb start-cli_*; do
|
|
||||||
gh release upload -R Start9Labs/start-os v$VERSION $file
|
|
||||||
done
|
|
||||||
for file in *.img; do
|
|
||||||
if ! [ -f $file.gz ]; then
|
|
||||||
cat $file | pigz > $file.gz
|
|
||||||
fi
|
|
||||||
gh release upload -R Start9Labs/start-os v$VERSION $file.gz
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
|
|
||||||
if [ "$SKIP_INDEX" != "1" ]; then
|
|
||||||
for arch in aarch64 aarch64-nonfree x86_64 x86_64-nonfree; do
|
|
||||||
for file in *_$arch.squashfs *_$arch.iso; do
|
|
||||||
start-cli --registry=https://alpha-registry-x.start9.com registry os asset add --platform=$arch --version=$VERSION $file https://github.com/Start9Labs/start-os/releases/download/v$VERSION/$(echo -n "$file" | sed 's/~/./g')
|
|
||||||
done
|
|
||||||
done
|
|
||||||
for arch in raspberrypi; do
|
|
||||||
for file in *_$arch.squashfs; do
|
|
||||||
start-cli --registry=https://alpha-registry-x.start9.com registry os asset add --platform=$arch --version=$VERSION $file https://github.com/Start9Labs/start-os/releases/download/v$VERSION/$(echo -n "$file" | sed 's/~/./g')
|
|
||||||
done
|
|
||||||
done
|
|
||||||
fi
|
|
||||||
|
|
||||||
for file in *.iso *.img *.img.gz *.squashfs *.deb start-cli_*; do
|
|
||||||
gpg -u 7CFFDA41CA66056A --detach-sign --armor -o "${file}.asc" "$file"
|
|
||||||
done
|
|
||||||
|
|
||||||
gpg --export -a 7CFFDA41CA66056A > dr-bonez.key.asc
|
|
||||||
tar -czvf signatures.tar.gz *.asc
|
|
||||||
|
|
||||||
gh release upload -R Start9Labs/start-os v$VERSION signatures.tar.gz
|
|
||||||
|
|
||||||
cat << 'EOF'
|
|
||||||
# StartOS Checksums
|
|
||||||
|
|
||||||
## SHA-256
|
|
||||||
```
|
|
||||||
EOF
|
|
||||||
sha256sum *.iso *.img *img.gz *.squashfs
|
|
||||||
cat << 'EOF'
|
|
||||||
```
|
|
||||||
|
|
||||||
## BLAKE-3
|
|
||||||
```
|
|
||||||
EOF
|
|
||||||
b3sum *.iso *.img *.img.gz *.squashfs
|
|
||||||
cat << 'EOF'
|
|
||||||
```
|
|
||||||
|
|
||||||
# Start-Tunnel Checksums
|
|
||||||
|
|
||||||
## SHA-256
|
|
||||||
```
|
|
||||||
EOF
|
|
||||||
sha256sum start-tunnel*.deb
|
|
||||||
cat << 'EOF'
|
|
||||||
```
|
|
||||||
|
|
||||||
## BLAKE-3
|
|
||||||
```
|
|
||||||
EOF
|
|
||||||
b3sum start-tunnel*.deb
|
|
||||||
cat << 'EOF'
|
|
||||||
```
|
|
||||||
|
|
||||||
# start-cli Checksums
|
|
||||||
|
|
||||||
## SHA-256
|
|
||||||
```
|
|
||||||
EOF
|
|
||||||
sha256sum start-cli_*
|
|
||||||
cat << 'EOF'
|
|
||||||
```
|
|
||||||
|
|
||||||
## BLAKE-3
|
|
||||||
```
|
|
||||||
EOF
|
|
||||||
b3sum start-cli_*
|
|
||||||
cat << 'EOF'
|
|
||||||
```
|
|
||||||
EOF
|
|
||||||
|
|
||||||
32
container-runtime/CLAUDE.md
Normal file
@@ -0,0 +1,32 @@
# Container Runtime — Node.js Service Manager

Node.js runtime that manages service containers via JSON-RPC. See `RPCSpec.md` in this directory for the full RPC protocol.

## Architecture

```
LXC Container (uniform base for all services)
└── systemd
    └── container-runtime.service
        └── Loads /usr/lib/startos/package/index.js (from s9pk javascript.squashfs)
            └── Package JS launches subcontainers (from images in s9pk)
```

The container runtime communicates with the host via JSON-RPC over Unix socket. Package JavaScript must export functions conforming to the `ABI` type defined in `sdk/base/lib/types.ts`.

## `/media/startos/` Directory (mounted by host into container)

| Path                 | Description                                           |
| -------------------- | ----------------------------------------------------- |
| `volumes/<name>/`    | Package data volumes (id-mapped, persistent)          |
| `assets/`            | Read-only assets from s9pk `assets.squashfs`          |
| `images/<name>/`     | Container images (squashfs, used for subcontainers)   |
| `images/<name>.env`  | Environment variables for image                       |
| `images/<name>.json` | Image metadata                                        |
| `backup/`            | Backup mount point (mounted during backup operations) |
| `rpc/service.sock`   | RPC socket (container runtime listens here)           |
| `rpc/host.sock`      | Host RPC socket (for effects callbacks to host)       |

## S9PK Structure

See `../core/s9pk-structure.md` for the S9PK package format.
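To make the socket flow above concrete, here is a minimal sketch of a JSON-RPC round trip to the host effects socket. The socket path, the `jsonrpc: "2.0"` envelope, and the `get-os-ip` method name come from the runtime sources elsewhere in this changeset; the newline-delimited framing and the standalone helper are assumptions for illustration, not the runtime's actual `EffectCreator` implementation.

```ts
import * as net from "net"

// Illustrative client for the host effects socket.
// Assumes newline-delimited JSON-RPC 2.0 framing; the real runtime wraps
// this in its EffectCreator helpers rather than raw socket handling.
const HOST_SOCKET = "/media/startos/rpc/host.sock"

function hostRpc(method: string, params: unknown): Promise<unknown> {
  return new Promise((resolve, reject) => {
    const socket = net.createConnection(HOST_SOCKET, () => {
      socket.write(JSON.stringify({ jsonrpc: "2.0", id: 0, method, params }) + "\n")
    })
    let buffer = ""
    socket.on("data", (chunk) => {
      buffer += chunk.toString()
      const newline = buffer.indexOf("\n")
      if (newline === -1) return // wait for a full line
      const response = JSON.parse(buffer.slice(0, newline))
      socket.end()
      if ("error" in response) reject(response.error)
      else resolve(response.result)
    })
    socket.on("error", reject)
  })
}

// Example: ask the host for the OS IP, mirroring the "get-os-ip" effect.
hostRpc("get-os-ip", {}).then((ip) => console.log("OS IP:", ip))
```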
@@ -1,16 +1,21 @@
|
|||||||
# Container RPC SERVER Specification
|
# Container RPC Server Specification
|
||||||
|
|
||||||
|
The container runtime exposes a JSON-RPC server over a Unix socket at `/media/startos/rpc/service.sock`.
|
||||||
|
|
||||||
## Methods
|
## Methods
|
||||||
|
|
||||||
### init
|
### init
|
||||||
|
|
||||||
initialize runtime (mount `/proc`, `/sys`, `/dev`, and `/run` to each image in `/media/images`)
|
Initialize the runtime and system.
|
||||||
|
|
||||||
called after os has mounted js and images to the container
|
#### params
|
||||||
|
|
||||||
#### args
|
```ts
|
||||||
|
{
|
||||||
`[]`
|
id: string,
|
||||||
|
kind: "install" | "update" | "restore" | null,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
#### response
|
#### response
|
||||||
|
|
||||||
@@ -18,11 +23,16 @@ called after os has mounted js and images to the container
|
|||||||
|
|
||||||
### exit
|
### exit
|
||||||
|
|
||||||
shutdown runtime
|
Shutdown runtime and optionally run exit hooks for a target version.
|
||||||
|
|
||||||
#### args
|
#### params
|
||||||
|
|
||||||
`[]`
|
```ts
|
||||||
|
{
|
||||||
|
id: string,
|
||||||
|
target: string | null, // ExtendedVersion or VersionRange
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
#### response
|
#### response
|
||||||
|
|
||||||
@@ -30,11 +40,11 @@ shutdown runtime
|
|||||||
|
|
||||||
### start
|
### start
|
||||||
|
|
||||||
run main method if not already running
|
Run main method if not already running.
|
||||||
|
|
||||||
#### args
|
#### params
|
||||||
|
|
||||||
`[]`
|
None
|
||||||
|
|
||||||
#### response
|
#### response
|
||||||
|
|
||||||
@@ -42,11 +52,11 @@ run main method if not already running
|
|||||||
|
|
||||||
### stop
|
### stop
|
||||||
|
|
||||||
stop main method by sending SIGTERM to child processes, and SIGKILL after timeout
|
Stop main method by sending SIGTERM to child processes, and SIGKILL after timeout.
|
||||||
|
|
||||||
#### args
|
#### params
|
||||||
|
|
||||||
`{ timeout: millis }`
|
None
|
||||||
|
|
||||||
#### response
|
#### response
|
||||||
|
|
||||||
@@ -54,15 +64,16 @@ stop main method by sending SIGTERM to child processes, and SIGKILL after timeou
|
|||||||
|
|
||||||
### execute
|
### execute
|
||||||
|
|
||||||
run a specific package procedure
|
Run a specific package procedure.
|
||||||
|
|
||||||
#### args
|
#### params
|
||||||
|
|
||||||
```ts
|
```ts
|
||||||
{
|
{
|
||||||
procedure: JsonPath,
|
id: string, // event ID
|
||||||
input: any,
|
procedure: string, // JSON path (e.g., "/backup/create", "/actions/{name}/run")
|
||||||
timeout: millis,
|
input: any,
|
||||||
|
timeout: number | null,
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -72,18 +83,64 @@ run a specific package procedure
|
|||||||
|
|
||||||
### sandbox
|
### sandbox
|
||||||
|
|
||||||
run a specific package procedure in sandbox mode
|
Run a specific package procedure in sandbox mode. Same interface as `execute`.
|
||||||
|
|
||||||
#### args
|
UNIMPLEMENTED: this feature is planned but does not exist
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
```ts
|
```ts
|
||||||
{
|
{
|
||||||
procedure: JsonPath,
|
id: string,
|
||||||
input: any,
|
procedure: string,
|
||||||
timeout: millis,
|
input: any,
|
||||||
|
timeout: number | null,
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
#### response
|
#### response
|
||||||
|
|
||||||
`any`
|
`any`
|
||||||
|
|
||||||
|
### callback
|
||||||
|
|
||||||
|
Handle a callback from an effect.
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
```ts
|
||||||
|
{
|
||||||
|
id: number,
|
||||||
|
args: any[],
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`null` (no response sent)
|
||||||
|
|
||||||
|
### eval
|
||||||
|
|
||||||
|
Evaluate a script in the runtime context. Used for debugging.
|
||||||
|
|
||||||
|
#### params
|
||||||
|
|
||||||
|
```ts
|
||||||
|
{
|
||||||
|
script: string,
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
#### response
|
||||||
|
|
||||||
|
`any`
|
||||||
|
|
||||||
|
## Procedures
|
||||||
|
|
||||||
|
The `execute` and `sandbox` methods route to procedures based on the `procedure` path:
|
||||||
|
|
||||||
|
| Procedure | Description |
|
||||||
|
| -------------------------- | ---------------------------- |
|
||||||
|
| `/backup/create` | Create a backup |
|
||||||
|
| `/actions/{name}/getInput` | Get input spec for an action |
|
||||||
|
| `/actions/{name}/run` | Run an action with input |
|
||||||
|
|||||||
30
container-runtime/__mocks__/mime.js
Normal file
@@ -0,0 +1,30 @@
// Mock for ESM-only mime package — Jest's module loader doesn't support require(esm)
const types = {
  ".png": "image/png",
  ".jpg": "image/jpeg",
  ".jpeg": "image/jpeg",
  ".gif": "image/gif",
  ".svg": "image/svg+xml",
  ".webp": "image/webp",
  ".ico": "image/x-icon",
  ".json": "application/json",
  ".js": "application/javascript",
  ".html": "text/html",
  ".css": "text/css",
  ".txt": "text/plain",
  ".md": "text/markdown",
}

module.exports = {
  default: {
    getType(path) {
      const ext = "." + path.split(".").pop()
      return types[ext] || null
    },
    getExtension(type) {
      const entry = Object.entries(types).find(([, v]) => v === type)
      return entry ? entry[0].slice(1) : null
    },
  },
  __esModule: true,
}
@@ -5,7 +5,7 @@ OnFailure=container-runtime-failure.service
|
|||||||
[Service]
|
[Service]
|
||||||
Type=simple
|
Type=simple
|
||||||
Environment=RUST_LOG=startos=debug
|
Environment=RUST_LOG=startos=debug
|
||||||
ExecStart=/usr/bin/node --experimental-detect-module --trace-warnings --unhandled-rejections=warn /usr/lib/startos/init/index.js
|
ExecStart=/usr/bin/start-container pipe-wrap /usr/bin/node --experimental-detect-module --trace-warnings /usr/lib/startos/init/index.js
|
||||||
Restart=no
|
Restart=no
|
||||||
|
|
||||||
[Install]
|
[Install]
|
||||||
|
|||||||
@@ -5,4 +5,7 @@ module.exports = {
|
|||||||
testEnvironment: "node",
|
testEnvironment: "node",
|
||||||
rootDir: "./src/",
|
rootDir: "./src/",
|
||||||
modulePathIgnorePatterns: ["./dist/"],
|
modulePathIgnorePatterns: ["./dist/"],
|
||||||
|
moduleNameMapper: {
|
||||||
|
"^mime$": "<rootDir>/../__mocks__/mime.js",
|
||||||
|
},
|
||||||
}
|
}
|
||||||
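With the `moduleNameMapper` entry above, any test under `rootDir` that imports `mime` resolves to the CommonJS mock instead of the ESM-only package. A minimal sketch (the test file name and assertions are illustrative, but the expected values follow the mock's lookup table):

```ts
// src/mime-mock.test.ts (illustrative path)
import mime from "mime" // resolved to __mocks__/mime.js via moduleNameMapper

test("mime mock resolves common extensions", () => {
  // These values come from the mock's table, not the real mime package.
  expect(mime.getType("icon.png")).toBe("image/png")
  expect(mime.getExtension("text/markdown")).toBe("md")
})
```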
|
|||||||
15
container-runtime/package-lock.json
generated
@@ -19,7 +19,6 @@
|
|||||||
"lodash.merge": "^4.6.2",
|
"lodash.merge": "^4.6.2",
|
||||||
"mime": "^4.0.7",
|
"mime": "^4.0.7",
|
||||||
"node-fetch": "^3.1.0",
|
"node-fetch": "^3.1.0",
|
||||||
"ts-matches": "^6.3.2",
|
|
||||||
"tslib": "^2.5.3",
|
"tslib": "^2.5.3",
|
||||||
"typescript": "^5.1.3",
|
"typescript": "^5.1.3",
|
||||||
"yaml": "^2.3.1"
|
"yaml": "^2.3.1"
|
||||||
@@ -38,7 +37,7 @@
|
|||||||
},
|
},
|
||||||
"../sdk/dist": {
|
"../sdk/dist": {
|
||||||
"name": "@start9labs/start-sdk",
|
"name": "@start9labs/start-sdk",
|
||||||
"version": "0.4.0-beta.47",
|
"version": "0.4.0-beta.66",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@iarna/toml": "^3.0.0",
|
"@iarna/toml": "^3.0.0",
|
||||||
@@ -46,11 +45,13 @@
|
|||||||
"@noble/hashes": "^1.7.2",
|
"@noble/hashes": "^1.7.2",
|
||||||
"@types/ini": "^4.1.1",
|
"@types/ini": "^4.1.1",
|
||||||
"deep-equality-data-structures": "^2.0.0",
|
"deep-equality-data-structures": "^2.0.0",
|
||||||
|
"fast-xml-parser": "^5.5.6",
|
||||||
"ini": "^5.0.0",
|
"ini": "^5.0.0",
|
||||||
"isomorphic-fetch": "^3.0.0",
|
"isomorphic-fetch": "^3.0.0",
|
||||||
"mime": "^4.0.7",
|
"mime": "^4.0.7",
|
||||||
"ts-matches": "^6.3.2",
|
"yaml": "^2.7.1",
|
||||||
"yaml": "^2.7.1"
|
"zod": "^4.3.6",
|
||||||
|
"zod-deep-partial": "^1.2.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/jest": "^29.4.0",
|
"@types/jest": "^29.4.0",
|
||||||
@@ -6494,12 +6495,6 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/ts-matches": {
|
|
||||||
"version": "6.3.2",
|
|
||||||
"resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-6.3.2.tgz",
|
|
||||||
"integrity": "sha512-UhSgJymF8cLd4y0vV29qlKVCkQpUtekAaujXbQVc729FezS8HwqzepqvtjzQ3HboatIqN/Idor85O2RMwT7lIQ==",
|
|
||||||
"license": "MIT"
|
|
||||||
},
|
|
||||||
"node_modules/tslib": {
|
"node_modules/tslib": {
|
||||||
"version": "2.8.1",
|
"version": "2.8.1",
|
||||||
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
|
||||||
|
|||||||
@@ -28,7 +28,6 @@
|
|||||||
"lodash.merge": "^4.6.2",
|
"lodash.merge": "^4.6.2",
|
||||||
"mime": "^4.0.7",
|
"mime": "^4.0.7",
|
||||||
"node-fetch": "^3.1.0",
|
"node-fetch": "^3.1.0",
|
||||||
"ts-matches": "^6.3.2",
|
|
||||||
"tslib": "^2.5.3",
|
"tslib": "^2.5.3",
|
||||||
"typescript": "^5.1.3",
|
"typescript": "^5.1.3",
|
||||||
"yaml": "^2.3.1"
|
"yaml": "^2.3.1"
|
||||||
|
|||||||
@@ -3,33 +3,39 @@ import {
|
|||||||
types as T,
|
types as T,
|
||||||
utils,
|
utils,
|
||||||
VersionRange,
|
VersionRange,
|
||||||
|
z,
|
||||||
} from "@start9labs/start-sdk"
|
} from "@start9labs/start-sdk"
|
||||||
import * as net from "net"
|
import * as net from "net"
|
||||||
import { object, string, number, literals, some, unknown } from "ts-matches"
|
|
||||||
import { Effects } from "../Models/Effects"
|
import { Effects } from "../Models/Effects"
|
||||||
|
|
||||||
import { CallbackHolder } from "../Models/CallbackHolder"
|
import { CallbackHolder } from "../Models/CallbackHolder"
|
||||||
import { asError } from "@start9labs/start-sdk/base/lib/util"
|
import { asError } from "@start9labs/start-sdk/base/lib/util"
|
||||||
const matchRpcError = object({
|
const matchRpcError = z.object({
|
||||||
error: object({
|
error: z.object({
|
||||||
code: number,
|
code: z.number(),
|
||||||
message: string,
|
message: z.string(),
|
||||||
data: some(
|
data: z
|
||||||
string,
|
.union([
|
||||||
object({
|
z.string(),
|
||||||
details: string,
|
z.object({
|
||||||
debug: string.nullable().optional(),
|
details: z.string(),
|
||||||
}),
|
debug: z.string().nullable().optional(),
|
||||||
)
|
}),
|
||||||
|
])
|
||||||
.nullable()
|
.nullable()
|
||||||
.optional(),
|
.optional(),
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
const testRpcError = matchRpcError.test
|
function testRpcError(v: unknown): v is RpcError {
|
||||||
const testRpcResult = object({
|
return matchRpcError.safeParse(v).success
|
||||||
result: unknown,
|
}
|
||||||
}).test
|
const matchRpcResult = z.object({
|
||||||
type RpcError = typeof matchRpcError._TYPE
|
result: z.unknown(),
|
||||||
|
})
|
||||||
|
function testRpcResult(v: unknown): v is z.infer<typeof matchRpcResult> {
|
||||||
|
return matchRpcResult.safeParse(v).success
|
||||||
|
}
|
||||||
|
type RpcError = z.infer<typeof matchRpcError>
|
||||||
|
|
||||||
const SOCKET_PATH = "/media/startos/rpc/host.sock"
|
const SOCKET_PATH = "/media/startos/rpc/host.sock"
|
||||||
let hostSystemId = 0
|
let hostSystemId = 0
|
||||||
@@ -71,7 +77,7 @@ const rpcRoundFor =
|
|||||||
"Error in host RPC:",
|
"Error in host RPC:",
|
||||||
utils.asError({ method, params, error: res.error }),
|
utils.asError({ method, params, error: res.error }),
|
||||||
)
|
)
|
||||||
if (string.test(res.error.data)) {
|
if (typeof res.error.data === "string") {
|
||||||
message += ": " + res.error.data
|
message += ": " + res.error.data
|
||||||
console.error(`Details: ${res.error.data}`)
|
console.error(`Details: ${res.error.data}`)
|
||||||
} else {
|
} else {
|
||||||
@@ -181,9 +187,10 @@ export function makeEffects(context: EffectContext): Effects {
|
|||||||
getServiceManifest(
|
getServiceManifest(
|
||||||
...[options]: Parameters<T.Effects["getServiceManifest"]>
|
...[options]: Parameters<T.Effects["getServiceManifest"]>
|
||||||
) {
|
) {
|
||||||
return rpcRound("get-service-manifest", options) as ReturnType<
|
return rpcRound("get-service-manifest", {
|
||||||
T.Effects["getServiceManifest"]
|
...options,
|
||||||
>
|
callback: context.callbacks?.addCallback(options.callback) || null,
|
||||||
|
}) as ReturnType<T.Effects["getServiceManifest"]>
|
||||||
},
|
},
|
||||||
subcontainer: {
|
subcontainer: {
|
||||||
createFs(options: { imageId: string; name: string }) {
|
createFs(options: { imageId: string; name: string }) {
|
||||||
@@ -205,9 +212,10 @@ export function makeEffects(context: EffectContext): Effects {
|
|||||||
>
|
>
|
||||||
}) as Effects["exportServiceInterface"],
|
}) as Effects["exportServiceInterface"],
|
||||||
getContainerIp(...[options]: Parameters<T.Effects["getContainerIp"]>) {
|
getContainerIp(...[options]: Parameters<T.Effects["getContainerIp"]>) {
|
||||||
return rpcRound("get-container-ip", options) as ReturnType<
|
return rpcRound("get-container-ip", {
|
||||||
T.Effects["getContainerIp"]
|
...options,
|
||||||
>
|
callback: context.callbacks?.addCallback(options.callback) || null,
|
||||||
|
}) as ReturnType<T.Effects["getContainerIp"]>
|
||||||
},
|
},
|
||||||
getOsIp(...[]: Parameters<T.Effects["getOsIp"]>) {
|
getOsIp(...[]: Parameters<T.Effects["getOsIp"]>) {
|
||||||
return rpcRound("get-os-ip", {}) as ReturnType<T.Effects["getOsIp"]>
|
return rpcRound("get-os-ip", {}) as ReturnType<T.Effects["getOsIp"]>
|
||||||
@@ -238,9 +246,10 @@ export function makeEffects(context: EffectContext): Effects {
|
|||||||
>
|
>
|
||||||
},
|
},
|
||||||
getSslCertificate(options: Parameters<T.Effects["getSslCertificate"]>[0]) {
|
getSslCertificate(options: Parameters<T.Effects["getSslCertificate"]>[0]) {
|
||||||
return rpcRound("get-ssl-certificate", options) as ReturnType<
|
return rpcRound("get-ssl-certificate", {
|
||||||
T.Effects["getSslCertificate"]
|
...options,
|
||||||
>
|
callback: context.callbacks?.addCallback(options.callback) || null,
|
||||||
|
}) as ReturnType<T.Effects["getSslCertificate"]>
|
||||||
},
|
},
|
||||||
getSslKey(options: Parameters<T.Effects["getSslKey"]>[0]) {
|
getSslKey(options: Parameters<T.Effects["getSslKey"]>[0]) {
|
||||||
return rpcRound("get-ssl-key", options) as ReturnType<
|
return rpcRound("get-ssl-key", options) as ReturnType<
|
||||||
@@ -253,6 +262,14 @@ export function makeEffects(context: EffectContext): Effects {
|
|||||||
callback: context.callbacks?.addCallback(options.callback) || null,
|
callback: context.callbacks?.addCallback(options.callback) || null,
|
||||||
}) as ReturnType<T.Effects["getSystemSmtp"]>
|
}) as ReturnType<T.Effects["getSystemSmtp"]>
|
||||||
},
|
},
|
||||||
|
getOutboundGateway(
|
||||||
|
...[options]: Parameters<T.Effects["getOutboundGateway"]>
|
||||||
|
) {
|
||||||
|
return rpcRound("get-outbound-gateway", {
|
||||||
|
...options,
|
||||||
|
callback: context.callbacks?.addCallback(options.callback) || null,
|
||||||
|
}) as ReturnType<T.Effects["getOutboundGateway"]>
|
||||||
|
},
|
||||||
listServiceInterfaces(
|
listServiceInterfaces(
|
||||||
...[options]: Parameters<T.Effects["listServiceInterfaces"]>
|
...[options]: Parameters<T.Effects["listServiceInterfaces"]>
|
||||||
) {
|
) {
|
||||||
@@ -294,7 +311,10 @@ export function makeEffects(context: EffectContext): Effects {
|
|||||||
},
|
},
|
||||||
|
|
||||||
getStatus(...[o]: Parameters<T.Effects["getStatus"]>) {
|
getStatus(...[o]: Parameters<T.Effects["getStatus"]>) {
|
||||||
return rpcRound("get-status", o) as ReturnType<T.Effects["getStatus"]>
|
return rpcRound("get-status", {
|
||||||
|
...o,
|
||||||
|
callback: context.callbacks?.addCallback(o.callback) || null,
|
||||||
|
}) as ReturnType<T.Effects["getStatus"]>
|
||||||
},
|
},
|
||||||
/// DEPRECATED
|
/// DEPRECATED
|
||||||
setMainStatus(o: { status: "running" | "stopped" }): Promise<null> {
|
setMainStatus(o: { status: "running" | "stopped" }): Promise<null> {
|
||||||
@@ -316,9 +336,35 @@ export function makeEffects(context: EffectContext): Effects {
|
|||||||
T.Effects["setDataVersion"]
|
T.Effects["setDataVersion"]
|
||||||
>
|
>
|
||||||
},
|
},
|
||||||
|
plugin: {
|
||||||
|
url: {
|
||||||
|
register(
|
||||||
|
...[options]: Parameters<T.Effects["plugin"]["url"]["register"]>
|
||||||
|
) {
|
||||||
|
return rpcRound("plugin.url.register", options) as ReturnType<
|
||||||
|
T.Effects["plugin"]["url"]["register"]
|
||||||
|
>
|
||||||
|
},
|
||||||
|
exportUrl(
|
||||||
|
...[options]: Parameters<T.Effects["plugin"]["url"]["exportUrl"]>
|
||||||
|
) {
|
||||||
|
return rpcRound("plugin.url.export-url", options) as ReturnType<
|
||||||
|
T.Effects["plugin"]["url"]["exportUrl"]
|
||||||
|
>
|
||||||
|
},
|
||||||
|
clearUrls(
|
||||||
|
...[options]: Parameters<T.Effects["plugin"]["url"]["clearUrls"]>
|
||||||
|
) {
|
||||||
|
return rpcRound("plugin.url.clear-urls", options) as ReturnType<
|
||||||
|
T.Effects["plugin"]["url"]["clearUrls"]
|
||||||
|
>
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
}
|
}
|
||||||
if (context.callbacks?.onLeaveContext)
|
if (context.callbacks?.onLeaveContext)
|
||||||
self.onLeaveContext(() => {
|
self.onLeaveContext(() => {
|
||||||
|
self.constRetry = undefined
|
||||||
self.isInContext = false
|
self.isInContext = false
|
||||||
self.onLeaveContext = () => {
|
self.onLeaveContext = () => {
|
||||||
console.warn(
|
console.warn(
|
||||||
|
|||||||
@@ -1,25 +1,13 @@
|
|||||||
// @ts-check
|
// @ts-check
|
||||||
|
|
||||||
import * as net from "net"
|
import * as net from "net"
|
||||||
import {
|
|
||||||
object,
|
|
||||||
some,
|
|
||||||
string,
|
|
||||||
literal,
|
|
||||||
array,
|
|
||||||
number,
|
|
||||||
matches,
|
|
||||||
any,
|
|
||||||
shape,
|
|
||||||
anyOf,
|
|
||||||
literals,
|
|
||||||
} from "ts-matches"
|
|
||||||
|
|
||||||
import {
|
import {
|
||||||
ExtendedVersion,
|
ExtendedVersion,
|
||||||
types as T,
|
types as T,
|
||||||
utils,
|
utils,
|
||||||
VersionRange,
|
VersionRange,
|
||||||
|
z,
|
||||||
} from "@start9labs/start-sdk"
|
} from "@start9labs/start-sdk"
|
||||||
import * as fs from "fs"
|
import * as fs from "fs"
|
||||||
|
|
||||||
@@ -29,89 +17,92 @@ import { jsonPath, unNestPath } from "../Models/JsonPath"
|
|||||||
import { System } from "../Interfaces/System"
|
import { System } from "../Interfaces/System"
|
||||||
import { makeEffects } from "./EffectCreator"
|
import { makeEffects } from "./EffectCreator"
|
||||||
type MaybePromise<T> = T | Promise<T>
|
type MaybePromise<T> = T | Promise<T>
|
||||||
export const matchRpcResult = anyOf(
|
export const matchRpcResult = z.union([
|
||||||
object({ result: any }),
|
z.object({ result: z.any() }),
|
||||||
object({
|
z.object({
|
||||||
error: object({
|
error: z.object({
|
||||||
code: number,
|
code: z.number(),
|
||||||
message: string,
|
message: z.string(),
|
||||||
data: object({
|
data: z
|
||||||
details: string.optional(),
|
.object({
|
||||||
debug: any.optional(),
|
details: z.string().optional(),
|
||||||
})
|
debug: z.any().optional(),
|
||||||
|
})
|
||||||
.nullable()
|
.nullable()
|
||||||
.optional(),
|
.optional(),
|
||||||
}),
|
}),
|
||||||
}),
|
}),
|
||||||
)
|
])
|
||||||
|
|
||||||
export type RpcResult = typeof matchRpcResult._TYPE
|
export type RpcResult = z.infer<typeof matchRpcResult>
|
||||||
type SocketResponse = ({ jsonrpc: "2.0"; id: IdType } & RpcResult) | null
|
type SocketResponse = ({ jsonrpc: "2.0"; id: IdType } & RpcResult) | null
|
||||||
|
|
||||||
const SOCKET_PARENT = "/media/startos/rpc"
|
const SOCKET_PARENT = "/media/startos/rpc"
|
||||||
const SOCKET_PATH = "/media/startos/rpc/service.sock"
|
const SOCKET_PATH = "/media/startos/rpc/service.sock"
|
||||||
const jsonrpc = "2.0" as const
|
const jsonrpc = "2.0" as const
|
||||||
|
|
||||||
const isResult = object({ result: any }).test
|
const isResultSchema = z.object({ result: z.any() })
|
||||||
|
const isResult = (v: unknown): v is z.infer<typeof isResultSchema> =>
|
||||||
|
isResultSchema.safeParse(v).success
|
||||||
|
|
||||||
const idType = some(string, number, literal(null))
|
const idType = z.union([z.string(), z.number(), z.literal(null)])
|
||||||
type IdType = null | string | number | undefined
|
type IdType = null | string | number | undefined
|
||||||
const runType = object({
|
const runType = z.object({
|
||||||
id: idType.optional(),
|
id: idType.optional(),
|
||||||
method: literal("execute"),
|
method: z.literal("execute"),
|
||||||
params: object({
|
params: z.object({
|
||||||
id: string,
|
id: z.string(),
|
||||||
procedure: string,
|
procedure: z.string(),
|
||||||
input: any,
|
input: z.any(),
|
||||||
timeout: number.nullable().optional(),
|
timeout: z.number().nullable().optional(),
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
const sandboxRunType = object({
|
const sandboxRunType = z.object({
|
||||||
id: idType.optional(),
|
id: idType.optional(),
|
||||||
method: literal("sandbox"),
|
method: z.literal("sandbox"),
|
||||||
params: object({
|
params: z.object({
|
||||||
id: string,
|
id: z.string(),
|
||||||
procedure: string,
|
procedure: z.string(),
|
||||||
input: any,
|
input: z.any(),
|
||||||
timeout: number.nullable().optional(),
|
timeout: z.number().nullable().optional(),
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
const callbackType = object({
|
const callbackType = z.object({
|
||||||
method: literal("callback"),
|
method: z.literal("callback"),
|
||||||
params: object({
|
params: z.object({
|
||||||
id: number,
|
id: z.number(),
|
||||||
args: array,
|
args: z.array(z.unknown()),
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
const initType = object({
|
const initType = z.object({
|
||||||
id: idType.optional(),
|
id: idType.optional(),
|
||||||
method: literal("init"),
|
method: z.literal("init"),
|
||||||
params: object({
|
params: z.object({
|
||||||
id: string,
|
id: z.string(),
|
||||||
kind: literals("install", "update", "restore").nullable(),
|
kind: z.enum(["install", "update", "restore"]).nullable(),
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
const startType = object({
|
const startType = z.object({
|
||||||
id: idType.optional(),
|
id: idType.optional(),
|
||||||
method: literal("start"),
|
method: z.literal("start"),
|
||||||
})
|
})
|
||||||
const stopType = object({
|
const stopType = z.object({
|
||||||
id: idType.optional(),
|
id: idType.optional(),
|
||||||
method: literal("stop"),
|
method: z.literal("stop"),
|
||||||
})
|
})
|
||||||
const exitType = object({
|
const exitType = z.object({
|
||||||
id: idType.optional(),
|
id: idType.optional(),
|
||||||
method: literal("exit"),
|
method: z.literal("exit"),
|
||||||
params: object({
|
params: z.object({
|
||||||
id: string,
|
id: z.string(),
|
||||||
target: string.nullable(),
|
target: z.string().nullable(),
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
const evalType = object({
|
const evalType = z.object({
|
||||||
id: idType.optional(),
|
id: idType.optional(),
|
||||||
method: literal("eval"),
|
method: z.literal("eval"),
|
||||||
params: object({
|
params: z.object({
|
||||||
script: string,
|
script: z.string(),
|
||||||
}),
|
}),
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -144,7 +135,9 @@ const handleRpc = (id: IdType, result: Promise<RpcResult>) =>
|
|||||||
},
|
},
|
||||||
}))
|
}))
|
||||||
|
|
||||||
const hasId = object({ id: idType }).test
|
const hasIdSchema = z.object({ id: idType })
|
||||||
|
const hasId = (v: unknown): v is z.infer<typeof hasIdSchema> =>
|
||||||
|
hasIdSchema.safeParse(v).success
|
||||||
export class RpcListener {
|
export class RpcListener {
|
||||||
shouldExit = false
|
shouldExit = false
|
||||||
unixSocketServer = net.createServer(async (server) => {})
|
unixSocketServer = net.createServer(async (server) => {})
|
||||||
@@ -246,40 +239,52 @@ export class RpcListener {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private dealWithInput(input: unknown): MaybePromise<SocketResponse> {
|
private dealWithInput(input: unknown): MaybePromise<SocketResponse> {
|
||||||
return matches(input)
|
const parsed = z.object({ method: z.string() }).safeParse(input)
|
||||||
.when(runType, async ({ id, params }) => {
|
if (!parsed.success) {
|
||||||
|
console.warn(
|
||||||
|
`Couldn't parse the following input ${JSON.stringify(input)}`,
|
||||||
|
)
|
||||||
|
return {
|
||||||
|
jsonrpc,
|
||||||
|
id: (input as any)?.id,
|
||||||
|
error: {
|
||||||
|
code: -32602,
|
||||||
|
message: "invalid params",
|
||||||
|
data: {
|
||||||
|
details: JSON.stringify(input),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
switch (parsed.data.method) {
|
||||||
|
case "execute": {
|
||||||
|
const { id, params } = runType.parse(input)
|
||||||
const system = this.system
|
const system = this.system
|
||||||
const procedure = jsonPath.unsafeCast(params.procedure)
|
const procedure = jsonPath.parse(params.procedure)
|
||||||
const { input, timeout, id: eventId } = params
|
const { input: inp, timeout, id: eventId } = params
|
||||||
const result = this.getResult(
|
const result = this.getResult(procedure, system, eventId, timeout, inp)
|
||||||
procedure,
|
|
||||||
system,
|
|
||||||
eventId,
|
|
||||||
timeout,
|
|
||||||
input,
|
|
||||||
)
|
|
||||||
|
|
||||||
return handleRpc(id, result)
|
return handleRpc(id, result)
|
||||||
})
|
}
|
||||||
.when(sandboxRunType, async ({ id, params }) => {
|
case "sandbox": {
|
||||||
|
const { id, params } = sandboxRunType.parse(input)
|
||||||
const system = this.system
|
const system = this.system
|
||||||
const procedure = jsonPath.unsafeCast(params.procedure)
|
const procedure = jsonPath.parse(params.procedure)
|
||||||
const { input, timeout, id: eventId } = params
|
const { input: inp, timeout, id: eventId } = params
|
||||||
const result = this.getResult(
|
const result = this.getResult(procedure, system, eventId, timeout, inp)
|
||||||
procedure,
|
|
||||||
system,
|
|
||||||
eventId,
|
|
||||||
timeout,
|
|
||||||
input,
|
|
||||||
)
|
|
||||||
|
|
||||||
return handleRpc(id, result)
|
return handleRpc(id, result)
|
||||||
})
|
}
|
||||||
.when(callbackType, async ({ params: { id, args } }) => {
|
case "callback": {
|
||||||
|
const {
|
||||||
|
params: { id, args },
|
||||||
|
} = callbackType.parse(input)
|
||||||
this.callCallback(id, args)
|
this.callCallback(id, args)
|
||||||
return null
|
return null
|
||||||
})
|
}
|
||||||
.when(startType, async ({ id }) => {
|
case "start": {
|
||||||
|
const { id } = startType.parse(input)
|
||||||
const callbacks =
|
const callbacks =
|
||||||
this.callbacks?.getChild("main") || this.callbacks?.child("main")
|
this.callbacks?.getChild("main") || this.callbacks?.child("main")
|
||||||
const effects = makeEffects({
|
const effects = makeEffects({
|
||||||
@@ -290,18 +295,17 @@ export class RpcListener {
|
|||||||
id,
|
id,
|
||||||
this.system.start(effects).then((result) => ({ result })),
|
this.system.start(effects).then((result) => ({ result })),
|
||||||
)
|
)
|
||||||
})
|
}
|
||||||
.when(stopType, async ({ id }) => {
|
case "stop": {
|
||||||
|
const { id } = stopType.parse(input)
|
||||||
|
this.callbacks?.removeChild("main")
|
||||||
return handleRpc(
|
return handleRpc(
|
||||||
id,
|
id,
|
||||||
this.system.stop().then((result) => {
|
this.system.stop().then((result) => ({ result })),
|
||||||
this.callbacks?.removeChild("main")
|
|
||||||
|
|
||||||
return { result }
|
|
||||||
}),
|
|
||||||
)
|
)
|
||||||
})
|
}
|
||||||
.when(exitType, async ({ id, params }) => {
|
case "exit": {
|
||||||
|
const { id, params } = exitType.parse(input)
|
||||||
return handleRpc(
|
return handleRpc(
|
||||||
id,
|
id,
|
||||||
(async () => {
|
(async () => {
|
||||||
@@ -323,8 +327,9 @@ export class RpcListener {
|
|||||||
}
|
}
|
||||||
})().then((result) => ({ result })),
|
})().then((result) => ({ result })),
|
||||||
)
|
)
|
||||||
})
|
}
|
||||||
.when(initType, async ({ id, params }) => {
|
case "init": {
|
||||||
|
const { id, params } = initType.parse(input)
|
||||||
return handleRpc(
|
return handleRpc(
|
||||||
id,
|
id,
|
||||||
(async () => {
|
(async () => {
|
||||||
@@ -349,8 +354,9 @@ export class RpcListener {
|
|||||||
}
|
}
|
||||||
})().then((result) => ({ result })),
|
})().then((result) => ({ result })),
|
||||||
)
|
)
|
||||||
})
|
}
|
||||||
.when(evalType, async ({ id, params }) => {
|
case "eval": {
|
||||||
|
const { id, params } = evalType.parse(input)
|
||||||
return handleRpc(
|
return handleRpc(
|
||||||
id,
|
id,
|
||||||
(async () => {
|
(async () => {
|
||||||
@@ -375,41 +381,28 @@ export class RpcListener {
|
|||||||
}
|
}
|
||||||
})(),
|
})(),
|
||||||
)
|
)
|
||||||
})
|
}
|
||||||
.when(
|
default: {
|
||||||
shape({ id: idType.optional(), method: string }),
|
const { id, method } = z
|
||||||
({ id, method }) => ({
|
.object({ id: idType.optional(), method: z.string() })
|
||||||
|
.passthrough()
|
||||||
|
.parse(input)
|
||||||
|
return {
|
||||||
jsonrpc,
|
jsonrpc,
|
||||||
id,
|
id,
|
||||||
error: {
|
error: {
|
||||||
code: -32601,
|
code: -32601,
|
||||||
message: `Method not found`,
|
message: "Method not found",
|
||||||
data: {
|
data: {
|
||||||
details: method,
|
details: method,
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
}),
|
|
||||||
)
|
|
||||||
|
|
||||||
.defaultToLazy(() => {
|
|
||||||
console.warn(
|
|
||||||
`Couldn't parse the following input ${JSON.stringify(input)}`,
|
|
||||||
)
|
|
||||||
return {
|
|
||||||
jsonrpc,
|
|
||||||
id: (input as any)?.id,
|
|
||||||
error: {
|
|
||||||
code: -32602,
|
|
||||||
message: "invalid params",
|
|
||||||
data: {
|
|
||||||
details: JSON.stringify(input),
|
|
||||||
},
|
|
||||||
},
|
|
||||||
}
|
}
|
||||||
})
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
private getResult(
|
private getResult(
|
||||||
procedure: typeof jsonPath._TYPE,
|
procedure: z.infer<typeof jsonPath>,
|
||||||
system: System,
|
system: System,
|
||||||
eventId: string,
|
eventId: string,
|
||||||
timeout: number | null | undefined,
|
timeout: number | null | undefined,
|
||||||
@@ -437,6 +430,7 @@ export class RpcListener {
|
|||||||
return system.getActionInput(
|
return system.getActionInput(
|
||||||
effects,
|
effects,
|
||||||
procedures[2],
|
procedures[2],
|
||||||
|
input?.prefill ?? null,
|
||||||
timeout || null,
|
timeout || null,
|
||||||
)
|
)
|
||||||
case procedures[1] === "actions" && procedures[3] === "run":
|
case procedures[1] === "actions" && procedures[3] === "run":
|
||||||
@@ -448,26 +442,18 @@ export class RpcListener {
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
})().then(ensureResultTypeShape, (error) =>
|
})().then(ensureResultTypeShape, (error) => {
|
||||||
matches(error)
|
const errorSchema = z.object({
|
||||||
.when(
|
error: z.string(),
|
||||||
object({
|
code: z.number().default(0),
|
||||||
error: string,
|
})
|
||||||
code: number.defaultTo(0),
|
const parsed = errorSchema.safeParse(error)
|
||||||
}),
|
if (parsed.success) {
|
||||||
(error) => ({
|
return {
|
||||||
error: {
|
error: { code: parsed.data.code, message: parsed.data.error },
|
||||||
code: error.code,
|
}
|
||||||
message: error.error,
|
}
|
||||||
},
|
return { error: { code: 0, message: String(error) } }
|
||||||
}),
|
})
|
||||||
)
|
|
||||||
.defaultToLazy(() => ({
|
|
||||||
error: {
|
|
||||||
code: 0,
|
|
||||||
message: String(error),
|
|
||||||
},
|
|
||||||
})),
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,7 +2,7 @@ import * as fs from "fs/promises"
|
|||||||
import * as cp from "child_process"
|
import * as cp from "child_process"
|
||||||
import { SubContainer, types as T } from "@start9labs/start-sdk"
|
import { SubContainer, types as T } from "@start9labs/start-sdk"
|
||||||
import { promisify } from "util"
|
import { promisify } from "util"
|
||||||
import { DockerProcedure, VolumeId } from "../../../Models/DockerProcedure"
|
import { DockerProcedure } from "../../../Models/DockerProcedure"
|
||||||
import { Volume } from "./matchVolume"
|
import { Volume } from "./matchVolume"
|
||||||
import {
|
import {
|
||||||
CommandOptions,
|
CommandOptions,
|
||||||
@@ -28,7 +28,7 @@ export class DockerProcedureContainer extends Drop {
|
|||||||
effects: T.Effects,
|
effects: T.Effects,
|
||||||
packageId: string,
|
packageId: string,
|
||||||
data: DockerProcedure,
|
data: DockerProcedure,
|
||||||
volumes: { [id: VolumeId]: Volume },
|
volumes: { [id: string]: Volume },
|
||||||
name: string,
|
name: string,
|
||||||
options: { subcontainer?: SubContainer<SDKManifest> } = {},
|
options: { subcontainer?: SubContainer<SDKManifest> } = {},
|
||||||
) {
|
) {
|
||||||
@@ -47,7 +47,7 @@ export class DockerProcedureContainer extends Drop {
|
|||||||
effects: T.Effects,
|
effects: T.Effects,
|
||||||
packageId: string,
|
packageId: string,
|
||||||
data: DockerProcedure,
|
data: DockerProcedure,
|
||||||
volumes: { [id: VolumeId]: Volume },
|
volumes: { [id: string]: Volume },
|
||||||
name: string,
|
name: string,
|
||||||
) {
|
) {
|
||||||
const subcontainer = await SubContainerOwned.of(
|
const subcontainer = await SubContainerOwned.of(
|
||||||
@@ -64,7 +64,7 @@ export class DockerProcedureContainer extends Drop {
|
|||||||
? `${subcontainer.rootfs}${mounts[mount]}`
|
? `${subcontainer.rootfs}${mounts[mount]}`
|
||||||
: `${subcontainer.rootfs}/${mounts[mount]}`
|
: `${subcontainer.rootfs}/${mounts[mount]}`
|
||||||
await fs.mkdir(path, { recursive: true })
|
await fs.mkdir(path, { recursive: true })
|
||||||
const volumeMount = volumes[mount]
|
const volumeMount: Volume = volumes[mount]
|
||||||
if (volumeMount.type === "data") {
|
if (volumeMount.type === "data") {
|
||||||
await subcontainer.mount(
|
await subcontainer.mount(
|
||||||
Mounts.of().mountVolume({
|
Mounts.of().mountVolume({
|
||||||
@@ -82,18 +82,15 @@ export class DockerProcedureContainer extends Drop {
|
|||||||
}),
|
}),
|
||||||
)
|
)
|
||||||
} else if (volumeMount.type === "certificate") {
|
} else if (volumeMount.type === "certificate") {
|
||||||
|
const hostInfo = await effects.getHostInfo({
|
||||||
|
hostId: volumeMount["interface-id"],
|
||||||
|
})
|
||||||
const hostnames = [
|
const hostnames = [
|
||||||
`${packageId}.embassy`,
|
`${packageId}.embassy`,
|
||||||
...new Set(
|
...new Set(
|
||||||
Object.values(
|
Object.values(hostInfo?.bindings || {})
|
||||||
(
|
.flatMap((b) => b.addresses.available)
|
||||||
await effects.getHostInfo({
|
.map((h) => h.hostname),
|
||||||
hostId: volumeMount["interface-id"],
|
|
||||||
})
|
|
||||||
)?.hostnameInfo || {},
|
|
||||||
)
|
|
||||||
.flatMap((h) => h)
|
|
||||||
.flatMap((h) => (h.kind === "onion" ? [h.hostname.value] : [])),
|
|
||||||
).values(),
|
).values(),
|
||||||
]
|
]
|
||||||
const certChain = await effects.getSslCertificate({
|
const certChain = await effects.getSslCertificate({
|
||||||
|
|||||||
@@ -15,26 +15,11 @@ import { System } from "../../../Interfaces/System"
|
|||||||
import { matchManifest, Manifest } from "./matchManifest"
|
import { matchManifest, Manifest } from "./matchManifest"
|
||||||
import * as childProcess from "node:child_process"
|
import * as childProcess from "node:child_process"
|
||||||
import { DockerProcedureContainer } from "./DockerProcedureContainer"
|
import { DockerProcedureContainer } from "./DockerProcedureContainer"
|
||||||
|
import { DockerProcedure } from "../../../Models/DockerProcedure"
|
||||||
import { promisify } from "node:util"
|
import { promisify } from "node:util"
|
||||||
import * as U from "./oldEmbassyTypes"
|
import * as U from "./oldEmbassyTypes"
|
||||||
import { MainLoop } from "./MainLoop"
|
import { MainLoop } from "./MainLoop"
|
||||||
import {
|
import { z } from "@start9labs/start-sdk"
|
||||||
matches,
|
|
||||||
boolean,
|
|
||||||
dictionary,
|
|
||||||
literal,
|
|
||||||
literals,
|
|
||||||
object,
|
|
||||||
string,
|
|
||||||
unknown,
|
|
||||||
any,
|
|
||||||
tuple,
|
|
||||||
number,
|
|
||||||
anyOf,
|
|
||||||
deferred,
|
|
||||||
Parser,
|
|
||||||
array,
|
|
||||||
} from "ts-matches"
|
|
||||||
import { AddSslOptions } from "@start9labs/start-sdk/base/lib/osBindings"
|
import { AddSslOptions } from "@start9labs/start-sdk/base/lib/osBindings"
|
||||||
import {
|
import {
|
||||||
BindOptionsByProtocol,
|
BindOptionsByProtocol,
|
||||||
@@ -57,6 +42,83 @@ function todo(): never {
|
|||||||
throw new Error("Not implemented")
|
throw new Error("Not implemented")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function getStatus(
|
||||||
|
effects: Effects,
|
||||||
|
options: Omit<Parameters<Effects["getStatus"]>[0], "callback"> = {},
|
||||||
|
) {
|
||||||
|
async function* watch(abort?: AbortSignal) {
|
||||||
|
const resolveCell = { resolve: () => {} }
|
||||||
|
effects.onLeaveContext(() => {
|
||||||
|
resolveCell.resolve()
|
||||||
|
})
|
||||||
|
abort?.addEventListener("abort", () => resolveCell.resolve())
|
||||||
|
while (effects.isInContext && !abort?.aborted) {
|
||||||
|
let callback: () => void = () => {}
|
||||||
|
const waitForNext = new Promise<void>((resolve) => {
|
||||||
|
callback = resolve
|
||||||
|
resolveCell.resolve = resolve
|
||||||
|
})
|
||||||
|
yield await effects.getStatus({ ...options, callback })
|
||||||
|
await waitForNext
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return {
|
||||||
|
const: () =>
|
||||||
|
effects.getStatus({
|
||||||
|
...options,
|
||||||
|
callback:
|
||||||
|
effects.constRetry &&
|
||||||
|
(() => effects.constRetry && effects.constRetry()),
|
||||||
|
}),
|
||||||
|
once: () => effects.getStatus(options),
|
||||||
|
watch: (abort?: AbortSignal) => {
|
||||||
|
const ctrl = new AbortController()
|
||||||
|
abort?.addEventListener("abort", () => ctrl.abort())
|
||||||
|
return watch(ctrl.signal)
|
||||||
|
},
|
||||||
|
onChange: (
|
||||||
|
callback: (
|
||||||
|
value: T.StatusInfo | null,
|
||||||
|
error?: Error,
|
||||||
|
) => { cancel: boolean } | Promise<{ cancel: boolean }>,
|
||||||
|
) => {
|
||||||
|
;(async () => {
|
||||||
|
const ctrl = new AbortController()
|
||||||
|
for await (const value of watch(ctrl.signal)) {
|
||||||
|
try {
|
||||||
|
const res = await callback(value)
|
||||||
|
if (res.cancel) {
|
||||||
|
ctrl.abort()
|
||||||
|
break
|
||||||
|
}
|
||||||
|
} catch (e) {
|
||||||
|
console.error(
|
||||||
|
"callback function threw an error @ getStatus.onChange",
|
||||||
|
e,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
})()
|
||||||
|
.catch((e) => callback(null, e as Error))
|
||||||
|
.catch((e) =>
|
||||||
|
console.error(
|
||||||
|
"callback function threw an error @ getStatus.onChange",
|
||||||
|
e,
|
||||||
|
),
|
||||||
|
)
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Local type for procedure values from the manifest.
|
||||||
|
* The manifest's zod schemas use ZodTypeAny casts that produce `unknown` in zod v4.
|
||||||
|
* This type restores the expected shape for type-safe property access.
|
||||||
|
*/
|
||||||
|
type Procedure =
|
||||||
|
| (DockerProcedure & { type: "docker" })
|
||||||
|
| { type: "script"; args: unknown[] | null }
|
||||||
|
|
||||||
const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json"
|
const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json"
|
||||||
export const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js"
|
export const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js"
|
||||||
|
|
||||||
@@ -65,26 +127,24 @@ const configFile = FileHelper.json(
|
|||||||
base: new Volume("embassy"),
|
base: new Volume("embassy"),
|
||||||
subpath: "config.json",
|
subpath: "config.json",
|
||||||
},
|
},
|
||||||
matches.any,
|
z.any(),
|
||||||
)
|
)
|
||||||
const dependsOnFile = FileHelper.json(
|
const dependsOnFile = FileHelper.json(
|
||||||
{
|
{
|
||||||
base: new Volume("embassy"),
|
base: new Volume("embassy"),
|
||||||
subpath: "dependsOn.json",
|
subpath: "dependsOn.json",
|
||||||
},
|
},
|
||||||
dictionary([string, array(string)]),
|
z.record(z.string(), z.array(z.string())),
|
||||||
)
|
)
|
||||||
|
|
||||||
const matchResult = object({
|
const matchResult = z.object({
|
||||||
result: any,
|
result: z.any(),
|
||||||
})
|
})
|
||||||
const matchError = object({
|
const matchError = z.object({
|
||||||
error: string,
|
error: z.string(),
|
||||||
})
|
})
|
||||||
const matchErrorCode = object<{
|
const matchErrorCode = z.object({
|
||||||
"error-code": [number, string] | readonly [number, string]
|
"error-code": z.tuple([z.number(), z.string()]),
|
||||||
}>({
|
|
||||||
"error-code": tuple(number, string),
|
|
||||||
})
|
})
|
||||||
|
|
||||||
const assertNever = (
|
const assertNever = (
|
||||||
@@ -96,29 +156,34 @@ const assertNever = (
|
|||||||
/**
|
/**
|
||||||
Should be changing the type for specific properties, and this is mostly a transformation for the old return types to the newer one.
|
Should be changing the type for specific properties, and this is mostly a transformation for the old return types to the newer one.
|
||||||
*/
|
*/
|
||||||
|
function isMatchResult(a: unknown): a is z.infer<typeof matchResult> {
|
||||||
|
return matchResult.safeParse(a).success
|
||||||
|
}
|
||||||
|
function isMatchError(a: unknown): a is z.infer<typeof matchError> {
|
||||||
|
return matchError.safeParse(a).success
|
||||||
|
}
|
||||||
|
function isMatchErrorCode(a: unknown): a is z.infer<typeof matchErrorCode> {
|
||||||
|
return matchErrorCode.safeParse(a).success
|
||||||
|
}
|
||||||
const fromReturnType = <A>(a: U.ResultType<A>): A => {
|
const fromReturnType = <A>(a: U.ResultType<A>): A => {
|
||||||
if (matchResult.test(a)) {
|
if (isMatchResult(a)) {
|
||||||
return a.result
|
return a.result
|
||||||
}
|
}
|
||||||
if (matchError.test(a)) {
|
if (isMatchError(a)) {
|
||||||
console.info({ passedErrorStack: new Error().stack, error: a.error })
|
console.info({ passedErrorStack: new Error().stack, error: a.error })
|
||||||
throw { error: a.error }
|
throw { error: a.error }
|
||||||
}
|
}
|
||||||
if (matchErrorCode.test(a)) {
|
if (isMatchErrorCode(a)) {
|
||||||
const [code, message] = a["error-code"]
|
const [code, message] = a["error-code"]
|
||||||
throw { error: message, code }
|
throw { error: message, code }
|
||||||
}
|
}
|
||||||
return assertNever(a)
|
return assertNever(a as never)
|
||||||
}
|
}
|
||||||
|
|
||||||
const matchSetResult = object({
|
const matchSetResult = z.object({
|
||||||
"depends-on": dictionary([string, array(string)])
|
"depends-on": z.record(z.string(), z.array(z.string())).nullable().optional(),
|
||||||
.nullable()
|
dependsOn: z.record(z.string(), z.array(z.string())).nullable().optional(),
|
||||||
.optional(),
|
signal: z.enum([
|
||||||
dependsOn: dictionary([string, array(string)])
|
|
||||||
.nullable()
|
|
||||||
.optional(),
|
|
||||||
signal: literals(
|
|
||||||
"SIGTERM",
|
"SIGTERM",
|
||||||
"SIGHUP",
|
"SIGHUP",
|
||||||
"SIGINT",
|
"SIGINT",
|
||||||
@@ -151,7 +216,7 @@ const matchSetResult = object({
|
|||||||
"SIGPWR",
|
"SIGPWR",
|
||||||
"SIGSYS",
|
"SIGSYS",
|
||||||
"SIGINFO",
|
"SIGINFO",
|
||||||
),
|
]),
|
||||||
})
|
})
|
||||||
|
|
||||||
type OldGetConfigRes = {
|
type OldGetConfigRes = {
|
||||||
@@ -233,33 +298,29 @@ const asProperty = (x: PackagePropertiesV2): PropertiesReturn =>
|
|||||||
Object.fromEntries(
|
Object.fromEntries(
|
||||||
Object.entries(x).map(([key, value]) => [key, asProperty_(value)]),
|
Object.entries(x).map(([key, value]) => [key, asProperty_(value)]),
|
||||||
)
|
)
|
||||||
const [matchPackageProperties, setMatchPackageProperties] =
|
const matchPackagePropertyObject: z.ZodType<PackagePropertyObject> = z.object({
|
||||||
deferred<PackagePropertiesV2>()
|
value: z.lazy(() => matchPackageProperties),
|
||||||
const matchPackagePropertyObject: Parser<unknown, PackagePropertyObject> =
|
type: z.literal("object"),
|
||||||
object({
|
description: z.string(),
|
||||||
value: matchPackageProperties,
|
})
|
||||||
type: literal("object"),
|
|
||||||
description: string,
|
|
||||||
})
|
|
||||||
|
|
||||||
const matchPackagePropertyString: Parser<unknown, PackagePropertyString> =
|
const matchPackagePropertyString: z.ZodType<PackagePropertyString> = z.object({
|
||||||
object({
|
type: z.literal("string"),
|
||||||
type: literal("string"),
|
description: z.string().nullable().optional(),
|
||||||
description: string.nullable().optional(),
|
value: z.string(),
|
||||||
value: string,
|
copyable: z.boolean().nullable().optional(),
|
||||||
copyable: boolean.nullable().optional(),
|
qr: z.boolean().nullable().optional(),
|
||||||
qr: boolean.nullable().optional(),
|
masked: z.boolean().nullable().optional(),
|
||||||
masked: boolean.nullable().optional(),
|
})
|
||||||
})
|
const matchPackageProperties: z.ZodType<PackagePropertiesV2> = z.lazy(() =>
|
||||||
setMatchPackageProperties(
|
z.record(
|
||||||
dictionary([
|
z.string(),
|
||||||
string,
|
z.union([matchPackagePropertyObject, matchPackagePropertyString]),
|
||||||
anyOf(matchPackagePropertyObject, matchPackagePropertyString),
|
),
|
||||||
]),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
const matchProperties = object({
|
const matchProperties = z.object({
|
||||||
version: literal(2),
|
version: z.literal(2),
|
||||||
data: matchPackageProperties,
|
data: matchPackageProperties,
|
||||||
})
|
})
|
||||||
|
|
||||||
@@ -303,7 +364,7 @@ export class SystemForEmbassy implements System {
|
|||||||
})
|
})
|
||||||
const manifestData = await fs.readFile(manifestLocation, "utf-8")
|
const manifestData = await fs.readFile(manifestLocation, "utf-8")
|
||||||
return new SystemForEmbassy(
|
return new SystemForEmbassy(
|
||||||
matchManifest.unsafeCast(JSON.parse(manifestData)),
|
matchManifest.parse(JSON.parse(manifestData)),
|
||||||
moduleCode,
|
moduleCode,
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -384,13 +445,14 @@ export class SystemForEmbassy implements System {
|
|||||||
}
|
}
|
||||||
callCallback(_callback: number, _args: any[]): void {}
|
callCallback(_callback: number, _args: any[]): void {}
|
||||||
async stop(): Promise<void> {
|
async stop(): Promise<void> {
|
||||||
const { currentRunning } = this
|
const clean = this.currentRunning?.clean({
|
||||||
this.currentRunning?.clean()
|
timeout: fromDuration(
|
||||||
|
(this.manifest.main["sigterm-timeout"] as any) || "30s",
|
||||||
|
),
|
||||||
|
})
|
||||||
delete this.currentRunning
|
delete this.currentRunning
|
||||||
if (currentRunning) {
|
if (clean) {
|
||||||
await currentRunning.clean({
|
await clean
|
||||||
timeout: fromDuration(this.manifest.main["sigterm-timeout"] || "30s"),
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -510,6 +572,7 @@ export class SystemForEmbassy implements System {
|
|||||||
async getActionInput(
|
async getActionInput(
|
||||||
effects: Effects,
|
effects: Effects,
|
||||||
actionId: string,
|
actionId: string,
|
||||||
|
_prefill: Record<string, unknown> | null,
|
||||||
timeoutMs: number | null,
|
timeoutMs: number | null,
|
||||||
): Promise<T.ActionInput | null> {
|
): Promise<T.ActionInput | null> {
|
||||||
if (actionId === "config") {
|
if (actionId === "config") {
|
||||||
@@ -622,7 +685,7 @@ export class SystemForEmbassy implements System {
|
|||||||
effects: Effects,
|
effects: Effects,
|
||||||
timeoutMs: number | null,
|
timeoutMs: number | null,
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
const backup = this.manifest.backup.create
|
const backup = this.manifest.backup.create as Procedure
|
||||||
if (backup.type === "docker") {
|
if (backup.type === "docker") {
|
||||||
const commands = [backup.entrypoint, ...backup.args]
|
const commands = [backup.entrypoint, ...backup.args]
|
||||||
const container = await DockerProcedureContainer.of(
|
const container = await DockerProcedureContainer.of(
|
||||||
@@ -655,7 +718,7 @@ export class SystemForEmbassy implements System {
|
|||||||
encoding: "utf-8",
|
encoding: "utf-8",
|
||||||
})
|
})
|
||||||
.catch((_) => null)
|
.catch((_) => null)
|
||||||
const restoreBackup = this.manifest.backup.restore
|
const restoreBackup = this.manifest.backup.restore as Procedure
|
||||||
if (restoreBackup.type === "docker") {
|
if (restoreBackup.type === "docker") {
|
||||||
const commands = [restoreBackup.entrypoint, ...restoreBackup.args]
|
const commands = [restoreBackup.entrypoint, ...restoreBackup.args]
|
||||||
const container = await DockerProcedureContainer.of(
|
const container = await DockerProcedureContainer.of(
|
||||||
@@ -688,7 +751,7 @@ export class SystemForEmbassy implements System {
|
|||||||
effects: Effects,
|
effects: Effects,
|
||||||
timeoutMs: number | null,
|
timeoutMs: number | null,
|
||||||
): Promise<OldGetConfigRes> {
|
): Promise<OldGetConfigRes> {
|
||||||
const config = this.manifest.config?.get
|
const config = this.manifest.config?.get as Procedure | undefined
|
||||||
if (!config) return { spec: {} }
|
if (!config) return { spec: {} }
|
||||||
if (config.type === "docker") {
|
if (config.type === "docker") {
|
||||||
const commands = [config.entrypoint, ...config.args]
|
const commands = [config.entrypoint, ...config.args]
|
||||||
@@ -730,7 +793,7 @@ export class SystemForEmbassy implements System {
|
|||||||
)
|
)
|
||||||
await updateConfig(effects, this.manifest, spec, newConfig)
|
await updateConfig(effects, this.manifest, spec, newConfig)
|
||||||
await configFile.write(effects, newConfig)
|
await configFile.write(effects, newConfig)
|
||||||
const setConfigValue = this.manifest.config?.set
|
const setConfigValue = this.manifest.config?.set as Procedure | undefined
|
||||||
if (!setConfigValue) return
|
if (!setConfigValue) return
|
||||||
if (setConfigValue.type === "docker") {
|
if (setConfigValue.type === "docker") {
|
||||||
const commands = [
|
const commands = [
|
||||||
@@ -745,7 +808,7 @@ export class SystemForEmbassy implements System {
|
|||||||
this.manifest.volumes,
|
this.manifest.volumes,
|
||||||
`Set Config - ${commands.join(" ")}`,
|
`Set Config - ${commands.join(" ")}`,
|
||||||
)
|
)
|
||||||
const answer = matchSetResult.unsafeCast(
|
const answer = matchSetResult.parse(
|
||||||
JSON.parse(
|
JSON.parse(
|
||||||
(await container.execFail(commands, timeoutMs)).stdout.toString(),
|
(await container.execFail(commands, timeoutMs)).stdout.toString(),
|
||||||
),
|
),
|
||||||
@@ -758,7 +821,7 @@ export class SystemForEmbassy implements System {
|
|||||||
const method = moduleCode.setConfig
|
const method = moduleCode.setConfig
|
||||||
if (!method) throw new Error("Expecting that the method setConfig exists")
|
if (!method) throw new Error("Expecting that the method setConfig exists")
|
||||||
|
|
||||||
const answer = matchSetResult.unsafeCast(
|
const answer = matchSetResult.parse(
|
||||||
await method(
|
await method(
|
||||||
polyfillEffects(effects, this.manifest),
|
polyfillEffects(effects, this.manifest),
|
||||||
newConfig as U.Config,
|
newConfig as U.Config,
|
||||||
@@ -787,7 +850,11 @@ export class SystemForEmbassy implements System {
|
|||||||
const requiredDeps = {
|
const requiredDeps = {
|
||||||
...Object.fromEntries(
|
...Object.fromEntries(
|
||||||
Object.entries(this.manifest.dependencies ?? {})
|
Object.entries(this.manifest.dependencies ?? {})
|
||||||
.filter(([k, v]) => v?.requirement.type === "required")
|
.filter(
|
||||||
|
([k, v]) =>
|
||||||
|
(v?.requirement as { type: string } | undefined)?.type ===
|
||||||
|
"required",
|
||||||
|
)
|
||||||
.map((x) => [x[0], []]) || [],
|
.map((x) => [x[0], []]) || [],
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
@@ -855,7 +922,7 @@ export class SystemForEmbassy implements System {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (migration) {
|
if (migration) {
|
||||||
const [_, procedure] = migration
|
const [_, procedure] = migration as readonly [unknown, Procedure]
|
||||||
if (procedure.type === "docker") {
|
if (procedure.type === "docker") {
|
||||||
const commands = [procedure.entrypoint, ...procedure.args]
|
const commands = [procedure.entrypoint, ...procedure.args]
|
||||||
const container = await DockerProcedureContainer.of(
|
const container = await DockerProcedureContainer.of(
|
||||||
@@ -893,7 +960,10 @@ export class SystemForEmbassy implements System {
|
|||||||
effects: Effects,
|
effects: Effects,
|
||||||
timeoutMs: number | null,
|
timeoutMs: number | null,
|
||||||
): Promise<PropertiesReturn> {
|
): Promise<PropertiesReturn> {
|
||||||
const setConfigValue = this.manifest.properties
|
const setConfigValue = this.manifest.properties as
|
||||||
|
| Procedure
|
||||||
|
| null
|
||||||
|
| undefined
|
||||||
if (!setConfigValue) throw new Error("There is no properties")
|
if (!setConfigValue) throw new Error("There is no properties")
|
||||||
if (setConfigValue.type === "docker") {
|
if (setConfigValue.type === "docker") {
|
||||||
const commands = [setConfigValue.entrypoint, ...setConfigValue.args]
|
const commands = [setConfigValue.entrypoint, ...setConfigValue.args]
|
||||||
@@ -904,7 +974,7 @@ export class SystemForEmbassy implements System {
|
|||||||
this.manifest.volumes,
|
this.manifest.volumes,
|
||||||
`Properties - ${commands.join(" ")}`,
|
`Properties - ${commands.join(" ")}`,
|
||||||
)
|
)
|
||||||
const properties = matchProperties.unsafeCast(
|
const properties = matchProperties.parse(
|
||||||
JSON.parse(
|
JSON.parse(
|
||||||
(await container.execFail(commands, timeoutMs)).stdout.toString(),
|
(await container.execFail(commands, timeoutMs)).stdout.toString(),
|
||||||
),
|
),
|
||||||
@@ -915,7 +985,7 @@ export class SystemForEmbassy implements System {
|
|||||||
const method = moduleCode.properties
|
const method = moduleCode.properties
|
||||||
if (!method)
|
if (!method)
|
||||||
throw new Error("Expecting that the method properties exists")
|
throw new Error("Expecting that the method properties exists")
|
||||||
const properties = matchProperties.unsafeCast(
|
const properties = matchProperties.parse(
|
||||||
await method(polyfillEffects(effects, this.manifest)).then(
|
await method(polyfillEffects(effects, this.manifest)).then(
|
||||||
fromReturnType,
|
fromReturnType,
|
||||||
),
|
),
|
||||||
@@ -930,7 +1000,8 @@ export class SystemForEmbassy implements System {
|
|||||||
formData: unknown,
|
formData: unknown,
|
||||||
timeoutMs: number | null,
|
timeoutMs: number | null,
|
||||||
): Promise<T.ActionResult> {
|
): Promise<T.ActionResult> {
|
||||||
const actionProcedure = this.manifest.actions?.[actionId]?.implementation
|
const actionProcedure = this.manifest.actions?.[actionId]
|
||||||
|
?.implementation as Procedure | undefined
|
||||||
const toActionResult = ({
|
const toActionResult = ({
|
||||||
message,
|
message,
|
||||||
value,
|
value,
|
||||||
@@ -997,7 +1068,9 @@ export class SystemForEmbassy implements System {
|
|||||||
oldConfig: unknown,
|
oldConfig: unknown,
|
||||||
timeoutMs: number | null,
|
timeoutMs: number | null,
|
||||||
): Promise<object> {
|
): Promise<object> {
|
||||||
const actionProcedure = this.manifest.dependencies?.[id]?.config?.check
|
const actionProcedure = this.manifest.dependencies?.[id]?.config?.check as
|
||||||
|
| Procedure
|
||||||
|
| undefined
|
||||||
if (!actionProcedure) return { message: "Action not found", value: null }
|
if (!actionProcedure) return { message: "Action not found", value: null }
|
||||||
if (actionProcedure.type === "docker") {
|
if (actionProcedure.type === "docker") {
|
||||||
const commands = [
|
const commands = [
|
||||||
@@ -1040,16 +1113,26 @@ export class SystemForEmbassy implements System {
|
|||||||
timeoutMs: number | null,
|
timeoutMs: number | null,
|
||||||
): Promise<void> {
|
): Promise<void> {
|
||||||
// TODO: docker
|
// TODO: docker
|
||||||
await effects.mount({
|
const status = await getStatus(effects, { packageId: id }).const()
|
||||||
location: `/media/embassy/${id}`,
|
if (!status) return
|
||||||
target: {
|
try {
|
||||||
packageId: id,
|
await effects.mount({
|
||||||
volumeId: "embassy",
|
location: `/media/embassy/${id}`,
|
||||||
subpath: null,
|
target: {
|
||||||
readonly: true,
|
packageId: id,
|
||||||
idmap: [],
|
volumeId: "embassy",
|
||||||
},
|
subpath: null,
|
||||||
})
|
readonly: true,
|
||||||
|
idmap: [],
|
||||||
|
},
|
||||||
|
})
|
||||||
|
} catch (e) {
|
||||||
|
console.error(
|
||||||
|
`Failed to mount dependency volume for ${id}, skipping autoconfig:`,
|
||||||
|
e,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
}
|
||||||
configFile
|
configFile
|
||||||
.withPath(`/media/embassy/${id}/config.json`)
|
.withPath(`/media/embassy/${id}/config.json`)
|
||||||
.read()
|
.read()
|
||||||
@@ -1089,40 +1172,50 @@ export class SystemForEmbassy implements System {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const matchPointer = object({
|
const matchPointer = z.object({
|
||||||
type: literal("pointer"),
|
type: z.literal("pointer"),
|
||||||
})
|
})
|
||||||
|
|
||||||
const matchPointerPackage = object({
|
const matchPointerPackage = z.object({
|
||||||
subtype: literal("package"),
|
subtype: z.literal("package"),
|
||||||
target: literals("tor-key", "tor-address", "lan-address"),
|
target: z.enum(["tor-key", "tor-address", "lan-address"]),
|
||||||
"package-id": string,
|
"package-id": z.string(),
|
||||||
interface: string,
|
interface: z.string(),
|
||||||
})
|
})
|
||||||
const matchPointerConfig = object({
|
const matchPointerConfig = z.object({
|
||||||
subtype: literal("package"),
|
subtype: z.literal("package"),
|
||||||
target: literals("config"),
|
target: z.enum(["config"]),
|
||||||
"package-id": string,
|
"package-id": z.string(),
|
||||||
selector: string,
|
selector: z.string(),
|
||||||
multi: boolean,
|
multi: z.boolean(),
|
||||||
})
|
})
|
||||||
const matchSpec = object({
|
const matchSpec = z.object({
|
||||||
spec: object,
|
spec: z.record(z.string(), z.unknown()),
|
||||||
})
|
})
|
||||||
const matchVariants = object({ variants: dictionary([string, unknown]) })
|
const matchVariants = z.object({ variants: z.record(z.string(), z.unknown()) })
|
||||||
|
function isMatchPointer(v: unknown): v is z.infer<typeof matchPointer> {
|
||||||
|
return matchPointer.safeParse(v).success
|
||||||
|
}
|
||||||
|
function isMatchSpec(v: unknown): v is z.infer<typeof matchSpec> {
|
||||||
|
return matchSpec.safeParse(v).success
|
||||||
|
}
|
||||||
|
function isMatchVariants(v: unknown): v is z.infer<typeof matchVariants> {
|
||||||
|
return matchVariants.safeParse(v).success
|
||||||
|
}
|
||||||
function cleanSpecOfPointers<T>(mutSpec: T): T {
|
function cleanSpecOfPointers<T>(mutSpec: T): T {
|
||||||
if (!object.test(mutSpec)) return mutSpec
|
if (typeof mutSpec !== "object" || mutSpec === null) return mutSpec
|
||||||
for (const key in mutSpec) {
|
for (const key in mutSpec) {
|
||||||
const value = mutSpec[key]
|
const value = mutSpec[key]
|
||||||
if (matchSpec.test(value)) value.spec = cleanSpecOfPointers(value.spec)
|
if (isMatchSpec(value))
|
||||||
if (matchVariants.test(value))
|
value.spec = cleanSpecOfPointers(value.spec) as Record<string, unknown>
|
||||||
|
if (isMatchVariants(value))
|
||||||
value.variants = Object.fromEntries(
|
value.variants = Object.fromEntries(
|
||||||
Object.entries(value.variants).map(([key, value]) => [
|
Object.entries(value.variants).map(([key, value]) => [
|
||||||
key,
|
key,
|
||||||
cleanSpecOfPointers(value),
|
cleanSpecOfPointers(value),
|
||||||
]),
|
]),
|
||||||
)
|
)
|
||||||
if (!matchPointer.test(value)) continue
|
if (!isMatchPointer(value)) continue
|
||||||
delete mutSpec[key]
|
delete mutSpec[key]
|
||||||
// // if (value.target === )
|
// // if (value.target === )
|
||||||
}
|
}
|
||||||
@@ -1188,6 +1281,11 @@ async function updateConfig(
|
|||||||
if (specValue.target === "config") {
|
if (specValue.target === "config") {
|
||||||
const jp = require("jsonpath")
|
const jp = require("jsonpath")
|
||||||
const depId = specValue["package-id"]
|
const depId = specValue["package-id"]
|
||||||
|
const depStatus = await getStatus(effects, { packageId: depId }).const()
|
||||||
|
if (!depStatus) {
|
||||||
|
mutConfigValue[key] = null
|
||||||
|
continue
|
||||||
|
}
|
||||||
await effects.mount({
|
await effects.mount({
|
||||||
location: `/media/embassy/${depId}`,
|
location: `/media/embassy/${depId}`,
|
||||||
target: {
|
target: {
|
||||||
@@ -1244,12 +1342,8 @@ async function updateConfig(
|
|||||||
? ""
|
? ""
|
||||||
: catchFn(
|
: catchFn(
|
||||||
() =>
|
() =>
|
||||||
(specValue.target === "lan-address"
|
filled.addressInfo!.filter({ kind: "mdns" })!.hostnames[0]
|
||||||
? filled.addressInfo!.filter({ kind: "mdns" }) ||
|
.hostname,
|
||||||
filled.addressInfo!.onion
|
|
||||||
: filled.addressInfo!.onion ||
|
|
||||||
filled.addressInfo!.filter({ kind: "mdns" })
|
|
||||||
).hostnames[0].hostname.value,
|
|
||||||
) || ""
|
) || ""
|
||||||
mutConfigValue[key] = url
|
mutConfigValue[key] = url
|
||||||
}
|
}
|
||||||
@@ -1272,7 +1366,7 @@ function extractServiceInterfaceId(manifest: Manifest, specInterface: string) {
|
|||||||
}
|
}
|
||||||
async function convertToNewConfig(value: OldGetConfigRes) {
|
async function convertToNewConfig(value: OldGetConfigRes) {
|
||||||
try {
|
try {
|
||||||
const valueSpec: OldConfigSpec = matchOldConfigSpec.unsafeCast(value.spec)
|
const valueSpec: OldConfigSpec = matchOldConfigSpec.parse(value.spec)
|
||||||
const spec = transformConfigSpec(valueSpec)
|
const spec = transformConfigSpec(valueSpec)
|
||||||
if (!value.config) return { spec, config: null }
|
if (!value.config) return { spec, config: null }
|
||||||
const config = transformOldConfigToNew(valueSpec, value.config) ?? null
|
const config = transformOldConfigToNew(valueSpec, value.config) ?? null
|
||||||
|
|||||||
@@ -4,9 +4,9 @@ import synapseManifest from "./__fixtures__/synapseManifest"
|
|||||||
|
|
||||||
describe("matchManifest", () => {
|
describe("matchManifest", () => {
|
||||||
test("gittea", () => {
|
test("gittea", () => {
|
||||||
matchManifest.unsafeCast(giteaManifest)
|
matchManifest.parse(giteaManifest)
|
||||||
})
|
})
|
||||||
test("synapse", () => {
|
test("synapse", () => {
|
||||||
matchManifest.unsafeCast(synapseManifest)
|
matchManifest.parse(synapseManifest)
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -1,126 +1,123 @@
|
|||||||
import {
|
import { z } from "@start9labs/start-sdk"
|
||||||
object,
|
|
||||||
literal,
|
|
||||||
string,
|
|
||||||
array,
|
|
||||||
boolean,
|
|
||||||
dictionary,
|
|
||||||
literals,
|
|
||||||
number,
|
|
||||||
unknown,
|
|
||||||
some,
|
|
||||||
every,
|
|
||||||
} from "ts-matches"
|
|
||||||
import { matchVolume } from "./matchVolume"
|
import { matchVolume } from "./matchVolume"
|
||||||
import { matchDockerProcedure } from "../../../Models/DockerProcedure"
|
import { matchDockerProcedure } from "../../../Models/DockerProcedure"
|
||||||
|
|
||||||
const matchJsProcedure = object({
|
const matchJsProcedure = z.object({
|
||||||
type: literal("script"),
|
type: z.literal("script"),
|
||||||
args: array(unknown).nullable().optional().defaultTo([]),
|
args: z.array(z.unknown()).nullable().optional().default([]),
|
||||||
})
|
})
|
||||||
|
|
||||||
const matchProcedure = some(matchDockerProcedure, matchJsProcedure)
|
const matchProcedure = z.union([matchDockerProcedure, matchJsProcedure])
|
||||||
export type Procedure = typeof matchProcedure._TYPE
|
export type Procedure = z.infer<typeof matchProcedure>
|
||||||
|
|
||||||
const matchAction = object({
|
const healthCheckFields = {
|
||||||
name: string,
|
name: z.string(),
|
||||||
description: string,
|
"success-message": z.string().nullable().optional(),
|
||||||
warning: string.nullable().optional(),
|
}
|
||||||
|
|
||||||
|
const matchAction = z.object({
|
||||||
|
name: z.string(),
|
||||||
|
description: z.string(),
|
||||||
|
warning: z.string().nullable().optional(),
|
||||||
implementation: matchProcedure,
|
implementation: matchProcedure,
|
||||||
"allowed-statuses": array(literals("running", "stopped")),
|
"allowed-statuses": z.array(z.enum(["running", "stopped"])),
|
||||||
"input-spec": unknown.nullable().optional(),
|
"input-spec": z.unknown().nullable().optional(),
|
||||||
})
|
})
|
||||||
export const matchManifest = object({
|
export const matchManifest = z.object({
|
||||||
id: string,
|
id: z.string(),
|
||||||
title: string,
|
title: z.string(),
|
||||||
version: string,
|
version: z.string(),
|
||||||
main: matchDockerProcedure,
|
main: matchDockerProcedure,
|
||||||
assets: object({
|
assets: z
|
||||||
assets: string.nullable().optional(),
|
.object({
|
||||||
scripts: string.nullable().optional(),
|
assets: z.string().nullable().optional(),
|
||||||
})
|
scripts: z.string().nullable().optional(),
|
||||||
|
})
|
||||||
.nullable()
|
.nullable()
|
||||||
.optional(),
|
.optional(),
|
||||||
"health-checks": dictionary([
|
"health-checks": z.record(
|
||||||
string,
|
z.string(),
|
||||||
every(
|
z.union([
|
||||||
matchProcedure,
|
matchDockerProcedure.extend(healthCheckFields),
|
||||||
object({
|
matchJsProcedure.extend(healthCheckFields),
|
||||||
name: string,
|
]),
|
||||||
["success-message"]: string.nullable().optional(),
|
),
|
||||||
}),
|
config: z
|
||||||
),
|
.object({
|
||||||
]),
|
get: matchProcedure,
|
||||||
config: object({
|
set: matchProcedure,
|
||||||
get: matchProcedure,
|
})
|
||||||
set: matchProcedure,
|
|
||||||
})
|
|
||||||
.nullable()
|
.nullable()
|
||||||
.optional(),
|
.optional(),
|
||||||
properties: matchProcedure.nullable().optional(),
|
properties: matchProcedure.nullable().optional(),
|
||||||
volumes: dictionary([string, matchVolume]),
|
volumes: z.record(z.string(), matchVolume),
|
||||||
interfaces: dictionary([
|
interfaces: z.record(
|
||||||
string,
|
z.string(),
|
||||||
object({
|
z.object({
|
||||||
name: string,
|
name: z.string(),
|
||||||
description: string,
|
description: z.string(),
|
||||||
"tor-config": object({
|
"tor-config": z
|
||||||
"port-mapping": dictionary([string, string]),
|
.object({
|
||||||
})
|
"port-mapping": z.record(z.string(), z.string()),
|
||||||
|
})
|
||||||
.nullable()
|
.nullable()
|
||||||
.optional(),
|
.optional(),
|
||||||
"lan-config": dictionary([
|
"lan-config": z
|
||||||
string,
|
.record(
|
||||||
object({
|
z.string(),
|
||||||
ssl: boolean,
|
z.object({
|
||||||
internal: number,
|
ssl: z.boolean(),
|
||||||
}),
|
internal: z.number(),
|
||||||
])
|
}),
|
||||||
|
)
|
||||||
.nullable()
|
.nullable()
|
||||||
.optional(),
|
.optional(),
|
||||||
ui: boolean,
|
ui: z.boolean(),
|
||||||
protocols: array(string),
|
protocols: z.array(z.string()),
|
||||||
}),
|
}),
|
||||||
]),
|
),
|
||||||
backup: object({
|
backup: z.object({
|
||||||
create: matchProcedure,
|
create: matchProcedure,
|
||||||
restore: matchProcedure,
|
restore: matchProcedure,
|
||||||
}),
|
}),
|
||||||
migrations: object({
|
migrations: z
|
||||||
to: dictionary([string, matchProcedure]),
|
.object({
|
||||||
from: dictionary([string, matchProcedure]),
|
to: z.record(z.string(), matchProcedure),
|
||||||
})
|
from: z.record(z.string(), matchProcedure),
|
||||||
|
})
|
||||||
.nullable()
|
.nullable()
|
||||||
.optional(),
|
.optional(),
|
||||||
dependencies: dictionary([
|
dependencies: z.record(
|
||||||
string,
|
z.string(),
|
||||||
object({
|
z
|
||||||
version: string,
|
.object({
|
||||||
requirement: some(
|
version: z.string(),
|
||||||
object({
|
requirement: z.union([
|
||||||
type: literal("opt-in"),
|
z.object({
|
||||||
how: string,
|
type: z.literal("opt-in"),
|
||||||
}),
|
how: z.string(),
|
||||||
object({
|
}),
|
||||||
type: literal("opt-out"),
|
z.object({
|
||||||
how: string,
|
type: z.literal("opt-out"),
|
||||||
}),
|
how: z.string(),
|
||||||
object({
|
}),
|
||||||
type: literal("required"),
|
z.object({
|
||||||
}),
|
type: z.literal("required"),
|
||||||
),
|
}),
|
||||||
description: string.nullable().optional(),
|
]),
|
||||||
config: object({
|
description: z.string().nullable().optional(),
|
||||||
check: matchProcedure,
|
config: z
|
||||||
"auto-configure": matchProcedure,
|
.object({
|
||||||
|
check: matchProcedure,
|
||||||
|
"auto-configure": matchProcedure,
|
||||||
|
})
|
||||||
|
.nullable()
|
||||||
|
.optional(),
|
||||||
})
|
})
|
||||||
.nullable()
|
|
||||||
.optional(),
|
|
||||||
})
|
|
||||||
.nullable()
|
.nullable()
|
||||||
.optional(),
|
.optional(),
|
||||||
]),
|
),
|
||||||
|
|
||||||
actions: dictionary([string, matchAction]),
|
actions: z.record(z.string(), matchAction),
|
||||||
})
|
})
|
||||||
export type Manifest = typeof matchManifest._TYPE
|
export type Manifest = z.infer<typeof matchManifest>
|
||||||
|
|||||||
@@ -1,32 +1,32 @@
|
|||||||
import { object, literal, string, boolean, some } from "ts-matches"
|
import { z } from "@start9labs/start-sdk"
|
||||||
|
|
||||||
const matchDataVolume = object({
|
const matchDataVolume = z.object({
|
||||||
type: literal("data"),
|
type: z.literal("data"),
|
||||||
readonly: boolean.optional(),
|
readonly: z.boolean().optional(),
|
||||||
})
|
})
|
||||||
const matchAssetVolume = object({
|
const matchAssetVolume = z.object({
|
||||||
type: literal("assets"),
|
type: z.literal("assets"),
|
||||||
})
|
})
|
||||||
const matchPointerVolume = object({
|
const matchPointerVolume = z.object({
|
||||||
type: literal("pointer"),
|
type: z.literal("pointer"),
|
||||||
"package-id": string,
|
"package-id": z.string(),
|
||||||
"volume-id": string,
|
"volume-id": z.string(),
|
||||||
path: string,
|
path: z.string(),
|
||||||
readonly: boolean,
|
readonly: z.boolean(),
|
||||||
})
|
})
|
||||||
const matchCertificateVolume = object({
|
const matchCertificateVolume = z.object({
|
||||||
type: literal("certificate"),
|
type: z.literal("certificate"),
|
||||||
"interface-id": string,
|
"interface-id": z.string(),
|
||||||
})
|
})
|
||||||
const matchBackupVolume = object({
|
const matchBackupVolume = z.object({
|
||||||
type: literal("backup"),
|
type: z.literal("backup"),
|
||||||
readonly: boolean,
|
readonly: z.boolean(),
|
||||||
})
|
})
|
||||||
export const matchVolume = some(
|
export const matchVolume = z.union([
|
||||||
matchDataVolume,
|
matchDataVolume,
|
||||||
matchAssetVolume,
|
matchAssetVolume,
|
||||||
matchPointerVolume,
|
matchPointerVolume,
|
||||||
matchCertificateVolume,
|
matchCertificateVolume,
|
||||||
matchBackupVolume,
|
matchBackupVolume,
|
||||||
)
|
])
|
||||||
export type Volume = typeof matchVolume._TYPE
|
export type Volume = z.infer<typeof matchVolume>
|
||||||
|
|||||||
@@ -12,43 +12,43 @@ import nostrConfig2 from "./__fixtures__/nostrConfig2"
|
|||||||
|
|
||||||
describe("transformConfigSpec", () => {
|
describe("transformConfigSpec", () => {
|
||||||
test("matchOldConfigSpec(embassyPages.homepage.variants[web-page])", () => {
|
test("matchOldConfigSpec(embassyPages.homepage.variants[web-page])", () => {
|
||||||
matchOldConfigSpec.unsafeCast(
|
matchOldConfigSpec.parse(
|
||||||
fixtureEmbassyPagesConfig.homepage.variants["web-page"],
|
fixtureEmbassyPagesConfig.homepage.variants["web-page"],
|
||||||
)
|
)
|
||||||
})
|
})
|
||||||
test("matchOldConfigSpec(embassyPages)", () => {
|
test("matchOldConfigSpec(embassyPages)", () => {
|
||||||
matchOldConfigSpec.unsafeCast(fixtureEmbassyPagesConfig)
|
matchOldConfigSpec.parse(fixtureEmbassyPagesConfig)
|
||||||
})
|
})
|
||||||
test("transformConfigSpec(embassyPages)", () => {
|
test("transformConfigSpec(embassyPages)", () => {
|
||||||
const spec = matchOldConfigSpec.unsafeCast(fixtureEmbassyPagesConfig)
|
const spec = matchOldConfigSpec.parse(fixtureEmbassyPagesConfig)
|
||||||
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
||||||
})
|
})
|
||||||
|
|
||||||
test("matchOldConfigSpec(RTL.nodes)", () => {
|
test("matchOldConfigSpec(RTL.nodes)", () => {
|
||||||
matchOldValueSpecList.unsafeCast(fixtureRTLConfig.nodes)
|
matchOldValueSpecList.parse(fixtureRTLConfig.nodes)
|
||||||
})
|
})
|
||||||
test("matchOldConfigSpec(RTL)", () => {
|
test("matchOldConfigSpec(RTL)", () => {
|
||||||
matchOldConfigSpec.unsafeCast(fixtureRTLConfig)
|
matchOldConfigSpec.parse(fixtureRTLConfig)
|
||||||
})
|
})
|
||||||
test("transformConfigSpec(RTL)", () => {
|
test("transformConfigSpec(RTL)", () => {
|
||||||
const spec = matchOldConfigSpec.unsafeCast(fixtureRTLConfig)
|
const spec = matchOldConfigSpec.parse(fixtureRTLConfig)
|
||||||
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
||||||
})
|
})
|
||||||
|
|
||||||
test("transformConfigSpec(searNXG)", () => {
|
test("transformConfigSpec(searNXG)", () => {
|
||||||
const spec = matchOldConfigSpec.unsafeCast(searNXG)
|
const spec = matchOldConfigSpec.parse(searNXG)
|
||||||
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
||||||
})
|
})
|
||||||
test("transformConfigSpec(bitcoind)", () => {
|
test("transformConfigSpec(bitcoind)", () => {
|
||||||
const spec = matchOldConfigSpec.unsafeCast(bitcoind)
|
const spec = matchOldConfigSpec.parse(bitcoind)
|
||||||
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
||||||
})
|
})
|
||||||
test("transformConfigSpec(nostr)", () => {
|
test("transformConfigSpec(nostr)", () => {
|
||||||
const spec = matchOldConfigSpec.unsafeCast(nostr)
|
const spec = matchOldConfigSpec.parse(nostr)
|
||||||
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
||||||
})
|
})
|
||||||
test("transformConfigSpec(nostr2)", () => {
|
test("transformConfigSpec(nostr2)", () => {
|
||||||
const spec = matchOldConfigSpec.unsafeCast(nostrConfig2)
|
const spec = matchOldConfigSpec.parse(nostrConfig2)
|
||||||
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
expect(transformConfigSpec(spec)).toMatchSnapshot()
|
||||||
})
|
})
|
||||||
})
|
})
|
||||||
|
|||||||
@@ -1,19 +1,4 @@
|
|||||||
import { IST } from "@start9labs/start-sdk"
|
import { IST, z } from "@start9labs/start-sdk"
|
||||||
import {
|
|
||||||
dictionary,
|
|
||||||
object,
|
|
||||||
anyOf,
|
|
||||||
string,
|
|
||||||
literals,
|
|
||||||
array,
|
|
||||||
number,
|
|
||||||
boolean,
|
|
||||||
Parser,
|
|
||||||
deferred,
|
|
||||||
every,
|
|
||||||
nill,
|
|
||||||
literal,
|
|
||||||
} from "ts-matches"
|
|
||||||
|
|
||||||
export function transformConfigSpec(oldSpec: OldConfigSpec): IST.InputSpec {
|
export function transformConfigSpec(oldSpec: OldConfigSpec): IST.InputSpec {
|
||||||
return Object.entries(oldSpec).reduce((inputSpec, [key, oldVal]) => {
|
return Object.entries(oldSpec).reduce((inputSpec, [key, oldVal]) => {
|
||||||
@@ -82,7 +67,7 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): IST.InputSpec {
|
|||||||
name: oldVal.name,
|
name: oldVal.name,
|
||||||
description: oldVal.description || null,
|
description: oldVal.description || null,
|
||||||
warning: oldVal.warning || null,
|
warning: oldVal.warning || null,
|
||||||
spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(oldVal.spec)),
|
spec: transformConfigSpec(matchOldConfigSpec.parse(oldVal.spec)),
|
||||||
}
|
}
|
||||||
} else if (oldVal.type === "string") {
|
} else if (oldVal.type === "string") {
|
||||||
newVal = {
|
newVal = {
|
||||||
@@ -121,7 +106,7 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): IST.InputSpec {
|
|||||||
...obj,
|
...obj,
|
||||||
[id]: {
|
[id]: {
|
||||||
name: oldVal.tag["variant-names"][id] || id,
|
name: oldVal.tag["variant-names"][id] || id,
|
||||||
spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(spec)),
|
spec: transformConfigSpec(matchOldConfigSpec.parse(spec)),
|
||||||
},
|
},
|
||||||
}),
|
}),
|
||||||
{} as Record<string, { name: string; spec: IST.InputSpec }>,
|
{} as Record<string, { name: string; spec: IST.InputSpec }>,
|
||||||
@@ -153,7 +138,7 @@ export function transformOldConfigToNew(
|
|||||||
|
|
||||||
if (isObject(val)) {
|
if (isObject(val)) {
|
||||||
newVal = transformOldConfigToNew(
|
newVal = transformOldConfigToNew(
|
||||||
matchOldConfigSpec.unsafeCast(val.spec),
|
matchOldConfigSpec.parse(val.spec),
|
||||||
config[key],
|
config[key],
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -172,7 +157,7 @@ export function transformOldConfigToNew(
|
|||||||
newVal = {
|
newVal = {
|
||||||
selection,
|
selection,
|
||||||
value: transformOldConfigToNew(
|
value: transformOldConfigToNew(
|
||||||
matchOldConfigSpec.unsafeCast(val.variants[selection]),
|
matchOldConfigSpec.parse(val.variants[selection]),
|
||||||
config[key],
|
config[key],
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
@@ -183,10 +168,7 @@ export function transformOldConfigToNew(
|
|||||||
|
|
||||||
if (isObjectList(val)) {
|
if (isObjectList(val)) {
|
||||||
newVal = (config[key] as object[]).map((obj) =>
|
newVal = (config[key] as object[]).map((obj) =>
|
||||||
transformOldConfigToNew(
|
transformOldConfigToNew(matchOldConfigSpec.parse(val.spec.spec), obj),
|
||||||
matchOldConfigSpec.unsafeCast(val.spec.spec),
|
|
||||||
obj,
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
} else if (isUnionList(val)) return obj
|
} else if (isUnionList(val)) return obj
|
||||||
}
|
}
|
||||||
@@ -212,7 +194,7 @@ export function transformNewConfigToOld(
|
|||||||
|
|
||||||
if (isObject(val)) {
|
if (isObject(val)) {
|
||||||
newVal = transformNewConfigToOld(
|
newVal = transformNewConfigToOld(
|
||||||
matchOldConfigSpec.unsafeCast(val.spec),
|
matchOldConfigSpec.parse(val.spec),
|
||||||
config[key],
|
config[key],
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
@@ -221,7 +203,7 @@ export function transformNewConfigToOld(
|
|||||||
newVal = {
|
newVal = {
|
||||||
[val.tag.id]: config[key].selection,
|
[val.tag.id]: config[key].selection,
|
||||||
...transformNewConfigToOld(
|
...transformNewConfigToOld(
|
||||||
matchOldConfigSpec.unsafeCast(val.variants[config[key].selection]),
|
matchOldConfigSpec.parse(val.variants[config[key].selection]),
|
||||||
config[key].value,
|
config[key].value,
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
@@ -230,10 +212,7 @@ export function transformNewConfigToOld(
|
|||||||
if (isList(val)) {
|
if (isList(val)) {
|
||||||
if (isObjectList(val)) {
|
if (isObjectList(val)) {
|
||||||
newVal = (config[key] as object[]).map((obj) =>
|
newVal = (config[key] as object[]).map((obj) =>
|
||||||
transformNewConfigToOld(
|
transformNewConfigToOld(matchOldConfigSpec.parse(val.spec.spec), obj),
|
||||||
matchOldConfigSpec.unsafeCast(val.spec.spec),
|
|
||||||
obj,
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
} else if (isUnionList(val)) return obj
|
} else if (isUnionList(val)) return obj
|
||||||
}
|
}
|
||||||
@@ -337,9 +316,7 @@ function getListSpec(
|
|||||||
default: oldVal.default as Record<string, unknown>[],
|
default: oldVal.default as Record<string, unknown>[],
|
||||||
spec: {
|
spec: {
|
||||||
type: "object",
|
type: "object",
|
||||||
spec: transformConfigSpec(
|
spec: transformConfigSpec(matchOldConfigSpec.parse(oldVal.spec.spec)),
|
||||||
matchOldConfigSpec.unsafeCast(oldVal.spec.spec),
|
|
||||||
),
|
|
||||||
uniqueBy: oldVal.spec["unique-by"] || null,
|
uniqueBy: oldVal.spec["unique-by"] || null,
|
||||||
displayAs: oldVal.spec["display-as"] || null,
|
displayAs: oldVal.spec["display-as"] || null,
|
||||||
},
|
},
|
||||||
@@ -393,211 +370,281 @@ function isUnionList(
|
|||||||
}
|
}
|
||||||
|
|
||||||
export type OldConfigSpec = Record<string, OldValueSpec>
|
export type OldConfigSpec = Record<string, OldValueSpec>
|
||||||
const [_matchOldConfigSpec, setMatchOldConfigSpec] = deferred<unknown>()
|
export const matchOldConfigSpec: z.ZodType<OldConfigSpec> = z.lazy(() =>
|
||||||
export const matchOldConfigSpec = _matchOldConfigSpec as Parser<
|
z.record(z.string(), matchOldValueSpec),
|
||||||
unknown,
|
|
||||||
OldConfigSpec
|
|
||||||
>
|
|
||||||
export const matchOldDefaultString = anyOf(
|
|
||||||
string,
|
|
||||||
object({ charset: string, len: number }),
|
|
||||||
)
|
)
|
||||||
type OldDefaultString = typeof matchOldDefaultString._TYPE
|
export const matchOldDefaultString = z.union([
|
||||||
|
z.string(),
|
||||||
|
z.object({ charset: z.string(), len: z.number() }),
|
||||||
|
])
|
||||||
|
type OldDefaultString = z.infer<typeof matchOldDefaultString>
|
||||||
|
|
||||||
export const matchOldValueSpecString = object({
|
export const matchOldValueSpecString = z.object({
|
||||||
type: literals("string"),
|
type: z.enum(["string"]),
|
||||||
name: string,
|
name: z.string(),
|
||||||
masked: boolean.nullable().optional(),
|
masked: z.boolean().nullable().optional(),
|
||||||
copyable: boolean.nullable().optional(),
|
copyable: z.boolean().nullable().optional(),
|
||||||
nullable: boolean.nullable().optional(),
|
nullable: z.boolean().nullable().optional(),
|
||||||
placeholder: string.nullable().optional(),
|
placeholder: z.string().nullable().optional(),
|
||||||
pattern: string.nullable().optional(),
|
pattern: z.string().nullable().optional(),
|
||||||
"pattern-description": string.nullable().optional(),
|
"pattern-description": z.string().nullable().optional(),
|
||||||
default: matchOldDefaultString.nullable().optional(),
|
default: matchOldDefaultString.nullable().optional(),
|
||||||
textarea: boolean.nullable().optional(),
|
textarea: z.boolean().nullable().optional(),
|
||||||
description: string.nullable().optional(),
|
description: z.string().nullable().optional(),
|
||||||
warning: string.nullable().optional(),
|
warning: z.string().nullable().optional(),
|
||||||
})
|
})
|
||||||
|
|
||||||
export const matchOldValueSpecNumber = object({
|
export const matchOldValueSpecNumber = z.object({
|
||||||
type: literals("number"),
|
type: z.enum(["number"]),
|
||||||
nullable: boolean,
|
nullable: z.boolean(),
|
||||||
name: string,
|
name: z.string(),
|
||||||
range: string,
|
range: z.string(),
|
||||||
integral: boolean,
|
integral: z.boolean(),
|
||||||
default: number.nullable().optional(),
|
default: z.number().nullable().optional(),
|
||||||
description: string.nullable().optional(),
|
description: z.string().nullable().optional(),
|
||||||
warning: string.nullable().optional(),
|
warning: z.string().nullable().optional(),
|
||||||
units: string.nullable().optional(),
|
units: z.string().nullable().optional(),
|
||||||
placeholder: anyOf(number, string).nullable().optional(),
|
placeholder: z.union([z.number(), z.string()]).nullable().optional(),
|
||||||
})
|
})
|
||||||
type OldValueSpecNumber = typeof matchOldValueSpecNumber._TYPE
|
type OldValueSpecNumber = z.infer<typeof matchOldValueSpecNumber>
|
||||||
|
|
||||||
export const matchOldValueSpecBoolean = object({
|
export const matchOldValueSpecBoolean = z.object({
|
||||||
type: literals("boolean"),
|
type: z.enum(["boolean"]),
|
||||||
default: boolean,
|
default: z.boolean(),
|
||||||
name: string,
|
name: z.string(),
|
||||||
description: string.nullable().optional(),
|
description: z.string().nullable().optional(),
|
||||||
warning: string.nullable().optional(),
|
warning: z.string().nullable().optional(),
|
||||||
})
|
})
|
||||||
type OldValueSpecBoolean = typeof matchOldValueSpecBoolean._TYPE
|
type OldValueSpecBoolean = z.infer<typeof matchOldValueSpecBoolean>
|
||||||
|
|
||||||
const matchOldValueSpecObject = object({
|
type OldValueSpecObject = {
|
||||||
type: literals("object"),
|
type: "object"
|
||||||
spec: _matchOldConfigSpec,
|
spec: OldConfigSpec
|
||||||
name: string,
|
name: string
|
||||||
description: string.nullable().optional(),
|
description?: string | null
|
||||||
warning: string.nullable().optional(),
|
warning?: string | null
|
||||||
|
}
|
||||||
|
const matchOldValueSpecObject: z.ZodType<OldValueSpecObject> = z.object({
|
||||||
|
type: z.enum(["object"]),
|
||||||
|
spec: z.lazy(() => matchOldConfigSpec),
|
||||||
|
name: z.string(),
|
||||||
|
description: z.string().nullable().optional(),
|
||||||
|
warning: z.string().nullable().optional(),
|
||||||
})
|
})
|
||||||
type OldValueSpecObject = typeof matchOldValueSpecObject._TYPE
|
|
||||||
|
|
||||||
const matchOldValueSpecEnum = object({
|
const matchOldValueSpecEnum = z.object({
|
||||||
values: array(string),
|
values: z.array(z.string()),
|
||||||
"value-names": dictionary([string, string]),
|
"value-names": z.record(z.string(), z.string()),
|
||||||
type: literals("enum"),
|
type: z.enum(["enum"]),
|
||||||
default: string,
|
default: z.string(),
|
||||||
name: string,
|
name: z.string(),
|
||||||
description: string.nullable().optional(),
|
description: z.string().nullable().optional(),
|
||||||
warning: string.nullable().optional(),
|
warning: z.string().nullable().optional(),
|
||||||
})
|
})
|
||||||
type OldValueSpecEnum = typeof matchOldValueSpecEnum._TYPE
|
type OldValueSpecEnum = z.infer<typeof matchOldValueSpecEnum>
|
||||||
|
|
||||||
const matchOldUnionTagSpec = object({
|
const matchOldUnionTagSpec = z.object({
|
||||||
id: string, // The name of the field containing one of the union variants
|
id: z.string(), // The name of the field containing one of the union variants
|
||||||
"variant-names": dictionary([string, string]), // The name of each variant
|
"variant-names": z.record(z.string(), z.string()), // The name of each variant
|
||||||
name: string,
|
name: z.string(),
|
||||||
description: string.nullable().optional(),
|
description: z.string().nullable().optional(),
|
||||||
warning: string.nullable().optional(),
|
warning: z.string().nullable().optional(),
|
||||||
})
|
})
|
||||||
const matchOldValueSpecUnion = object({
|
type OldValueSpecUnion = {
|
||||||
type: literals("union"),
|
type: "union"
|
||||||
|
tag: z.infer<typeof matchOldUnionTagSpec>
|
||||||
|
variants: Record<string, OldConfigSpec>
|
||||||
|
default: string
|
||||||
|
}
|
||||||
|
const matchOldValueSpecUnion: z.ZodType<OldValueSpecUnion> = z.object({
|
||||||
|
type: z.enum(["union"]),
|
||||||
tag: matchOldUnionTagSpec,
|
tag: matchOldUnionTagSpec,
|
||||||
variants: dictionary([string, _matchOldConfigSpec]),
|
variants: z.record(
|
||||||
default: string,
|
z.string(),
|
||||||
|
z.lazy(() => matchOldConfigSpec),
|
||||||
|
),
|
||||||
|
default: z.string(),
|
||||||
})
|
})
|
||||||
type OldValueSpecUnion = typeof matchOldValueSpecUnion._TYPE
|
|
||||||
|
|
||||||
const [matchOldUniqueBy, setOldUniqueBy] = deferred<OldUniqueBy>()
|
|
||||||
type OldUniqueBy =
|
type OldUniqueBy =
|
||||||
| null
|
| null
|
||||||
| string
|
| string
|
||||||
| { any: OldUniqueBy[] }
|
| { any: OldUniqueBy[] }
|
||||||
| { all: OldUniqueBy[] }
|
| { all: OldUniqueBy[] }
|
||||||
|
|
||||||
setOldUniqueBy(
|
const matchOldUniqueBy: z.ZodType<OldUniqueBy> = z.lazy(() =>
|
||||||
anyOf(
|
z.union([
|
||||||
nill,
|
z.null(),
|
||||||
string,
|
z.string(),
|
||||||
object({ any: array(matchOldUniqueBy) }),
|
z.object({ any: z.array(matchOldUniqueBy) }),
|
||||||
object({ all: array(matchOldUniqueBy) }),
|
z.object({ all: z.array(matchOldUniqueBy) }),
|
||||||
),
|
]),
|
||||||
)
|
)
|
||||||
|
|
||||||
const matchOldListValueSpecObject = object({
|
type OldListValueSpecObject = {
|
||||||
spec: _matchOldConfigSpec, // this is a mapped type of the config object at this level, replacing the object's values with specs on those values
|
spec: OldConfigSpec
|
||||||
"unique-by": matchOldUniqueBy.nullable().optional(), // indicates whether duplicates can be permitted in the list
|
"unique-by"?: OldUniqueBy | null
|
||||||
"display-as": string.nullable().optional(), // this should be a handlebars template which can make use of the entire config which corresponds to 'spec'
|
"display-as"?: string | null
|
||||||
})
|
}
|
||||||
const matchOldListValueSpecUnion = object({
|
const matchOldListValueSpecObject: z.ZodType<OldListValueSpecObject> = z.object(
|
||||||
|
{
|
||||||
|
spec: z.lazy(() => matchOldConfigSpec), // this is a mapped type of the config object at this level, replacing the object's values with specs on those values
|
||||||
|
"unique-by": matchOldUniqueBy.nullable().optional(), // indicates whether duplicates can be permitted in the list
|
||||||
|
"display-as": z.string().nullable().optional(), // this should be a handlebars template which can make use of the entire config which corresponds to 'spec'
|
||||||
|
},
|
||||||
|
)
|
||||||
|
type OldListValueSpecUnion = {
|
||||||
|
"unique-by"?: OldUniqueBy | null
|
||||||
|
"display-as"?: string | null
|
||||||
|
tag: z.infer<typeof matchOldUnionTagSpec>
|
||||||
|
variants: Record<string, OldConfigSpec>
|
||||||
|
}
|
||||||
|
const matchOldListValueSpecUnion: z.ZodType<OldListValueSpecUnion> = z.object({
|
||||||
"unique-by": matchOldUniqueBy.nullable().optional(),
|
"unique-by": matchOldUniqueBy.nullable().optional(),
|
||||||
"display-as": string.nullable().optional(),
|
"display-as": z.string().nullable().optional(),
|
||||||
tag: matchOldUnionTagSpec,
|
tag: matchOldUnionTagSpec,
|
||||||
variants: dictionary([string, _matchOldConfigSpec]),
|
variants: z.record(
|
||||||
|
z.string(),
|
||||||
|
z.lazy(() => matchOldConfigSpec),
|
||||||
|
),
|
||||||
})
|
})
|
||||||
const matchOldListValueSpecString = object({
|
const matchOldListValueSpecString = z.object({
|
||||||
masked: boolean.nullable().optional(),
|
masked: z.boolean().nullable().optional(),
|
||||||
copyable: boolean.nullable().optional(),
|
copyable: z.boolean().nullable().optional(),
|
||||||
pattern: string.nullable().optional(),
|
pattern: z.string().nullable().optional(),
|
||||||
"pattern-description": string.nullable().optional(),
|
"pattern-description": z.string().nullable().optional(),
|
||||||
placeholder: string.nullable().optional(),
|
placeholder: z.string().nullable().optional(),
|
||||||
})
|
})
|
||||||
|
|
||||||
const matchOldListValueSpecEnum = object({
|
const matchOldListValueSpecEnum = z.object({
|
||||||
values: array(string),
|
values: z.array(z.string()),
|
||||||
"value-names": dictionary([string, string]),
|
"value-names": z.record(z.string(), z.string()),
|
||||||
})
|
})
|
||||||
const matchOldListValueSpecNumber = object({
|
const matchOldListValueSpecNumber = z.object({
|
||||||
range: string,
|
range: z.string(),
|
||||||
integral: boolean,
|
integral: z.boolean(),
|
||||||
units: string.nullable().optional(),
|
units: z.string().nullable().optional(),
|
||||||
placeholder: anyOf(number, string).nullable().optional(),
|
placeholder: z.union([z.number(), z.string()]).nullable().optional(),
|
||||||
})
|
})
|
||||||
|
|
||||||
|
type OldValueSpecListBase = {
|
||||||
|
type: "list"
|
||||||
|
range: string
|
||||||
|
default: string[] | number[] | OldDefaultString[] | Record<string, unknown>[]
|
||||||
|
name: string
|
||||||
|
description?: string | null
|
||||||
|
warning?: string | null
|
||||||
|
}
|
||||||
|
|
||||||
|
type OldValueSpecList = OldValueSpecListBase &
|
||||||
|
(
|
||||||
|
| { subtype: "string"; spec: z.infer<typeof matchOldListValueSpecString> }
|
||||||
|
| { subtype: "enum"; spec: z.infer<typeof matchOldListValueSpecEnum> }
|
||||||
|
| { subtype: "object"; spec: OldListValueSpecObject }
|
||||||
|
| { subtype: "number"; spec: z.infer<typeof matchOldListValueSpecNumber> }
|
||||||
|
| { subtype: "union"; spec: OldListValueSpecUnion }
|
||||||
|
)
|
||||||
|
|
||||||
// represents a spec for a list
|
// represents a spec for a list
|
||||||
export const matchOldValueSpecList = every(
|
export const matchOldValueSpecList: z.ZodType<OldValueSpecList> =
|
||||||
object({
|
z.intersection(
|
||||||
type: literals("list"),
|
z.object({
|
||||||
range: string, // '[0,1]' (inclusive) OR '[0,*)' (right unbounded), normal math rules
|
type: z.enum(["list"]),
|
||||||
default: anyOf(
|
range: z.string(), // '[0,1]' (inclusive) OR '[0,*)' (right unbounded), normal math rules
|
||||||
array(string),
|
default: z.union([
|
||||||
array(number),
|
z.array(z.string()),
|
||||||
array(matchOldDefaultString),
|
z.array(z.number()),
|
||||||
array(object),
|
z.array(matchOldDefaultString),
|
||||||
),
|
z.array(z.object({}).passthrough()),
|
||||||
name: string,
|
]),
|
||||||
description: string.nullable().optional(),
|
name: z.string(),
|
||||||
warning: string.nullable().optional(),
|
description: z.string().nullable().optional(),
|
||||||
}),
|
warning: z.string().nullable().optional(),
|
||||||
anyOf(
|
|
||||||
object({
|
|
||||||
subtype: literals("string"),
|
|
||||||
spec: matchOldListValueSpecString,
|
|
||||||
}),
|
}),
|
||||||
object({
|
z.union([
|
||||||
subtype: literals("enum"),
|
z.object({
|
||||||
spec: matchOldListValueSpecEnum,
|
subtype: z.enum(["string"]),
|
||||||
}),
|
spec: matchOldListValueSpecString,
|
||||||
object({
|
}),
|
||||||
subtype: literals("object"),
|
z.object({
|
||||||
spec: matchOldListValueSpecObject,
|
subtype: z.enum(["enum"]),
|
||||||
}),
|
spec: matchOldListValueSpecEnum,
|
||||||
object({
|
}),
|
||||||
subtype: literals("number"),
|
z.object({
|
||||||
spec: matchOldListValueSpecNumber,
|
subtype: z.enum(["object"]),
|
||||||
}),
|
spec: matchOldListValueSpecObject,
|
||||||
object({
|
}),
|
||||||
subtype: literals("union"),
|
z.object({
|
||||||
spec: matchOldListValueSpecUnion,
|
subtype: z.enum(["number"]),
|
||||||
}),
|
spec: matchOldListValueSpecNumber,
|
||||||
),
|
}),
|
||||||
)
|
z.object({
|
||||||
type OldValueSpecList = typeof matchOldValueSpecList._TYPE
|
subtype: z.enum(["union"]),
|
||||||
|
spec: matchOldListValueSpecUnion,
|
||||||
|
}),
|
||||||
|
]),
|
||||||
|
) as unknown as z.ZodType<OldValueSpecList>
|
||||||
|
|
||||||
const matchOldValueSpecPointer = every(
|
type OldValueSpecPointer = {
|
||||||
object({
|
type: "pointer"
|
||||||
type: literal("pointer"),
|
} & (
|
||||||
}),
|
| {
|
||||||
anyOf(
|
subtype: "package"
|
||||||
object({
|
target: "tor-key" | "tor-address" | "lan-address"
|
||||||
subtype: literal("package"),
|
"package-id": string
|
||||||
target: literals("tor-key", "tor-address", "lan-address"),
|
interface: string
|
||||||
"package-id": string,
|
}
|
||||||
interface: string,
|
| {
|
||||||
}),
|
subtype: "package"
|
||||||
object({
|
target: "config"
|
||||||
subtype: literal("package"),
|
"package-id": string
|
||||||
target: literals("config"),
|
selector: string
|
||||||
"package-id": string,
|
multi: boolean
|
||||||
selector: string,
|
}
|
||||||
multi: boolean,
|
|
||||||
}),
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
type OldValueSpecPointer = typeof matchOldValueSpecPointer._TYPE
|
const matchOldValueSpecPointer: z.ZodType<OldValueSpecPointer> = z.intersection(
|
||||||
|
z.object({
|
||||||
|
type: z.literal("pointer"),
|
||||||
|
}),
|
||||||
|
z.union([
|
||||||
|
z.object({
|
||||||
|
subtype: z.literal("package"),
|
||||||
|
target: z.enum(["tor-key", "tor-address", "lan-address"]),
|
||||||
|
"package-id": z.string(),
|
||||||
|
interface: z.string(),
|
||||||
|
}),
|
||||||
|
z.object({
|
||||||
|
subtype: z.literal("package"),
|
||||||
|
target: z.enum(["config"]),
|
||||||
|
"package-id": z.string(),
|
||||||
|
selector: z.string(),
|
||||||
|
multi: z.boolean(),
|
||||||
|
}),
|
||||||
|
]),
|
||||||
|
) as unknown as z.ZodType<OldValueSpecPointer>
|
||||||
|
|
||||||
export const matchOldValueSpec = anyOf(
|
type OldValueSpecString = z.infer<typeof matchOldValueSpecString>
|
||||||
|
|
||||||
|
type OldValueSpec =
|
||||||
|
| OldValueSpecString
|
||||||
|
| OldValueSpecNumber
|
||||||
|
| OldValueSpecBoolean
|
||||||
|
| OldValueSpecObject
|
||||||
|
| OldValueSpecEnum
|
||||||
|
| OldValueSpecList
|
||||||
|
| OldValueSpecUnion
|
||||||
|
| OldValueSpecPointer
|
||||||
|
|
||||||
|
export const matchOldValueSpec: z.ZodType<OldValueSpec> = z.union([
|
||||||
matchOldValueSpecString,
|
matchOldValueSpecString,
|
||||||
matchOldValueSpecNumber,
|
matchOldValueSpecNumber,
|
||||||
matchOldValueSpecBoolean,
|
matchOldValueSpecBoolean,
|
||||||
matchOldValueSpecObject,
|
matchOldValueSpecObject as z.ZodType<OldValueSpecObject>,
|
||||||
matchOldValueSpecEnum,
|
matchOldValueSpecEnum,
|
||||||
matchOldValueSpecList,
|
matchOldValueSpecList as z.ZodType<OldValueSpecList>,
|
||||||
matchOldValueSpecUnion,
|
matchOldValueSpecUnion as z.ZodType<OldValueSpecUnion>,
|
||||||
matchOldValueSpecPointer,
|
matchOldValueSpecPointer as z.ZodType<OldValueSpecPointer>,
|
||||||
)
|
])
|
||||||
type OldValueSpec = typeof matchOldValueSpec._TYPE
|
|
||||||
|
|
||||||
setMatchOldConfigSpec(dictionary([string, matchOldValueSpec]))
|
|
||||||
|
|
||||||
export class Range {
|
export class Range {
|
||||||
min?: number
|
min?: number
|
||||||
|
|||||||
@@ -47,11 +47,12 @@ export class SystemForStartOs implements System {
|
|||||||
getActionInput(
|
getActionInput(
|
||||||
effects: Effects,
|
effects: Effects,
|
||||||
id: string,
|
id: string,
|
||||||
|
prefill: Record<string, unknown> | null,
|
||||||
timeoutMs: number | null,
|
timeoutMs: number | null,
|
||||||
): Promise<T.ActionInput | null> {
|
): Promise<T.ActionInput | null> {
|
||||||
const action = this.abi.actions.get(id)
|
const action = this.abi.actions.get(id)
|
||||||
if (!action) throw new Error(`Action ${id} not found`)
|
if (!action) throw new Error(`Action ${id} not found`)
|
||||||
return action.getInput({ effects })
|
return action.getInput({ effects, prefill })
|
||||||
}
|
}
|
||||||
runAction(
|
runAction(
|
||||||
effects: Effects,
|
effects: Effects,
|
||||||
@@ -70,7 +71,7 @@ export class SystemForStartOs implements System {
|
|||||||
this.starting = true
|
this.starting = true
|
||||||
effects.constRetry = utils.once(() => {
|
effects.constRetry = utils.once(() => {
|
||||||
console.debug(".const() triggered")
|
console.debug(".const() triggered")
|
||||||
effects.restart()
|
if (effects.isInContext) effects.restart()
|
||||||
})
|
})
|
||||||
let mainOnTerm: () => Promise<void> | undefined
|
let mainOnTerm: () => Promise<void> | undefined
|
||||||
const daemons = await (
|
const daemons = await (
|
||||||
|
|||||||
@@ -33,6 +33,7 @@ export type System = {
|
|||||||
getActionInput(
|
getActionInput(
|
||||||
effects: Effects,
|
effects: Effects,
|
||||||
actionId: string,
|
actionId: string,
|
||||||
|
prefill: Record<string, unknown> | null,
|
||||||
timeoutMs: number | null,
|
timeoutMs: number | null,
|
||||||
): Promise<T.ActionInput | null>
|
): Promise<T.ActionInput | null>
|
||||||
|
|
||||||
|
|||||||
@@ -1,41 +1,19 @@
|
|||||||
import {
|
import { z } from "@start9labs/start-sdk"
|
||||||
object,
|
|
||||||
literal,
|
|
||||||
string,
|
|
||||||
boolean,
|
|
||||||
array,
|
|
||||||
dictionary,
|
|
||||||
literals,
|
|
||||||
number,
|
|
||||||
Parser,
|
|
||||||
some,
|
|
||||||
} from "ts-matches"
|
|
||||||
import { matchDuration } from "./Duration"
|
import { matchDuration } from "./Duration"
|
||||||
|
|
||||||
const VolumeId = string
|
export const matchDockerProcedure = z.object({
|
||||||
const Path = string
|
type: z.literal("docker"),
|
||||||
|
image: z.string(),
|
||||||
export type VolumeId = string
|
system: z.boolean().optional(),
|
||||||
export type Path = string
|
entrypoint: z.string(),
|
||||||
export const matchDockerProcedure = object({
|
args: z.array(z.string()).default([]),
|
||||||
type: literal("docker"),
|
mounts: z.record(z.string(), z.string()).optional(),
|
||||||
image: string,
|
"io-format": z
|
||||||
system: boolean.optional(),
|
.enum(["json", "json-pretty", "yaml", "cbor", "toml", "toml-pretty"])
|
||||||
entrypoint: string,
|
|
||||||
args: array(string).defaultTo([]),
|
|
||||||
mounts: dictionary([VolumeId, Path]).optional(),
|
|
||||||
"io-format": literals(
|
|
||||||
"json",
|
|
||||||
"json-pretty",
|
|
||||||
"yaml",
|
|
||||||
"cbor",
|
|
||||||
"toml",
|
|
||||||
"toml-pretty",
|
|
||||||
)
|
|
||||||
.nullable()
|
.nullable()
|
||||||
.optional(),
|
.optional(),
|
||||||
"sigterm-timeout": some(number, matchDuration).onMismatch(30),
|
"sigterm-timeout": z.union([z.number(), matchDuration]).catch(30),
|
||||||
inject: boolean.defaultTo(false),
|
inject: z.boolean().default(false),
|
||||||
})
|
})
|
||||||
|
|
||||||
export type DockerProcedure = typeof matchDockerProcedure._TYPE
|
export type DockerProcedure = z.infer<typeof matchDockerProcedure>
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
import { string } from "ts-matches"
|
import { z } from "@start9labs/start-sdk"
|
||||||
|
|
||||||
export type TimeUnit = "d" | "h" | "s" | "ms" | "m" | "µs" | "ns"
|
export type TimeUnit = "d" | "h" | "s" | "ms" | "m" | "µs" | "ns"
|
||||||
export type Duration = `${number}${TimeUnit}`
|
export type Duration = `${number}${TimeUnit}`
|
||||||
|
|
||||||
const durationRegex = /^([0-9]*(\.[0-9]+)?)(ns|µs|ms|s|m|d)$/
|
const durationRegex = /^([0-9]*(\.[0-9]+)?)(ns|µs|ms|s|m|d)$/
|
||||||
|
|
||||||
export const matchDuration = string.refine(isDuration)
|
export const matchDuration = z.string().refine(isDuration)
|
||||||
export function isDuration(value: string): value is Duration {
|
export function isDuration(value: string): value is Duration {
|
||||||
return durationRegex.test(value)
|
return durationRegex.test(value)
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
import { literals, some, string } from "ts-matches"
|
import { z } from "@start9labs/start-sdk"
|
||||||
|
|
||||||
type NestedPath<A extends string, B extends string> = `/${A}/${string}/${B}`
|
type NestedPath<A extends string, B extends string> = `/${A}/${string}/${B}`
|
||||||
type NestedPaths = NestedPath<"actions", "run" | "getInput">
|
type NestedPaths = NestedPath<"actions", "run" | "getInput">
|
||||||
// prettier-ignore
|
// prettier-ignore
|
||||||
type UnNestPaths<A> =
|
type UnNestPaths<A> =
|
||||||
A extends `${infer A}/${infer B}` ? [...UnNestPaths<A>, ... UnNestPaths<B>] :
|
A extends `${infer A}/${infer B}` ? [...UnNestPaths<A>, ... UnNestPaths<B>] :
|
||||||
[A]
|
[A]
|
||||||
|
|
||||||
export function unNestPath<A extends string>(a: A): UnNestPaths<A> {
|
export function unNestPath<A extends string>(a: A): UnNestPaths<A> {
|
||||||
@@ -17,14 +17,14 @@ function isNestedPath(path: string): path is NestedPaths {
|
|||||||
return true
|
return true
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
export const jsonPath = some(
|
export const jsonPath = z.union([
|
||||||
literals(
|
z.enum([
|
||||||
"/packageInit",
|
"/packageInit",
|
||||||
"/packageUninit",
|
"/packageUninit",
|
||||||
"/backup/create",
|
"/backup/create",
|
||||||
"/backup/restore",
|
"/backup/restore",
|
||||||
),
|
]),
|
||||||
string.refine(isNestedPath, "isNestedPath"),
|
z.string().refine(isNestedPath),
|
||||||
)
|
])
|
||||||
|
|
||||||
export type JsonPath = typeof jsonPath._TYPE
|
export type JsonPath = z.infer<typeof jsonPath>
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
import { RpcListener } from "./Adapters/RpcListener"
|
import { RpcListener } from "./Adapters/RpcListener"
|
||||||
import { SystemForEmbassy } from "./Adapters/Systems/SystemForEmbassy"
|
|
||||||
import { AllGetDependencies } from "./Interfaces/AllGetDependencies"
|
import { AllGetDependencies } from "./Interfaces/AllGetDependencies"
|
||||||
import { getSystem } from "./Adapters/Systems"
|
import { getSystem } from "./Adapters/Systems"
|
||||||
|
|
||||||
@@ -7,6 +6,18 @@ const getDependencies: AllGetDependencies = {
|
|||||||
system: getSystem,
|
system: getSystem,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
process.on("unhandledRejection", (reason) => {
|
||||||
|
if (
|
||||||
|
reason instanceof Error &&
|
||||||
|
"muteUnhandled" in reason &&
|
||||||
|
reason.muteUnhandled
|
||||||
|
) {
|
||||||
|
// mute
|
||||||
|
} else {
|
||||||
|
console.error("Unhandled promise rejection", reason)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
for (let s of ["SIGTERM", "SIGINT", "SIGHUP"]) {
|
for (let s of ["SIGTERM", "SIGINT", "SIGHUP"]) {
|
||||||
process.on(s, (s) => {
|
process.on(s, (s) => {
|
||||||
console.log(`Caught ${s}`)
|
console.log(`Caught ${s}`)
|
||||||
|
|||||||
@@ -16,6 +16,6 @@ case $ARCH in
|
|||||||
esac
|
esac
|
||||||
|
|
||||||
docker run --rm $USE_TTY --platform=$DOCKER_PLATFORM -eARCH --privileged -v "$(pwd):/root/start-os" start9/build-env /root/start-os/container-runtime/update-image.sh
|
docker run --rm $USE_TTY --platform=$DOCKER_PLATFORM -eARCH --privileged -v "$(pwd):/root/start-os" start9/build-env /root/start-os/container-runtime/update-image.sh
|
||||||
if [ "$(ls -nd "rootfs.${ARCH}.squashfs" | awk '{ print $3 }')" != "$UID" ]; then
|
if [ "$(ls -nd "container-runtime/rootfs.${ARCH}.squashfs" | awk '{ print $3 }')" != "$UID" ]; then
|
||||||
docker run --rm $USE_TTY -v "$(pwd):/root/start-os" start9/build-env chown -R $UID:$UID /root/start-os/container-runtime
|
docker run --rm $USE_TTY -v "$(pwd):/root/start-os" start9/build-env chown -R $UID:$UID /root/start-os/container-runtime
|
||||||
fi
|
fi
|
||||||
72 core/ARCHITECTURE.md Normal file
@@ -0,0 +1,72 @@
|
|||||||
|
# Core Architecture
|
||||||
|
|
||||||
|
The Rust backend daemon for StartOS.
|
||||||
|
|
||||||
|
## Binaries
|
||||||
|
|
||||||
|
The crate produces a single binary `startbox` that is symlinked under different names for different behavior:
|
||||||
|
|
||||||
|
- `startbox` / `startd` — Main daemon
|
||||||
|
- `start-cli` — CLI interface
|
||||||
|
- `start-container` — Runs inside LXC containers; communicates with host and manages subcontainers
|
||||||
|
- `registrybox` — Registry daemon
|
||||||
|
- `tunnelbox` — VPN/tunnel daemon
|
||||||
|
|
||||||
|
## Crate Structure
|
||||||
|
|
||||||
|
- `startos` — Core library that supports building `startbox`
|
||||||
|
- `helpers` — Utility functions used across both `startos` and `js-engine`
|
||||||
|
- `models` — Types shared across `startos`, `js-engine`, and `helpers`
|
||||||
|
|
||||||
|
## Key Modules
|
||||||
|
|
||||||
|
- `src/context/` — Context types (RpcContext, CliContext, InitContext, DiagnosticContext)
|
||||||
|
- `src/service/` — Service lifecycle management with actor pattern (`service_actor.rs`)
|
||||||
|
- `src/db/model/` — Patch-DB models (`public.rs` synced to frontend, `private.rs` backend-only)
|
||||||
|
- `src/net/` — Networking (DNS, ACME, WiFi, Tor via Arti, WireGuard)
|
||||||
|
- `src/s9pk/` — S9PK package format (merkle archive)
|
||||||
|
- `src/registry/` — Package registry management
|
||||||
|
|
||||||
|
## RPC Pattern
|
||||||
|
|
||||||
|
The API is JSON-RPC (not REST). All endpoints are RPC methods organized in a hierarchical command structure using [rpc-toolkit](https://github.com/Start9Labs/rpc-toolkit). Handlers are registered in a tree of `ParentHandler` nodes, with four handler types: `from_fn_async` (standard), `from_fn_async_local` (non-Send), `from_fn` (sync), and `from_fn_blocking` (blocking). Metadata like `.with_about()` drives middleware and documentation.
|
||||||
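As a rough sketch of that shape, using only the names mentioned above (`ParentHandler`, `from_fn_async`, `from_fn`): the registration method (`.subcommand`), the context generic, `Empty`, and the handler bodies are assumptions, so treat [rpc-toolkit.md](rpc-toolkit.md) as authoritative for the real signatures.

```rust
use rpc_toolkit::{from_fn, from_fn_async, ParentHandler};
// RpcContext, Error, and a no-field params struct `Empty` are assumed in scope from crate modules.

// Hypothetical leaf handlers; the common shape is async (ctx, params) -> Result<_, Error>.
async fn list_packages(ctx: RpcContext, _params: Empty) -> Result<Vec<String>, Error> {
    Ok(ctx.installed_package_ids().await) // hypothetical ctx helper
}

fn version(_ctx: RpcContext, _params: Empty) -> Result<&'static str, Error> {
    Ok(env!("CARGO_PKG_VERSION"))
}

// Nodes nest to form the command hierarchy: `package list`, `version`, ...
pub fn api() -> ParentHandler<RpcContext> {
    ParentHandler::new()
        .subcommand(
            "package",
            ParentHandler::new().subcommand("list", from_fn_async(list_packages)),
        )
        .subcommand("version", from_fn(version))
    // .with_about(...) on a node or leaf attaches the description used for CLI help and docs.
}
```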
|
|
||||||
|
See [rpc-toolkit.md](rpc-toolkit.md) for full handler patterns and configuration.
|
||||||
|
|
||||||
|
## Patch-DB Patterns
|
||||||
|
|
||||||
|
Patch-DB provides diff-based state synchronization. Changes to `db/model/public.rs` automatically sync to the frontend.
|
||||||
|
|
||||||
|
**Key patterns:**
|
||||||
|
- `db.peek().await` — Get a read-only snapshot of the database state
|
||||||
|
- `db.mutate(|db| { ... }).await` — Apply mutations atomically, returns `MutateResult`
|
||||||
|
- `#[derive(HasModel)]` — Derive macro for types stored in the database, generates typed accessors
|
||||||
|
|
||||||
|
**Generated accessor types** (from `HasModel` derive):
|
||||||
|
- `as_field()` — Immutable reference: `&Model<T>`
|
||||||
|
- `as_field_mut()` — Mutable reference: `&mut Model<T>`
|
||||||
|
- `into_field()` — Owned value: `Model<T>`
|
||||||
|
|
||||||
|
**`Model<T>` APIs** (from `db/prelude.rs`):
|
||||||
|
- `.de()` — Deserialize to `T`
|
||||||
|
- `.ser(&value)` — Serialize from `T`
|
||||||
|
- `.mutate(|v| ...)` — Deserialize, mutate, reserialize
|
||||||
|
- For maps: `.keys()`, `.as_idx(&key)`, `.as_idx_mut(&key)`, `.insert()`, `.remove()`, `.contains_key()`
|
||||||
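A short sketch of how these pieces combine, assuming `ctx.db` is the Patch-DB handle and a hypothetical `name: String` field on the public model; the `as_name*` accessors below stand in for the per-field accessors that `HasModel` generates and are not the real schema:

```rust
// Minimal sketch only; accessor names are illustrative.
async fn rename_server(ctx: &RpcContext) -> Result<(), Error> {
    // Read-only snapshot of current state
    let peek = ctx.db.peek().await;
    let old: String = peek.as_public().as_name().de()?;

    // Atomic mutation: everything inside the closure commits together
    ctx.db
        .mutate(|db| db.as_public_mut().as_name_mut().ser(&format!("{old}-renamed")))
        .await?;
    Ok(())
}
```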
|
|
||||||
|
See [patchdb.md](patchdb.md) for `TypedDbWatch<T>` construction, API, and usage patterns.
|
||||||
|
|
||||||
|
## i18n
|
||||||
|
|
||||||
|
See [i18n-patterns.md](i18n-patterns.md) for internationalization key conventions and the `t!()` macro.
|
||||||
|
|
||||||
|
## Rust Utilities & Patterns
|
||||||
|
|
||||||
|
See [core-rust-patterns.md](core-rust-patterns.md) for common utilities (Invoke trait, Guard pattern, mount guards, Apply trait, etc.).
|
||||||
|
|
||||||
|
## Related Documentation
|
||||||
|
|
||||||
|
- [rpc-toolkit.md](rpc-toolkit.md) — JSON-RPC handler patterns
|
||||||
|
- [patchdb.md](patchdb.md) — Patch-DB watch patterns and TypedDbWatch
|
||||||
|
- [i18n-patterns.md](i18n-patterns.md) — Internationalization conventions
|
||||||
|
- [core-rust-patterns.md](core-rust-patterns.md) — Common Rust utilities
|
||||||
|
- [s9pk-structure.md](s9pk-structure.md) — S9PK package format
|
||||||
28 core/CLAUDE.md Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# Core — Rust Backend
|
||||||
|
|
||||||
|
The Rust backend daemon for StartOS.
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
See [ARCHITECTURE.md](ARCHITECTURE.md) for binaries, modules, Patch-DB patterns, and related documentation.
|
||||||
|
|
||||||
|
See [CONTRIBUTING.md](CONTRIBUTING.md) for how to add RPC endpoints, TS-exported types, and i18n keys.
|
||||||
|
|
||||||
|
## Quick Reference
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo check -p start-os # Type check
|
||||||
|
make test-core # Run tests
|
||||||
|
make ts-bindings # Regenerate TS types after changing #[ts(export)] structs
|
||||||
|
cd sdk && make baseDist dist # Rebuild SDK after ts-bindings
|
||||||
|
```
|
||||||
|
|
||||||
|
## Operating Rules
|
||||||
|
|
||||||
|
- Always run `cargo check -p start-os` after modifying Rust code
|
||||||
|
- When adding RPC endpoints, follow the patterns in [rpc-toolkit.md](rpc-toolkit.md)
|
||||||
|
- When modifying `#[ts(export)]` types, regenerate bindings and rebuild the SDK (see [ARCHITECTURE.md](../ARCHITECTURE.md#build-pipeline))
|
||||||
|
- **i18n is mandatory** — any user-facing string must go in `core/locales/i18n.yaml` with all 5 locales (`en_US`, `de_DE`, `es_ES`, `fr_FR`, `pl_PL`). This includes CLI subcommand descriptions (`about.<name>`), CLI arg help (`help.arg.<name>`), error messages (`error.<name>`), notifications, setup messages, and any other text shown to users. Entries are alphabetically ordered within their section. See [i18n-patterns.md](i18n-patterns.md)
|
||||||
|
- When using DB watches, follow the `TypedDbWatch<T>` patterns in [patchdb.md](patchdb.md)
|
||||||
|
- **Always use `.invoke(ErrorKind::...)` instead of `.status()` when running CLI commands** via `tokio::process::Command`. The `Invoke` trait (from `crate::util::Invoke`) captures stdout/stderr and checks exit codes properly. Using `.status()` leaks stderr directly to system logs, creating noise. For check-then-act patterns (e.g. `iptables -C`), use `.invoke(...).await.is_ok()` / `.is_err()` instead of `.status().await.map_or(false, |s| s.success())` (see the sketch after this list).
|
||||||
|
- Always use file utils in util::io instead of tokio::fs when available
|
||||||
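A hedged sketch of the check-then-act rule above; the chain, port, rule, and `ErrorKind::Network` are illustrative only:

```rust
use tokio::process::Command;
use crate::prelude::*; // Error, ErrorKind
use crate::util::Invoke;

// Illustrative only: the rule and port are made up.
async fn ensure_accept_rule() -> Result<(), Error> {
    let rule = ["INPUT", "-p", "tcp", "--dport", "5959", "-j", "ACCEPT"];

    // Check: `iptables -C` exits non-zero when the rule is absent, so is_err() means "missing".
    let missing = Command::new("iptables")
        .arg("-C")
        .args(rule)
        .invoke(ErrorKind::Network)
        .await
        .is_err();

    // Act: append only if missing; stdout/stderr are captured instead of leaking to the journal.
    if missing {
        Command::new("iptables")
            .arg("-A")
            .args(rule)
            .invoke(ErrorKind::Network)
            .await?;
    }
    Ok(())
}
```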
49 core/CONTRIBUTING.md Normal file
@@ -0,0 +1,49 @@
|
|||||||
|
# Contributing to Core
|
||||||
|
|
||||||
|
For general environment setup, cloning, and build system, see the root [CONTRIBUTING.md](../CONTRIBUTING.md).
|
||||||
|
|
||||||
|
## Prerequisites
|
||||||
|
|
||||||
|
- [Rust](https://rustup.rs) (nightly for formatting)
|
||||||
|
- [rust-analyzer](https://rust-analyzer.github.io/) recommended
|
||||||
|
- [Docker](https://docs.docker.com/get-docker/) (for cross-compilation via `rust-zig-builder` container)
|
||||||
|
|
||||||
|
## Common Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
cargo check -p start-os # Type check
|
||||||
|
cargo test --features=test # Run tests (or: make test-core)
|
||||||
|
make format # Format with nightly rustfmt
|
||||||
|
cd core && cargo test <test_name> --features=test # Run a specific test
|
||||||
|
```
|
||||||
|
|
||||||
|
## Adding a New RPC Endpoint
|
||||||
|
|
||||||
|
1. Define a params struct with `#[derive(Deserialize, Serialize)]`
|
||||||
|
2. Choose a handler type (`from_fn_async` for most cases)
|
||||||
|
3. Write the handler function: `async fn my_handler(ctx: RpcContext, params: MyParams) -> Result<MyResponse, Error>`
|
||||||
|
4. Register it in the appropriate `ParentHandler` tree
|
||||||
|
5. If params/response should be available in TypeScript, add `#[derive(TS)]` and `#[ts(export)]`
|
||||||
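A hedged sketch of steps 1-3 and 5 with invented names; the registration call in step 4 follows [rpc-toolkit.md](rpc-toolkit.md), not this page:

```rust
use rpc_toolkit::from_fn_async;
use serde::{Deserialize, Serialize};
use ts_rs::TS;

// 1 + 5: params struct, also exported to TypeScript
#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct GreetParams {
    pub name: String,
}

// 2 + 3: async handler with the (ctx, params) -> Result shape
pub async fn greet(_ctx: RpcContext, params: GreetParams) -> Result<String, Error> {
    Ok(format!("Hello, {}!", params.name))
}

// 4: register the leaf in the appropriate ParentHandler tree, e.g.
//    .subcommand("greet", from_fn_async(greet))
```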
|
|
||||||
|
See [rpc-toolkit.md](rpc-toolkit.md) for full handler patterns and all four handler types.
|
||||||
|
|
||||||
|
## Adding TS-Exported Types
|
||||||
|
|
||||||
|
When a Rust type needs to be available in TypeScript (for the web frontend or SDK):
|
||||||
|
|
||||||
|
1. Add `ts_rs::TS` to the derive list and `#[ts(export)]` to the struct/enum
|
||||||
|
2. Use `#[serde(rename_all = "camelCase")]` for JS-friendly field names
|
||||||
|
3. For types that don't implement TS (like `DateTime<Utc>`, `exver::Version`), use `#[ts(type = "string")]` overrides
|
||||||
|
4. For `u64` fields that should be JS `number` (not `bigint`), use `#[ts(type = "number")]`
|
||||||
|
5. Run `make ts-bindings` to regenerate — files appear in `core/bindings/` then sync to `sdk/base/lib/osBindings/`
|
||||||
|
6. Rebuild the SDK: `cd sdk && make baseDist dist`
|
||||||
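For instance, a sketch combining the overrides above (the struct and fields are invented):

```rust
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use ts_rs::TS;

#[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct PackageInfo {
    pub package_id: String,          // exported as `packageId: string`
    #[ts(type = "string")]
    pub installed_at: DateTime<Utc>, // no TS impl, so override to `string`
    #[ts(type = "number")]
    pub size_bytes: u64,             // keep JS `number` rather than `bigint`
}
```

After `make ts-bindings`, the generated `PackageInfo.ts` appears under `core/bindings/` and syncs to `sdk/base/lib/osBindings/`.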
|
|
||||||
|
## Adding i18n Keys
|
||||||
|
|
||||||
|
1. Add the key to `core/locales/i18n.yaml` with all 5 language translations
|
||||||
|
2. Use the `t!("your.key.name")` macro in Rust code
|
||||||
|
3. Follow existing namespace conventions — match the module path where the key is used
|
||||||
|
4. Use kebab-case for multi-word segments
|
||||||
|
5. Translations are validated at compile time
|
||||||
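A small end-to-end sketch (the key, wording, and call site are invented; only two of the five required locales are shown):

```rust
// core/locales/i18n.yaml entry (all five locales are mandatory in the real file):
//
//   net.port-forward.failed:
//     en_US: "Failed to forward port %{port}"
//     de_DE: "Port %{port} konnte nicht weitergeleitet werden"
//
// Rust call site, using the `t!` re-exported from the crate prelude:
fn port_forward_error(port: u16) -> String {
    t!("net.port-forward.failed", port = port.to_string()).to_string()
}
```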
|
|
||||||
|
See [i18n-patterns.md](i18n-patterns.md) for full conventions.
|
||||||
4167 core/Cargo.lock generated
@@ -15,7 +15,7 @@ license = "MIT"
|
|||||||
name = "start-os"
|
name = "start-os"
|
||||||
readme = "README.md"
|
readme = "README.md"
|
||||||
repository = "https://github.com/Start9Labs/start-os"
|
repository = "https://github.com/Start9Labs/start-os"
|
||||||
version = "0.4.0-alpha.17" # VERSION_BUMP
|
version = "0.4.0-alpha.23" # VERSION_BUMP
|
||||||
|
|
||||||
[lib]
|
[lib]
|
||||||
name = "startos"
|
name = "startos"
|
||||||
@@ -42,17 +42,6 @@ name = "tunnelbox"
|
|||||||
path = "src/main/tunnelbox.rs"
|
path = "src/main/tunnelbox.rs"
|
||||||
|
|
||||||
[features]
|
[features]
|
||||||
arti = [
|
|
||||||
"arti-client",
|
|
||||||
"safelog",
|
|
||||||
"tor-cell",
|
|
||||||
"tor-hscrypto",
|
|
||||||
"tor-hsservice",
|
|
||||||
"tor-keymgr",
|
|
||||||
"tor-llcrypto",
|
|
||||||
"tor-proto",
|
|
||||||
"tor-rtcompat",
|
|
||||||
]
|
|
||||||
beta = []
|
beta = []
|
||||||
console = ["console-subscriber", "tokio/tracing"]
|
console = ["console-subscriber", "tokio/tracing"]
|
||||||
default = []
|
default = []
|
||||||
@@ -62,16 +51,6 @@ unstable = ["backtrace-on-stack-overflow"]
|
|||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
aes = { version = "0.7.5", features = ["ctr"] }
|
aes = { version = "0.7.5", features = ["ctr"] }
|
||||||
arti-client = { version = "0.33", features = [
|
|
||||||
"compression",
|
|
||||||
"ephemeral-keystore",
|
|
||||||
"experimental-api",
|
|
||||||
"onion-service-client",
|
|
||||||
"onion-service-service",
|
|
||||||
"rustls",
|
|
||||||
"static",
|
|
||||||
"tokio",
|
|
||||||
], default-features = false, git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
|
||||||
async-acme = { version = "0.6.0", git = "https://github.com/dr-bonez/async-acme.git", features = [
|
async-acme = { version = "0.6.0", git = "https://github.com/dr-bonez/async-acme.git", features = [
|
||||||
"use_rustls",
|
"use_rustls",
|
||||||
"use_tokio",
|
"use_tokio",
|
||||||
@@ -84,7 +63,7 @@ async-compression = { version = "0.4.32", features = [
|
|||||||
] }
|
] }
|
||||||
async-stream = "0.3.5"
|
async-stream = "0.3.5"
|
||||||
async-trait = "0.1.74"
|
async-trait = "0.1.74"
|
||||||
axum = { version = "0.8.4", features = ["ws", "http2"] }
|
axum = { version = "0.8.4", features = ["http2", "ws"] }
|
||||||
backtrace-on-stack-overflow = { version = "0.3.0", optional = true }
|
backtrace-on-stack-overflow = { version = "0.3.0", optional = true }
|
||||||
base32 = "0.5.0"
|
base32 = "0.5.0"
|
||||||
base64 = "0.22.1"
|
base64 = "0.22.1"
|
||||||
@@ -100,7 +79,6 @@ console-subscriber = { version = "0.5.0", optional = true }
|
|||||||
const_format = "0.2.34"
|
const_format = "0.2.34"
|
||||||
cookie = "0.18.0"
|
cookie = "0.18.0"
|
||||||
cookie_store = "0.22.0"
|
cookie_store = "0.22.0"
|
||||||
curve25519-dalek = "4.1.3"
|
|
||||||
der = { version = "0.7.9", features = ["derive", "pem"] }
|
der = { version = "0.7.9", features = ["derive", "pem"] }
|
||||||
digest = "0.10.7"
|
digest = "0.10.7"
|
||||||
divrem = "1.0.0"
|
divrem = "1.0.0"
|
||||||
@@ -122,6 +100,7 @@ fd-lock-rs = "0.1.4"
|
|||||||
form_urlencoded = "1.2.1"
|
form_urlencoded = "1.2.1"
|
||||||
futures = "0.3.28"
|
futures = "0.3.28"
|
||||||
gpt = "4.1.0"
|
gpt = "4.1.0"
|
||||||
|
hashing-serializer = "0.1.1"
|
||||||
hex = "0.4.3"
|
hex = "0.4.3"
|
||||||
hickory-server = { version = "0.25.2", features = ["resolver"] }
|
hickory-server = { version = "0.25.2", features = ["resolver"] }
|
||||||
hmac = "0.12.1"
|
hmac = "0.12.1"
|
||||||
@@ -176,6 +155,7 @@ mio = "1"
|
|||||||
new_mime_guess = "4"
|
new_mime_guess = "4"
|
||||||
nix = { version = "0.30.1", features = [
|
nix = { version = "0.30.1", features = [
|
||||||
"fs",
|
"fs",
|
||||||
|
"hostname",
|
||||||
"mount",
|
"mount",
|
||||||
"net",
|
"net",
|
||||||
"process",
|
"process",
|
||||||
@@ -191,9 +171,7 @@ once_cell = "1.19.0"
|
|||||||
openssh-keys = "0.6.2"
|
openssh-keys = "0.6.2"
|
||||||
openssl = { version = "0.10.57", features = ["vendored"] }
|
openssl = { version = "0.10.57", features = ["vendored"] }
|
||||||
p256 = { version = "0.13.2", features = ["pem"] }
|
p256 = { version = "0.13.2", features = ["pem"] }
|
||||||
patch-db = { version = "*", path = "../patch-db/patch-db", features = [
|
patch-db = { version = "*", path = "../patch-db/core", features = ["trace"] }
|
||||||
"trace",
|
|
||||||
] }
|
|
||||||
pbkdf2 = "0.12.2"
|
pbkdf2 = "0.12.2"
|
||||||
pin-project = "1.1.3"
|
pin-project = "1.1.3"
|
||||||
pkcs8 = { version = "0.10.2", features = ["std"] }
|
pkcs8 = { version = "0.10.2", features = ["std"] }
|
||||||
@@ -205,16 +183,16 @@ r3bl_tui = "0.7.6"
|
|||||||
rand = "0.9.2"
|
rand = "0.9.2"
|
||||||
regex = "1.10.2"
|
regex = "1.10.2"
|
||||||
reqwest = { version = "0.12.25", features = [
|
reqwest = { version = "0.12.25", features = [
|
||||||
|
"http2",
|
||||||
"json",
|
"json",
|
||||||
"socks",
|
"socks",
|
||||||
"stream",
|
"stream",
|
||||||
"http2",
|
|
||||||
] }
|
] }
|
||||||
reqwest_cookie_store = "0.9.0"
|
reqwest_cookie_store = "0.9.0"
|
||||||
rpassword = "7.2.0"
|
rpassword = "7.2.0"
|
||||||
rust-argon2 = "3.0.0"
|
|
||||||
rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git" }
|
rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git" }
|
||||||
safelog = { version = "0.4.8", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
rust-argon2 = "3.0.0"
|
||||||
|
rust-i18n = "3.1.5"
|
||||||
semver = { version = "1.0.20", features = ["serde"] }
|
semver = { version = "1.0.20", features = ["serde"] }
|
||||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||||
serde_cbor = { package = "ciborium", version = "0.2.1" }
|
serde_cbor = { package = "ciborium", version = "0.2.1" }
|
||||||
@@ -223,6 +201,7 @@ serde_toml = { package = "toml", version = "0.9.9+spec-1.0.0" }
|
|||||||
serde_yaml = { package = "serde_yml", version = "0.0.12" }
|
serde_yaml = { package = "serde_yml", version = "0.0.12" }
|
||||||
sha-crypt = "0.5.0"
|
sha-crypt = "0.5.0"
|
||||||
sha2 = "0.10.2"
|
sha2 = "0.10.2"
|
||||||
|
sha3 = "0.10"
|
||||||
signal-hook = "0.3.17"
|
signal-hook = "0.3.17"
|
||||||
socket2 = { version = "0.6.0", features = ["all"] }
|
socket2 = { version = "0.6.0", features = ["all"] }
|
||||||
socks5-impl = { version = "0.7.2", features = ["client", "server"] }
|
socks5-impl = { version = "0.7.2", features = ["client", "server"] }
|
||||||
@@ -242,28 +221,11 @@ tokio-stream = { version = "0.1.14", features = ["io-util", "net", "sync"] }
|
|||||||
tokio-tar = { git = "https://github.com/dr-bonez/tokio-tar.git" }
|
tokio-tar = { git = "https://github.com/dr-bonez/tokio-tar.git" }
|
||||||
tokio-tungstenite = { version = "0.26.2", features = ["native-tls", "url"] }
|
tokio-tungstenite = { version = "0.26.2", features = ["native-tls", "url"] }
|
||||||
tokio-util = { version = "0.7.9", features = ["io"] }
|
tokio-util = { version = "0.7.9", features = ["io"] }
|
||||||
tor-cell = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
|
||||||
tor-hscrypto = { version = "0.33", features = [
|
|
||||||
"full",
|
|
||||||
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
|
||||||
tor-hsservice = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
|
||||||
tor-keymgr = { version = "0.33", features = [
|
|
||||||
"ephemeral-keystore",
|
|
||||||
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
|
||||||
tor-llcrypto = { version = "0.33", features = [
|
|
||||||
"full",
|
|
||||||
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
|
||||||
tor-proto = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
|
||||||
tor-rtcompat = { version = "0.33", features = [
|
|
||||||
"rustls",
|
|
||||||
"tokio",
|
|
||||||
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true }
|
|
||||||
torut = "0.2.1"
|
|
||||||
tower-service = "0.3.3"
|
tower-service = "0.3.3"
|
||||||
tracing = "0.1.39"
|
tracing = "0.1.39"
|
||||||
tracing-error = "0.2.0"
|
tracing-error = "0.2.0"
|
||||||
tracing-journald = "0.3.0"
|
tracing-journald = "0.3.0"
|
||||||
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
|
tracing-subscriber = { version = "=0.3.19", features = ["env-filter"] }
|
||||||
ts-rs = "9.0.1"
|
ts-rs = "9.0.1"
|
||||||
typed-builder = "0.23.2"
|
typed-builder = "0.23.2"
|
||||||
url = { version = "2.4.1", features = ["serde"] }
|
url = { version = "2.4.1", features = ["serde"] }
|
||||||
@@ -271,7 +233,9 @@ uuid = { version = "1.4.1", features = ["v4"] }
|
|||||||
visit-rs = "0.1.1"
|
visit-rs = "0.1.1"
|
||||||
x25519-dalek = { version = "2.0.1", features = ["static_secrets"] }
|
x25519-dalek = { version = "2.0.1", features = ["static_secrets"] }
|
||||||
zbus = "5.1.1"
|
zbus = "5.1.1"
|
||||||
hashing-serializer = "0.1.1"
|
|
||||||
|
[dev-dependencies]
|
||||||
|
clap_mangen = "0.2.33"
|
||||||
|
|
||||||
[target.'cfg(target_os = "linux")'.dependencies]
|
[target.'cfg(target_os = "linux")'.dependencies]
|
||||||
procfs = "0.18.0"
|
procfs = "0.18.0"
|
||||||
|
|||||||
@@ -22,9 +22,7 @@ several different names for different behavior:
|
|||||||
- `start-sdk`: This is a CLI tool that aids in building and packaging services
|
- `start-sdk`: This is a CLI tool that aids in building and packaging services
|
||||||
you wish to deploy to StartOS
|
you wish to deploy to StartOS
|
||||||
|
|
||||||
## Questions
|
## Documentation
|
||||||
|
|
||||||
If you have questions about how various pieces of the backend system work. Open
|
- [ARCHITECTURE.md](ARCHITECTURE.md) — Backend architecture, modules, and patterns
|
||||||
an issue and tag the following people
|
- [CONTRIBUTING.md](CONTRIBUTING.md) — How to contribute to core
|
||||||
|
|
||||||
- dr-bonez
|
|
||||||
|
|||||||
@@ -26,7 +26,7 @@ PROFILE=${PROFILE:-release}
|
|||||||
if [ "${PROFILE}" = "release" ]; then
|
if [ "${PROFILE}" = "release" ]; then
|
||||||
BUILD_FLAGS="--release"
|
BUILD_FLAGS="--release"
|
||||||
else
|
else
|
||||||
if [ "$PROFILE" != "debug"]; then
|
if [ "$PROFILE" != "debug" ]; then
|
||||||
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
|
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
|
||||||
PROFILE=debug
|
PROFILE=debug
|
||||||
fi
|
fi
|
||||||
@@ -67,6 +67,10 @@ if [[ "${ENVIRONMENT:-}" =~ (^|-)console($|-) ]]; then
|
|||||||
RUSTFLAGS="--cfg tokio_unstable"
|
RUSTFLAGS="--cfg tokio_unstable"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [[ "${ENVIRONMENT:-}" =~ (^|-)unstable($|-) ]]; then
|
||||||
|
RUSTFLAGS="$RUSTFLAGS -C debuginfo=1"
|
||||||
|
fi
|
||||||
|
|
||||||
echo "FEATURES=\"$FEATURES\""
|
echo "FEATURES=\"$FEATURES\""
|
||||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||||
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin start-cli --target=$TARGET
|
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin start-cli --target=$TARGET
|
||||||
|
|||||||
44 core/build/build-manpage.sh Executable file
@@ -0,0 +1,44 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
cd "$(dirname "${BASH_SOURCE[0]}")"
|
||||||
|
|
||||||
|
source ./builder-alias.sh
|
||||||
|
|
||||||
|
set -ea
|
||||||
|
shopt -s expand_aliases
|
||||||
|
|
||||||
|
PROFILE=${PROFILE:-debug}
|
||||||
|
if [ "${PROFILE}" = "release" ]; then
|
||||||
|
BUILD_FLAGS="--release"
|
||||||
|
else
|
||||||
|
if [ "$PROFILE" != "debug" ]; then
|
||||||
|
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
|
||||||
|
PROFILE=debug
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ -z "$ARCH" ]; then
|
||||||
|
ARCH=$(uname -m)
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [ "$ARCH" = "arm64" ]; then
|
||||||
|
ARCH="aarch64"
|
||||||
|
fi
|
||||||
|
|
||||||
|
RUST_ARCH="$ARCH"
|
||||||
|
if [ "$ARCH" = "riscv64" ]; then
|
||||||
|
RUST_ARCH="riscv64gc"
|
||||||
|
fi
|
||||||
|
|
||||||
|
cd ../..
|
||||||
|
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
|
||||||
|
RUSTFLAGS=""
|
||||||
|
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
|
||||||
|
RUSTFLAGS="--cfg tokio_unstable"
|
||||||
|
fi
|
||||||
|
echo "FEATURES=\"$FEATURES\""
|
||||||
|
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||||
|
rust-zig-builder cargo test --manifest-path=./core/Cargo.toml --lib $BUILD_FLAGS --features test,$FEATURES --locked 'export_manpage_'
|
||||||
|
if [ "$(ls -nd "core/man" | awk '{ print $3 }')" != "$UID" ]; then
|
||||||
|
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID core/man && chown -R $UID:$UID /usr/local/cargo"
|
||||||
|
fi
|
||||||
@@ -38,6 +38,10 @@ if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
|
|||||||
RUSTFLAGS="--cfg tokio_unstable"
|
RUSTFLAGS="--cfg tokio_unstable"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
|
||||||
|
RUSTFLAGS="$RUSTFLAGS -C debuginfo=1"
|
||||||
|
fi
|
||||||
|
|
||||||
echo "FEATURES=\"$FEATURES\""
|
echo "FEATURES=\"$FEATURES\""
|
||||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||||
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin registrybox --target=$RUST_ARCH-unknown-linux-musl
|
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin registrybox --target=$RUST_ARCH-unknown-linux-musl
|
||||||
|
|||||||
@@ -38,6 +38,10 @@ if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
|
|||||||
RUSTFLAGS="--cfg tokio_unstable"
|
RUSTFLAGS="--cfg tokio_unstable"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
|
||||||
|
RUSTFLAGS="$RUSTFLAGS -C debuginfo=1"
|
||||||
|
fi
|
||||||
|
|
||||||
echo "FEATURES=\"$FEATURES\""
|
echo "FEATURES=\"$FEATURES\""
|
||||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||||
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin start-container --target=$RUST_ARCH-unknown-linux-musl
|
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin start-container --target=$RUST_ARCH-unknown-linux-musl
|
||||||
|
|||||||
@@ -38,6 +38,10 @@ if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
|
|||||||
RUSTFLAGS="--cfg tokio_unstable"
|
RUSTFLAGS="--cfg tokio_unstable"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
|
||||||
|
RUSTFLAGS="$RUSTFLAGS -C debuginfo=1"
|
||||||
|
fi
|
||||||
|
|
||||||
echo "FEATURES=\"$FEATURES\""
|
echo "FEATURES=\"$FEATURES\""
|
||||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||||
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin startbox --target=$RUST_ARCH-unknown-linux-musl
|
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin startbox --target=$RUST_ARCH-unknown-linux-musl
|
||||||
|
|||||||
@@ -7,11 +7,11 @@ source ./builder-alias.sh
|
|||||||
set -ea
|
set -ea
|
||||||
shopt -s expand_aliases
|
shopt -s expand_aliases
|
||||||
|
|
||||||
PROFILE=${PROFILE:-release}
|
PROFILE=${PROFILE:-debug}
|
||||||
if [ "${PROFILE}" = "release" ]; then
|
if [ "${PROFILE}" = "release" ]; then
|
||||||
BUILD_FLAGS="--release"
|
BUILD_FLAGS="--release"
|
||||||
else
|
else
|
||||||
if [ "$PROFILE" != "debug"]; then
|
if [ "$PROFILE" != "debug" ]; then
|
||||||
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
|
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
|
||||||
PROFILE=debug
|
PROFILE=debug
|
||||||
fi
|
fi
|
||||||
@@ -38,7 +38,7 @@ if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
|
|||||||
fi
|
fi
|
||||||
echo "FEATURES=\"$FEATURES\""
|
echo "FEATURES=\"$FEATURES\""
|
||||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||||
rust-zig-builder cargo test --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features test,$FEATURES --locked 'export_bindings_'
|
rust-zig-builder cargo test --manifest-path=./core/Cargo.toml --lib $BUILD_FLAGS --features test,$FEATURES --locked 'export_bindings_'
|
||||||
if [ "$(ls -nd "core/bindings" | awk '{ print $3 }')" != "$UID" ]; then
|
if [ "$(ls -nd "core/bindings" | awk '{ print $3 }')" != "$UID" ]; then
|
||||||
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID core/bindings && chown -R $UID:$UID /usr/local/cargo"
|
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID core/bindings && chown -R $UID:$UID /usr/local/cargo"
|
||||||
fi
|
fi
|
||||||
@@ -38,6 +38,10 @@ if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
|
|||||||
RUSTFLAGS="--cfg tokio_unstable"
|
RUSTFLAGS="--cfg tokio_unstable"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
|
||||||
|
RUSTFLAGS="$RUSTFLAGS -C debuginfo=1"
|
||||||
|
fi
|
||||||
|
|
||||||
echo "FEATURES=\"$FEATURES\""
|
echo "FEATURES=\"$FEATURES\""
|
||||||
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
echo "RUSTFLAGS=\"$RUSTFLAGS\""
|
||||||
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin tunnelbox --target=$RUST_ARCH-unknown-linux-musl
|
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin tunnelbox --target=$RUST_ARCH-unknown-linux-musl
|
||||||
|
|||||||
249 core/core-rust-patterns.md Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
# Utilities & Patterns
|
||||||
|
|
||||||
|
This document covers common utilities and patterns used throughout the StartOS codebase.
|
||||||
|
|
||||||
|
## Util Module (`core/src/util/`)
|
||||||
|
|
||||||
|
The `util` module contains reusable utilities. Key submodules:
|
||||||
|
|
||||||
|
| Module | Purpose |
|
||||||
|
|--------|---------|
|
||||||
|
| `actor/` | Actor pattern implementation for concurrent state management |
|
||||||
|
| `collections/` | Custom collection types |
|
||||||
|
| `crypto.rs` | Cryptographic utilities (encryption, hashing) |
|
||||||
|
| `future.rs` | Future/async utilities |
|
||||||
|
| `io.rs` | File I/O helpers (create_file, canonicalize, etc.) |
|
||||||
|
| `iter.rs` | Iterator extensions |
|
||||||
|
| `net.rs` | Network utilities |
|
||||||
|
| `rpc.rs` | RPC helpers |
|
||||||
|
| `rpc_client.rs` | RPC client utilities |
|
||||||
|
| `serde.rs` | Serialization helpers (Base64, display/fromstr, etc.) |
|
||||||
|
| `sync.rs` | Synchronization primitives (SyncMutex, etc.) |
|
||||||
|
|
||||||
|
## Command Invocation (`Invoke` trait)
|
||||||
|
|
||||||
|
The `Invoke` trait provides a clean way to run external commands with error handling:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::Invoke;
|
||||||
|
|
||||||
|
// Simple invocation
|
||||||
|
tokio::process::Command::new("ls")
|
||||||
|
.arg("-la")
|
||||||
|
.invoke(ErrorKind::Filesystem)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// With timeout
|
||||||
|
tokio::process::Command::new("slow-command")
|
||||||
|
.timeout(Some(Duration::from_secs(30)))
|
||||||
|
.invoke(ErrorKind::Timeout)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// With input
|
||||||
|
let mut input = Cursor::new(b"input data");
|
||||||
|
tokio::process::Command::new("cat")
|
||||||
|
.input(Some(&mut input))
|
||||||
|
.invoke(ErrorKind::Filesystem)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
// Piped commands
|
||||||
|
tokio::process::Command::new("cat")
|
||||||
|
.arg("file.txt")
|
||||||
|
.pipe(&mut tokio::process::Command::new("grep").arg("pattern"))
|
||||||
|
.invoke(ErrorKind::Filesystem)
|
||||||
|
.await?;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Guard Pattern
|
||||||
|
|
||||||
|
Guards ensure cleanup happens when they go out of scope.
|
||||||
|
|
||||||
|
### `GeneralGuard` / `GeneralBoxedGuard`
|
||||||
|
|
||||||
|
For arbitrary cleanup actions:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::GeneralGuard;
|
||||||
|
|
||||||
|
let guard = GeneralGuard::new(|| {
|
||||||
|
println!("Cleanup runs on drop");
|
||||||
|
});
|
||||||
|
|
||||||
|
// Do work...
|
||||||
|
|
||||||
|
// Explicit drop with action
|
||||||
|
guard.drop();
|
||||||
|
|
||||||
|
// Or skip the action
|
||||||
|
// guard.drop_without_action();
|
||||||
|
```
|
||||||
|
|
||||||
|
### `FileLock`
|
||||||
|
|
||||||
|
File-based locking with automatic unlock:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::FileLock;
|
||||||
|
|
||||||
|
let lock = FileLock::new("/path/to/lockfile", true).await?; // blocking=true
|
||||||
|
// Lock held until dropped or explicitly unlocked
|
||||||
|
lock.unlock().await?;
|
||||||
|
```
|
||||||
|
|
||||||
|
## Mount Guard Pattern (`core/src/disk/mount/guard.rs`)
|
||||||
|
|
||||||
|
RAII guards for filesystem mounts. Ensures filesystems are unmounted when guards are dropped.
|
||||||
|
|
||||||
|
### `MountGuard`
|
||||||
|
|
||||||
|
Basic mount guard:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::disk::mount::guard::MountGuard;
|
||||||
|
use crate::disk::mount::filesystem::{MountType, ReadOnly};
|
||||||
|
|
||||||
|
let guard = MountGuard::mount(&filesystem, "/mnt/target", ReadOnly).await?;
|
||||||
|
|
||||||
|
// Use the mounted filesystem at guard.path()
|
||||||
|
do_something(guard.path()).await?;
|
||||||
|
|
||||||
|
// Explicit unmount (or auto-unmounts on drop)
|
||||||
|
guard.unmount(false).await?; // false = don't delete mountpoint
|
||||||
|
```
|
||||||
|
|
||||||
|
### `TmpMountGuard`
|
||||||
|
|
||||||
|
Reference-counted temporary mount (mounts to `/media/startos/tmp/`):
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::disk::mount::guard::TmpMountGuard;
|
||||||
|
use crate::disk::mount::filesystem::ReadOnly;
|
||||||
|
|
||||||
|
// Multiple clones share the same mount
|
||||||
|
let guard1 = TmpMountGuard::mount(&filesystem, ReadOnly).await?;
|
||||||
|
let guard2 = guard1.clone();
|
||||||
|
|
||||||
|
// Mount stays alive while any guard exists
|
||||||
|
// Auto-unmounts when last guard is dropped
|
||||||
|
```
|
||||||
|
|
||||||
|
### `GenericMountGuard` trait
|
||||||
|
|
||||||
|
All mount guards implement this trait:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
pub trait GenericMountGuard: std::fmt::Debug + Send + Sync + 'static {
|
||||||
|
fn path(&self) -> &Path;
|
||||||
|
fn unmount(self) -> impl Future<Output = Result<(), Error>> + Send;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### `SubPath`
|
||||||
|
|
||||||
|
Wraps a mount guard to point to a subdirectory:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::disk::mount::guard::SubPath;
|
||||||
|
|
||||||
|
let mount = TmpMountGuard::mount(&filesystem, ReadOnly).await?;
|
||||||
|
let subdir = SubPath::new(mount, "data/subdir");
|
||||||
|
|
||||||
|
// subdir.path() returns the full path including subdirectory
|
||||||
|
```
|
||||||
|
|
||||||
|
## FileSystem Implementations (`core/src/disk/mount/filesystem/`)
|
||||||
|
|
||||||
|
Various filesystem types that can be mounted:
|
||||||
|
|
||||||
|
| Type | Description |
|
||||||
|
|------|-------------|
|
||||||
|
| `bind.rs` | Bind mounts |
|
||||||
|
| `block_dev.rs` | Block device mounts |
|
||||||
|
| `cifs.rs` | CIFS/SMB network shares |
|
||||||
|
| `ecryptfs.rs` | Encrypted filesystem |
|
||||||
|
| `efivarfs.rs` | EFI variables |
|
||||||
|
| `httpdirfs.rs` | HTTP directory as filesystem |
|
||||||
|
| `idmapped.rs` | ID-mapped mounts |
|
||||||
|
| `label.rs` | Mount by label |
|
||||||
|
| `loop_dev.rs` | Loop device mounts |
|
||||||
|
| `overlayfs.rs` | Overlay filesystem |
|
||||||
|
|
||||||
|
## Other Useful Utilities
|
||||||
|
|
||||||
|
### `Apply` / `ApplyRef` traits
|
||||||
|
|
||||||
|
Fluent method chaining:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::Apply;
|
||||||
|
|
||||||
|
let result = some_value
|
||||||
|
.apply(|v| transform(v))
|
||||||
|
.apply(|v| another_transform(v));
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Container<T>`
|
||||||
|
|
||||||
|
Async-safe optional container:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::Container;
|
||||||
|
|
||||||
|
let container = Container::new(None);
|
||||||
|
container.set(value).await;
|
||||||
|
let taken = container.take().await;
|
||||||
|
```
|
||||||
|
|
||||||
|
### `HashWriter<H, W>`
|
||||||
|
|
||||||
|
Write data while computing hash:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::HashWriter;
|
||||||
|
use sha2::Sha256;
|
||||||
|
|
||||||
|
let writer = HashWriter::new(Sha256::new(), file);
|
||||||
|
// Write data...
|
||||||
|
let (hasher, file) = writer.finish();
|
||||||
|
let hash = hasher.finalize();
|
||||||
|
```
|
||||||
|
|
||||||
|
### `Never` type
|
||||||
|
|
||||||
|
Uninhabited type for impossible cases:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::Never;
|
||||||
|
|
||||||
|
fn impossible() -> Never {
|
||||||
|
// This function can never return
|
||||||
|
}
|
||||||
|
|
||||||
|
let never: Never = impossible();
|
||||||
|
never.absurd::<String>() // Can convert to any type
|
||||||
|
```
|
||||||
|
|
||||||
|
### `MaybeOwned<'a, T>`
|
||||||
|
|
||||||
|
Either borrowed or owned data:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::MaybeOwned;
|
||||||
|
|
||||||
|
fn accept_either(data: MaybeOwned<'_, String>) {
|
||||||
|
// Use &*data to access the value
|
||||||
|
}
|
||||||
|
|
||||||
|
accept_either(MaybeOwned::from(&existing_string));
|
||||||
|
accept_either(MaybeOwned::from(owned_string));
|
||||||
|
```
|
||||||
|
|
||||||
|
### `new_guid()`
|
||||||
|
|
||||||
|
Generate a random GUID:
|
||||||
|
|
||||||
|
```rust
|
||||||
|
use crate::util::new_guid;
|
||||||
|
|
||||||
|
let guid = new_guid(); // Returns InternedString
|
||||||
|
```
|
||||||
100 core/i18n-patterns.md Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
# i18n Patterns in `core/`
|
||||||
|
|
||||||
|
## Library & Setup
|
||||||
|
|
||||||
|
**Crate:** [`rust-i18n`](https://crates.io/crates/rust-i18n) v3.1.5 (`core/Cargo.toml`)
|
||||||
|
|
||||||
|
**Initialization** (`core/src/lib.rs:3`):
|
||||||
|
```rust
|
||||||
|
rust_i18n::i18n!("locales", fallback = ["en_US"]);
|
||||||
|
```
|
||||||
|
This macro scans `core/locales/` at compile time and embeds all translations as constants.
|
||||||
|
|
||||||
|
**Prelude re-export** (`core/src/prelude.rs:4`):
|
||||||
|
```rust
|
||||||
|
pub use rust_i18n::t;
|
||||||
|
```
|
||||||
|
Most modules import `t!` via the prelude.
|
||||||
|
|
||||||
|
## Translation File
|
||||||
|
|
||||||
|
**Location:** `core/locales/i18n.yaml`
|
||||||
|
**Format:** YAML v2 (~755 keys)
|
||||||
|
|
||||||
|
**Supported languages:** `en_US`, `de_DE`, `es_ES`, `fr_FR`, `pl_PL`
|
||||||
|
|
||||||
|
**Entry structure:**
|
||||||
|
```yaml
|
||||||
|
namespace.sub.key-name:
|
||||||
|
en_US: "English text with %{param}"
|
||||||
|
de_DE: "German text with %{param}"
|
||||||
|
# ...
|
||||||
|
```
|
||||||
|
|
||||||
|
## Using `t!()`
|
||||||
|
|
||||||
|
```rust
|
||||||
|
// Simple key
|
||||||
|
t!("error.unknown")
|
||||||
|
|
||||||
|
// With parameter interpolation (%{name} in YAML)
|
||||||
|
t!("bins.deprecated.renamed", old = old_name, new = new_name)
|
||||||
|
```
|
||||||
|
|
||||||
|
## Key Naming Conventions
|
||||||
|
|
||||||
|
Keys use **dot-separated hierarchical namespaces** with **kebab-case** for multi-word segments:
|
||||||
|
|
||||||
|
```
|
||||||
|
<module>.<submodule>.<descriptive-name>
|
||||||
|
```
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
- `error.incorrect-password` — error kind label
|
||||||
|
- `bins.start-init.updating-firmware` — startup phase message
|
||||||
|
- `backup.bulk.complete-title` — backup notification title
|
||||||
|
- `help.arg.acme-contact` — CLI help text for an argument
|
||||||
|
- `context.diagnostic.starting-diagnostic-ui` — diagnostic context status
|
||||||
|
|
||||||
|
### Top-Level Namespaces
|
||||||
|
|
||||||
|
| Namespace | Purpose |
|
||||||
|
|-----------|---------|
|
||||||
|
| `error.*` | `ErrorKind` display strings (see `src/error.rs`) |
|
||||||
|
| `bins.*` | CLI binary messages (deprecated, start-init, startd, etc.) |
|
||||||
|
| `init.*` | Initialization phase labels |
|
||||||
|
| `setup.*` | First-run setup messages |
|
||||||
|
| `context.*` | Context startup messages (diagnostic, setup, CLI) |
|
||||||
|
| `service.*` | Service lifecycle messages |
|
||||||
|
| `backup.*` | Backup/restore operation messages |
|
||||||
|
| `registry.*` | Package registry messages |
|
||||||
|
| `net.*` | Network-related messages |
|
||||||
|
| `middleware.*` | Request middleware messages (auth, etc.) |
|
||||||
|
| `disk.*` | Disk operation messages |
|
||||||
|
| `lxc.*` | Container management messages |
|
||||||
|
| `system.*` | System monitoring/metrics messages |
|
||||||
|
| `notifications.*` | User-facing notification messages |
|
||||||
|
| `update.*` | OS update messages |
|
||||||
|
| `util.*` | Utility messages (TUI, RPC) |
|
||||||
|
| `ssh.*` | SSH operation messages |
|
||||||
|
| `shutdown.*` | Shutdown-related messages |
|
||||||
|
| `logs.*` | Log-related messages |
|
||||||
|
| `auth.*` | Authentication messages |
|
||||||
|
| `help.*` | CLI help text (`help.arg.<arg-name>`) |
|
||||||
|
| `about.*` | CLI command descriptions |
|
||||||
|
|
||||||
|
## Locale Selection
|
||||||
|
|
||||||
|
`core/src/bins/mod.rs:15-36` — `set_locale_from_env()`:
|
||||||
|
|
||||||
|
1. Reads `LANG` environment variable
|
||||||
|
2. Strips `.UTF-8` suffix
|
||||||
|
3. Exact-matches against available locales, falls back to language-prefix match (e.g. `en_GB` matches `en_US`)
|
||||||
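An approximate sketch of that logic (the real implementation lives at the path above; `available_locales!()` and `set_locale` are the rust-i18n entry points assumed here):

```rust
// Sketch of set_locale_from_env(); see core/src/bins/mod.rs for the real code.
pub fn set_locale_from_env() {
    let Ok(lang) = std::env::var("LANG") else { return };
    let lang = lang.trim_end_matches(".UTF-8"); // "en_GB.UTF-8" -> "en_GB"
    let available = rust_i18n::available_locales!(); // embedded at compile time

    let exact = available.iter().copied().find(|l| *l == lang);
    let by_prefix = || {
        let prefix = lang.split('_').next().unwrap_or(lang); // "en_GB" -> "en"
        available.iter().copied().find(|l| l.starts_with(prefix))
    };
    if let Some(locale) = exact.or_else(by_prefix) {
        rust_i18n::set_locale(locale);
    }
}
```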
|
|
||||||
|
## Adding New Keys
|
||||||
|
|
||||||
|
1. Add the key to `core/locales/i18n.yaml` with all 5 language translations
|
||||||
|
2. Use the `t!("your.key.name")` macro in Rust code
|
||||||
|
3. Follow existing namespace conventions — match the module path where the key is used
|
||||||
|
4. Use kebab-case for multi-word segments
|
||||||
|
5. Translations are validated at compile time
|
||||||
5578 core/locales/i18n.yaml Normal file
13 core/man/start-cli/start-cli-auth-get-pubkey.1 Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
.ie \n(.g .ds Aq \(aq
|
||||||
|
.el .ds Aq '
|
||||||
|
.TH start-cli-auth-get-pubkey 1 "get-pubkey "
|
||||||
|
.SH NAME
|
||||||
|
start\-cli\-auth\-get\-pubkey \- Get the public key from the server
|
||||||
|
.SH SYNOPSIS
|
||||||
|
\fBstart\-cli auth get\-pubkey\fR [\fB\-h\fR|\fB\-\-help\fR]
|
||||||
|
.SH DESCRIPTION
|
||||||
|
Get the public key from the server
|
||||||
|
.SH OPTIONS
|
||||||
|
.TP
|
||||||
|
\fB\-h\fR, \fB\-\-help\fR
|
||||||
|
Print help
|
||||||
13 core/man/start-cli/start-cli-auth-login.1 Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
.ie \n(.g .ds Aq \(aq
|
||||||
|
.el .ds Aq '
|
||||||
|
.TH start-cli-auth-login 1 "login "
|
||||||
|
.SH NAME
|
||||||
|
start\-cli\-auth\-login \- Login to a new auth session
|
||||||
|
.SH SYNOPSIS
|
||||||
|
\fBstart\-cli auth login\fR [\fB\-h\fR|\fB\-\-help\fR]
|
||||||
|
.SH DESCRIPTION
|
||||||
|
Login to a new auth session
|
||||||
|
.SH OPTIONS
|
||||||
|
.TP
|
||||||
|
\fB\-h\fR, \fB\-\-help\fR
|
||||||
|
Print help
|
||||||
16 core/man/start-cli/start-cli-auth-logout.1 Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
.ie \n(.g .ds Aq \(aq
|
||||||
|
.el .ds Aq '
|
||||||
|
.TH start-cli-auth-logout 1 "logout "
|
||||||
|
.SH NAME
|
||||||
|
start\-cli\-auth\-logout \- Logout from current auth session
|
||||||
|
.SH SYNOPSIS
|
||||||
|
\fBstart\-cli auth logout\fR [\fB\-h\fR|\fB\-\-help\fR] <\fISESSION\fR>
|
||||||
|
.SH DESCRIPTION
|
||||||
|
Logout from current auth session
|
||||||
|
.SH OPTIONS
|
||||||
|
.TP
|
||||||
|
\fB\-h\fR, \fB\-\-help\fR
|
||||||
|
Print help
|
||||||
|
.TP
|
||||||
|
<\fISESSION\fR>
|
||||||
|
|
||||||
13 core/man/start-cli/start-cli-auth-reset-password.1 Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
.ie \n(.g .ds Aq \(aq
|
||||||
|
.el .ds Aq '
|
||||||
|
.TH start-cli-auth-reset-password 1 "reset-password "
|
||||||
|
.SH NAME
|
||||||
|
start\-cli\-auth\-reset\-password \- Reset the password
|
||||||
|
.SH SYNOPSIS
|
||||||
|
\fBstart\-cli auth reset\-password\fR [\fB\-h\fR|\fB\-\-help\fR]
|
||||||
|
.SH DESCRIPTION
|
||||||
|
Reset the password
|
||||||
|
.SH OPTIONS
|
||||||
|
.TP
|
||||||
|
\fB\-h\fR, \fB\-\-help\fR
|
||||||
|
Print help
|
||||||