Compare commits

..

82 Commits

Author SHA1 Message Date
Aiden McClelland
2191707b94 wip: iroh 2025-08-31 15:43:34 -06:00
Aiden McClelland
63a4bba19a fix gha sccache 2025-08-29 13:32:12 -06:00
Aiden McClelland
d64b80987c support for sccache 2025-08-29 13:07:29 -06:00
Aiden McClelland
fbea3c56e6 clean up logs 2025-08-29 11:59:36 -06:00
Aiden McClelland
58b6b5c4ea Merge branch 'feature/proxies' of github.com:Start9Labs/start-os into feature/proxies 2025-08-29 11:48:31 -06:00
Aiden McClelland
369e559518 fix file_stream and remove non-terminating test 2025-08-29 11:48:30 -06:00
Alex Inkin
ca39ffb9eb refactor: fix multiple comments (#3013)
* refactor: fix multiple comments

* styling changes, add documentation to sidebar

* translations for dns page

* refactor: subtle colors

* rearrange service page

---------

Co-authored-by: Matt Hill <mattnine@protonmail.com>
2025-08-29 11:37:34 -06:00
Aiden McClelland
8163db7ac3 socks5 proxy working 2025-08-29 11:19:30 -06:00
Aiden McClelland
b3b031ed47 wip: debugging tor 2025-08-27 15:10:54 -06:00
Matt Hill
c5fa09c4d4 handle wh file uploads 2025-08-27 10:06:39 -06:00
Alex Inkin
b7438ef155 refactor: refactor forms components and remove legacy Taiga UI package (#3012) 2025-08-27 09:57:49 -06:00
Matt Hill
2a27716e29 remove unnecessary truthy check 2025-08-26 22:16:57 -06:00
Matt Hill
7a94086d45 move status column in service list 2025-08-26 13:59:16 -06:00
Matt Hill
ec72fb4bfd fix showing dns records 2025-08-26 13:08:24 -06:00
Matt Hill
9eaaa85625 implement toggling gateways for service interface 2025-08-26 12:29:14 -06:00
Aiden McClelland
f876cd796e Merge branch 'feature/proxies' of github.com:Start9Labs/start-os into feature/proxies 2025-08-26 12:13:41 -06:00
Aiden McClelland
9fe9608560 misc fixes 2025-08-26 12:13:39 -06:00
Matt Hill
303f6a55ac Merge branch 'feature/proxies' of github.com:Start9Labs/start-os into feature/proxies 2025-08-26 11:37:12 -06:00
Aiden McClelland
ff686d3c52 Merge branch 'feature/proxies' of github.com:Start9Labs/start-os into feature/proxies 2025-08-25 19:29:52 -06:00
Aiden McClelland
f4cf94acd2 fix dns 2025-08-25 19:29:39 -06:00
Matt Hill
0709a5c242 reason instead of description 2025-08-24 10:24:48 -06:00
Matt Hill
701db35ca3 remove logs 2025-08-24 09:41:58 -06:00
Matt Hill
57bdc400b4 honor hidden form values 2025-08-24 09:40:24 -06:00
Matt Hill
611e19da26 placeholder for empty service interfaces table 2025-08-24 08:54:44 -06:00
Matt Hill
0e9b9fce3e simple renaming 2025-08-24 08:46:12 -06:00
Aiden McClelland
d6d91822cc coukd work 2025-08-22 08:53:38 -06:00
Aiden McClelland
5bee2cef96 fix deadlock 2025-08-21 18:40:53 -06:00
Aiden McClelland
359146f02c wip 2025-08-20 14:46:15 -06:00
Matt Hill
d564471825 more translations 2025-08-20 11:45:17 -06:00
Alex Inkin
931505ff08 fix: refactor legacy components (#3010)
* fix: comments

* fix: refactor legacy components

* remove default again

---------

Co-authored-by: Matt Hill <mattnine@protonmail.com>
2025-08-19 08:13:36 -06:00
Alex Inkin
0709ea65d7 fix: comments (#3009)
* fix: comments

* undo default

---------

Co-authored-by: Matt Hill <mattnine@protonmail.com>
2025-08-19 08:10:14 -06:00
Aiden McClelland
75a20ae5c5 it builds 2025-08-18 18:12:03 -06:00
Matt Hill
aaf2361909 add missing translations 2025-08-18 15:16:43 -06:00
Matt Hill
17c4f3a1e8 fix dns form 2025-08-17 09:01:09 -06:00
Matt Hill
a0a2c20b08 fix all types 2025-08-16 23:14:19 -06:00
Aiden McClelland
f7f0b7dc1a revert to ts-rs v9 2025-08-16 22:33:53 -06:00
Aiden McClelland
d06c443c7d clean up tech debt, bump dependencies 2025-08-15 18:32:27 -06:00
Aiden McClelland
7094d1d939 update types 2025-08-15 18:05:52 -06:00
Aiden McClelland
8f573386c6 with todos 2025-08-15 16:07:23 -06:00
Matt Hill
bfc88a2225 fix sort functions for public and private domains 2025-08-13 14:28:53 -06:00
Matt Hill
d5bb537368 dns 2025-08-13 13:27:05 -06:00
Matt Hill
3abae65b22 better icon for restart tor 2025-08-13 10:54:07 -06:00
Matt Hill
3848e8f2df restart tor instead of reset 2025-08-13 10:53:46 -06:00
Matt Hill
63323faa97 nix StartOS domains, implement public and private domains at interface scope 2025-08-11 23:01:31 -06:00
Matt Hill
e8b7a35d43 public domain, max width, descriptions for dns 2025-08-11 10:03:35 -06:00
waterplea
da9a1b99d9 fix: dns testing 2025-08-11 13:50:58 +07:00
Matt Hill
68780ccbdd forms for adding domain, rework things based on new ideas 2025-08-10 23:33:05 -06:00
Aiden McClelland
022f7134be wip: start-tunnel & fix build 2025-08-09 21:57:32 -06:00
Matt Hill
b4491a3f39 only translations left 2025-08-09 09:29:47 -06:00
waterplea
29ddfad9d7 fix: address comments 2025-08-09 17:45:31 +07:00
Matt Hill
86a24ec067 domains preferred 2025-08-08 21:00:32 -06:00
Matt Hill
35ace3997b MVP of service interface page 2025-08-08 20:57:16 -06:00
Aiden McClelland
4f24658d33 fix unnecessary export 2025-08-08 11:12:11 -06:00
Aiden McClelland
3a84cc97fe comments 2025-08-07 17:21:09 -06:00
Aiden McClelland
3845550e90 best address logic 2025-08-07 17:15:23 -06:00
Matt Hill
4d5ff1a97b start sorting addresses 2025-08-07 13:47:27 -06:00
Matt Hill
b864816033 better placeholder for no addresses 2025-08-07 09:08:41 -06:00
Matt Hill
2762076683 minor 2025-08-07 09:03:54 -06:00
Matt Hill
8796e41ea0 merge 2025-08-07 08:18:47 -06:00
waterplea
8edb7429f5 refactor: styles for interfaces page 2025-08-07 18:53:35 +07:00
Matt Hill
5109efcee2 different options for clearnet domains 2025-08-06 18:45:41 -06:00
Matt Hill
177232ab28 start service interface page, WIP 2025-08-06 17:55:21 -06:00
Aiden McClelland
d6dfaf8feb domains api + migration 2025-08-06 14:29:35 -06:00
Aiden McClelland
ea12251a7e add ip util to sdk 2025-08-06 11:14:41 -06:00
waterplea
b35a89da29 refactor: add file control to form service 2025-08-06 19:07:21 +07:00
Matt Hill
d8d1009417 domains mostly finished 2025-08-05 17:29:48 -06:00
Aiden McClelland
3835562200 fix fe types 2025-08-05 17:14:17 -06:00
Aiden McClelland
0d227e62dc Merge branch 'feature/proxies' of github.com:Start9Labs/start-os into feature/proxies 2025-08-05 17:07:27 -06:00
Aiden McClelland
10af26116d refactor public/private gateways 2025-08-05 17:07:25 -06:00
Matt Hill
f8b03ea917 certificate authorities 2025-08-05 13:03:04 -06:00
Matt Hill
4a2777c52f domains and acme refactor 2025-08-05 09:29:04 -06:00
waterplea
86dbf26253 refactor: gateways page 2025-08-05 17:39:48 +07:00
waterplea
32999fc55f refactor: domains page 2025-08-04 19:34:57 +07:00
Matt Hill
ea2b1f5920 edit instead of chnage acme and change gateway 2025-08-01 22:59:10 -06:00
Matt Hill
716ed64aa8 show and test dns 2025-07-31 19:57:04 -06:00
Matt Hill
f23659f4ea dont show hidden actions 2025-07-31 13:42:43 -06:00
Matt Hill
daf584b33e add domains and gateways, remove routers, fix docs links 2025-07-30 15:33:13 -06:00
Aiden McClelland
e6b7390a61 wip start-tunneld 2025-07-24 18:33:55 -06:00
Aiden McClelland
84f554269f proxy -> tunnel, implement backend apis 2025-07-23 15:44:57 -06:00
Matt Hill
21adce5c5d fix file type 2025-07-22 17:07:20 -06:00
Aiden McClelland
d3e7e37f59 backend changes 2025-07-22 16:48:16 -06:00
Matt Hill
4d9709eb1c add support for inbound proxies 2025-07-22 16:40:31 -06:00
511 changed files with 15800 additions and 25269 deletions

View File

@@ -1,118 +0,0 @@
name: start-cli
on:
workflow_call:
workflow_dispatch:
inputs:
environment:
type: choice
description: Environment
options:
- NONE
- dev
- unstable
- dev-unstable
runner:
type: choice
description: Runner
options:
- standard
- fast
arch:
type: choice
description: Architecture
options:
- ALL
- x86_64
- x86_64-apple
- aarch64
- aarch64-apple
- riscv64
push:
branches:
- master
- next/*
pull_request:
branches:
- master
- next/*
env:
NODEJS_VERSION: "24.11.0"
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
jobs:
compile:
name: Build Debian Package
strategy:
fail-fast: true
matrix:
triple: >-
${{
fromJson('{
"x86_64": ["x86_64-unknown-linux-musl"],
"x86_64-apple": ["x86_64-apple-darwin"],
"aarch64": ["aarch64-unknown-linux-musl"],
"aarch64-apple": ["aarch64-apple-darwin"],
"riscv64": ["riscv64gc-unknown-linux-musl"],
"ALL": ["x86_64-unknown-linux-musl", "x86_64-apple-darwin", "aarch64-unknown-linux-musl", "aarch64-apple-darwin", "riscv64gc-unknown-linux-musl"]
}')[github.event.inputs.arch || 'ALL']
}}
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps:
- name: Cleaning up unnecessary files
run: |
sudo apt-get remove --purge -y mono-* \
ghc* cabal-install* \
dotnet* \
php* \
ruby* \
mysql-* \
postgresql-* \
azure-cli \
powershell \
google-cloud-sdk \
msodbcsql* mssql-tools* \
imagemagick* \
libgl1-mesa-dri \
google-chrome-stable \
firefox
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }}
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make
run: TARGET=${{ matrix.triple }} make cli
env:
PLATFORM: ${{ matrix.arch }}
SCCACHE_GHA_ENABLED: on
SCCACHE_GHA_VERSION: 0
- uses: actions/upload-artifact@v4
with:
name: start-cli_${{ matrix.triple }}
path: core/target/${{ matrix.triple }}/release/start-cli

View File

@@ -1,203 +0,0 @@
name: Start-Registry
on:
workflow_call:
workflow_dispatch:
inputs:
environment:
type: choice
description: Environment
options:
- NONE
- dev
- unstable
- dev-unstable
runner:
type: choice
description: Runner
options:
- standard
- fast
arch:
type: choice
description: Architecture
options:
- ALL
- x86_64
- aarch64
- riscv64
push:
branches:
- master
- next/*
pull_request:
branches:
- master
- next/*
env:
NODEJS_VERSION: "24.11.0"
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
jobs:
compile:
name: Build Debian Package
strategy:
fail-fast: true
matrix:
arch: >-
${{
fromJson('{
"x86_64": ["x86_64"],
"aarch64": ["aarch64"],
"riscv64": ["riscv64"],
"ALL": ["x86_64", "aarch64", "riscv64"]
}')[github.event.inputs.arch || 'ALL']
}}
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps:
- name: Cleaning up unnecessary files
run: |
sudo apt-get remove --purge -y mono-* \
ghc* cabal-install* \
dotnet* \
php* \
ruby* \
mysql-* \
postgresql-* \
azure-cli \
powershell \
google-cloud-sdk \
msodbcsql* mssql-tools* \
imagemagick* \
libgl1-mesa-dri \
google-chrome-stable \
firefox
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }}
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make
run: make registry-deb
env:
PLATFORM: ${{ matrix.arch }}
SCCACHE_GHA_ENABLED: on
SCCACHE_GHA_VERSION: 0
- uses: actions/upload-artifact@v4
with:
name: start-registry_${{ matrix.arch }}.deb
path: results/start-registry-*_${{ matrix.arch }}.deb
create-image:
name: Create Docker Image
needs: [compile]
permissions:
contents: read
packages: write
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps:
- name: Cleaning up unnecessary files
run: |
sudo apt-get remove --purge -y google-chrome-stable firefox mono-devel
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: "Login to GitHub Container Registry"
uses: docker/login-action@v3
with:
registry: ghcr.io
username: ${{github.actor}}
password: ${{secrets.GITHUB_TOKEN}}
- name: Docker meta
id: meta
uses: docker/metadata-action@v5
with:
images: ghcr.io/Start9Labs/startos-registry
tags: |
type=raw,value=${{ github.ref_name }}
- name: Download debian package
uses: actions/download-artifact@v4
with:
pattern: start-registry_*.deb
- name: Map matrix.arch to docker platform
run: |
platforms=""
for deb in *.deb; do
filename=$(basename "$deb" .deb)
arch="${filename#*_}"
case "$arch" in
x86_64)
platform="linux/amd64"
;;
aarch64)
platform="linux/arm64"
;;
riscv64)
platform="linux/riscv64"
;;
*)
echo "Unknown architecture: $arch" >&2
exit 1
;;
esac
if [ -z "$platforms" ]; then
platforms="$platform"
else
platforms="$platforms,$platform"
fi
done
echo "DOCKER_PLATFORM=$platforms" >> "$GITHUB_ENV"
- run: |
cat | docker buildx build --platform "$DOCKER_PLATFORM" --push -t ${{ steps.meta.outputs.tags }} -f - . << 'EOF'
FROM debian:trixie
ADD *.deb .
RUN apt-get install -y ./*_$(uname -m).deb && rm *.deb
VOLUME /var/lib/startos
ENV RUST_LOG=startos=debug
ENTRYPOINT ["start-registryd"]
EOF

View File

@@ -1,114 +0,0 @@
name: Start-Tunnel
on:
workflow_call:
workflow_dispatch:
inputs:
environment:
type: choice
description: Environment
options:
- NONE
- dev
- unstable
- dev-unstable
runner:
type: choice
description: Runner
options:
- standard
- fast
arch:
type: choice
description: Architecture
options:
- ALL
- x86_64
- aarch64
- riscv64
push:
branches:
- master
- next/*
pull_request:
branches:
- master
- next/*
env:
NODEJS_VERSION: "24.11.0"
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
jobs:
compile:
name: Build Debian Package
strategy:
fail-fast: true
matrix:
arch: >-
${{
fromJson('{
"x86_64": ["x86_64"],
"aarch64": ["aarch64"],
"riscv64": ["riscv64"],
"ALL": ["x86_64", "aarch64", "riscv64"]
}')[github.event.inputs.arch || 'ALL']
}}
runs-on: ${{ fromJson('["ubuntu-latest", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
steps:
- name: Cleaning up unnecessary files
run: |
sudo apt-get remove --purge -y mono-* \
ghc* cabal-install* \
dotnet* \
php* \
ruby* \
mysql-* \
postgresql-* \
azure-cli \
powershell \
google-cloud-sdk \
msodbcsql* mssql-tools* \
imagemagick* \
libgl1-mesa-dri \
google-chrome-stable \
firefox
sudo apt-get autoremove -y
sudo apt-get clean
- run: |
sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }}
- uses: actions/checkout@v4
with:
submodules: recursive
- uses: actions/setup-node@v4
with:
node-version: ${{ env.NODEJS_VERSION }}
- name: Set up docker QEMU
uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Configure sccache
uses: actions/github-script@v7
with:
script: |
core.exportVariable('ACTIONS_RESULTS_URL', process.env.ACTIONS_RESULTS_URL || '');
core.exportVariable('ACTIONS_RUNTIME_TOKEN', process.env.ACTIONS_RUNTIME_TOKEN || '');
- name: Make
run: make tunnel-deb
env:
PLATFORM: ${{ matrix.arch }}
SCCACHE_GHA_ENABLED: on
SCCACHE_GHA_VERSION: 0
- uses: actions/upload-artifact@v4
with:
name: start-tunnel_${{ matrix.arch }}.deb
path: results/start-tunnel-*_${{ matrix.arch }}.deb

View File

@@ -28,7 +28,6 @@ on:
- aarch64 - aarch64
- aarch64-nonfree - aarch64-nonfree
- raspberrypi - raspberrypi
- riscv64
deploy: deploy:
type: choice type: choice
description: Deploy description: Deploy
@@ -46,7 +45,7 @@ on:
- next/* - next/*
env: env:
NODEJS_VERSION: "24.11.0" NODEJS_VERSION: "22.17.1"
ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}' ENVIRONMENT: '${{ fromJson(format(''["{0}", ""]'', github.event.inputs.environment || ''dev''))[github.event.inputs.environment == ''NONE''] }}'
jobs: jobs:
@@ -63,48 +62,11 @@ jobs:
"aarch64": ["aarch64"], "aarch64": ["aarch64"],
"aarch64-nonfree": ["aarch64"], "aarch64-nonfree": ["aarch64"],
"raspberrypi": ["aarch64"], "raspberrypi": ["aarch64"],
"riscv64": ["riscv64"], "ALL": ["x86_64", "aarch64"]
"ALL": ["x86_64", "aarch64", "riscv64"]
}')[github.event.inputs.platform || 'ALL'] }')[github.event.inputs.platform || 'ALL']
}} }}
runs-on: >- runs-on: ${{ fromJson('["ubuntu-22.04", "buildjet-32vcpu-ubuntu-2204"]')[github.event.inputs.runner == 'fast'] }}
${{
fromJson(
format(
'["{0}", "{1}"]',
fromJson('{
"x86_64": "ubuntu-latest",
"aarch64": "ubuntu-24.04-arm",
"riscv64": "ubuntu-latest"
}')[matrix.arch],
fromJson('{
"x86_64": "buildjet-32vcpu-ubuntu-2204",
"aarch64": "buildjet-32vcpu-ubuntu-2204-arm",
"riscv64": "buildjet-32vcpu-ubuntu-2204"
}')[matrix.arch]
)
)[github.event.inputs.runner == 'fast']
}}
steps: steps:
- name: Cleaning up unnecessary files
run: |
sudo apt-get remove --purge -y azure-cli || true
sudo apt-get remove --purge -y firefox || true
sudo apt-get remove --purge -y ghc-* || true
sudo apt-get remove --purge -y google-cloud-sdk || true
sudo apt-get remove --purge -y google-chrome-stable || true
sudo apt-get remove --purge -y powershell || true
sudo apt-get remove --purge -y php* || true
sudo apt-get remove --purge -y ruby* || true
sudo apt-get remove --purge -y mono-* || true
sudo apt-get autoremove -y
sudo apt-get clean
sudo rm -rf /usr/lib/jvm # All JDKs
sudo rm -rf /usr/local/.ghcup # Haskell toolchain
sudo rm -rf /usr/local/lib/android # Android SDK/NDK, emulator
sudo rm -rf /usr/share/dotnet # .NET SDKs
sudo rm -rf /usr/share/swift # Swift toolchain (if present)
sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
- run: | - run: |
sudo mount -t tmpfs tmpfs . sudo mount -t tmpfs tmpfs .
if: ${{ github.event.inputs.runner == 'fast' }} if: ${{ github.event.inputs.runner == 'fast' }}
@@ -160,7 +122,7 @@ jobs:
format( format(
'[ '[
["{0}"], ["{0}"],
["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "riscv64", "raspberrypi"] ["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "raspberrypi"]
]', ]',
github.event.inputs.platform || 'ALL' github.event.inputs.platform || 'ALL'
) )
@@ -170,22 +132,13 @@ jobs:
${{ ${{
fromJson( fromJson(
format( format(
'["{0}", "{1}"]', '["ubuntu-22.04", "{0}"]',
fromJson('{
"x86_64": "ubuntu-latest",
"x86_64-nonfree": "ubuntu-latest",
"aarch64": "ubuntu-24.04-arm",
"aarch64-nonfree": "ubuntu-24.04-arm",
"raspberrypi": "ubuntu-24.04-arm",
"riscv64": "ubuntu-24.04-arm",
}')[matrix.platform],
fromJson('{ fromJson('{
"x86_64": "buildjet-8vcpu-ubuntu-2204", "x86_64": "buildjet-8vcpu-ubuntu-2204",
"x86_64-nonfree": "buildjet-8vcpu-ubuntu-2204", "x86_64-nonfree": "buildjet-8vcpu-ubuntu-2204",
"aarch64": "buildjet-8vcpu-ubuntu-2204-arm", "aarch64": "buildjet-8vcpu-ubuntu-2204-arm",
"aarch64-nonfree": "buildjet-8vcpu-ubuntu-2204-arm", "aarch64-nonfree": "buildjet-8vcpu-ubuntu-2204-arm",
"raspberrypi": "buildjet-8vcpu-ubuntu-2204-arm", "raspberrypi": "buildjet-8vcpu-ubuntu-2204-arm",
"riscv64": "buildjet-8vcpu-ubuntu-2204",
}')[matrix.platform] }')[matrix.platform]
) )
)[github.event.inputs.runner == 'fast'] )[github.event.inputs.runner == 'fast']
@@ -199,29 +152,11 @@ jobs:
"aarch64": "aarch64", "aarch64": "aarch64",
"aarch64-nonfree": "aarch64", "aarch64-nonfree": "aarch64",
"raspberrypi": "aarch64", "raspberrypi": "aarch64",
"riscv64": "riscv64",
}')[matrix.platform] }')[matrix.platform]
}} }}
steps: steps:
- name: Free space - name: Free space
run: | run: rm -rf /opt/hostedtoolcache*
sudo apt-get remove --purge -y azure-cli || true
sudo apt-get remove --purge -y firefox || true
sudo apt-get remove --purge -y ghc-* || true
sudo apt-get remove --purge -y google-cloud-sdk || true
sudo apt-get remove --purge -y google-chrome-stable || true
sudo apt-get remove --purge -y powershell || true
sudo apt-get remove --purge -y php* || true
sudo apt-get remove --purge -y ruby* || true
sudo apt-get remove --purge -y mono-* || true
sudo apt-get autoremove -y
sudo apt-get clean
sudo rm -rf /usr/lib/jvm # All JDKs
sudo rm -rf /usr/local/.ghcup # Haskell toolchain
sudo rm -rf /usr/local/lib/android # Android SDK/NDK, emulator
sudo rm -rf /usr/share/dotnet # .NET SDKs
sudo rm -rf /usr/share/swift # Swift toolchain (if present)
sudo rm -rf "$AGENT_TOOLSDIRECTORY" # Pre-cached tool cache (Go, Node, etc.)
if: ${{ github.event.inputs.runner != 'fast' }} if: ${{ github.event.inputs.runner != 'fast' }}
- uses: actions/checkout@v4 - uses: actions/checkout@v4
@@ -328,7 +263,7 @@ jobs:
index: index:
if: ${{ github.event.inputs.deploy != '' && github.event.inputs.deploy != 'NONE' }} if: ${{ github.event.inputs.deploy != '' && github.event.inputs.deploy != 'NONE' }}
needs: [image] needs: [image]
runs-on: ubuntu-latest runs-on: ubuntu-22.04
steps: steps:
- run: >- - run: >-
curl "https://${{ curl "https://${{

View File

@@ -11,13 +11,13 @@ on:
- next/* - next/*
env: env:
NODEJS_VERSION: "24.11.0" NODEJS_VERSION: "22.17.1"
ENVIRONMENT: dev-unstable ENVIRONMENT: dev-unstable
jobs: jobs:
test: test:
name: Run Automated Tests name: Run Automated Tests
runs-on: ubuntu-latest runs-on: ubuntu-22.04
steps: steps:
- uses: actions/checkout@v4 - uses: actions/checkout@v4
with: with:

View File

@@ -25,9 +25,9 @@ docker buildx create --use
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # proceed with default installation curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # proceed with default installation
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash
source ~/.bashrc source ~/.bashrc
nvm install 24 nvm install 22
nvm use 24 nvm use 22
nvm alias default 24 # this prevents your machine from reverting back to another version nvm alias default 22 # this prevents your machine from reverting back to another version
``` ```
## Cloning the repository ## Cloning the repository

159
Makefile
View File

@@ -5,17 +5,15 @@ PLATFORM_FILE := $(shell ./check-platform.sh)
ENVIRONMENT_FILE := $(shell ./check-environment.sh) ENVIRONMENT_FILE := $(shell ./check-environment.sh)
GIT_HASH_FILE := $(shell ./check-git-hash.sh) GIT_HASH_FILE := $(shell ./check-git-hash.sh)
VERSION_FILE := $(shell ./check-version.sh) VERSION_FILE := $(shell ./check-version.sh)
BASENAME := $(shell PROJECT=startos ./basename.sh) BASENAME := $(shell ./basename.sh)
PLATFORM := $(shell if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi) PLATFORM := $(shell if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi)
ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi) ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi)
RUST_ARCH := $(shell if [ "$(ARCH)" = "riscv64" ]; then echo riscv64gc; else echo $(ARCH); fi)
REGISTRY_BASENAME := $(shell PROJECT=start-registry PLATFORM=$(ARCH) ./basename.sh)
TUNNEL_BASENAME := $(shell PROJECT=start-tunnel PLATFORM=$(ARCH) ./basename.sh)
IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi) IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi)
WEB_UIS := web/dist/raw/ui/index.html web/dist/raw/setup-wizard/index.html web/dist/raw/install-wizard/index.html WEB_UIS := web/dist/raw/ui/index.html web/dist/raw/setup-wizard/index.html web/dist/raw/install-wizard/index.html
COMPRESSED_WEB_UIS := web/dist/static/ui/index.html web/dist/static/setup-wizard/index.html web/dist/static/install-wizard/index.html COMPRESSED_WEB_UIS := web/dist/static/ui/index.html web/dist/static/setup-wizard/index.html web/dist/static/install-wizard/index.html
FIRMWARE_ROMS := ./firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json) FIRMWARE_ROMS := ./firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json)
BUILD_SRC := $(call ls-files, build) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS) BUILD_SRC := $(call ls-files, build) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS)
DEBIAN_SRC := $(call ls-files, debian/)
IMAGE_RECIPE_SRC := $(call ls-files, image-recipe/) IMAGE_RECIPE_SRC := $(call ls-files, image-recipe/)
STARTD_SRC := core/startos/startd.service $(BUILD_SRC) STARTD_SRC := core/startos/startd.service $(BUILD_SRC)
CORE_SRC := $(call ls-files, core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE) CORE_SRC := $(call ls-files, core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE)
@@ -23,23 +21,19 @@ WEB_SHARED_SRC := $(call ls-files, web/projects/shared) $(call ls-files, web/pro
WEB_UI_SRC := $(call ls-files, web/projects/ui) WEB_UI_SRC := $(call ls-files, web/projects/ui)
WEB_SETUP_WIZARD_SRC := $(call ls-files, web/projects/setup-wizard) WEB_SETUP_WIZARD_SRC := $(call ls-files, web/projects/setup-wizard)
WEB_INSTALL_WIZARD_SRC := $(call ls-files, web/projects/install-wizard) WEB_INSTALL_WIZARD_SRC := $(call ls-files, web/projects/install-wizard)
WEB_START_TUNNEL_SRC := $(call ls-files, web/projects/start-tunnel)
PATCH_DB_CLIENT_SRC := $(shell git ls-files --recurse-submodules patch-db/client) PATCH_DB_CLIENT_SRC := $(shell git ls-files --recurse-submodules patch-db/client)
GZIP_BIN := $(shell which pigz || which gzip) GZIP_BIN := $(shell which pigz || which gzip)
TAR_BIN := $(shell which gtar || which tar) TAR_BIN := $(shell which gtar || which tar)
COMPILED_TARGETS := core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container container-runtime/rootfs.$(ARCH).squashfs COMPILED_TARGETS := core/target/$(ARCH)-unknown-linux-musl/$(PROFILE)/startbox core/target/$(ARCH)-unknown-linux-musl/release/containerbox container-runtime/rootfs.$(ARCH).squashfs
STARTOS_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) cargo-deps/$(RUST_ARCH)-unknown-linux-musl/release/startos-backup-fs $(PLATFORM_FILE) \ ALL_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) cargo-deps/$(ARCH)-unknown-linux-musl/release/startos-backup-fs $(PLATFORM_FILE) \
$(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then \ $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then \
echo cargo-deps/aarch64-unknown-linux-musl/release/pi-beep; \ echo cargo-deps/aarch64-unknown-linux-musl/release/pi-beep; \
fi) \ fi) \
$(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then \ $(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then \
echo cargo-deps/$(RUST_ARCH)-unknown-linux-musl/release/flamegraph; \ echo cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console; \
fi') \ echo cargo-deps/$(ARCH)-unknown-linux-musl/release/flamegraph; \
$(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)console($$|-) ]]; then \
echo cargo-deps/$(RUST_ARCH)-unknown-linux-musl/release/tokio-console; \
fi') fi')
REGISTRY_TARGETS := core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox core/startos/start-registryd.service REBUILD_TYPES = 1
TUNNEL_TARGETS := core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox core/startos/start-tunneld.service
ifeq ($(REMOTE),) ifeq ($(REMOTE),)
mkdir = mkdir -p $1 mkdir = mkdir -p $1
@@ -62,12 +56,12 @@ endif
.DELETE_ON_ERROR: .DELETE_ON_ERROR:
.PHONY: all metadata install clean format install-cli cli uis ui reflash deb $(IMAGE_TYPE) squashfs wormhole wormhole-deb test test-core test-sdk test-container-runtime registry install-registry tunnel install-tunnel ts-bindings .PHONY: all metadata install clean format cli uis ui reflash deb $(IMAGE_TYPE) squashfs wormhole wormhole-deb test test-core test-sdk test-container-runtime registry
all: $(STARTOS_TARGETS) all: $(ALL_TARGETS)
touch: touch:
touch $(STARTOS_TARGETS) touch $(ALL_TARGETS)
metadata: $(VERSION_FILE) $(PLATFORM_FILE) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) metadata: $(VERSION_FILE) $(PLATFORM_FILE) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE)
@@ -112,80 +106,43 @@ test-sdk: $(call ls-files, sdk) sdk/base/lib/osBindings/index.ts
test-container-runtime: container-runtime/node_modules/.package-lock.json $(call ls-files, container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json test-container-runtime: container-runtime/node_modules/.package-lock.json $(call ls-files, container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
cd container-runtime && npm test cd container-runtime && npm test
install-cli: $(GIT_HASH_FILE) cli:
./core/build-cli.sh --install ./core/install-cli.sh
cli: $(GIT_HASH_FILE) registry:
./core/build-cli.sh ./core/build-registrybox.sh
registry: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox tunnel:
./core/build-tunnelbox.sh
install-registry: $(REGISTRY_TARGETS)
$(call mkdir,$(DESTDIR)/usr/bin)
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox,$(DESTDIR)/usr/bin/start-registrybox)
$(call ln,/usr/bin/start-registrybox,$(DESTDIR)/usr/bin/start-registryd)
$(call ln,/usr/bin/start-registrybox,$(DESTDIR)/usr/bin/start-registry)
$(call mkdir,$(DESTDIR)/lib/systemd/system)
$(call cp,core/startos/start-registryd.service,$(DESTDIR)/lib/systemd/system/start-registryd.service)
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/registrybox: $(CORE_SRC) $(ENVIRONMENT_FILE)
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build-registrybox.sh
tunnel: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox
install-tunnel: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox core/startos/start-tunneld.service
$(call mkdir,$(DESTDIR)/usr/bin)
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox,$(DESTDIR)/usr/bin/start-tunnelbox)
$(call ln,/usr/bin/start-tunnelbox,$(DESTDIR)/usr/bin/start-tunneld)
$(call ln,/usr/bin/start-tunnelbox,$(DESTDIR)/usr/bin/start-tunnel)
$(call mkdir,$(DESTDIR)/lib/systemd/system)
$(call cp,core/startos/start-tunneld.service,$(DESTDIR)/lib/systemd/system/start-tunneld.service)
$(call mkdir,$(DESTDIR)/usr/lib/startos/scripts)
$(call cp,build/lib/scripts/forward-port,$(DESTDIR)/usr/lib/startos/scripts/forward-port)
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox: $(CORE_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) web/dist/static/start-tunnel/index.html
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build-tunnelbox.sh
deb: results/$(BASENAME).deb deb: results/$(BASENAME).deb
results/$(BASENAME).deb: dpkg-build.sh $(call ls-files,debian/startos) $(STARTOS_TARGETS) debian/control: build/lib/depends build/lib/conflicts
./debuild/control.sh
results/$(BASENAME).deb: dpkg-build.sh $(DEBIAN_SRC) $(ALL_TARGETS)
PLATFORM=$(PLATFORM) REQUIRES=debian ./build/os-compat/run-compat.sh ./dpkg-build.sh PLATFORM=$(PLATFORM) REQUIRES=debian ./build/os-compat/run-compat.sh ./dpkg-build.sh
registry-deb: results/$(REGISTRY_BASENAME).deb
results/$(REGISTRY_BASENAME).deb: dpkg-build.sh $(call ls-files,debian/start-registry) $(REGISTRY_TARGETS)
PROJECT=start-registry PLATFORM=$(ARCH) REQUIRES=debian ./build/os-compat/run-compat.sh ./dpkg-build.sh
tunnel-deb: results/$(TUNNEL_BASENAME).deb
results/$(TUNNEL_BASENAME).deb: dpkg-build.sh $(call ls-files,debian/start-tunnel) $(TUNNEL_TARGETS) build/lib/scripts/forward-port
PROJECT=start-tunnel PLATFORM=$(ARCH) REQUIRES=debian DEPENDS=wireguard-tools,iptables,conntrack ./build/os-compat/run-compat.sh ./dpkg-build.sh
$(IMAGE_TYPE): results/$(BASENAME).$(IMAGE_TYPE) $(IMAGE_TYPE): results/$(BASENAME).$(IMAGE_TYPE)
squashfs: results/$(BASENAME).squashfs squashfs: results/$(BASENAME).squashfs
results/$(BASENAME).$(IMAGE_TYPE) results/$(BASENAME).squashfs: $(IMAGE_RECIPE_SRC) results/$(BASENAME).deb results/$(BASENAME).$(IMAGE_TYPE) results/$(BASENAME).squashfs: $(IMAGE_RECIPE_SRC) results/$(BASENAME).deb
./image-recipe/run-local-build.sh "results/$(BASENAME).deb" REQUIRES=debian ./build/os-compat/run-compat.sh ./image-recipe/run-local-build.sh "results/$(BASENAME).deb"
# For creating os images. DO NOT USE # For creating os images. DO NOT USE
install: $(STARTOS_TARGETS) install: $(ALL_TARGETS)
$(call mkdir,$(DESTDIR)/usr/bin) $(call mkdir,$(DESTDIR)/usr/bin)
$(call mkdir,$(DESTDIR)/usr/sbin) $(call mkdir,$(DESTDIR)/usr/sbin)
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox,$(DESTDIR)/usr/bin/startbox) $(call cp,core/target/$(ARCH)-unknown-linux-musl/$(PROFILE)/startbox,$(DESTDIR)/usr/bin/startbox)
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/startd) $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/startd)
$(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-cli) $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-cli)
if [ "$(PLATFORM)" = "raspberrypi" ]; then $(call cp,cargo-deps/aarch64-unknown-linux-musl/release/pi-beep,$(DESTDIR)/usr/bin/pi-beep); fi if [ "$(PLATFORM)" = "raspberrypi" ]; then $(call cp,cargo-deps/aarch64-unknown-linux-musl/release/pi-beep,$(DESTDIR)/usr/bin/pi-beep); fi
if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then \ if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then \
$(call cp,cargo-deps/$(RUST_ARCH)-unknown-linux-musl/release/flamegraph,$(DESTDIR)/usr/bin/flamegraph); \ $(call cp,cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console,$(DESTDIR)/usr/bin/tokio-console); \
$(call cp,cargo-deps/$(ARCH)-unknown-linux-musl/release/flamegraph,$(DESTDIR)/usr/bin/flamegraph); \
fi fi
if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)console($$|-) ]]'; then \ $(call cp,cargo-deps/$(ARCH)-unknown-linux-musl/release/startos-backup-fs,$(DESTDIR)/usr/bin/startos-backup-fs)
$(call cp,cargo-deps/$(RUST_ARCH)-unknown-linux-musl/release/tokio-console,$(DESTDIR)/usr/bin/tokio-console); \
fi
$(call cp,cargo-deps/$(RUST_ARCH)-unknown-linux-musl/release/startos-backup-fs,$(DESTDIR)/usr/bin/startos-backup-fs)
$(call ln,/usr/bin/startos-backup-fs,$(DESTDIR)/usr/sbin/mount.backup-fs) $(call ln,/usr/bin/startos-backup-fs,$(DESTDIR)/usr/sbin/mount.backup-fs)
$(call mkdir,$(DESTDIR)/lib/systemd/system) $(call mkdir,$(DESTDIR)/lib/systemd/system)
@@ -204,7 +161,7 @@ install: $(STARTOS_TARGETS)
$(call cp,firmware/$(PLATFORM),$(DESTDIR)/usr/lib/startos/firmware) $(call cp,firmware/$(PLATFORM),$(DESTDIR)/usr/lib/startos/firmware)
update-overlay: $(STARTOS_TARGETS) update-overlay: $(ALL_TARGETS)
@echo "\033[33m!!! THIS WILL ONLY REFLASH YOUR DEVICE IN MEMORY !!!\033[0m" @echo "\033[33m!!! THIS WILL ONLY REFLASH YOUR DEVICE IN MEMORY !!!\033[0m"
@echo "\033[33mALL CHANGES WILL BE REVERTED IF YOU RESTART THE DEVICE\033[0m" @echo "\033[33mALL CHANGES WILL BE REVERTED IF YOU RESTART THE DEVICE\033[0m"
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
@@ -213,10 +170,10 @@ update-overlay: $(STARTOS_TARGETS)
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) PLATFORM=$(PLATFORM) $(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) PLATFORM=$(PLATFORM)
$(call ssh,"sudo systemctl start startd") $(call ssh,"sudo systemctl start startd")
wormhole: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox wormhole: core/target/$(ARCH)-unknown-linux-musl/$(PROFILE)/startbox
@echo "Paste the following command into the shell of your StartOS server:" @echo "Paste the following command into the shell of your StartOS server:"
@echo @echo
@wormhole send core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade \"cd /usr/bin && rm startbox && wormhole receive --accept-file %s && chmod +x startbox\"\n", $$3 }' @wormhole send core/target/$(ARCH)-unknown-linux-musl/$(PROFILE)/startbox 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade \"cd /usr/bin && rm startbox && wormhole receive --accept-file %s && chmod +x startbox\"\n", $$3 }'
wormhole-deb: results/$(BASENAME).deb wormhole-deb: results/$(BASENAME).deb
@echo "Paste the following command into the shell of your StartOS server:" @echo "Paste the following command into the shell of your StartOS server:"
@@ -228,18 +185,18 @@ wormhole-squashfs: results/$(BASENAME).squashfs
$(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}')) $(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}'))
@echo "Paste the following command into the shell of your StartOS server:" @echo "Paste the following command into the shell of your StartOS server:"
@echo @echo
@wormhole send results/$(BASENAME).squashfs 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo sh -c '"'"'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE) && /usr/lib/startos/scripts/prune-boot && cd /media/startos/images && wormhole receive --accept-file %s && CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/upgrade ./$(BASENAME).squashfs'"'"'\n", $$3 }' @wormhole send results/$(BASENAME).squashfs 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo sh -c '"'"'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE) && /usr/lib/startos/scripts/prune-boot && cd /media/startos/images && wormhole receive --accept-file %s && CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/use-img ./$(BASENAME).squashfs'"'"'\n", $$3 }'
update: $(STARTOS_TARGETS) update: $(ALL_TARGETS)
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create') $(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM) $(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM)
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"') $(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"')
update-startbox: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox # only update binary (faster than full update) update-startbox: core/target/$(ARCH)-unknown-linux-musl/$(PROFILE)/startbox # only update binary (faster than full update)
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create') $(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
$(call cp,core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox,/media/startos/next/usr/bin/startbox) $(call cp,core/target/$(ARCH)-unknown-linux-musl/$(PROFILE)/startbox,/media/startos/next/usr/bin/startbox)
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync true') $(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync true')
update-deb: results/$(BASENAME).deb # better than update, but only available from debian update-deb: results/$(BASENAME).deb # better than update, but only available from debian
@@ -256,9 +213,9 @@ update-squashfs: results/$(BASENAME).squashfs
$(call ssh,'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE)') $(call ssh,'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE)')
$(call ssh,'/usr/lib/startos/scripts/prune-boot') $(call ssh,'/usr/lib/startos/scripts/prune-boot')
$(call cp,results/$(BASENAME).squashfs,/media/startos/images/next.rootfs) $(call cp,results/$(BASENAME).squashfs,/media/startos/images/next.rootfs)
$(call ssh,'sudo CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/upgrade /media/startos/images/next.rootfs') $(call ssh,'sudo CHECKSUM=$(SQFS_SUM) /usr/lib/startos/scripts/use-img /media/startos/images/next.rootfs')
emulate-reflash: $(STARTOS_TARGETS) emulate-reflash: $(ALL_TARGETS)
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create') $(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM) $(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM)
@@ -279,9 +236,10 @@ container-runtime/node_modules/.package-lock.json: container-runtime/package-loc
npm --prefix container-runtime ci npm --prefix container-runtime ci
touch container-runtime/node_modules/.package-lock.json touch container-runtime/node_modules/.package-lock.json
ts-bindings: core/startos/bindings/index.ts sdk/base/lib/osBindings/index.ts: $(shell if [ "$(REBUILD_TYPES)" -ne 0 ]; then echo core/startos/bindings/index.ts; fi)
mkdir -p sdk/base/lib/osBindings mkdir -p sdk/base/lib/osBindings
rsync -ac --delete core/startos/bindings/ sdk/base/lib/osBindings/ rsync -ac --delete core/startos/bindings/ sdk/base/lib/osBindings/
touch sdk/base/lib/osBindings/index.ts
core/startos/bindings/index.ts: $(call ls-files, core) $(ENVIRONMENT_FILE) core/startos/bindings/index.ts: $(call ls-files, core) $(ENVIRONMENT_FILE)
rm -rf core/startos/bindings rm -rf core/startos/bindings
@@ -303,22 +261,22 @@ container-runtime/dist/node_modules/.package-lock.json container-runtime/dist/pa
./container-runtime/install-dist-deps.sh ./container-runtime/install-dist-deps.sh
touch container-runtime/dist/node_modules/.package-lock.json touch container-runtime/dist/node_modules/.package-lock.json
container-runtime/rootfs.$(ARCH).squashfs: container-runtime/debian.$(ARCH).squashfs container-runtime/container-runtime.service container-runtime/update-image.sh container-runtime/deb-install.sh container-runtime/dist/index.js container-runtime/dist/node_modules/.package-lock.json core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container container-runtime/rootfs.$(ARCH).squashfs: container-runtime/debian.$(ARCH).squashfs container-runtime/container-runtime.service container-runtime/update-image.sh container-runtime/deb-install.sh container-runtime/dist/index.js container-runtime/dist/node_modules/.package-lock.json core/target/$(ARCH)-unknown-linux-musl/release/containerbox
ARCH=$(ARCH) REQUIRES=qemu ./build/os-compat/run-compat.sh ./container-runtime/update-image.sh ARCH=$(ARCH) REQUIRES=linux ./build/os-compat/run-compat.sh ./container-runtime/update-image.sh
build/lib/depends build/lib/conflicts: $(ENVIRONMENT_FILE) $(PLATFORM_FILE) $(shell ls build/dpkg-deps/*) build/lib/depends build/lib/conflicts: build/dpkg-deps/*
PLATFORM=$(PLATFORM) ARCH=$(ARCH) build/dpkg-deps/generate.sh build/dpkg-deps/generate.sh
$(FIRMWARE_ROMS): build/lib/firmware.json download-firmware.sh $(PLATFORM_FILE) $(FIRMWARE_ROMS): build/lib/firmware.json download-firmware.sh $(PLATFORM_FILE)
./download-firmware.sh $(PLATFORM) ./download-firmware.sh $(PLATFORM)
core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox: $(CORE_SRC) $(COMPRESSED_WEB_UIS) web/patchdb-ui-seed.json $(ENVIRONMENT_FILE) core/target/$(ARCH)-unknown-linux-musl/$(PROFILE)/startbox: $(CORE_SRC) $(COMPRESSED_WEB_UIS) web/patchdb-ui-seed.json $(ENVIRONMENT_FILE)
ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build-startbox.sh ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build-startbox.sh
touch core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox touch core/target/$(ARCH)-unknown-linux-musl/$(PROFILE)/startbox
core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container: $(CORE_SRC) $(ENVIRONMENT_FILE) core/target/$(ARCH)-unknown-linux-musl/release/containerbox: $(CORE_SRC) $(ENVIRONMENT_FILE)
ARCH=$(ARCH) ./core/build-start-container.sh ARCH=$(ARCH) ./core/build-containerbox.sh
touch core/target/$(RUST_ARCH)-unknown-linux-musl/release/start-container touch core/target/$(ARCH)-unknown-linux-musl/release/containerbox
web/package-lock.json: web/package.json sdk/baseDist/package.json web/package-lock.json: web/package.json sdk/baseDist/package.json
npm --prefix web i npm --prefix web i
@@ -345,12 +303,8 @@ web/dist/raw/install-wizard/index.html: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_S
npm --prefix web run build:install npm --prefix web run build:install
touch web/dist/raw/install-wizard/index.html touch web/dist/raw/install-wizard/index.html
web/dist/raw/start-tunnel/index.html: $(WEB_START_TUNNEL_SRC) $(WEB_SHARED_SRC) web/.angular/.updated $(COMPRESSED_WEB_UIS): $(WEB_UIS) $(ENVIRONMENT_FILE)
npm --prefix web run build:tunnel ./compress-uis.sh
touch web/dist/raw/start-tunnel/index.html
web/dist/static/%/index.html: web/dist/raw/%/index.html
./compress-uis.sh $*
web/config.json: $(GIT_HASH_FILE) web/config-sample.json web/config.json: $(GIT_HASH_FILE) web/config-sample.json
jq '.useMocks = false' web/config-sample.json | jq '.gitHash = "$(shell cat GIT_HASH.txt)"' > web/config.json jq '.useMocks = false' web/config-sample.json | jq '.gitHash = "$(shell cat GIT_HASH.txt)"' > web/config.json
@@ -374,17 +328,14 @@ uis: $(WEB_UIS)
# this is a convenience step to build the UI # this is a convenience step to build the UI
ui: web/dist/raw/ui ui: web/dist/raw/ui
cargo-deps/aarch64-unknown-linux-musl/release/pi-beep: ./build-cargo-dep.sh cargo-deps/aarch64-unknown-linux-musl/release/pi-beep:
ARCH=aarch64 ./build-cargo-dep.sh pi-beep ARCH=aarch64 ./build-cargo-dep.sh pi-beep
cargo-deps/$(RUST_ARCH)-unknown-linux-musl/release/tokio-console: ./build-cargo-dep.sh cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console:
ARCH=$(ARCH) ./build-cargo-dep.sh tokio-console ARCH=$(ARCH) PREINSTALL="apk add musl-dev pkgconfig" ./build-cargo-dep.sh tokio-console
touch $@
cargo-deps/$(RUST_ARCH)-unknown-linux-musl/release/startos-backup-fs: ./build-cargo-dep.sh cargo-deps/$(ARCH)-unknown-linux-musl/release/startos-backup-fs:
ARCH=$(ARCH) ./build-cargo-dep.sh --git https://github.com/Start9Labs/start-fs.git startos-backup-fs ARCH=$(ARCH) PREINSTALL="apk add fuse3 fuse3-dev fuse3-static musl-dev pkgconfig" ./build-cargo-dep.sh --git https://github.com/Start9Labs/start-fs.git startos-backup-fs
touch $@
cargo-deps/$(RUST_ARCH)-unknown-linux-musl/release/flamegraph: ./build-cargo-dep.sh cargo-deps/$(ARCH)-unknown-linux-musl/release/flamegraph:
ARCH=$(ARCH) ./build-cargo-dep.sh flamegraph ARCH=$(ARCH) PREINSTALL="apk add musl-dev pkgconfig" ./build-cargo-dep.sh flamegraph
touch $@

View File

@@ -1,95 +0,0 @@
# StartTunnel
A self-hosted WireGuard VPN optimized for creating VLANs and reverse tunneling to personal servers.
You can think of StartTunnel as "virtual router in the cloud".
Use it for private remote access to self-hosted services running on a personal server, or to expose self-hosted services to the public Internet without revealing the host server's IP address.
## Features
- **Create Subnets**: Each subnet creates a private, virtual local area network (VLAN), similar to the LAN created by a home router.
- **Add Devices**: When you add a device (server, phone, laptop) to a subnet, it receives a LAN IP address on that subnet as well as a unique WireGuard config that must be copied, downloaded, or scanned into the device.
- **Forward Ports**: Forwarding a port creates a "reverse tunnel", exposing a specific port on a specific device to the public Internet.
## Installation
1. Rent a low cost VPS. For most use cases, the cheapest option should be enough.
- It must have a dedicated public IP address.
- For compute (CPU), memory (RAM), and storage (disk), choose the minimum spec.
- For transfer (bandwidth), it depends on (1) your use case and (2) your home Internet's _upload_ speed. Even if you intend to serve large files or stream content from your server, there is no reason to pay for speeds that exceed your home Internet's upload speed.
1. Provision the VPS with the latest version of Debian.
1. Access the VPS via SSH.
1. Run the StartTunnel install script:
curl -fsSL https://start9labs.github.io/start-tunnel | sh
1. [Initialize the web interface](#web-interface) (recommended)
## Updating
Simply re-run the install command:
```sh
curl -fsSL https://start9labs.github.io/start-tunnel | sh
```
## CLI
By default, StartTunnel is managed via the `start-tunnel` command line interface, which is self-documented.
```
start-tunnel --help
```
## Web Interface
Enable the web interface (recommended in most cases) to access your StartTunnel from the browser or via API.
1. Initialize the web interface.
start-tunnel web init
1. If your VPS has multiple public IP addresses, you will be prompted to select the IP address at which to host the web interface.
1. When prompted, enter the port at which to host the web interface. The default is 8443, and we recommend using it. If you change the default, choose an uncommon port to avoid future conflicts.
1. To access your StartTunnel web interface securely over HTTPS, you need an SSL certificate. When prompted, select whether to autogenerate a certificate or provide your own. _This is only for accessing your StartTunnel web interface_.
1. You will receive a success message with 3 pieces of information:
- **<https://IP:port>**: the URL where you can reach your personal web interface.
- **Password**: an autogenerated password for your interface. If you lose/forget it, you can reset it using the start-tunnel CLI.
- **Root Certificate Authority**: the Root CA of your StartTunnel instance.
1. If you autogenerated your SSL certificate, visiting the `https://IP:port` URL in the browser will warn you that the website is insecure. This is expected. You have two options for getting past this warning:
- option 1 (recommended): [Trust your StartTunnel Root CA on your connecting device](#trusting-your-starttunnel-root-ca).
- Option 2: bypass the warning in the browser, creating a one-time security exception.
### Trusting your StartTunnel Root CA
1. Copy the contents of your Root CA (starting with -----BEGIN CERTIFICATE----- and ending with -----END CERTIFICATE-----).
2. Open a text editor:
- Linux: gedit, nano, or any editor
- Mac: TextEdit
- Windows: Notepad
3. Paste the contents of your Root CA.
4. Save the file with a `.crt` extension (e.g. `start-tunnel.crt`) (make sure it saves as plain text, not rich text).
5. Trust the Root CA on your client device(s):
- [Linux](https://staging.docs.start9.com/device-guides/linux/ca.html)
- [Mac](https://staging.docs.start9.com/device-guides/mac/ca.html)
- [Windows](https://staging.docs.start9.com/device-guides/windows/ca.html)
- [Android/Graphene](https://staging.docs.start9.com/device-guides/android/ca.html)
- [iOS](https://staging.docs.start9.com/device-guides/ios/ca.html)

View File

@@ -1,201 +0,0 @@
# StartOS Version Bump Guide
This document explains how to bump the StartOS version across the entire codebase.
## Overview
When bumping from version `X.Y.Z-alpha.N` to `X.Y.Z-alpha.N+1`, you need to update files in multiple locations across the repository. The `// VERSION_BUMP` comment markers indicate where changes are needed.
## Files to Update
### 1. Core Rust Crate Version
**File: `core/startos/Cargo.toml`**
Update the version string (line ~18):
```toml
version = "0.4.0-alpha.15" # VERSION_BUMP
```
**File: `core/Cargo.lock`**
This file is auto-generated. After updating `Cargo.toml`, run:
```bash
cd core
cargo check
```
This will update the version in `Cargo.lock` automatically.
### 2. Create New Version Migration Module
**File: `core/startos/src/version/vX_Y_Z_alpha_N+1.rs`**
Create a new version file by copying the previous version and updating:
```rust
use exver::{PreReleaseSegment, VersionRange};
use super::v0_3_5::V0_3_0_COMPAT;
use super::{VersionT, v0_4_0_alpha_14}; // Update to previous version
use crate::prelude::*;
lazy_static::lazy_static! {
static ref V0_4_0_alpha_15: exver::Version = exver::Version::new(
[0, 4, 0],
[PreReleaseSegment::String("alpha".into()), 15.into()] // Update number
);
}
#[derive(Clone, Copy, Debug, Default)]
pub struct Version;
impl VersionT for Version {
type Previous = v0_4_0_alpha_14::Version; // Update to previous version
type PreUpRes = ();
async fn pre_up(self) -> Result<Self::PreUpRes, Error> {
Ok(())
}
fn semver(self) -> exver::Version {
V0_4_0_alpha_15.clone() // Update version name
}
fn compat(self) -> &'static VersionRange {
&V0_3_0_COMPAT
}
#[instrument(skip_all)]
fn up(self, _db: &mut Value, _: Self::PreUpRes) -> Result<Value, Error> {
// Add migration logic here if needed
Ok(Value::Null)
}
fn down(self, _db: &mut Value) -> Result<(), Error> {
// Add rollback logic here if needed
Ok(())
}
}
```
### 3. Update Version Module Registry
**File: `core/startos/src/version/mod.rs`**
Make changes in **5 locations**:
#### Location 1: Module Declaration (~line 57)
Add the new module after the previous version:
```rust
mod v0_4_0_alpha_14;
mod v0_4_0_alpha_15; // Add this
```
#### Location 2: Current Type Alias (~line 59)
Update the `Current` type and move the `// VERSION_BUMP` comment:
```rust
pub type Current = v0_4_0_alpha_15::Version; // VERSION_BUMP
```
#### Location 3: Version Enum (~line 175)
Remove `// VERSION_BUMP` from the previous version, add new variant, add comment:
```rust
V0_4_0_alpha_14(Wrapper<v0_4_0_alpha_14::Version>),
V0_4_0_alpha_15(Wrapper<v0_4_0_alpha_15::Version>), // VERSION_BUMP
Other(exver::Version),
```
#### Location 4: as_version_t() Match (~line 233)
Remove `// VERSION_BUMP`, add new match arm, add comment:
```rust
Self::V0_4_0_alpha_14(v) => DynVersion(Box::new(v.0)),
Self::V0_4_0_alpha_15(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP
Self::Other(v) => {
```
#### Location 5: as_exver() Match (~line 284, inside #[cfg(test)])
Remove `// VERSION_BUMP`, add new match arm, add comment:
```rust
Version::V0_4_0_alpha_14(Wrapper(x)) => x.semver(),
Version::V0_4_0_alpha_15(Wrapper(x)) => x.semver(), // VERSION_BUMP
Version::Other(x) => x.clone(),
```
### 4. SDK TypeScript Version
**File: `sdk/package/lib/StartSdk.ts`**
Update the OSVersion constant (~line 64):
```typescript
export const OSVersion = testTypeVersion("0.4.0-alpha.15");
```
### 5. Web UI Package Version
**File: `web/package.json`**
Update the version field:
```json
{
"name": "startos-ui",
"version": "0.4.0-alpha.15",
...
}
```
**File: `web/package-lock.json`**
This file is auto-generated, but it's faster to update manually. Find all instances of "startos-ui" and update the version field.
## Verification Step
```
make
```
## VERSION_BUMP Comment Pattern
The `// VERSION_BUMP` comment serves as a marker for where to make changes next time:
- Always **remove** it from the old location
- **Add** the new version entry
- **Move** the comment to mark the new location
This pattern helps you quickly find all the places that need updating in the next version bump.
## Summary Checklist
- [ ] Update `core/startos/Cargo.toml` version
- [ ] Create new `core/startos/src/version/vX_Y_Z_alpha_N+1.rs` file
- [ ] Update `core/startos/src/version/mod.rs` in 5 locations
- [ ] Run `cargo check` to update `core/Cargo.lock`
- [ ] Update `sdk/package/lib/StartSdk.ts` OSVersion
- [ ] Update `web/package.json` and `web/package-lock.json` version
- [ ] Verify all changes compile/build successfully
## Migration Logic
The `up()` and `down()` methods in the version file handle database migrations:
- **up()**: Migrates the database from the previous version to this version
- **down()**: Rolls back from this version to the previous version
- **pre_up()**: Runs before migration, useful for pre-migration checks or data gathering
If no migration is needed, return `Ok(Value::Null)` for `up()` and `Ok(())` for `down()`.
For complex migrations, you may need to:
1. Update `type PreUpRes` to pass data between `pre_up()` and `up()`
2. Implement database transformations in the `up()` method
3. Implement reverse transformations in `down()` for rollback support

View File

@@ -1,7 +1,5 @@
#!/bin/bash #!/bin/bash
PROJECT=${PROJECT:-"startos"}
cd "$(dirname "${BASH_SOURCE[0]}")" cd "$(dirname "${BASH_SOURCE[0]}")"
PLATFORM="$(if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi)" PLATFORM="$(if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi)"
@@ -18,4 +16,4 @@ if [ -n "$STARTOS_ENV" ]; then
VERSION_FULL="$VERSION_FULL~${STARTOS_ENV}" VERSION_FULL="$VERSION_FULL~${STARTOS_ENV}"
fi fi
echo -n "${PROJECT}-${VERSION_FULL}_${PLATFORM}" echo -n "startos-${VERSION_FULL}_${PLATFORM}"

View File

@@ -8,22 +8,27 @@ if [ "$0" != "./build-cargo-dep.sh" ]; then
exit 1 exit 1
fi fi
USE_TTY=
if tty -s; then
USE_TTY="-it"
fi
if [ -z "$ARCH" ]; then if [ -z "$ARCH" ]; then
ARCH=$(uname -m) ARCH=$(uname -m)
fi fi
RUST_ARCH="$ARCH" DOCKER_PLATFORM="linux/${ARCH}"
if [ "$ARCH" = "riscv64" ]; then if [ "$ARCH" = aarch64 ] || [ "$ARCH" = arm64 ]; then
RUST_ARCH="riscv64gc" DOCKER_PLATFORM="linux/arm64"
elif [ "$ARCH" = x86_64 ]; then
DOCKER_PLATFORM="linux/amd64"
fi fi
mkdir -p cargo-deps mkdir -p cargo-deps
alias 'rust-musl-builder'='docker run $USE_TTY --platform=${DOCKER_PLATFORM} --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$(pwd)"/cargo-deps:/home/rust/src -w /home/rust/src -P rust:alpine'
source core/builder-alias.sh PREINSTALL=${PREINSTALL:-true}
RUSTFLAGS="-C target-feature=+crt-static" rust-musl-builder sh -c "$PREINSTALL && cargo install $* --target-dir /home/rust/src --target=$ARCH-unknown-linux-musl"
sudo chown -R $USER cargo-deps
rust-zig-builder cargo-zigbuild install $* --target-dir /workdir/cargo-deps/ --target=$RUST_ARCH-unknown-linux-musl sudo chown -R $USER ~/.cargo
if [ "$(ls -nd "cargo-deps/$RUST_ARCH-unknown-linux-musl/release/${!#}" | awk '{ print $3 }')" != "$UID" ]; then
rust-zig-builder sh -c "chown -R $UID:$UID cargo-deps && chown -R $UID:$UID /usr/local/cargo"
fi

View File

@@ -7,9 +7,9 @@ bmon
btrfs-progs btrfs-progs
ca-certificates ca-certificates
cifs-utils cifs-utils
conntrack
cryptsetup cryptsetup
curl curl
dnsutils
dmidecode dmidecode
dnsutils dnsutils
dosfstools dosfstools
@@ -19,7 +19,6 @@ exfatprogs
flashrom flashrom
fuse3 fuse3
grub-common grub-common
grub-efi
htop htop
httpdirfs httpdirfs
iotop iotop
@@ -42,6 +41,7 @@ nvme-cli
nyx nyx
openssh-server openssh-server
podman podman
postgresql
psmisc psmisc
qemu-guest-agent qemu-guest-agent
rfkill rfkill

View File

@@ -5,15 +5,11 @@ set -e
cd "$(dirname "${BASH_SOURCE[0]}")" cd "$(dirname "${BASH_SOURCE[0]}")"
IFS="-" read -ra FEATURES <<< "$ENVIRONMENT" IFS="-" read -ra FEATURES <<< "$ENVIRONMENT"
FEATURES+=("${ARCH}")
if [ "$ARCH" != "$PLATFORM" ]; then
FEATURES+=("${PLATFORM}")
fi
feature_file_checker=' feature_file_checker='
/^#/ { next } /^#/ { next }
/^\+ [a-z0-9.-]+$/ { next } /^\+ [a-z0-9-]+$/ { next }
/^- [a-z0-9.-]+$/ { next } /^- [a-z0-9-]+$/ { next }
{ exit 1 } { exit 1 }
' '
@@ -34,8 +30,8 @@ for type in conflicts depends; do
for feature in ${FEATURES[@]}; do for feature in ${FEATURES[@]}; do
file="$feature.$type" file="$feature.$type"
if [ -f $file ]; then if [ -f $file ]; then
if grep "^- $pkg$" $file > /dev/null; then if grep "^- $pkg$" $file; then
SKIP=yes SKIP=1
fi fi
fi fi
done done

View File

@@ -1,10 +0,0 @@
- grub-efi
+ parted
+ raspberrypi-net-mods
+ raspberrypi-sys-mods
+ raspi-config
+ raspi-firmware
+ raspi-utils
+ rpi-eeprom
+ rpi-update
+ rpi.gpio-common

View File

@@ -1 +0,0 @@
+ grub-pc-bin

View File

@@ -1,123 +1,34 @@
#!/bin/sh #!/bin/sh
printf "\n"
printf "Welcome to\n"
cat << "ASCII"
parse_essential_db_info() { ███████
DB_DUMP="/tmp/startos_db.json" █ █ █
█ █ █ █
█ █ █ █
█ █ █ █
█ █ █ █
█ █
███████
if command -v start-cli >/dev/null 2>&1; then _____ __ ___ __ __
start-cli db dump > "$DB_DUMP" 2>/dev/null || return 1 (_ | /\ |__) | / \(_
else __) | / \| \ | \__/__)
return 1 ASCII
fi printf " v$(cat /usr/lib/startos/VERSION.txt)\n\n"
printf " %s (%s %s)\n" "$(uname -o)" "$(uname -r)" "$(uname -m)"
if command -v jq >/dev/null 2>&1 && [ -f "$DB_DUMP" ]; then printf " Git Hash: $(cat /usr/lib/startos/GIT_HASH.txt)"
HOSTNAME=$(jq -r '.value.serverInfo.hostname // "unknown"' "$DB_DUMP" 2>/dev/null) if [ -n "$(cat /usr/lib/startos/ENVIRONMENT.txt)" ]; then
VERSION=$(jq -r '.value.serverInfo.version // "unknown"' "$DB_DUMP" 2>/dev/null) printf " ~ $(cat /usr/lib/startos/ENVIRONMENT.txt)\n"
RAM_BYTES=$(jq -r '.value.serverInfo.ram // 0' "$DB_DUMP" 2>/dev/null)
WAN_IP=$(jq -r '.value.serverInfo.network.gateways[].ipInfo.wanIp // "unknown"' "$DB_DUMP" 2>/dev/null | head -1)
NTP_SYNCED=$(jq -r '.value.serverInfo.ntpSynced // false' "$DB_DUMP" 2>/dev/null)
if [ "$RAM_BYTES" != "0" ] && [ "$RAM_BYTES" != "null" ]; then
RAM_GB=$(echo "scale=1; $RAM_BYTES / 1073741824" | bc 2>/dev/null || echo "unknown")
else
RAM_GB="unknown"
fi
RUNNING_SERVICES=$(jq -r '[.value.packageData[] | select(.statusInfo.started != null)] | length' "$DB_DUMP" 2>/dev/null)
TOTAL_SERVICES=$(jq -r '.value.packageData | length' "$DB_DUMP" 2>/dev/null)
rm -f "$DB_DUMP"
return 0
else
rm -f "$DB_DUMP" 2>/dev/null
return 1
fi
}
DB_INFO_AVAILABLE=0
if parse_essential_db_info; then
DB_INFO_AVAILABLE=1
fi
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$VERSION" != "unknown" ]; then
version_display="v$VERSION"
else else
version_display="v$(cat /usr/lib/startos/VERSION.txt 2>/dev/null || echo 'unknown')" printf "\n"
fi fi
printf "\n\033[1;37m ▄▄▀▀▀▀▀▄▄\033[0m\n" printf "\n"
printf "\033[1;37m ▄▀ ▄ ▀▄ ▄▄▄▄▄ ▄▄▄▄▄▄▄ ▄ ▄▄▄▄▄ ▄▄▄▄▄▄▄ \033[1;31m▄██████▄ ▄██████\033[0m\n" printf " * Documentation: https://docs.start9.com\n"
printf "\033[1;37m █ █ █ █ █ █ █ █ █ ▀▄ █ \033[1;31m██ ██ ██ \033[0m\n" printf " * Management: https://%s.local\n" "$(hostname)"
printf "\033[1;37m█ █ █ █ ▀▄▄▄▄ █ █ █ █ ▄▄▄▀ █ \033[1;31m██ ██ ▀█████▄\033[0m\n" printf " * Support: https://start9.com/contact\n"
printf "\033[1;37m█ █ █ █ █ █ █ █ █ ▀▄ █ \033[1;31m██ ██ ██\033[0m\n" printf " * Source Code: https://github.com/Start9Labs/start-os\n"
printf "\033[1;37m █ █ █ █ ▄▄▄▄▄▀ █ █ █ █ ▀▄ █ \033[1;31m▀██████▀ ██████▀\033[0m\n" printf " * License: MIT\n"
printf "\033[1;37m █ █\033[0m\n" printf "\n"
printf "\033[1;37m ▀▀▄▄▄▀▀ $version_display\033[0m\n\n"
uptime_str=$(uptime | awk -F'up ' '{print $2}' | awk -F',' '{print $1}' | sed 's/^ *//')
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$RAM_GB" != "unknown" ]; then
memory_used=$(free -m | awk 'NR==2{printf "%.0fMB", $3}')
memory_display="$memory_used / ${RAM_GB}GB"
else
memory_display=$(free -m | awk 'NR==2{printf "%.0fMB / %.0fMB", $3, $2}')
fi
root_usage=$(df -h / | awk 'NR==2{printf "%s (%s free)", $5, $4}')
if [ -d "/media/startos/data/package-data" ]; then
data_usage=$(df -h /media/startos/data/package-data | awk 'NR==2{printf "%s (%s free)", $5, $4}')
else
data_usage="N/A"
fi
if [ "$DB_INFO_AVAILABLE" -eq 1 ]; then
services_text="$RUNNING_SERVICES/$TOTAL_SERVICES running"
else
services_text="Unknown"
fi
local_ip=$(ip route get 1.1.1.1 2>/dev/null | awk '{for(i=1;i<=NF;i++) if($i=="src") print $(i+1)}' | head -1)
if [ -z "$local_ip" ]; then local_ip="N/A"; fi
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$WAN_IP" != "unknown" ]; then
wan_ip="$WAN_IP"
else
wan_ip="N/A"
fi
printf " \033[1;37m┌─ SYSTEM STATUS ───────────────────────────────────────────────────┐\033[0m\n"
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Uptime:" "$uptime_str" "Memory:" "$memory_display"
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Root:" "$root_usage" "Data:" "$data_usage"
if [ "$DB_INFO_AVAILABLE" -eq 1 ]; then
if [ "$RUNNING_SERVICES" -eq "$TOTAL_SERVICES" ] && [ "$TOTAL_SERVICES" -gt 0 ]; then
printf " \033[1;37m│\033[0m %-8s \033[0;32m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
elif [ "$RUNNING_SERVICES" -gt 0 ]; then
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
else
printf " \033[1;37m│\033[0m %-8s \033[0;31m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
fi
else
printf " \033[1;37m│\033[0m %-8s \033[0;37m%-22s\033[0m %-8s \033[0;33m%-23s\033[0m \033[1;37m│\033[0m\n" "Services:" "$services_text" "WAN:" "$wan_ip"
fi
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$NTP_SYNCED" = "true" ]; then
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;32m%-23s\033[0m \033[1;37m│\033[0m\n" "Local:" "$local_ip" "NTP:" "Synced"
elif [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$NTP_SYNCED" = "false" ]; then
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;31m%-23s\033[0m \033[1;37m│\033[0m\n" "Local:" "$local_ip" "NTP:" "Not Synced"
else
printf " \033[1;37m│\033[0m %-8s \033[0;33m%-22s\033[0m %-8s \033[0;37m%-23s\033[0m \033[1;37m│\033[0m\n" "Local:" "$local_ip" "NTP:" "Unknown"
fi
printf " \033[1;37m└───────────────────────────────────────────────────────────────────┘\033[0m"
if [ "$DB_INFO_AVAILABLE" -eq 1 ] && [ "$HOSTNAME" != "unknown" ]; then
web_url="https://$HOSTNAME.local"
else
web_url="https://$(hostname).local"
fi
printf "\n \033[1;37m┌──────────────────────────────────────────────────── QUICK ACCESS ─┐\033[0m\n"
printf " \033[1;37m│\033[0m Web Interface: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "$web_url"
printf " \033[1;37m│\033[0m Documentation: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://staging.docs.start9.com"
printf " \033[1;37m│\033[0m Support: \033[0;36m%-50s\033[0m \033[1;37m│\033[0m\n" "https://start9.com/contact"
printf " \033[1;37m└───────────────────────────────────────────────────────────────────┘\033[0m\n\n"

View File

@@ -10,24 +10,24 @@ fi
POSITIONAL_ARGS=() POSITIONAL_ARGS=()
while [[ $# -gt 0 ]]; do while [[ $# -gt 0 ]]; do
case $1 in case $1 in
--no-sync) --no-sync)
NO_SYNC=1 NO_SYNC=1
shift shift
;; ;;
--create) --create)
ONLY_CREATE=1 ONLY_CREATE=1
shift shift
;; ;;
-*|--*) -*|--*)
echo "Unknown option $1" echo "Unknown option $1"
exit 1 exit 1
;; ;;
*) *)
POSITIONAL_ARGS+=("$1") # save positional arg POSITIONAL_ARGS+=("$1") # save positional arg
shift # past argument shift # past argument
;; ;;
esac esac
done done
set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
@@ -35,7 +35,7 @@ set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters
if [ -z "$NO_SYNC" ]; then if [ -z "$NO_SYNC" ]; then
echo 'Syncing...' echo 'Syncing...'
umount -R /media/startos/next 2> /dev/null umount -R /media/startos/next 2> /dev/null
umount /media/startos/upper 2> /dev/null umount -R /media/startos/upper 2> /dev/null
rm -rf /media/startos/upper /media/startos/next rm -rf /media/startos/upper /media/startos/next
mkdir /media/startos/upper mkdir /media/startos/upper
mount -t tmpfs tmpfs /media/startos/upper mount -t tmpfs tmpfs /media/startos/upper
@@ -43,6 +43,8 @@ if [ -z "$NO_SYNC" ]; then
mount -t overlay \ mount -t overlay \
-olowerdir=/media/startos/current,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \ -olowerdir=/media/startos/current,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \
overlay /media/startos/next overlay /media/startos/next
mkdir -p /media/startos/next/media/startos/root
mount --bind /media/startos/root /media/startos/next/media/startos/root
fi fi
if [ -n "$ONLY_CREATE" ]; then if [ -n "$ONLY_CREATE" ]; then
@@ -54,18 +56,12 @@ mkdir -p /media/startos/next/dev
mkdir -p /media/startos/next/sys mkdir -p /media/startos/next/sys
mkdir -p /media/startos/next/proc mkdir -p /media/startos/next/proc
mkdir -p /media/startos/next/boot mkdir -p /media/startos/next/boot
mkdir -p /media/startos/next/media/startos/root
mount --bind /run /media/startos/next/run mount --bind /run /media/startos/next/run
mount --bind /tmp /media/startos/next/tmp mount --bind /tmp /media/startos/next/tmp
mount --bind /dev /media/startos/next/dev mount --bind /dev /media/startos/next/dev
mount --bind /sys /media/startos/next/sys mount --bind /sys /media/startos/next/sys
mount --bind /proc /media/startos/next/proc mount --bind /proc /media/startos/next/proc
mount --bind /boot /media/startos/next/boot mount --bind /boot /media/startos/next/boot
mount --bind /media/startos/root /media/startos/next/media/startos/root
if mountpoint /sys/firmware/efi/efivars 2> /dev/null; then
mount --bind /sys/firmware/efi/efivars /media/startos/next/sys/firmware/efi/efivars
fi
if [ -z "$*" ]; then if [ -z "$*" ]; then
chroot /media/startos/next chroot /media/startos/next
@@ -75,10 +71,6 @@ else
CHROOT_RES=$? CHROOT_RES=$?
fi fi
if mountpoint /media/startos/next/sys/firmware/efi/efivars 2> /dev/null; then
umount /media/startos/next/sys/firmware/efi/efivars
fi
umount /media/startos/next/run umount /media/startos/next/run
umount /media/startos/next/tmp umount /media/startos/next/tmp
umount /media/startos/next/dev umount /media/startos/next/dev
@@ -95,12 +87,11 @@ if [ "$CHROOT_RES" -eq 0 ]; then
echo 'Upgrading...' echo 'Upgrading...'
rm -f /media/startos/images/next.squashfs
if ! time mksquashfs /media/startos/next /media/startos/images/next.squashfs -b 4096 -comp gzip; then if ! time mksquashfs /media/startos/next /media/startos/images/next.squashfs -b 4096 -comp gzip; then
umount -l /media/startos/next umount -R /media/startos/next
umount -l /media/startos/upper umount -R /media/startos/upper
rm -rf /media/startos/upper /media/startos/next rm -rf /media/startos/upper /media/startos/next
exit 1 exit 1
fi fi
hash=$(b3sum /media/startos/images/next.squashfs | head -c 32) hash=$(b3sum /media/startos/images/next.squashfs | head -c 32)
mv /media/startos/images/next.squashfs /media/startos/images/${hash}.rootfs mv /media/startos/images/next.squashfs /media/startos/images/${hash}.rootfs
@@ -112,5 +103,5 @@ if [ "$CHROOT_RES" -eq 0 ]; then
fi fi
umount -R /media/startos/next umount -R /media/startos/next
umount /media/startos/upper umount -R /media/startos/upper
rm -rf /media/startos/upper /media/startos/next rm -rf /media/startos/upper /media/startos/next

View File

@@ -64,11 +64,9 @@ user_pref("messaging-system.rsexperimentloader.enabled", false);
user_pref("network.allow-experiments", false); user_pref("network.allow-experiments", false);
user_pref("network.captive-portal-service.enabled", false); user_pref("network.captive-portal-service.enabled", false);
user_pref("network.connectivity-service.enabled", false); user_pref("network.connectivity-service.enabled", false);
user_pref("network.proxy.socks", "10.0.3.1"); user_pref("network.proxy.autoconfig_url", "file:///usr/lib/startos/proxy.pac");
user_pref("network.proxy.socks_port", 9050);
user_pref("network.proxy.socks_version", 5);
user_pref("network.proxy.socks_remote_dns", true); user_pref("network.proxy.socks_remote_dns", true);
user_pref("network.proxy.type", 1); user_pref("network.proxy.type", 2);
user_pref("privacy.resistFingerprinting", true); user_pref("privacy.resistFingerprinting", true);
//Enable letterboxing if we want the window size sent to the server to snap to common resolutions: //Enable letterboxing if we want the window size sent to the server to snap to common resolutions:
//user_pref("privacy.resistFingerprinting.letterboxing", true); //user_pref("privacy.resistFingerprinting.letterboxing", true);

View File

@@ -1,51 +1,26 @@
#!/bin/bash #!/bin/bash
if [ -z "$sip" ] || [ -z "$dip" ] || [ -z "$dprefix" ] || [ -z "$sport" ] || [ -z "$dport" ]; then if [ -z "$iiface" ] || [ -z "$oiface" ] || [ -z "$sip" ] || [ -z "$dip" ] || [ -z "$sport" ] || [ -z "$dport" ]; then
>&2 echo 'missing required env var' >&2 echo 'missing required env var'
exit 1 exit 1
fi fi
NAME="F$(echo "$sip:$sport -> $dip/$dprefix:$dport" | sha256sum | head -c 15)" kind="-A"
for kind in INPUT FORWARD ACCEPT; do
if ! iptables -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
iptables -N "${NAME}_${kind}" 2> /dev/null
iptables -A $kind -j "${NAME}_${kind}"
fi
done
for kind in PREROUTING INPUT OUTPUT POSTROUTING; do
if ! iptables -t nat -C $kind -j "${NAME}_${kind}" 2> /dev/null; then
iptables -t nat -N "${NAME}_${kind}" 2> /dev/null
iptables -t nat -A $kind -j "${NAME}_${kind}"
fi
done
err=0
trap 'err=1' ERR
for kind in INPUT FORWARD ACCEPT; do
iptables -F "${NAME}_${kind}" 2> /dev/null
done
for kind in PREROUTING INPUT OUTPUT POSTROUTING; do
iptables -t nat -F "${NAME}_${kind}" 2> /dev/null
done
if [ "$UNDO" = 1 ]; then if [ "$UNDO" = 1 ]; then
conntrack -D -p tcp -d $sip --dport $sport || true # conntrack returns exit 1 if no connections are active kind="-D"
conntrack -D -p udp -d $sip --dport $sport || true # conntrack returns exit 1 if no connections are active
exit $err
fi fi
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport" iptables -t nat "$kind" POSTROUTING -o $iiface -j MASQUERADE
iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport" iptables -t nat "$kind" PREROUTING -i $iiface -p tcp --dport $sport -j DNAT --to-destination $dip:$dport
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport" iptables -t nat "$kind" PREROUTING -i $iiface -p udp --dport $sport -j DNAT --to-destination $dip:$dport
iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport" iptables -t nat "$kind" PREROUTING -i $oiface -s $dip/24 -d $sip -p tcp --dport $sport -j DNAT --to-destination $dip:$dport
iptables -t nat "$kind" PREROUTING -i $oiface -s $dip/24 -d $sip -p udp --dport $sport -j DNAT --to-destination $dip:$dport
iptables -t nat "$kind" POSTROUTING -o $oiface -s $dip/24 -d $dip/32 -p tcp --dport $dport -j SNAT --to-source $sip:$sport
iptables -t nat "$kind" POSTROUTING -o $oiface -s $dip/24 -d $dip/32 -p udp --dport $dport -j SNAT --to-source $sip:$sport
iptables -t nat -A ${NAME}_PREROUTING -s "$dip/$dprefix" -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
iptables -t nat -A ${NAME}_PREROUTING -s "$dip/$dprefix" -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport"
iptables -t nat -A ${NAME}_POSTROUTING -s "$dip/$dprefix" -d "$dip" -p tcp --dport "$dport" -j MASQUERADE
iptables -t nat -A ${NAME}_POSTROUTING -s "$dip/$dprefix" -d "$dip" -p udp --dport "$dport" -j MASQUERADE
iptables -A ${NAME}_FORWARD -d $dip -p tcp --dport $dport -m state --state NEW -j ACCEPT iptables -t nat "$kind" PREROUTING -i $iiface -s $sip/32 -d $sip -p tcp --dport $sport -j DNAT --to-destination $dip:$dport
iptables -A ${NAME}_FORWARD -d $dip -p udp --dport $dport -m state --state NEW -j ACCEPT iptables -t nat "$kind" PREROUTING -i $iiface -s $sip/32 -d $sip -p udp --dport $sport -j DNAT --to-destination $dip:$dport
iptables -t nat "$kind" POSTROUTING -o $oiface -s $sip/32 -d $dip/32 -p tcp --dport $dport -j SNAT --to-source $sip:$sport
exit $err iptables -t nat "$kind" POSTROUTING -o $oiface -s $sip/32 -d $dip/32 -p udp --dport $dport -j SNAT --to-source $sip:$sport

View File

@@ -83,7 +83,6 @@ local_mount_root()
if [ -d "$image" ]; then if [ -d "$image" ]; then
mount -r --bind $image /lower mount -r --bind $image /lower
elif [ -f "$image" ]; then elif [ -f "$image" ]; then
modprobe loop
modprobe squashfs modprobe squashfs
mount -r $image /lower mount -r $image /lower
else else

View File

@@ -1,64 +1,36 @@
#!/bin/bash #!/bin/bash
# --- Config --- fail=$(printf " [\033[31m fail \033[0m]")
# Colors (using printf to ensure compatibility) pass=$(printf " [\033[32m pass \033[0m]")
GRAY=$(printf '\033[90m')
GREEN=$(printf '\033[32m')
RED=$(printf '\033[31m')
NC=$(printf '\033[0m') # No Color
# Proxies to test
proxies=(
"Host Tor|127.0.1.1:9050"
"Startd Tor|10.0.3.1:9050"
)
# Default URLs
onion_list=( onion_list=(
"The Tor Project|http://2gzyxa5ihm7nsggfxnu52rck2vv4rvmdlkiu3zzui5du4xyclen53wid.onion"
"Start9|http://privacy34kn4ez3y3nijweec6w4g54i3g54sdv7r5mr6soma3w4begyd.onion" "Start9|http://privacy34kn4ez3y3nijweec6w4g54i3g54sdv7r5mr6soma3w4begyd.onion"
"Mempool|http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion" "Mempool|http://mempoolhqx4isw62xs7abwphsq7ldayuidyx2v2oethdhhj6mlo2r6ad.onion"
"DuckDuckGo|https://duckduckgogg42xjoc72x3sjasowoarfbgcmvfimaftt6twagswzczad.onion" "DuckDuckGo|https://duckduckgogg42xjoc72x3sjasowoarfbgcmvfimaftt6twagswzczad.onion"
"Brave Search|https://search.brave4u7jddbv7cyviptqjc7jusxh72uik7zt6adtckl5f4nwy2v72qd.onion" "Brave Search|https://search.brave4u7jddbv7cyviptqjc7jusxh72uik7zt6adtckl5f4nwy2v72qd.onion"
) )
# Load custom list # Check if ~/.startos/tor-check.list exists and read its contents if available
[ -f ~/.startos/tor-check.list ] && readarray -t custom_list < <(grep -v '^#' ~/.startos/tor-check.list) && onion_list+=("${custom_list[@]}") if [ -f ~/.startos/tor-check.list ]; then
while IFS= read -r line; do
# --- Functions --- # Check if the line starts with a #
print_line() { printf "${GRAY}────────────────────────────────────────${NC}\n"; } if [[ ! "$line" =~ ^# ]]; then
onion_list+=("$line")
# --- Main ---
echo "Testing Onion Connections..."
for proxy_info in "${proxies[@]}"; do
proxy_name="${proxy_info%%|*}"
proxy_addr="${proxy_info#*|}"
print_line
printf "${GRAY}Proxy: %s (%s)${NC}\n" "$proxy_name" "$proxy_addr"
for data in "${onion_list[@]}"; do
name="${data%%|*}"
url="${data#*|}"
# Capture verbose output + http code.
# --no-progress-meter: Suppresses the "0 0 0" stats but keeps -v output
output=$(curl -v --no-progress-meter --max-time 15 --socks5-hostname "$proxy_addr" "$url" 2>&1)
exit_code=$?
if [ $exit_code -eq 0 ]; then
printf " ${GREEN}[pass]${NC} %s (%s)\n" "$name" "$url"
else
printf " ${RED}[fail]${NC} %s (%s)\n" "$name" "$url"
printf " ${RED}↳ Curl Error %s${NC}\n" "$exit_code"
# Print the last 4 lines of verbose log to show the specific handshake error
# We look for lines starting with '*' or '>' or '<' to filter out junk if any remains
echo "$output" | tail -n 4 | sed "s/^/ ${GRAY}/"
fi fi
done done < ~/.startos/tor-check.list
fi
echo "Testing connection to Onion Pages ..."
for data in "${onion_list[@]}"; do
name="${data%%|*}"
url="${data#*|}"
if curl --socks5-hostname localhost:9050 "$url" > /dev/null 2>&1; then
echo " ${pass}: $name ($url) "
else
echo " ${fail}: $name ($url) "
fi
done done
print_line
# Reset color just in case echo
printf "${NC}" echo "Done."

View File

@@ -1,82 +0,0 @@
#!/bin/bash
set -e
SOURCE_DIR="$(dirname $(realpath "${BASH_SOURCE[0]}"))"
if [ "$UID" -ne 0 ]; then
>&2 echo 'Must be run as root'
exit 1
fi
if ! [ -f "$1" ]; then
>&2 echo "usage: $0 <SQUASHFS>"
exit 1
fi
echo 'Upgrading...'
hash=$(b3sum $1 | head -c 32)
if [ -n "$2" ] && [ "$hash" != "$CHECKSUM" ]; then
>&2 echo 'Checksum mismatch'
exit 2
fi
unsquashfs -f -d / $1 boot
umount -R /media/startos/next 2> /dev/null || true
umount /media/startos/upper 2> /dev/null || true
umount /media/startos/lower 2> /dev/null || true
mkdir -p /media/startos/upper
mount -t tmpfs tmpfs /media/startos/upper
mkdir -p /media/startos/lower /media/startos/upper/data /media/startos/upper/work /media/startos/next
mount $1 /media/startos/lower
mount -t overlay \
-olowerdir=/media/startos/lower,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \
overlay /media/startos/next
mkdir -p /media/startos/next/run
mkdir -p /media/startos/next/dev
mkdir -p /media/startos/next/sys
mkdir -p /media/startos/next/proc
mkdir -p /media/startos/next/boot
mkdir -p /media/startos/next/media/startos/root
mount --bind /run /media/startos/next/run
mount --bind /tmp /media/startos/next/tmp
mount --bind /dev /media/startos/next/dev
mount --bind /sys /media/startos/next/sys
mount --bind /proc /media/startos/next/proc
mount --bind /boot /media/startos/next/boot
mount --bind /media/startos/root /media/startos/next/media/startos/root
if mountpoint /boot/efi 2> /dev/null; then
mkdir -p /media/startos/next/boot/efi
mount --bind /boot/efi /media/startos/next/boot/efi
fi
if mountpoint /sys/firmware/efi/efivars 2> /dev/null; then
mount --bind /sys/firmware/efi/efivars /media/startos/next/sys/firmware/efi/efivars
fi
chroot /media/startos/next bash -e << "EOF"
if [ -f /boot/grub/grub.cfg ]; then
grub-install /dev/$(eval $(lsblk -o MOUNTPOINT,PKNAME -P | grep 'MOUNTPOINT="/media/startos/root"') && echo $PKNAME)
update-grub
fi
EOF
sync
umount -Rl /media/startos/next
umount /media/startos/upper
umount /media/startos/lower
mv $1 /media/startos/images/${hash}.rootfs
ln -rsf /media/startos/images/${hash}.rootfs /media/startos/config/current.rootfs
sync
echo 'System upgrade complete. Reboot to apply changes...'

61
build/lib/scripts/use-img Executable file
View File

@@ -0,0 +1,61 @@
#!/bin/bash
set -e
if [ "$UID" -ne 0 ]; then
>&2 echo 'Must be run as root'
exit 1
fi
if [ -z "$1" ]; then
>&2 echo "usage: $0 <SQUASHFS>"
exit 1
fi
VERSION=$(unsquashfs -cat $1 /usr/lib/startos/VERSION.txt)
GIT_HASH=$(unsquashfs -cat $1 /usr/lib/startos/GIT_HASH.txt)
B3SUM=$(b3sum $1 | head -c 32)
if [ -n "$CHECKSUM" ] && [ "$CHECKSUM" != "$B3SUM" ]; then
>&2 echo "CHECKSUM MISMATCH"
exit 2
fi
mv $1 /media/startos/images/${B3SUM}.rootfs
ln -rsf /media/startos/images/${B3SUM}.rootfs /media/startos/config/current.rootfs
unsquashfs -n -f -d / /media/startos/images/${B3SUM}.rootfs boot
umount -R /media/startos/next 2> /dev/null || true
umount -R /media/startos/lower 2> /dev/null || true
umount -R /media/startos/upper 2> /dev/null || true
rm -rf /media/startos/lower /media/startos/upper /media/startos/next
mkdir /media/startos/upper
mount -t tmpfs tmpfs /media/startos/upper
mkdir -p /media/startos/lower /media/startos/upper/data /media/startos/upper/work /media/startos/next
mount /media/startos/images/${B3SUM}.rootfs /media/startos/lower
mount -t overlay \
-olowerdir=/media/startos/lower,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \
overlay /media/startos/next
mkdir -p /media/startos/next/media/startos/root
mount --bind /media/startos/root /media/startos/next/media/startos/root
mkdir -p /media/startos/next/dev
mkdir -p /media/startos/next/sys
mkdir -p /media/startos/next/proc
mkdir -p /media/startos/next/boot
mount --bind /dev /media/startos/next/dev
mount --bind /sys /media/startos/next/sys
mount --bind /proc /media/startos/next/proc
mount --bind /boot /media/startos/next/boot
chroot /media/startos/next update-grub2
umount -R /media/startos/next
umount -R /media/startos/upper
umount -R /media/startos/lower
rm -rf /media/startos/lower /media/startos/upper /media/startos/next
sync
reboot

View File

@@ -1,4 +1,4 @@
FROM debian:forky FROM debian:bookworm
RUN apt-get update && \ RUN apt-get update && \
apt-get install -y \ apt-get install -y \
@@ -25,8 +25,7 @@ RUN apt-get update && \
systemd-container \ systemd-container \
systemd-sysv \ systemd-sysv \
dbus \ dbus \
dbus-user-session \ dbus-user-session
nodejs
RUN systemctl mask \ RUN systemctl mask \
systemd-firstboot.service \ systemd-firstboot.service \
@@ -39,6 +38,17 @@ RUN git config --global --add safe.directory /root/start-os
RUN mkdir -p /etc/debspawn && \ RUN mkdir -p /etc/debspawn && \
echo "AllowUnsafePermissions=true" > /etc/debspawn/global.toml echo "AllowUnsafePermissions=true" > /etc/debspawn/global.toml
ENV NVM_DIR=~/.nvm
ENV NODE_VERSION=22
RUN mkdir -p $NVM_DIR && \
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash && \
. $NVM_DIR/nvm.sh \
nvm install $NODE_VERSION && \
nvm use $NODE_VERSION && \
nvm alias default $NODE_VERSION && \
ln -s $(which node) /usr/bin/node && \
ln -s $(which npm) /usr/bin/npm
RUN mkdir -p /root/start-os RUN mkdir -p /root/start-os
WORKDIR /root/start-os WORKDIR /root/start-os

View File

@@ -1,6 +1,6 @@
#!/bin/bash #!/bin/bash
if [ "$FORCE_COMPAT" = 1 ] || ( [ "$REQUIRES" = "linux" ] && [ "$(uname -s)" != "Linux" ] ) || ( [ "$REQUIRES" = "debian" ] && ! which dpkg > /dev/null ) || ( [ "$REQUIRES" = "qemu" ] && ! which qemu-$ARCH > /dev/null ); then if [ "$FORCE_COMPAT" = 1 ] || ( [ "$REQUIRES" = "linux" ] && [ "$(uname -s)" != "Linux" ] ) || ( [ "$REQUIRES" = "debian" ] && ! which dpkg > /dev/null ); then
project_pwd="$(cd "$(dirname "${BASH_SOURCE[0]}")"/../.. && pwd)/" project_pwd="$(cd "$(dirname "${BASH_SOURCE[0]}")"/../.. && pwd)/"
pwd="$(pwd)/" pwd="$(pwd)/"
if ! [[ "$pwd" = "$project_pwd"* ]]; then if ! [[ "$pwd" = "$project_pwd"* ]]; then
@@ -18,9 +18,9 @@ if [ "$FORCE_COMPAT" = 1 ] || ( [ "$REQUIRES" = "linux" ] && [ "$(uname -s)" !=
docker run -d --rm --name os-compat --privileged --security-opt apparmor=unconfined -v "${project_pwd}:/root/start-os" -v /lib/modules:/lib/modules:ro start9/build-env docker run -d --rm --name os-compat --privileged --security-opt apparmor=unconfined -v "${project_pwd}:/root/start-os" -v /lib/modules:/lib/modules:ro start9/build-env
while ! docker exec os-compat systemctl is-active --quiet multi-user.target 2> /dev/null; do sleep .5; done while ! docker exec os-compat systemctl is-active --quiet multi-user.target 2> /dev/null; do sleep .5; done
docker exec -eARCH -eENVIRONMENT -ePLATFORM -eGIT_BRANCH_AS_HASH -ePROJECT -eDEPENDS -eCONFLICTS $USE_TTY -w "/root/start-os${rel_pwd}" os-compat $@ docker exec -eARCH -eENVIRONMENT -ePLATFORM -eGIT_BRANCH_AS_HASH $USE_TTY -w "/root/start-os${rel_pwd}" os-compat $@
code=$? code=$?
docker stop os-compat > /dev/null docker stop os-compat
exit $code exit $code
else else
exec $@ exec $@

View File

@@ -4,17 +4,13 @@ cd "$(dirname "${BASH_SOURCE[0]}")"
set -e set -e
STATIC_DIR=web/dist/static/$1 rm -rf web/dist/static
RAW_DIR=web/dist/raw/$1
mkdir -p $STATIC_DIR
rm -rf $STATIC_DIR
if ! [[ "$ENVIRONMENT" =~ (^|-)dev($|-) ]]; then if ! [[ "$ENVIRONMENT" =~ (^|-)dev($|-) ]]; then
find $RAW_DIR -type f -not -name '*.gz' -and -not -name '*.br' | xargs -n 1 -P 0 gzip -kf find web/dist/raw -type f -not -name '*.gz' -and -not -name '*.br' | xargs -n 1 -P 0 gzip -kf
find $RAW_DIR -type f -not -name '*.gz' -and -not -name '*.br' | xargs -n 1 -P 0 brotli -kf find web/dist/raw -type f -not -name '*.gz' -and -not -name '*.br' | xargs -n 1 -P 0 brotli -kf
for file in $(find $RAW_DIR -type f -not -name '*.gz' -and -not -name '*.br'); do for file in $(find web/dist/raw -type f -not -name '*.gz' -and -not -name '*.br'); do
raw_size=$(du $file | awk '{print $1 * 512}') raw_size=$(du $file | awk '{print $1 * 512}')
gz_size=$(du $file.gz | awk '{print $1 * 512}') gz_size=$(du $file.gz | awk '{print $1 * 512}')
br_size=$(du $file.br | awk '{print $1 * 512}') br_size=$(du $file.br | awk '{print $1 * 512}')
@@ -27,5 +23,4 @@ if ! [[ "$ENVIRONMENT" =~ (^|-)dev($|-) ]]; then
done done
fi fi
cp -r web/dist/raw web/dist/static
cp -r $RAW_DIR $STATIC_DIR

View File

@@ -4,7 +4,6 @@ OnFailure=container-runtime-failure.service
[Service] [Service]
Type=simple Type=simple
Environment=RUST_LOG=startos=debug
ExecStart=/usr/bin/node --experimental-detect-module --trace-warnings --unhandled-rejections=warn /usr/lib/startos/init/index.js ExecStart=/usr/bin/node --experimental-detect-module --trace-warnings --unhandled-rejections=warn /usr/lib/startos/init/index.js
Restart=no Restart=no

View File

@@ -6,9 +6,13 @@ mkdir -p /run/systemd/resolve
echo "nameserver 8.8.8.8" > /run/systemd/resolve/stub-resolv.conf echo "nameserver 8.8.8.8" > /run/systemd/resolve/stub-resolv.conf
apt-get update apt-get update
apt-get install -y curl rsync qemu-user-static nodejs apt-get install -y curl rsync qemu-user-static
curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash
source ~/.bashrc
nvm install 22
ln -s $(which node) /usr/bin/node
sed -i '/\(^\|#\)DNSStubListener=/c\DNSStubListener=no' /etc/systemd/resolved.conf
sed -i '/\(^\|#\)Storage=/c\Storage=persistent' /etc/systemd/journald.conf sed -i '/\(^\|#\)Storage=/c\Storage=persistent' /etc/systemd/journald.conf
sed -i '/\(^\|#\)Compress=/c\Compress=yes' /etc/systemd/journald.conf sed -i '/\(^\|#\)Compress=/c\Compress=yes' /etc/systemd/journald.conf
sed -i '/\(^\|#\)SystemMaxUse=/c\SystemMaxUse=1G' /etc/systemd/journald.conf sed -i '/\(^\|#\)SystemMaxUse=/c\SystemMaxUse=1G' /etc/systemd/journald.conf
@@ -17,6 +21,3 @@ sed -i '/\(^\|#\)ForwardToSyslog=/c\ForwardToSyslog=no' /etc/systemd/journald.co
systemctl enable container-runtime.service systemctl enable container-runtime.service
rm -rf /run/systemd rm -rf /run/systemd
rm -f /etc/resolv.conf
echo "nameserver 10.0.3.1" > /etc/resolv.conf

View File

@@ -3,7 +3,7 @@ cd "$(dirname "${BASH_SOURCE[0]}")"
set -e set -e
DISTRO=debian DISTRO=debian
VERSION=trixie VERSION=bookworm
ARCH=${ARCH:-$(uname -m)} ARCH=${ARCH:-$(uname -m)}
FLAVOR=default FLAVOR=default

View File

@@ -38,7 +38,7 @@
}, },
"../sdk/dist": { "../sdk/dist": {
"name": "@start9labs/start-sdk", "name": "@start9labs/start-sdk",
"version": "0.4.0-beta.45", "version": "0.4.0-beta.36",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"@iarna/toml": "^3.0.0", "@iarna/toml": "^3.0.0",
@@ -110,7 +110,6 @@
"integrity": "sha512-l+lkXCHS6tQEc5oUpK28xBOZ6+HwaH7YwoYQbLFiYb4nS2/l1tKnZEtEWkD0GuiYdvArf9qBS0XlQGXzPMsNqQ==", "integrity": "sha512-l+lkXCHS6tQEc5oUpK28xBOZ6+HwaH7YwoYQbLFiYb4nS2/l1tKnZEtEWkD0GuiYdvArf9qBS0XlQGXzPMsNqQ==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"@ampproject/remapping": "^2.2.0", "@ampproject/remapping": "^2.2.0",
"@babel/code-frame": "^7.26.2", "@babel/code-frame": "^7.26.2",
@@ -1201,7 +1200,6 @@
"dev": true, "dev": true,
"hasInstallScript": true, "hasInstallScript": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"dependencies": { "dependencies": {
"@swc/counter": "^0.1.3", "@swc/counter": "^0.1.3",
"@swc/types": "^0.1.17" "@swc/types": "^0.1.17"
@@ -2145,7 +2143,6 @@
} }
], ],
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"caniuse-lite": "^1.0.30001688", "caniuse-lite": "^1.0.30001688",
"electron-to-chromium": "^1.5.73", "electron-to-chromium": "^1.5.73",
@@ -3993,7 +3990,6 @@
"integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==",
"dev": true, "dev": true,
"license": "MIT", "license": "MIT",
"peer": true,
"dependencies": { "dependencies": {
"@jest/core": "^29.7.0", "@jest/core": "^29.7.0",
"@jest/types": "^29.6.3", "@jest/types": "^29.6.3",
@@ -6560,7 +6556,6 @@
"integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==",
"dev": true, "dev": true,
"license": "Apache-2.0", "license": "Apache-2.0",
"peer": true,
"bin": { "bin": {
"tsc": "bin/tsc", "tsc": "bin/tsc",
"tsserver": "bin/tsserver" "tsserver": "bin/tsserver"

View File

@@ -289,7 +289,6 @@ export function makeEffects(context: EffectContext): Effects {
getStatus(...[o]: Parameters<T.Effects["getStatus"]>) { getStatus(...[o]: Parameters<T.Effects["getStatus"]>) {
return rpcRound("get-status", o) as ReturnType<T.Effects["getStatus"]> return rpcRound("get-status", o) as ReturnType<T.Effects["getStatus"]>
}, },
/// DEPRECATED
setMainStatus(o: { status: "running" | "stopped" }): Promise<null> { setMainStatus(o: { status: "running" | "stopped" }): Promise<null> {
return rpcRound("set-main-status", o) as ReturnType< return rpcRound("set-main-status", o) as ReturnType<
T.Effects["setHealth"] T.Effects["setHealth"]

View File

@@ -146,7 +146,6 @@ const handleRpc = (id: IdType, result: Promise<RpcResult>) =>
const hasId = object({ id: idType }).test const hasId = object({ id: idType }).test
export class RpcListener { export class RpcListener {
shouldExit = false
unixSocketServer = net.createServer(async (server) => {}) unixSocketServer = net.createServer(async (server) => {})
private _system: System | undefined private _system: System | undefined
private callbacks: CallbackHolder | undefined private callbacks: CallbackHolder | undefined
@@ -159,8 +158,6 @@ export class RpcListener {
this.unixSocketServer.listen(SOCKET_PATH) this.unixSocketServer.listen(SOCKET_PATH)
console.log("Listening on %s", SOCKET_PATH)
this.unixSocketServer.on("connection", (s) => { this.unixSocketServer.on("connection", (s) => {
let id: IdType = null let id: IdType = null
const captureId = <X>(x: X) => { const captureId = <X>(x: X) => {
@@ -211,11 +208,6 @@ export class RpcListener {
.catch(mapError) .catch(mapError)
.then(logData("response")) .then(logData("response"))
.then(writeDataToSocket) .then(writeDataToSocket)
.then((_) => {
if (this.shouldExit) {
process.exit(0)
}
})
.catch((e) => { .catch((e) => {
console.error(`Major error in socket handling: ${e}`) console.error(`Major error in socket handling: ${e}`)
console.debug(`Data in: ${a.toString()}`) console.debug(`Data in: ${a.toString()}`)
@@ -292,13 +284,10 @@ export class RpcListener {
) )
}) })
.when(stopType, async ({ id }) => { .when(stopType, async ({ id }) => {
this.callbacks?.removeChild("main")
return handleRpc( return handleRpc(
id, id,
this.system.stop().then((result) => { this.system.stop().then((result) => ({ result })),
this.callbacks?.removeChild("main")
return { result }
}),
) )
}) })
.when(exitType, async ({ id, params }) => { .when(exitType, async ({ id, params }) => {
@@ -319,7 +308,6 @@ export class RpcListener {
}), }),
target, target,
) )
this.shouldExit = true
} }
})().then((result) => ({ result })), })().then((result) => ({ result })),
) )

View File

@@ -118,7 +118,7 @@ export class DockerProcedureContainer extends Drop {
subpath: volumeMount.path, subpath: volumeMount.path,
readonly: volumeMount.readonly, readonly: volumeMount.readonly,
volumeId: volumeMount["volume-id"], volumeId: volumeMount["volume-id"],
idmap: [], filetype: "directory",
}, },
}) })
} else if (volumeMount.type === "backup") { } else if (volumeMount.type === "backup") {

View File

@@ -120,7 +120,6 @@ export class MainLoop {
? { ? {
preferredExternalPort: lanConf.external, preferredExternalPort: lanConf.external,
alpn: { specified: ["http/1.1"] }, alpn: { specified: ["http/1.1"] },
addXForwardedHeaders: false,
} }
: null, : null,
}) })
@@ -134,7 +133,7 @@ export class MainLoop {
delete this.mainEvent delete this.mainEvent
delete this.healthLoops delete this.healthLoops
await main?.daemon await main?.daemon
.term() .stop()
.catch((e: unknown) => console.error(`Main loop error`, utils.asError(e))) .catch((e: unknown) => console.error(`Main loop error`, utils.asError(e)))
this.effects.setMainStatus({ status: "stopped" }) this.effects.setMainStatus({ status: "stopped" })
if (healthLoops) healthLoops.forEach((x) => clearInterval(x.interval)) if (healthLoops) healthLoops.forEach((x) => clearInterval(x.interval))

View File

@@ -456,7 +456,6 @@ export class SystemForEmbassy implements System {
addSsl = { addSsl = {
preferredExternalPort: lanPortNum, preferredExternalPort: lanPortNum,
alpn: { specified: [] }, alpn: { specified: [] },
addXForwardedHeaders: false,
} }
} }
return [ return [
@@ -889,6 +888,7 @@ export class SystemForEmbassy implements System {
effects: Effects, effects: Effects,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<PropertiesReturn> { ): Promise<PropertiesReturn> {
// TODO BLU-J set the properties ever so often
const setConfigValue = this.manifest.properties const setConfigValue = this.manifest.properties
if (!setConfigValue) throw new Error("There is no properties") if (!setConfigValue) throw new Error("There is no properties")
if (setConfigValue.type === "docker") { if (setConfigValue.type === "docker") {
@@ -1043,7 +1043,7 @@ export class SystemForEmbassy implements System {
volumeId: "embassy", volumeId: "embassy",
subpath: null, subpath: null,
readonly: true, readonly: true,
idmap: [], filetype: "directory",
}, },
}) })
configFile configFile
@@ -1191,7 +1191,7 @@ async function updateConfig(
volumeId: "embassy", volumeId: "embassy",
subpath: null, subpath: null,
readonly: true, readonly: true,
idmap: [], filetype: "directory",
}, },
}) })
const remoteConfig = configFile const remoteConfig = configFile
@@ -1241,11 +1241,11 @@ async function updateConfig(
: catchFn( : catchFn(
() => () =>
(specValue.target === "lan-address" (specValue.target === "lan-address"
? filled.addressInfo!.filter({ kind: "mdns" }) || ? filled.addressInfo!.localHostnames[0] ||
filled.addressInfo!.onion filled.addressInfo!.onionHostnames[0]
: filled.addressInfo!.onion || : filled.addressInfo!.onionHostnames[0] ||
filled.addressInfo!.filter({ kind: "mdns" }) filled.addressInfo!.localHostnames[0]
).hostnames[0].hostname.value, ).hostname.value,
) || "" ) || ""
mutConfigValue[key] = url mutConfigValue[key] = url
} }

View File

@@ -68,18 +68,22 @@ export class SystemForStartOs implements System {
try { try {
if (this.runningMain || this.starting) return if (this.runningMain || this.starting) return
this.starting = true this.starting = true
effects.constRetry = utils.once(() => { effects.constRetry = utils.once(() => effects.restart())
console.debug(".const() triggered")
effects.restart()
})
let mainOnTerm: () => Promise<void> | undefined let mainOnTerm: () => Promise<void> | undefined
const started = async (onTerm: () => Promise<void>) => {
await effects.setMainStatus({ status: "running" })
mainOnTerm = onTerm
return null
}
const daemons = await ( const daemons = await (
await this.abi.main({ await this.abi.main({
effects, effects,
started,
}) })
).build() ).build()
this.runningMain = { this.runningMain = {
stop: async () => { stop: async () => {
if (mainOnTerm) await mainOnTerm()
await daemons.term() await daemons.term()
}, },
} }

View File

@@ -7,12 +7,6 @@ const getDependencies: AllGetDependencies = {
system: getSystem, system: getSystem,
} }
for (let s of ["SIGTERM", "SIGINT", "SIGHUP"]) {
process.on(s, (s) => {
console.log(`Caught ${s}`)
})
}
new RpcListener(getDependencies) new RpcListener(getDependencies)
/** /**

View File

@@ -4,12 +4,7 @@ cd "$(dirname "${BASH_SOURCE[0]}")"
set -e set -e
RUST_ARCH="$ARCH" if mountpoint -q tmp/combined; then sudo umount -R tmp/combined; fi
if [ "$ARCH" = "riscv64" ]; then
RUST_ARCH="riscv64gc"
fi
if mountpoint -q tmp/combined; then sudo umount -l tmp/combined; fi
if mountpoint -q tmp/lower; then sudo umount tmp/lower; fi if mountpoint -q tmp/lower; then sudo umount tmp/lower; fi
sudo rm -rf tmp sudo rm -rf tmp
mkdir -p tmp/lower tmp/upper tmp/work tmp/combined mkdir -p tmp/lower tmp/upper tmp/work tmp/combined
@@ -26,15 +21,15 @@ fi
QEMU= QEMU=
if [ "$ARCH" != "$(uname -m)" ]; then if [ "$ARCH" != "$(uname -m)" ]; then
QEMU=/usr/bin/qemu-${ARCH} QEMU=/usr/bin/qemu-${ARCH}-static
if ! which qemu-$ARCH > /dev/null; then if ! which qemu-$ARCH-static > /dev/null; then
>&2 echo qemu-user is required for cross-platform builds >&2 echo qemu-user-static is required for cross-platform builds
sudo umount tmp/combined sudo umount tmp/combined
sudo umount tmp/lower sudo umount tmp/lower
sudo rm -rf tmp sudo rm -rf tmp
exit 1 exit 1
fi fi
sudo cp $(which qemu-$ARCH) tmp/combined${QEMU} sudo cp $(which qemu-$ARCH-static) tmp/combined${QEMU}
fi fi
sudo mkdir -p tmp/combined/usr/lib/startos/ sudo mkdir -p tmp/combined/usr/lib/startos/
@@ -44,8 +39,8 @@ sudo cp container-runtime.service tmp/combined/lib/systemd/system/container-runt
sudo chown 0:0 tmp/combined/lib/systemd/system/container-runtime.service sudo chown 0:0 tmp/combined/lib/systemd/system/container-runtime.service
sudo cp container-runtime-failure.service tmp/combined/lib/systemd/system/container-runtime-failure.service sudo cp container-runtime-failure.service tmp/combined/lib/systemd/system/container-runtime-failure.service
sudo chown 0:0 tmp/combined/lib/systemd/system/container-runtime-failure.service sudo chown 0:0 tmp/combined/lib/systemd/system/container-runtime-failure.service
sudo cp ../core/target/${RUST_ARCH}-unknown-linux-musl/release/start-container tmp/combined/usr/bin/start-container sudo cp ../core/target/$ARCH-unknown-linux-musl/release/containerbox tmp/combined/usr/bin/start-container
echo -e '#!/bin/bash\nexec start-container "$@"' | sudo tee tmp/combined/usr/bin/start-cli # TODO: remove echo -e '#!/bin/bash\nexec start-container $@' | sudo tee tmp/combined/usr/bin/start-cli # TODO: remove
sudo chmod +x tmp/combined/usr/bin/start-cli sudo chmod +x tmp/combined/usr/bin/start-cli
sudo chown 0:0 tmp/combined/usr/bin/start-container sudo chown 0:0 tmp/combined/usr/bin/start-container
echo container-runtime | sha256sum | head -c 32 | cat - <(echo) | sudo tee tmp/combined/etc/machine-id echo container-runtime | sha256sum | head -c 32 | cat - <(echo) | sudo tee tmp/combined/etc/machine-id

4615
core/Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,3 +1,3 @@
[workspace] [workspace]
members = ["startos"] members = ["helpers", "models", "startos"]

View File

@@ -1,2 +0,0 @@
[build]
pre-build = ["apt-get update && apt-get install -y rsync"]

View File

@@ -2,78 +2,53 @@
cd "$(dirname "${BASH_SOURCE[0]}")" cd "$(dirname "${BASH_SOURCE[0]}")"
source ./builder-alias.sh
set -ea set -ea
INSTALL=false
while [[ $# -gt 0 ]]; do
case $1 in
--install)
INSTALL=true
shift
;;
*)
>&2 echo "Unknown option: $1"
exit 1
;;
esac
done
shopt -s expand_aliases shopt -s expand_aliases
PROFILE=${PROFILE:-release} if [ -z "$ARCH" ]; then
if [ "${PROFILE}" = "release" ]; then ARCH=$(uname -m)
BUILD_FLAGS="--release"
else
if [ "$PROFILE" != "debug"]; then
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
PROFILE=debug
fi
fi fi
if [ -z "${ARCH:-}" ]; then
ARCH=$(uname -m)
fi
if [ "$ARCH" = "arm64" ]; then if [ "$ARCH" = "arm64" ]; then
ARCH="aarch64" ARCH="aarch64"
fi fi
RUST_ARCH="$ARCH" if [ -z "$KERNEL_NAME" ]; then
if [ "$ARCH" = "riscv64" ]; then KERNEL_NAME=$(uname -s)
RUST_ARCH="riscv64gc"
fi fi
if [ -z "${KERNEL_NAME:-}" ]; then if [ -z "$TARGET" ]; then
KERNEL_NAME=$(uname -s) if [ "$KERNEL_NAME" = "Linux" ]; then
TARGET="$ARCH-unknown-linux-musl"
elif [ "$KERNEL_NAME" = "Darwin" ]; then
TARGET="$ARCH-apple-darwin"
else
>&2 echo "unknown kernel $KERNEL_NAME"
exit 1
fi
fi fi
if [ -z "${TARGET:-}" ]; then USE_TTY=
if [ "$KERNEL_NAME" = "Linux" ]; then if tty -s; then
TARGET="$RUST_ARCH-unknown-linux-musl" USE_TTY="-it"
elif [ "$KERNEL_NAME" = "Darwin" ]; then
TARGET="$RUST_ARCH-apple-darwin"
else
>&2 echo "unknown kernel $KERNEL_NAME"
exit 1
fi
fi fi
cd .. cd ..
FEATURES="$(echo "${ENVIRONMENT:-}" | sed 's/-/,/g')" FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
RUSTFLAGS="" RUSTFLAGS=""
if [[ "${ENVIRONMENT:-}" =~ (^|-)console($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable" if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable"
fi fi
echo "FEATURES=\"$FEATURES\"" if which zig > /dev/null && [ "$ENFORCE_USE_DOCKER" != 1 ]; do
echo "RUSTFLAGS=\"$RUSTFLAGS\"" echo "FEATURES=\"$FEATURES\""
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin start-cli --target=$TARGET echo "RUSTFLAGS=\"$RUSTFLAGS\""
if [ "$(ls -nd "core/target/$TARGET/$PROFILE/start-cli" | awk '{ print $3 }')" != "$UID" ]; then RUSTFLAGS=$RUSTFLAGS sh -c "cd core && cargo zigbuild --release --no-default-features --features cli,$FEATURES --locked --bin start-cli --target=$TARGET"
rust-zig-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /usr/local/cargo" else
fi alias 'rust-zig-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/cargo-zigbuild'
RUSTFLAGS=$RUSTFLAGS rust-zig-builder sh -c "cd core && cargo zigbuild --release --no-default-features --features cli,$FEATURES --locked --bin start-cli --target=$TARGET"
if [ "$INSTALL" = "true" ]; then if [ "$(ls -nd core/target/$TARGET/release/start-cli | awk '{ print $3 }')" != "$UID" ]; then
cp "core/target/$TARGET/$PROFILE/start-cli" ~/.cargo/bin/start-cli rust-zig-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
fi
fi fi

36
core/build-containerbox.sh Executable file
View File

@@ -0,0 +1,36 @@
#!/bin/bash
# Builds the `containerbox` binary as a static musl executable for $ARCH
# inside the dockerized rust-musl-builder toolchain, for use as the
# container runtime's start-container binary.
cd "$(dirname "${BASH_SOURCE[0]}")"
set -ea
shopt -s expand_aliases
# Default ARCH to the host architecture; normalize Darwin's "arm64" spelling.
if [ -z "$ARCH" ]; then
ARCH=$(uname -m)
fi
if [ "$ARCH" = "arm64" ]; then
ARCH="aarch64"
fi
# Pass -it to docker only when attached to a real terminal.
USE_TTY=
if tty -s; then
USE_TTY="-it"
fi
cd ..
# ENVIRONMENT is a dash-separated list of cargo features (e.g. "dev-unstable").
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
RUSTFLAGS=""
# NOTE(review): presumably enables tokio's unstable APIs for the "unstable"
# environment — confirm intended cfg.
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable"
fi
# Defines the rust-musl-builder docker alias (reads USE_TTY/RUSTFLAGS/ARCH).
source ./core/builder-alias.sh
echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\""
rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features cli-container,$FEATURES --locked --bin containerbox --target=$ARCH-unknown-linux-musl"
# Docker runs as root; restore ownership of build artifacts to the invoking user.
if [ "$(ls -nd core/target/$ARCH-unknown-linux-musl/release/containerbox | awk '{ print $3 }')" != "$UID" ]; then
rust-musl-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
fi

View File

@@ -2,21 +2,9 @@
cd "$(dirname "${BASH_SOURCE[0]}")" cd "$(dirname "${BASH_SOURCE[0]}")"
source ./builder-alias.sh
set -ea set -ea
shopt -s expand_aliases shopt -s expand_aliases
PROFILE=${PROFILE:-release}
if [ "${PROFILE}" = "release" ]; then
BUILD_FLAGS="--release"
else
if [ "$PROFILE" != "debug"]; then
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
PROFILE=debug
fi
fi
if [ -z "$ARCH" ]; then if [ -z "$ARCH" ]; then
ARCH=$(uname -m) ARCH=$(uname -m)
fi fi
@@ -25,22 +13,24 @@ if [ "$ARCH" = "arm64" ]; then
ARCH="aarch64" ARCH="aarch64"
fi fi
RUST_ARCH="$ARCH" USE_TTY=
if [ "$ARCH" = "riscv64" ]; then if tty -s; then
RUST_ARCH="riscv64gc" USE_TTY="-it"
fi fi
cd .. cd ..
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')" FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
RUSTFLAGS="" RUSTFLAGS=""
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable" RUSTFLAGS="--cfg tokio_unstable"
fi fi
source ./core/builder-alias.sh
echo "FEATURES=\"$FEATURES\"" echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\"" echo "RUSTFLAGS=\"$RUSTFLAGS\""
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin registrybox --target=$RUST_ARCH-unknown-linux-musl rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features cli-registry,registry,$FEATURES --locked --bin registrybox --target=$ARCH-unknown-linux-musl"
if [ "$(ls -nd "core/target/$RUST_ARCH-unknown-linux-musl/$PROFILE/registrybox" | awk '{ print $3 }')" != "$UID" ]; then if [ "$(ls -nd core/target/$ARCH-unknown-linux-musl/release/registrybox | awk '{ print $3 }')" != "$UID" ]; then
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID /usr/local/cargo" rust-musl-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
fi fi

View File

@@ -1,46 +0,0 @@
#!/bin/bash
cd "$(dirname "${BASH_SOURCE[0]}")"
source ./builder-alias.sh
set -ea
shopt -s expand_aliases
PROFILE=${PROFILE:-release}
if [ "${PROFILE}" = "release" ]; then
BUILD_FLAGS="--release"
else
if [ "$PROFILE" != "debug"]; then
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
PROFILE=debug
fi
fi
if [ -z "$ARCH" ]; then
ARCH=$(uname -m)
fi
if [ "$ARCH" = "arm64" ]; then
ARCH="aarch64"
fi
RUST_ARCH="$ARCH"
if [ "$ARCH" = "riscv64" ]; then
RUST_ARCH="riscv64gc"
fi
cd ..
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
RUSTFLAGS=""
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable"
fi
echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\""
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin start-container --target=$RUST_ARCH-unknown-linux-musl
if [ "$(ls -nd "core/target/$RUST_ARCH-unknown-linux-musl/$PROFILE/start-container" | awk '{ print $3 }')" != "$UID" ]; then
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID /usr/local/cargo"
fi

View File

@@ -1,22 +1,15 @@
#!/bin/bash #!/bin/bash
cd "$(dirname "${BASH_SOURCE[0]}")"
source ./builder-alias.sh
set -ea
shopt -s expand_aliases
PROFILE=${PROFILE:-release} PROFILE=${PROFILE:-release}
if [ "${PROFILE}" = "release" ]; then if [ "${PROFILE}" = "release" ]; then
BUILD_FLAGS="--release" BUILD_FLAGS="--release"
else
if [ "$PROFILE" != "debug"]; then
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
PROFILE=debug
fi
fi fi
cd "$(dirname "${BASH_SOURCE[0]}")"
set -ea
shopt -s expand_aliases
if [ -z "$ARCH" ]; then if [ -z "$ARCH" ]; then
ARCH=$(uname -m) ARCH=$(uname -m)
fi fi
@@ -25,22 +18,24 @@ if [ "$ARCH" = "arm64" ]; then
ARCH="aarch64" ARCH="aarch64"
fi fi
RUST_ARCH="$ARCH" USE_TTY=
if [ "$ARCH" = "riscv64" ]; then if tty -s; then
RUST_ARCH="riscv64gc" USE_TTY="-it"
fi fi
cd .. cd ..
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')" FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
RUSTFLAGS="" RUSTFLAGS=""
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable" RUSTFLAGS="--cfg tokio_unstable"
fi fi
source ./core/builder-alias.sh
echo "FEATURES=\"$FEATURES\"" echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\"" echo "RUSTFLAGS=\"$RUSTFLAGS\""
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin startbox --target=$RUST_ARCH-unknown-linux-musl rust-musl-builder sh -c "cd core && cargo build $BUILD_FLAGS --no-default-features --features cli,startd,$FEATURES --locked --bin startbox --target=$ARCH-unknown-linux-musl"
if [ "$(ls -nd "core/target/$RUST_ARCH-unknown-linux-musl/$PROFILE/startbox" | awk '{ print $3 }')" != "$UID" ]; then if [ "$(ls -nd core/target/$ARCH-unknown-linux-musl/${PROFILE}/startbox | awk '{ print $3 }')" != "$UID" ]; then
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID /usr/local/cargo" rust-musl-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
fi fi

View File

@@ -2,43 +2,35 @@
cd "$(dirname "${BASH_SOURCE[0]}")" cd "$(dirname "${BASH_SOURCE[0]}")"
source ./builder-alias.sh
set -ea set -ea
shopt -s expand_aliases shopt -s expand_aliases
PROFILE=${PROFILE:-release}
if [ "${PROFILE}" = "release" ]; then
BUILD_FLAGS="--release"
else
if [ "$PROFILE" != "debug"]; then
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
PROFILE=debug
fi
fi
if [ -z "$ARCH" ]; then if [ -z "$ARCH" ]; then
ARCH=$(uname -m) ARCH=$(uname -m)
fi fi
if [ "$ARCH" = "arm64" ]; then if [ "$ARCH" = "arm64" ]; then
ARCH="aarch64" ARCH="aarch64"
fi fi
RUST_ARCH="$ARCH" USE_TTY=
if [ "$ARCH" = "riscv64" ]; then if tty -s; then
RUST_ARCH="riscv64gc" USE_TTY="-it"
fi fi
cd .. cd ..
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')" FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
RUSTFLAGS="" RUSTFLAGS=""
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable" RUSTFLAGS="--cfg tokio_unstable"
fi fi
source ./core/builder-alias.sh
echo "FEATURES=\"$FEATURES\"" echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\"" echo "RUSTFLAGS=\"$RUSTFLAGS\""
rust-zig-builder cargo test --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features test,$FEATURES --locked 'export_bindings_' rust-musl-builder sh -c "cd core && cargo test --release --features=test,$FEATURES 'export_bindings_' && chown \$UID:\$UID startos/bindings"
if [ "$(ls -nd "core/startos/bindings" | awk '{ print $3 }')" != "$UID" ]; then if [ "$(ls -nd core/startos/bindings | awk '{ print $3 }')" != "$UID" ]; then
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID core/startos/bindings && chown -R $UID:$UID /usr/local/cargo" rust-musl-builder sh -c "cd core && chown -R $UID:$UID startos/bindings && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
fi fi

View File

@@ -2,21 +2,9 @@
cd "$(dirname "${BASH_SOURCE[0]}")" cd "$(dirname "${BASH_SOURCE[0]}")"
source ./builder-alias.sh
set -ea set -ea
shopt -s expand_aliases shopt -s expand_aliases
PROFILE=${PROFILE:-release}
if [ "${PROFILE}" = "release" ]; then
BUILD_FLAGS="--release"
else
if [ "$PROFILE" != "debug"]; then
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
PROFILE=debug
fi
fi
if [ -z "$ARCH" ]; then if [ -z "$ARCH" ]; then
ARCH=$(uname -m) ARCH=$(uname -m)
fi fi
@@ -25,22 +13,24 @@ if [ "$ARCH" = "arm64" ]; then
ARCH="aarch64" ARCH="aarch64"
fi fi
RUST_ARCH="$ARCH" USE_TTY=
if [ "$ARCH" = "riscv64" ]; then if tty -s; then
RUST_ARCH="riscv64gc" USE_TTY="-it"
fi fi
cd .. cd ..
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')" FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
RUSTFLAGS="" RUSTFLAGS=""
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable" RUSTFLAGS="--cfg tokio_unstable"
fi fi
source ./core/builder-alias.sh
echo "FEATURES=\"$FEATURES\"" echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\"" echo "RUSTFLAGS=\"$RUSTFLAGS\""
rust-zig-builder cargo zigbuild --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=$FEATURES --locked --bin tunnelbox --target=$RUST_ARCH-unknown-linux-musl rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features cli-tunnel,tunnel,$FEATURES --locked --bin tunnelbox --target=$ARCH-unknown-linux-musl"
if [ "$(ls -nd "core/target/$RUST_ARCH-unknown-linux-musl/$PROFILE/tunnelbox" | awk '{ print $3 }')" != "$UID" ]; then if [ "$(ls -nd core/target/$ARCH-unknown-linux-musl/release/tunnelbox | awk '{ print $3 }')" != "$UID" ]; then
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID /usr/local/cargo" rust-musl-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
fi fi

View File

@@ -1,8 +1,3 @@
#!/bin/bash #!/bin/bash
USE_TTY= alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -e SCCACHE_GHA_ENABLED -e SCCACHE_GHA_VERSION -e ACTIONS_RESULTS_URL -e ACTIONS_RUNTIME_TOKEN -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$HOME/.cache/sccache":/root/.cache/sccache -v "$(pwd)":/home/rust/src -w /home/rust/src -P start9/rust-musl-cross:$ARCH-musl'
if tty -s; then
USE_TTY="-it"
fi
alias 'rust-zig-builder'='docker run '"$USE_TTY"' --rm -e "RUSTFLAGS=$RUSTFLAGS" -e "PKG_CONFIG_SYSROOT_DIR=/opt/sysroot/$ARCH" -e PKG_CONFIG_PATH="" -e PKG_CONFIG_LIBDIR="/opt/sysroot/$ARCH/usr/lib/pkgconfig" -e "AWS_LC_SYS_CMAKE_TOOLCHAIN_FILE_riscv64gc_unknown_linux_musl=/root/cmake-overrides/toolchain-riscv64-musl-clang.cmake" -e SCCACHE_GHA_ENABLED -e SCCACHE_GHA_VERSION -e ACTIONS_RESULTS_URL -e ACTIONS_RUNTIME_TOKEN -v "$HOME/.cargo/registry":/usr/local/cargo/registry -v "$HOME/.cargo/git":/usr/local/cargo/git -v "$HOME/.cache/sccache":/root/.cache/sccache -v "$HOME/.cache/cargo-zigbuild:/root/.cache/cargo-zigbuild" -v "$(pwd)":/workdir -w /workdir start9/cargo-zigbuild'

19
core/helpers/Cargo.toml Normal file
View File

@@ -0,0 +1,19 @@
[package]
name = "helpers"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
color-eyre = "0.6.2"
futures = "0.3.28"
lazy_async_pool = "0.3.3"
# Shared domain types (PackageId, Error, ...) from the sibling workspace crate.
models = { path = "../models" }
pin-project = "1.1.3"
# Git dependency pinned to the Start9 fork's master branch; keep in sync with
# the same dependency declared in the other workspace crates.
rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git", branch = "master" }
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0"
tokio = { version = "1", features = ["full"] }
tokio-stream = { version = "0.1.14", features = ["io-util", "sync"] }
tracing = "0.1.39"

View File

@@ -0,0 +1,31 @@
use std::task::Poll;
use tokio::io::{AsyncRead, ReadBuf};
/// An [`AsyncRead`] adapter that substitutes every occurrence of one byte
/// with another in the data read from the inner reader.
#[pin_project::pin_project]
pub struct ByteReplacementReader<R> {
    /// Byte value to search for.
    pub replace: u8,
    /// Byte value written in its place.
    pub with: u8,
    /// Underlying reader the data comes from.
    #[pin]
    pub inner: R,
}
impl<R: AsyncRead> AsyncRead for ByteReplacementReader<R> {
    fn poll_read(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
        buf: &mut ReadBuf<'_>,
    ) -> std::task::Poll<std::io::Result<()>> {
        let this = self.project();
        // BUG FIX: record how much of the buffer was already filled before
        // delegating. The original scanned buf.filled() from index 0, which
        // rewrites bytes that were filled by *previous* reads — the AsyncRead
        // contract only permits modifying the newly appended region.
        let prev_filled = buf.filled().len();
        match this.inner.poll_read(cx, buf) {
            Poll::Ready(Ok(())) => {
                for byte in &mut buf.filled_mut()[prev_filled..] {
                    if *byte == *this.replace {
                        *byte = *this.with;
                    }
                }
                Poll::Ready(Ok(()))
            }
            other => other,
        }
    }
}

262
core/helpers/src/lib.rs Normal file
View File

@@ -0,0 +1,262 @@
use std::future::Future;
use std::ops::{Deref, DerefMut};
use std::path::{Path, PathBuf};
use std::time::Duration;
use color_eyre::eyre::{eyre, Context, Error};
use futures::future::BoxFuture;
use futures::FutureExt;
use models::ResultExt;
use tokio::fs::File;
use tokio::sync::oneshot;
use tokio::task::{JoinError, JoinHandle, LocalSet};
mod byte_replacement_reader;
mod rsync;
mod script_dir;
pub use byte_replacement_reader::*;
pub use rsync::*;
pub use script_dir::*;
/// Always returns `true`.
///
/// NOTE(review): presumably used as a `#[serde(default = "const_true")]`
/// provider for boolean fields that should default to true — confirm at
/// call sites.
pub fn const_true() -> bool {
    true
}
/// Derives a sibling staging path for `path`: `<parent>/.<file_name>.tmp`.
///
/// # Errors
/// Fails when `path` has no parent directory or no UTF-8 file name.
pub fn to_tmp_path(path: impl AsRef<Path>) -> Result<PathBuf, Error> {
    let path = path.as_ref();
    let parent = path.parent();
    let name = path.file_name().and_then(|f| f.to_str());
    match (parent, name) {
        (Some(dir), Some(name)) => Ok(dir.join(format!(".{name}.tmp"))),
        _ => Err(eyre!("invalid path: {}", path.display())),
    }
}
/// Canonicalizes `path`, tolerating a non-existent final component.
///
/// If the path itself does not exist, the parent is canonicalized instead and
/// the file name is re-appended. With `create_parent`, missing ancestor
/// directories are created recursively along the way.
pub async fn canonicalize(
    path: impl AsRef<Path> + Send + Sync,
    create_parent: bool,
) -> Result<PathBuf, Error> {
    // Recursive helper: canonicalize (creating) the ancestors, then create
    // this directory. Returns a boxed future because async fns cannot
    // recurse directly.
    fn create_canonical_folder<'a>(
        path: impl AsRef<Path> + Send + Sync + 'a,
    ) -> BoxFuture<'a, Result<PathBuf, Error>> {
        async move {
            let path = canonicalize(path, true).await?;
            tokio::fs::create_dir(&path)
                .await
                .with_context(|| path.display().to_string())?;
            Ok(path)
        }
        .boxed()
    }
    let path = path.as_ref();
    if tokio::fs::metadata(path).await.is_err() {
        // Target doesn't exist: resolve via the parent directory instead.
        let parent = path.parent().unwrap_or(Path::new("."));
        if let Some(file_name) = path.file_name() {
            if create_parent && tokio::fs::metadata(parent).await.is_err() {
                return Ok(create_canonical_folder(parent).await?.join(file_name));
            } else {
                return Ok(tokio::fs::canonicalize(parent)
                    .await
                    .with_context(|| parent.display().to_string())?
                    .join(file_name));
            }
        }
    }
    // Fast path: the path exists (or has no file name) — canonicalize directly.
    tokio::fs::canonicalize(&path)
        .await
        .with_context(|| path.display().to_string())
}
/// A [`JoinHandle`] wrapper that aborts its task when dropped, instead of
/// letting it continue detached (the default `JoinHandle` drop behavior).
#[pin_project::pin_project(PinnedDrop)]
pub struct NonDetachingJoinHandle<T>(#[pin] JoinHandle<T>);
impl<T> NonDetachingJoinHandle<T> {
    /// Aborts the task and waits for it to wind down; typically resolves to
    /// `Err(JoinError)` (cancelled) unless the task had already completed.
    pub async fn wait_for_abort(self) -> Result<T, JoinError> {
        self.abort();
        self.await
    }
}
impl<T> From<JoinHandle<T>> for NonDetachingJoinHandle<T> {
    fn from(t: JoinHandle<T>) -> Self {
        NonDetachingJoinHandle(t)
    }
}
impl<T> Deref for NonDetachingJoinHandle<T> {
    type Target = JoinHandle<T>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<T> DerefMut for NonDetachingJoinHandle<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
#[pin_project::pinned_drop]
impl<T> PinnedDrop for NonDetachingJoinHandle<T> {
    fn drop(self: std::pin::Pin<&mut Self>) {
        let this = self.project();
        // Abort on drop: this is the "non-detaching" guarantee.
        this.0.into_ref().get_ref().abort()
    }
}
impl<T> Future for NonDetachingJoinHandle<T> {
    type Output = Result<T, JoinError>;
    fn poll(
        self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Self::Output> {
        // Delegate to the inner JoinHandle's Future impl.
        let this = self.project();
        this.0.poll(cx)
    }
}
/// A file written to a temp path and atomically renamed into place on
/// [`AtomicFile::save`], so readers never observe a partially written file.
pub struct AtomicFile {
    tmp_path: PathBuf, // staging path the data is actually written to
    path: PathBuf, // final destination, populated on save()
    file: Option<File>, // Some until saved / rolled back / dropped
}
impl AtomicFile {
    /// Opens a staging file for `path`. If `tmp_path` is not supplied, a
    /// sibling `.<name>.tmp` path is derived via `to_tmp_path`. Missing
    /// parent directories are created (see `canonicalize`).
    pub async fn new(
        path: impl AsRef<Path> + Send + Sync,
        tmp_path: Option<impl AsRef<Path> + Send + Sync>,
    ) -> Result<Self, Error> {
        let path = canonicalize(&path, true).await?;
        let tmp_path = if let Some(tmp_path) = tmp_path {
            canonicalize(&tmp_path, true).await?
        } else {
            to_tmp_path(&path)?
        };
        let file = File::create(&tmp_path)
            .await
            .with_context(|| tmp_path.display().to_string())?;
        Ok(Self {
            tmp_path,
            path,
            file: Some(file),
        })
    }
    /// Discards the staged data, deleting the temp file.
    pub async fn rollback(mut self) -> Result<(), Error> {
        drop(self.file.take());
        tokio::fs::remove_file(&self.tmp_path)
            .await
            .with_context(|| format!("rm {}", self.tmp_path.display()))?;
        Ok(())
    }
    /// Flushes and fsyncs the staged file, then renames it over the target
    /// path. NOTE(review): the rename is atomic on POSIX only when both paths
    /// are on the same filesystem — confirm callers guarantee this.
    pub async fn save(mut self) -> Result<(), Error> {
        use tokio::io::AsyncWriteExt;
        if let Some(file) = self.file.as_mut() {
            file.flush().await?;
            file.shutdown().await?;
            file.sync_all().await?;
        }
        drop(self.file.take());
        tokio::fs::rename(&self.tmp_path, &self.path)
            .await
            .with_context(|| {
                format!("mv {} -> {}", self.tmp_path.display(), self.path.display())
            })?;
        Ok(())
    }
}
impl std::ops::Deref for AtomicFile {
    type Target = File;
    fn deref(&self) -> &Self::Target {
        // Invariant: `file` is Some until save()/rollback() consume `self`.
        self.file.as_ref().unwrap()
    }
}
impl std::ops::DerefMut for AtomicFile {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.file.as_mut().unwrap()
    }
}
impl Drop for AtomicFile {
    fn drop(&mut self) {
        // Dropped without save(): clean up the temp file in the background.
        // NOTE(review): tokio::spawn panics outside a runtime context —
        // confirm AtomicFile is only ever dropped on a tokio worker thread.
        if let Some(file) = self.file.take() {
            drop(file);
            let path = std::mem::take(&mut self.tmp_path);
            tokio::spawn(async move { tokio::fs::remove_file(path).await.log_err() });
        }
    }
}
/// A resource that self-destructs after `timer` elapses unless claimed first
/// via [`TimedResource::get`].
pub struct TimedResource<T: 'static + Send> {
    handle: NonDetachingJoinHandle<Option<T>>, // resolves to None once timed out
    ready: oneshot::Sender<()>, // signaling this channel claims the resource
}
impl<T: 'static + Send> TimedResource<T> {
    /// Wraps `resource`, dropping it if not claimed within `timer`.
    pub fn new(resource: T, timer: Duration) -> Self {
        let (send, recv) = oneshot::channel();
        let handle = tokio::spawn(async move {
            // Exactly one branch runs: timeout drops the resource; a claim
            // signal hands it back to the caller via the join handle.
            tokio::select! {
                _ = tokio::time::sleep(timer) => {
                    drop(resource);
                    None
                },
                _ = recv => Some(resource),
            }
        });
        Self {
            handle: handle.into(),
            ready: send,
        }
    }
    /// Like [`Self::new`], but runs `destructor` on timeout instead of a
    /// plain drop (e.g. for cleanup that must be async).
    pub fn new_with_destructor<
        Fn: FnOnce(T) -> Fut + Send + 'static,
        Fut: Future<Output = ()> + Send,
    >(
        resource: T,
        timer: Duration,
        destructor: Fn,
    ) -> Self {
        let (send, recv) = oneshot::channel();
        let handle = tokio::spawn(async move {
            tokio::select! {
                _ = tokio::time::sleep(timer) => {
                    destructor(resource).await;
                    None
                },
                _ = recv => Some(resource),
            }
        });
        Self {
            handle: handle.into(),
            ready: send,
        }
    }
    /// Claims the resource; returns `None` if it already timed out.
    pub async fn get(self) -> Option<T> {
        let _ = self.ready.send(());
        self.handle.await.unwrap()
    }
    /// Whether the timer task has finished (its receiver has been dropped).
    pub fn is_timed_out(&self) -> bool {
        self.ready.is_closed()
    }
}
/// Runs `fut` on a dedicated OS thread with its own current-thread tokio
/// runtime and `LocalSet`, allowing the produced future to be `!Send`;
/// returns a handle to the eventual result.
pub async fn spawn_local<
    T: 'static + Send,
    F: FnOnce() -> Fut + Send + 'static,
    Fut: Future<Output = T> + 'static,
>(
    fut: F,
) -> NonDetachingJoinHandle<T> {
    let (send, recv) = tokio::sync::oneshot::channel();
    std::thread::spawn(move || {
        tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .unwrap()
            .block_on(async move {
                let set = LocalSet::new();
                // Hand the join handle back before driving the LocalSet to
                // completion on this thread.
                send.send(set.spawn_local(fut()).into())
                    .unwrap_or_else(|_| unreachable!())
                set.await
            })
    });
    recv.await.unwrap()
}

View File

@@ -0,0 +1,82 @@
use std::sync::Arc;
use color_eyre::Report;
use models::InterfaceId;
use models::PackageId;
use serde_json::Value;
use tokio::sync::mpsc;
/// Error: the runtime that owned the callback channel has shut down.
pub struct RuntimeDropped;
/// A handle for invoking a named callback registered with the runtime;
/// invocations are forwarded over an unbounded channel as `(id, args)` pairs.
pub struct Callback {
    id: Arc<String>,
    sender: mpsc::UnboundedSender<(Arc<String>, Vec<Value>)>,
}
impl Callback {
    /// Creates a callback handle identified by `id` that forwards calls
    /// through `sender`.
    pub fn new(id: String, sender: mpsc::UnboundedSender<(Arc<String>, Vec<Value>)>) -> Self {
        Self {
            id: Arc::new(id),
            sender,
        }
    }
    /// Returns `true` while the receiving side of the channel is still alive.
    ///
    /// BUG FIX: the original returned `is_closed()` directly, which is the
    /// inverse of "listening" — it reported `true` only after the runtime
    /// had dropped the receiver.
    pub fn is_listening(&self) -> bool {
        !self.sender.is_closed()
    }
    /// Invokes the callback with `args`; fails if the runtime is gone.
    pub fn call(&self, args: Vec<Value>) -> Result<(), RuntimeDropped> {
        self.sender
            .send((self.id.clone(), args))
            .map_err(|_| RuntimeDropped)
    }
}
/// Request parameters for binding an interface's port to an onion address.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AddressSchemaOnion {
    pub id: InterfaceId,
    pub external_port: u16,
}
/// Request parameters for binding an interface's port to a local address.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct AddressSchemaLocal {
    pub id: InterfaceId,
    pub external_port: u16,
}
/// A bound address, represented as an opaque string.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Address(pub String);
/// Unit placeholder. NOTE(review): carries no data yet — confirm whether
/// this is intentional or a stub awaiting fields.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Domain;
/// Unit placeholder. NOTE(review): carries no data yet — confirm intent.
#[derive(serde::Deserialize, serde::Serialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct Name;
/// The interface a service runtime uses to call back into the host OS:
/// config access, address binding, and service lifecycle control.
#[async_trait::async_trait]
#[allow(unused_variables)]
pub trait OsApi: Send + Sync + 'static {
    /// Fetches config values at `path` for package `id`; `callback`, if
    /// provided, is invoked on subsequent changes to those values.
    async fn get_service_config(
        &self,
        id: PackageId,
        path: &str,
        callback: Option<Callback>,
    ) -> Result<Vec<Value>, Report>;
    /// Binds `internal_port` to a local address described by `address_schema`.
    async fn bind_local(
        &self,
        internal_port: u16,
        address_schema: AddressSchemaLocal,
    ) -> Result<Address, Report>;
    /// Binds `internal_port` to an onion address described by `address_schema`.
    async fn bind_onion(
        &self,
        internal_port: u16,
        address_schema: AddressSchemaOnion,
    ) -> Result<Address, Report>;
    /// Releases a local binding previously created for `id` / `external`.
    async fn unbind_local(&self, id: InterfaceId, external: u16) -> Result<(), Report>;
    /// Releases an onion binding previously created for `id` / `external`.
    async fn unbind_onion(&self, id: InterfaceId, external: u16) -> Result<(), Report>;
    /// Marks the service as started (synchronous, unlike the other calls).
    fn set_started(&self) -> Result<(), Report>;
    /// Restarts the service.
    async fn restart(&self) -> Result<(), Report>;
    /// Starts the service.
    async fn start(&self) -> Result<(), Report>;
    /// Stops the service.
    async fn stop(&self) -> Result<(), Report>;
}

View File

@@ -2,15 +2,13 @@ use std::path::Path;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use futures::StreamExt; use futures::StreamExt;
use models::{Error, ErrorKind};
use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader}; use tokio::io::{AsyncBufReadExt, AsyncReadExt, BufReader};
use tokio::process::{Child, Command}; use tokio::process::{Child, Command};
use tokio::sync::watch; use tokio::sync::watch;
use tokio_stream::wrappers::WatchStream; use tokio_stream::wrappers::WatchStream;
use crate::util::future::NonDetachingJoinHandle; use crate::{const_true, ByteReplacementReader, NonDetachingJoinHandle};
use crate::util::io::ByteReplacementReader;
use crate::util::serde::const_true;
use crate::{Error, ErrorKind};
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] #[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@@ -87,7 +85,7 @@ impl Rsync {
return Err(Error::new( return Err(Error::new(
eyre!("rsync command stdout is none"), eyre!("rsync command stdout is none"),
ErrorKind::Filesystem, ErrorKind::Filesystem,
)); ))
} }
Some(a) => a, Some(a) => a,
}; };
@@ -96,7 +94,7 @@ impl Rsync {
return Err(Error::new( return Err(Error::new(
eyre!("rsync command stderr is none"), eyre!("rsync command stderr is none"),
ErrorKind::Filesystem, ErrorKind::Filesystem,
)); ))
} }
Some(a) => a, Some(a) => a,
}; };
@@ -145,7 +143,7 @@ impl Rsync {
return Err(Error::new( return Err(Error::new(
eyre!("rsync stderr error: {}", err), eyre!("rsync stderr error: {}", err),
ErrorKind::Filesystem, ErrorKind::Filesystem,
)); ))
} }
Ok(a) => a?, Ok(a) => a?,
}; };

View File

@@ -0,0 +1,17 @@
use std::path::{Path, PathBuf};
use models::{PackageId, VersionString};
pub const PKG_SCRIPT_DIR: &str = "package-data/scripts";
/// Computes the directory where a package's scripts live:
/// `<datadir>/package-data/scripts/<pkg_id>/<version>`.
pub fn script_dir<P: AsRef<Path>>(
    datadir: P,
    pkg_id: &PackageId,
    version: &VersionString,
) -> PathBuf {
    datadir
        .as_ref()
        // `&*PKG_SCRIPT_DIR` was a redundant reborrow of a `&'static str`
        // const; `&str` already implements `AsRef<Path>`.
        .join(PKG_SCRIPT_DIR)
        .join(pkg_id)
        .join(version.as_str())
}

19
core/install-cli.sh Executable file
View File

@@ -0,0 +1,19 @@
#!/bin/bash
# Installs the StartOS CLI (start-cli) into the user's cargo bin directory
# via `cargo install`, after ensuring the web asset directory exists.
cd "$(dirname "${BASH_SOURCE[0]}")"
set -ea
shopt -s expand_aliases
# NOTE(review): presumably the build embeds assets from web/dist/static, so
# the directory must exist even if empty — confirm against the build script.
web="../web/dist/static"
[ -d "$web" ] || mkdir -p "$web"
# NOTE(review): PLATFORM is normalized below but never referenced by the
# install command — dead code or a missing --target flag? Confirm.
if [ -z "$PLATFORM" ]; then
PLATFORM=$(uname -m)
fi
if [ "$PLATFORM" = "arm64" ]; then
PLATFORM="aarch64"
fi
cargo install --path=./startos --no-default-features --features=cli,docker --bin start-cli --locked

40
core/models/Cargo.toml Normal file
View File

@@ -0,0 +1,40 @@
[package]
name = "models"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# Git dependencies below pin Start9 forks/branches; keep versions and
# branches in sync with the other workspace crates that declare them.
arti-client = { version = "0.33", default-features = false, git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
axum = "0.8.4"
base64 = "0.22.1"
color-eyre = "0.6.2"
ed25519-dalek = { version = "2.0.0", features = ["serde"] }
gpt = "4.1.0"
lazy_static = "1.4"
lettre = { version = "0.11", default-features = false }
mbrman = "0.6.0"
exver = { version = "0.2.0", git = "https://github.com/Start9Labs/exver-rs.git", features = [
"serde",
] }
ipnet = "2.8.0"
num_enum = "0.7.1"
# "vendored" builds OpenSSL from source so cross-compiles don't need a
# system libssl.
openssl = { version = "0.10.57", features = ["vendored"] }
patch-db = { version = "*", path = "../../patch-db/patch-db", features = [
"trace",
] }
rand = "0.9.1"
regex = "1.10.2"
reqwest = "0.12"
rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git", branch = "master" }
rustls = "0.23"
serde = { version = "1.0", features = ["derive", "rc"] }
serde_json = "1.0"
ssh-key = "0.6.2"
ts-rs = "9"
thiserror = "2.0"
tokio = { version = "1", features = ["full"] }
tracing = "0.1.39"
yasi = { version = "0.1.6", features = ["serde", "ts-rs"] }
zbus = "5"

View File

@@ -1,3 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type AnyVerifyingKey = string export type ServiceInterfaceId = string;

View File

@@ -1,41 +1,41 @@
use std::borrow::Cow; use std::borrow::Cow;
use std::path::Path; use std::path::Path;
use std::str::FromStr;
use base64::Engine; use base64::Engine;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use imbl_value::InternedString;
use reqwest::header::CONTENT_TYPE; use reqwest::header::CONTENT_TYPE;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::io::{AsyncRead, AsyncReadExt}; use tokio::io::{AsyncRead, AsyncReadExt};
use ts_rs::TS; use ts_rs::TS;
use yasi::InternedString;
use crate::util::mime::{mime, unmime}; use crate::{mime, Error, ErrorKind, ResultExt};
use crate::{Error, ErrorKind, ResultExt};
#[derive(Clone, TS)] #[derive(Clone, TS)]
#[ts(type = "string")] #[ts(type = "string")]
pub struct DataUrl<'a> { pub struct DataUrl<'a> {
pub mime: InternedString, mime: InternedString,
pub data: Cow<'a, [u8]>, data: Cow<'a, [u8]>,
} }
impl<'a> DataUrl<'a> { impl<'a> DataUrl<'a> {
pub const DEFAULT_MIME: &'static str = "application/octet-stream"; pub const DEFAULT_MIME: &'static str = "application/octet-stream";
pub const MAX_SIZE: u64 = 100 * 1024; pub const MAX_SIZE: u64 = 100 * 1024;
fn to_string(&self) -> String { // data:{mime};base64,{data}
pub fn to_string(&self) -> String {
use std::fmt::Write; use std::fmt::Write;
let mut res = String::with_capacity(self.len()); let mut res = String::with_capacity(self.data_url_len_without_mime() + self.mime.len());
write!(&mut res, "{self}").unwrap(); let _ = write!(res, "data:{};base64,", self.mime);
base64::engine::general_purpose::STANDARD.encode_string(&self.data, &mut res);
res res
} }
fn len_without_mime(&self) -> usize { fn data_url_len_without_mime(&self) -> usize {
5 + 8 + (4 * self.data.len() / 3) + 3 5 + 8 + (4 * self.data.len() / 3) + 3
} }
pub fn len(&self) -> usize { pub fn data_url_len(&self) -> usize {
self.len_without_mime() + self.mime.len() self.data_url_len_without_mime() + self.mime.len()
} }
pub fn from_slice(mime: &str, data: &'a [u8]) -> Self { pub fn from_slice(mime: &str, data: &'a [u8]) -> Self {
@@ -44,10 +44,6 @@ impl<'a> DataUrl<'a> {
data: Cow::Borrowed(data), data: Cow::Borrowed(data),
} }
} }
pub fn canonical_ext(&self) -> Option<&'static str> {
unmime(&self.mime)
}
} }
impl DataUrl<'static> { impl DataUrl<'static> {
pub async fn from_reader( pub async fn from_reader(
@@ -113,57 +109,12 @@ impl DataUrl<'static> {
} }
} }
impl<'a> std::fmt::Display for DataUrl<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"data:{};base64,{}",
self.mime,
base64::display::Base64Display::new(
&*self.data,
&base64::engine::general_purpose::STANDARD
)
)
}
}
impl<'a> std::fmt::Debug for DataUrl<'a> { impl<'a> std::fmt::Debug for DataUrl<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
std::fmt::Display::fmt(self, f) f.write_str(&self.to_string())
} }
} }
#[derive(Debug)]
pub struct DataUrlParseError;
impl std::fmt::Display for DataUrlParseError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "invalid base64 url")
}
}
impl std::error::Error for DataUrlParseError {}
impl From<DataUrlParseError> for Error {
fn from(e: DataUrlParseError) -> Self {
Error::new(e, ErrorKind::ParseUrl)
}
}
impl FromStr for DataUrl<'static> {
type Err = DataUrlParseError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
s.strip_prefix("data:")
.and_then(|v| v.split_once(";base64,"))
.and_then(|(mime, data)| {
Some(DataUrl {
mime: InternedString::intern(mime),
data: Cow::Owned(
base64::engine::general_purpose::STANDARD
.decode(data)
.ok()?,
),
})
})
.ok_or(DataUrlParseError)
}
}
impl<'de> Deserialize<'de> for DataUrl<'static> { impl<'de> Deserialize<'de> for DataUrl<'static> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where where
@@ -179,9 +130,21 @@ impl<'de> Deserialize<'de> for DataUrl<'static> {
where where
E: serde::de::Error, E: serde::de::Error,
{ {
v.parse().map_err(|_| { v.strip_prefix("data:")
E::invalid_value(serde::de::Unexpected::Str(v), &"a valid base64 data url") .and_then(|v| v.split_once(";base64,"))
}) .and_then(|(mime, data)| {
Some(DataUrl {
mime: InternedString::intern(mime),
data: Cow::Owned(
base64::engine::general_purpose::STANDARD
.decode(data)
.ok()?,
),
})
})
.ok_or_else(|| {
E::invalid_value(serde::de::Unexpected::Str(v), &"a valid base64 data url")
})
} }
} }
deserializer.deserialize_any(Visitor) deserializer.deserialize_any(Visitor)
@@ -205,6 +168,6 @@ fn doesnt_reallocate() {
mime: InternedString::intern("png"), mime: InternedString::intern("png"),
data: Cow::Borrowed(&random[..i]), data: Cow::Borrowed(&random[..i]),
}; };
assert_eq!(icon.to_string().capacity(), icon.len()); assert_eq!(icon.to_string().capacity(), icon.data_url_len());
} }
} }

652
core/models/src/errors.rs Normal file
View File

@@ -0,0 +1,652 @@
use std::fmt::{Debug, Display};
use axum::http::uri::InvalidUri;
use axum::http::StatusCode;
use color_eyre::eyre::eyre;
use num_enum::TryFromPrimitive;
use patch_db::Revision;
use rpc_toolkit::reqwest;
use rpc_toolkit::yajrc::{
RpcError, INVALID_PARAMS_ERROR, INVALID_REQUEST_ERROR, METHOD_NOT_FOUND_ERROR, PARSE_ERROR,
};
use serde::{Deserialize, Serialize};
use tokio::task::JoinHandle;
use crate::InvalidId;
/// Machine-readable error categories for StartOS.
///
/// The `i32` discriminant doubles as the JSON-RPC error code (see
/// `From<Error> for RpcError` / `From<RpcError> for Error` below), so values
/// must remain stable across releases; retired variants are commented out
/// rather than renumbered or reused.
#[derive(Debug, Clone, Copy, PartialEq, Eq, TryFromPrimitive)]
#[repr(i32)]
pub enum ErrorKind {
    Unknown = 1,
    Filesystem = 2,
    Docker = 3,
    ConfigSpecViolation = 4,
    ConfigRulesViolation = 5,
    NotFound = 6,
    IncorrectPassword = 7,
    VersionIncompatible = 8,
    Network = 9,
    Registry = 10,
    Serialization = 11,
    Deserialization = 12,
    Utf8 = 13,
    ParseVersion = 14,
    IncorrectDisk = 15,
    // Nginx = 16,
    Dependency = 17,
    ParseS9pk = 18,
    ParseUrl = 19,
    DiskNotAvailable = 20,
    BlockDevice = 21,
    InvalidOnionAddress = 22,
    Pack = 23,
    ValidateS9pk = 24,
    DiskCorrupted = 25, // Remove
    Tor = 26,
    ConfigGen = 27,
    ParseNumber = 28,
    Database = 29,
    InvalidId = 30,
    InvalidSignature = 31,
    Backup = 32,
    Restore = 33,
    Authorization = 34,
    AutoConfigure = 35,
    Action = 36,
    RateLimited = 37,
    InvalidRequest = 38,
    MigrationFailed = 39,
    Uninitialized = 40,
    ParseNetAddress = 41,
    ParseSshKey = 42,
    SoundError = 43,
    ParseTimestamp = 44,
    ParseSysInfo = 45,
    Wifi = 46,
    Journald = 47,
    DiskManagement = 48,
    OpenSsl = 49,
    PasswordHashGeneration = 50,
    DiagnosticMode = 51,
    ParseDbField = 52,
    Duplicate = 53,
    MultipleErrors = 54,
    Incoherent = 55,
    InvalidBackupTargetId = 56,
    ProductKeyMismatch = 57,
    LanPortConflict = 58,
    Javascript = 59,
    Pem = 60,
    TLSInit = 61,
    Ascii = 62,
    MissingHeader = 63,
    Grub = 64,
    Systemd = 65,
    OpenSsh = 66,
    Zram = 67,
    Lshw = 68,
    CpuSettings = 69,
    Firmware = 70,
    Timeout = 71,
    Lxc = 72,
    Cancelled = 73,
    Git = 74,
    DBus = 75,
    InstallFailed = 76,
    UpdateFailed = 77,
    Smtp = 78,
}
impl ErrorKind {
    /// Human-readable title for this error category.
    ///
    /// Used as the JSON-RPC error `message` and as the prefix of
    /// [`Error`]'s `Display` output.
    pub fn as_str(&self) -> &'static str {
        use ErrorKind::*;
        match self {
            Unknown => "Unknown Error",
            Filesystem => "Filesystem I/O Error",
            Docker => "Docker Error",
            ConfigSpecViolation => "Config Spec Violation",
            ConfigRulesViolation => "Config Rules Violation",
            NotFound => "Not Found",
            IncorrectPassword => "Incorrect Password",
            VersionIncompatible => "Version Incompatible",
            Network => "Network Error",
            Registry => "Registry Error",
            Serialization => "Serialization Error",
            Deserialization => "Deserialization Error",
            Utf8 => "UTF-8 Parse Error",
            ParseVersion => "Version Parsing Error",
            IncorrectDisk => "Incorrect Disk",
            // Nginx => "Nginx Error",
            Dependency => "Dependency Error",
            ParseS9pk => "S9PK Parsing Error",
            ParseUrl => "URL Parsing Error",
            DiskNotAvailable => "Disk Not Available",
            BlockDevice => "Block Device Error",
            InvalidOnionAddress => "Invalid Onion Address",
            Pack => "Pack Error",
            ValidateS9pk => "S9PK Validation Error",
            DiskCorrupted => "Disk Corrupted", // Remove
            Tor => "Tor Daemon Error",
            ConfigGen => "Config Generation Error",
            ParseNumber => "Number Parsing Error",
            Database => "Database Error",
            InvalidId => "Invalid ID",
            InvalidSignature => "Invalid Signature",
            Backup => "Backup Error",
            Restore => "Restore Error",
            Authorization => "Unauthorized",
            AutoConfigure => "Auto-Configure Error",
            Action => "Action Failed",
            RateLimited => "Rate Limited",
            InvalidRequest => "Invalid Request",
            MigrationFailed => "Migration Failed",
            Uninitialized => "Uninitialized",
            ParseNetAddress => "Net Address Parsing Error",
            ParseSshKey => "SSH Key Parsing Error",
            SoundError => "Sound Interface Error",
            ParseTimestamp => "Timestamp Parsing Error",
            ParseSysInfo => "System Info Parsing Error",
            Wifi => "WiFi Internal Error",
            Journald => "Journald Error",
            DiskManagement => "Disk Management Error",
            OpenSsl => "OpenSSL Internal Error",
            PasswordHashGeneration => "Password Hash Generation Error",
            DiagnosticMode => "Server is in Diagnostic Mode",
            ParseDbField => "Database Field Parse Error",
            Duplicate => "Duplication Error",
            MultipleErrors => "Multiple Errors",
            Incoherent => "Incoherent",
            InvalidBackupTargetId => "Invalid Backup Target ID",
            ProductKeyMismatch => "Incompatible Product Keys",
            LanPortConflict => "Incompatible LAN Port Configuration",
            Javascript => "Javascript Engine Error",
            Pem => "PEM Encoding Error",
            TLSInit => "TLS Backend Initialization Error",
            Ascii => "ASCII Parse Error",
            MissingHeader => "Missing Header",
            Grub => "Grub Error",
            Systemd => "Systemd Error",
            OpenSsh => "OpenSSH Error",
            Zram => "Zram Error",
            Lshw => "LSHW Error",
            CpuSettings => "CPU Settings Error",
            Firmware => "Firmware Error",
            Timeout => "Timeout Error",
            Lxc => "LXC Error",
            Cancelled => "Cancelled",
            Git => "Git Error",
            DBus => "DBus Error",
            InstallFailed => "Install Failed",
            UpdateFailed => "Update Failed",
            Smtp => "SMTP Error",
        }
    }
}
impl Display for ErrorKind {
    /// Delegates to [`ErrorKind::as_str`].
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
/// The unified error type for StartOS: an underlying `eyre` report tagged
/// with a stable [`ErrorKind`] code.
#[derive(Debug)]
pub struct Error {
    pub source: color_eyre::eyre::Error,
    pub kind: ErrorKind,
    // Database revision associated with this error, if known; forwarded to
    // clients in the JSON-RPC payload.
    pub revision: Option<Revision>,
    // Optional background task tied to this error; awaited by `Error::wait`.
    pub task: Option<JoinHandle<()>>,
}
impl Display for Error {
    /// Renders as `"<kind title>: <source>"`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.kind.as_str())?;
        write!(f, ": {}", self.source)
    }
}
impl Error {
    /// Wrap any error-like value and tag it with an [`ErrorKind`].
    pub fn new<E: Into<color_eyre::eyre::Error>>(source: E, kind: ErrorKind) -> Self {
        Error {
            source: source.into(),
            kind,
            revision: None,
            task: None,
        }
    }

    /// Produce a detached copy whose source is a snapshot of this error's
    /// display and debug renderings (the original `eyre` chain and the task
    /// handle cannot be cloned).
    pub fn clone_output(&self) -> Self {
        let snapshot = ErrorData {
            details: format!("{}", self.source),
            debug: format!("{:?}", self.source),
        };
        Error {
            source: snapshot.into(),
            kind: self.kind,
            revision: self.revision.clone(),
            task: None,
        }
    }

    /// Attach a background task to be awaited by [`Error::wait`].
    pub fn with_task(mut self, task: JoinHandle<()>) -> Self {
        self.task = Some(task);
        self
    }

    /// Await (and detach) the associated background task, if any, logging a
    /// failure to join it.
    pub async fn wait(mut self) -> Self {
        if let Some(task) = self.task.take() {
            task.await.log_err();
        }
        self
    }
}
impl axum::response::IntoResponse for Error {
    // Convert into an HTTP 500 response whose JSON body is the JSON-RPC
    // error representation of this error.
    fn into_response(self) -> axum::response::Response {
        let mut res = axum::Json(RpcError::from(self)).into_response();
        *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
        res
    }
}
// Conversions from common library and OS error types, each tagged with the
// closest matching [`ErrorKind`].
impl From<std::convert::Infallible> for Error {
    fn from(value: std::convert::Infallible) -> Self {
        match value {}
    }
}
impl From<InvalidId> for Error {
    fn from(err: InvalidId) -> Self {
        Error::new(err, ErrorKind::InvalidId)
    }
}
impl From<std::io::Error> for Error {
    fn from(e: std::io::Error) -> Self {
        Error::new(e, ErrorKind::Filesystem)
    }
}
impl From<std::str::Utf8Error> for Error {
    fn from(e: std::str::Utf8Error) -> Self {
        Error::new(e, ErrorKind::Utf8)
    }
}
impl From<std::string::FromUtf8Error> for Error {
    fn from(e: std::string::FromUtf8Error) -> Self {
        Error::new(e, ErrorKind::Utf8)
    }
}
impl From<exver::ParseError> for Error {
    fn from(e: exver::ParseError) -> Self {
        Error::new(e, ErrorKind::ParseVersion)
    }
}
impl From<rpc_toolkit::url::ParseError> for Error {
    fn from(e: rpc_toolkit::url::ParseError) -> Self {
        Error::new(e, ErrorKind::ParseUrl)
    }
}
impl From<std::num::ParseIntError> for Error {
    fn from(e: std::num::ParseIntError) -> Self {
        Error::new(e, ErrorKind::ParseNumber)
    }
}
impl From<std::num::ParseFloatError> for Error {
    fn from(e: std::num::ParseFloatError) -> Self {
        Error::new(e, ErrorKind::ParseNumber)
    }
}
impl From<patch_db::Error> for Error {
    fn from(e: patch_db::Error) -> Self {
        Error::new(e, ErrorKind::Database)
    }
}
impl From<ed25519_dalek::SignatureError> for Error {
    fn from(e: ed25519_dalek::SignatureError) -> Self {
        Error::new(e, ErrorKind::InvalidSignature)
    }
}
impl From<std::net::AddrParseError> for Error {
    fn from(e: std::net::AddrParseError) -> Self {
        Error::new(e, ErrorKind::ParseNetAddress)
    }
}
impl From<ipnet::AddrParseError> for Error {
    fn from(e: ipnet::AddrParseError) -> Self {
        Error::new(e, ErrorKind::ParseNetAddress)
    }
}
// NOTE(review): this and `InvalidUri` below go through `eyre!("{}", e)`
// (stringified) rather than wrapping the error value directly — presumably
// because the source type does not satisfy eyre's bounds; confirm.
impl From<openssl::error::ErrorStack> for Error {
    fn from(e: openssl::error::ErrorStack) -> Self {
        Error::new(eyre!("{}", e), ErrorKind::OpenSsl)
    }
}
impl From<mbrman::Error> for Error {
    fn from(e: mbrman::Error) -> Self {
        Error::new(e, ErrorKind::DiskManagement)
    }
}
impl From<gpt::GptError> for Error {
    fn from(e: gpt::GptError) -> Self {
        Error::new(e, ErrorKind::DiskManagement)
    }
}
impl From<gpt::mbr::MBRError> for Error {
    fn from(e: gpt::mbr::MBRError) -> Self {
        Error::new(e, ErrorKind::DiskManagement)
    }
}
impl From<InvalidUri> for Error {
    fn from(e: InvalidUri) -> Self {
        Error::new(eyre!("{}", e), ErrorKind::ParseUrl)
    }
}
impl From<ssh_key::Error> for Error {
    fn from(e: ssh_key::Error) -> Self {
        Error::new(e, ErrorKind::OpenSh)
    }
}
impl From<reqwest::Error> for Error {
    /// Classify an HTTP client failure: request-builder problems map to
    /// URL-parse errors, response-decode problems to deserialization errors,
    /// and everything else to a generic network error.
    fn from(e: reqwest::Error) -> Self {
        let kind = if e.is_builder() {
            ErrorKind::ParseUrl
        } else if e.is_decode() {
            ErrorKind::Deserialization
        } else {
            ErrorKind::Network
        };
        Error::new(e, kind)
    }
}
impl From<arti_client::Error> for Error {
    fn from(e: arti_client::Error) -> Self {
        Error::new(e, ErrorKind::Tor)
    }
}
impl From<zbus::Error> for Error {
    fn from(e: zbus::Error) -> Self {
        Error::new(e, ErrorKind::DBus)
    }
}
// NOTE(review): rustls errors map to `OpenSsl`, which appears to serve as
// the generic TLS error kind here — confirm no dedicated kind is intended.
impl From<rustls::Error> for Error {
    fn from(e: rustls::Error) -> Self {
        Error::new(e, ErrorKind::OpenSsl)
    }
}
// patch_db distinguishes serialization from deserialization failures;
// preserve that distinction in our kind.
impl From<patch_db::value::Error> for Error {
    fn from(value: patch_db::value::Error) -> Self {
        match value.kind {
            patch_db::value::ErrorKind::Serialization => {
                Error::new(value.source, ErrorKind::Serialization)
            }
            patch_db::value::ErrorKind::Deserialization => {
                Error::new(value.source, ErrorKind::Deserialization)
            }
        }
    }
}
// All lettre (mail) error types collapse to the SMTP kind.
impl From<lettre::error::Error> for Error {
    fn from(e: lettre::error::Error) -> Self {
        Error::new(e, ErrorKind::Smtp)
    }
}
impl From<lettre::transport::smtp::Error> for Error {
    fn from(e: lettre::transport::smtp::Error) -> Self {
        Error::new(e, ErrorKind::Smtp)
    }
}
impl From<lettre::address::AddressError> for Error {
    fn from(e: lettre::address::AddressError) -> Self {
        Error::new(e, ErrorKind::Smtp)
    }
}
/// A serializable snapshot of an error: its display text and debug rendering.
/// Used to round-trip errors through JSON-RPC where the original `eyre`
/// report cannot travel.
#[derive(Clone, Deserialize, Serialize)]
pub struct ErrorData {
    // Human-readable message (the error's `Display` output).
    pub details: String,
    // Developer-oriented rendering (the error's `Debug` output).
    pub debug: String,
}
impl Display for ErrorData {
    // Show only the human-readable message.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Display::fmt(&self.details, f)
    }
}
impl Debug for ErrorData {
    // Intentionally formats the stored `debug` string with `Display` so the
    // captured rendering is emitted verbatim rather than re-quoted/escaped.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        Display::fmt(&self.debug, f)
    }
}
impl std::error::Error for ErrorData {}
impl From<Error> for ErrorData {
    /// Capture the error's display and debug renderings as owned strings.
    fn from(value: Error) -> Self {
        let details = value.to_string();
        let debug = format!("{:?}", value);
        Self { details, debug }
    }
}
impl From<&RpcError> for ErrorData {
    /// Reconstruct an [`ErrorData`] from a JSON-RPC error payload.
    fn from(value: &RpcError) -> Self {
        // Pull a named string field out of the error's `data` object, fall
        // back to `data` itself when it is a bare string, and finally to the
        // top-level message.
        let extract = |field: &str| {
            value
                .data
                .as_ref()
                .and_then(|d| {
                    d.as_object()
                        .and_then(|o| {
                            o.get(field)
                                .and_then(|v| v.as_str().map(|s| s.to_owned()))
                        })
                        .or_else(|| d.as_str().map(|s| s.to_owned()))
                })
                .unwrap_or_else(|| value.message.clone().into_owned())
        };
        Self {
            details: extract("details"),
            debug: extract("debug"),
        }
    }
}
impl From<Error> for RpcError {
    /// Convert into a JSON-RPC error: the [`ErrorKind`] discriminant becomes
    /// the error code, its title the message, and the `data` payload carries
    /// `details`, `debug`, and `revision`.
    fn from(e: Error) -> Self {
        // Build the `data` payload: human-readable details, the debug
        // rendering, and the database revision (if any).
        let mut data_object = serde_json::Map::with_capacity(3);
        data_object.insert("details".to_owned(), format!("{}", e.source).into());
        data_object.insert("debug".to_owned(), format!("{:?}", e.source).into());
        data_object.insert(
            "revision".to_owned(),
            match serde_json::to_value(&e.revision) {
                Ok(a) => a,
                Err(e) => {
                    tracing::warn!("Error serializing revision for Error object: {}", e);
                    serde_json::Value::Null
                }
            },
        );
        RpcError {
            code: e.kind as i32,
            message: e.kind.as_str().into(),
            // BUG FIX: `data_object` was previously constructed and then
            // dropped — the payload was re-serialized from a fresh
            // `ErrorData`, so the `revision` field never reached the client.
            // The object keeps the `details`/`debug` keys that
            // `From<&RpcError> for ErrorData` reads, so this is
            // backward compatible.
            data: Some(serde_json::Value::Object(data_object)),
        }
    }
}
impl From<RpcError> for Error {
fn from(e: RpcError) -> Self {
Error::new(
ErrorData::from(&e),
if let Ok(kind) = e.code.try_into() {
kind
} else if e.code == METHOD_NOT_FOUND_ERROR.code {
ErrorKind::NotFound
} else if e.code == PARSE_ERROR.code
|| e.code == INVALID_PARAMS_ERROR.code
|| e.code == INVALID_REQUEST_ERROR.code
{
ErrorKind::Deserialization
} else {
ErrorKind::Unknown
},
)
}
}
/// Accumulates multiple [`Error`]s so a batch operation can continue past
/// individual failures and report them all at the end.
#[derive(Debug, Default)]
pub struct ErrorCollection(Vec<Error>);
impl ErrorCollection {
    /// Create an empty collection.
    pub fn new() -> Self {
        Self::default()
    }

    /// Record the error from `result` (if any) and yield the success value.
    pub fn handle<T, E: Into<Error>>(&mut self, result: Result<T, E>) -> Option<T> {
        result.map_err(|e| self.0.push(e.into())).ok()
    }

    /// Collapse into a single result: `Ok` when nothing was recorded,
    /// otherwise one `MultipleErrors` error aggregating every message.
    pub fn into_result(self) -> Result<(), Error> {
        if self.0.is_empty() {
            return Ok(());
        }
        Err(Error::new(eyre!("{}", self), ErrorKind::MultipleErrors))
    }
}
impl From<ErrorCollection> for Result<(), Error> {
    // Shorthand for `ErrorCollection::into_result`.
    fn from(e: ErrorCollection) -> Self {
        e.into_result()
    }
}
impl<T, E: Into<Error>> Extend<Result<T, E>> for ErrorCollection {
    // Record every `Err` in the iterator; `Ok` values are discarded.
    fn extend<I: IntoIterator<Item = Result<T, E>>>(&mut self, iter: I) {
        for item in iter {
            self.handle(item);
        }
    }
}
impl std::fmt::Display for ErrorCollection {
    /// Render all collected errors separated by `"; "`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let mut sep = "";
        for e in &self.0 {
            write!(f, "{}{}", sep, e)?;
            sep = "; ";
        }
        Ok(())
    }
}
/// Extension methods for converting arbitrary `Result`s into [`Error`]s.
pub trait ResultExt<T, E>
where
    Self: Sized,
{
    /// Tag the error with an [`ErrorKind`].
    fn with_kind(self, kind: ErrorKind) -> Result<T, Error>;
    /// Tag the error with a kind plus a display-context prefix computed
    /// from the error itself.
    fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display>(self, f: F) -> Result<T, Error>;
    /// Log the error (at error and debug levels) and discard it.
    fn log_err(self) -> Option<T>;
}
impl<T, E> ResultExt<T, E> for Result<T, E>
where
color_eyre::eyre::Error: From<E>,
{
fn with_kind(self, kind: ErrorKind) -> Result<T, Error> {
self.map_err(|e| Error {
source: e.into(),
kind,
revision: None,
task: None,
})
}
fn with_ctx<F: FnOnce(&E) -> (ErrorKind, D), D: Display>(self, f: F) -> Result<T, Error> {
self.map_err(|e| {
let (kind, ctx) = f(&e);
let source = color_eyre::eyre::Error::from(e);
let ctx = format!("{}: {}", ctx, source);
let source = source.wrap_err(ctx);
Error {
kind,
source,
revision: None,
task: None,
}
})
}
fn log_err(self) -> Option<T> {
match self {
Ok(a) => Some(a),
Err(e) => {
let e: color_eyre::eyre::Error = e.into();
tracing::error!("{e}");
tracing::debug!("{e:?}");
None
}
}
}
}
impl<T> ResultExt<T, Error> for Result<T, Error> {
    fn with_kind(self, kind: ErrorKind) -> Result<T, Error> {
        // Retag in place; source, revision, and task are preserved.
        self.map_err(|mut e| {
            e.kind = kind;
            e
        })
    }
    fn with_ctx<F: FnOnce(&Error) -> (ErrorKind, D), D: Display>(self, f: F) -> Result<T, Error> {
        self.map_err(|e| {
            let (kind, ctx) = f(&e);
            // Context line includes both the caller-provided prefix and the
            // original message.
            let message = format!("{}: {}", ctx, e.source);
            Error {
                kind,
                source: e.source.wrap_err(message),
                revision: e.revision,
                task: e.task,
            }
        })
    }
    fn log_err(self) -> Option<T> {
        self.map_err(|e| {
            tracing::error!("{e}");
            tracing::debug!("{e:?}");
        })
        .ok()
    }
}
/// Extension for converting an empty `Option` into a `NotFound` [`Error`].
pub trait OptionExt<T>
where
    Self: Sized,
{
    /// Unwrap, or fail with [`ErrorKind::NotFound`] using `message` as the
    /// error detail.
    fn or_not_found(self, message: impl std::fmt::Display) -> Result<T, Error>;
}
impl<T> OptionExt<T> for Option<T> {
    fn or_not_found(self, message: impl std::fmt::Display) -> Result<T, Error> {
        self.ok_or_else(|| Error::new(eyre!("{}", message), ErrorKind::NotFound))
    }
}
/// Return early from the enclosing function with an `Error` of kind `$c`
/// when the condition `$x` is false. The remaining arguments are a format
/// string and its parameters, used as the error message.
///
/// Requires `Error` to be in scope at the call site.
#[macro_export]
macro_rules! ensure_code {
    ($x:expr, $c:expr, $fmt:expr $(, $arg:expr)*) => {
        if !($x) {
            return Err(Error::new(color_eyre::eyre::eyre!($fmt, $($arg, )*), $c));
        }
    };
}

View File

@@ -2,9 +2,9 @@ use std::convert::Infallible;
use std::path::Path; use std::path::Path;
use std::str::FromStr; use std::str::FromStr;
use imbl_value::InternedString;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ts_rs::TS; use ts_rs::TS;
use yasi::InternedString;
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)]
#[ts(type = "string")] #[ts(type = "string")]
@@ -14,14 +14,12 @@ impl GatewayId {
&*self.0 &*self.0
} }
} }
impl From<InternedString> for GatewayId { impl<T> From<T> for GatewayId
fn from(value: InternedString) -> Self { where
Self(value) T: Into<InternedString>,
} {
} fn from(value: T) -> Self {
impl From<GatewayId> for InternedString { Self(value.into())
fn from(value: GatewayId) -> Self {
value.0
} }
} }
impl FromStr for GatewayId { impl FromStr for GatewayId {

View File

@@ -1,9 +1,9 @@
use std::path::Path; use std::path::Path;
use std::str::FromStr; use std::str::FromStr;
use imbl_value::InternedString;
use serde::{Deserialize, Deserializer, Serialize}; use serde::{Deserialize, Deserializer, Serialize};
use ts_rs::TS; use ts_rs::TS;
use yasi::InternedString;
use crate::{Id, InvalidId}; use crate::{Id, InvalidId};

View File

@@ -5,8 +5,7 @@ use std::str::FromStr;
use serde::{Deserialize, Deserializer, Serialize}; use serde::{Deserialize, Deserializer, Serialize};
use ts_rs::TS; use ts_rs::TS;
use crate::util::VersionString; use crate::{Id, InvalidId, PackageId, VersionString};
use crate::{Id, InvalidId, PackageId};
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)]
#[ts(type = "string")] #[ts(type = "string")]

View File

@@ -1,4 +1,4 @@
use imbl_value::InternedString; use yasi::InternedString;
#[derive(Debug, thiserror::Error)] #[derive(Debug, thiserror::Error)]
#[error("Invalid ID: {0}")] #[error("Invalid ID: {0}")]

View File

@@ -1,9 +1,9 @@
use std::borrow::Borrow; use std::borrow::Borrow;
use std::str::FromStr; use std::str::FromStr;
use imbl_value::InternedString;
use regex::Regex; use regex::Regex;
use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde::{Deserialize, Deserializer, Serialize, Serializer};
use yasi::InternedString;
mod action; mod action;
mod gateway; mod gateway;

View File

@@ -2,9 +2,9 @@ use std::borrow::Borrow;
use std::path::Path; use std::path::Path;
use std::str::FromStr; use std::str::FromStr;
use imbl_value::InternedString;
use serde::{Deserialize, Serialize, Serializer}; use serde::{Deserialize, Serialize, Serializer};
use ts_rs::TS; use ts_rs::TS;
use yasi::InternedString;
use crate::{Id, InvalidId, SYSTEM_ID}; use crate::{Id, InvalidId, SYSTEM_ID};

View File

@@ -2,9 +2,9 @@ use std::convert::Infallible;
use std::path::Path; use std::path::Path;
use std::str::FromStr; use std::str::FromStr;
use imbl_value::InternedString;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ts_rs::TS; use ts_rs::TS;
use yasi::InternedString;
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)]
#[ts(type = "string")] #[ts(type = "string")]

View File

@@ -5,8 +5,7 @@ use rpc_toolkit::clap::builder::ValueParserFactory;
use serde::{Deserialize, Deserializer, Serialize}; use serde::{Deserialize, Deserializer, Serialize};
use ts_rs::TS; use ts_rs::TS;
use crate::Id; use crate::{FromStrParser, Id};
use crate::util::FromStrParser;
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] #[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)]
#[ts(export, type = "string")] #[ts(export, type = "string")]

15
core/models/src/lib.rs Normal file
View File

@@ -0,0 +1,15 @@
mod clap;
mod data_url;
mod errors;
mod id;
mod mime;
mod procedure_name;
mod version;
pub use clap::*;
pub use data_url::*;
pub use errors::*;
pub use id::*;
pub use mime::*;
pub use procedure_name::*;
pub use version::*;

View File

@@ -2,21 +2,9 @@
cd "$(dirname "${BASH_SOURCE[0]}")" cd "$(dirname "${BASH_SOURCE[0]}")"
source ./builder-alias.sh
set -ea set -ea
shopt -s expand_aliases shopt -s expand_aliases
PROFILE=${PROFILE:-release}
if [ "${PROFILE}" = "release" ]; then
BUILD_FLAGS="--release"
else
if [ "$PROFILE" != "debug"]; then
>&2 echo "Unknown profile $PROFILE: falling back to debug..."
PROFILE=debug
fi
fi
if [ -z "$ARCH" ]; then if [ -z "$ARCH" ]; then
ARCH=$(uname -m) ARCH=$(uname -m)
fi fi
@@ -34,12 +22,15 @@ cd ..
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')" FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
RUSTFLAGS="" RUSTFLAGS=""
if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable" RUSTFLAGS="--cfg tokio_unstable"
fi fi
alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl'
echo "FEATURES=\"$FEATURES\"" echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\"" echo "RUSTFLAGS=\"$RUSTFLAGS\""
rust-zig-builder cargo test --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features=test,$FEATURES --workspace --locked --lib -- --skip export_bindings_ rust-musl-builder sh -c "apt-get update && apt-get install -y rsync && cd core && cargo test --release --features=test,$FEATURES --workspace --locked --target=$ARCH-unknown-linux-musl -- --skip export_bindings_ && chown \$UID:\$UID target"
rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID /usr/local/cargo" if [ "$(ls -nd core/target | awk '{ print $3 }')" != "$UID" ]; then
rust-musl-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
fi

View File

@@ -4,18 +4,18 @@ description = "The core of StartOS"
documentation = "https://docs.rs/start-os" documentation = "https://docs.rs/start-os"
edition = "2024" edition = "2024"
keywords = [ keywords = [
"self-hosted",
"raspberry-pi",
"privacy",
"bitcoin", "bitcoin",
"full-node", "full-node",
"lightning", "lightning",
"privacy",
"raspberry-pi",
"self-hosted",
] ]
license = "MIT"
name = "start-os" name = "start-os"
readme = "README.md" readme = "README.md"
repository = "https://github.com/Start9Labs/start-os" repository = "https://github.com/Start9Labs/start-os"
version = "0.4.0-alpha.16" # VERSION_BUMP version = "0.4.0-alpha.10" # VERSION_BUMP
license = "MIT"
[lib] [lib]
name = "startos" name = "startos"
@@ -23,95 +23,91 @@ path = "src/lib.rs"
[[bin]] [[bin]]
name = "startbox" name = "startbox"
path = "src/main/startbox.rs" path = "src/main.rs"
[[bin]] [[bin]]
name = "start-cli" name = "start-cli"
path = "src/main/start-cli.rs" path = "src/main.rs"
[[bin]] [[bin]]
name = "start-container" name = "containerbox"
path = "src/main/start-container.rs" path = "src/main.rs"
[[bin]] [[bin]]
name = "registrybox" name = "registrybox"
path = "src/main/registrybox.rs" path = "src/main.rs"
[[bin]] [[bin]]
name = "tunnelbox" name = "tunnelbox"
path = "src/main/tunnelbox.rs" path = "src/main.rs"
[features] [features]
arti = [ cli = ["cli-startd", "cli-registry", "cli-tunnel"]
"arti-client", cli-container = ["procfs", "pty-process"]
"safelog", cli-registry = []
"tor-cell", cli-startd = []
"tor-hscrypto", cli-tunnel = []
"tor-hsservice", default = ["cli", "startd", "registry", "cli-container", "tunnel"]
"tor-keymgr",
"tor-llcrypto",
"tor-proto",
"tor-rtcompat",
]
console = ["console-subscriber", "tokio/tracing"]
default = []
dev = [] dev = []
docker = []
registry = []
startd = []
test = [] test = []
unstable = ["backtrace-on-stack-overflow"] tunnel = []
unstable = ["console-subscriber", "tokio/tracing"]
[dependencies] [dependencies]
aes = { version = "0.7.5", features = ["ctr"] }
arti-client = { version = "0.33", features = [ arti-client = { version = "0.33", features = [
"compression", "compression",
"ephemeral-keystore",
"experimental-api", "experimental-api",
"onion-service-client",
"onion-service-service",
"rustls", "rustls",
"static", "static",
"tokio", "tokio",
], default-features = false, git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } "ephemeral-keystore",
"onion-service-client",
"onion-service-service",
], default-features = false, git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
aes = { version = "0.7.5", features = ["ctr"] }
async-acme = { version = "0.6.0", git = "https://github.com/dr-bonez/async-acme.git", features = [ async-acme = { version = "0.6.0", git = "https://github.com/dr-bonez/async-acme.git", features = [
"use_rustls", "use_rustls",
"use_tokio", "use_tokio",
] } ] }
async-compression = { version = "0.4.32", features = [ async-compression = { version = "0.4.4", features = [
"brotli",
"gzip", "gzip",
"brotli",
"tokio", "tokio",
"zstd",
] } ] }
async-stream = "0.3.5" async-stream = "0.3.5"
async-trait = "0.1.74" async-trait = "0.1.74"
axum = { version = "0.8.4", features = ["ws", "http2"] } axum = { version = "0.8.4", features = ["ws"] }
backtrace-on-stack-overflow = { version = "0.3.0", optional = true } barrage = "0.2.3"
backhand = "0.23.0"
base32 = "0.5.0" base32 = "0.5.0"
base64 = "0.22.1" base64 = "0.22.1"
base64ct = "1.6.0" base64ct = "1.6.0"
basic-cookies = "0.1.4" basic-cookies = "0.1.4"
bech32 = "0.11.0"
blake3 = { version = "1.5.0", features = ["mmap", "rayon"] } blake3 = { version = "1.5.0", features = ["mmap", "rayon"] }
bytes = "1" bytes = "1"
chrono = { version = "0.4.31", features = ["serde"] } chrono = { version = "0.4.31", features = ["serde"] }
clap = { version = "4.4.12", features = ["string"] } clap = { version = "4.4.12", features = ["string"] }
color-eyre = "0.6.2" color-eyre = "0.6.2"
console = "0.16.2" console = "0.16.0"
console-subscriber = { version = "0.5.0", optional = true } console-subscriber = { version = "0.4.1", optional = true }
const_format = "0.2.34" const_format = "0.2.34"
cookie = "0.18.0" cookie = "0.18.0"
cookie_store = "0.22.0" cookie_store = "0.22.0"
curve25519-dalek = "4.1.3"
der = { version = "0.7.9", features = ["derive", "pem"] } der = { version = "0.7.9", features = ["derive", "pem"] }
digest = "0.10.7" digest = "0.10.7"
divrem = "1.0.0" divrem = "1.0.0"
dns-lookup = "3.0.1" dns-lookup = "3.0.0"
ed25519 = { version = "2.2.3", features = ["alloc", "pem", "pkcs8"] } ed25519 = { version = "2.2.3", features = ["pkcs8", "pem", "alloc"] }
ed25519-dalek = { version = "2.2.0", features = [ ed25519-dalek = { version = "2.2.0", features = [
"digest",
"hazmat",
"pkcs8",
"rand_core",
"serde", "serde",
"zeroize", "zeroize",
"rand_core",
"digest",
"pkcs8",
] } ] }
ed25519-dalek-v1 = { package = "ed25519-dalek", version = "1" } ed25519-dalek-v1 = { package = "ed25519-dalek", version = "1" }
exver = { version = "0.2.0", git = "https://github.com/Start9Labs/exver-rs.git", features = [ exver = { version = "0.2.0", git = "https://github.com/Start9Labs/exver-rs.git", features = [
@@ -121,35 +117,36 @@ fd-lock-rs = "0.1.4"
form_urlencoded = "1.2.1" form_urlencoded = "1.2.1"
futures = "0.3.28" futures = "0.3.28"
gpt = "4.1.0" gpt = "4.1.0"
helpers = { path = "../helpers" }
hex = "0.4.3" hex = "0.4.3"
hickory-client = "0.25.2" hickory-client = "0.25.2"
hickory-server = "0.25.2" hickory-server = "0.25.2"
hmac = "0.12.1" hmac = "0.12.1"
http = "1.0.0" http = "1.0.0"
http-body-util = "0.1" http-body-util = "0.1"
hyper = { version = "1.5", features = ["http1", "http2", "server"] } hyper = { version = "1.5", features = ["server", "http1", "http2"] }
hyper-util = { version = "0.1.10", features = [ hyper-util = { version = "0.1.10", features = [
"http1",
"http2",
"server", "server",
"server-auto", "server-auto",
"server-graceful", "server-graceful",
"service", "service",
"http1",
"http2",
"tokio", "tokio",
] } ] }
id-pool = { version = "0.2.2", default-features = false, features = [ id-pool = { version = "0.2.2", default-features = false, features = [
"serde", "serde",
"u16", "u16",
] } ] }
iddqd = "0.3.14"
imbl = { version = "6", features = ["serde", "small-chunks"] } imbl = { version = "6", features = ["serde", "small-chunks"] }
imbl-value = { version = "0.4.3", features = ["ts-rs"] } imbl-value = { version = "0.4.3", features = ["ts-rs"] }
include_dir = { version = "0.7.3", features = ["metadata"] } include_dir = { version = "0.7.3", features = ["metadata"] }
indexmap = { version = "2.0.2", features = ["serde"] } indexmap = { version = "2.0.2", features = ["serde"] }
indicatif = { version = "0.18.3", features = ["tokio"] } indicatif = { version = "0.18.0", features = ["tokio"] }
inotify = "0.11.0" inotify = "0.11.0"
integer-encoding = { version = "4.0.0", features = ["tokio_async"] } integer-encoding = { version = "4.0.0", features = ["tokio_async"] }
ipnet = { version = "2.8.0", features = ["serde"] } ipnet = { version = "2.8.0", features = ["serde"] }
iroh = { version = "0.91.2", features = ["discovery-pkarr-dht"] }
isocountry = "0.3.2" isocountry = "0.3.2"
itertools = "0.14.0" itertools = "0.14.0"
jaq-core = "0.10.1" jaq-core = "0.10.1"
@@ -158,21 +155,21 @@ josekit = "0.10.3"
jsonpath_lib = { git = "https://github.com/Start9Labs/jsonpath.git" } jsonpath_lib = { git = "https://github.com/Start9Labs/jsonpath.git" }
lazy_async_pool = "0.3.3" lazy_async_pool = "0.3.3"
lazy_format = "2.0" lazy_format = "2.0"
lazy_static = "1.4.0" lazy_static = "1.5.0"
lettre = { version = "0.11.18", default-features = false, features = [ lettre = { version = "0.11.18", default-features = false, features = [
"aws-lc-rs",
"builder",
"hostname",
"pool",
"rustls-platform-verifier",
"smtp-transport", "smtp-transport",
"pool",
"hostname",
"builder",
"tokio1-rustls", "tokio1-rustls",
"rustls-platform-verifier",
"aws-lc-rs",
] } ] }
libc = "0.2.149" libc = "0.2.149"
log = "0.4.20" log = "0.4.20"
mbrman = "0.6.0"
miette = { version = "7.6.0", features = ["fancy"] }
mio = "1" mio = "1"
mbrman = "0.6.0"
models = { version = "*", path = "../models" }
new_mime_guess = "4" new_mime_guess = "4"
nix = { version = "0.30.1", features = [ nix = { version = "0.30.1", features = [
"fs", "fs",
@@ -185,8 +182,8 @@ nix = { version = "0.30.1", features = [
] } ] }
nom = "8.0.0" nom = "8.0.0"
num = "0.4.1" num = "0.4.1"
num_cpus = "1.16.0"
num_enum = "0.7.0" num_enum = "0.7.0"
num_cpus = "1.16.0"
once_cell = "1.19.0" once_cell = "1.19.0"
openssh-keys = "0.6.2" openssh-keys = "0.6.2"
openssl = { version = "0.10.57", features = ["vendored"] } openssl = { version = "0.10.57", features = ["vendored"] }
@@ -198,83 +195,83 @@ pbkdf2 = "0.12.2"
pin-project = "1.1.3" pin-project = "1.1.3"
pkcs8 = { version = "0.10.2", features = ["std"] } pkcs8 = { version = "0.10.2", features = ["std"] }
prettytable-rs = "0.10.0" prettytable-rs = "0.10.0"
procfs = { version = "0.17.0", optional = true }
proptest = "1.3.1" proptest = "1.3.1"
proptest-derive = "0.7.0" proptest-derive = "0.6.0"
pty-process = { version = "0.5.1", optional = true }
qrcode = "0.14.1" qrcode = "0.14.1"
r3bl_tui = "0.7.6"
rand = "0.9.2" rand = "0.9.2"
regex = "1.10.2" regex = "1.10.2"
reqwest = { version = "0.12.25", features = [ reqwest = { version = "0.12.4", features = ["stream", "json", "socks"] }
"json",
"socks",
"stream",
"http2",
] }
reqwest_cookie_store = "0.9.0" reqwest_cookie_store = "0.9.0"
rpassword = "7.2.0" rpassword = "7.2.0"
rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git", branch = "master" }
rust-argon2 = "3.0.0" rust-argon2 = "3.0.0"
rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git" } rustyline-async = "0.4.1"
safelog = { version = "0.4.8", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } safelog = { version = "0.4.8", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
semver = { version = "1.0.20", features = ["serde"] } semver = { version = "1.0.20", features = ["serde"] }
serde = { version = "1.0", features = ["derive", "rc"] } serde = { version = "1.0", features = ["derive", "rc"] }
serde_cbor = { package = "ciborium", version = "0.2.1" } serde_cbor = { package = "ciborium", version = "0.2.1" }
serde_json = "1.0" serde_json = "1.0"
serde_toml = { package = "toml", version = "0.9.9+spec-1.0.0" } serde_toml = { package = "toml", version = "0.9.5" }
serde_urlencoded = "0.7"
serde_with = { version = "3.4.0", features = ["macros", "json"] }
serde_yaml = { package = "serde_yml", version = "0.0.12" } serde_yaml = { package = "serde_yml", version = "0.0.12" }
sha-crypt = "0.5.0" sha-crypt = "0.5.0"
sha2 = "0.10.2" sha2 = "0.10.2"
shell-words = "1"
signal-hook = "0.3.17" signal-hook = "0.3.17"
simple-logging = "2.0.2"
socket2 = { version = "0.6.0", features = ["all"] } socket2 = { version = "0.6.0", features = ["all"] }
socks5-impl = { version = "0.7.2", features = ["client", "server"] } socks5-impl = { version = "0.7.2", features = ["server"] }
sqlx = { version = "0.8.6", features = [ sqlx = { version = "0.8.6", features = [
"postgres",
"runtime-tokio-rustls", "runtime-tokio-rustls",
"postgres",
], default-features = false } ], default-features = false }
sscanf = "0.4.1" sscanf = "0.4.1"
ssh-key = { version = "0.6.2", features = ["ed25519"] } ssh-key = { version = "0.6.2", features = ["ed25519"] }
tar = "0.4.40" tar = "0.4.40"
termion = "4.0.5" termion = "4.0.5"
textwrap = "0.16.1"
thiserror = "2.0.12" thiserror = "2.0.12"
textwrap = "0.16.1"
tokio = { version = "1.38.1", features = ["full"] } tokio = { version = "1.38.1", features = ["full"] }
tokio-rustls = "0.26.4" tokio-rustls = "0.26.0"
tokio-stream = { version = "0.1.14", features = ["io-util", "net", "sync"] } tokio-stream = { version = "0.1.14", features = ["io-util", "sync", "net"] }
tokio-tar = { git = "https://github.com/dr-bonez/tokio-tar.git" } tokio-tar = { git = "https://github.com/dr-bonez/tokio-tar.git" }
tokio-tungstenite = { version = "0.26.2", features = ["native-tls", "url"] } tokio-tungstenite = { version = "0.27.0", features = ["native-tls", "url"] }
tokio-util = { version = "0.7.9", features = ["io"] } tokio-util = { version = "0.7.9", features = ["io"] }
tor-cell = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } tor-cell = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
tor-hscrypto = { version = "0.33", features = [ tor-hscrypto = { version = "0.33", features = [
"full", "full",
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } ], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
tor-hsservice = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } tor-hsservice = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
tor-keymgr = { version = "0.33", features = [ tor-keymgr = { version = "0.33", features = [
"ephemeral-keystore", "ephemeral-keystore",
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } ], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
tor-llcrypto = { version = "0.33", features = [ tor-llcrypto = { version = "0.33", features = [
"full", "full",
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } ], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
tor-proto = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } tor-proto = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
tor-rtcompat = { version = "0.33", features = [ tor-rtcompat = { version = "0.33", features = [
"rustls",
"tokio", "tokio",
], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } "rustls",
torut = "0.2.1" ], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit" }
tower-service = "0.3.3" tower-service = "0.3.3"
tracing = "0.1.39" tracing = "0.1.39"
tracing-error = "0.2.0" tracing-error = "0.2.0"
tracing-futures = "0.2.5"
tracing-journald = "0.3.0" tracing-journald = "0.3.0"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
ts-rs = "9.0.1" ts-rs = "9.0.1"
typed-builder = "0.23.2" typed-builder = "0.21.0"
unix-named-pipe = "0.2.0"
url = { version = "2.4.1", features = ["serde"] } url = { version = "2.4.1", features = ["serde"] }
urlencoding = "2.1.3"
uuid = { version = "1.4.1", features = ["v4"] } uuid = { version = "1.4.1", features = ["v4"] }
visit-rs = "0.1.1"
x25519-dalek = { version = "2.0.1", features = ["static_secrets"] }
zbus = "5.1.1" zbus = "5.1.1"
zeroize = "1.6.0"
[target.'cfg(target_os = "linux")'.dependencies] rustls = "0.23.20"
procfs = "0.18.0" rustls-pki-types = { version = "1.10.1", features = ["alloc"] }
pty-process = "0.5.1"
[profile.test] [profile.test]
opt-level = 3 opt-level = 3

View File

@@ -4,7 +4,7 @@ Description=StartOS Registry
[Service] [Service]
Type=simple Type=simple
Environment=RUST_LOG=startos=debug,patch_db=warn Environment=RUST_LOG=startos=debug,patch_db=warn
ExecStart=/usr/bin/start-registryd ExecStart=/usr/local/bin/registry
Restart=always Restart=always
RestartSec=3 RestartSec=3
ManagedOOMPreference=avoid ManagedOOMPreference=avoid

View File

@@ -1,4 +1,3 @@
use std::collections::BTreeMap;
use std::time::SystemTime; use std::time::SystemTime;
use imbl_value::InternedString; use imbl_value::InternedString;
@@ -7,7 +6,7 @@ use openssl::x509::X509;
use crate::db::model::DatabaseModel; use crate::db::model::DatabaseModel;
use crate::hostname::{Hostname, generate_hostname, generate_id}; use crate::hostname::{Hostname, generate_hostname, generate_id};
use crate::net::ssl::{gen_nistp256, make_root_cert}; use crate::net::ssl::{generate_key, make_root_cert};
use crate::net::tor::TorSecretKey; use crate::net::tor::TorSecretKey;
use crate::prelude::*; use crate::prelude::*;
use crate::util::serde::Pem; use crate::util::serde::Pem;
@@ -37,7 +36,7 @@ impl AccountInfo {
let server_id = generate_id(); let server_id = generate_id();
let hostname = generate_hostname(); let hostname = generate_hostname();
let tor_key = vec![TorSecretKey::generate()]; let tor_key = vec![TorSecretKey::generate()];
let root_ca_key = gen_nistp256()?; let root_ca_key = generate_key()?;
let root_ca_cert = make_root_cert(&root_ca_key, &hostname, start_time)?; let root_ca_cert = make_root_cert(&root_ca_key, &hostname, start_time)?;
let ssh_key = ssh_key::PrivateKey::from(ssh_key::private::Ed25519Keypair::random( let ssh_key = ssh_key::PrivateKey::from(ssh_key::private::Ed25519Keypair::random(
&mut ssh_key::rand_core::OsRng::default(), &mut ssh_key::rand_core::OsRng::default(),
@@ -108,7 +107,6 @@ impl AccountInfo {
.map(|tor_key| tor_key.onion_address()) .map(|tor_key| tor_key.onion_address())
.collect(), .collect(),
)?; )?;
server_info.as_password_hash_mut().ser(&self.password)?;
db.as_private_mut().as_password_mut().ser(&self.password)?; db.as_private_mut().as_password_mut().ser(&self.password)?;
db.as_private_mut() db.as_private_mut()
.as_ssh_privkey_mut() .as_ssh_privkey_mut()
@@ -121,20 +119,12 @@ impl AccountInfo {
key_store.as_onion_mut().insert_key(tor_key)?; key_store.as_onion_mut().insert_key(tor_key)?;
} }
let cert_store = key_store.as_local_certs_mut(); let cert_store = key_store.as_local_certs_mut();
if cert_store.as_root_cert().de()?.0 != self.root_ca_cert { cert_store
cert_store .as_root_key_mut()
.as_root_key_mut() .ser(Pem::new_ref(&self.root_ca_key))?;
.ser(Pem::new_ref(&self.root_ca_key))?; cert_store
cert_store .as_root_cert_mut()
.as_root_cert_mut() .ser(Pem::new_ref(&self.root_ca_cert))?;
.ser(Pem::new_ref(&self.root_ca_cert))?;
let int_key = crate::net::ssl::gen_nistp256()?;
let int_cert =
crate::net::ssl::make_int_cert((&self.root_ca_key, &self.root_ca_cert), &int_key)?;
cert_store.as_int_key_mut().ser(&Pem(int_key))?;
cert_store.as_int_cert_mut().ser(&Pem(int_cert))?;
cert_store.as_leaves_mut().ser(&BTreeMap::new())?;
}
Ok(()) Ok(())
} }

View File

@@ -1,13 +1,14 @@
use std::fmt; use std::fmt;
use clap::{CommandFactory, FromArgMatches, Parser}; use clap::{CommandFactory, FromArgMatches, Parser};
pub use models::ActionId;
use models::{PackageId, ReplayId};
use qrcode::QrCode; use qrcode::QrCode;
use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async}; use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::instrument; use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
pub use crate::ActionId;
use crate::context::{CliContext, RpcContext}; use crate::context::{CliContext, RpcContext};
use crate::db::model::package::TaskSeverity; use crate::db::model::package::TaskSeverity;
use crate::prelude::*; use crate::prelude::*;
@@ -15,7 +16,6 @@ use crate::rpc_continuations::Guid;
use crate::util::serde::{ use crate::util::serde::{
HandlerExtSerde, StdinDeserializable, WithIoFormat, display_serializable, HandlerExtSerde, StdinDeserializable, WithIoFormat, display_serializable,
}; };
use crate::{PackageId, ReplayId};
pub fn action_api<C: Context>() -> ParentHandler<C> { pub fn action_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
@@ -52,7 +52,6 @@ pub fn action_api<C: Context>() -> ParentHandler<C> {
#[ts(export)] #[ts(export)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct ActionInput { pub struct ActionInput {
#[serde(default)]
pub event_id: Guid, pub event_id: Guid,
#[ts(type = "Record<string, unknown>")] #[ts(type = "Record<string, unknown>")]
pub spec: Value, pub spec: Value,

View File

@@ -14,8 +14,8 @@ use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
use crate::context::{CliContext, RpcContext}; use crate::context::{CliContext, RpcContext};
use crate::middleware::auth::session::{ use crate::middleware::auth::{
AsLogoutSessionId, HasLoggedOutSessions, HashSessionToken, LoginRes, SessionAuthContext, AsLogoutSessionId, AuthContext, HasLoggedOutSessions, HashSessionToken, LoginRes,
}; };
use crate::prelude::*; use crate::prelude::*;
use crate::util::crypto::EncryptedWire; use crate::util::crypto::EncryptedWire;
@@ -110,7 +110,7 @@ impl std::str::FromStr for PasswordType {
}) })
} }
} }
pub fn auth<C: Context, AC: SessionAuthContext>() -> ParentHandler<C> pub fn auth<C: Context, AC: AuthContext>() -> ParentHandler<C>
where where
CliContext: CallRemote<AC>, CliContext: CallRemote<AC>,
{ {
@@ -173,7 +173,7 @@ fn gen_pwd() {
} }
#[instrument(skip_all)] #[instrument(skip_all)]
async fn cli_login<C: SessionAuthContext>( async fn cli_login<C: AuthContext>(
HandlerArgs { HandlerArgs {
context: ctx, context: ctx,
parent_method, parent_method,
@@ -220,14 +220,14 @@ pub fn check_password(hash: &str, password: &str) -> Result<(), Error> {
pub struct LoginParams { pub struct LoginParams {
password: String, password: String,
#[ts(skip)] #[ts(skip)]
#[serde(rename = "__Auth_userAgent")] // from Auth middleware #[serde(rename = "__auth_userAgent")] // from Auth middleware
user_agent: Option<String>, user_agent: Option<String>,
#[serde(default)] #[serde(default)]
ephemeral: bool, ephemeral: bool,
} }
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn login_impl<C: SessionAuthContext>( pub async fn login_impl<C: AuthContext>(
ctx: C, ctx: C,
LoginParams { LoginParams {
password, password,
@@ -279,11 +279,11 @@ pub async fn login_impl<C: SessionAuthContext>(
#[command(rename_all = "kebab-case")] #[command(rename_all = "kebab-case")]
pub struct LogoutParams { pub struct LogoutParams {
#[ts(skip)] #[ts(skip)]
#[serde(rename = "__Auth_session")] // from Auth middleware #[serde(rename = "__auth_session")] // from Auth middleware
session: InternedString, session: InternedString,
} }
pub async fn logout<C: SessionAuthContext>( pub async fn logout<C: AuthContext>(
ctx: C, ctx: C,
LogoutParams { session }: LogoutParams, LogoutParams { session }: LogoutParams,
) -> Result<Option<HasLoggedOutSessions>, Error> { ) -> Result<Option<HasLoggedOutSessions>, Error> {
@@ -312,7 +312,7 @@ pub struct SessionList {
sessions: Sessions, sessions: Sessions,
} }
pub fn session<C: Context, AC: SessionAuthContext>() -> ParentHandler<C> pub fn session<C: Context, AC: AuthContext>() -> ParentHandler<C>
where where
CliContext: CallRemote<AC>, CliContext: CallRemote<AC>,
{ {
@@ -373,13 +373,13 @@ fn display_sessions(params: WithIoFormat<ListParams>, arg: SessionList) -> Resul
pub struct ListParams { pub struct ListParams {
#[arg(skip)] #[arg(skip)]
#[ts(skip)] #[ts(skip)]
#[serde(rename = "__Auth_session")] // from Auth middleware #[serde(rename = "__auth_session")] // from Auth middleware
session: Option<InternedString>, session: Option<InternedString>,
} }
// #[command(display(display_sessions))] // #[command(display(display_sessions))]
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn list<C: SessionAuthContext>( pub async fn list<C: AuthContext>(
ctx: C, ctx: C,
ListParams { session, .. }: ListParams, ListParams { session, .. }: ListParams,
) -> Result<SessionList, Error> { ) -> Result<SessionList, Error> {
@@ -418,10 +418,7 @@ pub struct KillParams {
} }
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn kill<C: SessionAuthContext>( pub async fn kill<C: AuthContext>(ctx: C, KillParams { ids }: KillParams) -> Result<(), Error> {
ctx: C,
KillParams { ids }: KillParams,
) -> Result<(), Error> {
HasLoggedOutSessions::new(ids.into_iter().map(KillSessionId::new), &ctx).await?; HasLoggedOutSessions::new(ids.into_iter().map(KillSessionId::new), &ctx).await?;
Ok(()) Ok(())
} }
@@ -477,19 +474,30 @@ pub async fn reset_password_impl(
let old_password = old_password.unwrap_or_default().decrypt(&ctx)?; let old_password = old_password.unwrap_or_default().decrypt(&ctx)?;
let new_password = new_password.unwrap_or_default().decrypt(&ctx)?; let new_password = new_password.unwrap_or_default().decrypt(&ctx)?;
let account = ctx.account.mutate(|account| { let mut account = ctx.account.write().await;
if !argon2::verify_encoded(&account.password, old_password.as_bytes()) if !argon2::verify_encoded(&account.password, old_password.as_bytes())
.with_kind(crate::ErrorKind::IncorrectPassword)? .with_kind(crate::ErrorKind::IncorrectPassword)?
{ {
return Err(Error::new( return Err(Error::new(
eyre!("Incorrect Password"), eyre!("Incorrect Password"),
crate::ErrorKind::IncorrectPassword, crate::ErrorKind::IncorrectPassword,
)); ));
} }
account.set_password(&new_password)?; account.set_password(&new_password)?;
Ok(account.clone()) let account_password = &account.password;
})?; let account = account.clone();
ctx.db.mutate(|d| account.save(d)).await.result ctx.db
.mutate(|d| {
d.as_public_mut()
.as_server_info_mut()
.as_password_hash_mut()
.ser(account_password)?;
account.save(d)?;
Ok(())
})
.await
.result
} }
#[instrument(skip_all)] #[instrument(skip_all)]

View File

@@ -5,7 +5,9 @@ use std::sync::Arc;
use chrono::Utc; use chrono::Utc;
use clap::Parser; use clap::Parser;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use helpers::AtomicFile;
use imbl::OrdSet; use imbl::OrdSet;
use models::PackageId;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::io::AsyncWriteExt; use tokio::io::AsyncWriteExt;
use tracing::instrument; use tracing::instrument;
@@ -13,7 +15,6 @@ use ts_rs::TS;
use super::PackageBackupReport; use super::PackageBackupReport;
use super::target::{BackupTargetId, PackageBackupInfo}; use super::target::{BackupTargetId, PackageBackupInfo};
use crate::PackageId;
use crate::backup::os::OsBackup; use crate::backup::os::OsBackup;
use crate::backup::{BackupReport, ServerBackupReport}; use crate::backup::{BackupReport, ServerBackupReport};
use crate::context::RpcContext; use crate::context::RpcContext;
@@ -22,10 +23,10 @@ use crate::db::model::{Database, DatabaseModel};
use crate::disk::mount::backup::BackupMountGuard; use crate::disk::mount::backup::BackupMountGuard;
use crate::disk::mount::filesystem::ReadWrite; use crate::disk::mount::filesystem::ReadWrite;
use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard};
use crate::middleware::auth::session::SessionAuthContext; use crate::middleware::auth::AuthContext;
use crate::notifications::{NotificationLevel, notify}; use crate::notifications::{NotificationLevel, notify};
use crate::prelude::*; use crate::prelude::*;
use crate::util::io::{AtomicFile, dir_copy}; use crate::util::io::dir_copy;
use crate::util::serde::IoFormat; use crate::util::serde::IoFormat;
use crate::version::VersionT; use crate::version::VersionT;
@@ -311,14 +312,19 @@ async fn perform_backup(
let ui = ctx.db.peek().await.into_public().into_ui().de()?; let ui = ctx.db.peek().await.into_public().into_ui().de()?;
let mut os_backup_file = let mut os_backup_file =
AtomicFile::new(backup_guard.path().join("os-backup.json"), None::<PathBuf>).await?; AtomicFile::new(backup_guard.path().join("os-backup.json"), None::<PathBuf>)
.await
.with_kind(ErrorKind::Filesystem)?;
os_backup_file os_backup_file
.write_all(&IoFormat::Json.to_vec(&OsBackup { .write_all(&IoFormat::Json.to_vec(&OsBackup {
account: ctx.account.peek(|a| a.clone()), account: ctx.account.read().await.clone(),
ui, ui,
})?) })?)
.await?; .await?;
os_backup_file.save().await?; os_backup_file
.save()
.await
.with_kind(ErrorKind::Filesystem)?;
let luks_folder_old = backup_guard.path().join("luks.old"); let luks_folder_old = backup_guard.path().join("luks.old");
if tokio::fs::metadata(&luks_folder_old).await.is_ok() { if tokio::fs::metadata(&luks_folder_old).await.is_ok() {
@@ -336,7 +342,7 @@ async fn perform_backup(
let timestamp = Utc::now(); let timestamp = Utc::now();
backup_guard.unencrypted_metadata.version = crate::version::Current::default().semver().into(); backup_guard.unencrypted_metadata.version = crate::version::Current::default().semver().into();
backup_guard.unencrypted_metadata.hostname = ctx.account.peek(|a| a.hostname.clone()); backup_guard.unencrypted_metadata.hostname = ctx.account.read().await.hostname.clone();
backup_guard.unencrypted_metadata.timestamp = timestamp.clone(); backup_guard.unencrypted_metadata.timestamp = timestamp.clone();
backup_guard.metadata.version = crate::version::Current::default().semver().into(); backup_guard.metadata.version = crate::version::Current::default().semver().into();
backup_guard.metadata.timestamp = Some(timestamp); backup_guard.metadata.timestamp = Some(timestamp);

View File

@@ -1,12 +1,15 @@
use std::collections::BTreeMap; use std::collections::BTreeMap;
use chrono::{DateTime, Utc};
use models::{HostId, PackageId};
use reqwest::Url;
use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async}; use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::PackageId;
use crate::context::CliContext; use crate::context::CliContext;
#[allow(unused_imports)] #[allow(unused_imports)]
use crate::prelude::*; use crate::prelude::*;
use crate::util::serde::{Base32, Base64};
pub mod backup_bulk; pub mod backup_bulk;
pub mod os; pub mod os;
@@ -55,3 +58,13 @@ pub fn package_backup<C: Context>() -> ParentHandler<C> {
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
#[derive(Deserialize, Serialize)]
struct BackupMetadata {
pub timestamp: DateTime<Utc>,
#[serde(default)]
pub network_keys: BTreeMap<HostId, Base64<[u8; 32]>>,
#[serde(default)]
pub tor_keys: BTreeMap<HostId, Base32<[u8; 64]>>, // DEPRECATED
pub registry: Option<Url>,
}

View File

@@ -3,6 +3,7 @@ use std::sync::Arc;
use clap::Parser; use clap::Parser;
use futures::{StreamExt, stream}; use futures::{StreamExt, stream};
use models::PackageId;
use patch_db::json_ptr::ROOT; use patch_db::json_ptr::ROOT;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::sync::Mutex; use tokio::sync::Mutex;
@@ -10,6 +11,7 @@ use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
use super::target::BackupTargetId; use super::target::BackupTargetId;
use crate::PLATFORM;
use crate::backup::os::OsBackup; use crate::backup::os::OsBackup;
use crate::context::setup::SetupResult; use crate::context::setup::SetupResult;
use crate::context::{RpcContext, SetupContext}; use crate::context::{RpcContext, SetupContext};
@@ -25,7 +27,6 @@ use crate::service::service_map::DownloadInstallFuture;
use crate::setup::SetupExecuteProgress; use crate::setup::SetupExecuteProgress;
use crate::system::sync_kiosk; use crate::system::sync_kiosk;
use crate::util::serde::IoFormat; use crate::util::serde::IoFormat;
use crate::{PLATFORM, PackageId};
#[derive(Deserialize, Serialize, Parser, TS)] #[derive(Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]

View File

@@ -9,6 +9,7 @@ use digest::OutputSizeUser;
use digest::generic_array::GenericArray; use digest::generic_array::GenericArray;
use exver::Version; use exver::Version;
use imbl_value::InternedString; use imbl_value::InternedString;
use models::{FromStrParser, PackageId};
use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async}; use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sha2::Sha256; use sha2::Sha256;
@@ -17,7 +18,6 @@ use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
use self::cifs::CifsBackupTarget; use self::cifs::CifsBackupTarget;
use crate::PackageId;
use crate::context::{CliContext, RpcContext}; use crate::context::{CliContext, RpcContext};
use crate::db::model::DatabaseModel; use crate::db::model::DatabaseModel;
use crate::disk::mount::backup::BackupMountGuard; use crate::disk::mount::backup::BackupMountGuard;
@@ -27,10 +27,10 @@ use crate::disk::mount::filesystem::{FileSystem, MountType, ReadWrite};
use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard};
use crate::disk::util::PartitionInfo; use crate::disk::util::PartitionInfo;
use crate::prelude::*; use crate::prelude::*;
use crate::util::VersionString;
use crate::util::serde::{ use crate::util::serde::{
HandlerExtSerde, WithIoFormat, deserialize_from_str, display_serializable, serialize_display, HandlerExtSerde, WithIoFormat, deserialize_from_str, display_serializable, serialize_display,
}; };
use crate::util::{FromStrParser, VersionString};
pub mod cifs; pub mod cifs;
@@ -301,14 +301,14 @@ lazy_static::lazy_static! {
Mutex::new(BTreeMap::new()); Mutex::new(BTreeMap::new());
} }
#[derive(Deserialize, Serialize, Parser)] #[derive(Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")] #[command(rename_all = "kebab-case")]
pub struct MountParams { pub struct MountParams {
target_id: BackupTargetId, target_id: BackupTargetId,
#[arg(long)] #[arg(long)]
server_id: Option<String>, server_id: Option<String>,
password: String, // TODO: rpassword password: String,
#[arg(long)] #[arg(long)]
allow_partial: bool, allow_partial: bool,
} }

View File

@@ -1,85 +1,91 @@
use std::collections::{BTreeMap, VecDeque}; use std::collections::VecDeque;
use std::ffi::OsString; use std::ffi::OsString;
use std::path::Path; use std::path::Path;
#[cfg(feature = "cli-container")]
pub mod container_cli; pub mod container_cli;
pub mod deprecated; pub mod deprecated;
#[cfg(any(feature = "registry", feature = "cli-registry"))]
pub mod registry; pub mod registry;
#[cfg(feature = "cli")]
pub mod start_cli; pub mod start_cli;
#[cfg(feature = "startd")]
pub mod start_init; pub mod start_init;
#[cfg(feature = "startd")]
pub mod startd; pub mod startd;
#[cfg(any(feature = "tunnel", feature = "cli-tunnel"))]
pub mod tunnel; pub mod tunnel;
#[derive(Default)] fn select_executable(name: &str) -> Option<fn(VecDeque<OsString>)> {
pub struct MultiExecutable(BTreeMap<&'static str, fn(VecDeque<OsString>)>); match name {
impl MultiExecutable { #[cfg(feature = "startd")]
pub fn enable_startd(&mut self) -> &mut Self { "startd" => Some(startd::main),
self.0.insert("startd", startd::main); #[cfg(feature = "startd")]
self.0 "embassyd" => Some(|_| deprecated::renamed("embassyd", "startd")),
.insert("embassyd", |_| deprecated::renamed("embassyd", "startd")); #[cfg(feature = "startd")]
self.0 "embassy-init" => Some(|_| deprecated::removed("embassy-init")),
.insert("embassy-init", |_| deprecated::removed("embassy-init"));
self
}
pub fn enable_start_cli(&mut self) -> &mut Self {
self.0.insert("start-cli", start_cli::main);
self.0.insert("embassy-cli", |_| {
deprecated::renamed("embassy-cli", "start-cli")
});
self.0
.insert("embassy-sdk", |_| deprecated::removed("embassy-sdk"));
self
}
pub fn enable_start_container(&mut self) -> &mut Self {
self.0.insert("start-container", container_cli::main);
self
}
pub fn enable_start_registryd(&mut self) -> &mut Self {
self.0.insert("start-registryd", registry::main);
self
}
pub fn enable_start_registry(&mut self) -> &mut Self {
self.0.insert("start-registry", registry::cli);
self
}
pub fn enable_start_tunneld(&mut self) -> &mut Self {
self.0.insert("start-tunneld", tunnel::main);
self
}
pub fn enable_start_tunnel(&mut self) -> &mut Self {
self.0.insert("start-tunnel", tunnel::cli);
self
}
fn select_executable(&self, name: &str) -> Option<fn(VecDeque<OsString>)> { #[cfg(feature = "cli-startd")]
self.0.get(&name).copied() "start-cli" => Some(start_cli::main),
} #[cfg(feature = "cli-startd")]
"embassy-cli" => Some(|_| deprecated::renamed("embassy-cli", "start-cli")),
#[cfg(feature = "cli-startd")]
"embassy-sdk" => Some(|_| deprecated::removed("embassy-sdk")),
pub fn execute(&self) { #[cfg(feature = "cli-container")]
let mut args = std::env::args_os().collect::<VecDeque<_>>(); "start-container" => Some(container_cli::main),
for _ in 0..2 {
if let Some(s) = args.pop_front() { #[cfg(feature = "registry")]
if let Some(name) = Path::new(&*s).file_name().and_then(|s| s.to_str()) { "start-registryd" => Some(registry::main),
if name == "--contents" { #[cfg(feature = "cli-registry")]
for name in self.0.keys() { "start-registry" => Some(registry::cli),
println!("{name}");
} #[cfg(feature = "tunnel")]
} "start-tunneld" => Some(tunnel::main),
if let Some(x) = self.select_executable(&name) { #[cfg(feature = "cli-tunnel")]
args.push_front(s); "start-tunnel" => Some(tunnel::cli),
return x(args);
} "contents" => Some(|_| {
} #[cfg(feature = "startd")]
} println!("startd");
} #[cfg(feature = "cli-startd")]
let args = std::env::args().collect::<VecDeque<_>>(); println!("start-cli");
eprintln!( #[cfg(feature = "cli-container")]
"unknown executable: {}", println!("start-container");
args.get(1) #[cfg(feature = "registry")]
.or_else(|| args.get(0)) println!("start-registryd");
.map(|s| s.as_str()) #[cfg(feature = "cli-registry")]
.unwrap_or("N/A") println!("start-registry");
); #[cfg(feature = "tunnel")]
std::process::exit(1); println!("start-tunneld");
#[cfg(feature = "cli-tunnel")]
println!("start-tunnel");
}),
_ => None,
} }
} }
pub fn startbox() {
let mut args = std::env::args_os().collect::<VecDeque<_>>();
for _ in 0..2 {
if let Some(s) = args.pop_front() {
if let Some(x) = Path::new(&*s)
.file_name()
.and_then(|s| s.to_str())
.and_then(|s| select_executable(&s))
{
args.push_front(s);
return x(args);
}
}
}
let args = std::env::args().collect::<VecDeque<_>>();
eprintln!(
"unknown executable: {}",
args.get(1)
.or_else(|| args.get(0))
.map(|s| s.as_str())
.unwrap_or("N/A")
);
std::process::exit(1);
}

View File

@@ -11,17 +11,14 @@ use crate::context::config::ClientConfig;
use crate::net::web_server::{Acceptor, WebServer}; use crate::net::web_server::{Acceptor, WebServer};
use crate::prelude::*; use crate::prelude::*;
use crate::registry::context::{RegistryConfig, RegistryContext}; use crate::registry::context::{RegistryConfig, RegistryContext};
use crate::registry::registry_router;
use crate::util::logger::LOGGER; use crate::util::logger::LOGGER;
#[instrument(skip_all)] #[instrument(skip_all)]
async fn inner_main(config: &RegistryConfig) -> Result<(), Error> { async fn inner_main(config: &RegistryConfig) -> Result<(), Error> {
let server = async { let server = async {
let ctx = RegistryContext::init(config).await?; let ctx = RegistryContext::init(config).await?;
let server = WebServer::new( let mut server = WebServer::new(Acceptor::bind([ctx.listen]).await?);
Acceptor::bind([ctx.listen]).await?, server.serve_registry(ctx.clone());
registry_router(ctx.clone()),
);
let mut shutdown_recv = ctx.shutdown.subscribe(); let mut shutdown_recv = ctx.shutdown.subscribe();

View File

@@ -17,7 +17,7 @@ pub fn main(args: impl IntoIterator<Item = OsString>) {
if let Err(e) = CliApp::new( if let Err(e) = CliApp::new(
|cfg: ClientConfig| Ok(CliContext::init(cfg.load()?)?), |cfg: ClientConfig| Ok(CliContext::init(cfg.load()?)?),
crate::main_api(), crate::expanded_api(),
) )
.run(args) .run(args)
{ {

View File

@@ -11,8 +11,7 @@ use crate::disk::fsck::RepairStrategy;
use crate::disk::main::DEFAULT_PASSWORD; use crate::disk::main::DEFAULT_PASSWORD;
use crate::firmware::{check_for_firmware_update, update_firmware}; use crate::firmware::{check_for_firmware_update, update_firmware};
use crate::init::{InitPhases, STANDBY_MODE_PATH}; use crate::init::{InitPhases, STANDBY_MODE_PATH};
use crate::net::gateway::UpgradableListener; use crate::net::web_server::{UpgradableListener, WebServer};
use crate::net::web_server::WebServer;
use crate::prelude::*; use crate::prelude::*;
use crate::progress::FullProgressTracker; use crate::progress::FullProgressTracker;
use crate::shutdown::Shutdown; use crate::shutdown::Shutdown;
@@ -38,7 +37,7 @@ async fn setup_or_init(
let mut update_phase = handle.add_phase("Updating Firmware".into(), Some(10)); let mut update_phase = handle.add_phase("Updating Firmware".into(), Some(10));
let mut reboot_phase = handle.add_phase("Rebooting".into(), Some(1)); let mut reboot_phase = handle.add_phase("Rebooting".into(), Some(1));
server.serve_ui_for(init_ctx); server.serve_init(init_ctx);
update_phase.start(); update_phase.start();
if let Err(e) = update_firmware(firmware).await { if let Err(e) = update_firmware(firmware).await {
@@ -94,7 +93,7 @@ async fn setup_or_init(
let ctx = InstallContext::init().await?; let ctx = InstallContext::init().await?;
server.serve_ui_for(ctx.clone()); server.serve_install(ctx.clone());
ctx.shutdown ctx.shutdown
.subscribe() .subscribe()
@@ -114,7 +113,7 @@ async fn setup_or_init(
{ {
let ctx = SetupContext::init(server, config)?; let ctx = SetupContext::init(server, config)?;
server.serve_ui_for(ctx.clone()); server.serve_setup(ctx.clone());
let mut shutdown = ctx.shutdown.subscribe(); let mut shutdown = ctx.shutdown.subscribe();
if let Some(shutdown) = shutdown.recv().await.expect("context dropped") { if let Some(shutdown) = shutdown.recv().await.expect("context dropped") {
@@ -150,7 +149,7 @@ async fn setup_or_init(
let init_phases = InitPhases::new(&handle); let init_phases = InitPhases::new(&handle);
let rpc_ctx_phases = InitRpcContextPhases::new(&handle); let rpc_ctx_phases = InitRpcContextPhases::new(&handle);
server.serve_ui_for(init_ctx); server.serve_init(init_ctx);
async { async {
disk_phase.start(); disk_phase.start();
@@ -248,7 +247,7 @@ pub async fn main(
e, e,
)?; )?;
server.serve_ui_for(ctx.clone()); server.serve_diagnostic(ctx.clone());
let shutdown = ctx.shutdown.subscribe().recv().await.unwrap(); let shutdown = ctx.shutdown.subscribe().recv().await.unwrap();

View File

@@ -12,9 +12,8 @@ use tracing::instrument;
use crate::context::config::ServerConfig; use crate::context::config::ServerConfig;
use crate::context::rpc::InitRpcContextPhases; use crate::context::rpc::InitRpcContextPhases;
use crate::context::{DiagnosticContext, InitContext, RpcContext}; use crate::context::{DiagnosticContext, InitContext, RpcContext};
use crate::net::gateway::{BindTcp, SelfContainedNetworkInterfaceListener, UpgradableListener}; use crate::net::gateway::SelfContainedNetworkInterfaceListener;
use crate::net::static_server::refresher; use crate::net::web_server::{Acceptor, UpgradableListener, WebServer};
use crate::net::web_server::{Acceptor, WebServer};
use crate::shutdown::Shutdown; use crate::shutdown::Shutdown;
use crate::system::launch_metrics_task; use crate::system::launch_metrics_task;
use crate::util::io::append_file; use crate::util::io::append_file;
@@ -39,7 +38,7 @@ async fn inner_main(
}; };
tokio::fs::write("/run/startos/initialized", "").await?; tokio::fs::write("/run/startos/initialized", "").await?;
server.serve_ui_for(ctx.clone()); server.serve_main(ctx.clone());
LOGGER.set_logfile(None); LOGGER.set_logfile(None);
handle.complete(); handle.complete();
@@ -48,7 +47,7 @@ async fn inner_main(
let init_ctx = InitContext::init(config).await?; let init_ctx = InitContext::init(config).await?;
let handle = init_ctx.progress.clone(); let handle = init_ctx.progress.clone();
let rpc_ctx_phases = InitRpcContextPhases::new(&handle); let rpc_ctx_phases = InitRpcContextPhases::new(&handle);
server.serve_ui_for(init_ctx); server.serve_init(init_ctx);
let ctx = RpcContext::init( let ctx = RpcContext::init(
&server.acceptor_setter(), &server.acceptor_setter(),
@@ -64,14 +63,14 @@ async fn inner_main(
) )
.await?; .await?;
server.serve_ui_for(ctx.clone()); server.serve_main(ctx.clone());
handle.complete(); handle.complete();
ctx ctx
}; };
let (rpc_ctx, shutdown) = async { let (rpc_ctx, shutdown) = async {
crate::hostname::sync_hostname(&rpc_ctx.account.peek(|a| a.hostname.clone())).await?; crate::hostname::sync_hostname(&rpc_ctx.account.read().await.hostname).await?;
let mut shutdown_recv = rpc_ctx.shutdown.subscribe(); let mut shutdown_recv = rpc_ctx.shutdown.subscribe();
@@ -133,6 +132,8 @@ async fn inner_main(
.await?; .await?;
rpc_ctx.shutdown().await?; rpc_ctx.shutdown().await?;
tracing::info!("RPC Context is dropped");
Ok(shutdown) Ok(shutdown)
} }
@@ -148,10 +149,9 @@ pub fn main(args: impl IntoIterator<Item = OsString>) {
.build() .build()
.expect("failed to initialize runtime"); .expect("failed to initialize runtime");
let res = rt.block_on(async { let res = rt.block_on(async {
let mut server = WebServer::new( let mut server = WebServer::new(Acceptor::bind_upgradable(
Acceptor::bind_upgradable(SelfContainedNetworkInterfaceListener::bind(BindTcp, 80)), SelfContainedNetworkInterfaceListener::bind(80),
refresher(), ));
);
match inner_main(&mut server, &config).await { match inner_main(&mut server, &config).await {
Ok(a) => { Ok(a) => {
server.shutdown().await; server.shutdown().await;
@@ -179,7 +179,7 @@ pub fn main(args: impl IntoIterator<Item = OsString>) {
e, e,
)?; )?;
server.serve_ui_for(ctx.clone()); server.serve_diagnostic(ctx.clone());
let mut shutdown = ctx.shutdown.subscribe(); let mut shutdown = ctx.shutdown.subscribe();

View File

@@ -1,110 +1,29 @@
use std::ffi::OsString; use std::ffi::OsString;
use std::net::SocketAddr;
use std::sync::Arc;
use std::time::Duration;
use clap::Parser; use clap::Parser;
use futures::FutureExt; use futures::FutureExt;
use rpc_toolkit::CliApp; use rpc_toolkit::CliApp;
use tokio::signal::unix::signal; use tokio::signal::unix::signal;
use tracing::instrument; use tracing::instrument;
use visit_rs::Visit;
use crate::context::CliContext; use crate::context::CliContext;
use crate::context::config::ClientConfig; use crate::context::config::ClientConfig;
use crate::net::gateway::{Bind, BindTcp}; use crate::net::web_server::{Acceptor, WebServer};
use crate::net::tls::TlsListener;
use crate::net::web_server::{Accept, Acceptor, MetadataVisitor, WebServer};
use crate::prelude::*; use crate::prelude::*;
use crate::tunnel::context::{TunnelConfig, TunnelContext}; use crate::tunnel::context::{TunnelConfig, TunnelContext};
use crate::tunnel::tunnel_router;
use crate::tunnel::web::TunnelCertHandler;
use crate::util::future::NonDetachingJoinHandle;
use crate::util::logger::LOGGER; use crate::util::logger::LOGGER;
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum WebserverListener {
Http,
Https(SocketAddr),
}
impl<V: MetadataVisitor> Visit<V> for WebserverListener {
fn visit(&self, visitor: &mut V) -> <V as visit_rs::Visitor>::Result {
visitor.visit(self)
}
}
#[instrument(skip_all)] #[instrument(skip_all)]
async fn inner_main(config: &TunnelConfig) -> Result<(), Error> { async fn inner_main(config: &TunnelConfig) -> Result<(), Error> {
let server = async { let server = async {
let ctx = TunnelContext::init(config).await?; let ctx = TunnelContext::init(config).await?;
let listen = ctx.listen; let mut server = WebServer::new(Acceptor::bind([ctx.listen]).await?);
let server = WebServer::new( server.serve_tunnel(ctx.clone());
Acceptor::bind_map_dyn([(WebserverListener::Http, listen)]).await?,
tunnel_router(ctx.clone()),
);
let acceptor_setter = server.acceptor_setter();
let https_db = ctx.db.clone();
let https_thread: NonDetachingJoinHandle<()> = tokio::spawn(async move {
let mut sub = https_db.subscribe("/webserver".parse().unwrap()).await;
while {
while let Err(e) = async {
let webserver = https_db.peek().await.into_webserver();
if webserver.as_enabled().de()? {
let addr = webserver.as_listen().de()?.or_not_found("listen address")?;
acceptor_setter.send_if_modified(|a| {
let key = WebserverListener::Https(addr);
if !a.contains_key(&key) {
match (|| {
Ok::<_, Error>(TlsListener::new(
BindTcp.bind(addr)?,
TunnelCertHandler {
db: https_db.clone(),
crypto_provider: Arc::new(tokio_rustls::rustls::crypto::ring::default_provider()),
},
))
})() {
Ok(l) => {
a.retain(|k, _| *k == WebserverListener::Http);
a.insert(key, l.into_dyn());
true
}
Err(e) => {
tracing::error!("error adding ssl listener: {e}");
tracing::debug!("{e:?}");
false
}
}
} else {
false
}
});
} else {
acceptor_setter.send_if_modified(|a| {
let before = a.len();
a.retain(|k, _| *k == WebserverListener::Http);
a.len() != before
});
}
Ok::<_, Error>(())
}
.await
{
tracing::error!("error updating webserver bind: {e}");
tracing::debug!("{e:?}");
tokio::time::sleep(Duration::from_secs(5)).await;
}
sub.recv().await.is_some()
} {}
})
.into();
let mut shutdown_recv = ctx.shutdown.subscribe(); let mut shutdown_recv = ctx.shutdown.subscribe();
let sig_handler_ctx = ctx; let sig_handler_ctx = ctx;
let sig_handler: NonDetachingJoinHandle<()> = tokio::spawn(async move { let sig_handler = tokio::spawn(async move {
use tokio::signal::unix::SignalKind; use tokio::signal::unix::SignalKind;
futures::future::select_all( futures::future::select_all(
[ [
@@ -129,16 +48,14 @@ async fn inner_main(config: &TunnelConfig) -> Result<(), Error> {
.send(()) .send(())
.map_err(|_| ()) .map_err(|_| ())
.expect("send shutdown signal"); .expect("send shutdown signal");
}) });
.into();
shutdown_recv shutdown_recv
.recv() .recv()
.await .await
.with_kind(crate::ErrorKind::Unknown)?; .with_kind(crate::ErrorKind::Unknown)?;
sig_handler.wait_for_abort().await.with_kind(ErrorKind::Unknown)?; sig_handler.abort();
https_thread.wait_for_abort().await.with_kind(ErrorKind::Unknown)?;
Ok::<_, Error>(server) Ok::<_, Error>(server)
} }

View File

@@ -1,7 +1,7 @@
use helpers::Callback; use helpers::Callback;
use itertools::Itertools; use itertools::Itertools;
use jsonpath_lib::Compiled; use jsonpath_lib::Compiled;
use crate::PackageId; use models::PackageId;
use serde_json::Value; use serde_json::Value;
use crate::context::RpcContext; use crate::context::RpcContext;

View File

@@ -6,7 +6,6 @@ use std::sync::Arc;
use cookie::{Cookie, Expiration, SameSite}; use cookie::{Cookie, Expiration, SameSite};
use cookie_store::CookieStore; use cookie_store::CookieStore;
use http::HeaderMap;
use imbl_value::InternedString; use imbl_value::InternedString;
use josekit::jwk::Jwk; use josekit::jwk::Jwk;
use once_cell::sync::OnceCell; use once_cell::sync::OnceCell;
@@ -24,10 +23,10 @@ use super::setup::CURRENT_SECRET;
use crate::context::config::{ClientConfig, local_config_path}; use crate::context::config::{ClientConfig, local_config_path};
use crate::context::{DiagnosticContext, InitContext, InstallContext, RpcContext, SetupContext}; use crate::context::{DiagnosticContext, InitContext, InstallContext, RpcContext, SetupContext};
use crate::developer::{OS_DEVELOPER_KEY_PATH, default_developer_key_path}; use crate::developer::{OS_DEVELOPER_KEY_PATH, default_developer_key_path};
use crate::middleware::auth::local::LocalAuthContext; use crate::middleware::auth::AuthContext;
use crate::prelude::*; use crate::prelude::*;
use crate::rpc_continuations::Guid; use crate::rpc_continuations::Guid;
use crate::util::io::read_file_to_string; use crate::tunnel::context::TunnelContext;
#[derive(Debug)] #[derive(Debug)]
pub struct CliContextSeed { pub struct CliContextSeed {
@@ -35,7 +34,7 @@ pub struct CliContextSeed {
pub base_url: Url, pub base_url: Url,
pub rpc_url: Url, pub rpc_url: Url,
pub registry_url: Option<Url>, pub registry_url: Option<Url>,
pub registry_hostname: Vec<InternedString>, pub registry_hostname: Option<InternedString>,
pub registry_listen: Option<SocketAddr>, pub registry_listen: Option<SocketAddr>,
pub tunnel_addr: Option<SocketAddr>, pub tunnel_addr: Option<SocketAddr>,
pub tunnel_listen: Option<SocketAddr>, pub tunnel_listen: Option<SocketAddr>,
@@ -126,7 +125,7 @@ impl CliContext {
Ok::<_, Error>(registry) Ok::<_, Error>(registry)
}) })
.transpose()?, .transpose()?,
registry_hostname: config.registry_hostname.unwrap_or_default(), registry_hostname: config.registry_hostname,
registry_listen: config.registry_listen, registry_listen: config.registry_listen,
tunnel_addr: config.tunnel, tunnel_addr: config.tunnel,
tunnel_listen: config.tunnel_listen, tunnel_listen: config.tunnel_listen,
@@ -160,7 +159,7 @@ impl CliContext {
continue; continue;
} }
let pair = <ed25519::KeypairBytes as ed25519::pkcs8::DecodePrivateKey>::from_pkcs8_pem( let pair = <ed25519::KeypairBytes as ed25519::pkcs8::DecodePrivateKey>::from_pkcs8_pem(
&std::fs::read_to_string(path)?, &std::fs::read_to_string(&self.developer_key_path)?,
) )
.with_kind(crate::ErrorKind::Pem)?; .with_kind(crate::ErrorKind::Pem)?;
let secret = ed25519_dalek::SecretKey::try_from(&pair.secret_key[..]).map_err(|_| { let secret = ed25519_dalek::SecretKey::try_from(&pair.secret_key[..]).map_err(|_| {
@@ -172,7 +171,7 @@ impl CliContext {
return Ok(secret.into()) return Ok(secret.into())
} }
Err(Error::new( Err(Error::new(
eyre!("Developer Key does not exist! Please run `start-cli init-key` before running this command."), eyre!("Developer Key does not exist! Please run `start-cli init` before running this command."),
crate::ErrorKind::Uninitialized crate::ErrorKind::Uninitialized
)) ))
}) })
@@ -234,28 +233,23 @@ impl CliContext {
&self, &self,
method: &str, method: &str,
params: Value, params: Value,
) -> Result<Value, Error> ) -> Result<Value, RpcError>
where where
Self: CallRemote<RemoteContext>, Self: CallRemote<RemoteContext>,
{ {
<Self as CallRemote<RemoteContext, Empty>>::call_remote(&self, method, params, Empty {}) <Self as CallRemote<RemoteContext, Empty>>::call_remote(&self, method, params, Empty {})
.await .await
.map_err(Error::from)
.with_ctx(|e| (e.kind, method))
} }
pub async fn call_remote_with<RemoteContext, T>( pub async fn call_remote_with<RemoteContext, T>(
&self, &self,
method: &str, method: &str,
params: Value, params: Value,
extra: T, extra: T,
) -> Result<Value, Error> ) -> Result<Value, RpcError>
where where
Self: CallRemote<RemoteContext, T>, Self: CallRemote<RemoteContext, T>,
{ {
<Self as CallRemote<RemoteContext, T>>::call_remote(&self, method, params, extra) <Self as CallRemote<RemoteContext, T>>::call_remote(&self, method, params, extra).await
.await
.map_err(Error::from)
.with_ctx(|e| (e.kind, method))
} }
} }
impl AsRef<Jwk> for CliContext { impl AsRef<Jwk> for CliContext {
@@ -285,15 +279,9 @@ impl Context for CliContext {
) )
} }
} }
impl AsRef<Client> for CliContext {
fn as_ref(&self) -> &Client {
&self.client
}
}
impl CallRemote<RpcContext> for CliContext { impl CallRemote<RpcContext> for CliContext {
async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> { async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> {
if let Ok(local) = read_file_to_string(RpcContext::LOCAL_AUTH_COOKIE_PATH).await { if let Ok(local) = std::fs::read_to_string(RpcContext::LOCAL_AUTH_COOKIE_PATH) {
self.cookie_store self.cookie_store
.lock() .lock()
.unwrap() .unwrap()
@@ -307,11 +295,10 @@ impl CallRemote<RpcContext> for CliContext {
) )
.with_kind(crate::ErrorKind::Network)?; .with_kind(crate::ErrorKind::Network)?;
} }
crate::middleware::auth::signature::call_remote( crate::middleware::signature::call_remote(
self, self,
self.rpc_url.clone(), self.rpc_url.clone(),
HeaderMap::new(), self.rpc_url.host_str().or_not_found("rpc url hostname")?,
self.rpc_url.host_str(),
method, method,
params, params,
) )
@@ -320,11 +307,24 @@ impl CallRemote<RpcContext> for CliContext {
} }
impl CallRemote<DiagnosticContext> for CliContext { impl CallRemote<DiagnosticContext> for CliContext {
async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> { async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> {
crate::middleware::auth::signature::call_remote( if let Ok(local) = std::fs::read_to_string(TunnelContext::LOCAL_AUTH_COOKIE_PATH) {
self.cookie_store
.lock()
.unwrap()
.insert_raw(
&Cookie::build(("local", local))
.domain("localhost")
.expires(Expiration::Session)
.same_site(SameSite::Strict)
.build(),
&"http://localhost".parse()?,
)
.with_kind(crate::ErrorKind::Network)?;
}
crate::middleware::signature::call_remote(
self, self,
self.rpc_url.clone(), self.rpc_url.clone(),
HeaderMap::new(), self.rpc_url.host_str().or_not_found("rpc url hostname")?,
self.rpc_url.host_str(),
method, method,
params, params,
) )
@@ -333,11 +333,10 @@ impl CallRemote<DiagnosticContext> for CliContext {
} }
impl CallRemote<InitContext> for CliContext { impl CallRemote<InitContext> for CliContext {
async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> { async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> {
crate::middleware::auth::signature::call_remote( crate::middleware::signature::call_remote(
self, self,
self.rpc_url.clone(), self.rpc_url.clone(),
HeaderMap::new(), self.rpc_url.host_str().or_not_found("rpc url hostname")?,
self.rpc_url.host_str(),
method, method,
params, params,
) )
@@ -346,11 +345,10 @@ impl CallRemote<InitContext> for CliContext {
} }
impl CallRemote<SetupContext> for CliContext { impl CallRemote<SetupContext> for CliContext {
async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> { async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> {
crate::middleware::auth::signature::call_remote( crate::middleware::signature::call_remote(
self, self,
self.rpc_url.clone(), self.rpc_url.clone(),
HeaderMap::new(), self.rpc_url.host_str().or_not_found("rpc url hostname")?,
self.rpc_url.host_str(),
method, method,
params, params,
) )
@@ -359,14 +357,25 @@ impl CallRemote<SetupContext> for CliContext {
} }
impl CallRemote<InstallContext> for CliContext { impl CallRemote<InstallContext> for CliContext {
async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> { async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result<Value, RpcError> {
crate::middleware::auth::signature::call_remote( crate::middleware::signature::call_remote(
self, self,
self.rpc_url.clone(), self.rpc_url.clone(),
HeaderMap::new(), self.rpc_url.host_str().or_not_found("rpc url hostname")?,
self.rpc_url.host_str(),
method, method,
params, params,
) )
.await .await
} }
} }
#[test]
fn test() {
let ctx = CliContext::init(ClientConfig::default()).unwrap();
ctx.runtime().unwrap().block_on(async {
reqwest::Client::new()
.get("http://example.com")
.send()
.await
.unwrap();
});
}

View File

@@ -65,7 +65,7 @@ pub struct ClientConfig {
#[arg(short = 'r', long)] #[arg(short = 'r', long)]
pub registry: Option<Url>, pub registry: Option<Url>,
#[arg(long)] #[arg(long)]
pub registry_hostname: Option<Vec<InternedString>>, pub registry_hostname: Option<InternedString>,
#[arg(skip)] #[arg(skip)]
pub registry_listen: Option<SocketAddr>, pub registry_listen: Option<SocketAddr>,
#[arg(short = 't', long)] #[arg(short = 't', long)]

View File

@@ -25,12 +25,10 @@ impl InitContext {
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn init(cfg: &ServerConfig) -> Result<Self, Error> { pub async fn init(cfg: &ServerConfig) -> Result<Self, Error> {
let (shutdown, _) = tokio::sync::broadcast::channel(1); let (shutdown, _) = tokio::sync::broadcast::channel(1);
let mut progress = FullProgressTracker::new();
progress.enable_logging(true);
Ok(Self(Arc::new(InitContextSeed { Ok(Self(Arc::new(InitContextSeed {
config: cfg.clone(), config: cfg.clone(),
error: watch::channel(None).0, error: watch::channel(None).0,
progress, progress: FullProgressTracker::new(),
shutdown, shutdown,
rpc_continuations: RpcContinuations::new(), rpc_continuations: RpcContinuations::new(),
}))) })))

View File

@@ -1,21 +1,24 @@
use std::collections::{BTreeMap, BTreeSet}; use std::collections::{BTreeMap, BTreeSet};
use std::ffi::OsStr; use std::ffi::OsStr;
use std::future::Future; use std::future::Future;
use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4};
use std::ops::Deref; use std::ops::Deref;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use chrono::{TimeDelta, Utc}; use chrono::{TimeDelta, Utc};
use helpers::NonDetachingJoinHandle;
use imbl::OrdMap; use imbl::OrdMap;
use imbl_value::InternedString; use imbl_value::InternedString;
use itertools::Itertools; use itertools::Itertools;
use josekit::jwk::Jwk; use josekit::jwk::Jwk;
use models::{ActionId, PackageId};
use reqwest::{Client, Proxy}; use reqwest::{Client, Proxy};
use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{CallRemote, Context, Empty}; use rpc_toolkit::{CallRemote, Context, Empty};
use tokio::sync::{RwLock, broadcast, oneshot, watch}; use tokio::sync::{broadcast, oneshot, watch, RwLock};
use tokio::time::Instant; use tokio::time::Instant;
use tracing::instrument; use tracing::instrument;
@@ -23,30 +26,28 @@ use super::setup::CURRENT_SECRET;
use crate::account::AccountInfo; use crate::account::AccountInfo;
use crate::auth::Sessions; use crate::auth::Sessions;
use crate::context::config::ServerConfig; use crate::context::config::ServerConfig;
use crate::db::model::Database;
use crate::db::model::package::TaskSeverity; use crate::db::model::package::TaskSeverity;
use crate::db::model::Database;
use crate::disk::OsPartitionInfo; use crate::disk::OsPartitionInfo;
use crate::init::{InitResult, check_time_is_synchronized}; use crate::init::{check_time_is_synchronized, InitResult};
use crate::install::PKG_ARCHIVE_DIR; use crate::install::PKG_ARCHIVE_DIR;
use crate::lxc::LxcManager; use crate::lxc::LxcManager;
use crate::net::gateway::UpgradableListener;
use crate::net::net_controller::{NetController, NetService}; use crate::net::net_controller::{NetController, NetService};
use crate::net::socks::DEFAULT_SOCKS_LISTEN; use crate::net::socks::DEFAULT_SOCKS_LISTEN;
use crate::net::utils::{find_eth_iface, find_wifi_iface}; use crate::net::utils::{find_eth_iface, find_wifi_iface};
use crate::net::web_server::WebServerAcceptorSetter; use crate::net::web_server::{UpgradableListener, WebServerAcceptorSetter};
use crate::net::wifi::WpaCli; use crate::net::wifi::WpaCli;
use crate::prelude::*; use crate::prelude::*;
use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle}; use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle};
use crate::rpc_continuations::{Guid, OpenAuthedContinuations, RpcContinuations}; use crate::rpc_continuations::{Guid, OpenAuthedContinuations, RpcContinuations};
use crate::service::ServiceMap;
use crate::service::action::update_tasks; use crate::service::action::update_tasks;
use crate::service::effects::callbacks::ServiceCallbacks; use crate::service::effects::callbacks::ServiceCallbacks;
use crate::service::ServiceMap;
use crate::shutdown::Shutdown; use crate::shutdown::Shutdown;
use crate::util::future::NonDetachingJoinHandle;
use crate::util::io::delete_file; use crate::util::io::delete_file;
use crate::util::lshw::LshwDevice; use crate::util::lshw::LshwDevice;
use crate::util::sync::{SyncMutex, SyncRwLock, Watch}; use crate::util::sync::{SyncMutex, Watch};
use crate::{ActionId, DATA_DIR, PackageId}; use crate::{DATA_DIR, HOST_IP};
pub struct RpcContextSeed { pub struct RpcContextSeed {
is_closed: AtomicBool, is_closed: AtomicBool,
@@ -57,7 +58,7 @@ pub struct RpcContextSeed {
pub ephemeral_sessions: SyncMutex<Sessions>, pub ephemeral_sessions: SyncMutex<Sessions>,
pub db: TypedPatchDb<Database>, pub db: TypedPatchDb<Database>,
pub sync_db: watch::Sender<u64>, pub sync_db: watch::Sender<u64>,
pub account: SyncRwLock<AccountInfo>, pub account: RwLock<AccountInfo>,
pub net_controller: Arc<NetController>, pub net_controller: Arc<NetController>,
pub os_net_service: NetService, pub os_net_service: NetService,
pub s9pk_arch: Option<&'static str>, pub s9pk_arch: Option<&'static str>,
@@ -75,11 +76,6 @@ pub struct RpcContextSeed {
pub start_time: Instant, pub start_time: Instant,
pub crons: SyncMutex<BTreeMap<Guid, NonDetachingJoinHandle<()>>>, pub crons: SyncMutex<BTreeMap<Guid, NonDetachingJoinHandle<()>>>,
} }
impl Drop for RpcContextSeed {
fn drop(&mut self) {
tracing::info!("RpcContext is dropped");
}
}
pub struct Hardware { pub struct Hardware {
pub devices: Vec<LshwDevice>, pub devices: Vec<LshwDevice>,
@@ -224,7 +220,7 @@ impl RpcContext {
ephemeral_sessions: SyncMutex::new(Sessions::new()), ephemeral_sessions: SyncMutex::new(Sessions::new()),
sync_db: watch::Sender::new(db.sequence().await), sync_db: watch::Sender::new(db.sequence().await),
db, db,
account: SyncRwLock::new(account), account: RwLock::new(account),
callbacks: net_controller.callbacks.clone(), callbacks: net_controller.callbacks.clone(),
net_controller, net_controller,
os_net_service, os_net_service,
@@ -273,7 +269,7 @@ impl RpcContext {
self.crons.mutate(|c| std::mem::take(c)); self.crons.mutate(|c| std::mem::take(c));
self.services.shutdown_all().await?; self.services.shutdown_all().await?;
self.is_closed.store(true, Ordering::SeqCst); self.is_closed.store(true, Ordering::SeqCst);
tracing::info!("RpcContext is shutdown"); tracing::info!("RPC Context is shutdown");
Ok(()) Ok(())
} }
@@ -388,19 +384,12 @@ impl RpcContext {
.as_entries()? .as_entries()?
.into_iter() .into_iter()
.map(|(_, pde)| { .map(|(_, pde)| {
Ok(pde Ok(pde.as_tasks().as_entries()?.into_iter().map(|(_, r)| {
.as_tasks() Ok::<_, Error>((
.as_entries()? r.as_task().as_package_id().de()?,
.into_iter() r.as_task().as_action_id().de()?,
.map(|(_, r)| { ))
let t = r.as_task(); }))
Ok::<_, Error>(if t.as_input().transpose_ref().is_some() {
Some((t.as_package_id().de()?, t.as_action_id().de()?))
} else {
None
})
})
.filter_map_ok(|a| a))
}) })
.flatten_ok() .flatten_ok()
.map(|a| a.and_then(|a| a)) .map(|a| a.and_then(|a| a))
@@ -422,32 +411,46 @@ impl RpcContext {
} }
} }
} }
for id in
self.db self.db
.mutate(|db| { .mutate::<Vec<PackageId>>(|db| {
for (package_id, action_input) in &action_input { for (package_id, action_input) in &action_input {
for (action_id, input) in action_input { for (action_id, input) in action_input {
for (_, pde) in db.as_public_mut().as_package_data_mut().as_entries_mut()? { for (_, pde) in
pde.as_tasks_mut().mutate(|tasks| { db.as_public_mut().as_package_data_mut().as_entries_mut()?
Ok(update_tasks(tasks, package_id, action_id, input, false)) {
})?; pde.as_tasks_mut().mutate(|tasks| {
Ok(update_tasks(tasks, package_id, action_id, input, false))
})?;
}
} }
} }
} db.as_public()
for (_, pde) in db.as_public_mut().as_package_data_mut().as_entries_mut()? { .as_package_data()
if pde .as_entries()?
.as_tasks()
.de()?
.into_iter() .into_iter()
.any(|(_, t)| t.active && t.task.severity == TaskSeverity::Critical) .filter_map(|(id, pkg)| {
{ (|| {
pde.as_status_info_mut().stop()?; if pkg.as_tasks().de()?.into_iter().any(|(_, t)| {
} t.active && t.task.severity == TaskSeverity::Critical
} }) {
Ok(()) Ok(Some(id))
}) } else {
.await Ok(None)
.result?; }
})()
.transpose()
})
.collect()
})
.await
.result?
{
let svc = self.services.get(&id).await;
if let Some(svc) = &*svc {
svc.stop(procedure_id.clone(), false).await?;
}
}
check_tasks.complete(); check_tasks.complete();
Ok(()) Ok(())
@@ -475,11 +478,6 @@ impl RpcContext {
<Self as CallRemote<RemoteContext, T>>::call_remote(&self, method, params, extra).await <Self as CallRemote<RemoteContext, T>>::call_remote(&self, method, params, extra).await
} }
} }
impl AsRef<Client> for RpcContext {
fn as_ref(&self) -> &Client {
&self.client
}
}
impl AsRef<Jwk> for RpcContext { impl AsRef<Jwk> for RpcContext {
fn as_ref(&self) -> &Jwk { fn as_ref(&self) -> &Jwk {
&CURRENT_SECRET &CURRENT_SECRET

View File

@@ -4,6 +4,7 @@ use std::sync::Arc;
use std::time::Duration; use std::time::Duration;
use futures::{Future, StreamExt}; use futures::{Future, StreamExt};
use helpers::NonDetachingJoinHandle;
use imbl_value::InternedString; use imbl_value::InternedString;
use josekit::jwk::Jwk; use josekit::jwk::Jwk;
use patch_db::PatchDb; use patch_db::PatchDb;
@@ -20,14 +21,13 @@ use crate::context::RpcContext;
use crate::context::config::ServerConfig; use crate::context::config::ServerConfig;
use crate::disk::OsPartitionInfo; use crate::disk::OsPartitionInfo;
use crate::hostname::Hostname; use crate::hostname::Hostname;
use crate::net::gateway::UpgradableListener; use crate::net::web_server::{UpgradableListener, WebServer, WebServerAcceptorSetter};
use crate::net::web_server::{WebServer, WebServerAcceptorSetter};
use crate::prelude::*; use crate::prelude::*;
use crate::progress::FullProgressTracker; use crate::progress::FullProgressTracker;
use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations}; use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations};
use crate::setup::SetupProgress; use crate::setup::SetupProgress;
use crate::shutdown::Shutdown; use crate::shutdown::Shutdown;
use crate::util::future::NonDetachingJoinHandle; use crate::util::net::WebSocketExt;
lazy_static::lazy_static! { lazy_static::lazy_static! {
pub static ref CURRENT_SECRET: Jwk = Jwk::generate_ec_key(josekit::jwk::alg::ec::EcCurve::P256).unwrap_or_else(|e| { pub static ref CURRENT_SECRET: Jwk = Jwk::generate_ec_key(josekit::jwk::alg::ec::EcCurve::P256).unwrap_or_else(|e| {
@@ -86,8 +86,6 @@ impl SetupContext {
config: &ServerConfig, config: &ServerConfig,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
let (shutdown, _) = tokio::sync::broadcast::channel(1); let (shutdown, _) = tokio::sync::broadcast::channel(1);
let mut progress = FullProgressTracker::new();
progress.enable_logging(true);
Ok(Self(Arc::new(SetupContextSeed { Ok(Self(Arc::new(SetupContextSeed {
webserver: webserver.acceptor_setter(), webserver: webserver.acceptor_setter(),
config: config.clone(), config: config.clone(),
@@ -98,7 +96,7 @@ impl SetupContext {
) )
})?, })?,
disable_encryption: config.disable_encryption.unwrap_or(false), disable_encryption: config.disable_encryption.unwrap_or(false),
progress, progress: FullProgressTracker::new(),
task: OnceCell::new(), task: OnceCell::new(),
result: OnceCell::new(), result: OnceCell::new(),
disk_guid: OnceCell::new(), disk_guid: OnceCell::new(),

View File

@@ -1,11 +1,14 @@
use clap::Parser; use clap::Parser;
use color_eyre::eyre::eyre;
use models::PackageId;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::instrument; use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
use crate::Error;
use crate::context::RpcContext; use crate::context::RpcContext;
use crate::prelude::*; use crate::prelude::*;
use crate::{Error, PackageId}; use crate::rpc_continuations::Guid;
#[derive(Deserialize, Serialize, Parser, TS)] #[derive(Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@@ -16,51 +19,37 @@ pub struct ControlParams {
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn start(ctx: RpcContext, ControlParams { id }: ControlParams) -> Result<(), Error> { pub async fn start(ctx: RpcContext, ControlParams { id }: ControlParams) -> Result<(), Error> {
ctx.db ctx.services
.mutate(|db| { .get(&id)
db.as_public_mut()
.as_package_data_mut()
.as_idx_mut(&id)
.or_not_found(&id)?
.as_status_info_mut()
.as_desired_mut()
.map_mutate(|s| Ok(s.start()))
})
.await .await
.result?; .as_ref()
.or_not_found(lazy_format!("Manager for {id}"))?
.start(Guid::new())
.await?;
Ok(()) Ok(())
} }
pub async fn stop(ctx: RpcContext, ControlParams { id }: ControlParams) -> Result<(), Error> { pub async fn stop(ctx: RpcContext, ControlParams { id }: ControlParams) -> Result<(), Error> {
ctx.db ctx.services
.mutate(|db| { .get(&id)
db.as_public_mut()
.as_package_data_mut()
.as_idx_mut(&id)
.or_not_found(&id)?
.as_status_info_mut()
.stop()
})
.await .await
.result?; .as_ref()
.ok_or_else(|| Error::new(eyre!("Manager not found"), crate::ErrorKind::InvalidRequest))?
.stop(Guid::new(), true)
.await?;
Ok(()) Ok(())
} }
pub async fn restart(ctx: RpcContext, ControlParams { id }: ControlParams) -> Result<(), Error> { pub async fn restart(ctx: RpcContext, ControlParams { id }: ControlParams) -> Result<(), Error> {
ctx.db ctx.services
.mutate(|db| { .get(&id)
db.as_public_mut()
.as_package_data_mut()
.as_idx_mut(&id)
.or_not_found(&id)?
.as_status_info_mut()
.as_desired_mut()
.map_mutate(|s| Ok(s.restart()))
})
.await .await
.result?; .as_ref()
.ok_or_else(|| Error::new(eyre!("Manager not found"), crate::ErrorKind::InvalidRequest))?
.restart(Guid::new(), false)
.await?;
Ok(()) Ok(())
} }

View File

@@ -22,32 +22,13 @@ use ts_rs::TS;
use crate::context::{CliContext, RpcContext}; use crate::context::{CliContext, RpcContext};
use crate::prelude::*; use crate::prelude::*;
use crate::rpc_continuations::{Guid, RpcContinuation}; use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::util::net::WebSocketExt;
use crate::util::serde::{HandlerExtSerde, apply_expr}; use crate::util::serde::{HandlerExtSerde, apply_expr};
lazy_static::lazy_static! { lazy_static::lazy_static! {
static ref PUBLIC: JsonPointer = "/public".parse().unwrap(); static ref PUBLIC: JsonPointer = "/public".parse().unwrap();
} }
pub trait DbAccess<T>: Sized {
fn access<'a>(db: &'a Model<Self>) -> &'a Model<T>;
}
pub trait DbAccessMut<T>: DbAccess<T> {
fn access_mut<'a>(db: &'a mut Model<Self>) -> &'a mut Model<T>;
}
pub trait DbAccessByKey<T>: Sized {
type Key<'a>;
fn access_by_key<'a>(db: &'a Model<Self>, key: Self::Key<'_>) -> Option<&'a Model<T>>;
}
pub trait DbAccessMutByKey<T>: DbAccessByKey<T> {
fn access_mut_by_key<'a>(
db: &'a mut Model<Self>,
key: Self::Key<'_>,
) -> Option<&'a mut Model<T>>;
}
pub fn db<C: Context>() -> ParentHandler<C> { pub fn db<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand( .subcommand(
@@ -146,7 +127,7 @@ pub struct SubscribeParams {
#[ts(type = "string | null")] #[ts(type = "string | null")]
pointer: Option<JsonPointer>, pointer: Option<JsonPointer>,
#[ts(skip)] #[ts(skip)]
#[serde(rename = "__Auth_session")] #[serde(rename = "__auth_session")]
session: Option<InternedString>, session: Option<InternedString>,
} }

Some files were not shown because too many files have changed in this diff Show More