Merge branch 'next/minor' of github.com:Start9Labs/start-os into next/major

This commit is contained in:
Matt Hill
2024-11-25 19:02:07 -07:00
712 changed files with 83068 additions and 9240 deletions

View File

@@ -78,7 +78,7 @@ jobs:
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version: '3.x' python-version: "3.x"
- uses: actions/setup-node@v4 - uses: actions/setup-node@v4
with: with:
@@ -156,7 +156,7 @@ jobs:
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v5 uses: actions/setup-python@v5
with: with:
python-version: '3.x' python-version: "3.x"
- name: Install dependencies - name: Install dependencies
run: | run: |
@@ -187,11 +187,27 @@ jobs:
run: | run: |
mkdir -p web/node_modules mkdir -p web/node_modules
mkdir -p web/dist/raw mkdir -p web/dist/raw
touch core/startos/bindings mkdir -p core/startos/bindings
touch sdk/lib/osBindings mkdir -p sdk/base/lib/osBindings
mkdir -p container-runtime/node_modules
mkdir -p container-runtime/dist mkdir -p container-runtime/dist
mkdir -p container-runtime/dist/node_modules
mkdir -p core/startos/bindings
mkdir -p sdk/dist
mkdir -p sdk/baseDist
mkdir -p patch-db/client/node_modules
mkdir -p patch-db/client/dist
mkdir -p web/.angular
mkdir -p web/dist/raw/ui
mkdir -p web/dist/raw/install-wizard
mkdir -p web/dist/raw/setup-wizard
mkdir -p web/dist/static/ui
mkdir -p web/dist/static/install-wizard
mkdir -p web/dist/static/setup-wizard
PLATFORM=${{ matrix.platform }} make -t compiled-${{ env.ARCH }}.tar PLATFORM=${{ matrix.platform }} make -t compiled-${{ env.ARCH }}.tar
- run: git status
- name: Run iso build - name: Run iso build
run: PLATFORM=${{ matrix.platform }} make iso run: PLATFORM=${{ matrix.platform }} make iso
if: ${{ matrix.platform != 'raspberrypi' }} if: ${{ matrix.platform != 'raspberrypi' }}

View File

@@ -11,7 +11,7 @@ on:
- next/* - next/*
env: env:
NODEJS_VERSION: "18.15.0" NODEJS_VERSION: "20.16.0"
ENVIRONMENT: dev-unstable ENVIRONMENT: dev-unstable
jobs: jobs:

40
CLEARNET.md Normal file
View File

@@ -0,0 +1,40 @@
# Setting up clearnet for a service interface
NOTE: this guide is for HTTPS only! Other configurations may require a more bespoke setup depending on the service. Please consult the service documentation or the Start9 Community for help with non-HTTPS applications
## Initialize ACME certificate generation
The following command will register your device with an ACME certificate provider, such as letsencrypt
This only needs to be done once.
```
start-cli net acme init --provider=letsencrypt --contact="mailto:me@drbonez.dev"
```
- `provider` can be `letsencrypt`, `letsencrypt-staging` (useful if you're doing a lot of testing and want to avoid being rate limited), or the url of any provider that supports the [RFC8555](https://datatracker.ietf.org/doc/html/rfc8555) ACME api
- `contact` can be any valid contact url, typically `mailto:` urls. it can be specified multiple times to set multiple contacts
## Whitelist a domain for ACME certificate acquisition
The following command will tell the OS to use ACME certificates instead of system signed ones for the provided url. In this example, `testing.drbonez.dev`
This must be done for every domain you wish to host on clearnet.
```
start-cli net acme domain add "testing.drbonez.dev"
```
## Forward clearnet port
Go into your router settings, and map port 443 on your router to port 5443 on your start-os device. This one port should cover most use cases
## Add domain to service host
The following command will tell the OS to route https requests from the WAN to the provided hostname to the specified service. In this example, we are adding `testing.drbonez.dev` to the host `ui-multi` on the package `hello-world`. To see a list of available host IDs for a given package, run `start-cli package host <PACKAGE> list`
This must be done for every domain you wish to host on clearnet.
```
start-cli package host hello-world address ui-multi add testing.drbonez.dev
```

View File

@@ -27,6 +27,7 @@ curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash
source ~/.bashrc source ~/.bashrc
nvm install 20 nvm install 20
nvm use 20 nvm use 20
nvm alias default 20 # this prevents your machine from reverting back to another version
``` ```
## Cloning the repository ## Cloning the repository

View File

@@ -6,7 +6,8 @@ BASENAME := $(shell ./basename.sh)
PLATFORM := $(shell if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi) PLATFORM := $(shell if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi)
ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi) ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi)
IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi) IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi)
WEB_UIS := web/dist/raw/ui web/dist/raw/setup-wizard web/dist/raw/install-wizard WEB_UIS := web/dist/raw/ui/index.html web/dist/raw/setup-wizard/index.html web/dist/raw/install-wizard/index.html
COMPRESSED_WEB_UIS := web/dist/static/ui/index.html web/dist/static/setup-wizard/index.html web/dist/static/install-wizard/index.html
FIRMWARE_ROMS := ./firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json) FIRMWARE_ROMS := ./firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json)
BUILD_SRC := $(shell git ls-files build) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS) BUILD_SRC := $(shell git ls-files build) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS)
DEBIAN_SRC := $(shell git ls-files debian/) DEBIAN_SRC := $(shell git ls-files debian/)
@@ -16,7 +17,7 @@ COMPAT_SRC := $(shell git ls-files system-images/compat/)
UTILS_SRC := $(shell git ls-files system-images/utils/) UTILS_SRC := $(shell git ls-files system-images/utils/)
BINFMT_SRC := $(shell git ls-files system-images/binfmt/) BINFMT_SRC := $(shell git ls-files system-images/binfmt/)
CORE_SRC := $(shell git ls-files core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE) CORE_SRC := $(shell git ls-files core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE)
WEB_SHARED_SRC := $(shell git ls-files web/projects/shared) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist web/patchdb-ui-seed.json sdk/dist WEB_SHARED_SRC := $(shell git ls-files web/projects/shared) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist/index.js sdk/baseDist/package.json web/patchdb-ui-seed.json sdk/dist/package.json
WEB_UI_SRC := $(shell git ls-files web/projects/ui) WEB_UI_SRC := $(shell git ls-files web/projects/ui)
WEB_SETUP_WIZARD_SRC := $(shell git ls-files web/projects/setup-wizard) WEB_SETUP_WIZARD_SRC := $(shell git ls-files web/projects/setup-wizard)
WEB_INSTALL_WIZARD_SRC := $(shell git ls-files web/projects/install-wizard) WEB_INSTALL_WIZARD_SRC := $(shell git ls-files web/projects/install-wizard)
@@ -47,7 +48,7 @@ endif
.DELETE_ON_ERROR: .DELETE_ON_ERROR:
.PHONY: all metadata install clean format cli uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole wormhole-deb test test-core test-sdk test-container-runtime .PHONY: all metadata install clean format cli uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole wormhole-deb test test-core test-sdk test-container-runtime registry
all: $(ALL_TARGETS) all: $(ALL_TARGETS)
@@ -94,15 +95,18 @@ test: | test-core test-sdk test-container-runtime
test-core: $(CORE_SRC) $(ENVIRONMENT_FILE) test-core: $(CORE_SRC) $(ENVIRONMENT_FILE)
./core/run-tests.sh ./core/run-tests.sh
test-sdk: $(shell git ls-files sdk) sdk/lib/osBindings test-sdk: $(shell git ls-files sdk) sdk/base/lib/osBindings/index.ts
cd sdk && make test cd sdk && make test
test-container-runtime: container-runtime/node_modules $(shell git ls-files container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json test-container-runtime: container-runtime/node_modules/.package-lock.json $(shell git ls-files container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
cd container-runtime && npm test cd container-runtime && npm test
cli: cli:
cd core && ./install-cli.sh cd core && ./install-cli.sh
registry:
cd core && ./build-registrybox.sh
deb: results/$(BASENAME).deb deb: results/$(BASENAME).deb
debian/control: build/lib/depends build/lib/conflicts debian/control: build/lib/depends build/lib/conflicts
@@ -209,7 +213,7 @@ emulate-reflash: $(ALL_TARGETS)
@if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi
$(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create') $(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create')
$(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM) $(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM)
$(call ssh,'sudo rm -f /media/startos/config/disk.guid') $(call ssh,'sudo rm -f /media/startos/config/disk.guid /media/startos/config/overlay/etc/hostname')
$(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"') $(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"')
upload-ota: results/$(BASENAME).squashfs upload-ota: results/$(BASENAME).squashfs
@@ -218,34 +222,36 @@ upload-ota: results/$(BASENAME).squashfs
container-runtime/debian.$(ARCH).squashfs: container-runtime/debian.$(ARCH).squashfs:
ARCH=$(ARCH) ./container-runtime/download-base-image.sh ARCH=$(ARCH) ./container-runtime/download-base-image.sh
container-runtime/node_modules: container-runtime/package.json container-runtime/package-lock.json sdk/dist container-runtime/node_modules/.package-lock.json: container-runtime/package.json container-runtime/package-lock.json sdk/dist/package.json
npm --prefix container-runtime ci npm --prefix container-runtime ci
touch container-runtime/node_modules touch container-runtime/node_modules/.package-lock.json
sdk/lib/osBindings: core/startos/bindings sdk/base/lib/osBindings/index.ts: core/startos/bindings/index.ts
mkdir -p sdk/lib/osBindings mkdir -p sdk/base/lib/osBindings
ls core/startos/bindings/*.ts | sed 's/core\/startos\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' > core/startos/bindings/index.ts rsync -ac --delete core/startos/bindings/ sdk/base/lib/osBindings/
npm --prefix sdk exec -- prettier --config ./sdk/package.json -w ./core/startos/bindings/*.ts touch sdk/base/lib/osBindings/index.ts
rsync -ac --delete core/startos/bindings/ sdk/lib/osBindings/
touch sdk/lib/osBindings
core/startos/bindings: $(shell git ls-files core) $(ENVIRONMENT_FILE) core/startos/bindings/index.ts: $(shell git ls-files core) $(ENVIRONMENT_FILE)
rm -rf core/startos/bindings rm -rf core/startos/bindings
./core/build-ts.sh ./core/build-ts.sh
touch core/startos/bindings ls core/startos/bindings/*.ts | sed 's/core\/startos\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' | grep -v '"./index"' | tee core/startos/bindings/index.ts
npm --prefix sdk exec -- prettier --config ./sdk/base/package.json -w ./core/startos/bindings/*.ts
touch core/startos/bindings/index.ts
sdk/dist: $(shell git ls-files sdk) sdk/lib/osBindings sdk/dist/package.json sdk/baseDist/package.json: $(shell git ls-files sdk) sdk/base/lib/osBindings/index.ts
(cd sdk && make bundle) (cd sdk && make bundle)
touch sdk/dist/package.json
touch sdk/baseDist/package.json
# TODO: make container-runtime its own makefile? # TODO: make container-runtime its own makefile?
container-runtime/dist/index.js: container-runtime/node_modules $(shell git ls-files container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json container-runtime/dist/index.js: container-runtime/node_modules/.package-lock.json $(shell git ls-files container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json
npm --prefix container-runtime run build npm --prefix container-runtime run build
container-runtime/dist/node_modules container-runtime/dist/package.json container-runtime/dist/package-lock.json: container-runtime/package.json container-runtime/package-lock.json sdk/dist container-runtime/install-dist-deps.sh container-runtime/dist/node_modules/.package-lock.json container-runtime/dist/package.json container-runtime/dist/package-lock.json: container-runtime/package.json container-runtime/package-lock.json sdk/dist/package.json container-runtime/install-dist-deps.sh
./container-runtime/install-dist-deps.sh ./container-runtime/install-dist-deps.sh
touch container-runtime/dist/node_modules touch container-runtime/dist/node_modules/.package-lock.json
container-runtime/rootfs.$(ARCH).squashfs: container-runtime/debian.$(ARCH).squashfs container-runtime/container-runtime.service container-runtime/update-image.sh container-runtime/deb-install.sh container-runtime/dist/index.js container-runtime/dist/node_modules core/target/$(ARCH)-unknown-linux-musl/release/containerbox | sudo container-runtime/rootfs.$(ARCH).squashfs: container-runtime/debian.$(ARCH).squashfs container-runtime/container-runtime.service container-runtime/update-image.sh container-runtime/deb-install.sh container-runtime/dist/index.js container-runtime/dist/node_modules/.package-lock.json core/target/$(ARCH)-unknown-linux-musl/release/containerbox | sudo
ARCH=$(ARCH) ./container-runtime/update-image.sh ARCH=$(ARCH) ./container-runtime/update-image.sh
build/lib/depends build/lib/conflicts: build/dpkg-deps/* build/lib/depends build/lib/conflicts: build/dpkg-deps/*
@@ -263,7 +269,7 @@ system-images/utils/docker-images/$(ARCH).tar: $(UTILS_SRC)
system-images/binfmt/docker-images/$(ARCH).tar: $(BINFMT_SRC) system-images/binfmt/docker-images/$(ARCH).tar: $(BINFMT_SRC)
cd system-images/binfmt && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar cd system-images/binfmt && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar
core/target/$(ARCH)-unknown-linux-musl/release/startbox: $(CORE_SRC) web/dist/static web/patchdb-ui-seed.json $(ENVIRONMENT_FILE) core/target/$(ARCH)-unknown-linux-musl/release/startbox: $(CORE_SRC) $(COMPRESSED_WEB_UIS) web/patchdb-ui-seed.json $(ENVIRONMENT_FILE)
ARCH=$(ARCH) ./core/build-startbox.sh ARCH=$(ARCH) ./core/build-startbox.sh
touch core/target/$(ARCH)-unknown-linux-musl/release/startbox touch core/target/$(ARCH)-unknown-linux-musl/release/startbox
@@ -271,27 +277,28 @@ core/target/$(ARCH)-unknown-linux-musl/release/containerbox: $(CORE_SRC) $(ENVIR
ARCH=$(ARCH) ./core/build-containerbox.sh ARCH=$(ARCH) ./core/build-containerbox.sh
touch core/target/$(ARCH)-unknown-linux-musl/release/containerbox touch core/target/$(ARCH)-unknown-linux-musl/release/containerbox
web/node_modules/.package-lock.json: web/package.json sdk/dist web/node_modules/.package-lock.json: web/package.json sdk/baseDist/package.json
npm --prefix web ci npm --prefix web ci
touch web/node_modules/.package-lock.json touch web/node_modules/.package-lock.json
web/.angular: patch-db/client/dist sdk/dist web/node_modules/.package-lock.json web/.angular/.updated: patch-db/client/dist/index.js sdk/baseDist/package.json web/node_modules/.package-lock.json
rm -rf web/.angular rm -rf web/.angular
mkdir -p web/.angular mkdir -p web/.angular
touch web/.angular/.updated
web/dist/raw/ui: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular web/dist/raw/ui/index.html: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
npm --prefix web run build:ui npm --prefix web run build:ui
touch web/dist/raw/ui touch web/dist/raw/ui/index.html
web/dist/raw/setup-wizard: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular web/dist/raw/setup-wizard/index.html: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
npm --prefix web run build:setup npm --prefix web run build:setup
touch web/dist/raw/setup-wizard touch web/dist/raw/setup-wizard/index.html
web/dist/raw/install-wizard: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular web/dist/raw/install-wizard/index.html: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular/.updated
npm --prefix web run build:install npm --prefix web run build:install-wiz
touch web/dist/raw/install-wizard touch web/dist/raw/install-wizard/index.html
web/dist/static: $(WEB_UIS) $(ENVIRONMENT_FILE) $(COMPRESSED_WEB_UIS): $(WEB_UIS) $(ENVIRONMENT_FILE)
./compress-uis.sh ./compress-uis.sh
web/config.json: $(GIT_HASH_FILE) web/config-sample.json web/config.json: $(GIT_HASH_FILE) web/config-sample.json
@@ -301,13 +308,14 @@ web/patchdb-ui-seed.json: web/package.json
jq '."ack-welcome" = $(shell jq '.version' web/package.json)' web/patchdb-ui-seed.json > ui-seed.tmp jq '."ack-welcome" = $(shell jq '.version' web/package.json)' web/patchdb-ui-seed.json > ui-seed.tmp
mv ui-seed.tmp web/patchdb-ui-seed.json mv ui-seed.tmp web/patchdb-ui-seed.json
patch-db/client/node_modules: patch-db/client/package.json patch-db/client/node_modules/.package-lock.json: patch-db/client/package.json
npm --prefix patch-db/client ci npm --prefix patch-db/client ci
touch patch-db/client/node_modules touch patch-db/client/node_modules/.package-lock.json
patch-db/client/dist: $(PATCH_DB_CLIENT_SRC) patch-db/client/node_modules patch-db/client/dist/index.js: $(PATCH_DB_CLIENT_SRC) patch-db/client/node_modules/.package-lock.json
rm -rf patch-db/client/dist rm -rf patch-db/client/dist
npm --prefix patch-db/client run build npm --prefix patch-db/client run build
touch patch-db/client/dist/index.js
# used by github actions # used by github actions
compiled-$(ARCH).tar: $(COMPILED_TARGETS) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) compiled-$(ARCH).tar: $(COMPILED_TARGETS) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE)

View File

@@ -9,6 +9,7 @@ ca-certificates
cifs-utils cifs-utils
cryptsetup cryptsetup
curl curl
dnsutils
dmidecode dmidecode
dosfstools dosfstools
e2fsprogs e2fsprogs

View File

@@ -43,6 +43,8 @@ if [ -z "$NO_SYNC" ]; then
mount -t overlay \ mount -t overlay \
-olowerdir=/media/startos/current,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \ -olowerdir=/media/startos/current,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \
overlay /media/startos/next overlay /media/startos/next
mkdir -p /media/startos/next/media/startos/root
mount --bind /media/startos/root /media/startos/next/media/startos/root
fi fi
if [ -n "$ONLY_CREATE" ]; then if [ -n "$ONLY_CREATE" ]; then
@@ -75,6 +77,7 @@ umount /media/startos/next/dev
umount /media/startos/next/sys umount /media/startos/next/sys
umount /media/startos/next/proc umount /media/startos/next/proc
umount /media/startos/next/boot umount /media/startos/next/boot
umount /media/startos/next/media/startos/root
if [ "$CHROOT_RES" -eq 0 ]; then if [ "$CHROOT_RES" -eq 0 ]; then
@@ -84,7 +87,12 @@ if [ "$CHROOT_RES" -eq 0 ]; then
echo 'Upgrading...' echo 'Upgrading...'
time mksquashfs /media/startos/next /media/startos/images/next.squashfs -b 4096 -comp gzip if ! time mksquashfs /media/startos/next /media/startos/images/next.squashfs -b 4096 -comp gzip; then
umount -R /media/startos/next
umount -R /media/startos/upper
rm -rf /media/startos/upper /media/startos/next
exit 1
fi
hash=$(b3sum /media/startos/images/next.squashfs | head -c 32) hash=$(b3sum /media/startos/images/next.squashfs | head -c 32)
mv /media/startos/images/next.squashfs /media/startos/images/${hash}.rootfs mv /media/startos/images/next.squashfs /media/startos/images/${hash}.rootfs
ln -rsf /media/startos/images/${hash}.rootfs /media/startos/config/current.rootfs ln -rsf /media/startos/images/${hash}.rootfs /media/startos/config/current.rootfs

View File

@@ -33,10 +33,11 @@ if [ -h /media/startos/config/current.rootfs ] && [ -e /media/startos/config/cur
echo 'Pruning...' echo 'Pruning...'
current="$(readlink -f /media/startos/config/current.rootfs)" current="$(readlink -f /media/startos/config/current.rootfs)"
while [[ "$(df -B1 --output=avail --sync /media/startos/images | tail -n1)" -lt "$needed" ]]; do while [[ "$(df -B1 --output=avail --sync /media/startos/images | tail -n1)" -lt "$needed" ]]; do
to_prune="$(ls -t1 /media/startos/images/*.rootfs /media/startos/images/*.squashfs | grep -v "$current" | tail -n1)" to_prune="$(ls -t1 /media/startos/images/*.rootfs /media/startos/images/*.squashfs 2> /dev/null | grep -v "$current" | tail -n1)"
if [ -e "$to_prune" ]; then if [ -e "$to_prune" ]; then
echo " Pruning $to_prune" echo " Pruning $to_prune"
rm -rf "$to_prune" rm -rf "$to_prune"
sync
else else
>&2 echo "Not enough space and nothing to prune!" >&2 echo "Not enough space and nothing to prune!"
exit 1 exit 1

690
code Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@@ -1,86 +0,0 @@
## Testing
So, we are going to
1. create a fake server
2. pretend socket server os (while the fake server is running)
3. Run a fake effects system (while 1/2 are running)
In order to simulate that we created a server like the start-os and
a fake server (in this case I am using syncthing-wrapper)
### TODO
Undo the packing that I have done earlier, and hijack the embassy.js to use the bundle service + code
Converting embassy.js -> service.js
```sequence {theme="hand"}
startOs ->> startInit.js: Rpc Call
startInit.js ->> service.js: Rpc Converted into js code
```
### Create a fake server
```bash
run_test () {
(
set -e
libs=/home/jh/Projects/start-os/libs/start_init
sockets=/tmp/start9
service=/home/jh/Projects/syncthing-wrapper
docker run \
-v $libs:/libs \
-v $service:/service \
-w /libs \
--rm node:18-alpine \
sh -c "
npm i &&
npm run bundle:esbuild &&
npm run bundle:service
"
docker run \
-v ./libs/start_init/:/libs \
-w /libs \
--rm node:18-alpine \
sh -c "
npm i &&
npm run bundle:esbuild
"
rm -rf $sockets || true
mkdir -p $sockets/sockets
cd $service
docker run \
-v $libs:/start-init \
-v $sockets:/start9 \
--rm -it $(docker build -q .) sh -c "
apk add nodejs &&
node /start-init/bundleEs.js
"
)
}
run_test
```
### Pretend Socket Server OS
First we are going to create our fake server client with the bash then send it the json possible data
```bash
sudo socat - unix-client:/tmp/start9/sockets/rpc.sock
```
<!-- prettier-ignore -->
```json
{"id":"a","method":"run","params":{"methodName":"/dependencyMounts","methodArgs":[]}}
{"id":"a","method":"run","params":{"methodName":"/actions/test","methodArgs":{"input":{"id": 1}}}}
{"id":"b","method":"run","params":{"methodName":"/actions/test","methodArgs":{"id": 1}}}
```

View File

@@ -4,7 +4,7 @@ import { object, string, number, literals, some, unknown } from "ts-matches"
import { Effects } from "../Models/Effects" import { Effects } from "../Models/Effects"
import { CallbackHolder } from "../Models/CallbackHolder" import { CallbackHolder } from "../Models/CallbackHolder"
import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk" import { asError } from "@start9labs/start-sdk/base/lib/util"
const matchRpcError = object({ const matchRpcError = object({
error: object( error: object(
{ {
@@ -35,7 +35,8 @@ let hostSystemId = 0
export type EffectContext = { export type EffectContext = {
procedureId: string | null procedureId: string | null
callbacks: CallbackHolder | null callbacks?: CallbackHolder
constRetry: () => void
} }
const rpcRoundFor = const rpcRoundFor =
@@ -50,7 +51,7 @@ const rpcRoundFor =
JSON.stringify({ JSON.stringify({
id, id,
method, method,
params: { ...params, procedureId }, params: { ...params, procedureId: procedureId || undefined },
}) + "\n", }) + "\n",
) )
}) })
@@ -67,7 +68,7 @@ const rpcRoundFor =
let message = res.error.message let message = res.error.message
console.error( console.error(
"Error in host RPC:", "Error in host RPC:",
utils.asError({ method, params }), utils.asError({ method, params, error: res.error }),
) )
if (string.test(res.error.data)) { if (string.test(res.error.data)) {
message += ": " + res.error.data message += ": " + res.error.data
@@ -100,24 +101,64 @@ const rpcRoundFor =
}) })
} }
function makeEffects(context: EffectContext): Effects { export function makeEffects(context: EffectContext): Effects {
const rpcRound = rpcRoundFor(context.procedureId) const rpcRound = rpcRoundFor(context.procedureId)
const self: Effects = { const self: Effects = {
constRetry: context.constRetry,
clearCallbacks(...[options]: Parameters<T.Effects["clearCallbacks"]>) {
return rpcRound("clear-callbacks", {
...options,
}) as ReturnType<T.Effects["clearCallbacks"]>
},
action: {
clear(...[options]: Parameters<T.Effects["action"]["clear"]>) {
return rpcRound("action.clear", {
...options,
}) as ReturnType<T.Effects["action"]["clear"]>
},
export(...[options]: Parameters<T.Effects["action"]["export"]>) {
return rpcRound("action.export", {
...options,
}) as ReturnType<T.Effects["action"]["export"]>
},
getInput(...[options]: Parameters<T.Effects["action"]["getInput"]>) {
return rpcRound("action.get-input", {
...options,
}) as ReturnType<T.Effects["action"]["getInput"]>
},
request(...[options]: Parameters<T.Effects["action"]["request"]>) {
return rpcRound("action.request", {
...options,
}) as ReturnType<T.Effects["action"]["request"]>
},
run(...[options]: Parameters<T.Effects["action"]["run"]>) {
return rpcRound("action.run", {
...options,
}) as ReturnType<T.Effects["action"]["run"]>
},
clearRequests(
...[options]: Parameters<T.Effects["action"]["clearRequests"]>
) {
return rpcRound("action.clear-requests", {
...options,
}) as ReturnType<T.Effects["action"]["clearRequests"]>
},
},
bind(...[options]: Parameters<T.Effects["bind"]>) { bind(...[options]: Parameters<T.Effects["bind"]>) {
return rpcRound("bind", { return rpcRound("bind", {
...options, ...options,
stack: new Error().stack, stack: new Error().stack,
}) as ReturnType<T.Effects["bind"]> }) as ReturnType<T.Effects["bind"]>
}, },
clearBindings(...[]: Parameters<T.Effects["clearBindings"]>) { clearBindings(...[options]: Parameters<T.Effects["clearBindings"]>) {
return rpcRound("clear-bindings", {}) as ReturnType< return rpcRound("clear-bindings", { ...options }) as ReturnType<
T.Effects["clearBindings"] T.Effects["clearBindings"]
> >
}, },
clearServiceInterfaces( clearServiceInterfaces(
...[]: Parameters<T.Effects["clearServiceInterfaces"]> ...[options]: Parameters<T.Effects["clearServiceInterfaces"]>
) { ) {
return rpcRound("clear-service-interfaces", {}) as ReturnType< return rpcRound("clear-service-interfaces", { ...options }) as ReturnType<
T.Effects["clearServiceInterfaces"] T.Effects["clearServiceInterfaces"]
> >
}, },
@@ -127,27 +168,17 @@ function makeEffects(context: EffectContext): Effects {
> >
}, },
subcontainer: { subcontainer: {
createFs(options: { imageId: string }) { createFs(options: { imageId: string; name: string }) {
return rpcRound("subcontainer.create-fs", options) as ReturnType< return rpcRound("subcontainer.create-fs", options) as ReturnType<
T.Effects["subcontainer"]["createFs"] T.Effects["subcontainer"]["createFs"]
> >
}, },
destroyFs(options: { guid: string }): Promise<void> { destroyFs(options: { guid: string }): Promise<null> {
return rpcRound("subcontainer.destroy-fs", options) as ReturnType< return rpcRound("subcontainer.destroy-fs", options) as ReturnType<
T.Effects["subcontainer"]["destroyFs"] T.Effects["subcontainer"]["destroyFs"]
> >
}, },
}, },
executeAction(...[options]: Parameters<T.Effects["executeAction"]>) {
return rpcRound("execute-action", options) as ReturnType<
T.Effects["executeAction"]
>
},
exportAction(...[options]: Parameters<T.Effects["exportAction"]>) {
return rpcRound("export-action", options) as ReturnType<
T.Effects["exportAction"]
>
},
exportServiceInterface: (( exportServiceInterface: ((
...[options]: Parameters<Effects["exportServiceInterface"]> ...[options]: Parameters<Effects["exportServiceInterface"]>
) => { ) => {
@@ -162,11 +193,6 @@ function makeEffects(context: EffectContext): Effects {
T.Effects["exposeForDependents"] T.Effects["exposeForDependents"]
> >
}, },
getConfigured(...[]: Parameters<T.Effects["getConfigured"]>) {
return rpcRound("get-configured", {}) as ReturnType<
T.Effects["getConfigured"]
>
},
getContainerIp(...[]: Parameters<T.Effects["getContainerIp"]>) { getContainerIp(...[]: Parameters<T.Effects["getContainerIp"]>) {
return rpcRound("get-container-ip", {}) as ReturnType< return rpcRound("get-container-ip", {}) as ReturnType<
T.Effects["getContainerIp"] T.Effects["getContainerIp"]
@@ -230,19 +256,9 @@ function makeEffects(context: EffectContext): Effects {
mount(...[options]: Parameters<T.Effects["mount"]>) { mount(...[options]: Parameters<T.Effects["mount"]>) {
return rpcRound("mount", options) as ReturnType<T.Effects["mount"]> return rpcRound("mount", options) as ReturnType<T.Effects["mount"]>
}, },
clearActions(...[]: Parameters<T.Effects["clearActions"]>) {
return rpcRound("clear-actions", {}) as ReturnType<
T.Effects["clearActions"]
>
},
restart(...[]: Parameters<T.Effects["restart"]>) { restart(...[]: Parameters<T.Effects["restart"]>) {
return rpcRound("restart", {}) as ReturnType<T.Effects["restart"]> return rpcRound("restart", {}) as ReturnType<T.Effects["restart"]>
}, },
setConfigured(...[configured]: Parameters<T.Effects["setConfigured"]>) {
return rpcRound("set-configured", { configured }) as ReturnType<
T.Effects["setConfigured"]
>
},
setDependencies( setDependencies(
dependencies: Parameters<T.Effects["setDependencies"]>[0], dependencies: Parameters<T.Effects["setDependencies"]>[0],
): ReturnType<T.Effects["setDependencies"]> { ): ReturnType<T.Effects["setDependencies"]> {
@@ -268,7 +284,10 @@ function makeEffects(context: EffectContext): Effects {
> >
}, },
setMainStatus(o: { status: "running" | "stopped" }): Promise<void> { getStatus(...[o]: Parameters<T.Effects["getStatus"]>) {
return rpcRound("get-status", o) as ReturnType<T.Effects["getStatus"]>
},
setMainStatus(o: { status: "running" | "stopped" }): Promise<null> {
return rpcRound("set-main-status", o) as ReturnType< return rpcRound("set-main-status", o) as ReturnType<
T.Effects["setHealth"] T.Effects["setHealth"]
> >
@@ -299,18 +318,3 @@ function makeEffects(context: EffectContext): Effects {
} }
return self return self
} }
export function makeProcedureEffects(procedureId: string): Effects {
return makeEffects({ procedureId, callbacks: null })
}
export function makeMainEffects(): MainEffects {
const rpcRound = rpcRoundFor(null)
return {
_type: "main",
clearCallbacks: () => {
return rpcRound("clearCallbacks", {}) as Promise<void>
},
...makeEffects({ procedureId: null, callbacks: new CallbackHolder() }),
}
}

View File

@@ -14,17 +14,14 @@ import {
anyOf, anyOf,
} from "ts-matches" } from "ts-matches"
import { types as T } from "@start9labs/start-sdk" import { types as T, utils } from "@start9labs/start-sdk"
import * as fs from "fs" import * as fs from "fs"
import { CallbackHolder } from "../Models/CallbackHolder" import { CallbackHolder } from "../Models/CallbackHolder"
import { AllGetDependencies } from "../Interfaces/AllGetDependencies" import { AllGetDependencies } from "../Interfaces/AllGetDependencies"
import { jsonPath, unNestPath } from "../Models/JsonPath" import { jsonPath, unNestPath } from "../Models/JsonPath"
import { RunningMain, System } from "../Interfaces/System" import { System } from "../Interfaces/System"
import { import { makeEffects } from "./EffectCreator"
MakeMainEffects,
MakeProcedureEffects,
} from "../Interfaces/MakeEffects"
type MaybePromise<T> = T | Promise<T> type MaybePromise<T> = T | Promise<T>
export const matchRpcResult = anyOf( export const matchRpcResult = anyOf(
object({ result: any }), object({ result: any }),
@@ -45,6 +42,7 @@ export const matchRpcResult = anyOf(
), ),
}), }),
) )
export type RpcResult = typeof matchRpcResult._TYPE export type RpcResult = typeof matchRpcResult._TYPE
type SocketResponse = ({ jsonrpc: "2.0"; id: IdType } & RpcResult) | null type SocketResponse = ({ jsonrpc: "2.0"; id: IdType } & RpcResult) | null
@@ -55,73 +53,96 @@ const jsonrpc = "2.0" as const
const isResult = object({ result: any }).test const isResult = object({ result: any }).test
const idType = some(string, number, literal(null)) const idType = some(string, number, literal(null))
type IdType = null | string | number type IdType = null | string | number | undefined
const runType = object({ const runType = object(
id: idType, {
method: literal("execute"), id: idType,
params: object( method: literal("execute"),
{ params: object(
id: string, {
procedure: string, id: string,
input: any, procedure: string,
timeout: number, input: any,
}, timeout: number,
["timeout"], },
), ["timeout"],
}) ),
const sandboxRunType = object({ },
id: idType, ["id"],
method: literal("sandbox"), )
params: object( const sandboxRunType = object(
{ {
id: string, id: idType,
procedure: string, method: literal("sandbox"),
input: any, params: object(
timeout: number, {
}, id: string,
["timeout"], procedure: string,
), input: any,
}) timeout: number,
},
["timeout"],
),
},
["id"],
)
const callbackType = object({ const callbackType = object({
method: literal("callback"), method: literal("callback"),
params: object({ params: object({
callback: number, id: number,
args: array, args: array,
}), }),
}) })
const initType = object({ const initType = object(
id: idType, {
method: literal("init"), id: idType,
}) method: literal("init"),
const startType = object({ },
id: idType, ["id"],
method: literal("start"), )
}) const startType = object(
const stopType = object({ {
id: idType, id: idType,
method: literal("stop"), method: literal("start"),
}) },
const exitType = object({ ["id"],
id: idType, )
method: literal("exit"), const stopType = object(
}) {
const evalType = object({ id: idType,
id: idType, method: literal("stop"),
method: literal("eval"), },
params: object({ ["id"],
script: string, )
}), const exitType = object(
}) {
id: idType,
method: literal("exit"),
},
["id"],
)
const evalType = object(
{
id: idType,
method: literal("eval"),
params: object({
script: string,
}),
},
["id"],
)
const jsonParse = (x: string) => JSON.parse(x) const jsonParse = (x: string) => JSON.parse(x)
const handleRpc = (id: IdType, result: Promise<RpcResult>) => const handleRpc = (id: IdType, result: Promise<RpcResult>) =>
result result
.then((result) => ({ .then((result) => {
jsonrpc, return {
id, jsonrpc,
...result, id,
})) ...result,
}
})
.then((x) => { .then((x) => {
if ( if (
("result" in x && x.result === undefined) || ("result" in x && x.result === undefined) ||
@@ -144,8 +165,7 @@ const hasId = object({ id: idType }).test
export class RpcListener { export class RpcListener {
unixSocketServer = net.createServer(async (server) => {}) unixSocketServer = net.createServer(async (server) => {})
private _system: System | undefined private _system: System | undefined
private _makeProcedureEffects: MakeProcedureEffects | undefined private callbacks: CallbackHolder | undefined
private _makeMainEffects: MakeMainEffects | undefined
constructor(readonly getDependencies: AllGetDependencies) { constructor(readonly getDependencies: AllGetDependencies) {
if (!fs.existsSync(SOCKET_PARENT)) { if (!fs.existsSync(SOCKET_PARENT)) {
@@ -198,7 +218,11 @@ export class RpcListener {
.then((x) => this.dealWithInput(x)) .then((x) => this.dealWithInput(x))
.catch(mapError) .catch(mapError)
.then(logData("response")) .then(logData("response"))
.then(writeDataToSocket), .then(writeDataToSocket)
.catch((e) => {
console.error(`Major error in socket handling: ${e}`)
console.debug(`Data in: ${a.toString()}`)
}),
) )
}) })
} }
@@ -208,18 +232,33 @@ export class RpcListener {
return this._system return this._system
} }
private get makeProcedureEffects() { private callbackHolders: Map<string, CallbackHolder> = new Map()
if (!this._makeProcedureEffects) { private removeCallbackHolderFor(procedure: string) {
this._makeProcedureEffects = this.getDependencies.makeProcedureEffects() const prev = this.callbackHolders.get(procedure)
if (prev) {
this.callbackHolders.delete(procedure)
this.callbacks?.removeChild(prev)
} }
return this._makeProcedureEffects }
private callbackHolderFor(procedure: string): CallbackHolder {
this.removeCallbackHolderFor(procedure)
const callbackHolder = this.callbacks!.child()
this.callbackHolders.set(procedure, callbackHolder)
return callbackHolder
} }
private get makeMainEffects() { callCallback(callback: number, args: any[]): void {
if (!this._makeMainEffects) { if (this.callbacks) {
this._makeMainEffects = this.getDependencies.makeMainEffects() this.callbacks
.callCallback(callback, args)
.catch((error) =>
console.error(`callback ${callback} failed`, utils.asError(error)),
)
} else {
console.warn(
`callback ${callback} ignored because system is not initialized`,
)
} }
return this._makeMainEffects
} }
private dealWithInput(input: unknown): MaybePromise<SocketResponse> { private dealWithInput(input: unknown): MaybePromise<SocketResponse> {
@@ -227,40 +266,49 @@ export class RpcListener {
.when(runType, async ({ id, params }) => { .when(runType, async ({ id, params }) => {
const system = this.system const system = this.system
const procedure = jsonPath.unsafeCast(params.procedure) const procedure = jsonPath.unsafeCast(params.procedure)
const effects = this.getDependencies.makeProcedureEffects()(params.id) const { input, timeout, id: procedureId } = params
const input = params.input const result = this.getResult(
const timeout = params.timeout procedure,
const result = getResult(procedure, system, effects, timeout, input) system,
procedureId,
timeout,
input,
)
return handleRpc(id, result) return handleRpc(id, result)
}) })
.when(sandboxRunType, async ({ id, params }) => { .when(sandboxRunType, async ({ id, params }) => {
const system = this.system const system = this.system
const procedure = jsonPath.unsafeCast(params.procedure) const procedure = jsonPath.unsafeCast(params.procedure)
const effects = this.makeProcedureEffects(params.id) const { input, timeout, id: procedureId } = params
const result = getResult( const result = this.getResult(
procedure, procedure,
system, system,
effects, procedureId,
params.input, timeout,
params.input, input,
) )
return handleRpc(id, result) return handleRpc(id, result)
}) })
.when(callbackType, async ({ params: { callback, args } }) => { .when(callbackType, async ({ params: { id, args } }) => {
this.system.callCallback(callback, args) this.callCallback(id, args)
return null return null
}) })
.when(startType, async ({ id }) => { .when(startType, async ({ id }) => {
const callbacks = this.callbackHolderFor("main")
const effects = makeEffects({
procedureId: null,
callbacks,
constRetry: () => {},
})
return handleRpc( return handleRpc(
id, id,
this.system this.system.start(effects).then((result) => ({ result })),
.start(this.makeMainEffects())
.then((result) => ({ result })),
) )
}) })
.when(stopType, async ({ id }) => { .when(stopType, async ({ id }) => {
this.removeCallbackHolderFor("main")
return handleRpc( return handleRpc(
id, id,
this.system.stop().then((result) => ({ result })), this.system.stop().then((result) => ({ result })),
@@ -280,7 +328,20 @@ export class RpcListener {
(async () => { (async () => {
if (!this._system) { if (!this._system) {
const system = await this.getDependencies.system() const system = await this.getDependencies.system()
await system.containerInit() this.callbacks = new CallbackHolder(
makeEffects({
procedureId: null,
constRetry: () => {},
}),
)
const callbacks = this.callbackHolderFor("containerInit")
await system.containerInit(
makeEffects({
procedureId: null,
callbacks,
constRetry: () => {},
}),
)
this._system = system this._system = system
} }
})().then((result) => ({ result })), })().then((result) => ({ result })),
@@ -312,17 +373,20 @@ export class RpcListener {
})(), })(),
) )
}) })
.when(shape({ id: idType, method: string }), ({ id, method }) => ({ .when(
jsonrpc, shape({ id: idType, method: string }, ["id"]),
id, ({ id, method }) => ({
error: { jsonrpc,
code: -32601, id,
message: `Method not found`, error: {
data: { code: -32601,
details: method, message: `Method not found`,
data: {
details: method,
},
}, },
}, }),
})) )
.defaultToLazy(() => { .defaultToLazy(() => {
console.warn( console.warn(
@@ -341,98 +405,81 @@ export class RpcListener {
} }
}) })
} }
} private getResult(
function getResult( procedure: typeof jsonPath._TYPE,
procedure: typeof jsonPath._TYPE, system: System,
system: System, procedureId: string,
effects: T.Effects, timeout: number | undefined,
timeout: number | undefined, input: any,
input: any, ) {
) { const ensureResultTypeShape = (
const ensureResultTypeShape = ( result: void | T.ActionInput | T.ActionResult | null,
result: ): { result: any } => {
| void return { result }
| T.ConfigRes
| T.PropertiesReturn
| T.ActionMetadata[]
| T.ActionResult,
): { result: any } => {
if (isResult(result)) return result
return { result }
}
return (async () => {
switch (procedure) {
case "/backup/create":
return system.createBackup(effects, timeout || null)
case "/backup/restore":
return system.restoreBackup(effects, timeout || null)
case "/config/get":
return system.getConfig(effects, timeout || null)
case "/config/set":
return system.setConfig(effects, input, timeout || null)
case "/properties":
return system.properties(effects, timeout || null)
case "/actions/metadata":
return system.actionsMetadata(effects)
case "/init":
return system.packageInit(
effects,
string.optional().unsafeCast(input),
timeout || null,
)
case "/uninit":
return system.packageUninit(
effects,
string.optional().unsafeCast(input),
timeout || null,
)
default:
const procedures = unNestPath(procedure)
switch (true) {
case procedures[1] === "actions" && procedures[3] === "get":
return system.action(effects, procedures[2], input, timeout || null)
case procedures[1] === "actions" && procedures[3] === "run":
return system.action(effects, procedures[2], input, timeout || null)
case procedures[1] === "dependencies" && procedures[3] === "query":
return system.dependenciesAutoconfig(
effects,
procedures[2],
input,
timeout || null,
)
case procedures[1] === "dependencies" && procedures[3] === "update":
return system.dependenciesAutoconfig(
effects,
procedures[2],
input,
timeout || null,
)
}
} }
})().then(ensureResultTypeShape, (error) => const callbacks = this.callbackHolderFor(procedure)
matches(error) const effects = makeEffects({
.when( procedureId,
object( callbacks,
{ constRetry: () => {},
error: string, })
code: number,
}, return (async () => {
["code"], switch (procedure) {
{ code: 0 }, case "/backup/create":
), return system.createBackup(effects, timeout || null)
(error) => ({ case "/backup/restore":
return system.restoreBackup(effects, timeout || null)
case "/packageInit":
return system.packageInit(effects, timeout || null)
case "/packageUninit":
return system.packageUninit(
effects,
string.optional().unsafeCast(input),
timeout || null,
)
default:
const procedures = unNestPath(procedure)
switch (true) {
case procedures[1] === "actions" && procedures[3] === "getInput":
return system.getActionInput(
effects,
procedures[2],
timeout || null,
)
case procedures[1] === "actions" && procedures[3] === "run":
return system.runAction(
effects,
procedures[2],
input.input,
timeout || null,
)
}
}
})().then(ensureResultTypeShape, (error) =>
matches(error)
.when(
object(
{
error: string,
code: number,
},
["code"],
{ code: 0 },
),
(error) => ({
error: {
code: error.code,
message: error.error,
},
}),
)
.defaultToLazy(() => ({
error: { error: {
code: error.code, code: 0,
message: error.error, message: String(error),
}, },
}), })),
) )
.defaultToLazy(() => ({ }
error: {
code: 0,
message: String(error),
},
})),
)
} }

View File

@@ -8,7 +8,7 @@ import {
CommandOptions, CommandOptions,
ExecOptions, ExecOptions,
ExecSpawnable, ExecSpawnable,
} from "@start9labs/start-sdk/cjs/lib/util/SubContainer" } from "@start9labs/start-sdk/package/lib/util/SubContainer"
export const exec = promisify(cp.exec) export const exec = promisify(cp.exec)
export const execFile = promisify(cp.execFile) export const execFile = promisify(cp.execFile)
@@ -20,6 +20,7 @@ export class DockerProcedureContainer {
packageId: string, packageId: string,
data: DockerProcedure, data: DockerProcedure,
volumes: { [id: VolumeId]: Volume }, volumes: { [id: VolumeId]: Volume },
name: string,
options: { subcontainer?: ExecSpawnable } = {}, options: { subcontainer?: ExecSpawnable } = {},
) { ) {
const subcontainer = const subcontainer =
@@ -29,6 +30,7 @@ export class DockerProcedureContainer {
packageId, packageId,
data, data,
volumes, volumes,
name,
)) ))
return new DockerProcedureContainer(subcontainer) return new DockerProcedureContainer(subcontainer)
} }
@@ -37,8 +39,13 @@ export class DockerProcedureContainer {
packageId: string, packageId: string,
data: DockerProcedure, data: DockerProcedure,
volumes: { [id: VolumeId]: Volume }, volumes: { [id: VolumeId]: Volume },
name: string,
) { ) {
const subcontainer = await SubContainer.of(effects, { id: data.image }) const subcontainer = await SubContainer.of(
effects,
{ id: data.image },
name,
)
if (data.mounts) { if (data.mounts) {
const mounts = data.mounts const mounts = data.mounts
@@ -144,7 +151,7 @@ export class DockerProcedureContainer {
} }
} }
async spawn(commands: string[]): Promise<cp.ChildProcessWithoutNullStreams> { async spawn(commands: string[]): Promise<cp.ChildProcess> {
return await this.subcontainer.spawn(commands) return await this.subcontainer.spawn(commands)
} }
} }

View File

@@ -2,11 +2,10 @@ import { polyfillEffects } from "./polyfillEffects"
import { DockerProcedureContainer } from "./DockerProcedureContainer" import { DockerProcedureContainer } from "./DockerProcedureContainer"
import { SystemForEmbassy } from "." import { SystemForEmbassy } from "."
import { T, utils } from "@start9labs/start-sdk" import { T, utils } from "@start9labs/start-sdk"
import { Daemon } from "@start9labs/start-sdk/cjs/lib/mainFn/Daemon" import { Daemon } from "@start9labs/start-sdk/package/lib/mainFn/Daemon"
import { Effects } from "../../../Models/Effects" import { Effects } from "../../../Models/Effects"
import { off } from "node:process" import { off } from "node:process"
import { CommandController } from "@start9labs/start-sdk/cjs/lib/mainFn/CommandController" import { CommandController } from "@start9labs/start-sdk/package/lib/mainFn/CommandController"
import { asError } from "@start9labs/start-sdk/cjs/lib/util"
const EMBASSY_HEALTH_INTERVAL = 15 * 1000 const EMBASSY_HEALTH_INTERVAL = 15 * 1000
const EMBASSY_PROPERTIES_LOOP = 30 * 1000 const EMBASSY_PROPERTIES_LOOP = 30 * 1000
@@ -62,6 +61,7 @@ export class MainLoop {
this.system.manifest.id, this.system.manifest.id,
this.system.manifest.main, this.system.manifest.main,
this.system.manifest.volumes, this.system.manifest.volumes,
`Main - ${currentCommand.join(" ")}`,
) )
return CommandController.of()( return CommandController.of()(
this.effects, this.effects,
@@ -136,7 +136,7 @@ export class MainLoop {
delete this.healthLoops delete this.healthLoops
await main?.daemon await main?.daemon
.stop() .stop()
.catch((e) => console.error(`Main loop error`, utils.asError(e))) .catch((e: unknown) => console.error(`Main loop error`, utils.asError(e)))
this.effects.setMainStatus({ status: "stopped" }) this.effects.setMainStatus({ status: "stopped" })
if (healthLoops) healthLoops.forEach((x) => clearInterval(x.interval)) if (healthLoops) healthLoops.forEach((x) => clearInterval(x.interval))
} }
@@ -154,7 +154,7 @@ export class MainLoop {
result: "starting", result: "starting",
message: null, message: null,
}) })
.catch((e) => console.error(asError(e))) .catch((e) => console.error(utils.asError(e)))
const interval = setInterval(async () => { const interval = setInterval(async () => {
const actionProcedure = value const actionProcedure = value
const timeChanged = Date.now() - start const timeChanged = Date.now() - start
@@ -162,21 +162,30 @@ export class MainLoop {
const subcontainer = actionProcedure.inject const subcontainer = actionProcedure.inject
? this.mainSubContainerHandle ? this.mainSubContainerHandle
: undefined : undefined
// prettier-ignore const commands = [
const container = actionProcedure.entrypoint,
await DockerProcedureContainer.of( ...actionProcedure.args,
effects, ]
manifest.id, const container = await DockerProcedureContainer.of(
actionProcedure, effects,
manifest.volumes, manifest.id,
{ actionProcedure,
subcontainer, manifest.volumes,
} `Health Check - ${commands.join(" ")}`,
) {
const executed = await container.exec( subcontainer,
[actionProcedure.entrypoint, ...actionProcedure.args], },
{ input: JSON.stringify(timeChanged) },
) )
const env: Record<string, string> = actionProcedure.inject
? {
HOME: "/root",
}
: {}
const executed = await container.exec(commands, {
input: JSON.stringify(timeChanged),
env,
})
if (executed.exitCode === 0) { if (executed.exitCode === 0) {
await effects.setHealth({ await effects.setHealth({
id: healthId, id: healthId,
@@ -227,6 +236,18 @@ export class MainLoop {
}) })
return return
} }
if (executed.exitCode && executed.exitCode > 0) {
await effects.setHealth({
id: healthId,
name: value.name,
result: "failure",
message:
executed.stderr.toString() ||
executed.stdout.toString() ||
`Program exited with code ${executed.exitCode}:`,
})
return
}
await effects.setHealth({ await effects.setHealth({
id: healthId, id: healthId,
name: value.name, name: value.name,

View File

@@ -264,7 +264,6 @@ exports[`transformConfigSpec transformConfigSpec(bitcoind) 1`] = `
"disabled": false, "disabled": false,
"immutable": false, "immutable": false,
"name": "Pruning Mode", "name": "Pruning Mode",
"required": true,
"type": "union", "type": "union",
"variants": { "variants": {
"automatic": { "automatic": {
@@ -524,7 +523,6 @@ exports[`transformConfigSpec transformConfigSpec(embassyPages) 1`] = `
"disabled": false, "disabled": false,
"immutable": false, "immutable": false,
"name": "Type", "name": "Type",
"required": true,
"type": "union", "type": "union",
"variants": { "variants": {
"index": { "index": {
@@ -589,7 +587,6 @@ exports[`transformConfigSpec transformConfigSpec(embassyPages) 1`] = `
"disabled": false, "disabled": false,
"immutable": false, "immutable": false,
"name": "Folder Location", "name": "Folder Location",
"required": false,
"type": "select", "type": "select",
"values": { "values": {
"filebrowser": "filebrowser", "filebrowser": "filebrowser",
@@ -644,7 +641,6 @@ exports[`transformConfigSpec transformConfigSpec(embassyPages) 1`] = `
"disabled": false, "disabled": false,
"immutable": false, "immutable": false,
"name": "Type", "name": "Type",
"required": true,
"type": "union", "type": "union",
"variants": { "variants": {
"redirect": { "redirect": {
@@ -705,7 +701,6 @@ exports[`transformConfigSpec transformConfigSpec(embassyPages) 1`] = `
"disabled": false, "disabled": false,
"immutable": false, "immutable": false,
"name": "Folder Location", "name": "Folder Location",
"required": false,
"type": "select", "type": "select",
"values": { "values": {
"filebrowser": "filebrowser", "filebrowser": "filebrowser",
@@ -758,7 +753,6 @@ exports[`transformConfigSpec transformConfigSpec(nostr2) 1`] = `
"disabled": false, "disabled": false,
"immutable": false, "immutable": false,
"name": "Relay Type", "name": "Relay Type",
"required": true,
"type": "union", "type": "union",
"variants": { "variants": {
"private": { "private": {

View File

@@ -2,8 +2,8 @@ import { ExtendedVersion, types as T, utils } from "@start9labs/start-sdk"
import * as fs from "fs/promises" import * as fs from "fs/promises"
import { polyfillEffects } from "./polyfillEffects" import { polyfillEffects } from "./polyfillEffects"
import { Duration, duration, fromDuration } from "../../../Models/Duration" import { fromDuration } from "../../../Models/Duration"
import { System, Procedure } from "../../../Interfaces/System" import { System } from "../../../Interfaces/System"
import { matchManifest, Manifest } from "./matchManifest" import { matchManifest, Manifest } from "./matchManifest"
import * as childProcess from "node:child_process" import * as childProcess from "node:child_process"
import { DockerProcedureContainer } from "./DockerProcedureContainer" import { DockerProcedureContainer } from "./DockerProcedureContainer"
@@ -27,19 +27,12 @@ import {
Parser, Parser,
array, array,
} from "ts-matches" } from "ts-matches"
import { JsonPath, unNestPath } from "../../../Models/JsonPath" import { AddSslOptions } from "@start9labs/start-sdk/base/lib/osBindings"
import { RpcResult, matchRpcResult } from "../../RpcListener"
import { CT } from "@start9labs/start-sdk"
import {
AddSslOptions,
BindOptions,
} from "@start9labs/start-sdk/cjs/lib/osBindings"
import { import {
BindOptionsByProtocol, BindOptionsByProtocol,
Host,
MultiHost, MultiHost,
} from "@start9labs/start-sdk/cjs/lib/interfaces/Host" } from "@start9labs/start-sdk/base/lib/interfaces/Host"
import { ServiceInterfaceBuilder } from "@start9labs/start-sdk/cjs/lib/interfaces/ServiceInterfaceBuilder" import { ServiceInterfaceBuilder } from "@start9labs/start-sdk/base/lib/interfaces/ServiceInterfaceBuilder"
import { Effects } from "../../../Models/Effects" import { Effects } from "../../../Models/Effects"
import { import {
OldConfigSpec, OldConfigSpec,
@@ -48,18 +41,16 @@ import {
transformNewConfigToOld, transformNewConfigToOld,
transformOldConfigToNew, transformOldConfigToNew,
} from "./transformConfigSpec" } from "./transformConfigSpec"
import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk" import { partialDiff } from "@start9labs/start-sdk/base/lib/util"
import { StorePath } from "@start9labs/start-sdk/cjs/lib/store/PathBuilder"
type Optional<A> = A | undefined | null type Optional<A> = A | undefined | null
function todo(): never { function todo(): never {
throw new Error("Not implemented") throw new Error("Not implemented")
} }
const execFile = promisify(childProcess.execFile)
const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json" const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json"
export const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js" export const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js"
const EMBASSY_POINTER_PATH_PREFIX = "/embassyConfig" as StorePath const EMBASSY_POINTER_PATH_PREFIX = "/embassyConfig" as utils.StorePath
const matchResult = object({ const matchResult = object({
result: any, result: any,
@@ -144,6 +135,34 @@ type OldGetConfigRes = {
spec: OldConfigSpec spec: OldConfigSpec
} }
export type PropertiesValue =
| {
/** The type of this value, either "string" or "object" */
type: "object"
/** A nested mapping of values. The user will experience this as a nested page with back button */
value: { [k: string]: PropertiesValue }
/** (optional) A human readable description of the new set of values */
description: string | null
}
| {
/** The type of this value, either "string" or "object" */
type: "string"
/** The value to display to the user */
value: string
/** A human readable description of the value */
description: string | null
/** Whether or not to mask the value, for example, when displaying a password */
masked: boolean | null
/** Whether or not to include a button for copying the value to clipboard */
copyable: boolean | null
/** Whether or not to include a button for displaying the value as a QR code */
qr: boolean | null
}
export type PropertiesReturn = {
[key: string]: PropertiesValue
}
export type PackagePropertiesV2 = { export type PackagePropertiesV2 = {
[name: string]: PackagePropertyObject | PackagePropertyString [name: string]: PackagePropertyObject | PackagePropertyString
} }
@@ -166,7 +185,7 @@ export type PackagePropertyObject = {
const asProperty_ = ( const asProperty_ = (
x: PackagePropertyString | PackagePropertyObject, x: PackagePropertyString | PackagePropertyObject,
): T.PropertiesValue => { ): PropertiesValue => {
if (x.type === "object") { if (x.type === "object") {
return { return {
...x, ...x,
@@ -186,7 +205,7 @@ const asProperty_ = (
...x, ...x,
} }
} }
const asProperty = (x: PackagePropertiesV2): T.PropertiesReturn => const asProperty = (x: PackagePropertiesV2): PropertiesReturn =>
Object.fromEntries( Object.fromEntries(
Object.entries(x).map(([key, value]) => [key, asProperty_(value)]), Object.entries(x).map(([key, value]) => [key, asProperty_(value)]),
) )
@@ -223,6 +242,31 @@ const matchProperties = object({
data: matchPackageProperties, data: matchPackageProperties,
}) })
function convertProperties(
name: string,
value: PropertiesValue,
): T.ActionResultMember {
if (value.type === "string") {
return {
type: "single",
name,
description: value.description,
copyable: value.copyable || false,
masked: value.masked || false,
qr: value.qr || false,
value: value.value,
}
}
return {
type: "group",
name,
description: value.description,
value: Object.entries(value.value).map(([name, value]) =>
convertProperties(name, value),
),
}
}
const DEFAULT_REGISTRY = "https://registry.start9.com" const DEFAULT_REGISTRY = "https://registry.start9.com"
export class SystemForEmbassy implements System { export class SystemForEmbassy implements System {
currentRunning: MainLoop | undefined currentRunning: MainLoop | undefined
@@ -248,50 +292,38 @@ export class SystemForEmbassy implements System {
readonly moduleCode: Partial<U.ExpectedExports>, readonly moduleCode: Partial<U.ExpectedExports>,
) {} ) {}
async actionsMetadata(effects: T.Effects): Promise<T.ActionMetadata[]> { async containerInit(effects: Effects): Promise<void> {
const actions = Object.entries(this.manifest.actions ?? {}) for (let depId in this.manifest.dependencies) {
return Promise.all( if (this.manifest.dependencies[depId].config) {
actions.map(async ([actionId, action]): Promise<T.ActionMetadata> => { await this.dependenciesAutoconfig(effects, depId, null)
const name = action.name ?? actionId }
const description = action.description }
const warning = action.warning ?? null await effects.setMainStatus({ status: "stopped" })
const disabled = false await this.exportActions(effects)
const input = (await convertToNewConfig(action["input-spec"] as any)) await this.exportNetwork(effects)
.spec await this.containerSetDependencies(effects)
const hasRunning = !!action["allowed-statuses"].find( }
(x) => x === "running", async containerSetDependencies(effects: T.Effects) {
) const oldDeps: Record<string, string[]> = Object.fromEntries(
const hasStopped = !!action["allowed-statuses"].find( await effects
(x) => x === "stopped", .getDependencies()
) .then((x) =>
// prettier-ignore x.flatMap((x) =>
const allowedStatuses = x.kind === "running" ? [[x.id, x?.healthChecks || []]] : [],
hasRunning && hasStopped ? "any": ),
hasRunning ? "onlyRunning" : )
"onlyStopped" .catch(() => []),
)
const group = null await this.setDependencies(effects, oldDeps)
return {
name,
description,
warning,
disabled,
allowedStatuses,
group,
input,
}
}),
)
} }
async containerInit(): Promise<void> {}
async exit(): Promise<void> { async exit(): Promise<void> {
if (this.currentRunning) await this.currentRunning.clean() if (this.currentRunning) await this.currentRunning.clean()
delete this.currentRunning delete this.currentRunning
} }
async start(effects: MainEffects): Promise<void> { async start(effects: T.Effects): Promise<void> {
effects.constRetry = utils.once(() => effects.restart())
if (!!this.currentRunning) return if (!!this.currentRunning) return
this.currentRunning = await MainLoop.of(this, effects) this.currentRunning = await MainLoop.of(this, effects)
@@ -308,16 +340,26 @@ export class SystemForEmbassy implements System {
} }
} }
async packageInit( async packageInit(effects: Effects, timeoutMs: number | null): Promise<void> {
effects: Effects, const previousVersion = await effects.getDataVersion()
previousVersion: Optional<string>, if (previousVersion) {
timeoutMs: number | null, if (
): Promise<void> { (await this.migration(effects, previousVersion, timeoutMs)).configured
if (previousVersion) ) {
await this.migration(effects, previousVersion, timeoutMs) await effects.action.clearRequests({ only: ["needs-config"] })
await effects.setMainStatus({ status: "stopped" }) }
await this.exportActions(effects) await effects.setDataVersion({
await this.exportNetwork(effects) version: ExtendedVersion.parseEmver(this.manifest.version).toString(),
})
} else if (this.manifest.config) {
await effects.action.request({
packageId: this.manifest.id,
actionId: "config",
severity: "critical",
replayId: "needs-config",
reason: "This service must be configured before it can be run",
})
}
} }
async exportNetwork(effects: Effects) { async exportNetwork(effects: Effects) {
for (const [id, interfaceValue] of Object.entries( for (const [id, interfaceValue] of Object.entries(
@@ -400,10 +442,75 @@ export class SystemForEmbassy implements System {
) )
} }
} }
async getActionInput(
effects: Effects,
actionId: string,
timeoutMs: number | null,
): Promise<T.ActionInput | null> {
if (actionId === "config") {
const config = await this.getConfig(effects, timeoutMs)
return { spec: config.spec, value: config.config }
} else if (actionId === "properties") {
return null
} else {
const oldSpec = this.manifest.actions?.[actionId]?.["input-spec"]
if (!oldSpec) return null
return {
spec: transformConfigSpec(oldSpec as OldConfigSpec),
value: null,
}
}
}
async runAction(
effects: Effects,
actionId: string,
input: unknown,
timeoutMs: number | null,
): Promise<T.ActionResult | null> {
if (actionId === "config") {
await this.setConfig(effects, input, timeoutMs)
return null
} else if (actionId === "properties") {
return {
version: "1",
title: "Properties",
message: null,
result: {
type: "group",
value: Object.entries(await this.properties(effects, timeoutMs)).map(
([name, value]) => convertProperties(name, value),
),
},
}
} else {
return this.action(effects, actionId, input, timeoutMs)
}
}
async exportActions(effects: Effects) { async exportActions(effects: Effects) {
const manifest = this.manifest const manifest = this.manifest
if (!manifest.actions) return const actions = {
for (const [actionId, action] of Object.entries(manifest.actions)) { ...manifest.actions,
}
if (manifest.config) {
actions.config = {
name: "Configure",
description: `Customize ${manifest.title}`,
"allowed-statuses": ["running", "stopped"],
"input-spec": {},
implementation: { type: "script", args: [] },
}
}
if (manifest.properties) {
actions.properties = {
name: "Properties",
description:
"Runtime information, credentials, and other values of interest",
"allowed-statuses": ["running", "stopped"],
"input-spec": null,
implementation: { type: "script", args: [] },
}
}
for (const [actionId, action] of Object.entries(actions)) {
const hasRunning = !!action["allowed-statuses"].find( const hasRunning = !!action["allowed-statuses"].find(
(x) => x === "running", (x) => x === "running",
) )
@@ -412,21 +519,22 @@ export class SystemForEmbassy implements System {
) )
// prettier-ignore // prettier-ignore
const allowedStatuses = hasRunning && hasStopped ? "any": const allowedStatuses = hasRunning && hasStopped ? "any":
hasRunning ? "onlyRunning" : hasRunning ? "only-running" :
"onlyStopped" "only-stopped"
await effects.exportAction({ await effects.action.export({
id: actionId, id: actionId,
metadata: { metadata: {
name: action.name, name: action.name,
description: action.description, description: action.description,
warning: action.warning || null, warning: action.warning || null,
input: action["input-spec"] as CT.InputSpec, visibility: "enabled",
disabled: false,
allowedStatuses, allowedStatuses,
hasInput: !!action["input-spec"],
group: null, group: null,
}, },
}) })
} }
await effects.action.clear({ except: Object.keys(actions) })
} }
async packageUninit( async packageUninit(
effects: Effects, effects: Effects,
@@ -443,6 +551,7 @@ export class SystemForEmbassy implements System {
): Promise<void> { ): Promise<void> {
const backup = this.manifest.backup.create const backup = this.manifest.backup.create
if (backup.type === "docker") { if (backup.type === "docker") {
const commands = [backup.entrypoint, ...backup.args]
const container = await DockerProcedureContainer.of( const container = await DockerProcedureContainer.of(
effects, effects,
this.manifest.id, this.manifest.id,
@@ -451,8 +560,9 @@ export class SystemForEmbassy implements System {
...this.manifest.volumes, ...this.manifest.volumes,
BACKUP: { type: "backup", readonly: false }, BACKUP: { type: "backup", readonly: false },
}, },
`Backup - ${commands.join(" ")}`,
) )
await container.execFail([backup.entrypoint, ...backup.args], timeoutMs) await container.execFail(commands, timeoutMs)
} else { } else {
const moduleCode = await this.moduleCode const moduleCode = await this.moduleCode
await moduleCode.createBackup?.(polyfillEffects(effects, this.manifest)) await moduleCode.createBackup?.(polyfillEffects(effects, this.manifest))
@@ -464,6 +574,7 @@ export class SystemForEmbassy implements System {
): Promise<void> { ): Promise<void> {
const restoreBackup = this.manifest.backup.restore const restoreBackup = this.manifest.backup.restore
if (restoreBackup.type === "docker") { if (restoreBackup.type === "docker") {
const commands = [restoreBackup.entrypoint, ...restoreBackup.args]
const container = await DockerProcedureContainer.of( const container = await DockerProcedureContainer.of(
effects, effects,
this.manifest.id, this.manifest.id,
@@ -472,20 +583,15 @@ export class SystemForEmbassy implements System {
...this.manifest.volumes, ...this.manifest.volumes,
BACKUP: { type: "backup", readonly: true }, BACKUP: { type: "backup", readonly: true },
}, },
`Restore Backup - ${commands.join(" ")}`,
) )
await container.execFail( await container.execFail(commands, timeoutMs)
[restoreBackup.entrypoint, ...restoreBackup.args],
timeoutMs,
)
} else { } else {
const moduleCode = await this.moduleCode const moduleCode = await this.moduleCode
await moduleCode.restoreBackup?.(polyfillEffects(effects, this.manifest)) await moduleCode.restoreBackup?.(polyfillEffects(effects, this.manifest))
} }
} }
async getConfig( async getConfig(effects: Effects, timeoutMs: number | null) {
effects: Effects,
timeoutMs: number | null,
): Promise<T.ConfigRes> {
return this.getConfigUncleaned(effects, timeoutMs).then(convertToNewConfig) return this.getConfigUncleaned(effects, timeoutMs).then(convertToNewConfig)
} }
private async getConfigUncleaned( private async getConfigUncleaned(
@@ -495,20 +601,17 @@ export class SystemForEmbassy implements System {
const config = this.manifest.config?.get const config = this.manifest.config?.get
if (!config) return { spec: {} } if (!config) return { spec: {} }
if (config.type === "docker") { if (config.type === "docker") {
const commands = [config.entrypoint, ...config.args]
const container = await DockerProcedureContainer.of( const container = await DockerProcedureContainer.of(
effects, effects,
this.manifest.id, this.manifest.id,
config, config,
this.manifest.volumes, this.manifest.volumes,
`Get Config - ${commands.join(" ")}`,
) )
// TODO: yaml // TODO: yaml
return JSON.parse( return JSON.parse(
( (await container.execFail(commands, timeoutMs)).stdout.toString(),
await container.execFail(
[config.entrypoint, ...config.args],
timeoutMs,
)
).stdout.toString(),
) )
} else { } else {
const moduleCode = await this.moduleCode const moduleCode = await this.moduleCode
@@ -543,28 +646,25 @@ export class SystemForEmbassy implements System {
const setConfigValue = this.manifest.config?.set const setConfigValue = this.manifest.config?.set
if (!setConfigValue) return if (!setConfigValue) return
if (setConfigValue.type === "docker") { if (setConfigValue.type === "docker") {
const commands = [
setConfigValue.entrypoint,
...setConfigValue.args,
JSON.stringify(newConfig),
]
const container = await DockerProcedureContainer.of( const container = await DockerProcedureContainer.of(
effects, effects,
this.manifest.id, this.manifest.id,
setConfigValue, setConfigValue,
this.manifest.volumes, this.manifest.volumes,
`Set Config - ${commands.join(" ")}`,
) )
const answer = matchSetResult.unsafeCast( const answer = matchSetResult.unsafeCast(
JSON.parse( JSON.parse(
( (await container.execFail(commands, timeoutMs)).stdout.toString(),
await container.execFail(
[
setConfigValue.entrypoint,
...setConfigValue.args,
JSON.stringify(newConfig),
],
timeoutMs,
)
).stdout.toString(),
), ),
) )
const dependsOn = answer["depends-on"] ?? answer.dependsOn ?? {} const dependsOn = answer["depends-on"] ?? answer.dependsOn ?? {}
await this.setConfigSetConfig(effects, dependsOn) await this.setDependencies(effects, dependsOn)
return return
} else if (setConfigValue.type === "script") { } else if (setConfigValue.type === "script") {
const moduleCode = await this.moduleCode const moduleCode = await this.moduleCode
@@ -587,31 +687,60 @@ export class SystemForEmbassy implements System {
}), }),
) )
const dependsOn = answer["depends-on"] ?? answer.dependsOn ?? {} const dependsOn = answer["depends-on"] ?? answer.dependsOn ?? {}
await this.setConfigSetConfig(effects, dependsOn) await this.setDependencies(effects, dependsOn)
return return
} }
} }
private async setConfigSetConfig( private async setDependencies(
effects: Effects, effects: Effects,
dependsOn: { [x: string]: readonly string[] }, rawDepends: { [x: string]: readonly string[] },
) { ) {
const dependsOn: Record<string, readonly string[] | null> = {
...Object.fromEntries(
Object.entries(this.manifest.dependencies || {})?.map((x) => [
x[0],
null,
]) || [],
),
...rawDepends,
}
await effects.setDependencies({ await effects.setDependencies({
dependencies: Object.entries(dependsOn).flatMap(([key, value]) => { dependencies: Object.entries(dependsOn).flatMap(
const dependency = this.manifest.dependencies?.[key] ([key, value]): T.Dependencies => {
if (!dependency) return [] const dependency = this.manifest.dependencies?.[key]
const versionRange = dependency.version if (!dependency) return []
const registryUrl = DEFAULT_REGISTRY if (value == null) {
const kind = "running" const versionRange = dependency.version
return [ if (dependency.requirement.type === "required") {
{ return [
id: key, {
versionRange, id: key,
registryUrl, versionRange,
kind, kind: "running",
healthChecks: [...value], healthChecks: [],
}, },
] ]
}), }
return [
{
kind: "exists",
id: key,
versionRange,
},
]
}
const versionRange = dependency.version
const kind = "running"
return [
{
id: key,
versionRange,
kind,
healthChecks: [...value],
},
]
},
),
}) })
} }
@@ -619,7 +748,7 @@ export class SystemForEmbassy implements System {
effects: Effects, effects: Effects,
fromVersion: string, fromVersion: string,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<T.MigrationRes> { ): Promise<{ configured: boolean }> {
const fromEmver = ExtendedVersion.parseEmver(fromVersion) const fromEmver = ExtendedVersion.parseEmver(fromVersion)
const currentEmver = ExtendedVersion.parseEmver(this.manifest.version) const currentEmver = ExtendedVersion.parseEmver(this.manifest.version)
if (!this.manifest.migrations) return { configured: true } if (!this.manifest.migrations) return { configured: true }
@@ -652,23 +781,20 @@ export class SystemForEmbassy implements System {
if (migration) { if (migration) {
const [version, procedure] = migration const [version, procedure] = migration
if (procedure.type === "docker") { if (procedure.type === "docker") {
const commands = [
procedure.entrypoint,
...procedure.args,
JSON.stringify(fromVersion),
]
const container = await DockerProcedureContainer.of( const container = await DockerProcedureContainer.of(
effects, effects,
this.manifest.id, this.manifest.id,
procedure, procedure,
this.manifest.volumes, this.manifest.volumes,
`Migration - ${commands.join(" ")}`,
) )
return JSON.parse( return JSON.parse(
( (await container.execFail(commands, timeoutMs)).stdout.toString(),
await container.execFail(
[
procedure.entrypoint,
...procedure.args,
JSON.stringify(fromVersion),
],
timeoutMs,
)
).stdout.toString(),
) )
} else if (procedure.type === "script") { } else if (procedure.type === "script") {
const moduleCode = await this.moduleCode const moduleCode = await this.moduleCode
@@ -690,25 +816,22 @@ export class SystemForEmbassy implements System {
async properties( async properties(
effects: Effects, effects: Effects,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<T.PropertiesReturn> { ): Promise<PropertiesReturn> {
// TODO BLU-J set the properties ever so often // TODO BLU-J set the properties ever so often
const setConfigValue = this.manifest.properties const setConfigValue = this.manifest.properties
if (!setConfigValue) throw new Error("There is no properties") if (!setConfigValue) throw new Error("There is no properties")
if (setConfigValue.type === "docker") { if (setConfigValue.type === "docker") {
const commands = [setConfigValue.entrypoint, ...setConfigValue.args]
const container = await DockerProcedureContainer.of( const container = await DockerProcedureContainer.of(
effects, effects,
this.manifest.id, this.manifest.id,
setConfigValue, setConfigValue,
this.manifest.volumes, this.manifest.volumes,
`Properties - ${commands.join(" ")}`,
) )
const properties = matchProperties.unsafeCast( const properties = matchProperties.unsafeCast(
JSON.parse( JSON.parse(
( (await container.execFail(commands, timeoutMs)).stdout.toString(),
await container.execFail(
[setConfigValue.entrypoint, ...setConfigValue.args],
timeoutMs,
)
).stdout.toString(),
), ),
) )
return asProperty(properties.data) return asProperty(properties.data)
@@ -735,13 +858,13 @@ export class SystemForEmbassy implements System {
const actionProcedure = this.manifest.actions?.[actionId]?.implementation const actionProcedure = this.manifest.actions?.[actionId]?.implementation
const toActionResult = ({ const toActionResult = ({
message, message,
value = "", value,
copyable, copyable,
qr, qr,
}: U.ActionResult): T.ActionResult => ({ }: U.ActionResult): T.ActionResult => ({
version: "0", version: "0",
message, message,
value, value: value ?? null,
copyable, copyable,
qr, qr,
}) })
@@ -750,11 +873,18 @@ export class SystemForEmbassy implements System {
const subcontainer = actionProcedure.inject const subcontainer = actionProcedure.inject
? this.currentRunning?.mainSubContainerHandle ? this.currentRunning?.mainSubContainerHandle
: undefined : undefined
const env: Record<string, string> = actionProcedure.inject
? {
HOME: "/root",
}
: {}
const container = await DockerProcedureContainer.of( const container = await DockerProcedureContainer.of(
effects, effects,
this.manifest.id, this.manifest.id,
actionProcedure, actionProcedure,
this.manifest.volumes, this.manifest.volumes,
`Action ${actionId}`,
{ {
subcontainer, subcontainer,
}, },
@@ -769,6 +899,7 @@ export class SystemForEmbassy implements System {
JSON.stringify(formData), JSON.stringify(formData),
], ],
timeoutMs, timeoutMs,
{ env },
) )
).stdout.toString(), ).stdout.toString(),
), ),
@@ -794,23 +925,20 @@ export class SystemForEmbassy implements System {
const actionProcedure = this.manifest.dependencies?.[id]?.config?.check const actionProcedure = this.manifest.dependencies?.[id]?.config?.check
if (!actionProcedure) return { message: "Action not found", value: null } if (!actionProcedure) return { message: "Action not found", value: null }
if (actionProcedure.type === "docker") { if (actionProcedure.type === "docker") {
const commands = [
actionProcedure.entrypoint,
...actionProcedure.args,
JSON.stringify(oldConfig),
]
const container = await DockerProcedureContainer.of( const container = await DockerProcedureContainer.of(
effects, effects,
this.manifest.id, this.manifest.id,
actionProcedure, actionProcedure,
this.manifest.volumes, this.manifest.volumes,
`Dependencies Check - ${commands.join(" ")}`,
) )
return JSON.parse( return JSON.parse(
( (await container.execFail(commands, timeoutMs)).stdout.toString(),
await container.execFail(
[
actionProcedure.entrypoint,
...actionProcedure.args,
JSON.stringify(oldConfig),
],
timeoutMs,
)
).stdout.toString(),
) )
} else if (actionProcedure.type === "script") { } else if (actionProcedure.type === "script") {
const moduleCode = await this.moduleCode const moduleCode = await this.moduleCode
@@ -834,24 +962,46 @@ export class SystemForEmbassy implements System {
async dependenciesAutoconfig( async dependenciesAutoconfig(
effects: Effects, effects: Effects,
id: string, id: string,
input: unknown,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<void> { ): Promise<void> {
const oldConfig = object({ remoteConfig: any }).unsafeCast(
input,
).remoteConfig
// TODO: docker // TODO: docker
const oldConfig = (await effects.store.get({
packageId: id,
path: EMBASSY_POINTER_PATH_PREFIX,
callback: () => {
this.dependenciesAutoconfig(effects, id, timeoutMs)
},
})) as U.Config
if (!oldConfig) return
const moduleCode = await this.moduleCode const moduleCode = await this.moduleCode
const method = moduleCode.dependencies?.[id]?.autoConfigure const method = moduleCode.dependencies?.[id]?.autoConfigure
if (!method) return if (!method) return
return (await method( const newConfig = (await method(
polyfillEffects(effects, this.manifest), polyfillEffects(effects, this.manifest),
oldConfig, JSON.parse(JSON.stringify(oldConfig)),
).then((x) => { ).then((x) => {
if ("result" in x) return x.result if ("result" in x) return x.result
if ("error" in x) throw new Error("Error getting config: " + x.error) if ("error" in x) throw new Error("Error getting config: " + x.error)
throw new Error("Error getting config: " + x["error-code"][1]) throw new Error("Error getting config: " + x["error-code"][1])
})) as any })) as any
const diff = partialDiff(oldConfig, newConfig)
if (diff) {
await effects.action.request({
actionId: "config",
packageId: id,
replayId: `${id}/config`,
severity: "important",
reason: `Configure this dependency for the needs of ${this.manifest.title}`,
input: {
kind: "partial",
value: diff.diff,
},
when: {
condition: "input-not-matches",
once: false,
},
})
}
} }
} }
@@ -1026,9 +1176,7 @@ function extractServiceInterfaceId(manifest: Manifest, specInterface: string) {
const serviceInterfaceId = `${specInterface}-${internalPort}` const serviceInterfaceId = `${specInterface}-${internalPort}`
return serviceInterfaceId return serviceInterfaceId
} }
async function convertToNewConfig( async function convertToNewConfig(value: OldGetConfigRes) {
value: OldGetConfigRes,
): Promise<T.ConfigRes> {
const valueSpec: OldConfigSpec = matchOldConfigSpec.unsafeCast(value.spec) const valueSpec: OldConfigSpec = matchOldConfigSpec.unsafeCast(value.spec)
const spec = transformConfigSpec(valueSpec) const spec = transformConfigSpec(valueSpec)
if (!value.config) return { spec, config: null } if (!value.config) return { spec, config: null }

View File

@@ -42,6 +42,7 @@ const matchAction = object(
export const matchManifest = object( export const matchManifest = object(
{ {
id: string, id: string,
title: string,
version: string, version: string,
main: matchDockerProcedure, main: matchDockerProcedure,
assets: object( assets: object(

View File

@@ -105,12 +105,14 @@ export const polyfillEffects = (
args?: string[] | undefined args?: string[] | undefined
timeoutMillis?: number | undefined timeoutMillis?: number | undefined
}): Promise<oet.ResultType<string>> { }): Promise<oet.ResultType<string>> {
const commands: [string, ...string[]] = [command, ...(args || [])]
return startSdk return startSdk
.runCommand( .runCommand(
effects, effects,
{ id: manifest.main.image }, { id: manifest.main.image },
[command, ...(args || [])], commands,
{}, {},
commands.join(" "),
) )
.then((x: any) => ({ .then((x: any) => ({
stderr: x.stderr.toString(), stderr: x.stderr.toString(),
@@ -129,6 +131,7 @@ export const polyfillEffects = (
manifest.id, manifest.id,
manifest.main, manifest.main,
manifest.volumes, manifest.volumes,
[input.command, ...(input.args || [])].join(" "),
) )
const daemon = promiseSubcontainer.then((subcontainer) => const daemon = promiseSubcontainer.then((subcontainer) =>
daemons.runCommand()( daemons.runCommand()(
@@ -153,11 +156,17 @@ export const polyfillEffects = (
path: string path: string
uid: string uid: string
}): Promise<null> { }): Promise<null> {
const commands: [string, ...string[]] = [
"chown",
"--recursive",
input.uid,
`/drive/${input.path}`,
]
await startSdk await startSdk
.runCommand( .runCommand(
effects, effects,
{ id: manifest.main.image }, { id: manifest.main.image },
["chown", "--recursive", input.uid, `/drive/${input.path}`], commands,
{ {
mounts: [ mounts: [
{ {
@@ -171,6 +180,7 @@ export const polyfillEffects = (
}, },
], ],
}, },
commands.join(" "),
) )
.then((x: any) => ({ .then((x: any) => ({
stderr: x.stderr.toString(), stderr: x.stderr.toString(),
@@ -188,11 +198,17 @@ export const polyfillEffects = (
path: string path: string
mode: string mode: string
}): Promise<null> { }): Promise<null> {
const commands: [string, ...string[]] = [
"chmod",
"--recursive",
input.mode,
`/drive/${input.path}`,
]
await startSdk await startSdk
.runCommand( .runCommand(
effects, effects,
{ id: manifest.main.image }, { id: manifest.main.image },
["chmod", "--recursive", input.mode, `/drive/${input.path}`], commands,
{ {
mounts: [ mounts: [
{ {
@@ -206,6 +222,7 @@ export const polyfillEffects = (
}, },
], ],
}, },
commands.join(" "),
) )
.then((x: any) => ({ .then((x: any) => ({
stderr: x.stderr.toString(), stderr: x.stderr.toString(),

View File

@@ -1,4 +1,4 @@
import { CT } from "@start9labs/start-sdk" import { IST } from "@start9labs/start-sdk"
import { import {
dictionary, dictionary,
object, object,
@@ -15,9 +15,9 @@ import {
literal, literal,
} from "ts-matches" } from "ts-matches"
export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec { export function transformConfigSpec(oldSpec: OldConfigSpec): IST.InputSpec {
return Object.entries(oldSpec).reduce((inputSpec, [key, oldVal]) => { return Object.entries(oldSpec).reduce((inputSpec, [key, oldVal]) => {
let newVal: CT.ValueSpec let newVal: IST.ValueSpec
if (oldVal.type === "boolean") { if (oldVal.type === "boolean") {
newVal = { newVal = {
@@ -43,7 +43,6 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec {
}), }),
{}, {},
), ),
required: false,
disabled: false, disabled: false,
immutable: false, immutable: false,
} }
@@ -124,10 +123,9 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec {
spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(spec)), spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(spec)),
}, },
}), }),
{} as Record<string, { name: string; spec: CT.InputSpec }>, {} as Record<string, { name: string; spec: IST.InputSpec }>,
), ),
disabled: false, disabled: false,
required: true,
default: oldVal.default, default: oldVal.default,
immutable: false, immutable: false,
} }
@@ -141,7 +139,7 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec {
...inputSpec, ...inputSpec,
[key]: newVal, [key]: newVal,
} }
}, {} as CT.InputSpec) }, {} as IST.InputSpec)
} }
export function transformOldConfigToNew( export function transformOldConfigToNew(
@@ -233,10 +231,10 @@ export function transformNewConfigToOld(
function getListSpec( function getListSpec(
oldVal: OldValueSpecList, oldVal: OldValueSpecList,
): CT.ValueSpecMultiselect | CT.ValueSpecList { ): IST.ValueSpecMultiselect | IST.ValueSpecList {
const range = Range.from(oldVal.range) const range = Range.from(oldVal.range)
let partial: Omit<CT.ValueSpecList, "type" | "spec" | "default"> = { let partial: Omit<IST.ValueSpecList, "type" | "spec" | "default"> = {
name: oldVal.name, name: oldVal.name,
description: oldVal.description || null, description: oldVal.description || null,
warning: oldVal.warning || null, warning: oldVal.warning || null,

View File

@@ -1,21 +1,12 @@
import { ExecuteResult, Procedure, System } from "../../Interfaces/System" import { System } from "../../Interfaces/System"
import { unNestPath } from "../../Models/JsonPath"
import matches, { any, number, object, string, tuple } from "ts-matches"
import { Effects } from "../../Models/Effects" import { Effects } from "../../Models/Effects"
import { RpcResult, matchRpcResult } from "../RpcListener"
import { duration } from "../../Models/Duration"
import { T, utils } from "@start9labs/start-sdk" import { T, utils } from "@start9labs/start-sdk"
import { Volume } from "../../Models/Volume"
import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk"
import { CallbackHolder } from "../../Models/CallbackHolder"
import { Optional } from "ts-matches/lib/parsers/interfaces" import { Optional } from "ts-matches/lib/parsers/interfaces"
export const STARTOS_JS_LOCATION = "/usr/lib/startos/package/index.js" export const STARTOS_JS_LOCATION = "/usr/lib/startos/package/index.js"
type RunningMain = { type RunningMain = {
effects: MainEffects
stop: () => Promise<void> stop: () => Promise<void>
callbacks: CallbackHolder
} }
export class SystemForStartOs implements System { export class SystemForStartOs implements System {
@@ -25,23 +16,24 @@ export class SystemForStartOs implements System {
return new SystemForStartOs(require(STARTOS_JS_LOCATION)) return new SystemForStartOs(require(STARTOS_JS_LOCATION))
} }
constructor(readonly abi: T.ABI) {} constructor(readonly abi: T.ABI) {
containerInit(): Promise<void> { this
throw new Error("Method not implemented.") }
async containerInit(effects: Effects): Promise<void> {
return void (await this.abi.containerInit({ effects }))
} }
async packageInit( async packageInit(
effects: Effects, effects: Effects,
previousVersion: Optional<string> = null,
timeoutMs: number | null = null, timeoutMs: number | null = null,
): Promise<void> { ): Promise<void> {
return void (await this.abi.init({ effects })) return void (await this.abi.packageInit({ effects }))
} }
async packageUninit( async packageUninit(
effects: Effects, effects: Effects,
nextVersion: Optional<string> = null, nextVersion: Optional<string> = null,
timeoutMs: number | null = null, timeoutMs: number | null = null,
): Promise<void> { ): Promise<void> {
return void (await this.abi.uninit({ effects, nextVersion })) return void (await this.abi.packageUninit({ effects, nextVersion }))
} }
async createBackup( async createBackup(
effects: T.Effects, effects: T.Effects,
@@ -49,8 +41,6 @@ export class SystemForStartOs implements System {
): Promise<void> { ): Promise<void> {
return void (await this.abi.createBackup({ return void (await this.abi.createBackup({
effects, effects,
pathMaker: ((options) =>
new Volume(options.volume, options.path).path) as T.PathMaker,
})) }))
} }
async restoreBackup( async restoreBackup(
@@ -59,118 +49,56 @@ export class SystemForStartOs implements System {
): Promise<void> { ): Promise<void> {
return void (await this.abi.restoreBackup({ return void (await this.abi.restoreBackup({
effects, effects,
pathMaker: ((options) =>
new Volume(options.volume, options.path).path) as T.PathMaker,
})) }))
} }
getConfig( getActionInput(
effects: T.Effects,
timeoutMs: number | null,
): Promise<T.ConfigRes> {
return this.abi.getConfig({ effects })
}
async setConfig(
effects: Effects,
input: { effects: Effects; input: Record<string, unknown> },
timeoutMs: number | null,
): Promise<void> {
const _: unknown = await this.abi.setConfig({ effects, input })
return
}
migration(
effects: Effects,
fromVersion: string,
timeoutMs: number | null,
): Promise<T.MigrationRes> {
throw new Error("Method not implemented.")
}
properties(
effects: Effects,
timeoutMs: number | null,
): Promise<T.PropertiesReturn> {
throw new Error("Method not implemented.")
}
async action(
effects: Effects, effects: Effects,
id: string, id: string,
formData: unknown,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<T.ActionResult> { ): Promise<T.ActionInput | null> {
const action = (await this.abi.actions({ effects }))[id] const action = this.abi.actions.get(id)
if (!action) throw new Error(`Action ${id} not found`) if (!action) throw new Error(`Action ${id} not found`)
return action.run({ effects }) return action.getInput({ effects })
} }
dependenciesCheck( runAction(
effects: Effects, effects: Effects,
id: string, id: string,
oldConfig: unknown, input: unknown,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<any> { ): Promise<T.ActionResult | null> {
const dependencyConfig = this.abi.dependencyConfig[id] const action = this.abi.actions.get(id)
if (!dependencyConfig) throw new Error(`dependencyConfig ${id} not found`) if (!action) throw new Error(`Action ${id} not found`)
return dependencyConfig.query({ effects }) return action.run({ effects, input })
} }
async dependenciesAutoconfig(
effects: Effects,
id: string,
remoteConfig: unknown,
timeoutMs: number | null,
): Promise<void> {
const dependencyConfig = this.abi.dependencyConfig[id]
if (!dependencyConfig) throw new Error(`dependencyConfig ${id} not found`)
const queryResults = await this.getConfig(effects, timeoutMs)
return void (await dependencyConfig.update({
queryResults,
remoteConfig,
})) // TODO
}
async actionsMetadata(effects: T.Effects): Promise<T.ActionMetadata[]> {
return this.abi.actionsMetadata({ effects })
}
async init(): Promise<void> {}
async exit(): Promise<void> {} async exit(): Promise<void> {}
async start(effects: MainEffects): Promise<void> { async start(effects: Effects): Promise<void> {
effects.constRetry = utils.once(() => effects.restart())
if (this.runningMain) await this.stop() if (this.runningMain) await this.stop()
let mainOnTerm: () => Promise<void> | undefined let mainOnTerm: () => Promise<void> | undefined
const started = async (onTerm: () => Promise<void>) => { const started = async (onTerm: () => Promise<void>) => {
await effects.setMainStatus({ status: "running" }) await effects.setMainStatus({ status: "running" })
mainOnTerm = onTerm mainOnTerm = onTerm
return null
} }
const daemons = await ( const daemons = await (
await this.abi.main({ await this.abi.main({
effects: effects as MainEffects, effects,
started, started,
}) })
).build() ).build()
this.runningMain = { this.runningMain = {
effects,
stop: async () => { stop: async () => {
if (mainOnTerm) await mainOnTerm() if (mainOnTerm) await mainOnTerm()
await daemons.term() await daemons.term()
}, },
callbacks: new CallbackHolder(),
}
}
callCallback(callback: number, args: any[]): void {
if (this.runningMain) {
this.runningMain.callbacks
.callCallback(callback, args)
.catch((error) =>
console.error(`callback ${callback} failed`, utils.asError(error)),
)
} else {
console.warn(`callback ${callback} ignored because system is not running`)
} }
} }
async stop(): Promise<void> { async stop(): Promise<void> {
if (this.runningMain) { if (this.runningMain) {
await this.runningMain.stop() await this.runningMain.stop()
await this.runningMain.effects.clearCallbacks()
this.runningMain = undefined this.runningMain = undefined
} }
} }

View File

@@ -1,7 +1,4 @@
import { GetDependency } from "./GetDependency" import { GetDependency } from "./GetDependency"
import { System } from "./System" import { System } from "./System"
import { MakeMainEffects, MakeProcedureEffects } from "./MakeEffects"
export type AllGetDependencies = GetDependency<"system", Promise<System>> & export type AllGetDependencies = GetDependency<"system", Promise<System>>
GetDependency<"makeProcedureEffects", MakeProcedureEffects> &
GetDependency<"makeMainEffects", MakeMainEffects>

View File

@@ -1,4 +0,0 @@
import { Effects } from "../Models/Effects"
import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk"
export type MakeProcedureEffects = (procedureId: string) => Effects
export type MakeMainEffects = () => MainEffects

View File

@@ -1,39 +1,26 @@
import { types as T } from "@start9labs/start-sdk" import { types as T } from "@start9labs/start-sdk"
import { RpcResult } from "../Adapters/RpcListener"
import { Effects } from "../Models/Effects" import { Effects } from "../Models/Effects"
import { CallbackHolder } from "../Models/CallbackHolder" import { CallbackHolder } from "../Models/CallbackHolder"
import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk"
import { Optional } from "ts-matches/lib/parsers/interfaces" import { Optional } from "ts-matches/lib/parsers/interfaces"
export type Procedure = export type Procedure =
| "/init" | "/packageInit"
| "/uninit" | "/packageUninit"
| "/config/set"
| "/config/get"
| "/backup/create" | "/backup/create"
| "/backup/restore" | "/backup/restore"
| "/actions/metadata" | `/actions/${string}/getInput`
| "/properties"
| `/actions/${string}/get`
| `/actions/${string}/run` | `/actions/${string}/run`
| `/dependencies/${string}/query`
| `/dependencies/${string}/update`
export type ExecuteResult = export type ExecuteResult =
| { ok: unknown } | { ok: unknown }
| { err: { code: number; message: string } } | { err: { code: number; message: string } }
export type System = { export type System = {
containerInit(): Promise<void> containerInit(effects: T.Effects): Promise<void>
start(effects: MainEffects): Promise<void> start(effects: T.Effects): Promise<void>
callCallback(callback: number, args: any[]): void
stop(): Promise<void> stop(): Promise<void>
packageInit( packageInit(effects: Effects, timeoutMs: number | null): Promise<void>
effects: Effects,
previousVersion: Optional<string>,
timeoutMs: number | null,
): Promise<void>
packageUninit( packageUninit(
effects: Effects, effects: Effects,
nextVersion: Optional<string>, nextVersion: Optional<string>,
@@ -42,41 +29,17 @@ export type System = {
createBackup(effects: T.Effects, timeoutMs: number | null): Promise<void> createBackup(effects: T.Effects, timeoutMs: number | null): Promise<void>
restoreBackup(effects: T.Effects, timeoutMs: number | null): Promise<void> restoreBackup(effects: T.Effects, timeoutMs: number | null): Promise<void>
getConfig(effects: T.Effects, timeoutMs: number | null): Promise<T.ConfigRes> runAction(
setConfig(
effects: Effects,
input: { effects: Effects; input: Record<string, unknown> },
timeoutMs: number | null,
): Promise<void>
migration(
effects: Effects,
fromVersion: string,
timeoutMs: number | null,
): Promise<T.MigrationRes>
properties(
effects: Effects,
timeoutMs: number | null,
): Promise<T.PropertiesReturn>
action(
effects: Effects, effects: Effects,
actionId: string, actionId: string,
formData: unknown, input: unknown,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<T.ActionResult> ): Promise<T.ActionResult | null>
getActionInput(
dependenciesCheck(
effects: Effects, effects: Effects,
id: string, actionId: string,
oldConfig: unknown,
timeoutMs: number | null, timeoutMs: number | null,
): Promise<any> ): Promise<T.ActionInput | null>
dependenciesAutoconfig(
effects: Effects,
id: string,
oldConfig: unknown,
timeoutMs: number | null,
): Promise<void>
actionsMetadata(effects: T.Effects): Promise<T.ActionMetadata[]>
exit(): Promise<void> exit(): Promise<void>
} }

View File

@@ -1,22 +1,62 @@
import { T } from "@start9labs/start-sdk"
const CallbackIdCell = { inc: 1 }
const callbackRegistry = new FinalizationRegistry(
async (options: { cbs: Map<number, Function>; effects: T.Effects }) => {
await options.effects.clearCallbacks({
only: Array.from(options.cbs.keys()),
})
},
)
export class CallbackHolder { export class CallbackHolder {
constructor() {} constructor(private effects?: T.Effects) {}
private inc = 0
private callbacks = new Map<number, Function>() private callbacks = new Map<number, Function>()
private children: WeakRef<CallbackHolder>[] = []
private newId() { private newId() {
return this.inc++ return CallbackIdCell.inc++
} }
addCallback(callback?: Function) { addCallback(callback?: Function) {
if (!callback) { if (!callback) {
return return
} }
const id = this.newId() const id = this.newId()
console.error("adding callback", id)
this.callbacks.set(id, callback) this.callbacks.set(id, callback)
if (this.effects)
callbackRegistry.register(this, {
cbs: this.callbacks,
effects: this.effects,
})
return id return id
} }
child(): CallbackHolder {
const child = new CallbackHolder()
this.children.push(new WeakRef(child))
return child
}
removeChild(child: CallbackHolder) {
this.children = this.children.filter((c) => {
const ref = c.deref()
return ref && ref !== child
})
}
private getCallback(index: number): Function | undefined {
let callback = this.callbacks.get(index)
if (callback) this.callbacks.delete(index)
else {
for (let i = 0; i < this.children.length; i++) {
callback = this.children[i].deref()?.getCallback(index)
if (callback) return callback
}
}
return callback
}
callCallback(index: number, args: any[]): Promise<unknown> { callCallback(index: number, args: any[]): Promise<unknown> {
const callback = this.callbacks.get(index) const callback = this.getCallback(index)
if (!callback) throw new Error(`Callback ${index} does not exist`) if (!callback) return Promise.resolve()
this.callbacks.delete(index)
return Promise.resolve().then(() => callback(...args)) return Promise.resolve().then(() => callback(...args))
} }
} }

View File

@@ -1,9 +1,7 @@
import { literals, some, string } from "ts-matches" import { literals, some, string } from "ts-matches"
type NestedPath<A extends string, B extends string> = `/${A}/${string}/${B}` type NestedPath<A extends string, B extends string> = `/${A}/${string}/${B}`
type NestedPaths = type NestedPaths = NestedPath<"actions", "run" | "getInput">
| NestedPath<"actions", "run" | "get">
| NestedPath<"dependencies", "query" | "update">
// prettier-ignore // prettier-ignore
type UnNestPaths<A> = type UnNestPaths<A> =
A extends `${infer A}/${infer B}` ? [...UnNestPaths<A>, ... UnNestPaths<B>] : A extends `${infer A}/${infer B}` ? [...UnNestPaths<A>, ... UnNestPaths<B>] :
@@ -15,25 +13,16 @@ export function unNestPath<A extends string>(a: A): UnNestPaths<A> {
function isNestedPath(path: string): path is NestedPaths { function isNestedPath(path: string): path is NestedPaths {
const paths = path.split("/") const paths = path.split("/")
if (paths.length !== 4) return false if (paths.length !== 4) return false
if (paths[1] === "actions" && (paths[3] === "run" || paths[3] === "get")) if (paths[1] === "actions" && (paths[3] === "run" || paths[3] === "getInput"))
return true
if (
paths[1] === "dependencies" &&
(paths[3] === "query" || paths[3] === "update")
)
return true return true
return false return false
} }
export const jsonPath = some( export const jsonPath = some(
literals( literals(
"/init", "/packageInit",
"/uninit", "/packageUninit",
"/config/set",
"/config/get",
"/backup/create", "/backup/create",
"/backup/restore", "/backup/restore",
"/actions/metadata",
"/properties",
), ),
string.refine(isNestedPath, "isNestedPath"), string.refine(isNestedPath, "isNestedPath"),
) )

View File

@@ -1,13 +1,10 @@
import { RpcListener } from "./Adapters/RpcListener" import { RpcListener } from "./Adapters/RpcListener"
import { SystemForEmbassy } from "./Adapters/Systems/SystemForEmbassy" import { SystemForEmbassy } from "./Adapters/Systems/SystemForEmbassy"
import { makeMainEffects, makeProcedureEffects } from "./Adapters/EffectCreator"
import { AllGetDependencies } from "./Interfaces/AllGetDependencies" import { AllGetDependencies } from "./Interfaces/AllGetDependencies"
import { getSystem } from "./Adapters/Systems" import { getSystem } from "./Adapters/Systems"
const getDependencies: AllGetDependencies = { const getDependencies: AllGetDependencies = {
system: getSystem, system: getSystem,
makeProcedureEffects: () => makeProcedureEffects,
makeMainEffects: () => makeMainEffects,
} }
new RpcListener(getDependencies) new RpcListener(getDependencies)

1538
core/Cargo.lock generated

File diff suppressed because it is too large Load Diff

50
core/build-cli.sh Executable file
View File

@@ -0,0 +1,50 @@
#!/bin/bash
cd "$(dirname "${BASH_SOURCE[0]}")"
set -ea
shopt -s expand_aliases
if [ -z "$ARCH" ]; then
ARCH=$(uname -m)
fi
if [ "$ARCH" = "arm64" ]; then
ARCH="aarch64"
fi
if [ -z "$KERNEL_NAME" ]; then
KERNEL_NAME=$(uname -s)
fi
if [ -z "$TARGET" ]; then
if [ "$KERNEL_NAME" = "Linux" ]; then
TARGET="$ARCH-unknown-linux-musl"
elif [ "$KERNEL_NAME" = "Darwin" ]; then
TARGET="$ARCH-apple-darwin"
else
>&2 echo "unknown kernel $KERNEL_NAME"
exit 1
fi
fi
USE_TTY=
if tty -s; then
USE_TTY="-it"
fi
cd ..
FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')"
RUSTFLAGS=""
if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then
RUSTFLAGS="--cfg tokio_unstable"
fi
alias 'rust-zig-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/cargo-zigbuild'
echo "FEATURES=\"$FEATURES\""
echo "RUSTFLAGS=\"$RUSTFLAGS\""
rust-zig-builder sh -c "cd core && cargo zigbuild --release --no-default-features --features cli,daemon,$FEATURES --locked --bin start-cli --target=$TARGET"
if [ "$(ls -nd core/target/$TARGET/release/start-cli | awk '{ print $3 }')" != "$UID" ]; then
rust-zig-builder sh -c "cd core && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"
fi

View File

@@ -6,6 +6,7 @@ use std::time::Duration;
use color_eyre::eyre::{eyre, Context, Error}; use color_eyre::eyre::{eyre, Context, Error};
use futures::future::BoxFuture; use futures::future::BoxFuture;
use futures::FutureExt; use futures::FutureExt;
use models::ResultExt;
use tokio::fs::File; use tokio::fs::File;
use tokio::sync::oneshot; use tokio::sync::oneshot;
use tokio::task::{JoinError, JoinHandle, LocalSet}; use tokio::task::{JoinError, JoinHandle, LocalSet};
@@ -176,7 +177,7 @@ impl Drop for AtomicFile {
if let Some(file) = self.file.take() { if let Some(file) = self.file.take() {
drop(file); drop(file);
let path = std::mem::take(&mut self.tmp_path); let path = std::mem::take(&mut self.tmp_path);
tokio::spawn(async move { tokio::fs::remove_file(path).await.unwrap() }); tokio::spawn(async move { tokio::fs::remove_file(path).await.log_err() });
} }
} }
} }

View File

@@ -1,9 +1,8 @@
use std::marker::PhantomData; use std::marker::PhantomData;
use std::str::FromStr; use std::str::FromStr;
use clap::builder::TypedValueParser; use rpc_toolkit::clap;
use rpc_toolkit::clap::builder::TypedValueParser;
use crate::prelude::*;
pub struct FromStrParser<T>(PhantomData<T>); pub struct FromStrParser<T>(PhantomData<T>);
impl<T> FromStrParser<T> { impl<T> FromStrParser<T> {

View File

@@ -322,6 +322,11 @@ impl From<reqwest::Error> for Error {
Error::new(e, kind) Error::new(e, kind)
} }
} }
impl From<torut::onion::OnionAddressParseError> for Error {
fn from(e: torut::onion::OnionAddressParseError) -> Self {
Error::new(e, ErrorKind::Tor)
}
}
impl From<patch_db::value::Error> for Error { impl From<patch_db::value::Error> for Error {
fn from(value: patch_db::value::Error) -> Self { fn from(value: patch_db::value::Error) -> Self {
match value.kind { match value.kind {
@@ -351,6 +356,14 @@ impl Debug for ErrorData {
} }
} }
impl std::error::Error for ErrorData {} impl std::error::Error for ErrorData {}
impl From<Error> for ErrorData {
fn from(value: Error) -> Self {
Self {
details: value.to_string(),
debug: format!("{:?}", value),
}
}
}
impl From<&RpcError> for ErrorData { impl From<&RpcError> for ErrorData {
fn from(value: &RpcError) -> Self { fn from(value: &RpcError) -> Self {
Self { Self {

View File

@@ -11,6 +11,7 @@ mod host;
mod image; mod image;
mod invalid_id; mod invalid_id;
mod package; mod package;
mod replay;
mod service_interface; mod service_interface;
mod volume; mod volume;
@@ -20,6 +21,7 @@ pub use host::HostId;
pub use image::ImageId; pub use image::ImageId;
pub use invalid_id::InvalidId; pub use invalid_id::InvalidId;
pub use package::{PackageId, SYSTEM_PACKAGE_ID}; pub use package::{PackageId, SYSTEM_PACKAGE_ID};
pub use replay::ReplayId;
pub use service_interface::ServiceInterfaceId; pub use service_interface::ServiceInterfaceId;
pub use volume::VolumeId; pub use volume::VolumeId;

View File

@@ -0,0 +1,45 @@
use std::convert::Infallible;
use std::path::Path;
use std::str::FromStr;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use yasi::InternedString;
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)]
#[ts(type = "string")]
pub struct ReplayId(InternedString);
impl FromStr for ReplayId {
type Err = Infallible;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(ReplayId(InternedString::intern(s)))
}
}
impl AsRef<ReplayId> for ReplayId {
fn as_ref(&self) -> &ReplayId {
self
}
}
impl std::fmt::Display for ReplayId {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", &self.0)
}
}
impl AsRef<str> for ReplayId {
fn as_ref(&self) -> &str {
self.0.as_ref()
}
}
impl AsRef<Path> for ReplayId {
fn as_ref(&self) -> &Path {
self.0.as_ref()
}
}
impl<'de> Deserialize<'de> for ReplayId {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::de::Deserializer<'de>,
{
Ok(ReplayId(serde::Deserialize::deserialize(deserializer)?))
}
}

View File

@@ -1,9 +1,11 @@
use std::path::Path; use std::path::Path;
use std::str::FromStr;
use rpc_toolkit::clap::builder::ValueParserFactory;
use serde::{Deserialize, Deserializer, Serialize}; use serde::{Deserialize, Deserializer, Serialize};
use ts_rs::TS; use ts_rs::TS;
use crate::Id; use crate::{FromStrParser, Id};
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] #[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)]
#[ts(export, type = "string")] #[ts(export, type = "string")]
@@ -59,3 +61,15 @@ impl sqlx::Type<sqlx::Postgres> for ServiceInterfaceId {
<&str as sqlx::Type<sqlx::Postgres>>::compatible(ty) <&str as sqlx::Type<sqlx::Postgres>>::compatible(ty)
} }
} }
impl FromStr for ServiceInterfaceId {
type Err = <Id as FromStr>::Err;
fn from_str(s: &str) -> Result<Self, Self::Err> {
Id::from_str(s).map(Self)
}
}
impl ValueParserFactory for ServiceInterfaceId {
type Parser = FromStrParser<Self>;
fn value_parser() -> Self::Parser {
FromStrParser::new()
}
}

View File

@@ -1,3 +1,4 @@
mod clap;
mod data_url; mod data_url;
mod errors; mod errors;
mod id; mod id;
@@ -5,6 +6,7 @@ mod mime;
mod procedure_name; mod procedure_name;
mod version; mod version;
pub use clap::*;
pub use data_url::*; pub use data_url::*;
pub use errors::*; pub use errors::*;
pub use id::*; pub use id::*;

View File

@@ -1,38 +1,30 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::{ActionId, PackageId}; use crate::ActionId;
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ProcedureName { pub enum ProcedureName {
GetConfig, GetConfig,
SetConfig, SetConfig,
CreateBackup, CreateBackup,
Properties,
RestoreBackup, RestoreBackup,
ActionMetadata, GetActionInput(ActionId),
RunAction(ActionId), RunAction(ActionId),
GetAction(ActionId), PackageInit,
QueryDependency(PackageId), PackageUninit,
UpdateDependency(PackageId),
Init,
Uninit,
} }
impl ProcedureName { impl ProcedureName {
pub fn js_function_name(&self) -> String { pub fn js_function_name(&self) -> String {
match self { match self {
ProcedureName::Init => "/init".to_string(), ProcedureName::PackageInit => "/packageInit".to_string(),
ProcedureName::Uninit => "/uninit".to_string(), ProcedureName::PackageUninit => "/packageUninit".to_string(),
ProcedureName::SetConfig => "/config/set".to_string(), ProcedureName::SetConfig => "/config/set".to_string(),
ProcedureName::GetConfig => "/config/get".to_string(), ProcedureName::GetConfig => "/config/get".to_string(),
ProcedureName::CreateBackup => "/backup/create".to_string(), ProcedureName::CreateBackup => "/backup/create".to_string(),
ProcedureName::Properties => "/properties".to_string(),
ProcedureName::RestoreBackup => "/backup/restore".to_string(), ProcedureName::RestoreBackup => "/backup/restore".to_string(),
ProcedureName::ActionMetadata => "/actions/metadata".to_string(),
ProcedureName::RunAction(id) => format!("/actions/{}/run", id), ProcedureName::RunAction(id) => format!("/actions/{}/run", id),
ProcedureName::GetAction(id) => format!("/actions/{}/get", id), ProcedureName::GetActionInput(id) => format!("/actions/{}/getInput", id),
ProcedureName::QueryDependency(id) => format!("/dependencies/{}/query", id),
ProcedureName::UpdateDependency(id) => format!("/dependencies/{}/update", id),
} }
} }
} }

View File

@@ -14,7 +14,7 @@ keywords = [
name = "start-os" name = "start-os"
readme = "README.md" readme = "README.md"
repository = "https://github.com/Start9Labs/start-os" repository = "https://github.com/Start9Labs/start-os"
version = "0.3.6-alpha.5" version = "0.3.6-alpha.8"
license = "MIT" license = "MIT"
[lib] [lib]
@@ -39,10 +39,10 @@ path = "src/main.rs"
[features] [features]
cli = [] cli = []
container-runtime = ["procfs", "unshare"] container-runtime = ["procfs", "tty-spawn"]
daemon = [] daemon = []
registry = [] registry = []
default = ["cli", "daemon"] default = ["cli", "daemon", "registry", "container-runtime"]
dev = [] dev = []
unstable = ["console-subscriber", "tokio/tracing"] unstable = ["console-subscriber", "tokio/tracing"]
docker = [] docker = []
@@ -50,6 +50,10 @@ test = []
[dependencies] [dependencies]
aes = { version = "0.7.5", features = ["ctr"] } aes = { version = "0.7.5", features = ["ctr"] }
async-acme = { version = "0.5.0", git = "https://github.com/dr-bonez/async-acme.git", features = [
"use_rustls",
"use_tokio",
] }
async-compression = { version = "0.4.4", features = [ async-compression = { version = "0.4.4", features = [
"gzip", "gzip",
"brotli", "brotli",
@@ -156,6 +160,7 @@ prettytable-rs = "0.10.0"
procfs = { version = "0.16.0", optional = true } procfs = { version = "0.16.0", optional = true }
proptest = "1.3.1" proptest = "1.3.1"
proptest-derive = "0.5.0" proptest-derive = "0.5.0"
qrcode = "0.14.1"
rand = { version = "0.8.5", features = ["std"] } rand = { version = "0.8.5", features = ["std"] }
regex = "1.10.2" regex = "1.10.2"
reqwest = { version = "0.12.4", features = ["stream", "json", "socks"] } reqwest = { version = "0.12.4", features = ["stream", "json", "socks"] }
@@ -197,7 +202,7 @@ tokio-util = { version = "0.7.9", features = ["io"] }
torut = { git = "https://github.com/Start9Labs/torut.git", branch = "update/dependencies", features = [ torut = { git = "https://github.com/Start9Labs/torut.git", branch = "update/dependencies", features = [
"serialize", "serialize",
] } ] }
tower-service = "0.3.2" tower-service = "0.3.3"
tracing = "0.1.39" tracing = "0.1.39"
tracing-error = "0.2.0" tracing-error = "0.2.0"
tracing-futures = "0.2.5" tracing-futures = "0.2.5"
@@ -205,10 +210,9 @@ tracing-journald = "0.3.0"
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
trust-dns-server = "0.23.1" trust-dns-server = "0.23.1"
ts-rs = { git = "https://github.com/dr-bonez/ts-rs.git", branch = "feature/top-level-as" } # "8.1.0" ts-rs = { git = "https://github.com/dr-bonez/ts-rs.git", branch = "feature/top-level-as" } # "8.1.0"
tty-spawn = { version = "0.4.0", optional = true }
typed-builder = "0.18.0" typed-builder = "0.18.0"
which = "6.0.3"
unix-named-pipe = "0.2.0" unix-named-pipe = "0.2.0"
unshare = { version = "0.7.0", optional = true }
url = { version = "2.4.1", features = ["serde"] } url = { version = "2.4.1", features = ["serde"] }
urlencoding = "2.1.3" urlencoding = "2.1.3"
uuid = { version = "1.4.1", features = ["v4"] } uuid = { version = "1.4.1", features = ["v4"] }

View File

@@ -0,0 +1,13 @@
[Unit]
Description=StartOS Registry
[Service]
Type=simple
Environment=RUST_LOG=startos=debug,patch_db=warn
ExecStart=/usr/local/bin/registry
Restart=always
RestartSec=3
ManagedOOMPreference=avoid
[Install]
WantedBy=multi-user.target

View File

@@ -1,87 +1,302 @@
use clap::Parser; use std::collections::BTreeMap;
use std::fmt;
use clap::{CommandFactory, FromArgMatches, Parser};
pub use models::ActionId; pub use models::ActionId;
use models::PackageId; use models::PackageId;
use qrcode::QrCode;
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::instrument; use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
use crate::config::Config; use crate::context::{CliContext, RpcContext};
use crate::context::RpcContext;
use crate::prelude::*; use crate::prelude::*;
use crate::rpc_continuations::Guid; use crate::rpc_continuations::Guid;
use crate::util::serde::{display_serializable, StdinDeserializable, WithIoFormat}; use crate::util::serde::{
display_serializable, HandlerExtSerde, StdinDeserializable, WithIoFormat,
};
#[derive(Debug, Serialize, Deserialize)] pub fn action_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
.subcommand(
"get-input",
from_fn_async(get_action_input)
.with_display_serializable()
.with_about("Get action input spec")
.with_call_remote::<CliContext>(),
)
.subcommand(
"run",
from_fn_async(run_action)
.with_display_serializable()
.with_custom_display_fn(|_, res| {
if let Some(res) = res {
println!("{res}")
}
Ok(())
})
.with_about("Run service action")
.with_call_remote::<CliContext>(),
)
}
#[derive(Debug, Clone, Deserialize, Serialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct ActionInput {
#[ts(type = "Record<string, unknown>")]
pub spec: Value,
#[ts(type = "Record<string, unknown> | null")]
pub value: Option<Value>,
}
#[derive(Deserialize, Serialize, TS, Parser)]
#[serde(rename_all = "camelCase")]
pub struct GetActionInputParams {
pub package_id: PackageId,
pub action_id: ActionId,
}
#[instrument(skip_all)]
pub async fn get_action_input(
ctx: RpcContext,
GetActionInputParams {
package_id,
action_id,
}: GetActionInputParams,
) -> Result<Option<ActionInput>, Error> {
ctx.services
.get(&package_id)
.await
.as_ref()
.or_not_found(lazy_format!("Manager for {}", package_id))?
.get_action_input(Guid::new(), action_id)
.await
}
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(tag = "version")] #[serde(tag = "version")]
#[ts(export)]
pub enum ActionResult { pub enum ActionResult {
#[serde(rename = "0")] #[serde(rename = "0")]
V0(ActionResultV0), V0(ActionResultV0),
#[serde(rename = "1")]
V1(ActionResultV1),
}
impl fmt::Display for ActionResult {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::V0(res) => res.fmt(f),
Self::V1(res) => res.fmt(f),
}
}
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize, TS)]
pub struct ActionResultV0 { pub struct ActionResultV0 {
pub message: String, pub message: String,
pub value: Option<String>, pub value: Option<String>,
pub copyable: bool, pub copyable: bool,
pub qr: bool, pub qr: bool,
} }
impl fmt::Display for ActionResultV0 {
#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, Deserialize, Serialize)] fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
#[serde(rename_all = "camelCase")] write!(f, "{}", self.message)?;
pub enum DockerStatus { if let Some(value) = &self.value {
Running, write!(f, ":\n{value}")?;
Stopped, if self.qr {
use qrcode::render::unicode;
write!(
f,
"\n{}",
QrCode::new(value.as_bytes())
.unwrap()
.render::<unicode::Dense1x2>()
.build()
)?;
}
}
Ok(())
}
} }
pub fn display_action_result(params: WithIoFormat<ActionParams>, result: ActionResult) { #[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct ActionResultV1 {
pub title: String,
pub message: Option<String>,
pub result: Option<ActionResultValue>,
}
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
pub struct ActionResultMember {
pub name: String,
pub description: Option<String>,
#[serde(flatten)]
#[ts(flatten)]
pub value: ActionResultValue,
}
#[derive(Debug, Serialize, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[serde(rename_all_fields = "camelCase")]
#[serde(tag = "type")]
pub enum ActionResultValue {
Single {
value: String,
copyable: bool,
qr: bool,
masked: bool,
},
Group {
value: Vec<ActionResultMember>,
},
}
impl ActionResultValue {
fn fmt_rec(&self, f: &mut fmt::Formatter<'_>, indent: usize) -> fmt::Result {
match self {
Self::Single { value, qr, .. } => {
for _ in 0..indent {
write!(f, " ")?;
}
write!(f, "{value}")?;
if *qr {
use qrcode::render::unicode;
writeln!(f)?;
for _ in 0..indent {
write!(f, " ")?;
}
write!(
f,
"{}",
QrCode::new(value.as_bytes())
.unwrap()
.render::<unicode::Dense1x2>()
.build()
)?;
}
}
Self::Group { value } => {
for ActionResultMember {
name,
description,
value,
} in value
{
for _ in 0..indent {
write!(f, " ")?;
}
write!(f, "{name}")?;
if let Some(description) = description {
write!(f, ": {description}")?;
}
writeln!(f, ":")?;
value.fmt_rec(f, indent + 1)?;
}
}
}
Ok(())
}
}
impl fmt::Display for ActionResultV1 {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
writeln!(f, "{}:", self.title)?;
if let Some(message) = &self.message {
writeln!(f, "{message}")?;
}
if let Some(result) = &self.result {
result.fmt_rec(f, 1)?;
}
Ok(())
}
}
pub fn display_action_result<T: Serialize>(params: WithIoFormat<T>, result: Option<ActionResult>) {
let Some(result) = result else {
return;
};
if let Some(format) = params.format { if let Some(format) = params.format {
return display_serializable(format, result); return display_serializable(format, result);
} }
match result { println!("{result}")
ActionResult::V0(ar) => {
println!(
"{}: {}",
ar.message,
serde_json::to_string(&ar.value).unwrap()
);
}
}
} }
#[derive(Deserialize, Serialize, Parser, TS)] #[derive(Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")] pub struct RunActionParams {
pub struct ActionParams { pub package_id: PackageId,
#[arg(id = "id")] pub action_id: ActionId,
#[serde(rename = "id")] #[ts(optional, type = "any")]
pub input: Option<Value>,
}
#[derive(Parser)]
struct CliRunActionParams {
pub package_id: PackageId, pub package_id: PackageId,
pub action_id: ActionId, pub action_id: ActionId,
#[command(flatten)] #[command(flatten)]
#[ts(type = "{ [key: string]: any } | null")] pub input: StdinDeserializable<Option<Value>>,
#[serde(default)] }
pub input: StdinDeserializable<Option<Config>>, impl From<CliRunActionParams> for RunActionParams {
fn from(
CliRunActionParams {
package_id,
action_id,
input,
}: CliRunActionParams,
) -> Self {
Self {
package_id,
action_id,
input: input.0,
}
}
}
impl CommandFactory for RunActionParams {
fn command() -> clap::Command {
CliRunActionParams::command()
}
fn command_for_update() -> clap::Command {
CliRunActionParams::command_for_update()
}
}
impl FromArgMatches for RunActionParams {
fn from_arg_matches(matches: &clap::ArgMatches) -> Result<Self, clap::Error> {
CliRunActionParams::from_arg_matches(matches).map(Self::from)
}
fn from_arg_matches_mut(matches: &mut clap::ArgMatches) -> Result<Self, clap::Error> {
CliRunActionParams::from_arg_matches_mut(matches).map(Self::from)
}
fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> {
*self = CliRunActionParams::from_arg_matches(matches).map(Self::from)?;
Ok(())
}
fn update_from_arg_matches_mut(
&mut self,
matches: &mut clap::ArgMatches,
) -> Result<(), clap::Error> {
*self = CliRunActionParams::from_arg_matches_mut(matches).map(Self::from)?;
Ok(())
}
} }
// impl C
// #[command(about = "Executes an action", display(display_action_result))] // #[command(about = "Executes an action", display(display_action_result))]
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn action( pub async fn run_action(
ctx: RpcContext, ctx: RpcContext,
ActionParams { RunActionParams {
package_id, package_id,
action_id, action_id,
input: StdinDeserializable(input), input,
}: ActionParams, }: RunActionParams,
) -> Result<ActionResult, Error> { ) -> Result<Option<ActionResult>, Error> {
ctx.services ctx.services
.get(&package_id) .get(&package_id)
.await .await
.as_ref() .as_ref()
.or_not_found(lazy_format!("Manager for {}", package_id))? .or_not_found(lazy_format!("Manager for {}", package_id))?
.action( .run_action(Guid::new(), action_id, input.unwrap_or_default())
Guid::new(),
action_id,
input.map(|c| to_value(&c)).transpose()?.unwrap_or_default(),
)
.await .await
} }

View File

@@ -91,28 +91,40 @@ pub fn auth<C: Context>() -> ParentHandler<C> {
.with_metadata("login", Value::Bool(true)) .with_metadata("login", Value::Bool(true))
.no_cli(), .no_cli(),
) )
.subcommand("login", from_fn_async(cli_login).no_display()) .subcommand(
"login",
from_fn_async(cli_login)
.no_display()
.with_about("Log in to StartOS server"),
)
.subcommand( .subcommand(
"logout", "logout",
from_fn_async(logout) from_fn_async(logout)
.with_metadata("get_session", Value::Bool(true)) .with_metadata("get_session", Value::Bool(true))
.no_display() .no_display()
.with_about("Log out of StartOS server")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("session", session::<C>()) .subcommand(
"session",
session::<C>().with_about("List or kill StartOS sessions"),
)
.subcommand( .subcommand(
"reset-password", "reset-password",
from_fn_async(reset_password_impl).no_cli(), from_fn_async(reset_password_impl).no_cli(),
) )
.subcommand( .subcommand(
"reset-password", "reset-password",
from_fn_async(cli_reset_password).no_display(), from_fn_async(cli_reset_password)
.no_display()
.with_about("Reset StartOS password"),
) )
.subcommand( .subcommand(
"get-pubkey", "get-pubkey",
from_fn_async(get_pubkey) from_fn_async(get_pubkey)
.with_metadata("authenticated", Value::Bool(false)) .with_metadata("authenticated", Value::Bool(false))
.no_display() .no_display()
.with_about("Get public key derived from server private key")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
@@ -275,8 +287,8 @@ pub struct Session {
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[ts(export)] #[ts(export)]
pub struct SessionList { pub struct SessionList {
#[ts(type = "string")] #[ts(type = "string | null")]
current: InternedString, current: Option<InternedString>,
sessions: Sessions, sessions: Sessions,
} }
@@ -290,12 +302,14 @@ pub fn session<C: Context>() -> ParentHandler<C> {
.with_custom_display_fn(|handle, result| { .with_custom_display_fn(|handle, result| {
Ok(display_sessions(handle.params, result)) Ok(display_sessions(handle.params, result))
}) })
.with_about("Display all server sessions")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"kill", "kill",
from_fn_async(kill) from_fn_async(kill)
.no_display() .no_display()
.with_about("Terminate existing server session(s)")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
@@ -323,7 +337,7 @@ fn display_sessions(params: WithIoFormat<ListParams>, arg: SessionList) {
session.user_agent.as_deref().unwrap_or("N/A"), session.user_agent.as_deref().unwrap_or("N/A"),
&format!("{}", session.metadata), &format!("{}", session.metadata),
]; ];
if id == arg.current { if Some(id) == arg.current {
row.iter_mut() row.iter_mut()
.map(|c| c.style(Attr::ForegroundColor(color::GREEN))) .map(|c| c.style(Attr::ForegroundColor(color::GREEN)))
.collect::<()>() .collect::<()>()
@@ -340,7 +354,7 @@ pub struct ListParams {
#[arg(skip)] #[arg(skip)]
#[ts(skip)] #[ts(skip)]
#[serde(rename = "__auth_session")] // from Auth middleware #[serde(rename = "__auth_session")] // from Auth middleware
session: InternedString, session: Option<InternedString>,
} }
// #[command(display(display_sessions))] // #[command(display(display_sessions))]

View File

@@ -141,7 +141,7 @@ impl Drop for BackupStatusGuard {
.ser(&None) .ser(&None)
}) })
.await .await
.unwrap() .log_err()
}); });
} }
} }
@@ -332,10 +332,10 @@ async fn perform_backup(
let timestamp = Utc::now(); let timestamp = Utc::now();
backup_guard.unencrypted_metadata.version = crate::version::Current::new().semver().into(); backup_guard.unencrypted_metadata.version = crate::version::Current::default().semver().into();
backup_guard.unencrypted_metadata.hostname = ctx.account.read().await.hostname.clone(); backup_guard.unencrypted_metadata.hostname = ctx.account.read().await.hostname.clone();
backup_guard.unencrypted_metadata.timestamp = timestamp.clone(); backup_guard.unencrypted_metadata.timestamp = timestamp.clone();
backup_guard.metadata.version = crate::version::Current::new().semver().into(); backup_guard.metadata.version = crate::version::Current::default().semver().into();
backup_guard.metadata.timestamp = Some(timestamp); backup_guard.metadata.timestamp = Some(timestamp);
backup_guard.metadata.package_backups = package_backups; backup_guard.metadata.package_backups = package_backups;

View File

@@ -40,9 +40,13 @@ pub fn backup<C: Context>() -> ParentHandler<C> {
"create", "create",
from_fn_async(backup_bulk::backup_all) from_fn_async(backup_bulk::backup_all)
.no_display() .no_display()
.with_about("Create backup for all packages")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("target", target::target::<C>()) .subcommand(
"target",
target::target::<C>().with_about("Commands related to a backup target"),
)
} }
pub fn package_backup<C: Context>() -> ParentHandler<C> { pub fn package_backup<C: Context>() -> ParentHandler<C> {
@@ -50,6 +54,7 @@ pub fn package_backup<C: Context>() -> ParentHandler<C> {
"restore", "restore",
from_fn_async(restore::restore_packages_rpc) from_fn_async(restore::restore_packages_rpc)
.no_display() .no_display()
.with_about("Restore package(s) from backup")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }

View File

@@ -52,18 +52,21 @@ pub fn cifs<C: Context>() -> ParentHandler<C> {
"add", "add",
from_fn_async(add) from_fn_async(add)
.no_display() .no_display()
.with_about("Add a new backup target")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"update", "update",
from_fn_async(update) from_fn_async(update)
.no_display() .no_display()
.with_about("Update an existing backup target")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"remove", "remove",
from_fn_async(remove) from_fn_async(remove)
.no_display() .no_display()
.with_about("Remove an existing backup target")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }

View File

@@ -9,7 +9,7 @@ use digest::generic_array::GenericArray;
use digest::OutputSizeUser; use digest::OutputSizeUser;
use exver::Version; use exver::Version;
use imbl_value::InternedString; use imbl_value::InternedString;
use models::PackageId; use models::{FromStrParser, PackageId};
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sha2::Sha256; use sha2::Sha256;
@@ -27,7 +27,6 @@ use crate::disk::mount::filesystem::{FileSystem, MountType, ReadWrite};
use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard};
use crate::disk::util::PartitionInfo; use crate::disk::util::PartitionInfo;
use crate::prelude::*; use crate::prelude::*;
use crate::util::clap::FromStrParser;
use crate::util::serde::{ use crate::util::serde::{
deserialize_from_str, display_serializable, serialize_display, HandlerExtSerde, WithIoFormat, deserialize_from_str, display_serializable, serialize_display, HandlerExtSerde, WithIoFormat,
}; };
@@ -142,11 +141,15 @@ impl FileSystem for BackupTargetFS {
// #[command(subcommands(cifs::cifs, list, info, mount, umount))] // #[command(subcommands(cifs::cifs, list, info, mount, umount))]
pub fn target<C: Context>() -> ParentHandler<C> { pub fn target<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("cifs", cifs::cifs::<C>()) .subcommand(
"cifs",
cifs::cifs::<C>().with_about("Add, remove, or update a backup target"),
)
.subcommand( .subcommand(
"list", "list",
from_fn_async(list) from_fn_async(list)
.with_display_serializable() .with_display_serializable()
.with_about("List existing backup targets")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
@@ -156,16 +159,20 @@ pub fn target<C: Context>() -> ParentHandler<C> {
.with_custom_display_fn::<CliContext, _>(|params, info| { .with_custom_display_fn::<CliContext, _>(|params, info| {
Ok(display_backup_info(params.params, info)) Ok(display_backup_info(params.params, info))
}) })
.with_about("Display package backup information")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"mount", "mount",
from_fn_async(mount).with_call_remote::<CliContext>(), from_fn_async(mount)
.with_about("Mount backup target")
.with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"umount", "umount",
from_fn_async(umount) from_fn_async(umount)
.no_display() .no_display()
.with_about("Unmount backup target")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }

View File

@@ -8,7 +8,7 @@ use crate::util::logger::EmbassyLogger;
use crate::version::{Current, VersionT}; use crate::version::{Current, VersionT};
lazy_static::lazy_static! { lazy_static::lazy_static! {
static ref VERSION_STRING: String = Current::new().semver().to_string(); static ref VERSION_STRING: String = Current::default().semver().to_string();
} }
pub fn main(args: impl IntoIterator<Item = OsString>) { pub fn main(args: impl IntoIterator<Item = OsString>) {

View File

@@ -28,6 +28,16 @@ fn select_executable(name: &str) -> Option<fn(VecDeque<OsString>)> {
"embassy-sdk" => Some(|_| deprecated::renamed("embassy-sdk", "start-sdk")), "embassy-sdk" => Some(|_| deprecated::renamed("embassy-sdk", "start-sdk")),
"embassyd" => Some(|_| deprecated::renamed("embassyd", "startd")), "embassyd" => Some(|_| deprecated::renamed("embassyd", "startd")),
"embassy-init" => Some(|_| deprecated::removed("embassy-init")), "embassy-init" => Some(|_| deprecated::removed("embassy-init")),
"contents" => Some(|_| {
#[cfg(feature = "cli")]
println!("start-cli");
#[cfg(feature = "container-runtime")]
println!("start-cli (container)");
#[cfg(feature = "daemon")]
println!("startd");
#[cfg(feature = "registry")]
println!("registry");
}),
_ => None, _ => None,
} }
} }

View File

@@ -9,7 +9,7 @@ use crate::util::logger::EmbassyLogger;
use crate::version::{Current, VersionT}; use crate::version::{Current, VersionT};
lazy_static::lazy_static! { lazy_static::lazy_static! {
static ref VERSION_STRING: String = Current::new().semver().to_string(); static ref VERSION_STRING: String = Current::default().semver().to_string();
} }
pub fn main(args: impl IntoIterator<Item = OsString>) { pub fn main(args: impl IntoIterator<Item = OsString>) {

View File

@@ -1,22 +0,0 @@
use std::collections::{BTreeMap, BTreeSet};
use models::PackageId;
use serde::{Deserialize, Serialize};
use super::{Config, ConfigSpec};
#[allow(unused_imports)]
use crate::prelude::*;
use crate::status::health_check::HealthCheckId;
/// Result of a package's config-get procedure: the current config values (if
/// any have been set) plus the spec describing the config's shape.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ConfigRes {
    /// Current configuration, or `None` if the package has never been configured.
    pub config: Option<Config>,
    /// Schema describing the fields the config may contain.
    pub spec: ConfigSpec,
}
/// Result of a package's config-set procedure.
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SetResult {
    // Map of dependency package id -> health checks this package relies on
    // from that dependency (as reported by the package's set procedure).
    pub depends_on: BTreeMap<PackageId, BTreeSet<HealthCheckId>>,
}

View File

@@ -1,281 +0,0 @@
use std::collections::BTreeSet;
use std::sync::Arc;
use std::time::Duration;
use clap::Parser;
use color_eyre::eyre::eyre;
use indexmap::{IndexMap, IndexSet};
use itertools::Itertools;
use models::{ErrorKind, OptionExt, PackageId};
use patch_db::value::InternedString;
use patch_db::Value;
use regex::Regex;
use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
use tracing::instrument;
use ts_rs::TS;
use crate::context::{CliContext, RpcContext};
use crate::prelude::*;
use crate::rpc_continuations::Guid;
use crate::util::serde::{HandlerExtSerde, StdinDeserializable};
/// Ordered map of config field name -> spec value describing that field.
/// Insertion order is preserved (IndexMap) so fields render in author order.
#[derive(Clone, Debug, Default, Serialize, Deserialize)]
pub struct ConfigSpec(pub IndexMap<InternedString, Value>);
pub mod action;
pub mod util;
use util::NumRange;
use self::action::ConfigRes;
pub type Config = patch_db::value::InOMap<InternedString, Value>;
/// Human-readable name for a value's JSON-ish type, used when building
/// config validation error messages.
pub trait TypeOf {
    fn type_of(&self) -> &'static str;
}
impl TypeOf for Value {
    fn type_of(&self) -> &'static str {
        // Note: arrays are reported as "list" (not "array") to match the
        // wording used in the config error messages.
        match self {
            Value::Null => "null",
            Value::Bool(_) => "boolean",
            Value::Number(_) => "number",
            Value::String(_) => "string",
            Value::Array(_) => "list",
            Value::Object(_) => "object",
        }
    }
}
/// Errors that can occur while generating or validating a package config.
#[derive(Debug, thiserror::Error)]
pub enum ConfigurationError {
    #[error("Timeout Error")]
    TimeoutError(#[from] TimeoutError),
    #[error("No Match: {0}")]
    NoMatch(#[from] NoMatchWithPath),
    #[error("System Error: {0}")]
    SystemError(Error),
}
impl From<ConfigurationError> for Error {
    fn from(err: ConfigurationError) -> Self {
        // Preserve the original error kind for system errors; everything else
        // is classified as a config-generation failure.
        let kind = match &err {
            ConfigurationError::SystemError(e) => e.kind,
            _ => crate::ErrorKind::ConfigGen,
        };
        crate::Error::new(err, kind)
    }
}
/// Marker error for a config procedure exceeding its time budget.
#[derive(Clone, Copy, Debug, thiserror::Error)]
#[error("Timeout Error")]
pub struct TimeoutError;
/// A validation failure plus the path of keys leading to the offending value.
/// The path is stored innermost-first and reversed for display.
#[derive(Clone, Debug, thiserror::Error)]
pub struct NoMatchWithPath {
    pub path: Vec<InternedString>,
    pub error: MatchError,
}
impl NoMatchWithPath {
    /// Wrap a `MatchError` with an (initially empty) path.
    pub fn new(error: MatchError) -> Self {
        NoMatchWithPath {
            path: Vec::new(),
            error,
        }
    }
    /// Push an enclosing key segment as validation unwinds outward.
    /// Segments are pushed innermost-first; `Display` reverses them.
    pub fn prepend(mut self, seg: InternedString) -> Self {
        self.path.push(seg);
        self
    }
}
impl std::fmt::Display for NoMatchWithPath {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Reverse to render outermost key first, dot-separated.
        write!(f, "{}: {}", self.path.iter().rev().join("."), self.error)
    }
}
impl From<NoMatchWithPath> for Error {
    fn from(e: NoMatchWithPath) -> Self {
        // Route through ConfigurationError so the kind becomes ConfigGen.
        ConfigurationError::from(e).into()
    }
}
/// Specific ways a config value can fail to match its spec.
#[derive(Clone, Debug, thiserror::Error)]
pub enum MatchError {
    #[error("String {0:?} Does Not Match Pattern {1}")]
    Pattern(Arc<String>, Regex),
    #[error("String {0:?} Is Not In Enum {1:?}")]
    Enum(Arc<String>, IndexSet<String>),
    #[error("Field Is Not Nullable")]
    NotNullable,
    #[error("Length Mismatch: expected {0}, actual: {1}")]
    LengthMismatch(NumRange<usize>, usize),
    #[error("Invalid Type: expected {0}, actual: {1}")]
    InvalidType(&'static str, &'static str),
    #[error("Number Out Of Range: expected {0}, actual: {1}")]
    OutOfRange(NumRange<f64>, f64),
    #[error("Number Is Not Integral: {0}")]
    NonIntegral(f64),
    #[error("Variant {0:?} Is Not In Union {1:?}")]
    Union(Arc<String>, IndexSet<String>),
    #[error("Variant Is Missing Tag {0:?}")]
    MissingTag(InternedString),
    #[error("Property {0:?} Of Variant {1:?} Conflicts With Union Tag")]
    PropertyMatchesUnionTag(InternedString, String),
    #[error("Name of Property {0:?} Conflicts With Map Tag Name")]
    PropertyNameMatchesMapTag(String),
    #[error("Object Key Is Invalid: {0}")]
    InvalidKey(String),
    #[error("Value In List Is Not Unique")]
    ListUniquenessViolation,
}
/// CLI/RPC parameters shared by the `config` subcommands: the target package.
#[derive(Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")]
pub struct ConfigParams {
    pub id: PackageId,
}
// #[command(subcommands(get, set))]
/// Handler tree for `config`: `get` fetches current config + spec, `set`
/// applies a new config. Both inherit the package id from `ConfigParams`.
pub fn config<C: Context>() -> ParentHandler<C, ConfigParams> {
    let get_cmd = from_fn_async(get)
        .with_inherited(|ConfigParams { id }, _| id)
        .with_display_serializable()
        .with_call_remote::<CliContext>();
    let set_cmd = set::<C>().with_inherited(|ConfigParams { id }, _| id);
    ParentHandler::new()
        .subcommand("get", get_cmd)
        .subcommand("set", set_cmd)
}
/// RPC: fetch the current config and spec for package `id` by invoking the
/// service's config-get procedure. Errors if no manager is running for `id`.
#[instrument(skip_all)]
pub async fn get(ctx: RpcContext, _: Empty, id: PackageId) -> Result<ConfigRes, Error> {
    ctx.services
        .get(&id)
        .await
        .as_ref()
        .or_not_found(lazy_format!("Manager for {id}"))?
        .get_config(Guid::new())
        .await
}
/// Parameters for `config set`.
#[derive(Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")]
pub struct SetParams {
    /// Optional time budget for the configure procedure.
    #[arg(long = "timeout")]
    pub timeout: Option<crate::util::serde::Duration>,
    /// New config object; may be piped in on stdin (hence StdinDeserializable).
    #[command(flatten)]
    #[ts(type = "{ [key: string]: any } | null")]
    pub config: StdinDeserializable<Option<Config>>,
}
// #[command(
// subcommands(self(set_impl(async, context(RpcContext))), set_dry),
// display(display_none),
// metadata(sync_db = true)
// )]
// #[command(
//     subcommands(self(set_impl(async, context(RpcContext))), set_dry),
//     display(display_none),
//     metadata(sync_db = true)
// )]
/// Handler tree for `config set`: the root applies the config (and flags the
/// db for sync); `dry` reports which dependents would break without applying.
#[instrument(skip_all)]
pub fn set<C: Context>() -> ParentHandler<C, SetParams, PackageId> {
    let apply = from_fn_async(set_impl)
        .with_metadata("sync_db", Value::Bool(true))
        .with_inherited(|set_params, id| (id, set_params))
        .no_display()
        .with_call_remote::<CliContext>();
    let dry = from_fn_async(set_dry)
        .with_inherited(|set_params, id| (id, set_params))
        .no_display()
        .with_call_remote::<CliContext>();
    ParentHandler::new().root_handler(apply).subcommand("dry", dry)
}
/// Dry-run of `config set`: returns the set of packages that currently depend
/// on `id` and whose dependency_config hook reports they would be affected by
/// the proposed config. Does not apply anything.
pub async fn set_dry(
    ctx: RpcContext,
    _: Empty,
    (
        id,
        SetParams {
            // NOTE(review): timeout is accepted but unused in the dry run.
            timeout,
            config: StdinDeserializable(config),
        },
    ): (PackageId, SetParams),
) -> Result<BTreeSet<PackageId>, Error> {
    let mut breakages = BTreeSet::new();
    let procedure_id = Guid::new();
    let db = ctx.db.peek().await;
    // Walk all installed packages, keeping only those whose current
    // dependencies include `id` (propagating any db read errors).
    for dep in db
        .as_public()
        .as_package_data()
        .as_entries()?
        .into_iter()
        .filter_map(
            |(k, v)| match v.as_current_dependencies().contains_key(&id) {
                Ok(true) => Some(Ok(k)),
                Ok(false) => None,
                Err(e) => Some(Err(e)),
            },
        )
    {
        let dep_id = dep?;
        // Dependents without a running manager are skipped, not errors.
        let Some(dependent) = &*ctx.services.get(&dep_id).await else {
            continue;
        };
        // A Some(_) result from the dependency_config hook means the
        // dependent would need changes if this config were applied.
        if dependent
            .dependency_config(procedure_id.clone(), id.clone(), config.clone())
            .await?
            .is_some()
        {
            breakages.insert(dep_id);
        }
    }
    Ok(breakages)
}
/// Options handed to a service's configure procedure.
#[derive(Default)]
pub struct ConfigureContext {
    /// Optional time budget; `None` means no explicit timeout here.
    pub timeout: Option<Duration>,
    /// New config to apply, or `None` (semantics decided by the service).
    pub config: Option<Config>,
}
/// Apply a new config to package `id` by invoking the service's configure
/// procedure. Errors if no manager is running for the package.
#[instrument(skip_all)]
pub async fn set_impl(
    ctx: RpcContext,
    _: Empty,
    (
        id,
        SetParams {
            timeout,
            config: StdinDeserializable(config),
        },
    ): (PackageId, SetParams),
) -> Result<(), Error> {
    let configure_context = ConfigureContext {
        // Unwrap the serde Duration newtype into a std Duration.
        timeout: timeout.map(|t| *t),
        config,
    };
    ctx.services
        .get(&id)
        .await
        .as_ref()
        .ok_or_else(|| {
            Error::new(
                eyre!("There is no manager running for {id}"),
                ErrorKind::Unknown,
            )
        })?
        .configure(Guid::new(), configure_context)
        .await?;
    Ok(())
}

View File

@@ -1,406 +0,0 @@
use std::borrow::Cow;
use std::ops::{Bound, RangeBounds, RangeInclusive};
use patch_db::Value;
use rand::distributions::Distribution;
use rand::Rng;
use super::Config;
/// A shared null value usable where a `&'static Value` is needed.
pub const STATIC_NULL: Value = Value::Null;
/// Weighted set of inclusive character ranges. Field .0 holds
/// `(range, count_of_chars_in_range)` pairs; field .1 is the total count,
/// used as the sampling denominator in `gen`.
#[derive(Clone, Debug)]
pub struct CharSet(pub Vec<(RangeInclusive<char>, usize)>, usize);
impl CharSet {
    /// True if `c` falls inside any of the stored ranges.
    pub fn contains(&self, c: &char) -> bool {
        self.0.iter().any(|(range, _)| range.contains(c))
    }
    /// Draw a uniformly random character from the whole set: first pick a
    /// range weighted by its size, then sample uniformly within it.
    pub fn gen<R: Rng>(&self, rng: &mut R) -> char {
        let mut idx = rng.gen_range(0..self.1);
        for (range, weight) in &self.0 {
            // Not this range — consume its weight and move on.
            if idx >= *weight {
                idx -= *weight;
                continue;
            }
            let code = rand::distributions::Uniform::new_inclusive(
                u32::from(*range.start()),
                u32::from(*range.end()),
            )
            .sample(rng);
            return std::convert::TryFrom::try_from(code).unwrap();
        }
        // Invariant: self.1 equals the sum of the per-range weights, so the
        // loop must always return before exhausting the ranges.
        unreachable!()
    }
}
impl Default for CharSet {
    fn default() -> Self {
        // All printable ASCII except space: '!' (0x21) through '~' (0x7e),
        // which is 94 characters.
        CharSet(vec![('!'..='~', 94)], 94)
    }
}
/// Parses a charset spec string such as "a-z,A-Z,0-9,-" into ranges.
/// Grammar: comma-separated items; an item is either a single char or
/// `start-end`. ',' and '-' may themselves appear as literal chars via
/// position (a leading ',' / '-' is a literal; a '-' between chars starts
/// a range). Implemented as a small state machine over (a, b, in_range):
/// `a` = pending start char, `b` = pending end char, `in_range` = a '-'
/// separator has been seen.
impl<'de> serde::de::Deserialize<'de> for CharSet {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        let mut res = Vec::new();
        let mut len = 0;
        let mut a: Option<char> = None;
        let mut b: Option<char> = None;
        let mut in_range = false;
        for c in s.chars() {
            match c {
                ',' => match (a, b, in_range) {
                    // Complete range "start-end": validate and commit it.
                    (Some(start), Some(end), _) => {
                        // NOTE(review): only `end` is checked for ASCII here;
                        // a non-ASCII `start` is rejected indirectly by the
                        // `start >= end` bounds check when end is ASCII.
                        if !end.is_ascii() {
                            return Err(serde::de::Error::custom("Invalid Character"));
                        }
                        if start >= end {
                            return Err(serde::de::Error::custom("Invalid Bounds"));
                        }
                        let l = u32::from(end) - u32::from(start) + 1;
                        res.push((start..=end, l as usize));
                        len += l as usize;
                        a = None;
                        b = None;
                        in_range = false;
                    }
                    // Single literal char followed by ','.
                    (Some(start), None, false) => {
                        len += 1;
                        res.push((start..=start, 1));
                        a = None;
                    }
                    // "x-," : ',' is the literal end of a range.
                    (Some(_), None, true) => {
                        b = Some(',');
                    }
                    // Leading ',' : literal comma as the start char.
                    (None, None, false) => {
                        a = Some(',');
                    }
                    _ => {
                        return Err(serde::de::Error::custom("Syntax Error"));
                    }
                },
                '-' => {
                    if a.is_none() {
                        // Leading '-' is a literal.
                        a = Some('-');
                    } else if !in_range {
                        // First '-' after a start char begins a range.
                        in_range = true;
                    } else if b.is_none() {
                        // "x--" : second '-' is the literal range end.
                        b = Some('-')
                    } else {
                        return Err(serde::de::Error::custom("Syntax Error"));
                    }
                }
                _ => {
                    if a.is_none() {
                        a = Some(c);
                    } else if in_range && b.is_none() {
                        b = Some(c);
                    } else {
                        return Err(serde::de::Error::custom("Syntax Error"));
                    }
                }
            }
        }
        // Flush whatever item was still pending at end of input
        // (same commit logic as the ',' branch above).
        match (a, b) {
            (Some(start), Some(end)) => {
                if !end.is_ascii() {
                    return Err(serde::de::Error::custom("Invalid Character"));
                }
                if start >= end {
                    return Err(serde::de::Error::custom("Invalid Bounds"));
                }
                let l = u32::from(end) - u32::from(start) + 1;
                res.push((start..=end, l as usize));
                len += l as usize;
            }
            (Some(c), None) => {
                len += 1;
                res.push((c..=c, 1));
            }
            _ => (),
        }
        Ok(CharSet(res, len))
    }
}
/// Serializes back to the same comma-separated spec string the deserializer
/// accepts: single chars as-is, multi-char ranges as "start-end".
impl serde::ser::Serialize for CharSet {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::ser::Serializer,
    {
        let mut parts = Vec::with_capacity(self.0.len());
        for (range, count) in &self.0 {
            if *count == 1 {
                parts.push(format!("{}", range.start()));
            } else {
                parts.push(format!("{}-{}", range.start(), range.end()));
            }
        }
        serializer.serialize_str(&parts.join(","))
    }
}
/// Recursive "fill in the gaps" merge for JSON values.
pub trait MergeWith {
    fn merge_with(&mut self, other: &serde_json::Value);
}
impl MergeWith for serde_json::Value {
    /// Merge `other` into `self`: recurse where both sides are objects,
    /// copy keys missing from `self`, and leave conflicting non-object
    /// values in `self` untouched. No-op unless both values are objects.
    fn merge_with(&mut self, other: &serde_json::Value) {
        use serde_json::Value::Object;
        let (Object(dst), Object(src)) = (self, other) else {
            return;
        };
        for (key, val) in src.into_iter() {
            match (dst.get_mut(key), val) {
                (Some(existing @ Object(_)), incoming @ Object(_)) => {
                    existing.merge_with(incoming);
                }
                (None, _) => {
                    dst.insert(key.clone(), val.clone());
                }
                // Existing non-object value wins; incoming is ignored.
                _ => (),
            }
        }
    }
}
// Exercises merge_with: existing scalars/arrays win over incoming values,
// nested objects merge key-by-key, and keys absent from the original are
// copied in from the other value.
#[test]
fn merge_with_tests() {
    use serde_json::json;
    let mut a = json!(
        {"a": 1, "c": {"d": "123"}, "i": [1,2,3], "j": {}, "k":[1,2,3], "l": "test"}
    );
    a.merge_with(
        &json!({"a":"a", "b": "b", "c":{"d":"d", "e":"e"}, "f":{"g":"g"}, "h": [1,2,3], "i":"i", "j":[1,2,3], "k":{}}),
    );
    assert_eq!(
        a,
        json!({"a": 1, "c": {"d": "123", "e":"e"}, "b":"b", "f": {"g":"g"}, "h":[1,2,3], "i":[1,2,3], "j": {}, "k":[1,2,3], "l": "test"})
    )
}
/// `#[serde(with = "serde_regex")]` helpers: round-trip a `Regex` through its
/// pattern string.
pub mod serde_regex {
    use regex::Regex;
    use serde::*;
    pub fn serialize<S>(regex: &Regex, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        <&str>::serialize(&regex.as_str(), serializer)
    }
    pub fn deserialize<'de, D>(deserializer: D) -> Result<Regex, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        // Invalid patterns surface as custom deserialization errors.
        Regex::new(&s).map_err(|e| de::Error::custom(e))
    }
}
/// A numeric interval with independently open/closed/unbounded endpoints,
/// rendered in interval notation (e.g. "[0,10)", "(*,5]").
#[derive(Clone, Debug)]
pub struct NumRange<T: std::str::FromStr + std::fmt::Display + std::cmp::PartialOrd>(
    pub (Bound<T>, Bound<T>),
);
// Deref to the bound pair so `RangeBounds` methods (start_bound/end_bound/
// contains) are available directly on NumRange.
impl<T> std::ops::Deref for NumRange<T>
where
    T: std::str::FromStr + std::fmt::Display + std::cmp::PartialOrd,
{
    type Target = (Bound<T>, Bound<T>);
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Parses interval notation produced by `Display`: "[n," / "(n," / "(*," on
/// the left and ",n]" / ",n)" / ",*)" on the right. A missing right side
/// defaults to Unbounded; a missing/invalid left side is an error.
impl<'de, T> serde::de::Deserialize<'de> for NumRange<T>
where
    T: std::str::FromStr + std::fmt::Display + std::cmp::PartialOrd,
    <T as std::str::FromStr>::Err: std::fmt::Display,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::de::Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        let mut split = s.split(",");
        // Left bound: dispatch on the first byte ('(' or '[').
        // `get(..1)` is byte-indexed and returns None on a non-ASCII
        // boundary, which falls through to the error arm.
        let start = split
            .next()
            .map(|s| match s.get(..1) {
                Some("(") => match s.get(1..2) {
                    // "(*" means unbounded below.
                    Some("*") => Ok(Bound::Unbounded),
                    _ => s[1..]
                        .trim()
                        .parse()
                        .map(Bound::Excluded)
                        .map_err(|e| serde::de::Error::custom(e)),
                },
                Some("[") => s[1..]
                    .trim()
                    .parse()
                    .map(Bound::Included)
                    .map_err(|e| serde::de::Error::custom(e)),
                _ => Err(serde::de::Error::custom(format!(
                    "Could not parse left bound: {}",
                    s
                ))),
            })
            .transpose()?
            // split() always yields at least one item, so unwrap is safe.
            .unwrap();
        // Right bound: dispatch on the last byte (')' or ']'); absent side
        // (no ',' in the input) defaults to Unbounded.
        let end = split
            .next()
            .map(|s| match s.get(s.len() - 1..) {
                Some(")") => match s.get(s.len() - 2..s.len() - 1) {
                    // "*)" means unbounded above.
                    Some("*") => Ok(Bound::Unbounded),
                    _ => s[..s.len() - 1]
                        .trim()
                        .parse()
                        .map(Bound::Excluded)
                        .map_err(|e| serde::de::Error::custom(e)),
                },
                Some("]") => s[..s.len() - 1]
                    .trim()
                    .parse()
                    .map(Bound::Included)
                    .map_err(|e| serde::de::Error::custom(e)),
                _ => Err(serde::de::Error::custom(format!(
                    "Could not parse right bound: {}",
                    s
                ))),
            })
            .transpose()?
            .unwrap_or(Bound::Unbounded);
        Ok(NumRange((start, end)))
    }
}
/// Renders interval notation: '[' / '(' for included/excluded bounds and
/// "(*," / "*)" for unbounded sides — the exact format the deserializer parses.
impl<T> std::fmt::Display for NumRange<T>
where
    T: std::str::FromStr + std::fmt::Display + std::cmp::PartialOrd,
{
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self.start_bound() {
            Bound::Included(n) => write!(f, "[{},", n)?,
            Bound::Excluded(n) => write!(f, "({},", n)?,
            Bound::Unbounded => write!(f, "(*,")?,
        }
        match self.end_bound() {
            Bound::Included(n) => write!(f, "{}]", n),
            Bound::Excluded(n) => write!(f, "{})", n),
            Bound::Unbounded => write!(f, "*)"),
        }
    }
}
/// Serializes a `NumRange` as its interval-notation string (the `Display`
/// form), matching what the deserializer accepts.
impl<T> serde::ser::Serialize for NumRange<T>
where
    T: std::str::FromStr + std::fmt::Display + std::cmp::PartialOrd,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::ser::Serializer,
    {
        // collect_str streams the Display output directly into the
        // serializer, replacing the previous format!-to-String-to-&str
        // round-trip with the idiomatic (and allocation-friendlier) form.
        serializer.collect_str(self)
    }
}
/// Uniqueness rule for list entries: compare by one key, by any of several
/// sub-rules, by all of them, or never consider entries duplicates.
#[derive(Clone, Debug)]
pub enum UniqueBy {
    Any(Vec<UniqueBy>),
    All(Vec<UniqueBy>),
    Exactly(String),
    NotUnique,
}
impl UniqueBy {
    /// True if `lhs` and `rhs` count as duplicates under this rule.
    /// `NotUnique` never matches; `Exactly` compares a single key's value;
    /// `Any`/`All` combine sub-rules recursively.
    pub fn eq(&self, lhs: &Config, rhs: &Config) -> bool {
        match self {
            UniqueBy::Exactly(key) => lhs.get(&**key) == rhs.get(&**key),
            UniqueBy::Any(rules) => rules.iter().any(|u| u.eq(lhs, rhs)),
            UniqueBy::All(rules) => rules.iter().all(|u| u.eq(lhs, rhs)),
            UniqueBy::NotUnique => false,
        }
    }
}
// By default, list entries are not subject to any uniqueness constraint.
impl Default for UniqueBy {
    fn default() -> Self {
        UniqueBy::NotUnique
    }
}
/// Accepts: a bare string (-> Exactly), a map with an "any"/"all" key holding
/// a list of sub-rules, or unit/none (-> NotUnique). Mirrors the Serialize
/// impl below.
impl<'de> serde::de::Deserialize<'de> for UniqueBy {
    fn deserialize<D: serde::de::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        struct Visitor;
        impl<'de> serde::de::Visitor<'de> for Visitor {
            type Value = UniqueBy;
            fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
                write!(formatter, "a key, an \"any\" object, or an \"all\" object")
            }
            fn visit_str<E: serde::de::Error>(self, v: &str) -> Result<Self::Value, E> {
                Ok(UniqueBy::Exactly(v.to_owned()))
            }
            fn visit_string<E: serde::de::Error>(self, v: String) -> Result<Self::Value, E> {
                Ok(UniqueBy::Exactly(v))
            }
            fn visit_map<A: serde::de::MapAccess<'de>>(
                self,
                mut map: A,
            ) -> Result<Self::Value, A::Error> {
                // Scan keys until "any"/"all" is found; remember the last
                // unknown key to report in the error if neither appears.
                let mut variant = None;
                while let Some(key) = map.next_key::<Cow<str>>()? {
                    match key.as_ref() {
                        "any" => {
                            return Ok(UniqueBy::Any(map.next_value()?));
                        }
                        "all" => {
                            return Ok(UniqueBy::All(map.next_value()?));
                        }
                        _ => {
                            variant = Some(key);
                        }
                    }
                }
                Err(serde::de::Error::unknown_variant(
                    variant.unwrap_or_default().as_ref(),
                    &["any", "all"],
                ))
            }
            fn visit_unit<E: serde::de::Error>(self) -> Result<Self::Value, E> {
                Ok(UniqueBy::NotUnique)
            }
            fn visit_none<E: serde::de::Error>(self) -> Result<Self::Value, E> {
                Ok(UniqueBy::NotUnique)
            }
        }
        deserializer.deserialize_any(Visitor)
    }
}
/// Emits the format the deserializer accepts: `Exactly` as a bare string,
/// `Any`/`All` as a single-entry map, `NotUnique` as unit/null.
impl serde::ser::Serialize for UniqueBy {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::ser::Serializer,
    {
        use serde::ser::SerializeMap;
        // Shared path for the two combinator variants: { key: [sub-rules] }.
        fn one_entry_map<S: serde::ser::Serializer>(
            serializer: S,
            key: &str,
            rules: &[UniqueBy],
        ) -> Result<S::Ok, S::Error> {
            let mut map = serializer.serialize_map(Some(1))?;
            map.serialize_key(key)?;
            map.serialize_value(rules)?;
            map.end()
        }
        match self {
            UniqueBy::Any(any) => one_entry_map(serializer, "any", any),
            UniqueBy::All(all) => one_entry_map(serializer, "all", all),
            UniqueBy::Exactly(key) => serializer.serialize_str(key),
            UniqueBy::NotUnique => serializer.serialize_unit(),
        }
    }
}

View File

@@ -1,4 +1,4 @@
use std::collections::BTreeMap; use std::collections::{BTreeMap, BTreeSet};
use std::future::Future; use std::future::Future;
use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4};
use std::ops::Deref; use std::ops::Deref;
@@ -9,8 +9,11 @@ use std::time::Duration;
use chrono::{TimeDelta, Utc}; use chrono::{TimeDelta, Utc};
use helpers::NonDetachingJoinHandle; use helpers::NonDetachingJoinHandle;
use imbl::OrdMap;
use imbl_value::InternedString; use imbl_value::InternedString;
use itertools::Itertools;
use josekit::jwk::Jwk; use josekit::jwk::Jwk;
use models::{ActionId, PackageId};
use reqwest::{Client, Proxy}; use reqwest::{Client, Proxy};
use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{CallRemote, Context, Empty}; use rpc_toolkit::{CallRemote, Context, Empty};
@@ -23,7 +26,6 @@ use crate::account::AccountInfo;
use crate::auth::Sessions; use crate::auth::Sessions;
use crate::context::config::ServerConfig; use crate::context::config::ServerConfig;
use crate::db::model::Database; use crate::db::model::Database;
use crate::dependencies::compute_dependency_config_errs;
use crate::disk::OsPartitionInfo; use crate::disk::OsPartitionInfo;
use crate::init::check_time_is_synchronized; use crate::init::check_time_is_synchronized;
use crate::lxc::{ContainerId, LxcContainer, LxcManager}; use crate::lxc::{ContainerId, LxcContainer, LxcManager};
@@ -33,11 +35,11 @@ use crate::net::wifi::WpaCli;
use crate::prelude::*; use crate::prelude::*;
use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle}; use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle};
use crate::rpc_continuations::{Guid, OpenAuthedContinuations, RpcContinuations}; use crate::rpc_continuations::{Guid, OpenAuthedContinuations, RpcContinuations};
use crate::service::action::update_requested_actions;
use crate::service::effects::callbacks::ServiceCallbacks; use crate::service::effects::callbacks::ServiceCallbacks;
use crate::service::ServiceMap; use crate::service::ServiceMap;
use crate::shutdown::Shutdown; use crate::shutdown::Shutdown;
use crate::system::get_mem_info; use crate::util::lshw::LshwDevice;
use crate::util::lshw::{lshw, LshwDevice};
use crate::util::sync::SyncMutex; use crate::util::sync::SyncMutex;
pub struct RpcContextSeed { pub struct RpcContextSeed {
@@ -58,16 +60,15 @@ pub struct RpcContextSeed {
pub shutdown: broadcast::Sender<Option<Shutdown>>, pub shutdown: broadcast::Sender<Option<Shutdown>>,
pub tor_socks: SocketAddr, pub tor_socks: SocketAddr,
pub lxc_manager: Arc<LxcManager>, pub lxc_manager: Arc<LxcManager>,
pub open_authed_continuations: OpenAuthedContinuations<InternedString>, pub open_authed_continuations: OpenAuthedContinuations<Option<InternedString>>,
pub rpc_continuations: RpcContinuations, pub rpc_continuations: RpcContinuations,
pub callbacks: ServiceCallbacks, pub callbacks: ServiceCallbacks,
pub wifi_manager: Option<Arc<RwLock<WpaCli>>>, pub wifi_manager: Option<Arc<RwLock<WpaCli>>>,
pub current_secret: Arc<Jwk>, pub current_secret: Arc<Jwk>,
pub client: Client, pub client: Client,
pub hardware: Hardware,
pub start_time: Instant, pub start_time: Instant,
pub crons: SyncMutex<BTreeMap<Guid, NonDetachingJoinHandle<()>>>, pub crons: SyncMutex<BTreeMap<Guid, NonDetachingJoinHandle<()>>>,
#[cfg(feature = "dev")] // #[cfg(feature = "dev")]
pub dev: Dev, pub dev: Dev,
} }
@@ -83,15 +84,14 @@ pub struct Hardware {
pub struct InitRpcContextPhases { pub struct InitRpcContextPhases {
load_db: PhaseProgressTrackerHandle, load_db: PhaseProgressTrackerHandle,
init_net_ctrl: PhaseProgressTrackerHandle, init_net_ctrl: PhaseProgressTrackerHandle,
read_device_info: PhaseProgressTrackerHandle,
cleanup_init: CleanupInitPhases, cleanup_init: CleanupInitPhases,
// TODO: migrations
} }
impl InitRpcContextPhases { impl InitRpcContextPhases {
pub fn new(handle: &FullProgressTracker) -> Self { pub fn new(handle: &FullProgressTracker) -> Self {
Self { Self {
load_db: handle.add_phase("Loading database".into(), Some(5)), load_db: handle.add_phase("Loading database".into(), Some(5)),
init_net_ctrl: handle.add_phase("Initializing network".into(), Some(1)), init_net_ctrl: handle.add_phase("Initializing network".into(), Some(1)),
read_device_info: handle.add_phase("Reading device information".into(), Some(1)),
cleanup_init: CleanupInitPhases::new(handle), cleanup_init: CleanupInitPhases::new(handle),
} }
} }
@@ -100,14 +100,14 @@ impl InitRpcContextPhases {
pub struct CleanupInitPhases { pub struct CleanupInitPhases {
cleanup_sessions: PhaseProgressTrackerHandle, cleanup_sessions: PhaseProgressTrackerHandle,
init_services: PhaseProgressTrackerHandle, init_services: PhaseProgressTrackerHandle,
check_dependencies: PhaseProgressTrackerHandle, check_requested_actions: PhaseProgressTrackerHandle,
} }
impl CleanupInitPhases { impl CleanupInitPhases {
pub fn new(handle: &FullProgressTracker) -> Self { pub fn new(handle: &FullProgressTracker) -> Self {
Self { Self {
cleanup_sessions: handle.add_phase("Cleaning up sessions".into(), Some(1)), cleanup_sessions: handle.add_phase("Cleaning up sessions".into(), Some(1)),
init_services: handle.add_phase("Initializing services".into(), Some(10)), init_services: handle.add_phase("Initializing services".into(), Some(10)),
check_dependencies: handle.add_phase("Checking dependencies".into(), Some(1)), check_requested_actions: handle.add_phase("Checking action requests".into(), Some(1)),
} }
} }
} }
@@ -123,7 +123,6 @@ impl RpcContext {
InitRpcContextPhases { InitRpcContextPhases {
mut load_db, mut load_db,
mut init_net_ctrl, mut init_net_ctrl,
mut read_device_info,
cleanup_init, cleanup_init,
}: InitRpcContextPhases, }: InitRpcContextPhases,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
@@ -175,11 +174,6 @@ impl RpcContext {
let metrics_cache = RwLock::<Option<crate::system::Metrics>>::new(None); let metrics_cache = RwLock::<Option<crate::system::Metrics>>::new(None);
let tor_proxy_url = format!("socks5h://{tor_proxy}"); let tor_proxy_url = format!("socks5h://{tor_proxy}");
read_device_info.start();
let devices = lshw().await?;
let ram = get_mem_info().await?.total.0 as u64 * 1024 * 1024;
read_device_info.complete();
let crons = SyncMutex::new(BTreeMap::new()); let crons = SyncMutex::new(BTreeMap::new());
if !db if !db
@@ -271,10 +265,9 @@ impl RpcContext {
})) }))
.build() .build()
.with_kind(crate::ErrorKind::ParseUrl)?, .with_kind(crate::ErrorKind::ParseUrl)?,
hardware: Hardware { devices, ram },
start_time: Instant::now(), start_time: Instant::now(),
crons, crons,
#[cfg(feature = "dev")] // #[cfg(feature = "dev")]
dev: Dev { dev: Dev {
lxc: Mutex::new(BTreeMap::new()), lxc: Mutex::new(BTreeMap::new()),
}, },
@@ -283,6 +276,7 @@ impl RpcContext {
let res = Self(seed.clone()); let res = Self(seed.clone());
res.cleanup_and_initialize(cleanup_init).await?; res.cleanup_and_initialize(cleanup_init).await?;
tracing::info!("Cleaned up transient states"); tracing::info!("Cleaned up transient states");
crate::version::post_init(&res).await?;
Ok(res) Ok(res)
} }
@@ -309,7 +303,7 @@ impl RpcContext {
CleanupInitPhases { CleanupInitPhases {
mut cleanup_sessions, mut cleanup_sessions,
init_services, init_services,
mut check_dependencies, mut check_requested_actions,
}: CleanupInitPhases, }: CleanupInitPhases,
) -> Result<(), Error> { ) -> Result<(), Error> {
cleanup_sessions.start(); cleanup_sessions.start();
@@ -366,35 +360,68 @@ impl RpcContext {
cleanup_sessions.complete(); cleanup_sessions.complete();
self.services.init(&self, init_services).await?; self.services.init(&self, init_services).await?;
tracing::info!("Initialized Package Managers"); tracing::info!("Initialized Services");
check_dependencies.start(); // TODO
let mut updated_current_dependents = BTreeMap::new(); check_requested_actions.start();
let peek = self.db.peek().await; let peek = self.db.peek().await;
for (package_id, package) in peek.as_public().as_package_data().as_entries()?.into_iter() { let mut action_input: OrdMap<PackageId, BTreeMap<ActionId, Value>> = OrdMap::new();
let package = package.clone(); let requested_actions: BTreeSet<_> = peek
let mut current_dependencies = package.as_current_dependencies().de()?; .as_public()
compute_dependency_config_errs(self, &package_id, &mut current_dependencies) .as_package_data()
.await .as_entries()?
.log_err(); .into_iter()
updated_current_dependents.insert(package_id.clone(), current_dependencies); .map(|(_, pde)| {
Ok(pde
.as_requested_actions()
.as_entries()?
.into_iter()
.map(|(_, r)| {
Ok::<_, Error>((
r.as_request().as_package_id().de()?,
r.as_request().as_action_id().de()?,
))
}))
})
.flatten_ok()
.map(|a| a.and_then(|a| a))
.try_collect()?;
let procedure_id = Guid::new();
for (package_id, action_id) in requested_actions {
if let Some(service) = self.services.get(&package_id).await.as_ref() {
if let Some(input) = service
.get_action_input(procedure_id.clone(), action_id.clone())
.await?
.and_then(|i| i.value)
{
action_input
.entry(package_id)
.or_default()
.insert(action_id, input);
}
}
} }
self.db self.db
.mutate(|v| { .mutate(|db| {
for (package_id, deps) in updated_current_dependents { for (package_id, action_input) in &action_input {
if let Some(model) = v for (action_id, input) in action_input {
.as_public_mut() for (_, pde) in db.as_public_mut().as_package_data_mut().as_entries_mut()? {
.as_package_data_mut() pde.as_requested_actions_mut().mutate(|requested_actions| {
.as_idx_mut(&package_id) Ok(update_requested_actions(
.map(|i| i.as_current_dependencies_mut()) requested_actions,
{ package_id,
model.ser(&deps)?; action_id,
input,
false,
))
})?;
}
} }
} }
Ok(()) Ok(())
}) })
.await?; .await?;
check_dependencies.complete(); check_requested_actions.complete();
Ok(()) Ok(())
} }
@@ -431,8 +458,8 @@ impl AsRef<RpcContinuations> for RpcContext {
&self.rpc_continuations &self.rpc_continuations
} }
} }
impl AsRef<OpenAuthedContinuations<InternedString>> for RpcContext { impl AsRef<OpenAuthedContinuations<Option<InternedString>>> for RpcContext {
fn as_ref(&self) -> &OpenAuthedContinuations<InternedString> { fn as_ref(&self) -> &OpenAuthedContinuations<Option<InternedString>> {
&self.open_authed_continuations &self.open_authed_continuations
} }
} }

View File

@@ -21,6 +21,7 @@ use crate::account::AccountInfo;
use crate::context::config::ServerConfig; use crate::context::config::ServerConfig;
use crate::context::RpcContext; use crate::context::RpcContext;
use crate::disk::OsPartitionInfo; use crate::disk::OsPartitionInfo;
use crate::hostname::Hostname;
use crate::init::init_postgres; use crate::init::init_postgres;
use crate::prelude::*; use crate::prelude::*;
use crate::progress::FullProgressTracker; use crate::progress::FullProgressTracker;
@@ -42,6 +43,8 @@ lazy_static::lazy_static! {
pub struct SetupResult { pub struct SetupResult {
pub tor_address: String, pub tor_address: String,
#[ts(type = "string")] #[ts(type = "string")]
pub hostname: Hostname,
#[ts(type = "string")]
pub lan_address: InternedString, pub lan_address: InternedString,
pub root_ca: String, pub root_ca: String,
} }
@@ -50,6 +53,7 @@ impl TryFrom<&AccountInfo> for SetupResult {
fn try_from(value: &AccountInfo) -> Result<Self, Self::Error> { fn try_from(value: &AccountInfo) -> Result<Self, Self::Error> {
Ok(Self { Ok(Self {
tor_address: format!("https://{}", value.tor_key.public().get_onion_address()), tor_address: format!("https://{}", value.tor_key.public().get_onion_address()),
hostname: value.hostname.clone(),
lan_address: value.hostname.lan_address(), lan_address: value.hostname.lan_address(),
root_ca: String::from_utf8(value.root_ca_cert.to_pem()?)?, root_ca: String::from_utf8(value.root_ca_cert.to_pem()?)?,
}) })

View File

@@ -31,7 +31,12 @@ lazy_static::lazy_static! {
pub fn db<C: Context>() -> ParentHandler<C> { pub fn db<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("dump", from_fn_async(cli_dump).with_display_serializable()) .subcommand(
"dump",
from_fn_async(cli_dump)
.with_display_serializable()
.with_about("Filter/query db to display tables and records"),
)
.subcommand("dump", from_fn_async(dump).no_cli()) .subcommand("dump", from_fn_async(dump).no_cli())
.subcommand( .subcommand(
"subscribe", "subscribe",
@@ -39,8 +44,16 @@ pub fn db<C: Context>() -> ParentHandler<C> {
.with_metadata("get_session", Value::Bool(true)) .with_metadata("get_session", Value::Bool(true))
.no_cli(), .no_cli(),
) )
.subcommand("put", put::<C>()) .subcommand(
.subcommand("apply", from_fn_async(cli_apply).no_display()) "put",
put::<C>().with_about("Command for adding UI record to db"),
)
.subcommand(
"apply",
from_fn_async(cli_apply)
.no_display()
.with_about("Update a db record"),
)
.subcommand("apply", from_fn_async(apply).no_cli()) .subcommand("apply", from_fn_async(apply).no_cli())
} }
@@ -115,7 +128,7 @@ pub struct SubscribeParams {
pointer: Option<JsonPointer>, pointer: Option<JsonPointer>,
#[ts(skip)] #[ts(skip)]
#[serde(rename = "__auth_session")] #[serde(rename = "__auth_session")]
session: InternedString, session: Option<InternedString>,
} }
#[derive(Deserialize, Serialize, TS)] #[derive(Deserialize, Serialize, TS)]
@@ -215,6 +228,8 @@ pub async fn subscribe(
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")] #[command(rename_all = "kebab-case")]
pub struct CliApplyParams { pub struct CliApplyParams {
#[arg(long)]
allow_model_mismatch: bool,
expr: String, expr: String,
path: Option<PathBuf>, path: Option<PathBuf>,
} }
@@ -225,7 +240,12 @@ async fn cli_apply(
context, context,
parent_method, parent_method,
method, method,
params: CliApplyParams { expr, path }, params:
CliApplyParams {
allow_model_mismatch,
expr,
path,
},
.. ..
}: HandlerArgs<CliContext, CliApplyParams>, }: HandlerArgs<CliContext, CliApplyParams>,
) -> Result<(), RpcError> { ) -> Result<(), RpcError> {
@@ -240,7 +260,14 @@ async fn cli_apply(
&expr, &expr,
)?; )?;
Ok::<_, Error>(( let value = if allow_model_mismatch {
serde_json::from_value::<Value>(res.clone().into()).with_ctx(|_| {
(
crate::ErrorKind::Deserialization,
"result does not match database model",
)
})?
} else {
to_value( to_value(
&serde_json::from_value::<model::Database>(res.clone().into()).with_ctx( &serde_json::from_value::<model::Database>(res.clone().into()).with_ctx(
|_| { |_| {
@@ -250,9 +277,9 @@ async fn cli_apply(
) )
}, },
)?, )?,
)?, )?
(), };
)) Ok::<_, Error>((value, ()))
}) })
.await?; .await?;
} else { } else {
@@ -299,6 +326,7 @@ pub fn put<C: Context>() -> ParentHandler<C> {
"ui", "ui",
from_fn_async(ui) from_fn_async(ui)
.with_display_serializable() .with_display_serializable()
.with_about("Add path and value to db")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }

View File

@@ -4,7 +4,8 @@ use chrono::{DateTime, Utc};
use exver::VersionRange; use exver::VersionRange;
use imbl_value::InternedString; use imbl_value::InternedString;
use models::{ use models::{
ActionId, DataUrl, HealthCheckId, HostId, PackageId, ServiceInterfaceId, VersionString, ActionId, DataUrl, HealthCheckId, HostId, PackageId, ReplayId, ServiceInterfaceId,
VersionString,
}; };
use patch_db::json_ptr::JsonPointer; use patch_db::json_ptr::JsonPointer;
use patch_db::HasModel; use patch_db::HasModel;
@@ -17,8 +18,8 @@ use crate::net::service_interface::ServiceInterface;
use crate::prelude::*; use crate::prelude::*;
use crate::progress::FullProgress; use crate::progress::FullProgress;
use crate::s9pk::manifest::Manifest; use crate::s9pk::manifest::Manifest;
use crate::status::Status; use crate::status::MainStatus;
use crate::util::serde::Pem; use crate::util::serde::{is_partial_of, Pem};
#[derive(Debug, Default, Deserialize, Serialize, TS)] #[derive(Debug, Default, Deserialize, Serialize, TS)]
#[ts(export)] #[ts(export)]
@@ -310,9 +311,9 @@ pub struct InstallingInfo {
} }
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] #[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)]
#[ts(export)] #[ts(export)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "kebab-case")]
pub enum AllowedStatuses { pub enum AllowedStatuses {
OnlyRunning, // onlyRunning OnlyRunning,
OnlyStopped, OnlyStopped,
Any, Any,
} }
@@ -324,13 +325,28 @@ pub struct ActionMetadata {
pub name: String, pub name: String,
pub description: String, pub description: String,
pub warning: Option<String>, pub warning: Option<String>,
#[ts(type = "any")] #[serde(default)]
pub input: Value, pub visibility: ActionVisibility,
pub disabled: bool,
pub allowed_statuses: AllowedStatuses, pub allowed_statuses: AllowedStatuses,
pub has_input: bool,
pub group: Option<String>, pub group: Option<String>,
} }
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)]
#[ts(export)]
#[serde(rename_all = "kebab-case")]
#[serde(rename_all_fields = "camelCase")]
pub enum ActionVisibility {
Hidden,
Disabled(String),
Enabled,
}
impl Default for ActionVisibility {
fn default() -> Self {
Self::Enabled
}
}
#[derive(Debug, Deserialize, Serialize, HasModel, TS)] #[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[model = "Model<Self>"] #[model = "Model<Self>"]
@@ -338,7 +354,7 @@ pub struct ActionMetadata {
pub struct PackageDataEntry { pub struct PackageDataEntry {
pub state_info: PackageState, pub state_info: PackageState,
pub data_version: Option<VersionString>, pub data_version: Option<VersionString>,
pub status: Status, pub status: MainStatus,
#[ts(type = "string | null")] #[ts(type = "string | null")]
pub registry: Option<Url>, pub registry: Option<Url>,
#[ts(type = "string")] #[ts(type = "string")]
@@ -348,6 +364,8 @@ pub struct PackageDataEntry {
pub last_backup: Option<DateTime<Utc>>, pub last_backup: Option<DateTime<Utc>>,
pub current_dependencies: CurrentDependencies, pub current_dependencies: CurrentDependencies,
pub actions: BTreeMap<ActionId, ActionMetadata>, pub actions: BTreeMap<ActionId, ActionMetadata>,
#[ts(as = "BTreeMap::<String, ActionRequestEntry>")]
pub requested_actions: BTreeMap<ReplayId, ActionRequestEntry>,
pub service_interfaces: BTreeMap<ServiceInterfaceId, ServiceInterface>, pub service_interfaces: BTreeMap<ServiceInterfaceId, ServiceInterface>,
pub hosts: Hosts, pub hosts: Hosts,
#[ts(type = "string[]")] #[ts(type = "string[]")]
@@ -384,8 +402,9 @@ impl Map for CurrentDependencies {
} }
} }
#[derive(Clone, Debug, Deserialize, Serialize, TS)] #[derive(Clone, Debug, Deserialize, Serialize, TS, HasModel)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
pub struct CurrentDependencyInfo { pub struct CurrentDependencyInfo {
#[ts(type = "string | null")] #[ts(type = "string | null")]
pub title: Option<InternedString>, pub title: Option<InternedString>,
@@ -394,11 +413,10 @@ pub struct CurrentDependencyInfo {
pub kind: CurrentDependencyKind, pub kind: CurrentDependencyKind,
#[ts(type = "string")] #[ts(type = "string")]
pub version_range: VersionRange, pub version_range: VersionRange,
pub config_satisfied: bool,
} }
#[derive(Clone, Debug, Deserialize, Serialize, TS)] #[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "kebab-case")]
#[serde(tag = "kind")] #[serde(tag = "kind")]
pub enum CurrentDependencyKind { pub enum CurrentDependencyKind {
Exists, Exists,
@@ -410,6 +428,81 @@ pub enum CurrentDependencyKind {
}, },
} }
#[derive(Clone, Debug, Deserialize, Serialize, TS, HasModel)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
#[model = "Model<Self>"]
pub struct ActionRequestEntry {
pub request: ActionRequest,
pub active: bool,
}
#[derive(Clone, Debug, Deserialize, Serialize, TS, HasModel)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
#[model = "Model<Self>"]
pub struct ActionRequest {
pub package_id: PackageId,
pub action_id: ActionId,
#[serde(default)]
pub severity: ActionSeverity,
#[ts(optional)]
pub reason: Option<String>,
#[ts(optional)]
pub when: Option<ActionRequestTrigger>,
#[ts(optional)]
pub input: Option<ActionRequestInput>,
}
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "kebab-case")]
#[ts(export)]
pub enum ActionSeverity {
Critical,
Important,
}
impl Default for ActionSeverity {
fn default() -> Self {
ActionSeverity::Important
}
}
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ActionRequestTrigger {
#[serde(default)]
pub once: bool,
pub condition: ActionRequestCondition,
}
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "kebab-case")]
#[ts(export)]
pub enum ActionRequestCondition {
InputNotMatches,
}
#[derive(Clone, Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "kebab-case")]
#[serde(tag = "kind")]
pub enum ActionRequestInput {
Partial {
#[ts(type = "Record<string, unknown>")]
value: Value,
},
}
impl ActionRequestInput {
pub fn matches(&self, input: Option<&Value>) -> bool {
match self {
Self::Partial { value } => match input {
None => false,
Some(full) => is_partial_of(value, full),
},
}
}
}
#[derive(Debug, Default, Deserialize, Serialize)] #[derive(Debug, Default, Deserialize, Serialize)]
pub struct InterfaceAddressMap(pub BTreeMap<HostId, InterfaceAddresses>); pub struct InterfaceAddressMap(pub BTreeMap<HostId, InterfaceAddresses>);
impl Map for InterfaceAddressMap { impl Map for InterfaceAddressMap {

View File

@@ -31,6 +31,6 @@ pub struct Private {
pub package_stores: BTreeMap<PackageId, Value>, pub package_stores: BTreeMap<PackageId, Value>,
} }
fn generate_compat_key() -> Pem<ed25519_dalek::SigningKey> { pub fn generate_compat_key() -> Pem<ed25519_dalek::SigningKey> {
Pem(ed25519_dalek::SigningKey::generate(&mut rand::thread_rng())) Pem(ed25519_dalek::SigningKey::generate(&mut rand::thread_rng()))
} }

View File

@@ -22,6 +22,7 @@ use crate::prelude::*;
use crate::progress::FullProgress; use crate::progress::FullProgress;
use crate::system::SmtpValue; use crate::system::SmtpValue;
use crate::util::cpupower::Governor; use crate::util::cpupower::Governor;
use crate::util::lshw::LshwDevice;
use crate::version::{Current, VersionT}; use crate::version::{Current, VersionT};
use crate::{ARCH, PLATFORM}; use crate::{ARCH, PLATFORM};
@@ -43,16 +44,18 @@ impl Public {
arch: get_arch(), arch: get_arch(),
platform: get_platform(), platform: get_platform(),
id: account.server_id.clone(), id: account.server_id.clone(),
version: Current::new().semver(), version: Current::default().semver(),
hostname: account.hostname.no_dot_host_name(), hostname: account.hostname.no_dot_host_name(),
last_backup: None, last_backup: None,
eos_version_compat: Current::new().compat().clone(), package_version_compat: Current::default().compat().clone(),
post_init_migration_todos: BTreeSet::new(),
lan_address, lan_address,
onion_address: account.tor_key.public().get_onion_address(), onion_address: account.tor_key.public().get_onion_address(),
tor_address: format!("https://{}", account.tor_key.public().get_onion_address()) tor_address: format!("https://{}", account.tor_key.public().get_onion_address())
.parse() .parse()
.unwrap(), .unwrap(),
ip_info: BTreeMap::new(), ip_info: BTreeMap::new(),
acme: None,
status_info: ServerStatus { status_info: ServerStatus {
backup_progress: None, backup_progress: None,
updated: false, updated: false,
@@ -77,6 +80,8 @@ impl Public {
zram: true, zram: true,
governor: None, governor: None,
smtp: None, smtp: None,
ram: 0,
devices: Vec::new(),
}, },
package_data: AllPackageData::default(), package_data: AllPackageData::default(),
ui: serde_json::from_str(include_str!(concat!( ui: serde_json::from_str(include_str!(concat!(
@@ -112,11 +117,13 @@ pub struct ServerInfo {
pub hostname: InternedString, pub hostname: InternedString,
#[ts(type = "string")] #[ts(type = "string")]
pub version: Version, pub version: Version,
#[ts(type = "string")]
pub package_version_compat: VersionRange,
#[ts(type = "string[]")]
pub post_init_migration_todos: BTreeSet<Version>,
#[ts(type = "string | null")] #[ts(type = "string | null")]
pub last_backup: Option<DateTime<Utc>>, pub last_backup: Option<DateTime<Utc>>,
#[ts(type = "string")] #[ts(type = "string")]
pub eos_version_compat: VersionRange,
#[ts(type = "string")]
pub lan_address: Url, pub lan_address: Url,
#[ts(type = "string")] #[ts(type = "string")]
pub onion_address: OnionAddressV3, pub onion_address: OnionAddressV3,
@@ -124,6 +131,7 @@ pub struct ServerInfo {
#[ts(type = "string")] #[ts(type = "string")]
pub tor_address: Url, pub tor_address: Url,
pub ip_info: BTreeMap<String, IpInfo>, pub ip_info: BTreeMap<String, IpInfo>,
pub acme: Option<AcmeSettings>,
#[serde(default)] #[serde(default)]
pub status_info: ServerStatus, pub status_info: ServerStatus,
pub wifi: WifiInfo, pub wifi: WifiInfo,
@@ -138,6 +146,9 @@ pub struct ServerInfo {
pub zram: bool, pub zram: bool,
pub governor: Option<Governor>, pub governor: Option<Governor>,
pub smtp: Option<SmtpValue>, pub smtp: Option<SmtpValue>,
#[ts(type = "number")]
pub ram: u64,
pub devices: Vec<LshwDevice>,
} }
#[derive(Debug, Deserialize, Serialize, HasModel, TS)] #[derive(Debug, Deserialize, Serialize, HasModel, TS)]
@@ -165,6 +176,20 @@ impl IpInfo {
} }
} }
#[derive(Debug, Deserialize, Serialize, HasModel, TS)]
#[serde(rename_all = "camelCase")]
#[model = "Model<Self>"]
#[ts(export)]
pub struct AcmeSettings {
#[ts(type = "string")]
pub provider: Url,
/// email addresses for letsencrypt
pub contact: Vec<String>,
#[ts(type = "string[]")]
/// domains to get letsencrypt certs for
pub domains: BTreeSet<InternedString>,
}
#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] #[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[model = "Model<Self>"] #[model = "Model<Self>"]
#[ts(export)] #[ts(export)]

View File

@@ -1,28 +1,14 @@
use std::collections::BTreeMap; use std::collections::BTreeMap;
use std::time::Duration;
use clap::Parser;
use imbl_value::InternedString; use imbl_value::InternedString;
use models::PackageId; use models::PackageId;
use patch_db::json_patch::merge;
use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
use crate::config::{Config, ConfigSpec, ConfigureContext};
use crate::context::{CliContext, RpcContext};
use crate::db::model::package::CurrentDependencies;
use crate::prelude::*; use crate::prelude::*;
use crate::rpc_continuations::Guid;
use crate::util::serde::HandlerExtSerde;
use crate::util::PathOrUrl; use crate::util::PathOrUrl;
use crate::Error; use crate::Error;
pub fn dependency<C: Context>() -> ParentHandler<C> {
ParentHandler::new().subcommand("configure", configure::<C>())
}
#[derive(Clone, Debug, Default, Deserialize, Serialize, HasModel, TS)] #[derive(Clone, Debug, Default, Deserialize, Serialize, HasModel, TS)]
#[model = "Model<Self>"] #[model = "Model<Self>"]
#[ts(export)] #[ts(export)]
@@ -56,129 +42,3 @@ pub struct DependencyMetadata {
#[ts(type = "string")] #[ts(type = "string")]
pub title: InternedString, pub title: InternedString,
} }
#[derive(Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")]
pub struct ConfigureParams {
dependent_id: PackageId,
dependency_id: PackageId,
}
pub fn configure<C: Context>() -> ParentHandler<C, ConfigureParams> {
ParentHandler::new()
.root_handler(
from_fn_async(configure_impl)
.with_inherited(|params, _| params)
.no_display()
.with_call_remote::<CliContext>(),
)
.subcommand(
"dry",
from_fn_async(configure_dry)
.with_inherited(|params, _| params)
.with_display_serializable()
.with_call_remote::<CliContext>(),
)
}
pub async fn configure_impl(
ctx: RpcContext,
_: Empty,
ConfigureParams {
dependent_id,
dependency_id,
}: ConfigureParams,
) -> Result<(), Error> {
let ConfigDryRes {
old_config: _,
new_config,
spec: _,
} = configure_logic(ctx.clone(), (dependent_id, dependency_id.clone())).await?;
let configure_context = ConfigureContext {
timeout: Some(Duration::from_secs(3).into()),
config: Some(new_config),
};
ctx.services
.get(&dependency_id)
.await
.as_ref()
.ok_or_else(|| {
Error::new(
eyre!("There is no manager running for {dependency_id}"),
ErrorKind::Unknown,
)
})?
.configure(Guid::new(), configure_context)
.await?;
Ok(())
}
pub async fn configure_dry(
ctx: RpcContext,
_: Empty,
ConfigureParams {
dependent_id,
dependency_id,
}: ConfigureParams,
) -> Result<ConfigDryRes, Error> {
configure_logic(ctx.clone(), (dependent_id, dependency_id.clone())).await
}
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ConfigDryRes {
pub old_config: Config,
pub new_config: Config,
pub spec: ConfigSpec,
}
pub async fn configure_logic(
ctx: RpcContext,
(dependent_id, dependency_id): (PackageId, PackageId),
) -> Result<ConfigDryRes, Error> {
let procedure_id = Guid::new();
let dependency_guard = ctx.services.get(&dependency_id).await;
let dependency = dependency_guard.as_ref().or_not_found(&dependency_id)?;
let dependent_guard = ctx.services.get(&dependent_id).await;
let dependent = dependent_guard.as_ref().or_not_found(&dependent_id)?;
let config_res = dependency.get_config(procedure_id.clone()).await?;
let diff = Value::Object(
dependent
.dependency_config(procedure_id, dependency_id, config_res.config.clone())
.await?
.unwrap_or_default(),
);
let mut new_config = Value::Object(config_res.config.clone().unwrap_or_default());
merge(&mut new_config, &diff);
Ok(ConfigDryRes {
old_config: config_res.config.unwrap_or_default(),
new_config: new_config.as_object().cloned().unwrap_or_default(),
spec: config_res.spec,
})
}
#[instrument(skip_all)]
pub async fn compute_dependency_config_errs(
ctx: &RpcContext,
id: &PackageId,
current_dependencies: &mut CurrentDependencies,
) -> Result<(), Error> {
let procedure_id = Guid::new();
let service_guard = ctx.services.get(id).await;
let service = service_guard.as_ref().or_not_found(id)?;
for (dep_id, dep_info) in current_dependencies.0.iter_mut() {
// check if config passes dependency check
let Some(dependency) = &*ctx.services.get(dep_id).await else {
continue;
};
let dep_config = dependency.get_config(procedure_id.clone()).await?.config;
dep_info.config_satisfied = service
.dependency_config(procedure_id.clone(), dep_id.clone(), dep_config)
.await?
.is_none();
}
Ok(())
}

View File

@@ -9,35 +9,53 @@ use rpc_toolkit::{
use crate::context::{CliContext, DiagnosticContext, RpcContext}; use crate::context::{CliContext, DiagnosticContext, RpcContext};
use crate::init::SYSTEM_REBUILD_PATH; use crate::init::SYSTEM_REBUILD_PATH;
use crate::shutdown::Shutdown; use crate::shutdown::Shutdown;
use crate::util::io::delete_file;
use crate::Error; use crate::Error;
pub fn diagnostic<C: Context>() -> ParentHandler<C> { pub fn diagnostic<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("error", from_fn(error).with_call_remote::<CliContext>()) .subcommand(
.subcommand("logs", crate::system::logs::<DiagnosticContext>()) "error",
from_fn(error)
.with_about("Display diagnostic error")
.with_call_remote::<CliContext>(),
)
.subcommand( .subcommand(
"logs", "logs",
from_fn_async(crate::logs::cli_logs::<DiagnosticContext, Empty>).no_display(), crate::system::logs::<DiagnosticContext>().with_about("Display OS logs"),
)
.subcommand(
"logs",
from_fn_async(crate::logs::cli_logs::<DiagnosticContext, Empty>)
.no_display()
.with_about("Display OS logs"),
) )
.subcommand( .subcommand(
"kernel-logs", "kernel-logs",
crate::system::kernel_logs::<DiagnosticContext>(), crate::system::kernel_logs::<DiagnosticContext>().with_about("Display kernel logs"),
) )
.subcommand( .subcommand(
"kernel-logs", "kernel-logs",
from_fn_async(crate::logs::cli_logs::<DiagnosticContext, Empty>).no_display(), from_fn_async(crate::logs::cli_logs::<DiagnosticContext, Empty>)
.no_display()
.with_about("Display kernal logs"),
) )
.subcommand( .subcommand(
"restart", "restart",
from_fn(restart) from_fn(restart)
.no_display() .no_display()
.with_about("Restart the server")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("disk", disk::<C>()) .subcommand(
"disk",
disk::<C>().with_about("Command to remove disk from filesystem"),
)
.subcommand( .subcommand(
"rebuild", "rebuild",
from_fn_async(rebuild) from_fn_async(rebuild)
.no_display() .no_display()
.with_about("Teardown and rebuild service containers")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
@@ -72,14 +90,13 @@ pub fn disk<C: Context>() -> ParentHandler<C> {
CallRemoteHandler::<CliContext, _, _>::new( CallRemoteHandler::<CliContext, _, _>::new(
from_fn_async(forget_disk::<RpcContext>).no_display(), from_fn_async(forget_disk::<RpcContext>).no_display(),
) )
.no_display(), .no_display()
.with_about("Remove disk from filesystem"),
) )
} }
pub async fn forget_disk<C: Context>(_: C) -> Result<(), Error> { pub async fn forget_disk<C: Context>(_: C) -> Result<(), Error> {
let disk_guid = Path::new("/media/startos/config/disk.guid"); delete_file("/media/startos/config/overlay/etc/hostname").await?;
if tokio::fs::metadata(disk_guid).await.is_ok() { delete_file("/media/startos/config/disk.guid").await?;
tokio::fs::remove_file(disk_guid).await?;
}
Ok(()) Ok(())
} }

View File

@@ -51,13 +51,16 @@ pub fn disk<C: Context>() -> ParentHandler<C> {
.with_custom_display_fn(|handle, result| { .with_custom_display_fn(|handle, result| {
Ok(display_disk_info(handle.params, result)) Ok(display_disk_info(handle.params, result))
}) })
.with_about("List disk info")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("repair", from_fn_async(|_: C| repair()).no_cli()) .subcommand("repair", from_fn_async(|_: C| repair()).no_cli())
.subcommand( .subcommand(
"repair", "repair",
CallRemoteHandler::<CliContext, _, _>::new( CallRemoteHandler::<CliContext, _, _>::new(
from_fn_async(|_: RpcContext| repair()).no_display(), from_fn_async(|_: RpcContext| repair())
.no_display()
.with_about("Repair disk in the event of corruption"),
), ),
) )
} }

View File

@@ -219,10 +219,10 @@ impl<G: GenericMountGuard> Drop for BackupMountGuard<G> {
let second = self.backup_disk_mount_guard.take(); let second = self.backup_disk_mount_guard.take();
tokio::spawn(async move { tokio::spawn(async move {
if let Some(guard) = first { if let Some(guard) = first {
guard.unmount().await.unwrap(); guard.unmount().await.log_err();
} }
if let Some(guard) = second { if let Some(guard) = second {
guard.unmount().await.unwrap(); guard.unmount().await.log_err();
} }
}); });
} }

View File

@@ -151,12 +151,12 @@ impl<G: GenericMountGuard> Drop for OverlayGuard<G> {
let guard = self.inner_guard.take(); let guard = self.inner_guard.take();
if lower.is_some() || upper.is_some() || guard.mounted { if lower.is_some() || upper.is_some() || guard.mounted {
tokio::spawn(async move { tokio::spawn(async move {
guard.unmount(false).await.unwrap(); guard.unmount(false).await.log_err();
if let Some(lower) = lower { if let Some(lower) = lower {
lower.unmount().await.unwrap(); lower.unmount().await.log_err();
} }
if let Some(upper) = upper { if let Some(upper) = upper {
upper.delete().await.unwrap(); upper.delete().await.log_err();
} }
}); });
} }

View File

@@ -96,7 +96,7 @@ impl Drop for MountGuard {
fn drop(&mut self) { fn drop(&mut self) {
if self.mounted { if self.mounted {
let mountpoint = std::mem::take(&mut self.mountpoint); let mountpoint = std::mem::take(&mut self.mountpoint);
tokio::spawn(async move { unmount(mountpoint, true).await.unwrap() }); tokio::spawn(async move { unmount(mountpoint, true).await.log_err() });
} }
} }
} }

View File

@@ -5,6 +5,16 @@ use tracing::instrument;
use crate::util::Invoke; use crate::util::Invoke;
use crate::Error; use crate::Error;
pub async fn is_mountpoint(path: impl AsRef<Path>) -> Result<bool, Error> {
let is_mountpoint = tokio::process::Command::new("mountpoint")
.arg(path.as_ref())
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.status()
.await?;
Ok(is_mountpoint.success())
}
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn bind<P0: AsRef<Path>, P1: AsRef<Path>>( pub async fn bind<P0: AsRef<Path>, P1: AsRef<Path>>(
src: P0, src: P0,
@@ -16,13 +26,7 @@ pub async fn bind<P0: AsRef<Path>, P1: AsRef<Path>>(
src.as_ref().display(), src.as_ref().display(),
dst.as_ref().display() dst.as_ref().display()
); );
let is_mountpoint = tokio::process::Command::new("mountpoint") if is_mountpoint(&dst).await? {
.arg(dst.as_ref())
.stdout(std::process::Stdio::null())
.stderr(std::process::Stdio::null())
.status()
.await?;
if is_mountpoint.success() {
unmount(dst.as_ref(), true).await?; unmount(dst.as_ref(), true).await?;
} }
tokio::fs::create_dir_all(&src).await?; tokio::fs::create_dir_all(&src).await?;

View File

@@ -32,7 +32,9 @@ use crate::progress::{
use crate::rpc_continuations::{Guid, RpcContinuation}; use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::s9pk::v2::pack::{CONTAINER_DATADIR, CONTAINER_TOOL}; use crate::s9pk::v2::pack::{CONTAINER_DATADIR, CONTAINER_TOOL};
use crate::ssh::SSH_AUTHORIZED_KEYS_FILE; use crate::ssh::SSH_AUTHORIZED_KEYS_FILE;
use crate::system::get_mem_info;
use crate::util::io::{create_file, IOHook}; use crate::util::io::{create_file, IOHook};
use crate::util::lshw::lshw;
use crate::util::net::WebSocketExt; use crate::util::net::WebSocketExt;
use crate::util::{cpupower, Invoke}; use crate::util::{cpupower, Invoke};
use crate::Error; use crate::Error;
@@ -323,7 +325,9 @@ pub async fn init(
local_auth.complete(); local_auth.complete();
load_database.start(); load_database.start();
let db = TypedPatchDb::<Database>::load_unchecked(cfg.db().await?); let db = cfg.db().await?;
crate::version::Current::default().pre_init(&db).await?;
let db = TypedPatchDb::<Database>::load_unchecked(db);
let peek = db.peek().await; let peek = db.peek().await;
load_database.complete(); load_database.complete();
tracing::info!("Opened PatchDB"); tracing::info!("Opened PatchDB");
@@ -506,6 +510,8 @@ pub async fn init(
update_server_info.start(); update_server_info.start();
server_info.ip_info = crate::net::dhcp::init_ips().await?; server_info.ip_info = crate::net::dhcp::init_ips().await?;
server_info.ram = get_mem_info().await?.total.0 as u64 * 1024 * 1024;
server_info.devices = lshw().await?;
server_info.status_info = ServerStatus { server_info.status_info = ServerStatus {
updated: false, updated: false,
update_progress: None, update_progress: None,
@@ -528,8 +534,6 @@ pub async fn init(
.await?; .await?;
launch_service_network.complete(); launch_service_network.complete();
crate::version::init(&db, run_migrations).await?;
validate_db.start(); validate_db.start();
db.mutate(|d| { db.mutate(|d| {
let model = d.de()?; let model = d.de()?;
@@ -549,18 +553,33 @@ pub async fn init(
pub fn init_api<C: Context>() -> ParentHandler<C> { pub fn init_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("logs", crate::system::logs::<InitContext>())
.subcommand( .subcommand(
"logs", "logs",
from_fn_async(crate::logs::cli_logs::<InitContext, Empty>).no_display(), crate::system::logs::<InitContext>().with_about("Disply OS logs"),
)
.subcommand(
"logs",
from_fn_async(crate::logs::cli_logs::<InitContext, Empty>)
.no_display()
.with_about("Display OS logs"),
) )
.subcommand("kernel-logs", crate::system::kernel_logs::<InitContext>())
.subcommand( .subcommand(
"kernel-logs", "kernel-logs",
from_fn_async(crate::logs::cli_logs::<InitContext, Empty>).no_display(), crate::system::kernel_logs::<InitContext>().with_about("Display kernel logs"),
)
.subcommand(
"kernel-logs",
from_fn_async(crate::logs::cli_logs::<InitContext, Empty>)
.no_display()
.with_about("Display kernel logs"),
) )
.subcommand("subscribe", from_fn_async(init_progress).no_cli()) .subcommand("subscribe", from_fn_async(init_progress).no_cli())
.subcommand("subscribe", from_fn_async(cli_init_progress).no_display()) .subcommand(
"subscribe",
from_fn_async(cli_init_progress)
.no_display()
.with_about("Get initialization progress"),
)
} }
#[derive(Debug, Deserialize, Serialize, TS)] #[derive(Debug, Deserialize, Serialize, TS)]

View File

@@ -9,7 +9,7 @@ use exver::VersionRange;
use futures::{AsyncWriteExt, StreamExt}; use futures::{AsyncWriteExt, StreamExt};
use imbl_value::{json, InternedString}; use imbl_value::{json, InternedString};
use itertools::Itertools; use itertools::Itertools;
use models::VersionString; use models::{FromStrParser, VersionString};
use reqwest::header::{HeaderMap, CONTENT_LENGTH}; use reqwest::header::{HeaderMap, CONTENT_LENGTH};
use reqwest::Url; use reqwest::Url;
use rpc_toolkit::yajrc::{GenericRpcMethod, RpcError}; use rpc_toolkit::yajrc::{GenericRpcMethod, RpcError};
@@ -17,6 +17,7 @@ use rpc_toolkit::HandlerArgs;
use rustyline_async::ReadlineEvent; use rustyline_async::ReadlineEvent;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::sync::oneshot; use tokio::sync::oneshot;
use tokio_tungstenite::tungstenite::protocol::frame::coding::CloseCode;
use tracing::instrument; use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
@@ -29,7 +30,6 @@ use crate::registry::package::get::GetPackageResponse;
use crate::rpc_continuations::{Guid, RpcContinuation}; use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::s9pk::manifest::PackageId; use crate::s9pk::manifest::PackageId;
use crate::upload::upload; use crate::upload::upload;
use crate::util::clap::FromStrParser;
use crate::util::io::open_file; use crate::util::io::open_file;
use crate::util::net::WebSocketExt; use crate::util::net::WebSocketExt;
use crate::util::Never; use crate::util::Never;
@@ -172,7 +172,7 @@ pub async fn install(
pub struct SideloadParams { pub struct SideloadParams {
#[ts(skip)] #[ts(skip)]
#[serde(rename = "__auth_session")] #[serde(rename = "__auth_session")]
session: InternedString, session: Option<InternedString>,
} }
#[derive(Deserialize, Serialize, TS)] #[derive(Deserialize, Serialize, TS)]
@@ -188,7 +188,7 @@ pub async fn sideload(
SideloadParams { session }: SideloadParams, SideloadParams { session }: SideloadParams,
) -> Result<SideloadResponse, Error> { ) -> Result<SideloadResponse, Error> {
let (upload, file) = upload(&ctx, session.clone()).await?; let (upload, file) = upload(&ctx, session.clone()).await?;
let (err_send, err_recv) = oneshot::channel(); let (err_send, err_recv) = oneshot::channel::<Error>();
let progress = Guid::new(); let progress = Guid::new();
let progress_tracker = FullProgressTracker::new(); let progress_tracker = FullProgressTracker::new();
let mut progress_listener = progress_tracker.stream(Some(Duration::from_millis(200))); let mut progress_listener = progress_tracker.stream(Some(Duration::from_millis(200)));
@@ -202,12 +202,14 @@ pub async fn sideload(
use axum::extract::ws::Message; use axum::extract::ws::Message;
async move { async move {
if let Err(e) = async { if let Err(e) = async {
type RpcResponse = rpc_toolkit::yajrc::RpcResponse::<GenericRpcMethod<&'static str, (), FullProgress>>; type RpcResponse = rpc_toolkit::yajrc::RpcResponse<
GenericRpcMethod<&'static str, (), FullProgress>,
>;
tokio::select! { tokio::select! {
res = async { res = async {
while let Some(progress) = progress_listener.next().await { while let Some(progress) = progress_listener.next().await {
ws.send(Message::Text( ws.send(Message::Text(
serde_json::to_string(&RpcResponse::from_result::<RpcError>(Ok(progress))) serde_json::to_string(&progress)
.with_kind(ErrorKind::Serialization)?, .with_kind(ErrorKind::Serialization)?,
)) ))
.await .await
@@ -217,12 +219,8 @@ pub async fn sideload(
} => res?, } => res?,
err = err_recv => { err = err_recv => {
if let Ok(e) = err { if let Ok(e) = err {
ws.send(Message::Text( ws.close_result(Err::<&str, _>(e.clone_output())).await?;
serde_json::to_string(&RpcResponse::from_result::<RpcError>(Err(e))) return Err(e)
.with_kind(ErrorKind::Serialization)?,
))
.await
.with_kind(ErrorKind::Network)?;
} }
} }
} }
@@ -260,7 +258,7 @@ pub async fn sideload(
} }
.await .await
{ {
let _ = err_send.send(RpcError::from(e.clone_output())); let _ = err_send.send(e.clone_output());
tracing::error!("Error sideloading package: {e}"); tracing::error!("Error sideloading package: {e}");
tracing::debug!("{e:?}"); tracing::debug!("{e:?}");
} }
@@ -407,19 +405,21 @@ pub async fn cli_install(
let mut progress = FullProgress::new(); let mut progress = FullProgress::new();
type RpcResponse = rpc_toolkit::yajrc::RpcResponse<
GenericRpcMethod<&'static str, (), FullProgress>,
>;
loop { loop {
tokio::select! { tokio::select! {
msg = ws.next() => { msg = ws.next() => {
if let Some(msg) = msg { if let Some(msg) = msg {
if let Message::Text(t) = msg.with_kind(ErrorKind::Network)? { match msg.with_kind(ErrorKind::Network)? {
progress = Message::Text(t) => {
serde_json::from_str::<RpcResponse>(&t) progress =
.with_kind(ErrorKind::Deserialization)?.result?; serde_json::from_str::<FullProgress>(&t)
bar.update(&progress); .with_kind(ErrorKind::Deserialization)?;
bar.update(&progress);
}
Message::Close(Some(c)) if c.code != CloseCode::Normal => {
return Err(Error::new(eyre!("{}", c.reason), ErrorKind::Network))
}
_ => (),
} }
} else { } else {
break; break;

View File

@@ -29,7 +29,6 @@ pub mod action;
pub mod auth; pub mod auth;
pub mod backup; pub mod backup;
pub mod bins; pub mod bins;
pub mod config;
pub mod context; pub mod context;
pub mod control; pub mod control;
pub mod db; pub mod db;
@@ -50,7 +49,6 @@ pub mod notifications;
pub mod os_install; pub mod os_install;
pub mod prelude; pub mod prelude;
pub mod progress; pub mod progress;
pub mod properties;
pub mod registry; pub mod registry;
pub mod rpc_continuations; pub mod rpc_continuations;
pub mod s9pk; pub mod s9pk;
@@ -70,7 +68,6 @@ pub mod volume;
use std::time::SystemTime; use std::time::SystemTime;
use clap::Parser; use clap::Parser;
pub use config::Config;
pub use error::{Error, ErrorKind, ResultExt}; pub use error::{Error, ErrorKind, ResultExt};
use imbl_value::Value; use imbl_value::Value;
use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::yajrc::RpcError;
@@ -116,29 +113,70 @@ impl std::fmt::Display for ApiState {
pub fn main_api<C: Context>() -> ParentHandler<C> { pub fn main_api<C: Context>() -> ParentHandler<C> {
let api = ParentHandler::new() let api = ParentHandler::new()
.subcommand::<C, _>("git-info", from_fn(version::git_info)) .subcommand(
"git-info",
from_fn(|_: C| version::git_info()).with_about("Display the githash of StartOS CLI"),
)
.subcommand( .subcommand(
"echo", "echo",
from_fn(echo::<RpcContext>) from_fn(echo::<RpcContext>)
.with_metadata("authenticated", Value::Bool(false)) .with_metadata("authenticated", Value::Bool(false))
.with_about("Echo a message")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"state", "state",
from_fn(|_: RpcContext| Ok::<_, Error>(ApiState::Running)) from_fn(|_: RpcContext| Ok::<_, Error>(ApiState::Running))
.with_metadata("authenticated", Value::Bool(false)) .with_metadata("authenticated", Value::Bool(false))
.with_about("Display the API that is currently serving")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("server", server::<C>()) .subcommand(
.subcommand("package", package::<C>()) "server",
.subcommand("net", net::net::<C>()) server::<C>()
.subcommand("auth", auth::auth::<C>()) .with_about("Commands related to the server i.e. restart, update, and shutdown"),
.subcommand("db", db::db::<C>()) )
.subcommand("ssh", ssh::ssh::<C>()) .subcommand(
.subcommand("wifi", net::wifi::wifi::<C>()) "package",
.subcommand("disk", disk::disk::<C>()) package::<C>().with_about("Commands related to packages"),
.subcommand("notification", notifications::notification::<C>()) )
.subcommand("backup", backup::backup::<C>()) .subcommand(
"net",
net::net::<C>().with_about("Network commands related to tor and dhcp"),
)
.subcommand(
"auth",
auth::auth::<C>().with_about(
"Commands related to Authentication i.e. login, logout, reset-password",
),
)
.subcommand(
"db",
db::db::<C>().with_about("Commands to interact with the db i.e. dump, put, apply"),
)
.subcommand(
"ssh",
ssh::ssh::<C>()
.with_about("Commands for interacting with ssh keys i.e. add, delete, list"),
)
.subcommand(
"wifi",
net::wifi::wifi::<C>()
.with_about("Commands related to wifi networks i.e. add, connect, delete"),
)
.subcommand(
"disk",
disk::disk::<C>().with_about("Commands for listing disk info and repairing"),
)
.subcommand(
"notification",
notifications::notification::<C>().with_about("Create, delete, or list notifications"),
)
.subcommand(
"backup",
backup::backup::<C>()
.with_about("Commands related to backup creation and backup targets"),
)
.subcommand( .subcommand(
"registry", "registry",
CallRemoteHandler::<RpcContext, _, _, RegistryUrlParams>::new( CallRemoteHandler::<RpcContext, _, _, RegistryUrlParams>::new(
@@ -146,10 +184,20 @@ pub fn main_api<C: Context>() -> ParentHandler<C> {
) )
.no_cli(), .no_cli(),
) )
.subcommand("s9pk", s9pk::rpc::s9pk()) .subcommand(
.subcommand("util", util::rpc::util::<C>()); "s9pk",
s9pk::rpc::s9pk().with_about("Commands for interacting with s9pk files"),
)
.subcommand(
"util",
util::rpc::util::<C>().with_about("Command for calculating the blake3 hash of a file"),
);
#[cfg(feature = "dev")] #[cfg(feature = "dev")]
let api = api.subcommand("lxc", lxc::dev::lxc::<C>()); let api = api.subcommand(
"lxc",
lxc::dev::lxc::<C>()
.with_about("Commands related to lxc containers i.e. create, list, remove, connect"),
);
api api
} }
@@ -162,42 +210,57 @@ pub fn server<C: Context>() -> ParentHandler<C> {
.with_custom_display_fn(|handle, result| { .with_custom_display_fn(|handle, result| {
Ok(system::display_time(handle.params, result)) Ok(system::display_time(handle.params, result))
}) })
.with_call_remote::<CliContext>(), .with_about("Display current time and server uptime")
.with_call_remote::<CliContext>()
)
.subcommand(
"experimental",
system::experimental::<C>()
.with_about("Commands related to configuring experimental options such as zram and cpu governor"),
) )
.subcommand("experimental", system::experimental::<C>())
.subcommand("logs", system::logs::<RpcContext>())
.subcommand( .subcommand(
"logs", "logs",
from_fn_async(logs::cli_logs::<RpcContext, Empty>).no_display(), system::logs::<RpcContext>().with_about("Display OS logs"),
)
.subcommand(
"logs",
from_fn_async(logs::cli_logs::<RpcContext, Empty>).no_display().with_about("Display OS logs"),
) )
.subcommand("kernel-logs", system::kernel_logs::<RpcContext>())
.subcommand( .subcommand(
"kernel-logs", "kernel-logs",
from_fn_async(logs::cli_logs::<RpcContext, Empty>).no_display(), system::kernel_logs::<RpcContext>().with_about("Display Kernel logs"),
)
.subcommand(
"kernel-logs",
from_fn_async(logs::cli_logs::<RpcContext, Empty>).no_display().with_about("Display Kernel logs"),
) )
.subcommand( .subcommand(
"metrics", "metrics",
from_fn_async(system::metrics) from_fn_async(system::metrics)
.with_display_serializable() .with_display_serializable()
.with_call_remote::<CliContext>(), .with_about("Display information about the server i.e. temperature, RAM, CPU, and disk usage")
.with_call_remote::<CliContext>()
) )
.subcommand( .subcommand(
"shutdown", "shutdown",
from_fn_async(shutdown::shutdown) from_fn_async(shutdown::shutdown)
.no_display() .no_display()
.with_call_remote::<CliContext>(), .with_about("Shutdown the server")
.with_call_remote::<CliContext>()
) )
.subcommand( .subcommand(
"restart", "restart",
from_fn_async(shutdown::restart) from_fn_async(shutdown::restart)
.no_display() .no_display()
.with_call_remote::<CliContext>(), .with_about("Restart the server")
.with_call_remote::<CliContext>()
) )
.subcommand( .subcommand(
"rebuild", "rebuild",
from_fn_async(shutdown::rebuild) from_fn_async(shutdown::rebuild)
.no_display() .no_display()
.with_call_remote::<CliContext>(), .with_about("Teardown and rebuild service containers")
.with_call_remote::<CliContext>()
) )
.subcommand( .subcommand(
"update", "update",
@@ -207,7 +270,7 @@ pub fn server<C: Context>() -> ParentHandler<C> {
) )
.subcommand( .subcommand(
"update", "update",
from_fn_async(update::cli_update_system).no_display(), from_fn_async(update::cli_update_system).no_display().with_about("Check a given registry for StartOS updates and update if available"),
) )
.subcommand( .subcommand(
"update-firmware", "update-firmware",
@@ -222,19 +285,22 @@ pub fn server<C: Context>() -> ParentHandler<C> {
.with_custom_display_fn(|_handle, result| { .with_custom_display_fn(|_handle, result| {
Ok(firmware::display_firmware_update_result(result)) Ok(firmware::display_firmware_update_result(result))
}) })
.with_call_remote::<CliContext>(), .with_about("Update the mainboard's firmware to the latest firmware available in this version of StartOS if available. Note: This command does not reach out to the Internet")
.with_call_remote::<CliContext>()
) )
.subcommand( .subcommand(
"set-smtp", "set-smtp",
from_fn_async(system::set_system_smtp) from_fn_async(system::set_system_smtp)
.no_display() .no_display()
.with_call_remote::<CliContext>(), .with_about("Set system smtp server and credentials")
.with_call_remote::<CliContext>()
) )
.subcommand( .subcommand(
"clear-smtp", "clear-smtp",
from_fn_async(system::clear_system_smtp) from_fn_async(system::clear_system_smtp)
.no_display() .no_display()
.with_call_remote::<CliContext>(), .with_about("Remove system smtp server and credentials")
.with_call_remote::<CliContext>()
) )
} }
@@ -242,12 +308,7 @@ pub fn package<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand( .subcommand(
"action", "action",
from_fn_async(action::action) action::action_api::<C>().with_about("Commands to get action input or run an action"),
.with_display_serializable()
.with_custom_display_fn(|handle, result| {
Ok(action::display_action_result(handle.params, result))
})
.with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"install", "install",
@@ -261,32 +322,40 @@ pub fn package<C: Context>() -> ParentHandler<C> {
.with_metadata("get_session", Value::Bool(true)) .with_metadata("get_session", Value::Bool(true))
.no_cli(), .no_cli(),
) )
.subcommand("install", from_fn_async(install::cli_install).no_display()) .subcommand(
"install",
from_fn_async(install::cli_install)
.no_display()
.with_about("Install a package from a marketplace or via sideloading"),
)
.subcommand( .subcommand(
"uninstall", "uninstall",
from_fn_async(install::uninstall) from_fn_async(install::uninstall)
.with_metadata("sync_db", Value::Bool(true)) .with_metadata("sync_db", Value::Bool(true))
.no_display() .no_display()
.with_about("Remove a package")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"list", "list",
from_fn_async(install::list) from_fn_async(install::list)
.with_display_serializable() .with_display_serializable()
.with_about("List installed packages")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"installed-version", "installed-version",
from_fn_async(install::installed_version) from_fn_async(install::installed_version)
.with_display_serializable() .with_display_serializable()
.with_about("Display installed version for a PackageId")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("config", config::config::<C>())
.subcommand( .subcommand(
"start", "start",
from_fn_async(control::start) from_fn_async(control::start)
.with_metadata("sync_db", Value::Bool(true)) .with_metadata("sync_db", Value::Bool(true))
.no_display() .no_display()
.with_about("Start a service")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
@@ -294,6 +363,7 @@ pub fn package<C: Context>() -> ParentHandler<C> {
from_fn_async(control::stop) from_fn_async(control::stop)
.with_metadata("sync_db", Value::Bool(true)) .with_metadata("sync_db", Value::Bool(true))
.no_display() .no_display()
.with_about("Stop a service")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
@@ -301,100 +371,174 @@ pub fn package<C: Context>() -> ParentHandler<C> {
from_fn_async(control::restart) from_fn_async(control::restart)
.with_metadata("sync_db", Value::Bool(true)) .with_metadata("sync_db", Value::Bool(true))
.no_display() .no_display()
.with_about("Restart a service")
.with_call_remote::<CliContext>(),
)
.subcommand(
"rebuild",
from_fn_async(service::rebuild)
.with_metadata("sync_db", Value::Bool(true))
.no_display()
.with_about("Rebuild service container")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("logs", logs::package_logs()) .subcommand("logs", logs::package_logs())
.subcommand( .subcommand(
"logs", "logs",
from_fn_async(logs::cli_logs::<RpcContext, logs::PackageIdParams>).no_display(), logs::package_logs().with_about("Display package logs"),
) )
.subcommand( .subcommand(
"properties", "logs",
from_fn_async(properties::properties) from_fn_async(logs::cli_logs::<RpcContext, logs::PackageIdParams>)
.with_custom_display_fn(|_handle, result| { .no_display()
Ok(properties::display_properties(result)) .with_about("Display package logs"),
}) )
.with_call_remote::<CliContext>(), .subcommand(
"backup",
backup::package_backup::<C>()
.with_about("Commands for restoring package(s) from backup"),
) )
.subcommand("dependency", dependencies::dependency::<C>())
.subcommand("backup", backup::package_backup::<C>())
.subcommand("connect", from_fn_async(service::connect_rpc).no_cli()) .subcommand("connect", from_fn_async(service::connect_rpc).no_cli())
.subcommand( .subcommand(
"connect", "connect",
from_fn_async(service::connect_rpc_cli).no_display(), from_fn_async(service::connect_rpc_cli)
.no_display()
.with_about("Connect to a LXC container"),
)
.subcommand(
"attach",
from_fn_async(service::attach)
.with_metadata("get_session", Value::Bool(true))
.with_about("Execute commands within a service container")
.no_cli(),
)
.subcommand("attach", from_fn_async(service::cli_attach).no_display())
.subcommand(
"host",
net::host::host::<C>().with_about("Manage network hosts for a package"),
) )
} }
pub fn diagnostic_api() -> ParentHandler<DiagnosticContext> { pub fn diagnostic_api() -> ParentHandler<DiagnosticContext> {
ParentHandler::new() ParentHandler::new()
.subcommand::<DiagnosticContext, _>( .subcommand(
"git-info", "git-info",
from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), from_fn(|_: DiagnosticContext| version::git_info())
.with_metadata("authenticated", Value::Bool(false))
.with_about("Display the githash of StartOS CLI"),
) )
.subcommand( .subcommand(
"echo", "echo",
from_fn(echo::<DiagnosticContext>).with_call_remote::<CliContext>(), from_fn(echo::<DiagnosticContext>)
.with_about("Echo a message")
.with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"state", "state",
from_fn(|_: DiagnosticContext| Ok::<_, Error>(ApiState::Error)) from_fn(|_: DiagnosticContext| Ok::<_, Error>(ApiState::Error))
.with_metadata("authenticated", Value::Bool(false)) .with_metadata("authenticated", Value::Bool(false))
.with_about("Display the API that is currently serving")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("diagnostic", diagnostic::diagnostic::<DiagnosticContext>()) .subcommand(
"diagnostic",
diagnostic::diagnostic::<DiagnosticContext>()
.with_about("Diagnostic commands i.e. logs, restart, rebuild"),
)
} }
pub fn init_api() -> ParentHandler<InitContext> { pub fn init_api() -> ParentHandler<InitContext> {
ParentHandler::new() ParentHandler::new()
.subcommand::<InitContext, _>( .subcommand(
"git-info", "git-info",
from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), from_fn(|_: InitContext| version::git_info())
.with_metadata("authenticated", Value::Bool(false))
.with_about("Display the githash of StartOS CLI"),
) )
.subcommand( .subcommand(
"echo", "echo",
from_fn(echo::<InitContext>).with_call_remote::<CliContext>(), from_fn(echo::<InitContext>)
.with_about("Echo a message")
.with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"state", "state",
from_fn(|_: InitContext| Ok::<_, Error>(ApiState::Initializing)) from_fn(|_: InitContext| Ok::<_, Error>(ApiState::Initializing))
.with_metadata("authenticated", Value::Bool(false)) .with_metadata("authenticated", Value::Bool(false))
.with_about("Display the API that is currently serving")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("init", init::init_api::<InitContext>()) .subcommand(
"init",
init::init_api::<InitContext>()
.with_about("Commands to get logs or initialization progress"),
)
} }
pub fn setup_api() -> ParentHandler<SetupContext> { pub fn setup_api() -> ParentHandler<SetupContext> {
ParentHandler::new() ParentHandler::new()
.subcommand::<SetupContext, _>( .subcommand(
"git-info", "git-info",
from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), from_fn(|_: SetupContext| version::git_info())
.with_metadata("authenticated", Value::Bool(false))
.with_about("Display the githash of StartOS CLI"),
) )
.subcommand( .subcommand(
"echo", "echo",
from_fn(echo::<SetupContext>).with_call_remote::<CliContext>(), from_fn(echo::<SetupContext>)
.with_about("Echo a message")
.with_call_remote::<CliContext>(),
) )
.subcommand("setup", setup::setup::<SetupContext>()) .subcommand("setup", setup::setup::<SetupContext>())
} }
pub fn install_api() -> ParentHandler<InstallContext> { pub fn install_api() -> ParentHandler<InstallContext> {
ParentHandler::new() ParentHandler::new()
.subcommand::<InstallContext, _>( .subcommand(
"git-info", "git-info",
from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), from_fn(|_: InstallContext| version::git_info())
.with_metadata("authenticated", Value::Bool(false))
.with_about("Display the githash of StartOS CLI"),
) )
.subcommand( .subcommand(
"echo", "echo",
from_fn(echo::<InstallContext>).with_call_remote::<CliContext>(), from_fn(echo::<InstallContext>)
.with_about("Echo a message")
.with_call_remote::<CliContext>(),
)
.subcommand(
"install",
os_install::install::<InstallContext>()
.with_about("Commands to list disk info, install StartOS, and reboot"),
) )
.subcommand("install", os_install::install::<InstallContext>())
} }
pub fn expanded_api() -> ParentHandler<CliContext> { pub fn expanded_api() -> ParentHandler<CliContext> {
main_api() main_api()
.subcommand("init", from_fn_blocking(developer::init).no_display()) .subcommand(
.subcommand("pubkey", from_fn_blocking(developer::pubkey)) "init",
.subcommand("diagnostic", diagnostic::diagnostic::<CliContext>()) from_fn_blocking(developer::init)
.no_display()
.with_about("Create developer key if it doesn't exist"),
)
.subcommand(
"pubkey",
from_fn_blocking(developer::pubkey)
.with_about("Get public key for developer private key"),
)
.subcommand(
"diagnostic",
diagnostic::diagnostic::<CliContext>()
.with_about("Commands to display logs, restart the server, etc"),
)
.subcommand("setup", setup::setup::<CliContext>()) .subcommand("setup", setup::setup::<CliContext>())
.subcommand("install", os_install::install::<CliContext>()) .subcommand(
.subcommand("registry", registry::registry_api::<CliContext>()) "install",
os_install::install::<CliContext>()
.with_about("Commands to list disk info, install StartOS, and reboot"),
)
.subcommand(
"registry",
registry::registry_api::<CliContext>().with_about("Commands related to the registry"),
)
} }

View File

@@ -12,7 +12,7 @@ use color_eyre::eyre::eyre;
use futures::stream::BoxStream; use futures::stream::BoxStream;
use futures::{Future, FutureExt, Stream, StreamExt, TryStreamExt}; use futures::{Future, FutureExt, Stream, StreamExt, TryStreamExt};
use itertools::Itertools; use itertools::Itertools;
use models::PackageId; use models::{FromStrParser, PackageId};
use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{ use rpc_toolkit::{
from_fn_async, CallRemote, Context, Empty, HandlerArgs, HandlerExt, HandlerFor, ParentHandler, from_fn_async, CallRemote, Context, Empty, HandlerArgs, HandlerExt, HandlerFor, ParentHandler,
@@ -30,7 +30,6 @@ use crate::error::ResultExt;
use crate::lxc::ContainerId; use crate::lxc::ContainerId;
use crate::prelude::*; use crate::prelude::*;
use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations}; use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations};
use crate::util::clap::FromStrParser;
use crate::util::serde::Reversible; use crate::util::serde::Reversible;
use crate::util::Invoke; use crate::util::Invoke;
@@ -114,7 +113,7 @@ async fn ws_handler(
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct LogResponse { pub struct LogResponse {
entries: Reversible<LogEntry>, pub entries: Reversible<LogEntry>,
start_cursor: Option<String>, start_cursor: Option<String>,
end_cursor: Option<String>, end_cursor: Option<String>,
} }
@@ -361,11 +360,7 @@ pub fn logs<
source: impl for<'a> LogSourceFn<'a, C, Extra>, source: impl for<'a> LogSourceFn<'a, C, Extra>,
) -> ParentHandler<C, LogsParams<Extra>> { ) -> ParentHandler<C, LogsParams<Extra>> {
ParentHandler::new() ParentHandler::new()
.root_handler( .root_handler(logs_nofollow::<C, Extra>(source.clone()).no_cli())
logs_nofollow::<C, Extra>(source.clone())
.with_inherited(|params, _| params)
.no_cli(),
)
.subcommand( .subcommand(
"follow", "follow",
logs_follow::<C, Extra>(source) logs_follow::<C, Extra>(source)
@@ -437,7 +432,7 @@ where
fn logs_nofollow<C, Extra>( fn logs_nofollow<C, Extra>(
f: impl for<'a> LogSourceFn<'a, C, Extra>, f: impl for<'a> LogSourceFn<'a, C, Extra>,
) -> impl HandlerFor<C, Params = Empty, InheritedParams = LogsParams<Extra>, Ok = LogResponse, Err = Error> ) -> impl HandlerFor<C, Params = LogsParams<Extra>, InheritedParams = Empty, Ok = LogResponse, Err = Error>
where where
C: Context, C: Context,
Extra: FromArgMatches + Args + Send + Sync + 'static, Extra: FromArgMatches + Args + Send + Sync + 'static,
@@ -445,7 +440,7 @@ where
from_fn_async( from_fn_async(
move |HandlerArgs { move |HandlerArgs {
context, context,
inherited_params: params:
LogsParams { LogsParams {
extra, extra,
limit, limit,
@@ -454,7 +449,7 @@ where
before, before,
}, },
.. ..
}: HandlerArgs<C, Empty, LogsParams<Extra>>| { }: HandlerArgs<C, LogsParams<Extra>>| {
let f = f.clone(); let f = f.clone();
async move { async move {
fetch_logs( fetch_logs(
@@ -487,14 +482,18 @@ fn logs_follow<
context, context,
inherited_params: inherited_params:
LogsParams { LogsParams {
extra, limit, boot, .. extra,
cursor,
limit,
boot,
..
}, },
.. ..
}: HandlerArgs<C, Empty, LogsParams<Extra>>| { }: HandlerArgs<C, Empty, LogsParams<Extra>>| {
let f = f.clone(); let f = f.clone();
async move { async move {
let src = f.call(&context, extra).await?; let src = f.call(&context, extra).await?;
follow_logs(context, src, limit, boot.map(String::from)).await follow_logs(context, src, cursor, limit, boot.map(String::from)).await
} }
}, },
) )
@@ -525,7 +524,7 @@ pub fn package_logs() -> ParentHandler<RpcContext, LogsParams<PackageIdParams>>
pub async fn journalctl( pub async fn journalctl(
id: LogSource, id: LogSource,
limit: usize, limit: Option<usize>,
cursor: Option<&str>, cursor: Option<&str>,
boot: Option<&str>, boot: Option<&str>,
before: bool, before: bool,
@@ -533,11 +532,12 @@ pub async fn journalctl(
) -> Result<LogStream, Error> { ) -> Result<LogStream, Error> {
let mut cmd = gen_journalctl_command(&id); let mut cmd = gen_journalctl_command(&id);
cmd.arg(format!("--lines={}", limit)); if let Some(limit) = limit {
cmd.arg(format!("--lines={}", limit));
}
let cursor_formatted = format!("--after-cursor={}", cursor.unwrap_or("")); if let Some(cursor) = cursor {
if cursor.is_some() { cmd.arg(&format!("--after-cursor={}", cursor));
cmd.arg(&cursor_formatted);
if before { if before {
cmd.arg("--reverse"); cmd.arg("--reverse");
} }
@@ -638,8 +638,15 @@ pub async fn fetch_logs(
before: bool, before: bool,
) -> Result<LogResponse, Error> { ) -> Result<LogResponse, Error> {
let limit = limit.unwrap_or(50); let limit = limit.unwrap_or(50);
let mut stream = let mut stream = journalctl(
journalctl(id, limit, cursor.as_deref(), boot.as_deref(), before, false).await?; id,
Some(limit),
cursor.as_deref(),
boot.as_deref(),
before,
false,
)
.await?;
let mut entries = Vec::with_capacity(limit); let mut entries = Vec::with_capacity(limit);
let mut start_cursor = None; let mut start_cursor = None;
@@ -682,11 +689,16 @@ pub async fn fetch_logs(
pub async fn follow_logs<Context: AsRef<RpcContinuations>>( pub async fn follow_logs<Context: AsRef<RpcContinuations>>(
ctx: Context, ctx: Context,
id: LogSource, id: LogSource,
cursor: Option<String>,
limit: Option<usize>, limit: Option<usize>,
boot: Option<String>, boot: Option<String>,
) -> Result<LogFollowResponse, Error> { ) -> Result<LogFollowResponse, Error> {
let limit = limit.unwrap_or(50); let limit = if cursor.is_some() {
let mut stream = journalctl(id, limit, None, boot.as_deref(), false, true).await?; None
} else {
Some(limit.unwrap_or(50))
};
let mut stream = journalctl(id, limit, cursor.as_deref(), boot.as_deref(), false, true).await?;
let mut start_cursor = None; let mut start_cursor = None;
let mut first_entry = None; let mut first_entry = None;

View File

@@ -8,16 +8,21 @@ use rpc_toolkit::{
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ts_rs::TS; use ts_rs::TS;
use crate::context::{CliContext, RpcContext};
use crate::lxc::{ContainerId, LxcConfig}; use crate::lxc::{ContainerId, LxcConfig};
use crate::prelude::*; use crate::prelude::*;
use crate::rpc_continuations::Guid; use crate::rpc_continuations::Guid;
use crate::{
context::{CliContext, RpcContext},
service::ServiceStats,
};
pub fn lxc<C: Context>() -> ParentHandler<C> { pub fn lxc<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand( .subcommand(
"create", "create",
from_fn_async(create).with_call_remote::<CliContext>(), from_fn_async(create)
.with_about("Create lxc container")
.with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"list", "list",
@@ -31,16 +36,59 @@ pub fn lxc<C: Context>() -> ParentHandler<C> {
table.printstd(); table.printstd();
Ok(()) Ok(())
}) })
.with_about("List lxc containers")
.with_call_remote::<CliContext>(),
)
.subcommand(
"stats",
from_fn_async(stats)
.with_custom_display_fn(|_, res| {
use prettytable::*;
let mut table = table!([
"Container ID",
"Name",
"Memory Usage",
"Memory Limit",
"Memory %"
]);
for ServiceStats {
container_id,
package_id,
memory_usage,
memory_limit,
} in res
{
table.add_row(row![
&*container_id,
&*package_id,
memory_usage,
memory_limit,
format!(
"{:.2}",
memory_usage.0 as f64 / memory_limit.0 as f64 * 100.0
)
]);
}
table.printstd();
Ok(())
})
.with_about("List information related to the lxc containers i.e. CPU, Memory, Disk")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"remove", "remove",
from_fn_async(remove) from_fn_async(remove)
.no_display() .no_display()
.with_about("Remove lxc container")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("connect", from_fn_async(connect_rpc).no_cli()) .subcommand("connect", from_fn_async(connect_rpc).no_cli())
.subcommand("connect", from_fn_async(connect_rpc_cli).no_display()) .subcommand(
"connect",
from_fn_async(connect_rpc_cli)
.no_display()
.with_about("Connect to a lxc container"),
)
} }
pub async fn create(ctx: RpcContext) -> Result<ContainerId, Error> { pub async fn create(ctx: RpcContext) -> Result<ContainerId, Error> {
@@ -54,6 +102,22 @@ pub async fn list(ctx: RpcContext) -> Result<Vec<ContainerId>, Error> {
Ok(ctx.dev.lxc.lock().await.keys().cloned().collect()) Ok(ctx.dev.lxc.lock().await.keys().cloned().collect())
} }
pub async fn stats(ctx: RpcContext) -> Result<Vec<ServiceStats>, Error> {
let ids = ctx.db.peek().await.as_public().as_package_data().keys()?;
let guids: Vec<_> = ctx.dev.lxc.lock().await.keys().cloned().collect();
let mut stats = Vec::with_capacity(guids.len());
for id in ids {
let service: tokio::sync::OwnedRwLockReadGuard<Option<crate::service::ServiceRef>> =
ctx.services.get(&id).await;
let service_ref = service.as_ref().or_not_found(&id)?;
stats.push(service_ref.stats().await?);
}
Ok(stats)
}
#[derive(Deserialize, Serialize, Parser, TS)] #[derive(Deserialize, Serialize, Parser, TS)]
pub struct RemoveParams { pub struct RemoveParams {
#[ts(type = "string")] #[ts(type = "string")]

View File

@@ -1,13 +1,13 @@
use std::collections::BTreeSet;
use std::net::Ipv4Addr; use std::net::Ipv4Addr;
use std::path::Path; use std::path::Path;
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use std::time::Duration; use std::time::Duration;
use std::{collections::BTreeSet, ffi::OsString};
use clap::builder::ValueParserFactory; use clap::builder::ValueParserFactory;
use futures::{AsyncWriteExt, StreamExt}; use futures::{AsyncWriteExt, StreamExt};
use imbl_value::{InOMap, InternedString}; use imbl_value::{InOMap, InternedString};
use models::InvalidId; use models::{FromStrParser, InvalidId};
use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::yajrc::RpcError;
use rpc_toolkit::{GenericRpcMethod, RpcRequest, RpcResponse}; use rpc_toolkit::{GenericRpcMethod, RpcRequest, RpcResponse};
use rustyline_async::{ReadlineEvent, SharedWriter}; use rustyline_async::{ReadlineEvent, SharedWriter};
@@ -28,12 +28,11 @@ use crate::disk::mount::guard::{GenericMountGuard, MountGuard, TmpMountGuard};
use crate::disk::mount::util::unmount; use crate::disk::mount::util::unmount;
use crate::prelude::*; use crate::prelude::*;
use crate::rpc_continuations::{Guid, RpcContinuation}; use crate::rpc_continuations::{Guid, RpcContinuation};
use crate::util::clap::FromStrParser;
use crate::util::io::open_file; use crate::util::io::open_file;
use crate::util::rpc_client::UnixRpcClient; use crate::util::rpc_client::UnixRpcClient;
use crate::util::{new_guid, Invoke}; use crate::util::{new_guid, Invoke};
#[cfg(feature = "dev")] // #[cfg(feature = "dev")]
pub mod dev; pub mod dev;
const LXC_CONTAINER_DIR: &str = "/var/lib/lxc"; const LXC_CONTAINER_DIR: &str = "/var/lib/lxc";
@@ -127,7 +126,8 @@ impl LxcManager {
Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs"), Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs"),
true, true,
) )
.await?; .await
.log_err();
if tokio_stream::wrappers::ReadDirStream::new( if tokio_stream::wrappers::ReadDirStream::new(
tokio::fs::read_dir(&rootfs_path).await?, tokio::fs::read_dir(&rootfs_path).await?,
) )
@@ -287,6 +287,30 @@ impl LxcContainer {
self.rpc_bind.path() self.rpc_bind.path()
} }
pub async fn command(&self, commands: &[&str]) -> Result<String, Error> {
let mut cmd = Command::new("lxc-attach");
cmd.kill_on_drop(true);
let output = cmd
.arg(&**self.guid)
.arg("--")
.args(commands)
.output()
.await?;
if !output.status.success() {
return Err(Error::new(
eyre!(
"Command failed with exit code: {:?} \n Message: {:?}",
output.status.code(),
String::from_utf8(output.stderr)
),
ErrorKind::Docker,
));
}
Ok(String::from_utf8(output.stdout)?)
}
#[instrument(skip_all)] #[instrument(skip_all)]
pub async fn exit(mut self) -> Result<(), Error> { pub async fn exit(mut self) -> Result<(), Error> {
Command::new("lxc-stop") Command::new("lxc-stop")
@@ -365,7 +389,7 @@ impl Drop for LxcContainer {
tracing::error!("Error reading logs from crashed container: {e}"); tracing::error!("Error reading logs from crashed container: {e}");
tracing::debug!("{e:?}") tracing::debug!("{e:?}")
} }
rootfs.unmount(true).await.unwrap(); rootfs.unmount(true).await.log_err();
drop(guid); drop(guid);
if let Err(e) = manager.gc().await { if let Err(e) = manager.gc().await {
tracing::error!("Error cleaning up dangling LXC containers: {e}"); tracing::error!("Error cleaning up dangling LXC containers: {e}");

View File

@@ -49,7 +49,7 @@ impl HasLoggedOutSessions {
.map(|s| s.as_logout_session_id()) .map(|s| s.as_logout_session_id())
.collect(); .collect();
for sid in &to_log_out { for sid in &to_log_out {
ctx.open_authed_continuations.kill(sid) ctx.open_authed_continuations.kill(&Some(sid.clone()))
} }
ctx.ephemeral_sessions.mutate(|s| { ctx.ephemeral_sessions.mutate(|s| {
for sid in &to_log_out { for sid in &to_log_out {

View File

@@ -0,0 +1,324 @@
use std::collections::{BTreeMap, BTreeSet};
use std::str::FromStr;
use clap::builder::ValueParserFactory;
use clap::Parser;
use imbl_value::InternedString;
use itertools::Itertools;
use models::{ErrorData, FromStrParser};
use openssl::pkey::{PKey, Private};
use openssl::x509::X509;
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
use url::Url;
use crate::context::{CliContext, RpcContext};
use crate::db::model::public::AcmeSettings;
use crate::db::model::Database;
use crate::prelude::*;
use crate::util::serde::{Pem, Pkcs8Doc};
/// Persistent store for ACME state, kept in the private key store section of
/// the database.
///
/// `accounts` maps an ACME account's contact list (JSON-encoded key) to its
/// PKCS#8 account key; `certs` maps an ACME directory URL to the certificates
/// issued by that directory, keyed by the set of domains each cert covers.
#[derive(Debug, Default, Deserialize, Serialize, HasModel)]
#[model = "Model<Self>"]
pub struct AcmeCertStore {
    pub accounts: BTreeMap<JsonKey<Vec<String>>, Pem<Pkcs8Doc>>,
    pub certs: BTreeMap<Url, BTreeMap<JsonKey<BTreeSet<InternedString>>, AcmeCert>>,
}
impl AcmeCertStore {
    /// Creates an empty store (no accounts, no certificates).
    pub fn new() -> Self {
        Self::default()
    }
}
/// A single issued ACME certificate: its private key plus the full
/// certificate chain, both stored PEM-encodable.
#[derive(Debug, Deserialize, Serialize)]
pub struct AcmeCert {
    pub key: Pem<PKey<Private>>,
    pub fullchain: Vec<Pem<X509>>,
}
/// Adapter that persists `async_acme` account keys and certificates in the
/// patch-db (private key store) instead of on the filesystem.
pub struct AcmeCertCache<'a>(pub &'a TypedPatchDb<Database>);
#[async_trait::async_trait]
impl<'a> async_acme::cache::AcmeCache for AcmeCertCache<'a> {
    type Error = ErrorData;
    /// Looks up the PKCS#8 account key stored for this exact contact list.
    /// Returns `Ok(None)` when no account exists for these contacts.
    async fn read_account(&self, contacts: &[&str]) -> Result<Option<Vec<u8>>, Self::Error> {
        let contacts = JsonKey::new(contacts.into_iter().map(|s| (*s).to_owned()).collect_vec());
        let Some(account) = self
            .0
            .peek()
            .await
            .into_private()
            .into_key_store()
            .into_acme()
            .into_accounts()
            .into_idx(&contacts)
        else {
            return Ok(None);
        };
        Ok(Some(account.de()?.0.document.into_vec()))
    }
    /// Stores `contents` as an EC PKCS#8 account key for this contact list,
    /// overwriting any existing entry.
    async fn write_account(&self, contacts: &[&str], contents: &[u8]) -> Result<(), Self::Error> {
        let contacts = JsonKey::new(contacts.into_iter().map(|s| (*s).to_owned()).collect_vec());
        let key = Pkcs8Doc {
            tag: "EC PRIVATE KEY".into(),
            document: pkcs8::Document::try_from(contents).with_kind(ErrorKind::Pem)?,
        };
        self.0
            .mutate(|db| {
                db.as_private_mut()
                    .as_key_store_mut()
                    .as_acme_mut()
                    .as_accounts_mut()
                    .insert(&contacts, &Pem::new(key))
            })
            .await?;
        Ok(())
    }
    /// Fetches the cached certificate for (`directory_url`, `domains`), if
    /// any, as `(key_pem, fullchain_pem)` with the chain joined by newlines.
    async fn read_certificate(
        &self,
        domains: &[String],
        directory_url: &str,
    ) -> Result<Option<(String, String)>, Self::Error> {
        let domains = JsonKey::new(domains.into_iter().map(InternedString::intern).collect());
        let directory_url = directory_url
            .parse::<Url>()
            .with_kind(ErrorKind::ParseUrl)?;
        let Some(cert) = self
            .0
            .peek()
            .await
            .into_private()
            .into_key_store()
            .into_acme()
            .into_certs()
            .into_idx(&directory_url)
            .and_then(|a| a.into_idx(&domains))
        else {
            return Ok(None);
        };
        let cert = cert.de()?;
        Ok(Some((
            String::from_utf8(
                cert.key
                    .0
                    .private_key_to_pem_pkcs8()
                    .with_kind(ErrorKind::OpenSsl)?,
            )
            .with_kind(ErrorKind::Utf8)?,
            cert.fullchain
                .into_iter()
                .map(|cert| {
                    String::from_utf8(cert.0.to_pem().with_kind(ErrorKind::OpenSsl)?)
                        .with_kind(ErrorKind::Utf8)
                })
                .collect::<Result<Vec<_>, _>>()?
                .join("\n"),
        )))
    }
    /// Parses a freshly issued key + chain (PEM) and stores them under
    /// (`directory_url`, `domains`), replacing any previous entry.
    async fn write_certificate(
        &self,
        domains: &[String],
        directory_url: &str,
        key_pem: &str,
        certificate_pem: &str,
    ) -> Result<(), Self::Error> {
        tracing::info!("Saving new certificate for {domains:?}");
        let domains = JsonKey::new(domains.into_iter().map(InternedString::intern).collect());
        let directory_url = directory_url
            .parse::<Url>()
            .with_kind(ErrorKind::ParseUrl)?;
        let cert = AcmeCert {
            key: Pem(PKey::<Private>::private_key_from_pem(key_pem.as_bytes())
                .with_kind(ErrorKind::OpenSsl)?),
            fullchain: X509::stack_from_pem(certificate_pem.as_bytes())
                .with_kind(ErrorKind::OpenSsl)?
                .into_iter()
                .map(Pem)
                .collect(),
        };
        self.0
            .mutate(|db| {
                db.as_private_mut()
                    .as_key_store_mut()
                    .as_acme_mut()
                    .as_certs_mut()
                    // create the per-directory map on first write
                    .upsert(&directory_url, || Ok(BTreeMap::new()))?
                    .insert(&domains, &cert)
            })
            .await?;
        Ok(())
    }
}
/// Builds the `net acme` command tree: `init` configures the ACME provider
/// and account contacts, `domain` manages the domains certificates are
/// requested for.
pub fn acme<C: Context>() -> ParentHandler<C> {
    let init_cmd = from_fn_async(init)
        .no_display()
        .with_about("Setup ACME certificate acquisition")
        .with_call_remote::<CliContext>();
    let domain_cmd = domain::<C>()
        .with_about("Add, remove, or view domains for which to acquire ACME certificates");
    ParentHandler::new()
        .subcommand("init", init_cmd)
        .subcommand("domain", domain_cmd)
}
/// An ACME directory URL, with shorthand aliases for the Let's Encrypt
/// production and staging directories.
#[derive(Clone, Deserialize, Serialize)]
pub struct AcmeProvider(pub Url);
impl FromStr for AcmeProvider {
    type Err = <Url as FromStr>::Err;
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // The aliases expand to the well-known directory URLs; anything else
        // must itself parse as a URL.
        let raw = match s {
            "letsencrypt" => async_acme::acme::LETS_ENCRYPT_PRODUCTION_DIRECTORY,
            "letsencrypt-staging" => async_acme::acme::LETS_ENCRYPT_STAGING_DIRECTORY,
            other => other,
        };
        raw.parse().map(Self)
    }
}
impl ValueParserFactory for AcmeProvider {
    type Parser = FromStrParser<Self>;
    fn value_parser() -> Self::Parser {
        Self::Parser::new()
    }
}
// Parameters for `net acme init`. (Plain `//` comments on purpose: `///` on a
// clap `Parser` struct/field would change the generated CLI help text.)
#[derive(Deserialize, Serialize, Parser)]
pub struct InitAcmeParams {
    // ACME directory: "letsencrypt", "letsencrypt-staging", or any directory URL
    #[arg(long)]
    pub provider: AcmeProvider,
    // contact addresses to register with the ACME account
    #[arg(long)]
    pub contact: Vec<String>,
}
/// `net acme init`: persist the chosen ACME provider and contacts in
/// server-info, carrying over any previously configured domains.
pub async fn init(
    ctx: RpcContext,
    InitAcmeParams {
        provider: AcmeProvider(provider),
        contact,
    }: InitAcmeParams,
) -> Result<(), Error> {
    ctx.db
        .mutate(|db| {
            db.as_public_mut()
                .as_server_info_mut()
                .as_acme_mut()
                .map_mutate(|acme| {
                    Ok(Some(AcmeSettings {
                        provider,
                        contact,
                        // keep the existing domain set when re-initializing
                        domains: acme.map(|acme| acme.domains).unwrap_or_default(),
                    }))
                })
        })
        .await?;
    Ok(())
}
/// Builds the `net acme domain` command tree (`add` / `remove` / `list`).
pub fn domain<C: Context>() -> ParentHandler<C> {
    let add = from_fn_async(add_domain)
        .no_display()
        .with_about("Add a domain for which to acquire ACME certificates")
        .with_call_remote::<CliContext>();
    let remove = from_fn_async(remove_domain)
        .no_display()
        .with_about("Remove a domain for which to acquire ACME certificates")
        .with_call_remote::<CliContext>();
    let list = from_fn_async(list_domains)
        .with_custom_display_fn(|_, res| {
            // print one configured domain per line
            res.into_iter().for_each(|domain| println!("{domain}"));
            Ok(())
        })
        .with_about("List domains for which to acquire ACME certificates")
        .with_call_remote::<CliContext>();
    ParentHandler::new()
        .subcommand("add", add)
        .subcommand("remove", remove)
        .subcommand("list", list)
}
// A single domain argument, shared by `domain add` / `domain remove`.
#[derive(Deserialize, Serialize, Parser)]
pub struct DomainParams {
    pub domain: InternedString,
}
/// `net acme domain add`: add `domain` to the set of domains for which ACME
/// certificates are acquired.
///
/// # Errors
/// Fails with `ErrorKind::InvalidRequest` if ACME has not yet been
/// initialized via `start-cli net acme init`.
pub async fn add_domain(
    ctx: RpcContext,
    DomainParams { domain }: DomainParams,
) -> Result<(), Error> {
    ctx.db
        .mutate(|db| {
            db.as_public_mut()
                .as_server_info_mut()
                .as_acme_mut()
                .transpose_mut()
                .ok_or_else(|| {
                    Error::new(
                        eyre!("Please call `start-cli net acme init` before adding a domain"),
                        ErrorKind::InvalidRequest,
                    )
                })?
                .as_domains_mut()
                .mutate(|domains| {
                    domains.insert(domain);
                    Ok(())
                })
        })
        .await?;
    Ok(())
}
/// `net acme domain remove`: drop `domain` from the configured domain set.
/// A no-op (not an error) when ACME is unconfigured or the domain is absent.
pub async fn remove_domain(
    ctx: RpcContext,
    DomainParams { domain }: DomainParams,
) -> Result<(), Error> {
    ctx.db
        .mutate(|db| {
            match db
                .as_public_mut()
                .as_server_info_mut()
                .as_acme_mut()
                .transpose_mut()
            {
                Some(acme) => acme.as_domains_mut().mutate(|domains| {
                    domains.remove(&domain);
                    Ok(())
                }),
                // ACME never initialized: nothing to remove
                None => Ok(()),
            }
        })
        .await?;
    Ok(())
}
/// `net acme domain list`: returns the configured domain set, or an empty set
/// when ACME has not been initialized.
pub async fn list_domains(ctx: RpcContext) -> Result<BTreeSet<InternedString>, Error> {
    let acme = ctx
        .db
        .peek()
        .await
        .into_public()
        .into_server_info()
        .into_acme()
        .transpose();
    match acme {
        Some(acme) => acme.into_domains().de(),
        None => Ok(BTreeSet::new()),
    }
}

View File

@@ -58,6 +58,7 @@ pub fn dhcp<C: Context>() -> ParentHandler<C> {
"update", "update",
from_fn_async::<_, _, (), Error, (RpcContext, UpdateParams)>(update) from_fn_async::<_, _, (), Error, (RpcContext, UpdateParams)>(update)
.no_display() .no_display()
.with_about("Update IP assigned by dhcp")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }

View File

@@ -98,16 +98,8 @@ impl RequestHandler for Resolver {
) )
.await .await
} }
a => { _ => {
if a != RecordType::AAAA { let res = Header::response_from_request(request.header());
tracing::warn!(
"Non A-Record requested for {}: {:?}",
query.name(),
query.query_type()
);
}
let mut res = Header::response_from_request(request.header());
res.set_response_code(ResponseCode::NXDomain);
response_handle response_handle
.send_response( .send_response(
MessageResponseBuilder::from_message_request(&*request).build( MessageResponseBuilder::from_message_request(&*request).build(

View File

@@ -1,7 +1,9 @@
use std::fmt; use std::fmt;
use std::str::FromStr; use std::str::FromStr;
use clap::builder::ValueParserFactory;
use imbl_value::InternedString; use imbl_value::InternedString;
use models::FromStrParser;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use torut::onion::OnionAddressV3; use torut::onion::OnionAddressV3;
use ts_rs::TS; use ts_rs::TS;
@@ -46,3 +48,10 @@ impl fmt::Display for HostAddress {
} }
} }
} }
impl ValueParserFactory for HostAddress {
type Parser = FromStrParser<Self>;
fn value_parser() -> Self::Parser {
Self::Parser::new()
}
}

View File

@@ -1,3 +1,7 @@
use std::str::FromStr;
use clap::builder::ValueParserFactory;
use models::{FromStrParser, HostId};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ts_rs::TS; use ts_rs::TS;
@@ -5,10 +9,37 @@ use crate::net::forward::AvailablePorts;
use crate::net::vhost::AlpnInfo; use crate::net::vhost::AlpnInfo;
use crate::prelude::*; use crate::prelude::*;
/// Identifies a single binding: a host within a package plus the service's
/// internal port.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, TS)]
#[ts(export)]
#[serde(rename_all = "camelCase")]
pub struct BindId {
    pub id: HostId,
    pub internal_port: u16,
}
impl ValueParserFactory for BindId {
    type Parser = FromStrParser<Self>;
    fn value_parser() -> Self::Parser {
        FromStrParser::new()
    }
}
impl FromStr for BindId {
    type Err = Error;
    /// Parses the CLI form `<host-id>:<port>`.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let (id, port) = s
            .split_once(":")
            .ok_or_else(|| Error::new(eyre!("expected <id>:<port>"), ErrorKind::ParseUrl))?;
        Ok(Self {
            id: id.parse()?,
            internal_port: port.parse()?,
        })
    }
}
#[derive(Debug, Deserialize, Serialize, TS)] #[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[ts(export)] #[ts(export)]
pub struct BindInfo { pub struct BindInfo {
pub enabled: bool,
pub options: BindOptions, pub options: BindOptions,
pub lan: LanInfo, pub lan: LanInfo,
} }
@@ -30,6 +61,7 @@ impl BindInfo {
assigned_ssl_port = Some(available_ports.alloc()?); assigned_ssl_port = Some(available_ports.alloc()?);
} }
Ok(Self { Ok(Self {
enabled: true,
options, options,
lan: LanInfo { lan: LanInfo {
assigned_port, assigned_port,
@@ -69,7 +101,14 @@ impl BindInfo {
available_ports.free([port]); available_ports.free([port]);
} }
} }
Ok(Self { options, lan }) Ok(Self {
enabled: true,
options,
lan,
})
}
pub fn disable(&mut self) {
self.enabled = false;
} }
} }

View File

@@ -1,10 +1,13 @@
use std::collections::{BTreeMap, BTreeSet}; use std::collections::{BTreeMap, BTreeSet};
use clap::Parser;
use imbl_value::InternedString; use imbl_value::InternedString;
use models::{HostId, PackageId}; use models::{HostId, PackageId};
use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use ts_rs::TS; use ts_rs::TS;
use crate::context::{CliContext, RpcContext};
use crate::db::model::DatabaseModel; use crate::db::model::DatabaseModel;
use crate::net::forward::AvailablePorts; use crate::net::forward::AvailablePorts;
use crate::net::host::address::HostAddress; use crate::net::host::address::HostAddress;
@@ -134,3 +137,163 @@ impl Model<Host> {
}) })
} }
} }
// Parameter shared by every `host` subcommand: the package whose hosts are
// being inspected. (`//` on purpose: `///` would alter the clap help text.)
#[derive(Deserialize, Serialize, Parser)]
pub struct HostParams {
    package: PackageId,
}
/// Builds the `host` command tree: `list` host ids plus `address` management,
/// with the package id inherited by the subcommands.
pub fn host<C: Context>() -> ParentHandler<C, HostParams> {
    let list = from_fn_async(list_hosts)
        .with_inherited(|HostParams { package }, _| package)
        .with_custom_display_fn(|_, ids| {
            ids.into_iter().for_each(|id| println!("{id}"));
            Ok(())
        })
        .with_about("List host IDs available for this service");
    let address = address::<C>().with_inherited(|HostParams { package }, _| package);
    ParentHandler::<C, HostParams>::new()
        .subcommand("list", list)
        .subcommand("address", address)
}
/// Lists the host ids declared by the installed package `package`.
///
/// # Errors
/// `NotFound` if the package is not installed.
pub async fn list_hosts(
    ctx: RpcContext,
    _: Empty,
    package: PackageId,
) -> Result<Vec<HostId>, Error> {
    ctx.db
        .peek()
        .await
        .into_public()
        .into_package_data()
        .into_idx(&package)
        .or_not_found(&package)?
        .into_hosts()
        .keys()
}
// Parameter shared by every `address` subcommand: which host of the package
// to operate on. (`//` on purpose: `///` would alter the clap help text.)
#[derive(Deserialize, Serialize, Parser)]
pub struct AddressApiParams {
    host: HostId,
}
/// Builds the `host address` command tree (`add` / `remove` / `list`),
/// forwarding the inherited package id together with the host id.
pub fn address<C: Context>() -> ParentHandler<C, AddressApiParams, PackageId> {
    let add = from_fn_async(add_address)
        .with_inherited(|AddressApiParams { host }, package| (package, host))
        .no_display()
        .with_about("Add an address to this host")
        .with_call_remote::<CliContext>();
    let remove = from_fn_async(remove_address)
        .with_inherited(|AddressApiParams { host }, package| (package, host))
        .no_display()
        .with_about("Remove an address from this host")
        .with_call_remote::<CliContext>();
    let list = from_fn_async(list_addresses)
        .with_inherited(|AddressApiParams { host }, package| (package, host))
        .with_custom_display_fn(|_, res| {
            res.into_iter().for_each(|address| println!("{address}"));
            Ok(())
        })
        .with_about("List addresses for this host")
        .with_call_remote::<CliContext>();
    ParentHandler::<C, AddressApiParams, PackageId>::new()
        .subcommand("add", add)
        .subcommand("remove", remove)
        .subcommand("list", list)
}
// A single address argument, shared by `address add` / `address remove`.
#[derive(Deserialize, Serialize, Parser)]
pub struct AddressParams {
    pub address: HostAddress,
}
/// Adds `address` to the given host of `package`, then re-applies the host's
/// network configuration on the running service.
///
/// For onion addresses, the corresponding private key must already be present
/// in the key store (`get_key` fails otherwise), so an address can never be
/// recorded without the means to serve it.
pub async fn add_address(
    ctx: RpcContext,
    AddressParams { address }: AddressParams,
    (package, host): (PackageId, HostId),
) -> Result<(), Error> {
    ctx.db
        .mutate(|db| {
            // validate we hold the onion service key before recording the address
            if let HostAddress::Onion { address } = address {
                db.as_private()
                    .as_key_store()
                    .as_onion()
                    .get_key(&address)?;
            }
            db.as_public_mut()
                .as_package_data_mut()
                .as_idx_mut(&package)
                .or_not_found(&package)?
                .as_hosts_mut()
                .as_idx_mut(&host)
                .or_not_found(&host)?
                .as_addresses_mut()
                .mutate(|a| Ok(a.insert(address)))
        })
        .await?;
    let service = ctx.services.get(&package).await;
    let service_ref = service.as_ref().or_not_found(&package)?;
    // push the updated host config to the service's networking
    service_ref.update_host(host).await?;
    Ok(())
}
/// Removes `address` from the given host of `package`, then re-applies the
/// host's network configuration on the running service.
///
/// Removing an address that was never present is not an error (set remove).
pub async fn remove_address(
    ctx: RpcContext,
    AddressParams { address }: AddressParams,
    (package, host): (PackageId, HostId),
) -> Result<(), Error> {
    ctx.db
        .mutate(|db| {
            db.as_public_mut()
                .as_package_data_mut()
                .as_idx_mut(&package)
                .or_not_found(&package)?
                .as_hosts_mut()
                .as_idx_mut(&host)
                .or_not_found(&host)?
                .as_addresses_mut()
                .mutate(|a| Ok(a.remove(&address)))
        })
        .await?;
    let service = ctx.services.get(&package).await;
    let service_ref = service.as_ref().or_not_found(&package)?;
    // push the updated host config to the service's networking
    service_ref.update_host(host).await?;
    Ok(())
}
/// Returns the full set of addresses configured on the given host of
/// `package`.
///
/// # Errors
/// `NotFound` if the package or host does not exist.
pub async fn list_addresses(
    ctx: RpcContext,
    _: Empty,
    (package, host): (PackageId, HostId),
) -> Result<BTreeSet<HostAddress>, Error> {
    ctx.db
        .peek()
        .await
        .into_public()
        .into_package_data()
        .into_idx(&package)
        .or_not_found(&package)?
        .into_hosts()
        .into_idx(&host)
        .or_not_found(&host)?
        .into_addresses()
        .de()
}

View File

@@ -1,6 +1,7 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use crate::account::AccountInfo; use crate::account::AccountInfo;
use crate::net::acme::AcmeCertStore;
use crate::net::ssl::CertStore; use crate::net::ssl::CertStore;
use crate::net::tor::OnionStore; use crate::net::tor::OnionStore;
use crate::prelude::*; use crate::prelude::*;
@@ -10,13 +11,15 @@ use crate::prelude::*;
pub struct KeyStore { pub struct KeyStore {
pub onion: OnionStore, pub onion: OnionStore,
pub local_certs: CertStore, pub local_certs: CertStore,
// pub letsencrypt_certs: BTreeMap<BTreeSet<InternedString>, CertData> #[serde(default)]
pub acme: AcmeCertStore,
} }
impl KeyStore { impl KeyStore {
pub fn new(account: &AccountInfo) -> Result<Self, Error> { pub fn new(account: &AccountInfo) -> Result<Self, Error> {
let mut res = Self { let mut res = Self {
onion: OnionStore::new(), onion: OnionStore::new(),
local_certs: CertStore::new(account)?, local_certs: CertStore::new(account)?,
acme: AcmeCertStore::new(),
}; };
res.onion.insert(account.tor_key.clone()); res.onion.insert(account.tor_key.clone());
Ok(res) Ok(res)

View File

@@ -1,5 +1,6 @@
use rpc_toolkit::{Context, ParentHandler}; use rpc_toolkit::{Context, HandlerExt, ParentHandler};
pub mod acme;
pub mod dhcp; pub mod dhcp;
pub mod dns; pub mod dns;
pub mod forward; pub mod forward;
@@ -20,6 +21,16 @@ pub const PACKAGE_CERT_PATH: &str = "/var/lib/embassy/ssl";
pub fn net<C: Context>() -> ParentHandler<C> { pub fn net<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("tor", tor::tor::<C>()) .subcommand(
.subcommand("dhcp", dhcp::dhcp::<C>()) "tor",
tor::tor::<C>().with_about("Tor commands such as list-services, logs, and reset"),
)
.subcommand(
"dhcp",
dhcp::dhcp::<C>().with_about("Command to update IP assigned from dhcp"),
)
.subcommand(
"acme",
acme::acme::<C>().with_about("Setup automatic clearnet certificate acquisition"),
)
} }

View File

@@ -15,8 +15,8 @@ use crate::hostname::Hostname;
use crate::net::dns::DnsController; use crate::net::dns::DnsController;
use crate::net::forward::LanPortForwardController; use crate::net::forward::LanPortForwardController;
use crate::net::host::address::HostAddress; use crate::net::host::address::HostAddress;
use crate::net::host::binding::{AddSslOptions, BindOptions, LanInfo}; use crate::net::host::binding::{AddSslOptions, BindId, BindOptions, LanInfo};
use crate::net::host::{host_for, Host, HostKind}; use crate::net::host::{host_for, Host, HostKind, Hosts};
use crate::net::service_interface::{HostnameInfo, IpHostname, OnionHostname}; use crate::net::service_interface::{HostnameInfo, IpHostname, OnionHostname};
use crate::net::tor::TorController; use crate::net::tor::TorController;
use crate::net::vhost::{AlpnInfo, VHostController}; use crate::net::vhost::{AlpnInfo, VHostController};
@@ -154,14 +154,16 @@ impl NetController {
) -> Result<NetService, Error> { ) -> Result<NetService, Error> {
let dns = self.dns.add(Some(package.clone()), ip).await?; let dns = self.dns.add(Some(package.clone()), ip).await?;
Ok(NetService { let mut res = NetService {
shutdown: false, shutdown: false,
id: package, id: package,
ip, ip,
dns, dns,
controller: Arc::downgrade(self), controller: Arc::downgrade(self),
binds: BTreeMap::new(), binds: BTreeMap::new(),
}) };
res.clear_bindings(Default::default()).await?;
Ok(res)
} }
} }
@@ -221,35 +223,45 @@ impl NetService {
self.update(id, host).await self.update(id, host).await
} }
pub async fn clear_bindings(&mut self) -> Result<(), Error> { pub async fn clear_bindings(&mut self, except: BTreeSet<BindId>) -> Result<(), Error> {
let ctrl = self.net_controller()?; let pkg_id = &self.id;
let hosts = self
.net_controller()?
.db
.mutate(|db| {
let mut res = Hosts::default();
for (host_id, host) in db
.as_public_mut()
.as_package_data_mut()
.as_idx_mut(pkg_id)
.or_not_found(pkg_id)?
.as_hosts_mut()
.as_entries_mut()?
{
host.as_bindings_mut().mutate(|b| {
for (internal_port, info) in b {
if !except.contains(&BindId {
id: host_id.clone(),
internal_port: *internal_port,
}) {
info.disable();
}
}
Ok(())
})?;
res.0.insert(host_id, host.de()?);
}
Ok(res)
})
.await?;
let mut errors = ErrorCollection::new(); let mut errors = ErrorCollection::new();
for (_, binds) in std::mem::take(&mut self.binds) { for (id, host) in hosts.0 {
for (_, (lan, _, hostnames, rc)) in binds.lan { errors.handle(self.update(id, host).await);
drop(rc);
if let Some(external) = lan.assigned_ssl_port {
for hostname in ctrl.server_hostnames.iter().cloned() {
ctrl.vhost.gc(hostname, external).await?;
}
for hostname in hostnames {
ctrl.vhost.gc(Some(hostname), external).await?;
}
}
if let Some(external) = lan.assigned_port {
ctrl.forward.gc(external).await?;
}
}
for (addr, (_, rcs)) in binds.tor {
drop(rcs);
errors.handle(ctrl.tor.gc(Some(addr), None).await);
}
} }
std::mem::take(&mut self.dns);
errors.handle(ctrl.dns.gc(Some(self.id.clone()), self.ip).await);
errors.into_result() errors.into_result()
} }
async fn update(&mut self, id: HostId, host: Host) -> Result<(), Error> { pub async fn update(&mut self, id: HostId, host: Host) -> Result<(), Error> {
let ctrl = self.net_controller()?; let ctrl = self.net_controller()?;
let mut hostname_info = BTreeMap::new(); let mut hostname_info = BTreeMap::new();
let binds = self.binds.entry(id.clone()).or_default(); let binds = self.binds.entry(id.clone()).or_default();
@@ -261,6 +273,9 @@ impl NetService {
let ip_info = server_info.as_ip_info().de()?; let ip_info = server_info.as_ip_info().de()?;
let hostname = server_info.as_hostname().de()?; let hostname = server_info.as_hostname().de()?;
for (port, bind) in &host.bindings { for (port, bind) in &host.bindings {
if !bind.enabled {
continue;
}
let old_lan_bind = binds.lan.remove(port); let old_lan_bind = binds.lan.remove(port);
let lan_bind = old_lan_bind let lan_bind = old_lan_bind
.as_ref() .as_ref()
@@ -315,16 +330,29 @@ impl NetService {
} }
HostAddress::Domain { address } => { HostAddress::Domain { address } => {
if hostnames.insert(address.clone()) { if hostnames.insert(address.clone()) {
let address = Some(address.clone());
rcs.push( rcs.push(
ctrl.vhost ctrl.vhost
.add( .add(
Some(address.clone()), address.clone(),
external, external,
target, target,
connect_ssl.clone(), connect_ssl.clone(),
) )
.await?, .await?,
); );
if ssl.preferred_external_port == 443 {
rcs.push(
ctrl.vhost
.add(
address.clone(),
5443,
target,
connect_ssl.clone(),
)
.await?,
);
}
} }
} }
} }
@@ -348,11 +376,32 @@ impl NetService {
network_interface_id: interface.clone(), network_interface_id: interface.clone(),
public: false, public: false,
hostname: IpHostname::Local { hostname: IpHostname::Local {
value: format!("{hostname}.local"), value: InternedString::from_display(&{
let hostname = &hostname;
lazy_format!("{hostname}.local")
}),
port: new_lan_bind.0.assigned_port, port: new_lan_bind.0.assigned_port,
ssl_port: new_lan_bind.0.assigned_ssl_port, ssl_port: new_lan_bind.0.assigned_ssl_port,
}, },
}); });
for address in host.addresses() {
if let HostAddress::Domain { address } = address {
if let Some(ssl) = &new_lan_bind.1 {
if ssl.preferred_external_port == 443 {
bind_hostname_info.push(HostnameInfo::Ip {
network_interface_id: interface.clone(),
public: false,
hostname: IpHostname::Domain {
domain: address.clone(),
subdomain: None,
port: None,
ssl_port: Some(443),
},
});
}
}
}
}
if let Some(ipv4) = ip_info.ipv4 { if let Some(ipv4) = ip_info.ipv4 {
bind_hostname_info.push(HostnameInfo::Ip { bind_hostname_info.push(HostnameInfo::Ip {
network_interface_id: interface.clone(), network_interface_id: interface.clone(),
@@ -395,7 +444,7 @@ impl NetService {
} }
let mut removed = BTreeSet::new(); let mut removed = BTreeSet::new();
binds.lan.retain(|internal, (external, _, hostnames, _)| { binds.lan.retain(|internal, (external, _, hostnames, _)| {
if host.bindings.contains_key(internal) { if host.bindings.get(internal).map_or(false, |b| b.enabled) {
true true
} else { } else {
removed.insert((*external, std::mem::take(hostnames))); removed.insert((*external, std::mem::take(hostnames)));
@@ -424,6 +473,9 @@ impl NetService {
let mut tor_hostname_ports = BTreeMap::<u16, TorHostnamePorts>::new(); let mut tor_hostname_ports = BTreeMap::<u16, TorHostnamePorts>::new();
let mut tor_binds = OrdMap::<u16, SocketAddr>::new(); let mut tor_binds = OrdMap::<u16, SocketAddr>::new();
for (internal, info) in &host.bindings { for (internal, info) in &host.bindings {
if !info.enabled {
continue;
}
tor_binds.insert( tor_binds.insert(
info.options.preferred_external_port, info.options.preferred_external_port,
SocketAddr::from((self.ip, *internal)), SocketAddr::from((self.ip, *internal)),
@@ -497,6 +549,7 @@ impl NetService {
ctrl.tor.gc(Some(addr.clone()), None).await?; ctrl.tor.gc(Some(addr.clone()), None).await?;
} }
} }
self.net_controller()? self.net_controller()?
.db .db
.mutate(|db| { .mutate(|db| {
@@ -511,7 +564,7 @@ impl NetService {
pub async fn remove_all(mut self) -> Result<(), Error> { pub async fn remove_all(mut self) -> Result<(), Error> {
self.shutdown = true; self.shutdown = true;
if let Some(ctrl) = Weak::upgrade(&self.controller) { if let Some(ctrl) = Weak::upgrade(&self.controller) {
self.clear_bindings().await?; self.clear_bindings(Default::default()).await?;
drop(ctrl); drop(ctrl);
Ok(()) Ok(())
} else { } else {
@@ -566,7 +619,7 @@ impl Drop for NetService {
binds: BTreeMap::new(), binds: BTreeMap::new(),
}, },
); );
tokio::spawn(async move { svc.remove_all().await.unwrap() }); tokio::spawn(async move { svc.remove_all().await.log_err() });
} }
} }
} }

View File

@@ -47,13 +47,16 @@ pub enum IpHostname {
ssl_port: Option<u16>, ssl_port: Option<u16>,
}, },
Local { Local {
value: String, #[ts(type = "string")]
value: InternedString,
port: Option<u16>, port: Option<u16>,
ssl_port: Option<u16>, ssl_port: Option<u16>,
}, },
Domain { Domain {
domain: String, #[ts(type = "string")]
subdomain: Option<String>, domain: InternedString,
#[ts(type = "string | null")]
subdomain: Option<InternedString>,
port: Option<u16>, port: Option<u16>,
ssl_port: Option<u16>, ssl_port: Option<u16>,
}, },

View File

@@ -84,7 +84,7 @@ pub fn rpc_router<C: Context + Clone + AsRef<RpcContinuations>>(
server: HttpServer<C>, server: HttpServer<C>,
) -> Router { ) -> Router {
Router::new() Router::new()
.route("/rpc/*path", post(server)) .route("/rpc/*path", any(server))
.route( .route(
"/ws/rpc/:guid", "/ws/rpc/:guid",
get({ get({

View File

@@ -26,7 +26,7 @@ use ts_rs::TS;
use crate::context::{CliContext, RpcContext}; use crate::context::{CliContext, RpcContext};
use crate::logs::{journalctl, LogSource, LogsParams}; use crate::logs::{journalctl, LogSource, LogsParams};
use crate::prelude::*; use crate::prelude::*;
use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; use crate::util::serde::{display_serializable, Base64, HandlerExtSerde, WithIoFormat};
use crate::util::Invoke; use crate::util::Invoke;
pub const SYSTEMD_UNIT: &str = "tor@default"; pub const SYSTEMD_UNIT: &str = "tor@default";
@@ -59,7 +59,9 @@ impl Model<OnionStore> {
self.insert(&key.public().get_onion_address(), &key) self.insert(&key.public().get_onion_address(), &key)
} }
pub fn get_key(&self, address: &OnionAddressV3) -> Result<TorSecretKeyV3, Error> { pub fn get_key(&self, address: &OnionAddressV3) -> Result<TorSecretKeyV3, Error> {
self.as_idx(address).or_not_found(address)?.de() self.as_idx(address)
.or_not_found(lazy_format!("private key for {address}"))?
.de()
} }
} }
@@ -91,20 +93,102 @@ pub fn tor<C: Context>() -> ParentHandler<C> {
.with_custom_display_fn(|handle, result| { .with_custom_display_fn(|handle, result| {
Ok(display_services(handle.params, result)) Ok(display_services(handle.params, result))
}) })
.with_about("Display Tor V3 Onion Addresses")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("logs", logs()) .subcommand("logs", logs().with_about("Display Tor logs"))
.subcommand( .subcommand(
"logs", "logs",
from_fn_async(crate::logs::cli_logs::<RpcContext, Empty>).no_display(), from_fn_async(crate::logs::cli_logs::<RpcContext, Empty>)
.no_display()
.with_about("Display Tor logs"),
) )
.subcommand( .subcommand(
"reset", "reset",
from_fn_async(reset) from_fn_async(reset)
.no_display() .no_display()
.with_about("Reset Tor daemon")
.with_call_remote::<CliContext>(),
)
.subcommand(
"key",
key::<C>().with_about("Manage the onion service key store"),
)
}
pub fn key<C: Context>() -> ParentHandler<C> {
ParentHandler::new()
.subcommand(
"generate",
from_fn_async(generate_key)
.with_about("Generate an onion service key and add it to the key store")
.with_call_remote::<CliContext>(),
)
.subcommand(
"add",
from_fn_async(add_key)
.with_about("Add an onion service key to the key store")
.with_call_remote::<CliContext>(),
)
.subcommand(
"list",
from_fn_async(list_keys)
.with_custom_display_fn(|_, res| {
for addr in res {
println!("{addr}");
}
Ok(())
})
.with_about("List onion services with keys in the key store")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
/// `tor key generate`: create a fresh onion service key in the key store and
/// return the onion address it serves.
pub async fn generate_key(ctx: RpcContext) -> Result<OnionAddressV3, Error> {
    ctx.db
        .mutate(|db| {
            let key = db
                .as_private_mut()
                .as_key_store_mut()
                .as_onion_mut()
                .new_key()?;
            Ok(key.public().get_onion_address())
        })
        .await
}
// `tor key add` parameter: the 64-byte v3 onion service secret key,
// base64-encoded on the wire.
#[derive(Deserialize, Serialize, Parser)]
pub struct AddKeyParams {
    pub key: Base64<[u8; 64]>,
}
/// Imports an onion service secret key into the key store and returns the
/// onion address it serves.
pub async fn add_key(
    ctx: RpcContext,
    AddKeyParams { key }: AddKeyParams,
) -> Result<OnionAddressV3, Error> {
    let key = TorSecretKeyV3::from(key.0);
    ctx.db
        .mutate(|db| {
            db.as_private_mut()
                .as_key_store_mut()
                .as_onion_mut()
                .insert_key(&key)
        })
        .await?;
    Ok(key.public().get_onion_address())
}
/// `tor key list`: onion addresses whose secret keys are in the key store.
pub async fn list_keys(ctx: RpcContext) -> Result<Vec<OnionAddressV3>, Error> {
    let peeked = ctx.db.peek().await;
    peeked.into_private().into_key_store().into_onion().keys()
}
#[derive(Deserialize, Serialize, Parser, TS)] #[derive(Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")] #[command(rename_all = "kebab-case")]
@@ -307,7 +391,7 @@ async fn torctl(
let logs = journalctl( let logs = journalctl(
LogSource::Unit(SYSTEMD_UNIT), LogSource::Unit(SYSTEMD_UNIT),
0, Some(0),
None, None,
Some("0"), Some("0"),
false, false,

View File

@@ -4,6 +4,7 @@ use std::str::FromStr;
use std::sync::{Arc, Weak}; use std::sync::{Arc, Weak};
use std::time::Duration; use std::time::Duration;
use async_acme::acme::ACME_TLS_ALPN_NAME;
use axum::body::Body; use axum::body::Body;
use axum::extract::Request; use axum::extract::Request;
use axum::response::Response; use axum::response::Response;
@@ -15,31 +16,47 @@ use models::ResultExt;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tokio::io::AsyncWriteExt; use tokio::io::AsyncWriteExt;
use tokio::net::{TcpListener, TcpStream}; use tokio::net::{TcpListener, TcpStream};
use tokio::sync::{Mutex, RwLock}; use tokio::sync::{watch, Mutex, RwLock};
use tokio_rustls::rustls::crypto::CryptoProvider;
use tokio_rustls::rustls::pki_types::{ use tokio_rustls::rustls::pki_types::{
CertificateDer, PrivateKeyDer, PrivatePkcs8KeyDer, ServerName, CertificateDer, PrivateKeyDer, PrivatePkcs8KeyDer, ServerName,
}; };
use tokio_rustls::rustls::server::Acceptor; use tokio_rustls::rustls::server::{Acceptor, ResolvesServerCert};
use tokio_rustls::rustls::sign::CertifiedKey;
use tokio_rustls::rustls::{RootCertStore, ServerConfig}; use tokio_rustls::rustls::{RootCertStore, ServerConfig};
use tokio_rustls::{LazyConfigAcceptor, TlsConnector}; use tokio_rustls::{LazyConfigAcceptor, TlsConnector};
use tokio_stream::wrappers::WatchStream;
use tokio_stream::StreamExt;
use tracing::instrument; use tracing::instrument;
use ts_rs::TS; use ts_rs::TS;
use crate::db::model::Database; use crate::db::model::Database;
use crate::net::acme::AcmeCertCache;
use crate::net::static_server::server_error; use crate::net::static_server::server_error;
use crate::prelude::*; use crate::prelude::*;
use crate::util::io::BackTrackingIO; use crate::util::io::BackTrackingIO;
use crate::util::sync::SyncMutex;
use crate::util::serde::MaybeUtf8String; use crate::util::serde::MaybeUtf8String;
/// Certificate resolver that unconditionally serves one pre-built certified
/// key, ignoring SNI/ALPN in the client hello entirely.
// NOTE(review): appears to back the ACME TLS-ALPN-01 challenge handshake
// (see the acme_tls_alpn_cache of CertifiedKey watchers) — confirm at call sites.
#[derive(Debug)]
struct SingleCertResolver(Arc<CertifiedKey>);
impl ResolvesServerCert for SingleCertResolver {
    fn resolve(&self, _: tokio_rustls::rustls::server::ClientHello) -> Option<Arc<CertifiedKey>> {
        Some(self.0.clone())
    }
}
// not allowed: <=1024, >=32768, 5355, 5432, 9050, 6010, 9051, 5353 // not allowed: <=1024, >=32768, 5355, 5432, 9050, 6010, 9051, 5353
pub struct VHostController { pub struct VHostController {
crypto_provider: Arc<CryptoProvider>,
db: TypedPatchDb<Database>, db: TypedPatchDb<Database>,
servers: Mutex<BTreeMap<u16, VHostServer>>, servers: Mutex<BTreeMap<u16, VHostServer>>,
} }
impl VHostController { impl VHostController {
pub fn new(db: TypedPatchDb<Database>) -> Self { pub fn new(db: TypedPatchDb<Database>) -> Self {
Self { Self {
crypto_provider: Arc::new(tokio_rustls::rustls::crypto::ring::default_provider()),
db, db,
servers: Mutex::new(BTreeMap::new()), servers: Mutex::new(BTreeMap::new()),
} }
@@ -56,7 +73,8 @@ impl VHostController {
let server = if let Some(server) = writable.remove(&external) { let server = if let Some(server) = writable.remove(&external) {
server server
} else { } else {
VHostServer::new(external, self.db.clone()).await? tracing::info!("Listening on {external}");
VHostServer::new(external, self.db.clone(), self.crypto_provider.clone()).await?
}; };
let rc = server let rc = server
.add( .add(
@@ -108,7 +126,11 @@ struct VHostServer {
} }
impl VHostServer { impl VHostServer {
#[instrument(skip_all)] #[instrument(skip_all)]
async fn new(port: u16, db: TypedPatchDb<Database>) -> Result<Self, Error> { async fn new(port: u16, db: TypedPatchDb<Database>, crypto_provider: Arc<CryptoProvider>) -> Result<Self, Error> {
let acme_tls_alpn_cache = Arc::new(SyncMutex::new(BTreeMap::<
InternedString,
watch::Receiver<Option<Arc<CertifiedKey>>>,
>::new()));
// check if port allowed // check if port allowed
let listener = TcpListener::bind(SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), port)) let listener = TcpListener::bind(SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), port))
.await .await
@@ -133,9 +155,11 @@ impl VHostServer {
let mut stream = BackTrackingIO::new(stream); let mut stream = BackTrackingIO::new(stream);
let mapping = mapping.clone(); let mapping = mapping.clone();
let db = db.clone(); let db = db.clone();
let acme_tls_alpn_cache = acme_tls_alpn_cache.clone();
let crypto_provider = crypto_provider.clone();
tokio::spawn(async move { tokio::spawn(async move {
if let Err(e) = async { if let Err(e) = async {
let mid = match LazyConfigAcceptor::new( let mid: tokio_rustls::StartHandshake<&mut BackTrackingIO<TcpStream>> = match LazyConfigAcceptor::new(
Acceptor::default(), Acceptor::default(),
&mut stream, &mut stream,
) )
@@ -206,38 +230,102 @@ impl VHostServer {
.map(|(target, _)| target.clone()) .map(|(target, _)| target.clone())
}; };
if let Some(target) = target { if let Some(target) = target {
let mut tcp_stream = let peek = db.peek().await;
TcpStream::connect(target.addr).await?; let root = peek.as_private().as_key_store().as_local_certs().as_root_cert().de()?;
let hostnames = target_name let mut cfg = match async {
.into_iter() if let Some(acme_settings) = peek.as_public().as_server_info().as_acme().de()? {
.chain( if let Some(domain) = target_name.as_ref().filter(|target_name| acme_settings.domains.contains(*target_name)) {
db.peek() if mid
.await .client_hello()
.into_public() .alpn()
.into_server_info() .into_iter()
.into_ip_info() .flatten()
.into_entries()? .any(|alpn| alpn == ACME_TLS_ALPN_NAME)
.into_iter() {
.flat_map(|(_, ips)| [ let cert = WatchStream::new(
ips.as_ipv4().de().map(|ip| ip.map(IpAddr::V4)), acme_tls_alpn_cache.peek(|c| c.get(&**domain).cloned())
ips.as_ipv6().de().map(|ip| ip.map(IpAddr::V6)) .ok_or_else(|| {
]) Error::new(
.filter_map(|a| a.transpose()) eyre!("No challenge recv available for {domain}"),
.map(|a| a.map(|ip| InternedString::from_display(&ip))) ErrorKind::OpenSsl
.collect::<Result<Vec<_>, _>>()?, )
) })?,
.collect(); );
let key = db tracing::info!("Waiting for verification cert for {domain}");
.mutate(|v| { let cert = cert
v.as_private_mut() .filter(|c| c.is_some())
.as_key_store_mut() .next()
.as_local_certs_mut() .await
.cert_for(&hostnames) .flatten()
}) .ok_or_else(|| {
.await?; Error::new(eyre!("No challenge available for {domain}"), ErrorKind::OpenSsl)
let cfg = ServerConfig::builder() })?;
.with_no_client_auth(); tracing::info!("Verification cert received for {domain}");
let mut cfg = let mut cfg = ServerConfig::builder_with_provider(crypto_provider.clone())
.with_safe_default_protocol_versions()
.with_kind(crate::ErrorKind::OpenSsl)?
.with_no_client_auth()
.with_cert_resolver(Arc::new(SingleCertResolver(cert)));
cfg.alpn_protocols = vec![ACME_TLS_ALPN_NAME.to_vec()];
return Ok(Err(cfg));
} else {
let domains = [domain.to_string()];
let (send, recv) = watch::channel(None);
acme_tls_alpn_cache.mutate(|c| c.insert(domain.clone(), recv));
let cert =
async_acme::rustls_helper::order(
|_, cert| {
send.send_replace(Some(Arc::new(cert)));
Ok(())
},
acme_settings.provider.as_str(),
&domains,
Some(&AcmeCertCache(&db)),
&acme_settings.contact,
)
.await
.with_kind(ErrorKind::OpenSsl)?;
return Ok(Ok(
ServerConfig::builder_with_provider(crypto_provider.clone())
.with_safe_default_protocol_versions()
.with_kind(crate::ErrorKind::OpenSsl)?
.with_no_client_auth()
.with_cert_resolver(Arc::new(SingleCertResolver(Arc::new(cert))))
));
}
}
}
let hostnames = target_name
.into_iter()
.chain(
peek
.as_public()
.as_server_info()
.as_ip_info()
.as_entries()?
.into_iter()
.flat_map(|(_, ips)| [
ips.as_ipv4().de().map(|ip| ip.map(IpAddr::V4)),
ips.as_ipv6().de().map(|ip| ip.map(IpAddr::V6))
])
.filter_map(|a| a.transpose())
.map(|a| a.map(|ip| InternedString::from_display(&ip)))
.collect::<Result<Vec<_>, _>>()?,
)
.collect();
let key = db
.mutate(|v| {
v.as_private_mut()
.as_key_store_mut()
.as_local_certs_mut()
.cert_for(&hostnames)
})
.await?;
let cfg = ServerConfig::builder_with_provider(crypto_provider.clone())
.with_safe_default_protocol_versions()
.with_kind(crate::ErrorKind::OpenSsl)?
.with_no_client_auth();
if mid.client_hello().signature_schemes().contains( if mid.client_hello().signature_schemes().contains(
&tokio_rustls::rustls::SignatureScheme::ED25519, &tokio_rustls::rustls::SignatureScheme::ED25519,
) { ) {
@@ -275,16 +363,34 @@ impl VHostServer {
)), )),
) )
} }
.with_kind(crate::ErrorKind::OpenSsl)?; .with_kind(crate::ErrorKind::OpenSsl)
.map(Ok)
}.await? {
Ok(a) => a,
Err(cfg) => {
tracing::info!("performing ACME auth challenge");
let mut accept = mid.into_stream(Arc::new(cfg));
let io = accept.get_mut().unwrap();
let buffered = io.stop_buffering();
io.write_all(&buffered).await?;
accept.await?;
tracing::info!("ACME auth challenge completed");
return Ok(());
}
};
let mut tcp_stream =
TcpStream::connect(target.addr).await?;
match target.connect_ssl { match target.connect_ssl {
Ok(()) => { Ok(()) => {
let mut client_cfg = let mut client_cfg =
tokio_rustls::rustls::ClientConfig::builder() tokio_rustls::rustls::ClientConfig::builder_with_provider(crypto_provider)
.with_safe_default_protocol_versions()
.with_kind(crate::ErrorKind::OpenSsl)?
.with_root_certificates({ .with_root_certificates({
let mut store = RootCertStore::empty(); let mut store = RootCertStore::empty();
store.add( store.add(
CertificateDer::from( CertificateDer::from(
key.root.to_der()?, root.to_der()?,
), ),
).with_kind(crate::ErrorKind::OpenSsl)?; ).with_kind(crate::ErrorKind::OpenSsl)?;
store store

View File

@@ -7,7 +7,7 @@ use axum::extract::Request;
use axum::Router; use axum::Router;
use axum_server::Handle; use axum_server::Handle;
use bytes::Bytes; use bytes::Bytes;
use futures::future::ready; use futures::future::{ready, BoxFuture};
use futures::FutureExt; use futures::FutureExt;
use helpers::NonDetachingJoinHandle; use helpers::NonDetachingJoinHandle;
use tokio::sync::{oneshot, watch}; use tokio::sync::{oneshot, watch};
@@ -30,8 +30,39 @@ impl SwappableRouter {
} }
} }
#[derive(Clone)] pub struct SwappableRouterService {
pub struct SwappableRouterService(watch::Receiver<Router>); router: watch::Receiver<Router>,
changed: Option<BoxFuture<'static, ()>>,
}
impl SwappableRouterService {
fn router(&self) -> Router {
self.router.borrow().clone()
}
fn changed(&mut self, cx: &mut std::task::Context<'_>) -> Poll<()> {
let mut changed = if let Some(changed) = self.changed.take() {
changed
} else {
let mut router = self.router.clone();
async move {
router.changed().await;
}
.boxed()
};
if changed.poll_unpin(cx).is_ready() {
return Poll::Ready(());
}
self.changed = Some(changed);
Poll::Pending
}
}
impl Clone for SwappableRouterService {
fn clone(&self) -> Self {
Self {
router: self.router.clone(),
changed: None,
}
}
}
impl<B> tower_service::Service<Request<B>> for SwappableRouterService impl<B> tower_service::Service<Request<B>> for SwappableRouterService
where where
B: axum::body::HttpBody<Data = Bytes> + Send + 'static, B: axum::body::HttpBody<Data = Bytes> + Send + 'static,
@@ -42,15 +73,13 @@ where
type Future = <Router as tower_service::Service<Request<B>>>::Future; type Future = <Router as tower_service::Service<Request<B>>>::Future;
#[inline] #[inline]
fn poll_ready(&mut self, cx: &mut std::task::Context<'_>) -> Poll<Result<(), Self::Error>> { fn poll_ready(&mut self, cx: &mut std::task::Context<'_>) -> Poll<Result<(), Self::Error>> {
let mut changed = self.0.changed().boxed(); if self.changed(cx).is_ready() {
if changed.poll_unpin(cx).is_ready() {
return Poll::Ready(Ok(())); return Poll::Ready(Ok(()));
} }
drop(changed); tower_service::Service::<Request<B>>::poll_ready(&mut self.router(), cx)
tower_service::Service::<Request<B>>::poll_ready(&mut self.0.borrow().clone(), cx)
} }
fn call(&mut self, req: Request<B>) -> Self::Future { fn call(&mut self, req: Request<B>) -> Self::Future {
self.0.borrow().clone().call(req) self.router().call(req)
} }
} }
@@ -66,7 +95,10 @@ impl<T> tower_service::Service<T> for SwappableRouter {
Poll::Ready(Ok(())) Poll::Ready(Ok(()))
} }
fn call(&mut self, _: T) -> Self::Future { fn call(&mut self, _: T) -> Self::Future {
ready(Ok(SwappableRouterService(self.0.subscribe()))) ready(Ok(SwappableRouterService {
router: self.0.subscribe(),
changed: None,
}))
} }
} }

View File

@@ -43,18 +43,21 @@ pub fn wifi<C: Context>() -> ParentHandler<C> {
"add", "add",
from_fn_async(add) from_fn_async(add)
.no_display() .no_display()
.with_about("Add wifi ssid and password")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"connect", "connect",
from_fn_async(connect) from_fn_async(connect)
.no_display() .no_display()
.with_about("Connect to wifi network")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"delete", "delete",
from_fn_async(delete) from_fn_async(delete)
.no_display() .no_display()
.with_about("Remove a wifi network")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
@@ -64,10 +67,17 @@ pub fn wifi<C: Context>() -> ParentHandler<C> {
.with_custom_display_fn(|handle, result| { .with_custom_display_fn(|handle, result| {
Ok(display_wifi_info(handle.params, result)) Ok(display_wifi_info(handle.params, result))
}) })
.with_about("List wifi info")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("country", country::<C>()) .subcommand(
.subcommand("available", available::<C>()) "country",
country::<C>().with_about("Command to set country"),
)
.subcommand(
"available",
available::<C>().with_about("Command to list available wifi networks"),
)
} }
pub fn available<C: Context>() -> ParentHandler<C> { pub fn available<C: Context>() -> ParentHandler<C> {
@@ -76,6 +86,7 @@ pub fn available<C: Context>() -> ParentHandler<C> {
from_fn_async(get_available) from_fn_async(get_available)
.with_display_serializable() .with_display_serializable()
.with_custom_display_fn(|handle, result| Ok(display_wifi_list(handle.params, result))) .with_custom_display_fn(|handle, result| Ok(display_wifi_list(handle.params, result)))
.with_about("List available wifi networks")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
@@ -85,6 +96,7 @@ pub fn country<C: Context>() -> ParentHandler<C> {
"set", "set",
from_fn_async(set_country) from_fn_async(set_country)
.no_display() .no_display()
.with_about("Set Country")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }

View File

@@ -7,7 +7,7 @@ use clap::builder::ValueParserFactory;
use clap::Parser; use clap::Parser;
use color_eyre::eyre::eyre; use color_eyre::eyre::eyre;
use imbl_value::InternedString; use imbl_value::InternedString;
use models::PackageId; use models::{FromStrParser, PackageId};
use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use tracing::instrument; use tracing::instrument;
@@ -17,7 +17,6 @@ use crate::backup::BackupReport;
use crate::context::{CliContext, RpcContext}; use crate::context::{CliContext, RpcContext};
use crate::db::model::DatabaseModel; use crate::db::model::DatabaseModel;
use crate::prelude::*; use crate::prelude::*;
use crate::util::clap::FromStrParser;
use crate::util::serde::HandlerExtSerde; use crate::util::serde::HandlerExtSerde;
// #[command(subcommands(list, delete, delete_before, create))] // #[command(subcommands(list, delete, delete_before, create))]
@@ -27,24 +26,28 @@ pub fn notification<C: Context>() -> ParentHandler<C> {
"list", "list",
from_fn_async(list) from_fn_async(list)
.with_display_serializable() .with_display_serializable()
.with_about("List notifications")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"delete", "delete",
from_fn_async(delete) from_fn_async(delete)
.no_display() .no_display()
.with_about("Delete notification for a given id")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"delete-before", "delete-before",
from_fn_async(delete_before) from_fn_async(delete_before)
.no_display() .no_display()
.with_about("Delete notifications preceding a given id")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"create", "create",
from_fn_async(create) from_fn_async(create)
.no_display() .no_display()
.with_about("Persist a newly created notification")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
@@ -253,13 +256,13 @@ impl Map for Notifications {
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub struct Notification { pub struct Notification {
package_id: Option<PackageId>, pub package_id: Option<PackageId>,
created_at: DateTime<Utc>, pub created_at: DateTime<Utc>,
code: u32, pub code: u32,
level: NotificationLevel, pub level: NotificationLevel,
title: String, pub title: String,
message: String, pub message: String,
data: Value, pub data: Value,
} }
#[derive(Debug, Serialize, Deserialize)] #[derive(Debug, Serialize, Deserialize)]

View File

@@ -21,7 +21,7 @@ use crate::disk::OsPartitionInfo;
use crate::net::utils::find_eth_iface; use crate::net::utils::find_eth_iface;
use crate::prelude::*; use crate::prelude::*;
use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile;
use crate::util::io::{open_file, TmpDir}; use crate::util::io::{delete_file, open_file, TmpDir};
use crate::util::serde::IoFormat; use crate::util::serde::IoFormat;
use crate::util::Invoke; use crate::util::Invoke;
use crate::ARCH; use crate::ARCH;
@@ -31,17 +31,19 @@ mod mbr;
pub fn install<C: Context>() -> ParentHandler<C> { pub fn install<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("disk", disk::<C>()) .subcommand("disk", disk::<C>().with_about("Command to list disk info"))
.subcommand( .subcommand(
"execute", "execute",
from_fn_async(execute::<InstallContext>) from_fn_async(execute::<InstallContext>)
.no_display() .no_display()
.with_about("Install StartOS over existing version")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
"reboot", "reboot",
from_fn_async(reboot) from_fn_async(reboot)
.no_display() .no_display()
.with_about("Restart the server")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
@@ -51,6 +53,7 @@ pub fn disk<C: Context>() -> ParentHandler<C> {
"list", "list",
from_fn_async(list) from_fn_async(list)
.no_display() .no_display()
.with_about("List disk info")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
@@ -147,23 +150,6 @@ pub async fn execute<C: Context>(
overwrite |= disk.guid.is_none() && disk.partitions.iter().all(|p| p.guid.is_none()); overwrite |= disk.guid.is_none() && disk.partitions.iter().all(|p| p.guid.is_none());
if !overwrite
&& (disk
.guid
.as_ref()
.map_or(false, |g| g.starts_with("EMBASSY_"))
|| disk
.partitions
.iter()
.flat_map(|p| p.guid.as_ref())
.any(|g| g.starts_with("EMBASSY_")))
{
return Err(Error::new(
eyre!("installing over versions before 0.3.6 is unsupported"),
ErrorKind::InvalidRequest,
));
}
let part_info = partition(&mut disk, overwrite).await?; let part_info = partition(&mut disk, overwrite).await?;
if let Some(efi) = &part_info.efi { if let Some(efi) = &part_info.efi {
@@ -194,18 +180,9 @@ pub async fn execute<C: Context>(
{ {
if let Err(e) = async { if let Err(e) = async {
// cp -r ${guard}/config /tmp/config // cp -r ${guard}/config /tmp/config
if tokio::fs::metadata(guard.path().join("config/upgrade")) delete_file(guard.path().join("config/upgrade")).await?;
.await delete_file(guard.path().join("config/overlay/etc/hostname")).await?;
.is_ok() delete_file(guard.path().join("config/disk.guid")).await?;
{
tokio::fs::remove_file(guard.path().join("config/upgrade")).await?;
}
if tokio::fs::metadata(guard.path().join("config/disk.guid"))
.await
.is_ok()
{
tokio::fs::remove_file(guard.path().join("config/disk.guid")).await?;
}
Command::new("cp") Command::new("cp")
.arg("-r") .arg("-r")
.arg(guard.path().join("config")) .arg(guard.path().join("config"))

View File

@@ -1,35 +0,0 @@
use clap::Parser;
use imbl_value::{json, Value};
use models::PackageId;
use serde::{Deserialize, Serialize};
use crate::context::RpcContext;
use crate::prelude::*;
use crate::Error;
pub fn display_properties(response: Value) {
println!("{}", response);
}
#[derive(Deserialize, Serialize, Parser)]
#[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")]
pub struct PropertiesParam {
id: PackageId,
}
// #[command(display(display_properties))]
pub async fn properties(
ctx: RpcContext,
PropertiesParam { id }: PropertiesParam,
) -> Result<Value, Error> {
match &*ctx.services.get(&id).await {
Some(service) => Ok(json!({
"version": 2,
"data": service.properties().await?
})),
None => Err(Error::new(
eyre!("Could not find a service with id {id}"),
ErrorKind::NotFound,
)),
}
}

View File

@@ -18,14 +18,23 @@ use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat};
pub fn admin_api<C: Context>() -> ParentHandler<C> { pub fn admin_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("signer", signers_api::<C>()) .subcommand(
"signer",
signers_api::<C>().with_about("Commands to add or list signers"),
)
.subcommand("add", from_fn_async(add_admin).no_cli()) .subcommand("add", from_fn_async(add_admin).no_cli())
.subcommand("add", from_fn_async(cli_add_admin).no_display()) .subcommand(
"add",
from_fn_async(cli_add_admin)
.no_display()
.with_about("Add admin signer"),
)
.subcommand( .subcommand(
"list", "list",
from_fn_async(list_admins) from_fn_async(list_admins)
.with_display_serializable() .with_display_serializable()
.with_custom_display_fn(|handle, result| Ok(display_signers(handle.params, result))) .with_custom_display_fn(|handle, result| Ok(display_signers(handle.params, result)))
.with_about("List admin signers")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }
@@ -38,6 +47,7 @@ fn signers_api<C: Context>() -> ParentHandler<C> {
.with_metadata("admin", Value::Bool(true)) .with_metadata("admin", Value::Bool(true))
.with_display_serializable() .with_display_serializable()
.with_custom_display_fn(|handle, result| Ok(display_signers(handle.params, result))) .with_custom_display_fn(|handle, result| Ok(display_signers(handle.params, result)))
.with_about("List signers")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
@@ -46,7 +56,17 @@ fn signers_api<C: Context>() -> ParentHandler<C> {
.with_metadata("admin", Value::Bool(true)) .with_metadata("admin", Value::Bool(true))
.no_cli(), .no_cli(),
) )
.subcommand("add", from_fn_async(cli_add_signer)) .subcommand(
"add",
from_fn_async(cli_add_signer).with_about("Add signer"),
)
.subcommand(
"edit",
from_fn_async(edit_signer)
.with_metadata("admin", Value::Bool(true))
.no_display()
.with_call_remote::<CliContext>(),
)
} }
impl Model<BTreeMap<Guid, SignerInfo>> { impl Model<BTreeMap<Guid, SignerInfo>> {
@@ -130,6 +150,64 @@ pub async fn add_signer(ctx: RegistryContext, signer: SignerInfo) -> Result<Guid
.await .await
} }
#[derive(Debug, Deserialize, Serialize, Parser, TS)]
#[serde(rename_all = "camelCase")]
#[command(rename_all = "kebab-case")]
#[ts(export)]
pub struct EditSignerParams {
pub id: Guid,
#[arg(short = 'n', long)]
pub set_name: Option<String>,
#[arg(short = 'c', long)]
pub add_contact: Vec<ContactInfo>,
#[arg(short = 'k', long)]
pub add_key: Vec<AnyVerifyingKey>,
#[arg(short = 'C', long)]
pub remove_contact: Vec<ContactInfo>,
#[arg(short = 'K', long)]
pub remove_key: Vec<AnyVerifyingKey>,
}
pub async fn edit_signer(
ctx: RegistryContext,
EditSignerParams {
id,
set_name,
add_contact,
add_key,
remove_contact,
remove_key,
}: EditSignerParams,
) -> Result<(), Error> {
ctx.db
.mutate(|db| {
db.as_index_mut()
.as_signers_mut()
.as_idx_mut(&id)
.or_not_found(&id)?
.mutate(|s| {
if let Some(name) = set_name {
s.name = name;
}
s.contact.extend(add_contact);
for rm in remove_contact {
let Some((idx, _)) = s.contact.iter().enumerate().find(|(_, c)| *c == &rm)
else {
continue;
};
s.contact.remove(idx);
}
s.keys.extend(add_key);
for rm in remove_key {
s.keys.remove(&rm);
}
Ok(())
})
})
.await
}
#[derive(Debug, Deserialize, Serialize, Parser)] #[derive(Debug, Deserialize, Serialize, Parser)]
#[command(rename_all = "kebab-case")] #[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]

View File

@@ -255,7 +255,7 @@ impl CallRemote<RegistryContext, RegistryUrlParams> for RpcContext {
.header(CONTENT_TYPE, "application/json") .header(CONTENT_TYPE, "application/json")
.header(ACCEPT, "application/json") .header(ACCEPT, "application/json")
.header(CONTENT_LENGTH, body.len()) .header(CONTENT_LENGTH, body.len())
.header(DEVICE_INFO_HEADER, DeviceInfo::from(self).to_header_value()) // .header(DEVICE_INFO_HEADER, DeviceInfo::from(self).to_header_value())
.body(body) .body(body)
.send() .send()
.await?; .await?;

View File

@@ -18,14 +18,24 @@ use crate::util::serde::{apply_expr, HandlerExtSerde};
pub fn db_api<C: Context>() -> ParentHandler<C> { pub fn db_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("dump", from_fn_async(cli_dump).with_display_serializable()) .subcommand(
"dump",
from_fn_async(cli_dump)
.with_display_serializable()
.with_about("Filter/query db to display tables and records"),
)
.subcommand( .subcommand(
"dump", "dump",
from_fn_async(dump) from_fn_async(dump)
.with_metadata("admin", Value::Bool(true)) .with_metadata("admin", Value::Bool(true))
.no_cli(), .no_cli(),
) )
.subcommand("apply", from_fn_async(cli_apply).no_display()) .subcommand(
"apply",
from_fn_async(cli_apply)
.no_display()
.with_about("Update a db record"),
)
.subcommand( .subcommand(
"apply", "apply",
from_fn_async(apply) from_fn_async(apply)

View File

@@ -15,6 +15,7 @@ use url::Url;
use crate::context::RpcContext; use crate::context::RpcContext;
use crate::prelude::*; use crate::prelude::*;
use crate::registry::context::RegistryContext; use crate::registry::context::RegistryContext;
use crate::util::lshw::{LshwDevice, LshwDisplay, LshwProcessor};
use crate::util::VersionString; use crate::util::VersionString;
use crate::version::VersionT; use crate::version::VersionT;
@@ -26,12 +27,12 @@ pub struct DeviceInfo {
pub os: OsInfo, pub os: OsInfo,
pub hardware: HardwareInfo, pub hardware: HardwareInfo,
} }
impl From<&RpcContext> for DeviceInfo { impl DeviceInfo {
fn from(value: &RpcContext) -> Self { pub async fn load(ctx: &RpcContext) -> Result<Self, Error> {
Self { Ok(Self {
os: OsInfo::from(value), os: OsInfo::from(ctx),
hardware: HardwareInfo::from(value), hardware: HardwareInfo::load(ctx).await?,
} })
} }
} }
impl DeviceInfo { impl DeviceInfo {
@@ -44,11 +45,11 @@ impl DeviceInfo {
.append_pair("hardware.arch", &*self.hardware.arch) .append_pair("hardware.arch", &*self.hardware.arch)
.append_pair("hardware.ram", &self.hardware.ram.to_string()); .append_pair("hardware.ram", &self.hardware.ram.to_string());
for (class, products) in &self.hardware.devices { for device in &self.hardware.devices {
for product in products { url.query_pairs_mut().append_pair(
url.query_pairs_mut() &format!("hardware.device.{}", device.class()),
.append_pair(&format!("hardware.device.{}", class), product); device.product(),
} );
} }
HeaderValue::from_str(url.query().unwrap_or_default()).unwrap() HeaderValue::from_str(url.query().unwrap_or_default()).unwrap()
@@ -80,16 +81,20 @@ impl DeviceInfo {
devices: identity(query) devices: identity(query)
.split_off("hardware.device.") .split_off("hardware.device.")
.into_iter() .into_iter()
.filter_map(|(k, v)| { .filter_map(|(k, v)| match k.strip_prefix("hardware.device.") {
k.strip_prefix("hardware.device.") Some("processor") => Some(LshwDevice::Processor(LshwProcessor {
.map(|k| (k.into(), v.into_owned())) product: v.into_owned(),
})),
Some("display") => Some(LshwDevice::Display(LshwDisplay {
product: v.into_owned(),
})),
Some(class) => {
tracing::warn!("unknown device class: {class}");
None
}
_ => None,
}) })
.fold(BTreeMap::new(), |mut acc, (k, v)| { .collect(),
let mut devs = acc.remove(&k).unwrap_or_default();
devs.push(v);
acc.insert(k, devs);
acc
}),
}, },
}) })
} }
@@ -108,8 +113,8 @@ pub struct OsInfo {
impl From<&RpcContext> for OsInfo { impl From<&RpcContext> for OsInfo {
fn from(_: &RpcContext) -> Self { fn from(_: &RpcContext) -> Self {
Self { Self {
version: crate::version::Current::new().semver(), version: crate::version::Current::default().semver(),
compat: crate::version::Current::new().compat().clone(), compat: crate::version::Current::default().compat().clone(),
platform: InternedString::intern(&*crate::PLATFORM), platform: InternedString::intern(&*crate::PLATFORM),
} }
} }
@@ -122,26 +127,16 @@ pub struct HardwareInfo {
pub arch: InternedString, pub arch: InternedString,
#[ts(type = "number")] #[ts(type = "number")]
pub ram: u64, pub ram: u64,
#[ts(as = "BTreeMap::<String, Vec<String>>")] pub devices: Vec<LshwDevice>,
pub devices: BTreeMap<InternedString, Vec<String>>,
} }
impl HardwareInfo {
impl From<&RpcContext> for HardwareInfo { pub async fn load(ctx: &RpcContext) -> Result<Self, Error> {
fn from(value: &RpcContext) -> Self { let s = ctx.db.peek().await.into_public().into_server_info();
Self { Ok(Self {
arch: InternedString::intern(crate::ARCH), arch: s.as_arch().de()?,
ram: value.hardware.ram, ram: s.as_ram().de()?,
devices: value devices: s.as_devices().de()?,
.hardware })
.devices
.iter()
.fold(BTreeMap::new(), |mut acc, dev| {
let mut devs = acc.remove(dev.class()).unwrap_or_default();
devs.push(dev.product().to_owned());
acc.insert(dev.class().into(), devs);
acc
}),
}
} }
} }

View File

@@ -0,0 +1,126 @@
use std::collections::BTreeMap;
use std::path::PathBuf;
use clap::Parser;
use imbl_value::InternedString;
use itertools::Itertools;
use models::DataUrl;
use rpc_toolkit::{from_fn_async, Context, Empty, HandlerArgs, HandlerExt, ParentHandler};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use crate::context::CliContext;
use crate::prelude::*;
use crate::registry::context::RegistryContext;
use crate::registry::package::index::Category;
use crate::util::serde::{HandlerExtSerde, WithIoFormat};
pub fn info_api<C: Context>() -> ParentHandler<C, WithIoFormat<Empty>> {
ParentHandler::<C, WithIoFormat<Empty>>::new()
.root_handler(
from_fn_async(get_info)
.with_display_serializable()
.with_about("Display registry name, icon, and package categories")
.with_call_remote::<CliContext>(),
)
.subcommand(
"set-name",
from_fn_async(set_name)
.with_metadata("admin", Value::Bool(true))
.no_display()
.with_about("Set the name for the registry")
.with_call_remote::<CliContext>(),
)
.subcommand(
"set-icon",
from_fn_async(set_icon)
.with_metadata("admin", Value::Bool(true))
.no_cli(),
)
.subcommand(
"set-icon",
from_fn_async(cli_set_icon)
.no_display()
.with_about("Set the icon for the registry"),
)
}
#[derive(Debug, Default, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct RegistryInfo {
pub name: Option<String>,
pub icon: Option<DataUrl<'static>>,
#[ts(as = "BTreeMap::<String, Category>")]
pub categories: BTreeMap<InternedString, Category>,
}
pub async fn get_info(ctx: RegistryContext) -> Result<RegistryInfo, Error> {
let peek = ctx.db.peek().await.into_index();
Ok(RegistryInfo {
name: peek.as_name().de()?,
icon: peek.as_icon().de()?,
categories: peek.as_package().as_categories().de()?,
})
}
#[derive(Debug, Deserialize, Serialize, Parser, TS)]
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SetNameParams {
pub name: String,
}
pub async fn set_name(
ctx: RegistryContext,
SetNameParams { name }: SetNameParams,
) -> Result<(), Error> {
ctx.db
.mutate(|db| db.as_index_mut().as_name_mut().ser(&Some(name)))
.await
}
#[derive(Debug, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct SetIconParams {
pub icon: DataUrl<'static>,
}
pub async fn set_icon(
ctx: RegistryContext,
SetIconParams { icon }: SetIconParams,
) -> Result<(), Error> {
ctx.db
.mutate(|db| db.as_index_mut().as_icon_mut().ser(&Some(icon)))
.await
}
#[derive(Debug, Deserialize, Serialize, Parser, TS)]
#[command(rename_all = "kebab-case")]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct CliSetIconParams {
pub icon: PathBuf,
}
pub async fn cli_set_icon(
HandlerArgs {
context: ctx,
parent_method,
method,
params: CliSetIconParams { icon },
..
}: HandlerArgs<CliContext, CliSetIconParams>,
) -> Result<(), Error> {
let data_url = DataUrl::from_path(icon).await?;
ctx.call_remote::<RegistryContext>(
&parent_method.into_iter().chain(method).join("."),
imbl_value::json!({
"icon": data_url,
}),
)
.await?;
Ok(())
}

View File

@@ -28,6 +28,7 @@ pub mod auth;
pub mod context; pub mod context;
pub mod db; pub mod db;
pub mod device_info; pub mod device_info;
pub mod info;
pub mod os; pub mod os;
pub mod package; pub mod package;
pub mod signer; pub mod signer;
@@ -57,52 +58,42 @@ pub async fn get_full_index(ctx: RegistryContext) -> Result<FullIndex, Error> {
ctx.db.peek().await.into_index().de() ctx.db.peek().await.into_index().de()
} }
#[derive(Debug, Default, Deserialize, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct RegistryInfo {
pub name: Option<String>,
pub icon: Option<DataUrl<'static>>,
#[ts(as = "BTreeMap::<String, Category>")]
pub categories: BTreeMap<InternedString, Category>,
}
pub async fn get_info(ctx: RegistryContext) -> Result<RegistryInfo, Error> {
let peek = ctx.db.peek().await.into_index();
Ok(RegistryInfo {
name: peek.as_name().de()?,
icon: peek.as_icon().de()?,
categories: peek.as_package().as_categories().de()?,
})
}
pub fn registry_api<C: Context>() -> ParentHandler<C> { pub fn registry_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand( .subcommand(
"index", "index",
from_fn_async(get_full_index) from_fn_async(get_full_index)
.with_display_serializable() .with_display_serializable()
.with_about("List info including registry name and packages")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("info", info::info_api::<C>())
// set info and categories
.subcommand(
"os",
os::os_api::<C>().with_about("Commands related to OS assets and versions"),
)
.subcommand( .subcommand(
"info", "package",
from_fn_async(get_info) package::package_api::<C>().with_about("Commands to index, add, or get packages"),
.with_display_serializable() )
.with_call_remote::<CliContext>(), .subcommand(
"admin",
admin::admin_api::<C>().with_about("Commands to add or list admins or signers"),
)
.subcommand(
"db",
db::db_api::<C>().with_about("Commands to interact with the db i.e. dump and apply"),
) )
.subcommand("os", os::os_api::<C>())
.subcommand("package", package::package_api::<C>())
.subcommand("admin", admin::admin_api::<C>())
.subcommand("db", db::db_api::<C>())
} }
pub fn registry_router(ctx: RegistryContext) -> Router { pub fn registry_router(ctx: RegistryContext) -> Router {
use axum::extract as x; use axum::extract as x;
use axum::routing::{any, get, post}; use axum::routing::{any, get};
Router::new() Router::new()
.route("/rpc/*path", { .route("/rpc/*path", {
let ctx = ctx.clone(); let ctx = ctx.clone();
post( any(
Server::new(move || ready(Ok(ctx.clone())), registry_api()) Server::new(move || ready(Ok(ctx.clone())), registry_api())
.middleware(Cors::new()) .middleware(Cors::new())
.middleware(Auth::new()) .middleware(Auth::new())

View File

@@ -26,11 +26,26 @@ use crate::util::io::open_file;
pub fn get_api<C: Context>() -> ParentHandler<C> { pub fn get_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("iso", from_fn_async(get_iso).no_cli()) .subcommand("iso", from_fn_async(get_iso).no_cli())
.subcommand("iso", from_fn_async(cli_get_os_asset).no_display()) .subcommand(
"iso",
from_fn_async(cli_get_os_asset)
.no_display()
.with_about("Download iso"),
)
.subcommand("img", from_fn_async(get_img).no_cli()) .subcommand("img", from_fn_async(get_img).no_cli())
.subcommand("img", from_fn_async(cli_get_os_asset).no_display()) .subcommand(
"img",
from_fn_async(cli_get_os_asset)
.no_display()
.with_about("Download img"),
)
.subcommand("squashfs", from_fn_async(get_squashfs).no_cli()) .subcommand("squashfs", from_fn_async(get_squashfs).no_cli())
.subcommand("squashfs", from_fn_async(cli_get_os_asset).no_display()) .subcommand(
"squashfs",
from_fn_async(cli_get_os_asset)
.no_display()
.with_about("Download squashfs"),
)
} }
#[derive(Debug, Deserialize, Serialize, TS)] #[derive(Debug, Deserialize, Serialize, TS)]
@@ -94,7 +109,11 @@ pub async fn get_squashfs(
pub struct CliGetOsAssetParams { pub struct CliGetOsAssetParams {
pub version: Version, pub version: Version,
pub platform: InternedString, pub platform: InternedString,
#[arg(long = "download", short = 'd')] #[arg(
long = "download",
short = 'd',
help = "The path of the directory to download to"
)]
pub download: Option<PathBuf>, pub download: Option<PathBuf>,
#[arg( #[arg(
long = "reverify", long = "reverify",
@@ -119,9 +138,15 @@ async fn cli_get_os_asset(
.. ..
}: HandlerArgs<CliContext, CliGetOsAssetParams>, }: HandlerArgs<CliContext, CliGetOsAssetParams>,
) -> Result<RegistryAsset<Blake3Commitment>, Error> { ) -> Result<RegistryAsset<Blake3Commitment>, Error> {
let ext = method
.iter()
.last()
.or_else(|| parent_method.iter().last())
.unwrap_or(&"bin");
let res = from_value::<RegistryAsset<Blake3Commitment>>( let res = from_value::<RegistryAsset<Blake3Commitment>>(
ctx.call_remote::<RegistryContext>( ctx.call_remote::<RegistryContext>(
&parent_method.into_iter().chain(method).join("."), &parent_method.iter().chain(&method).join("."),
json!({ json!({
"version": version, "version": version,
"platform": platform, "platform": platform,
@@ -133,6 +158,7 @@ async fn cli_get_os_asset(
res.validate(SIG_CONTEXT, res.all_signers())?; res.validate(SIG_CONTEXT, res.all_signers())?;
if let Some(download) = download { if let Some(download) = download {
let download = download.join(format!("startos-{version}_{platform}.{ext}"));
let mut file = AtomicFile::new(&download, None::<&Path>) let mut file = AtomicFile::new(&download, None::<&Path>)
.await .await
.with_kind(ErrorKind::Filesystem)?; .with_kind(ErrorKind::Filesystem)?;

View File

@@ -7,8 +7,21 @@ pub mod sign;
pub fn asset_api<C: Context>() -> ParentHandler<C> { pub fn asset_api<C: Context>() -> ParentHandler<C> {
ParentHandler::new() ParentHandler::new()
.subcommand("add", add::add_api::<C>()) .subcommand("add", add::add_api::<C>())
.subcommand("add", from_fn_async(add::cli_add_asset).no_display()) .subcommand(
"add",
from_fn_async(add::cli_add_asset)
.no_display()
.with_about("Add asset to registry"),
)
.subcommand("sign", sign::sign_api::<C>()) .subcommand("sign", sign::sign_api::<C>())
.subcommand("sign", from_fn_async(sign::cli_sign_asset).no_display()) .subcommand(
.subcommand("get", get::get_api::<C>()) "sign",
from_fn_async(sign::cli_sign_asset)
.no_display()
.with_about("Sign file and add to registry index"),
)
.subcommand(
"get",
get::get_api::<C>().with_about("Commands to download image, iso, or squashfs files"),
)
} }

View File

@@ -15,8 +15,16 @@ pub fn os_api<C: Context>() -> ParentHandler<C> {
"index", "index",
from_fn_async(index::get_os_index) from_fn_async(index::get_os_index)
.with_display_serializable() .with_display_serializable()
.with_about("List index of OS versions")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("asset", asset::asset_api::<C>()) .subcommand(
.subcommand("version", version::version_api::<C>()) "asset",
asset::asset_api::<C>().with_about("Commands to add, sign, or get registry assets"),
)
.subcommand(
"version",
version::version_api::<C>()
.with_about("Commands to add, remove, or list versions or version signers"),
)
} }

View File

@@ -26,6 +26,7 @@ pub fn version_api<C: Context>() -> ParentHandler<C> {
.with_metadata("admin", Value::Bool(true)) .with_metadata("admin", Value::Bool(true))
.with_metadata("get_signer", Value::Bool(true)) .with_metadata("get_signer", Value::Bool(true))
.no_display() .no_display()
.with_about("Add OS version")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
@@ -33,9 +34,13 @@ pub fn version_api<C: Context>() -> ParentHandler<C> {
from_fn_async(remove_version) from_fn_async(remove_version)
.with_metadata("admin", Value::Bool(true)) .with_metadata("admin", Value::Bool(true))
.no_display() .no_display()
.with_about("Remove OS version")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand("signer", signer::signer_api::<C>()) .subcommand(
"signer",
signer::signer_api::<C>().with_about("Add, remove, and list version signers"),
)
.subcommand( .subcommand(
"get", "get",
from_fn_async(get_version) from_fn_async(get_version)
@@ -43,6 +48,7 @@ pub fn version_api<C: Context>() -> ParentHandler<C> {
.with_custom_display_fn(|handle, result| { .with_custom_display_fn(|handle, result| {
Ok(display_version_info(handle.params, result)) Ok(display_version_info(handle.params, result))
}) })
.with_about("Get OS versions and related version info")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }

View File

@@ -21,6 +21,7 @@ pub fn signer_api<C: Context>() -> ParentHandler<C> {
from_fn_async(add_version_signer) from_fn_async(add_version_signer)
.with_metadata("admin", Value::Bool(true)) .with_metadata("admin", Value::Bool(true))
.no_display() .no_display()
.with_about("Add version signer")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
@@ -28,6 +29,7 @@ pub fn signer_api<C: Context>() -> ParentHandler<C> {
from_fn_async(remove_version_signer) from_fn_async(remove_version_signer)
.with_metadata("admin", Value::Bool(true)) .with_metadata("admin", Value::Bool(true))
.no_display() .no_display()
.with_about("Remove version signer")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
.subcommand( .subcommand(
@@ -35,6 +37,7 @@ pub fn signer_api<C: Context>() -> ParentHandler<C> {
from_fn_async(list_version_signers) from_fn_async(list_version_signers)
.with_display_serializable() .with_display_serializable()
.with_custom_display_fn(|handle, result| Ok(display_signers(handle.params, result))) .with_custom_display_fn(|handle, result| Ok(display_signers(handle.params, result)))
.with_about("List version signers and related signer info")
.with_call_remote::<CliContext>(), .with_call_remote::<CliContext>(),
) )
} }

Some files were not shown because too many files have changed in this diff Show More