diff --git a/.github/workflows/startos-iso.yaml b/.github/workflows/startos-iso.yaml index 184c2b0c7..010c79594 100644 --- a/.github/workflows/startos-iso.yaml +++ b/.github/workflows/startos-iso.yaml @@ -71,27 +71,27 @@ jobs: sudo mount -t tmpfs tmpfs . if: ${{ github.event.inputs.runner == 'fast' }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: recursive - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: ${{ env.NODEJS_VERSION }} - name: Set up docker QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - - name: Set up system QEMU - run: sudo apt-get update && sudo apt-get install -y qemu-user-static + - name: Set up system dependencies + run: sudo apt-get update && sudo apt-get install -y qemu-user-static systemd-container squashfuse - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Make run: make ARCH=${{ matrix.arch }} compiled-${{ matrix.arch }}.tar - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: compiled-${{ matrix.arch }}.tar path: compiled-${{ matrix.arch }}.tar @@ -140,7 +140,11 @@ jobs: }')[matrix.platform] }} steps: - - uses: actions/checkout@v3 + - name: Free space + run: rm -rf /opt/hostedtoolcache* + if: ${{ github.event.inputs.runner != 'fast' }} + + - uses: actions/checkout@v4 with: submodules: recursive @@ -162,7 +166,7 @@ jobs: if: ${{ github.event.inputs.runner == 'fast' && (matrix.platform == 'x86_64' || matrix.platform == 'x86_64-nonfree') }} - name: Download compiled artifacts - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: compiled-${{ env.ARCH }}.tar @@ -171,8 +175,10 @@ jobs: - name: Prevent rebuild of compiled artifacts run: | + mkdir -p web/node_modules mkdir -p web/dist/raw touch core/startos/bindings + touch sdk/lib/osBindings mkdir -p container-runtime/dist PLATFORM=${{ matrix.platform }} make -t compiled-${{ 
env.ARCH }}.tar @@ -184,18 +190,18 @@ jobs: run: PLATFORM=${{ matrix.platform }} make img if: ${{ matrix.platform == 'raspberrypi' }} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{ matrix.platform }}.squashfs path: results/*.squashfs - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{ matrix.platform }}.iso path: results/*.iso if: ${{ matrix.platform != 'raspberrypi' }} - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: ${{ matrix.platform }}.img path: results/*.img diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index c6082ac25..0a5eb38e9 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -19,11 +19,11 @@ jobs: name: Run Automated Tests runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: submodules: recursive - - uses: actions/setup-node@v3 + - uses: actions/setup-node@v4 with: node-version: ${{ env.NODEJS_VERSION }} diff --git a/.gitignore b/.gitignore index 1df3692ee..766d876e8 100644 --- a/.gitignore +++ b/.gitignore @@ -20,7 +20,6 @@ secrets.db /ENVIRONMENT.txt /GIT_HASH.txt /VERSION.txt -/eos-*.tar.gz /*.deb /target /*.squashfs diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md new file mode 100644 index 000000000..cb9385f41 --- /dev/null +++ b/DEVELOPMENT.md @@ -0,0 +1,133 @@ +# Setting up your development environment on Debian/Ubuntu + +A step-by-step guide + +> This is the only officially supported build environment. 
+> MacOS has limited build capabilities and Windows requires [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install) + +## Installing dependencies + +Run the following commands one at a time + +```sh +sudo apt update +sudo apt install -y ca-certificates curl gpg build-essential +curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg +echo "deb [arch=$(dpkg-architecture -q DEB_HOST_ARCH) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian bookworm stable" | sudo tee /etc/apt/sources.list.d/docker.list +sudo apt update +sudo apt install -y sed grep gawk jq gzip brotli containerd.io docker-ce docker-ce-cli docker-compose-plugin qemu-user-static binfmt-support squashfs-tools git debspawn rsync b3sum +sudo mkdir -p /etc/debspawn/ +echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml +sudo usermod -aG docker $USER +sudo su $USER +docker run --privileged --rm tonistiigi/binfmt --install all +docker buildx create --use +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # proceed with default installation +curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash +source ~/.bashrc +nvm install 20 +nvm use 20 +``` + +## Cloning the repository + +```sh +git clone --recursive https://github.com/Start9Labs/start-os.git --branch next/minor +cd start-os +``` + +## Building an ISO + +```sh +PLATFORM=$(uname -m) ENVIRONMENT=dev make iso +``` + +This will build an ISO for your current architecture. 
If you are building to run on an architecture other than the one you are currently on, replace `$(uname -m)` with the correct platform for the device (one of `aarch64`, `aarch64-nonfree`, `x86_64`, `x86_64-nonfree`, `raspberrypi`) + +## Creating a VM + +### Install virt-manager + +```sh +sudo apt update +sudo apt install -y virt-manager +sudo usermod -aG libvirt $USER +sudo su $USER +``` + +### Launch virt-manager + +```sh +virt-manager +``` + +### Create new virtual machine + +![Select "Create a new virtual machine"](assets/create-vm/step-1.png) +![Click "Forward"](assets/create-vm/step-2.png) +![Click "Browse"](assets/create-vm/step-3.png) +![Click "+"](assets/create-vm/step-4.png) + +#### make sure to set "Target Path" to the path to your results directory in start-os + +![Create storage pool](assets/create-vm/step-5.png) +![Select storage pool](assets/create-vm/step-6.png) +![Select ISO](assets/create-vm/step-7.png) +![Select "Generic or unknown OS" and click "Forward"](assets/create-vm/step-8.png) +![Set Memory and CPUs](assets/create-vm/step-9.png) +![Create disk](assets/create-vm/step-10.png) +![Name VM](assets/create-vm/step-11.png) +![Create network](assets/create-vm/step-12.png) + +## Updating a VM + +The fastest way to update a VM to your latest code depends on what you changed: + +### UI or startd: + +```sh +PLATFORM=$(uname -m) ENVIRONMENT=dev make update-startbox REMOTE=start9@ +``` + +### Container runtime or debian dependencies: + +```sh +PLATFORM=$(uname -m) ENVIRONMENT=dev make update-deb REMOTE=start9@ +``` + +### Image recipe: + +```sh +PLATFORM=$(uname -m) ENVIRONMENT=dev make update-squashfs REMOTE=start9@ +``` + +--- + +If the device you are building for is not available via ssh, it is also possible to use `magic-wormhole` to send the relevant files. + +### Prerequisites: + +```sh +sudo apt update +sudo apt install -y magic-wormhole +``` + +As before, the fastest way to update a VM to your latest code depends on what you changed. 
Each of the following commands will return a command to paste into the shell of the device you would like to upgrade. + +### UI or startd: + +```sh +PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole +``` + +### Container runtime or debian dependencies: + +```sh +PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole-deb +``` + +### Image recipe: + +```sh +PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole-squashfs +``` diff --git a/Makefile b/Makefile index 7d386dcda..e63419898 100644 --- a/Makefile +++ b/Makefile @@ -6,26 +6,25 @@ BASENAME := $(shell ./basename.sh) PLATFORM := $(shell if [ -f ./PLATFORM.txt ]; then cat ./PLATFORM.txt; else echo unknown; fi) ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi) IMAGE_TYPE=$(shell if [ "$(PLATFORM)" = raspberrypi ]; then echo img; else echo iso; fi) -BINS := core/target/$(ARCH)-unknown-linux-musl/release/startbox core/target/$(ARCH)-unknown-linux-musl/release/containerbox WEB_UIS := web/dist/raw/ui web/dist/raw/setup-wizard web/dist/raw/install-wizard FIRMWARE_ROMS := ./firmware/$(PLATFORM) $(shell jq --raw-output '.[] | select(.platform[] | contains("$(PLATFORM)")) | "./firmware/$(PLATFORM)/" + .id + ".rom.gz"' build/lib/firmware.json) -BUILD_SRC := $(shell git ls-files build) build/lib/depends build/lib/conflicts container-runtime/rootfs.$(ARCH).squashfs $(FIRMWARE_ROMS) +BUILD_SRC := $(shell git ls-files build) build/lib/depends build/lib/conflicts $(FIRMWARE_ROMS) DEBIAN_SRC := $(shell git ls-files debian/) IMAGE_RECIPE_SRC := $(shell git ls-files image-recipe/) STARTD_SRC := core/startos/startd.service $(BUILD_SRC) COMPAT_SRC := $(shell git ls-files system-images/compat/) UTILS_SRC := $(shell git ls-files system-images/utils/) BINFMT_SRC := $(shell git ls-files system-images/binfmt/) -CORE_SRC := $(shell git ls-files -- core ':!:core/startos/bindings/*') $(shell git ls-files --recurse-submodules patch-db) web/dist/static web/patchdb-ui-seed.json 
$(GIT_HASH_FILE) -WEB_SHARED_SRC := $(shell git ls-files web/projects/shared) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules web/config.json patch-db/client/dist web/patchdb-ui-seed.json +CORE_SRC := $(shell git ls-files core) $(shell git ls-files --recurse-submodules patch-db) $(GIT_HASH_FILE) +WEB_SHARED_SRC := $(shell git ls-files web/projects/shared) $(shell ls -p web/ | grep -v / | sed 's/^/web\//g') web/node_modules/.package-lock.json web/config.json patch-db/client/dist web/patchdb-ui-seed.json sdk/dist WEB_UI_SRC := $(shell git ls-files web/projects/ui) WEB_SETUP_WIZARD_SRC := $(shell git ls-files web/projects/setup-wizard) WEB_INSTALL_WIZARD_SRC := $(shell git ls-files web/projects/install-wizard) PATCH_DB_CLIENT_SRC := $(shell git ls-files --recurse-submodules patch-db/client) GZIP_BIN := $(shell which pigz || which gzip) TAR_BIN := $(shell which gtar || which tar) -COMPILED_TARGETS := $(BINS) system-images/compat/docker-images/$(ARCH).tar system-images/utils/docker-images/$(ARCH).tar system-images/binfmt/docker-images/$(ARCH).tar container-runtime/rootfs.$(ARCH).squashfs -ALL_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo cargo-deps/aarch64-unknown-linux-musl/release/pi-beep; fi) $(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then echo cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console; fi') $(PLATFORM_FILE) sdk/lib/test +COMPILED_TARGETS := core/target/$(ARCH)-unknown-linux-musl/release/startbox core/target/$(ARCH)-unknown-linux-musl/release/containerbox system-images/compat/docker-images/$(ARCH).tar system-images/utils/docker-images/$(ARCH).tar system-images/binfmt/docker-images/$(ARCH).tar container-runtime/rootfs.$(ARCH).squashfs +ALL_TARGETS := $(STARTD_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) $(COMPILED_TARGETS) 
cargo-deps/$(ARCH)-unknown-linux-musl/release/startos-backup-fs $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo cargo-deps/aarch64-unknown-linux-musl/release/pi-beep; fi) $(shell /bin/bash -c 'if [[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]; then echo cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console; fi') $(PLATFORM_FILE) ifeq ($(REMOTE),) mkdir = mkdir -p $1 @@ -48,7 +47,7 @@ endif .DELETE_ON_ERROR: -.PHONY: all metadata install clean format cli uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole test +.PHONY: all metadata install clean format cli uis ui reflash deb $(IMAGE_TYPE) squashfs sudo wormhole wormhole-deb test test-core test-sdk test-container-runtime all: $(ALL_TARGETS) @@ -64,6 +63,7 @@ clean: rm -f system-images/**/*.tar rm -rf system-images/compat/target rm -rf core/target + rm -rf core/startos/bindings rm -rf web/.angular rm -f web/config.json rm -rf web/node_modules @@ -79,8 +79,8 @@ clean: rm -rf container-runtime/dist rm -rf container-runtime/node_modules rm -f container-runtime/*.squashfs - rm -rf sdk/dist - rm -rf sdk/node_modules + rm -rf container-runtime/tmp + (cd sdk && make clean) rm -f ENVIRONMENT.txt rm -f PLATFORM.txt rm -f GIT_HASH.txt @@ -89,10 +89,16 @@ clean: format: cd core && cargo +nightly fmt -test: $(CORE_SRC) $(ENVIRONMENT_FILE) - (cd core && cargo build && cargo test) - npm --prefix sdk exec -- prettier -w ./core/startos/bindings/*.ts - (cd sdk && make test) +test: | test-core test-sdk test-container-runtime + +test-core: $(CORE_SRC) $(ENVIRONMENT_FILE) + cd core && cargo build --features=test && cargo test --features=test + +test-sdk: $(shell git ls-files sdk) sdk/lib/osBindings + cd sdk && make test + +test-container-runtime: container-runtime/node_modules $(shell git ls-files container-runtime/src) container-runtime/package.json container-runtime/tsconfig.json + cd container-runtime && npm test cli: cd core && ./install-cli.sh @@ -102,7 +108,7 @@ deb: results/$(BASENAME).deb debian/control: 
build/lib/depends build/lib/conflicts ./debuild/control.sh -results/$(BASENAME).deb: dpkg-build.sh $(DEBIAN_SRC) $(VERSION_FILE) $(PLATFORM_FILE) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) +results/$(BASENAME).deb: dpkg-build.sh $(DEBIAN_SRC) $(ALL_TARGETS) PLATFORM=$(PLATFORM) ./dpkg-build.sh $(IMAGE_TYPE): results/$(BASENAME).$(IMAGE_TYPE) @@ -115,13 +121,15 @@ results/$(BASENAME).$(IMAGE_TYPE) results/$(BASENAME).squashfs: $(IMAGE_RECIPE_S # For creating os images. DO NOT USE install: $(ALL_TARGETS) $(call mkdir,$(DESTDIR)/usr/bin) + $(call mkdir,$(DESTDIR)/usr/sbin) $(call cp,core/target/$(ARCH)-unknown-linux-musl/release/startbox,$(DESTDIR)/usr/bin/startbox) $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/startd) $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-cli) $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/start-sdk) - $(call ln,/usr/bin/startbox,$(DESTDIR)/usr/bin/embassy-cli) if [ "$(PLATFORM)" = "raspberrypi" ]; then $(call cp,cargo-deps/aarch64-unknown-linux-musl/release/pi-beep,$(DESTDIR)/usr/bin/pi-beep); fi if /bin/bash -c '[[ "${ENVIRONMENT}" =~ (^|-)unstable($$|-) ]]'; then $(call cp,cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console,$(DESTDIR)/usr/bin/tokio-console); fi + $(call cp,cargo-deps/$(ARCH)-unknown-linux-musl/release/startos-backup-fs,$(DESTDIR)/usr/bin/startos-backup-fs) + $(call ln,/usr/bin/startos-backup-fs,$(DESTDIR)/usr/sbin/mount.backup-fs) $(call mkdir,$(DESTDIR)/lib/systemd/system) $(call cp,core/startos/startd.service,$(DESTDIR)/lib/systemd/system/startd.service) @@ -140,7 +148,6 @@ install: $(ALL_TARGETS) $(call mkdir,$(DESTDIR)/usr/lib/startos/system-images) $(call cp,system-images/compat/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/compat.tar) $(call cp,system-images/utils/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/utils.tar) - $(call cp,system-images/binfmt/docker-images/$(ARCH).tar,$(DESTDIR)/usr/lib/startos/system-images/binfmt.tar) $(call 
cp,firmware/$(PLATFORM),$(DESTDIR)/usr/lib/startos/firmware) @@ -154,38 +161,80 @@ update-overlay: $(ALL_TARGETS) $(call ssh,"sudo systemctl start startd") wormhole: core/target/$(ARCH)-unknown-linux-musl/release/startbox - @echo "Paste the following command into the shell of your start-os server:" + @echo "Paste the following command into the shell of your StartOS server:" + @echo @wormhole send core/target/$(ARCH)-unknown-linux-musl/release/startbox 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade \"cd /usr/bin && rm startbox && wormhole receive --accept-file %s && chmod +x startbox\"\n", $$3 }' +wormhole-deb: results/$(BASENAME).deb + @echo "Paste the following command into the shell of your StartOS server:" + @echo + @wormhole send results/$(BASENAME).deb 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo /usr/lib/startos/scripts/chroot-and-upgrade '"'"'cd $$(mktemp -d) && wormhole receive --accept-file %s && apt-get install -y --reinstall ./$(BASENAME).deb'"'"'\n", $$3 }' + +wormhole-squashfs: results/$(BASENAME).squashfs + $(eval SQFS_SUM := $(shell b3sum results/$(BASENAME).squashfs | head -c 32)) + $(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}')) + @echo "Paste the following command into the shell of your StartOS server:" + @echo + @wormhole send results/$(BASENAME).squashfs 2>&1 | awk -Winteractive '/wormhole receive/ { printf "sudo sh -c '"'"'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE) && cd /media/startos/images && wormhole receive --accept-file %s && mv $(BASENAME).squashfs $(SQFS_SUM).rootfs && ln -rsf ./$(SQFS_SUM).rootfs ../config/current.rootfs && sync && reboot'"'"'\n", $$3 }' + update: $(ALL_TARGETS) @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi - $(call ssh,"sudo rsync -a --delete --force --info=progress2 /media/embassy/embassyfs/current/ /media/embassy/next/") - $(MAKE) install REMOTE=$(REMOTE) 
SSHPASS=$(SSHPASS) DESTDIR=/media/embassy/next PLATFORM=$(PLATFORM) - $(call ssh,'sudo NO_SYNC=1 /media/embassy/next/usr/lib/startos/scripts/chroot-and-upgrade "apt-get install -y $(shell cat ./build/lib/depends)"') + $(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create') + $(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM) + $(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"') + +update-startbox: core/target/$(ARCH)-unknown-linux-musl/release/startbox # only update binary (faster than full update) + @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi + $(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create') + $(call cp,core/target/$(ARCH)-unknown-linux-musl/release/startbox,/media/startos/next/usr/bin/startbox) + $(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync true') + +update-deb: results/$(BASENAME).deb # better than update, but only available from debian + @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi + $(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create') + $(call mkdir,/media/startos/next/tmp/startos-deb) + $(call cp,results/$(BASENAME).deb,/media/startos/next/tmp/startos-deb/$(BASENAME).deb) + $(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y --reinstall /tmp/startos-deb/$(BASENAME).deb"') + +update-squashfs: results/$(BASENAME).squashfs + @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi + $(eval SQFS_SUM := $(shell b3sum results/$(BASENAME).squashfs)) + $(eval SQFS_SIZE := $(shell du -s --bytes results/$(BASENAME).squashfs | awk '{print $$1}')) + $(call ssh,'/usr/lib/startos/scripts/prune-images $(SQFS_SIZE)') + $(call cp,results/$(BASENAME).squashfs,/media/startos/images/$(SQFS_SUM).rootfs) + 
$(call ssh,'sudo ln -rsf /media/startos/images/$(SQFS_SUM).rootfs /media/startos/config/current.rootfs') + $(call ssh,'sudo reboot') emulate-reflash: $(ALL_TARGETS) @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi - $(call ssh,"sudo rsync -a --delete --force --info=progress2 /media/embassy/embassyfs/current/ /media/embassy/next/") - $(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/embassy/next PLATFORM=$(PLATFORM) - $(call ssh,"sudo touch /media/embassy/config/upgrade && sudo rm -f /media/embassy/config/disk.guid && sudo sync && sudo reboot") + $(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create') + $(MAKE) install REMOTE=$(REMOTE) SSHPASS=$(SSHPASS) DESTDIR=/media/startos/next PLATFORM=$(PLATFORM) + $(call ssh,'sudo rm -f /media/startos/config/disk.guid') + $(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y $(shell cat ./build/lib/depends)"') upload-ota: results/$(BASENAME).squashfs TARGET=$(TARGET) KEY=$(KEY) ./upload-ota.sh -container-runtime/alpine.$(ARCH).squashfs: +container-runtime/debian.$(ARCH).squashfs: ARCH=$(ARCH) ./container-runtime/download-base-image.sh container-runtime/node_modules: container-runtime/package.json container-runtime/package-lock.json sdk/dist npm --prefix container-runtime ci touch container-runtime/node_modules -core/startos/bindings: $(shell git ls-files -- core ':!:core/startos/bindings/*') $(ENVIRONMENT_FILE) - rm -rf core/startos/bindings - (cd core/ && cargo test --features=test) +sdk/lib/osBindings: core/startos/bindings + mkdir -p sdk/lib/osBindings ls core/startos/bindings/*.ts | sed 's/core\/startos\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' > core/startos/bindings/index.ts - npm --prefix sdk exec -- prettier -w ./core/startos/bindings/*.ts + npm --prefix sdk exec -- prettier --config ./sdk/package.json -w ./core/startos/bindings/*.ts + rsync -ac --delete core/startos/bindings/ 
sdk/lib/osBindings/ + touch sdk/lib/osBindings -sdk/dist: $(shell git ls-files sdk) core/startos/bindings +core/startos/bindings: $(shell git ls-files core) $(ENVIRONMENT_FILE) + rm -rf core/startos/bindings + (cd core/ && cargo test --features=test 'export_bindings_') + touch core/startos/bindings + +sdk/dist: $(shell git ls-files sdk) sdk/lib/osBindings (cd sdk && make bundle) # TODO: make container-runtime its own makefile? @@ -196,7 +245,7 @@ container-runtime/dist/node_modules container-runtime/dist/package.json containe ./container-runtime/install-dist-deps.sh touch container-runtime/dist/node_modules -container-runtime/rootfs.$(ARCH).squashfs: container-runtime/alpine.$(ARCH).squashfs container-runtime/containerRuntime.rc container-runtime/update-image.sh container-runtime/dist/index.js container-runtime/dist/node_modules core/target/$(ARCH)-unknown-linux-musl/release/containerbox | sudo +container-runtime/rootfs.$(ARCH).squashfs: container-runtime/debian.$(ARCH).squashfs container-runtime/container-runtime.service container-runtime/update-image.sh container-runtime/deb-install.sh container-runtime/dist/index.js container-runtime/dist/node_modules core/target/$(ARCH)-unknown-linux-musl/release/containerbox | sudo ARCH=$(ARCH) ./container-runtime/update-image.sh build/lib/depends build/lib/conflicts: build/dpkg-deps/* @@ -214,22 +263,33 @@ system-images/utils/docker-images/$(ARCH).tar: $(UTILS_SRC) system-images/binfmt/docker-images/$(ARCH).tar: $(BINFMT_SRC) cd system-images/binfmt && make docker-images/$(ARCH).tar && touch docker-images/$(ARCH).tar -$(BINS): $(CORE_SRC) $(ENVIRONMENT_FILE) - cd core && ARCH=$(ARCH) ./build-prod.sh - touch $(BINS) +core/target/$(ARCH)-unknown-linux-musl/release/startbox: $(CORE_SRC) web/dist/static web/patchdb-ui-seed.json $(ENVIRONMENT_FILE) + ARCH=$(ARCH) ./core/build-startbox.sh + touch core/target/$(ARCH)-unknown-linux-musl/release/startbox -web/node_modules: web/package.json sdk/dist - (cd sdk && make bundle) 
+core/target/$(ARCH)-unknown-linux-musl/release/containerbox: $(CORE_SRC) $(ENVIRONMENT_FILE) + ARCH=$(ARCH) ./core/build-containerbox.sh + touch core/target/$(ARCH)-unknown-linux-musl/release/containerbox + +web/node_modules/.package-lock.json: web/package.json sdk/dist npm --prefix web ci + touch web/node_modules/.package-lock.json -web/dist/raw/ui: $(WEB_UI_SRC) $(WEB_SHARED_SRC) +web/.angular: patch-db/client/dist sdk/dist web/node_modules/.package-lock.json + rm -rf web/.angular + mkdir -p web/.angular + +web/dist/raw/ui: $(WEB_UI_SRC) $(WEB_SHARED_SRC) web/.angular npm --prefix web run build:ui + touch web/dist/raw/ui -web/dist/raw/setup-wizard: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) +web/dist/raw/setup-wizard: $(WEB_SETUP_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular npm --prefix web run build:setup + touch web/dist/raw/setup-wizard -web/dist/raw/install-wizard: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_SRC) +web/dist/raw/install-wizard: $(WEB_INSTALL_WIZARD_SRC) $(WEB_SHARED_SRC) web/.angular npm --prefix web run build:install + touch web/dist/raw/install-wizard web/dist/static: $(WEB_UIS) $(ENVIRONMENT_FILE) ./compress-uis.sh @@ -243,10 +303,11 @@ web/patchdb-ui-seed.json: web/package.json patch-db/client/node_modules: patch-db/client/package.json npm --prefix patch-db/client ci + touch patch-db/client/node_modules patch-db/client/dist: $(PATCH_DB_CLIENT_SRC) patch-db/client/node_modules - ! 
test -d patch-db/client/dist || rm -rf patch-db/client/dist - npm --prefix web run build:deps + rm -rf patch-db/client/dist + npm --prefix patch-db/client run build # used by github actions compiled-$(ARCH).tar: $(COMPILED_TARGETS) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) $(VERSION_FILE) @@ -261,5 +322,8 @@ ui: web/dist/raw/ui cargo-deps/aarch64-unknown-linux-musl/release/pi-beep: ARCH=aarch64 ./build-cargo-dep.sh pi-beep -cargo-deps/$(ARCH)-unknown-linux-gnu/release/tokio-console: | sudo - ARCH=$(ARCH) ./build-cargo-dep.sh tokio-console +cargo-deps/$(ARCH)-unknown-linux-musl/release/tokio-console: + ARCH=$(ARCH) PREINSTALL="apk add musl-dev pkgconfig" ./build-cargo-dep.sh tokio-console + +cargo-deps/$(ARCH)-unknown-linux-musl/release/startos-backup-fs: + ARCH=$(ARCH) PREINSTALL="apk add fuse3 fuse3-dev fuse3-static musl-dev pkgconfig" ./build-cargo-dep.sh --git https://github.com/Start9Labs/start-fs.git startos-backup-fs diff --git a/assets/create-vm/step-1.png b/assets/create-vm/step-1.png new file mode 100644 index 000000000..2dfafc25f Binary files /dev/null and b/assets/create-vm/step-1.png differ diff --git a/assets/create-vm/step-10.png b/assets/create-vm/step-10.png new file mode 100644 index 000000000..bc1985394 Binary files /dev/null and b/assets/create-vm/step-10.png differ diff --git a/assets/create-vm/step-11.png b/assets/create-vm/step-11.png new file mode 100644 index 000000000..322dd5394 Binary files /dev/null and b/assets/create-vm/step-11.png differ diff --git a/assets/create-vm/step-12.png b/assets/create-vm/step-12.png new file mode 100644 index 000000000..52f14f56e Binary files /dev/null and b/assets/create-vm/step-12.png differ diff --git a/assets/create-vm/step-2.png b/assets/create-vm/step-2.png new file mode 100644 index 000000000..020f3d7d1 Binary files /dev/null and b/assets/create-vm/step-2.png differ diff --git a/assets/create-vm/step-3.png b/assets/create-vm/step-3.png new file mode 100644 index 000000000..0295ba6bc Binary files /dev/null 
and b/assets/create-vm/step-3.png differ diff --git a/assets/create-vm/step-4.png b/assets/create-vm/step-4.png new file mode 100644 index 000000000..85832d260 Binary files /dev/null and b/assets/create-vm/step-4.png differ diff --git a/assets/create-vm/step-5.png b/assets/create-vm/step-5.png new file mode 100644 index 000000000..d34cb16a7 Binary files /dev/null and b/assets/create-vm/step-5.png differ diff --git a/assets/create-vm/step-6.png b/assets/create-vm/step-6.png new file mode 100644 index 000000000..92f2c2f1f Binary files /dev/null and b/assets/create-vm/step-6.png differ diff --git a/assets/create-vm/step-7.png b/assets/create-vm/step-7.png new file mode 100644 index 000000000..ad8eb9b81 Binary files /dev/null and b/assets/create-vm/step-7.png differ diff --git a/assets/create-vm/step-8.png b/assets/create-vm/step-8.png new file mode 100644 index 000000000..ce5443c76 Binary files /dev/null and b/assets/create-vm/step-8.png differ diff --git a/assets/create-vm/step-9.png b/assets/create-vm/step-9.png new file mode 100644 index 000000000..042735490 Binary files /dev/null and b/assets/create-vm/step-9.png differ diff --git a/build-cargo-dep.sh b/build-cargo-dep.sh index 9e20f0caf..c32e4f8ae 100755 --- a/build-cargo-dep.sh +++ b/build-cargo-dep.sh @@ -17,9 +17,18 @@ if [ -z "$ARCH" ]; then ARCH=$(uname -m) fi -mkdir -p cargo-deps -alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$(pwd)"/cargo-deps:/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl' +DOCKER_PLATFORM="linux/${ARCH}" +if [ "$ARCH" = aarch64 ]; then + DOCKER_PLATFORM="linux/arm64" +elif [ "$ARCH" = x86_64 ]; then + DOCKER_PLATFORM="linux/amd64" +fi -rust-musl-builder cargo install "$1" --target-dir /home/rust/src --target=$ARCH-unknown-linux-musl +mkdir -p cargo-deps +alias 'rust-musl-builder'='docker run $USE_TTY --platform=${DOCKER_PLATFORM} --rm -e "RUSTFLAGS=$RUSTFLAGS" -v 
"$HOME/.cargo/registry":/root/.cargo/registry -v "$(pwd)"/cargo-deps:/home/rust/src -w /home/rust/src -P rust:alpine' + +PREINSTALL=${PREINSTALL:-true} + +rust-musl-builder sh -c "$PREINSTALL && cargo install $* --target-dir /home/rust/src --target=$ARCH-unknown-linux-musl" sudo chown -R $USER cargo-deps sudo chown -R $USER ~/.cargo \ No newline at end of file diff --git a/build/README.md b/build/README.md deleted file mode 100644 index 3bab01866..000000000 --- a/build/README.md +++ /dev/null @@ -1,107 +0,0 @@ -# Building StartOS - -⚠️ The commands given assume a Debian or Ubuntu-based environment. _Building in -a VM is NOT yet supported_ ⚠️ - -## Prerequisites - -1. Install dependencies - -- Avahi - - `sudo apt install -y avahi-daemon` - - Installed by default on most Debian systems - https://avahi.org -- Build Essentials (needed to run `make`) - - `sudo apt install -y build-essential` -- Docker - - `curl -fsSL https://get.docker.com | sh` - - https://docs.docker.com/get-docker - - Add your user to the docker group: `sudo usermod -a -G docker $USER` - - Reload user environment `exec sudo su -l $USER` -- Prepare Docker environment - - Setup buildx (https://docs.docker.com/buildx/working-with-buildx/) - - Create a builder: `docker buildx create --use` - - Add multi-arch build ability: - `docker run --rm --privileged linuxkit/binfmt:v0.8` -- Node Version 12+ - - snap: `sudo snap install node` - - [nvm](https://github.com/nvm-sh/nvm#installing-and-updating): - `nvm install --lts` - - https://nodejs.org/en/docs -- NPM Version 7+ - - apt: `sudo apt install -y npm` - - [nvm](https://github.com/nvm-sh/nvm#installing-and-updating): - `nvm install --lts` - - https://docs.npmjs.com/downloading-and-installing-node-js-and-npm -- jq - - `sudo apt install -y jq` - - https://stedolan.github.io/jq -- yq - - snap: `sudo snap install yq` - - binaries: https://github.com/mikefarah/yq/releases/ - - https://mikefarah.gitbook.io/yq - -2. 
Clone the latest repo with required submodules - > :information_source: You chan check latest available version - > [here](https://github.com/Start9Labs/start-os/releases) - ``` - git clone --recursive https://github.com/Start9Labs/start-os.git --branch latest - ``` - -## Build Raspberry Pi Image - -``` -cd start-os -make embassyos-raspi.img ARCH=aarch64 -``` - -## Flash - -Flash the resulting `embassyos-raspi.img` to your SD Card - -We recommend [Balena Etcher](https://www.balena.io/etcher/) - -## Setup - -Visit http://start.local from any web browser - We recommend -[Firefox](https://www.mozilla.org/firefox/browsers) - -Enter your product key. This is generated during the build process and can be -found in `product_key.txt`, located in the root directory. - -## Troubleshooting - -1. I just flashed my SD card, fired up StartOS, bootup sounds and all, but my - browser is saying "Unable to connect" with start.local. - -- Try doing a hard refresh on your browser, or opening the url in a - private/incognito window. If you've ran an instance of StartOS before, - sometimes you can have a stale cache that will block you from navigating to - the page. - -2. Flashing the image isn't working with balenaEtcher. I'm getting - `Cannot read property 'message' of null` when I try. - -- The latest versions of Balena may not flash properly. This version here: - https://github.com/balena-io/etcher/releases/tag/v1.5.122 should work - properly. - -3. Startup isn't working properly and I'm curious as to why. How can I view logs - regarding startup for debugging? - -- Find the IP of your device -- Run `nc 8080` and it will print the logs - -4. I need to ssh into my server to fix something, but I cannot get to the - console to add ssh keys normally. - -- During the Build step, instead of running just - `make embassyos-raspi.img ARCH=aarch64` run - `ENVIRONMENT=dev make embassyos-raspi.img ARCH=aarch64`. Flash like normal, - and insert into your server. 
Boot up StartOS, then on another computer on - the same network, ssh into the the server with the username `start9` password - `embassy`. - -4. I need to reset my password, how can I do that? - -- You will need to reflash your device. Select "Use Existing Drive" once you are - in setup, and it will prompt you to set a new password. diff --git a/build/RELEASE.md b/build/RELEASE.md deleted file mode 100644 index 8d640d454..000000000 --- a/build/RELEASE.md +++ /dev/null @@ -1,76 +0,0 @@ -# Release Process - -## `embassyos_0.3.x-1_amd64.deb` - -- Description: debian package for x86_64 - intended to be installed on pureos -- Destination: GitHub Release Tag -- Requires: N/A -- Build steps: - - Clone `https://github.com/Start9Labs/embassy-os-deb` at `master` - - Run `make TAG=master` from that folder -- Artifact: `./embassyos_0.3.x-1_amd64.deb` - -## `eos---_amd64.iso` - -- Description: live usb image for x86_64 -- Destination: GitHub Release Tag -- Requires: `embassyos_0.3.x-1_amd64.deb` -- Build steps: - - Clone `https://github.com/Start9Labs/eos-image-recipes` at `master` - - Copy `embassyos_0.3.x-1_amd64.deb` to - `overlays/vendor/root/embassyos_0.3.x-1_amd64.deb` - - Run `./run-local-build.sh byzantium` from that folder -- Artifact: `./results/eos---_amd64.iso` - -## `eos.x86_64.squashfs` - -- Description: compressed embassyOS x86_64 filesystem image -- Destination: GitHub Release Tag, Registry @ - `resources/eos//eos.x86_64.squashfs` -- Requires: `eos---_amd64.iso` -- Build steps: - - From `https://github.com/Start9Labs/eos-image-recipes` at `master` - - `./extract-squashfs.sh results/eos---_amd64.iso` (run on Linux) -- Artifact: `./results/eos.x86_64.squashfs` - -## `eos.raspberrypi.squashfs` - -- Description: compressed embassyOS raspberrypi filesystem image -- Destination: GitHub Release Tag, Registry @ - `resources/eos//eos.raspberrypi.squashfs` -- Requires: N/A -- Build steps: - - Clone `https://github.com/Start9Labs/embassy-os` at `master` - - `make 
embassyos-raspi.img` - - flash `embassyos-raspi.img` to raspberry pi - - boot raspberry pi with ethernet - - wait for chime - - you can watch logs using `nc 8080` - - unplug raspberry pi, put sd card back in build machine - - `./build/raspberry-pi/rip-image.sh` -- Artifact: `./eos.raspberrypi.squashfs` - -## `lite-upgrade.img` - -- Description: update image for users coming from 0.3.2.1 and before -- Destination: Registry @ `resources/eos//eos.img` -- Requires: `eos.raspberrypi.squashfs` -- Build steps: - - From `https://github.com/Start9Labs/embassy-os` at `master` - - `make lite-upgrade.img` -- Artifact `./lite-upgrade.img` - -## `eos---_raspberrypi.tar.gz` - -- Description: pre-initialized raspberrypi image -- Destination: GitHub Release Tag (as tar.gz) -- Requires: `eos.raspberrypi.squashfs` -- Build steps: - - From `https://github.com/Start9Labs/embassy-os` at `master` - - `make eos_raspberrypi.img` - - `tar --format=posix -cS -f- eos---_raspberrypi.img | gzip > eos---_raspberrypi.tar.gz` -- Artifact `./eos---_raspberrypi.tar.gz` - -## `embassy-sdk` - -- Build and deploy to all registries \ No newline at end of file diff --git a/build/dpkg-deps/depends b/build/dpkg-deps/depends index 5209b5421..3ccaee4d6 100644 --- a/build/dpkg-deps/depends +++ b/build/dpkg-deps/depends @@ -1,5 +1,6 @@ avahi-daemon avahi-utils +b3sum bash-completion beep bmon @@ -14,10 +15,12 @@ e2fsprogs ecryptfs-utils exfatprogs flashrom +fuse3 grub-common htop httpdirfs iotop +iptables iw jq libyajl2 diff --git a/build/lib/scripts/add-apt-sources b/build/lib/scripts/add-apt-sources index 638d8dad6..9d4f54a28 100755 --- a/build/lib/scripts/add-apt-sources +++ b/build/lib/scripts/add-apt-sources @@ -4,6 +4,3 @@ set -e curl -fsSL https://deb.torproject.org/torproject.org/A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89.asc | gpg --dearmor -o- > /usr/share/keyrings/tor-archive-keyring.gpg echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/tor-archive-keyring.gpg] 
https://deb.torproject.org/torproject.org bullseye main" > /etc/apt/sources.list.d/tor.list - -curl -fsSL https://download.docker.com/linux/debian/gpg | gpg --dearmor -o- > /usr/share/keyrings/docker-archive-keyring.gpg -echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian bullseye stable" > /etc/apt/sources.list.d/docker.list diff --git a/build/lib/scripts/chroot-and-upgrade b/build/lib/scripts/chroot-and-upgrade index f95e49924..3af93f5ef 100755 --- a/build/lib/scripts/chroot-and-upgrade +++ b/build/lib/scripts/chroot-and-upgrade @@ -1,46 +1,99 @@ #!/bin/bash +SOURCE_DIR="$(dirname "${BASH_SOURCE[0]}")" + if [ "$UID" -ne 0 ]; then >&2 echo 'Must be run as root' exit 1 fi +POSITIONAL_ARGS=() + +while [[ $# -gt 0 ]]; do + case $1 in + --no-sync) + NO_SYNC=1 + shift + ;; + --create) + ONLY_CREATE=1 + shift + ;; + -*|--*) + echo "Unknown option $1" + exit 1 + ;; + *) + POSITIONAL_ARGS+=("$1") # save positional arg + shift # past argument + ;; + esac +done + +set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters + if [ -z "$NO_SYNC" ]; then echo 'Syncing...' 
- rsync -a --delete --force --info=progress2 /media/embassy/embassyfs/current/ /media/embassy/next + umount -R /media/startos/next 2> /dev/null + umount -R /media/startos/upper 2> /dev/null + rm -rf /media/startos/upper /media/startos/next + mkdir /media/startos/upper + mount -t tmpfs tmpfs /media/startos/upper + mkdir -p /media/startos/upper/data /media/startos/upper/work /media/startos/next + mount -t overlay \ + -olowerdir=/media/startos/current,upperdir=/media/startos/upper/data,workdir=/media/startos/upper/work \ + overlay /media/startos/next fi -mkdir -p /media/embassy/next/run -mkdir -p /media/embassy/next/dev -mkdir -p /media/embassy/next/sys -mkdir -p /media/embassy/next/proc -mkdir -p /media/embassy/next/boot -mount --bind /run /media/embassy/next/run -mount --bind /dev /media/embassy/next/dev -mount --bind /sys /media/embassy/next/sys -mount --bind /proc /media/embassy/next/proc -mount --bind /boot /media/embassy/next/boot +if [ -n "$ONLY_CREATE" ]; then + exit 0 +fi + +mkdir -p /media/startos/next/run +mkdir -p /media/startos/next/dev +mkdir -p /media/startos/next/sys +mkdir -p /media/startos/next/proc +mkdir -p /media/startos/next/boot +mount --bind /run /media/startos/next/run +mount --bind /tmp /media/startos/next/tmp +mount --bind /dev /media/startos/next/dev +mount --bind /sys /media/startos/next/sys +mount --bind /proc /media/startos/next/proc +mount --bind /boot /media/startos/next/boot if [ -z "$*" ]; then - chroot /media/embassy/next + chroot /media/startos/next CHROOT_RES=$? else - chroot /media/embassy/next "$SHELL" -c "$*" + chroot /media/startos/next "$SHELL" -c "$*" CHROOT_RES=$? 
fi -umount /media/embassy/next/run -umount /media/embassy/next/dev -umount /media/embassy/next/sys -umount /media/embassy/next/proc -umount /media/embassy/next/boot +umount /media/startos/next/run +umount /media/startos/next/tmp +umount /media/startos/next/dev +umount /media/startos/next/sys +umount /media/startos/next/proc +umount /media/startos/next/boot if [ "$CHROOT_RES" -eq 0 ]; then + + if [ -h /media/startos/config/current.rootfs ] && [ -e /media/startos/config/current.rootfs ]; then + ${SOURCE_DIR}/prune-images $(du -s --bytes /media/startos/next | awk '{print $1}') + fi + echo 'Upgrading...' - touch /media/embassy/config/upgrade + time mksquashfs /media/startos/next /media/startos/images/next.squashfs -b 4096 -comp gzip + hash=$(b3sum /media/startos/images/next.squashfs | head -c 32) + mv /media/startos/images/next.squashfs /media/startos/images/${hash}.rootfs + ln -rsf /media/startos/images/${hash}.rootfs /media/startos/config/current.rootfs sync reboot -fi \ No newline at end of file +fi + +umount -R /media/startos/next +umount -R /media/startos/upper +rm -rf /media/startos/upper /media/startos/next \ No newline at end of file diff --git a/build/lib/scripts/embassy-initramfs-module b/build/lib/scripts/embassy-initramfs-module deleted file mode 100755 index 2a2f08a07..000000000 --- a/build/lib/scripts/embassy-initramfs-module +++ /dev/null @@ -1,98 +0,0 @@ -# Local filesystem mounting -*- shell-script -*- - -# -# This script overrides local_mount_root() in /scripts/local -# and mounts root as a read-only filesystem with a temporary (rw) -# overlay filesystem. -# - -. /scripts/local - -local_mount_root() -{ - echo 'using embassy initramfs module' - - local_top - local_device_setup "${ROOT}" "root file system" - ROOT="${DEV}" - - # Get the root filesystem type if not set - if [ -z "${ROOTFSTYPE}" ]; then - FSTYPE=$(get_fstype "${ROOT}") - else - FSTYPE=${ROOTFSTYPE} - fi - - local_premount - - # CHANGES TO THE ORIGINAL FUNCTION BEGIN HERE - # N.B. 
this code still lacks error checking - - modprobe ${FSTYPE} - checkfs ${ROOT} root "${FSTYPE}" - - ROOTFLAGS="$(echo "${ROOTFLAGS}" | sed 's/subvol=\(next\|current\)//' | sed 's/^-o *$//')" - - if [ "${FSTYPE}" != "unknown" ]; then - mount -t ${FSTYPE} ${ROOTFLAGS} ${ROOT} ${rootmnt} - else - mount ${ROOTFLAGS} ${ROOT} ${rootmnt} - fi - - echo 'mounting embassyfs' - - mkdir /embassyfs - - mount --move ${rootmnt} /embassyfs - - if ! [ -d /embassyfs/current ] && [ -d /embassyfs/prev ]; then - mv /embassyfs/prev /embassyfs/current - fi - - if ! [ -d /embassyfs/current ]; then - mkdir /embassyfs/current - for FILE in $(ls /embassyfs); do - if [ "$FILE" != current ]; then - mv /embassyfs/$FILE /embassyfs/current/ - fi - done - fi - - mkdir -p /embassyfs/config - - if [ -f /embassyfs/config/upgrade ] && [ -d /embassyfs/next ]; then - mv /embassyfs/current /embassyfs/prev - mv /embassyfs/next /embassyfs/current - rm /embassyfs/config/upgrade - fi - - if ! [ -d /embassyfs/next ]; then - if [ -d /embassyfs/prev ]; then - mv /embassyfs/prev /embassyfs/next - else - mkdir /embassyfs/next - fi - fi - - mkdir /lower /upper - - mount -r --bind /embassyfs/current /lower - - modprobe overlay || insmod "/lower/lib/modules/$(uname -r)/kernel/fs/overlayfs/overlay.ko" - - # Mount a tmpfs for the overlay in /upper - mount -t tmpfs tmpfs /upper - mkdir /upper/data /upper/work - - # Mount the final overlay-root in $rootmnt - mount -t overlay \ - -olowerdir=/lower,upperdir=/upper/data,workdir=/upper/work \ - overlay ${rootmnt} - - mkdir -p ${rootmnt}/media/embassy/config - mount --bind /embassyfs/config ${rootmnt}/media/embassy/config - mkdir -p ${rootmnt}/media/embassy/next - mount --bind /embassyfs/next ${rootmnt}/media/embassy/next - mkdir -p ${rootmnt}/media/embassy/embassyfs - mount -r --bind /embassyfs ${rootmnt}/media/embassy/embassyfs -} \ No newline at end of file diff --git a/build/lib/scripts/enable-kiosk b/build/lib/scripts/enable-kiosk index ad7cd4bf3..45bed5fe9 100755 --- 
a/build/lib/scripts/enable-kiosk +++ b/build/lib/scripts/enable-kiosk @@ -14,14 +14,8 @@ if ! id kiosk; then useradd -s /bin/bash --create-home kiosk fi -# create kiosk script -cat > /home/kiosk/kiosk.sh << 'EOF' -#!/bin/sh -PROFILE=$(mktemp -d) -if [ -f /usr/local/share/ca-certificates/startos-root-ca.crt ]; then - certutil -A -n "StartOS Local Root CA" -t "TCu,Cuw,Tuw" -i /usr/local/share/ca-certificates/startos-root-ca.crt -d $PROFILE -fi -cat >> $PROFILE/prefs.js << EOT +mkdir /home/kiosk/fx-profile +cat >> /home/kiosk/fx-profile/prefs.js << EOF user_pref("app.normandy.api_url", ""); user_pref("app.normandy.enabled", false); user_pref("app.shield.optoutstudies.enabled", false); @@ -87,7 +81,11 @@ user_pref("toolkit.telemetry.shutdownPingSender.enabled", false); user_pref("toolkit.telemetry.unified", false); user_pref("toolkit.telemetry.updatePing.enabled", false); user_pref("toolkit.telemetry.cachedClientID", ""); -EOT +EOF + +# create kiosk script +cat > /home/kiosk/kiosk.sh << 'EOF' +#!/bin/sh while ! 
curl "http://localhost" > /dev/null; do sleep 1 done @@ -101,8 +99,7 @@ done killall firefox-esr ) & matchbox-window-manager -use_titlebar no & -firefox-esr http://localhost --profile $PROFILE -rm -rf $PROFILE +firefox-esr http://localhost --profile /home/kiosk/fx-profile EOF chmod +x /home/kiosk/kiosk.sh @@ -116,6 +113,8 @@ fi EOF fi +chown -R kiosk:kiosk /home/kiosk + # enable autologin mkdir -p /etc/systemd/system/getty@tty1.service.d cat > /etc/systemd/system/getty@tty1.service.d/autologin.conf << 'EOF' diff --git a/build/lib/scripts/grub-probe-eos b/build/lib/scripts/grub-probe-eos index ed37eefaa..aa2e1cacc 100755 --- a/build/lib/scripts/grub-probe-eos +++ b/build/lib/scripts/grub-probe-eos @@ -3,8 +3,8 @@ ARGS= for ARG in $@; do - if [ -d "/media/embassy/embassyfs" ] && [ "$ARG" = "/" ]; then - ARG=/media/embassy/embassyfs + if [ -d "/media/startos/root" ] && [ "$ARG" = "/" ]; then + ARG=/media/startos/root fi ARGS="$ARGS $ARG" done diff --git a/build/lib/scripts/prune-images b/build/lib/scripts/prune-images new file mode 100755 index 000000000..20356a28c --- /dev/null +++ b/build/lib/scripts/prune-images @@ -0,0 +1,49 @@ +#!/bin/bash + +if [ "$UID" -ne 0 ]; then + >&2 echo 'Must be run as root' + exit 1 +fi + +POSITIONAL_ARGS=() + +while [[ $# -gt 0 ]]; do + case $1 in + -*|--*) + echo "Unknown option $1" + exit 1 + ;; + *) + POSITIONAL_ARGS+=("$1") # save positional arg + shift # past argument + ;; + esac +done + +set -- "${POSITIONAL_ARGS[@]}" # restore positional parameters + +needed=$1 + +if [ -z "$needed" ]; then + >&2 echo "usage: $0 " + exit 1 +fi + +if [ -h /media/startos/config/current.rootfs ] && [ -e /media/startos/config/current.rootfs ]; then + echo 'Pruning...' 
+ current="$(readlink -f /media/startos/config/current.rootfs)" + while [[ "$(df -B1 --output=avail --sync /media/startos/images | tail -n1)" -lt "$needed" ]]; do + to_prune="$(ls -t1 /media/startos/images/*.rootfs /media/startos/images/*.squashfs | grep -v "$current" | tail -n1)" + if [ -e "$to_prune" ]; then + echo " Pruning $to_prune" + rm -rf "$to_prune" + else + >&2 echo "Not enough space and nothing to prune!" + exit 1 + fi + done + echo 'done.' +else + >&2 echo 'No current.rootfs, not safe to prune' + exit 1 +fi \ No newline at end of file diff --git a/build/lib/scripts/startos-initramfs-module b/build/lib/scripts/startos-initramfs-module new file mode 100755 index 000000000..e13c887e2 --- /dev/null +++ b/build/lib/scripts/startos-initramfs-module @@ -0,0 +1,114 @@ +# Local filesystem mounting -*- shell-script -*- + +# +# This script overrides local_mount_root() in /scripts/local +# and mounts root as a read-only filesystem with a temporary (rw) +# overlay filesystem. +# + +. /scripts/local + +local_mount_root() +{ + echo 'using startos initramfs module' + + local_top + local_device_setup "${ROOT}" "root file system" + ROOT="${DEV}" + + # Get the root filesystem type if not set + if [ -z "${ROOTFSTYPE}" ]; then + FSTYPE=$(get_fstype "${ROOT}") + else + FSTYPE=${ROOTFSTYPE} + fi + + local_premount + + # CHANGES TO THE ORIGINAL FUNCTION BEGIN HERE + # N.B. 
this code still lacks error checking + + modprobe ${FSTYPE} + checkfs ${ROOT} root "${FSTYPE}" + + echo 'mounting startos' + mkdir /startos + + ROOTFLAGS="$(echo "${ROOTFLAGS}" | sed 's/subvol=\(next\|current\)//' | sed 's/^-o *$//')" + + if [ "${FSTYPE}" != "unknown" ]; then + mount -t ${FSTYPE} ${ROOTFLAGS} ${ROOT} /startos + else + mount ${ROOTFLAGS} ${ROOT} /startos + fi + + if [ -d /startos/images ]; then + if [ -h /startos/config/current.rootfs ] && [ -e /startos/config/current.rootfs ]; then + image=$(readlink -f /startos/config/current.rootfs) + else + image="$(ls -t1 /startos/images/*.rootfs | head -n1)" + fi + if ! [ -f "$image" ]; then + >&2 echo "image $image not available to boot" + exit 1 + fi + else + if [ -f /startos/config/upgrade ] && [ -d /startos/next ]; then + oldroot=/startos/next + elif [ -d /startos/current ]; then + oldroot=/startos/current + elif [ -d /startos/prev ]; then + oldroot=/startos/prev + else + >&2 echo no StartOS filesystem found + exit 1 + fi + + mkdir -p /startos/config/overlay/etc + mv $oldroot/etc/fstab /startos/config/overlay/etc/fstab + mv $oldroot/etc/machine-id /startos/config/overlay/etc/machine-id + mv $oldroot/etc/ssh /startos/config/overlay/etc/ssh + + mkdir -p /startos/images + mv $oldroot /startos/images/legacy.rootfs + + rm -rf /startos/next /startos/current /startos/prev + + ln -rsf /startos/images/old.squashfs /startos/config/current.rootfs + image=$(readlink -f /startos/config/current.rootfs) + fi + + mkdir /lower /upper + + if [ -d "$image" ]; then + mount -r --bind $image /lower + elif [ -f "$image" ]; then + modprobe squashfs + mount -r $image /lower + else + >&2 echo "not a regular file or directory: $image" + exit 1 + fi + + modprobe overlay || insmod "/lower/lib/modules/$(uname -r)/kernel/fs/overlayfs/overlay.ko" + + # Mount a tmpfs for the overlay in /upper + mount -t tmpfs tmpfs /upper + mkdir /upper/data /upper/work + + mkdir -p /startos/config/overlay + + # Mount the final overlay-root in $rootmnt + 
mount -t overlay \ + -olowerdir=/startos/config/overlay:/lower,upperdir=/upper/data,workdir=/upper/work \ + overlay ${rootmnt} + + mkdir -p ${rootmnt}/media/startos/config + mount --bind /startos/config ${rootmnt}/media/startos/config + mkdir -p ${rootmnt}/media/startos/images + mount --bind /startos/images ${rootmnt}/media/startos/images + mkdir -p ${rootmnt}/media/startos/root + mount -r --bind /startos ${rootmnt}/media/startos/root + mkdir -p ${rootmnt}/media/startos/current + mount -r --bind /lower ${rootmnt}/media/startos/current +} \ No newline at end of file diff --git a/build/raspberrypi/make-image.sh b/build/raspberrypi/make-image.sh index 3b07cb3a8..ec5ea4297 100755 --- a/build/raspberrypi/make-image.sh +++ b/build/raspberrypi/make-image.sh @@ -63,7 +63,7 @@ sudo unsquashfs -f -d $TMPDIR startos.raspberrypi.squashfs REAL_GIT_HASH=$(cat $TMPDIR/usr/lib/startos/GIT_HASH.txt) REAL_VERSION=$(cat $TMPDIR/usr/lib/startos/VERSION.txt) REAL_ENVIRONMENT=$(cat $TMPDIR/usr/lib/startos/ENVIRONMENT.txt) -sudo sed -i 's| boot=embassy| init=/usr/lib/startos/scripts/init_resize\.sh|' $TMPDIR/boot/cmdline.txt +sudo sed -i 's| boot=startos| init=/usr/lib/startos/scripts/init_resize\.sh|' $TMPDIR/boot/cmdline.txt sudo cp ./build/raspberrypi/fstab $TMPDIR/etc/ sudo cp ./build/raspberrypi/init_resize.sh $TMPDIR/usr/lib/startos/scripts/init_resize.sh sudo umount $TMPDIR/boot diff --git a/check-git-hash.sh b/check-git-hash.sh index 874dcc8bf..2f59b9198 100755 --- a/check-git-hash.sh +++ b/check-git-hash.sh @@ -1,7 +1,7 @@ #!/bin/bash if [ "$GIT_BRANCH_AS_HASH" != 1 ]; then - GIT_HASH="$(git describe --always --abbrev=40 --dirty=-modified)" + GIT_HASH="$(git rev-parse HEAD)$(if ! 
git diff-index --quiet HEAD --; then echo '-modified'; fi)" else GIT_HASH="@$(git rev-parse --abbrev-ref HEAD)" fi diff --git a/container-runtime/Dockerfile b/container-runtime/Dockerfile deleted file mode 100644 index f936ee11b..000000000 --- a/container-runtime/Dockerfile +++ /dev/null @@ -1,4 +0,0 @@ -FROM node:18-alpine - -ADD ./startInit.js /usr/local/lib/startInit.js -ADD ./entrypoint.sh /usr/local/bin/entrypoint.sh \ No newline at end of file diff --git a/container-runtime/RPCSpec.md b/container-runtime/RPCSpec.md index 679671614..fd1014add 100644 --- a/container-runtime/RPCSpec.md +++ b/container-runtime/RPCSpec.md @@ -3,38 +3,61 @@ ## Methods ### init + initialize runtime (mount `/proc`, `/sys`, `/dev`, and `/run` to each image in `/media/images`) called after os has mounted js and images to the container + #### args + `[]` + #### response + `null` ### exit + shutdown runtime + #### args + `[]` + #### response + `null` ### start + run main method if not already running + #### args + `[]` + #### response + `null` ### stop + stop main method by sending SIGTERM to child processes, and SIGKILL after timeout + #### args + `{ timeout: millis }` + #### response + `null` ### execute + run a specific package procedure -#### args + +#### args + ```ts { procedure: JsonPath, @@ -42,12 +65,17 @@ run a specific package procedure timeout: millis, } ``` + #### response + `any` ### sandbox + run a specific package procedure in sandbox mode -#### args + +#### args + ```ts { procedure: JsonPath, @@ -55,5 +83,7 @@ run a specific package procedure in sandbox mode timeout: millis, } ``` + #### response + `any` diff --git a/container-runtime/container-runtime.service b/container-runtime/container-runtime.service new file mode 100644 index 000000000..b9d5ec5ae --- /dev/null +++ b/container-runtime/container-runtime.service @@ -0,0 +1,9 @@ +[Unit] +Description=StartOS Container Runtime + +[Service] +Type=simple +ExecStart=/usr/bin/node --experimental-detect-module 
--unhandled-rejections=warn /usr/lib/startos/init/index.js + +[Install] +WantedBy=multi-user.target \ No newline at end of file diff --git a/container-runtime/containerRuntime.rc b/container-runtime/containerRuntime.rc deleted file mode 100644 index 203b99659..000000000 --- a/container-runtime/containerRuntime.rc +++ /dev/null @@ -1,10 +0,0 @@ -#!/sbin/openrc-run - -name=containerRuntime -#cfgfile="/etc/containerRuntime/containerRuntime.conf" -command="/usr/bin/node" -command_args="--experimental-detect-module --unhandled-rejections=warn /usr/lib/startos/init/index.js" -pidfile="/run/containerRuntime.pid" -command_background="yes" -output_log="/var/log/containerRuntime.log" -error_log="/var/log/containerRuntime.err" diff --git a/container-runtime/deb-install.sh b/container-runtime/deb-install.sh new file mode 100644 index 000000000..697bfd10e --- /dev/null +++ b/container-runtime/deb-install.sh @@ -0,0 +1,23 @@ +#!/bin/bash + +set -e + +mkdir -p /run/systemd/resolve +echo "nameserver 8.8.8.8" > /run/systemd/resolve/stub-resolv.conf + +apt-get update +apt-get install -y curl rsync qemu-user-static + +curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.7/install.sh | bash +source ~/.bashrc +nvm install 20 +ln -s $(which node) /usr/bin/node + +sed -i '/\(^\|#\)Storage=/c\Storage=persistent' /etc/systemd/journald.conf +sed -i '/\(^\|#\)Compress=/c\Compress=yes' /etc/systemd/journald.conf +sed -i '/\(^\|#\)SystemMaxUse=/c\SystemMaxUse=1G' /etc/systemd/journald.conf +sed -i '/\(^\|#\)ForwardToSyslog=/c\ForwardToSyslog=no' /etc/systemd/journald.conf + +systemctl enable container-runtime.service + +rm -rf /run/systemd \ No newline at end of file diff --git a/container-runtime/download-base-image.sh b/container-runtime/download-base-image.sh index 23a140ea5..7fb134f31 100755 --- a/container-runtime/download-base-image.sh +++ b/container-runtime/download-base-image.sh @@ -4,8 +4,8 @@ cd "$(dirname "${BASH_SOURCE[0]}")" set -e -DISTRO=alpine -VERSION=3.19 
+DISTRO=debian +VERSION=bookworm ARCH=${ARCH:-$(uname -m)} FLAVOR=default @@ -16,4 +16,8 @@ elif [ "$_ARCH" = "aarch64" ]; then _ARCH=arm64 fi -curl https://images.linuxcontainers.org/$(curl --silent https://images.linuxcontainers.org/meta/1.0/index-system | grep "^$DISTRO;$VERSION;$_ARCH;$FLAVOR;" | head -n1 | sed 's/^.*;//g')/rootfs.squashfs --output alpine.${ARCH}.squashfs \ No newline at end of file +URL="https://images.linuxcontainers.org/$(curl -fsSL https://images.linuxcontainers.org/meta/1.0/index-system | grep "^$DISTRO;$VERSION;$_ARCH;$FLAVOR;" | head -n1 | sed 's/^.*;//g')/rootfs.squashfs" + +echo "Downloading $URL to debian.${ARCH}.squashfs" + +curl -fsSL "$URL" > debian.${ARCH}.squashfs \ No newline at end of file diff --git a/container-runtime/jest.config.js b/container-runtime/jest.config.js new file mode 100644 index 000000000..f499f03f9 --- /dev/null +++ b/container-runtime/jest.config.js @@ -0,0 +1,8 @@ +/** @type {import('ts-jest').JestConfigWithTsJest} */ +module.exports = { + preset: "ts-jest", + automock: false, + testEnvironment: "node", + rootDir: "./src/", + modulePathIgnorePatterns: ["./dist/"], +} diff --git a/container-runtime/package-lock.json b/container-runtime/package-lock.json index b29b4af3d..e2af8830d 100644 --- a/container-runtime/package-lock.json +++ b/container-runtime/package-lock.json @@ -1,20 +1,24 @@ { - "name": "start-init", + "name": "container-runtime", "version": "0.0.0", - "lockfileVersion": 2, + "lockfileVersion": 3, "requires": true, "packages": { "": { - "name": "start-init", + "name": "container-runtime", "version": "0.0.0", "dependencies": { "@iarna/toml": "^2.2.5", + "@noble/curves": "^1.4.0", + "@noble/hashes": "^1.4.0", "@start9labs/start-sdk": "file:../sdk/dist", "esbuild-plugin-resolve": "^2.0.0", "filebrowser": "^1.0.0", "isomorphic-fetch": "^3.0.0", + "jsonpath": "^1.1.1", + "lodash.merge": "^4.6.2", "node-fetch": "^3.1.0", - "ts-matches": "^5.4.1", + "ts-matches": "^5.5.1", "tslib": "^2.5.3", 
"typescript": "^5.1.3", "yaml": "^2.3.1" @@ -22,8 +26,12 @@ "devDependencies": { "@swc/cli": "^0.1.62", "@swc/core": "^1.3.65", + "@types/jest": "^29.5.12", + "@types/jsonpath": "^0.2.4", "@types/node": "^20.11.13", + "jest": "^29.7.0", "prettier": "^3.2.5", + "ts-jest": "^29.2.3", "typescript": ">5.2" } }, @@ -32,31 +40,979 @@ "version": "0.4.0-rev0.lib0.rc8.beta10", "license": "MIT", "dependencies": { - "@iarna/toml": "^2.2.5", "isomorphic-fetch": "^3.0.0", - "ts-matches": "^5.4.1", - "yaml": "^2.2.2" + "ts-matches": "^5.4.1" }, "devDependencies": { + "@iarna/toml": "^2.2.5", "@types/jest": "^29.4.0", + "copyfiles": "^2.4.1", "jest": "^29.4.3", "prettier": "^3.2.5", "ts-jest": "^29.0.5", "ts-node": "^10.9.1", "tsx": "^4.7.1", - "typescript": "^5.0.4" + "typescript": "^5.0.4", + "yaml": "^2.2.2" } }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dev": true, + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", + "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", + "dev": true, + "dependencies": { + "@babel/highlight": "^7.24.7", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.2.tgz", + "integrity": "sha512-bYcppcpKBvX4znYaPEeFau03bp89ShqNMLs+rmdptMw+heSZh9+z84d2YG+K7cYLbWwzdjtDoW/uqZmPjulClQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": 
{ + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.2.tgz", + "integrity": "sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-module-transforms": "^7.25.2", + "@babel/helpers": "^7.25.0", + "@babel/parser": "^7.25.0", + "@babel/template": "^7.25.0", + "@babel/traverse": "^7.25.2", + "@babel/types": "^7.25.2", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/debug": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@babel/core/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/generator": { + "version": "7.25.0", + "resolved": 
"https://registry.npmjs.org/@babel/generator/-/generator-7.25.0.tgz", + "integrity": "sha512-3LEEcj3PVW8pW2R1SR1M89g/qrYk/m/mB/tLqn7dn4sbBUQyTqnlod+II2U4dqiGtUmkcnAmkMDralTFZttRiw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.25.0", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^2.5.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.2.tgz", + "integrity": "sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==", + "dev": true, + "dependencies": { + "@babel/compat-data": "^7.25.2", + "@babel/helper-validator-option": "^7.24.8", + "browserslist": "^4.23.1", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true + }, + "node_modules/@babel/helper-module-imports": { + 
"version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", + "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", + "dev": true, + "dependencies": { + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.2.tgz", + "integrity": "sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==", + "dev": true, + "dependencies": { + "@babel/helper-module-imports": "^7.24.7", + "@babel/helper-simple-access": "^7.24.7", + "@babel/helper-validator-identifier": "^7.24.7", + "@babel/traverse": "^7.25.2" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz", + "integrity": "sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-simple-access": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", + "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", + "dev": true, + "dependencies": { + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", + "integrity": 
"sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", + "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz", + "integrity": "sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.0.tgz", + "integrity": "sha512-MjgLZ42aCm0oGjJj8CtSM3DB8NOOf8h2l7DCTePJs29u+v7yO/RBX9nShlKMgFnRks/Q4tBAe7Hxnov9VkGwLw==", + "dev": true, + "dependencies": { + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", + "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", + "dev": true, + "dependencies": { + "@babel/helper-validator-identifier": "^7.24.7", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/highlight/node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": 
"sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dev": true, + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/highlight/node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dev": true, + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/@babel/highlight/node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==", + "dev": true + }, + "node_modules/@babel/highlight/node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "dev": true, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@babel/highlight/node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + 
"node_modules/@babel/highlight/node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dev": true, + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/@babel/parser": { + "version": "7.25.3", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.3.tgz", + "integrity": "sha512-iLTJKDbJ4hMvFPgQwwsVoxtHyWpKKPBrxkANrSYewDPaPpT5py5yeVkgPIJ7XYXhndxJpaA3PyALSXQ7u8e/Dw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.25.2" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": 
"sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-jsx": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.7.tgz", + "integrity": "sha512-6ddciUPe/mpMnOKv/U+RSd2vvVy+Yw/JfBB0ZHYjEZt9NLHmCUylNYlsbqCCS1Bffjlb0fCwC9Vqz+sBz6PsiQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dev": true, + "dependencies": { + 
"@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": "sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": 
"^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-typescript": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.7.tgz", + "integrity": "sha512-c/+fVeJBB0FeKsFvwytYiUD+LBvhHjGSI0g446PRGdSVGZLRNArBUno2PETbAly3tpiNAQR5XaZ+JslxkotsbA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", + "integrity": "sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/parser": "^7.25.0", + "@babel/types": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.25.3", + "resolved": 
"https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.3.tgz", + "integrity": "sha512-HefgyP1x754oGCsKmV5reSmtV7IXj/kpaE1XYY+D9G5PvKKoFfSbiS4M77MdjuwlZKDIKFCffq9rPU+H/s3ZdQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/parser": "^7.25.3", + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.2", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse/node_modules/debug": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/@babel/traverse/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/@babel/types": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.2.tgz", + "integrity": "sha512-YTnYtra7W9e6/oAZEHj0bJehPRUlLH9/fbpT5LfB0NhQXyALCRkRs3zH9v07IYhkgpqX6Z78FnuccZr/l4Fs4Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@bcoe/v8-coverage": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz", + "integrity": "sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==", + "dev": true + }, "node_modules/@iarna/toml": { "version": "2.2.5", - "resolved": 
"https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz", - "integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==" + "license": "ISC" + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dev": true, + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/console": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/console/-/console-29.7.0.tgz", + "integrity": "sha512-5Ni4CU7XHQi32IJ398EEP4RrB8eV09sXP2ROqD4bksHrnTree52PsxvX8tpL8LvTZ3pFzXyPbNQReSN41CAhOg==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/core": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/core/-/core-29.7.0.tgz", + "integrity": "sha512-n7aeXWKMnGtDA48y8TLWJPJmLmmZ642Ceo78cYWEpiD7FzDgmNDV/GCVRorPABdXLJZ/9wzzgZAlHjXjxDHGsg==", + "dev": true, + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/reporters": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + 
"chalk": "^4.0.0", + "ci-info": "^3.2.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-changed-files": "^29.7.0", + "jest-config": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-resolve-dependencies": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "jest-watcher": "^29.7.0", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/environment": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/environment/-/environment-29.7.0.tgz", + "integrity": "sha512-aQIfHDq33ExsN4jP1NWGXhxgQ/wixs60gDiKO+XVMd8Mn0NWPWgc34ZQDTb2jKaUWQ7MuwoitXAsN2XVXNMpAw==", + "dev": true, + "dependencies": { + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-8uMeAMycttpva3P1lBHB8VciS9V0XAr3GymPpipdyQXbBcuhkLQOSe8E/p92RyAdToS6ZD1tFkX+CkhoECE0dQ==", + "dev": true, + "dependencies": { + "expect": "^29.7.0", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/expect-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/expect-utils/-/expect-utils-29.7.0.tgz", + "integrity": "sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==", + "dev": true, + 
"dependencies": { + "jest-get-type": "^29.6.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/fake-timers": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/fake-timers/-/fake-timers-29.7.0.tgz", + "integrity": "sha512-q4DH1Ha4TTFPdxLsqDXK1d3+ioSL7yL5oCMJZgDYm6i+6CygW5E5xVr/D1HdsGxjt1ZWSfUAs9OxSB/BNelWrQ==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@sinonjs/fake-timers": "^10.0.2", + "@types/node": "*", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/globals": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/globals/-/globals-29.7.0.tgz", + "integrity": "sha512-mpiz3dutLbkW2MNFubUGUEVLkTGiqW6yLVTA+JbP6fI6J5iL9Y0Nlg8k95pcF8ctKwCS7WVxteBs29hhfAotzQ==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/types": "^29.6.3", + "jest-mock": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/reporters": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/reporters/-/reporters-29.7.0.tgz", + "integrity": "sha512-DApq0KJbJOEzAFYjHADNNxAE3KbhxQB1y5Kplb5Waqw6zVbuWatSnMjE5gs8FUgEPmNsnZA3NCWl9NG0ia04Pg==", + "dev": true, + "dependencies": { + "@bcoe/v8-coverage": "^0.2.3", + "@jest/console": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "@types/node": "*", + "chalk": "^4.0.0", + "collect-v8-coverage": "^1.0.0", + "exit": "^0.1.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "istanbul-lib-coverage": "^3.0.0", + "istanbul-lib-instrument": "^6.0.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.0", + "istanbul-reports": "^3.1.3", + "jest-message-util": "^29.7.0", + 
"jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "slash": "^3.0.0", + "string-length": "^4.0.1", + "strip-ansi": "^6.0.0", + "v8-to-istanbul": "^9.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dev": true, + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/source-map": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/source-map/-/source-map-29.6.3.tgz", + "integrity": "sha512-MHjT95QuipcPrpLM+8JMSzFx6eHp5Bm+4XeFDJlwsvVBjmKNiIAvasGK2fxz2WbGRlnvqehFbh07MMa7n3YJnw==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.18", + "callsites": "^3.0.0", + "graceful-fs": "^4.2.9" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-result": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-result/-/test-result-29.7.0.tgz", + "integrity": "sha512-Fdx+tv6x1zlkJPcWXmMDAG2HBnaR9XPSd5aDWQVsfrZmLVT3lU1cwyxLgRmXR9yrq4NBoEm9BMsfgFzTQAbJYA==", + "dev": true, + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "collect-v8-coverage": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/test-sequencer": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/test-sequencer/-/test-sequencer-29.7.0.tgz", + "integrity": 
"sha512-GQwJ5WZVrKnOJuiYiAF52UNUJXgTZx1NHjFSEB0qEMmSZKAkdMoIzw/Cj6x6NF4AvV23AUqDpFzQkN/eYCYTxw==", + "dev": true, + "dependencies": { + "@jest/test-result": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dev": true, + "dependencies": { + 
"@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "dev": true, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==", + "dev": true + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dev": true, + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } }, "node_modules/@mole-inc/bin-wrapper": { "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@mole-inc/bin-wrapper/-/bin-wrapper-8.0.1.tgz", - "integrity": "sha512-sTGoeZnjI8N4KS+sW2AN95gDBErhAguvkw/tWdCjeM8bvxpz5lqrnd0vOJABA1A+Ic3zED7PYoLP/RANLgVotA==", "dev": true, + "license": "MIT", "dependencies": { "bin-check": "^4.1.0", "bin-version-check": "^5.0.0", @@ -71,11 +1027,30 @@ "node": "^12.20.0 || ^14.13.1 || >=16.0.0" } }, + 
"node_modules/@noble/curves": { + "version": "1.4.0", + "license": "MIT", + "dependencies": { + "@noble/hashes": "1.4.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, + "node_modules/@noble/hashes": { + "version": "1.4.0", + "license": "MIT", + "engines": { + "node": ">= 16" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + } + }, "node_modules/@nodelib/fs.scandir": { "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.stat": "2.0.5", "run-parallel": "^1.1.9" @@ -86,18 +1061,16 @@ }, "node_modules/@nodelib/fs.stat": { "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", "dev": true, + "license": "MIT", "engines": { "node": ">= 8" } }, "node_modules/@nodelib/fs.walk": { "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.scandir": "2.1.5", "fastq": "^1.6.0" @@ -106,11 +1079,16 @@ "node": ">= 8" } }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==", + "dev": true + }, "node_modules/@sindresorhus/is": { "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", - "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", "dev": 
true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -118,15 +1096,32 @@ "url": "https://github.com/sindresorhus/is?sponsor=1" } }, + "node_modules/@sinonjs/commons": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/@sinonjs/commons/-/commons-3.0.1.tgz", + "integrity": "sha512-K3mCHKQ9sVh8o1C9cxkwxaOmXoAMlDxC1mYyHrjqOWEcBjYr76t96zL2zlj5dUGZ3HSw240X1qgH3Mjf1yJWpQ==", + "dev": true, + "dependencies": { + "type-detect": "4.0.8" + } + }, + "node_modules/@sinonjs/fake-timers": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-10.3.0.tgz", + "integrity": "sha512-V4BG07kuYSUkTCSBHG8G8TNhM+F19jXFWnQtzj+we8DrkpSBCee9Z3Ms8yiGer/dlmhe35/Xdgyo3/0rQKg7YA==", + "dev": true, + "dependencies": { + "@sinonjs/commons": "^3.0.0" + } + }, "node_modules/@start9labs/start-sdk": { "resolved": "../sdk/dist", "link": true }, "node_modules/@swc/cli": { "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@swc/cli/-/cli-0.1.65.tgz", - "integrity": "sha512-4NcgsvJVHhA7trDnMmkGLLvWMHu2kSy+qHx6QwRhhJhdiYdNUrhdp+ERxen73sYtaeEOYeLJcWrQ60nzKi6rpg==", "dev": true, + "license": "MIT", "dependencies": { "@mole-inc/bin-wrapper": "^8.0.1", "commander": "^7.1.0", @@ -155,14 +1150,13 @@ } }, "node_modules/@swc/core": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.1.tgz", - "integrity": "sha512-3y+Y8js+e7BbM16iND+6Rcs3jdiL28q3iVtYsCviYSSpP2uUVKkp5sJnCY4pg8AaVvyN7CGQHO7gLEZQ5ByozQ==", + "version": "1.5.28", "dev": true, "hasInstallScript": true, + "license": "Apache-2.0", "dependencies": { - "@swc/counter": "^0.1.2", - "@swc/types": "^0.1.5" + "@swc/counter": "^0.1.3", + "@swc/types": "^0.1.8" }, "engines": { "node": ">=10" @@ -172,19 +1166,19 @@ "url": "https://opencollective.com/swc" }, "optionalDependencies": { - "@swc/core-darwin-arm64": "1.4.1", - "@swc/core-darwin-x64": "1.4.1", - "@swc/core-linux-arm-gnueabihf": "1.4.1", - "@swc/core-linux-arm64-gnu": "1.4.1", - 
"@swc/core-linux-arm64-musl": "1.4.1", - "@swc/core-linux-x64-gnu": "1.4.1", - "@swc/core-linux-x64-musl": "1.4.1", - "@swc/core-win32-arm64-msvc": "1.4.1", - "@swc/core-win32-ia32-msvc": "1.4.1", - "@swc/core-win32-x64-msvc": "1.4.1" + "@swc/core-darwin-arm64": "1.5.28", + "@swc/core-darwin-x64": "1.5.28", + "@swc/core-linux-arm-gnueabihf": "1.5.28", + "@swc/core-linux-arm64-gnu": "1.5.28", + "@swc/core-linux-arm64-musl": "1.5.28", + "@swc/core-linux-x64-gnu": "1.5.28", + "@swc/core-linux-x64-musl": "1.5.28", + "@swc/core-win32-arm64-msvc": "1.5.28", + "@swc/core-win32-ia32-msvc": "1.5.28", + "@swc/core-win32-x64-msvc": "1.5.28" }, "peerDependencies": { - "@swc/helpers": "^0.5.0" + "@swc/helpers": "*" }, "peerDependenciesMeta": { "@swc/helpers": { @@ -192,183 +1186,23 @@ } } }, - "node_modules/@swc/core-darwin-arm64": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.1.tgz", - "integrity": "sha512-ePyfx0348UbR4DOAW24TedeJbafnzha8liXFGuQ4bdXtEVXhLfPngprrxKrAddCuv42F9aTxydlF6+adD3FBhA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-darwin-x64": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.1.tgz", - "integrity": "sha512-eLf4JSe6VkCMdDowjM8XNC5rO+BrgfbluEzAVtKR8L2HacNYukieumN7EzpYCi0uF1BYwu1ku6tLyG2r0VcGxA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-linux-arm-gnueabihf": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.1.tgz", - "integrity": "sha512-K8VtTLWMw+rkN/jDC9o/Q9SMmzdiHwYo2CfgkwVT29NsGccwmNhCQx6XoYiPKyKGIFKt4tdQnJHKUFzxUqQVtQ==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": 
">=10" - } - }, - "node_modules/@swc/core-linux-arm64-gnu": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.1.tgz", - "integrity": "sha512-0e8p4g0Bfkt8lkiWgcdiENH3RzkcqKtpRXIVNGOmVc0OBkvc2tpm2WTx/eoCnes2HpTT4CTtR3Zljj4knQ4Fvw==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-linux-arm64-musl": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.1.tgz", - "integrity": "sha512-b/vWGQo2n7lZVUnSQ7NBq3Qrj85GrAPPiRbpqaIGwOytiFSk8VULFihbEUwDe0rXgY4LDm8z8wkgADZcLnmdUA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-linux-x64-gnu": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.1.tgz", - "integrity": "sha512-AFMQlvkKEdNi1Vk2GFTxxJzbICttBsOQaXa98kFTeWTnFFIyiIj2w7Sk8XRTEJ/AjF8ia8JPKb1zddBWr9+bEQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-linux-x64-musl": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.1.tgz", - "integrity": "sha512-QX2MxIECX1gfvUVZY+jk528/oFkS9MAl76e3ZRvG2KC/aKlCQL0KSzcTSm13mOxkDKS30EaGRDRQWNukGpMeRg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-win32-arm64-msvc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.1.tgz", - "integrity": "sha512-OklkJYXXI/tntD2zaY8i3iZldpyDw5q+NAP3k9OlQ7wXXf37djRsHLV0NW4+ZNHBjE9xp2RsXJ0jlOJhfgGoFA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": 
true, - "os": [ - "win32" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-win32-ia32-msvc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.1.tgz", - "integrity": "sha512-MBuc3/QfKX9FnLOU7iGN+6yHRTQaPQ9WskiC8s8JFiKQ+7I2p25tay2RplR9dIEEGgVAu6L7auv96LbNTh+FaA==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=10" - } - }, - "node_modules/@swc/core-win32-x64-msvc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.1.tgz", - "integrity": "sha512-lu4h4wFBb/bOK6N2MuZwg7TrEpwYXgpQf5R7ObNSXL65BwZ9BG8XRzD+dLJmALu8l5N08rP/TrpoKRoGT4WSxw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=10" - } - }, "node_modules/@swc/counter": { "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", - "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", - "dev": true + "dev": true, + "license": "Apache-2.0" }, "node_modules/@swc/types": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", - "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", - "dev": true + "version": "0.1.8", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@swc/counter": "^0.1.3" + } }, "node_modules/@szmarczak/http-timer": { "version": "4.0.6", - "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", - "integrity": "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", "dev": true, + "license": "MIT", "dependencies": { "defer-to-connect": "^2.0.0" }, @@ -378,15 +1212,54 @@ }, "node_modules/@tokenizer/token": { "version": "0.3.0", - 
"resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", - "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", - "dev": true + "dev": true, + "license": "MIT" + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.6.8", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz", + "integrity": "sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==", + "dev": true, + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.6", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", + "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.20.7" + } }, "node_modules/@types/cacheable-request": { "version": "6.0.3", - "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", - "integrity": 
"sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==", "dev": true, + "license": "MIT", "dependencies": { "@types/http-cache-semantics": "*", "@types/keyv": "^3.1.4", @@ -394,43 +1267,108 @@ "@types/responselike": "^1.0.0" } }, - "node_modules/@types/http-cache-semantics": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", - "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==", - "dev": true - }, - "node_modules/@types/keyv": { - "version": "3.1.4", - "resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz", - "integrity": "sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==", + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", "dev": true, "dependencies": { "@types/node": "*" } }, - "node_modules/@types/node": { - "version": "20.11.17", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.17.tgz", - "integrity": "sha512-QmgQZGWu1Yw9TDyAP9ZzpFJKynYNeOvwMJmaxABfieQoVoiVOS6MN1WSpqpRcbeA5+RW82kraAVxCCJg+780Qw==", + "node_modules/@types/http-cache-semantics": { + "version": "4.0.4", "dev": true, + "license": "MIT" + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==", + "dev": true + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": 
"sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dev": true, + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/jest": { + "version": "29.5.12", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.12.tgz", + "integrity": "sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==", + "dev": true, + "dependencies": { + "expect": "^29.0.0", + "pretty-format": "^29.0.0" + } + }, + "node_modules/@types/jsonpath": { + "version": "0.2.4", + "resolved": "https://registry.npmjs.org/@types/jsonpath/-/jsonpath-0.2.4.tgz", + "integrity": "sha512-K3hxB8Blw0qgW6ExKgMbXQv2UPZBoE2GqLpVY+yr7nMD2Pq86lsuIzyAaiQ7eMqFL5B6di6pxSkogLJEyEHoGA==", + "dev": true + }, + "node_modules/@types/keyv": { + "version": "3.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/node": { + "version": "20.14.2", + "dev": true, + "license": "MIT", "dependencies": { "undici-types": "~5.26.4" } }, "node_modules/@types/responselike": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.3.tgz", - "integrity": "sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==", "dev": true, + "license": "MIT", "dependencies": { "@types/node": "*" } }, + "node_modules/@types/stack-utils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/stack-utils/-/stack-utils-2.0.3.tgz", + "integrity": 
"sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==", + "dev": true + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dev": true, + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==", + "dev": true + }, "node_modules/accepts": { "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", + "license": "MIT", "dependencies": { "mime-types": "~2.1.34", "negotiator": "0.6.3" @@ -439,10 +1377,60 @@ "node": ">= 0.6" } }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.2.tgz", + "integrity": "sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==", + "dev": true, + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, "node_modules/arch": { "version": "2.2.0", - "resolved": "https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", - "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", "dev": true, "funding": [ { @@ -457,24 +1445,153 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT" + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } }, "node_modules/array-flatten": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" + "license": "MIT" + }, + "node_modules/async": { + "version": "3.2.5", + "resolved": "https://registry.npmjs.org/async/-/async-3.2.5.tgz", + "integrity": "sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg==", + "dev": true + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + 
"integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dev": true, + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dev": true, + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": 
"https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dev": true, + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.0.1.tgz", + "integrity": "sha512-M7LQ0bxarkxQoN+vz5aJPsLBn77n8QgTFmo8WK0/44auK2xlCXrYcUxHFxgU7qW5Yzw/CjmLRK2uJzaCd7LvqQ==", + "dev": true, + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.8.3", + "@babel/plugin-syntax-import-meta": "^7.8.3", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.8.3", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.8.3", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-top-level-await": "^7.8.3" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dev": true, + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + 
"@babel/core": "^7.0.0" + } }, "node_modules/balanced-match": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/bin-check": { "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bin-check/-/bin-check-4.1.0.tgz", - "integrity": "sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA==", "dev": true, + "license": "MIT", "dependencies": { "execa": "^0.7.0", "executable": "^4.1.0" @@ -485,9 +1602,8 @@ }, "node_modules/bin-version": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/bin-version/-/bin-version-6.0.0.tgz", - "integrity": "sha512-nk5wEsP4RiKjG+vF+uG8lFsEn4d7Y6FVDamzzftSunXOoOcOOkzcWdKVlGgFFwlUQCj63SgnUkLLGF8v7lufhw==", "dev": true, + "license": "MIT", "dependencies": { "execa": "^5.0.0", "find-versions": "^5.0.0" @@ -501,9 +1617,8 @@ }, "node_modules/bin-version-check": { "version": "5.1.0", - "resolved": "https://registry.npmjs.org/bin-version-check/-/bin-version-check-5.1.0.tgz", - "integrity": "sha512-bYsvMqJ8yNGILLz1KP9zKLzQ6YpljV3ln1gqhuLkUtyfGi3qXKGuK2p+U4NAvjVFzDFiBBtOpCOSFNuYYEGZ5g==", "dev": true, + "license": "MIT", "dependencies": { "bin-version": "^6.0.0", "semver": "^7.5.3", @@ -518,9 +1633,8 @@ }, "node_modules/bin-version/node_modules/cross-spawn": { "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", "dev": true, + "license": "MIT", "dependencies": { "path-key": "^3.1.0", "shebang-command": "^2.0.0", @@ -532,9 +1646,8 @@ }, "node_modules/bin-version/node_modules/execa": { "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": 
"sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", "dev": true, + "license": "MIT", "dependencies": { "cross-spawn": "^7.0.3", "get-stream": "^6.0.0", @@ -555,9 +1668,8 @@ }, "node_modules/bin-version/node_modules/get-stream": { "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -567,9 +1679,8 @@ }, "node_modules/bin-version/node_modules/is-stream": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" }, @@ -579,9 +1690,8 @@ }, "node_modules/bin-version/node_modules/npm-run-path": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", "dev": true, + "license": "MIT", "dependencies": { "path-key": "^3.0.0" }, @@ -591,18 +1701,16 @@ }, "node_modules/bin-version/node_modules/path-key": { "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/bin-version/node_modules/shebang-command": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", "dev": true, + "license": "MIT", "dependencies": { "shebang-regex": "^3.0.0" }, @@ -612,18 
+1720,16 @@ }, "node_modules/bin-version/node_modules/shebang-regex": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/bin-version/node_modules/which": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", "dev": true, + "license": "ISC", "dependencies": { "isexe": "^2.0.0" }, @@ -635,12 +1741,11 @@ } }, "node_modules/body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", + "version": "1.20.2", + "license": "MIT", "dependencies": { "bytes": "3.1.2", - "content-type": "~1.0.4", + "content-type": "~1.0.5", "debug": "2.6.9", "depd": "2.0.0", "destroy": "1.2.0", @@ -648,7 +1753,7 @@ "iconv-lite": "0.4.24", "on-finished": "2.4.1", "qs": "6.11.0", - "raw-body": "2.5.1", + "raw-body": "2.5.2", "type-is": "~1.6.18", "unpipe": "1.0.0" }, @@ -659,47 +1764,101 @@ }, "node_modules/brace-expansion": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", "dev": true, + "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" } }, "node_modules/braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", + "version": "3.0.3", "dev": true, + "license": "MIT", "dependencies": { - 
"fill-range": "^7.0.1" + "fill-range": "^7.1.1" }, "engines": { "node": ">=8" } }, + "node_modules/browserslist": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz", + "integrity": "sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001646", + "electron-to-chromium": "^1.5.4", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bs-logger": { + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/bs-logger/-/bs-logger-0.2.6.tgz", + "integrity": "sha512-pd8DCoxmbgc7hyPKOvxtqNcjYoOsABPQdcCUjGp3d42VR2CX1ORhk2A87oqqu5R1kk+76nsxZupkmyd+MVtCog==", + "dev": true, + "dependencies": { + "fast-json-stable-stringify": "2.x" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dev": true, + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", + "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==", + "dev": true + }, "node_modules/bytes": { "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": 
"sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, "node_modules/cacheable-lookup": { "version": "5.0.4", - "resolved": "https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz", - "integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==", "dev": true, + "license": "MIT", "engines": { "node": ">=10.6.0" } }, "node_modules/cacheable-request": { "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.4.tgz", - "integrity": "sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==", "dev": true, + "license": "MIT", "dependencies": { "clone-response": "^1.0.2", "get-stream": "^5.1.0", @@ -715,9 +1874,8 @@ }, "node_modules/cacheable-request/node_modules/get-stream": { "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", "dev": true, + "license": "MIT", "dependencies": { "pump": "^3.0.0" }, @@ -730,8 +1888,7 @@ }, "node_modules/call-bind": { "version": "1.0.7", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", - "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", + "license": "MIT", "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", @@ -746,11 +1903,108 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + 
"resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001650", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001650.tgz", + "integrity": "sha512-fgEc7hP/LB7iicdXHUI9VsBsMZmUmlVJeQP2qqQW+3lkqVhbmjEU8zp+h5stWeilX+G7uXuIUIIlWlDw9jdt8g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/char-regex": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/char-regex/-/char-regex-1.0.2.tgz", + "integrity": "sha512-kWWXztvZ5SBQV+eRgKFeh8q5sLuZY2+8WUIzlxWVTg+oGwY14qylx1KbKzHd8P6ZYkAg0xyIDU9JMHhyJMZ1jw==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/cjs-module-lexer": { + "version": "1.3.1", + 
"resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.3.1.tgz", + "integrity": "sha512-a3KdPAANPbNE4ZUv9h6LckSl9zLsYOP4MBmhIPkRaeyybt+r4UghLvq+xw/YwUcC1gqylCkL4rdVs3Lwupjm4Q==", + "dev": true + }, + "node_modules/cliui": { + "version": "8.0.1", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", + "integrity": "sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, "node_modules/clone-response": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz", - "integrity": "sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==", "dev": true, + "license": "MIT", "dependencies": { "mimic-response": "^1.0.0" }, @@ -758,19 +2012,57 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/co": { + "version": "4.6.0", + "resolved": "https://registry.npmjs.org/co/-/co-4.6.0.tgz", + "integrity": "sha512-QVb0dM5HvG+uaxitm8wONl7jltx8dqhfU33DcqtOZcLSVIKSDDLDi7+0LbAKiyI8hD9u42m2YxXSkMGWThaecQ==", + "dev": true, + "engines": { + "iojs": ">= 1.0.0", + "node": ">= 0.12.0" + } + }, + "node_modules/collect-v8-coverage": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz", + "integrity": "sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q==", + "dev": true + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + 
"node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, "node_modules/commander": { "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", "dev": true, + "license": "MIT", "engines": { "node": ">= 10" } }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==", + "dev": true + }, "node_modules/content-disposition": { "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", + "license": "MIT", "dependencies": { "safe-buffer": "5.2.1" }, @@ -780,30 +2072,53 @@ }, "node_modules/content-type": { "version": "1.0.5", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, "node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": 
"sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", + "version": "0.6.0", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/cookie-signature": { "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" + "license": "MIT" + }, + "node_modules/create-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/create-jest/-/create-jest-29.7.0.tgz", + "integrity": "sha512-Adz2bdH0Vq3F53KEMJOoftQFutWCukm6J24wbPWRO4k1kMY7gS7ds/uoJkNuV8wDCtWWnuwGcJwpWcih+zEW1Q==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "exit": "^0.1.2", + "graceful-fs": "^4.2.9", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "prompts": "^2.0.1" + }, + "bin": { + "create-jest": "bin/create-jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } }, "node_modules/cross-spawn": { "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", "dev": true, + "license": "MIT", "dependencies": { "lru-cache": "^4.0.1", "shebang-command": "^1.2.0", @@ -812,25 +2127,22 @@ }, "node_modules/data-uri-to-buffer": { "version": "4.0.1", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", - "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==", + "license": "MIT", "engines": { "node": ">= 12" } }, "node_modules/debug": { "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "license": "MIT", "dependencies": { 
"ms": "2.0.0" } }, "node_modules/decompress-response": { "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", "dev": true, + "license": "MIT", "dependencies": { "mimic-response": "^3.1.0" }, @@ -843,9 +2155,8 @@ }, "node_modules/decompress-response/node_modules/mimic-response": { "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -853,19 +2164,45 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/dedent": { + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/dedent/-/dedent-1.5.3.tgz", + "integrity": "sha512-NHQtfOOW68WD8lgypbLA5oT+Bt0xXJhiYvoR6SmmNXZfpzOGXwdKWmcwG8N7PwVVWV3eF/68nmD9BaJSsTBhyQ==", + "dev": true, + "peerDependencies": { + "babel-plugin-macros": "^3.1.0" + }, + "peerDependenciesMeta": { + "babel-plugin-macros": { + "optional": true + } + } + }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" + }, + "node_modules/deepmerge": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.3.1.tgz", + "integrity": "sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/defer-to-connect": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", - "integrity": 
"sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" } }, "node_modules/define-data-property": { "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "license": "MIT", "dependencies": { "es-define-property": "^1.0.0", "es-errors": "^1.3.0", @@ -880,47 +2217,107 @@ }, "node_modules/depd": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, "node_modules/destroy": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==", + "license": "MIT", "engines": { "node": ">= 0.8", "npm": "1.2.8000 || >= 1.4.16" } }, + "node_modules/detect-newline": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", + "integrity": "sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/diff-sequences": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/diff-sequences/-/diff-sequences-29.6.3.tgz", + "integrity": "sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, "node_modules/ee-first": { "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": 
"sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" + "license": "MIT" + }, + "node_modules/ejs": { + "version": "3.1.10", + "resolved": "https://registry.npmjs.org/ejs/-/ejs-3.1.10.tgz", + "integrity": "sha512-UeJmFfOrAQS8OJWPZ4qtgHyWExa088/MtK5UEyoJGFH67cDEXkZSviOiKRCZ4Xij0zxI3JECgYs3oKx+AizQBA==", + "dev": true, + "dependencies": { + "jake": "^10.8.5" + }, + "bin": { + "ejs": "bin/cli.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.5.tgz", + "integrity": "sha512-QR7/A7ZkMS8tZuoftC/jfqNkZLQO779SSW3YuZHP4eXpj3EffGLFcB/Xu9AAZQzLccTiCV+EmUo3ha4mQ9wnlA==", + "dev": true + }, + "node_modules/emittery": { + "version": "0.13.1", + "resolved": "https://registry.npmjs.org/emittery/-/emittery-0.13.1.tgz", + "integrity": "sha512-DeWwawk6r5yR9jFgnDKYt4sLS0LmHJJi3ZOnb5/JdbYwj3nW+FxQnHIjhBKz8YLC7oRNPVM9NQ47I3CVx34eqQ==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sindresorhus/emittery?sponsor=1" + } + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true }, "node_modules/encodeurl": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, "node_modules/end-of-stream": { "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "dev": true, + "license": 
"MIT", "dependencies": { "once": "^1.4.0" } }, + "node_modules/error-ex": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/error-ex/-/error-ex-1.3.2.tgz", + "integrity": "sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==", + "dev": true, + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, "node_modules/es-define-property": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", + "license": "MIT", "dependencies": { "get-intrinsic": "^1.2.4" }, @@ -930,27 +2327,32 @@ }, "node_modules/es-errors": { "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", "engines": { "node": ">= 0.4" } }, "node_modules/esbuild-plugin-resolve": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/esbuild-plugin-resolve/-/esbuild-plugin-resolve-2.0.0.tgz", - "integrity": "sha512-eJy9B8yDW5X/J48eWtR1uVmv+DKfHvYYnrrcqQoe/nUkVHVOTZlJnSevkYyGOz6hI90t036Y5QIPDrGzmppxfg==" + "license": "MIT" + }, + "node_modules/escalade": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", + "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "dev": true, + "engines": { + "node": ">=6" + } }, "node_modules/escape-html": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" + "license": "MIT" }, "node_modules/escape-string-regexp": { "version": "5.0.0", - "resolved": 
"https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", - "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -958,19 +2360,75 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/escodegen": { + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/escodegen/-/escodegen-1.14.3.tgz", + "integrity": "sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw==", + "dependencies": { + "esprima": "^4.0.1", + "estraverse": "^4.2.0", + "esutils": "^2.0.2", + "optionator": "^0.8.1" + }, + "bin": { + "escodegen": "bin/escodegen.js", + "esgenerate": "bin/esgenerate.js" + }, + "engines": { + "node": ">=4.0" + }, + "optionalDependencies": { + "source-map": "~0.6.1" + } + }, + "node_modules/escodegen/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.3.0.tgz", + "integrity": "sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + 
"integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/etag": { "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/execa": { "version": "0.7.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", - "integrity": "sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw==", "dev": true, + "license": "MIT", "dependencies": { "cross-spawn": "^5.0.1", "get-stream": "^3.0.0", @@ -986,9 +2444,8 @@ }, "node_modules/executable": { "version": "4.1.1", - "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", - "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", "dev": true, + "license": "MIT", "dependencies": { "pify": "^2.2.0" }, @@ -996,17 +2453,41 @@ "node": ">=4" } }, + "node_modules/exit": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/exit/-/exit-0.1.2.tgz", + "integrity": "sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ==", + "dev": true, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/expect": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/expect/-/expect-29.7.0.tgz", + "integrity": "sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==", + "dev": true, + "dependencies": { + "@jest/expect-utils": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, "node_modules/express": { - "version": "4.18.2", - "resolved": 
"https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", + "version": "4.19.2", + "license": "MIT", "dependencies": { "accepts": "~1.3.8", "array-flatten": "1.1.1", - "body-parser": "1.20.1", + "body-parser": "1.20.2", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.5.0", + "cookie": "0.6.0", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -1039,9 +2520,8 @@ }, "node_modules/ext-list": { "version": "2.2.2", - "resolved": "https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", - "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", "dev": true, + "license": "MIT", "dependencies": { "mime-db": "^1.28.0" }, @@ -1051,9 +2531,8 @@ }, "node_modules/ext-name": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", - "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", "dev": true, + "license": "MIT", "dependencies": { "ext-list": "^2.0.0", "sort-keys-length": "^1.0.0" @@ -1064,9 +2543,8 @@ }, "node_modules/fast-glob": { "version": "3.3.2", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", - "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", "dev": true, + "license": "MIT", "dependencies": { "@nodelib/fs.stat": "^2.0.2", "@nodelib/fs.walk": "^1.2.3", @@ -1078,19 +2556,36 @@ "node": ">=8.6.0" } }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true + }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + 
"resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": "sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==" + }, "node_modules/fastq": { "version": "1.17.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", - "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", "dev": true, + "license": "ISC", "dependencies": { "reusify": "^1.0.4" } }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dev": true, + "dependencies": { + "bser": "2.1.1" + } + }, "node_modules/fetch-blob": { "version": "3.2.0", - "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", - "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", "funding": [ { "type": "github", @@ -1101,6 +2596,7 @@ "url": "https://paypal.me/jimmywarting" } ], + "license": "MIT", "dependencies": { "node-domexception": "^1.0.0", "web-streams-polyfill": "^3.0.3" @@ -1111,9 +2607,8 @@ }, "node_modules/file-type": { "version": "17.1.6", - "resolved": "https://registry.npmjs.org/file-type/-/file-type-17.1.6.tgz", - "integrity": "sha512-hlDw5Ev+9e883s0pwUsuuYNu4tD7GgpUnOvykjv1Gya0ZIjuKumthDRua90VUn6/nlRKAjcxLUnHNTIUWwWIiw==", "dev": true, + "license": "MIT", "dependencies": { "readable-web-to-node-stream": "^3.0.2", "strtok3": "^7.0.0-alpha.9", @@ -1128,8 +2623,7 @@ }, "node_modules/filebrowser": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/filebrowser/-/filebrowser-1.0.0.tgz", - "integrity": "sha512-RRONYpCDzbmWPhBX43T4dE+ptqLznJ7lKfbMaZLChB2i2ZIdFXoqT9qZTi70Dpq6fnJHuvcdeiRqMIPZKhVgTQ==", + "license": "ISC", "dependencies": { "commander": "^2.9.0", 
"content-disposition": "^0.5.1", @@ -1138,14 +2632,33 @@ }, "node_modules/filebrowser/node_modules/commander": { "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" + "license": "MIT" + }, + "node_modules/filelist": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/filelist/-/filelist-1.0.4.tgz", + "integrity": "sha512-w1cEuf3S+DrLCQL7ET6kz+gmlJdbq9J7yXCSjK/OZCPA+qEN1WyF4ZAf0YYJa4/shHJra2t/d/r8SV4Ji+x+8Q==", + "dev": true, + "dependencies": { + "minimatch": "^5.0.1" + } + }, + "node_modules/filelist/node_modules/minimatch": { + "version": "5.1.6", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", + "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } }, "node_modules/filename-reserved-regex": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-3.0.0.tgz", - "integrity": "sha512-hn4cQfU6GOT/7cFHXBqeBg2TbrMBgdD0kcjLhvSQYYwm3s4B6cjvBfb7nBALJLAXqmU5xajSa7X2NnUud/VCdw==", "dev": true, + "license": "MIT", "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, @@ -1155,9 +2668,8 @@ }, "node_modules/filenamify": { "version": "5.1.1", - "resolved": "https://registry.npmjs.org/filenamify/-/filenamify-5.1.1.tgz", - "integrity": "sha512-M45CbrJLGACfrPOkrTp3j2EcO9OBkKUYME0eiqOCa7i2poaklU0jhlIaMlr8ijLorT0uLAzrn3qXOp5684CkfA==", "dev": true, + "license": "MIT", "dependencies": { "filename-reserved-regex": "^3.0.0", "strip-outer": "^2.0.0", @@ -1171,10 +2683,9 @@ } }, "node_modules/fill-range": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": 
"sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", + "version": "7.1.1", "dev": true, + "license": "MIT", "dependencies": { "to-regex-range": "^5.0.1" }, @@ -1184,8 +2695,7 @@ }, "node_modules/finalhandler": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", + "license": "MIT", "dependencies": { "debug": "2.6.9", "encodeurl": "~1.0.2", @@ -1199,11 +2709,23 @@ "node": ">= 0.8" } }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dev": true, + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, "node_modules/find-versions": { "version": "5.1.0", - "resolved": "https://registry.npmjs.org/find-versions/-/find-versions-5.1.0.tgz", - "integrity": "sha512-+iwzCJ7C5v5KgcBuueqVoNiHVoQpwiUK5XFLjf0affFTep+Wcw93tPvmb8tqujDNmzhBDPddnWV/qgWSXgq+Hg==", "dev": true, + "license": "MIT", "dependencies": { "semver-regex": "^4.0.5" }, @@ -1216,8 +2738,7 @@ }, "node_modules/formdata-polyfill": { "version": "4.0.10", - "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", - "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", + "license": "MIT", "dependencies": { "fetch-blob": "^3.1.2" }, @@ -1227,32 +2748,66 @@ }, "node_modules/forwarded": { "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", - "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/fresh": { 
"version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==", + "dev": true + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, "node_modules/function-bind": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "dev": true, + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, "node_modules/get-intrinsic": { "version": "1.2.4", - "resolved": 
"https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", + "license": "MIT", "dependencies": { "es-errors": "^1.3.0", "function-bind": "^1.1.2", @@ -1267,20 +2822,48 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "dev": true, + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/get-stream": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", - "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dev": true, + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, "node_modules/glob-parent": { "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", "dev": true, + "license": "ISC", "dependencies": { "is-glob": "^4.0.1" }, @@ -1288,10 +2871,40 @@ "node": ">= 6" } }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "1.1.11", + 
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/gopd": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", + "license": "MIT", "dependencies": { "get-intrinsic": "^1.1.3" }, @@ -1301,9 +2914,8 @@ }, "node_modules/got": { "version": "11.8.6", - "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", - "integrity": "sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==", "dev": true, + "license": "MIT", "dependencies": { "@sindresorhus/is": "^4.0.0", "@szmarczak/http-timer": "^4.0.5", @@ -1324,10 +2936,24 @@ "url": "https://github.com/sindresorhus/got?sponsor=1" } }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==", + "dev": true + }, + "node_modules/has-flag": { + 
"version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/has-property-descriptors": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "license": "MIT", "dependencies": { "es-define-property": "^1.0.0" }, @@ -1336,9 +2962,8 @@ } }, "node_modules/has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==", + "version": "1.0.3", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -1348,8 +2973,7 @@ }, "node_modules/has-symbols": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==", + "license": "MIT", "engines": { "node": ">= 0.4" }, @@ -1358,9 +2982,8 @@ } }, "node_modules/hasown": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.1.tgz", - "integrity": "sha512-1/th4MHjnwncwXsIW6QMzlvYL9kG5e/CpVvLRZe4XPa8TOUNbCELqmvhDmnkNsAjwaG4+I8gJJL0JBvTTLO9qA==", + "version": "2.0.2", + "license": "MIT", "dependencies": { "function-bind": "^1.1.2" }, @@ -1368,16 +2991,20 @@ "node": ">= 0.4" } }, + "node_modules/html-escaper": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz", + "integrity": "sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg==", + "dev": true + }, "node_modules/http-cache-semantics": { "version": 
"4.1.1", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", - "dev": true + "dev": true, + "license": "BSD-2-Clause" }, "node_modules/http-errors": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", + "license": "MIT", "dependencies": { "depd": "2.0.0", "inherits": "2.0.4", @@ -1391,9 +3018,8 @@ }, "node_modules/http2-wrapper": { "version": "1.0.3", - "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz", - "integrity": "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==", "dev": true, + "license": "MIT", "dependencies": { "quick-lru": "^5.1.1", "resolve-alpn": "^1.0.0" @@ -1404,17 +3030,15 @@ }, "node_modules/human-signals": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.17.0" } }, "node_modules/iconv-lite": { "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", + "license": "MIT", "dependencies": { "safer-buffer": ">= 2.1.2 < 3" }, @@ -1424,8 +3048,6 @@ }, "node_modules/ieee754": { "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", "dev": true, "funding": [ { @@ -1440,35 +3062,110 @@ "type": "consulting", "url": 
"https://feross.org/support" } - ] + ], + "license": "BSD-3-Clause" + }, + "node_modules/import-local": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.2.0.tgz", + "integrity": "sha512-2SPlun1JUPWoM6t3F0dw0FkCF/jWY8kttcY4f599GLTSjh2OCuuhdTkJQsEcZzBqbXZGKMK2OqW1oZsjtf/gQA==", + "dev": true, + "dependencies": { + "pkg-dir": "^4.2.0", + "resolve-cwd": "^3.0.0" + }, + "bin": { + "import-local-fixture": "fixtures/cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. 
Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dev": true, + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } }, "node_modules/inherits": { "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "license": "ISC" }, "node_modules/ipaddr.js": { "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==", + "license": "MIT", "engines": { "node": ">= 0.10" } }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "resolved": "https://registry.npmjs.org/is-arrayish/-/is-arrayish-0.2.1.tgz", + "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", + "dev": true + }, + "node_modules/is-core-module": { + "version": "2.15.0", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.0.tgz", + "integrity": "sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==", + "dev": true, + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-extglob": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": 
"sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-generator-fn": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-generator-fn/-/is-generator-fn-2.1.0.tgz", + "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, "node_modules/is-glob": { "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", "dev": true, + "license": "MIT", "dependencies": { "is-extglob": "^2.1.1" }, @@ -1478,41 +3175,36 @@ }, "node_modules/is-number": { "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.12.0" } }, "node_modules/is-plain-obj": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/is-stream": { "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/isexe": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true + "dev": true, + 
"license": "ISC" }, "node_modules/isomorphic-fetch": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz", - "integrity": "sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==", + "license": "MIT", "dependencies": { "node-fetch": "^2.6.1", "whatwg-fetch": "^3.4.1" @@ -1520,8 +3212,7 @@ }, "node_modules/isomorphic-fetch/node_modules/node-fetch": { "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", + "license": "MIT", "dependencies": { "whatwg-url": "^5.0.0" }, @@ -1537,83 +3228,1043 @@ } } }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "6.0.3", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-6.0.3.tgz", + "integrity": "sha512-Vtgk7L/R2JHyyGW07spoFlB8/lpjiOLTjMdms6AFMraYt3BaJauod/NGrfnVG/y4Ix1JEuMRPDPEj2ua+zz1/Q==", + "dev": true, + "dependencies": { + "@babel/core": "^7.23.9", + "@babel/parser": "^7.23.9", + "@istanbuljs/schema": "^0.1.3", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz", + "integrity": "sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw==", + "dev": true, + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": 
"^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.1.tgz", + "integrity": "sha512-n3s8EwkdFIJCG3BPKBYvskgXGoy88ARzvegkitk60NxRdwltLOTaH7CUiMRXvwYorl0Q712iEjcWB+fK/MrWVw==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps/node_modules/debug": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", + "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/istanbul-lib-source-maps/node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/istanbul-lib-source-maps/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/istanbul-reports": { + "version": "3.1.7", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.1.7.tgz", + "integrity": "sha512-BewmUXImeuRk2YY0PVbxgKAysvhRPUQE0h5QRM++nVWyubKGV0l8qQ5op8+B2DOmwSe63Jivj0BjkPQVf8fP5g==", + "dev": true, + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jake": 
{ + "version": "10.9.2", + "resolved": "https://registry.npmjs.org/jake/-/jake-10.9.2.tgz", + "integrity": "sha512-2P4SQ0HrLQ+fw6llpLnOaGAvN2Zu6778SJMrCUwns4fOoG9ayrTiZk3VV8sCPkVZF8ab0zksVpS8FDY5pRCNBA==", + "dev": true, + "dependencies": { + "async": "^3.2.3", + "chalk": "^4.0.2", + "filelist": "^1.0.4", + "minimatch": "^3.1.2" + }, + "bin": { + "jake": "bin/cli.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/jake/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/jake/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest/-/jest-29.7.0.tgz", + "integrity": "sha512-NIy3oAFp9shda19hy4HK0HRTWKtPJmGdnvywu01nOqNC2vZg+Z+fvJDxpMQA88eb2I9EcafcdjYgsDthnYTvGw==", + "dev": true, + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/types": "^29.6.3", + "import-local": "^3.0.2", + "jest-cli": "^29.7.0" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-changed-files": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-changed-files/-/jest-changed-files-29.7.0.tgz", + "integrity": 
"sha512-fEArFiwf1BpQ+4bXSprcDc3/x4HSzL4al2tozwVpDFpsxALjLYdyiIK4e5Vz66GQJIbXJ82+35PtysofptNX2w==", + "dev": true, + "dependencies": { + "execa": "^5.0.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-changed-files/node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/jest-changed-files/node_modules/execa": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", + "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/jest-changed-files/node_modules/get-stream": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", + "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-changed-files/node_modules/is-stream": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", + "integrity": 
"sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-changed-files/node_modules/npm-run-path": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", + "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", + "dev": true, + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-changed-files/node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-changed-files/node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-changed-files/node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-changed-files/node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + 
}, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/jest-circus": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-circus/-/jest-circus-29.7.0.tgz", + "integrity": "sha512-3E1nCMgipcTkCocFwM90XXQab9bS+GMsjdpmPrlelaxwD93Ad8iVEjX/vvHPdLPnFf+L40u+5+iutRdA1N9myw==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/expect": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "co": "^4.6.0", + "dedent": "^1.0.0", + "is-generator-fn": "^2.0.0", + "jest-each": "^29.7.0", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "p-limit": "^3.1.0", + "pretty-format": "^29.7.0", + "pure-rand": "^6.0.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-cli": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-cli/-/jest-cli-29.7.0.tgz", + "integrity": "sha512-OVVobw2IubN/GSYsxETi+gOe7Ka59EFMR/twOU3Jb2GnKKeMGJB5SGUUrEz3SFVmJASUdZUzy83sLNNQ2gZslg==", + "dev": true, + "dependencies": { + "@jest/core": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "create-jest": "^29.7.0", + "exit": "^0.1.2", + "import-local": "^3.0.2", + "jest-config": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "yargs": "^17.3.1" + }, + "bin": { + "jest": "bin/jest.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "node-notifier": "^8.0.1 || ^9.0.0 || ^10.0.0" + }, + "peerDependenciesMeta": { + "node-notifier": { + "optional": true + } + } + }, + "node_modules/jest-config": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-config/-/jest-config-29.7.0.tgz", + "integrity": 
"sha512-uXbpfeQ7R6TZBqI3/TxCU4q4ttk3u0PJeC+E0zbfSoSjq6bJ7buBPxzQPL0ifrkY4DNu4JUdk0ImlBUYi840eQ==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@jest/test-sequencer": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-jest": "^29.7.0", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "deepmerge": "^4.2.2", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-circus": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-runner": "^29.7.0", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "micromatch": "^4.0.4", + "parse-json": "^5.2.0", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "strip-json-comments": "^3.1.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@types/node": "*", + "ts-node": ">=9.0.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "ts-node": { + "optional": true + } + } + }, + "node_modules/jest-diff": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-diff/-/jest-diff-29.7.0.tgz", + "integrity": "sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "diff-sequences": "^29.6.3", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-docblock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-docblock/-/jest-docblock-29.7.0.tgz", + "integrity": "sha512-q617Auw3A612guyaFgsbFeYpNP5t2aoUNLwBUbc/0kD1R4t9ixDbyFTHd1nok4epoVFpr7PmeWHrhvuV3XaJ4g==", + "dev": true, + "dependencies": { + "detect-newline": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-each": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-each/-/jest-each-29.7.0.tgz", + 
"integrity": "sha512-gns+Er14+ZrEoC5fhOfYCY1LOHHr0TI+rQUHZS8Ttw2l7gl+80eHc/gFf2Ktkw0+SIACDTeWvpFcv3B04VembQ==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "jest-util": "^29.7.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-environment-node": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-environment-node/-/jest-environment-node-29.7.0.tgz", + "integrity": "sha512-DOSwCRqXirTOyheM+4d5YZOrWcdu0LNZ87ewUoywbcb2XR4wKgqiG8vNeYwhjFMbEkfju7wx2GYH0P2gevGvFw==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-mock": "^29.7.0", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-get-type": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-get-type/-/jest-get-type-29.6.3.tgz", + "integrity": "sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + 
"node_modules/jest-leak-detector": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-leak-detector/-/jest-leak-detector-29.7.0.tgz", + "integrity": "sha512-kYA8IJcSYtST2BY9I+SMC32nDpBT3J2NvWJx8+JCuCdl/CR1I4EKUJROiP8XtCcxqgTTBGJNdbB1A8XRKbTetw==", + "dev": true, + "dependencies": { + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-matcher-utils": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz", + "integrity": "sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-message-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-message-util/-/jest-message-util-29.7.0.tgz", + "integrity": "sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.12.13", + "@jest/types": "^29.6.3", + "@types/stack-utils": "^2.0.0", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "micromatch": "^4.0.4", + "pretty-format": "^29.7.0", + "slash": "^3.0.0", + "stack-utils": "^2.0.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-mock": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-mock/-/jest-mock-29.7.0.tgz", + "integrity": "sha512-ITOMZn+UkYS4ZFh83xYAOzWStloNzJFO2s8DWrE4lhtGD+AorgnbkiKERe4wQVBydIGPx059g6riW5Btp6Llnw==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "jest-util": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + 
"node_modules/jest-pnp-resolver": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/jest-pnp-resolver/-/jest-pnp-resolver-1.2.3.tgz", + "integrity": "sha512-+3NpwQEnRoIBtx4fyhblQDPgJI0H1IEIkX7ShLUjPGA7TtUTvI1oiKi3SR4oBR0hQhQR80l4WAe5RrXBwWMA8w==", + "dev": true, + "engines": { + "node": ">=6" + }, + "peerDependencies": { + "jest-resolve": "*" + }, + "peerDependenciesMeta": { + "jest-resolve": { + "optional": true + } + } + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "dev": true, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve/-/jest-resolve-29.7.0.tgz", + "integrity": "sha512-IOVhZSrg+UvVAshDSDtHyFCCBUl/Q3AAJv8iZ6ZjnZ74xzvwuzLXid9IIIPgTnY62SJjfuupMKZsZQRsCvxEgA==", + "dev": true, + "dependencies": { + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-pnp-resolver": "^1.2.2", + "jest-util": "^29.7.0", + "jest-validate": "^29.7.0", + "resolve": "^1.20.0", + "resolve.exports": "^2.0.0", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-resolve-dependencies": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-resolve-dependencies/-/jest-resolve-dependencies-29.7.0.tgz", + "integrity": "sha512-un0zD/6qxJ+S0et7WxeI3H5XSe9lTBBR7bOHCHXkKR6luG5mwDDlIzVQ0V5cZCuoTgEdcdwzTghYkTWfubi+nA==", + "dev": true, + "dependencies": { + "jest-regex-util": "^29.6.3", + "jest-snapshot": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runner": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runner/-/jest-runner-29.7.0.tgz", + 
"integrity": "sha512-fsc4N6cPCAahybGBfTRcq5wFR6fpLznMg47sY5aDpsoejOcVYFb07AHuSnR0liMcPTgBsA3ZJL6kFOjPdoNipQ==", + "dev": true, + "dependencies": { + "@jest/console": "^29.7.0", + "@jest/environment": "^29.7.0", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "graceful-fs": "^4.2.9", + "jest-docblock": "^29.7.0", + "jest-environment-node": "^29.7.0", + "jest-haste-map": "^29.7.0", + "jest-leak-detector": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-resolve": "^29.7.0", + "jest-runtime": "^29.7.0", + "jest-util": "^29.7.0", + "jest-watcher": "^29.7.0", + "jest-worker": "^29.7.0", + "p-limit": "^3.1.0", + "source-map-support": "0.5.13" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-runtime": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-runtime/-/jest-runtime-29.7.0.tgz", + "integrity": "sha512-gUnLjgwdGqW7B4LvOIkbKs9WGbn+QLqRQQ9juC6HndeDiezIwhDP+mhMwHWCEcfQ5RUXa6OPnFF8BJh5xegwwQ==", + "dev": true, + "dependencies": { + "@jest/environment": "^29.7.0", + "@jest/fake-timers": "^29.7.0", + "@jest/globals": "^29.7.0", + "@jest/source-map": "^29.6.3", + "@jest/test-result": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "cjs-module-lexer": "^1.0.0", + "collect-v8-coverage": "^1.0.0", + "glob": "^7.1.3", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-mock": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-resolve": "^29.7.0", + "jest-snapshot": "^29.7.0", + "jest-util": "^29.7.0", + "slash": "^3.0.0", + "strip-bom": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-snapshot": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-snapshot/-/jest-snapshot-29.7.0.tgz", + "integrity": 
"sha512-Rm0BMWtxBcioHr1/OX5YCP8Uov4riHvKPknOGs804Zg9JGZgmIBkbtlxJC/7Z4msKYVbIJtfU+tKb8xlYNfdkw==", + "dev": true, + "dependencies": { + "@babel/core": "^7.11.6", + "@babel/generator": "^7.7.2", + "@babel/plugin-syntax-jsx": "^7.7.2", + "@babel/plugin-syntax-typescript": "^7.7.2", + "@babel/types": "^7.3.3", + "@jest/expect-utils": "^29.7.0", + "@jest/transform": "^29.7.0", + "@jest/types": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0", + "chalk": "^4.0.0", + "expect": "^29.7.0", + "graceful-fs": "^4.2.9", + "jest-diff": "^29.7.0", + "jest-get-type": "^29.6.3", + "jest-matcher-utils": "^29.7.0", + "jest-message-util": "^29.7.0", + "jest-util": "^29.7.0", + "natural-compare": "^1.4.0", + "pretty-format": "^29.7.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-validate/-/jest-validate-29.7.0.tgz", + "integrity": "sha512-ZB7wHqaRGVw/9hST/OuFUReG7M8vKeq0/J2egIGLdvjHCmYqGARhzXmtgi+gVeZ5uXFF219aOc3Ls2yLg27tkw==", + "dev": true, + "dependencies": { + "@jest/types": "^29.6.3", + "camelcase": "^6.2.0", + "chalk": "^4.0.0", + "jest-get-type": "^29.6.3", + "leven": "^3.1.0", + "pretty-format": "^29.7.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-validate/node_modules/camelcase": { + "version": "6.3.0", + "resolved": 
"https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", + "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/jest-watcher": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-watcher/-/jest-watcher-29.7.0.tgz", + "integrity": "sha512-49Fg7WXkU3Vl2h6LbLtMQ/HyB6rXSIX7SqvBLQmssRBGN9I0PNvPmAmCWSOY6SOvrjhI/F7/bGAv9RtnsPA03g==", + "dev": true, + "dependencies": { + "@jest/test-result": "^29.7.0", + "@jest/types": "^29.6.3", + "@types/node": "*", + "ansi-escapes": "^4.2.1", + "chalk": "^4.0.0", + "emittery": "^0.13.1", + "jest-util": "^29.7.0", + "string-length": "^4.0.1" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dev": true, + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": 
"sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "dev": true + }, + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "dev": true, + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } + }, "node_modules/json-buffer": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", "dev": true }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonpath": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/jsonpath/-/jsonpath-1.1.1.tgz", + "integrity": "sha512-l6Cg7jRpixfbgoWgkrl77dgEj8RPvND0wMH6TwQmi9Qs4TFfS9u5cUFnbeKTwj5ga5Y3BTGGNI28k117LJ009w==", + "dependencies": { 
+ "esprima": "1.2.2", + "static-eval": "2.0.2", + "underscore": "1.12.1" + } + }, + "node_modules/jsonpath/node_modules/esprima": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-1.2.2.tgz", + "integrity": "sha512-+JpPZam9w5DuJ3Q67SqsMGtiHKENSMRVoxvArfJZK01/BfLEObtZ6orJa/MtoGNR/rfMgp5837T41PAmTwAv/A==", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=0.4.0" + } + }, "node_modules/keyv": { "version": "4.5.4", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", "dev": true, + "license": "MIT", "dependencies": { "json-buffer": "3.0.1" } }, + "node_modules/kleur": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz", + "integrity": "sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/leven": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/leven/-/leven-3.1.0.tgz", + "integrity": "sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/levn": { + "version": "0.3.0", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.3.0.tgz", + "integrity": "sha512-0OO4y2iOHix2W6ujICbKIaEQXvFQHue65vUG3pb5EUomzPI90z9hsA1VsO/dbIIpC53J8gxM9Q4Oho0jrCM/yA==", + "dependencies": { + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true + }, + 
"node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dev": true, + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.memoize": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/lodash.memoize/-/lodash.memoize-4.1.2.tgz", + "integrity": "sha512-t7j+NzmgnQzTAYXcsHYLgimltOV1MXHtlOWf6GjL9Kj8GK5FInw5JotxvbOs+IvV1/Dzo04/fCGfLVs7aXb4Ag==", + "dev": true + }, + "node_modules/lodash.merge": { + "version": "4.6.2", + "license": "MIT" + }, "node_modules/lowercase-keys": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", - "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/lru-cache": { "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", "dev": true, + "license": "ISC", "dependencies": { "pseudomap": "^1.0.2", "yallist": "^2.1.2" } }, + "node_modules/make-dir": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz", + "integrity": "sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw==", + "dev": true, + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-error": { + "version": "1.3.6", + "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz", + "integrity": 
"sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==", + "dev": true + }, + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dev": true, + "dependencies": { + "tmpl": "1.0.5" + } + }, "node_modules/media-typer": { "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/merge-descriptors": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" + "license": "MIT" }, "node_modules/merge-stream": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/merge2": { "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", "dev": true, + "license": "MIT", "engines": { "node": ">= 8" } }, "node_modules/methods": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/micromatch": { - "version": "4.0.5", - "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", + "version": "4.0.7", "dev": true, + "license": "MIT", "dependencies": { - "braces": "^3.0.2", + "braces": "^3.0.3", "picomatch": "^2.3.1" }, "engines": { @@ -1622,8 +4273,7 @@ }, "node_modules/mime": { "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "license": "MIT", "bin": { "mime": "cli.js" }, @@ -1633,16 +4283,14 @@ }, "node_modules/mime-db": { "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/mime-types": { "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", "dependencies": { "mime-db": "1.52.0" }, @@ -1652,27 +4300,24 @@ }, "node_modules/mimic-fn": { "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", "dev": true, + "license": "MIT", "engines": { "node": ">=6" } }, "node_modules/mimic-response": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", - "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, "node_modules/minimatch": { - "version": "9.0.3", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", + "version": "9.0.4", "dev": true, + "license": "ISC", "dependencies": { "brace-expansion": "^2.0.1" }, @@ -1685,21 +4330,23 @@ }, "node_modules/ms": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" + "license": "MIT" + }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true }, "node_modules/negotiator": { "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": "sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==", + "license": "MIT", "engines": { "node": ">= 0.6" } }, "node_modules/node-domexception": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==", "funding": [ { "type": "github", @@ -1710,14 +4357,14 @@ "url": "https://paypal.me/jimmywarting" } ], + "license": "MIT", "engines": { "node": ">=10.5.0" } }, "node_modules/node-fetch": { "version": "3.3.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", + "license": "MIT", "dependencies": { "data-uri-to-buffer": "^4.0.0", "fetch-blob": "^3.1.4", @@ -1731,11 +4378,31 @@ "url": "https://opencollective.com/node-fetch" } }, + "node_modules/node-int64": { + "version": 
"0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==", + "dev": true + }, + "node_modules/node-releases": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==", + "dev": true + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/normalize-url": { "version": "6.1.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", - "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", "dev": true, + "license": "MIT", "engines": { "node": ">=10" }, @@ -1745,9 +4412,8 @@ }, "node_modules/npm-run-path": { "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", "dev": true, + "license": "MIT", "dependencies": { "path-key": "^2.0.0" }, @@ -1757,16 +4423,14 @@ }, "node_modules/object-inspect": { "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==", + "license": "MIT", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/on-finished": { "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": 
"sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", + "license": "MIT", "dependencies": { "ee-first": "1.1.1" }, @@ -1776,18 +4440,16 @@ }, "node_modules/once": { "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "dev": true, + "license": "ISC", "dependencies": { "wrappy": "1" } }, "node_modules/onetime": { "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "dev": true, + "license": "MIT", "dependencies": { "mimic-fn": "^2.1.0" }, @@ -1798,11 +4460,26 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/optionator": { + "version": "0.8.3", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz", + "integrity": "sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA==", + "dependencies": { + "deep-is": "~0.1.3", + "fast-levenshtein": "~2.0.6", + "levn": "~0.3.0", + "prelude-ls": "~1.1.2", + "type-check": "~0.3.2", + "word-wrap": "~1.2.3" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/os-filter-obj": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/os-filter-obj/-/os-filter-obj-2.0.0.tgz", - "integrity": "sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg==", "dev": true, + "license": "MIT", "dependencies": { "arch": "^2.1.0" }, @@ -1812,49 +4489,136 @@ }, "node_modules/p-cancelable": { "version": "2.1.1", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", - "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, 
"node_modules/p-finally": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dev": true, + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dev": true, + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-5.2.0.tgz", + "integrity": 
"sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/parseurl": { "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/path-key": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", "dev": true, + "license": "MIT", "engines": { "node": ">=4" } }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true + }, "node_modules/path-to-regexp": { "version": "0.1.7", - "resolved": 
"https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" + "license": "MIT" }, "node_modules/peek-readable": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-5.0.0.tgz", - "integrity": "sha512-YtCKvLUOvwtMGmrniQPdO7MwPjgkFBtFIrmfSbYmYuq3tKDV/mcfAhBth1+C3ru7uXIZasc/pHnb+YDYNkkj4A==", "dev": true, + "license": "MIT", "engines": { "node": ">=14.16" }, @@ -1863,11 +4627,16 @@ "url": "https://github.com/sponsors/Borewit" } }, + "node_modules/picocolors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "dev": true + }, "node_modules/picomatch": { "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", "dev": true, + "license": "MIT", "engines": { "node": ">=8.6" }, @@ -1877,18 +4646,45 @@ }, "node_modules/pify": { "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, - "node_modules/prettier": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", - "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", "dev": true, + "engines": { + "node": 
">= 6" + } + }, + "node_modules/pkg-dir": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-4.2.0.tgz", + "integrity": "sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ==", + "dev": true, + "dependencies": { + "find-up": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/prelude-ls": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.1.2.tgz", + "integrity": "sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w==", + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/prettier": { + "version": "3.3.2", + "dev": true, + "license": "MIT", "bin": { "prettier": "bin/prettier.cjs" }, @@ -1899,10 +4695,48 @@ "url": "https://github.com/prettier/prettier?sponsor=1" } }, + "node_modules/pretty-format": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", + "integrity": "sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==", + "dev": true, + "dependencies": { + "@jest/schemas": "^29.6.3", + "ansi-styles": "^5.0.0", + "react-is": "^18.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/pretty-format/node_modules/ansi-styles": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", + "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/prompts": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", + "integrity": "sha512-NxNv/kLguCA7p3jE8oL2aEBsrJWgAakBpgmgK6lpPWV+WuOmY6r2/zbAVnP+T8bQlA0nzHXSJSJW0Hq7ylaD2Q==", + "dev": true, + "dependencies": { + "kleur": 
"^3.0.3", + "sisteransi": "^1.0.5" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/proxy-addr": { "version": "2.0.7", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", - "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", + "license": "MIT", "dependencies": { "forwarded": "0.2.0", "ipaddr.js": "1.9.1" @@ -1913,24 +4747,37 @@ }, "node_modules/pseudomap": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/pump": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", "dev": true, + "license": "MIT", "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, + "node_modules/pure-rand": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", + "integrity": "sha512-bVWawvoZoBYpp6yIoQtQXHZjmz35RSVHnUOTefl8Vcjr8snTPY1wnpSPMWekcFwbxI6gtmT7rSYPFvz71ldiOA==", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/dubzzz" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fast-check" + } + ] + }, "node_modules/qs": { "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", + "license": "BSD-3-Clause", "dependencies": { "side-channel": "^1.0.4" }, @@ -1943,120 +4790,6 @@ }, "node_modules/queue-microtask": { "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": 
"sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "dev": true, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/readable-web-to-node-stream": { - "version": "3.0.2", - "resolved": 
"https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.2.tgz", - "integrity": "sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==", - "dev": true, - "dependencies": { - "readable-stream": "^3.6.0" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/Borewit" - } - }, - "node_modules/resolve-alpn": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", - "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", - "dev": true - }, - "node_modules/responselike": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.1.tgz", - "integrity": "sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==", - "dev": true, - "dependencies": { - "lowercase-keys": "^2.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true, - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", "dev": true, "funding": [ { @@ -2072,14 +4805,157 @@ "url": "https://feross.org/support" } ], - "dependencies": { - "queue-microtask": "^1.2.2" + "license": "MIT" + }, + "node_modules/quick-lru": { + "version": "5.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" } }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", + "node_modules/range-parser": { + "version": "1.2.1", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "dev": true + }, + "node_modules/readable-stream": { + "version": "3.6.2", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/readable-web-to-node-stream": { + "version": "3.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "readable-stream": "^3.6.0" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/Borewit" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + 
"dev": true, + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-alpn": { + "version": "1.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/resolve-cwd": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/resolve-cwd/-/resolve-cwd-3.0.0.tgz", + "integrity": "sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg==", + "dev": true, + "dependencies": { + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve.exports": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/resolve.exports/-/resolve.exports-2.0.2.tgz", + "integrity": "sha512-X2UW6Nw3n/aMgDVy+0rSqgHlv39WZAlZrXCdnbyEiKm17DSqHX4MmQMaST3FbeWR5FTuRcUwYAziZajji0Y7mg==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/responselike": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "lowercase-keys": "^2.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/reusify": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "dev": true, "funding": [ { "type": "github", @@ -2093,21 +4969,38 @@ "type": "consulting", "url": "https://feross.org/support" } - ] + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + 
"node_modules/safe-buffer": { + "version": "5.2.1", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" }, "node_modules/safer-buffer": { "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" + "license": "MIT" }, "node_modules/semver": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", - "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "version": "7.6.2", "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, + "license": "ISC", "bin": { "semver": "bin/semver.js" }, @@ -2117,9 +5010,8 @@ }, "node_modules/semver-regex": { "version": "4.0.5", - "resolved": "https://registry.npmjs.org/semver-regex/-/semver-regex-4.0.5.tgz", - "integrity": "sha512-hunMQrEy1T6Jr2uEVjrAIqjwWcQTgOAcIM52C8MY1EZSD3DDNft04XzvYKPqjED65bNVVko0YI38nYeEHCX3yw==", "dev": true, + "license": "MIT", "engines": { "node": ">=12" }, @@ -2129,9 +5021,8 @@ }, "node_modules/semver-truncate": { "version": "3.0.0", - "resolved": "https://registry.npmjs.org/semver-truncate/-/semver-truncate-3.0.0.tgz", - "integrity": "sha512-LJWA9kSvMolR51oDE6PN3kALBNaUdkxzAGcexw8gjMA8xr5zUqK0JiR3CgARSqanYF3Z1YHvsErb1KDgh+v7Rg==", "dev": true, + "license": "MIT", "dependencies": { "semver": "^7.3.5" }, @@ -2142,28 +5033,9 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/semver/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - 
"dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/semver/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, "node_modules/send": { "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", + "license": "MIT", "dependencies": { "debug": "2.6.9", "depd": "2.0.0", @@ -2185,13 +5057,11 @@ }, "node_modules/send/node_modules/ms": { "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + "license": "MIT" }, "node_modules/serve-static": { "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", + "license": "MIT", "dependencies": { "encodeurl": "~1.0.2", "escape-html": "~1.0.3", @@ -2203,16 +5073,15 @@ } }, "node_modules/set-function-length": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.1.tgz", - "integrity": "sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g==", + "version": "1.2.2", + "license": "MIT", "dependencies": { - "define-data-property": "^1.1.2", + "define-data-property": "^1.1.4", "es-errors": "^1.3.0", "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.3", + "get-intrinsic": "^1.2.4", "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" + "has-property-descriptors": "^1.0.2" }, "engines": { "node": ">= 0.4" @@ -2220,14 +5089,12 @@ }, 
"node_modules/setprototypeof": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" + "license": "ISC" }, "node_modules/shebang-command": { "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", "dev": true, + "license": "MIT", "dependencies": { "shebang-regex": "^1.0.0" }, @@ -2237,19 +5104,17 @@ }, "node_modules/shebang-regex": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/side-channel": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.5.tgz", - "integrity": "sha512-QcgiIWV4WV7qWExbN5llt6frQB/lBven9pqliLXfGPB+K9ZYXxDozp0wLkHS24kWCm+6YXH/f0HhnObZnZOBnQ==", + "version": "1.0.6", + "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bind": "^1.0.7", "es-errors": "^1.3.0", "get-intrinsic": "^1.2.4", "object-inspect": "^1.13.1" @@ -2263,24 +5128,27 @@ }, "node_modules/signal-exit": { "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", + "dev": true, + "license": "ISC" + }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==", "dev": true }, "node_modules/slash": { "version": 
"3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=8" } }, "node_modules/sort-keys": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", - "integrity": "sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg==", "dev": true, + "license": "MIT", "dependencies": { "is-plain-obj": "^1.0.0" }, @@ -2290,9 +5158,8 @@ }, "node_modules/sort-keys-length": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", - "integrity": "sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw==", "dev": true, + "license": "MIT", "dependencies": { "sort-keys": "^1.0.0" }, @@ -2302,53 +5169,161 @@ }, "node_modules/source-map": { "version": "0.7.4", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", - "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", "dev": true, + "license": "BSD-3-Clause", "engines": { "node": ">= 8" } }, + "node_modules/source-map-support": { + "version": "0.5.13", + "resolved": "https://registry.npmjs.org/source-map-support/-/source-map-support-0.5.13.tgz", + "integrity": "sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==", + "dev": true, + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "node_modules/stack-utils": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/stack-utils/-/stack-utils-2.0.6.tgz", + "integrity": "sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==", + "dev": true, + "dependencies": { + "escape-string-regexp": "^2.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/stack-utils/node_modules/escape-string-regexp": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz", + "integrity": "sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/static-eval": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/static-eval/-/static-eval-2.0.2.tgz", + "integrity": "sha512-N/D219Hcr2bPjLxPiV+TQE++Tsmrady7TqAJugLy7Xk1EumfDWS/f5dtBbkRCGE7wKKXuYockQoj8Rm2/pVKyg==", + "dependencies": { + "escodegen": "^1.8.1" + } + }, "node_modules/statuses": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, "node_modules/string_decoder": { "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "dev": true, + "license": "MIT", "dependencies": { "safe-buffer": "~5.2.0" } }, + "node_modules/string-length": { + "version": "4.0.2", + "resolved": 
"https://registry.npmjs.org/string-length/-/string-length-4.0.2.tgz", + "integrity": "sha512-+l6rNN5fYHNhZZy41RXsYptCjA2Igmq4EG7kZAYFQI1E1VTXarr6ZPXBg6eq7Y6eK4FEhY6AJlyuFIb/v/S0VQ==", + "dev": true, + "dependencies": { + "char-regex": "^1.0.2", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-bom": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-4.0.0.tgz", + "integrity": "sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w==", + "dev": true, + "engines": { + "node": ">=8" + } + }, "node_modules/strip-eof": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", "dev": true, + "license": "MIT", "engines": { "node": ">=0.10.0" } }, "node_modules/strip-final-newline": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", "dev": true, + "license": "MIT", 
"engines": { "node": ">=6" } }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/strip-outer": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-2.0.0.tgz", - "integrity": "sha512-A21Xsm1XzUkK0qK1ZrytDUvqsQWict2Cykhvi0fBQntGG5JSprESasEyV1EZ/4CiR5WB5KjzLTrP/bO37B0wPg==", "dev": true, + "license": "MIT", "engines": { "node": "^12.20.0 || ^14.13.1 || >=16.0.0" }, @@ -2358,9 +5333,8 @@ }, "node_modules/strtok3": { "version": "7.0.0", - "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-7.0.0.tgz", - "integrity": "sha512-pQ+V+nYQdC5H3Q7qBZAz/MO6lwGhoC2gOAjuouGf/VO0m7vQRh8QNMl2Uf6SwAtzZ9bOw3UIeBukEGNJl5dtXQ==", "dev": true, + "license": "MIT", "dependencies": { "@tokenizer/token": "^0.3.0", "peek-readable": "^5.0.0" @@ -2373,11 +5347,85 @@ "url": "https://github.com/sponsors/Borewit" } }, + "node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dev": true, + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/test-exclude": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dev": true, + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/test-exclude/node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/test-exclude/node_modules/minimatch": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", + "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "dev": true, + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==", + "dev": true + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "dev": true, + "engines": { + "node": ">=4" + } + }, "node_modules/to-regex-range": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", "dev": 
true, + "license": "MIT", "dependencies": { "is-number": "^7.0.0" }, @@ -2387,17 +5435,15 @@ }, "node_modules/toidentifier": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==", + "license": "MIT", "engines": { "node": ">=0.6" } }, "node_modules/token-types": { "version": "5.0.1", - "resolved": "https://registry.npmjs.org/token-types/-/token-types-5.0.1.tgz", - "integrity": "sha512-Y2fmSnZjQdDb9W4w4r1tswlMHylzWIeOKpx0aZH9BgGtACHhrk3OkT52AzwcuqTRBZtvvnTjDBh8eynMulu8Vg==", "dev": true, + "license": "MIT", "dependencies": { "@tokenizer/token": "^0.3.0", "ieee754": "^1.2.1" @@ -2412,14 +5458,12 @@ }, "node_modules/tr46": { "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" + "license": "MIT" }, "node_modules/trim-repeated": { "version": "2.0.0", - "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-2.0.0.tgz", - "integrity": "sha512-QUHBFTJGdOwmp0tbOG505xAgOp/YliZP/6UgafFXYZ26WT1bvQmSMJUvkeVSASuJJHbqsFbynTvkd5W8RBTipg==", "dev": true, + "license": "MIT", "dependencies": { "escape-string-regexp": "^5.0.0" }, @@ -2427,20 +5471,97 @@ "node": ">=12" } }, + "node_modules/ts-jest": { + "version": "29.2.4", + "resolved": "https://registry.npmjs.org/ts-jest/-/ts-jest-29.2.4.tgz", + "integrity": "sha512-3d6tgDyhCI29HlpwIq87sNuI+3Q6GLTTCeYRHCs7vDz+/3GCMwEtV9jezLyl4ZtnBgx00I7hm8PCP8cTksMGrw==", + "dev": true, + "dependencies": { + "bs-logger": "0.x", + "ejs": "^3.1.10", + "fast-json-stable-stringify": "2.x", + "jest-util": "^29.0.0", + "json5": "^2.2.3", + "lodash.memoize": "4.x", + "make-error": "1.x", + "semver": "^7.5.3", + "yargs-parser": "^21.0.1" + }, + "bin": { + "ts-jest": "cli.js" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || ^18.0.0 || 
>=20.0.0" + }, + "peerDependencies": { + "@babel/core": ">=7.0.0-beta.0 <8", + "@jest/transform": "^29.0.0", + "@jest/types": "^29.0.0", + "babel-jest": "^29.0.0", + "jest": "^29.0.0", + "typescript": ">=4.3 <6" + }, + "peerDependenciesMeta": { + "@babel/core": { + "optional": true + }, + "@jest/transform": { + "optional": true + }, + "@jest/types": { + "optional": true + }, + "babel-jest": { + "optional": true + }, + "esbuild": { + "optional": true + } + } + }, "node_modules/ts-matches": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-5.4.1.tgz", - "integrity": "sha512-kXrY75F0s0WD15N2bWKDScKlKgwnusN6dTRzGs1N7LlxQRnazrsBISC1HL4sy2adsyk65Zbx3Ui3IGN8leAFOQ==" + "version": "5.5.1", + "license": "MIT" }, "node_modules/tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" + "version": "2.6.3", + "license": "0BSD" + }, + "node_modules/type-check": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.3.2.tgz", + "integrity": "sha512-ZCmOJdvOWDBYJlzAoFkC+Q0+bUyEOS1ltgp1MGU03fqHG+dbi9tBFU2Rd9QKiDZFAYrhPh2JUf7rZRIuHRKtOg==", + "dependencies": { + "prelude-ls": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.21.3.tgz", + "integrity": "sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } }, "node_modules/type-is": { "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", + "license": "MIT", "dependencies": { "media-typer": "0.3.0", "mime-types": "~2.1.24" @@ -2450,10 +5571,9 @@ } }, "node_modules/typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", + "version": "5.4.5", "dev": true, + "license": "Apache-2.0", "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" @@ -2462,64 +5582,113 @@ "node": ">=14.17" } }, + "node_modules/underscore": { + "version": "1.12.1", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.12.1.tgz", + "integrity": "sha512-hEQt0+ZLDVUMhebKxL4x1BTtDY7bavVofhZ9KZ4aI26X9SRaE+Y3m83XUL1UP2jn8ynjndwCCpEHdUG+9pP1Tw==" + }, "node_modules/undici-types": { "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/unpipe": { "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, + "node_modules/update-browserslist-db": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz", + "integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": 
"https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.2", + "picocolors": "^1.0.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, "node_modules/util-deprecate": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "dev": true + "dev": true, + "license": "MIT" }, "node_modules/utils-merge": { "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==", + "license": "MIT", "engines": { "node": ">= 0.4.0" } }, + "node_modules/v8-to-istanbul": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", + "integrity": "sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA==", + "dev": true, + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.12", + "@types/istanbul-lib-coverage": "^2.0.1", + "convert-source-map": "^2.0.0" + }, + "engines": { + "node": ">=10.12.0" + } + }, "node_modules/vary": { "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==", + "license": "MIT", "engines": { "node": ">= 0.8" } }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dev": true, 
+ "dependencies": { + "makeerror": "1.0.12" + } + }, "node_modules/web-streams-polyfill": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", - "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==", + "version": "3.3.3", + "license": "MIT", "engines": { "node": ">= 8" } }, "node_modules/webidl-conversions": { "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" + "license": "BSD-2-Clause" }, "node_modules/whatwg-fetch": { "version": "3.6.20", - "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", - "integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==" + "license": "MIT" }, "node_modules/whatwg-url": { "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", + "license": "MIT", "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" @@ -2527,9 +5696,8 @@ }, "node_modules/which": { "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", "dev": true, + "license": "ISC", "dependencies": { "isexe": "^2.0.0" }, @@ -2537,1764 +5705,111 @@ "which": "bin/which" } }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/wrap-ansi": { + "version": 
"7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, "node_modules/wrappy": { "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true + "dev": true, + "license": "ISC" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": "sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dev": true, + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", + "integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==", + "dev": true, + "engines": { + "node": ">=10" + } }, "node_modules/yallist": { "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", - "dev": true + "dev": true, + "license": "ISC" }, "node_modules/yaml": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", - "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==", + "version": "2.4.5", + "license": "ISC", + "bin": { + "yaml": 
"bin.mjs" + }, "engines": { "node": ">= 14" } - } - }, - "dependencies": { - "@iarna/toml": { - "version": "2.2.5", - "resolved": "https://registry.npmjs.org/@iarna/toml/-/toml-2.2.5.tgz", - "integrity": "sha512-trnsAYxU3xnS1gPHPyU961coFyLkh4gAD/0zQ5mymY4yOZ+CYvsPqUbOFSw0aDM4y0tV7tiFxL/1XfXPNC6IPg==" }, - "@mole-inc/bin-wrapper": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/@mole-inc/bin-wrapper/-/bin-wrapper-8.0.1.tgz", - "integrity": "sha512-sTGoeZnjI8N4KS+sW2AN95gDBErhAguvkw/tWdCjeM8bvxpz5lqrnd0vOJABA1A+Ic3zED7PYoLP/RANLgVotA==", + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", "dev": true, - "requires": { - "bin-check": "^4.1.0", - "bin-version-check": "^5.0.0", - "content-disposition": "^0.5.4", - "ext-name": "^5.0.0", - "file-type": "^17.1.6", - "filenamify": "^5.0.2", - "got": "^11.8.5", - "os-filter-obj": "^2.0.0" - } - }, - "@nodelib/fs.scandir": { - "version": "2.1.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", - "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", - "dev": true, - "requires": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - } - }, - "@nodelib/fs.stat": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", - "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", - "dev": true - }, - "@nodelib/fs.walk": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", - "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", - "dev": true, - "requires": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - } - }, - 
"@sindresorhus/is": { - "version": "4.6.0", - "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-4.6.0.tgz", - "integrity": "sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw==", - "dev": true - }, - "@start9labs/start-sdk": { - "version": "file:../sdk/dist", - "requires": { - "@iarna/toml": "^2.2.5", - "@types/jest": "^29.4.0", - "isomorphic-fetch": "^3.0.0", - "jest": "^29.4.3", - "prettier": "^3.2.5", - "ts-jest": "^29.0.5", - "ts-matches": "^5.4.1", - "ts-node": "^10.9.1", - "tsx": "^4.7.1", - "typescript": "^5.0.4", - "yaml": "^2.2.2" - } - }, - "@swc/cli": { - "version": "0.1.65", - "resolved": "https://registry.npmjs.org/@swc/cli/-/cli-0.1.65.tgz", - "integrity": "sha512-4NcgsvJVHhA7trDnMmkGLLvWMHu2kSy+qHx6QwRhhJhdiYdNUrhdp+ERxen73sYtaeEOYeLJcWrQ60nzKi6rpg==", - "dev": true, - "requires": { - "@mole-inc/bin-wrapper": "^8.0.1", - "commander": "^7.1.0", - "fast-glob": "^3.2.5", - "minimatch": "^9.0.3", - "semver": "^7.3.8", - "slash": "3.0.0", - "source-map": "^0.7.3" - } - }, - "@swc/core": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.4.1.tgz", - "integrity": "sha512-3y+Y8js+e7BbM16iND+6Rcs3jdiL28q3iVtYsCviYSSpP2uUVKkp5sJnCY4pg8AaVvyN7CGQHO7gLEZQ5ByozQ==", - "dev": true, - "requires": { - "@swc/core-darwin-arm64": "1.4.1", - "@swc/core-darwin-x64": "1.4.1", - "@swc/core-linux-arm-gnueabihf": "1.4.1", - "@swc/core-linux-arm64-gnu": "1.4.1", - "@swc/core-linux-arm64-musl": "1.4.1", - "@swc/core-linux-x64-gnu": "1.4.1", - "@swc/core-linux-x64-musl": "1.4.1", - "@swc/core-win32-arm64-msvc": "1.4.1", - "@swc/core-win32-ia32-msvc": "1.4.1", - "@swc/core-win32-x64-msvc": "1.4.1", - "@swc/counter": "^0.1.2", - "@swc/types": "^0.1.5" - } - }, - "@swc/core-darwin-arm64": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.4.1.tgz", - "integrity": 
"sha512-ePyfx0348UbR4DOAW24TedeJbafnzha8liXFGuQ4bdXtEVXhLfPngprrxKrAddCuv42F9aTxydlF6+adD3FBhA==", - "dev": true, - "optional": true - }, - "@swc/core-darwin-x64": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.4.1.tgz", - "integrity": "sha512-eLf4JSe6VkCMdDowjM8XNC5rO+BrgfbluEzAVtKR8L2HacNYukieumN7EzpYCi0uF1BYwu1ku6tLyG2r0VcGxA==", - "dev": true, - "optional": true - }, - "@swc/core-linux-arm-gnueabihf": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.4.1.tgz", - "integrity": "sha512-K8VtTLWMw+rkN/jDC9o/Q9SMmzdiHwYo2CfgkwVT29NsGccwmNhCQx6XoYiPKyKGIFKt4tdQnJHKUFzxUqQVtQ==", - "dev": true, - "optional": true - }, - "@swc/core-linux-arm64-gnu": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.4.1.tgz", - "integrity": "sha512-0e8p4g0Bfkt8lkiWgcdiENH3RzkcqKtpRXIVNGOmVc0OBkvc2tpm2WTx/eoCnes2HpTT4CTtR3Zljj4knQ4Fvw==", - "dev": true, - "optional": true - }, - "@swc/core-linux-arm64-musl": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.4.1.tgz", - "integrity": "sha512-b/vWGQo2n7lZVUnSQ7NBq3Qrj85GrAPPiRbpqaIGwOytiFSk8VULFihbEUwDe0rXgY4LDm8z8wkgADZcLnmdUA==", - "dev": true, - "optional": true - }, - "@swc/core-linux-x64-gnu": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.4.1.tgz", - "integrity": "sha512-AFMQlvkKEdNi1Vk2GFTxxJzbICttBsOQaXa98kFTeWTnFFIyiIj2w7Sk8XRTEJ/AjF8ia8JPKb1zddBWr9+bEQ==", - "dev": true, - "optional": true - }, - "@swc/core-linux-x64-musl": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.4.1.tgz", - "integrity": "sha512-QX2MxIECX1gfvUVZY+jk528/oFkS9MAl76e3ZRvG2KC/aKlCQL0KSzcTSm13mOxkDKS30EaGRDRQWNukGpMeRg==", - "dev": true, - "optional": true - }, - 
"@swc/core-win32-arm64-msvc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.4.1.tgz", - "integrity": "sha512-OklkJYXXI/tntD2zaY8i3iZldpyDw5q+NAP3k9OlQ7wXXf37djRsHLV0NW4+ZNHBjE9xp2RsXJ0jlOJhfgGoFA==", - "dev": true, - "optional": true - }, - "@swc/core-win32-ia32-msvc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.4.1.tgz", - "integrity": "sha512-MBuc3/QfKX9FnLOU7iGN+6yHRTQaPQ9WskiC8s8JFiKQ+7I2p25tay2RplR9dIEEGgVAu6L7auv96LbNTh+FaA==", - "dev": true, - "optional": true - }, - "@swc/core-win32-x64-msvc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.4.1.tgz", - "integrity": "sha512-lu4h4wFBb/bOK6N2MuZwg7TrEpwYXgpQf5R7ObNSXL65BwZ9BG8XRzD+dLJmALu8l5N08rP/TrpoKRoGT4WSxw==", - "dev": true, - "optional": true - }, - "@swc/counter": { - "version": "0.1.3", - "resolved": "https://registry.npmjs.org/@swc/counter/-/counter-0.1.3.tgz", - "integrity": "sha512-e2BR4lsJkkRlKZ/qCHPw9ZaSxc0MVUd7gtbtaB7aMvHeJVYe8sOB8DBZkP2DtISHGSku9sCK6T6cnY0CtXrOCQ==", - "dev": true - }, - "@swc/types": { - "version": "0.1.5", - "resolved": "https://registry.npmjs.org/@swc/types/-/types-0.1.5.tgz", - "integrity": "sha512-myfUej5naTBWnqOCc/MdVOLVjXUXtIA+NpDrDBKJtLLg2shUjBu3cZmB/85RyitKc55+lUUyl7oRfLOvkr2hsw==", - "dev": true - }, - "@szmarczak/http-timer": { - "version": "4.0.6", - "resolved": "https://registry.npmjs.org/@szmarczak/http-timer/-/http-timer-4.0.6.tgz", - "integrity": "sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w==", - "dev": true, - "requires": { - "defer-to-connect": "^2.0.0" - } - }, - "@tokenizer/token": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/@tokenizer/token/-/token-0.3.0.tgz", - "integrity": "sha512-OvjF+z51L3ov0OyAU0duzsYuvO01PH7x4t6DJx+guahgTnBHkhJdG7soQeTSFLWN3efnHyibZ4Z8l2EuWwJN3A==", - 
"dev": true - }, - "@types/cacheable-request": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.3.tgz", - "integrity": "sha512-IQ3EbTzGxIigb1I3qPZc1rWJnH0BmSKv5QYTalEwweFvyBDLSAe24zP0le/hyi7ecGfZVlIVAg4BZqb8WBwKqw==", - "dev": true, - "requires": { - "@types/http-cache-semantics": "*", - "@types/keyv": "^3.1.4", - "@types/node": "*", - "@types/responselike": "^1.0.0" - } - }, - "@types/http-cache-semantics": { - "version": "4.0.4", - "resolved": "https://registry.npmjs.org/@types/http-cache-semantics/-/http-cache-semantics-4.0.4.tgz", - "integrity": "sha512-1m0bIFVc7eJWyve9S0RnuRgcQqF/Xd5QsUZAZeQFr1Q3/p9JWoQQEqmVy+DPTNpGXwhgIetAoYF8JSc33q29QA==", - "dev": true - }, - "@types/keyv": { - "version": "3.1.4", - "resolved": "https://registry.npmjs.org/@types/keyv/-/keyv-3.1.4.tgz", - "integrity": "sha512-BQ5aZNSCpj7D6K2ksrRCTmKRLEpnPvWDiLPfoGyhZ++8YtiK9d/3DBKPJgry359X/P1PfruyYwvnvwFjuEiEIg==", - "dev": true, - "requires": { - "@types/node": "*" - } - }, - "@types/node": { - "version": "20.11.17", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.17.tgz", - "integrity": "sha512-QmgQZGWu1Yw9TDyAP9ZzpFJKynYNeOvwMJmaxABfieQoVoiVOS6MN1WSpqpRcbeA5+RW82kraAVxCCJg+780Qw==", - "dev": true, - "requires": { - "undici-types": "~5.26.4" - } - }, - "@types/responselike": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/@types/responselike/-/responselike-1.0.3.tgz", - "integrity": "sha512-H/+L+UkTV33uf49PH5pCAUBVPNj2nDBXTN+qS1dOwyyg24l3CcicicCA7ca+HMvJBZcFgl5r8e+RR6elsb4Lyw==", - "dev": true, - "requires": { - "@types/node": "*" - } - }, - "accepts": { - "version": "1.3.8", - "resolved": "https://registry.npmjs.org/accepts/-/accepts-1.3.8.tgz", - "integrity": "sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==", - "requires": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" - } - }, - "arch": { - "version": "2.2.0", - "resolved": 
"https://registry.npmjs.org/arch/-/arch-2.2.0.tgz", - "integrity": "sha512-Of/R0wqp83cgHozfIYLbBMnej79U/SVGOOyuB3VVFv1NRM/PSFMK12x9KVtiYzJqmnU5WR2qp0Z5rHb7sWGnFQ==", - "dev": true - }, - "array-flatten": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", - "integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==" - }, - "balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", - "dev": true - }, - "bin-check": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/bin-check/-/bin-check-4.1.0.tgz", - "integrity": "sha512-b6weQyEUKsDGFlACWSIOfveEnImkJyK/FGW6FAG42loyoquvjdtOIqO6yBFzHyqyVVhNgNkQxxx09SFLK28YnA==", - "dev": true, - "requires": { - "execa": "^0.7.0", - "executable": "^4.1.0" - } - }, - "bin-version": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/bin-version/-/bin-version-6.0.0.tgz", - "integrity": "sha512-nk5wEsP4RiKjG+vF+uG8lFsEn4d7Y6FVDamzzftSunXOoOcOOkzcWdKVlGgFFwlUQCj63SgnUkLLGF8v7lufhw==", - "dev": true, - "requires": { - "execa": "^5.0.0", - "find-versions": "^5.0.0" - }, "dependencies": { - "cross-spawn": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", - "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", - "dev": true, - "requires": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - } - }, - "execa": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", - "integrity": "sha512-8uSpZZocAZRBAPIEINJj3Lo9HyGitllczc27Eh5YYojjMFMn8yHMDMaUHE2Jqfq05D/wucwI4JGURyXt1vchyg==", - "dev": true, - "requires": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - 
"human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - } - }, - "get-stream": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz", - "integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg==", - "dev": true - }, - "is-stream": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz", - "integrity": "sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==", - "dev": true - }, - "npm-run-path": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-4.0.1.tgz", - "integrity": "sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw==", - "dev": true, - "requires": { - "path-key": "^3.0.0" - } - }, - "path-key": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", - "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", - "dev": true - }, - "shebang-command": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", - "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", - "dev": true, - "requires": { - "shebang-regex": "^3.0.0" - } - }, - "shebang-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", - "integrity": "sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", - "dev": true - }, - "which": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", - "integrity": 
"sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - } - } - }, - "bin-version-check": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/bin-version-check/-/bin-version-check-5.1.0.tgz", - "integrity": "sha512-bYsvMqJ8yNGILLz1KP9zKLzQ6YpljV3ln1gqhuLkUtyfGi3qXKGuK2p+U4NAvjVFzDFiBBtOpCOSFNuYYEGZ5g==", - "dev": true, - "requires": { - "bin-version": "^6.0.0", - "semver": "^7.5.3", - "semver-truncate": "^3.0.0" - } - }, - "body-parser": { - "version": "1.20.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz", - "integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==", - "requires": { - "bytes": "3.1.2", - "content-type": "~1.0.4", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.11.0", - "raw-body": "2.5.1", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - } - }, - "brace-expansion": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", - "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", - "dev": true, - "requires": { - "balanced-match": "^1.0.0" - } - }, - "braces": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.2.tgz", - "integrity": "sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==", - "dev": true, - "requires": { - "fill-range": "^7.0.1" - } - }, - "bytes": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz", - "integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==" - }, - "cacheable-lookup": { - "version": "5.0.4", - "resolved": 
"https://registry.npmjs.org/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz", - "integrity": "sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA==", - "dev": true - }, - "cacheable-request": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-7.0.4.tgz", - "integrity": "sha512-v+p6ongsrp0yTGbJXjgxPow2+DL93DASP4kXCDKb8/bwRtt9OEF3whggkkDkGNzgcWy2XaF4a8nZglC7uElscg==", - "dev": true, - "requires": { - "clone-response": "^1.0.2", - "get-stream": "^5.1.0", - "http-cache-semantics": "^4.0.0", - "keyv": "^4.0.0", - "lowercase-keys": "^2.0.0", - "normalize-url": "^6.0.1", - "responselike": "^2.0.0" + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" }, - "dependencies": { - "get-stream": { - "version": "5.2.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-5.2.0.tgz", - "integrity": "sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA==", - "dev": true, - "requires": { - "pump": "^3.0.0" - } - } + "engines": { + "node": ">=12" } }, - "call-bind": { - "version": "1.0.7", - "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.7.tgz", - "integrity": "sha512-GHTSNSYICQ7scH7sZ+M2rFopRoLh8t2bLSW6BbgrtLsahOIB5iyAVJf9GjWK3cYTDaMj4XdBpM1cA6pIS0Kv2w==", - "requires": { - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.1" - } - }, - "clone-response": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.3.tgz", - "integrity": "sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA==", + "node_modules/yargs-parser": { + "version": "21.1.1", + "resolved": 
"https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz", + "integrity": "sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==", "dev": true, - "requires": { - "mimic-response": "^1.0.0" + "engines": { + "node": ">=12" } }, - "commander": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", - "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", - "dev": true - }, - "content-disposition": { - "version": "0.5.4", - "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", - "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", - "requires": { - "safe-buffer": "5.2.1" - } - }, - "content-type": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/content-type/-/content-type-1.0.5.tgz", - "integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==" - }, - "cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==" - }, - "cookie-signature": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz", - "integrity": "sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==" - }, - "cross-spawn": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-5.1.0.tgz", - "integrity": "sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A==", + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": 
"sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", "dev": true, - "requires": { - "lru-cache": "^4.0.1", - "shebang-command": "^1.2.0", - "which": "^1.2.9" - } - }, - "data-uri-to-buffer": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-4.0.1.tgz", - "integrity": "sha512-0R9ikRb668HB7QDxT1vkpuUBtqc53YyAwMwGeUFKRojY/NWKvdZ+9UYtRfGmhqNbRkTSVpMbmyhXipFFv2cb/A==" - }, - "debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "requires": { - "ms": "2.0.0" - } - }, - "decompress-response": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-6.0.0.tgz", - "integrity": "sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ==", - "dev": true, - "requires": { - "mimic-response": "^3.1.0" + "engines": { + "node": ">=10" }, - "dependencies": { - "mimic-response": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-3.1.0.tgz", - "integrity": "sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ==", - "dev": true - } + "funding": { + "url": "https://github.com/sponsors/sindresorhus" } - }, - "defer-to-connect": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/defer-to-connect/-/defer-to-connect-2.0.1.tgz", - "integrity": "sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg==", - "dev": true - }, - "define-data-property": { - "version": "1.1.4", - "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", - "integrity": "sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", - "requires": { - 
"es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "gopd": "^1.0.1" - } - }, - "depd": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", - "integrity": "sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==" - }, - "destroy": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz", - "integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==" - }, - "ee-first": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", - "integrity": "sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==" - }, - "encodeurl": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", - "integrity": "sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==" - }, - "end-of-stream": { - "version": "1.4.4", - "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", - "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", - "dev": true, - "requires": { - "once": "^1.4.0" - } - }, - "es-define-property": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.0.tgz", - "integrity": "sha512-jxayLKShrEqqzJ0eumQbVhTYQM27CfT1T35+gCgDFoL82JLsXqTJ76zv6A0YLOgEnLUMvLzsDsGIrl8NFpT2gQ==", - "requires": { - "get-intrinsic": "^1.2.4" - } - }, - "es-errors": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", - "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==" - }, - "esbuild-plugin-resolve": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/esbuild-plugin-resolve/-/esbuild-plugin-resolve-2.0.0.tgz", - 
"integrity": "sha512-eJy9B8yDW5X/J48eWtR1uVmv+DKfHvYYnrrcqQoe/nUkVHVOTZlJnSevkYyGOz6hI90t036Y5QIPDrGzmppxfg==" - }, - "escape-html": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", - "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==" - }, - "escape-string-regexp": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-5.0.0.tgz", - "integrity": "sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==", - "dev": true - }, - "etag": { - "version": "1.8.1", - "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", - "integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==" - }, - "execa": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/execa/-/execa-0.7.0.tgz", - "integrity": "sha512-RztN09XglpYI7aBBrJCPW95jEH7YF1UEPOoX9yDhUTPdp7mK+CQvnLTuD10BNXZ3byLTu2uehZ8EcKT/4CGiFw==", - "dev": true, - "requires": { - "cross-spawn": "^5.0.1", - "get-stream": "^3.0.0", - "is-stream": "^1.1.0", - "npm-run-path": "^2.0.0", - "p-finally": "^1.0.0", - "signal-exit": "^3.0.0", - "strip-eof": "^1.0.0" - } - }, - "executable": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/executable/-/executable-4.1.1.tgz", - "integrity": "sha512-8iA79xD3uAch729dUG8xaaBBFGaEa0wdD2VkYLFHwlqosEj/jT66AzcreRDSgV7ehnNLBW2WR5jIXwGKjVdTLg==", - "dev": true, - "requires": { - "pify": "^2.2.0" - } - }, - "express": { - "version": "4.18.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz", - "integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==", - "requires": { - "accepts": "~1.3.8", - "array-flatten": "1.1.1", - "body-parser": "1.20.1", - "content-disposition": "0.5.4", - "content-type": "~1.0.4", - "cookie": "0.5.0", - 
"cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "2.0.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "1.2.0", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "merge-descriptors": "1.0.1", - "methods": "~1.1.2", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.7", - "proxy-addr": "~2.0.7", - "qs": "6.11.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.2.1", - "send": "0.18.0", - "serve-static": "1.15.0", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - } - }, - "ext-list": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", - "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", - "dev": true, - "requires": { - "mime-db": "^1.28.0" - } - }, - "ext-name": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", - "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", - "dev": true, - "requires": { - "ext-list": "^2.0.0", - "sort-keys-length": "^1.0.0" - } - }, - "fast-glob": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", - "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", - "dev": true, - "requires": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - } - }, - "fastq": { - "version": "1.17.1", - "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", - "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", - "dev": true, - "requires": { - "reusify": "^1.0.4" - } - }, - "fetch-blob": { - "version": "3.2.0", - "resolved": 
"https://registry.npmjs.org/fetch-blob/-/fetch-blob-3.2.0.tgz", - "integrity": "sha512-7yAQpD2UMJzLi1Dqv7qFYnPbaPx7ZfFK6PiIxQ4PfkGPyNyl2Ugx+a/umUonmKqjhM4DnfbMvdX6otXq83soQQ==", - "requires": { - "node-domexception": "^1.0.0", - "web-streams-polyfill": "^3.0.3" - } - }, - "file-type": { - "version": "17.1.6", - "resolved": "https://registry.npmjs.org/file-type/-/file-type-17.1.6.tgz", - "integrity": "sha512-hlDw5Ev+9e883s0pwUsuuYNu4tD7GgpUnOvykjv1Gya0ZIjuKumthDRua90VUn6/nlRKAjcxLUnHNTIUWwWIiw==", - "dev": true, - "requires": { - "readable-web-to-node-stream": "^3.0.2", - "strtok3": "^7.0.0-alpha.9", - "token-types": "^5.0.0-alpha.2" - } - }, - "filebrowser": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/filebrowser/-/filebrowser-1.0.0.tgz", - "integrity": "sha512-RRONYpCDzbmWPhBX43T4dE+ptqLznJ7lKfbMaZLChB2i2ZIdFXoqT9qZTi70Dpq6fnJHuvcdeiRqMIPZKhVgTQ==", - "requires": { - "commander": "^2.9.0", - "content-disposition": "^0.5.1", - "express": "^4.14.0" - }, - "dependencies": { - "commander": { - "version": "2.20.3", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", - "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" - } - } - }, - "filename-reserved-regex": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-3.0.0.tgz", - "integrity": "sha512-hn4cQfU6GOT/7cFHXBqeBg2TbrMBgdD0kcjLhvSQYYwm3s4B6cjvBfb7nBALJLAXqmU5xajSa7X2NnUud/VCdw==", - "dev": true - }, - "filenamify": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/filenamify/-/filenamify-5.1.1.tgz", - "integrity": "sha512-M45CbrJLGACfrPOkrTp3j2EcO9OBkKUYME0eiqOCa7i2poaklU0jhlIaMlr8ijLorT0uLAzrn3qXOp5684CkfA==", - "dev": true, - "requires": { - "filename-reserved-regex": "^3.0.0", - "strip-outer": "^2.0.0", - "trim-repeated": "^2.0.0" - } - }, - "fill-range": { - "version": "7.0.1", - "resolved": 
"https://registry.npmjs.org/fill-range/-/fill-range-7.0.1.tgz", - "integrity": "sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==", - "dev": true, - "requires": { - "to-regex-range": "^5.0.1" - } - }, - "finalhandler": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz", - "integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==", - "requires": { - "debug": "2.6.9", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" - } - }, - "find-versions": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/find-versions/-/find-versions-5.1.0.tgz", - "integrity": "sha512-+iwzCJ7C5v5KgcBuueqVoNiHVoQpwiUK5XFLjf0affFTep+Wcw93tPvmb8tqujDNmzhBDPddnWV/qgWSXgq+Hg==", - "dev": true, - "requires": { - "semver-regex": "^4.0.5" - } - }, - "formdata-polyfill": { - "version": "4.0.10", - "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz", - "integrity": "sha512-buewHzMvYL29jdeQTVILecSaZKnt/RJWjoZCF5OW60Z67/GmSLBkOFM7qh1PI3zFNtJbaZL5eQu1vLfazOwj4g==", - "requires": { - "fetch-blob": "^3.1.2" - } - }, - "forwarded": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", - "integrity": "sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==" - }, - "fresh": { - "version": "0.5.2", - "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", - "integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==" - }, - "function-bind": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", - "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==" - }, - 
"get-intrinsic": { - "version": "1.2.4", - "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.4.tgz", - "integrity": "sha512-5uYhsJH8VJBTv7oslg4BznJYhDoRI6waYCxMmCdnTrcCrHA/fCFKoTFz2JKKE0HdDFUF7/oQuhzumXJK7paBRQ==", - "requires": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "has-proto": "^1.0.1", - "has-symbols": "^1.0.3", - "hasown": "^2.0.0" - } - }, - "get-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", - "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", - "dev": true - }, - "glob-parent": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", - "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", - "dev": true, - "requires": { - "is-glob": "^4.0.1" - } - }, - "gopd": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.0.1.tgz", - "integrity": "sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==", - "requires": { - "get-intrinsic": "^1.1.3" - } - }, - "got": { - "version": "11.8.6", - "resolved": "https://registry.npmjs.org/got/-/got-11.8.6.tgz", - "integrity": "sha512-6tfZ91bOr7bOXnK7PRDCGBLa1H4U080YHNaAQ2KsMGlLEzRbk44nsZF2E1IeRc3vtJHPVbKCYgdFbaGO2ljd8g==", - "dev": true, - "requires": { - "@sindresorhus/is": "^4.0.0", - "@szmarczak/http-timer": "^4.0.5", - "@types/cacheable-request": "^6.0.1", - "@types/responselike": "^1.0.0", - "cacheable-lookup": "^5.0.3", - "cacheable-request": "^7.0.2", - "decompress-response": "^6.0.0", - "http2-wrapper": "^1.0.0-beta.5.2", - "lowercase-keys": "^2.0.0", - "p-cancelable": "^2.0.0", - "responselike": "^2.0.0" - } - }, - "has-property-descriptors": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", - 
"integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", - "requires": { - "es-define-property": "^1.0.0" - } - }, - "has-proto": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-proto/-/has-proto-1.0.1.tgz", - "integrity": "sha512-7qE+iP+O+bgF9clE5+UoBFzE65mlBiVj3tKCrlNQ0Ogwm0BjpT/gK4SlLYDMybDh5I3TCTKnPPa0oMG7JDYrhg==" - }, - "has-symbols": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz", - "integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==" - }, - "hasown": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.1.tgz", - "integrity": "sha512-1/th4MHjnwncwXsIW6QMzlvYL9kG5e/CpVvLRZe4XPa8TOUNbCELqmvhDmnkNsAjwaG4+I8gJJL0JBvTTLO9qA==", - "requires": { - "function-bind": "^1.1.2" - } - }, - "http-cache-semantics": { - "version": "4.1.1", - "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz", - "integrity": "sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==", - "dev": true - }, - "http-errors": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz", - "integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==", - "requires": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - } - }, - "http2-wrapper": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/http2-wrapper/-/http2-wrapper-1.0.3.tgz", - "integrity": "sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg==", - "dev": true, - "requires": { - "quick-lru": "^5.1.1", - "resolve-alpn": "^1.0.0" - } - }, - "human-signals": { - "version": "2.1.0", - "resolved": 
"https://registry.npmjs.org/human-signals/-/human-signals-2.1.0.tgz", - "integrity": "sha512-B4FFZ6q/T2jhhksgkbEW3HBvWIfDW85snkQgawt07S7J5QXTk6BkNV+0yAeZrM5QpMAdYlocGoljn0sJ/WQkFw==", - "dev": true - }, - "iconv-lite": { - "version": "0.4.24", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.4.24.tgz", - "integrity": "sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==", - "requires": { - "safer-buffer": ">= 2.1.2 < 3" - } - }, - "ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "dev": true - }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "ipaddr.js": { - "version": "1.9.1", - "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", - "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" - }, - "is-extglob": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", - "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", - "dev": true - }, - "is-glob": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", - "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", - "dev": true, - "requires": { - "is-extglob": "^2.1.1" - } - }, - "is-number": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", - "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", - "dev": true - }, - 
"is-plain-obj": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", - "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", - "dev": true - }, - "is-stream": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", - "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", - "dev": true - }, - "isexe": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", - "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", - "dev": true - }, - "isomorphic-fetch": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz", - "integrity": "sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA==", - "requires": { - "node-fetch": "^2.6.1", - "whatwg-fetch": "^3.4.1" - }, - "dependencies": { - "node-fetch": { - "version": "2.7.0", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", - "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", - "requires": { - "whatwg-url": "^5.0.0" - } - } - } - }, - "json-buffer": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", - "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", - "dev": true - }, - "keyv": { - "version": "4.5.4", - "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", - "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", - "dev": true, - "requires": { - "json-buffer": "3.0.1" - } - }, - "lowercase-keys": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-2.0.0.tgz", - "integrity": "sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA==", - "dev": true - }, - "lru-cache": { - "version": "4.1.5", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", - "integrity": "sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g==", - "dev": true, - "requires": { - "pseudomap": "^1.0.2", - "yallist": "^2.1.2" - } - }, - "media-typer": { - "version": "0.3.0", - "resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz", - "integrity": "sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==" - }, - "merge-descriptors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz", - "integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==" - }, - "merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "dev": true - }, - "merge2": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", - "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", - "dev": true - }, - "methods": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/methods/-/methods-1.1.2.tgz", - "integrity": "sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==" - }, - "micromatch": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.5.tgz", - "integrity": "sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==", - "dev": true, - 
"requires": { - "braces": "^3.0.2", - "picomatch": "^2.3.1" - } - }, - "mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==" - }, - "mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==" - }, - "mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "requires": { - "mime-db": "1.52.0" - } - }, - "mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "dev": true - }, - "mimic-response": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", - "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", - "dev": true - }, - "minimatch": { - "version": "9.0.3", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", - "integrity": "sha512-RHiac9mvaRw0x3AYRgDC1CxAP7HTcNrrECeA8YYJeWnpo+2Q5CegtZjaotWTWxDG3UeGA1coE05iH1mPjT/2mg==", - "dev": true, - "requires": { - "brace-expansion": "^2.0.1" - } - }, - "ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==" - }, - "negotiator": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-0.6.3.tgz", - "integrity": 
"sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==" - }, - "node-domexception": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", - "integrity": "sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==" - }, - "node-fetch": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.3.2.tgz", - "integrity": "sha512-dRB78srN/l6gqWulah9SrxeYnxeddIG30+GOqK/9OlLVyLg3HPnr6SqOWTWOXKRwC2eGYCkZ59NNuSgvSrpgOA==", - "requires": { - "data-uri-to-buffer": "^4.0.0", - "fetch-blob": "^3.1.4", - "formdata-polyfill": "^4.0.10" - } - }, - "normalize-url": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-6.1.0.tgz", - "integrity": "sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A==", - "dev": true - }, - "npm-run-path": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-2.0.2.tgz", - "integrity": "sha512-lJxZYlT4DW/bRUtFh1MQIWqmLwQfAxnqWG4HhEdjMlkrJYnJn0Jrr2u3mgxqaWsdiBc76TYkTG/mhrnYTuzfHw==", - "dev": true, - "requires": { - "path-key": "^2.0.0" - } - }, - "object-inspect": { - "version": "1.13.1", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.1.tgz", - "integrity": "sha512-5qoj1RUiKOMsCCNLV1CBiPYE10sziTsnmNxkAI/rZhiD63CF7IqdFGC/XzjWjpSgLf0LxXX3bDFIh0E18f6UhQ==" - }, - "on-finished": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.4.1.tgz", - "integrity": "sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==", - "requires": { - "ee-first": "1.1.1" - } - }, - "once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": 
"sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "dev": true, - "requires": { - "wrappy": "1" - } - }, - "onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dev": true, - "requires": { - "mimic-fn": "^2.1.0" - } - }, - "os-filter-obj": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/os-filter-obj/-/os-filter-obj-2.0.0.tgz", - "integrity": "sha512-uksVLsqG3pVdzzPvmAHpBK0wKxYItuzZr7SziusRPoz67tGV8rL1szZ6IdeUrbqLjGDwApBtN29eEE3IqGHOjg==", - "dev": true, - "requires": { - "arch": "^2.1.0" - } - }, - "p-cancelable": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-2.1.1.tgz", - "integrity": "sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg==", - "dev": true - }, - "p-finally": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", - "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", - "dev": true - }, - "parseurl": { - "version": "1.3.3", - "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", - "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" - }, - "path-key": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-2.0.1.tgz", - "integrity": "sha512-fEHGKCSmUSDPv4uoj8AlD+joPlq3peND+HRYyxFz4KPw4z926S/b8rIuFs2FYJg3BwsxJf6A9/3eIdLaYC+9Dw==", - "dev": true - }, - "path-to-regexp": { - "version": "0.1.7", - "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", - "integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==" - }, - "peek-readable": { - 
"version": "5.0.0", - "resolved": "https://registry.npmjs.org/peek-readable/-/peek-readable-5.0.0.tgz", - "integrity": "sha512-YtCKvLUOvwtMGmrniQPdO7MwPjgkFBtFIrmfSbYmYuq3tKDV/mcfAhBth1+C3ru7uXIZasc/pHnb+YDYNkkj4A==", - "dev": true - }, - "picomatch": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", - "dev": true - }, - "pify": { - "version": "2.3.0", - "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", - "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", - "dev": true - }, - "prettier": { - "version": "3.2.5", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.2.5.tgz", - "integrity": "sha512-3/GWa9aOC0YeD7LUfvOG2NiDyhOWRvt1k+rcKhOuYnMY24iiCphgneUfJDyFXd6rZCAnuLBv6UeAULtrhT/F4A==", - "dev": true - }, - "proxy-addr": { - "version": "2.0.7", - "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.7.tgz", - "integrity": "sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==", - "requires": { - "forwarded": "0.2.0", - "ipaddr.js": "1.9.1" - } - }, - "pseudomap": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz", - "integrity": "sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==", - "dev": true - }, - "pump": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.0.tgz", - "integrity": "sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww==", - "dev": true, - "requires": { - "end-of-stream": "^1.1.0", - "once": "^1.3.1" - } - }, - "qs": { - "version": "6.11.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz", - "integrity": 
"sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==", - "requires": { - "side-channel": "^1.0.4" - } - }, - "queue-microtask": { - "version": "1.2.3", - "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", - "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", - "dev": true - }, - "quick-lru": { - "version": "5.1.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-5.1.1.tgz", - "integrity": "sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA==", - "dev": true - }, - "range-parser": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/range-parser/-/range-parser-1.2.1.tgz", - "integrity": "sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==" - }, - "raw-body": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz", - "integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==", - "requires": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - } - }, - "readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - } - }, - "readable-web-to-node-stream": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/readable-web-to-node-stream/-/readable-web-to-node-stream-3.0.2.tgz", - "integrity": "sha512-ePeK6cc1EcKLEhJFt/AebMCLL+GgSKhuygrZ/GLaKZYEecIgIECf4UaUuaByiGtzckwR4ain9VzUh95T1exYGw==", - "dev": true, - "requires": { - "readable-stream": "^3.6.0" - } - }, - "resolve-alpn": { - 
"version": "1.2.1", - "resolved": "https://registry.npmjs.org/resolve-alpn/-/resolve-alpn-1.2.1.tgz", - "integrity": "sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g==", - "dev": true - }, - "responselike": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/responselike/-/responselike-2.0.1.tgz", - "integrity": "sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw==", - "dev": true, - "requires": { - "lowercase-keys": "^2.0.0" - } - }, - "reusify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", - "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", - "dev": true - }, - "run-parallel": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", - "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", - "dev": true, - "requires": { - "queue-microtask": "^1.2.2" - } - }, - "safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" - }, - "safer-buffer": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", - "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" - }, - "semver": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", - "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", - "dev": true, - "requires": { - "lru-cache": "^6.0.0" - }, - "dependencies": { - "lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - 
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "requires": { - "yallist": "^4.0.0" - } - }, - "yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - } - } - }, - "semver-regex": { - "version": "4.0.5", - "resolved": "https://registry.npmjs.org/semver-regex/-/semver-regex-4.0.5.tgz", - "integrity": "sha512-hunMQrEy1T6Jr2uEVjrAIqjwWcQTgOAcIM52C8MY1EZSD3DDNft04XzvYKPqjED65bNVVko0YI38nYeEHCX3yw==", - "dev": true - }, - "semver-truncate": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/semver-truncate/-/semver-truncate-3.0.0.tgz", - "integrity": "sha512-LJWA9kSvMolR51oDE6PN3kALBNaUdkxzAGcexw8gjMA8xr5zUqK0JiR3CgARSqanYF3Z1YHvsErb1KDgh+v7Rg==", - "dev": true, - "requires": { - "semver": "^7.3.5" - } - }, - "send": { - "version": "0.18.0", - "resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz", - "integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==", - "requires": { - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" - }, - "dependencies": { - "ms": { - "version": "2.1.3", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", - "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" - } - } - }, - "serve-static": { - "version": "1.15.0", - "resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz", - "integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==", - 
"requires": { - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.18.0" - } - }, - "set-function-length": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.1.tgz", - "integrity": "sha512-j4t6ccc+VsKwYHso+kElc5neZpjtq9EnRICFZtWyBsLojhmeF/ZBd/elqm22WJh/BziDe/SBiOeAt0m2mfLD0g==", - "requires": { - "define-data-property": "^1.1.2", - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.3", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.1" - } - }, - "setprototypeof": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.2.0.tgz", - "integrity": "sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==" - }, - "shebang-command": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-1.2.0.tgz", - "integrity": "sha512-EV3L1+UQWGor21OmnvojK36mhg+TyIKDh3iFBKBohr5xeXIhNBcx8oWdgkTEEQ+BEFFYdLRuqMfd5L84N1V5Vg==", - "dev": true, - "requires": { - "shebang-regex": "^1.0.0" - } - }, - "shebang-regex": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-1.0.0.tgz", - "integrity": "sha512-wpoSFAxys6b2a2wHZ1XpDSgD7N9iVjg29Ph9uV/uaP9Ex/KXlkTZTeddxDPSYQpgvzKLGJke2UU0AzoGCjNIvQ==", - "dev": true - }, - "side-channel": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.5.tgz", - "integrity": "sha512-QcgiIWV4WV7qWExbN5llt6frQB/lBven9pqliLXfGPB+K9ZYXxDozp0wLkHS24kWCm+6YXH/f0HhnObZnZOBnQ==", - "requires": { - "call-bind": "^1.0.6", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.4", - "object-inspect": "^1.13.1" - } - }, - "signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": 
"sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==", - "dev": true - }, - "slash": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", - "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", - "dev": true - }, - "sort-keys": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", - "integrity": "sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg==", - "dev": true, - "requires": { - "is-plain-obj": "^1.0.0" - } - }, - "sort-keys-length": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", - "integrity": "sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw==", - "dev": true, - "requires": { - "sort-keys": "^1.0.0" - } - }, - "source-map": { - "version": "0.7.4", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.7.4.tgz", - "integrity": "sha512-l3BikUxvPOcn5E74dZiq5BGsTb5yEwhaTSzccU6t4sDOH8NWJCstKO5QT2CvtFoK6F0saL7p9xHAqHOlCPJygA==", - "dev": true - }, - "statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==" - }, - "string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dev": true, - "requires": { - "safe-buffer": "~5.2.0" - } - }, - "strip-eof": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/strip-eof/-/strip-eof-1.0.0.tgz", - "integrity": "sha512-7FCwGGmx8mD5xQd3RPUvnSpUXHM3BWuzjtpD4TXsfcZ9EL4azvVVUscFYwD9nx8Kh+uCBC00XBtAykoMHwTh8Q==", - "dev": true - 
}, - "strip-final-newline": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-2.0.0.tgz", - "integrity": "sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA==", - "dev": true - }, - "strip-outer": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-2.0.0.tgz", - "integrity": "sha512-A21Xsm1XzUkK0qK1ZrytDUvqsQWict2Cykhvi0fBQntGG5JSprESasEyV1EZ/4CiR5WB5KjzLTrP/bO37B0wPg==", - "dev": true - }, - "strtok3": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/strtok3/-/strtok3-7.0.0.tgz", - "integrity": "sha512-pQ+V+nYQdC5H3Q7qBZAz/MO6lwGhoC2gOAjuouGf/VO0m7vQRh8QNMl2Uf6SwAtzZ9bOw3UIeBukEGNJl5dtXQ==", - "dev": true, - "requires": { - "@tokenizer/token": "^0.3.0", - "peek-readable": "^5.0.0" - } - }, - "to-regex-range": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", - "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", - "dev": true, - "requires": { - "is-number": "^7.0.0" - } - }, - "toidentifier": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", - "integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" - }, - "token-types": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/token-types/-/token-types-5.0.1.tgz", - "integrity": "sha512-Y2fmSnZjQdDb9W4w4r1tswlMHylzWIeOKpx0aZH9BgGtACHhrk3OkT52AzwcuqTRBZtvvnTjDBh8eynMulu8Vg==", - "dev": true, - "requires": { - "@tokenizer/token": "^0.3.0", - "ieee754": "^1.2.1" - } - }, - "tr46": { - "version": "0.0.3", - "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", - "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" - }, - "trim-repeated": { - "version": "2.0.0", - 
"resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-2.0.0.tgz", - "integrity": "sha512-QUHBFTJGdOwmp0tbOG505xAgOp/YliZP/6UgafFXYZ26WT1bvQmSMJUvkeVSASuJJHbqsFbynTvkd5W8RBTipg==", - "dev": true, - "requires": { - "escape-string-regexp": "^5.0.0" - } - }, - "ts-matches": { - "version": "5.4.1", - "resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-5.4.1.tgz", - "integrity": "sha512-kXrY75F0s0WD15N2bWKDScKlKgwnusN6dTRzGs1N7LlxQRnazrsBISC1HL4sy2adsyk65Zbx3Ui3IGN8leAFOQ==" - }, - "tslib": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.6.2.tgz", - "integrity": "sha512-AEYxH93jGFPn/a2iVAwW87VuUIkR1FVUKB77NwMF7nBTDkDrrT/Hpt/IrCJ0QXhW27jTBDcf5ZY7w6RiqTMw2Q==" - }, - "type-is": { - "version": "1.6.18", - "resolved": "https://registry.npmjs.org/type-is/-/type-is-1.6.18.tgz", - "integrity": "sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==", - "requires": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - } - }, - "typescript": { - "version": "5.3.3", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz", - "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==", - "dev": true - }, - "undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "dev": true - }, - "unpipe": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/unpipe/-/unpipe-1.0.0.tgz", - "integrity": "sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==" - }, - "util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": 
"sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", - "dev": true - }, - "utils-merge": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/utils-merge/-/utils-merge-1.0.1.tgz", - "integrity": "sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==" - }, - "vary": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", - "integrity": "sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==" - }, - "web-streams-polyfill": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/web-streams-polyfill/-/web-streams-polyfill-3.3.2.tgz", - "integrity": "sha512-3pRGuxRF5gpuZc0W+EpwQRmCD7gRqcDOMt688KmdlDAgAyaB1XlN0zq2njfDNm44XVdIouE7pZ6GzbdyH47uIQ==" - }, - "webidl-conversions": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", - "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" - }, - "whatwg-fetch": { - "version": "3.6.20", - "resolved": "https://registry.npmjs.org/whatwg-fetch/-/whatwg-fetch-3.6.20.tgz", - "integrity": "sha512-EqhiFU6daOA8kpjOWTL0olhVOF3i7OrFzSYiGsEMB8GcXS+RrzauAERX65xMeNWVqxA6HXH2m69Z9LaKKdisfg==" - }, - "whatwg-url": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", - "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", - "requires": { - "tr46": "~0.0.3", - "webidl-conversions": "^3.0.0" - } - }, - "which": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/which/-/which-1.3.1.tgz", - "integrity": "sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ==", - "dev": true, - "requires": { - "isexe": "^2.0.0" - } - }, - "wrappy": { - "version": "1.0.2", - "resolved": 
"https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", - "dev": true - }, - "yallist": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", - "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==", - "dev": true - }, - "yaml": { - "version": "2.3.4", - "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.3.4.tgz", - "integrity": "sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==" } } } diff --git a/container-runtime/package.json b/container-runtime/package.json index 2fa407408..0a8e4afa8 100644 --- a/container-runtime/package.json +++ b/container-runtime/package.json @@ -1,12 +1,13 @@ { - "name": "start-init", + "name": "container-runtime", "version": "0.0.0", "description": "We want to be the sdk intermitent for the system", "module": "./index.js", "scripts": { "check": "tsc --noEmit", - "build": "prettier --write '**/*.ts' && rm -rf dist && tsc", - "tsc": "rm -rf dist; tsc" + "build": "prettier . 
'!tmp/**' --write && rm -rf dist && tsc", + "tsc": "rm -rf dist; tsc", + "test": "jest -c ./jest.config.js" }, "author": "", "prettier": { @@ -17,12 +18,16 @@ }, "dependencies": { "@iarna/toml": "^2.2.5", + "@noble/curves": "^1.4.0", + "@noble/hashes": "^1.4.0", "@start9labs/start-sdk": "file:../sdk/dist", "esbuild-plugin-resolve": "^2.0.0", "filebrowser": "^1.0.0", "isomorphic-fetch": "^3.0.0", + "jsonpath": "^1.1.1", + "lodash.merge": "^4.6.2", "node-fetch": "^3.1.0", - "ts-matches": "^5.4.1", + "ts-matches": "^5.5.1", "tslib": "^2.5.3", "typescript": "^5.1.3", "yaml": "^2.3.1" @@ -30,8 +35,12 @@ "devDependencies": { "@swc/cli": "^0.1.62", "@swc/core": "^1.3.65", + "@types/jest": "^29.5.12", + "@types/jsonpath": "^0.2.4", "@types/node": "^20.11.13", + "jest": "^29.7.0", "prettier": "^3.2.5", + "ts-jest": "^29.2.3", "typescript": ">5.2" } } diff --git a/container-runtime/src/Adapters/EffectCreator.ts b/container-runtime/src/Adapters/EffectCreator.ts new file mode 100644 index 000000000..0ef299151 --- /dev/null +++ b/container-runtime/src/Adapters/EffectCreator.ts @@ -0,0 +1,301 @@ +import { types as T } from "@start9labs/start-sdk" +import * as net from "net" +import { object, string, number, literals, some, unknown } from "ts-matches" +import { Effects } from "../Models/Effects" + +import { CallbackHolder } from "../Models/CallbackHolder" +import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk" +const matchRpcError = object({ + error: object( + { + code: number, + message: string, + data: some( + string, + object( + { + details: string, + debug: string, + }, + ["debug"], + ), + ), + }, + ["data"], + ), +}) +const testRpcError = matchRpcError.test +const testRpcResult = object({ + result: unknown, +}).test +type RpcError = typeof matchRpcError._TYPE + +const SOCKET_PATH = "/media/startos/rpc/host.sock" +let hostSystemId = 0 + +export type EffectContext = { + procedureId: string | null + callbacks: CallbackHolder | null +} + +const rpcRoundFor = + 
(procedureId: string | null) => + ( + method: K, + params: Record, + ) => { + const id = hostSystemId++ + const client = net.createConnection({ path: SOCKET_PATH }, () => { + client.write( + JSON.stringify({ + id, + method, + params: { ...params, procedureId }, + }) + "\n", + ) + }) + let bufs: Buffer[] = [] + return new Promise((resolve, reject) => { + client.on("data", (data) => { + try { + bufs.push(data) + if (data.reduce((acc, x) => acc || x == 10, false)) { + const res: unknown = JSON.parse( + Buffer.concat(bufs).toString().split("\n")[0], + ) + if (testRpcError(res)) { + let message = res.error.message + console.error("Error in host RPC:", { method, params }) + if (string.test(res.error.data)) { + message += ": " + res.error.data + console.error(`Details: ${res.error.data}`) + } else { + if (res.error.data?.details) { + message += ": " + res.error.data.details + console.error(`Details: ${res.error.data.details}`) + } + if (res.error.data?.debug) { + message += "\n" + res.error.data.debug + console.error(`Debug: ${res.error.data.debug}`) + } + } + reject(new Error(`${message}@${method}`)) + } else if (testRpcResult(res)) { + resolve(res.result) + } else { + reject(new Error(`malformed response ${JSON.stringify(res)}`)) + } + } + } catch (error) { + reject(error) + } + client.end() + }) + client.on("error", (error) => { + reject(error) + }) + }) + } + +function makeEffects(context: EffectContext): Effects { + const rpcRound = rpcRoundFor(context.procedureId) + const self: Effects = { + bind(...[options]: Parameters) { + return rpcRound("bind", { + ...options, + stack: new Error().stack, + }) as ReturnType + }, + clearBindings(...[]: Parameters) { + return rpcRound("clearBindings", {}) as ReturnType< + T.Effects["clearBindings"] + > + }, + clearServiceInterfaces( + ...[]: Parameters + ) { + return rpcRound("clearServiceInterfaces", {}) as ReturnType< + T.Effects["clearServiceInterfaces"] + > + }, + getInstalledPackages(...[]: Parameters) { + return 
rpcRound("getInstalledPackages", {}) as ReturnType< + T.Effects["getInstalledPackages"] + > + }, + createOverlayedImage(options: { + imageId: string + }): Promise<[string, string]> { + return rpcRound("createOverlayedImage", options) as ReturnType< + T.Effects["createOverlayedImage"] + > + }, + destroyOverlayedImage(options: { guid: string }): Promise { + return rpcRound("destroyOverlayedImage", options) as ReturnType< + T.Effects["destroyOverlayedImage"] + > + }, + executeAction(...[options]: Parameters) { + return rpcRound("executeAction", options) as ReturnType< + T.Effects["executeAction"] + > + }, + exportAction(...[options]: Parameters) { + return rpcRound("exportAction", options) as ReturnType< + T.Effects["exportAction"] + > + }, + exportServiceInterface: (( + ...[options]: Parameters + ) => { + return rpcRound("exportServiceInterface", options) as ReturnType< + T.Effects["exportServiceInterface"] + > + }) as Effects["exportServiceInterface"], + exposeForDependents( + ...[options]: Parameters + ) { + return rpcRound("exposeForDependents", options) as ReturnType< + T.Effects["exposeForDependents"] + > + }, + getConfigured(...[]: Parameters) { + return rpcRound("getConfigured", {}) as ReturnType< + T.Effects["getConfigured"] + > + }, + getContainerIp(...[]: Parameters) { + return rpcRound("getContainerIp", {}) as ReturnType< + T.Effects["getContainerIp"] + > + }, + getHostInfo: ((...[allOptions]: Parameters) => { + const options = { + ...allOptions, + callback: context.callbacks?.addCallback(allOptions.callback) || null, + } + return rpcRound("getHostInfo", options) as ReturnType< + T.Effects["getHostInfo"] + > as any + }) as Effects["getHostInfo"], + getServiceInterface( + ...[options]: Parameters + ) { + return rpcRound("getServiceInterface", { + ...options, + callback: context.callbacks?.addCallback(options.callback) || null, + }) as ReturnType + }, + + getPrimaryUrl(...[options]: Parameters) { + return rpcRound("getPrimaryUrl", { + ...options, + callback: 
context.callbacks?.addCallback(options.callback) || null, + }) as ReturnType + }, + getServicePortForward( + ...[options]: Parameters + ) { + return rpcRound("getServicePortForward", options) as ReturnType< + T.Effects["getServicePortForward"] + > + }, + getSslCertificate(options: Parameters[0]) { + return rpcRound("getSslCertificate", options) as ReturnType< + T.Effects["getSslCertificate"] + > + }, + getSslKey(options: Parameters[0]) { + return rpcRound("getSslKey", options) as ReturnType< + T.Effects["getSslKey"] + > + }, + getSystemSmtp(...[options]: Parameters) { + return rpcRound("getSystemSmtp", { + ...options, + callback: context.callbacks?.addCallback(options.callback) || null, + }) as ReturnType + }, + listServiceInterfaces( + ...[options]: Parameters + ) { + return rpcRound("listServiceInterfaces", { + ...options, + callback: context.callbacks?.addCallback(options.callback) || null, + }) as ReturnType + }, + mount(...[options]: Parameters) { + return rpcRound("mount", options) as ReturnType + }, + clearActions(...[]: Parameters) { + return rpcRound("clearActions", {}) as ReturnType< + T.Effects["clearActions"] + > + }, + restart(...[]: Parameters) { + return rpcRound("restart", {}) as ReturnType + }, + setConfigured(...[configured]: Parameters) { + return rpcRound("setConfigured", { configured }) as ReturnType< + T.Effects["setConfigured"] + > + }, + setDependencies( + dependencies: Parameters[0], + ): ReturnType { + return rpcRound("setDependencies", dependencies) as ReturnType< + T.Effects["setDependencies"] + > + }, + checkDependencies( + options: Parameters[0], + ): ReturnType { + return rpcRound("checkDependencies", options) as ReturnType< + T.Effects["checkDependencies"] + > + }, + getDependencies(): ReturnType { + return rpcRound("getDependencies", {}) as ReturnType< + T.Effects["getDependencies"] + > + }, + setHealth(...[options]: Parameters) { + return rpcRound("setHealth", options) as ReturnType< + T.Effects["setHealth"] + > + }, + + 
setMainStatus(o: { status: "running" | "stopped" }): Promise { + return rpcRound("setMainStatus", o) as ReturnType + }, + + shutdown(...[]: Parameters) { + return rpcRound("shutdown", {}) as ReturnType + }, + store: { + get: async (options: any) => + rpcRound("getStore", { + ...options, + callback: context.callbacks?.addCallback(options.callback) || null, + }) as any, + set: async (options: any) => + rpcRound("setStore", options) as ReturnType, + } as T.Effects["store"], + } + return self +} + +export function makeProcedureEffects(procedureId: string): Effects { + return makeEffects({ procedureId, callbacks: null }) +} + +export function makeMainEffects(): MainEffects { + const rpcRound = rpcRoundFor(null) + return { + _type: "main", + clearCallbacks: () => { + return rpcRound("clearCallbacks", {}) as Promise + }, + ...makeEffects({ procedureId: null, callbacks: new CallbackHolder() }), + } +} diff --git a/container-runtime/src/Adapters/HostSystemStartOs.ts b/container-runtime/src/Adapters/HostSystemStartOs.ts deleted file mode 100644 index 5e52224fa..000000000 --- a/container-runtime/src/Adapters/HostSystemStartOs.ts +++ /dev/null @@ -1,290 +0,0 @@ -import { types as T } from "@start9labs/start-sdk" -import * as net from "net" -import { object, string, number, literals, some, unknown } from "ts-matches" -import { Effects } from "../Models/Effects" - -import { CallbackHolder } from "../Models/CallbackHolder" -const matchRpcError = object({ - error: object( - { - code: number, - message: string, - data: some( - string, - object( - { - details: string, - debug: string, - }, - ["debug"], - ), - ), - }, - ["data"], - ), -}) -const testRpcError = matchRpcError.test -const testRpcResult = object({ - result: unknown, -}).test -type RpcError = typeof matchRpcError._TYPE - -const SOCKET_PATH = "/media/startos/rpc/host.sock" -const MAIN = "/main" as const -export class HostSystemStartOs implements Effects { - static of(callbackHolder: CallbackHolder) { - return new 
HostSystemStartOs(callbackHolder) - } - - constructor(readonly callbackHolder: CallbackHolder) {} - id = 0 - rpcRound( - method: K, - params: unknown, - ) { - const id = this.id++ - const client = net.createConnection({ path: SOCKET_PATH }, () => { - client.write( - JSON.stringify({ - id, - method, - params, - }) + "\n", - ) - }) - let bufs: Buffer[] = [] - return new Promise((resolve, reject) => { - client.on("data", (data) => { - try { - bufs.push(data) - if (data.reduce((acc, x) => acc || x == 10, false)) { - const res: unknown = JSON.parse( - Buffer.concat(bufs).toString().split("\n")[0], - ) - if (testRpcError(res)) { - let message = res.error.message - console.error({ method, params, hostSystemStartOs: true }) - if (string.test(res.error.data)) { - message += ": " + res.error.data - console.error(res.error.data) - } else { - if (res.error.data?.details) { - message += ": " + res.error.data.details - console.error(res.error.data.details) - } - if (res.error.data?.debug) { - message += "\n" + res.error.data.debug - console.error("Debug: " + res.error.data.debug) - } - } - reject(new Error(`${message}@${method}`)) - } else if (testRpcResult(res)) { - resolve(res.result) - } else { - reject(new Error(`malformed response ${JSON.stringify(res)}`)) - } - } - } catch (error) { - reject(error) - } - client.end() - }) - client.on("error", (error) => { - reject(error) - }) - }) - } - - bind(...[options]: Parameters) { - return this.rpcRound("bind", options) as ReturnType - } - clearBindings(...[]: Parameters) { - return this.rpcRound("clearBindings", null) as ReturnType< - T.Effects["clearBindings"] - > - } - clearServiceInterfaces( - ...[]: Parameters - ) { - return this.rpcRound("clearServiceInterfaces", null) as ReturnType< - T.Effects["clearServiceInterfaces"] - > - } - createOverlayedImage(options: { - imageId: string - }): Promise<[string, string]> { - return this.rpcRound("createOverlayedImage", options) as ReturnType< - T.Effects["createOverlayedImage"] - > - } 
- destroyOverlayedImage(options: { guid: string }): Promise { - return this.rpcRound("destroyOverlayedImage", options) as ReturnType< - T.Effects["destroyOverlayedImage"] - > - } - executeAction(...[options]: Parameters) { - return this.rpcRound("executeAction", options) as ReturnType< - T.Effects["executeAction"] - > - } - exists(...[packageId]: Parameters) { - return this.rpcRound("exists", packageId) as ReturnType - } - exportAction(...[options]: Parameters) { - return this.rpcRound("exportAction", options) as ReturnType< - T.Effects["exportAction"] - > - } - exportServiceInterface: Effects["exportServiceInterface"] = ( - ...[options]: Parameters - ) => { - return this.rpcRound("exportServiceInterface", options) as ReturnType< - T.Effects["exportServiceInterface"] - > - } - exposeForDependents(...[options]: any) { - return this.rpcRound("exposeForDependents", null) as ReturnType< - T.Effects["exposeForDependents"] - > - } - getConfigured(...[]: Parameters) { - return this.rpcRound("getConfigured", null) as ReturnType< - T.Effects["getConfigured"] - > - } - getContainerIp(...[]: Parameters) { - return this.rpcRound("getContainerIp", null) as ReturnType< - T.Effects["getContainerIp"] - > - } - getHostInfo: Effects["getHostInfo"] = (...[allOptions]: any[]) => { - const options = { - ...allOptions, - callback: this.callbackHolder.addCallback(allOptions.callback), - } - return this.rpcRound("getHostInfo", options) as ReturnType< - T.Effects["getHostInfo"] - > as any - } - getServiceInterface( - ...[options]: Parameters - ) { - return this.rpcRound("getServiceInterface", { - ...options, - callback: this.callbackHolder.addCallback(options.callback), - }) as ReturnType - } - - getPrimaryUrl(...[options]: Parameters) { - return this.rpcRound("getPrimaryUrl", { - ...options, - callback: this.callbackHolder.addCallback(options.callback), - }) as ReturnType - } - getServicePortForward( - ...[options]: Parameters - ) { - return this.rpcRound("getServicePortForward", options) 
as ReturnType< - T.Effects["getServicePortForward"] - > - } - getSslCertificate(options: Parameters[0]) { - return this.rpcRound("getSslCertificate", options) as ReturnType< - T.Effects["getSslCertificate"] - > - } - getSslKey(options: Parameters[0]) { - return this.rpcRound("getSslKey", options) as ReturnType< - T.Effects["getSslKey"] - > - } - getSystemSmtp(...[options]: Parameters) { - return this.rpcRound("getSystemSmtp", { - ...options, - callback: this.callbackHolder.addCallback(options.callback), - }) as ReturnType - } - listServiceInterfaces( - ...[options]: Parameters - ) { - return this.rpcRound("listServiceInterfaces", { - ...options, - callback: this.callbackHolder.addCallback(options.callback), - }) as ReturnType - } - mount(...[options]: Parameters) { - return this.rpcRound("mount", options) as ReturnType - } - removeAction(...[options]: Parameters) { - return this.rpcRound("removeAction", options) as ReturnType< - T.Effects["removeAction"] - > - } - removeAddress(...[options]: Parameters) { - return this.rpcRound("removeAddress", options) as ReturnType< - T.Effects["removeAddress"] - > - } - restart(...[]: Parameters) { - return this.rpcRound("restart", null) - } - reverseProxy(...[options]: Parameters) { - return this.rpcRound("reverseProxy", options) as ReturnType< - T.Effects["reverseProxy"] - > - } - running(...[packageId]: Parameters) { - return this.rpcRound("running", { packageId }) as ReturnType< - T.Effects["running"] - > - } - // runRsync(...[options]: Parameters) { - // - // return this.rpcRound('executeAction', options) as ReturnType - // - // return this.rpcRound('executeAction', options) as ReturnType - // } - setConfigured(...[configured]: Parameters) { - return this.rpcRound("setConfigured", { configured }) as ReturnType< - T.Effects["setConfigured"] - > - } - setDependencies( - ...[dependencies]: Parameters - ): ReturnType { - return this.rpcRound("setDependencies", { dependencies }) as ReturnType< - T.Effects["setDependencies"] - > 
- } - setHealth(...[options]: Parameters) { - return this.rpcRound("setHealth", options) as ReturnType< - T.Effects["setHealth"] - > - } - - setMainStatus(o: { status: "running" | "stopped" }): Promise { - return this.rpcRound("setMainStatus", o) as ReturnType< - T.Effects["setHealth"] - > - } - - shutdown(...[]: Parameters) { - return this.rpcRound("shutdown", null) - } - stopped(...[packageId]: Parameters) { - return this.rpcRound("stopped", { packageId }) as ReturnType< - T.Effects["stopped"] - > - } - store: T.Effects["store"] = { - get: async (options: any) => - this.rpcRound("getStore", { - ...options, - callback: this.callbackHolder.addCallback(options.callback), - }) as any, - set: async (options: any) => - this.rpcRound("setStore", options) as ReturnType< - T.Effects["store"]["set"] - >, - } -} diff --git a/container-runtime/src/Adapters/RpcListener.ts b/container-runtime/src/Adapters/RpcListener.ts index 202e942b5..28f578149 100644 --- a/container-runtime/src/Adapters/RpcListener.ts +++ b/container-runtime/src/Adapters/RpcListener.ts @@ -15,15 +15,16 @@ import { } from "ts-matches" import { types as T } from "@start9labs/start-sdk" -import * as CP from "child_process" -import * as Mod from "module" import * as fs from "fs" import { CallbackHolder } from "../Models/CallbackHolder" import { AllGetDependencies } from "../Interfaces/AllGetDependencies" -import { HostSystem } from "../Interfaces/HostSystem" import { jsonPath } from "../Models/JsonPath" -import { System } from "../Interfaces/System" +import { RunningMain, System } from "../Interfaces/System" +import { + MakeMainEffects, + MakeProcedureEffects, +} from "../Interfaces/MakeEffects" type MaybePromise = T | Promise export const matchRpcResult = anyOf( object({ result: any }), @@ -45,7 +46,7 @@ export const matchRpcResult = anyOf( }), ) export type RpcResult = typeof matchRpcResult._TYPE -type SocketResponse = { jsonrpc: "2.0"; id: IdType } & RpcResult +type SocketResponse = ({ jsonrpc: "2.0"; id: 
IdType } & RpcResult) | null const SOCKET_PARENT = "/media/startos/rpc" const SOCKET_PATH = "/media/startos/rpc/service.sock" @@ -58,11 +59,12 @@ const runType = object({ method: literal("execute"), params: object( { + id: string, procedure: string, input: any, timeout: number, }, - ["timeout"], + ["timeout", "input"], ), }) const sandboxRunType = object({ @@ -70,18 +72,18 @@ const sandboxRunType = object({ method: literal("sandbox"), params: object( { + id: string, procedure: string, input: any, timeout: number, }, - ["timeout"], + ["timeout", "input"], ), }) const callbackType = object({ - id: idType, method: literal("callback"), params: object({ - callback: string, + callback: number, args: array, }), }) @@ -89,6 +91,14 @@ const initType = object({ id: idType, method: literal("init"), }) +const startType = object({ + id: idType, + method: literal("start"), +}) +const stopType = object({ + id: idType, + method: literal("stop"), +}) const exitType = object({ id: idType, method: literal("exit"), @@ -101,34 +111,41 @@ const evalType = object({ }), }) -const jsonParse = (x: Buffer) => JSON.parse(x.toString()) -function reduceMethod( - methodArgs: object, - effects: HostSystem, -): (previousValue: any, currentValue: string) => any { - return (x: any, method: string) => - Promise.resolve(x) - .then((x) => x[method]) - .then((x) => - typeof x !== "function" - ? 
x - : x({ - ...methodArgs, - effects, - }), +const jsonParse = (x: string) => JSON.parse(x) + +const handleRpc = (id: IdType, result: Promise) => + result + .then((result) => ({ + jsonrpc, + id, + ...result, + })) + .then((x) => { + if ( + ("result" in x && x.result === undefined) || + !("error" in x || "result" in x) ) -} + (x as any).result = null + return x + }) + .catch((error) => ({ + jsonrpc, + id, + error: { + code: 0, + message: typeof error, + data: { details: "" + error, debug: error?.stack }, + }, + })) const hasId = object({ id: idType }).test export class RpcListener { unixSocketServer = net.createServer(async (server) => {}) private _system: System | undefined - private _effects: HostSystem | undefined + private _makeProcedureEffects: MakeProcedureEffects | undefined + private _makeMainEffects: MakeMainEffects | undefined - constructor( - readonly getDependencies: AllGetDependencies, - private callbacks = new CallbackHolder(), - ) { + constructor(readonly getDependencies: AllGetDependencies) { if (!fs.existsSync(SOCKET_PARENT)) { fs.mkdirSync(SOCKET_PARENT, { recursive: true }) } @@ -160,125 +177,140 @@ export class RpcListener { details: error?.message ?? 
String(error), debug: error?.stack, }, - code: 0, + code: 1, }, }) - const writeDataToSocket = (x: SocketResponse) => - new Promise((resolve) => s.write(JSON.stringify(x), resolve)) + const writeDataToSocket = (x: SocketResponse) => { + if (x != null) { + return new Promise((resolve) => + s.write(JSON.stringify(x) + "\n", resolve), + ) + } + } s.on("data", (a) => Promise.resolve(a) + .then((b) => b.toString()) .then(logData("dataIn")) .then(jsonParse) .then(captureId) .then((x) => this.dealWithInput(x)) .catch(mapError) .then(logData("response")) - .then(writeDataToSocket) - .finally(() => void s.end()), + .then(writeDataToSocket), ) }) } - private get effects() { - return this.getDependencies.hostSystem()(this.callbacks) - } - private get system() { if (!this._system) throw new Error("System not initialized") return this._system } + private get makeProcedureEffects() { + if (!this._makeProcedureEffects) { + this._makeProcedureEffects = this.getDependencies.makeProcedureEffects() + } + return this._makeProcedureEffects + } + + private get makeMainEffects() { + if (!this._makeMainEffects) { + this._makeMainEffects = this.getDependencies.makeMainEffects() + } + return this._makeMainEffects + } + private dealWithInput(input: unknown): MaybePromise { return matches(input) - .when(some(runType, sandboxRunType), async ({ id, params }) => { + .when(runType, async ({ id, params }) => { const system = this.system const procedure = jsonPath.unsafeCast(params.procedure) - return system - .execute(this.effects, { + const effects = this.getDependencies.makeProcedureEffects()(params.id) + return handleRpc( + id, + system.execute(effects, { procedure, input: params.input, timeout: params.timeout, - }) - .then((result) => ({ - jsonrpc, - id, - ...result, - })) - .then((x) => { - if ( - ("result" in x && x.result === undefined) || - !("error" in x || "result" in x) - ) - (x as any).result = null - return x - }) - .catch((error) => ({ - jsonrpc, - id, - error: { - code: 0, - 
message: typeof error, - data: { details: "" + error, debug: error?.stack }, - }, - })) + }), + ) }) - .when(callbackType, async ({ id, params: { callback, args } }) => - Promise.resolve(this.callbacks.callCallback(callback, args)) - .then((result) => ({ - jsonrpc, - id, - result, - })) - .catch((error) => ({ - jsonrpc, - id, - - error: { - code: 0, - message: typeof error, - data: { - details: error?.message ?? String(error), - debug: error?.stack, - }, - }, - })), - ) - .when(exitType, async ({ id }) => { - if (this._system) this._system.exit(this.effects) - delete this._system - delete this._effects - - return { - jsonrpc, + .when(sandboxRunType, async ({ id, params }) => { + const system = this.system + const procedure = jsonPath.unsafeCast(params.procedure) + const effects = this.makeProcedureEffects(params.id) + return handleRpc( id, - result: null, - } + system.sandbox(effects, { + procedure, + input: params.input, + timeout: params.timeout, + }), + ) + }) + .when(callbackType, async ({ params: { callback, args } }) => { + this.system.callCallback(callback, args) + return null + }) + .when(startType, async ({ id }) => { + return handleRpc( + id, + this.system + .start(this.makeMainEffects()) + .then((result) => ({ result })), + ) + }) + .when(stopType, async ({ id }) => { + return handleRpc( + id, + this.system.stop().then((result) => ({ result })), + ) + }) + .when(exitType, async ({ id }) => { + return handleRpc( + id, + (async () => { + if (this._system) await this._system.exit() + })().then((result) => ({ result })), + ) }) .when(initType, async ({ id }) => { - this._system = await this.getDependencies.system() - - return { - jsonrpc, + return handleRpc( id, - result: null, - } + (async () => { + if (!this._system) { + const system = await this.getDependencies.system() + await system.init() + this._system = system + } + })().then((result) => ({ result })), + ) }) .when(evalType, async ({ id, params }) => { - const result = await new Function( - `return 
(async () => { return (${params.script}) }).call(this)`, - ).call({ - listener: this, - require: require, - }) - return { - jsonrpc, + return handleRpc( id, - result: !["string", "number", "boolean", "null", "object"].includes( - typeof result, - ) - ? null - : result, - } + (async () => { + const result = await new Function( + `return (async () => { return (${params.script}) }).call(this)`, + ).call({ + listener: this, + require: require, + }) + return { + jsonrpc, + id, + result: ![ + "string", + "number", + "boolean", + "null", + "object", + ].includes(typeof result) + ? null + : result, + } + })(), + ) }) .when(shape({ id: idType, method: string }), ({ id, method }) => ({ jsonrpc, diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts index 1a8b54e22..012a70eee 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts @@ -14,10 +14,11 @@ export class DockerProcedureContainer { // } static async of( effects: T.Effects, + packageId: string, data: DockerProcedure, volumes: { [id: VolumeId]: Volume }, ) { - const overlay = await Overlay.of(effects, data.image) + const overlay = await Overlay.of(effects, { id: data.image }) if (data.mounts) { const mounts = data.mounts @@ -38,16 +39,25 @@ export class DockerProcedureContainer { mounts[mount], ) } else if (volumeMount.type === "certificate") { - volumeMount + const hostnames = [ + `${packageId}.embassy`, + ...new Set( + Object.values( + ( + await effects.getHostInfo({ + hostId: volumeMount["interface-id"], + }) + )?.hostnameInfo || {}, + ) + .flatMap((h) => h) + .flatMap((h) => (h.kind === "onion" ? 
[h.hostname.value] : [])), + ).values(), + ] const certChain = await effects.getSslCertificate({ - packageId: null, - hostId: volumeMount["interface-id"], - algorithm: null, + hostnames, }) const key = await effects.getSslKey({ - packageId: null, - hostId: volumeMount["interface-id"], - algorithm: null, + hostnames, }) await fs.writeFile( `${path}/${volumeMount["interface-id"]}.cert.pem`, @@ -58,17 +68,19 @@ export class DockerProcedureContainer { key, ) } else if (volumeMount.type === "pointer") { - await effects.mount({ - location: path, - target: { - packageId: volumeMount["package-id"], - subpath: volumeMount.path, - readonly: volumeMount.readonly, - volumeId: volumeMount["volume-id"], - }, - }) + await effects + .mount({ + location: path, + target: { + packageId: volumeMount["package-id"], + subpath: volumeMount.path, + readonly: volumeMount.readonly, + volumeId: volumeMount["volume-id"], + }, + }) + .catch(console.warn) } else if (volumeMount.type === "backup") { - throw new Error("TODO") + await overlay.mount({ type: "backup", subpath: null }, mounts[mount]) } } } @@ -84,10 +96,19 @@ export class DockerProcedureContainer { } } - async execSpawn(commands: string[]) { + async execFail(commands: string[], timeoutMs: number | null) { try { - const spawned = await this.overlay.spawn(commands) - return spawned + const res = await this.overlay.exec(commands, {}, timeoutMs) + if (res.exitCode !== 0) { + const codeOrSignal = + res.exitCode !== null + ? 
`code ${res.exitCode}` + : `signal ${res.exitSignal}` + throw new Error( + `Process exited with ${codeOrSignal}: ${res.stderr.toString()}`, + ) + } + return res } finally { await this.overlay.destroy() } diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/MainLoop.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/MainLoop.ts index 17fd13468..f8f0a2d6e 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/MainLoop.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/MainLoop.ts @@ -1,8 +1,10 @@ -import { PolyfillEffects } from "./polyfillEffects" +import { polyfillEffects } from "./polyfillEffects" import { DockerProcedureContainer } from "./DockerProcedureContainer" import { SystemForEmbassy } from "." -import { HostSystemStartOs } from "../../HostSystemStartOs" -import { Daemons, T, daemons } from "@start9labs/start-sdk" +import { T, utils } from "@start9labs/start-sdk" +import { Daemon } from "@start9labs/start-sdk/cjs/lib/mainFn/Daemon" +import { Effects } from "../../../Models/Effects" +import { off } from "node:process" const EMBASSY_HEALTH_INTERVAL = 15 * 1000 const EMBASSY_PROPERTIES_LOOP = 30 * 1000 @@ -12,25 +14,28 @@ const EMBASSY_PROPERTIES_LOOP = 30 * 1000 * Also, this has an ability to clean itself up too if need be. 
*/ export class MainLoop { - private healthLoops: - | { - name: string - interval: NodeJS.Timeout - }[] - | undefined + private healthLoops?: { + name: string + interval: NodeJS.Timeout + }[] - private mainEvent: - | Promise<{ - daemon: T.DaemonReturned - wait: Promise - }> - | undefined - constructor( + private mainEvent?: { + daemon: Daemon + } + + private constructor( readonly system: SystemForEmbassy, - readonly effects: HostSystemStartOs, - ) { - this.healthLoops = this.constructHealthLoops() - this.mainEvent = this.constructMainEvent() + readonly effects: Effects, + ) {} + + static async of( + system: SystemForEmbassy, + effects: Effects, + ): Promise { + const res = new MainLoop(system, effects) + res.healthLoops = res.constructHealthLoops() + res.mainEvent = await res.constructMainEvent() + return res } private async constructMainEvent() { @@ -40,44 +45,76 @@ export class MainLoop { ...system.manifest.main.args, ] + await this.setupInterfaces(effects) await effects.setMainStatus({ status: "running" }) const jsMain = (this.system.moduleCode as any)?.jsMain const dockerProcedureContainer = await DockerProcedureContainer.of( effects, + this.system.manifest.id, this.system.manifest.main, this.system.manifest.volumes, ) if (jsMain) { - const daemons = Daemons.of({ - effects, - started: async (_) => {}, - healthReceipts: [], - }) - throw new Error("todo") - // return { - // daemon, - // wait: daemon.wait().finally(() => { - // this.clean() - // effects.setMainStatus({ status: "stopped" }) - // }), - // } + throw new Error("Unreachable") } - const daemon = await daemons.runDaemon()( + const daemon = await Daemon.of()( this.effects, - this.system.manifest.main.image, + { id: this.system.manifest.main.image }, currentCommand, { overlay: dockerProcedureContainer.overlay, + sigtermTimeout: utils.inMs( + this.system.manifest.main["sigterm-timeout"], + ), }, ) + daemon.start() return { daemon, - wait: daemon.wait().finally(() => { - this.clean() - effects - 
.setMainStatus({ status: "stopped" }) - .catch((e) => console.error("Could not set the status to stopped")) - }), + } + } + + private async setupInterfaces(effects: T.Effects) { + for (const interfaceId in this.system.manifest.interfaces) { + const iface = this.system.manifest.interfaces[interfaceId] + const internalPorts = new Set() + for (const port of Object.values( + iface["tor-config"]?.["port-mapping"] || {}, + )) { + internalPorts.add(parseInt(port)) + } + for (const port of Object.values(iface["lan-config"] || {})) { + internalPorts.add(port.internal) + } + for (const internalPort of internalPorts) { + const torConf = Object.entries( + iface["tor-config"]?.["port-mapping"] || {}, + ) + .map(([external, internal]) => ({ + internal: parseInt(internal), + external: parseInt(external), + })) + .find((conf) => conf.internal == internalPort) + const lanConf = Object.entries(iface["lan-config"] || {}) + .map(([external, conf]) => ({ + external: parseInt(external), + ...conf, + })) + .find((conf) => conf.internal == internalPort) + await effects.bind({ + kind: "multi", + id: interfaceId, + internalPort, + preferredExternalPort: torConf?.external || internalPort, + secure: null, + addSsl: lanConf?.ssl + ? 
{ + preferredExternalPort: lanConf.external, + alpn: { specified: ["http/1.1"] }, + } + : null, + }) + } } } @@ -86,7 +123,8 @@ export class MainLoop { const main = await mainEvent delete this.mainEvent delete this.healthLoops - if (mainEvent) await main?.daemon.term() + await main?.daemon.stop().catch((e) => console.error(e)) + this.effects.setMainStatus({ status: "stopped" }) if (healthLoops) healthLoops.forEach((x) => clearInterval(x.interval)) } @@ -102,14 +140,24 @@ export class MainLoop { if (actionProcedure.type === "docker") { const container = await DockerProcedureContainer.of( effects, + manifest.id, actionProcedure, manifest.volumes, ) - const executed = await container.execSpawn([ + const executed = await container.exec([ actionProcedure.entrypoint, ...actionProcedure.args, JSON.stringify(timeChanged), ]) + if (executed.exitCode === 0) { + await effects.setHealth({ + id: healthId, + name: value.name, + result: "success", + message: actionProcedure["success-message"], + }) + return + } if (executed.exitCode === 59) { await effects.setHealth({ id: healthId, @@ -173,7 +221,7 @@ export class MainLoop { } const result = await method( - new PolyfillEffects(effects, this.system.manifest), + polyfillEffects(effects, this.system.manifest), timeChanged, ) diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/bitcoind.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/bitcoind.ts new file mode 100644 index 000000000..9a643b39d --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/bitcoind.ts @@ -0,0 +1,387 @@ +export default { + "peer-tor-address": { + name: "Peer Tor Address", + description: "The Tor address of the peer interface", + type: "pointer", + subtype: "package", + "package-id": "bitcoind", + target: "tor-address", + interface: "peer", + }, + "rpc-tor-address": { + name: "RPC Tor Address", + description: "The Tor address of the RPC interface", + type: "pointer", + 
subtype: "package", + "package-id": "bitcoind", + target: "tor-address", + interface: "rpc", + }, + rpc: { + type: "object", + name: "RPC Settings", + description: "RPC configuration options.", + spec: { + enable: { + type: "boolean", + name: "Enable", + description: "Allow remote RPC requests.", + default: true, + }, + username: { + type: "string", + nullable: false, + name: "Username", + description: "The username for connecting to Bitcoin over RPC.", + warning: + "You will need to restart all services that depend on Bitcoin.", + default: "bitcoin", + masked: true, + pattern: "^[a-zA-Z0-9_]+$", + "pattern-description": "Must be alphanumeric (can contain underscore).", + }, + password: { + type: "string", + nullable: false, + name: "RPC Password", + description: "The password for connecting to Bitcoin over RPC.", + warning: + "You will need to restart all services that depend on Bitcoin.", + default: { + charset: "a-z,2-7", + len: 20, + }, + pattern: "^[a-zA-Z0-9_]+$", + "pattern-description": "Must be alphanumeric (can contain underscore).", + copyable: true, + masked: true, + }, + advanced: { + type: "object", + name: "Advanced", + description: "Advanced RPC Settings", + spec: { + auth: { + name: "Authorization", + description: + "Username and hashed password for JSON-RPC connections. RPC clients connect using the usual http basic authentication.", + type: "list", + subtype: "string", + default: [], + spec: { + pattern: "^[a-zA-Z0-9_-]+:([0-9a-fA-F]{2})+\\$([0-9a-fA-F]{2})+$", + "pattern-description": + 'Each item must be of the form ":$".', + }, + range: "[0,*)", + }, + servertimeout: { + name: "Rpc Server Timeout", + description: + "Number of seconds after which an uncompleted RPC call will time out.", + type: "number", + nullable: false, + range: "[5,300]", + integral: true, + units: "seconds", + default: 30, + }, + threads: { + name: "Threads", + description: + "Set the number of threads for handling RPC calls. 
You may wish to increase this if you are making lots of calls via an integration.", + type: "number", + nullable: false, + default: 16, + range: "[1,64]", + integral: true, + units: undefined, + }, + workqueue: { + name: "Work Queue", + description: + "Set the depth of the work queue to service RPC calls. Determines how long the backlog of RPC requests can get before it just rejects new ones.", + type: "number", + nullable: false, + default: 128, + range: "[8,256]", + integral: true, + units: "requests", + }, + }, + }, + }, + }, + "zmq-enabled": { + type: "boolean", + name: "ZeroMQ Enabled", + description: + "The ZeroMQ interface is useful for some applications which might require data related to block and transaction events from Bitcoin Core. For example, LND requires ZeroMQ be enabled for LND to get the latest block data", + default: true, + }, + txindex: { + type: "boolean", + name: "Transaction Index", + description: + "By enabling Transaction Index (txindex) Bitcoin Core will build a complete transaction index. This allows Bitcoin Core to access any transaction with commands like `gettransaction`.", + default: true, + }, + coinstatsindex: { + type: "boolean", + name: "Coinstats Index", + description: + "Enabling Coinstats Index reduces the time for the gettxoutsetinfo RPC to complete at the cost of using additional disk space", + default: false, + }, + wallet: { + type: "object", + name: "Wallet", + description: "Wallet Settings", + spec: { + enable: { + name: "Enable Wallet", + description: "Load the wallet and enable wallet RPC calls.", + type: "boolean", + default: true, + }, + avoidpartialspends: { + name: "Avoid Partial Spends", + description: + "Group outputs by address, selecting all or none, instead of selecting on a per-output basis. 
This improves privacy at the expense of higher transaction fees.", + type: "boolean", + default: true, + }, + discardfee: { + name: "Discard Change Tolerance", + description: + "The fee rate (in BTC/kB) that indicates your tolerance for discarding change by adding it to the fee.", + type: "number", + nullable: false, + default: 0.0001, + range: "[0,.01]", + integral: false, + units: "BTC/kB", + }, + }, + }, + advanced: { + type: "object", + name: "Advanced", + description: "Advanced Settings", + spec: { + mempool: { + type: "object", + name: "Mempool", + description: "Mempool Settings", + spec: { + persistmempool: { + type: "boolean", + name: "Persist Mempool", + description: "Save the mempool on shutdown and load on restart.", + default: true, + }, + maxmempool: { + type: "number", + nullable: false, + name: "Max Mempool Size", + description: + "Keep the transaction memory pool below megabytes.", + range: "[1,*)", + integral: true, + units: "MiB", + default: 300, + }, + mempoolexpiry: { + type: "number", + nullable: false, + name: "Mempool Expiration", + description: + "Do not keep transactions in the mempool longer than hours.", + range: "[1,*)", + integral: true, + units: "Hr", + default: 336, + }, + mempoolfullrbf: { + name: "Enable Full RBF", + description: + "Policy for your node to use for relaying and mining unconfirmed transactions. 
For details, see https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-24.0.1.md#notice-of-new-option-for-transaction-replacement-policies", + type: "boolean", + default: true, + }, + permitbaremultisig: { + type: "boolean", + name: "Permit Bare Multisig", + description: "Relay non-P2SH multisig transactions", + default: true, + }, + datacarrier: { + type: "boolean", + name: "Relay OP_RETURN Transactions", + description: "Relay transactions with OP_RETURN outputs", + default: true, + }, + datacarriersize: { + type: "number", + nullable: false, + name: "Max OP_RETURN Size", + description: "Maximum size of data in OP_RETURN outputs to relay", + range: "[0,10000]", + integral: true, + units: "bytes", + default: 83, + }, + }, + }, + peers: { + type: "object", + name: "Peers", + description: "Peer Connection Settings", + spec: { + listen: { + type: "boolean", + name: "Make Public", + description: + "Allow other nodes to find your server on the network.", + default: true, + }, + onlyconnect: { + type: "boolean", + name: "Disable Peer Discovery", + description: "Only connect to specified peers.", + default: false, + }, + onlyonion: { + type: "boolean", + name: "Disable Clearnet", + description: "Only connect to peers over Tor.", + default: false, + }, + v2transport: { + type: "boolean", + name: "Use V2 P2P Transport Protocol", + description: + "Enable or disable the use of BIP324 V2 P2P transport protocol.", + default: false, + }, + addnode: { + name: "Add Nodes", + description: "Add addresses of nodes to connect to.", + type: "list", + subtype: "object", + range: "[0,*)", + default: [], + spec: { + spec: { + hostname: { + type: "string", + nullable: false, + name: "Hostname", + description: "Domain or IP address of bitcoin peer", + pattern: + 
"(^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$)|((^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$)|(^[a-z2-7]{16}\\.onion$)|(^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$))", + "pattern-description": + "Must be either a domain name, or an IPv4 or IPv6 address. Do not include protocol scheme (eg 'http://') or port.", + }, + port: { + type: "number", + nullable: true, + name: "Port", + description: + "Port that peer is listening on for inbound p2p connections", + range: "[0,65535]", + integral: true, + }, + }, + }, + }, + }, + }, + pruning: { + type: "union", + name: "Pruning Settings", + description: + "Blockchain Pruning Options\nReduce the blockchain size on disk\n", + warning: + "Disabling pruning will convert your node into a full archival node. 
This requires a resync of the entire blockchain, a process that may take several days.\n", + tag: { + id: "mode", + name: "Pruning Mode", + description: + "- Disabled: Disable pruning\n- Automatic: Limit blockchain size on disk to a certain number of megabytes\n", + "variant-names": { + disabled: "Disabled", + automatic: "Automatic", + }, + }, + variants: { + disabled: {}, + automatic: { + size: { + type: "number", + nullable: false, + name: "Max Chain Size", + description: "Limit of blockchain size on disk.", + warning: + "Increasing this value will require re-syncing your node.", + default: 550, + range: "[550,1000000)", + integral: true, + units: "MiB", + }, + }, + }, + default: "disabled", + }, + dbcache: { + type: "number", + nullable: true, + name: "Database Cache", + description: + "How much RAM to allocate for caching the TXO set. Higher values improve syncing performance, but increase your chance of using up all your system's memory or corrupting your database in the event of an ungraceful shutdown. Set this high but comfortably below your system's total RAM during IBD, then turn down to 450 (or leave blank) once the sync completes.", + warning: + "WARNING: Increasing this value results in a higher chance of ungraceful shutdowns, which can leave your node unusable if it happens during the initial block download. Use this setting with caution. Be sure to set this back to the default (450 or leave blank) once your node is synced. DO NOT press the STOP button if your dbcache is large. 
Instead, set this number back to the default, hit save, and wait for bitcoind to restart on its own.", + range: "(0,*)", + integral: true, + units: "MiB", + }, + blockfilters: { + type: "object", + name: "Block Filters", + description: "Settings for storing and serving compact block filters", + spec: { + blockfilterindex: { + type: "boolean", + name: "Compute Compact Block Filters (BIP158)", + description: + "Generate Compact Block Filters during initial sync (IBD) to enable 'getblockfilter' RPC. This is useful if dependent services need block filters to efficiently scan for addresses/transactions etc.", + default: true, + }, + peerblockfilters: { + type: "boolean", + name: "Serve Compact Block Filters to Peers (BIP157)", + description: + "Serve Compact Block Filters as a peer service to other nodes on the network. This is useful if you wish to connect an SPV client to your node to make it efficient to scan transactions without having to download all block data. 'Compute Compact Block Filters (BIP158)' is required.", + default: false, + }, + }, + }, + bloomfilters: { + type: "object", + name: "Bloom Filters (BIP37)", + description: "Setting for serving Bloom Filters", + spec: { + peerbloomfilters: { + type: "boolean", + name: "Serve Bloom Filters to Peers", + description: + "Peers have the option of setting filters on each connection they make after the version handshake has completed. Bloom filters are for clients implementing SPV (Simplified Payment Verification) that want to check that block headers connect together correctly, without needing to verify the full blockchain. The client must trust that the transactions in the chain are in fact valid. 
It is highly recommended AGAINST using for anything except Bisq integration.", + warning: + "This is ONLY for use with Bisq integration, please use Block Filters for all other applications.", + default: false, + }, + }, + }, + }, + }, +} diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/embasyPagesConfig.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/embasyPagesConfig.ts new file mode 100644 index 000000000..cb70bd123 --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/embasyPagesConfig.ts @@ -0,0 +1,127 @@ +export default { + homepage: { + name: "Homepage", + description: + "The page that will be displayed when your Start9 Pages .onion address is visited. Since this page is technically publicly accessible, you can choose to which type of page to display.", + type: "union", + default: "welcome", + tag: { + id: "type", + name: "Type", + "variant-names": { + welcome: "Welcome", + index: "Table of Contents", + "web-page": "Web Page", + redirect: "Redirect", + }, + }, + variants: { + welcome: {}, + index: {}, + "web-page": { + source: { + name: "Folder Location", + description: "The service that contains your website files.", + type: "enum", + values: ["filebrowser", "nextcloud"], + "value-names": {}, + default: "nextcloud", + }, + folder: { + type: "string", + name: "Folder Path", + placeholder: "e.g. websites/resume", + description: + 'The path to the folder that contains the static files of your website. 
For example, a value of "projects/resume" would tell Start9 Pages to look for that folder path in the selected service.', + pattern: + "^(\\.|[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)(/[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|/([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)*/?$", + "pattern-description": "Must be a valid relative file path", + nullable: false, + }, + }, + redirect: { + target: { + type: "string", + name: "Target Subdomain", + description: + "The name of the subdomain to redirect users to. This must be a valid subdomain site within your Start9 Pages.", + pattern: "^[a-z-]+$", + "pattern-description": + "May contain only lowercase characters and hyphens.", + nullable: false, + }, + }, + }, + }, + subdomains: { + type: "list", + name: "Subdomains", + description: "The websites you want to serve.", + default: [], + range: "[0, *)", + subtype: "object", + spec: { + "unique-by": "name", + "display-as": "{{name}}", + spec: { + name: { + type: "string", + nullable: false, + name: "Subdomain name", + description: + 'The subdomain of your Start9 Pages .onion address to host the website on. For example, a value of "me" would produce a website hosted at http://me.xxxxxx.onion.', + pattern: "^[a-z-]+$", + "pattern-description": + "May contain only lowercase characters and hyphens", + }, + settings: { + type: "union", + name: "Settings", + description: + "The desired behavior you want to occur when the subdomain is visited. You can either redirect to another subdomain, or load a stored web page.", + default: "web-page", + tag: { + id: "type", + name: "Type", + "variant-names": { "web-page": "Web Page", redirect: "Redirect" }, + }, + variants: { + "web-page": { + source: { + name: "Folder Location", + description: "The service that contains your website files.", + type: "enum", + values: ["filebrowser", "nextcloud"], + "value-names": {}, + default: "nextcloud", + }, + folder: { + type: "string", + name: "Folder Path", + placeholder: "e.g. 
websites/resume", + description: + 'The path to the folder that contains the website files. For example, a value of "projects/resume" would tell Start9 Pages to look for that folder path in the selected service.', + pattern: + "^(\\.|[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)(/[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|/([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)*/?$", + "pattern-description": "Must be a valid relative file path", + nullable: false, + }, + }, + redirect: { + target: { + type: "string", + name: "Target Subdomain", + description: + "The subdomain of your Start9 Pages .onion address to redirect to. This should be the name of another subdomain on Start9 Pages. Leave empty to redirect to the homepage.", + pattern: "^[a-z-]+$", + "pattern-description": + "May contain only lowercase characters and hyphens.", + nullable: false, + }, + }, + }, + }, + }, + }, + }, +} diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/nostr.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/nostr.ts new file mode 100644 index 000000000..f5a93a918 --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/nostr.ts @@ -0,0 +1,28 @@ +export default { + "tor-address": { + name: "Tor Address", + description: "The Tor address of the network interface", + type: "pointer", + subtype: "package", + "package-id": "nostr-wallet-connect", + target: "tor-address", + interface: "main", + }, + "lan-address": { + name: "LAN Address", + description: "The LAN address of the network interface", + type: "pointer", + subtype: "package", + "package-id": "nostr-wallet-connect", + target: "lan-address", + interface: "main", + }, + "nostr-relay": { + type: "string", + name: "Nostr Relay", + default: "wss://relay.getalby.com/v1", + description: "The Nostr Relay to use for Nostr Wallet Connect connections", + copyable: true, + nullable: false, + }, +} diff --git 
a/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/searNXG.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/searNXG.ts new file mode 100644 index 000000000..51eb06b9a --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__fixtures__/searNXG.ts @@ -0,0 +1,39 @@ +export default { + "instance-name": { + type: "string", + name: "SearXNG Instance Name", + description: + "Enter a name for your SearXNG instance. This is the name that will be listed if you want to share your SearXNG engine publicly.", + nullable: false, + default: "My SearXNG Engine", + placeholder: "Uncle Jim SearXNG Engine", + }, + "tor-url": { + name: "Enable Tor address as the base URL", + description: + "Activates the utilization of a .onion address as the primary URL, particularly beneficial for publicly hosted instances over the Tor network.", + type: "boolean", + default: false, + }, + "enable-metrics": { + name: "Enable Stats", + description: + "Your SearXNG instance will collect anonymous stats about its own usage and performance. You can view these metrics by appending `/stats` or `/stats/errors` to your SearXNG URL.", + type: "boolean", + default: true, + }, //, + // "email-address": { + // "type": "string", + // "name": "Email Address", + // "description": "Your Email address - required to create an SSL certificate.", + // "nullable": false, + // "default": "youremail@domain.com", + // }, + // "public-host": { + // "type": "string", + // "name": "Public Domain Name", + // "description": "Enter a domain name here if you want to share your SearXNG engine publicly. 
You will also need to modify your domain name's DNS settings to point to your Start9 server.", + // "nullable": true, + // "placeholder": "https://search.mydomain.com" + // } +} diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/__snapshots__/transformConfigSpec.test.ts.snap b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__snapshots__/transformConfigSpec.test.ts.snap new file mode 100644 index 000000000..9eb6e97cf --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/__snapshots__/transformConfigSpec.test.ts.snap @@ -0,0 +1,791 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`transformConfigSpec transformConfigSpec(bitcoind) 1`] = ` +{ + "advanced": { + "description": "Advanced Settings", + "name": "Advanced", + "spec": { + "blockfilters": { + "description": "Settings for storing and serving compact block filters", + "name": "Block Filters", + "spec": { + "blockfilterindex": { + "default": true, + "description": "Generate Compact Block Filters during initial sync (IBD) to enable 'getblockfilter' RPC. This is useful if dependent services need block filters to efficiently scan for addresses/transactions etc.", + "disabled": false, + "immutable": false, + "name": "Compute Compact Block Filters (BIP158)", + "type": "toggle", + "warning": null, + }, + "peerblockfilters": { + "default": false, + "description": "Serve Compact Block Filters as a peer service to other nodes on the network. This is useful if you wish to connect an SPV client to your node to make it efficient to scan transactions without having to download all block data. 
'Compute Compact Block Filters (BIP158)' is required.", + "disabled": false, + "immutable": false, + "name": "Serve Compact Block Filters to Peers (BIP157)", + "type": "toggle", + "warning": null, + }, + }, + "type": "object", + "warning": null, + }, + "bloomfilters": { + "description": "Setting for serving Bloom Filters", + "name": "Bloom Filters (BIP37)", + "spec": { + "peerbloomfilters": { + "default": false, + "description": "Peers have the option of setting filters on each connection they make after the version handshake has completed. Bloom filters are for clients implementing SPV (Simplified Payment Verification) that want to check that block headers connect together correctly, without needing to verify the full blockchain. The client must trust that the transactions in the chain are in fact valid. It is highly recommended AGAINST using for anything except Bisq integration.", + "disabled": false, + "immutable": false, + "name": "Serve Bloom Filters to Peers", + "type": "toggle", + "warning": "This is ONLY for use with Bisq integration, please use Block Filters for all other applications.", + }, + }, + "type": "object", + "warning": null, + }, + "dbcache": { + "default": null, + "description": "How much RAM to allocate for caching the TXO set. Higher values improve syncing performance, but increase your chance of using up all your system's memory or corrupting your database in the event of an ungraceful shutdown. Set this high but comfortably below your system's total RAM during IBD, then turn down to 450 (or leave blank) once the sync completes.", + "disabled": false, + "immutable": false, + "integer": true, + "max": null, + "min": null, + "name": "Database Cache", + "placeholder": null, + "required": false, + "step": null, + "type": "number", + "units": "MiB", + "warning": "WARNING: Increasing this value results in a higher chance of ungraceful shutdowns, which can leave your node unusable if it happens during the initial block download. 
Use this setting with caution. Be sure to set this back to the default (450 or leave blank) once your node is synced. DO NOT press the STOP button if your dbcache is large. Instead, set this number back to the default, hit save, and wait for bitcoind to restart on its own.", + }, + "mempool": { + "description": "Mempool Settings", + "name": "Mempool", + "spec": { + "datacarrier": { + "default": true, + "description": "Relay transactions with OP_RETURN outputs", + "disabled": false, + "immutable": false, + "name": "Relay OP_RETURN Transactions", + "type": "toggle", + "warning": null, + }, + "datacarriersize": { + "default": 83, + "description": "Maximum size of data in OP_RETURN outputs to relay", + "disabled": false, + "immutable": false, + "integer": true, + "max": 10000, + "min": null, + "name": "Max OP_RETURN Size", + "placeholder": null, + "required": true, + "step": null, + "type": "number", + "units": "bytes", + "warning": null, + }, + "maxmempool": { + "default": 300, + "description": "Keep the transaction memory pool below megabytes.", + "disabled": false, + "immutable": false, + "integer": true, + "max": null, + "min": 1, + "name": "Max Mempool Size", + "placeholder": null, + "required": true, + "step": null, + "type": "number", + "units": "MiB", + "warning": null, + }, + "mempoolexpiry": { + "default": 336, + "description": "Do not keep transactions in the mempool longer than hours.", + "disabled": false, + "immutable": false, + "integer": true, + "max": null, + "min": 1, + "name": "Mempool Expiration", + "placeholder": null, + "required": true, + "step": null, + "type": "number", + "units": "Hr", + "warning": null, + }, + "mempoolfullrbf": { + "default": true, + "description": "Policy for your node to use for relaying and mining unconfirmed transactions. 
For details, see https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-24.0.1.md#notice-of-new-option-for-transaction-replacement-policies", + "disabled": false, + "immutable": false, + "name": "Enable Full RBF", + "type": "toggle", + "warning": null, + }, + "permitbaremultisig": { + "default": true, + "description": "Relay non-P2SH multisig transactions", + "disabled": false, + "immutable": false, + "name": "Permit Bare Multisig", + "type": "toggle", + "warning": null, + }, + "persistmempool": { + "default": true, + "description": "Save the mempool on shutdown and load on restart.", + "disabled": false, + "immutable": false, + "name": "Persist Mempool", + "type": "toggle", + "warning": null, + }, + }, + "type": "object", + "warning": null, + }, + "peers": { + "description": "Peer Connection Settings", + "name": "Peers", + "spec": { + "addnode": { + "default": [], + "description": "Add addresses of nodes to connect to.", + "disabled": false, + "maxLength": null, + "minLength": null, + "name": "Add Nodes", + "spec": { + "displayAs": null, + "spec": { + "hostname": { + "default": null, + "description": "Domain or IP address of bitcoin peer", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": false, + "maxLength": null, + "minLength": null, + "name": "Hostname", + "patterns": [ + { + "description": "Must be either a domain name, or an IPv4 or IPv6 address. 
Do not include protocol scheme (eg 'http://') or port.", + "regex": "(^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$)|((^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$)|(^[a-z2-7]{16}\\.onion$)|(^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$))", + }, + ], + "placeholder": null, + "required": true, + "type": "text", + "warning": null, + }, + "port": { + "default": null, + "description": "Port that peer is listening on for inbound p2p connections", + "disabled": false, + "immutable": false, + "integer": true, + "max": 65535, + "min": null, + "name": "Port", + "placeholder": null, + "required": false, + "step": null, + "type": "number", + "units": null, + "warning": null, + }, + }, + "type": "object", + "uniqueBy": null, + }, + "type": "list", + "warning": null, + }, + "listen": { + "default": true, + "description": "Allow other nodes to find your server on the network.", + "disabled": false, + "immutable": false, + "name": "Make Public", + "type": "toggle", + "warning": null, + }, + "onlyconnect": { + "default": false, + "description": "Only connect to specified peers.", + "disabled": false, + "immutable": false, + "name": "Disable Peer Discovery", + "type": "toggle", + "warning": null, + }, + "onlyonion": { + "default": false, + "description": "Only connect to peers over Tor.", + 
"disabled": false, + "immutable": false, + "name": "Disable Clearnet", + "type": "toggle", + "warning": null, + }, + "v2transport": { + "default": false, + "description": "Enable or disable the use of BIP324 V2 P2P transport protocol.", + "disabled": false, + "immutable": false, + "name": "Use V2 P2P Transport Protocol", + "type": "toggle", + "warning": null, + }, + }, + "type": "object", + "warning": null, + }, + "pruning": { + "default": "disabled", + "description": "- Disabled: Disable pruning +- Automatic: Limit blockchain size on disk to a certain number of megabytes +", + "disabled": false, + "immutable": false, + "name": "Pruning Mode", + "required": true, + "type": "union", + "variants": { + "automatic": { + "name": "Automatic", + "spec": { + "size": { + "default": 550, + "description": "Limit of blockchain size on disk.", + "disabled": false, + "immutable": false, + "integer": true, + "max": 999999, + "min": 550, + "name": "Max Chain Size", + "placeholder": null, + "required": true, + "step": null, + "type": "number", + "units": "MiB", + "warning": "Increasing this value will require re-syncing your node.", + }, + }, + }, + "disabled": { + "name": "Disabled", + "spec": {}, + }, + }, + "warning": null, + }, + }, + "type": "object", + "warning": null, + }, + "coinstatsindex": { + "default": false, + "description": "Enabling Coinstats Index reduces the time for the gettxoutsetinfo RPC to complete at the cost of using additional disk space", + "disabled": false, + "immutable": false, + "name": "Coinstats Index", + "type": "toggle", + "warning": null, + }, + "rpc": { + "description": "RPC configuration options.", + "name": "RPC Settings", + "spec": { + "advanced": { + "description": "Advanced RPC Settings", + "name": "Advanced", + "spec": { + "auth": { + "default": [], + "description": "Username and hashed password for JSON-RPC connections. 
RPC clients connect using the usual http basic authentication.", + "disabled": false, + "maxLength": null, + "minLength": null, + "name": "Authorization", + "spec": { + "generate": null, + "inputmode": "text", + "masked": false, + "maxLength": null, + "minLength": null, + "patterns": [ + { + "description": "Each item must be of the form ":$".", + "regex": "^[a-zA-Z0-9_-]+:([0-9a-fA-F]{2})+\\$([0-9a-fA-F]{2})+$", + }, + ], + "placeholder": null, + "type": "text", + }, + "type": "list", + "warning": null, + }, + "servertimeout": { + "default": 30, + "description": "Number of seconds after which an uncompleted RPC call will time out.", + "disabled": false, + "immutable": false, + "integer": true, + "max": 300, + "min": 5, + "name": "Rpc Server Timeout", + "placeholder": null, + "required": true, + "step": null, + "type": "number", + "units": "seconds", + "warning": null, + }, + "threads": { + "default": 16, + "description": "Set the number of threads for handling RPC calls. You may wish to increase this if you are making lots of calls via an integration.", + "disabled": false, + "immutable": false, + "integer": true, + "max": 64, + "min": 1, + "name": "Threads", + "placeholder": null, + "required": true, + "step": null, + "type": "number", + "units": null, + "warning": null, + }, + "workqueue": { + "default": 128, + "description": "Set the depth of the work queue to service RPC calls. 
Determines how long the backlog of RPC requests can get before it just rejects new ones.", + "disabled": false, + "immutable": false, + "integer": true, + "max": 256, + "min": 8, + "name": "Work Queue", + "placeholder": null, + "required": true, + "step": null, + "type": "number", + "units": "requests", + "warning": null, + }, + }, + "type": "object", + "warning": null, + }, + "enable": { + "default": true, + "description": "Allow remote RPC requests.", + "disabled": false, + "immutable": false, + "name": "Enable", + "type": "toggle", + "warning": null, + }, + "password": { + "default": { + "charset": "a-z,2-7", + "len": 20, + }, + "description": "The password for connecting to Bitcoin over RPC.", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": true, + "maxLength": null, + "minLength": null, + "name": "RPC Password", + "patterns": [ + { + "description": "Must be alphanumeric (can contain underscore).", + "regex": "^[a-zA-Z0-9_]+$", + }, + ], + "placeholder": null, + "required": true, + "type": "text", + "warning": "You will need to restart all services that depend on Bitcoin.", + }, + "username": { + "default": "bitcoin", + "description": "The username for connecting to Bitcoin over RPC.", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": true, + "maxLength": null, + "minLength": null, + "name": "Username", + "patterns": [ + { + "description": "Must be alphanumeric (can contain underscore).", + "regex": "^[a-zA-Z0-9_]+$", + }, + ], + "placeholder": null, + "required": true, + "type": "text", + "warning": "You will need to restart all services that depend on Bitcoin.", + }, + }, + "type": "object", + "warning": null, + }, + "txindex": { + "default": true, + "description": "By enabling Transaction Index (txindex) Bitcoin Core will build a complete transaction index. 
This allows Bitcoin Core to access any transaction with commands like \`gettransaction\`.", + "disabled": false, + "immutable": false, + "name": "Transaction Index", + "type": "toggle", + "warning": null, + }, + "wallet": { + "description": "Wallet Settings", + "name": "Wallet", + "spec": { + "avoidpartialspends": { + "default": true, + "description": "Group outputs by address, selecting all or none, instead of selecting on a per-output basis. This improves privacy at the expense of higher transaction fees.", + "disabled": false, + "immutable": false, + "name": "Avoid Partial Spends", + "type": "toggle", + "warning": null, + }, + "discardfee": { + "default": 0.0001, + "description": "The fee rate (in BTC/kB) that indicates your tolerance for discarding change by adding it to the fee.", + "disabled": false, + "immutable": false, + "integer": false, + "max": 0.01, + "min": null, + "name": "Discard Change Tolerance", + "placeholder": null, + "required": true, + "step": null, + "type": "number", + "units": "BTC/kB", + "warning": null, + }, + "enable": { + "default": true, + "description": "Load the wallet and enable wallet RPC calls.", + "disabled": false, + "immutable": false, + "name": "Enable Wallet", + "type": "toggle", + "warning": null, + }, + }, + "type": "object", + "warning": null, + }, + "zmq-enabled": { + "default": true, + "description": "The ZeroMQ interface is useful for some applications which might require data related to block and transaction events from Bitcoin Core. 
For example, LND requires ZeroMQ be enabled for LND to get the latest block data", + "disabled": false, + "immutable": false, + "name": "ZeroMQ Enabled", + "type": "toggle", + "warning": null, + }, +} +`; + +exports[`transformConfigSpec transformConfigSpec(embassyPages) 1`] = ` +{ + "homepage": { + "default": "welcome", + "description": null, + "disabled": false, + "immutable": false, + "name": "Type", + "required": true, + "type": "union", + "variants": { + "index": { + "name": "Table of Contents", + "spec": {}, + }, + "redirect": { + "name": "Redirect", + "spec": { + "target": { + "default": null, + "description": "The name of the subdomain to redirect users to. This must be a valid subdomain site within your Start9 Pages.", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": false, + "maxLength": null, + "minLength": null, + "name": "Target Subdomain", + "patterns": [ + { + "description": "May contain only lowercase characters and hyphens.", + "regex": "^[a-z-]+$", + }, + ], + "placeholder": null, + "required": true, + "type": "text", + "warning": null, + }, + }, + }, + "web-page": { + "name": "Web Page", + "spec": { + "folder": { + "default": null, + "description": "The path to the folder that contains the static files of your website. For example, a value of "projects/resume" would tell Start9 Pages to look for that folder path in the selected service.", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": false, + "maxLength": null, + "minLength": null, + "name": "Folder Path", + "patterns": [ + { + "description": "Must be a valid relative file path", + "regex": "^(\\.|[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)(/[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|/([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)*/?$", + }, + ], + "placeholder": "e.g. 
websites/resume", + "required": true, + "type": "text", + "warning": null, + }, + "source": { + "default": "nextcloud", + "description": "The service that contains your website files.", + "disabled": false, + "immutable": false, + "name": "Folder Location", + "required": false, + "type": "select", + "values": { + "filebrowser": "filebrowser", + "nextcloud": "nextcloud", + }, + "warning": null, + }, + }, + }, + "welcome": { + "name": "Welcome", + "spec": {}, + }, + }, + "warning": null, + }, + "subdomains": { + "default": [], + "description": "The websites you want to serve.", + "disabled": false, + "maxLength": null, + "minLength": null, + "name": "Subdomains", + "spec": { + "displayAs": "{{name}}", + "spec": { + "name": { + "default": null, + "description": "The subdomain of your Start9 Pages .onion address to host the website on. For example, a value of "me" would produce a website hosted at http://me.xxxxxx.onion.", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": false, + "maxLength": null, + "minLength": null, + "name": "Subdomain name", + "patterns": [ + { + "description": "May contain only lowercase characters and hyphens", + "regex": "^[a-z-]+$", + }, + ], + "placeholder": null, + "required": true, + "type": "text", + "warning": null, + }, + "settings": { + "default": "web-page", + "description": null, + "disabled": false, + "immutable": false, + "name": "Type", + "required": true, + "type": "union", + "variants": { + "redirect": { + "name": "Redirect", + "spec": { + "target": { + "default": null, + "description": "The subdomain of your Start9 Pages .onion address to redirect to. This should be the name of another subdomain on Start9 Pages. 
Leave empty to redirect to the homepage.", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": false, + "maxLength": null, + "minLength": null, + "name": "Target Subdomain", + "patterns": [ + { + "description": "May contain only lowercase characters and hyphens.", + "regex": "^[a-z-]+$", + }, + ], + "placeholder": null, + "required": true, + "type": "text", + "warning": null, + }, + }, + }, + "web-page": { + "name": "Web Page", + "spec": { + "folder": { + "default": null, + "description": "The path to the folder that contains the website files. For example, a value of "projects/resume" would tell Start9 Pages to look for that folder path in the selected service.", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": false, + "maxLength": null, + "minLength": null, + "name": "Folder Path", + "patterns": [ + { + "description": "Must be a valid relative file path", + "regex": "^(\\.|[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)(/[a-zA-Z0-9_ -][a-zA-Z0-9_ .-]*|/([a-zA-Z0-9_ .-][a-zA-Z0-9_ -]+\\.*)+)*/?$", + }, + ], + "placeholder": "e.g. 
websites/resume", + "required": true, + "type": "text", + "warning": null, + }, + "source": { + "default": "nextcloud", + "description": "The service that contains your website files.", + "disabled": false, + "immutable": false, + "name": "Folder Location", + "required": false, + "type": "select", + "values": { + "filebrowser": "filebrowser", + "nextcloud": "nextcloud", + }, + "warning": null, + }, + }, + }, + }, + "warning": null, + }, + }, + "type": "object", + "uniqueBy": "name", + }, + "type": "list", + "warning": null, + }, +} +`; + +exports[`transformConfigSpec transformConfigSpec(nostr) 1`] = ` +{ + "nostr-relay": { + "default": "wss://relay.getalby.com/v1", + "description": "The Nostr Relay to use for Nostr Wallet Connect connections", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": false, + "maxLength": null, + "minLength": null, + "name": "Nostr Relay", + "patterns": [], + "placeholder": null, + "required": true, + "type": "text", + "warning": null, + }, +} +`; + +exports[`transformConfigSpec transformConfigSpec(searNXG) 1`] = ` +{ + "enable-metrics": { + "default": true, + "description": "Your SearXNG instance will collect anonymous stats about its own usage and performance. You can view these metrics by appending \`/stats\` or \`/stats/errors\` to your SearXNG URL.", + "disabled": false, + "immutable": false, + "name": "Enable Stats", + "type": "toggle", + "warning": null, + }, + "instance-name": { + "default": "My SearXNG Engine", + "description": "Enter a name for your SearXNG instance. 
This is the name that will be listed if you want to share your SearXNG engine publicly.", + "disabled": false, + "generate": null, + "immutable": false, + "inputmode": "text", + "masked": false, + "maxLength": null, + "minLength": null, + "name": "SearXNG Instance Name", + "patterns": [], + "placeholder": "Uncle Jim SearXNG Engine", + "required": true, + "type": "text", + "warning": null, + }, + "tor-url": { + "default": false, + "description": "Activates the utilization of a .onion address as the primary URL, particularly beneficial for publicly hosted instances over the Tor network.", + "disabled": false, + "immutable": false, + "name": "Enable Tor address as the base URL", + "type": "toggle", + "warning": null, + }, +} +`; diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts index a18539445..ffdb02988 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts @@ -1,9 +1,9 @@ -import { types as T, utils, EmVer } from "@start9labs/start-sdk" +import { ExtendedVersion, types as T, utils } from "@start9labs/start-sdk" import * as fs from "fs/promises" -import { PolyfillEffects } from "./polyfillEffects" -import { Duration, duration } from "../../../Models/Duration" -import { System } from "../../../Interfaces/System" +import { polyfillEffects } from "./polyfillEffects" +import { Duration, duration, fromDuration } from "../../../Models/Duration" +import { System, Procedure } from "../../../Interfaces/System" import { matchManifest, Manifest } from "./matchManifest" import * as childProcess from "node:child_process" import { DockerProcedureContainer } from "./DockerProcedureContainer" @@ -25,11 +25,31 @@ import { anyOf, deferred, Parser, + array, } from "ts-matches" -import { HostSystemStartOs } from "../../HostSystemStartOs" import { JsonPath, unNestPath } from "../../../Models/JsonPath" 
import { RpcResult, matchRpcResult } from "../../RpcListener" -import { InputSpec } from "@start9labs/start-sdk/cjs/sdk/lib/config/configTypes" +import { CT } from "@start9labs/start-sdk" +import { + AddSslOptions, + BindOptions, +} from "@start9labs/start-sdk/cjs/lib/osBindings" +import { + BindOptionsByProtocol, + Host, + MultiHost, +} from "@start9labs/start-sdk/cjs/lib/interfaces/Host" +import { ServiceInterfaceBuilder } from "@start9labs/start-sdk/cjs/lib/interfaces/ServiceInterfaceBuilder" +import { Effects } from "../../../Models/Effects" +import { + OldConfigSpec, + matchOldConfigSpec, + transformConfigSpec, + transformNewConfigToOld, + transformOldConfigToNew, +} from "./transformConfigSpec" +import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk" +import { StorePath } from "@start9labs/start-sdk/cjs/lib/store/PathBuilder" type Optional = A | undefined | null function todo(): never { @@ -38,8 +58,55 @@ function todo(): never { const execFile = promisify(childProcess.execFile) const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json" -const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js" -const EMBASSY_POINTER_PATH_PREFIX = "/embassyConfig" +export const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js" +const EMBASSY_POINTER_PATH_PREFIX = "/embassyConfig" as StorePath + +const matchSetResult = object( + { + "depends-on": dictionary([string, array(string)]), + dependsOn: dictionary([string, array(string)]), + signal: literals( + "SIGTERM", + "SIGHUP", + "SIGINT", + "SIGQUIT", + "SIGILL", + "SIGTRAP", + "SIGABRT", + "SIGBUS", + "SIGFPE", + "SIGKILL", + "SIGUSR1", + "SIGSEGV", + "SIGUSR2", + "SIGPIPE", + "SIGALRM", + "SIGSTKFLT", + "SIGCHLD", + "SIGCONT", + "SIGSTOP", + "SIGTSTP", + "SIGTTIN", + "SIGTTOU", + "SIGURG", + "SIGXCPU", + "SIGXFSZ", + "SIGVTALRM", + "SIGPROF", + "SIGWINCH", + "SIGIO", + "SIGPWR", + "SIGSYS", + "SIGINFO", + ), + }, + ["depends-on", "dependsOn"], +) + +type OldGetConfigRes = { + 
config?: null | Record + spec: OldConfigSpec +} export type PackagePropertiesV2 = { [name: string]: PackagePropertyObject | PackagePropertyString @@ -120,6 +187,7 @@ const matchProperties = object({ data: matchPackageProperties, }) +const DEFAULT_REGISTRY = "https://registry.start9.com" export class SystemForEmbassy implements System { currentRunning: MainLoop | undefined static async of(manifestLocation: string = MANIFEST_LOCATION) { @@ -142,11 +210,36 @@ export class SystemForEmbassy implements System { readonly manifest: Manifest, readonly moduleCode: Partial, ) {} + + async init(): Promise {} + + async exit(): Promise { + if (this.currentRunning) await this.currentRunning.clean() + delete this.currentRunning + } + + async start(effects: MainEffects): Promise { + if (!!this.currentRunning) return + + this.currentRunning = await MainLoop.of(this, effects) + } + callCallback(_callback: number, _args: any[]): void {} + async stop(): Promise { + const { currentRunning } = this + this.currentRunning?.clean() + delete this.currentRunning + if (currentRunning) { + await currentRunning.clean({ + timeout: fromDuration(this.manifest.main["sigterm-timeout"] || "30s"), + }) + } + } + async execute( - effects: HostSystemStartOs, + effects: Effects, options: { procedure: JsonPath - input: unknown + input?: unknown timeout?: number | undefined }, ): Promise { @@ -204,65 +297,172 @@ export class SystemForEmbassy implements System { } }) } - async exit(effects: HostSystemStartOs): Promise { - if (this.currentRunning) await this.currentRunning.clean() - delete this.currentRunning - } async _execute( - effects: HostSystemStartOs, + effects: Effects, options: { procedure: JsonPath - input: unknown + input?: unknown timeout?: number | undefined }, ): Promise { const input = options.input switch (options.procedure) { case "/backup/create": - return this.createBackup(effects) + return this.createBackup(effects, options.timeout || null) case "/backup/restore": - return 
this.restoreBackup(effects) + return this.restoreBackup(effects, options.timeout || null) case "/config/get": - return this.getConfig(effects) + return this.getConfig(effects, options.timeout || null) case "/config/set": - return this.setConfig(effects, input) + return this.setConfig(effects, input, options.timeout || null) case "/properties": - return this.properties(effects) + return this.properties(effects, options.timeout || null) case "/actions/metadata": return todo() case "/init": - return this.init(effects, string.optional().unsafeCast(input)) + return this.initProcedure( + effects, + string.optional().unsafeCast(input), + options.timeout || null, + ) case "/uninit": - return this.uninit(effects, string.optional().unsafeCast(input)) - case "/main/start": - return this.mainStart(effects) - case "/main/stop": - return this.mainStop(effects) + return this.uninit( + effects, + string.optional().unsafeCast(input), + options.timeout || null, + ) default: const procedures = unNestPath(options.procedure) switch (true) { case procedures[1] === "actions" && procedures[3] === "get": - return this.action(effects, procedures[2], input) + return this.action( + effects, + procedures[2], + input, + options.timeout || null, + ) case procedures[1] === "actions" && procedures[3] === "run": - return this.action(effects, procedures[2], input) + return this.action( + effects, + procedures[2], + input, + options.timeout || null, + ) case procedures[1] === "dependencies" && procedures[3] === "query": - return this.dependenciesAutoconfig(effects, procedures[2], input) + return null case procedures[1] === "dependencies" && procedures[3] === "update": - return this.dependenciesAutoconfig(effects, procedures[2], input) + return this.dependenciesAutoconfig( + effects, + procedures[2], + input, + options.timeout || null, + ) } } throw new Error(`Could not find the path for ${options.procedure}`) } - private async init( - effects: HostSystemStartOs, + async sandbox( + effects: Effects, + 
options: { procedure: Procedure; input: unknown; timeout?: number }, + ): Promise { + return this.execute(effects, options) + } + + private async initProcedure( + effects: Effects, previousVersion: Optional, + timeoutMs: number | null, ): Promise { - if (previousVersion) await this.migration(effects, previousVersion) + if (previousVersion) + await this.migration(effects, previousVersion, timeoutMs) await effects.setMainStatus({ status: "stopped" }) await this.exportActions(effects) + await this.exportNetwork(effects) } - async exportActions(effects: HostSystemStartOs) { + async exportNetwork(effects: Effects) { + for (const [id, interfaceValue] of Object.entries( + this.manifest.interfaces, + )) { + const host = new MultiHost({ effects, id }) + const internalPorts = new Set( + Object.values(interfaceValue["tor-config"]?.["port-mapping"] ?? {}) + .map(Number.parseInt) + .concat( + ...Object.values(interfaceValue["lan-config"] ?? {}).map( + (c) => c.internal, + ), + ) + .filter(Boolean), + ) + const bindings = Array.from(internalPorts).map< + [number, BindOptionsByProtocol] + >((port) => { + const lanPort = Object.entries(interfaceValue["lan-config"] ?? {}).find( + ([external, internal]) => internal.internal === port, + )?.[0] + const torPort = Object.entries( + interfaceValue["tor-config"]?.["port-mapping"] ?? 
{}, + ).find( + ([external, internal]) => Number.parseInt(internal) === port, + )?.[0] + let addSsl: AddSslOptions | null = null + if (lanPort) { + const lanPortNum = Number.parseInt(lanPort) + if (lanPortNum === 443) { + return [port, { protocol: "http", preferredExternalPort: 80 }] + } + addSsl = { + preferredExternalPort: lanPortNum, + alpn: { specified: [] }, + } + } + return [ + port, + { + secure: null, + preferredExternalPort: Number.parseInt( + torPort || lanPort || String(port), + ), + addSsl, + }, + ] + }) + + await Promise.all( + bindings.map(async ([internal, options]) => { + if (internal == null) { + return + } + if (options?.preferredExternalPort == null) { + return + } + const origin = await host.bindPort(internal, options) + await origin.export([ + new ServiceInterfaceBuilder({ + effects, + name: interfaceValue.name, + id: `${id}-${internal}`, + description: interfaceValue.description, + hasPrimary: false, + disabled: false, + type: + interfaceValue.ui && + (origin.scheme === "http" || origin.sslScheme === "https") + ? 
"ui" + : "api", + masked: false, + path: "", + schemeOverride: null, + search: {}, + username: null, + }), + ]) + }), + ) + } + } + async exportActions(effects: Effects) { const manifest = this.manifest if (!manifest.actions) return for (const [actionId, action] of Object.entries(manifest.actions)) { @@ -282,7 +482,7 @@ export class SystemForEmbassy implements System { name: action.name, description: action.description, warning: action.warning || null, - input: action["input-spec"] as InputSpec, + input: action["input-spec"] as CT.InputSpec, disabled: false, allowedStatuses, group: null, @@ -291,89 +491,94 @@ export class SystemForEmbassy implements System { } } private async uninit( - effects: HostSystemStartOs, + effects: Effects, nextVersion: Optional, + timeoutMs: number | null, ): Promise { // TODO Do a migration down if the version exists await effects.setMainStatus({ status: "stopped" }) } - private async mainStart(effects: HostSystemStartOs): Promise { - if (!!this.currentRunning) return - this.currentRunning = new MainLoop(this, effects) - } - private async mainStop( - effects: HostSystemStartOs, - options?: { timeout?: number }, - ): Promise { - const { currentRunning } = this - delete this.currentRunning - if (currentRunning) { - await currentRunning.clean({ - timeout: options?.timeout || this.manifest.main["sigterm-timeout"], - }) - } - return duration(this.manifest.main["sigterm-timeout"], "s") - } - private async createBackup(effects: HostSystemStartOs): Promise { + private async createBackup( + effects: Effects, + timeoutMs: number | null, + ): Promise { const backup = this.manifest.backup.create if (backup.type === "docker") { const container = await DockerProcedureContainer.of( effects, + this.manifest.id, backup, - this.manifest.volumes, + { + ...this.manifest.volumes, + BACKUP: { type: "backup", readonly: false }, + }, ) - await container.exec([backup.entrypoint, ...backup.args]) + await container.execFail([backup.entrypoint, ...backup.args], 
timeoutMs) } else { const moduleCode = await this.moduleCode - await moduleCode.createBackup?.( - new PolyfillEffects(effects, this.manifest), - ) + await moduleCode.createBackup?.(polyfillEffects(effects, this.manifest)) } } - private async restoreBackup(effects: HostSystemStartOs): Promise { + private async restoreBackup( + effects: Effects, + timeoutMs: number | null, + ): Promise { const restoreBackup = this.manifest.backup.restore if (restoreBackup.type === "docker") { const container = await DockerProcedureContainer.of( effects, + this.manifest.id, restoreBackup, - this.manifest.volumes, + { + ...this.manifest.volumes, + BACKUP: { type: "backup", readonly: true }, + }, + ) + await container.execFail( + [restoreBackup.entrypoint, ...restoreBackup.args], + timeoutMs, ) - await container.exec([restoreBackup.entrypoint, ...restoreBackup.args]) } else { const moduleCode = await this.moduleCode - await moduleCode.restoreBackup?.( - new PolyfillEffects(effects, this.manifest), - ) + await moduleCode.restoreBackup?.(polyfillEffects(effects, this.manifest)) } } - private async getConfig(effects: HostSystemStartOs): Promise { - return this.getConfigUncleaned(effects).then(removePointers) + private async getConfig( + effects: Effects, + timeoutMs: number | null, + ): Promise { + return this.getConfigUncleaned(effects, timeoutMs).then(convertToNewConfig) } private async getConfigUncleaned( - effects: HostSystemStartOs, - ): Promise { + effects: Effects, + timeoutMs: number | null, + ): Promise { const config = this.manifest.config?.get if (!config) return { spec: {} } if (config.type === "docker") { const container = await DockerProcedureContainer.of( effects, + this.manifest.id, config, this.manifest.volumes, ) // TODO: yaml return JSON.parse( ( - await container.exec([config.entrypoint, ...config.args]) + await container.execFail( + [config.entrypoint, ...config.args], + timeoutMs, + ) ).stdout.toString(), ) } else { const moduleCode = await this.moduleCode const 
method = moduleCode.getConfig if (!method) throw new Error("Expecting that the method getConfig exists") - return (await method(new PolyfillEffects(effects, this.manifest)).then( + return (await method(polyfillEffects(effects, this.manifest)).then( (x) => { - if ("result" in x) return x.result + if ("result" in x) return JSON.parse(JSON.stringify(x.result)) if ("error" in x) throw new Error("Error getting config: " + x.error) throw new Error("Error getting config: " + x["error-code"][1]) }, @@ -381,71 +586,120 @@ export class SystemForEmbassy implements System { } } private async setConfig( - effects: HostSystemStartOs, + effects: Effects, newConfigWithoutPointers: unknown, - ): Promise { - const newConfig = structuredClone(newConfigWithoutPointers) - await updateConfig( - effects, - await this.getConfigUncleaned(effects).then((x) => x.spec), - newConfig, + timeoutMs: number | null, + ): Promise { + const spec = await this.getConfigUncleaned(effects, timeoutMs).then( + (x) => x.spec, ) + const newConfig = transformNewConfigToOld( + spec, + structuredClone(newConfigWithoutPointers as Record), + ) + await updateConfig(effects, this.manifest, spec, newConfig) + await effects.store.set({ + path: EMBASSY_POINTER_PATH_PREFIX, + value: newConfig, + }) const setConfigValue = this.manifest.config?.set - if (!setConfigValue) return { signal: "SIGTERM", "depends-on": {} } + if (!setConfigValue) return if (setConfigValue.type === "docker") { const container = await DockerProcedureContainer.of( effects, + this.manifest.id, setConfigValue, this.manifest.volumes, ) - return JSON.parse( - ( - await container.exec([ - setConfigValue.entrypoint, - ...setConfigValue.args, - JSON.stringify(newConfig), - ]) - ).stdout.toString(), + const answer = matchSetResult.unsafeCast( + JSON.parse( + ( + await container.execFail( + [ + setConfigValue.entrypoint, + ...setConfigValue.args, + JSON.stringify(newConfig), + ], + timeoutMs, + ) + ).stdout.toString(), + ), ) + const dependsOn = 
answer["depends-on"] ?? answer.dependsOn ?? {} + await this.setConfigSetConfig(effects, dependsOn) + return } else if (setConfigValue.type === "script") { const moduleCode = await this.moduleCode const method = moduleCode.setConfig if (!method) throw new Error("Expecting that the method setConfig exists") - return await method( - new PolyfillEffects(effects, this.manifest), - newConfig as U.Config, - ).then((x): T.SetResult => { - if ("result" in x) - return { - "depends-on": x.result["depends-on"], - signal: x.result.signal === "SIGEMT" ? "SIGTERM" : x.result.signal, - } - if ("error" in x) throw new Error("Error getting config: " + x.error) - throw new Error("Error getting config: " + x["error-code"][1]) - }) - } else { - return { - "depends-on": {}, - signal: "SIGTERM", - } + + const answer = matchSetResult.unsafeCast( + await method( + polyfillEffects(effects, this.manifest), + newConfig as U.Config, + ).then((x): T.SetResult => { + if ("result" in x) + return { + dependsOn: x.result["depends-on"], + signal: + x.result.signal === "SIGEMT" ? "SIGTERM" : x.result.signal, + } + if ("error" in x) throw new Error("Error getting config: " + x.error) + throw new Error("Error getting config: " + x["error-code"][1]) + }), + ) + const dependsOn = answer["depends-on"] ?? answer.dependsOn ?? 
{} + await this.setConfigSetConfig(effects, dependsOn) + return } } + private async setConfigSetConfig( + effects: Effects, + dependsOn: { [x: string]: readonly string[] }, + ) { + await effects.setDependencies({ + dependencies: Object.entries(dependsOn).flatMap(([key, value]) => { + const dependency = this.manifest.dependencies?.[key] + if (!dependency) return [] + const versionRange = dependency.version + const registryUrl = DEFAULT_REGISTRY + const kind = "running" + return [ + { + id: key, + versionRange, + registryUrl, + kind, + healthChecks: [...value], + }, + ] + }), + }) + } + private async migration( - effects: HostSystemStartOs, + effects: Effects, fromVersion: string, + timeoutMs: number | null, ): Promise { - const fromEmver = EmVer.from(fromVersion) - const currentEmver = EmVer.from(this.manifest.version) + const fromEmver = ExtendedVersion.parseEmver(fromVersion) + const currentEmver = ExtendedVersion.parseEmver(this.manifest.version) if (!this.manifest.migrations) return { configured: true } const fromMigration = Object.entries(this.manifest.migrations.from) - .map(([version, procedure]) => [EmVer.from(version), procedure] as const) + .map( + ([version, procedure]) => + [ExtendedVersion.parseEmver(version), procedure] as const, + ) .find( ([versionEmver, procedure]) => versionEmver.greaterThan(fromEmver) && versionEmver.lessThanOrEqual(currentEmver), ) const toMigration = Object.entries(this.manifest.migrations.to) - .map(([version, procedure]) => [EmVer.from(version), procedure] as const) + .map( + ([version, procedure]) => + [ExtendedVersion.parseEmver(version), procedure] as const, + ) .find( ([versionEmver, procedure]) => versionEmver.greaterThan(fromEmver) && @@ -462,16 +716,20 @@ export class SystemForEmbassy implements System { if (procedure.type === "docker") { const container = await DockerProcedureContainer.of( effects, + this.manifest.id, procedure, this.manifest.volumes, ) return JSON.parse( ( - await container.exec([ - 
procedure.entrypoint, - ...procedure.args, - JSON.stringify(fromVersion), - ]) + await container.execFail( + [ + procedure.entrypoint, + ...procedure.args, + JSON.stringify(fromVersion), + ], + timeoutMs, + ) ).stdout.toString(), ) } else if (procedure.type === "script") { @@ -480,7 +738,7 @@ export class SystemForEmbassy implements System { if (!method) throw new Error("Expecting that the method migration exists") return (await method( - new PolyfillEffects(effects, this.manifest), + polyfillEffects(effects, this.manifest), fromVersion as string, ).then((x) => { if ("result" in x) return x.result @@ -492,24 +750,26 @@ export class SystemForEmbassy implements System { return { configured: true } } private async properties( - effects: HostSystemStartOs, - ): Promise> { + effects: Effects, + timeoutMs: number | null, + ): Promise> { // TODO BLU-J set the properties ever so often const setConfigValue = this.manifest.properties if (!setConfigValue) throw new Error("There is no properties") if (setConfigValue.type === "docker") { const container = await DockerProcedureContainer.of( effects, + this.manifest.id, setConfigValue, this.manifest.volumes, ) const properties = matchProperties.unsafeCast( JSON.parse( ( - await container.exec([ - setConfigValue.entrypoint, - ...setConfigValue.args, - ]) + await container.execFail( + [setConfigValue.entrypoint, ...setConfigValue.args], + timeoutMs, + ) ).stdout.toString(), ), ) @@ -520,7 +780,7 @@ export class SystemForEmbassy implements System { if (!method) throw new Error("Expecting that the method properties exists") const properties = matchProperties.unsafeCast( - await method(new PolyfillEffects(effects, this.manifest)).then((x) => { + await method(polyfillEffects(effects, this.manifest)).then((x) => { if ("result" in x) return x.result if ("error" in x) throw new Error("Error getting config: " + x.error) throw new Error("Error getting config: " + x["error-code"][1]) @@ -530,62 +790,31 @@ export class SystemForEmbassy 
implements System { } throw new Error(`Unknown type in the fetch properties: ${setConfigValue}`) } - private async health( - effects: HostSystemStartOs, - healthId: string, - timeSinceStarted: unknown, - ): Promise { - const healthProcedure = this.manifest["health-checks"][healthId] - if (!healthProcedure) return - if (healthProcedure.type === "docker") { - const container = await DockerProcedureContainer.of( - effects, - healthProcedure, - this.manifest.volumes, - ) - return JSON.parse( - ( - await container.exec([ - healthProcedure.entrypoint, - ...healthProcedure.args, - JSON.stringify(timeSinceStarted), - ]) - ).stdout.toString(), - ) - } else if (healthProcedure.type === "script") { - const moduleCode = await this.moduleCode - const method = moduleCode.health?.[healthId] - if (!method) throw new Error("Expecting that the method health exists") - await method( - new PolyfillEffects(effects, this.manifest), - Number(timeSinceStarted), - ).then((x) => { - if ("result" in x) return x.result - if ("error" in x) throw new Error("Error getting config: " + x.error) - throw new Error("Error getting config: " + x["error-code"][1]) - }) - } - } private async action( - effects: HostSystemStartOs, + effects: Effects, actionId: string, formData: unknown, + timeoutMs: number | null, ): Promise { const actionProcedure = this.manifest.actions?.[actionId]?.implementation if (!actionProcedure) return { message: "Action not found", value: null } if (actionProcedure.type === "docker") { const container = await DockerProcedureContainer.of( effects, + this.manifest.id, actionProcedure, this.manifest.volumes, ) return JSON.parse( ( - await container.exec([ - actionProcedure.entrypoint, - ...actionProcedure.args, - JSON.stringify(formData), - ]) + await container.execFail( + [ + actionProcedure.entrypoint, + ...actionProcedure.args, + JSON.stringify(formData), + ], + timeoutMs, + ) ).stdout.toString(), ) } else { @@ -593,7 +822,7 @@ export class SystemForEmbassy implements System { 
const method = moduleCode.action?.[actionId] if (!method) throw new Error("Expecting that the method action exists") return (await method( - new PolyfillEffects(effects, this.manifest), + polyfillEffects(effects, this.manifest), formData as any, ).then((x) => { if ("result" in x) return x.result @@ -602,61 +831,22 @@ export class SystemForEmbassy implements System { })) as any } } - private async dependenciesCheck( - effects: HostSystemStartOs, - id: string, - oldConfig: unknown, - ): Promise { - const actionProcedure = this.manifest.dependencies?.[id]?.config?.check - if (!actionProcedure) return { message: "Action not found", value: null } - if (actionProcedure.type === "docker") { - const container = await DockerProcedureContainer.of( - effects, - actionProcedure, - this.manifest.volumes, - ) - return JSON.parse( - ( - await container.exec([ - actionProcedure.entrypoint, - ...actionProcedure.args, - JSON.stringify(oldConfig), - ]) - ).stdout.toString(), - ) - } else if (actionProcedure.type === "script") { - const moduleCode = await this.moduleCode - const method = moduleCode.dependencies?.[id]?.check - if (!method) - throw new Error( - `Expecting that the method dependency check ${id} exists`, - ) - return (await method( - new PolyfillEffects(effects, this.manifest), - oldConfig as any, - ).then((x) => { - if ("result" in x) return x.result - if ("error" in x) throw new Error("Error getting config: " + x.error) - throw new Error("Error getting config: " + x["error-code"][1]) - })) as any - } else { - return {} - } - } private async dependenciesAutoconfig( - effects: HostSystemStartOs, + effects: Effects, id: string, - oldConfig: unknown, + input: unknown, + timeoutMs: number | null, ): Promise { + const oldConfig = object({ remoteConfig: any }).unsafeCast( + input, + ).remoteConfig + // TODO: docker const moduleCode = await this.moduleCode const method = moduleCode.dependencies?.[id]?.autoConfigure - if (!method) - throw new Error( - `Expecting that the method 
dependency autoConfigure ${id} exists`, - ) + if (!method) return return (await method( - new PolyfillEffects(effects, this.manifest), - oldConfig as any, + polyfillEffects(effects, this.manifest), + oldConfig, ).then((x) => { if ("result" in x) return x.result if ("error" in x) throw new Error("Error getting config: " + x.error) @@ -664,14 +854,6 @@ export class SystemForEmbassy implements System { })) as any } } -async function removePointers(value: T.ConfigRes): Promise { - const startingSpec = structuredClone(value.spec) - const config = - value.config && cleanConfigFromPointers(value.config, startingSpec) - const spec = cleanSpecOfPointers(startingSpec) - - return { config, spec } -} const matchPointer = object({ type: literal("pointer"), @@ -731,76 +913,122 @@ type CleanConfigFromPointers = } : null -function cleanConfigFromPointers( - config: C, - spec: S, -): CleanConfigFromPointers { - const newConfig = {} as CleanConfigFromPointers - - if (!(object.test(config) && object.test(spec)) || newConfig == null) - return null as CleanConfigFromPointers - - for (const key of Object.keys(spec)) { - if (!isKeyOf(key, spec)) continue - if (!isKeyOf(key, config)) continue - const partSpec = spec[key] - if (matchPointer.test(partSpec)) continue - ;(newConfig as any)[key] = matchSpec.test(partSpec) - ? 
cleanConfigFromPointers(config[key], partSpec.spec) - : config[key] - } - return newConfig as CleanConfigFromPointers -} - async function updateConfig( - effects: HostSystemStartOs, - spec: unknown, - mutConfigValue: unknown, + effects: Effects, + manifest: Manifest, + spec: OldConfigSpec, + mutConfigValue: Record, ) { - if (!dictionary([string, unknown]).test(spec)) return - if (!dictionary([string, unknown]).test(mutConfigValue)) return for (const key in spec) { const specValue = spec[key] - const newConfigValue = mutConfigValue[key] - if (matchSpec.test(specValue)) { - const updateObject = { spec: null } - await updateConfig(effects, { spec: specValue.spec }, updateObject) - mutConfigValue[key] = updateObject.spec - } - if ( - matchVariants.test(specValue) && - object({ tag: object({ id: string }) }).test(newConfigValue) && - newConfigValue.tag.id in specValue.variants + if (specValue.type === "object") { + await updateConfig( + effects, + manifest, + specValue.spec as OldConfigSpec, + mutConfigValue[key] as Record, + ) + } else if (specValue.type === "list" && specValue.subtype === "object") { + const list = mutConfigValue[key] as unknown[] + for (let val of list) { + await updateConfig( + effects, + manifest, + { ...(specValue.spec as any), type: "object" as const }, + val as Record, + ) + } + } else if (specValue.type === "union") { + const union = mutConfigValue[key] as Record + await updateConfig( + effects, + manifest, + specValue.variants[union[specValue.tag.id] as string] as OldConfigSpec, + mutConfigValue[key] as Record, + ) + } else if ( + specValue.type === "pointer" && + specValue.subtype === "package" ) { - // Not going to do anything on the variants... 
- } - if (!matchPointer.test(specValue)) continue - if (matchPointerConfig.test(specValue)) { - const configValue = (await effects.store.get({ - packageId: specValue["package-id"], - callback() {}, - path: `${EMBASSY_POINTER_PATH_PREFIX}${specValue.selector}` as any, - })) as any - mutConfigValue[key] = configValue - } - if (matchPointerPackage.test(specValue)) { - if (specValue.target === "tor-key") - throw new Error("This service uses an unsupported target TorKey") - const filled = await utils - .getServiceInterface(effects, { + if (specValue.target === "config") { + const jp = require("jsonpath") + const remoteConfig = await effects.store.get({ packageId: specValue["package-id"], - id: specValue.interface, + callback: () => effects.restart(), + path: EMBASSY_POINTER_PATH_PREFIX, }) - .once() - .catch(() => null) - - mutConfigValue[key] = - filled === null - ? "" - : specValue.target === "lan-address" - ? filled.addressInfo.localHostnames[0] - : filled.addressInfo.onionHostnames[0] + console.debug(remoteConfig) + const configValue = specValue.multi + ? jp.query(remoteConfig, specValue.selector) + : jp.query(remoteConfig, specValue.selector, 1)[0] + mutConfigValue[key] = configValue === undefined ? null : configValue + } else if (specValue.target === "tor-key") { + throw new Error("This service uses an unsupported target TorKey") + } else { + const specInterface = specValue.interface + const serviceInterfaceId = extractServiceInterfaceId( + manifest, + specInterface, + ) + if (!serviceInterfaceId) { + mutConfigValue[key] = "" + return + } + const filled = await utils + .getServiceInterface(effects, { + packageId: specValue["package-id"], + id: serviceInterfaceId, + }) + .once() + .catch((x) => { + console.error("Could not get the service interface", x) + return null + }) + const catchFn = (fn: () => X) => { + try { + return fn() + } catch (e) { + return undefined + } + } + const url: string = + filled === null || filled.addressInfo === null + ? 
"" + : catchFn(() => + utils.hostnameInfoToAddress( + specValue.target === "lan-address" + ? filled.addressInfo!.localHostnames[0] || + filled.addressInfo!.onionHostnames[0] + : filled.addressInfo!.onionHostnames[0] || + filled.addressInfo!.localHostnames[0], + ), + ) || "" + mutConfigValue[key] = url + } } } } +function extractServiceInterfaceId(manifest: Manifest, specInterface: string) { + const internalPort = + Object.entries( + manifest.interfaces[specInterface]?.["lan-config"] || {}, + )[0]?.[1]?.internal || + Object.entries( + manifest.interfaces[specInterface]?.["tor-config"]?.["port-mapping"] || + {}, + )?.[0]?.[1] + + if (!internalPort) return null + const serviceInterfaceId = `${specInterface}-${internalPort}` + return serviceInterfaceId +} +async function convertToNewConfig( + value: OldGetConfigRes, +): Promise { + const valueSpec: OldConfigSpec = matchOldConfigSpec.unsafeCast(value.spec) + const spec = transformConfigSpec(valueSpec) + if (!value.config) return { spec, config: null } + const config = transformOldConfigToNew(valueSpec, value.config) + return { spec, config } +} diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts index 9b70f884b..8ce6cabbc 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts @@ -57,6 +57,7 @@ export const matchManifest = object( matchProcedure, object({ name: string, + ["success-message"]: string, }), ), ]), @@ -68,13 +69,25 @@ export const matchManifest = object( volumes: dictionary([string, matchVolume]), interfaces: dictionary([ string, - object({ - name: string, - "tor-config": object({}), - "lan-config": object({}), - ui: boolean, - protocols: array(string), - }), + object( + { + name: string, + description: string, + "tor-config": object({ + "port-mapping": dictionary([string, string]), + }), + 
"lan-config": dictionary([ + string, + object({ + ssl: boolean, + internal: number, + }), + ]), + ui: boolean, + protocols: array(string), + }, + ["lan-config", "tor-config"], + ), ]), backup: object({ create: matchProcedure, diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/oldEmbassyTypes.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/oldEmbassyTypes.ts index 072a1171c..73d130c9a 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/oldEmbassyTypes.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/oldEmbassyTypes.ts @@ -99,17 +99,8 @@ export type Effects = { /** Sandbox mode lets us read but not write */ is_sandboxed(): boolean + // Does a volume and path exist? exists(input: { volumeId: string; path: string }): Promise - bindLocal(options: { - internalPort: number - name: string - externalPort: number - }): Promise - bindTor(options: { - internalPort: number - name: string - externalPort: number - }): Promise fetch( url: string, @@ -129,6 +120,10 @@ export type Effects = { /// Returns the body as a json json(): Promise }> + diskUsage(options?: { + volumeId: string + path: string + }): Promise<{ used: number; total: number }> runRsync(options: { srcVolume: string diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/polyfillEffects.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/polyfillEffects.ts index b21a8eede..6481a7a56 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/polyfillEffects.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/polyfillEffects.ts @@ -3,213 +3,434 @@ import * as oet from "./oldEmbassyTypes" import { Volume } from "../../../Models/Volume" import * as child_process from "child_process" import { promisify } from "util" -import { startSdk } from "@start9labs/start-sdk" -import { HostSystemStartOs } from "../../HostSystemStartOs" +import { daemons, startSdk, T } from "@start9labs/start-sdk" import "isomorphic-fetch" 
import { Manifest } from "./matchManifest" - -const execFile = promisify(child_process.execFile) - -export class PolyfillEffects implements oet.Effects { - constructor( - readonly effects: HostSystemStartOs, - private manifest: Manifest, - ) {} - async writeFile(input: { - path: string - volumeId: string - toWrite: string - }): Promise { - await fs.writeFile( - new Volume(input.volumeId, input.path).path, - input.toWrite, - ) - } - async readFile(input: { volumeId: string; path: string }): Promise { - return ( - await fs.readFile(new Volume(input.volumeId, input.path).path) - ).toString() - } - async metadata(input: { - volumeId: string - path: string - }): Promise { - const stats = await fs.stat(new Volume(input.volumeId, input.path).path) - return { - fileType: stats.isFile() ? "file" : "directory", - gid: stats.gid, - uid: stats.uid, - mode: stats.mode, - isDir: stats.isDirectory(), - isFile: stats.isFile(), - isSymlink: stats.isSymbolicLink(), - len: stats.size, - readonly: (stats.mode & 0o200) > 0, - } - } - async createDir(input: { volumeId: string; path: string }): Promise { - const path = new Volume(input.volumeId, input.path).path - await fs.mkdir(path, { recursive: true }) - return path - } - async readDir(input: { volumeId: string; path: string }): Promise { - return fs.readdir(new Volume(input.volumeId, input.path).path) - } - async removeDir(input: { volumeId: string; path: string }): Promise { - const path = new Volume(input.volumeId, input.path).path - await fs.rmdir(new Volume(input.volumeId, input.path).path, { - recursive: true, - }) - return path - } - removeFile(input: { volumeId: string; path: string }): Promise { - return fs.rm(new Volume(input.volumeId, input.path).path) - } - async writeJsonFile(input: { - volumeId: string - path: string - toWrite: Record - }): Promise { - await fs.writeFile( - new Volume(input.volumeId, input.path).path, - JSON.stringify(input.toWrite), - ) - } - async readJsonFile(input: { - volumeId: string - path: string 
- }): Promise> { - return JSON.parse( - ( - await fs.readFile(new Volume(input.volumeId, input.path).path) - ).toString(), - ) - } - runCommand({ - command, - args, - timeoutMillis, - }: { - command: string - args?: string[] | undefined - timeoutMillis?: number | undefined - }): Promise> { - return startSdk - .runCommand( - this.effects, - this.manifest.main.image, - [command, ...(args || [])], - {}, +import { DockerProcedureContainer } from "./DockerProcedureContainer" +import * as cp from "child_process" +import { Effects } from "../../../Models/Effects" +export const execFile = promisify(cp.execFile) +export const polyfillEffects = ( + effects: Effects, + manifest: Manifest, +): oet.Effects => { + const self = { + effects, + manifest, + async writeFile(input: { + path: string + volumeId: string + toWrite: string + }): Promise { + await fs.writeFile( + new Volume(input.volumeId, input.path).path, + input.toWrite, ) - .then((x: any) => ({ - stderr: x.stderr.toString(), - stdout: x.stdout.toString(), - })) - .then((x) => (!!x.stderr ? 
{ error: x.stderr } : { result: x.stdout })) - } - runDaemon(input: { command: string; args?: string[] | undefined }): { - wait(): Promise> - term(): Promise - } { - throw new Error("Method not implemented.") - } - chown(input: { volumeId: string; path: string; uid: string }): Promise { - throw new Error("Method not implemented.") - } - chmod(input: { - volumeId: string - path: string - mode: string - }): Promise { - throw new Error("Method not implemented.") - } - sleep(timeMs: number): Promise { - return new Promise((resolve) => setTimeout(resolve, timeMs)) - } - trace(whatToPrint: string): void { - console.trace(whatToPrint) - } - warn(whatToPrint: string): void { - console.warn(whatToPrint) - } - error(whatToPrint: string): void { - console.error(whatToPrint) - } - debug(whatToPrint: string): void { - console.debug(whatToPrint) - } - info(whatToPrint: string): void { - console.log(false) - } - is_sandboxed(): boolean { - return false - } - exists(input: { volumeId: string; path: string }): Promise { - return this.metadata(input) - .then(() => true) - .catch(() => false) - } - bindLocal(options: { - internalPort: number - name: string - externalPort: number - }): Promise { - throw new Error("Method not implemented.") - } - bindTor(options: { - internalPort: number - name: string - externalPort: number - }): Promise { - throw new Error("Method not implemented.") - } - async fetch( - url: string, - options?: - | { - method?: - | "GET" - | "POST" - | "PUT" - | "DELETE" - | "HEAD" - | "PATCH" - | undefined - headers?: Record | undefined - body?: string | undefined + }, + async readFile(input: { volumeId: string; path: string }): Promise { + return ( + await fs.readFile(new Volume(input.volumeId, input.path).path) + ).toString() + }, + async metadata(input: { + volumeId: string + path: string + }): Promise { + const stats = await fs.stat(new Volume(input.volumeId, input.path).path) + return { + fileType: stats.isFile() ? 
"file" : "directory", + gid: stats.gid, + uid: stats.uid, + mode: stats.mode, + isDir: stats.isDirectory(), + isFile: stats.isFile(), + isSymlink: stats.isSymbolicLink(), + len: stats.size, + readonly: (stats.mode & 0o200) > 0, + } + }, + async createDir(input: { + volumeId: string + path: string + }): Promise { + const path = new Volume(input.volumeId, input.path).path + await fs.mkdir(path, { recursive: true }) + return path + }, + async readDir(input: { + volumeId: string + path: string + }): Promise { + return fs.readdir(new Volume(input.volumeId, input.path).path) + }, + async removeDir(input: { + volumeId: string + path: string + }): Promise { + const path = new Volume(input.volumeId, input.path).path + await fs.rmdir(new Volume(input.volumeId, input.path).path, { + recursive: true, + }) + return path + }, + removeFile(input: { volumeId: string; path: string }): Promise { + return fs.rm(new Volume(input.volumeId, input.path).path) + }, + async writeJsonFile(input: { + volumeId: string + path: string + toWrite: Record + }): Promise { + await fs.writeFile( + new Volume(input.volumeId, input.path).path, + JSON.stringify(input.toWrite), + ) + }, + async readJsonFile(input: { + volumeId: string + path: string + }): Promise> { + return JSON.parse( + ( + await fs.readFile(new Volume(input.volumeId, input.path).path) + ).toString(), + ) + }, + runCommand({ + command, + args, + timeoutMillis, + }: { + command: string + args?: string[] | undefined + timeoutMillis?: number | undefined + }): Promise> { + return startSdk + .runCommand( + effects, + { id: manifest.main.image }, + [command, ...(args || [])], + {}, + ) + .then((x: any) => ({ + stderr: x.stderr.toString(), + stdout: x.stdout.toString(), + })) + .then((x: any) => + !!x.stderr ? 
{ error: x.stderr } : { result: x.stdout }, + ) + }, + runDaemon(input: { command: string; args?: string[] | undefined }): { + wait(): Promise> + term(): Promise + } { + const dockerProcedureContainer = DockerProcedureContainer.of( + effects, + manifest.id, + manifest.main, + manifest.volumes, + ) + const daemon = dockerProcedureContainer.then((dockerProcedureContainer) => + daemons.runCommand()( + effects, + { id: manifest.main.image }, + [input.command, ...(input.args || [])], + { + overlay: dockerProcedureContainer.overlay, + }, + ), + ) + return { + wait: () => + daemon.then((daemon) => + daemon.wait().then(() => { + return { result: "" } + }), + ), + term: () => daemon.then((daemon) => daemon.term()), + } + }, + async chown(input: { + volumeId: string + path: string + uid: string + }): Promise { + await startSdk + .runCommand( + effects, + { id: manifest.main.image }, + ["chown", "--recursive", input.uid, `/drive/${input.path}`], + { + mounts: [ + { + path: "/drive", + options: { + type: "volume", + id: input.volumeId, + subpath: null, + readonly: false, + }, + }, + ], + }, + ) + .then((x: any) => ({ + stderr: x.stderr.toString(), + stdout: x.stdout.toString(), + })) + .then((x: any) => { + if (!!x.stderr) { + throw new Error(x.stderr) + } + }) + return null + }, + async chmod(input: { + volumeId: string + path: string + mode: string + }): Promise { + await startSdk + .runCommand( + effects, + { id: manifest.main.image }, + ["chmod", "--recursive", input.mode, `/drive/${input.path}`], + { + mounts: [ + { + path: "/drive", + options: { + type: "volume", + id: input.volumeId, + subpath: null, + readonly: false, + }, + }, + ], + }, + ) + .then((x: any) => ({ + stderr: x.stderr.toString(), + stdout: x.stdout.toString(), + })) + .then((x: any) => { + if (!!x.stderr) { + throw new Error(x.stderr) + } + }) + return null + }, + sleep(timeMs: number): Promise { + return new Promise((resolve) => setTimeout(resolve, timeMs)) + }, + trace(whatToPrint: string): void { + 
console.trace(whatToPrint) + }, + warn(whatToPrint: string): void { + console.warn(whatToPrint) + }, + error(whatToPrint: string): void { + console.error(whatToPrint) + }, + debug(whatToPrint: string): void { + console.debug(whatToPrint) + }, + info(whatToPrint: string): void { + console.log(whatToPrint) + }, + is_sandboxed(): boolean { + return false + }, + exists(input: { volumeId: string; path: string }): Promise { + return self + .metadata(input) + .then(() => true) + .catch(() => false) + }, + async fetch( + url: string, + options?: + | { + method?: + | "GET" + | "POST" + | "PUT" + | "DELETE" + | "HEAD" + | "PATCH" + | undefined + headers?: Record | undefined + body?: string | undefined + } + | undefined, + ): Promise<{ + method: string + ok: boolean + status: number + headers: Record + body?: string | null | undefined + text(): Promise + json(): Promise + }> { + const fetched = await fetch(url, options) + return { + method: fetched.type, + ok: fetched.ok, + status: fetched.status, + headers: Object.fromEntries(fetched.headers.entries()), + body: await fetched.text(), + text: () => fetched.text(), + json: () => fetched.json(), + } + }, + + runRsync(rsyncOptions: { + srcVolume: string + dstVolume: string + srcPath: string + dstPath: string + options: oet.BackupOptions + }): { + id: () => Promise + wait: () => Promise + progress: () => Promise + } { + let secondRun: ReturnType | undefined + let firstRun = self._runRsync(rsyncOptions) + let waitValue = firstRun.wait().then((x) => { + secondRun = self._runRsync(rsyncOptions) + return secondRun.wait() + }) + const id = async () => { + return secondRun?.id?.() ??
firstRun.id() + } + const wait = () => waitValue + const progress = async () => { + const secondProgress = secondRun?.progress?.() + if (secondProgress) { + return (await secondProgress) / 2.0 + 0.5 } - | undefined, - ): Promise<{ - method: string - ok: boolean - status: number - headers: Record - body?: string | null | undefined - text(): Promise - json(): Promise - }> { - const fetched = await fetch(url, options) - return { - method: fetched.type, - ok: fetched.ok, - status: fetched.status, - headers: Object.fromEntries(fetched.headers.entries()), - body: await fetched.text(), - text: () => fetched.text(), - json: () => fetched.json(), - } - } - runRsync(options: { - srcVolume: string - dstVolume: string - srcPath: string - dstPath: string - options: oet.BackupOptions - }): { - id: () => Promise - wait: () => Promise - progress: () => Promise - } { - throw new Error("Method not implemented.") + return (await firstRun.progress()) / 2.0 + } + return { id, wait, progress } + }, + _runRsync(rsyncOptions: { + srcVolume: string + dstVolume: string + srcPath: string + dstPath: string + options: oet.BackupOptions + }): { + id: () => Promise + wait: () => Promise + progress: () => Promise + } { + const { srcVolume, dstVolume, srcPath, dstPath, options } = rsyncOptions + const command = "rsync" + const args: string[] = [] + if (options.delete) { + args.push("--delete") + } + if (options.force) { + args.push("--force") + } + if (options.ignoreExisting) { + args.push("--ignore-existing") + } + for (const exclude of options.exclude) { + args.push(`--exclude=${exclude}`) + } + args.push("-actAXH") + args.push("--info=progress2") + args.push("--no-inc-recursive") + args.push(new Volume(srcVolume, srcPath).path) + args.push(new Volume(dstVolume, dstPath).path) + const spawned = child_process.spawn(command, args, { detached: true }) + let percentage = 0.0 + spawned.stdout.on("data", (data: unknown) => { + const lines = String(data).replace("\r", "\n").split("\n") + for (const 
line of lines) { + const parsed = /([0-9.]+)%/.exec(line)?.[1] + if (!parsed) continue + percentage = Number.parseFloat(parsed) + } + }) + + spawned.stderr.on("data", (data: unknown) => { + console.error(String(data)) + }) + + const id = async () => { + const pid = spawned.pid + if (pid === undefined) { + throw new Error("rsync process has no pid") + } + return String(pid) + } + const waitPromise = new Promise((resolve, reject) => { + spawned.on("exit", (code: any) => { + if (code === 0) { + resolve(null) + } else { + reject(new Error(`rsync exited with code ${code}`)) + } + }) + }) + const wait = () => waitPromise + const progress = () => Promise.resolve(percentage) + return { id, wait, progress } + }, + async diskUsage( + options?: { volumeId: string; path: string } | undefined, + ): Promise<{ used: number; total: number }> { + const output = await execFile("df", ["--block-size=1", "-P", "/"]) + .then((x: any) => ({ + stderr: x.stderr.toString(), + stdout: x.stdout.toString(), + })) + .then((x: any) => { + if (!!x.stderr) { + throw new Error(x.stderr) + } + return parseDfOutput(x.stdout) + }) + if (!!options) { + const used = await execFile("du", [ + "-s", + "--block-size=1", + "-P", + new Volume(options.volumeId, options.path).path, + ]) + .then((x: any) => ({ + stderr: x.stderr.toString(), + stdout: x.stdout.toString(), + })) + .then((x: any) => { + if (!!x.stderr) { + throw new Error(x.stderr) + } + return Number.parseInt(x.stdout.split(/\s+/)[0]) + }) + return { + ...output, + used, + } + } + return output + }, } + return self +} + +function parseDfOutput(output: string): { used: number; total: number } { + const lines = output + .split("\n") + .filter((x) => x.length) + .map((x) => x.split(/\s+/)) + const index = lines.splice(0, 1)[0].map((x) => x.toLowerCase()) + const usedIndex = index.indexOf("used") + const availableIndex = index.indexOf("available") + const used = lines.map((x) => Number.parseInt(x[usedIndex]))[0] || 0 + const total = lines.map((x) => 
Number.parseInt(x[availableIndex]))[0] || 0 + return { used, total } } diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.test.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.test.ts new file mode 100644 index 000000000..79caef377 --- /dev/null +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.test.ts @@ -0,0 +1,33 @@ +import { matchOldConfigSpec, transformConfigSpec } from "./transformConfigSpec" +import fixtureEmbasyPagesConfig from "./__fixtures__/embasyPagesConfig" +import searNXG from "./__fixtures__/searNXG" +import bitcoind from "./__fixtures__/bitcoind" +import nostr from "./__fixtures__/nostr" + +describe("transformConfigSpec", () => { + test("matchOldConfigSpec(embassyPages.homepage.variants[web-page])", () => { + matchOldConfigSpec.unsafeCast( + fixtureEmbasyPagesConfig.homepage.variants["web-page"], + ) + }) + test("matchOldConfigSpec(embassyPages)", () => { + matchOldConfigSpec.unsafeCast(fixtureEmbasyPagesConfig) + }) + test("transformConfigSpec(embassyPages)", () => { + const spec = matchOldConfigSpec.unsafeCast(fixtureEmbasyPagesConfig) + expect(transformConfigSpec(spec)).toMatchSnapshot() + }) + + test("transformConfigSpec(searNXG)", () => { + const spec = matchOldConfigSpec.unsafeCast(searNXG) + expect(transformConfigSpec(spec)).toMatchSnapshot() + }) + test("transformConfigSpec(bitcoind)", () => { + const spec = matchOldConfigSpec.unsafeCast(bitcoind) + expect(transformConfigSpec(spec)).toMatchSnapshot() + }) + test("transformConfigSpec(nostr)", () => { + const spec = matchOldConfigSpec.unsafeCast(nostr) + expect(transformConfigSpec(spec)).toMatchSnapshot() + }) +}) diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.ts new file mode 100644 index 000000000..706e0b941 --- /dev/null +++ 
b/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.ts @@ -0,0 +1,589 @@ +import { CT } from "@start9labs/start-sdk" +import { + dictionary, + object, + anyOf, + string, + literals, + array, + number, + boolean, + Parser, + deferred, + every, + nill, + literal, +} from "ts-matches" + +export function transformConfigSpec(oldSpec: OldConfigSpec): CT.InputSpec { + return Object.entries(oldSpec).reduce((inputSpec, [key, oldVal]) => { + let newVal: CT.ValueSpec + + if (oldVal.type === "boolean") { + newVal = { + type: "toggle", + name: oldVal.name, + default: oldVal.default, + description: oldVal.description || null, + warning: oldVal.warning || null, + disabled: false, + immutable: false, + } + } else if (oldVal.type === "enum") { + newVal = { + type: "select", + name: oldVal.name, + description: oldVal.description || null, + warning: oldVal.warning || null, + default: oldVal.default, + values: oldVal.values.reduce( + (obj, curr) => ({ + ...obj, + [curr]: oldVal["value-names"][curr] || curr, + }), + {}, + ), + required: false, + disabled: false, + immutable: false, + } + } else if (oldVal.type === "list") { + newVal = getListSpec(oldVal) + } else if (oldVal.type === "number") { + const range = Range.from(oldVal.range) + + newVal = { + type: "number", + name: oldVal.name, + default: oldVal.default || null, + description: oldVal.description || null, + warning: oldVal.warning || null, + disabled: false, + immutable: false, + required: !oldVal.nullable, + min: range.min + ? range.minInclusive + ? range.min + : range.min + 1 + : null, + max: range.max + ? range.maxInclusive + ? 
range.max + : range.max - 1 + : null, + integer: oldVal.integral, + step: null, + units: oldVal.units || null, + placeholder: oldVal.placeholder || null, + } + } else if (oldVal.type === "object") { + newVal = { + type: "object", + name: oldVal.name, + description: oldVal.description || null, + warning: oldVal.warning || null, + spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(oldVal.spec)), + } + } else if (oldVal.type === "string") { + newVal = { + type: "text", + name: oldVal.name, + default: oldVal.default || null, + description: oldVal.description || null, + warning: oldVal.warning || null, + disabled: false, + immutable: false, + required: !oldVal.nullable, + patterns: + oldVal.pattern && oldVal["pattern-description"] + ? [ + { + regex: oldVal.pattern, + description: oldVal["pattern-description"], + }, + ] + : [], + minLength: null, + maxLength: null, + masked: oldVal.masked || false, + generate: null, + inputmode: "text", + placeholder: oldVal.placeholder || null, + } + } else if (oldVal.type === "union") { + newVal = { + type: "union", + name: oldVal.tag.name, + description: oldVal.tag.description || null, + warning: oldVal.tag.warning || null, + variants: Object.entries(oldVal.variants).reduce( + (obj, [id, spec]) => ({ + ...obj, + [id]: { + name: oldVal.tag["variant-names"][id] || id, + spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(spec)), + }, + }), + {} as Record, + ), + disabled: false, + required: true, + default: oldVal.default, + immutable: false, + } + } else if (oldVal.type === "pointer") { + return inputSpec + } else { + throw new Error(`unknown spec ${JSON.stringify(oldVal)}`) + } + + return { + ...inputSpec, + [key]: newVal, + } + }, {} as CT.InputSpec) +} + +export function transformOldConfigToNew( + spec: OldConfigSpec, + config: Record, +): Record { + return Object.entries(spec).reduce((obj, [key, val]) => { + let newVal = config[key] + + if (isObject(val)) { + newVal = transformOldConfigToNew( + 
matchOldConfigSpec.unsafeCast(val.spec), + config[key], + ) + } + + if (isUnion(val)) { + const selection = config[key][val.tag.id] + delete config[key][val.tag.id] + + newVal = { + selection, + value: transformOldConfigToNew( + matchOldConfigSpec.unsafeCast(val.variants[selection]), + config[key], + ), + } + } + + if (isList(val) && isObjectList(val)) { + newVal = (config[key] as object[]).map((obj) => + transformOldConfigToNew( + matchOldConfigSpec.unsafeCast(val.spec.spec), + obj, + ), + ) + } + + if (isPointer(val)) { + return obj + } + + return { + ...obj, + [key]: newVal, + } + }, {}) +} + +export function transformNewConfigToOld( + spec: OldConfigSpec, + config: Record, +): Record { + return Object.entries(spec).reduce((obj, [key, val]) => { + let newVal = config[key] + + if (isObject(val)) { + newVal = transformNewConfigToOld( + matchOldConfigSpec.unsafeCast(val.spec), + config[key], + ) + } + + if (isUnion(val)) { + newVal = { + [val.tag.id]: config[key].selection, + ...transformNewConfigToOld( + matchOldConfigSpec.unsafeCast(val.variants[config[key].selection]), + config[key].value, + ), + } + } + + if (isList(val) && isObjectList(val)) { + newVal = (config[key] as object[]).map((obj) => + transformNewConfigToOld( + matchOldConfigSpec.unsafeCast(val.spec.spec), + obj, + ), + ) + } + + return { + ...obj, + [key]: newVal, + } + }, {}) +} + +function getListSpec( + oldVal: OldValueSpecList, +): CT.ValueSpecMultiselect | CT.ValueSpecList { + const range = Range.from(oldVal.range) + + let partial: Omit = { + name: oldVal.name, + description: oldVal.description || null, + warning: oldVal.warning || null, + minLength: range.min + ? range.minInclusive + ? range.min + : range.min + 1 + : null, + maxLength: range.max + ? range.maxInclusive + ? 
range.max + : range.max - 1 + : null, + disabled: false, + } + + if (isEnumList(oldVal)) { + return { + ...partial, + type: "multiselect", + default: oldVal.default as string[], + immutable: false, + values: oldVal.spec.values.reduce( + (obj, curr) => ({ + ...obj, + [curr]: oldVal.spec["value-names"][curr], + }), + {}, + ), + } + } else if (isStringList(oldVal)) { + return { + ...partial, + type: "list", + default: oldVal.default as string[], + spec: { + type: "text", + patterns: + oldVal.spec.pattern && oldVal.spec["pattern-description"] + ? [ + { + regex: oldVal.spec.pattern, + description: oldVal.spec["pattern-description"], + }, + ] + : [], + minLength: null, + maxLength: null, + masked: oldVal.spec.masked || false, + generate: null, + inputmode: "text", + placeholder: oldVal.spec.placeholder || null, + }, + } + } else if (isObjectList(oldVal)) { + return { + ...partial, + type: "list", + default: oldVal.default as Record[], + spec: { + type: "object", + spec: transformConfigSpec( + matchOldConfigSpec.unsafeCast(oldVal.spec.spec), + ), + uniqueBy: oldVal.spec["unique-by"] || null, + displayAs: oldVal.spec["display-as"] || null, + }, + } + } else { + throw new Error("Invalid list subtype. 
enum, string, and object permitted.") + } +} + +function isObject(val: OldValueSpec): val is OldValueSpecObject { + return val.type === "object" +} + +function isUnion(val: OldValueSpec): val is OldValueSpecUnion { + return val.type === "union" +} + +function isList(val: OldValueSpec): val is OldValueSpecList { + return val.type === "list" +} + +function isPointer(val: OldValueSpec): val is OldValueSpecPointer { + return val.type === "pointer" +} + +function isEnumList( + val: OldValueSpecList, +): val is OldValueSpecList & { subtype: "enum" } { + return val.subtype === "enum" +} + +function isStringList( + val: OldValueSpecList, +): val is OldValueSpecList & { subtype: "string" } { + return val.subtype === "string" +} + +function isObjectList( + val: OldValueSpecList, +): val is OldValueSpecList & { subtype: "object" } { + if (["number", "union"].includes(val.subtype)) { + throw new Error("Invalid list subtype. enum, string, and object permitted.") + } + return val.subtype === "object" +} +export type OldConfigSpec = Record +const [_matchOldConfigSpec, setMatchOldConfigSpec] = deferred() +export const matchOldConfigSpec = _matchOldConfigSpec as Parser< + unknown, + OldConfigSpec +> +export const matchOldDefaultString = anyOf( + string, + object({ charset: string, len: number }), +) +type OldDefaultString = typeof matchOldDefaultString._TYPE + +export const matchOldValueSpecString = object( + { + type: literals("string"), + name: string, + masked: boolean, + copyable: boolean, + nullable: boolean, + placeholder: string, + pattern: string, + "pattern-description": string, + default: matchOldDefaultString, + textarea: boolean, + description: string, + warning: string, + }, + [ + "masked", + "copyable", + "nullable", + "placeholder", + "pattern", + "pattern-description", + "default", + "textarea", + "description", + "warning", + ], +) + +export const matchOldValueSpecNumber = object( + { + type: literals("number"), + nullable: boolean, + name: string, + range: string, 
+ integral: boolean, + default: number, + description: string, + warning: string, + units: string, + placeholder: string, + }, + ["default", "description", "warning", "units", "placeholder"], +) +type OldValueSpecNumber = typeof matchOldValueSpecNumber._TYPE + +export const matchOldValueSpecBoolean = object( + { + type: literals("boolean"), + default: boolean, + name: string, + description: string, + warning: string, + }, + ["description", "warning"], +) +type OldValueSpecBoolean = typeof matchOldValueSpecBoolean._TYPE + +const matchOldValueSpecObject = object( + { + type: literals("object"), + spec: _matchOldConfigSpec, + name: string, + description: string, + warning: string, + }, + ["description", "warning"], +) +type OldValueSpecObject = typeof matchOldValueSpecObject._TYPE + +const matchOldValueSpecEnum = object( + { + values: array(string), + "value-names": dictionary([string, string]), + type: literals("enum"), + default: string, + name: string, + description: string, + warning: string, + }, + ["description", "warning"], +) +type OldValueSpecEnum = typeof matchOldValueSpecEnum._TYPE + +const matchOldUnionTagSpec = object( + { + id: string, // The name of the field containing one of the union variants + "variant-names": dictionary([string, string]), // The name of each variant + name: string, + description: string, + warning: string, + }, + ["description", "warning"], +) +const matchOldValueSpecUnion = object({ + type: literals("union"), + tag: matchOldUnionTagSpec, + variants: dictionary([string, _matchOldConfigSpec]), + default: string, +}) +type OldValueSpecUnion = typeof matchOldValueSpecUnion._TYPE + +const [matchOldUniqueBy, setOldUniqueBy] = deferred() +type OldUniqueBy = + | null + | string + | { any: OldUniqueBy[] } + | { all: OldUniqueBy[] } + +setOldUniqueBy( + anyOf( + nill, + string, + object({ any: array(matchOldUniqueBy) }), + object({ all: array(matchOldUniqueBy) }), + ), +) + +const matchOldListValueSpecObject = object( + { + spec: 
_matchOldConfigSpec, // this is a mapped type of the config object at this level, replacing the object's values with specs on those values + "unique-by": matchOldUniqueBy, // indicates whether duplicates can be permitted in the list + "display-as": string, // this should be a handlebars template which can make use of the entire config which corresponds to 'spec' + }, + ["display-as", "unique-by"], +) +const matchOldListValueSpecString = object( + { + masked: boolean, + copyable: boolean, + pattern: string, + "pattern-description": string, + placeholder: string, + }, + ["pattern", "pattern-description", "placeholder", "copyable", "masked"], +) + +const matchOldListValueSpecEnum = object({ + values: array(string), + "value-names": dictionary([string, string]), +}) + +// represents a spec for a list +const matchOldValueSpecList = every( + object( + { + type: literals("list"), + range: string, // '[0,1]' (inclusive) OR '[0,*)' (right unbounded), normal math rules + default: anyOf( + array(string), + array(number), + array(matchOldDefaultString), + array(object), + ), + name: string, + description: string, + warning: string, + }, + ["description", "warning"], + ), + anyOf( + object({ + subtype: literals("string"), + spec: matchOldListValueSpecString, + }), + object({ + subtype: literals("enum"), + spec: matchOldListValueSpecEnum, + }), + object({ + subtype: literals("object"), + spec: matchOldListValueSpecObject, + }), + ), +) +type OldValueSpecList = typeof matchOldValueSpecList._TYPE + +const matchOldValueSpecPointer = every( + object({ + type: literal("pointer"), + }), + anyOf( + object({ + subtype: literal("package"), + target: literals("tor-key", "tor-address", "lan-address"), + "package-id": string, + interface: string, + }), + object({ + subtype: literal("package"), + target: literals("config"), + "package-id": string, + selector: string, + multi: boolean, + }), + ), +) +type OldValueSpecPointer = typeof matchOldValueSpecPointer._TYPE + +export const 
matchOldValueSpec = anyOf( + matchOldValueSpecString, + matchOldValueSpecNumber, + matchOldValueSpecBoolean, + matchOldValueSpecObject, + matchOldValueSpecEnum, + matchOldValueSpecList, + matchOldValueSpecUnion, + matchOldValueSpecPointer, +) +type OldValueSpec = typeof matchOldValueSpec._TYPE + +setMatchOldConfigSpec(dictionary([string, matchOldValueSpec])) + +export class Range { + min?: number + max?: number + minInclusive!: boolean + maxInclusive!: boolean + + static from(s: string = "(*,*)"): Range { + const r = new Range() + r.minInclusive = s.startsWith("[") + r.maxInclusive = s.endsWith("]") + const [minStr, maxStr] = s.split(",").map((a) => a.trim()) + r.min = minStr === "(*" ? undefined : Number(minStr.slice(1)) + r.max = maxStr === "*)" ? undefined : Number(maxStr.slice(0, -1)) + return r + } +} diff --git a/container-runtime/src/Adapters/Systems/SystemForStartOs.ts b/container-runtime/src/Adapters/Systems/SystemForStartOs.ts index 7549bf0f2..029b504c0 100644 --- a/container-runtime/src/Adapters/Systems/SystemForStartOs.ts +++ b/container-runtime/src/Adapters/Systems/SystemForStartOs.ts @@ -1,152 +1,226 @@ -import { ExecuteResult, System } from "../../Interfaces/System" +import { ExecuteResult, Procedure, System } from "../../Interfaces/System" import { unNestPath } from "../../Models/JsonPath" -import { string } from "ts-matches" -import { HostSystemStartOs } from "../HostSystemStartOs" +import matches, { any, number, object, string, tuple } from "ts-matches" import { Effects } from "../../Models/Effects" -import { RpcResult } from "../RpcListener" +import { RpcResult, matchRpcResult } from "../RpcListener" import { duration } from "../../Models/Duration" -const LOCATION = "/usr/lib/startos/package/startos" +import { T } from "@start9labs/start-sdk" +import { Volume } from "../../Models/Volume" +import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk" +import { CallbackHolder } from "../../Models/CallbackHolder" + +export const 
STARTOS_JS_LOCATION = "/usr/lib/startos/package/index.js" + +type RunningMain = { + effects: MainEffects + stop: () => Promise + callbacks: CallbackHolder +} + export class SystemForStartOs implements System { - private onTerm: (() => Promise) | undefined + private runningMain: RunningMain | undefined + static of() { - return new SystemForStartOs() + return new SystemForStartOs(require(STARTOS_JS_LOCATION)) } - constructor() {} + + constructor(readonly abi: T.ABI) {} + + async init(): Promise {} + + async exit(): Promise {} + + async start(effects: MainEffects): Promise { + if (this.runningMain) await this.stop() + let mainOnTerm: () => Promise | undefined + const started = async (onTerm: () => Promise) => { + await effects.setMainStatus({ status: "running" }) + mainOnTerm = onTerm + } + const daemons = await ( + await this.abi.main({ + effects: effects as MainEffects, + started, + }) + ).build() + this.runningMain = { + effects, + stop: async () => { + if (mainOnTerm) await mainOnTerm() + await daemons.term() + }, + callbacks: new CallbackHolder(), + } + } + + callCallback(callback: number, args: any[]): void { + if (this.runningMain) { + this.runningMain.callbacks + .callCallback(callback, args) + .catch((error) => console.error(`callback ${callback} failed`, error)) + } else { + console.warn(`callback ${callback} ignored because system is not running`) + } + } + + async stop(): Promise { + if (this.runningMain) { + await this.runningMain.stop() + await this.runningMain.effects.clearCallbacks() + this.runningMain = undefined + } + } + async execute( - effects: HostSystemStartOs, + effects: Effects, options: { - procedure: - | "/init" - | "/uninit" - | "/main/start" - | "/main/stop" - | "/config/set" - | "/config/get" - | "/backup/create" - | "/backup/restore" - | "/actions/metadata" - | `/actions/${string}/get` - | `/actions/${string}/run` - | `/dependencies/${string}/query` - | `/dependencies/${string}/update` - input: unknown + procedure: Procedure + input?: 
unknown timeout?: number | undefined }, ): Promise { - return { result: await this._execute(effects, options) } + return this._execute(effects, options) + .then((x) => + matches(x) + .when( + object({ + result: any, + }), + (x) => x, + ) + .when( + object({ + error: string, + }), + (x) => ({ + error: { + code: 0, + message: x.error, + }, + }), + ) + .when( + object({ + "error-code": tuple(number, string), + }), + ({ "error-code": [code, message] }) => ({ + error: { + code, + message, + }, + }), + ) + .defaultTo({ result: x }), + ) + .catch((error: unknown) => { + if (error instanceof Error) + return { + error: { + code: 0, + message: error.name, + data: { + details: error.message, + debug: `${error?.cause ?? "[noCause]"}:${error?.stack ?? "[noStack]"}`, + }, + }, + } + if (matchRpcResult.test(error)) return error + return { + error: { + code: 0, + message: String(error), + }, + } + }) } async _execute( - effects: Effects, + effects: Effects | MainEffects, options: { - procedure: - | "/init" - | "/uninit" - | "/main/start" - | "/main/stop" - | "/config/set" - | "/config/get" - | "/backup/create" - | "/backup/restore" - | "/actions/metadata" - | `/actions/${string}/get` - | `/actions/${string}/run` - | `/dependencies/${string}/query` - | `/dependencies/${string}/update` - input: unknown + procedure: Procedure + input?: unknown timeout?: number | undefined }, ): Promise { switch (options.procedure) { case "/init": { - const path = `${LOCATION}/procedures/init` - const procedure: any = await import(path).catch(() => require(path)) - const previousVersion = string.optional().unsafeCast(options) - return procedure.init({ effects, previousVersion }) + const previousVersion = + string.optional().unsafeCast(options.input) || null + return this.abi.init({ effects, previousVersion }) } case "/uninit": { - const path = `${LOCATION}/procedures/init` - const procedure: any = await import(path).catch(() => require(path)) - const nextVersion = string.optional().unsafeCast(options) 
- return procedure.uninit({ effects, nextVersion }) - } - case "/main/start": { - const path = `${LOCATION}/procedures/main` - const procedure: any = await import(path).catch(() => require(path)) - const started = async (onTerm: () => Promise) => { - await effects.setMainStatus({ status: "running" }) - if (this.onTerm) await this.onTerm() - this.onTerm = onTerm - } - return procedure.main({ effects, started }) - } - case "/main/stop": { - await effects.setMainStatus({ status: "stopped" }) - if (this.onTerm) await this.onTerm() - delete this.onTerm - return duration(30, "s") + const nextVersion = string.optional().unsafeCast(options.input) || null + return this.abi.uninit({ effects, nextVersion }) } + // case "/main/start": { + // + // } + // case "/main/stop": { + // if (this.onTerm) await this.onTerm() + // await effects.setMainStatus({ status: "stopped" }) + // delete this.onTerm + // return duration(30, "s") + // } case "/config/set": { - const path = `${LOCATION}/procedures/config` - const procedure: any = await import(path).catch(() => require(path)) - const input = options.input - return procedure.setConfig({ effects, input }) + const input = options.input as any // TODO + return this.abi.setConfig({ effects, input }) } case "/config/get": { - const path = `${LOCATION}/procedures/config` - const procedure: any = await import(path).catch(() => require(path)) - return procedure.getConfig({ effects }) + return this.abi.getConfig({ effects }) } case "/backup/create": + return this.abi.createBackup({ + effects, + pathMaker: ((options) => + new Volume(options.volume, options.path).path) as T.PathMaker, + }) case "/backup/restore": - throw new Error("this should be called with the init/unit") + return this.abi.restoreBackup({ + effects, + pathMaker: ((options) => + new Volume(options.volume, options.path).path) as T.PathMaker, + }) case "/actions/metadata": { - const path = `${LOCATION}/procedures/actions` - const procedure: any = await import(path).catch(() => 
require(path)) - return procedure.actionsMetadata({ effects }) + return this.abi.actionsMetadata({ effects }) + } + case "/properties": { + throw new Error("TODO") } default: const procedures = unNestPath(options.procedure) const id = procedures[2] switch (true) { case procedures[1] === "actions" && procedures[3] === "get": { - const path = `${LOCATION}/procedures/actions` - const action: any = (await import(path).catch(() => require(path))) - .actions[id] + const action = (await this.abi.actions({ effects }))[id] if (!action) throw new Error(`Action ${id} not found`) - return action.get({ effects }) + return action.getConfig({ effects }) } case procedures[1] === "actions" && procedures[3] === "run": { - const path = `${LOCATION}/procedures/actions` - const action: any = (await import(path).catch(() => require(path))) - .actions[id] + const action = (await this.abi.actions({ effects }))[id] if (!action) throw new Error(`Action ${id} not found`) - const input = options.input - return action.run({ effects, input }) + return action.run({ effects, input: options.input as any }) // TODO } case procedures[1] === "dependencies" && procedures[3] === "query": { - const path = `${LOCATION}/procedures/dependencies` - const dependencyConfig: any = ( - await import(path).catch(() => require(path)) - ).dependencyConfig[id] + const dependencyConfig = this.abi.dependencyConfig[id] if (!dependencyConfig) throw new Error(`dependencyConfig ${id} not found`) const localConfig = options.input - return dependencyConfig.query({ effects, localConfig }) + return dependencyConfig.query({ effects }) } case procedures[1] === "dependencies" && procedures[3] === "update": { - const path = `${LOCATION}/procedures/dependencies` - const dependencyConfig: any = ( - await import(path).catch(() => require(path)) - ).dependencyConfig[id] + const dependencyConfig = this.abi.dependencyConfig[id] if (!dependencyConfig) throw new Error(`dependencyConfig ${id} not found`) - return 
dependencyConfig.update(options.input) + return dependencyConfig.update(options.input as any) // TODO } } + return } - throw new Error("Method not implemented.") } - exit(effects: Effects): Promise { - throw new Error("Method not implemented.") + + async sandbox( + effects: Effects, + options: { procedure: Procedure; input?: unknown; timeout?: number }, + ): Promise { + return this.execute(effects, options) } } diff --git a/container-runtime/src/Adapters/Systems/index.ts b/container-runtime/src/Adapters/Systems/index.ts index eadc67318..a44ad533e 100644 --- a/container-runtime/src/Adapters/Systems/index.ts +++ b/container-runtime/src/Adapters/Systems/index.ts @@ -1,6 +1,22 @@ +import * as fs from "node:fs/promises" import { System } from "../../Interfaces/System" -import { SystemForEmbassy } from "./SystemForEmbassy" -import { SystemForStartOs } from "./SystemForStartOs" +import { EMBASSY_JS_LOCATION, SystemForEmbassy } from "./SystemForEmbassy" +import { STARTOS_JS_LOCATION, SystemForStartOs } from "./SystemForStartOs" export async function getSystem(): Promise { - return SystemForEmbassy.of() + if ( + await fs.access(STARTOS_JS_LOCATION).then( + () => true, + () => false, + ) + ) { + return SystemForStartOs.of() + } else if ( + await fs.access(EMBASSY_JS_LOCATION).then( + () => true, + () => false, + ) + ) { + return SystemForEmbassy.of() + } + throw new Error(`${STARTOS_JS_LOCATION} not found`) } diff --git a/container-runtime/src/Interfaces/AllGetDependencies.ts b/container-runtime/src/Interfaces/AllGetDependencies.ts index 88a200900..ca5c43585 100644 --- a/container-runtime/src/Interfaces/AllGetDependencies.ts +++ b/container-runtime/src/Interfaces/AllGetDependencies.ts @@ -1,6 +1,7 @@ import { GetDependency } from "./GetDependency" import { System } from "./System" -import { GetHostSystem, HostSystem } from "./HostSystem" +import { MakeMainEffects, MakeProcedureEffects } from "./MakeEffects" export type AllGetDependencies = GetDependency<"system", Promise> & 
- GetDependency<"hostSystem", GetHostSystem> + GetDependency<"makeProcedureEffects", MakeProcedureEffects> & + GetDependency<"makeMainEffects", MakeMainEffects> diff --git a/container-runtime/src/Interfaces/HostSystem.ts b/container-runtime/src/Interfaces/HostSystem.ts deleted file mode 100644 index 4e04bbcc8..000000000 --- a/container-runtime/src/Interfaces/HostSystem.ts +++ /dev/null @@ -1,7 +0,0 @@ -import { types as T } from "@start9labs/start-sdk" - -import { CallbackHolder } from "../Models/CallbackHolder" -import { Effects } from "../Models/Effects" - -export type HostSystem = Effects -export type GetHostSystem = (callbackHolder: CallbackHolder) => HostSystem diff --git a/container-runtime/src/Interfaces/MakeEffects.ts b/container-runtime/src/Interfaces/MakeEffects.ts new file mode 100644 index 000000000..3b25f8180 --- /dev/null +++ b/container-runtime/src/Interfaces/MakeEffects.ts @@ -0,0 +1,4 @@ +import { Effects } from "../Models/Effects" +import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk" +export type MakeProcedureEffects = (procedureId: string) => Effects +export type MakeMainEffects = () => MainEffects diff --git a/container-runtime/src/Interfaces/System.ts b/container-runtime/src/Interfaces/System.ts index 86b2aa492..01fd3c5ff 100644 --- a/container-runtime/src/Interfaces/System.ts +++ b/container-runtime/src/Interfaces/System.ts @@ -1,32 +1,54 @@ import { types as T } from "@start9labs/start-sdk" -import { JsonPath } from "../Models/JsonPath" -import { HostSystemStartOs } from "../Adapters/HostSystemStartOs" import { RpcResult } from "../Adapters/RpcListener" +import { Effects } from "../Models/Effects" +import { CallbackHolder } from "../Models/CallbackHolder" +import { MainEffects } from "@start9labs/start-sdk/cjs/lib/StartSdk" + +export type Procedure = + | "/init" + | "/uninit" + | "/config/set" + | "/config/get" + | "/backup/create" + | "/backup/restore" + | "/actions/metadata" + | "/properties" + | `/actions/${string}/get` + | 
`/actions/${string}/run` + | `/dependencies/${string}/query` + | `/dependencies/${string}/update` + export type ExecuteResult = | { ok: unknown } | { err: { code: number; message: string } } -export interface System { - // init(effects: Effects): Promise - // exit(effects: Effects): Promise - // start(effects: Effects): Promise - // stop(effects: Effects, options: { timeout: number, signal?: number }): Promise +export type System = { + init(): Promise + + start(effects: MainEffects): Promise + callCallback(callback: number, args: any[]): void + stop(): Promise execute( - effects: T.Effects, + effects: Effects, options: { - procedure: JsonPath + procedure: Procedure + input: unknown + timeout?: number + }, + ): Promise + sandbox( + effects: Effects, + options: { + procedure: Procedure input: unknown timeout?: number }, ): Promise - // sandbox( - // effects: Effects, - // options: { - // procedure: JsonPath - // input: unknown - // timeout?: number - // }, - // ): Promise - exit(effects: T.Effects): Promise + exit(): Promise +} + +export type RunningMain = { + callbacks: CallbackHolder + stop(): Promise } diff --git a/container-runtime/src/Models/CallbackHolder.ts b/container-runtime/src/Models/CallbackHolder.ts index b562e8dd0..b51af0bee 100644 --- a/container-runtime/src/Models/CallbackHolder.ts +++ b/container-runtime/src/Models/CallbackHolder.ts @@ -1,17 +1,19 @@ export class CallbackHolder { constructor() {} - private root = (Math.random() + 1).toString(36).substring(7) private inc = 0 - private callbacks = new Map() + private callbacks = new Map() private newId() { - return this.root + (this.inc++).toString(36) + return this.inc++ } - addCallback(callback: Function) { + addCallback(callback?: Function) { + if (!callback) { + return + } const id = this.newId() this.callbacks.set(id, callback) return id } - callCallback(index: string, args: any[]): Promise { + callCallback(index: number, args: any[]): Promise { const callback = this.callbacks.get(index) if 
(!callback) throw new Error(`Callback ${index} does not exist`) this.callbacks.delete(index) diff --git a/container-runtime/src/Models/DockerProcedure.ts b/container-runtime/src/Models/DockerProcedure.ts index 91ae73b5f..20c8145ab 100644 --- a/container-runtime/src/Models/DockerProcedure.ts +++ b/container-runtime/src/Models/DockerProcedure.ts @@ -8,7 +8,9 @@ import { literals, number, Parser, + some, } from "ts-matches" +import { matchDuration } from "./Duration" const VolumeId = string const Path = string @@ -31,7 +33,7 @@ export const matchDockerProcedure = object( "toml", "toml-pretty", ), - "sigterm-timeout": number, + "sigterm-timeout": some(number, matchDuration), inject: boolean, }, ["io-format", "sigterm-timeout", "system", "args", "inject", "mounts"], diff --git a/container-runtime/src/Models/Duration.ts b/container-runtime/src/Models/Duration.ts index 8c701a703..5f61c362a 100644 --- a/container-runtime/src/Models/Duration.ts +++ b/container-runtime/src/Models/Duration.ts @@ -1,6 +1,30 @@ -export type TimeUnit = "d" | "h" | "s" | "ms" +import { string } from "ts-matches" + +export type TimeUnit = "d" | "h" | "s" | "ms" | "m" | "µs" | "ns" export type Duration = `${number}${TimeUnit}` -export function duration(timeValue: number, timeUnit: TimeUnit = "s") { - return `${timeValue}${timeUnit}` as Duration +const durationRegex = /^([0-9]*(\.[0-9]+)?)(ns|µs|ms|s|m|d)$/ + +export const matchDuration = string.refine(isDuration) +export function isDuration(value: string): value is Duration { + return durationRegex.test(value) +} + +export function duration(timeValue: number, timeUnit: TimeUnit = "s") { + return `${timeValue > 0 ? 
timeValue : 0}${timeUnit}` as Duration +} +const unitsToSeconds: Record = { + ns: 1e-9, + µs: 1e-6, + ms: 0.001, + s: 1, + m: 60, + h: 3600, + d: 86400, +} + +export function fromDuration(duration: Duration | number): number { + if (typeof duration === "number") return duration + const [, num, , unit] = duration.match(durationRegex) || [] + return Number(num) * unitsToSeconds[unit] } diff --git a/container-runtime/src/Models/Effects.ts b/container-runtime/src/Models/Effects.ts index 757d51238..bacf8894a 100644 --- a/container-runtime/src/Models/Effects.ts +++ b/container-runtime/src/Models/Effects.ts @@ -1,5 +1,3 @@ import { types as T } from "@start9labs/start-sdk" -export type Effects = T.Effects & { - setMainStatus(o: { status: "running" | "stopped" }): Promise -} +export type Effects = T.Effects diff --git a/container-runtime/src/Models/JsonPath.ts b/container-runtime/src/Models/JsonPath.ts index 314019154..95a2b3a00 100644 --- a/container-runtime/src/Models/JsonPath.ts +++ b/container-runtime/src/Models/JsonPath.ts @@ -28,8 +28,6 @@ export const jsonPath = some( literals( "/init", "/uninit", - "/main/start", - "/main/stop", "/config/set", "/config/get", "/backup/create", diff --git a/container-runtime/src/Models/Volume.ts b/container-runtime/src/Models/Volume.ts index ebf013b68..061bb1fd0 100644 --- a/container-runtime/src/Models/Volume.ts +++ b/container-runtime/src/Models/Volume.ts @@ -1,14 +1,17 @@ import * as fs from "node:fs/promises" +export const BACKUP = "backup" export class Volume { readonly path: string constructor( readonly volumeId: string, _path = "", ) { - const path = (this.path = `/media/startos/volumes/${volumeId}${ - !_path ? "" : `/${_path}` - }`) + if (volumeId.toLowerCase() === BACKUP) { + this.path = `/media/startos/backup${!_path ? "" : `/${_path}`}` + } else { + this.path = `/media/startos/volumes/${volumeId}${!_path ? 
"" : `/${_path}`}` + } } async exists() { return fs.stat(this.path).then( diff --git a/container-runtime/src/index.ts b/container-runtime/src/index.ts index d86111ecb..5454bee3d 100644 --- a/container-runtime/src/index.ts +++ b/container-runtime/src/index.ts @@ -1,12 +1,13 @@ import { RpcListener } from "./Adapters/RpcListener" import { SystemForEmbassy } from "./Adapters/Systems/SystemForEmbassy" -import { HostSystemStartOs } from "./Adapters/HostSystemStartOs" +import { makeMainEffects, makeProcedureEffects } from "./Adapters/EffectCreator" import { AllGetDependencies } from "./Interfaces/AllGetDependencies" import { getSystem } from "./Adapters/Systems" const getDependencies: AllGetDependencies = { system: getSystem, - hostSystem: () => HostSystemStartOs.of, + makeProcedureEffects: () => makeProcedureEffects, + makeMainEffects: () => makeMainEffects, } new RpcListener(getDependencies) diff --git a/container-runtime/tsconfig.json b/container-runtime/tsconfig.json index fd93d5154..6981133d6 100644 --- a/container-runtime/tsconfig.json +++ b/container-runtime/tsconfig.json @@ -1,11 +1,5 @@ { - "include": [ - "./**/*.mjs", - "./**/*.js", - "src/Adapters/RpcListener.ts", - "src/index.ts", - "effects.ts" - ], + "include": ["./**/*.ts"], "exclude": ["dist"], "inputs": ["./src/index.ts"], "compilerOptions": { @@ -19,9 +13,10 @@ "declaration": true, "noImplicitAny": true, "esModuleInterop": true, - "types": ["node"], + "types": ["node", "jest"], "moduleResolution": "Node16", - "skipLibCheck": true + "skipLibCheck": true, + "resolveJsonModule": true }, "ts-node": { "compilerOptions": { diff --git a/container-runtime/update-image.sh b/container-runtime/update-image.sh index e0d5dc0c3..61429821c 100755 --- a/container-runtime/update-image.sh +++ b/container-runtime/update-image.sh @@ -4,12 +4,15 @@ cd "$(dirname "${BASH_SOURCE[0]}")" set -e - - -if mountpoint tmp/combined; then sudo umount tmp/combined; fi +if mountpoint tmp/combined; then sudo umount -R tmp/combined; fi if 
mountpoint tmp/lower; then sudo umount tmp/lower; fi +sudo rm -rf tmp mkdir -p tmp/lower tmp/upper tmp/work tmp/combined -sudo mount alpine.${ARCH}.squashfs tmp/lower +if which squashfuse > /dev/null; then + sudo squashfuse debian.${ARCH}.squashfs tmp/lower +else + sudo mount debian.${ARCH}.squashfs tmp/lower +fi sudo mount -t overlay -olowerdir=tmp/lower,upperdir=tmp/upper,workdir=tmp/work overlay tmp/combined QEMU= @@ -18,21 +21,21 @@ if [ "$ARCH" != "$(uname -m)" ]; then sudo cp $(which qemu-$ARCH-static) tmp/combined${QEMU} fi -echo "nameserver 8.8.8.8" | sudo tee tmp/combined/etc/resolv.conf # TODO - delegate to host resolver? -sudo chroot tmp/combined $QEMU /sbin/apk add nodejs sudo mkdir -p tmp/combined/usr/lib/startos/ sudo rsync -a --copy-unsafe-links dist/ tmp/combined/usr/lib/startos/init/ -sudo cp containerRuntime.rc tmp/combined/etc/init.d/containerRuntime +sudo chown -R 0:0 tmp/combined/usr/lib/startos/ +sudo cp container-runtime.service tmp/combined/lib/systemd/system/container-runtime.service +sudo chown 0:0 tmp/combined/lib/systemd/system/container-runtime.service sudo cp ../core/target/$ARCH-unknown-linux-musl/release/containerbox tmp/combined/usr/bin/start-cli -sudo chmod +x tmp/combined/etc/init.d/containerRuntime -sudo chroot tmp/combined $QEMU /sbin/rc-update add containerRuntime default +sudo chown 0:0 tmp/combined/usr/bin/start-cli +echo container-runtime | sha256sum | head -c 32 | cat - <(echo) | sudo tee tmp/combined/etc/machine-id +cat deb-install.sh | sudo systemd-nspawn --console=pipe -D tmp/combined $QEMU /bin/bash +sudo truncate -s 0 tmp/combined/etc/machine-id if [ -n "$QEMU" ]; then sudo rm tmp/combined${QEMU} fi -sudo truncate -s 0 tmp/combined/etc/resolv.conf -sudo chown -R 0:0 tmp/combined rm -f rootfs.${ARCH}.squashfs mkdir -p ../build/lib/container-runtime sudo mksquashfs tmp/combined rootfs.${ARCH}.squashfs diff --git a/core/.gitignore b/core/.gitignore index ad8368a86..9673044e6 100644 --- a/core/.gitignore +++ 
b/core/.gitignore @@ -8,3 +8,4 @@ secrets.db .env .editorconfig proptest-regressions/**/* +/startos/bindings/* \ No newline at end of file diff --git a/core/Cargo.lock b/core/Cargo.lock index 7eaac03bd..2108ac851 100644 --- a/core/Cargo.lock +++ b/core/Cargo.lock @@ -38,23 +38,23 @@ dependencies = [ [[package]] name = "ahash" -version = "0.7.7" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.15", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.6" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" +checksum = "e89da841a80418a9b391ebaea17f5c112ffaaa96f621d2c285b5174da76b9011" dependencies = [ "cfg-if", - "getrandom 0.2.11", + "getrandom 0.2.15", "once_cell", "version_check", "zerocopy", @@ -62,9 +62,9 @@ dependencies = [ [[package]] name = "aho-corasick" -version = "1.1.2" +version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] @@ -86,9 +86,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.16" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" +checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" [[package]] name = "android-tzdata" @@ -107,47 +107,48 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.13" +version = "0.6.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d96bd03f33fe50a863e394ee9718a706f988b9079b20c3784fb726e7678b62fb" +checksum = "64e15c1ab1f89faffbf04a634d5e1962e9074f2741eef6d97f3c4e322426d526" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", + "is_terminal_polyfill", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.6" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8901269c6307e8d93993578286ac0edf7f195079ffff5ebdeea6a59ffb7e36bc" +checksum = "1bec1de6f59aedf83baf9ff929c98f2ad654b97c9510f4e70cf6f661d49fd5b1" [[package]] name = "anstyle-parse" -version = "0.2.3" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +checksum = "eb47de1e80c2b463c735db5b217a0ddc39d612e7ac9e2e96a5aed1f57616c1cb" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.2" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +checksum = "6d36fc52c7f6c869915e99412912f22093507da8d9e942ceaf66fe4b7c14422a" dependencies = [ "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.2" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +checksum = "5bf74e1b6e971609db8ca7a9ce79fd5768ab6ae46441c572e46cf596f59e57f8" dependencies = [ "anstyle", "windows-sys 0.52.0", @@ -155,15 +156,21 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.75" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" +checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" [[package]] name = "arrayref" -version = "0.3.7" +version = "0.3.8" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" +checksum = "9d151e35f61089500b617991b791fc8bfd237ae50cd5950803758a179b41e67a" + +[[package]] +name = "arrayvec" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" [[package]] name = "arrayvec" @@ -186,16 +193,16 @@ version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "81953c529336010edd6d8e358f886d9581267795c61b19475b71314bffa46d35" dependencies = [ - "concurrent-queue", + "concurrent-queue 2.5.0", "event-listener", "futures-core", ] [[package]] name = "async-compression" -version = "0.4.4" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f658e2baef915ba0f26f1f7c42bfb8e12f532a01f449a090ded75ae7a07e9ba2" +checksum = "fec134f64e2bc57411226dfc4e52dec859ddfc7e711fc5e07b612584f000e4aa" dependencies = [ "brotli", "flate2", @@ -224,18 +231,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] name = "async-trait" -version = "0.1.74" +version = "0.1.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" +checksum = "6e0c28dcc82d7c8ead5cb13beb15405b57b8546e93215673ff8ca0349a028107" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] @@ -248,21 +255,43 @@ dependencies = [ ] [[package]] -name = "atty" -version = "0.2.14" +name = "atomic-waker" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8" -dependencies = [ - "hermit-abi 0.1.19", - "libc", - "winapi", -] +checksum = 
"1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" [[package]] name = "autocfg" -version = "1.1.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" +checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" + +[[package]] +name = "aws-lc-rs" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ae74d9bd0a7530e8afd1770739ad34b36838829d6ad61818f9230f683f5ad77" +dependencies = [ + "aws-lc-sys", + "mirai-annotations", + "paste", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.20.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e89b6941c2d1a7045538884d6e760ccfffdf8e1ffc2613d8efa74305e1f3752" +dependencies = [ + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", + "libc", + "paste", +] [[package]] name = "axum" @@ -275,9 +304,9 @@ dependencies = [ "bitflags 1.3.2", "bytes", "futures-util", - "http 0.2.11", - "http-body 0.4.5", - "hyper 0.14.27", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.30", "itoa", "matchit", "memchr", @@ -300,13 +329,13 @@ checksum = "3a6c9af12842a67734c9a2e355436e5d03b22383ed60cf13cd0c18fbfe3dcbcf" dependencies = [ "async-trait", "axum-core 0.4.3", - "base64 0.21.5", + "base64 0.21.7", "bytes", "futures-util", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.2.0", + "hyper 1.4.1", "hyper-util", "itoa", "matchit", @@ -320,9 +349,9 @@ dependencies = [ "serde_path_to_error", "serde_urlencoded", "sha1", - "sync_wrapper 1.0.0", + "sync_wrapper 1.0.1", "tokio", - "tokio-tungstenite", + "tokio-tungstenite 0.21.0", "tower", "tower-layer", "tower-service", @@ -338,8 +367,8 @@ dependencies = [ "async-trait", "bytes", "futures-util", - "http 0.2.11", - "http-body 0.4.5", + "http 0.2.12", + "http-body 0.4.6", "mime", "rustversion", "tower-layer", @@ -356,7 +385,7 @@ 
dependencies = [ "bytes", "futures-util", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "http-body-util", "mime", "pin-project-lite", @@ -376,9 +405,9 @@ dependencies = [ "bytes", "futures-util", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "http-body-util", - "hyper 1.2.0", + "hyper 1.4.1", "hyper-util", "pin-project-lite", "tokio", @@ -387,10 +416,26 @@ dependencies = [ ] [[package]] -name = "backtrace" -version = "0.3.69" +name = "backhand" +version = "0.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837" +checksum = "8f2fc1bc7bb7fd449e02000cc1592cc63dcdcd61710f8b9efe32bab2d1784603" +dependencies = [ + "deku", + "flate2", + "rustc-hash", + "thiserror", + "tracing", + "xz2", + "zstd", + "zstd-safe", +] + +[[package]] +name = "backtrace" +version = "0.3.71" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b05800d2e817c8b3b4b54abd461726265fa9789ae34330622f2db9ee696f9d" dependencies = [ "addr2line", "cc", @@ -401,6 +446,17 @@ dependencies = [ "rustc-demangle", ] +[[package]] +name = "barrage" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be5951c75bdabb58753d140dd5802f12ff3a483cb2e16fb5276e111b94b19e87" +dependencies = [ + "concurrent-queue 1.2.4", + "event-listener", + "spin", +] + [[package]] name = "base16ct" version = "0.2.0" @@ -414,16 +470,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23ce669cd6c8588f79e15cf450314f9638f967fc5770ff1c7c1deb0925ea7cfa" [[package]] -name = "base64" -version = "0.13.1" +name = "base32" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" +checksum = "022dfe9eb35f19ebbcb51e0b40a5ab759f46ad60cadf7297e0bd085afb50e076" [[package]] name = "base64" -version = "0.21.5" +version = "0.21.7" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" + +[[package]] +name = "base64" +version = "0.22.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" @@ -433,9 +495,9 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "basic-cookies" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb53b6b315f924c7f113b162e53b3901c05fc9966baf84d201dfcc7432a4bb38" +checksum = "67bd8fd42c16bdb08688243dc5f0cc117a3ca9efeeaba3a345a18a6159ad96f7" dependencies = [ "lalrpop", "lalrpop-util", @@ -451,6 +513,29 @@ dependencies = [ "serde", ] +[[package]] +name = "bindgen" +version = "0.69.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a00dc851838a2120612785d195287475a3ac45514741da670b735818822129a0" +dependencies = [ + "bitflags 2.6.0", + "cexpr", + "clang-sys", + "itertools 0.12.1", + "lazy_static", + "lazycell", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash", + "shlex", + "syn 2.0.72", + "which", +] + [[package]] name = "bit-set" version = "0.5.3" @@ -474,18 +559,30 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "b048fb63fd8b5923fc5aa7b340d8e156aec7ec02f0c78fa8a6ddc2613f6f71de" dependencies = [ "serde", ] [[package]] name = "bitmaps" -version = "3.2.0" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"703642b98a00b3b90513279a8ede3fcfa479c126c5fb46e78f3051522f021403" +checksum = "a1d084b0137aaa901caf9f1e8b21daa6aa24d41cd806e111335541eff9683bd6" + +[[package]] +name = "bitvec" +version = "0.19.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55f93d0ef3363c364d5976646a38f04cf67cfe1d4c8d160cdea02cab2c116b33" +dependencies = [ + "funty 1.1.0", + "radium 0.5.3", + "tap", + "wyz 0.2.0", +] [[package]] name = "bitvec" @@ -493,10 +590,10 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" dependencies = [ - "funty", - "radium", + "funty 2.0.0", + "radium 0.7.0", "tap", - "wyz", + "wyz 0.5.1", ] [[package]] @@ -506,21 +603,23 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" dependencies = [ "arrayref", - "arrayvec", + "arrayvec 0.7.4", "constant_time_eq", ] [[package]] name = "blake3" -version = "1.5.0" +version = "1.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" +checksum = "e9ec96fe9a81b5e365f9db71fe00edc4fe4ca2cc7dcb7861f0603012a7caa210" dependencies = [ "arrayref", - "arrayvec", + "arrayvec 0.7.4", "cc", "cfg-if", "constant_time_eq", + "memmap2", + "rayon-core", ] [[package]] @@ -543,9 +642,9 @@ dependencies = [ [[package]] name = "brotli" -version = "3.4.0" +version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f" +checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -554,9 +653,9 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "2.5.1" +version = "4.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "4e2e4afe60d7dd600fdd3de8d0f08c2b7ec039712e3b6137ff98b7004e82de4f" +checksum = "9a45bd2e4095a8b518033b128020dd4a55aab1c0a381ba4404a472630f4bc362" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -564,9 +663,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.14.0" +version = "3.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" +checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c" [[package]] name = "byteorder" @@ -576,19 +675,35 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.5.0" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" +checksum = "a12916984aab3fa6e39d655a33e09c0071eb36d6ab3aea5c2d78551f1df6d952" + +[[package]] +name = "cache-padded" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "981520c98f422fcc584dc1a95c334e6953900b9106bc47a9839b81790009eb21" [[package]] name = "cc" -version = "1.0.84" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f8e7c90afad890484a21653d08b6e209ae34770fb5ee298f9c699fcc1e5c856" +checksum = "2aba8f4e9906c7ce3c73463f62a7f0c65183ada1a2d47e397cc8810827f9694f" dependencies = [ + "jobserver", "libc", ] +[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom 7.1.3", +] + [[package]] name = "cfg-if" version = "1.0.0" @@ -596,10 +711,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] -name = "chrono" -version = "0.4.31" +name = 
"cfg_aliases" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + +[[package]] +name = "chrono" +version = "0.4.38" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" dependencies = [ "android-tzdata", "iana-time-zone", @@ -607,7 +728,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] @@ -616,14 +737,14 @@ version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8eebd66744a15ded14960ab4ccdbfb51ad3b81f51f3f04a80adac98c985396c9" dependencies = [ - "hashbrown 0.14.2", + "hashbrown 0.14.5", ] [[package]] name = "ciborium" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "effd91f6c78e5a4ace8a5d3c0b6bfaec9e2baaef55f3efc00e45fb2e477ee926" +checksum = "42e69ffd6f0917f5c029256a24d0161db17cea3997d185db0d35926308770f0e" dependencies = [ "ciborium-io", "ciborium-ll", @@ -632,18 +753,18 @@ dependencies = [ [[package]] name = "ciborium-io" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdf919175532b369853f5d5e20b26b43112613fd6fe7aee757e35f7a44642656" +checksum = "05afea1e0a06c9be33d539b876f1ce3692f4afea2cb41f740e7743225ed1c757" [[package]] name = "ciborium-ll" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "defaa24ecc093c77630e6c15e17c51f5e187bf35ee514f4e2d67baaa96dae22b" +checksum = "57663b653d948a338bfb3eeba9bb2fd5fcfaecb9e199e87e1eda4d9e8b240fd9" dependencies = [ "ciborium-io", - "half", + "half 2.4.1", ] [[package]] @@ -666,25 +787,21 @@ dependencies = [ ] [[package]] -name = 
"clap" -version = "3.2.25" +name = "clang-sys" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ea181bf566f71cb9a5d17a59e1871af638180a18fb0035c92ae62b705207123" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" dependencies = [ - "atty", - "bitflags 1.3.2", - "clap_lex 0.2.4", - "indexmap 1.9.3", - "strsim 0.10.0", - "termcolor", - "textwrap", + "glob", + "libc", + "libloading", ] [[package]] name = "clap" -version = "4.5.4" +version = "4.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" +checksum = "35723e6a11662c2afb578bcf0b88bf6ea8e21282a953428f240574fcc3a2b5b3" dependencies = [ "clap_builder", "clap_derive", @@ -692,48 +809,48 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.2" +version = "4.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" +checksum = "49eb96cbfa7cfa35017b7cd548c75b14c3118c98b423041d70562665e07fb0fa" dependencies = [ "anstream", "anstyle", - "clap_lex 0.7.0", - "strsim 0.11.0", + "clap_lex", + "strsim 0.11.1", ] [[package]] name = "clap_derive" -version = "4.5.4" +version = "4.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" +checksum = "5d029b67f89d30bbb547c89fd5161293c0aec155fc691d7924b64550662db93e" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] name = "clap_lex" -version = "0.2.4" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5" +checksum = "1462739cb27611015575c0c11df5df7601141071f07518d56fcc1be504cbec97" + +[[package]] +name = "cmake" +version = "0.1.50" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a31c789563b815f77f4250caee12365734369f942439b7defd71e18a48197130" dependencies = [ - "os_str_bytes", + "cc", ] -[[package]] -name = "clap_lex" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" - [[package]] name = "color-eyre" -version = "0.6.2" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a667583cca8c4f8436db8de46ea8233c42a7d9ae424a82d338f2e4675229204" +checksum = "55146f5e46f237f7423d74111267d4597b59b0dad0ffaf7303bce9945d843ad5" dependencies = [ "backtrace", "color-spantrace", @@ -746,9 +863,9 @@ dependencies = [ [[package]] name = "color-spantrace" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce" +checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" dependencies = [ "once_cell", "owo-colors", @@ -758,37 +875,46 @@ dependencies = [ [[package]] name = "colorchoice" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +checksum = "d3fd119d74b830634cea2a0f58bbd0d54540518a14397557951e79340abc28c0" [[package]] name = "concurrent-queue" -version = "2.3.0" +version = "1.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f057a694a54f12365049b0958a1685bb52d567f5593b355fbf685838e873d400" +checksum = "af4780a44ab5696ea9e28294517f1fffb421a83a25af521333c838635509db9c" +dependencies = [ + "cache-padded", +] + +[[package]] +name = "concurrent-queue" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ca0197aee26d1ae37445ee532fefce43251d24cc7c166799f4d46817f1d3973" dependencies = [ 
"crossbeam-utils", ] [[package]] name = "console" -version = "0.15.7" +version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" dependencies = [ "encode_unicode 0.3.6", "lazy_static", "libc", "unicode-width", - "windows-sys 0.45.0", + "windows-sys 0.52.0", ] [[package]] name = "console-api" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd326812b3fd01da5bb1af7d340d0d555fd3d4b641e7f1dfcf5962a902952787" +checksum = "a257c22cd7e487dd4a13d413beabc512c5052f0bc048db0da6a84c3d8a6142fd" dependencies = [ "futures-core", "prost", @@ -799,9 +925,9 @@ dependencies = [ [[package]] name = "console-subscriber" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7481d4c57092cd1c19dd541b92bdce883de840df30aa5d03fd48a3935c01842e" +checksum = "31c4cc54bae66f7d9188996404abdf7fdfa23034ef8e43478c8810828abad758" dependencies = [ "console-api", "crossbeam-channel", @@ -809,6 +935,7 @@ dependencies = [ "futures-task", "hdrhistogram", "humantime", + "prost", "prost-types", "serde", "serde_json", @@ -823,24 +950,24 @@ dependencies = [ [[package]] name = "const-oid" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28c122c3980598d243d63d9a704629a2d748d101f278052ff068be5a4423ab6f" +checksum = "c2459377285ad874054d797f3ccebf984978aa39129f6eafde5cdc8315b612f8" [[package]] name = "const_format" -version = "0.2.32" +version = "0.2.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673" +checksum = "c990efc7a285731f9a4378d81aff2f0e85a2c8781a05ef0f8baa8dac54d0ff48" dependencies = [ "const_format_proc_macros", ] [[package]] name = 
"const_format_proc_macros" -version = "0.2.32" +version = "0.2.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" +checksum = "e026b6ce194a874cb9cf32cd5772d1ef9767cc8fcb5765948d74f37a9d8b2bf6" dependencies = [ "proc-macro2", "quote", @@ -870,33 +997,23 @@ dependencies = [ [[package]] name = "cookie" -version = "0.17.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7efb37c3e1ccb1ff97164ad95ac1606e8ccd35b3fa0a7d99a304c7f4a428cc24" +checksum = "4ddef33a339a91ea89fb53151bd0a4689cfce27055c291dfa69945475d22c747" dependencies = [ "percent-encoding", "time", "version_check", ] -[[package]] -name = "cookie" -version = "0.18.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cd91cf61412820176e137621345ee43b3f4423e589e7ae4e50d601d93e35ef8" -dependencies = [ - "time", - "version_check", -] - [[package]] name = "cookie_store" -version = "0.20.0" +version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "387461abbc748185c3a6e1673d826918b450b87ff22639429c694619a83b6cf6" +checksum = "4934e6b7e8419148b6ef56950d277af8561060b56afd59e2aadf98b59fce6baa" dependencies = [ - "cookie 0.17.0", - "idna 0.3.0", + "cookie", + "idna 0.5.0", "log", "publicsuffix", "serde", @@ -908,9 +1025,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -918,24 +1035,24 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cpufeatures" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" +checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" dependencies = [ "libc", ] [[package]] name = "crc" -version = "3.0.1" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ec7a15cbe22e59248fc7eadb1907dab5ba09372595da4d73dd805ed4417dfe" +checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636" dependencies = [ "crc-catalog", ] @@ -948,41 +1065,55 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "a97769d94ddab943e4510d138150169a2758b5ef3eb191a9ee688de3e23ef7b3" dependencies = [ "cfg-if", ] [[package]] name = "crossbeam-channel" -version = "0.5.8" +version = "0.5.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" +checksum = "33480d6946193aa8033910124896ca395333cae7e2d1113d1fef6c3272217df2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-deque" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613f8cc01fe9cf1a3eb3d7f488fd2fa8388403e97039e2f73692932e291a770d" +dependencies = [ + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" dependencies = [ - "cfg-if", "crossbeam-utils", ] [[package]] name = "crossbeam-queue" -version = "0.3.8" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" dependencies = [ - "cfg-if", "crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.16" +version = "0.8.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a22b2d63d4d1dc0b7f1b6b2747dd0088008a9be28b6ddf0b1e7d335e3037294" -dependencies = [ - "cfg-if", -] +checksum = "22ec99545bb0ed0ea7bb9b8e1e9122ea386ff8a48c0922e43f36d45ab09e0e80" [[package]] name = "crossterm" @@ -990,11 +1121,11 @@ version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.6.0", "crossterm_winapi", "futures-core", "libc", - "mio", + "mio 0.8.11", "parking_lot", "signal-hook", "signal-hook-mio", @@ -1018,9 +1149,9 @@ checksum = "7a81dae078cea95a014a339291cec439d2f232ebe854a9d672b796c6afafa9b7" [[package]] name = "crypto-bigint" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28f85c3514d2a6e64160359b45a3918c3b4178bcbf4ae5d03ab2d02e521c479a" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -1068,12 +1199,6 @@ dependencies = [ "cipher 0.3.0", ] -[[package]] -name = "current_platform" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a74858bcfe44b22016cb49337d7b6f04618c58e5dbfdef61b06b8c434324a0bc" - [[package]] name = "curve25519-dalek" version = "3.2.0" @@ -1089,16 +1214,15 @@ dependencies = [ 
[[package]] name = "curve25519-dalek" -version = "4.1.1" +version = "4.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" +checksum = "97fb8b7c4503de7d6ae7b42ab72a5a59857b4c937ec27a3d4539dba95b5ab2be" dependencies = [ "cfg-if", "cpufeatures", "curve25519-dalek-derive", "digest 0.10.7", "fiat-crypto", - "platforms", "rustc_version", "subtle", "zeroize", @@ -1112,14 +1236,14 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] name = "darling" -version = "0.20.3" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0209d94da627ab5605dcccf08bb18afa5009cfbef48d8a8b7d7bdbc79be25c5e" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" dependencies = [ "darling_core", "darling_macro", @@ -1127,51 +1251,88 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.3" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "177e3443818124b357d8e76f53be906d60937f0d3a90773a664fa63fa253e621" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", - "strsim 0.10.0", - "syn 2.0.39", + "strsim 0.11.1", + "syn 2.0.72", ] [[package]] name = "darling_macro" -version = "0.20.3" +version = "0.20.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" dependencies = [ "darling_core", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] name = "data-encoding" -version = "2.4.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" +checksum = "e8566979429cf69b49a5c740c60791108e86440e8be149bbea4fe54d2c32d6e2" + +[[package]] +name = "deku" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "709ade444d53896e60f6265660eb50480dd08b77bfc822e5dcc233b88b0b2fba" +dependencies = [ + "bitvec 1.0.1", + "deku_derive", + "no_std_io", + "rustversion", +] + +[[package]] +name = "deku_derive" +version = "0.17.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d7534973f93f9de83203e41c8ddd32d230599fa73fa889f3deb1580ccd186913" +dependencies = [ + "darling", + "proc-macro-crate", + "proc-macro2", + "quote", + "syn 2.0.72", +] [[package]] name = "der" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fffa369a668c8af7dbf8b5e56c9f744fbd399949ed171606040001947de40b1c" +checksum = "f55bf8e7b65898637379c1b74eb1551107c8294ed26d855ceb9fd1a09cfc9bc0" dependencies = [ "const-oid", + "der_derive", "pem-rfc7468", "zeroize", ] [[package]] -name = "deranged" -version = "0.3.9" +name = "der_derive" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3" +checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.72", +] + +[[package]] +name = "deranged" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b42b6fa04a440b495c8b04d0e71b707c585f83cb9cb28cf8cd0d976c315e31b4" dependencies = [ "powerfmt", "serde", @@ -1179,23 +1340,17 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.17" +version = "0.99.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +checksum = 
"5f33878137e4dafd7fa914ad4e259e18a4e8e532b9617a2d0150262bf53abfce" dependencies = [ "convert_case 0.4.0", "proc-macro2", "quote", "rustc_version", - "syn 1.0.109", + "syn 2.0.72", ] -[[package]] -name = "diff" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" - [[package]] name = "digest" version = "0.9.0" @@ -1252,30 +1407,36 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "drain" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f1a0abf3fcefad9b4dd0e414207a7408e12b68414a01e6bb19b897d5bd7632d" +checksum = "9d105028bd2b5dfcb33318fd79a445001ead36004dd8dffef1bdd7e493d8bc1e" dependencies = [ "tokio", ] [[package]] -name = "dyn-clone" -version = "1.0.16" +name = "dunce" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" +checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" + +[[package]] +name = "dyn-clone" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" [[package]] name = "ecdsa" -version = "0.16.8" +version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der", "digest 0.10.7", "elliptic-curve", "rfc6979", - "signature 2.0.0", + "signature 2.2.0", "spki", ] @@ -1296,7 +1457,7 @@ checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" dependencies = [ "pkcs8", "serde", - "signature 2.0.0", + "signature 2.2.0", ] [[package]] @@ -1319,30 +1480,30 @@ version = "2.1.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4a3daa8e81a3963a60642bcc1f90a670680bd4a77535faa384e9d1c79d620871" dependencies = [ - "curve25519-dalek 4.1.1", + "curve25519-dalek 4.1.3", "ed25519 2.2.3", "rand_core 0.6.4", "serde", "sha2 0.10.8", - "signature 2.0.0", + "signature 2.2.0", "subtle", "zeroize", ] [[package]] name = "either" -version = "1.9.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" dependencies = [ "serde", ] [[package]] name = "elliptic-curve" -version = "0.13.6" +version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d97ca172ae9dc9f9b779a6e3a65d308f2af74e5b8c921299075bdb4a0370e914" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", @@ -1360,20 +1521,21 @@ dependencies = [ [[package]] name = "emver" -version = "0.1.7" -source = "git+https://github.com/Start9Labs/emver-rs.git#61cf0bc96711b4d6f3f30df8efef025e0cc02bad" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed260c4d7efaec031b9c4f6c4d3cf136e3df2bbfe50925800236f5e847f28704" dependencies = [ "either", "fp-core", - "nom", + "nom 6.1.2", "serde", ] [[package]] name = "ena" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c533630cf40e9caa44bd91aadc88a75d75a4c3a12b4cfde353cbed41daa1e1f1" +checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" dependencies = [ "log", ] @@ -1392,9 +1554,9 @@ checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" [[package]] name = "encoding_rs" -version = "0.8.33" +version = "0.8.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7268b386296a025e474d5140678f75d6de9493ae55a5d709eeb9dd08149945e1" +checksum = "b45de904aa0b010bce2ab45264d0631681847fa7b6f2eaa7dab7619943bc4f59" dependencies = [ "cfg-if", ] @@ -1408,7 +1570,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] @@ -1419,12 +1581,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.6" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c18ee0ed65a5f1f81cac6b1d213b69c35fa47d4252ad41f1486dbd8226fe36e" +checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -1444,11 +1606,29 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" +[[package]] +name = "exver" +version = "0.2.0" +source = "git+https://github.com/Start9Labs/exver-rs.git#29f52c1be18a0fe187670beac92822994b0d1949" +dependencies = [ + "either", + "emver", + "fp-core", + "getrandom 0.2.15", + "itertools 0.13.0", + "memchr", + "pest", + "pest_derive", + "serde", + "smallvec", + "yasi", +] + [[package]] name = "eyre" -version = "0.6.8" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" dependencies = [ "indenter", "once_cell", @@ -1456,9 +1636,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" [[package]] name = "fd-lock-rs" @@ -1481,28 +1661,22 
@@ dependencies = [ [[package]] name = "fiat-crypto" -version = "0.2.3" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f69037fe1b785e84986b4f2cbcf647381876a00671d25ceef715d7812dd7e1dd" +checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" [[package]] name = "filetime" -version = "0.2.22" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", - "windows-sys 0.48.0", + "redox_syscall 0.4.1", + "windows-sys 0.52.0", ] -[[package]] -name = "finl_unicode" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" - [[package]] name = "fixedbitset" version = "0.4.2" @@ -1511,9 +1685,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" dependencies = [ "crc32fast", "miniz_oxide", @@ -1527,7 +1701,7 @@ checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" dependencies = [ "futures-core", "futures-sink", - "spin 0.9.8", + "spin", ] [[package]] @@ -1553,9 +1727,9 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = 
"e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -1569,6 +1743,18 @@ dependencies = [ "itertools 0.8.2", ] +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + [[package]] name = "funty" version = "2.0.0" @@ -1577,9 +1763,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" +checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" dependencies = [ "futures-channel", "futures-core", @@ -1592,9 +1778,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb" +checksum = "eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" dependencies = [ "futures-core", "futures-sink", @@ -1602,15 +1788,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" +checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" [[package]] name = "futures-executor" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" 
+checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" dependencies = [ "futures-core", "futures-task", @@ -1630,38 +1816,38 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa" +checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" [[package]] name = "futures-macro" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" +checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] name = "futures-sink" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817" +checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" [[package]] name = "futures-task" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2" +checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" [[package]] name = "futures-util" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104" +checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" dependencies = [ "futures-channel", "futures-core", @@ -1699,9 +1885,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", @@ -1710,9 +1896,15 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" + +[[package]] +name = "glob" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "gpt" @@ -1720,7 +1912,7 @@ version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8283e7331b8c93b9756e0cfdbcfb90312852f953c6faf9bf741e684cc3b6ad69" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.6.0", "crc", "log", "uuid", @@ -1739,17 +1931,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.21" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +checksum = "81fe527a889e1532da5c525686d96d4c2e74cdd345badf8dfef9f6b39dd5f5e8" dependencies = [ "bytes", "fnv", "futures-core", "futures-sink", "futures-util", - "http 0.2.11", - "indexmap 1.9.3", + "http 0.2.12", + "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -1758,17 +1950,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51ee2dd2e4f378392eeff5d51618cd9a63166a2513846bbc55f21cfacd9199d4" +checksum = "fa82e28a107a8cc405f0839610bdc9b15f1e25ec7d696aa5cf173edbcb1486ab" dependencies = [ + "atomic-waker", "bytes", "fnv", "futures-core", "futures-sink", - "futures-util", "http 1.1.0", - "indexmap 2.1.0", + "indexmap 2.2.6", "slab", 
"tokio", "tokio-util", @@ -1777,9 +1969,19 @@ dependencies = [ [[package]] name = "half" -version = "1.8.2" +version = "1.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +checksum = "1b43ede17f21864e81be2fa654110bf1e793774238d86ef8555c37e6519c0403" + +[[package]] +name = "half" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dd08c532ae367adf81c312a4580bc67f1d0fe8bc9c460520283f4c0ff277888" +dependencies = [ + "cfg-if", + "crunchy", +] [[package]] name = "hashbrown" @@ -1793,16 +1995,16 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.11", ] [[package]] name = "hashbrown" -version = "0.14.2" +version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" +checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.11", "allocator-api2", ] @@ -1812,19 +2014,19 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown 0.14.2", + "hashbrown 0.14.5", ] [[package]] name = "hdrhistogram" -version = "7.5.3" +version = "7.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5b38e5c02b7c7be48c8dc5217c4f1634af2ea221caae2e024bffc7a7651c691" +checksum = "765c9198f173dd59ce26ff9f95ef0aafd0a0fe01fb9d72841bc5066a4c06511d" dependencies = [ - "base64 0.13.1", + "base64 0.21.7", "byteorder", "flate2", - "nom", + "nom 7.1.3", "num-traits", ] @@ -1852,7 +2054,7 @@ dependencies = [ "lazy_async_pool", "models", "pin-project", - "rpc-toolkit 0.2.3 
(registry+https://github.com/rust-lang/crates.io-index)", + "rpc-toolkit", "serde", "serde_json", "tokio", @@ -1862,18 +2064,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.1.19" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62b467343b94ba476dcb2500d242dadbb39557df889310ac77c5d99100aaac33" -dependencies = [ - "libc", -] - -[[package]] -name = "hermit-abi" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" +checksum = "d231dfb89cfffdbc30e7fc41579ed6066ad03abda9e567ccafae602b97ec5024" [[package]] name = "hex" @@ -1883,15 +2076,15 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hifijson" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85ef6b41c333e6dd2a4aaa59125a19b633cd17e7aaf372b2260809777bcdef4a" +checksum = "18ae468bcb4dfecf0e4949ee28abbc99076b6a0077f51ddbc94dbfff8e6a870c" [[package]] name = "hkdf" -version = "0.12.3" +version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "791a029f6b9fc27657f6f188ec6e5e43f6911f6f878e0dc5501396e09809d437" +checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ "hmac", ] @@ -1907,18 +2100,18 @@ dependencies = [ [[package]] name = "home" -version = "0.5.5" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "http" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "601cbb57e577e2f5ef5be8e7b83f0f63994f25aa94d673e54a92d5c516d101f1" dependencies = [ "bytes", "fnv", @@ -1938,20 +2131,20 @@ dependencies = [ [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ "bytes", - "http 0.2.11", + "http 0.2.12", "pin-project-lite", ] [[package]] name = "http-body" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", "http 1.1.0", @@ -1959,22 +2152,22 @@ dependencies = [ [[package]] name = "http-body-util" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0475f8b2ac86659c21b64320d5d653f9efe42acd2a4e560073ec61a155a34f1d" +checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", - "futures-core", + "futures-util", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "pin-project-lite", ] [[package]] name = "httparse" -version = "1.8.0" +version = "1.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" +checksum = "0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" [[package]] name = "httpdate" @@ -1990,22 +2183,22 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.27" +version = "0.14.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" +checksum = "a152ddd61dfaec7273fe8419ab357f33aee0d914c5f4efbf0d96fa749eea5ec9" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", - "h2 0.3.21", - "http 0.2.11", - "http-body 0.4.5", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2", "tokio", "tower-service", "tracing", @@ -2014,22 +2207,40 @@ dependencies = [ [[package]] name = "hyper" -version = "1.2.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05" dependencies = [ "bytes", "futures-channel", "futures-util", - "h2 0.4.3", + "h2 0.4.5", "http 1.1.0", - "http-body 1.0.0", + "http-body 1.0.1", "httparse", "httpdate", "itoa", "pin-project-lite", "smallvec", "tokio", + "want", +] + +[[package]] +name = "hyper-rustls" +version = "0.27.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee4be2c948921a1a5320b629c4193916ed787a7f7f293fd3f7f5a6c9de74155" +dependencies = [ + "futures-util", + "http 1.1.0", + "hyper 1.4.1", + "hyper-util", + "rustls 0.23.12", + "rustls-pki-types", + "tokio", + "tokio-rustls", + "tower-service", ] [[package]] @@ -2038,7 +2249,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper 0.14.27", + "hyper 0.14.30", "pin-project-lite", "tokio", "tokio-io-timeout", @@ -2046,38 +2257,45 @@ dependencies = [ [[package]] name = "hyper-tls" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +checksum = 
"70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" dependencies = [ "bytes", - "hyper 0.14.27", + "http-body-util", + "hyper 1.4.1", + "hyper-util", "native-tls", "tokio", "tokio-native-tls", + "tower-service", ] [[package]] name = "hyper-util" -version = "0.1.3" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" +checksum = "3ab92f4f49ee4fb4f997c784b7a2e0fa70050211e0b6a287f898c3c9785ca956" dependencies = [ "bytes", + "futures-channel", "futures-util", "http 1.1.0", - "http-body 1.0.0", - "hyper 1.2.0", + "http-body 1.0.1", + "hyper 1.4.1", "pin-project-lite", - "socket2 0.5.5", + "socket2", "tokio", + "tower", + "tower-service", + "tracing", ] [[package]] name = "iana-time-zone" -version = "0.1.58" +version = "0.1.60" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" +checksum = "e7ffbb5a1b541ea2561f8c41c087286cc091e21e556a4f09a8f6cbf17b69b141" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -2131,6 +2349,16 @@ dependencies = [ "unicode-normalization", ] +[[package]] +name = "idna" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + [[package]] name = "imbl" version = "2.0.3" @@ -2147,9 +2375,9 @@ dependencies = [ [[package]] name = "imbl-sized-chunks" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6957ea0b2541c5ca561d3ef4538044af79f8a05a1eb3a3b148936aaceaa1076" +checksum = "144006fb58ed787dcae3f54575ff4349755b00ccc99f4b4873860b654be1ed63" dependencies = [ "bitmaps", ] @@ -2157,7 +2385,7 @@ dependencies = [ [[package]] name = "imbl-value" version = "0.1.0" -source = 
"git+https://github.com/Start9Labs/imbl-value.git#929395141c3a882ac366c12ac9402d0ebaa2201b" +source = "git+https://github.com/Start9Labs/imbl-value.git#48dc39a762a3b4f9300d3b9f850cbd394e777ae0" dependencies = [ "imbl", "serde", @@ -2168,18 +2396,18 @@ dependencies = [ [[package]] name = "include_dir" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18762faeff7122e89e0857b02f7ce6fcc0d101d5e9ad2ad7846cc01d61b7f19e" +checksum = "923d117408f1e49d914f1a379a309cffe4f18c05cf4e3d12e613a15fc81bd0dd" dependencies = [ "include_dir_macros", ] [[package]] name = "include_dir_macros" -version = "0.7.3" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b139284b5cf57ecfa712bcc66950bb635b31aff41c188e8a4cfc758eca374a3f" +checksum = "7cab85a7ed0bd5f0e76d93846e0147172bed2e2d3f859bcc33a8d9699cad1a75" dependencies = [ "proc-macro2", "quote", @@ -2204,20 +2432,20 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.1.0" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", - "hashbrown 0.14.2", + "hashbrown 0.14.5", "serde", ] [[package]] name = "indicatif" -version = "0.17.7" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" dependencies = [ "console", "instant", @@ -2238,9 +2466,9 @@ dependencies = [ [[package]] name = "instant" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" +checksum = 
"e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222" dependencies = [ "cfg-if", ] @@ -2276,15 +2504,21 @@ dependencies = [ [[package]] name = "is-terminal" -version = "0.4.9" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" dependencies = [ - "hermit-abi 0.3.3", - "rustix", - "windows-sys 0.48.0", + "hermit-abi", + "libc", + "windows-sys 0.52.0", ] +[[package]] +name = "is_terminal_polyfill" +version = "1.70.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" + [[package]] name = "isocountry" version = "0.3.2" @@ -2332,10 +2566,19 @@ dependencies = [ ] [[package]] -name = "itoa" -version = "1.0.9" +name = "itertools" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" [[package]] name = "jaq-core" @@ -2343,7 +2586,7 @@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eb52eeac20f256459e909bd4a03bb8c4fab6a1fdbb8ed52d00f644152df48ece" dependencies = [ - "ahash 0.7.7", + "ahash 0.7.8", "dyn-clone", "hifijson", "indexmap 1.9.3", @@ -2376,13 +2619,22 @@ dependencies = [ ] [[package]] -name = "josekit" -version = "0.8.4" +name = "jobserver" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5754487a088f527b1407df470db8e654e4064dccbbe1fe850e0773721e9962b7" +checksum = 
"48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" +dependencies = [ + "libc", +] + +[[package]] +name = "josekit" +version = "0.8.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b85e2125819afc4fd2ae57416207e792c7e12797858e5db2a6c6f24a166829" dependencies = [ "anyhow", - "base64 0.21.5", + "base64 0.22.1", "flate2", "once_cell", "openssl", @@ -2395,9 +2647,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.65" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8" +checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" dependencies = [ "wasm-bindgen", ] @@ -2435,42 +2687,42 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f6d5ed8676d904364de097082f4e7d240b571b67989ced0240f08b7f966f940" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" dependencies = [ "cpufeatures", ] [[package]] name = "lalrpop" -version = "0.19.12" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a1cbf952127589f2851ab2046af368fd20645491bb4b376f04b7f94d7a9837b" +checksum = "55cb077ad656299f160924eb2912aa147d7339ea7d69e1b5517326fdcec3c1ca" dependencies = [ "ascii-canvas", "bit-set", - "diff", "ena", - "is-terminal", - "itertools 0.10.5", + "itertools 0.11.0", "lalrpop-util", "petgraph", + "pico-args", "regex", - "regex-syntax 0.6.29", + "regex-syntax 0.8.4", "string_cache", "term", "tiny-keccak", "unicode-xid", + "walkdir", ] [[package]] name = "lalrpop-util" -version = "0.19.12" +version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3c48237b9604c5a4702de6b824e02006c3214327564636aef27c1028a8fa0ed" +checksum = 
"507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "regex", + "regex-automata 0.4.7", ] [[package]] @@ -2491,18 +2743,47 @@ checksum = "e479e99b287d578ed5f6cd4c92cdf48db219088adb9c5b14f7c155b71dfba792" [[package]] name = "lazy_static" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" +checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" dependencies = [ - "spin 0.5.2", + "spin", +] + +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + +[[package]] +name = "lexical-core" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" +dependencies = [ + "arrayvec 0.5.2", + "bitflags 1.3.2", + "cfg-if", + "ryu", + "static_assertions", ] [[package]] name = "libc" -version = "0.2.150" +version = "0.2.155" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c" + +[[package]] +name = "libloading" +version = "0.8.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4" +dependencies = [ + "cfg-if", + "windows-targets 0.52.6", +] [[package]] name = "libm" @@ -2512,20 +2793,19 @@ checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libredox" -version = "0.0.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +checksum = 
"c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.6.0", "libc", - "redox_syscall 0.4.1", ] [[package]] name = "libsqlite3-sys" -version = "0.26.0" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afc22eff61b133b115c6e8c74e818c628d6d5e7a502afea6f64dee076dd94326" +checksum = "cf4e226dcd58b4be396f7bd3c20da8fdee2911400705297ba7d2d7cc2c30f716" dependencies = [ "cc", "pkg-config", @@ -2533,16 +2813,22 @@ dependencies = [ ] [[package]] -name = "linux-raw-sys" -version = "0.4.11" +name = "libyml" +version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" +checksum = "3e281a65eeba3d4503a2839252f86374528f9ceafe6fed97c1d3b52e1fb625c1" + +[[package]] +name = "linux-raw-sys" +version = "0.4.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89" [[package]] name = "lock_api" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = "07af8b9cdd281b7915f413fa73f29ebd5d55d0d3f0155584dade1ff18cea1b17" dependencies = [ "autocfg", "scopeguard", @@ -2550,9 +2836,20 @@ dependencies = [ [[package]] name = "log" -version = "0.4.20" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "a7a70ba024b9dc04c27ea2f0c0548feb474ec5c54bba33a7f72f873a39d07b24" + +[[package]] +name = "lzma-sys" +version = "0.1.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5fda04ab3764e6cde78b9974eec4f779acaba7c4e84b36eca3cf77c581b85d27" +dependencies = [ + "cc", + "libc", + "pkg-config", +] [[package]] name = 
"matchers" @@ -2576,7 +2873,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c487024623ae38584610237dd1be8932bb2b324474b23c37a25f9fbe6bf5e9e" dependencies = [ "bincode", - "bitvec", + "bitvec 1.0.1", "serde", "serde-big-array", "thiserror", @@ -2594,9 +2891,18 @@ dependencies = [ [[package]] name = "memchr" -version = "2.6.4" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" +checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" + +[[package]] +name = "memmap2" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fe751422e4a8caa417e13c3ea66452215d7d63e19e604f4980461212f3ae1322" +dependencies = [ + "libc", +] [[package]] name = "memoffset" @@ -2630,18 +2936,18 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" dependencies = [ "adler", ] [[package]] name = "mio" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", @@ -2649,14 +2955,33 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "mio" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4569e456d394deccd22ce1c1913e6ea0e54519f577285001215d33557431afe4" +dependencies = [ + "hermit-abi", + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", +] + +[[package]] +name = 
"mirai-annotations" +version = "1.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1" + [[package]] name = "models" version = "0.1.0" dependencies = [ - "base64 0.21.5", + "axum 0.7.5", + "base64 0.21.7", "color-eyre", "ed25519-dalek 2.1.1", - "emver", + "exver", "ipnet", "lazy_static", "mbrman", @@ -2666,7 +2991,7 @@ dependencies = [ "rand 0.8.5", "regex", "reqwest", - "rpc-toolkit 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rpc-toolkit", "serde", "serde_json", "sqlx", @@ -2681,11 +3006,10 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +checksum = "a8614eb2c83d59d1c8cc974dd3f920198647674a0a035e1af1fa58707e317466" dependencies = [ - "lazy_static", "libc", "log", "openssl", @@ -2699,9 +3023,9 @@ dependencies = [ [[package]] name = "new_debug_unreachable" -version = "1.0.4" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4a24736216ec316047a1fc4252e27dabb04218aa4a3f37c6e7ddbf1f9782b54" +checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086" [[package]] name = "new_mime_guess" @@ -2740,15 +3064,38 @@ dependencies = [ [[package]] name = "nix" -version = "0.27.1" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" +checksum = "71e2746dc3a24dd78b3cfcb7be93368c6de9963d30f43a6a73998a9cf4b17b46" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.6.0", "cfg-if", + "cfg_aliases", "libc", ] +[[package]] +name = "no_std_io" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fa5f306a6f2c01b4fd172f29bb46195b1764061bf926c75e96ff55df3178208" 
+dependencies = [ + "memchr", +] + +[[package]] +name = "nom" +version = "6.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e7413f999671bd4745a7b624bd370a569fb6bc574b23c83a3c5ed2e453f3d5e2" +dependencies = [ + "bitvec 0.19.6", + "funty 1.1.0", + "lexical-core", + "memchr", + "version_check", +] + [[package]] name = "nom" version = "7.1.3" @@ -2771,9 +3118,9 @@ dependencies = [ [[package]] name = "num" -version = "0.4.1" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b05180d69e3da0e530ba2a1dae5110317e49e3b7f3d41be227dc5f92e49ee7af" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" dependencies = [ "num-bigint", "num-complex", @@ -2785,11 +3132,10 @@ dependencies = [ [[package]] name = "num-bigint" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ - "autocfg", "num-integer", "num-traits", ] @@ -2813,28 +3159,33 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.4.4" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba157ca0885411de85d6ca030ba7e2a83a28636056c7c699b07c8b6f7383214" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" dependencies = [ "num-traits", ] [[package]] -name = "num-integer" -version = "0.1.45" +name = "num-conv" +version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" + +[[package]] +name = "num-integer" +version = "0.1.46" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "autocfg", "num-traits", ] [[package]] name = "num-iter" -version = "0.1.43" +version = "0.1.45" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d03e6c028c5dc5cac6e2dec0efda81fc887605bb3d884578bb6d6bf7514e252" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" dependencies = [ "autocfg", "num-integer", @@ -2843,11 +3194,10 @@ dependencies = [ [[package]] name = "num-rational" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" dependencies = [ - "autocfg", "num-bigint", "num-integer", "num-traits", @@ -2855,9 +3205,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.17" +version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", "libm", @@ -2869,29 +3219,29 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.3", + "hermit-abi", "libc", ] [[package]] name = "num_enum" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "683751d591e6d81200c39fb0d1032608b77724f34114db54f571ff1317b337c0" +checksum = "02339744ee7253741199f897151b38e72257d13802d4ee837285cc2990a90845" dependencies = [ "num_enum_derive", ] [[package]] name = "num_enum_derive" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6c11e44798ad209ccdd91fc192f0526a369a01234f7373e1b141c96d7cee4f0e" +checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] @@ -2902,9 +3252,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.1" +version = "0.32.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" dependencies = [ "memchr", ] @@ -2917,17 +3267,17 @@ checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "opaque-debug" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" +checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381" [[package]] name = "openssh-keys" -version = "0.6.2" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75a0ec2d1b302412fb503224289325fcc0e44600176864804c7211b055cfd58" +checksum = "abb830a82898b2ac17c9620ddce839ac3b34b9cb8a1a037cbdbfb9841c756c3e" dependencies = [ - "base64 0.21.5", + "base64 0.21.7", "byteorder", "md-5", "sha2 0.10.8", @@ -2936,11 +3286,11 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.59" +version = "0.10.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a257ad03cd8fb16ad4172fedf8094451e1af1c4b70097636ef2eac9a5f0cc33" +checksum = "9529f4786b70a3e8c61e11179af17ab6188ad8d0ded78c5529441ed39d4bd9c1" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.6.0", "cfg-if", "foreign-types", "libc", @@ -2957,7 +3307,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] @@ -2968,18 +3318,18 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-src" -version = "300.1.6+3.1.4" +version = "300.3.1+3.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439fac53e092cd7442a3660c85dde4643ab3b5bd39040912388dcdabf6b88085" +checksum = "7259953d42a81bf137fbbd73bd30a8e1914d6dce43c2b90ed575783a22608b91" dependencies = [ "cc", ] [[package]] name = "openssl-sys" -version = "0.9.95" +version = "0.9.103" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40a4130519a360279579c2053038317e40eff64d13fd3f004f9e1b72b8a6aaf9" +checksum = "7f9e8deee91df40a943c71b917e5874b951d32a802526c85721ce3b776c929d6" dependencies = [ "cc", "libc", @@ -2988,12 +3338,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "os_str_bytes" -version = "6.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" - [[package]] name = "overload" version = "0.1.1" @@ -3031,10 +3375,24 @@ dependencies = [ ] [[package]] -name = "parking_lot" -version = "0.12.1" +name = "p521" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" +checksum = "0fc9e2161f1f215afdfce23677034ae137bbd45016a880c2eb3ba8eb95f085b2" +dependencies = [ + "base16ct", + "ecdsa", + "elliptic-curve", + "primeorder", + "rand_core 0.6.4", + "sha2 0.10.8", +] + +[[package]] +name = "parking_lot" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bf18183cf54e8d6059647fc3063646a1801cf30896933ec2311622cc4b9a27" dependencies = [ "lock_api", "parking_lot_core", @@ -3042,22 +3400,22 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.9" +version = "0.9.10" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", + "redox_syscall 0.5.3", "smallvec", - "windows-targets 0.48.5", + "windows-targets 0.52.6", ] [[package]] name = "paste" -version = "1.0.14" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" [[package]] name = "patch-db" @@ -3074,7 +3432,7 @@ dependencies = [ "nix 0.26.4", "patch-db-macro", "serde", - "serde_cbor 0.11.1", + "serde_cbor", "thiserror", "tokio", "tracing", @@ -3121,18 +3479,63 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" + +[[package]] +name = "pest" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" +dependencies = [ + "memchr", + "thiserror", + "ucd-trie", +] + +[[package]] +name = "pest_derive" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" +dependencies = [ + "pest", + "pest_generator", +] + +[[package]] +name = "pest_generator" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" +dependencies = [ + "pest", + "pest_meta", + "proc-macro2", + "quote", + "syn 2.0.72", +] + 
+[[package]] +name = "pest_meta" +version = "2.7.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" +dependencies = [ + "once_cell", + "pest", + "sha2 0.10.8", +] [[package]] name = "petgraph" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.1.0", + "indexmap 2.2.6", ] [[package]] @@ -3145,30 +3548,36 @@ dependencies = [ ] [[package]] -name = "pin-project" -version = "1.1.3" +name = "pico-args" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fda4ed1c6c173e3fc7a83629421152e01d7b1f9b7f65fb301e490e8cfc656422" +checksum = "5be167a7af36ee22fe3115051bc51f6e6c7054c9348e28deb4f49bd6f705a315" + +[[package]] +name = "pin-project" +version = "1.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.3" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] name = "pin-project-lite" -version = "0.2.13" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" +checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" [[package]] name = "pin-utils" @@ -3199,21 +3608,15 @@ dependencies = [ 
[[package]] name = "pkg-config" -version = "0.3.27" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" - -[[package]] -name = "platforms" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "portable-atomic" -version = "1.5.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bccab0e7fd7cc19f820a1c8c91720af652d0c88dc9664dd72aef2614f04af3b" +checksum = "da544ee218f0d287a911e9c99a39a8c9bc8fcad3cb8db5959940044ecfc67265" [[package]] name = "powerfmt" @@ -3233,6 +3636,16 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c" +[[package]] +name = "prettyplease" +version = "0.2.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f12335488a2f3b0a83b14edad48dca9879ce89b2edd10e80237e4e852dd645e" +dependencies = [ + "proc-macro2", + "syn 2.0.72", +] + [[package]] name = "prettytable-rs" version = "0.10.0" @@ -3249,46 +3662,46 @@ dependencies = [ [[package]] name = "primeorder" -version = "0.13.3" +version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7dbe9ed3b56368bd99483eb32fe9c17fdd3730aebadc906918ce78d54c7eeb4" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro-crate" -version = "2.0.0" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" 
dependencies = [ - "toml_edit 0.20.7", + "toml_edit 0.21.1", ] [[package]] name = "proc-macro2" -version = "1.0.69" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" +checksum = "5e719e8df665df0d1c8fbfd238015744736151d4445ec0836b8e628aae103b77" dependencies = [ "unicode-ident", ] [[package]] name = "proptest" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +checksum = "b4c2511913b88df1637da85cc8d96ec8e43a3f8bb8ccb71ee1ac240d6f3df58d" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.4.1", + "bitflags 2.6.0", "lazy_static", "num-traits", "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax 0.8.2", + "regex-syntax 0.8.4", "rusty-fork", "tempfile", "unarray", @@ -3296,20 +3709,20 @@ dependencies = [ [[package]] name = "proptest-derive" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf16337405ca084e9c78985114633b6827711d22b9e6ef6c6c0d665eb3f0b6e" +checksum = "6ff7ff745a347b87471d859a377a9a404361e7efc2a971d73424a6d183c0fc77" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.72", ] [[package]] name = "prost" -version = "0.12.1" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4fdd22f3b9c31b53c060df4a0613a1c7f062d4115a2b984dd15b1858f7e340d" +checksum = "deb1435c188b76130da55f17a466d252ff7b1418b2ad3e037d127b94e3411f29" dependencies = [ "bytes", "prost-derive", @@ -3317,22 +3730,22 @@ dependencies = [ [[package]] name = "prost-derive" -version = "0.12.1" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "265baba7fabd416cf5078179f7d2cbeca4ce7a9041111900675ea7c4cb8a4c32" +checksum = 
"81bddcdb20abf9501610992b6759a4c888aef7d1a7247ef75e2404275ac24af1" dependencies = [ "anyhow", - "itertools 0.11.0", + "itertools 0.12.1", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] name = "prost-types" -version = "0.12.1" +version = "0.12.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e081b29f63d83a4bc75cfc9f3fe424f9156cf92d8a4f0c9407cce9a1b67327cf" +checksum = "9091c90b0a32608e984ff2fa4091273cbdd755d54935c51d520887f4a1dbd5b0" dependencies = [ "prost", ] @@ -3361,13 +3774,19 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.33" +version = "1.0.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" dependencies = [ "proc-macro2", ] +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + [[package]] name = "radium" version = "0.7.0" @@ -3433,7 +3852,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.15", ] [[package]] @@ -3463,6 +3882,16 @@ dependencies = [ "rand_core 0.6.4", ] +[[package]] +name = "rayon-core" +version = "1.12.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" +dependencies = [ + "crossbeam-deque", + "crossbeam-utils", +] + [[package]] name = "redox_syscall" version = "0.1.57" @@ -3478,15 +3907,6 @@ dependencies = [ "bitflags 1.3.2", ] -[[package]] -name = "redox_syscall" -version = "0.3.5" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_syscall" version = "0.4.1" @@ -3497,26 +3917,35 @@ dependencies = [ ] [[package]] -name = "redox_users" -version = "0.4.4" +name = "redox_syscall" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" +checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" dependencies = [ - "getrandom 0.2.11", + "bitflags 2.6.0", +] + +[[package]] +name = "redox_users" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +dependencies = [ + "getrandom 0.2.15", "libredox", "thiserror", ] [[package]] name = "regex" -version = "1.10.2" +version = "1.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "b91213439dad192326a0d7c6ee3955910425f441d7038e0d6933b0aec5c4517f" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex-automata 0.4.7", + "regex-syntax 0.8.4", ] [[package]] @@ -3530,13 +3959,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "38caf58cc5ef2fed281f89292ef23f6365465ed9a41b7a7754eb4e26496c92df" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.8.4", ] [[package]] @@ -3547,28 +3976,31 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.8.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "7a66a03ae7c801facd77a29370b4faec201768915ac14a721ba36f20bc9c209b" [[package]] name = "reqwest" -version = "0.11.27" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd67538700a17451e7cba03ac727fb961abb7607553461627b97de0b89cf4a62" +checksum = "c7d6d2a27d57148378eb5e111173f4276ad26340ecc5c49a4a2152167a2d6a37" dependencies = [ - "base64 0.21.5", + "base64 0.22.1", "bytes", - "cookie 0.17.0", + "cookie", "cookie_store", "encoding_rs", "futures-core", "futures-util", - "h2 0.3.21", - "http 0.2.11", - "http-body 0.4.5", - "hyper 0.14.27", + "h2 0.4.5", + "http 1.1.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.4.1", + "hyper-rustls", "hyper-tls", + "hyper-util", "ipnet", "js-sys", "log", @@ -3577,11 +4009,11 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls-pemfile", + "rustls-pemfile 2.1.2", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper 0.1.2", + "sync_wrapper 1.0.1", "system-configuration", "tokio", "tokio-native-tls", @@ -3598,9 +4030,9 @@ dependencies = [ [[package]] name = "reqwest_cookie_store" -version = "0.6.0" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba529055ea150e42e4eb9c11dcd380a41025ad4d594b0cb4904ef28b037e1061" +checksum = "a0b36498c7452f11b1833900f31fbb01fc46be20992a50269c88cf59d79f54e9" dependencies = [ "bytes", "cookie_store", @@ -3620,16 +4052,17 @@ dependencies = [ [[package]] name = "ring" -version = "0.17.5" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb0205304757e5d899b9c2e448b867ffd03ae7f988002e47cd24954391394d0b" +checksum = "c17fa4cb658e3583423e915b9f3acc01cceaee1860e33d59ebae66adc3a2dc0d" dependencies = [ "cc", - "getrandom 0.2.11", + "cfg-if", + "getrandom 0.2.15", "libc", - "spin 0.9.8", 
+ "spin", "untrusted", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -3646,34 +4079,12 @@ dependencies = [ [[package]] name = "rpc-toolkit" version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c48252a30abb9426a3239fa8dfd2c8dd2647bb24db0b6145db2df04ae53fe647" -dependencies = [ - "clap 3.2.25", - "futures", - "hyper 0.14.27", - "lazy_static", - "openssl", - "reqwest", - "rpc-toolkit-macro 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "serde", - "serde_cbor 0.11.2", - "serde_json", - "thiserror", - "tokio", - "url", - "yajrc", -] - -[[package]] -name = "rpc-toolkit" -version = "0.2.3" -source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits#c89e0abdb15dd3bed9adb5339cf0b61a96f32b50" +source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/no-dyn-ctx#f608480034942f1f521ab95949ab33fbc51d99a9" dependencies = [ "async-stream", "async-trait", "axum 0.7.5", - "clap 4.5.4", + "clap", "futures", "http 1.1.0", "http-body-util", @@ -3684,7 +4095,6 @@ dependencies = [ "openssl", "pin-project", "reqwest", - "rpc-toolkit-macro 0.2.2 (git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits)", "serde", "serde_json", "thiserror", @@ -3694,54 +4104,11 @@ dependencies = [ "yajrc", ] -[[package]] -name = "rpc-toolkit-macro" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8e4b9cb00baf2d61bcd35e98d67dcb760382a3b4540df7e63b38d053c8a7b8b" -dependencies = [ - "proc-macro2", - "rpc-toolkit-macro-internals 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "syn 1.0.109", -] - -[[package]] -name = "rpc-toolkit-macro" -version = "0.2.2" -source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits#c89e0abdb15dd3bed9adb5339cf0b61a96f32b50" -dependencies = [ - "proc-macro2", - "rpc-toolkit-macro-internals 0.2.2 
(git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits)", - "syn 1.0.109", -] - -[[package]] -name = "rpc-toolkit-macro-internals" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3e2ce21b936feaecdab9c9a8e75b9dca64374ccc11951a58045ad6559b75f42" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "rpc-toolkit-macro-internals" -version = "0.2.2" -source = "git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits#c89e0abdb15dd3bed9adb5339cf0b61a96f32b50" -dependencies = [ - "itertools 0.12.1", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "rsa" -version = "0.9.3" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86ef35bf3e7fe15a53c4ab08a998e42271eab13eb0db224126bc7bc4c4bad96d" +checksum = "5d0e5124fcb30e76a7e79bfee683a2746db83784b86289f6251b54b7950a0dfc" dependencies = [ "const-oid", "digest 0.10.7", @@ -3752,7 +4119,7 @@ dependencies = [ "pkcs8", "rand_core 0.6.4", "sha2 0.10.8", - "signature 2.0.0", + "signature 2.2.0", "spki", "subtle", "zeroize", @@ -3770,20 +4137,26 @@ dependencies = [ [[package]] name = "rust-argon2" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e71971821b3ae0e769e4a4328dbcb517607b434db7697e9aba17203ec14e46a" +checksum = "9d9848531d60c9cbbcf9d166c885316c24bc0e2a9d3eba0956bb6cbbd79bc6e8" dependencies = [ - "base64 0.21.5", + "base64 0.21.7", "blake2b_simd", "constant_time_eq", ] [[package]] name = "rustc-demangle" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" +checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f" + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] name = "rustc_version" @@ -3796,22 +4169,22 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.21" +version = "0.38.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3" +checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.6.0", "errno", "libc", "linux-raw-sys", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.21.8" +version = "0.21.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "446e14c5cda4f3f30fe71863c34ec70f5ac79d6087097ad0bb433e1be5edf04c" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "ring", "rustls-webpki 0.101.7", @@ -3820,14 +4193,15 @@ dependencies = [ [[package]] name = "rustls" -version = "0.22.3" +version = "0.23.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99008d7ad0bbbea527ec27bddbc0e432c5b87d8175178cee68d2eec9c4a1813c" +checksum = "c58f8c84392efc0a126acce10fa59ff7b3d2ac06ab451a33f2741989b806b044" dependencies = [ + "aws-lc-rs", "log", - "ring", + "once_cell", "rustls-pki-types", - "rustls-webpki 0.102.2", + "rustls-webpki 0.102.6", "subtle", "zeroize", ] @@ -3838,14 +4212,24 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64 0.21.5", + "base64 0.21.7", +] + +[[package]] +name = "rustls-pemfile" +version = "2.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29993a25686778eb88d4189742cd713c9bce943bc54251a33509dc63cbacf73d" +dependencies = [ + "base64 0.22.1", + "rustls-pki-types", ] 
[[package]] name = "rustls-pki-types" -version = "1.4.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecd36cc4259e3e4514335c4a138c6b43171a8d61d8f5c9348f9fc7529416f247" +checksum = "976295e77ce332211c0d24d92c0e83e50f5c5f046d11082cea19f3df13a3562d" [[package]] name = "rustls-webpki" @@ -3859,10 +4243,11 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.102.2" +version = "0.102.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e" dependencies = [ + "aws-lc-rs", "ring", "rustls-pki-types", "untrusted", @@ -3870,9 +4255,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.14" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" +checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" [[package]] name = "rusty-fork" @@ -3904,17 +4289,26 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.15" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" + +[[package]] +name = "same-file" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] [[package]] name = "schannel" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" dependencies = [ - 
"windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -3949,11 +4343,11 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.9.2" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.6.0", "core-foundation", "core-foundation-sys", "libc", @@ -3962,9 +4356,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.9.1" +version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" dependencies = [ "core-foundation-sys", "libc", @@ -3972,18 +4366,18 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.20" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" +checksum = "61697e0a1c7e512e84a621326239844a24d8207b4669b41bc18b32ea5cbf988b" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.192" +version = "1.0.204" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001" +checksum = "bc76f558e0cbb2a839d37354c575f1dc3fdc6546b5be373ba43d95f231bf7c12" dependencies = [ "serde_derive", ] @@ -4001,38 +4395,28 @@ dependencies = [ name = "serde_cbor" version = "0.11.1" dependencies = [ - "half", - "serde", -] - -[[package]] -name = "serde_cbor" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bef2ebfde456fb76bbcf9f59315333decc4fda0b2b44b420243c11e0f5ec1f5" -dependencies = [ - "half", + "half 1.8.3", "serde", ] [[package]] name = 
"serde_derive" -version = "1.0.192" +version = "1.0.204" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1" +checksum = "e0cd7e117be63d3c3678776753929474f3b04a43a080c744d6b0ae2a8c28e222" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.120" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "itoa", "ryu", "serde", @@ -4040,9 +4424,9 @@ dependencies = [ [[package]] name = "serde_path_to_error" -version = "0.1.14" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4beec8bce849d58d06238cb50db2e1c417cfeafa4c63f692b15c82b7c80f8335" +checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6" dependencies = [ "itoa", "serde", @@ -4050,9 +4434,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.4" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" +checksum = "eb5b1b31579f3811bf615c144393417496f152e12ac8b7663bf664f4a815306d" dependencies = [ "serde", ] @@ -4071,16 +4455,17 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.4.0" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" +checksum = "69cecfa94848272156ea67b2b1a53f20fc7bc638c4a46d2f8abde08f05f4b857" dependencies = [ - "base64 0.21.5", + "base64 0.22.1", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.1.0", + "indexmap 2.2.6", "serde", + "serde_derive", "serde_json", "serde_with_macros", 
"time", @@ -4088,27 +4473,31 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.4.0" +version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" +checksum = "a8fee4991ef4f274617a51ad4af30519438dacb2f56ac773b08a1922ff743350" dependencies = [ "darling", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] -name = "serde_yaml" -version = "0.9.27" +name = "serde_yml" +version = "0.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" +checksum = "78ce6afeda22f0b55dde2c34897bce76a629587348480384231205c14b59a01f" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "itoa", + "libyml", + "log", + "memchr", "ryu", "serde", - "unsafe-libyaml", + "serde_json", + "tempfile", ] [[package]] @@ -4171,6 +4560,12 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "signal-hook" version = "0.3.17" @@ -4188,15 +4583,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "29ad2e15f37ec9a6cc544097b78a1ec90001e9f71b81338ca39f430adaca99af" dependencies = [ "libc", - "mio", + "mio 0.8.11", "signal-hook", ] [[package]] name = "signal-hook-registry" -version = "1.4.1" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8229b473baa5980ac72ef434c4415e70c4b5e71b423043adb4ba059f89c99a1" +checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1" dependencies = [ "libc", ] @@ -4209,9 +4604,9 @@ checksum = 
"74233d3b3b2f6d4b006dc19dee745e73e2a6bfb6f93607cd3b02bd5b00797d7c" [[package]] name = "signature" -version = "2.0.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fe458c98333f9c8152221191a77e2a44e8325d0193484af2e9421a53019e57d" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -4250,31 +4645,21 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" [[package]] -name = "socket2" -version = "0.4.10" +name = "smawk" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" -dependencies = [ - "libc", - "winapi", -] +checksum = "b7c388c1b5e93756d0c740965c41e8822f866621d41acbdf6336a6a168f8840c" [[package]] name = "socket2" -version = "0.5.5" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] -[[package]] -name = "spin" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" - [[package]] name = "spin" version = "0.9.8" @@ -4286,9 +4671,9 @@ dependencies = [ [[package]] name = "spki" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der", @@ -4296,20 +4681,19 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.2" +version = 
"0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b7b278788e7be4d0d29c0f39497a0eef3fba6bbc8e70d8bf7fde46edeaa9e85" +checksum = "f895e3734318cc55f1fe66258926c9b910c124d47520339efecbb6c59cec7c1f" dependencies = [ - "itertools 0.11.0", - "nom", + "nom 7.1.3", "unicode_categories", ] [[package]] name = "sqlx" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e50c216e3624ec8e7ecd14c6a6a6370aad6ee5d8cfc3ab30b5162eeeef2ed33" +checksum = "c9a2ccff1a000a5a59cd33da541d9f2fdcd9e6e8229cc200565942bff36d0aaa" dependencies = [ "sqlx-core", "sqlx-macros", @@ -4320,18 +4704,17 @@ dependencies = [ [[package]] name = "sqlx-core" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d6753e460c998bbd4cd8c6f0ed9a64346fcca0723d6e75e52fdc351c5d2169d" +checksum = "24ba59a9342a3d9bab6c56c118be528b27c9b60e490080e9711a04dccac83ef6" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.11", "atoi", "byteorder", "bytes", "chrono", "crc", "crossbeam-queue", - "dotenvy", "either", "event-listener", "futures-channel", @@ -4341,14 +4724,14 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap 2.1.0", + "indexmap 2.2.6", "log", "memchr", "once_cell", "paste", "percent-encoding", - "rustls 0.21.8", - "rustls-pemfile", + "rustls 0.21.12", + "rustls-pemfile 1.0.4", "serde", "serde_json", "sha2 0.10.8", @@ -4364,9 +4747,9 @@ dependencies = [ [[package]] name = "sqlx-macros" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a793bb3ba331ec8359c1853bd39eed32cdd7baaf22c35ccf5c92a7e8d1189ec" +checksum = "4ea40e2345eb2faa9e1e5e326db8c34711317d2b5e08d0d5741619048a803127" dependencies = [ "proc-macro2", "quote", @@ -4377,9 +4760,9 @@ dependencies = [ [[package]] name = "sqlx-macros-core" -version = "0.7.2" +version = "0.7.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4ee1e104e00dedb6aa5ffdd1343107b0a4702e862a84320ee7cc74782d96fc" +checksum = "5833ef53aaa16d860e92123292f1f6a3d53c34ba8b1969f152ef1a7bb803f3c8" dependencies = [ "dotenvy", "either", @@ -4403,13 +4786,13 @@ dependencies = [ [[package]] name = "sqlx-mysql" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db" +checksum = "1ed31390216d20e538e447a7a9b959e06ed9fc51c37b514b46eb758016ecd418" dependencies = [ "atoi", - "base64 0.21.5", - "bitflags 2.4.1", + "base64 0.21.7", + "bitflags 2.6.0", "byteorder", "bytes", "chrono", @@ -4446,13 +4829,13 @@ dependencies = [ [[package]] name = "sqlx-postgres" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624" +checksum = "7c824eb80b894f926f89a0b9da0c7f435d27cdd35b8c655b114e58223918577e" dependencies = [ "atoi", - "base64 0.21.5", - "bitflags 2.4.1", + "base64 0.21.7", + "bitflags 2.6.0", "byteorder", "chrono", "crc", @@ -4474,7 +4857,6 @@ dependencies = [ "rand 0.8.5", "serde", "serde_json", - "sha1", "sha2 0.10.8", "smallvec", "sqlx-core", @@ -4486,9 +4868,9 @@ dependencies = [ [[package]] name = "sqlx-sqlite" -version = "0.7.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d59dc83cf45d89c555a577694534fcd1b55c545a816c816ce51f20bbe56a4f3f" +checksum = "b244ef0a8414da0bed4bb1910426e890b19e5e9bccc27ada6b797d05c55ae0aa" dependencies = [ "atoi", "chrono", @@ -4505,13 +4887,14 @@ dependencies = [ "sqlx-core", "tracing", "url", + "urlencoding", ] [[package]] name = "sscanf" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c713ebd15ce561dd4a13ed62bc2a0368e16806fc30dcaf66ecf1256b2a3fdde6" +checksum = 
"a147d3cf7e723671ed11355b5b008c8019195f7fc902e213f5557d931e9f839d" dependencies = [ "const_format", "lazy_static", @@ -4521,16 +4904,16 @@ dependencies = [ [[package]] name = "sscanf_macro" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84955aa74a157e5834d58a07be11af7f0ab923f0194a0bb2ea6b3db8b5d1611d" +checksum = "af3a37bdf8e90e77cc60f74473edf28d922ae2eacdd595e67724ccd2381774cc" dependencies = [ "convert_case 0.6.0", "proc-macro2", "quote", "regex-syntax 0.6.29", "strsim 0.10.0", - "syn 2.0.39", + "syn 2.0.72", "unicode-width", ] @@ -4557,18 +4940,19 @@ dependencies = [ [[package]] name = "ssh-key" -version = "0.6.2" +version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2180b3bc4955efd5661a97658d3cf4c8107e0d132f619195afe9486c13cca313" +checksum = "ca9b366a80cf18bb6406f4cf4d10aebfb46140a8c0c33f666a144c5c76ecbafc" dependencies = [ "ed25519-dalek 2.1.1", "p256", "p384", + "p521", "rand_core 0.6.4", "rsa", "sec1", "sha2 0.10.8", - "signature 2.0.0", + "signature 2.2.0", "ssh-cipher", "ssh-encoding", "subtle", @@ -4577,7 +4961,7 @@ dependencies = [ [[package]] name = "start-os" -version = "0.3.5-rev.2" +version = "0.3.6-alpha.3" dependencies = [ "aes", "async-compression", @@ -4585,45 +4969,50 @@ dependencies = [ "async-trait", "axum 0.7.5", "axum-server", - "base32", - "base64 0.21.5", + "backhand", + "barrage", + "base32 0.5.1", + "base64 0.22.1", "base64ct", "basic-cookies", "blake3", "bytes", "chrono", "ciborium", - "clap 4.5.4", + "clap", "color-eyre", "console", "console-subscriber", - "cookie 0.18.0", + "cookie", "cookie_store", - "current_platform", + "der", "digest 0.10.7", "divrem", "ed25519 2.2.3", "ed25519-dalek 1.0.1", "ed25519-dalek 2.1.1", - "emver", + "exver", "fd-lock-rs", + "form_urlencoded", "futures", "gpt", "helpers", "hex", "hmac", "http 1.1.0", + "http-body-util", + "hyper-util", "id-pool", "imbl", "imbl-value", "include_dir", - 
"indexmap 2.1.0", + "indexmap 2.2.6", "indicatif", "integer-encoding", "ipnet", "iprange", "isocountry", - "itertools 0.12.1", + "itertools 0.13.0", "jaq-core", "jaq-std", "josekit", @@ -4636,9 +5025,10 @@ dependencies = [ "mbrman", "models", "new_mime_guess", - "nix 0.27.1", - "nom", + "nix 0.29.0", + "nom 7.1.3", "num", + "num_cpus", "num_enum", "once_cell", "openssh-keys", @@ -4656,32 +5046,35 @@ dependencies = [ "reqwest", "reqwest_cookie_store", "rpassword", - "rpc-toolkit 0.2.3 (git+https://github.com/Start9Labs/rpc-toolkit.git?branch=refactor/traits)", + "rpc-toolkit", "rust-argon2", "rustyline-async", "semver", "serde", "serde_json", + "serde_urlencoded", "serde_with", - "serde_yaml", + "serde_yml", "sha2 0.10.8", "shell-words", "simple-logging", + "socket2", "sqlx", "sscanf", "ssh-key", - "stderrlog", "tar", + "textwrap", "thiserror", "tokio", "tokio-rustls", "tokio-socks", "tokio-stream", "tokio-tar", - "tokio-tungstenite", + "tokio-tungstenite 0.23.1", "tokio-util", - "toml 0.8.8", + "toml 0.8.16", "torut", + "tower-service", "tracing", "tracing-error", "tracing-futures", @@ -4697,17 +5090,10 @@ dependencies = [ ] [[package]] -name = "stderrlog" -version = "0.5.4" +name = "static_assertions" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69a26bbf6de627d389164afa9783739b56746c6c72c4ed16539f4ff54170327b" -dependencies = [ - "atty", - "chrono", - "log", - "termcolor", - "thread_local", -] +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" [[package]] name = "string_cache" @@ -4724,13 +5110,13 @@ dependencies = [ [[package]] name = "stringprep" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" +checksum = "7b4df3d392d81bd458a8a621b8bffbd2302a12ffe288a9d931670948749463b1" dependencies = [ - "finl_unicode", "unicode-bidi", "unicode-normalization", + 
"unicode-properties", ] [[package]] @@ -4741,15 +5127,15 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "strsim" -version = "0.11.0" +version = "0.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" [[package]] name = "subtle" -version = "2.5.0" +version = "2.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc" +checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292" [[package]] name = "syn" @@ -4764,9 +5150,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.39" +version = "2.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" +checksum = "dc4b9b9bf2add8093d3f2c0204471e951b2285580335de42f9d2534f3ae7a8af" dependencies = [ "proc-macro2", "quote", @@ -4781,9 +5167,9 @@ checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" [[package]] name = "sync_wrapper" -version = "1.0.0" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "384595c11a4e2969895cad5a8c4029115f5ab956a9e5ef4de79d11a426e5f20c" +checksum = "a7065abeca94b6a8a577f9bd45aa0867a2238b74e8eb67cf10d492bc39351394" [[package]] name = "system-configuration" @@ -4814,26 +5200,25 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tar" -version = "0.4.40" +version = "0.4.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16afcea1f22891c49a00c751c7b63b2233284064f11a200fc624137c51e2ddb" +checksum = "cb797dad5fb5b76fcf519e702f4a589483b5ef06567f160c392832c1f5e44909" dependencies = [ "filetime", "libc", - "xattr 1.0.1", + 
"xattr 1.3.1", ] [[package]] name = "tempfile" -version = "3.8.1" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if", "fastrand", - "redox_syscall 0.4.1", "rustix", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -4849,24 +5234,29 @@ dependencies = [ [[package]] name = "termcolor" -version = "1.1.3" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab24d30b911b2376f3a13cc2cd443142f0c81dda04c118693e35b3835757755" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" dependencies = [ "winapi-util", ] [[package]] name = "textwrap" -version = "0.16.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" +checksum = "23d434d3f8967a09480fb04132ebe0a3e088c173e6d0ee7897abbdf4eab0f8b9" +dependencies = [ + "smawk", + "unicode-linebreak", + "unicode-width", +] [[package]] name = "thingbuf" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4706f1bfb859af03f099ada2de3cea3e515843c2d3e93b7893f16d94a37f9415" +checksum = "662b54ef6f7b4e71f683dadc787bbb2d8e8ef2f91b682ebed3164a5a7abca905" dependencies = [ "parking_lot", "pin-project", @@ -4874,22 +5264,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.50" +version = "1.0.63" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.50" +version = "1.0.63" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] @@ -4905,9 +5295,9 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ "cfg-if", "once_cell", @@ -4915,12 +5305,13 @@ dependencies = [ [[package]] name = "time" -version = "0.3.30" +version = "0.3.36" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" +checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885" dependencies = [ "deranged", "itoa", + "num-conv", "powerfmt", "serde", "time-core", @@ -4935,10 +5326,11 @@ checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.15" +version = "0.2.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" +checksum = "3f252a68540fde3a3877aeea552b832b40ab9a69e318efd078774a01ddee1ccf" dependencies = [ + "num-conv", "time-core", ] @@ -4953,9 +5345,9 @@ dependencies = [ [[package]] name = "tinyvec" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +checksum = "445e881f4f6d382d5f27c034e25eb92edd7c784ceab92a0937db7f2e9471b938" dependencies = [ "tinyvec_macros", ] @@ -4968,22 +5360,21 @@ checksum = 
"1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.34.0" +version = "1.39.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c014766411e834f7af5b8f4cf46257aab4036ca95e9d2c144a10f59ad6f5b9" +checksum = "d040ac2b29ab03b09d4129c2f5bbd012a3ac2f79d38ff506a4bf8dd34b0eac8a" dependencies = [ "backtrace", "bytes", "libc", - "mio", - "num_cpus", + "mio 1.0.1", "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2", "tokio-macros", "tracing", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -4998,13 +5389,13 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] @@ -5019,11 +5410,11 @@ dependencies = [ [[package]] name = "tokio-rustls" -version = "0.25.0" +version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls 0.22.3", + "rustls 0.23.12", "rustls-pki-types", "tokio", ] @@ -5042,9 +5433,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", @@ -5071,27 +5462,38 @@ name = "tokio-tungstenite" version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "c83b561d025642014097b66e6c1bb422783339e0909e4429cde4749d1990bc38" +dependencies = [ + "futures-util", + "log", + "tokio", + "tungstenite 0.21.0", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c6989540ced10490aaf14e6bad2e3d33728a2813310a0c71d1574304c49631cd" dependencies = [ "futures-util", "log", "native-tls", "tokio", "tokio-native-tls", - "tungstenite", + "tungstenite 0.23.0", ] [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" dependencies = [ "bytes", "futures-core", "futures-sink", "pin-project-lite", "tokio", - "tracing", ] [[package]] @@ -5108,21 +5510,21 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.8" +version = "0.8.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a195ec8c9da26928f773888e0742ca3ca1040c6cd859c919c9f59c1954ab35" +checksum = "81967dd0dd2c1ab0bc3468bd7caecc32b8a4aa47d0c8c695d8c2b2108168d62c" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.21.0", + "toml_edit 0.22.17", ] [[package]] name = "toml_datetime" -version = "0.6.5" +version = "0.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +checksum = "f8fb9f64314842840f1d940ac544da178732128f1c78c21772e876579e0da1db" dependencies = [ "serde", ] @@ -5133,52 +5535,52 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] 
name = "toml_edit" -version = "0.20.7" +version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.21.0" +version = "0.22.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34d383cd00a163b4a5b85053df514d45bc330f6de7737edfe0a93311d1eaa03" +checksum = "8d9f8729f5aea9562aac1cc0441f5d6de3cff1ee0c5d67293eeca5eb36ee7c16" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.2.6", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.6.16", ] [[package]] name = "tonic" -version = "0.10.2" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d560933a0de61cf715926b9cac824d4c883c2c43142f787595e48280c40a1d0e" +checksum = "76c4eb7a4e9ef9d4763600161f12f5070b92a578e1b634db88a6887844c91a13" dependencies = [ "async-stream", "async-trait", "axum 0.6.20", - "base64 0.21.5", + "base64 0.21.7", "bytes", - "h2 0.3.21", - "http 0.2.11", - "http-body 0.4.5", - "hyper 0.14.27", + "h2 0.3.26", + "http 0.2.12", + "http-body 0.4.6", + "hyper 0.14.30", "hyper-timeout", "percent-encoding", "pin-project", @@ -5196,8 +5598,8 @@ name = "torut" version = "0.2.1" source = "git+https://github.com/Start9Labs/torut.git?branch=update/dependencies#cc7a1425a01214465e106975e6690794d8551bdb" dependencies = [ - "base32", - "base64 0.21.5", + "base32 0.4.0", + "base64 0.21.7", "derive_more", "ed25519-dalek 1.0.1", "hex", @@ -5262,7 +5664,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] @@ -5337,9 +5739,9 @@ dependencies = [ [[package]] name = "treediff" -version = "4.0.2" 
+version = "4.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52984d277bdf2a751072b5df30ec0377febdb02f7696d64c2d7d54630bac4303" +checksum = "4d127780145176e2b5d16611cc25a900150e86e9fd79d3bde6ff3a37359c9cb5" dependencies = [ "serde_json", ] @@ -5393,14 +5795,14 @@ dependencies = [ [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "ts-rs" version = "8.1.0" -source = "git+https://github.com/dr-bonez/ts-rs.git?branch=feature/top-level-as#d5f359d803158e06f47977a6f785878a82221d4f" +source = "git+https://github.com/dr-bonez/ts-rs.git?branch=feature/top-level-as#7ae88ade90b5e724159048a663a0bdb04bed27f7" dependencies = [ "thiserror", "ts-rs-macros", @@ -5409,12 +5811,12 @@ dependencies = [ [[package]] name = "ts-rs-macros" version = "8.1.0" -source = "git+https://github.com/dr-bonez/ts-rs.git?branch=feature/top-level-as#d5f359d803158e06f47977a6f785878a82221d4f" +source = "git+https://github.com/dr-bonez/ts-rs.git?branch=feature/top-level-as#7ae88ade90b5e724159048a663a0bdb04bed27f7" dependencies = [ "Inflector", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", "termcolor", ] @@ -5423,6 +5825,25 @@ name = "tungstenite" version = "0.21.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9ef1a641ea34f399a848dea702823bbecfb4c486f911735368f1f137cb8257e1" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http 1.1.0", + "httparse", + "log", + "rand 0.8.5", + "sha1", + "thiserror", + "url", + "utf-8", +] + +[[package]] +name = "tungstenite" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6e2e2ce1e47ed2994fd43b04c8f618008d4cabdd5ee34027cf14f9d918edd9c8" dependencies = [ "byteorder", "bytes", @@ 
-5440,22 +5861,22 @@ dependencies = [ [[package]] name = "typed-builder" -version = "0.18.1" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "444d8748011b93cb168770e8092458cb0f8854f931ff82fdf6ddfbd72a9c933e" +checksum = "77739c880e00693faef3d65ea3aad725f196da38b22fdc7ea6ded6e1ce4d3add" dependencies = [ "typed-builder-macro", ] [[package]] name = "typed-builder-macro" -version = "0.18.1" +version = "0.18.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "563b3b88238ec95680aef36bdece66896eaa7ce3c0f1b4f39d38fb2435261352" +checksum = "1f718dfaf347dcb5b983bfc87608144b0bad87970aebcbea5ce44d2a30c08e63" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] @@ -5464,6 +5885,12 @@ version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +[[package]] +name = "ucd-trie" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" + [[package]] name = "unarray" version = "0.1.4" @@ -5481,9 +5908,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" @@ -5492,25 +5919,37 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] -name = "unicode-normalization" -version = "0.1.22" +name = "unicode-linebreak" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" 
+checksum = "3b09c83c3c29d37506a3e260c08c03743a6bb66a9cd432c6934ab501a190571f" + +[[package]] +name = "unicode-normalization" +version = "0.1.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" dependencies = [ "tinyvec", ] [[package]] -name = "unicode-segmentation" -version = "1.10.1" +name = "unicode-properties" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" +checksum = "e4259d9d4425d9f0661581b804cb85fe66a4c631cadd8f490d1c13a35d5d9291" + +[[package]] +name = "unicode-segmentation" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d4c87d22b6e3f4a18d4d40ef354e97c90fcb14dd91d7dc0aa9d8a1172ebf7202" [[package]] name = "unicode-width" -version = "0.1.11" +version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" +checksum = "68f5e5f3158ecfd4b8ff6fe086db7c8467a2dfdac97fe420f2b7c4aa97af66d6" [[package]] name = "unicode-xid" @@ -5524,12 +5963,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e" -[[package]] -name = "unsafe-libyaml" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa" - [[package]] name = "untrusted" version = "0.9.0" @@ -5538,12 +5971,12 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.4.1" +version = "2.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +checksum = 
"22784dbdf76fdde8af1aeda5622b546b422b6fc585325248a2bf9f5e41e94d6c" dependencies = [ "form_urlencoded", - "idna 0.4.0", + "idna 0.5.0", "percent-encoding", "serde", ] @@ -5562,17 +5995,17 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" +checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.5.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc" +checksum = "81dfa00651efa65069b0b6b651f4aaa31ba9e3c3ce0137aaad053604ee7e0314" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.15", ] [[package]] @@ -5589,9 +6022,9 @@ checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" [[package]] name = "version_check" -version = "0.9.4" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "wait-timeout" @@ -5602,6 +6035,16 @@ dependencies = [ "libc", ] +[[package]] +name = "walkdir" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "want" version = "0.3.1" @@ -5624,10 +6067,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] -name = "wasm-bindgen" -version = "0.2.88" +name = "wasite" +version = "0.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + +[[package]] +name = "wasm-bindgen" +version = "0.2.92" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -5635,24 +6084,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.88" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.38" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02" +checksum = "76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" dependencies = [ "cfg-if", "js-sys", @@ -5662,9 +6111,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.88" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5672,22 +6121,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.88" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907" +checksum = 
"e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.88" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" [[package]] name = "wasm-streams" @@ -5704,9 +6153,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.65" +version = "0.3.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5db499c5f66323272151db0e666cd34f78617522fb0c1604d31a27c50c206a85" +checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" dependencies = [ "js-sys", "wasm-bindgen", @@ -5714,18 +6163,31 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.24.0" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + +[[package]] +name = "which" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ - "rustls-webpki 0.101.7", + "either", + "home", + "once_cell", + "rustix", ] [[package]] name = "whoami" -version = "1.4.1" +version = "1.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" +checksum = "a44ab49fad634e88f55bf8f9bb3abd2f27d7204172a112c7c9987e01c1c94ea9" +dependencies = [ + "redox_syscall 0.4.1", + "wasite", +] [[package]] name = "winapi" @@ -5745,11 +6207,11 @@ checksum = 
"ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" dependencies = [ - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -5760,20 +6222,11 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" -version = "0.51.1" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" dependencies = [ - "windows-targets 0.48.5", -] - -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", + "windows-targets 0.52.6", ] [[package]] @@ -5791,22 +6244,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.4", -] - -[[package]] -name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows-targets 0.52.6", ] [[package]] @@ -5826,25 +6264,20 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.4" +version = 
"0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7dd37b7e5ab9018759f893a1952c9420d060016fc19a472b4bb20d1bdd694d1b" +checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ - "windows_aarch64_gnullvm 0.52.4", - "windows_aarch64_msvc 0.52.4", - "windows_i686_gnu 0.52.4", - "windows_i686_msvc 0.52.4", - "windows_x86_64_gnu 0.52.4", - "windows_x86_64_gnullvm 0.52.4", - "windows_x86_64_msvc 0.52.4", + "windows_aarch64_gnullvm 0.52.6", + "windows_aarch64_msvc 0.52.6", + "windows_i686_gnu 0.52.6", + "windows_i686_gnullvm", + "windows_i686_msvc 0.52.6", + "windows_x86_64_gnu 0.52.6", + "windows_x86_64_gnullvm 0.52.6", + "windows_x86_64_msvc 0.52.6", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -5853,15 +6286,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcf46cf4c365c6f2d1cc93ce535f2c8b244591df96ceee75d8e83deb70a9cac9" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" @@ -5871,15 +6298,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da9f259dd3bcf6990b55bffd094c4f7235817ba4ceebde8e6d11cd0c5633b675" - 
-[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" @@ -5889,15 +6310,15 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b474d8268f99e0995f25b9f095bc7434632601028cf86590aea5c8a5cb7801d3" +checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] -name = "windows_i686_msvc" -version = "0.42.2" +name = "windows_i686_gnullvm" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" @@ -5907,15 +6328,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1515e9a29e5bed743cb4415a9ecf5dfca648ce85ee42e15873c3cd8610ff8e02" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" @@ -5925,15 +6340,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "5eee091590e89cc02ad514ffe3ead9eb6b660aedca2183455434b93546371a03" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] name = "windows_x86_64_gnullvm" @@ -5943,15 +6352,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ca79f2451b49fa9e2af39f0747fe999fcda4f5e241b2898624dca97a1f2177" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" @@ -5961,29 +6364,44 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.4" +version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32b752e52a2da0ddfbdbcc6fceadfeede4c939ed16d13e648833a61dfb611ed8" +checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.5.19" +version = "0.5.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.6.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b480ae9340fc261e6be3e95a1ba86d54ae3f9171132a73ce8d4bbaf68339507c" dependencies = [ "memchr", ] [[package]] name = "winreg" -version = "0.50.0" +version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524e57b2c537c0f9b1e69f1965311ec12182b4122e45035b1508cd24d2adadb1" +checksum = "a277a57398d4bfa075df44f501a17cfdf8542d224f0d36095a2adc7aee4ef0a5" dependencies = [ "cfg-if", "windows-sys 0.48.0", ] +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + [[package]] name = "wyz" version = "0.5.1" @@ -6004,11 +6422,22 @@ dependencies = [ [[package]] name = "xattr" -version = "1.0.1" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" +checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" dependencies = [ "libc", + "linux-raw-sys", + "rustix", +] + +[[package]] +name = "xz2" +version = "0.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "388c44dc09d76f1536602ead6d325eb532f5c122f17782bd57fb47baeeb767e2" +dependencies = [ + "lzma-sys", ] [[package]] @@ -6029,7 +6458,7 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f355ab62ebe30b758c1f4ab096a306722c4b7dbfb9d8c07d18c70d71a945588" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.11", "hashbrown 0.13.2", "lazy_static", "serde", @@ -6037,29 +6466,29 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.7.25" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cd369a67c0edfef15010f980c3cbe45d7f651deac2cd67ce097cd801de16557" +checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = 
"0.7.25" +version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2f140bda219a26ccc0cdb03dba58af72590c53b22642577d88a927bc5c87d6b" +checksum = "fa4f8080344d4671fb4e831a13ad1e68092748387dfc4f55e356242fae12ce3e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", ] [[package]] name = "zeroize" -version = "1.6.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" dependencies = [ "zeroize_derive", ] @@ -6072,5 +6501,33 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.72", +] + +[[package]] +name = "zstd" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcf2b778a664581e31e389454a7072dab1647606d44f7feea22cd5abb9c9f3f9" +dependencies = [ + "zstd-safe", +] + +[[package]] +name = "zstd-safe" +version = "7.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa556e971e7b568dc775c136fc9de8c779b1c2fc3a63defaafadffdbd3181afa" +dependencies = [ + "zstd-sys", +] + +[[package]] +name = "zstd-sys" +version = "2.0.12+zstd.1.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a4e40c320c3cb459d9a9ff6de98cff88f4751ee9275d140e2be94a2b74e4c13" +dependencies = [ + "cc", + "pkg-config", ] diff --git a/core/build-prod.sh b/core/build-containerbox.sh similarity index 61% rename from core/build-prod.sh rename to core/build-containerbox.sh index 8b6184942..e4a8f6e7a 100755 --- a/core/build-prod.sh +++ b/core/build-containerbox.sh @@ -28,18 +28,12 @@ set +e fail= echo "FEATURES=\"$FEATURES\"" echo "RUSTFLAGS=\"$RUSTFLAGS\"" -if ! 
rust-musl-builder sh -c "(cd core && cargo build --release $(if [ -n "$FEATURES" ]; then echo "--features $FEATURES"; fi) --locked --bin startbox --target=$ARCH-unknown-linux-musl)"; then - fail=true -fi -if ! rust-musl-builder sh -c "(cd core && cargo build --release --no-default-features --features container-runtime,$FEATURES --locked --bin containerbox --target=$ARCH-unknown-linux-musl)"; then +if ! rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features container-runtime,$FEATURES --locked --bin containerbox --target=$ARCH-unknown-linux-musl && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"; then fail=true fi set -e cd core -sudo chown -R $USER target -sudo chown -R $USER ~/.cargo - if [ -n "$fail" ]; then exit 1 fi diff --git a/core/build-registrybox.sh b/core/build-registrybox.sh new file mode 100755 index 000000000..9db57dd80 --- /dev/null +++ b/core/build-registrybox.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +cd "$(dirname "${BASH_SOURCE[0]}")" + +set -e +shopt -s expand_aliases + +if [ -z "$ARCH" ]; then + ARCH=$(uname -m) +fi + +USE_TTY= +if tty -s; then + USE_TTY="-it" +fi + +cd .. +FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')" +RUSTFLAGS="" + +if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then + RUSTFLAGS="--cfg tokio_unstable" +fi + +alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl' + +set +e +fail= +echo "FEATURES=\"$FEATURES\"" +echo "RUSTFLAGS=\"$RUSTFLAGS\"" +if ! 
rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features cli,registry,$FEATURES --locked --bin registrybox --target=$ARCH-unknown-linux-musl && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"; then + fail=true +fi +set -e +cd core + +if [ -n "$fail" ]; then + exit 1 +fi diff --git a/core/build-startbox.sh b/core/build-startbox.sh new file mode 100755 index 000000000..55a455f09 --- /dev/null +++ b/core/build-startbox.sh @@ -0,0 +1,39 @@ +#!/bin/bash + +cd "$(dirname "${BASH_SOURCE[0]}")" + +set -e +shopt -s expand_aliases + +if [ -z "$ARCH" ]; then + ARCH=$(uname -m) +fi + +USE_TTY= +if tty -s; then + USE_TTY="-it" +fi + +cd .. +FEATURES="$(echo $ENVIRONMENT | sed 's/-/,/g')" +RUSTFLAGS="" + +if [[ "${ENVIRONMENT}" =~ (^|-)unstable($|-) ]]; then + RUSTFLAGS="--cfg tokio_unstable" +fi + +alias 'rust-musl-builder'='docker run $USE_TTY --rm -e "RUSTFLAGS=$RUSTFLAGS" -v "$HOME/.cargo/registry":/root/.cargo/registry -v "$HOME/.cargo/git":/root/.cargo/git -v "$(pwd)":/home/rust/src -w /home/rust/src -P messense/rust-musl-cross:$ARCH-musl' + +set +e +fail= +echo "FEATURES=\"$FEATURES\"" +echo "RUSTFLAGS=\"$RUSTFLAGS\"" +if ! 
rust-musl-builder sh -c "cd core && cargo build --release --no-default-features --features cli,daemon,$FEATURES --locked --bin startbox --target=$ARCH-unknown-linux-musl && chown -R $UID:$UID target && chown -R $UID:$UID /root/.cargo"; then + fail=true +fi +set -e +cd core + +if [ -n "$fail" ]; then + exit 1 +fi diff --git a/core/helpers/Cargo.toml b/core/helpers/Cargo.toml index 228f3ef54..9af19018e 100644 --- a/core/helpers/Cargo.toml +++ b/core/helpers/Cargo.toml @@ -11,7 +11,7 @@ futures = "0.3.28" lazy_async_pool = "0.3.3" models = { path = "../models" } pin-project = "1.1.3" -rpc-toolkit = "0.2.3" +rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git", branch = "refactor/no-dyn-ctx" } serde = { version = "1.0", features = ["derive", "rc"] } serde_json = "1.0" tokio = { version = "1", features = ["full"] } diff --git a/core/helpers/src/lib.rs b/core/helpers/src/lib.rs index d913aefee..80631fea2 100644 --- a/core/helpers/src/lib.rs +++ b/core/helpers/src/lib.rs @@ -50,7 +50,8 @@ pub async fn canonicalize( } let path = path.as_ref(); if tokio::fs::metadata(path).await.is_err() { - if let (Some(parent), Some(file_name)) = (path.parent(), path.file_name()) { + let parent = path.parent().unwrap_or(Path::new(".")); + if let Some(file_name) = path.file_name() { if create_parent && tokio::fs::metadata(parent).await.is_err() { return Ok(create_canonical_folder(parent).await?.join(file_name)); } else { diff --git a/core/helpers/src/script_dir.rs b/core/helpers/src/script_dir.rs index d90051899..5cedd419f 100644 --- a/core/helpers/src/script_dir.rs +++ b/core/helpers/src/script_dir.rs @@ -1,10 +1,14 @@ use std::path::{Path, PathBuf}; -use models::{PackageId, Version}; +use models::{PackageId, VersionString}; pub const PKG_SCRIPT_DIR: &str = "package-data/scripts"; -pub fn script_dir>(datadir: P, pkg_id: &PackageId, version: &Version) -> PathBuf { +pub fn script_dir>( + datadir: P, + pkg_id: &PackageId, + version: &VersionString, +) -> PathBuf { datadir 
.as_ref() .join(&*PKG_SCRIPT_DIR) diff --git a/core/install-cli.sh b/core/install-cli.sh index f4fe712ee..620600d92 100755 --- a/core/install-cli.sh +++ b/core/install-cli.sh @@ -12,4 +12,4 @@ if [ -z "$PLATFORM" ]; then export PLATFORM=$(uname -m) fi -cargo install --path=./startos --no-default-features --features=cli,docker --bin start-cli --locked +cargo install --path=./startos --no-default-features --features=cli,docker,registry --bin start-cli --locked diff --git a/core/models/Cargo.toml b/core/models/Cargo.toml index 3611f45d5..44295745d 100644 --- a/core/models/Cargo.toml +++ b/core/models/Cargo.toml @@ -6,12 +6,13 @@ edition = "2021" # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +axum = "0.7.5" base64 = "0.21.4" color-eyre = "0.6.2" ed25519-dalek = { version = "2.0.0", features = ["serde"] } lazy_static = "1.4" mbrman = "0.5.2" -emver = { version = "0.1", git = "https://github.com/Start9Labs/emver-rs.git", features = [ +exver = { version = "0.2.0", git = "https://github.com/Start9Labs/exver-rs.git", features = [ "serde", ] } ipnet = "2.8.0" @@ -22,8 +23,8 @@ patch-db = { version = "*", path = "../../patch-db/patch-db", features = [ ] } rand = "0.8.5" regex = "1.10.2" -reqwest = "0.11.22" -rpc-toolkit = "0.2.2" +reqwest = "0.12" +rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git", branch = "refactor/no-dyn-ctx" } serde = { version = "1.0", features = ["derive", "rc"] } serde_json = "1.0" sqlx = { version = "0.7.2", features = [ diff --git a/core/models/src/errors.rs b/core/models/src/errors.rs index 2362b6dba..ee6b0ae12 100644 --- a/core/models/src/errors.rs +++ b/core/models/src/errors.rs @@ -1,9 +1,10 @@ use std::fmt::{Debug, Display}; +use axum::http::uri::InvalidUri; +use axum::http::StatusCode; use color_eyre::eyre::eyre; use num_enum::TryFromPrimitive; use patch_db::Revision; -use rpc_toolkit::hyper::http::uri::InvalidUri; use rpc_toolkit::reqwest; use 
rpc_toolkit::yajrc::{ RpcError, INVALID_PARAMS_ERROR, INVALID_REQUEST_ERROR, METHOD_NOT_FOUND_ERROR, PARSE_ERROR, @@ -88,6 +89,7 @@ pub enum ErrorKind { Timeout = 71, Lxc = 72, Cancelled = 73, + Git = 74, } impl ErrorKind { pub fn as_str(&self) -> &'static str { @@ -166,6 +168,7 @@ impl ErrorKind { Timeout => "Timeout Error", Lxc => "LXC Error", Cancelled => "Cancelled", + Git => "Git Error", } } } @@ -207,6 +210,13 @@ impl Error { } } } +impl axum::response::IntoResponse for Error { + fn into_response(self) -> axum::response::Response { + let mut res = axum::Json(RpcError::from(self)).into_response(); + *res.status_mut() = StatusCode::INTERNAL_SERVER_ERROR; + res + } +} impl From for Error { fn from(value: std::convert::Infallible) -> Self { match value {} @@ -232,8 +242,8 @@ impl From for Error { Error::new(e, ErrorKind::Utf8) } } -impl From for Error { - fn from(e: emver::ParseError) -> Self { +impl From for Error { + fn from(e: exver::ParseError) -> Self { Error::new(e, ErrorKind::ParseVersion) } } @@ -480,6 +490,7 @@ where { fn with_kind(self, kind: ErrorKind) -> Result; fn with_ctx (ErrorKind, D), D: Display>(self, f: F) -> Result; + fn log_err(self) -> Option; } impl ResultExt for Result where @@ -506,6 +517,18 @@ where } }) } + + fn log_err(self) -> Option { + match self { + Ok(a) => Some(a), + Err(e) => { + let e: color_eyre::eyre::Error = e.into(); + tracing::error!("{e}"); + tracing::debug!("{e:?}"); + None + } + } + } } impl ResultExt for Result { fn with_kind(self, kind: ErrorKind) -> Result { @@ -529,6 +552,17 @@ impl ResultExt for Result { } }) } + + fn log_err(self) -> Option { + match self { + Ok(a) => Some(a), + Err(e) => { + tracing::error!("{e}"); + tracing::debug!("{e:?}"); + None + } + } + } } pub trait OptionExt diff --git a/core/models/src/id/image.rs b/core/models/src/id/image.rs index bbb0a601e..69a04f880 100644 --- a/core/models/src/id/image.rs +++ b/core/models/src/id/image.rs @@ -5,7 +5,7 @@ use std::str::FromStr; use 
serde::{Deserialize, Deserializer, Serialize}; use ts_rs::TS; -use crate::{Id, InvalidId, PackageId, Version}; +use crate::{Id, InvalidId, PackageId, VersionString}; #[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, TS)] #[ts(type = "string")] @@ -21,7 +21,7 @@ impl std::fmt::Display for ImageId { } } impl ImageId { - pub fn for_package(&self, pkg_id: &PackageId, pkg_version: Option<&Version>) -> String { + pub fn for_package(&self, pkg_id: &PackageId, pkg_version: Option<&VersionString>) -> String { format!( "start9/{}/{}:{}", pkg_id, diff --git a/core/models/src/id/mod.rs b/core/models/src/id/mod.rs index 11644c71d..85c9d8255 100644 --- a/core/models/src/id/mod.rs +++ b/core/models/src/id/mod.rs @@ -24,12 +24,17 @@ pub use service_interface::ServiceInterfaceId; pub use volume::VolumeId; lazy_static::lazy_static! { - static ref ID_REGEX: Regex = Regex::new("^[a-z]+(-[a-z]+)*$").unwrap(); + static ref ID_REGEX: Regex = Regex::new("^[a-z]+(-[a-z0-9]+)*$").unwrap(); pub static ref SYSTEM_ID: Id = Id(InternedString::intern("x_system")); } -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] +#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] pub struct Id(InternedString); +impl std::fmt::Debug for Id { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} impl TryFrom for Id { type Error = InvalidId; fn try_from(value: InternedString) -> Result { diff --git a/core/models/src/id/package.rs b/core/models/src/id/package.rs index d2665e59a..6e22b9d51 100644 --- a/core/models/src/id/package.rs +++ b/core/models/src/id/package.rs @@ -61,6 +61,11 @@ impl Borrow for PackageId { self.0.as_ref() } } +impl<'a> Borrow for &'a PackageId { + fn borrow(&self) -> &str { + self.0.as_ref() + } +} impl AsRef for PackageId { fn as_ref(&self) -> &Path { self.0.as_ref().as_ref() diff --git a/core/models/src/procedure_name.rs b/core/models/src/procedure_name.rs index c42068be3..466835818 100644 --- 
a/core/models/src/procedure_name.rs +++ b/core/models/src/procedure_name.rs @@ -1,11 +1,9 @@ use serde::{Deserialize, Serialize}; -use crate::ActionId; +use crate::{ActionId, PackageId}; #[derive(Debug, Clone, Serialize, Deserialize)] pub enum ProcedureName { - StartMain, - StopMain, GetConfig, SetConfig, CreateBackup, @@ -14,8 +12,8 @@ pub enum ProcedureName { ActionMetadata, RunAction(ActionId), GetAction(ActionId), - QueryDependency(ActionId), - UpdateDependency(ActionId), + QueryDependency(PackageId), + UpdateDependency(PackageId), Init, Uninit, } @@ -25,8 +23,6 @@ impl ProcedureName { match self { ProcedureName::Init => "/init".to_string(), ProcedureName::Uninit => "/uninit".to_string(), - ProcedureName::StartMain => "/main/start".to_string(), - ProcedureName::StopMain => "/main/stop".to_string(), ProcedureName::SetConfig => "/config/set".to_string(), ProcedureName::GetConfig => "/config/get".to_string(), ProcedureName::CreateBackup => "/backup/create".to_string(), diff --git a/core/models/src/version.rs b/core/models/src/version.rs index 1e4798ba1..f0c7b19ae 100644 --- a/core/models/src/version.rs +++ b/core/models/src/version.rs @@ -3,100 +3,109 @@ use std::ops::Deref; use std::str::FromStr; use serde::{Deserialize, Deserializer, Serialize, Serializer}; +use ts_rs::TS; -#[derive(Debug, Clone)] -pub struct Version { - version: emver::Version, +#[derive(Debug, Clone, TS)] +#[ts(type = "string", rename = "Version")] +pub struct VersionString { + version: exver::ExtendedVersion, string: String, } -impl Version { +impl VersionString { pub fn as_str(&self) -> &str { self.string.as_str() } - pub fn into_version(self) -> emver::Version { + pub fn into_version(self) -> exver::ExtendedVersion { self.version } } -impl std::fmt::Display for Version { +impl std::fmt::Display for VersionString { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.string) } } -impl std::str::FromStr for Version { - type Err = ::Err; +impl 
std::str::FromStr for VersionString { + type Err = ::Err; fn from_str(s: &str) -> Result { - Ok(Version { + Ok(VersionString { string: s.to_owned(), version: s.parse()?, }) } } -impl From for Version { - fn from(v: emver::Version) -> Self { - Version { +impl From for VersionString { + fn from(v: exver::ExtendedVersion) -> Self { + VersionString { string: v.to_string(), version: v, } } } -impl From for emver::Version { - fn from(v: Version) -> Self { +impl From for exver::ExtendedVersion { + fn from(v: VersionString) -> Self { v.version } } -impl Default for Version { +impl Default for VersionString { fn default() -> Self { - Self::from(emver::Version::default()) + Self::from(exver::ExtendedVersion::default()) } } -impl Deref for Version { - type Target = emver::Version; +impl Deref for VersionString { + type Target = exver::ExtendedVersion; fn deref(&self) -> &Self::Target { &self.version } } -impl AsRef for Version { - fn as_ref(&self) -> &emver::Version { +impl AsRef for VersionString { + fn as_ref(&self) -> &exver::ExtendedVersion { &self.version } } -impl AsRef for Version { +impl AsRef for VersionString { fn as_ref(&self) -> &str { self.as_str() } } -impl PartialEq for Version { - fn eq(&self, other: &Version) -> bool { +impl PartialEq for VersionString { + fn eq(&self, other: &VersionString) -> bool { self.version.eq(&other.version) } } -impl Eq for Version {} -impl PartialOrd for Version { +impl Eq for VersionString {} +impl PartialOrd for VersionString { fn partial_cmp(&self, other: &Self) -> Option { self.version.partial_cmp(&other.version) } } -impl Ord for Version { +impl Ord for VersionString { fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.version.cmp(&other.version) + self.version.partial_cmp(&other.version).unwrap_or_else(|| { + match (self.version.flavor(), other.version.flavor()) { + (None, Some(_)) => std::cmp::Ordering::Greater, + (Some(_), None) => std::cmp::Ordering::Less, + (a, b) => a.cmp(&b), + } + }) } } -impl Hash for Version { 
+impl Hash for VersionString { fn hash(&self, state: &mut H) { self.version.hash(state) } } -impl<'de> Deserialize<'de> for Version { +impl<'de> Deserialize<'de> for VersionString { fn deserialize(deserializer: D) -> Result where D: Deserializer<'de>, { let string = String::deserialize(deserializer)?; - let version = emver::Version::from_str(&string).map_err(::serde::de::Error::custom)?; + let version = + exver::ExtendedVersion::from_str(&string).map_err(::serde::de::Error::custom)?; Ok(Self { string, version }) } } -impl Serialize for Version { +impl Serialize for VersionString { fn serialize(&self, serializer: S) -> Result where S: Serializer, diff --git a/core/startos/.sqlx/query-1ce5254f27de971fd87f5ab66d300f2b22433c86617a0dbf796bf2170186dd2e.json b/core/startos/.sqlx/query-1ce5254f27de971fd87f5ab66d300f2b22433c86617a0dbf796bf2170186dd2e.json deleted file mode 100644 index d36100fef..000000000 --- a/core/startos/.sqlx/query-1ce5254f27de971fd87f5ab66d300f2b22433c86617a0dbf796bf2170186dd2e.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO network_keys (package, interface, key) VALUES ($1, $2, $3) ON CONFLICT (package, interface) DO NOTHING", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Bytea" - ] - }, - "nullable": [] - }, - "hash": "1ce5254f27de971fd87f5ab66d300f2b22433c86617a0dbf796bf2170186dd2e" -} diff --git a/core/startos/.sqlx/query-21471490cdc3adb206274cc68e1ea745ffa5da4479478c1fd2158a45324b1930.json b/core/startos/.sqlx/query-21471490cdc3adb206274cc68e1ea745ffa5da4479478c1fd2158a45324b1930.json deleted file mode 100644 index e0b1d7cf2..000000000 --- a/core/startos/.sqlx/query-21471490cdc3adb206274cc68e1ea745ffa5da4479478c1fd2158a45324b1930.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM ssh_keys WHERE fingerprint = $1", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": 
"21471490cdc3adb206274cc68e1ea745ffa5da4479478c1fd2158a45324b1930" -} diff --git a/core/startos/.sqlx/query-28ea34bbde836e0618c5fc9bb7c36e463c20c841a7d6a0eb15be0f24f4a928ec.json b/core/startos/.sqlx/query-28ea34bbde836e0618c5fc9bb7c36e463c20c841a7d6a0eb15be0f24f4a928ec.json deleted file mode 100644 index e234a72a9..000000000 --- a/core/startos/.sqlx/query-28ea34bbde836e0618c5fc9bb7c36e463c20c841a7d6a0eb15be0f24f4a928ec.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT hostname, path, username, password FROM cifs_shares WHERE id = $1", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "hostname", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "path", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "username", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "password", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Int4" - ] - }, - "nullable": [ - false, - false, - false, - true - ] - }, - "hash": "28ea34bbde836e0618c5fc9bb7c36e463c20c841a7d6a0eb15be0f24f4a928ec" -} diff --git a/core/startos/.sqlx/query-350ab82048fb4a049042e4fdbe1b8c606ca400e43e31b9a05d2937217e0f6962.json b/core/startos/.sqlx/query-350ab82048fb4a049042e4fdbe1b8c606ca400e43e31b9a05d2937217e0f6962.json deleted file mode 100644 index c451ce9f3..000000000 --- a/core/startos/.sqlx/query-350ab82048fb4a049042e4fdbe1b8c606ca400e43e31b9a05d2937217e0f6962.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM tor WHERE package = $1 AND interface = $2", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text" - ] - }, - "nullable": [] - }, - "hash": "350ab82048fb4a049042e4fdbe1b8c606ca400e43e31b9a05d2937217e0f6962" -} diff --git a/core/startos/.sqlx/query-4099028a5c0de578255bf54a67cef6cb0f1e9a4e158260700f1639dd4b438997.json b/core/startos/.sqlx/query-4099028a5c0de578255bf54a67cef6cb0f1e9a4e158260700f1639dd4b438997.json deleted file mode 100644 index 
761af064b..000000000 --- a/core/startos/.sqlx/query-4099028a5c0de578255bf54a67cef6cb0f1e9a4e158260700f1639dd4b438997.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT * FROM ssh_keys WHERE fingerprint = $1", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "fingerprint", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "openssh_pubkey", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "created_at", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false, - false, - false - ] - }, - "hash": "4099028a5c0de578255bf54a67cef6cb0f1e9a4e158260700f1639dd4b438997" -} diff --git a/core/startos/.sqlx/query-4691e3a2ce80b59009ac17124f54f925f61dc5ea371903e62cdffa5d7b67ca96.json b/core/startos/.sqlx/query-4691e3a2ce80b59009ac17124f54f925f61dc5ea371903e62cdffa5d7b67ca96.json deleted file mode 100644 index 1f7edd1ce..000000000 --- a/core/startos/.sqlx/query-4691e3a2ce80b59009ac17124f54f925f61dc5ea371903e62cdffa5d7b67ca96.json +++ /dev/null @@ -1,50 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT * FROM session WHERE logged_out IS NULL OR logged_out > CURRENT_TIMESTAMP", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "logged_in", - "type_info": "Timestamp" - }, - { - "ordinal": 2, - "name": "logged_out", - "type_info": "Timestamp" - }, - { - "ordinal": 3, - "name": "last_active", - "type_info": "Timestamp" - }, - { - "ordinal": 4, - "name": "user_agent", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "metadata", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - true, - false, - true, - false - ] - }, - "hash": "4691e3a2ce80b59009ac17124f54f925f61dc5ea371903e62cdffa5d7b67ca96" -} diff --git a/core/startos/.sqlx/query-4bcfbefb1eb3181343871a1cd7fc3afb81c2be5c681cfa8b4be0ce70610e9c3a.json 
b/core/startos/.sqlx/query-4bcfbefb1eb3181343871a1cd7fc3afb81c2be5c681cfa8b4be0ce70610e9c3a.json deleted file mode 100644 index 2157198e5..000000000 --- a/core/startos/.sqlx/query-4bcfbefb1eb3181343871a1cd7fc3afb81c2be5c681cfa8b4be0ce70610e9c3a.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "UPDATE session SET logged_out = CURRENT_TIMESTAMP WHERE id = $1", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": "4bcfbefb1eb3181343871a1cd7fc3afb81c2be5c681cfa8b4be0ce70610e9c3a" -} diff --git a/core/startos/.sqlx/query-629be61c3c341c131ddbbff0293a83dbc6afd07cae69d246987f62cf0cc35c2a.json b/core/startos/.sqlx/query-629be61c3c341c131ddbbff0293a83dbc6afd07cae69d246987f62cf0cc35c2a.json deleted file mode 100644 index 764cff84a..000000000 --- a/core/startos/.sqlx/query-629be61c3c341c131ddbbff0293a83dbc6afd07cae69d246987f62cf0cc35c2a.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT password FROM account", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "password", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "629be61c3c341c131ddbbff0293a83dbc6afd07cae69d246987f62cf0cc35c2a" -} diff --git a/core/startos/.sqlx/query-687688055e63d27123cdc89a5bbbd8361776290a9411d527eaf1fdb40bef399d.json b/core/startos/.sqlx/query-687688055e63d27123cdc89a5bbbd8361776290a9411d527eaf1fdb40bef399d.json deleted file mode 100644 index 2e8a9ee0e..000000000 --- a/core/startos/.sqlx/query-687688055e63d27123cdc89a5bbbd8361776290a9411d527eaf1fdb40bef399d.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT key FROM tor WHERE package = $1 AND interface = $2", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "key", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Text", - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": 
"687688055e63d27123cdc89a5bbbd8361776290a9411d527eaf1fdb40bef399d" -} diff --git a/core/startos/.sqlx/query-6d35ccf780fb2bb62586dd1d3df9c1550a41ee580dad3f49d35cb843ebef10ca.json b/core/startos/.sqlx/query-6d35ccf780fb2bb62586dd1d3df9c1550a41ee580dad3f49d35cb843ebef10ca.json deleted file mode 100644 index 3f859bd10..000000000 --- a/core/startos/.sqlx/query-6d35ccf780fb2bb62586dd1d3df9c1550a41ee580dad3f49d35cb843ebef10ca.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "UPDATE session SET last_active = CURRENT_TIMESTAMP WHERE id = $1 AND logged_out IS NULL OR logged_out > CURRENT_TIMESTAMP", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": "6d35ccf780fb2bb62586dd1d3df9c1550a41ee580dad3f49d35cb843ebef10ca" -} diff --git a/core/startos/.sqlx/query-770c1017734720453dc87b58c385b987c5af5807151ff71a59000014586752e0.json b/core/startos/.sqlx/query-770c1017734720453dc87b58c385b987c5af5807151ff71a59000014586752e0.json deleted file mode 100644 index cf3591e01..000000000 --- a/core/startos/.sqlx/query-770c1017734720453dc87b58c385b987c5af5807151ff71a59000014586752e0.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO network_keys (package, interface, key) VALUES ($1, $2, $3) ON CONFLICT (package, interface) DO UPDATE SET package = EXCLUDED.package RETURNING key", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "key", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Text", - "Text", - "Bytea" - ] - }, - "nullable": [ - false - ] - }, - "hash": "770c1017734720453dc87b58c385b987c5af5807151ff71a59000014586752e0" -} diff --git a/core/startos/.sqlx/query-7b64f032d507e8ffe37c41f4c7ad514a66c421a11ab04c26d89a7aa8f6b67210.json b/core/startos/.sqlx/query-7b64f032d507e8ffe37c41f4c7ad514a66c421a11ab04c26d89a7aa8f6b67210.json deleted file mode 100644 index 53fc6f066..000000000 --- 
a/core/startos/.sqlx/query-7b64f032d507e8ffe37c41f4c7ad514a66c421a11ab04c26d89a7aa8f6b67210.json +++ /dev/null @@ -1,65 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT id, package_id, created_at, code, level, title, message, data FROM notifications WHERE id < $1 ORDER BY id DESC LIMIT $2", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "package_id", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "created_at", - "type_info": "Timestamp" - }, - { - "ordinal": 3, - "name": "code", - "type_info": "Int4" - }, - { - "ordinal": 4, - "name": "level", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "title", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "message", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "data", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Int4", - "Int8" - ] - }, - "nullable": [ - false, - true, - false, - false, - false, - false, - false, - true - ] - }, - "hash": "7b64f032d507e8ffe37c41f4c7ad514a66c421a11ab04c26d89a7aa8f6b67210" -} diff --git a/core/startos/.sqlx/query-7c7a3549c997eb75bf964ea65fbb98a73045adf618696cd838d79203ef5383fb.json b/core/startos/.sqlx/query-7c7a3549c997eb75bf964ea65fbb98a73045adf618696cd838d79203ef5383fb.json deleted file mode 100644 index 245a838d8..000000000 --- a/core/startos/.sqlx/query-7c7a3549c997eb75bf964ea65fbb98a73045adf618696cd838d79203ef5383fb.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n INSERT INTO account (\n id,\n server_id,\n hostname,\n password,\n network_key,\n root_ca_key_pem,\n root_ca_cert_pem\n ) VALUES (\n 0, $1, $2, $3, $4, $5, $6\n ) ON CONFLICT (id) DO UPDATE SET\n server_id = EXCLUDED.server_id,\n hostname = EXCLUDED.hostname,\n password = EXCLUDED.password,\n network_key = EXCLUDED.network_key,\n root_ca_key_pem = EXCLUDED.root_ca_key_pem,\n root_ca_cert_pem = EXCLUDED.root_ca_cert_pem\n ", - "describe": { - "columns": 
[], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Bytea", - "Text", - "Text" - ] - }, - "nullable": [] - }, - "hash": "7c7a3549c997eb75bf964ea65fbb98a73045adf618696cd838d79203ef5383fb" -} diff --git a/core/startos/.sqlx/query-7e0649d839927e57fa03ee51a2c9f96a8bdb0fc97ee8a3c6df1069e1e2b98576.json b/core/startos/.sqlx/query-7e0649d839927e57fa03ee51a2c9f96a8bdb0fc97ee8a3c6df1069e1e2b98576.json deleted file mode 100644 index e3ce7957d..000000000 --- a/core/startos/.sqlx/query-7e0649d839927e57fa03ee51a2c9f96a8bdb0fc97ee8a3c6df1069e1e2b98576.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM tor WHERE package = $1", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": "7e0649d839927e57fa03ee51a2c9f96a8bdb0fc97ee8a3c6df1069e1e2b98576" -} diff --git a/core/startos/.sqlx/query-8951b9126fbf60dbb5997241e11e3526b70bccf3e407327917294a993bc17ed5.json b/core/startos/.sqlx/query-8951b9126fbf60dbb5997241e11e3526b70bccf3e407327917294a993bc17ed5.json deleted file mode 100644 index e39aebf69..000000000 --- a/core/startos/.sqlx/query-8951b9126fbf60dbb5997241e11e3526b70bccf3e407327917294a993bc17ed5.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO tor (package, interface, key) VALUES ($1, $2, $3) ON CONFLICT (package, interface) DO NOTHING", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Bytea" - ] - }, - "nullable": [] - }, - "hash": "8951b9126fbf60dbb5997241e11e3526b70bccf3e407327917294a993bc17ed5" -} diff --git a/core/startos/.sqlx/query-94d471bb374b4965c6cbedf8c17bbf6bea226d38efaf6559923c79a36d5ca08c.json b/core/startos/.sqlx/query-94d471bb374b4965c6cbedf8c17bbf6bea226d38efaf6559923c79a36d5ca08c.json deleted file mode 100644 index e7fe8d38c..000000000 --- a/core/startos/.sqlx/query-94d471bb374b4965c6cbedf8c17bbf6bea226d38efaf6559923c79a36d5ca08c.json +++ /dev/null @@ -1,64 +0,0 @@ -{ - 
"db_name": "PostgreSQL", - "query": "SELECT id, package_id, created_at, code, level, title, message, data FROM notifications ORDER BY id DESC LIMIT $1", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "package_id", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "created_at", - "type_info": "Timestamp" - }, - { - "ordinal": 3, - "name": "code", - "type_info": "Int4" - }, - { - "ordinal": 4, - "name": "level", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "title", - "type_info": "Text" - }, - { - "ordinal": 6, - "name": "message", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "data", - "type_info": "Text" - } - ], - "parameters": { - "Left": [ - "Int8" - ] - }, - "nullable": [ - false, - true, - false, - false, - false, - false, - false, - true - ] - }, - "hash": "94d471bb374b4965c6cbedf8c17bbf6bea226d38efaf6559923c79a36d5ca08c" -} diff --git a/core/startos/.sqlx/query-95c4ab4c645f3302568c6ff13d85ab58252362694cf0f56999bf60194d20583a.json b/core/startos/.sqlx/query-95c4ab4c645f3302568c6ff13d85ab58252362694cf0f56999bf60194d20583a.json deleted file mode 100644 index aadc0fc3a..000000000 --- a/core/startos/.sqlx/query-95c4ab4c645f3302568c6ff13d85ab58252362694cf0f56999bf60194d20583a.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT id, hostname, path, username, password FROM cifs_shares", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "hostname", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "path", - "type_info": "Text" - }, - { - "ordinal": 3, - "name": "username", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "password", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - false, - false, - true - ] - }, - "hash": "95c4ab4c645f3302568c6ff13d85ab58252362694cf0f56999bf60194d20583a" -} 
diff --git a/core/startos/.sqlx/query-a60d6e66719325b08dc4ecfacaf337527233c84eee758ac9be967906e5841d27.json b/core/startos/.sqlx/query-a60d6e66719325b08dc4ecfacaf337527233c84eee758ac9be967906e5841d27.json deleted file mode 100644 index c56a9ebd1..000000000 --- a/core/startos/.sqlx/query-a60d6e66719325b08dc4ecfacaf337527233c84eee758ac9be967906e5841d27.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM cifs_shares WHERE id = $1", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int4" - ] - }, - "nullable": [] - }, - "hash": "a60d6e66719325b08dc4ecfacaf337527233c84eee758ac9be967906e5841d27" -} diff --git a/core/startos/.sqlx/query-a6b0c8909a3a5d6d9156aebfb359424e6b5a1d1402e028219e21726f1ebd282e.json b/core/startos/.sqlx/query-a6b0c8909a3a5d6d9156aebfb359424e6b5a1d1402e028219e21726f1ebd282e.json deleted file mode 100644 index 86bd9250e..000000000 --- a/core/startos/.sqlx/query-a6b0c8909a3a5d6d9156aebfb359424e6b5a1d1402e028219e21726f1ebd282e.json +++ /dev/null @@ -1,32 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT fingerprint, openssh_pubkey, created_at FROM ssh_keys", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "fingerprint", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "openssh_pubkey", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "created_at", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - false - ] - }, - "hash": "a6b0c8909a3a5d6d9156aebfb359424e6b5a1d1402e028219e21726f1ebd282e" -} diff --git a/core/startos/.sqlx/query-b1147beaaabbed89f2ab8c1e13ec4393a9a8fde2833cf096af766a979d94dee6.json b/core/startos/.sqlx/query-b1147beaaabbed89f2ab8c1e13ec4393a9a8fde2833cf096af766a979d94dee6.json deleted file mode 100644 index c8ff84277..000000000 --- a/core/startos/.sqlx/query-b1147beaaabbed89f2ab8c1e13ec4393a9a8fde2833cf096af766a979d94dee6.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": 
"UPDATE cifs_shares SET hostname = $1, path = $2, username = $3, password = $4 WHERE id = $5", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Text", - "Int4" - ] - }, - "nullable": [] - }, - "hash": "b1147beaaabbed89f2ab8c1e13ec4393a9a8fde2833cf096af766a979d94dee6" -} diff --git a/core/startos/.sqlx/query-b203820ee1c553a4b246eac74b79bd10d5717b2a0ddecf22330b7d531aac7c5d.json b/core/startos/.sqlx/query-b203820ee1c553a4b246eac74b79bd10d5717b2a0ddecf22330b7d531aac7c5d.json deleted file mode 100644 index b76542db8..000000000 --- a/core/startos/.sqlx/query-b203820ee1c553a4b246eac74b79bd10d5717b2a0ddecf22330b7d531aac7c5d.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM network_keys WHERE package = $1", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [] - }, - "hash": "b203820ee1c553a4b246eac74b79bd10d5717b2a0ddecf22330b7d531aac7c5d" -} diff --git a/core/startos/.sqlx/query-b81592b3a74940ab56d41537484090d45cfa4c85168a587b1a41dc5393cccea1.json b/core/startos/.sqlx/query-b81592b3a74940ab56d41537484090d45cfa4c85168a587b1a41dc5393cccea1.json deleted file mode 100644 index e2e8a1620..000000000 --- a/core/startos/.sqlx/query-b81592b3a74940ab56d41537484090d45cfa4c85168a587b1a41dc5393cccea1.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "UPDATE account SET tor_key = NULL, network_key = gen_random_bytes(32)", - "describe": { - "columns": [], - "parameters": { - "Left": [] - }, - "nullable": [] - }, - "hash": "b81592b3a74940ab56d41537484090d45cfa4c85168a587b1a41dc5393cccea1" -} diff --git a/core/startos/.sqlx/query-bc9382d34bf93f468c64d0d02613452e7a69768da179e78479cd35ee42b493ae.json b/core/startos/.sqlx/query-bc9382d34bf93f468c64d0d02613452e7a69768da179e78479cd35ee42b493ae.json new file mode 100644 index 000000000..d5fae12b7 --- /dev/null +++ 
b/core/startos/.sqlx/query-bc9382d34bf93f468c64d0d02613452e7a69768da179e78479cd35ee42b493ae.json @@ -0,0 +1,16 @@ +{ + "db_name": "PostgreSQL", + "query": "INSERT INTO user_activity (created_at, server_id, arch) VALUES ($1, $2, $3)", + "describe": { + "columns": [], + "parameters": { + "Left": [ + "Timestamptz", + "Varchar", + "Varchar" + ] + }, + "nullable": [] + }, + "hash": "bc9382d34bf93f468c64d0d02613452e7a69768da179e78479cd35ee42b493ae" +} diff --git a/core/startos/.sqlx/query-d5117054072476377f3c4f040ea429d4c9b2cf534e76f35c80a2bf60e8599cca.json b/core/startos/.sqlx/query-d5117054072476377f3c4f040ea429d4c9b2cf534e76f35c80a2bf60e8599cca.json deleted file mode 100644 index b77ba7ce9..000000000 --- a/core/startos/.sqlx/query-d5117054072476377f3c4f040ea429d4c9b2cf534e76f35c80a2bf60e8599cca.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT openssh_pubkey FROM ssh_keys", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "openssh_pubkey", - "type_info": "Text" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "d5117054072476377f3c4f040ea429d4c9b2cf534e76f35c80a2bf60e8599cca" -} diff --git a/core/startos/.sqlx/query-da71f94b29798d1738d2b10b9a721ea72db8cfb362e7181c8226d9297507c62b.json b/core/startos/.sqlx/query-da71f94b29798d1738d2b10b9a721ea72db8cfb362e7181c8226d9297507c62b.json deleted file mode 100644 index 5c5c89c27..000000000 --- a/core/startos/.sqlx/query-da71f94b29798d1738d2b10b9a721ea72db8cfb362e7181c8226d9297507c62b.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO notifications (package_id, code, level, title, message, data) VALUES ($1, $2, $3, $4, $5, $6)", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Int4", - "Text", - "Text", - "Text", - "Text" - ] - }, - "nullable": [] - }, - "hash": "da71f94b29798d1738d2b10b9a721ea72db8cfb362e7181c8226d9297507c62b" -} diff --git 
a/core/startos/.sqlx/query-dfc23b7e966c3853284753a7e934351ba0cae3825988b3e0ecd3b6781bcff524.json b/core/startos/.sqlx/query-dfc23b7e966c3853284753a7e934351ba0cae3825988b3e0ecd3b6781bcff524.json deleted file mode 100644 index 2fc8ad1ba..000000000 --- a/core/startos/.sqlx/query-dfc23b7e966c3853284753a7e934351ba0cae3825988b3e0ecd3b6781bcff524.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM network_keys WHERE package = $1 AND interface = $2", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text" - ] - }, - "nullable": [] - }, - "hash": "dfc23b7e966c3853284753a7e934351ba0cae3825988b3e0ecd3b6781bcff524" -} diff --git a/core/startos/.sqlx/query-e185203cf84e43b801dfb23b4159e34aeaef1154dcd3d6811ab504915497ccf7.json b/core/startos/.sqlx/query-e185203cf84e43b801dfb23b4159e34aeaef1154dcd3d6811ab504915497ccf7.json deleted file mode 100644 index a4dc187cd..000000000 --- a/core/startos/.sqlx/query-e185203cf84e43b801dfb23b4159e34aeaef1154dcd3d6811ab504915497ccf7.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM notifications WHERE id = $1", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int4" - ] - }, - "nullable": [] - }, - "hash": "e185203cf84e43b801dfb23b4159e34aeaef1154dcd3d6811ab504915497ccf7" -} diff --git a/core/startos/.sqlx/query-e545696735f202f9d13cf22a561f3ff3f9aed7f90027a9ba97634bcb47d772f0.json b/core/startos/.sqlx/query-e545696735f202f9d13cf22a561f3ff3f9aed7f90027a9ba97634bcb47d772f0.json deleted file mode 100644 index 97a4ec95a..000000000 --- a/core/startos/.sqlx/query-e545696735f202f9d13cf22a561f3ff3f9aed7f90027a9ba97634bcb47d772f0.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT tor_key FROM account WHERE id = 0", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "tor_key", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - true - ] - }, - "hash": 
"e545696735f202f9d13cf22a561f3ff3f9aed7f90027a9ba97634bcb47d772f0" -} diff --git a/core/startos/.sqlx/query-e5843c5b0e7819b29aa1abf2266799bd4f82e761837b526a0972c3d4439a264d.json b/core/startos/.sqlx/query-e5843c5b0e7819b29aa1abf2266799bd4f82e761837b526a0972c3d4439a264d.json deleted file mode 100644 index b2aa04370..000000000 --- a/core/startos/.sqlx/query-e5843c5b0e7819b29aa1abf2266799bd4f82e761837b526a0972c3d4439a264d.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO session (id, user_agent, metadata) VALUES ($1, $2, $3)", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Text" - ] - }, - "nullable": [] - }, - "hash": "e5843c5b0e7819b29aa1abf2266799bd4f82e761837b526a0972c3d4439a264d" -} diff --git a/core/startos/.sqlx/query-e95322a8e2ae3b93f1e974b24c0b81803f1e9ec9e8ebbf15cafddfc1c5a028ed.json b/core/startos/.sqlx/query-e95322a8e2ae3b93f1e974b24c0b81803f1e9ec9e8ebbf15cafddfc1c5a028ed.json deleted file mode 100644 index fd5a467ec..000000000 --- a/core/startos/.sqlx/query-e95322a8e2ae3b93f1e974b24c0b81803f1e9ec9e8ebbf15cafddfc1c5a028ed.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "\n SELECT\n network_keys.package,\n network_keys.interface,\n network_keys.key,\n tor.key AS \"tor_key?\"\n FROM\n network_keys\n LEFT JOIN\n tor\n ON\n network_keys.package = tor.package\n AND\n network_keys.interface = tor.interface\n WHERE\n network_keys.package = $1\n ", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "package", - "type_info": "Text" - }, - { - "ordinal": 1, - "name": "interface", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "key", - "type_info": "Bytea" - }, - { - "ordinal": 3, - "name": "tor_key?", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [ - "Text" - ] - }, - "nullable": [ - false, - false, - false, - false - ] - }, - "hash": "e95322a8e2ae3b93f1e974b24c0b81803f1e9ec9e8ebbf15cafddfc1c5a028ed" -} diff --git 
a/core/startos/.sqlx/query-eb750adaa305bdbf3c5b70aaf59139c7b7569602adb58f2d6b3a94da4f167b0a.json b/core/startos/.sqlx/query-eb750adaa305bdbf3c5b70aaf59139c7b7569602adb58f2d6b3a94da4f167b0a.json deleted file mode 100644 index fb8a7c1e5..000000000 --- a/core/startos/.sqlx/query-eb750adaa305bdbf3c5b70aaf59139c7b7569602adb58f2d6b3a94da4f167b0a.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "DELETE FROM notifications WHERE id < $1", - "describe": { - "columns": [], - "parameters": { - "Left": [ - "Int4" - ] - }, - "nullable": [] - }, - "hash": "eb750adaa305bdbf3c5b70aaf59139c7b7569602adb58f2d6b3a94da4f167b0a" -} diff --git a/core/startos/.sqlx/query-ecc765d8205c0876956f95f76944ac6a5f34dd820c4073b7728c7067aab9fded.json b/core/startos/.sqlx/query-ecc765d8205c0876956f95f76944ac6a5f34dd820c4073b7728c7067aab9fded.json deleted file mode 100644 index 27c9752b2..000000000 --- a/core/startos/.sqlx/query-ecc765d8205c0876956f95f76944ac6a5f34dd820c4073b7728c7067aab9fded.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO cifs_shares (hostname, path, username, password) VALUES ($1, $2, $3, $4) RETURNING id", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - } - ], - "parameters": { - "Left": [ - "Text", - "Text", - "Text", - "Text" - ] - }, - "nullable": [ - false - ] - }, - "hash": "ecc765d8205c0876956f95f76944ac6a5f34dd820c4073b7728c7067aab9fded" -} diff --git a/core/startos/.sqlx/query-f6d1c5ef0f9d9577bea8382318967b9deb46da75788c7fe6082b43821c22d556.json b/core/startos/.sqlx/query-f6d1c5ef0f9d9577bea8382318967b9deb46da75788c7fe6082b43821c22d556.json deleted file mode 100644 index 6ed9898f6..000000000 --- a/core/startos/.sqlx/query-f6d1c5ef0f9d9577bea8382318967b9deb46da75788c7fe6082b43821c22d556.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "INSERT INTO ssh_keys (fingerprint, openssh_pubkey, created_at) VALUES ($1, $2, $3)", - "describe": { - 
"columns": [], - "parameters": { - "Left": [ - "Text", - "Text", - "Text" - ] - }, - "nullable": [] - }, - "hash": "f6d1c5ef0f9d9577bea8382318967b9deb46da75788c7fe6082b43821c22d556" -} diff --git a/core/startos/.sqlx/query-f7d2dae84613bcef330f7403352cc96547f3f6dbec11bf2eadfaf53ad8ab51b5.json b/core/startos/.sqlx/query-f7d2dae84613bcef330f7403352cc96547f3f6dbec11bf2eadfaf53ad8ab51b5.json deleted file mode 100644 index f48ccb074..000000000 --- a/core/startos/.sqlx/query-f7d2dae84613bcef330f7403352cc96547f3f6dbec11bf2eadfaf53ad8ab51b5.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT network_key FROM account WHERE id = 0", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "network_key", - "type_info": "Bytea" - } - ], - "parameters": { - "Left": [] - }, - "nullable": [ - false - ] - }, - "hash": "f7d2dae84613bcef330f7403352cc96547f3f6dbec11bf2eadfaf53ad8ab51b5" -} diff --git a/core/startos/.sqlx/query-fe6e4f09f3028e5b6b6259e86cbad285680ce157aae9d7837ac020c8b2945e7f.json b/core/startos/.sqlx/query-fe6e4f09f3028e5b6b6259e86cbad285680ce157aae9d7837ac020c8b2945e7f.json deleted file mode 100644 index 6ef1d5023..000000000 --- a/core/startos/.sqlx/query-fe6e4f09f3028e5b6b6259e86cbad285680ce157aae9d7837ac020c8b2945e7f.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "db_name": "PostgreSQL", - "query": "SELECT * FROM account WHERE id = 0", - "describe": { - "columns": [ - { - "ordinal": 0, - "name": "id", - "type_info": "Int4" - }, - { - "ordinal": 1, - "name": "password", - "type_info": "Text" - }, - { - "ordinal": 2, - "name": "tor_key", - "type_info": "Bytea" - }, - { - "ordinal": 3, - "name": "server_id", - "type_info": "Text" - }, - { - "ordinal": 4, - "name": "hostname", - "type_info": "Text" - }, - { - "ordinal": 5, - "name": "network_key", - "type_info": "Bytea" - }, - { - "ordinal": 6, - "name": "root_ca_key_pem", - "type_info": "Text" - }, - { - "ordinal": 7, - "name": "root_ca_cert_pem", - "type_info": "Text" - } - ], - 
"parameters": { - "Left": [] - }, - "nullable": [ - false, - false, - true, - true, - true, - false, - false, - false - ] - }, - "hash": "fe6e4f09f3028e5b6b6259e86cbad285680ce157aae9d7837ac020c8b2945e7f" -} diff --git a/core/startos/Cargo.toml b/core/startos/Cargo.toml index d9d4a4e36..1e1cd4737 100644 --- a/core/startos/Cargo.toml +++ b/core/startos/Cargo.toml @@ -14,7 +14,7 @@ keywords = [ name = "start-os" readme = "README.md" repository = "https://github.com/Start9Labs/start-os" -version = "0.3.5-rev.2" +version = "0.3.6-alpha.3" license = "MIT" [lib] @@ -22,7 +22,7 @@ name = "startos" path = "src/lib.rs" [[bin]] -name = "containerbox" +name = "startbox" path = "src/main.rs" [[bin]] @@ -30,13 +30,18 @@ name = "start-cli" path = "src/main.rs" [[bin]] -name = "startbox" +name = "containerbox" +path = "src/main.rs" + +[[bin]] +name = "registrybox" path = "src/main.rs" [features] cli = [] container-runtime = [] daemon = [] +registry = [] default = ["cli", "daemon"] dev = [] unstable = ["console-subscriber", "tokio/tracing"] @@ -54,20 +59,22 @@ async-stream = "0.3.5" async-trait = "0.1.74" axum = { version = "0.7.3", features = ["ws"] } axum-server = "0.6.0" -base32 = "0.4.0" -base64 = "0.21.4" +barrage = "0.2.3" +backhand = "0.18.0" +base32 = "0.5.0" +base64 = "0.22.1" base64ct = "1.6.0" basic-cookies = "0.1.4" -blake3 = "1.5.0" +blake3 = { version = "1.5.0", features = ["mmap", "rayon"] } bytes = "1" chrono = { version = "0.4.31", features = ["serde"] } clap = "4.4.12" color-eyre = "0.6.2" console = "0.15.7" -console-subscriber = { version = "0.2", optional = true } +console-subscriber = { version = "0.3.0", optional = true } cookie = "0.18.0" -cookie_store = "0.20.0" -current_platform = "0.2.0" +cookie_store = "0.21.0" +der = { version = "0.7.9", features = ["derive", "pem"] } digest = "0.10.7" divrem = "1.0.0" ed25519 = { version = "2.2.3", features = ["pkcs8", "pem", "alloc"] } @@ -79,30 +86,38 @@ ed25519-dalek = { version = "2.1.1", features = [ "pkcs8", ] } 
ed25519-dalek-v1 = { package = "ed25519-dalek", version = "1" } -emver = { version = "0.1.7", git = "https://github.com/Start9Labs/emver-rs.git", features = [ +exver = { version = "0.2.0", git = "https://github.com/Start9Labs/exver-rs.git", features = [ "serde", ] } fd-lock-rs = "0.1.4" +form_urlencoded = "1.2.1" futures = "0.3.28" gpt = "3.1.0" helpers = { path = "../helpers" } hex = "0.4.3" hmac = "0.12.1" http = "1.0.0" +http-body-util = "0.1" +hyper-util = { version = "0.1.5", features = [ + "tokio", + "service", + "http1", + "http2", +] } id-pool = { version = "0.2.2", default-features = false, features = [ "serde", "u16", ] } -imbl = "2.0.2" +imbl = "2.0.3" imbl-value = { git = "https://github.com/Start9Labs/imbl-value.git" } -include_dir = "0.7.3" +include_dir = { version = "0.7.3", features = ["metadata"] } indexmap = { version = "2.0.2", features = ["serde"] } indicatif = { version = "0.17.7", features = ["tokio"] } integer-encoding = { version = "4.0.0", features = ["tokio_async"] } ipnet = { version = "2.8.0", features = ["serde"] } iprange = { version = "0.6.7", features = ["serde"] } isocountry = "0.3.2" -itertools = "0.12.0" +itertools = "0.13.0" jaq-core = "0.10.1" jaq-std = "0.10.0" josekit = "0.8.4" @@ -115,10 +130,11 @@ log = "0.4.20" mbrman = "0.5.2" models = { version = "*", path = "../models" } new_mime_guess = "4" -nix = { version = "0.27.1", features = ["user", "process", "signal", "fs"] } +nix = { version = "0.29.0", features = ["user", "process", "signal", "fs"] } nom = "7.1.3" num = "0.4.1" num_enum = "0.7.0" +num_cpus = "1.16.0" once_cell = "1.19.0" openssh-keys = "0.6.2" openssl = { version = "0.10.57", features = ["vendored"] } @@ -131,13 +147,13 @@ pin-project = "1.1.3" pkcs8 = { version = "0.10.2", features = ["std"] } prettytable-rs = "0.10.0" proptest = "1.3.1" -proptest-derive = "0.4.0" +proptest-derive = "0.5.0" rand = { version = "0.8.5", features = ["std"] } regex = "1.10.2" -reqwest = { version = "0.11.23", features = 
["stream", "json", "socks"] } -reqwest_cookie_store = "0.6.0" +reqwest = { version = "0.12.4", features = ["stream", "json", "socks"] } +reqwest_cookie_store = "0.8.0" rpassword = "7.2.0" -rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git", branch = "refactor/traits" } +rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git", branch = "refactor/no-dyn-ctx" } rust-argon2 = "2.0.0" rustyline-async = "0.4.1" semver = { version = "1.0.20", features = ["serde"] } @@ -145,11 +161,13 @@ serde = { version = "1.0", features = ["derive", "rc"] } serde_cbor = { package = "ciborium", version = "0.2.1" } serde_json = "1.0" serde_toml = { package = "toml", version = "0.8.2" } +serde_urlencoded = "0.7" serde_with = { version = "3.4.0", features = ["macros", "json"] } -serde_yaml = "0.9.25" +serde_yaml = { package = "serde_yml", version = "0.0.10" } sha2 = "0.10.2" shell-words = "1" simple-logging = "2.0.2" +socket2 = "0.5.7" sqlx = { version = "0.7.2", features = [ "chrono", "runtime-tokio-rustls", @@ -157,19 +175,20 @@ sqlx = { version = "0.7.2", features = [ ] } sscanf = "0.4.1" ssh-key = { version = "0.6.2", features = ["ed25519"] } -stderrlog = "0.5.4" tar = "0.4.40" thiserror = "1.0.49" -tokio = { version = "1", features = ["full"] } -tokio-rustls = "0.25.0" +textwrap = "0.16.1" +tokio = { version = "1.38.1", features = ["full"] } +tokio-rustls = "0.26.0" tokio-socks = "0.5.1" tokio-stream = { version = "0.1.14", features = ["io-util", "sync", "net"] } tokio-tar = { git = "https://github.com/dr-bonez/tokio-tar.git" } -tokio-tungstenite = { version = "0.21.0", features = ["native-tls"] } +tokio-tungstenite = { version = "0.23.1", features = ["native-tls", "url"] } tokio-util = { version = "0.7.9", features = ["io"] } torut = { git = "https://github.com/Start9Labs/torut.git", branch = "update/dependencies", features = [ "serialize", ] } +tower-service = "0.3.2" tracing = "0.1.39" tracing-error = "0.2.0" tracing-futures = "0.2.5" diff --git 
a/core/startos/bindings/ActionMetadata.ts b/core/startos/bindings/ActionMetadata.ts deleted file mode 100644 index c9373a5b8..000000000 --- a/core/startos/bindings/ActionMetadata.ts +++ /dev/null @@ -1,12 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { AllowedStatuses } from "./AllowedStatuses"; - -export type ActionMetadata = { - name: string; - description: string; - warning: string | null; - input: any; - disabled: boolean; - allowedStatuses: AllowedStatuses; - group: string | null; -}; diff --git a/core/startos/bindings/AddressInfo.ts b/core/startos/bindings/AddressInfo.ts deleted file mode 100644 index 1355f72a2..000000000 --- a/core/startos/bindings/AddressInfo.ts +++ /dev/null @@ -1,10 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { BindOptions } from "./BindOptions"; -import type { HostId } from "./HostId"; - -export type AddressInfo = { - username: string | null; - hostId: HostId; - bindOptions: BindOptions; - suffix: string; -}; diff --git a/core/startos/bindings/BindInfo.ts b/core/startos/bindings/BindInfo.ts deleted file mode 100644 index 8bda6d37e..000000000 --- a/core/startos/bindings/BindInfo.ts +++ /dev/null @@ -1,4 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { BindOptions } from "./BindOptions"; - -export type BindInfo = { options: BindOptions; assignedLanPort: number | null }; diff --git a/core/startos/bindings/BindOptions.ts b/core/startos/bindings/BindOptions.ts deleted file mode 100644 index 6b12139a7..000000000 --- a/core/startos/bindings/BindOptions.ts +++ /dev/null @@ -1,10 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { AddSslOptions } from "./AddSslOptions"; -import type { Security } from "./Security"; - -export type BindOptions = { - scheme: string | null; - preferredExternalPort: number; - addSsl: AddSslOptions | null; - secure: Security | null; -}; diff --git a/core/startos/bindings/BindParams.ts b/core/startos/bindings/BindParams.ts deleted file mode 100644 index 4aa78e522..000000000 --- a/core/startos/bindings/BindParams.ts +++ /dev/null @@ -1,15 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { AddSslOptions } from "./AddSslOptions"; -import type { HostId } from "./HostId"; -import type { HostKind } from "./HostKind"; -import type { Security } from "./Security"; - -export type BindParams = { - kind: HostKind; - id: HostId; - internalPort: number; - scheme: string | null; - preferredExternalPort: number; - addSsl: AddSslOptions | null; - secure: Security | null; -}; diff --git a/core/startos/bindings/ChrootParams.ts b/core/startos/bindings/ChrootParams.ts deleted file mode 100644 index 9ee6e8959..000000000 --- a/core/startos/bindings/ChrootParams.ts +++ /dev/null @@ -1,10 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type ChrootParams = { - env: string | null; - workdir: string | null; - user: string | null; - path: string; - command: string; - args: string[]; -}; diff --git a/core/startos/bindings/DepInfo.ts b/core/startos/bindings/DepInfo.ts deleted file mode 100644 index 3c01e0939..000000000 --- a/core/startos/bindings/DepInfo.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
- -export type DepInfo = { description: string | null; optional: boolean }; diff --git a/core/startos/bindings/Dependencies.ts b/core/startos/bindings/Dependencies.ts deleted file mode 100644 index 974495b7b..000000000 --- a/core/startos/bindings/Dependencies.ts +++ /dev/null @@ -1,5 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { DepInfo } from "./DepInfo"; -import type { PackageId } from "./PackageId"; - -export type Dependencies = { [key: PackageId]: DepInfo }; diff --git a/core/startos/bindings/DependencyConfigErrors.ts b/core/startos/bindings/DependencyConfigErrors.ts deleted file mode 100644 index 5f2246fa9..000000000 --- a/core/startos/bindings/DependencyConfigErrors.ts +++ /dev/null @@ -1,4 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { PackageId } from "./PackageId"; - -export type DependencyConfigErrors = { [key: PackageId]: string }; diff --git a/core/startos/bindings/DependencyRequirement.ts b/core/startos/bindings/DependencyRequirement.ts deleted file mode 100644 index e6224ce48..000000000 --- a/core/startos/bindings/DependencyRequirement.ts +++ /dev/null @@ -1,11 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type DependencyRequirement = - | { - kind: "running"; - id: string; - healthChecks: string[]; - versionSpec: string; - registryUrl: string; - } - | { kind: "exists"; id: string; versionSpec: string; registryUrl: string }; diff --git a/core/startos/bindings/Description.ts b/core/startos/bindings/Description.ts deleted file mode 100644 index 918bd09c5..000000000 --- a/core/startos/bindings/Description.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
- -export type Description = { short: string; long: string }; diff --git a/core/startos/bindings/ExecuteAction.ts b/core/startos/bindings/ExecuteAction.ts deleted file mode 100644 index aaa340747..000000000 --- a/core/startos/bindings/ExecuteAction.ts +++ /dev/null @@ -1,7 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type ExecuteAction = { - serviceId: string | null; - actionId: string; - input: any; -}; diff --git a/core/startos/bindings/ExportActionParams.ts b/core/startos/bindings/ExportActionParams.ts deleted file mode 100644 index 4961c4a11..000000000 --- a/core/startos/bindings/ExportActionParams.ts +++ /dev/null @@ -1,4 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { ActionMetadata } from "./ActionMetadata"; - -export type ExportActionParams = { id: string; metadata: ActionMetadata }; diff --git a/core/startos/bindings/ExportServiceInterfaceParams.ts b/core/startos/bindings/ExportServiceInterfaceParams.ts deleted file mode 100644 index 93deb0ce5..000000000 --- a/core/startos/bindings/ExportServiceInterfaceParams.ts +++ /dev/null @@ -1,14 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { AddressInfo } from "./AddressInfo"; -import type { ServiceInterfaceType } from "./ServiceInterfaceType"; - -export type ExportServiceInterfaceParams = { - id: string; - name: string; - description: string; - hasPrimary: boolean; - disabled: boolean; - masked: boolean; - addressInfo: AddressInfo; - type: ServiceInterfaceType; -}; diff --git a/core/startos/bindings/ExportedHostInfo.ts b/core/startos/bindings/ExportedHostInfo.ts deleted file mode 100644 index d8339a074..000000000 --- a/core/startos/bindings/ExportedHostInfo.ts +++ /dev/null @@ -1,10 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { ExportedHostnameInfo } from "./ExportedHostnameInfo"; -import type { HostId } from "./HostId"; -import type { HostKind } from "./HostKind"; - -export type ExportedHostInfo = { - id: HostId; - kind: HostKind; - hostnames: Array; -}; diff --git a/core/startos/bindings/ExportedHostnameInfo.ts b/core/startos/bindings/ExportedHostnameInfo.ts deleted file mode 100644 index 23cbdd487..000000000 --- a/core/startos/bindings/ExportedHostnameInfo.ts +++ /dev/null @@ -1,12 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { ExportedIpHostname } from "./ExportedIpHostname"; -import type { ExportedOnionHostname } from "./ExportedOnionHostname"; - -export type ExportedHostnameInfo = - | { - kind: "ip"; - networkInterfaceId: string; - public: boolean; - hostname: ExportedIpHostname; - } - | { kind: "onion"; hostname: ExportedOnionHostname }; diff --git a/core/startos/bindings/ExportedOnionHostname.ts b/core/startos/bindings/ExportedOnionHostname.ts deleted file mode 100644 index af072289f..000000000 --- a/core/startos/bindings/ExportedOnionHostname.ts +++ /dev/null @@ -1,7 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
- -export type ExportedOnionHostname = { - value: string; - port: number | null; - sslPort: number | null; -}; diff --git a/core/startos/bindings/ExposeForDependentsParams.ts b/core/startos/bindings/ExposeForDependentsParams.ts deleted file mode 100644 index 714771c1e..000000000 --- a/core/startos/bindings/ExposeForDependentsParams.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type ExposeForDependentsParams = { paths: string[] }; diff --git a/core/startos/bindings/GetHostInfoParams.ts b/core/startos/bindings/GetHostInfoParams.ts deleted file mode 100644 index 7c2b1b6b6..000000000 --- a/core/startos/bindings/GetHostInfoParams.ts +++ /dev/null @@ -1,10 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { Callback } from "./Callback"; -import type { GetHostInfoParamsKind } from "./GetHostInfoParamsKind"; - -export type GetHostInfoParams = { - kind: GetHostInfoParamsKind | null; - serviceInterfaceId: string; - packageId: string | null; - callback: Callback; -}; diff --git a/core/startos/bindings/GetPrimaryUrlParams.ts b/core/startos/bindings/GetPrimaryUrlParams.ts deleted file mode 100644 index c5aaa0ebe..000000000 --- a/core/startos/bindings/GetPrimaryUrlParams.ts +++ /dev/null @@ -1,8 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { Callback } from "./Callback"; - -export type GetPrimaryUrlParams = { - packageId: string | null; - serviceInterfaceId: string; - callback: Callback; -}; diff --git a/core/startos/bindings/GetServiceInterfaceParams.ts b/core/startos/bindings/GetServiceInterfaceParams.ts deleted file mode 100644 index 03990c10b..000000000 --- a/core/startos/bindings/GetServiceInterfaceParams.ts +++ /dev/null @@ -1,8 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { Callback } from "./Callback"; - -export type GetServiceInterfaceParams = { - packageId: string | null; - serviceInterfaceId: string; - callback: Callback; -}; diff --git a/core/startos/bindings/GetSslCertificateParams.ts b/core/startos/bindings/GetSslCertificateParams.ts deleted file mode 100644 index a1fd17bdd..000000000 --- a/core/startos/bindings/GetSslCertificateParams.ts +++ /dev/null @@ -1,8 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { Algorithm } from "./Algorithm"; - -export type GetSslCertificateParams = { - packageId: string | null; - hostId: string; - algorithm: Algorithm | null; -}; diff --git a/core/startos/bindings/GetSslKeyParams.ts b/core/startos/bindings/GetSslKeyParams.ts deleted file mode 100644 index 8c5170c9c..000000000 --- a/core/startos/bindings/GetSslKeyParams.ts +++ /dev/null @@ -1,8 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { Algorithm } from "./Algorithm"; - -export type GetSslKeyParams = { - packageId: string | null; - hostId: string; - algorithm: Algorithm | null; -}; diff --git a/core/startos/bindings/GetStoreParams.ts b/core/startos/bindings/GetStoreParams.ts deleted file mode 100644 index bfd97377b..000000000 --- a/core/startos/bindings/GetStoreParams.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type GetStoreParams = { packageId: string | null; path: string }; diff --git a/core/startos/bindings/GetSystemSmtpParams.ts b/core/startos/bindings/GetSystemSmtpParams.ts deleted file mode 100644 index b96b9f595..000000000 --- a/core/startos/bindings/GetSystemSmtpParams.ts +++ /dev/null @@ -1,4 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { Callback } from "./Callback"; - -export type GetSystemSmtpParams = { callback: Callback }; diff --git a/core/startos/bindings/Host.ts b/core/startos/bindings/Host.ts deleted file mode 100644 index 4188cb404..000000000 --- a/core/startos/bindings/Host.ts +++ /dev/null @@ -1,11 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { BindInfo } from "./BindInfo"; -import type { HostAddress } from "./HostAddress"; -import type { HostKind } from "./HostKind"; - -export type Host = { - kind: HostKind; - bindings: { [key: number]: BindInfo }; - addresses: Array; - primary: HostAddress | null; -}; diff --git a/core/startos/bindings/HostInfo.ts b/core/startos/bindings/HostInfo.ts deleted file mode 100644 index b69dbf6b4..000000000 --- a/core/startos/bindings/HostInfo.ts +++ /dev/null @@ -1,5 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { Host } from "./Host"; -import type { HostId } from "./HostId"; - -export type HostInfo = { [key: HostId]: Host }; diff --git a/core/startos/bindings/InstalledState.ts b/core/startos/bindings/InstalledState.ts deleted file mode 100644 index 053c3ae66..000000000 --- a/core/startos/bindings/InstalledState.ts +++ /dev/null @@ -1,4 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { Manifest } from "./Manifest"; - -export type InstalledState = { manifest: Manifest }; diff --git a/core/startos/bindings/MainStatus.ts b/core/startos/bindings/MainStatus.ts deleted file mode 100644 index 878213087..000000000 --- a/core/startos/bindings/MainStatus.ts +++ /dev/null @@ -1,20 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { Duration } from "./Duration"; -import type { HealthCheckId } from "./HealthCheckId"; -import type { HealthCheckResult } from "./HealthCheckResult"; - -export type MainStatus = - | { status: "stopped" } - | { status: "restarting" } - | { status: "stopping"; timeout: Duration } - | { status: "starting" } - | { - status: "running"; - started: string; - health: { [key: HealthCheckId]: HealthCheckResult }; - } - | { - status: "backingUp"; - started: string | null; - health: { [key: HealthCheckId]: HealthCheckResult }; - }; diff --git a/core/startos/bindings/Manifest.ts b/core/startos/bindings/Manifest.ts deleted file mode 100644 index 688486a7d..000000000 --- a/core/startos/bindings/Manifest.ts +++ /dev/null @@ -1,32 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { Alerts } from "./Alerts"; -import type { Dependencies } from "./Dependencies"; -import type { Description } from "./Description"; -import type { HardwareRequirements } from "./HardwareRequirements"; -import type { ImageId } from "./ImageId"; -import type { PackageId } from "./PackageId"; -import type { VolumeId } from "./VolumeId"; - -export type Manifest = { - id: PackageId; - title: string; - version: string; - releaseNotes: string; - license: string; - replaces: Array; - wrapperRepo: string; - upstreamRepo: string; - supportSite: string; - marketingSite: string; - donationUrl: string | null; - description: Description; - images: Array; - assets: Array; - volumes: Array; - alerts: Alerts; - dependencies: Dependencies; - hardwareRequirements: HardwareRequirements; - gitHash: string | null; - osVersion: string; - hasConfig: boolean; -}; diff --git a/core/startos/bindings/MaybeUtf8String.ts b/core/startos/bindings/MaybeUtf8String.ts deleted file mode 100644 index a77f8ce4e..000000000 --- a/core/startos/bindings/MaybeUtf8String.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type MaybeUtf8String = string | number[]; diff --git a/core/startos/bindings/MountTarget.ts b/core/startos/bindings/MountTarget.ts deleted file mode 100644 index 3009861fb..000000000 --- a/core/startos/bindings/MountTarget.ts +++ /dev/null @@ -1,8 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
- -export type MountTarget = { - packageId: string; - volumeId: string; - subpath: string | null; - readonly: boolean; -}; diff --git a/core/startos/bindings/PackageDataEntry.ts b/core/startos/bindings/PackageDataEntry.ts deleted file mode 100644 index 5729e6bc3..000000000 --- a/core/startos/bindings/PackageDataEntry.ts +++ /dev/null @@ -1,27 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { ActionId } from "./ActionId"; -import type { ActionMetadata } from "./ActionMetadata"; -import type { CurrentDependencies } from "./CurrentDependencies"; -import type { DataUrl } from "./DataUrl"; -import type { HostInfo } from "./HostInfo"; -import type { PackageState } from "./PackageState"; -import type { ServiceInterfaceId } from "./ServiceInterfaceId"; -import type { ServiceInterfaceWithHostInfo } from "./ServiceInterfaceWithHostInfo"; -import type { Status } from "./Status"; - -export type PackageDataEntry = { - stateInfo: PackageState; - status: Status; - marketplaceUrl: string | null; - developerKey: string; - icon: DataUrl; - lastBackup: string | null; - nextBackup: string | null; - currentDependencies: CurrentDependencies; - actions: { [key: ActionId]: ActionMetadata }; - serviceInterfaces: { - [key: ServiceInterfaceId]: ServiceInterfaceWithHostInfo; - }; - hosts: HostInfo; - storeExposedDependents: string[]; -}; diff --git a/core/startos/bindings/ParamsMaybePackageId.ts b/core/startos/bindings/ParamsMaybePackageId.ts deleted file mode 100644 index e9f0f170c..000000000 --- a/core/startos/bindings/ParamsMaybePackageId.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
- -export type ParamsMaybePackageId = { packageId: string | null }; diff --git a/core/startos/bindings/ParamsPackageId.ts b/core/startos/bindings/ParamsPackageId.ts deleted file mode 100644 index 7bc919843..000000000 --- a/core/startos/bindings/ParamsPackageId.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type ParamsPackageId = { packageId: string }; diff --git a/core/startos/bindings/PasswordType.ts b/core/startos/bindings/PasswordType.ts deleted file mode 100644 index 0f36f60a2..000000000 --- a/core/startos/bindings/PasswordType.ts +++ /dev/null @@ -1,4 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { EncryptedWire } from "./EncryptedWire"; - -export type PasswordType = EncryptedWire | string; diff --git a/core/startos/bindings/Public.ts b/core/startos/bindings/Public.ts deleted file mode 100644 index 442176303..000000000 --- a/core/startos/bindings/Public.ts +++ /dev/null @@ -1,9 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { AllPackageData } from "./AllPackageData"; -import type { ServerInfo } from "./ServerInfo"; - -export type Public = { - serverInfo: ServerInfo; - packageData: AllPackageData; - ui: any; -}; diff --git a/core/startos/bindings/RemoveActionParams.ts b/core/startos/bindings/RemoveActionParams.ts deleted file mode 100644 index fcd567c3f..000000000 --- a/core/startos/bindings/RemoveActionParams.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
- -export type RemoveActionParams = { id: string }; diff --git a/core/startos/bindings/RemoveAddressParams.ts b/core/startos/bindings/RemoveAddressParams.ts deleted file mode 100644 index 578631d39..000000000 --- a/core/startos/bindings/RemoveAddressParams.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type RemoveAddressParams = { id: string }; diff --git a/core/startos/bindings/ReverseProxyBind.ts b/core/startos/bindings/ReverseProxyBind.ts deleted file mode 100644 index c9d67b127..000000000 --- a/core/startos/bindings/ReverseProxyBind.ts +++ /dev/null @@ -1,7 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type ReverseProxyBind = { - ip: string | null; - port: number; - ssl: boolean; -}; diff --git a/core/startos/bindings/ReverseProxyDestination.ts b/core/startos/bindings/ReverseProxyDestination.ts deleted file mode 100644 index 216d1310f..000000000 --- a/core/startos/bindings/ReverseProxyDestination.ts +++ /dev/null @@ -1,7 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type ReverseProxyDestination = { - ip: string | null; - port: number; - ssl: boolean; -}; diff --git a/core/startos/bindings/ReverseProxyHttp.ts b/core/startos/bindings/ReverseProxyHttp.ts deleted file mode 100644 index 07cf41862..000000000 --- a/core/startos/bindings/ReverseProxyHttp.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
- -export type ReverseProxyHttp = { headers: null | { [key: string]: string } }; diff --git a/core/startos/bindings/ReverseProxyParams.ts b/core/startos/bindings/ReverseProxyParams.ts deleted file mode 100644 index 6f684b780..000000000 --- a/core/startos/bindings/ReverseProxyParams.ts +++ /dev/null @@ -1,10 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { ReverseProxyBind } from "./ReverseProxyBind"; -import type { ReverseProxyDestination } from "./ReverseProxyDestination"; -import type { ReverseProxyHttp } from "./ReverseProxyHttp"; - -export type ReverseProxyParams = { - bind: ReverseProxyBind; - dst: ReverseProxyDestination; - http: ReverseProxyHttp; -}; diff --git a/core/startos/bindings/ServerInfo.ts b/core/startos/bindings/ServerInfo.ts deleted file mode 100644 index e505c84e7..000000000 --- a/core/startos/bindings/ServerInfo.ts +++ /dev/null @@ -1,35 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { Governor } from "./Governor"; -import type { IpInfo } from "./IpInfo"; -import type { ServerStatus } from "./ServerStatus"; -import type { WifiInfo } from "./WifiInfo"; - -export type ServerInfo = { - arch: string; - platform: string; - id: string; - hostname: string; - version: string; - lastBackup: string | null; - /** - * Used in the wifi to determine the region to set the system to - */ - lastWifiRegion: string | null; - eosVersionCompat: string; - lanAddress: string; - onionAddress: string; - /** - * for backwards compatibility - */ - torAddress: string; - ipInfo: { [key: string]: IpInfo }; - statusInfo: ServerStatus; - wifi: WifiInfo; - unreadNotificationCount: number; - passwordHash: string; - pubkey: string; - caFingerprint: string; - ntpSynced: boolean; - zram: boolean; - governor: Governor | null; -}; diff --git a/core/startos/bindings/ServerStatus.ts b/core/startos/bindings/ServerStatus.ts deleted file mode 100644 index e72d5d6de..000000000 --- a/core/startos/bindings/ServerStatus.ts +++ /dev/null @@ -1,12 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { BackupProgress } from "./BackupProgress"; -import type { PackageId } from "./PackageId"; -import type { UpdateProgress } from "./UpdateProgress"; - -export type ServerStatus = { - backupProgress: { [key: PackageId]: BackupProgress } | null; - updated: boolean; - updateProgress: UpdateProgress | null; - shuttingDown: boolean; - restarting: boolean; -}; diff --git a/core/startos/bindings/ServiceInterface.ts b/core/startos/bindings/ServiceInterface.ts deleted file mode 100644 index 4167257bb..000000000 --- a/core/startos/bindings/ServiceInterface.ts +++ /dev/null @@ -1,15 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { AddressInfo } from "./AddressInfo"; -import type { ServiceInterfaceId } from "./ServiceInterfaceId"; -import type { ServiceInterfaceType } from "./ServiceInterfaceType"; - -export type ServiceInterface = { - id: ServiceInterfaceId; - name: string; - description: string; - hasPrimary: boolean; - disabled: boolean; - masked: boolean; - addressInfo: AddressInfo; - type: ServiceInterfaceType; -}; diff --git a/core/startos/bindings/ServiceInterfaceType.ts b/core/startos/bindings/ServiceInterfaceType.ts deleted file mode 100644 index fadd11f9d..000000000 --- a/core/startos/bindings/ServiceInterfaceType.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type ServiceInterfaceType = "ui" | "p2p" | "api"; diff --git a/core/startos/bindings/ServiceInterfaceWithHostInfo.ts b/core/startos/bindings/ServiceInterfaceWithHostInfo.ts deleted file mode 100644 index bef83abe2..000000000 --- a/core/startos/bindings/ServiceInterfaceWithHostInfo.ts +++ /dev/null @@ -1,17 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { AddressInfo } from "./AddressInfo"; -import type { ExportedHostInfo } from "./ExportedHostInfo"; -import type { ServiceInterfaceId } from "./ServiceInterfaceId"; -import type { ServiceInterfaceType } from "./ServiceInterfaceType"; - -export type ServiceInterfaceWithHostInfo = { - hostInfo: ExportedHostInfo; - id: ServiceInterfaceId; - name: string; - description: string; - hasPrimary: boolean; - disabled: boolean; - masked: boolean; - addressInfo: AddressInfo; - type: ServiceInterfaceType; -}; diff --git a/core/startos/bindings/SetConfigured.ts b/core/startos/bindings/SetConfigured.ts deleted file mode 100644 index ff9eaf11d..000000000 --- a/core/startos/bindings/SetConfigured.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type SetConfigured = { configured: boolean }; diff --git a/core/startos/bindings/SetMainStatus.ts b/core/startos/bindings/SetMainStatus.ts deleted file mode 100644 index 653342e5f..000000000 --- a/core/startos/bindings/SetMainStatus.ts +++ /dev/null @@ -1,4 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { Status } from "./Status"; - -export type SetMainStatus = { status: Status }; diff --git a/core/startos/bindings/SetStoreParams.ts b/core/startos/bindings/SetStoreParams.ts deleted file mode 100644 index 8737295bd..000000000 --- a/core/startos/bindings/SetStoreParams.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type SetStoreParams = { value: any; path: string }; diff --git a/core/startos/bindings/Status.ts b/core/startos/bindings/Status.ts deleted file mode 100644 index e23c5b4be..000000000 --- a/core/startos/bindings/Status.ts +++ /dev/null @@ -1,9 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). 
Do not edit this file manually. -import type { DependencyConfigErrors } from "./DependencyConfigErrors"; -import type { MainStatus } from "./MainStatus"; - -export type Status = { - configured: boolean; - main: MainStatus; - dependencyConfigErrors: DependencyConfigErrors; -}; diff --git a/core/startos/bindings/UpdateProgress.ts b/core/startos/bindings/UpdateProgress.ts deleted file mode 100644 index 3d07c56b4..000000000 --- a/core/startos/bindings/UpdateProgress.ts +++ /dev/null @@ -1,3 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type UpdateProgress = { size: number | null; downloaded: number }; diff --git a/core/startos/bindings/UpdatingState.ts b/core/startos/bindings/UpdatingState.ts deleted file mode 100644 index 37a83f0df..000000000 --- a/core/startos/bindings/UpdatingState.ts +++ /dev/null @@ -1,8 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { InstallingInfo } from "./InstallingInfo"; -import type { Manifest } from "./Manifest"; - -export type UpdatingState = { - manifest: Manifest; - installingInfo: InstallingInfo; -}; diff --git a/core/startos/bindings/index.ts b/core/startos/bindings/index.ts deleted file mode 100644 index 4ce0ab643..000000000 --- a/core/startos/bindings/index.ts +++ /dev/null @@ -1,102 +0,0 @@ -export { ActionId } from "./ActionId"; -export { ActionMetadata } from "./ActionMetadata"; -export { AddressInfo } from "./AddressInfo"; -export { AddSslOptions } from "./AddSslOptions"; -export { Alerts } from "./Alerts"; -export { Algorithm } from "./Algorithm"; -export { AllowedStatuses } from "./AllowedStatuses"; -export { AllPackageData } from "./AllPackageData"; -export { AlpnInfo } from "./AlpnInfo"; -export { BackupProgress } from "./BackupProgress"; -export { BindInfo } from "./BindInfo"; -export { BindOptions } from "./BindOptions"; -export { BindParams } from "./BindParams"; -export { Callback } from "./Callback"; -export { ChrootParams } from "./ChrootParams"; -export { CreateOverlayedImageParams } from "./CreateOverlayedImageParams"; -export { CurrentDependencies } from "./CurrentDependencies"; -export { CurrentDependencyInfo } from "./CurrentDependencyInfo"; -export { DataUrl } from "./DataUrl"; -export { Dependencies } from "./Dependencies"; -export { DependencyConfigErrors } from "./DependencyConfigErrors"; -export { DependencyKind } from "./DependencyKind"; -export { DependencyRequirement } from "./DependencyRequirement"; -export { DepInfo } from "./DepInfo"; -export { Description } from "./Description"; -export { DestroyOverlayedImageParams } from "./DestroyOverlayedImageParams"; -export { Duration } from "./Duration"; -export { EncryptedWire } from "./EncryptedWire"; -export { ExecuteAction } from "./ExecuteAction"; -export { ExportActionParams } from "./ExportActionParams"; -export { ExportedHostInfo } from "./ExportedHostInfo"; -export { ExportedHostnameInfo 
} from "./ExportedHostnameInfo"; -export { ExportedIpHostname } from "./ExportedIpHostname"; -export { ExportedOnionHostname } from "./ExportedOnionHostname"; -export { ExportServiceInterfaceParams } from "./ExportServiceInterfaceParams"; -export { ExposeForDependentsParams } from "./ExposeForDependentsParams"; -export { FullProgress } from "./FullProgress"; -export { GetHostInfoParamsKind } from "./GetHostInfoParamsKind"; -export { GetHostInfoParams } from "./GetHostInfoParams"; -export { GetPrimaryUrlParams } from "./GetPrimaryUrlParams"; -export { GetServiceInterfaceParams } from "./GetServiceInterfaceParams"; -export { GetServicePortForwardParams } from "./GetServicePortForwardParams"; -export { GetSslCertificateParams } from "./GetSslCertificateParams"; -export { GetSslKeyParams } from "./GetSslKeyParams"; -export { GetStoreParams } from "./GetStoreParams"; -export { GetSystemSmtpParams } from "./GetSystemSmtpParams"; -export { Governor } from "./Governor"; -export { HardwareRequirements } from "./HardwareRequirements"; -export { HealthCheckId } from "./HealthCheckId"; -export { HealthCheckResult } from "./HealthCheckResult"; -export { HostAddress } from "./HostAddress"; -export { HostId } from "./HostId"; -export { HostInfo } from "./HostInfo"; -export { HostKind } from "./HostKind"; -export { Host } from "./Host"; -export { ImageId } from "./ImageId"; -export { InstalledState } from "./InstalledState"; -export { InstallingInfo } from "./InstallingInfo"; -export { InstallingState } from "./InstallingState"; -export { IpInfo } from "./IpInfo"; -export { ListServiceInterfacesParams } from "./ListServiceInterfacesParams"; -export { MainStatus } from "./MainStatus"; -export { Manifest } from "./Manifest"; -export { MaybeUtf8String } from "./MaybeUtf8String"; -export { MountParams } from "./MountParams"; -export { MountTarget } from "./MountTarget"; -export { NamedProgress } from "./NamedProgress"; -export { PackageDataEntry } from "./PackageDataEntry"; -export { 
PackageId } from "./PackageId"; -export { PackageState } from "./PackageState"; -export { ParamsMaybePackageId } from "./ParamsMaybePackageId"; -export { ParamsPackageId } from "./ParamsPackageId"; -export { PasswordType } from "./PasswordType"; -export { Progress } from "./Progress"; -export { Public } from "./Public"; -export { RemoveActionParams } from "./RemoveActionParams"; -export { RemoveAddressParams } from "./RemoveAddressParams"; -export { ReverseProxyBind } from "./ReverseProxyBind"; -export { ReverseProxyDestination } from "./ReverseProxyDestination"; -export { ReverseProxyHttp } from "./ReverseProxyHttp"; -export { ReverseProxyParams } from "./ReverseProxyParams"; -export { Security } from "./Security"; -export { ServerInfo } from "./ServerInfo"; -export { ServerSpecs } from "./ServerSpecs"; -export { ServerStatus } from "./ServerStatus"; -export { ServiceInterfaceId } from "./ServiceInterfaceId"; -export { ServiceInterface } from "./ServiceInterface"; -export { ServiceInterfaceType } from "./ServiceInterfaceType"; -export { ServiceInterfaceWithHostInfo } from "./ServiceInterfaceWithHostInfo"; -export { SessionList } from "./SessionList"; -export { Sessions } from "./Sessions"; -export { Session } from "./Session"; -export { SetConfigured } from "./SetConfigured"; -export { SetDependenciesParams } from "./SetDependenciesParams"; -export { SetHealth } from "./SetHealth"; -export { SetMainStatus } from "./SetMainStatus"; -export { SetStoreParams } from "./SetStoreParams"; -export { Status } from "./Status"; -export { UpdateProgress } from "./UpdateProgress"; -export { UpdatingState } from "./UpdatingState"; -export { VolumeId } from "./VolumeId"; -export { WifiInfo } from "./WifiInfo"; diff --git a/core/startos/proptest-regressions/s9pk/merkle_archive/test.txt b/core/startos/proptest-regressions/s9pk/merkle_archive/test.txt new file mode 100644 index 000000000..116de6aba --- /dev/null +++ b/core/startos/proptest-regressions/s9pk/merkle_archive/test.txt 
@@ -0,0 +1,7 @@ +# Seeds for failure cases proptest has generated in the past. It is +# automatically read and these particular cases re-run before any +# novel cases are generated. +# +# It is recommended to check this file in to source control so that +# everyone who runs the test benefits from these saved cases. +cc dbb4790c31f9e400ed29a9ba2dbd61e3c55ce8a3fbae16601ca3512e803020ed # shrinks to files = [] diff --git a/core/startos/src/account.rs b/core/startos/src/account.rs index e074d301d..9e755342f 100644 --- a/core/startos/src/account.rs +++ b/core/startos/src/account.rs @@ -28,6 +28,7 @@ pub struct AccountInfo { pub root_ca_key: PKey, pub root_ca_cert: X509, pub ssh_key: ssh_key::PrivateKey, + pub compat_s9pk_key: ed25519_dalek::SigningKey, } impl AccountInfo { pub fn new(password: &str, start_time: SystemTime) -> Result { @@ -39,6 +40,7 @@ impl AccountInfo { let ssh_key = ssh_key::PrivateKey::from(ssh_key::private::Ed25519Keypair::random( &mut rand::thread_rng(), )); + let compat_s9pk_key = ed25519_dalek::SigningKey::generate(&mut rand::thread_rng()); Ok(Self { server_id, hostname, @@ -47,6 +49,7 @@ impl AccountInfo { root_ca_key, root_ca_cert, ssh_key, + compat_s9pk_key, }) } @@ -61,6 +64,7 @@ impl AccountInfo { let root_ca_key = cert_store.as_root_key().de()?.0; let root_ca_cert = cert_store.as_root_cert().de()?.0; let ssh_key = db.as_private().as_ssh_privkey().de()?.0; + let compat_s9pk_key = db.as_private().as_compat_s9pk_key().de()?.0; Ok(Self { server_id, @@ -70,6 +74,7 @@ impl AccountInfo { root_ca_key, root_ca_cert, ssh_key, + compat_s9pk_key, }) } @@ -92,6 +97,9 @@ impl AccountInfo { db.as_private_mut() .as_ssh_privkey_mut() .ser(Pem::new_ref(&self.ssh_key))?; + db.as_private_mut() + .as_compat_s9pk_key_mut() + .ser(Pem::new_ref(&self.compat_s9pk_key))?; let key_store = db.as_private_mut().as_key_store_mut(); key_store.as_onion_mut().insert_key(&self.tor_key)?; let cert_store = key_store.as_local_certs_mut(); diff --git a/core/startos/src/action.rs 
b/core/startos/src/action.rs index 396e7ed50..7c4492adc 100644 --- a/core/startos/src/action.rs +++ b/core/startos/src/action.rs @@ -1,7 +1,6 @@ use clap::Parser; pub use models::ActionId; use models::PackageId; -use rpc_toolkit::command; use serde::{Deserialize, Serialize}; use tracing::instrument; use ts_rs::TS; @@ -9,6 +8,7 @@ use ts_rs::TS; use crate::config::Config; use crate::context::RpcContext; use crate::prelude::*; +use crate::rpc_continuations::Guid; use crate::util::serde::{display_serializable, StdinDeserializable, WithIoFormat}; #[derive(Debug, Serialize, Deserialize)] @@ -58,6 +58,7 @@ pub struct ActionParams { pub action_id: ActionId, #[command(flatten)] #[ts(type = "{ [key: string]: any } | null")] + #[serde(default)] pub input: StdinDeserializable>, } // impl C @@ -78,6 +79,7 @@ pub async fn action( .as_ref() .or_not_found(lazy_format!("Manager for {}", package_id))? .action( + Guid::new(), action_id, input.map(|c| to_value(&c)).transpose()?.unwrap_or_default(), ) diff --git a/core/startos/src/auth.rs b/core/startos/src/auth.rs index 915ac10bd..d998e9897 100644 --- a/core/startos/src/auth.rs +++ b/core/startos/src/auth.rs @@ -4,9 +4,10 @@ use chrono::{DateTime, Utc}; use clap::Parser; use color_eyre::eyre::eyre; use imbl_value::{json, InternedString}; +use itertools::Itertools; use josekit::jwk::Jwk; use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{command, from_fn_async, AnyContext, CallRemote, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tracing::instrument; use ts_rs::TS; @@ -82,7 +83,7 @@ impl std::str::FromStr for PasswordType { }) } } -pub fn auth() -> ParentHandler { +pub fn auth() -> ParentHandler { ParentHandler::new() .subcommand( "login", @@ -94,11 +95,11 @@ pub fn auth() -> ParentHandler { .subcommand( "logout", from_fn_async(logout) - .with_metadata("get-session", Value::Bool(true)) - .with_remote_cli::() - .no_display(), + 
.with_metadata("get_session", Value::Bool(true)) + .no_display() + .with_call_remote::(), ) - .subcommand("session", session()) + .subcommand("session", session::()) .subcommand( "reset-password", from_fn_async(reset_password_impl).no_cli(), @@ -112,7 +113,7 @@ pub fn auth() -> ParentHandler { from_fn_async(get_pubkey) .with_metadata("authenticated", Value::Bool(false)) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } @@ -128,26 +129,20 @@ fn gen_pwd() { .unwrap() ) } -#[derive(Deserialize, Serialize, Parser)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct CliLoginParams { - password: Option, -} #[instrument(skip_all)] async fn cli_login( - ctx: CliContext, - CliLoginParams { password }: CliLoginParams, + HandlerArgs { + context: ctx, + parent_method, + method, + .. + }: HandlerArgs, ) -> Result<(), RpcError> { - let password = if let Some(password) = password { - password.decrypt(&ctx)? - } else { - rpassword::prompt_password("Password: ")? 
- }; + let password = rpassword::prompt_password("Password: ")?; - ctx.call_remote( - "auth.login", + ctx.call_remote::( + &parent_method.into_iter().chain(method).join("."), json!({ "password": password, "metadata": { @@ -183,11 +178,15 @@ pub fn check_password_against_db(db: &DatabaseModel, password: &str) -> Result<( #[derive(Deserialize, Serialize, Parser, TS)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] +#[ts(export)] pub struct LoginParams { password: Option, - #[serde(default)] + #[ts(skip)] + #[serde(rename = "__auth_userAgent")] // from Auth middleware user_agent: Option, #[serde(default)] + ephemeral: bool, + #[serde(default)] #[ts(type = "any")] metadata: Value, } @@ -198,35 +197,54 @@ pub async fn login_impl( LoginParams { password, user_agent, + ephemeral, metadata, }: LoginParams, ) -> Result { let password = password.unwrap_or_default().decrypt(&ctx)?; - ctx.db - .mutate(|db| { - check_password_against_db(db, &password)?; - let hash_token = HashSessionToken::new(); - db.as_private_mut().as_sessions_mut().insert( - hash_token.hashed(), - &Session { + if ephemeral { + check_password_against_db(&ctx.db.peek().await, &password)?; + let hash_token = HashSessionToken::new(); + ctx.ephemeral_sessions.mutate(|s| { + s.0.insert( + hash_token.hashed().clone(), + Session { logged_in: Utc::now(), last_active: Utc::now(), user_agent, metadata, }, - )?; + ) + }); + Ok(hash_token.to_login_res()) + } else { + ctx.db + .mutate(|db| { + check_password_against_db(db, &password)?; + let hash_token = HashSessionToken::new(); + db.as_private_mut().as_sessions_mut().insert( + hash_token.hashed(), + &Session { + logged_in: Utc::now(), + last_active: Utc::now(), + user_agent, + metadata, + }, + )?; - Ok(hash_token.to_login_res()) - }) - .await + Ok(hash_token.to_login_res()) + }) + .await + } } #[derive(Deserialize, Serialize, Parser, TS)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct LogoutParams { - 
#[ts(type = "string")] + #[ts(skip)] + #[serde(rename = "__auth_session")] // from Auth middleware session: InternedString, } @@ -262,23 +280,23 @@ pub struct SessionList { sessions: Sessions, } -pub fn session() -> ParentHandler { +pub fn session() -> ParentHandler { ParentHandler::new() .subcommand( "list", from_fn_async(list) - .with_metadata("get-session", Value::Bool(true)) + .with_metadata("get_session", Value::Bool(true)) .with_display_serializable() - .with_custom_display_fn::(|handle, result| { + .with_custom_display_fn(|handle, result| { Ok(display_sessions(handle.params, result)) }) - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "kill", from_fn_async(kill) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } @@ -321,6 +339,7 @@ fn display_sessions(params: WithIoFormat, arg: SessionList) { pub struct ListParams { #[arg(skip)] #[ts(skip)] + #[serde(rename = "__auth_session")] // from Auth middleware session: InternedString, } @@ -330,9 +349,15 @@ pub async fn list( ctx: RpcContext, ListParams { session, .. }: ListParams, ) -> Result { + let mut sessions = ctx.db.peek().await.into_private().into_sessions().de()?; + ctx.ephemeral_sessions.peek(|s| { + sessions + .0 + .extend(s.0.iter().map(|(k, v)| (k.clone(), v.clone()))) + }); Ok(SessionList { - current: HashSessionToken::from_token(session).hashed().clone(), - sessions: ctx.db.peek().await.into_private().into_sessions().de()?, + current: session, + sessions, }) } @@ -374,21 +399,16 @@ pub struct ResetPasswordParams { #[instrument(skip_all)] async fn cli_reset_password( - ctx: CliContext, - ResetPasswordParams { - old_password, - new_password, - }: ResetPasswordParams, + HandlerArgs { + context: ctx, + parent_method, + method, + .. + }: HandlerArgs, ) -> Result<(), RpcError> { - let old_password = if let Some(old_password) = old_password { - old_password.decrypt(&ctx)? - } else { - rpassword::prompt_password("Current Password: ")? 
- }; + let old_password = rpassword::prompt_password("Current Password: ")?; - let new_password = if let Some(new_password) = new_password { - new_password.decrypt(&ctx)? - } else { + let new_password = { let new_password = rpassword::prompt_password("New Password: ")?; if new_password != rpassword::prompt_password("Confirm: ")? { return Err(Error::new( @@ -400,8 +420,8 @@ async fn cli_reset_password( new_password }; - ctx.call_remote( - "auth.reset-password", + ctx.call_remote::( + &parent_method.into_iter().chain(method).join("."), imbl_value::json!({ "old-password": old_password, "new-password": new_password }), ) .await?; @@ -447,7 +467,7 @@ pub async fn reset_password_impl( #[instrument(skip_all)] pub async fn get_pubkey(ctx: RpcContext) -> Result { - let secret = ctx.as_ref().clone(); + let secret = >::as_ref(&ctx).clone(); let pub_key = secret.to_public_key()?; Ok(pub_key) } diff --git a/core/startos/src/backup/backup_bulk.rs b/core/startos/src/backup/backup_bulk.rs index 928e4811a..b4419e88e 100644 --- a/core/startos/src/backup/backup_bulk.rs +++ b/core/startos/src/backup/backup_bulk.rs @@ -13,14 +13,14 @@ use tokio::io::AsyncWriteExt; use tracing::instrument; use ts_rs::TS; -use super::target::BackupTargetId; +use super::target::{BackupTargetId, PackageBackupInfo}; use super::PackageBackupReport; use crate::auth::check_password_against_db; use crate::backup::os::OsBackup; use crate::backup::{BackupReport, ServerBackupReport}; use crate::context::RpcContext; use crate::db::model::public::BackupProgress; -use crate::db::model::DatabaseModel; +use crate::db::model::{Database, DatabaseModel}; use crate::disk::mount::backup::BackupMountGuard; use crate::disk::mount::filesystem::ReadWrite; use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; @@ -42,9 +42,9 @@ pub struct BackupParams { password: crate::auth::PasswordType, } -struct BackupStatusGuard(Option); +struct BackupStatusGuard(Option>); impl BackupStatusGuard { - fn new(db: PatchDb) -> Self { 
+ fn new(db: TypedPatchDb) -> Self { Self(Some(db)) } async fn handle_result( @@ -164,7 +164,7 @@ pub async fn backup_all( .decrypt(&ctx)?; let password = password.decrypt(&ctx)?; - let ((fs, package_ids), status_guard) = ( + let ((fs, package_ids, server_id), status_guard) = ( ctx.db .mutate(|db| { check_password_against_db(db, &password)?; @@ -181,7 +181,11 @@ pub async fn backup_all( .collect() }; assure_backing_up(db, &package_ids)?; - Ok((fs, package_ids)) + Ok(( + fs, + package_ids, + db.as_public().as_server_info().as_id().de()?, + )) }) .await?, BackupStatusGuard::new(ctx.db.clone()), @@ -189,6 +193,7 @@ pub async fn backup_all( let mut backup_guard = BackupMountGuard::mount( TmpMountGuard::mount(&fs, ReadWrite).await?, + &server_id, &old_password_decrypted, ) .await?; @@ -246,19 +251,43 @@ async fn perform_backup( backup_guard: BackupMountGuard, package_ids: &OrdSet, ) -> Result, Error> { + let db = ctx.db.peek().await; let mut backup_report = BTreeMap::new(); let backup_guard = Arc::new(backup_guard); + let mut package_backups: BTreeMap = + backup_guard.metadata.package_backups.clone(); for id in package_ids { if let Some(service) = &*ctx.services.get(id).await { + let backup_result = service + .backup(backup_guard.package_backup(id).await?) + .await + .err() + .map(|e| e.to_string()); + if backup_result.is_none() { + let manifest = db + .as_public() + .as_package_data() + .as_idx(id) + .or_not_found(id)? + .as_state_info() + .expect_installed()? 
+ .as_manifest(); + + package_backups.insert( + id.clone(), + PackageBackupInfo { + os_version: manifest.as_os_version().de()?, + version: manifest.as_version().de()?, + title: manifest.as_title().de()?, + timestamp: Utc::now(), + }, + ); + } backup_report.insert( id.clone(), PackageBackupReport { - error: service - .backup(backup_guard.package_backup(id)) - .await - .err() - .map(|e| e.to_string()), + error: backup_result, }, ); } @@ -274,11 +303,11 @@ async fn perform_backup( let ui = ctx.db.peek().await.into_public().into_ui().de()?; let mut os_backup_file = - AtomicFile::new(backup_guard.path().join("os-backup.cbor"), None::) + AtomicFile::new(backup_guard.path().join("os-backup.json"), None::) .await .with_kind(ErrorKind::Filesystem)?; os_backup_file - .write_all(&IoFormat::Cbor.to_vec(&OsBackup { + .write_all(&IoFormat::Json.to_vec(&OsBackup { account: ctx.account.read().await.clone(), ui, })?) @@ -296,17 +325,19 @@ async fn perform_backup( if tokio::fs::metadata(&luks_folder_bak).await.is_ok() { tokio::fs::rename(&luks_folder_bak, &luks_folder_old).await?; } - let luks_folder = Path::new("/media/embassy/config/luks"); + let luks_folder = Path::new("/media/startos/config/luks"); if tokio::fs::metadata(&luks_folder).await.is_ok() { - dir_copy(&luks_folder, &luks_folder_bak, None).await?; + dir_copy(luks_folder, &luks_folder_bak, None).await?; } - let timestamp = Some(Utc::now()); + let timestamp = Utc::now(); backup_guard.unencrypted_metadata.version = crate::version::Current::new().semver().into(); - backup_guard.unencrypted_metadata.full = true; + backup_guard.unencrypted_metadata.hostname = ctx.account.read().await.hostname.clone(); + backup_guard.unencrypted_metadata.timestamp = timestamp.clone(); backup_guard.metadata.version = crate::version::Current::new().semver().into(); - backup_guard.metadata.timestamp = timestamp; + backup_guard.metadata.timestamp = Some(timestamp); + backup_guard.metadata.package_backups = package_backups; 
backup_guard.save_and_unmount().await?; @@ -315,7 +346,7 @@ async fn perform_backup( v.as_public_mut() .as_server_info_mut() .as_last_backup_mut() - .ser(×tamp) + .ser(&Some(timestamp)) }) .await?; diff --git a/core/startos/src/backup/mod.rs b/core/startos/src/backup/mod.rs index c963118c4..8afafaa33 100644 --- a/core/startos/src/backup/mod.rs +++ b/core/startos/src/backup/mod.rs @@ -3,7 +3,7 @@ use std::collections::BTreeMap; use chrono::{DateTime, Utc}; use models::{HostId, PackageId}; use reqwest::Url; -use rpc_toolkit::{from_fn_async, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use crate::context::CliContext; @@ -34,23 +34,23 @@ pub struct PackageBackupReport { } // #[command(subcommands(backup_bulk::backup_all, target::target))] -pub fn backup() -> ParentHandler { +pub fn backup() -> ParentHandler { ParentHandler::new() .subcommand( "create", from_fn_async(backup_bulk::backup_all) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) - .subcommand("target", target::target()) + .subcommand("target", target::target::()) } -pub fn package_backup() -> ParentHandler { +pub fn package_backup() -> ParentHandler { ParentHandler::new().subcommand( "restore", from_fn_async(restore::restore_packages_rpc) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } @@ -61,5 +61,5 @@ struct BackupMetadata { pub network_keys: BTreeMap>, #[serde(default)] pub tor_keys: BTreeMap>, // DEPRECATED - pub marketplace_url: Option, + pub registry: Option, } diff --git a/core/startos/src/backup/os.rs b/core/startos/src/backup/os.rs index 6848473a7..6f08c5f43 100644 --- a/core/startos/src/backup/os.rs +++ b/core/startos/src/backup/os.rs @@ -1,3 +1,4 @@ +use imbl_value::InternedString; use openssl::pkey::{PKey, Private}; use openssl::x509::X509; use patch_db::Value; @@ -85,6 +86,7 @@ impl OsBackupV0 { ssh_key::Algorithm::Ed25519, )?, tor_key: 
TorSecretKeyV3::from(self.tor_key.0), + compat_s9pk_key: ed25519_dalek::SigningKey::generate(&mut rand::thread_rng()), }, ui: self.ui, }) @@ -96,7 +98,7 @@ impl OsBackupV0 { #[serde(rename = "kebab-case")] struct OsBackupV1 { server_id: String, // uuidv4 - hostname: String, // embassy-- + hostname: InternedString, // embassy-- net_key: Base64<[u8; 32]>, // Ed25519 Secret Key root_ca_key: Pem>, // PEM Encoded OpenSSL Key root_ca_cert: Pem, // PEM Encoded OpenSSL X509 Certificate @@ -113,6 +115,7 @@ impl OsBackupV1 { root_ca_cert: self.root_ca_cert.0, ssh_key: ssh_key::PrivateKey::from(Ed25519Keypair::from_seed(&self.net_key.0)), tor_key: TorSecretKeyV3::from(ed25519_expand_key(&self.net_key.0)), + compat_s9pk_key: ed25519_dalek::SigningKey::from_bytes(&self.net_key), }, ui: self.ui, } @@ -124,13 +127,14 @@ impl OsBackupV1 { #[serde(rename = "kebab-case")] struct OsBackupV2 { - server_id: String, // uuidv4 - hostname: String, // - - root_ca_key: Pem>, // PEM Encoded OpenSSL Key - root_ca_cert: Pem, // PEM Encoded OpenSSL X509 Certificate - ssh_key: Pem, // PEM Encoded OpenSSH Key - tor_key: TorSecretKeyV3, // Base64 Encoded Ed25519 Expanded Secret Key - ui: Value, // JSON Value + server_id: String, // uuidv4 + hostname: InternedString, // - + root_ca_key: Pem>, // PEM Encoded OpenSSL Key + root_ca_cert: Pem, // PEM Encoded OpenSSL X509 Certificate + ssh_key: Pem, // PEM Encoded OpenSSH Key + tor_key: TorSecretKeyV3, // Base64 Encoded Ed25519 Expanded Secret Key + compat_s9pk_key: Pem, // PEM Encoded ED25519 Key + ui: Value, // JSON Value } impl OsBackupV2 { fn project(self) -> OsBackup { @@ -143,6 +147,7 @@ impl OsBackupV2 { root_ca_cert: self.root_ca_cert.0, ssh_key: self.ssh_key.0, tor_key: self.tor_key, + compat_s9pk_key: self.compat_s9pk_key.0, }, ui: self.ui, } @@ -155,6 +160,7 @@ impl OsBackupV2 { root_ca_cert: Pem(backup.account.root_ca_cert.clone()), ssh_key: Pem(backup.account.ssh_key.clone()), tor_key: backup.account.tor_key.clone(), + compat_s9pk_key: 
Pem(backup.account.compat_s9pk_key.clone()), ui: backup.ui.clone(), } } diff --git a/core/startos/src/backup/restore.rs b/core/startos/src/backup/restore.rs index 4753a4290..28d70653f 100644 --- a/core/startos/src/backup/restore.rs +++ b/core/startos/src/backup/restore.rs @@ -4,25 +4,25 @@ use std::sync::Arc; use clap::Parser; use futures::{stream, StreamExt}; use models::PackageId; -use openssl::x509::X509; use patch_db::json_ptr::ROOT; use serde::{Deserialize, Serialize}; -use torut::onion::OnionAddressV3; +use tokio::sync::Mutex; use tracing::instrument; use ts_rs::TS; use super::target::BackupTargetId; use crate::backup::os::OsBackup; +use crate::context::setup::SetupResult; use crate::context::{RpcContext, SetupContext}; use crate::db::model::Database; use crate::disk::mount::backup::BackupMountGuard; use crate::disk::mount::filesystem::ReadWrite; use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; -use crate::hostname::Hostname; -use crate::init::init; +use crate::init::{init, InitResult}; use crate::prelude::*; use crate::s9pk::S9pk; use crate::service::service_map::DownloadInstallFuture; +use crate::setup::SetupExecuteProgress; use crate::util::serde::IoFormat; #[derive(Deserialize, Serialize, Parser, TS)] @@ -44,9 +44,14 @@ pub async fn restore_packages_rpc( password, }: RestorePackageParams, ) -> Result<(), Error> { - let fs = target_id.load(&ctx.db.peek().await)?; - let backup_guard = - BackupMountGuard::mount(TmpMountGuard::mount(&fs, ReadWrite).await?, &password).await?; + let peek = ctx.db.peek().await; + let fs = target_id.load(&peek)?; + let backup_guard = BackupMountGuard::mount( + TmpMountGuard::mount(&fs, ReadWrite).await?, + &peek.as_public().as_server_info().as_id().de()?, + &password, + ) + .await?; let tasks = restore_packages(&ctx, backup_guard, ids).await?; @@ -67,22 +72,27 @@ pub async fn restore_packages_rpc( Ok(()) } -#[instrument(skip(ctx))] +#[instrument(skip_all)] pub async fn recover_full_embassy( - ctx: SetupContext, 
+ ctx: &SetupContext, disk_guid: Arc, start_os_password: String, recovery_source: TmpMountGuard, - recovery_password: Option, -) -> Result<(Arc, Hostname, OnionAddressV3, X509), Error> { - let backup_guard = BackupMountGuard::mount( - recovery_source, - recovery_password.as_deref().unwrap_or_default(), - ) - .await?; + server_id: &str, + recovery_password: &str, + SetupExecuteProgress { + init_phases, + restore_phase, + rpc_ctx_phases, + }: SetupExecuteProgress, +) -> Result<(SetupResult, RpcContext), Error> { + let mut restore_phase = restore_phase.or_not_found("restore progress")?; - let os_backup_path = backup_guard.path().join("os-backup.cbor"); - let mut os_backup: OsBackup = IoFormat::Cbor.from_slice( + let backup_guard = + BackupMountGuard::mount(recovery_source, server_id, recovery_password).await?; + + let os_backup_path = backup_guard.path().join("os-backup.json"); + let mut os_backup: OsBackup = IoFormat::Json.from_slice( &tokio::fs::read(&os_backup_path) .await .with_ctx(|_| (ErrorKind::Filesystem, os_backup_path.display().to_string()))?, @@ -99,10 +109,17 @@ pub async fn recover_full_embassy( db.put(&ROOT, &Database::init(&os_backup.account)?).await?; drop(db); - init(&ctx.config).await?; + let InitResult { net_ctrl } = init(&ctx.config, init_phases).await?; - let rpc_ctx = RpcContext::init(&ctx.config, disk_guid.clone()).await?; + let rpc_ctx = RpcContext::init( + &ctx.config, + disk_guid.clone(), + Some(net_ctrl), + rpc_ctx_phases, + ) + .await?; + restore_phase.start(); let ids: Vec<_> = backup_guard .metadata .package_backups @@ -110,26 +127,26 @@ pub async fn recover_full_embassy( .cloned() .collect(); let tasks = restore_packages(&rpc_ctx, backup_guard, ids).await?; + restore_phase.set_total(tasks.len() as u64); + let restore_phase = Arc::new(Mutex::new(restore_phase)); stream::iter(tasks) - .for_each_concurrent(5, |(id, res)| async move { - match async { res.await?.await }.await { - Ok(_) => (), - Err(err) => { - tracing::error!("Error restoring 
package {}: {}", id, err); - tracing::debug!("{:?}", err); + .for_each_concurrent(5, |(id, res)| { + let restore_phase = restore_phase.clone(); + async move { + match async { res.await?.await }.await { + Ok(_) => (), + Err(err) => { + tracing::error!("Error restoring package {}: {}", id, err); + tracing::debug!("{:?}", err); + } } + *restore_phase.lock().await += 1; } }) .await; + restore_phase.lock().await.complete(); - rpc_ctx.shutdown().await?; - - Ok(( - disk_guid, - os_backup.account.hostname, - os_backup.account.tor_key.public().get_onion_address(), - os_backup.account.root_ca_cert, - )) + Ok(((&os_backup.account).try_into()?, rpc_ctx)) } #[instrument(skip(ctx, backup_guard))] @@ -141,17 +158,15 @@ async fn restore_packages( let backup_guard = Arc::new(backup_guard); let mut tasks = BTreeMap::new(); for id in ids { - let backup_dir = backup_guard.clone().package_backup(&id); + let backup_dir = backup_guard.clone().package_backup(&id).await?; + let s9pk_path = backup_dir.path().join(&id).with_extension("s9pk"); let task = ctx .services .install( ctx.clone(), - S9pk::open( - backup_dir.path().join(&id).with_extension("s9pk"), - Some(&id), - ) - .await?, + || S9pk::open(s9pk_path, Some(&id)), Some(backup_dir), + None, ) .await?; tasks.insert(id, task); diff --git a/core/startos/src/backup/target/cifs.rs b/core/startos/src/backup/target/cifs.rs index ab2e91c0e..e83f4e981 100644 --- a/core/startos/src/backup/target/cifs.rs +++ b/core/startos/src/backup/target/cifs.rs @@ -4,7 +4,7 @@ use std::path::{Path, PathBuf}; use clap::Parser; use color_eyre::eyre::eyre; use imbl_value::InternedString; -use rpc_toolkit::{command, from_fn_async, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use ts_rs::TS; @@ -14,7 +14,7 @@ use crate::db::model::DatabaseModel; use crate::disk::mount::filesystem::cifs::Cifs; use crate::disk::mount::filesystem::ReadOnly; use 
crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; -use crate::disk::util::{recovery_info, EmbassyOsRecoveryInfo}; +use crate::disk::util::{recovery_info, StartOsRecoveryInfo}; use crate::prelude::*; use crate::util::serde::KeyVal; @@ -43,28 +43,28 @@ pub struct CifsBackupTarget { path: PathBuf, username: String, mountable: bool, - start_os: Option, + start_os: BTreeMap, } -pub fn cifs() -> ParentHandler { +pub fn cifs() -> ParentHandler { ParentHandler::new() .subcommand( "add", from_fn_async(add) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "update", from_fn_async(update) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "remove", from_fn_async(remove) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } @@ -239,7 +239,7 @@ pub async fn list(db: &DatabaseModel) -> Result, Er path: mount_info.path, username: mount_info.username, mountable: start_os.is_ok(), - start_os: start_os.ok().and_then(|a| a), + start_os: start_os.ok().unwrap_or_default(), }, )); } diff --git a/core/startos/src/backup/target/mod.rs b/core/startos/src/backup/target/mod.rs index c0f2ef10e..032f70848 100644 --- a/core/startos/src/backup/target/mod.rs +++ b/core/startos/src/backup/target/mod.rs @@ -7,8 +7,10 @@ use clap::Parser; use color_eyre::eyre::eyre; use digest::generic_array::GenericArray; use digest::OutputSizeUser; +use exver::Version; +use imbl_value::InternedString; use models::PackageId; -use rpc_toolkit::{command, from_fn_async, AnyContext, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use sha2::Sha256; use tokio::sync::Mutex; @@ -29,7 +31,7 @@ use crate::util::clap::FromStrParser; use crate::util::serde::{ deserialize_from_str, display_serializable, serialize_display, HandlerExtSerde, WithIoFormat, }; -use crate::util::Version; +use crate::util::VersionString; pub mod cifs; @@ -138,23 +140,33 @@ impl 
FileSystem for BackupTargetFS { } // #[command(subcommands(cifs::cifs, list, info, mount, umount))] -pub fn target() -> ParentHandler { +pub fn target() -> ParentHandler { ParentHandler::new() - .subcommand("cifs", cifs::cifs()) + .subcommand("cifs", cifs::cifs::()) .subcommand( "list", from_fn_async(list) .with_display_serializable() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "info", from_fn_async(info) .with_display_serializable() - .with_custom_display_fn::(|params, info| { + .with_custom_display_fn::(|params, info| { Ok(display_backup_info(params.params, info)) }) - .with_remote_cli::(), + .with_call_remote::(), + ) + .subcommand( + "mount", + from_fn_async(mount).with_call_remote::(), + ) + .subcommand( + "umount", + from_fn_async(umount) + .no_display() + .with_call_remote::(), ) } @@ -202,8 +214,8 @@ pub struct BackupInfo { #[derive(Clone, Debug, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct PackageBackupInfo { - pub title: String, - pub version: Version, + pub title: InternedString, + pub version: VersionString, pub os_version: Version, pub timestamp: DateTime, } @@ -223,9 +235,9 @@ fn display_backup_info(params: WithIoFormat, info: BackupInfo) { "TIMESTAMP", ]); table.add_row(row![ - "EMBASSY OS", - info.version.as_str(), - info.version.as_str(), + "StartOS", + &info.version.to_string(), + &info.version.to_string(), &if let Some(ts) = &info.timestamp { ts.to_string() } else { @@ -236,7 +248,7 @@ fn display_backup_info(params: WithIoFormat, info: BackupInfo) { let row = row![ &*id, info.version.as_str(), - info.os_version.as_str(), + &info.os_version.to_string(), &info.timestamp.to_string(), ]; table.add_row(row); @@ -249,6 +261,7 @@ fn display_backup_info(params: WithIoFormat, info: BackupInfo) { #[command(rename_all = "kebab-case")] pub struct InfoParams { target_id: BackupTargetId, + server_id: String, password: String, } @@ -257,11 +270,13 @@ pub async fn info( ctx: RpcContext, InfoParams { target_id, + 
server_id, password, }: InfoParams, ) -> Result { let guard = BackupMountGuard::mount( TmpMountGuard::mount(&target_id.load(&ctx.db.peek().await)?, ReadWrite).await?, + &server_id, &password, ) .await?; @@ -283,6 +298,7 @@ lazy_static::lazy_static! { #[command(rename_all = "kebab-case")] pub struct MountParams { target_id: BackupTargetId, + server_id: String, password: String, } @@ -291,6 +307,7 @@ pub async fn mount( ctx: RpcContext, MountParams { target_id, + server_id, password, }: MountParams, ) -> Result { @@ -302,6 +319,7 @@ pub async fn mount( let guard = BackupMountGuard::mount( TmpMountGuard::mount(&target_id.clone().load(&ctx.db.peek().await)?, ReadWrite).await?, + &server_id, &password, ) .await?; diff --git a/core/startos/src/bins/container_cli.rs b/core/startos/src/bins/container_cli.rs index a33a99131..db7cbd36a 100644 --- a/core/startos/src/bins/container_cli.rs +++ b/core/startos/src/bins/container_cli.rs @@ -15,7 +15,7 @@ pub fn main(args: impl IntoIterator) { EmbassyLogger::init(); if let Err(e) = CliApp::new( |cfg: ContainerClientConfig| Ok(ContainerCliContext::init(cfg)), - crate::service::service_effect_handler::service_effect_handler(), + crate::service::effects::handler(), ) .run(args) { diff --git a/core/startos/src/bins/mod.rs b/core/startos/src/bins/mod.rs index 68f2802e0..4a4670a5b 100644 --- a/core/startos/src/bins/mod.rs +++ b/core/startos/src/bins/mod.rs @@ -5,6 +5,8 @@ use std::path::Path; #[cfg(feature = "container-runtime")] pub mod container_cli; pub mod deprecated; +#[cfg(feature = "registry")] +pub mod registry; #[cfg(feature = "cli")] pub mod start_cli; #[cfg(feature = "daemon")] @@ -20,6 +22,8 @@ fn select_executable(name: &str) -> Option)> { "start-cli" => Some(container_cli::main), #[cfg(feature = "daemon")] "startd" => Some(startd::main), + #[cfg(feature = "registry")] + "registry" => Some(registry::main), "embassy-cli" => Some(|_| deprecated::renamed("embassy-cli", "start-cli")), "embassy-sdk" => Some(|_| 
deprecated::renamed("embassy-sdk", "start-sdk")), "embassyd" => Some(|_| deprecated::renamed("embassyd", "startd")), diff --git a/core/startos/src/bins/registry.rs b/core/startos/src/bins/registry.rs new file mode 100644 index 000000000..132e0984a --- /dev/null +++ b/core/startos/src/bins/registry.rs @@ -0,0 +1,87 @@ +use std::ffi::OsString; + +use clap::Parser; +use futures::FutureExt; +use tokio::signal::unix::signal; +use tracing::instrument; + +use crate::net::web_server::WebServer; +use crate::prelude::*; +use crate::registry::context::{RegistryConfig, RegistryContext}; +use crate::util::logger::EmbassyLogger; + +#[instrument(skip_all)] +async fn inner_main(config: &RegistryConfig) -> Result<(), Error> { + let server = async { + let ctx = RegistryContext::init(config).await?; + let mut server = WebServer::new(ctx.listen); + server.serve_registry(ctx.clone()); + + let mut shutdown_recv = ctx.shutdown.subscribe(); + + let sig_handler_ctx = ctx; + let sig_handler = tokio::spawn(async move { + use tokio::signal::unix::SignalKind; + futures::future::select_all( + [ + SignalKind::interrupt(), + SignalKind::quit(), + SignalKind::terminate(), + ] + .iter() + .map(|s| { + async move { + signal(*s) + .unwrap_or_else(|_| panic!("register {:?} handler", s)) + .recv() + .await + } + .boxed() + }), + ) + .await; + sig_handler_ctx + .shutdown + .send(()) + .map_err(|_| ()) + .expect("send shutdown signal"); + }); + + shutdown_recv + .recv() + .await + .with_kind(crate::ErrorKind::Unknown)?; + + sig_handler.abort(); + + Ok::<_, Error>(server) + } + .await?; + server.shutdown().await; + + Ok(()) +} + +pub fn main(args: impl IntoIterator) { + EmbassyLogger::init(); + + let config = RegistryConfig::parse_from(args).load().unwrap(); + + let res = { + let rt = tokio::runtime::Builder::new_multi_thread() + .enable_all() + .build() + .expect("failed to initialize runtime"); + rt.block_on(inner_main(&config)) + }; + + match res { + Ok(()) => (), + Err(e) => { + eprintln!("{}", 
e.source); + tracing::debug!("{:?}", e.source); + drop(e.source); + std::process::exit(e.kind as i32) + } + } +} diff --git a/core/startos/src/bins/start_cli.rs b/core/startos/src/bins/start_cli.rs index 374247f2e..17cc095a3 100644 --- a/core/startos/src/bins/start_cli.rs +++ b/core/startos/src/bins/start_cli.rs @@ -16,7 +16,7 @@ pub fn main(args: impl IntoIterator) { EmbassyLogger::init(); if let Err(e) = CliApp::new( |cfg: ClientConfig| Ok(CliContext::init(cfg.load()?)?), - crate::main_api(), + crate::expanded_api(), ) .run(args) { diff --git a/core/startos/src/bins/start_init.rs b/core/startos/src/bins/start_init.rs index 284748339..394d42c8d 100644 --- a/core/startos/src/bins/start_init.rs +++ b/core/startos/src/bins/start_init.rs @@ -1,47 +1,56 @@ -use std::net::{Ipv6Addr, SocketAddr}; -use std::path::Path; use std::sync::Arc; -use std::time::Duration; -use helpers::NonDetachingJoinHandle; use tokio::process::Command; use tracing::instrument; use crate::context::config::ServerConfig; -use crate::context::{DiagnosticContext, InstallContext, SetupContext}; -use crate::disk::fsck::{RepairStrategy, RequiresReboot}; +use crate::context::rpc::InitRpcContextPhases; +use crate::context::{DiagnosticContext, InitContext, InstallContext, RpcContext, SetupContext}; +use crate::disk::fsck::RepairStrategy; use crate::disk::main::DEFAULT_PASSWORD; use crate::disk::REPAIR_DISK_PATH; -use crate::firmware::update_firmware; -use crate::init::STANDBY_MODE_PATH; +use crate::firmware::{check_for_firmware_update, update_firmware}; +use crate::init::{InitPhases, InitResult, STANDBY_MODE_PATH}; use crate::net::web_server::WebServer; +use crate::prelude::*; +use crate::progress::FullProgressTracker; use crate::shutdown::Shutdown; -use crate::sound::{BEP, CHIME}; use crate::util::Invoke; -use crate::{Error, ErrorKind, ResultExt, PLATFORM}; +use crate::PLATFORM; #[instrument(skip_all)] -async fn setup_or_init(config: &ServerConfig) -> Result, Error> { - let song = 
NonDetachingJoinHandle::from(tokio::spawn(async { - loop { - BEP.play().await.unwrap(); - BEP.play().await.unwrap(); - tokio::time::sleep(Duration::from_secs(30)).await; - } - })); +async fn setup_or_init( + server: &mut WebServer, + config: &ServerConfig, +) -> Result, Error> { + if let Some(firmware) = check_for_firmware_update() + .await + .map_err(|e| { + tracing::warn!("Error checking for firmware update: {e}"); + tracing::debug!("{e:?}"); + }) + .ok() + .and_then(|a| a) + { + let init_ctx = InitContext::init(config).await?; + let handle = &init_ctx.progress; + let mut update_phase = handle.add_phase("Updating Firmware".into(), Some(10)); + let mut reboot_phase = handle.add_phase("Rebooting".into(), Some(1)); - match update_firmware().await { - Ok(RequiresReboot(true)) => { - return Ok(Some(Shutdown { - export_args: None, - restart: true, - })) - } - Err(e) => { + server.serve_init(init_ctx); + + update_phase.start(); + if let Err(e) = update_firmware(firmware).await { tracing::warn!("Error performing firmware update: {e}"); tracing::debug!("{e:?}"); + } else { + update_phase.complete(); + reboot_phase.start(); + return Ok(Err(Shutdown { + export_args: None, + restart: true, + })); } - _ => (), } Command::new("ln") @@ -84,14 +93,7 @@ async fn setup_or_init(config: &ServerConfig) -> Result, Error> let ctx = InstallContext::init().await?; - let server = WebServer::install( - SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), - ctx.clone(), - )?; - - drop(song); - tokio::time::sleep(Duration::from_secs(1)).await; // let the record state that I hate this - CHIME.play().await?; + server.serve_install(ctx.clone()); ctx.shutdown .subscribe() @@ -99,33 +101,23 @@ async fn setup_or_init(config: &ServerConfig) -> Result, Error> .await .expect("context dropped"); - server.shutdown().await; + return Ok(Err(Shutdown { + export_args: None, + restart: true, + })); + } - Command::new("reboot") - .invoke(crate::ErrorKind::Unknown) - .await?; - } else if 
tokio::fs::metadata("/media/embassy/config/disk.guid") + if tokio::fs::metadata("/media/startos/config/disk.guid") .await .is_err() { let ctx = SetupContext::init(config)?; - let server = WebServer::setup( - SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), - ctx.clone(), - )?; - - drop(song); - tokio::time::sleep(Duration::from_secs(1)).await; // let the record state that I hate this - CHIME.play().await?; + server.serve_setup(ctx.clone()); let mut shutdown = ctx.shutdown.subscribe(); shutdown.recv().await.expect("context dropped"); - server.shutdown().await; - - drop(shutdown); - tokio::task::yield_now().await; if let Err(e) = Command::new("killall") .arg("firefox-esr") @@ -135,64 +127,85 @@ async fn setup_or_init(config: &ServerConfig) -> Result, Error> tracing::error!("Failed to kill kiosk: {}", e); tracing::debug!("{:?}", e); } + + Ok(Ok(match ctx.result.get() { + Some(Ok((_, rpc_ctx))) => (rpc_ctx.clone(), ctx.progress.clone()), + Some(Err(e)) => return Err(e.clone_output()), + None => { + return Err(Error::new( + eyre!("Setup mode exited before setup completed"), + ErrorKind::Unknown, + )) + } + })) } else { - let guid_string = tokio::fs::read_to_string("/media/embassy/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy - .await?; - let guid = guid_string.trim(); - let requires_reboot = crate::disk::main::import( - guid, - config.datadir(), - if tokio::fs::metadata(REPAIR_DISK_PATH).await.is_ok() { - RepairStrategy::Aggressive - } else { - RepairStrategy::Preen - }, - if guid.ends_with("_UNENC") { - None - } else { - Some(DEFAULT_PASSWORD) - }, - ) - .await?; - if tokio::fs::metadata(REPAIR_DISK_PATH).await.is_ok() { - tokio::fs::remove_file(REPAIR_DISK_PATH) - .await - .with_ctx(|_| (crate::ErrorKind::Filesystem, REPAIR_DISK_PATH))?; - } - if requires_reboot.0 { - crate::disk::main::export(guid, config.datadir()).await?; - Command::new("reboot") - .invoke(crate::ErrorKind::Unknown) + let init_ctx = 
InitContext::init(config).await?; + let handle = init_ctx.progress.clone(); + let err_channel = init_ctx.error.clone(); + + let mut disk_phase = handle.add_phase("Opening data drive".into(), Some(10)); + let init_phases = InitPhases::new(&handle); + let rpc_ctx_phases = InitRpcContextPhases::new(&handle); + + server.serve_init(init_ctx); + + async { + disk_phase.start(); + let guid_string = tokio::fs::read_to_string("/media/startos/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy .await?; - } - tracing::info!("Loaded Disk"); - crate::init::init(config).await?; - drop(song); - } - - Ok(None) -} - -async fn run_script_if_exists>(path: P) { - let script = path.as_ref(); - if script.exists() { - match Command::new("/bin/bash").arg(script).spawn() { - Ok(mut c) => { - if let Err(e) = c.wait().await { - tracing::error!("Error Running {}: {}", script.display(), e); - tracing::debug!("{:?}", e); - } + let disk_guid = Arc::new(String::from(guid_string.trim())); + let requires_reboot = crate::disk::main::import( + &**disk_guid, + config.datadir(), + if tokio::fs::metadata(REPAIR_DISK_PATH).await.is_ok() { + RepairStrategy::Aggressive + } else { + RepairStrategy::Preen + }, + if disk_guid.ends_with("_UNENC") { + None + } else { + Some(DEFAULT_PASSWORD) + }, + ) + .await?; + if tokio::fs::metadata(REPAIR_DISK_PATH).await.is_ok() { + tokio::fs::remove_file(REPAIR_DISK_PATH) + .await + .with_ctx(|_| (crate::ErrorKind::Filesystem, REPAIR_DISK_PATH))?; } - Err(e) => { - tracing::error!("Error Running {}: {}", script.display(), e); - tracing::debug!("{:?}", e); + disk_phase.complete(); + tracing::info!("Loaded Disk"); + + if requires_reboot.0 { + let mut reboot_phase = handle.add_phase("Rebooting".into(), Some(1)); + reboot_phase.start(); + return Ok(Err(Shutdown { + export_args: Some((disk_guid, config.datadir().to_owned())), + restart: true, + })); } + + let InitResult { net_ctrl } = crate::init::init(config, 
init_phases).await?; + + let rpc_ctx = + RpcContext::init(config, disk_guid, Some(net_ctrl), rpc_ctx_phases).await?; + + Ok::<_, Error>(Ok((rpc_ctx, handle))) } + .await + .map_err(|e| { + err_channel.send_replace(Some(e.clone_output())); + e + }) } } #[instrument(skip_all)] -async fn inner_main(config: &ServerConfig) -> Result, Error> { +pub async fn main( + server: &mut WebServer, + config: &ServerConfig, +) -> Result, Error> { if &*PLATFORM == "raspberrypi" && tokio::fs::metadata(STANDBY_MODE_PATH).await.is_ok() { tokio::fs::remove_file(STANDBY_MODE_PATH).await?; Command::new("sync").invoke(ErrorKind::Filesystem).await?; @@ -200,25 +213,20 @@ async fn inner_main(config: &ServerConfig) -> Result, Error> { futures::future::pending::<()>().await; } - crate::sound::BEP.play().await?; - - run_script_if_exists("/media/embassy/config/preinit.sh").await; - - let res = match setup_or_init(config).await { + let res = match setup_or_init(server, config).await { Err(e) => { async move { - tracing::error!("{}", e.source); - tracing::debug!("{}", e.source); - crate::sound::BEETHOVEN.play().await?; + tracing::error!("{e}"); + tracing::debug!("{e:?}"); let ctx = DiagnosticContext::init( config, - if tokio::fs::metadata("/media/embassy/config/disk.guid") + if tokio::fs::metadata("/media/startos/config/disk.guid") .await .is_ok() { Some(Arc::new( - tokio::fs::read_to_string("/media/embassy/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy + tokio::fs::read_to_string("/media/startos/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy .await? 
.trim() .to_owned(), @@ -229,44 +237,16 @@ async fn inner_main(config: &ServerConfig) -> Result, Error> { e, )?; - let server = WebServer::diagnostic( - SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), - ctx.clone(), - )?; + server.serve_diagnostic(ctx.clone()); let shutdown = ctx.shutdown.subscribe().recv().await.unwrap(); - server.shutdown().await; - - Ok(shutdown) + Ok(Err(shutdown)) } .await } Ok(s) => Ok(s), }; - run_script_if_exists("/media/embassy/config/postinit.sh").await; - res } - -pub fn main(config: &ServerConfig) { - let res = { - let rt = tokio::runtime::Builder::new_multi_thread() - .enable_all() - .build() - .expect("failed to initialize runtime"); - rt.block_on(inner_main(config)) - }; - - match res { - Ok(Some(shutdown)) => shutdown.execute(), - Ok(None) => (), - Err(e) => { - eprintln!("{}", e.source); - tracing::debug!("{:?}", e.source); - drop(e.source); - std::process::exit(e.kind as i32) - } - } -} diff --git a/core/startos/src/bins/startd.rs b/core/startos/src/bins/startd.rs index 3e571d6b2..d383f3091 100644 --- a/core/startos/src/bins/startd.rs +++ b/core/startos/src/bins/startd.rs @@ -1,6 +1,6 @@ +use std::cmp::max; use std::ffi::OsString; use std::net::{Ipv6Addr, SocketAddr}; -use std::path::Path; use std::sync::Arc; use clap::Parser; @@ -10,7 +10,8 @@ use tokio::signal::unix::signal; use tracing::instrument; use crate::context::config::ServerConfig; -use crate::context::{DiagnosticContext, RpcContext}; +use crate::context::rpc::InitRpcContextPhases; +use crate::context::{DiagnosticContext, InitContext, RpcContext}; use crate::net::web_server::WebServer; use crate::shutdown::Shutdown; use crate::system::launch_metrics_task; @@ -18,23 +19,51 @@ use crate::util::logger::EmbassyLogger; use crate::{Error, ErrorKind, ResultExt}; #[instrument(skip_all)] -async fn inner_main(config: &ServerConfig) -> Result, Error> { - let (rpc_ctx, server, shutdown) = async { - let rpc_ctx = RpcContext::init( +async fn inner_main( + server: &mut WebServer, + 
config: &ServerConfig, +) -> Result, Error> { + let rpc_ctx = if !tokio::fs::metadata("/run/startos/initialized") + .await + .is_ok() + { + let (ctx, handle) = match super::start_init::main(server, &config).await? { + Err(s) => return Ok(Some(s)), + Ok(ctx) => ctx, + }; + tokio::fs::write("/run/startos/initialized", "").await?; + + server.serve_main(ctx.clone()); + handle.complete(); + + ctx + } else { + let init_ctx = InitContext::init(config).await?; + let handle = init_ctx.progress.clone(); + let rpc_ctx_phases = InitRpcContextPhases::new(&handle); + server.serve_init(init_ctx); + + let ctx = RpcContext::init( config, Arc::new( - tokio::fs::read_to_string("/media/embassy/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy + tokio::fs::read_to_string("/media/startos/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy .await? .trim() .to_owned(), ), + None, + rpc_ctx_phases, ) .await?; + + server.serve_main(ctx.clone()); + handle.complete(); + + ctx + }; + + let (rpc_ctx, shutdown) = async { crate::hostname::sync_hostname(&rpc_ctx.account.read().await.hostname).await?; - let server = WebServer::main( - SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), - rpc_ctx.clone(), - )?; let mut shutdown_recv = rpc_ctx.shutdown.subscribe(); @@ -74,8 +103,6 @@ async fn inner_main(config: &ServerConfig) -> Result, Error> { .await }); - crate::sound::CHIME.play().await?; - metrics_task .map_err(|e| { Error::new( @@ -93,10 +120,9 @@ async fn inner_main(config: &ServerConfig) -> Result, Error> { sig_handler.abort(); - Ok::<_, Error>((rpc_ctx, server, shutdown)) + Ok::<_, Error>((rpc_ctx, shutdown)) } .await?; - server.shutdown().await; rpc_ctx.shutdown().await?; tracing::info!("RPC Context is dropped"); @@ -109,32 +135,31 @@ pub fn main(args: impl IntoIterator) { let config = ServerConfig::parse_from(args).load().unwrap(); - if 
!Path::new("/run/embassy/initialized").exists() { - super::start_init::main(&config); - std::fs::write("/run/embassy/initialized", "").unwrap(); - } - let res = { let rt = tokio::runtime::Builder::new_multi_thread() + .worker_threads(max(4, num_cpus::get())) .enable_all() .build() .expect("failed to initialize runtime"); rt.block_on(async { - match inner_main(&config).await { - Ok(a) => Ok(a), + let mut server = WebServer::new(SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80)); + match inner_main(&mut server, &config).await { + Ok(a) => { + server.shutdown().await; + Ok(a) + } Err(e) => { async { - tracing::error!("{}", e.source); - tracing::debug!("{:?}", e.source); - crate::sound::BEETHOVEN.play().await?; + tracing::error!("{e}"); + tracing::debug!("{e:?}"); let ctx = DiagnosticContext::init( &config, - if tokio::fs::metadata("/media/embassy/config/disk.guid") + if tokio::fs::metadata("/media/startos/config/disk.guid") .await .is_ok() { Some(Arc::new( - tokio::fs::read_to_string("/media/embassy/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy + tokio::fs::read_to_string("/media/startos/config/disk.guid") // unique identifier for volume group - keeps track of the disk that goes with your embassy .await? 
.trim() .to_owned(), @@ -145,10 +170,7 @@ pub fn main(args: impl IntoIterator) { e, )?; - let server = WebServer::diagnostic( - SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), 80), - ctx.clone(), - )?; + server.serve_diagnostic(ctx.clone()); let mut shutdown = ctx.shutdown.subscribe(); @@ -157,7 +179,7 @@ pub fn main(args: impl IntoIterator) { server.shutdown().await; - Ok::<_, Error>(shutdown) + Ok::<_, Error>(Some(shutdown)) } .await } diff --git a/core/startos/src/config/mod.rs b/core/startos/src/config/mod.rs index c600f590c..22edd98f7 100644 --- a/core/startos/src/config/mod.rs +++ b/core/startos/src/config/mod.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeSet; use std::sync::Arc; use std::time::Duration; @@ -9,13 +10,14 @@ use models::{ErrorKind, OptionExt, PackageId}; use patch_db::value::InternedString; use patch_db::Value; use regex::Regex; -use rpc_toolkit::{from_fn_async, Empty, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tracing::instrument; use ts_rs::TS; use crate::context::{CliContext, RpcContext}; use crate::prelude::*; +use crate::rpc_continuations::Guid; use crate::util::serde::{HandlerExtSerde, StdinDeserializable}; #[derive(Clone, Debug, Default, Serialize, Deserialize)] @@ -134,16 +136,19 @@ pub struct ConfigParams { } // #[command(subcommands(get, set))] -pub fn config() -> ParentHandler { +pub fn config() -> ParentHandler { ParentHandler::new() .subcommand( "get", from_fn_async(get) .with_inherited(|ConfigParams { id }, _| id) .with_display_serializable() - .with_remote_cli::(), + .with_call_remote::(), + ) + .subcommand( + "set", + set::().with_inherited(|ConfigParams { id }, _| id), ) - .subcommand("set", set().with_inherited(|ConfigParams { id }, _| id)) } #[instrument(skip_all)] @@ -153,7 +158,7 @@ pub async fn get(ctx: RpcContext, _: Empty, id: PackageId) -> Result ParentHandler { - ParentHandler::new().root_handler( - 
from_fn_async(set_impl) - .with_metadata("sync_db", Value::Bool(true)) - .with_inherited(|set_params, id| (id, set_params)) - .no_display() - .with_remote_cli::(), - ) +pub fn set() -> ParentHandler { + ParentHandler::new() + .root_handler( + from_fn_async(set_impl) + .with_metadata("sync_db", Value::Bool(true)) + .with_inherited(|set_params, id| (id, set_params)) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "dry", + from_fn_async(set_dry) + .with_inherited(|set_params, id| (id, set_params)) + .no_display() + .with_call_remote::(), + ) +} + +pub async fn set_dry( + ctx: RpcContext, + _: Empty, + ( + id, + SetParams { + timeout, + config: StdinDeserializable(config), + }, + ): (PackageId, SetParams), +) -> Result, Error> { + let mut breakages = BTreeSet::new(); + + let procedure_id = Guid::new(); + + let db = ctx.db.peek().await; + for dep in db + .as_public() + .as_package_data() + .as_entries()? + .into_iter() + .filter_map( + |(k, v)| match v.as_current_dependencies().contains_key(&id) { + Ok(true) => Some(Ok(k)), + Ok(false) => None, + Err(e) => Some(Err(e)), + }, + ) + { + let dep_id = dep?; + + let Some(dependent) = &*ctx.services.get(&dep_id).await else { + continue; + }; + + if dependent + .dependency_config(procedure_id.clone(), id.clone(), config.clone()) + .await? + .is_some() + { + breakages.insert(dep_id); + } + } + + Ok(breakages) } #[derive(Default)] @@ -215,7 +275,7 @@ pub async fn set_impl( ErrorKind::Unknown, ) })? 
- .configure(configure_context) + .configure(Guid::new(), configure_context) .await?; Ok(()) } diff --git a/core/startos/src/context/cli.rs b/core/startos/src/context/cli.rs index cc2fe232b..0eca1d2c2 100644 --- a/core/startos/src/context/cli.rs +++ b/core/startos/src/context/cli.rs @@ -10,7 +10,7 @@ use reqwest::Proxy; use reqwest_cookie_store::CookieStoreMutex; use rpc_toolkit::reqwest::{Client, Url}; use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{call_remote_http, CallRemote, Context}; +use rpc_toolkit::{call_remote_http, CallRemote, Context, Empty}; use tokio::net::TcpStream; use tokio::runtime::Runtime; use tokio_tungstenite::{MaybeTlsStream, WebSocketStream}; @@ -18,15 +18,17 @@ use tracing::instrument; use super::setup::CURRENT_SECRET; use crate::context::config::{local_config_path, ClientConfig}; -use crate::core::rpc_continuations::RequestGuid; +use crate::context::{DiagnosticContext, InitContext, InstallContext, RpcContext, SetupContext}; use crate::middleware::auth::LOCAL_AUTH_COOKIE_PATH; use crate::prelude::*; +use crate::rpc_continuations::Guid; #[derive(Debug)] pub struct CliContextSeed { - pub runtime: OnceCell, + pub runtime: OnceCell>, pub base_url: Url, pub rpc_url: Url, + pub registry_url: Option, pub client: Client, pub cookie_store: Arc, pub cookie_path: PathBuf, @@ -41,7 +43,9 @@ impl Drop for CliContextSeed { std::fs::create_dir_all(&parent_dir).unwrap(); } let mut writer = fd_lock_rs::FdLock::lock( - File::create(&tmp).unwrap(), + File::create(&tmp) + .with_ctx(|_| (ErrorKind::Filesystem, &tmp)) + .unwrap(), fd_lock_rs::LockType::Exclusive, true, ) @@ -66,6 +70,8 @@ impl CliContext { "http://localhost".parse()? 
}; + let registry = config.registry.clone(); + let cookie_path = config.cookie_path.unwrap_or_else(|| { local_config_path() .as_deref() @@ -76,9 +82,12 @@ impl CliContext { }); let cookie_store = Arc::new(CookieStoreMutex::new({ let mut store = if cookie_path.exists() { - CookieStore::load_json(BufReader::new(File::open(&cookie_path)?)) - .map_err(|e| eyre!("{}", e)) - .with_kind(crate::ErrorKind::Deserialization)? + CookieStore::load_json(BufReader::new( + File::open(&cookie_path) + .with_ctx(|_| (ErrorKind::Filesystem, cookie_path.display()))?, + )) + .map_err(|e| eyre!("{}", e)) + .with_kind(crate::ErrorKind::Deserialization)? } else { CookieStore::default() }; @@ -104,6 +113,17 @@ impl CliContext { .push("v1"); url }, + registry_url: registry + .map(|mut registry| { + registry + .path_segments_mut() + .map_err(|_| eyre!("Url cannot be base")) + .with_kind(crate::ErrorKind::ParseUrl)? + .push("rpc") + .push("v0"); + Ok::<_, Error>(registry) + }) + .transpose()?, client: { let mut builder = Client::builder().cookie_provider(cookie_store.clone()); if let Some(proxy) = config.proxy { @@ -149,7 +169,7 @@ impl CliContext { pub async fn ws_continuation( &self, - guid: RequestGuid, + guid: Guid, ) -> Result>, Error> { let mut url = self.base_url.clone(); let ws_scheme = match url.scheme() { @@ -179,7 +199,7 @@ impl CliContext { pub async fn rest_continuation( &self, - guid: RequestGuid, + guid: Guid, body: reqwest::Body, headers: reqwest::header::HeaderMap, ) -> Result { @@ -198,6 +218,29 @@ impl CliContext { .await .with_kind(ErrorKind::Network) } + + pub async fn call_remote( + &self, + method: &str, + params: Value, + ) -> Result + where + Self: CallRemote, + { + >::call_remote(&self, method, params, Empty {}) + .await + } + pub async fn call_remote_with( + &self, + method: &str, + params: Value, + extra: T, + ) -> Result + where + Self: CallRemote, + { + >::call_remote(&self, method, params, extra).await + } } impl AsRef for CliContext { fn as_ref(&self) -> &Jwk { 
@@ -211,21 +254,43 @@ impl std::ops::Deref for CliContext { } } impl Context for CliContext { - fn runtime(&self) -> tokio::runtime::Handle { - self.runtime - .get_or_init(|| { - tokio::runtime::Builder::new_multi_thread() - .enable_all() - .build() - .unwrap() - }) - .handle() - .clone() + fn runtime(&self) -> Option> { + Some( + self.runtime + .get_or_init(|| { + Arc::new( + tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(), + ) + }) + .clone(), + ) } } -#[async_trait::async_trait] -impl CallRemote for CliContext { - async fn call_remote(&self, method: &str, params: Value) -> Result { +impl CallRemote for CliContext { + async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result { + call_remote_http(&self.client, self.rpc_url.clone(), method, params).await + } +} +impl CallRemote for CliContext { + async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result { + call_remote_http(&self.client, self.rpc_url.clone(), method, params).await + } +} +impl CallRemote for CliContext { + async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result { + call_remote_http(&self.client, self.rpc_url.clone(), method, params).await + } +} +impl CallRemote for CliContext { + async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result { + call_remote_http(&self.client, self.rpc_url.clone(), method, params).await + } +} +impl CallRemote for CliContext { + async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result { call_remote_http(&self.client, self.rpc_url.clone(), method, params).await } } @@ -233,7 +298,7 @@ impl CallRemote for CliContext { #[test] fn test() { let ctx = CliContext::init(ClientConfig::default()).unwrap(); - ctx.runtime().block_on(async { + ctx.runtime().unwrap().block_on(async { reqwest::Client::new() .get("http://example.com") .send() diff --git a/core/startos/src/context/config.rs b/core/startos/src/context/config.rs index 55065e816..e02648919 
100644 --- a/core/startos/src/context/config.rs +++ b/core/startos/src/context/config.rs @@ -14,7 +14,7 @@ use crate::init::init_postgres; use crate::prelude::*; use crate::util::serde::IoFormat; -pub const DEVICE_CONFIG_PATH: &str = "/media/embassy/config/config.yaml"; // "/media/startos/config/config.yaml"; +pub const DEVICE_CONFIG_PATH: &str = "/media/startos/config/config.yaml"; // "/media/startos/config/config.yaml"; pub const CONFIG_PATH: &str = "/etc/startos/config.yaml"; pub const CONFIG_PATH_LOCAL: &str = ".startos/config.yaml"; @@ -37,7 +37,10 @@ pub trait ContextConfig: DeserializeOwned + Default { .map(|f| f.parse()) .transpose()? .unwrap_or_default(); - format.from_reader(File::open(path)?) + format.from_reader( + File::open(path.as_ref()) + .with_ctx(|_| (ErrorKind::Filesystem, path.as_ref().display()))?, + ) } fn load_path_rec(&mut self, path: Option>) -> Result<(), Error> { if let Some(path) = path.filter(|p| p.as_ref().exists()) { @@ -58,6 +61,8 @@ pub struct ClientConfig { pub config: Option, #[arg(short = 'h', long = "host")] pub host: Option, + #[arg(short = 'r', long = "registry")] + pub registry: Option, #[arg(short = 'p', long = "proxy")] pub proxy: Option, #[arg(long = "cookie-path")] @@ -71,8 +76,10 @@ impl ContextConfig for ClientConfig { } fn merge_with(&mut self, other: Self) { self.host = self.host.take().or(other.host); + self.registry = self.registry.take().or(other.registry); self.proxy = self.proxy.take().or(other.proxy); self.cookie_path = self.cookie_path.take().or(other.cookie_path); + self.developer_key_path = self.developer_key_path.take().or(other.developer_key_path); } } impl ClientConfig { @@ -89,35 +96,34 @@ impl ClientConfig { #[serde(rename_all = "kebab-case")] #[command(rename_all = "kebab-case")] pub struct ServerConfig { - #[arg(short = 'c', long = "config")] + #[arg(short, long)] pub config: Option, - #[arg(long = "wifi-interface")] - pub wifi_interface: Option, - #[arg(long = "ethernet-interface")] + #[arg(long)] pub 
ethernet_interface: Option, #[arg(skip)] pub os_partitions: Option, - #[arg(long = "bind-rpc")] + #[arg(long)] pub bind_rpc: Option, - #[arg(long = "tor-control")] + #[arg(long)] pub tor_control: Option, - #[arg(long = "tor-socks")] + #[arg(long)] pub tor_socks: Option, - #[arg(long = "dns-bind")] + #[arg(long)] pub dns_bind: Option>, - #[arg(long = "revision-cache-size")] + #[arg(long)] pub revision_cache_size: Option, - #[arg(short = 'd', long = "datadir")] + #[arg(short, long)] pub datadir: Option, - #[arg(long = "disable-encryption")] + #[arg(long)] pub disable_encryption: Option, + #[arg(long)] + pub multi_arch_s9pks: Option, } impl ContextConfig for ServerConfig { fn next(&mut self) -> Option { self.config.take() } fn merge_with(&mut self, other: Self) { - self.wifi_interface = self.wifi_interface.take().or(other.wifi_interface); self.ethernet_interface = self.ethernet_interface.take().or(other.ethernet_interface); self.os_partitions = self.os_partitions.take().or(other.os_partitions); self.bind_rpc = self.bind_rpc.take().or(other.bind_rpc); @@ -130,6 +136,7 @@ impl ContextConfig for ServerConfig { .or(other.revision_cache_size); self.datadir = self.datadir.take().or(other.datadir); self.disable_encryption = self.disable_encryption.take().or(other.disable_encryption); + self.multi_arch_s9pks = self.multi_arch_s9pks.take().or(other.multi_arch_s9pks); } } diff --git a/core/startos/src/context/diagnostic.rs b/core/startos/src/context/diagnostic.rs index 117e56061..0bf67e172 100644 --- a/core/startos/src/context/diagnostic.rs +++ b/core/startos/src/context/diagnostic.rs @@ -8,14 +8,16 @@ use tokio::sync::broadcast::Sender; use tracing::instrument; use crate::context::config::ServerConfig; +use crate::rpc_continuations::RpcContinuations; use crate::shutdown::Shutdown; use crate::Error; pub struct DiagnosticContextSeed { pub datadir: PathBuf, - pub shutdown: Sender>, + pub shutdown: Sender, pub error: Arc, pub disk_guid: Option>, + pub rpc_continuations: 
RpcContinuations, } #[derive(Clone)] @@ -37,10 +39,15 @@ impl DiagnosticContext { shutdown, disk_guid, error: Arc::new(error.into()), + rpc_continuations: RpcContinuations::new(), }))) } } - +impl AsRef for DiagnosticContext { + fn as_ref(&self) -> &RpcContinuations { + &self.rpc_continuations + } +} impl Context for DiagnosticContext {} impl Deref for DiagnosticContext { type Target = DiagnosticContextSeed; diff --git a/core/startos/src/context/init.rs b/core/startos/src/context/init.rs new file mode 100644 index 000000000..566457a9c --- /dev/null +++ b/core/startos/src/context/init.rs @@ -0,0 +1,50 @@ +use std::ops::Deref; +use std::sync::Arc; + +use rpc_toolkit::Context; +use tokio::sync::broadcast::Sender; +use tokio::sync::watch; +use tracing::instrument; + +use crate::context::config::ServerConfig; +use crate::progress::FullProgressTracker; +use crate::rpc_continuations::RpcContinuations; +use crate::Error; + +pub struct InitContextSeed { + pub config: ServerConfig, + pub error: watch::Sender>, + pub progress: FullProgressTracker, + pub shutdown: Sender<()>, + pub rpc_continuations: RpcContinuations, +} + +#[derive(Clone)] +pub struct InitContext(Arc); +impl InitContext { + #[instrument(skip_all)] + pub async fn init(cfg: &ServerConfig) -> Result { + let (shutdown, _) = tokio::sync::broadcast::channel(1); + Ok(Self(Arc::new(InitContextSeed { + config: cfg.clone(), + error: watch::channel(None).0, + progress: FullProgressTracker::new(), + shutdown, + rpc_continuations: RpcContinuations::new(), + }))) + } +} + +impl AsRef for InitContext { + fn as_ref(&self) -> &RpcContinuations { + &self.rpc_continuations + } +} + +impl Context for InitContext {} +impl Deref for InitContext { + type Target = InitContextSeed; + fn deref(&self) -> &Self::Target { + &*self.0 + } +} diff --git a/core/startos/src/context/install.rs b/core/startos/src/context/install.rs index d4717d2b0..c0c564b34 100644 --- a/core/startos/src/context/install.rs +++ 
b/core/startos/src/context/install.rs @@ -6,11 +6,13 @@ use tokio::sync::broadcast::Sender; use tracing::instrument; use crate::net::utils::find_eth_iface; +use crate::rpc_continuations::RpcContinuations; use crate::Error; pub struct InstallContextSeed { pub ethernet_interface: String, pub shutdown: Sender<()>, + pub rpc_continuations: RpcContinuations, } #[derive(Clone)] @@ -22,10 +24,17 @@ impl InstallContext { Ok(Self(Arc::new(InstallContextSeed { ethernet_interface: find_eth_iface().await?, shutdown, + rpc_continuations: RpcContinuations::new(), }))) } } +impl AsRef for InstallContext { + fn as_ref(&self) -> &RpcContinuations { + &self.rpc_continuations + } +} + impl Context for InstallContext {} impl Deref for InstallContext { type Target = InstallContextSeed; diff --git a/core/startos/src/context/mod.rs b/core/startos/src/context/mod.rs index 77f54f26c..efe261b0c 100644 --- a/core/startos/src/context/mod.rs +++ b/core/startos/src/context/mod.rs @@ -1,12 +1,14 @@ pub mod cli; pub mod config; pub mod diagnostic; +pub mod init; pub mod install; pub mod rpc; pub mod setup; pub use cli::CliContext; pub use diagnostic::DiagnosticContext; +pub use init::InitContext; pub use install::InstallContext; pub use rpc::RpcContext; pub use setup::SetupContext; diff --git a/core/startos/src/context/rpc.rs b/core/startos/src/context/rpc.rs index 6450eb561..0db681d3b 100644 --- a/core/startos/src/context/rpc.rs +++ b/core/startos/src/context/rpc.rs @@ -8,31 +8,34 @@ use std::time::Duration; use imbl_value::InternedString; use josekit::jwk::Jwk; -use patch_db::PatchDb; use reqwest::{Client, Proxy}; -use rpc_toolkit::Context; -use tokio::sync::{broadcast, oneshot, Mutex, RwLock}; +use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{CallRemote, Context, Empty}; +use tokio::sync::{broadcast, watch, Mutex, RwLock}; use tokio::time::Instant; use tracing::instrument; use super::setup::CURRENT_SECRET; use crate::account::AccountInfo; +use crate::auth::Sessions; use 
crate::context::config::ServerConfig; -use crate::core::rpc_continuations::{RequestGuid, RestHandler, RpcContinuation, WebSocketHandler}; -use crate::db::prelude::PatchDbExt; +use crate::db::model::Database; use crate::dependencies::compute_dependency_config_errs; use crate::disk::OsPartitionInfo; use crate::init::check_time_is_synchronized; -use crate::lxc::{LxcContainer, LxcManager}; -use crate::middleware::auth::HashSessionToken; -use crate::net::net_controller::NetController; -use crate::net::utils::find_eth_iface; +use crate::lxc::{ContainerId, LxcContainer, LxcManager}; +use crate::net::net_controller::{NetController, PreInitNetController}; +use crate::net::utils::{find_eth_iface, find_wifi_iface}; use crate::net::wifi::WpaCli; use crate::prelude::*; +use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle}; +use crate::rpc_continuations::{OpenAuthedContinuations, RpcContinuations}; +use crate::service::effects::callbacks::ServiceCallbacks; use crate::service::ServiceMap; use crate::shutdown::Shutdown; use crate::system::get_mem_info; use crate::util::lshw::{lshw, LshwDevice}; +use crate::util::sync::SyncMutex; pub struct RpcContextSeed { is_closed: AtomicBool, @@ -41,26 +44,31 @@ pub struct RpcContextSeed { pub ethernet_interface: String, pub datadir: PathBuf, pub disk_guid: Arc, - pub db: PatchDb, + pub ephemeral_sessions: SyncMutex, + pub db: TypedPatchDb, + pub sync_db: watch::Sender, pub account: RwLock, pub net_controller: Arc, + pub s9pk_arch: Option<&'static str>, pub services: ServiceMap, pub metrics_cache: RwLock>, pub shutdown: broadcast::Sender>, pub tor_socks: SocketAddr, pub lxc_manager: Arc, - pub open_authed_websockets: Mutex>>>, - pub rpc_stream_continuations: Mutex>, + pub open_authed_continuations: OpenAuthedContinuations, + pub rpc_continuations: RpcContinuations, + pub callbacks: ServiceCallbacks, pub wifi_manager: Option>>, pub current_secret: Arc, pub client: Client, pub hardware: Hardware, pub start_time: Instant, + 
#[cfg(feature = "dev")] pub dev: Dev, } pub struct Dev { - pub lxc: Mutex>, + pub lxc: Mutex>, } pub struct Hardware { @@ -68,45 +76,103 @@ pub struct Hardware { pub ram: u64, } +pub struct InitRpcContextPhases { + load_db: PhaseProgressTrackerHandle, + init_net_ctrl: PhaseProgressTrackerHandle, + read_device_info: PhaseProgressTrackerHandle, + cleanup_init: CleanupInitPhases, +} +impl InitRpcContextPhases { + pub fn new(handle: &FullProgressTracker) -> Self { + Self { + load_db: handle.add_phase("Loading database".into(), Some(5)), + init_net_ctrl: handle.add_phase("Initializing network".into(), Some(1)), + read_device_info: handle.add_phase("Reading device information".into(), Some(1)), + cleanup_init: CleanupInitPhases::new(handle), + } + } +} + +pub struct CleanupInitPhases { + init_services: PhaseProgressTrackerHandle, + check_dependencies: PhaseProgressTrackerHandle, +} +impl CleanupInitPhases { + pub fn new(handle: &FullProgressTracker) -> Self { + Self { + init_services: handle.add_phase("Initializing services".into(), Some(10)), + check_dependencies: handle.add_phase("Checking dependencies".into(), Some(1)), + } + } +} + #[derive(Clone)] pub struct RpcContext(Arc); impl RpcContext { #[instrument(skip_all)] - pub async fn init(config: &ServerConfig, disk_guid: Arc) -> Result { - tracing::info!("Loaded Config"); + pub async fn init( + config: &ServerConfig, + disk_guid: Arc, + net_ctrl: Option, + InitRpcContextPhases { + mut load_db, + mut init_net_ctrl, + mut read_device_info, + cleanup_init, + }: InitRpcContextPhases, + ) -> Result { let tor_proxy = config.tor_socks.unwrap_or(SocketAddr::V4(SocketAddrV4::new( Ipv4Addr::new(127, 0, 0, 1), 9050, ))); let (shutdown, _) = tokio::sync::broadcast::channel(1); - let db = config.db().await?; + load_db.start(); + let db = if let Some(net_ctrl) = &net_ctrl { + net_ctrl.db.clone() + } else { + TypedPatchDb::::load(config.db().await?).await? 
+ }; let peek = db.peek().await; let account = AccountInfo::load(&peek)?; + load_db.complete(); tracing::info!("Opened PatchDB"); + + init_net_ctrl.start(); let net_controller = Arc::new( NetController::init( - db.clone(), - config - .tor_control - .unwrap_or(SocketAddr::from(([127, 0, 0, 1], 9051))), - tor_proxy, + if let Some(net_ctrl) = net_ctrl { + net_ctrl + } else { + PreInitNetController::init( + db.clone(), + config + .tor_control + .unwrap_or(SocketAddr::from(([127, 0, 0, 1], 9051))), + tor_proxy, + &account.hostname, + account.tor_key.clone(), + ) + .await? + }, config .dns_bind .as_deref() .unwrap_or(&[SocketAddr::from(([127, 0, 0, 1], 53))]), - &account.hostname, - account.tor_key.clone(), ) .await?, ); + init_net_ctrl.complete(); tracing::info!("Initialized Net Controller"); + let services = ServiceMap::default(); let metrics_cache = RwLock::>::new(None); - tracing::info!("Initialized Notification Manager"); let tor_proxy_url = format!("socks5h://{tor_proxy}"); + + read_device_info.start(); let devices = lshw().await?; let ram = get_mem_info().await?.total.0 as u64 * 1024 * 1024; + read_device_info.complete(); if !db .peek() @@ -132,6 +198,8 @@ impl RpcContext { }); } + let wifi_interface = find_wifi_iface().await?; + let seed = Arc::new(RpcContextSeed { is_closed: AtomicBool::new(false), datadir: config.datadir().to_path_buf(), @@ -141,25 +209,32 @@ impl RpcContext { ErrorKind::Filesystem, ) })?, - wifi_interface: config.wifi_interface.clone(), + wifi_interface: wifi_interface.clone(), ethernet_interface: if let Some(eth) = config.ethernet_interface.clone() { eth } else { find_eth_iface().await? 
}, disk_guid, + ephemeral_sessions: SyncMutex::new(Sessions::new()), + sync_db: watch::Sender::new(db.sequence().await), db, account: RwLock::new(account), net_controller, + s9pk_arch: if config.multi_arch_s9pks.unwrap_or(false) { + None + } else { + Some(crate::ARCH) + }, services, metrics_cache, shutdown, tor_socks: tor_proxy, lxc_manager: Arc::new(LxcManager::new()), - open_authed_websockets: Mutex::new(BTreeMap::new()), - rpc_stream_continuations: Mutex::new(BTreeMap::new()), - wifi_manager: config - .wifi_interface + open_authed_continuations: OpenAuthedContinuations::new(), + rpc_continuations: RpcContinuations::new(), + callbacks: Default::default(), + wifi_manager: wifi_interface .clone() .map(|i| Arc::new(RwLock::new(WpaCli::init(i)))), current_secret: Arc::new( @@ -184,13 +259,14 @@ impl RpcContext { .with_kind(crate::ErrorKind::ParseUrl)?, hardware: Hardware { devices, ram }, start_time: Instant::now(), + #[cfg(feature = "dev")] dev: Dev { lxc: Mutex::new(BTreeMap::new()), }, }); let res = Self(seed.clone()); - res.cleanup_and_initialize().await?; + res.cleanup_and_initialize(cleanup_init).await?; tracing::info!("Cleaned up transient states"); Ok(res) } @@ -204,94 +280,68 @@ impl RpcContext { Ok(()) } - #[instrument(skip(self))] - pub async fn cleanup_and_initialize(&self) -> Result<(), Error> { - self.services.init(&self).await?; + #[instrument(skip_all)] + pub async fn cleanup_and_initialize( + &self, + CleanupInitPhases { + init_services, + mut check_dependencies, + }: CleanupInitPhases, + ) -> Result<(), Error> { + self.services.init(&self, init_services).await?; tracing::info!("Initialized Package Managers"); - let mut all_dependency_config_errs = BTreeMap::new(); + check_dependencies.start(); + let mut updated_current_dependents = BTreeMap::new(); let peek = self.db.peek().await; for (package_id, package) in peek.as_public().as_package_data().as_entries()?.into_iter() { let package = package.clone(); - let current_dependencies = 
package.as_current_dependencies().de()?; - all_dependency_config_errs.insert( - package_id.clone(), - compute_dependency_config_errs( - self, - &peek, - &package_id, - ¤t_dependencies, - &Default::default(), - ) - .await?, - ); + let mut current_dependencies = package.as_current_dependencies().de()?; + compute_dependency_config_errs(self, &package_id, &mut current_dependencies) + .await + .log_err(); + updated_current_dependents.insert(package_id.clone(), current_dependencies); } self.db .mutate(|v| { - for (package_id, errs) in all_dependency_config_errs { - if let Some(config_errors) = v + for (package_id, deps) in updated_current_dependents { + if let Some(model) = v .as_public_mut() .as_package_data_mut() .as_idx_mut(&package_id) - .map(|i| i.as_status_mut().as_dependency_config_errors_mut()) + .map(|i| i.as_current_dependencies_mut()) { - config_errors.ser(&errs)?; + model.ser(&deps)?; } } Ok(()) }) .await?; + check_dependencies.complete(); Ok(()) } - - #[instrument(skip_all)] - pub async fn clean_continuations(&self) { - let mut continuations = self.rpc_stream_continuations.lock().await; - let mut to_remove = Vec::new(); - for (guid, cont) in &*continuations { - if cont.is_timed_out() { - to_remove.push(guid.clone()); - } - } - for guid in to_remove { - continuations.remove(&guid); - } - } - - #[instrument(skip_all)] - pub async fn add_continuation(&self, guid: RequestGuid, handler: RpcContinuation) { - self.clean_continuations().await; - self.rpc_stream_continuations - .lock() - .await - .insert(guid, handler); - } - - pub async fn get_ws_continuation_handler( + pub async fn call_remote( &self, - guid: &RequestGuid, - ) -> Option { - let mut continuations = self.rpc_stream_continuations.lock().await; - if !matches!(continuations.get(guid), Some(RpcContinuation::WebSocket(_))) { - return None; - } - let Some(RpcContinuation::WebSocket(x)) = continuations.remove(guid) else { - return None; - }; - x.get().await + method: &str, + params: Value, + ) -> Result + 
where + Self: CallRemote, + { + >::call_remote(&self, method, params, Empty {}) + .await } - - pub async fn get_rest_continuation_handler(&self, guid: &RequestGuid) -> Option { - let mut continuations: tokio::sync::MutexGuard<'_, BTreeMap> = - self.rpc_stream_continuations.lock().await; - if !matches!(continuations.get(guid), Some(RpcContinuation::Rest(_))) { - return None; - } - let Some(RpcContinuation::Rest(x)) = continuations.remove(guid) else { - return None; - }; - x.get().await + pub async fn call_remote_with( + &self, + method: &str, + params: Value, + extra: T, + ) -> Result + where + Self: CallRemote, + { + >::call_remote(&self, method, params, extra).await } } impl AsRef for RpcContext { @@ -299,6 +349,16 @@ impl AsRef for RpcContext { &CURRENT_SECRET } } +impl AsRef for RpcContext { + fn as_ref(&self) -> &RpcContinuations { + &self.rpc_continuations + } +} +impl AsRef> for RpcContext { + fn as_ref(&self) -> &OpenAuthedContinuations { + &self.open_authed_continuations + } +} impl Context for RpcContext {} impl Deref for RpcContext { type Target = RpcContextSeed; diff --git a/core/startos/src/context/setup.rs b/core/startos/src/context/setup.rs index 013dc060b..999154977 100644 --- a/core/startos/src/context/setup.rs +++ b/core/startos/src/context/setup.rs @@ -1,23 +1,32 @@ use std::ops::Deref; use std::path::PathBuf; use std::sync::Arc; +use std::time::Duration; +use futures::{Future, StreamExt}; +use helpers::NonDetachingJoinHandle; +use imbl_value::InternedString; use josekit::jwk::Jwk; use patch_db::PatchDb; -use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::Context; use serde::{Deserialize, Serialize}; use sqlx::postgres::PgConnectOptions; use sqlx::PgPool; use tokio::sync::broadcast::Sender; -use tokio::sync::RwLock; +use tokio::sync::OnceCell; use tracing::instrument; +use ts_rs::TS; +use crate::account::AccountInfo; use crate::context::config::ServerConfig; +use crate::context::RpcContext; use crate::disk::OsPartitionInfo; use 
crate::init::init_postgres; use crate::prelude::*; -use crate::setup::SetupStatus; +use crate::progress::FullProgressTracker; +use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations}; +use crate::setup::SetupProgress; +use crate::util::net::WebSocketExt; lazy_static::lazy_static! { pub static ref CURRENT_SECRET: Jwk = Jwk::generate_ec_key(josekit::jwk::alg::ec::EcCurve::P256).unwrap_or_else(|e| { @@ -27,30 +36,36 @@ lazy_static::lazy_static! { }); } -#[derive(Clone, Serialize, Deserialize)] +#[derive(Debug, Clone, Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] +#[ts(export)] pub struct SetupResult { pub tor_address: String, - pub lan_address: String, + #[ts(type = "string")] + pub lan_address: InternedString, pub root_ca: String, } +impl TryFrom<&AccountInfo> for SetupResult { + type Error = Error; + fn try_from(value: &AccountInfo) -> Result { + Ok(Self { + tor_address: format!("https://{}", value.tor_key.public().get_onion_address()), + lan_address: value.hostname.lan_address(), + root_ca: String::from_utf8(value.root_ca_cert.to_pem()?)?, + }) + } +} pub struct SetupContextSeed { pub config: ServerConfig, pub os_partitions: OsPartitionInfo, pub disable_encryption: bool, + pub progress: FullProgressTracker, + pub task: OnceCell>, + pub result: OnceCell>, pub shutdown: Sender<()>, pub datadir: PathBuf, - pub selected_v2_drive: RwLock>, - pub cached_product_key: RwLock>>, - pub setup_status: RwLock>>, - pub setup_result: RwLock, SetupResult)>>, -} - -impl AsRef for SetupContextSeed { - fn as_ref(&self) -> &Jwk { - &*CURRENT_SECRET - } + pub rpc_continuations: RpcContinuations, } #[derive(Clone)] @@ -69,12 +84,12 @@ impl SetupContext { ) })?, disable_encryption: config.disable_encryption.unwrap_or(false), + progress: FullProgressTracker::new(), + task: OnceCell::new(), + result: OnceCell::new(), shutdown, datadir, - selected_v2_drive: RwLock::new(None), - cached_product_key: RwLock::new(None), - setup_status: RwLock::new(None), - 
setup_result: RwLock::new(None), + rpc_continuations: RpcContinuations::new(), }))) } #[instrument(skip_all)] @@ -85,17 +100,103 @@ impl SetupContext { .with_ctx(|_| (crate::ErrorKind::Filesystem, db_path.display().to_string()))?; Ok(db) } - #[instrument(skip_all)] - pub async fn secret_store(&self) -> Result { - init_postgres(&self.datadir).await?; - let secret_store = - PgPool::connect_with(PgConnectOptions::new().database("secrets").username("root")) - .await?; - sqlx::migrate!() - .run(&secret_store) - .await - .with_kind(crate::ErrorKind::Database)?; - Ok(secret_store) + + pub fn run_setup(&self, f: F) -> Result<(), Error> + where + F: FnOnce() -> Fut + Send + 'static, + Fut: Future> + Send, + { + let local_ctx = self.clone(); + self.task + .set( + tokio::spawn(async move { + local_ctx + .result + .get_or_init(|| async { + match f().await { + Ok(res) => { + tracing::info!("Setup complete!"); + Ok(res) + } + Err(e) => { + tracing::error!("Setup failed: {e}"); + tracing::debug!("{e:?}"); + Err(e) + } + } + }) + .await; + local_ctx.progress.complete(); + }) + .into(), + ) + .map_err(|_| { + if self.result.initialized() { + Error::new(eyre!("Setup already complete"), ErrorKind::InvalidRequest) + } else { + Error::new( + eyre!("Setup already in progress"), + ErrorKind::InvalidRequest, + ) + } + })?; + Ok(()) + } + + pub async fn progress(&self) -> SetupProgress { + use axum::extract::ws; + + let guid = Guid::new(); + let progress_tracker = self.progress.clone(); + let progress = progress_tracker.snapshot(); + self.rpc_continuations + .add( + guid.clone(), + RpcContinuation::ws( + |mut ws| async move { + if let Err(e) = async { + let mut stream = + progress_tracker.stream(Some(Duration::from_millis(100))); + while let Some(progress) = stream.next().await { + ws.send(ws::Message::Text( + serde_json::to_string(&progress) + .with_kind(ErrorKind::Serialization)?, + )) + .await + .with_kind(ErrorKind::Network)?; + if progress.overall.is_complete() { + break; + } + } + + 
ws.normal_close("complete").await?; + + Ok::<_, Error>(()) + } + .await + { + tracing::error!("Error in setup progress websocket: {e}"); + tracing::debug!("{e:?}"); + } + }, + Duration::from_secs(30), + ), + ) + .await; + + SetupProgress { progress, guid } + } +} + +impl AsRef for SetupContext { + fn as_ref(&self) -> &Jwk { + &*CURRENT_SECRET + } +} + +impl AsRef for SetupContext { + fn as_ref(&self) -> &RpcContinuations { + &self.rpc_continuations } } diff --git a/core/startos/src/control.rs b/core/startos/src/control.rs index 3ef7d6030..e831e07d6 100644 --- a/core/startos/src/control.rs +++ b/core/startos/src/control.rs @@ -1,13 +1,13 @@ use clap::Parser; use color_eyre::eyre::eyre; use models::PackageId; -use rpc_toolkit::command; use serde::{Deserialize, Serialize}; use tracing::instrument; use ts_rs::TS; use crate::context::RpcContext; use crate::prelude::*; +use crate::rpc_continuations::Guid; use crate::Error; #[derive(Deserialize, Serialize, Parser, TS)] @@ -24,7 +24,7 @@ pub async fn start(ctx: RpcContext, ControlParams { id }: ControlParams) -> Resu .await .as_ref() .or_not_found(lazy_format!("Manager for {id}"))? - .start() + .start(Guid::new()) .await?; Ok(()) @@ -37,7 +37,7 @@ pub async fn stop(ctx: RpcContext, ControlParams { id }: ControlParams) -> Resul .await .as_ref() .ok_or_else(|| Error::new(eyre!("Manager not found"), crate::ErrorKind::InvalidRequest))? - .stop() + .stop(Guid::new()) .await?; Ok(()) @@ -49,7 +49,7 @@ pub async fn restart(ctx: RpcContext, ControlParams { id }: ControlParams) -> Re .await .as_ref() .ok_or_else(|| Error::new(eyre!("Manager not found"), crate::ErrorKind::InvalidRequest))? 
- .restart() + .restart(Guid::new()) .await?; Ok(()) diff --git a/core/startos/src/core/mod.rs b/core/startos/src/core/mod.rs deleted file mode 100644 index 7c2dbbb06..000000000 --- a/core/startos/src/core/mod.rs +++ /dev/null @@ -1 +0,0 @@ -pub mod rpc_continuations; diff --git a/core/startos/src/core/rpc_continuations.rs b/core/startos/src/core/rpc_continuations.rs deleted file mode 100644 index 9a82cb1fe..000000000 --- a/core/startos/src/core/rpc_continuations.rs +++ /dev/null @@ -1,75 +0,0 @@ -use std::time::Duration; - -use axum::extract::ws::WebSocket; -use axum::extract::Request; -use axum::response::Response; -use futures::future::BoxFuture; -use helpers::TimedResource; -use imbl_value::InternedString; - -#[allow(unused_imports)] -use crate::prelude::*; -use crate::util::new_guid; - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize)] -pub struct RequestGuid(InternedString); -impl RequestGuid { - pub fn new() -> Self { - Self(new_guid()) - } - - pub fn from(r: &str) -> Option { - if r.len() != 64 { - return None; - } - for c in r.chars() { - if !(c >= 'A' && c <= 'Z' || c >= '2' && c <= '7') { - return None; - } - } - Some(RequestGuid(InternedString::intern(r))) - } -} -impl AsRef for RequestGuid { - fn as_ref(&self) -> &str { - self.0.as_ref() - } -} - -#[test] -fn parse_guid() { - println!( - "{:?}", - RequestGuid::from(&format!("{}", RequestGuid::new())) - ) -} - -impl std::fmt::Display for RequestGuid { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } -} - -pub type RestHandler = - Box BoxFuture<'static, Result> + Send>; - -pub type WebSocketHandler = Box BoxFuture<'static, ()> + Send>; - -pub enum RpcContinuation { - Rest(TimedResource), - WebSocket(TimedResource), -} -impl RpcContinuation { - pub fn rest(handler: RestHandler, timeout: Duration) -> Self { - RpcContinuation::Rest(TimedResource::new(handler, timeout)) - } - pub fn ws(handler: WebSocketHandler, timeout: 
Duration) -> Self { - RpcContinuation::WebSocket(TimedResource::new(handler, timeout)) - } - pub fn is_timed_out(&self) -> bool { - match self { - RpcContinuation::Rest(a) => a.is_timed_out(), - RpcContinuation::WebSocket(a) => a.is_timed_out(), - } - } -} diff --git a/core/startos/src/db/mod.rs b/core/startos/src/db/mod.rs index ab5d23efb..ef35bd30d 100644 --- a/core/startos/src/db/mod.rs +++ b/core/startos/src/db/mod.rs @@ -3,175 +3,43 @@ pub mod prelude; use std::path::PathBuf; use std::sync::Arc; +use std::time::Duration; -use axum::extract::ws::{self, WebSocket}; -use axum::extract::WebSocketUpgrade; -use axum::response::Response; +use axum::extract::ws; use clap::Parser; -use futures::{FutureExt, StreamExt}; -use http::header::COOKIE; -use http::HeaderMap; +use imbl_value::InternedString; +use itertools::Itertools; use patch_db::json_ptr::{JsonPointer, ROOT}; -use patch_db::{Dump, Revision}; +use patch_db::{DiffPatch, Dump, Revision}; use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{command, from_fn_async, CallRemote, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; -use serde_json::Value; -use tokio::sync::oneshot; +use tokio::sync::mpsc::{self, UnboundedReceiver}; +use tokio::sync::watch; use tracing::instrument; use ts_rs::TS; use crate::context::{CliContext, RpcContext}; -use crate::middleware::auth::{HasValidSession, HashSessionToken}; use crate::prelude::*; +use crate::rpc_continuations::{Guid, RpcContinuation}; +use crate::util::net::WebSocketExt; use crate::util::serde::{apply_expr, HandlerExtSerde}; lazy_static::lazy_static! 
{ static ref PUBLIC: JsonPointer = "/public".parse().unwrap(); } -#[instrument(skip_all)] -async fn ws_handler( - ctx: RpcContext, - session: Option<(HasValidSession, HashSessionToken)>, - mut stream: WebSocket, -) -> Result<(), Error> { - let (dump, sub) = ctx.db.dump_and_sub(PUBLIC.clone()).await; - - if let Some((session, token)) = session { - let kill = subscribe_to_session_kill(&ctx, token).await; - send_dump(session.clone(), &mut stream, dump).await?; - - deal_with_messages(session, kill, sub, stream).await?; - } else { - stream - .send(ws::Message::Close(Some(ws::CloseFrame { - code: ws::close_code::ERROR, - reason: "UNAUTHORIZED".into(), - }))) - .await - .with_kind(ErrorKind::Network)?; - drop(stream); - } - - Ok(()) -} - -async fn subscribe_to_session_kill( - ctx: &RpcContext, - token: HashSessionToken, -) -> oneshot::Receiver<()> { - let (send, recv) = oneshot::channel(); - let mut guard = ctx.open_authed_websockets.lock().await; - if !guard.contains_key(&token) { - guard.insert(token, vec![send]); - } else { - guard.get_mut(&token).unwrap().push(send); - } - recv -} - -#[instrument(skip_all)] -async fn deal_with_messages( - _has_valid_authentication: HasValidSession, - mut kill: oneshot::Receiver<()>, - mut sub: patch_db::Subscriber, - mut stream: WebSocket, -) -> Result<(), Error> { - let mut timer = tokio::time::interval(tokio::time::Duration::from_secs(5)); - - loop { - futures::select! 
{ - _ = (&mut kill).fuse() => { - tracing::info!("Closing WebSocket: Reason: Session Terminated"); - stream - .send(ws::Message::Close(Some(ws::CloseFrame { - code: ws::close_code::ERROR, - reason: "UNAUTHORIZED".into(), - }))).await - .with_kind(ErrorKind::Network)?; - drop(stream); - return Ok(()) - } - new_rev = sub.recv().fuse() => { - let rev = new_rev.expect("UNREACHABLE: patch-db is dropped"); - stream - .send(ws::Message::Text(serde_json::to_string(&rev).with_kind(ErrorKind::Serialization)?)) - .await - .with_kind(ErrorKind::Network)?; - } - message = stream.next().fuse() => { - let message = message.transpose().with_kind(ErrorKind::Network)?; - match message { - None => { - tracing::info!("Closing WebSocket: Stream Finished"); - return Ok(()) - } - _ => (), - } - } - // This is trying to give a health checks to the home to keep the ui alive. - _ = timer.tick().fuse() => { - stream - .send(ws::Message::Ping(vec![])) - .await - .with_kind(crate::ErrorKind::Network)?; - } - } - } -} - -async fn send_dump( - _has_valid_authentication: HasValidSession, - stream: &mut WebSocket, - dump: Dump, -) -> Result<(), Error> { - stream - .send(ws::Message::Text( - serde_json::to_string(&dump).with_kind(ErrorKind::Serialization)?, - )) - .await - .with_kind(ErrorKind::Network)?; - Ok(()) -} - -pub async fn subscribe( - ctx: RpcContext, - headers: HeaderMap, - ws: WebSocketUpgrade, -) -> Result { - let session = match async { - let token = HashSessionToken::from_header(headers.get(COOKIE))?; - let session = HasValidSession::from_header(headers.get(COOKIE), &ctx).await?; - Ok::<_, Error>((session, token)) - } - .await - { - Ok(a) => Some(a), - Err(e) => { - if e.kind != ErrorKind::Authorization { - tracing::error!("Error Authenticating Websocket: {}", e); - tracing::debug!("{:?}", e); - } - None - } - }; - Ok(ws.on_upgrade(|ws| async move { - match ws_handler(ctx, session, ws).await { - Ok(()) => (), - Err(e) => { - tracing::error!("WebSocket Closed: {}", e); - 
tracing::debug!("{:?}", e); - } - } - })) -} - -pub fn db() -> ParentHandler { +pub fn db() -> ParentHandler { ParentHandler::new() .subcommand("dump", from_fn_async(cli_dump).with_display_serializable()) .subcommand("dump", from_fn_async(dump).no_cli()) - .subcommand("put", put()) + .subcommand( + "subscribe", + from_fn_async(subscribe) + .with_metadata("get_session", Value::Bool(true)) + .no_cli(), + ) + .subcommand("put", put::()) .subcommand("apply", from_fn_async(cli_apply).no_display()) .subcommand("apply", from_fn_async(apply).no_cli()) } @@ -195,78 +63,202 @@ pub struct CliDumpParams { #[instrument(skip_all)] async fn cli_dump( - ctx: CliContext, - CliDumpParams { - path, - include_private, - }: CliDumpParams, + HandlerArgs { + context, + parent_method, + method, + params: CliDumpParams { + include_private, + path, + }, + .. + }: HandlerArgs, ) -> Result { let dump = if let Some(path) = path { PatchDb::open(path).await?.dump(&ROOT).await } else { + let method = parent_method.into_iter().chain(method).join("."); from_value::( - ctx.call_remote( - "db.dump", - imbl_value::json!({ "includePrivate":include_private }), - ) - .await?, + context + .call_remote::( + &method, + imbl_value::json!({ + "pointer": if include_private { + AsRef::::as_ref(&ROOT) + } else { + AsRef::::as_ref(&*PUBLIC) + } + }), + ) + .await?, )? 
}; Ok(dump) } -#[derive(Deserialize, Serialize, Parser, TS)] +#[derive(Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] pub struct DumpParams { - #[arg(long = "include-private", short = 'p')] - #[serde(default)] - #[ts(skip)] - include_private: bool, + #[ts(type = "string | null")] + pointer: Option, } -pub async fn dump( +pub async fn dump(ctx: RpcContext, DumpParams { pointer }: DumpParams) -> Result { + Ok(ctx.db.dump(pointer.as_ref().unwrap_or(&*PUBLIC)).await) +} + +#[derive(Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +pub struct SubscribeParams { + #[ts(type = "string | null")] + pointer: Option, + #[ts(skip)] + #[serde(rename = "__auth_session")] + session: InternedString, +} + +#[derive(Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +pub struct SubscribeRes { + #[ts(type = "{ id: number; value: unknown }")] + pub dump: Dump, + pub guid: Guid, +} + +struct DbSubscriber { + rev: u64, + sub: UnboundedReceiver, + sync_db: watch::Receiver, +} +impl DbSubscriber { + async fn recv(&mut self) -> Option { + loop { + tokio::select! 
{ + rev = self.sub.recv() => { + if let Some(rev) = rev.as_ref() { + self.rev = rev.id; + } + return rev + } + _ = self.sync_db.changed() => { + let id = *self.sync_db.borrow(); + if id > self.rev { + match self.sub.try_recv() { + Ok(rev) => { + self.rev = rev.id; + return Some(rev) + } + Err(mpsc::error::TryRecvError::Disconnected) => { + return None + } + Err(mpsc::error::TryRecvError::Empty) => { + return Some(Revision { id, patch: DiffPatch::default() }) + } + } + } + } + } + } + } +} + +pub async fn subscribe( ctx: RpcContext, - DumpParams { include_private }: DumpParams, -) -> Result { - Ok(if include_private { - ctx.db.dump(&ROOT).await - } else { - ctx.db.dump(&PUBLIC).await - }) + SubscribeParams { pointer, session }: SubscribeParams, +) -> Result { + let (dump, sub) = ctx + .db + .dump_and_sub(pointer.unwrap_or_else(|| PUBLIC.clone())) + .await; + let mut sub = DbSubscriber { + rev: dump.id, + sub, + sync_db: ctx.sync_db.subscribe(), + }; + let guid = Guid::new(); + ctx.rpc_continuations + .add( + guid.clone(), + RpcContinuation::ws_authed( + &ctx, + session, + |mut ws| async move { + if let Err(e) = async { + while let Some(rev) = sub.recv().await { + ws.send(ws::Message::Text( + serde_json::to_string(&rev).with_kind(ErrorKind::Serialization)?, + )) + .await + .with_kind(ErrorKind::Network)?; + } + + ws.normal_close("complete").await?; + + Ok::<_, Error>(()) + } + .await + { + tracing::error!("Error in db websocket: {e}"); + tracing::debug!("{e:?}"); + } + }, + Duration::from_secs(30), + ), + ) + .await; + + Ok(SubscribeRes { dump, guid }) +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct CliApplyParams { + expr: String, + path: Option, } #[instrument(skip_all)] async fn cli_apply( - ctx: CliContext, - ApplyParams { expr, path }: ApplyParams, + HandlerArgs { + context, + parent_method, + method, + params: CliApplyParams { expr, path }, + .. 
+ }: HandlerArgs, ) -> Result<(), RpcError> { if let Some(path) = path { PatchDb::open(path) .await? - .mutate(|db| { + .apply_function(|db| { let res = apply_expr( - serde_json::to_value(patch_db::Value::from(db.clone())) + serde_json::to_value(patch_db::Value::from(db)) .with_kind(ErrorKind::Deserialization)? .into(), &expr, )?; - db.ser( - &serde_json::from_value::(res.clone().into()).with_ctx( - |_| { - ( - crate::ErrorKind::Deserialization, - "result does not match database model", - ) - }, + Ok::<_, Error>(( + to_value( + &serde_json::from_value::(res.clone().into()).with_ctx( + |_| { + ( + crate::ErrorKind::Deserialization, + "result does not match database model", + ) + }, + )?, )?, - ) + (), + )) }) .await?; } else { - ctx.call_remote("db.apply", imbl_value::json!({ "expr": expr })) + let method = parent_method.into_iter().chain(method).join("."); + context + .call_remote::(&method, imbl_value::json!({ "expr": expr })) .await?; } @@ -278,10 +270,9 @@ async fn cli_apply( #[command(rename_all = "kebab-case")] pub struct ApplyParams { expr: String, - path: Option, } -pub async fn apply(ctx: RpcContext, ApplyParams { expr, .. }: ApplyParams) -> Result<(), Error> { +pub async fn apply(ctx: RpcContext, ApplyParams { expr }: ApplyParams) -> Result<(), Error> { ctx.db .mutate(|db| { let res = apply_expr( @@ -303,12 +294,12 @@ pub async fn apply(ctx: RpcContext, ApplyParams { expr, .. }: ApplyParams) -> Re .await } -pub fn put() -> ParentHandler { +pub fn put() -> ParentHandler { ParentHandler::new().subcommand( "ui", from_fn_async(ui) .with_display_serializable() - .with_remote_cli::(), + .with_call_remote::(), ) } #[derive(Deserialize, Serialize, Parser, TS)] @@ -324,7 +315,7 @@ pub struct UiParams { // #[command(display(display_serializable))] #[instrument(skip_all)] pub async fn ui(ctx: RpcContext, UiParams { pointer, value, .. }: UiParams) -> Result<(), Error> { - let ptr = "/ui" + let ptr = "/public/ui" .parse::() .with_kind(ErrorKind::Database)? 
+ &pointer; diff --git a/core/startos/src/db/model/mod.rs b/core/startos/src/db/model/mod.rs index 9be0f8b68..678f7e5fb 100644 --- a/core/startos/src/db/model/mod.rs +++ b/core/startos/src/db/model/mod.rs @@ -2,7 +2,6 @@ use std::collections::BTreeMap; use patch_db::HasModel; use serde::{Deserialize, Serialize}; -use ts_rs::TS; use crate::account::AccountInfo; use crate::auth::Sessions; @@ -41,6 +40,7 @@ impl Database { notifications: Notifications::new(), cifs: CifsTargets::new(), package_stores: BTreeMap::new(), + compat_s9pk_key: Pem(account.compat_s9pk_key.clone()), }, // TODO }) } diff --git a/core/startos/src/db/model/package.rs b/core/startos/src/db/model/package.rs index 83a35d086..cb537a2b5 100644 --- a/core/startos/src/db/model/package.rs +++ b/core/startos/src/db/model/package.rs @@ -1,7 +1,7 @@ use std::collections::{BTreeMap, BTreeSet}; use chrono::{DateTime, Utc}; -use emver::VersionRange; +use exver::VersionRange; use imbl_value::InternedString; use models::{ActionId, DataUrl, HealthCheckId, HostId, PackageId, ServiceInterfaceId}; use patch_db::json_ptr::JsonPointer; @@ -10,8 +10,8 @@ use reqwest::Url; use serde::{Deserialize, Serialize}; use ts_rs::TS; -use crate::net::host::HostInfo; -use crate::net::service_interface::ServiceInterfaceWithHostInfo; +use crate::net::host::Hosts; +use crate::net::service_interface::ServiceInterface; use crate::prelude::*; use crate::progress::FullProgress; use crate::s9pk::manifest::Manifest; @@ -54,7 +54,7 @@ impl PackageState { pub fn expect_installed(&self) -> Result<&InstalledState, Error> { match self { Self::Installed(a) => Ok(a), - a => Err(Error::new( + _ => Err(Error::new( eyre!( "Package {} is not in installed state", self.as_manifest(ManifestPreference::Old).id @@ -63,6 +63,18 @@ impl PackageState { )), } } + pub fn expect_removing(&self) -> Result<&InstalledState, Error> { + match self { + Self::Removing(a) => Ok(a), + _ => Err(Error::new( + eyre!( + "Package {} is not in removing state", + 
self.as_manifest(ManifestPreference::Old).id + ), + ErrorKind::InvalidRequest, + )), + } + } pub fn into_installing_info(self) -> Option { match self { Self::Installing(InstallingState { installing_info }) @@ -161,7 +173,7 @@ impl Model { pub fn expect_installed(&self) -> Result<&Model, Error> { match self.as_match() { PackageStateMatchModelRef::Installed(a) => Ok(a), - a => Err(Error::new( + _ => Err(Error::new( eyre!( "Package {} is not in installed state", self.as_manifest(ManifestPreference::Old).as_id().de()? @@ -251,7 +263,7 @@ impl Model { PackageStateMatchModelMut::Installed(s) | PackageStateMatchModelMut::Removing(s) => { s.as_manifest_mut() } - PackageStateMatchModelMut::Error(s) => { + PackageStateMatchModelMut::Error(_) => { return Err(Error::new( eyre!("could not determine package state to get manifest"), ErrorKind::Database, @@ -325,7 +337,7 @@ pub struct PackageDataEntry { pub state_info: PackageState, pub status: Status, #[ts(type = "string | null")] - pub marketplace_url: Option, + pub registry: Option, #[ts(type = "string")] pub developer_key: Pem, pub icon: DataUrl<'static>, @@ -333,8 +345,8 @@ pub struct PackageDataEntry { pub last_backup: Option>, pub current_dependencies: CurrentDependencies, pub actions: BTreeMap, - pub service_interfaces: BTreeMap, - pub hosts: HostInfo, + pub service_interfaces: BTreeMap, + pub hosts: Hosts, #[ts(type = "string[]")] pub store_exposed_dependents: Vec, } @@ -372,14 +384,14 @@ impl Map for CurrentDependencies { #[derive(Clone, Debug, Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] pub struct CurrentDependencyInfo { + #[ts(type = "string | null")] + pub title: Option, + pub icon: Option>, #[serde(flatten)] pub kind: CurrentDependencyKind, - pub title: String, - pub icon: DataUrl<'static>, #[ts(type = "string")] - pub registry_url: Url, - #[ts(type = "string")] - pub version_spec: VersionRange, + pub version_range: VersionRange, + pub config_satisfied: bool, } #[derive(Clone, Debug, Deserialize, 
Serialize, TS)] diff --git a/core/startos/src/db/model/private.rs b/core/startos/src/db/model/private.rs index 2b8c55dbd..c57364fc3 100644 --- a/core/startos/src/db/model/private.rs +++ b/core/startos/src/db/model/private.rs @@ -19,6 +19,8 @@ use crate::util::serde::Pem; pub struct Private { pub key_store: KeyStore, pub password: String, // argon2 hash + #[serde(default = "generate_compat_key")] + pub compat_s9pk_key: Pem, pub ssh_privkey: Pem, pub ssh_pubkeys: SshKeys, pub available_ports: AvailablePorts, @@ -28,3 +30,7 @@ pub struct Private { #[serde(default)] pub package_stores: BTreeMap, } + +fn generate_compat_key() -> Pem { + Pem(ed25519_dalek::SigningKey::generate(&mut rand::thread_rng())) +} diff --git a/core/startos/src/db/model/public.rs b/core/startos/src/db/model/public.rs index d1eec5443..b20693a90 100644 --- a/core/startos/src/db/model/public.rs +++ b/core/startos/src/db/model/public.rs @@ -1,8 +1,8 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; use std::net::{Ipv4Addr, Ipv6Addr}; use chrono::{DateTime, Utc}; -use emver::VersionRange; +use exver::{Version, VersionRange}; use imbl_value::InternedString; use ipnet::{Ipv4Net, Ipv6Net}; use isocountry::CountryCode; @@ -19,8 +19,9 @@ use crate::account::AccountInfo; use crate::db::model::package::AllPackageData; use crate::net::utils::{get_iface_ipv4_addr, get_iface_ipv6_addr}; use crate::prelude::*; +use crate::progress::FullProgress; +use crate::system::SmtpValue; use crate::util::cpupower::Governor; -use crate::util::Version; use crate::version::{Current, VersionT}; use crate::{ARCH, PLATFORM}; @@ -31,7 +32,7 @@ use crate::{ARCH, PLATFORM}; pub struct Public { pub server_info: ServerInfo, pub package_data: AllPackageData, - #[ts(type = "any")] + #[ts(type = "unknown")] pub ui: Value, } impl Public { @@ -42,10 +43,9 @@ impl Public { arch: get_arch(), platform: get_platform(), id: account.server_id.clone(), - version: Current::new().semver().into(), + version: 
Current::new().semver(), hostname: account.hostname.no_dot_host_name(), last_backup: None, - last_wifi_region: None, eos_version_compat: Current::new().compat().clone(), lan_address, onion_address: account.tor_key.public().get_onion_address(), @@ -60,11 +60,7 @@ impl Public { shutting_down: false, restarting: false, }, - wifi: WifiInfo { - ssids: Vec::new(), - connected: None, - selected: None, - }, + wifi: WifiInfo::default(), unread_notification_count: 0, password_hash: account.password.clone(), pubkey: ssh_key::PublicKey::from(&account.ssh_key) @@ -80,6 +76,7 @@ impl Public { ntp_synced: false, zram: true, governor: None, + smtp: None, }, package_data: AllPackageData::default(), ui: serde_json::from_str(include_str!(concat!( @@ -111,14 +108,12 @@ pub struct ServerInfo { #[ts(type = "string")] pub platform: InternedString, pub id: String, - pub hostname: String, + #[ts(type = "string")] + pub hostname: InternedString, #[ts(type = "string")] pub version: Version, #[ts(type = "string | null")] pub last_backup: Option>, - /// Used in the wifi to determine the region to set the system to - #[ts(type = "string | null")] - pub last_wifi_region: Option, #[ts(type = "string")] pub eos_version_compat: VersionRange, #[ts(type = "string")] @@ -142,6 +137,7 @@ pub struct ServerInfo { #[serde(default)] pub zram: bool, pub governor: Option, + pub smtp: Option, } #[derive(Debug, Deserialize, Serialize, HasModel, TS)] @@ -183,32 +179,23 @@ pub struct BackupProgress { pub struct ServerStatus { pub backup_progress: Option>, pub updated: bool, - pub update_progress: Option, + pub update_progress: Option, #[serde(default)] pub shutting_down: bool, #[serde(default)] pub restarting: bool, } -#[derive(Debug, Deserialize, Serialize, HasModel, TS)] -#[serde(rename_all = "camelCase")] -#[model = "Model"] -#[ts(export)] -pub struct UpdateProgress { - #[ts(type = "number | null")] - pub size: Option, - #[ts(type = "number")] - pub downloaded: u64, -} - -#[derive(Debug, Deserialize, 
Serialize, HasModel, TS)] +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] #[serde(rename_all = "camelCase")] #[model = "Model"] #[ts(export)] pub struct WifiInfo { - pub ssids: Vec, + pub interface: Option, + pub ssids: BTreeSet, pub selected: Option, - pub connected: Option, + #[ts(type = "string | null")] + pub last_region: Option, } #[derive(Debug, Deserialize, Serialize, TS)] diff --git a/core/startos/src/db/prelude.rs b/core/startos/src/db/prelude.rs index 43dd59002..419b356ef 100644 --- a/core/startos/src/db/prelude.rs +++ b/core/startos/src/db/prelude.rs @@ -1,22 +1,16 @@ use std::collections::BTreeMap; -use std::future::Future; use std::marker::PhantomData; -use std::panic::UnwindSafe; use std::str::FromStr; use chrono::{DateTime, Utc}; pub use imbl_value::Value; -use patch_db::json_ptr::ROOT; use patch_db::value::InternedString; pub use patch_db::{HasModel, PatchDb}; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; -use crate::db::model::DatabaseModel; use crate::prelude::*; -pub type Peeked = Model; - pub fn to_value(value: &T) -> Result where T: Serialize, @@ -31,45 +25,7 @@ where patch_db::value::from_value(value).with_kind(ErrorKind::Deserialization) } -pub trait PatchDbExt { - fn peek(&self) -> impl Future + Send; - fn mutate( - &self, - f: impl FnOnce(&mut DatabaseModel) -> Result + UnwindSafe + Send, - ) -> impl Future> + Send; - fn map_mutate( - &self, - f: impl FnOnce(DatabaseModel) -> Result + UnwindSafe + Send, - ) -> impl Future> + Send; -} -impl PatchDbExt for PatchDb { - async fn peek(&self) -> DatabaseModel { - DatabaseModel::from(self.dump(&ROOT).await.value) - } - async fn mutate( - &self, - f: impl FnOnce(&mut DatabaseModel) -> Result + UnwindSafe + Send, - ) -> Result { - Ok(self - .apply_function(|mut v| { - let model = <&mut DatabaseModel>::from(&mut v); - let res = f(model)?; - Ok::<_, Error>((v, res)) - }) - .await? 
- .1) - } - async fn map_mutate( - &self, - f: impl FnOnce(DatabaseModel) -> Result + UnwindSafe + Send, - ) -> Result { - Ok(DatabaseModel::from( - self.apply_function(|v| f(DatabaseModel::from(v)).map(|a| (a.into(), ()))) - .await? - .0, - )) - } -} +pub type TypedPatchDb = patch_db::TypedPatchDb; /// &mut Model <=> &mut Value #[repr(transparent)] @@ -125,7 +81,7 @@ impl Model { Ok(res) } pub fn map_mutate(&mut self, f: impl FnOnce(T) -> Result) -> Result { - let mut orig = self.de()?; + let orig = self.de()?; let res = f(orig)?; self.ser(&res)?; Ok(res) @@ -262,10 +218,9 @@ where .into()), } } - pub fn upsert(&mut self, key: &T::Key, value: F) -> Result<&mut Model, Error> + pub fn upsert(&mut self, key: &T::Key, value: F) -> Result<&mut Model, Error> where - F: FnOnce() -> D, - D: AsRef, + F: FnOnce() -> Result, { use serde::ser::Error; match &mut self.value { @@ -278,7 +233,7 @@ where s.as_ref().index_or_insert(v) }); if !exists { - res.ser(value().as_ref())?; + res.ser(&value()?)?; } Ok(res) } @@ -375,6 +330,18 @@ where } } impl Model { + pub fn contains_key(&self, key: &T::Key) -> Result { + use serde::de::Error; + let s = T::key_str(key)?; + match &self.value { + Value::Object(o) => Ok(o.contains_key(s.as_ref())), + v => Err(patch_db::value::Error { + source: patch_db::value::ErrorSource::custom(format!("expected object found {v}")), + kind: patch_db::value::ErrorKind::Deserialization, + } + .into()), + } + } pub fn into_idx(self, key: &T::Key) -> Option> { use patch_db::ModelExt; let s = T::key_str(key).ok()?; diff --git a/core/startos/src/dependencies.rs b/core/startos/src/dependencies.rs index 69e4cad59..013648980 100644 --- a/core/startos/src/dependencies.rs +++ b/core/startos/src/dependencies.rs @@ -2,21 +2,25 @@ use std::collections::BTreeMap; use std::time::Duration; use clap::Parser; +use imbl_value::InternedString; use models::PackageId; -use rpc_toolkit::{command, from_fn_async, Empty, HandlerExt, ParentHandler}; +use patch_db::json_patch::merge; 
+use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tracing::instrument; use ts_rs::TS; use crate::config::{Config, ConfigSpec, ConfigureContext}; -use crate::context::RpcContext; +use crate::context::{CliContext, RpcContext}; use crate::db::model::package::CurrentDependencies; use crate::prelude::*; -use crate::status::DependencyConfigErrors; +use crate::rpc_continuations::Guid; +use crate::util::serde::HandlerExtSerde; +use crate::util::PathOrUrl; use crate::Error; -pub fn dependency() -> ParentHandler { - ParentHandler::new().subcommand("configure", configure()) +pub fn dependency() -> ParentHandler { + ParentHandler::new().subcommand("configure", configure::()) } #[derive(Clone, Debug, Default, Deserialize, Serialize, HasModel, TS)] @@ -41,6 +45,16 @@ impl Map for Dependencies { pub struct DepInfo { pub description: Option, pub optional: bool, + pub s9pk: Option, +} + +#[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct DependencyMetadata { + #[ts(type = "string")] + pub title: InternedString, } #[derive(Deserialize, Serialize, Parser, TS)] @@ -50,12 +64,21 @@ pub struct ConfigureParams { dependent_id: PackageId, dependency_id: PackageId, } -pub fn configure() -> ParentHandler { - ParentHandler::new().root_handler( - from_fn_async(configure_impl) - .with_inherited(|params, _| params) - .no_cli(), - ) +pub fn configure() -> ParentHandler { + ParentHandler::new() + .root_handler( + from_fn_async(configure_impl) + .with_inherited(|params, _| params) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "dry", + from_fn_async(configure_dry) + .with_inherited(|params, _| params) + .with_display_serializable() + .with_call_remote::(), + ) } pub async fn configure_impl( @@ -86,11 +109,22 @@ pub async fn configure_impl( ErrorKind::Unknown, ) })? 
- .configure(configure_context) + .configure(Guid::new(), configure_context) .await?; Ok(()) } +pub async fn configure_dry( + ctx: RpcContext, + _: Empty, + ConfigureParams { + dependent_id, + dependency_id, + }: ConfigureParams, +) -> Result { + configure_logic(ctx.clone(), (dependent_id, dependency_id.clone())).await +} + #[derive(Clone, Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ConfigDryRes { @@ -103,94 +137,48 @@ pub async fn configure_logic( ctx: RpcContext, (dependent_id, dependency_id): (PackageId, PackageId), ) -> Result { - // let db = ctx.db.peek().await; - // let pkg = db - // .as_package_data() - // .as_idx(&pkg_id) - // .or_not_found(&pkg_id)? - // .as_installed() - // .or_not_found(&pkg_id)?; - // let pkg_version = pkg.as_manifest().as_version().de()?; - // let pkg_volumes = pkg.as_manifest().as_volumes().de()?; - // let dependency = db - // .as_package_data() - // .as_idx(&dependency_id) - // .or_not_found(&dependency_id)? - // .as_installed() - // .or_not_found(&dependency_id)?; - // let dependency_config_action = dependency - // .as_manifest() - // .as_config() - // .de()? - // .ok_or_else(|| not_found!("Manifest Config"))?; - // let dependency_version = dependency.as_manifest().as_version().de()?; - // let dependency_volumes = dependency.as_manifest().as_volumes().de()?; - // let dependency = pkg - // .as_manifest() - // .as_dependencies() - // .as_idx(&dependency_id) - // .or_not_found(&dependency_id)?; - - // let ConfigRes { - // config: maybe_config, - // spec, - // } = dependency_config_action - // .get( - // &ctx, - // &dependency_id, - // &dependency_version, - // &dependency_volumes, - // ) - // .await?; - - // let old_config = if let Some(config) = maybe_config { - // config - // } else { - // spec.gen( - // &mut rand::rngs::StdRng::from_entropy(), - // &Some(Duration::new(10, 0)), - // )? - // }; - - // let new_config = dependency - // .as_config() - // .de()? - // .ok_or_else(|| not_found!("Config"))? 
- // .auto_configure - // .sandboxed( - // &ctx, - // &pkg_id, - // &pkg_version, - // &pkg_volumes, - // Some(&old_config), - // None, - // ProcedureName::AutoConfig(dependency_id.clone()), - // ) - // .await? - // .map_err(|e| Error::new(eyre!("{}", e.1), crate::ErrorKind::AutoConfigure))?; - - // Ok(ConfigDryRes { - // old_config, - // new_config, - // spec, - // }) - todo!() + let procedure_id = Guid::new(); + let dependency_guard = ctx.services.get(&dependency_id).await; + let dependency = dependency_guard.as_ref().or_not_found(&dependency_id)?; + let dependent_guard = ctx.services.get(&dependent_id).await; + let dependent = dependent_guard.as_ref().or_not_found(&dependent_id)?; + let config_res = dependency.get_config(procedure_id.clone()).await?; + let diff = Value::Object( + dependent + .dependency_config(procedure_id, dependency_id, config_res.config.clone()) + .await? + .unwrap_or_default(), + ); + let mut new_config = Value::Object(config_res.config.clone().unwrap_or_default()); + merge(&mut new_config, &diff); + Ok(ConfigDryRes { + old_config: config_res.config.unwrap_or_default(), + new_config: new_config.as_object().cloned().unwrap_or_default(), + spec: config_res.spec, + }) } #[instrument(skip_all)] pub async fn compute_dependency_config_errs( ctx: &RpcContext, - db: &Peeked, id: &PackageId, - current_dependencies: &CurrentDependencies, - dependency_config: &BTreeMap, -) -> Result { - let mut dependency_config_errs = BTreeMap::new(); - for (dependency, _dep_info) in current_dependencies.0.iter() { + current_dependencies: &mut CurrentDependencies, +) -> Result<(), Error> { + let procedure_id = Guid::new(); + let service_guard = ctx.services.get(id).await; + let service = service_guard.as_ref().or_not_found(id)?; + for (dep_id, dep_info) in current_dependencies.0.iter_mut() { // check if config passes dependency check - if let Some(error) = todo!() { - dependency_config_errs.insert(dependency.clone(), error); - } + let Some(dependency) = 
&*ctx.services.get(dep_id).await else { + continue; + }; + + let dep_config = dependency.get_config(procedure_id.clone()).await?.config; + + dep_info.config_satisfied = service + .dependency_config(procedure_id.clone(), dep_id.clone(), dep_config) + .await? + .is_none(); } - Ok(DependencyConfigErrors(dependency_config_errs)) + Ok(()) } diff --git a/core/startos/src/developer/mod.rs b/core/startos/src/developer/mod.rs index 596957445..4a2a4c3df 100644 --- a/core/startos/src/developer/mod.rs +++ b/core/startos/src/developer/mod.rs @@ -8,7 +8,8 @@ use ed25519_dalek::{SigningKey, VerifyingKey}; use tracing::instrument; use crate::context::CliContext; -use crate::{Error, ResultExt}; +use crate::prelude::*; +use crate::util::serde::Pem; #[instrument(skip_all)] pub fn init(ctx: CliContext) -> Result<(), Error> { @@ -25,7 +26,8 @@ pub fn init(ctx: CliContext) -> Result<(), Error> { secret_key: secret.to_bytes(), public_key: Some(PublicKeyBytes(VerifyingKey::from(&secret).to_bytes())), }; - let mut dev_key_file = File::create(&ctx.developer_key_path)?; + let mut dev_key_file = File::create(&ctx.developer_key_path) + .with_ctx(|_| (ErrorKind::Filesystem, ctx.developer_key_path.display()))?; dev_key_file.write_all( keypair_bytes .to_pkcs8_pem(base64ct::LineEnding::default()) @@ -45,3 +47,7 @@ pub fn init(ctx: CliContext) -> Result<(), Error> { } Ok(()) } + +pub fn pubkey(ctx: CliContext) -> Result, Error> { + Ok(Pem(ctx.developer_key()?.verifying_key())) +} diff --git a/core/startos/src/diagnostic.rs b/core/startos/src/diagnostic.rs index 1cef6d7ef..5e99580e9 100644 --- a/core/startos/src/diagnostic.rs +++ b/core/startos/src/diagnostic.rs @@ -1,38 +1,44 @@ use std::path::Path; use std::sync::Arc; -use clap::Parser; use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{command, from_fn, from_fn_async, AnyContext, HandlerExt, ParentHandler}; -use serde::{Deserialize, Serialize}; -use ts_rs::TS; +use rpc_toolkit::{ + from_fn, from_fn_async, CallRemoteHandler, Context, Empty, 
HandlerExt, ParentHandler, +}; -use crate::context::{CliContext, DiagnosticContext}; +use crate::context::{CliContext, DiagnosticContext, RpcContext}; use crate::init::SYSTEM_REBUILD_PATH; -use crate::logs::{fetch_logs, LogResponse, LogSource}; use crate::shutdown::Shutdown; use crate::Error; -pub fn diagnostic() -> ParentHandler { +pub fn diagnostic() -> ParentHandler { ParentHandler::new() - .subcommand("error", from_fn(error).with_remote_cli::()) - .subcommand("logs", from_fn_async(logs).no_cli()) + .subcommand("error", from_fn(error).with_call_remote::()) + .subcommand("logs", crate::system::logs::()) .subcommand( - "exit", - from_fn(exit).no_display().with_remote_cli::(), + "logs", + from_fn_async(crate::logs::cli_logs::).no_display(), + ) + .subcommand( + "kernel-logs", + crate::system::kernel_logs::(), + ) + .subcommand( + "kernel-logs", + from_fn_async(crate::logs::cli_logs::).no_display(), ) .subcommand( "restart", from_fn(restart) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) - .subcommand("disk", disk()) + .subcommand("disk", disk::()) .subcommand( "rebuild", from_fn_async(rebuild) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } @@ -41,40 +47,15 @@ pub fn error(ctx: DiagnosticContext) -> Result, Error> { Ok(ctx.error.clone()) } -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct LogsParams { - #[ts(type = "number | null")] - limit: Option, - cursor: Option, - before: bool, -} -pub async fn logs( - _: AnyContext, - LogsParams { - limit, - cursor, - before, - }: LogsParams, -) -> Result { - Ok(fetch_logs(LogSource::System, limit, cursor, before).await?) 
-} - -pub fn exit(ctx: DiagnosticContext) -> Result<(), Error> { - ctx.shutdown.send(None).expect("receiver dropped"); - Ok(()) -} - pub fn restart(ctx: DiagnosticContext) -> Result<(), Error> { ctx.shutdown - .send(Some(Shutdown { + .send(Shutdown { export_args: ctx .disk_guid .clone() .map(|guid| (guid, ctx.datadir.clone())), restart: true, - })) + }) .expect("receiver dropped"); Ok(()) } @@ -83,17 +64,20 @@ pub async fn rebuild(ctx: DiagnosticContext) -> Result<(), Error> { restart(ctx) } -pub fn disk() -> ParentHandler { - ParentHandler::new().subcommand( - "forget", - from_fn_async(forget_disk) - .no_display() - .with_remote_cli::(), - ) +pub fn disk() -> ParentHandler { + ParentHandler::new() + .subcommand("forget", from_fn_async(forget_disk::).no_cli()) + .subcommand( + "forget", + CallRemoteHandler::::new( + from_fn_async(forget_disk::).no_display(), + ) + .no_display(), + ) } -pub async fn forget_disk(_: AnyContext) -> Result<(), Error> { - let disk_guid = Path::new("/media/embassy/config/disk.guid"); +pub async fn forget_disk(_: C) -> Result<(), Error> { + let disk_guid = Path::new("/media/startos/config/disk.guid"); if tokio::fs::metadata(disk_guid).await.is_ok() { tokio::fs::remove_file(disk_guid).await?; } diff --git a/core/startos/src/disk/fsck/ext4.rs b/core/startos/src/disk/fsck/ext4.rs index 7bcbbc8b3..a068749fa 100644 --- a/core/startos/src/disk/fsck/ext4.rs +++ b/core/startos/src/disk/fsck/ext4.rs @@ -38,7 +38,7 @@ fn backup_existing_undo_file<'a>(path: &'a Path) -> BoxFuture<'a, Result<(), Err pub async fn e2fsck_aggressive( logicalname: impl AsRef + std::fmt::Debug, ) -> Result { - let undo_path = Path::new("/media/embassy/config") + let undo_path = Path::new("/media/startos/config") .join( logicalname .as_ref() diff --git a/core/startos/src/disk/main.rs b/core/startos/src/disk/main.rs index a337a4473..73aca4010 100644 --- a/core/startos/src/disk/main.rs +++ b/core/startos/src/disk/main.rs @@ -13,7 +13,7 @@ use 
crate::disk::mount::util::unmount; use crate::util::Invoke; use crate::{Error, ErrorKind, ResultExt}; -pub const PASSWORD_PATH: &'static str = "/run/embassy/password"; +pub const PASSWORD_PATH: &'static str = "/run/startos/password"; pub const DEFAULT_PASSWORD: &'static str = "password"; pub const MAIN_FS_SIZE: FsSize = FsSize::Gigabytes(8); @@ -64,10 +64,10 @@ where .await?; } let mut guid = format!( - "EMBASSY_{}", + "STARTOS_{}", base32::encode( - base32::Alphabet::RFC4648 { padding: false }, - &rand::random::<[u8; 32]>(), + base32::Alphabet::Rfc4648 { padding: false }, + &rand::random::<[u8; 20]>(), ) ); if !encrypted { @@ -168,7 +168,7 @@ pub async fn create_all_fs>( #[instrument(skip_all)] pub async fn unmount_fs>(guid: &str, datadir: P, name: &str) -> Result<(), Error> { - unmount(datadir.as_ref().join(name)).await?; + unmount(datadir.as_ref().join(name), false).await?; if !guid.ends_with("_UNENC") { Command::new("cryptsetup") .arg("-q") @@ -219,7 +219,7 @@ pub async fn import>( if scan .values() .filter_map(|a| a.as_ref()) - .filter(|a| a.starts_with("EMBASSY_")) + .filter(|a| a.starts_with("STARTOS_") || a.starts_with("EMBASSY_")) .next() .is_none() { @@ -302,7 +302,7 @@ pub async fn mount_fs>( if !guid.ends_with("_UNENC") { // Backup LUKS header if e2fsck succeeded - let luks_folder = Path::new("/media/embassy/config/luks"); + let luks_folder = Path::new("/media/startos/config/luks"); tokio::fs::create_dir_all(luks_folder).await?; let tmp_luks_bak = luks_folder.join(format!(".{full_name}.luks.bak.tmp")); if tokio::fs::metadata(&tmp_luks_bak).await.is_ok() { diff --git a/core/startos/src/disk/mod.rs b/core/startos/src/disk/mod.rs index f74c944a4..c0a701fc9 100644 --- a/core/startos/src/disk/mod.rs +++ b/core/startos/src/disk/mod.rs @@ -1,6 +1,8 @@ use std::path::{Path, PathBuf}; -use rpc_toolkit::{from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; +use itertools::Itertools; +use lazy_format::lazy_format; +use rpc_toolkit::{from_fn_async, 
CallRemoteHandler, Context, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use crate::context::{CliContext, RpcContext}; @@ -14,7 +16,7 @@ pub mod mount; pub mod util; pub const BOOT_RW_PATH: &str = "/media/boot-rw"; -pub const REPAIR_DISK_PATH: &str = "/media/embassy/config/repair-disk"; +pub const REPAIR_DISK_PATH: &str = "/media/startos/config/repair-disk"; #[derive(Clone, Debug, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] @@ -40,22 +42,23 @@ impl OsPartitionInfo { } } -pub fn disk() -> ParentHandler { +pub fn disk() -> ParentHandler { ParentHandler::new() .subcommand( "list", from_fn_async(list) .with_display_serializable() - .with_custom_display_fn::(|handle, result| { + .with_custom_display_fn(|handle, result| { Ok(display_disk_info(handle.params, result)) }) - .with_remote_cli::(), + .with_call_remote::(), ) + .subcommand("repair", from_fn_async(|_: C| repair()).no_cli()) .subcommand( "repair", - from_fn_async(repair) - .no_display() - .with_remote_cli::(), + CallRemoteHandler::::new( + from_fn_async(|_: RpcContext| repair()).no_display(), + ), ) } @@ -101,10 +104,18 @@ fn display_disk_info(params: WithIoFormat, args: Vec) { } else { "N/A" }, - if let Some(eos) = part.start_os.as_ref() { - eos.version.as_str() + &if part.start_os.is_empty() { + "N/A".to_owned() + } else if part.start_os.len() == 1 { + part.start_os + .first_key_value() + .map(|(_, info)| info.version.to_string()) + .unwrap() } else { - "N/A" + part.start_os + .iter() + .map(|(id, info)| lazy_format!("{} ({})", info.version, id)) + .join(", ") }, ]; table.add_row(row); diff --git a/core/startos/src/disk/mount/backup.rs b/core/startos/src/disk/mount/backup.rs index 5dbd80db3..8f45b0d4f 100644 --- a/core/startos/src/disk/mount/backup.rs +++ b/core/startos/src/disk/mount/backup.rs @@ -11,9 +11,10 @@ use super::filesystem::ecryptfs::EcryptFS; use super::guard::{GenericMountGuard, TmpMountGuard}; use crate::auth::check_password; use 
crate::backup::target::BackupInfo; +use crate::disk::mount::filesystem::backupfs::BackupFS; use crate::disk::mount::filesystem::ReadWrite; use crate::disk::mount::guard::SubPath; -use crate::disk::util::EmbassyOsRecoveryInfo; +use crate::disk::util::StartOsRecoveryInfo; use crate::util::crypto::{decrypt_slice, encrypt_slice}; use crate::util::serde::IoFormat; use crate::{Error, ErrorKind, ResultExt}; @@ -23,29 +24,27 @@ pub struct BackupMountGuard { backup_disk_mount_guard: Option, encrypted_guard: Option, enc_key: String, - pub unencrypted_metadata: EmbassyOsRecoveryInfo, + unencrypted_metadata_path: PathBuf, + pub unencrypted_metadata: StartOsRecoveryInfo, pub metadata: BackupInfo, } impl BackupMountGuard { - fn backup_disk_path(&self) -> &Path { - if let Some(guard) = &self.backup_disk_mount_guard { - guard.path() - } else { - unreachable!() - } - } - #[instrument(skip_all)] - pub async fn mount(backup_disk_mount_guard: G, password: &str) -> Result { + pub async fn mount( + backup_disk_mount_guard: G, + server_id: &str, + password: &str, + ) -> Result { let backup_disk_path = backup_disk_mount_guard.path(); - let unencrypted_metadata_path = - backup_disk_path.join("EmbassyBackups/unencrypted-metadata.cbor"); - let mut unencrypted_metadata: EmbassyOsRecoveryInfo = + let backup_dir = backup_disk_path.join("StartOSBackups").join(server_id); + let unencrypted_metadata_path = backup_dir.join("unencrypted-metadata.json"); + let crypt_path = backup_dir.join("crypt"); + let mut unencrypted_metadata: StartOsRecoveryInfo = if tokio::fs::metadata(&unencrypted_metadata_path) .await .is_ok() { - IoFormat::Cbor.from_slice( + IoFormat::Json.from_slice( &tokio::fs::read(&unencrypted_metadata_path) .await .with_ctx(|_| { @@ -56,6 +55,9 @@ impl BackupMountGuard { })?, )? 
} else { + if tokio::fs::metadata(&crypt_path).await.is_ok() { + tokio::fs::remove_dir_all(&crypt_path).await?; + } Default::default() }; let enc_key = if let (Some(hash), Some(wrapped_key)) = ( @@ -63,7 +65,7 @@ impl BackupMountGuard { unencrypted_metadata.wrapped_key.as_ref(), ) { let wrapped_key = - base32::decode(base32::Alphabet::RFC4648 { padding: true }, wrapped_key) + base32::decode(base32::Alphabet::Rfc4648 { padding: true }, wrapped_key) .ok_or_else(|| { Error::new( eyre!("failed to decode wrapped key"), @@ -74,7 +76,7 @@ impl BackupMountGuard { String::from_utf8(decrypt_slice(wrapped_key, password))? } else { base32::encode( - base32::Alphabet::RFC4648 { padding: false }, + base32::Alphabet::Rfc4648 { padding: false }, &rand::random::<[u8; 32]>()[..], ) }; @@ -91,12 +93,11 @@ impl BackupMountGuard { } if unencrypted_metadata.wrapped_key.is_none() { unencrypted_metadata.wrapped_key = Some(base32::encode( - base32::Alphabet::RFC4648 { padding: true }, + base32::Alphabet::Rfc4648 { padding: true }, &encrypt_slice(&enc_key, password), )); } - let crypt_path = backup_disk_path.join("EmbassyBackups/crypt"); if tokio::fs::metadata(&crypt_path).await.is_err() { tokio::fs::create_dir_all(&crypt_path).await.with_ctx(|_| { ( @@ -105,12 +106,15 @@ impl BackupMountGuard { ) })?; } - let encrypted_guard = - TmpMountGuard::mount(&EcryptFS::new(&crypt_path, &enc_key), ReadWrite).await?; + let encrypted_guard = TmpMountGuard::mount( + &BackupFS::new(&crypt_path, &enc_key, vec![(100000, 65536)]), + ReadWrite, + ) + .await?; - let metadata_path = encrypted_guard.path().join("metadata.cbor"); + let metadata_path = encrypted_guard.path().join("metadata.json"); let metadata: BackupInfo = if tokio::fs::metadata(&metadata_path).await.is_ok() { - IoFormat::Cbor.from_slice(&tokio::fs::read(&metadata_path).await.with_ctx(|_| { + IoFormat::Json.from_slice(&tokio::fs::read(&metadata_path).await.with_ctx(|_| { ( crate::ErrorKind::Filesystem, metadata_path.display().to_string(), @@ 
-124,6 +128,7 @@ impl BackupMountGuard { backup_disk_mount_guard: Some(backup_disk_mount_guard), encrypted_guard: Some(encrypted_guard), enc_key, + unencrypted_metadata_path, unencrypted_metadata, metadata, }) @@ -139,33 +144,45 @@ impl BackupMountGuard { .with_kind(crate::ErrorKind::PasswordHashGeneration)?, ); self.unencrypted_metadata.wrapped_key = Some(base32::encode( - base32::Alphabet::RFC4648 { padding: false }, + base32::Alphabet::Rfc4648 { padding: false }, &encrypt_slice(&self.enc_key, new_password), )); Ok(()) } #[instrument(skip_all)] - pub fn package_backup(self: &Arc, id: &PackageId) -> SubPath> { - SubPath::new(self.clone(), id) + pub async fn package_backup( + self: &Arc, + id: &PackageId, + ) -> Result>, Error> { + let package_guard = SubPath::new(self.clone(), id); + let package_path = package_guard.path(); + if tokio::fs::metadata(&package_path).await.is_err() { + tokio::fs::create_dir_all(&package_path) + .await + .with_ctx(|_| { + ( + crate::ErrorKind::Filesystem, + package_path.display().to_string(), + ) + })?; + } + Ok(package_guard) } #[instrument(skip_all)] pub async fn save(&self) -> Result<(), Error> { - let metadata_path = self.path().join("metadata.cbor"); - let backup_disk_path = self.backup_disk_path(); + let metadata_path = self.path().join("metadata.json"); let mut file = AtomicFile::new(&metadata_path, None::) .await .with_kind(ErrorKind::Filesystem)?; - file.write_all(&IoFormat::Cbor.to_vec(&self.metadata)?) + file.write_all(&IoFormat::Json.to_vec(&self.metadata)?) .await?; file.save().await.with_kind(ErrorKind::Filesystem)?; - let unencrypted_metadata_path = - backup_disk_path.join("EmbassyBackups/unencrypted-metadata.cbor"); - let mut file = AtomicFile::new(&unencrypted_metadata_path, None::) + let mut file = AtomicFile::new(&self.unencrypted_metadata_path, None::) .await .with_kind(ErrorKind::Filesystem)?; - file.write_all(&IoFormat::Cbor.to_vec(&self.unencrypted_metadata)?) 
+ file.write_all(&IoFormat::Json.to_vec(&self.unencrypted_metadata)?) .await?; file.save().await.with_kind(ErrorKind::Filesystem)?; Ok(()) @@ -178,7 +195,6 @@ impl BackupMountGuard { Ok(()) } } -#[async_trait::async_trait] impl GenericMountGuard for BackupMountGuard { fn path(&self) -> &Path { if let Some(guard) = &self.encrypted_guard { diff --git a/core/startos/src/disk/mount/filesystem/backupfs.rs b/core/startos/src/disk/mount/filesystem/backupfs.rs new file mode 100644 index 000000000..254abde20 --- /dev/null +++ b/core/startos/src/disk/mount/filesystem/backupfs.rs @@ -0,0 +1,68 @@ +use std::borrow::Cow; +use std::fmt::{self, Display}; +use std::os::unix::ffi::OsStrExt; +use std::path::Path; + +use digest::generic_array::GenericArray; +use digest::{Digest, OutputSizeUser}; +use sha2::Sha256; + +use super::FileSystem; +use crate::prelude::*; + +pub struct BackupFS, Password: fmt::Display> { + data_dir: DataDir, + password: Password, + idmapped_root: Vec<(u32, u32)>, +} +impl, Password: fmt::Display> BackupFS { + pub fn new(data_dir: DataDir, password: Password, idmapped_root: Vec<(u32, u32)>) -> Self { + BackupFS { + data_dir, + password, + idmapped_root, + } + } +} +impl + Send + Sync, Password: fmt::Display + Send + Sync> FileSystem + for BackupFS +{ + fn mount_type(&self) -> Option> { + Some("backup-fs") + } + fn mount_options(&self) -> impl IntoIterator { + [ + Cow::Owned(format!("password={}", self.password)), + Cow::Borrowed("file-size-padding=0.05"), + Cow::Borrowed("allow_other"), + ] + .into_iter() + .chain( + self.idmapped_root + .iter() + .map(|(root, range)| Cow::Owned(format!("idmapped-root={root}:{range}"))), + ) + } + async fn source(&self) -> Result>, Error> { + Ok(Some(&self.data_dir)) + } + async fn source_hash( + &self, + ) -> Result::OutputSize>, Error> { + let mut sha = Sha256::new(); + sha.update("BackupFS"); + sha.update( + tokio::fs::canonicalize(self.data_dir.as_ref()) + .await + .with_ctx(|_| { + ( + crate::ErrorKind::Filesystem, + 
self.data_dir.as_ref().display().to_string(), + ) + })? + .as_os_str() + .as_bytes(), + ); + Ok(sha.finalize()) + } +} diff --git a/core/startos/src/disk/mount/filesystem/mod.rs b/core/startos/src/disk/mount/filesystem/mod.rs index 53157937c..818549a0a 100644 --- a/core/startos/src/disk/mount/filesystem/mod.rs +++ b/core/startos/src/disk/mount/filesystem/mod.rs @@ -1,6 +1,7 @@ use std::ffi::OsStr; use std::fmt::{Display, Write}; use std::path::Path; +use std::time::Duration; use digest::generic_array::GenericArray; use digest::OutputSizeUser; @@ -11,6 +12,7 @@ use tokio::process::Command; use crate::prelude::*; use crate::util::Invoke; +pub mod backupfs; pub mod bind; pub mod block_dev; pub mod cifs; @@ -71,6 +73,7 @@ pub(self) async fn default_mount_impl( fs.pre_mount().await?; tokio::fs::create_dir_all(mountpoint.as_ref()).await?; Command::from(default_mount_command(fs, mountpoint, mount_type).await?) + .capture(false) .invoke(ErrorKind::Filesystem) .await?; diff --git a/core/startos/src/disk/mount/filesystem/overlayfs.rs b/core/startos/src/disk/mount/filesystem/overlayfs.rs index ad5eec501..5e40a21a1 100644 --- a/core/startos/src/disk/mount/filesystem/overlayfs.rs +++ b/core/startos/src/disk/mount/filesystem/overlayfs.rs @@ -6,22 +6,26 @@ use digest::generic_array::GenericArray; use digest::{Digest, OutputSizeUser}; use sha2::Sha256; -use crate::disk::mount::filesystem::{FileSystem, ReadOnly, ReadWrite}; -use crate::disk::mount::guard::{GenericMountGuard, MountGuard, TmpMountGuard}; +use crate::disk::mount::filesystem::{FileSystem, ReadWrite}; +use crate::disk::mount::guard::{GenericMountGuard, MountGuard}; use crate::prelude::*; use crate::util::io::TmpDir; -struct OverlayFs, P1: AsRef> { +pub struct OverlayFs, P1: AsRef, P2: AsRef> { lower: P0, upper: P1, + work: P2, } -impl, P1: AsRef> OverlayFs { - pub fn new(lower: P0, upper: P1) -> Self { - Self { lower, upper } +impl, P1: AsRef, P2: AsRef> OverlayFs { + pub fn new(lower: P0, upper: P1, work: P2) -> Self { 
+ Self { lower, upper, work } } } -impl + Send + Sync, P1: AsRef + Send + Sync> FileSystem - for OverlayFs +impl< + P0: AsRef + Send + Sync, + P1: AsRef + Send + Sync, + P2: AsRef + Send + Sync, + > FileSystem for OverlayFs { fn mount_type(&self) -> Option> { Some("overlay") @@ -33,24 +37,20 @@ impl + Send + Sync, P1: AsRef + Send + Sync> FileSystem [ Box::new(lazy_format!("lowerdir={}", self.lower.as_ref().display())) as Box, - Box::new(lazy_format!( - "upperdir={}/upper", - self.upper.as_ref().display() - )), - Box::new(lazy_format!( - "workdir={}/work", - self.upper.as_ref().display() - )), + Box::new(lazy_format!("upperdir={}", self.upper.as_ref().display())), + Box::new(lazy_format!("workdir={}", self.work.as_ref().display())), ] } async fn pre_mount(&self) -> Result<(), Error> { - tokio::fs::create_dir_all(self.upper.as_ref().join("upper")).await?; - tokio::fs::create_dir_all(self.upper.as_ref().join("work")).await?; + tokio::fs::create_dir_all(self.upper.as_ref()).await?; + tokio::fs::create_dir_all(self.work.as_ref()).await?; Ok(()) } async fn source_hash( &self, ) -> Result::OutputSize>, Error> { + tokio::fs::create_dir_all(self.upper.as_ref()).await?; + tokio::fs::create_dir_all(self.work.as_ref()).await?; let mut sha = Sha256::new(); sha.update("OverlayFs"); sha.update( @@ -77,25 +77,37 @@ impl + Send + Sync, P1: AsRef + Send + Sync> FileSystem .as_os_str() .as_bytes(), ); + sha.update( + tokio::fs::canonicalize(self.work.as_ref()) + .await + .with_ctx(|_| { + ( + crate::ErrorKind::Filesystem, + self.upper.as_ref().display().to_string(), + ) + })? 
+ .as_os_str() + .as_bytes(), + ); Ok(sha.finalize()) } } #[derive(Debug)] -pub struct OverlayGuard { - lower: Option, +pub struct OverlayGuard { + lower: Option, upper: Option, inner_guard: MountGuard, } -impl OverlayGuard { - pub async fn mount( - base: &impl FileSystem, - mountpoint: impl AsRef, - ) -> Result { - let lower = TmpMountGuard::mount(base, ReadOnly).await?; +impl OverlayGuard { + pub async fn mount(lower: G, mountpoint: impl AsRef) -> Result { let upper = TmpDir::new().await?; let inner_guard = MountGuard::mount( - &OverlayFs::new(lower.path(), upper.as_ref()), + &OverlayFs::new( + lower.path(), + upper.as_ref().join("upper"), + upper.as_ref().join("work"), + ), mountpoint, ReadWrite, ) @@ -124,16 +136,15 @@ impl OverlayGuard { } } } -#[async_trait::async_trait] -impl GenericMountGuard for OverlayGuard { +impl GenericMountGuard for OverlayGuard { fn path(&self) -> &Path { self.inner_guard.path() } - async fn unmount(mut self) -> Result<(), Error> { + async fn unmount(self) -> Result<(), Error> { self.unmount(false).await } } -impl Drop for OverlayGuard { +impl Drop for OverlayGuard { fn drop(&mut self) { let lower = self.lower.take(); let upper = self.upper.take(); diff --git a/core/startos/src/disk/mount/guard.rs b/core/startos/src/disk/mount/guard.rs index af46904fd..a2d577226 100644 --- a/core/startos/src/disk/mount/guard.rs +++ b/core/startos/src/disk/mount/guard.rs @@ -2,6 +2,7 @@ use std::collections::BTreeMap; use std::path::{Path, PathBuf}; use std::sync::{Arc, Weak}; +use futures::Future; use lazy_static::lazy_static; use models::ResultExt; use tokio::sync::Mutex; @@ -12,25 +13,22 @@ use super::util::unmount; use crate::util::{Invoke, Never}; use crate::Error; -pub const TMP_MOUNTPOINT: &'static str = "/media/embassy/tmp"; +pub const TMP_MOUNTPOINT: &'static str = "/media/startos/tmp"; -#[async_trait::async_trait] pub trait GenericMountGuard: std::fmt::Debug + Send + Sync + 'static { fn path(&self) -> &Path; - async fn unmount(mut self) -> 
Result<(), Error>; + fn unmount(self) -> impl Future> + Send; } -#[async_trait::async_trait] impl GenericMountGuard for Never { fn path(&self) -> &Path { match *self {} } - async fn unmount(mut self) -> Result<(), Error> { + async fn unmount(self) -> Result<(), Error> { match self {} } } -#[async_trait::async_trait] impl GenericMountGuard for Arc where T: GenericMountGuard, @@ -38,7 +36,7 @@ where fn path(&self) -> &Path { (&**self).path() } - async fn unmount(mut self) -> Result<(), Error> { + async fn unmount(self) -> Result<(), Error> { if let Ok(guard) = Arc::try_unwrap(self) { guard.unmount().await?; } @@ -76,7 +74,7 @@ impl MountGuard { } pub async fn unmount(mut self, delete_mountpoint: bool) -> Result<(), Error> { if self.mounted { - unmount(&self.mountpoint).await?; + unmount(&self.mountpoint, false).await?; if delete_mountpoint { match tokio::fs::remove_dir(&self.mountpoint).await { Err(e) if e.raw_os_error() == Some(39) => Ok(()), // directory not empty @@ -98,24 +96,23 @@ impl Drop for MountGuard { fn drop(&mut self) { if self.mounted { let mountpoint = std::mem::take(&mut self.mountpoint); - tokio::spawn(async move { unmount(mountpoint).await.unwrap() }); + tokio::spawn(async move { unmount(mountpoint, true).await.unwrap() }); } } } -#[async_trait::async_trait] impl GenericMountGuard for MountGuard { fn path(&self) -> &Path { &self.mountpoint } - async fn unmount(mut self) -> Result<(), Error> { + async fn unmount(self) -> Result<(), Error> { MountGuard::unmount(self, false).await } } async fn tmp_mountpoint(source: &impl FileSystem) -> Result { Ok(Path::new(TMP_MOUNTPOINT).join(base32::encode( - base32::Alphabet::RFC4648 { padding: false }, - &source.source_hash().await?, + base32::Alphabet::Rfc4648 { padding: false }, + &source.source_hash().await?[0..20], ))) } @@ -165,12 +162,11 @@ impl TmpMountGuard { std::mem::replace(self, unmounted) } } -#[async_trait::async_trait] impl GenericMountGuard for TmpMountGuard { fn path(&self) -> &Path { 
self.guard.path() } - async fn unmount(mut self) -> Result<(), Error> { + async fn unmount(self) -> Result<(), Error> { self.guard.unmount().await } } @@ -187,12 +183,11 @@ impl SubPath { Self { guard, path } } } -#[async_trait::async_trait] impl GenericMountGuard for SubPath { fn path(&self) -> &Path { self.path.as_path() } - async fn unmount(mut self) -> Result<(), Error> { + async fn unmount(self) -> Result<(), Error> { self.guard.unmount().await } } diff --git a/core/startos/src/disk/mount/util.rs b/core/startos/src/disk/mount/util.rs index e93ceb7dd..674f33304 100644 --- a/core/startos/src/disk/mount/util.rs +++ b/core/startos/src/disk/mount/util.rs @@ -23,7 +23,7 @@ pub async fn bind, P1: AsRef>( .status() .await?; if is_mountpoint.success() { - unmount(dst.as_ref()).await?; + unmount(dst.as_ref(), true).await?; } tokio::fs::create_dir_all(&src).await?; tokio::fs::create_dir_all(&dst).await?; @@ -41,11 +41,14 @@ pub async fn bind, P1: AsRef>( } #[instrument(skip_all)] -pub async fn unmount>(mountpoint: P) -> Result<(), Error> { +pub async fn unmount>(mountpoint: P, lazy: bool) -> Result<(), Error> { tracing::debug!("Unmounting {}.", mountpoint.as_ref().display()); - tokio::process::Command::new("umount") - .arg("-Rl") - .arg(mountpoint.as_ref()) + let mut cmd = tokio::process::Command::new("umount"); + cmd.arg("-R"); + if lazy { + cmd.arg("-l"); + } + cmd.arg(mountpoint.as_ref()) .invoke(crate::ErrorKind::Filesystem) .await?; Ok(()) diff --git a/core/startos/src/disk/util.rs b/core/startos/src/disk/util.rs index b0bc00a5d..c64ea40ae 100644 --- a/core/startos/src/disk/util.rs +++ b/core/startos/src/disk/util.rs @@ -1,6 +1,7 @@ use std::collections::{BTreeMap, BTreeSet}; use std::path::{Path, PathBuf}; +use chrono::{DateTime, Utc}; use color_eyre::eyre::{self, eyre}; use futures::TryStreamExt; use nom::bytes::complete::{tag, take_till1}; @@ -19,8 +20,9 @@ use super::mount::filesystem::ReadOnly; use super::mount::guard::TmpMountGuard; use 
crate::disk::mount::guard::GenericMountGuard; use crate::disk::OsPartitionInfo; +use crate::hostname::Hostname; use crate::util::serde::IoFormat; -use crate::util::{Invoke, Version}; +use crate::util::Invoke; use crate::{Error, ResultExt as _}; #[derive(Clone, Copy, Debug, Deserialize, Serialize)] @@ -49,15 +51,16 @@ pub struct PartitionInfo { pub label: Option, pub capacity: u64, pub used: Option, - pub start_os: Option, + pub start_os: BTreeMap, pub guid: Option, } #[derive(Clone, Debug, Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] -pub struct EmbassyOsRecoveryInfo { - pub version: Version, - pub full: bool, +pub struct StartOsRecoveryInfo { + pub hostname: Hostname, + pub version: exver::Version, + pub timestamp: DateTime, pub password_hash: Option, pub wrapped_key: Option, } @@ -223,29 +226,38 @@ pub async fn pvscan() -> Result>, Error> { pub async fn recovery_info( mountpoint: impl AsRef, -) -> Result, Error> { - let backup_unencrypted_metadata_path = mountpoint - .as_ref() - .join("EmbassyBackups/unencrypted-metadata.cbor"); - if tokio::fs::metadata(&backup_unencrypted_metadata_path) - .await - .is_ok() - { - return Ok(Some( - IoFormat::Cbor.from_slice( - &tokio::fs::read(&backup_unencrypted_metadata_path) - .await - .with_ctx(|_| { - ( - crate::ErrorKind::Filesystem, - backup_unencrypted_metadata_path.display().to_string(), - ) - })?, - )?, - )); +) -> Result, Error> { + let backup_root = mountpoint.as_ref().join("StartOSBackups"); + let mut res = BTreeMap::new(); + if tokio::fs::metadata(&backup_root).await.is_ok() { + let mut dir = tokio::fs::read_dir(&backup_root).await?; + while let Some(entry) = dir.next_entry().await? 
{ + let server_id = entry.file_name().to_string_lossy().into_owned(); + let backup_unencrypted_metadata_path = backup_root + .join(&server_id) + .join("unencrypted-metadata.json"); + if tokio::fs::metadata(&backup_unencrypted_metadata_path) + .await + .is_ok() + { + res.insert( + server_id, + IoFormat::Json.from_slice( + &tokio::fs::read(&backup_unencrypted_metadata_path) + .await + .with_ctx(|_| { + ( + crate::ErrorKind::Filesystem, + backup_unencrypted_metadata_path.display().to_string(), + ) + })?, + )?, + ); + } + } } - Ok(None) + Ok(res) } #[instrument(skip_all)] @@ -390,7 +402,7 @@ async fn disk_info(disk: PathBuf) -> DiskInfo { } async fn part_info(part: PathBuf) -> PartitionInfo { - let mut start_os = None; + let mut start_os = BTreeMap::new(); let label = get_label(&part) .await .map_err(|e| tracing::warn!("Could not get label of {}: {}", part.display(), e.source)) @@ -410,14 +422,13 @@ async fn part_info(part: PathBuf) -> PartitionInfo { tracing::warn!("Could not get usage of {}: {}", part.display(), e.source) }) .ok(); - if let Some(recovery_info) = match recovery_info(mount_guard.path()).await { - Ok(a) => a, + match recovery_info(mount_guard.path()).await { + Ok(a) => { + start_os = a; + } Err(e) => { tracing::error!("Error fetching unencrypted backup metadata: {}", e); - None } - } { - start_os = Some(recovery_info) } if let Err(e) = mount_guard.unmount().await { tracing::error!("Error unmounting partition {}: {}", part.display(), e); diff --git a/core/startos/src/error.rs b/core/startos/src/error.rs index 9f0493f10..a0ca5707c 100644 --- a/core/startos/src/error.rs +++ b/core/startos/src/error.rs @@ -58,7 +58,7 @@ impl std::error::Error for ErrorCollection {} macro_rules! 
ensure_code { ($x:expr, $c:expr, $fmt:expr $(, $arg:expr)*) => { if !($x) { - return Err(crate::error::Error::new(color_eyre::eyre::eyre!($fmt, $($arg, )*), $c)); + Err::<(), _>(crate::error::Error::new(color_eyre::eyre::eyre!($fmt, $($arg, )*), $c))?; } }; } diff --git a/core/startos/src/firmware.rs b/core/startos/src/firmware.rs index 20347bcff..a70cf9e47 100644 --- a/core/startos/src/firmware.rs +++ b/core/startos/src/firmware.rs @@ -3,18 +3,18 @@ use std::path::Path; use async_compression::tokio::bufread::GzipDecoder; use serde::{Deserialize, Serialize}; -use tokio::fs::File; use tokio::io::BufReader; use tokio::process::Command; use crate::disk::fsck::RequiresReboot; use crate::prelude::*; +use crate::util::io::open_file; use crate::util::Invoke; use crate::PLATFORM; /// Part of the Firmware, look there for more about -#[derive(Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] pub struct VersionMatcher { /// Strip this prefix on the version matcher semver_prefix: Option, @@ -27,8 +27,8 @@ pub struct VersionMatcher { /// Inside a file that is firmware.json, we /// wanted a structure that could help decide what to do /// for each of the firmware versions -#[derive(Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] pub struct Firmware { id: String, /// This is the platform(s) the firmware was built for @@ -49,12 +49,8 @@ pub fn display_firmware_update_result(result: RequiresReboot) { } } -/// We wanted to make sure during every init -/// that the firmware was the correct and updated for -/// systems like the Pure System that a new firmware -/// was released and the updates where pushed through the pure os. 
-// #[command(rename = "update-firmware", display(display_firmware_update_result))] -pub async fn update_firmware() -> Result { +#[instrument] +pub async fn check_for_firmware_update() -> Result, Error> { let system_product_name = String::from_utf8( Command::new("dmidecode") .arg("-s") @@ -74,22 +70,21 @@ pub async fn update_firmware() -> Result { .trim() .to_owned(); if system_product_name.is_empty() || bios_version.is_empty() { - return Ok(RequiresReboot(false)); + return Ok(None); } - let firmware_dir = Path::new("/usr/lib/startos/firmware"); - for firmware in serde_json::from_str::>( &tokio::fs::read_to_string("/usr/lib/startos/firmware.json").await?, ) .with_kind(ErrorKind::Deserialization)? { - let id = firmware.id; let matches_product_name = firmware .system_product_name - .map_or(true, |spn| spn == system_product_name); + .as_ref() + .map_or(true, |spn| spn == &system_product_name); let matches_bios_version = firmware .bios_version + .as_ref() .map_or(Some(true), |bv| { let mut semver_str = bios_version.as_str(); if let Some(prefix) = &bv.semver_prefix { @@ -113,35 +108,46 @@ pub async fn update_firmware() -> Result { }) .unwrap_or(false); if firmware.platform.contains(&*PLATFORM) && matches_product_name && matches_bios_version { - let filename = format!("{id}.rom.gz"); - let firmware_path = firmware_dir.join(&filename); - Command::new("sha256sum") - .arg("-c") - .input(Some(&mut std::io::Cursor::new(format!( - "{} {}", - firmware.shasum, - firmware_path.display() - )))) - .invoke(ErrorKind::Filesystem) - .await?; - let mut rdr = if tokio::fs::metadata(&firmware_path).await.is_ok() { - GzipDecoder::new(BufReader::new(File::open(&firmware_path).await?)) - } else { - return Err(Error::new( - eyre!("Firmware {id}.rom.gz not found in {firmware_dir:?}"), - ErrorKind::NotFound, - )); - }; - Command::new("flashrom") - .arg("-p") - .arg("internal") - .arg("-w-") - .input(Some(&mut rdr)) - .invoke(ErrorKind::Firmware) - .await?; - return Ok(RequiresReboot(true)); + 
return Ok(Some(firmware)); } } - Ok(RequiresReboot(false)) + Ok(None) +} + +/// We wanted to make sure during every init +/// that the firmware was the correct and updated for +/// systems like the Pure System that a new firmware +/// was released and the updates where pushed through the pure os. +#[instrument] +pub async fn update_firmware(firmware: Firmware) -> Result<(), Error> { + let id = &firmware.id; + let firmware_dir = Path::new("/usr/lib/startos/firmware"); + let filename = format!("{id}.rom.gz"); + let firmware_path = firmware_dir.join(&filename); + Command::new("sha256sum") + .arg("-c") + .input(Some(&mut std::io::Cursor::new(format!( + "{} {}", + firmware.shasum, + firmware_path.display() + )))) + .invoke(ErrorKind::Filesystem) + .await?; + let mut rdr = if tokio::fs::metadata(&firmware_path).await.is_ok() { + GzipDecoder::new(BufReader::new(open_file(&firmware_path).await?)) + } else { + return Err(Error::new( + eyre!("Firmware {id}.rom.gz not found in {firmware_dir:?}"), + ErrorKind::NotFound, + )); + }; + Command::new("flashrom") + .arg("-p") + .arg("internal") + .arg("-w-") + .input(Some(&mut rdr)) + .invoke(ErrorKind::Firmware) + .await?; + Ok(()) } diff --git a/core/startos/src/hostname.rs b/core/startos/src/hostname.rs index f68d5c9d8..36bb5d8a4 100644 --- a/core/startos/src/hostname.rs +++ b/core/startos/src/hostname.rs @@ -1,11 +1,13 @@ +use imbl_value::InternedString; +use lazy_format::lazy_format; use rand::{thread_rng, Rng}; use tokio::process::Command; use tracing::instrument; use crate::util::Invoke; use crate::{Error, ErrorKind}; -#[derive(Clone, serde::Deserialize, serde::Serialize, Debug)] -pub struct Hostname(pub String); +#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)] +pub struct Hostname(pub InternedString); lazy_static::lazy_static! 
{ static ref ADJECTIVES: Vec = include_str!("./assets/adjectives.txt").lines().map(|x| x.to_string()).collect(); @@ -18,15 +20,16 @@ impl AsRef for Hostname { } impl Hostname { - pub fn lan_address(&self) -> String { - format!("https://{}.local", self.0) + pub fn lan_address(&self) -> InternedString { + InternedString::from_display(&lazy_format!("https://{}.local", self.0)) } - pub fn local_domain_name(&self) -> String { - format!("{}.local", self.0) + pub fn local_domain_name(&self) -> InternedString { + InternedString::from_display(&lazy_format!("{}.local", self.0)) } - pub fn no_dot_host_name(&self) -> String { - self.0.to_owned() + + pub fn no_dot_host_name(&self) -> InternedString { + self.0.clone() } } @@ -34,7 +37,9 @@ pub fn generate_hostname() -> Hostname { let mut rng = thread_rng(); let adjective = &ADJECTIVES[rng.gen_range(0..ADJECTIVES.len())]; let noun = &NOUNS[rng.gen_range(0..NOUNS.len())]; - Hostname(format!("{adjective}-{noun}")) + Hostname(InternedString::from_display(&lazy_format!( + "{adjective}-{noun}" + ))) } pub fn generate_id() -> String { @@ -48,12 +53,12 @@ pub async fn get_current_hostname() -> Result { .invoke(ErrorKind::ParseSysInfo) .await?; let out_string = String::from_utf8(out)?; - Ok(Hostname(out_string.trim().to_owned())) + Ok(Hostname(out_string.trim().into())) } #[instrument(skip_all)] pub async fn set_hostname(hostname: &Hostname) -> Result<(), Error> { - let hostname: &String = &hostname.0; + let hostname = &*hostname.0; Command::new("hostnamectl") .arg("--static") .arg("set-hostname") diff --git a/core/startos/src/init.rs b/core/startos/src/init.rs index d1d5a9943..e6b7be598 100644 --- a/core/startos/src/init.rs +++ b/core/startos/src/init.rs @@ -1,27 +1,44 @@ use std::fs::Permissions; +use std::io::Cursor; +use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; use std::os::unix::fs::PermissionsExt; use std::path::Path; use std::time::{Duration, SystemTime}; +use axum::extract::ws::{self}; use color_eyre::eyre::eyre; +use 
futures::{StreamExt, TryStreamExt}; +use itertools::Itertools; use models::ResultExt; use rand::random; +use rpc_toolkit::{from_fn_async, Context, Empty, HandlerArgs, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; use tokio::process::Command; use tracing::instrument; +use ts_rs::TS; use crate::account::AccountInfo; use crate::context::config::ServerConfig; +use crate::context::{CliContext, InitContext}; use crate::db::model::public::ServerStatus; +use crate::db::model::Database; use crate::disk::mount::util::unmount; use crate::middleware::auth::LOCAL_AUTH_COOKIE_PATH; +use crate::net::net_controller::PreInitNetController; use crate::prelude::*; +use crate::progress::{ + FullProgress, FullProgressTracker, PhaseProgressTrackerHandle, PhasedProgressBar, +}; +use crate::rpc_continuations::{Guid, RpcContinuation}; +use crate::s9pk::v2::pack::{CONTAINER_DATADIR, CONTAINER_TOOL}; use crate::ssh::SSH_AUTHORIZED_KEYS_FILE; -use crate::util::cpupower::{get_available_governors, get_preferred_governor, set_governor}; -use crate::util::Invoke; -use crate::{Error, ARCH}; +use crate::util::io::{create_file, IOHook}; +use crate::util::net::WebSocketExt; +use crate::util::{cpupower, Invoke}; +use crate::Error; -pub const SYSTEM_REBUILD_PATH: &str = "/media/embassy/config/system-rebuild"; -pub const STANDBY_MODE_PATH: &str = "/media/embassy/config/standby"; +pub const SYSTEM_REBUILD_PATH: &str = "/media/startos/config/system-rebuild"; +pub const STANDBY_MODE_PATH: &str = "/media/startos/config/standby"; pub async fn check_time_is_synchronized() -> Result { Ok(String::from_utf8( @@ -48,7 +65,7 @@ pub async fn init_postgres(datadir: impl AsRef) -> Result<(), Error> { .await? 
.success() { - unmount("/var/lib/postgresql").await?; + unmount("/var/lib/postgresql", true).await?; } let exists = tokio::fs::metadata(&db_dir).await.is_ok(); if !exists { @@ -122,10 +139,7 @@ pub async fn init_postgres(datadir: impl AsRef) -> Result<(), Error> { old_version -= 1; let old_datadir = db_dir.join(old_version.to_string()); if tokio::fs::metadata(&old_datadir).await.is_ok() { - tokio::fs::File::create(&incomplete_path) - .await? - .sync_all() - .await?; + create_file(&incomplete_path).await?.sync_all().await?; Command::new("pg_upgradecluster") .arg(old_version.to_string()) .arg("main") @@ -179,14 +193,114 @@ pub async fn init_postgres(datadir: impl AsRef) -> Result<(), Error> { } pub struct InitResult { - pub db: patch_db::PatchDb, + pub net_ctrl: PreInitNetController, +} + +pub struct InitPhases { + preinit: Option, + local_auth: PhaseProgressTrackerHandle, + load_database: PhaseProgressTrackerHandle, + load_ssh_keys: PhaseProgressTrackerHandle, + start_net: PhaseProgressTrackerHandle, + mount_logs: PhaseProgressTrackerHandle, + load_ca_cert: PhaseProgressTrackerHandle, + load_wifi: PhaseProgressTrackerHandle, + init_tmp: PhaseProgressTrackerHandle, + set_governor: PhaseProgressTrackerHandle, + sync_clock: PhaseProgressTrackerHandle, + enable_zram: PhaseProgressTrackerHandle, + update_server_info: PhaseProgressTrackerHandle, + launch_service_network: PhaseProgressTrackerHandle, + run_migrations: PhaseProgressTrackerHandle, + validate_db: PhaseProgressTrackerHandle, + postinit: Option, +} +impl InitPhases { + pub fn new(handle: &FullProgressTracker) -> Self { + Self { + preinit: if Path::new("/media/startos/config/preinit.sh").exists() { + Some(handle.add_phase("Running preinit.sh".into(), Some(5))) + } else { + None + }, + local_auth: handle.add_phase("Enabling local authentication".into(), Some(1)), + load_database: handle.add_phase("Loading database".into(), Some(5)), + load_ssh_keys: handle.add_phase("Loading SSH Keys".into(), Some(1)), + 
start_net: handle.add_phase("Starting network controller".into(), Some(1)), + mount_logs: handle.add_phase("Switching logs to write to data drive".into(), Some(1)), + load_ca_cert: handle.add_phase("Loading CA certificate".into(), Some(1)), + load_wifi: handle.add_phase("Loading WiFi configuration".into(), Some(1)), + init_tmp: handle.add_phase("Initializing temporary files".into(), Some(1)), + set_governor: handle.add_phase("Setting CPU performance profile".into(), Some(1)), + sync_clock: handle.add_phase("Synchronizing system clock".into(), Some(10)), + enable_zram: handle.add_phase("Enabling ZRAM".into(), Some(1)), + update_server_info: handle.add_phase("Updating server info".into(), Some(1)), + launch_service_network: handle.add_phase("Launching service intranet".into(), Some(1)), + run_migrations: handle.add_phase("Running migrations".into(), Some(10)), + validate_db: handle.add_phase("Validating database".into(), Some(1)), + postinit: if Path::new("/media/startos/config/postinit.sh").exists() { + Some(handle.add_phase("Running postinit.sh".into(), Some(5))) + } else { + None + }, + } + } +} + +pub async fn run_script>(path: P, mut progress: PhaseProgressTrackerHandle) { + let script = path.as_ref(); + progress.start(); + if let Err(e) = async { + let script = tokio::fs::read_to_string(script).await?; + progress.set_total(script.as_bytes().iter().filter(|b| **b == b'\n').count() as u64); + let mut reader = IOHook::new(Cursor::new(script.as_bytes())); + reader.post_read(|buf| progress += buf.iter().filter(|b| **b == b'\n').count() as u64); + Command::new("/bin/bash") + .input(Some(&mut reader)) + .invoke(ErrorKind::Unknown) + .await?; + + Ok::<_, Error>(()) + } + .await + { + tracing::error!("Error Running {}: {}", script.display(), e); + tracing::debug!("{:?}", e); + } + progress.complete(); } #[instrument(skip_all)] -pub async fn init(cfg: &ServerConfig) -> Result { - tokio::fs::create_dir_all("/run/embassy") +pub async fn init( + cfg: &ServerConfig, + 
InitPhases { + preinit, + mut local_auth, + mut load_database, + mut load_ssh_keys, + mut start_net, + mut mount_logs, + mut load_ca_cert, + mut load_wifi, + mut init_tmp, + mut set_governor, + mut sync_clock, + mut enable_zram, + mut update_server_info, + mut launch_service_network, + run_migrations, + mut validate_db, + postinit, + }: InitPhases, +) -> Result { + if let Some(progress) = preinit { + run_script("/media/startos/config/preinit.sh", progress).await; + } + + local_auth.start(); + tokio::fs::create_dir_all("/run/startos") .await - .with_ctx(|_| (crate::ErrorKind::Filesystem, "mkdir -p /run/embassy"))?; + .with_ctx(|_| (crate::ErrorKind::Filesystem, "mkdir -p /run/startos"))?; if tokio::fs::metadata(LOCAL_AUTH_COOKIE_PATH).await.is_err() { tokio::fs::write( LOCAL_AUTH_COOKIE_PATH, @@ -206,46 +320,41 @@ pub async fn init(cfg: &ServerConfig) -> Result { .invoke(crate::ErrorKind::Filesystem) .await?; } + local_auth.complete(); - let db = cfg.db().await?; + load_database.start(); + let db = TypedPatchDb::::load_unchecked(cfg.db().await?); let peek = db.peek().await; + load_database.complete(); tracing::info!("Opened PatchDB"); + load_ssh_keys.start(); crate::ssh::sync_keys( &peek.as_private().as_ssh_pubkeys().de()?, SSH_AUTHORIZED_KEYS_FILE, ) .await?; + load_ssh_keys.complete(); tracing::info!("Synced SSH Keys"); let account = AccountInfo::load(&peek)?; - let mut server_info = peek.as_public().as_server_info().de()?; - - // write to ca cert store - tokio::fs::write( - "/usr/local/share/ca-certificates/startos-root-ca.crt", - account.root_ca_cert.to_pem()?, + start_net.start(); + let net_ctrl = PreInitNetController::init( + db.clone(), + cfg.tor_control + .unwrap_or(SocketAddr::from(([127, 0, 0, 1], 9051))), + cfg.tor_socks.unwrap_or(SocketAddr::V4(SocketAddrV4::new( + Ipv4Addr::new(127, 0, 0, 1), + 9050, + ))), + &account.hostname, + account.tor_key, ) .await?; - Command::new("update-ca-certificates") - .invoke(crate::ErrorKind::OpenSsl) - .await?; - - if 
let Some(wifi_interface) = &cfg.wifi_interface { - crate::net::wifi::synchronize_wpa_supplicant_conf( - &cfg.datadir().join("main"), - wifi_interface, - &server_info.last_wifi_region, - ) - .await?; - tracing::info!("Synchronized WiFi"); - } - - let should_rebuild = tokio::fs::metadata(SYSTEM_REBUILD_PATH).await.is_ok() - || &*server_info.version < &emver::Version::new(0, 3, 2, 0) - || (*ARCH == "x86_64" && &*server_info.version < &emver::Version::new(0, 3, 4, 0)); + start_net.complete(); + mount_logs.start(); let log_dir = cfg.datadir().join("main/logs"); if tokio::fs::metadata(&log_dir).await.is_err() { tokio::fs::create_dir_all(&log_dir).await?; @@ -274,10 +383,49 @@ pub async fn init(cfg: &ServerConfig) -> Result { .arg("systemd-journald") .invoke(crate::ErrorKind::Journald) .await?; + mount_logs.complete(); tracing::info!("Mounted Logs"); + let mut server_info = peek.as_public().as_server_info().de()?; + + load_ca_cert.start(); + // write to ca cert store + tokio::fs::write( + "/usr/local/share/ca-certificates/startos-root-ca.crt", + account.root_ca_cert.to_pem()?, + ) + .await?; + Command::new("update-ca-certificates") + .invoke(crate::ErrorKind::OpenSsl) + .await?; + if tokio::fs::metadata("/home/kiosk/profile").await.is_ok() { + Command::new("certutil") + .arg("-A") + .arg("-n") + .arg("StartOS Local Root CA") + .arg("-t") + .arg("TCu,Cuw,Tuw") + .arg("-i") + .arg("/usr/local/share/ca-certificates/startos-root-ca.crt") + .arg("-d") + .arg("/home/kiosk/fx-profile") + .invoke(ErrorKind::OpenSsl) + .await?; + } + load_ca_cert.complete(); + + load_wifi.start(); + crate::net::wifi::synchronize_wpa_supplicant_conf( + &cfg.datadir().join("main"), + &mut server_info.wifi, + ) + .await?; + load_wifi.complete(); + tracing::info!("Synchronized WiFi"); + + init_tmp.start(); let tmp_dir = cfg.datadir().join("package-data/tmp"); - if should_rebuild && tokio::fs::metadata(&tmp_dir).await.is_ok() { + if tokio::fs::metadata(&tmp_dir).await.is_ok() { 
tokio::fs::remove_dir_all(&tmp_dir).await?; } if tokio::fs::metadata(&tmp_dir).await.is_err() { @@ -288,23 +436,40 @@ pub async fn init(cfg: &ServerConfig) -> Result { tokio::fs::remove_dir_all(&tmp_var).await?; } crate::disk::mount::util::bind(&tmp_var, "/var/tmp", false).await?; + let downloading = cfg + .datadir() + .join(format!("package-data/archive/downloading")); + if tokio::fs::metadata(&downloading).await.is_ok() { + tokio::fs::remove_dir_all(&downloading).await?; + } + let tmp_docker = cfg + .datadir() + .join(format!("package-data/tmp/{CONTAINER_TOOL}")); + crate::disk::mount::util::bind(&tmp_docker, CONTAINER_DATADIR, false).await?; + init_tmp.complete(); + set_governor.start(); let governor = if let Some(governor) = &server_info.governor { - if get_available_governors().await?.contains(governor) { + if cpupower::get_available_governors() + .await? + .contains(governor) + { Some(governor) } else { tracing::warn!("CPU Governor \"{governor}\" Not Available"); None } } else { - get_preferred_governor().await? + cpupower::get_preferred_governor().await? }; if let Some(governor) = governor { tracing::info!("Setting CPU Governor to \"{governor}\""); - set_governor(governor).await?; + cpupower::set_governor(governor).await?; tracing::info!("Set CPU Governor"); } + set_governor.complete(); + sync_clock.start(); server_info.ntp_synced = false; let mut not_made_progress = 0u32; for _ in 0..1800 { @@ -331,10 +496,15 @@ pub async fn init(cfg: &ServerConfig) -> Result { } else { tracing::info!("Syncronized system clock"); } + sync_clock.complete(); + enable_zram.start(); if server_info.zram { crate::system::enable_zram().await? 
} + enable_zram.complete(); + + update_server_info.start(); server_info.ip_info = crate::net::dhcp::init_ips().await?; server_info.status_info = ServerStatus { updated: false, @@ -343,30 +513,150 @@ pub async fn init(cfg: &ServerConfig) -> Result { shutting_down: false, restarting: false, }; - db.mutate(|v| { v.as_public_mut().as_server_info_mut().ser(&server_info)?; Ok(()) }) .await?; + update_server_info.complete(); - crate::version::init(&db).await?; + launch_service_network.start(); + Command::new("systemctl") + .arg("start") + .arg("lxc-net.service") + .invoke(ErrorKind::Lxc) + .await?; + launch_service_network.complete(); + crate::version::init(&db, run_migrations).await?; + + validate_db.start(); db.mutate(|d| { let model = d.de()?; d.ser(&model) }) .await?; + validate_db.complete(); - if should_rebuild { - match tokio::fs::remove_file(SYSTEM_REBUILD_PATH).await { - Ok(()) => Ok(()), - Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), - Err(e) => Err(e), - }?; + if let Some(progress) = postinit { + run_script("/media/startos/config/postinit.sh", progress).await; } tracing::info!("System initialized."); - Ok(InitResult { db }) + Ok(InitResult { net_ctrl }) +} + +pub fn init_api() -> ParentHandler { + ParentHandler::new() + .subcommand("logs", crate::system::logs::()) + .subcommand( + "logs", + from_fn_async(crate::logs::cli_logs::).no_display(), + ) + .subcommand("kernel-logs", crate::system::kernel_logs::()) + .subcommand( + "kernel-logs", + from_fn_async(crate::logs::cli_logs::).no_display(), + ) + .subcommand("subscribe", from_fn_async(init_progress).no_cli()) + .subcommand("subscribe", from_fn_async(cli_init_progress).no_display()) +} + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct InitProgressRes { + pub progress: FullProgress, + pub guid: Guid, +} + +pub async fn init_progress(ctx: InitContext) -> Result { + let progress_tracker = ctx.progress.clone(); + let progress = 
progress_tracker.snapshot(); + let mut error = ctx.error.subscribe(); + let guid = Guid::new(); + ctx.rpc_continuations + .add( + guid.clone(), + RpcContinuation::ws( + |mut ws| async move { + let res = tokio::try_join!( + async { + let mut stream = + progress_tracker.stream(Some(Duration::from_millis(100))); + while let Some(progress) = stream.next().await { + ws.send(ws::Message::Text( + serde_json::to_string(&progress) + .with_kind(ErrorKind::Serialization)?, + )) + .await + .with_kind(ErrorKind::Network)?; + if progress.overall.is_complete() { + break; + } + } + + Ok::<_, Error>(()) + }, + async { + if let Some(e) = error + .wait_for(|e| e.is_some()) + .await + .ok() + .and_then(|e| e.as_ref().map(|e| e.clone_output())) + { + Err::<(), _>(e) + } else { + Ok(()) + } + } + ); + + if let Err(e) = ws + .close_result(res.map(|_| "complete").map_err(|e| { + tracing::error!("error in init progress websocket: {e}"); + tracing::debug!("{e:?}"); + e + })) + .await + { + tracing::error!("error closing init progress websocket: {e}"); + tracing::debug!("{e:?}"); + } + }, + Duration::from_secs(30), + ), + ) + .await; + Ok(InitProgressRes { progress, guid }) +} + +pub async fn cli_init_progress( + HandlerArgs { + context: ctx, + parent_method, + method, + raw_params, + .. + }: HandlerArgs, +) -> Result<(), Error> { + let res: InitProgressRes = from_value( + ctx.call_remote::( + &parent_method + .into_iter() + .chain(method.into_iter()) + .join("."), + raw_params, + ) + .await?, + )?; + let mut ws = ctx.ws_continuation(res.guid).await?; + let mut bar = PhasedProgressBar::new("Initializing..."); + while let Some(msg) = ws.try_next().await.with_kind(ErrorKind::Network)? 
{ + if let tokio_tungstenite::tungstenite::Message::Text(msg) = msg { + bar.update(&serde_json::from_str(&msg).with_kind(ErrorKind::Deserialization)?); + } + } + Ok(()) } diff --git a/core/startos/src/inspect.rs b/core/startos/src/inspect.rs deleted file mode 100644 index 6d24fc32a..000000000 --- a/core/startos/src/inspect.rs +++ /dev/null @@ -1,127 +0,0 @@ -use std::path::PathBuf; - -use clap::Parser; -use rpc_toolkit::{command, from_fn_async, AnyContext, HandlerExt, ParentHandler}; -use serde::{Deserialize, Serialize}; - -use crate::context::CliContext; -use crate::s9pk::manifest::Manifest; -// use crate::s9pk::reader::S9pkReader; -use crate::util::serde::HandlerExtSerde; -use crate::Error; - -pub fn inspect() -> ParentHandler { - ParentHandler::new() - .subcommand("hash", from_fn_async(hash)) - .subcommand( - "manifest", - from_fn_async(manifest).with_display_serializable(), - ) - .subcommand("license", from_fn_async(license).no_display()) - .subcommand("icon", from_fn_async(icon).no_display()) - .subcommand("instructions", from_fn_async(instructions).no_display()) - .subcommand("docker-images", from_fn_async(docker_images).no_display()) -} - -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct HashParams { - path: PathBuf, -} - -pub async fn hash(_: CliContext, HashParams { path }: HashParams) -> Result { - Ok(S9pkReader::open(path, true) - .await? - .hash_str() - .unwrap() - .to_owned()) -} - -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct ManifestParams { - path: PathBuf, - #[arg(long = "no-verify")] - no_verify: bool, -} - -// #[command(cli_only, display(display_serializable))] -pub async fn manifest( - _: CliContext, - ManifestParams { .. 
}: ManifestParams, -) -> Result { - // S9pkReader::open(path, !no_verify).await?.manifest().await - todo!() -} - -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct InspectParams { - path: PathBuf, - #[arg(long = "no-verify")] - no_verify: bool, -} - -pub async fn license( - _: AnyContext, - InspectParams { path, no_verify }: InspectParams, -) -> Result<(), Error> { - tokio::io::copy( - &mut S9pkReader::open(path, !no_verify).await?.license().await?, - &mut tokio::io::stdout(), - ) - .await?; - Ok(()) -} - -pub async fn icon( - _: AnyContext, - InspectParams { path, no_verify }: InspectParams, -) -> Result<(), Error> { - tokio::io::copy( - &mut S9pkReader::open(path, !no_verify).await?.icon().await?, - &mut tokio::io::stdout(), - ) - .await?; - Ok(()) -} -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct InstructionParams { - path: PathBuf, - #[arg(long = "no-verify")] - no_verify: bool, -} - -pub async fn instructions( - _: CliContext, - InstructionParams { path, no_verify }: InstructionParams, -) -> Result<(), Error> { - tokio::io::copy( - &mut S9pkReader::open(path, !no_verify) - .await? - .instructions() - .await?, - &mut tokio::io::stdout(), - ) - .await?; - Ok(()) -} -pub async fn docker_images( - _: AnyContext, - InspectParams { path, no_verify }: InspectParams, -) -> Result<(), Error> { - tokio::io::copy( - &mut S9pkReader::open(path, !no_verify) - .await? 
- .docker_images() - .await?, - &mut tokio::io::stdout(), - ) - .await?; - Ok(()) -} diff --git a/core/startos/src/install/mod.rs b/core/startos/src/install/mod.rs index f20f5a5b0..7a545a3aa 100644 --- a/core/startos/src/install/mod.rs +++ b/core/startos/src/install/mod.rs @@ -1,32 +1,37 @@ +use std::ops::Deref; use std::path::PathBuf; use std::time::Duration; use clap::builder::ValueParserFactory; use clap::{value_parser, CommandFactory, FromArgMatches, Parser}; use color_eyre::eyre::eyre; -use emver::VersionRange; -use futures::{FutureExt, StreamExt}; -use patch_db::json_ptr::JsonPointer; +use exver::VersionRange; +use futures::{AsyncWriteExt, StreamExt}; +use imbl_value::{json, InternedString}; +use itertools::Itertools; +use models::VersionString; use reqwest::header::{HeaderMap, CONTENT_LENGTH}; use reqwest::Url; -use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::CallRemote; +use rpc_toolkit::yajrc::{GenericRpcMethod, RpcError}; +use rpc_toolkit::HandlerArgs; +use rustyline_async::ReadlineEvent; use serde::{Deserialize, Serialize}; -use serde_json::{json, Value}; use tokio::sync::oneshot; use tracing::instrument; use ts_rs::TS; use crate::context::{CliContext, RpcContext}; -use crate::core::rpc_continuations::{RequestGuid, RpcContinuation}; use crate::db::model::package::{ManifestPreference, PackageState, PackageStateMatchModelRef}; use crate::prelude::*; -use crate::progress::{FullProgress, PhasedProgressBar}; +use crate::progress::{FullProgress, FullProgressTracker, PhasedProgressBar}; +use crate::registry::context::{RegistryContext, RegistryUrlParams}; +use crate::registry::package::get::GetPackageResponse; +use crate::rpc_continuations::{Guid, RpcContinuation}; use crate::s9pk::manifest::PackageId; -use crate::s9pk::merkle_archive::source::http::HttpSource; -use crate::s9pk::S9pk; use crate::upload::upload; use crate::util::clap::FromStrParser; +use crate::util::io::open_file; +use crate::util::net::WebSocketExt; use crate::util::Never; pub const 
PKG_ARCHIVE_DIR: &str = "package-data/archive"; @@ -34,32 +39,33 @@ pub const PKG_PUBLIC_DIR: &str = "package-data/public"; pub const PKG_WASM_DIR: &str = "package-data/wasm"; // #[command(display(display_serializable))] -pub async fn list(ctx: RpcContext) -> Result { - Ok(ctx.db.peek().await.as_public().as_package_data().as_entries()? +pub async fn list(ctx: RpcContext) -> Result, Error> { + Ok(ctx + .db + .peek() + .await + .as_public() + .as_package_data() + .as_entries()? .iter() .filter_map(|(id, pde)| { let status = match pde.as_state_info().as_match() { - PackageStateMatchModelRef::Installed(_) => { - "installed" - } - PackageStateMatchModelRef::Installing(_) => { - "installing" - } - PackageStateMatchModelRef::Updating(_) => { - "updating" - } - PackageStateMatchModelRef::Restoring(_) => { - "restoring" - } - PackageStateMatchModelRef::Removing(_) => { - "removing" - } - PackageStateMatchModelRef::Error(_) => { - "error" - } + PackageStateMatchModelRef::Installed(_) => "installed", + PackageStateMatchModelRef::Installing(_) => "installing", + PackageStateMatchModelRef::Updating(_) => "updating", + PackageStateMatchModelRef::Restoring(_) => "restoring", + PackageStateMatchModelRef::Removing(_) => "removing", + PackageStateMatchModelRef::Error(_) => "error", }; - serde_json::to_value(json!({ "status": status, "id": id.clone(), "version": pde.as_state_info().as_manifest(ManifestPreference::Old).as_version().de().ok()?})) - .ok() + Some(json!({ + "status": status, + "id": id.clone(), + "version": pde.as_state_info() + .as_manifest(ManifestPreference::Old) + .as_version() + .de() + .ok()? 
+ })) }) .collect()) } @@ -103,164 +109,150 @@ impl std::fmt::Display for MinMax { } } -#[derive(Deserialize, Serialize, Parser, TS)] +#[derive(Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] +#[ts(export)] pub struct InstallParams { + #[ts(type = "string")] + registry: Url, id: PackageId, - #[arg(short = 'm', long = "marketplace-url")] - #[ts(type = "string | null")] - marketplace_url: Option, - #[arg(short = 'v', long = "version-spec")] - version_spec: Option, - #[arg(long = "version-priority")] - version_priority: Option, + version: VersionString, } -// #[command( -// custom_cli(cli_install(async, context(CliContext))), -// )] #[instrument(skip_all)] pub async fn install( ctx: RpcContext, InstallParams { + registry, id, - marketplace_url, - version_spec, - version_priority, + version, }: InstallParams, ) -> Result<(), Error> { - let version_str = match &version_spec { - None => "*", - Some(v) => &*v, - }; - let version: VersionRange = version_str.parse()?; - let marketplace_url = - marketplace_url.unwrap_or_else(|| crate::DEFAULT_MARKETPLACE.parse().unwrap()); - let version_priority = version_priority.unwrap_or_default(); - let s9pk = S9pk::deserialize( - &HttpSource::new( - ctx.client.clone(), - format!( - "{}/package/v0/{}.s9pk?spec={}&version-priority={}", - marketplace_url, id, version, version_priority, - ) - .parse()?, + let package: GetPackageResponse = from_value( + ctx.call_remote_with::( + "package.get", + json!({ + "id": id, + "version": VersionRange::exactly(version.deref().clone()), + }), + RegistryUrlParams { + registry: registry.clone(), + }, ) .await?, - ) - .await?; + )?; - ensure_code!( - &s9pk.as_manifest().id == &id, - ErrorKind::ValidateS9pk, - "manifest.id does not match expected" - ); + let asset = &package + .best + .get(&version) + .ok_or_else(|| { + Error::new( + eyre!("{id}@{version} not found on {registry}"), + ErrorKind::NotFound, + ) + })? 
+ .s9pk; let download = ctx .services - .install(ctx.clone(), s9pk, None::) + .install( + ctx.clone(), + || asset.deserialize_s9pk_buffered(ctx.client.clone()), + None::, + None, + ) .await?; tokio::spawn(async move { download.await?.await }); Ok(()) } -#[derive(Deserialize, Serialize)] +#[derive(Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +pub struct SideloadParams { + #[ts(skip)] + #[serde(rename = "__auth_session")] + session: InternedString, +} + +#[derive(Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] pub struct SideloadResponse { - pub upload: RequestGuid, - pub progress: RequestGuid, + pub upload: Guid, + pub progress: Guid, } #[instrument(skip_all)] -pub async fn sideload(ctx: RpcContext) -> Result { - let (upload, file) = upload(&ctx).await?; - let (id_send, id_recv) = oneshot::channel(); +pub async fn sideload( + ctx: RpcContext, + SideloadParams { session }: SideloadParams, +) -> Result { + let (upload, file) = upload(&ctx, session.clone()).await?; let (err_send, err_recv) = oneshot::channel(); - let progress = RequestGuid::new(); - let db = ctx.db.clone(); - let mut sub = db - .subscribe( - "/package-data/{id}/install-progress" - .parse::() - .with_kind(ErrorKind::Database)?, - ) - .await; - ctx.add_continuation( - progress.clone(), - RpcContinuation::ws( - Box::new(|mut ws| { - use axum::extract::ws::Message; - async move { - if let Err(e) = async { - let id = id_recv.await.map_err(|_| { - Error::new( - eyre!("Could not get id to watch progress"), - ErrorKind::Cancelled, - ) - })?; - tokio::select! { - res = async { - while let Some(rev) = sub.recv().await { - if !rev.patch.0.is_empty() { // TODO: don't send empty patches? 
+ let progress = Guid::new(); + let progress_tracker = FullProgressTracker::new(); + let mut progress_listener = progress_tracker.stream(Some(Duration::from_millis(200))); + ctx.rpc_continuations + .add( + progress.clone(), + RpcContinuation::ws_authed( + &ctx, + session, + |mut ws| { + use axum::extract::ws::Message; + async move { + if let Err(e) = async { + type RpcResponse = rpc_toolkit::yajrc::RpcResponse::>; + tokio::select! { + res = async { + while let Some(progress) = progress_listener.next().await { ws.send(Message::Text( - serde_json::to_string(&if let Some(p) = db - .peek() - .await - .as_public() - .as_package_data() - .as_idx(&id) - .and_then(|e| e.as_state_info().as_installing_info()).map(|i| i.as_progress()) - { - Ok::<_, ()>(p.de()?) - } else { - let mut p = FullProgress::new(); - p.overall.complete(); - Ok(p) - }) + serde_json::to_string(&RpcResponse::from_result::(Ok(progress))) + .with_kind(ErrorKind::Serialization)?, + )) + .await + .with_kind(ErrorKind::Network)?; + } + Ok::<_, Error>(()) + } => res?, + err = err_recv => { + if let Ok(e) = err { + ws.send(Message::Text( + serde_json::to_string(&RpcResponse::from_result::(Err(e))) .with_kind(ErrorKind::Serialization)?, )) .await .with_kind(ErrorKind::Network)?; } } - Ok::<_, Error>(()) - } => res?, - err = err_recv => { - if let Ok(e) = err { - ws.send(Message::Text( - serde_json::to_string(&Err::<(), _>(e)) - .with_kind(ErrorKind::Serialization)?, - )) - .await - .with_kind(ErrorKind::Network)?; - } } + + ws.normal_close("complete").await?; + + Ok::<_, Error>(()) + } + .await + { + tracing::error!("Error tracking sideload progress: {e}"); + tracing::debug!("{e:?}"); } - - ws.close().await.with_kind(ErrorKind::Network)?; - - Ok::<_, Error>(()) } - .await - { - tracing::error!("Error tracking sideload progress: {e}"); - tracing::debug!("{e:?}"); - } - } - .boxed() - }), - Duration::from_secs(600), - ), - ) - .await; + }, + Duration::from_secs(600), + ), + ) + .await; tokio::spawn(async move { if 
let Err(e) = async { - let s9pk = S9pk::deserialize(&file).await?; - let _ = id_send.send(s9pk.as_manifest().id.clone()); + let key = ctx.db.peek().await.into_private().into_compat_s9pk_key(); + ctx.services - .install(ctx.clone(), s9pk, None::) + .install( + ctx.clone(), + || crate::s9pk::load(file.clone(), || Ok(key.de()?.0), Some(&progress_tracker)), + None::, + Some(progress_tracker.clone()), + ) .await? .await? .await?; @@ -276,10 +268,16 @@ pub async fn sideload(ctx: RpcContext) -> Result { Ok(SideloadResponse { upload, progress }) } +#[derive(Deserialize, Serialize, Parser)] +pub struct QueryPackageParams { + id: PackageId, + version: Option, +} + #[derive(Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub enum CliInstallParams { - Marketplace(InstallParams), + Marketplace(QueryPackageParams), Sideload(PathBuf), } impl CommandFactory for CliInstallParams { @@ -293,14 +291,19 @@ impl CommandFactory for CliInstallParams { .required_unless_present("id") .value_parser(value_parser!(PathBuf)), ) - .args(InstallParams::command().get_arguments().cloned().map(|a| { - if a.get_id() == "id" { - a.required(false).required_unless_present("sideload") - } else { - a - } - .conflicts_with("sideload") - })) + .args( + QueryPackageParams::command() + .get_arguments() + .cloned() + .map(|a| { + if a.get_id() == "id" { + a.required(false).required_unless_present("sideload") + } else { + a + } + .conflicts_with("sideload") + }), + ) } fn command_for_update() -> clap::Command { Self::command() @@ -311,7 +314,9 @@ impl FromArgMatches for CliInstallParams { if let Some(sideload) = matches.get_one::("sideload") { Ok(Self::Sideload(sideload.clone())) } else { - Ok(Self::Marketplace(InstallParams::from_arg_matches(matches)?)) + Ok(Self::Marketplace(QueryPackageParams::from_arg_matches( + matches, + )?)) } } fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> { @@ -320,16 +325,60 @@ impl FromArgMatches for CliInstallParams { } } 
+#[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] +pub struct InstalledVersionParams { + id: PackageId, +} + +pub async fn installed_version( + ctx: RpcContext, + InstalledVersionParams { id }: InstalledVersionParams, +) -> Result, Error> { + if let Some(pde) = ctx + .db + .peek() + .await + .into_public() + .into_package_data() + .into_idx(&id) + { + Ok(Some( + pde.into_state_info() + .as_manifest(ManifestPreference::Old) + .as_version() + .de()?, + )) + } else { + Ok(None) + } +} + #[instrument(skip_all)] -pub async fn cli_install(ctx: CliContext, params: CliInstallParams) -> Result<(), RpcError> { +pub async fn cli_install( + HandlerArgs { + context: ctx, + parent_method, + method, + params, + .. + }: HandlerArgs, +) -> Result<(), RpcError> { + let method = parent_method.into_iter().chain(method).collect_vec(); match params { CliInstallParams::Sideload(path) => { - let file = crate::s9pk::load(&ctx, path).await?; + let file = open_file(path).await?; // rpc call remote sideload let SideloadResponse { upload, progress } = from_value::( - ctx.call_remote("package.sideload", imbl_value::json!({})) - .await?, + ctx.call_remote::( + &method[..method.len() - 1] + .into_iter() + .chain(std::iter::once(&"sideload")) + .join("."), + imbl_value::json!({}), + ) + .await?, )?; let upload = async { @@ -358,14 +407,18 @@ pub async fn cli_install(ctx: CliContext, params: CliInstallParams) -> Result<() let mut progress = FullProgress::new(); + type RpcResponse = rpc_toolkit::yajrc::RpcResponse< + GenericRpcMethod<&'static str, (), FullProgress>, + >; + loop { tokio::select! { msg = ws.next() => { if let Some(msg) = msg { if let Message::Text(t) = msg.with_kind(ErrorKind::Network)? 
{ progress = - serde_json::from_str::>(&t) - .with_kind(ErrorKind::Deserialization)??; + serde_json::from_str::(&t) + .with_kind(ErrorKind::Deserialization)?.result?; bar.update(&progress); } } else { @@ -385,9 +438,70 @@ pub async fn cli_install(ctx: CliContext, params: CliInstallParams) -> Result<() progress?; upload?; } - CliInstallParams::Marketplace(params) => { - ctx.call_remote("package.install", to_value(¶ms)?) - .await?; + CliInstallParams::Marketplace(QueryPackageParams { id, version }) => { + let source_version: Option = from_value( + ctx.call_remote::("package.installed-version", json!({ "id": &id })) + .await?, + )?; + let mut packages: GetPackageResponse = from_value( + ctx.call_remote::( + "package.get", + json!({ "id": &id, "version": version, "sourceVersion": source_version }), + ) + .await?, + )?; + let version = if packages.best.len() == 1 { + packages.best.pop_first().map(|(k, _)| k).unwrap() + } else { + println!("Multiple flavors of {id} found. Please select one of the following versions to install:"); + let version; + loop { + let (mut read, mut output) = rustyline_async::Readline::new("> ".into()) + .with_kind(ErrorKind::Filesystem)?; + for (idx, version) in packages.best.keys().enumerate() { + output + .write_all(format!(" {}) {}\n", idx + 1, version).as_bytes()) + .await?; + read.add_history_entry(version.to_string()); + } + if let ReadlineEvent::Line(line) = read.readline().await? 
{ + let trimmed = line.trim(); + match trimmed.parse() { + Ok(v) => { + if let Some((k, _)) = packages.best.remove_entry(&v) { + version = k; + break; + } + } + Err(_) => match trimmed.parse::() { + Ok(i) if (1..=packages.best.len()).contains(&i) => { + version = packages.best.keys().nth(i - 1).unwrap().clone(); + break; + } + _ => (), + }, + } + eprintln!("invalid selection: {trimmed}"); + println!("Please select one of the following versions to install:"); + } else { + return Err(Error::new( + eyre!("Could not determine precise version to install"), + ErrorKind::InvalidRequest, + ) + .into()); + } + } + version + }; + ctx.call_remote::( + &method.join("."), + to_value(&InstallParams { + id, + registry: ctx.registry_url.clone().or_not_found("--registry")?, + version, + })?, + ) + .await?; } } Ok(()) @@ -423,7 +537,12 @@ pub async fn uninstall( let return_id = id.clone(); - tokio::spawn(async move { ctx.services.uninstall(&ctx, &id).await }); + tokio::spawn(async move { + if let Err(e) = ctx.services.uninstall(&ctx, &id).await { + tracing::error!("Error uninstalling service {id}: {e}"); + tracing::debug!("{e:?}"); + } + }); Ok(return_id) } diff --git a/core/startos/src/lib.rs b/core/startos/src/lib.rs index 448db9ff2..feeb5a647 100644 --- a/core/startos/src/lib.rs +++ b/core/startos/src/lib.rs @@ -1,13 +1,8 @@ -pub const DEFAULT_MARKETPLACE: &str = "https://registry.start9.com"; +pub const DEFAULT_REGISTRY: &str = "https://registry.start9.com"; // pub const COMMUNITY_MARKETPLACE: &str = "https://community-registry.start9.com"; -pub const BUFFER_SIZE: usize = 1024; pub const HOST_IP: [u8; 4] = [172, 18, 0, 1]; -pub const TARGET: &str = current_platform::CURRENT_PLATFORM; +pub use std::env::consts::ARCH; lazy_static::lazy_static! 
{ - pub static ref ARCH: &'static str = { - let (arch, _) = TARGET.split_once("-").unwrap(); - arch - }; pub static ref PLATFORM: String = { if let Ok(platform) = std::fs::read_to_string("/usr/lib/startos/PLATFORM.txt") { platform @@ -20,6 +15,15 @@ lazy_static::lazy_static! { }; } +mod cap { + #![allow(non_upper_case_globals)] + + pub const CAP_1_KiB: usize = 1024; + pub const CAP_1_MiB: usize = CAP_1_KiB * CAP_1_KiB; + pub const CAP_10_MiB: usize = 10 * CAP_1_MiB; +} +pub use cap::*; + pub mod account; pub mod action; pub mod auth; @@ -28,7 +32,6 @@ pub mod bins; pub mod config; pub mod context; pub mod control; -pub mod core; pub mod db; pub mod dependencies; pub mod developer; @@ -38,8 +41,6 @@ pub mod error; pub mod firmware; pub mod hostname; pub mod init; -pub mod progress; -// pub mod inspect; pub mod install; pub mod logs; pub mod lxc; @@ -48,8 +49,10 @@ pub mod net; pub mod notifications; pub mod os_install; pub mod prelude; +pub mod progress; pub mod properties; pub mod registry; +pub mod rpc_continuations; pub mod s9pk; pub mod service; pub mod setup; @@ -72,117 +75,179 @@ pub use error::{Error, ErrorKind, ResultExt}; use imbl_value::Value; use rpc_toolkit::yajrc::RpcError; use rpc_toolkit::{ - command, from_fn, from_fn_async, from_fn_blocking, AnyContext, HandlerExt, ParentHandler, + from_fn, from_fn_async, from_fn_blocking, CallRemoteHandler, Context, Empty, HandlerExt, + ParentHandler, }; use serde::{Deserialize, Serialize}; use ts_rs::TS; -use crate::context::CliContext; +use crate::context::{ + CliContext, DiagnosticContext, InitContext, InstallContext, RpcContext, SetupContext, +}; +use crate::disk::fsck::RequiresReboot; +use crate::registry::context::{RegistryContext, RegistryUrlParams}; use crate::util::serde::HandlerExtSerde; #[derive(Deserialize, Serialize, Parser, TS)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] +#[ts(export)] pub struct EchoParams { message: String, } -pub fn echo(_: AnyContext, EchoParams { 
message }: EchoParams) -> Result { +pub fn echo(_: C, EchoParams { message }: EchoParams) -> Result { Ok(message) } -pub fn main_api() -> ParentHandler { - ParentHandler::new() - .subcommand("git-info", from_fn(version::git_info)) - .subcommand( - "echo", - from_fn(echo) - .with_metadata("authenticated", Value::Bool(false)) - .with_remote_cli::(), - ) - .subcommand("init", from_fn_blocking(developer::init).no_display()) - .subcommand("server", server()) - .subcommand("package", package()) - .subcommand("net", net::net()) - .subcommand("auth", auth::auth()) - .subcommand("db", db::db()) - .subcommand("ssh", ssh::ssh()) - .subcommand("wifi", net::wifi::wifi()) - .subcommand("disk", disk::disk()) - .subcommand("notification", notifications::notification()) - .subcommand("backup", backup::backup()) - .subcommand("marketplace", registry::marketplace::marketplace()) - .subcommand("lxc", lxc::lxc()) - .subcommand("s9pk", s9pk::rpc::s9pk()) +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub enum ApiState { + Error, + Initializing, + Running, +} +impl std::fmt::Display for ApiState { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Debug::fmt(&self, f) + } } -pub fn server() -> ParentHandler { +pub fn main_api() -> ParentHandler { + let api = ParentHandler::new() + .subcommand::("git-info", from_fn(version::git_info)) + .subcommand( + "echo", + from_fn(echo::) + .with_metadata("authenticated", Value::Bool(false)) + .with_call_remote::(), + ) + .subcommand( + "state", + from_fn(|_: RpcContext| Ok::<_, Error>(ApiState::Running)) + .with_metadata("authenticated", Value::Bool(false)) + .with_call_remote::(), + ) + .subcommand("server", server::()) + .subcommand("package", package::()) + .subcommand("net", net::net::()) + .subcommand("auth", auth::auth::()) + .subcommand("db", db::db::()) + .subcommand("ssh", ssh::ssh::()) + .subcommand("wifi", net::wifi::wifi::()) + .subcommand("disk", 
disk::disk::()) + .subcommand("notification", notifications::notification::()) + .subcommand("backup", backup::backup::()) + .subcommand( + "registry", + CallRemoteHandler::::new( + registry::registry_api::(), + ) + .no_cli(), + ) + .subcommand("s9pk", s9pk::rpc::s9pk()) + .subcommand("util", util::rpc::util::()); + #[cfg(feature = "dev")] + let api = api.subcommand("lxc", lxc::dev::lxc::()); + api +} + +pub fn server() -> ParentHandler { ParentHandler::new() .subcommand( "time", from_fn_async(system::time) .with_display_serializable() - .with_custom_display_fn::(|handle, result| { + .with_custom_display_fn(|handle, result| { Ok(system::display_time(handle.params, result)) }) - .with_remote_cli::(), + .with_call_remote::(), + ) + .subcommand("experimental", system::experimental::()) + .subcommand("logs", system::logs::()) + .subcommand( + "logs", + from_fn_async(logs::cli_logs::).no_display(), + ) + .subcommand("kernel-logs", system::kernel_logs::()) + .subcommand( + "kernel-logs", + from_fn_async(logs::cli_logs::).no_display(), ) - .subcommand("experimental", system::experimental()) - .subcommand("logs", system::logs()) - .subcommand("kernel-logs", system::kernel_logs()) .subcommand( "metrics", from_fn_async(system::metrics) .with_display_serializable() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "shutdown", from_fn_async(shutdown::shutdown) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "restart", from_fn_async(shutdown::restart) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "rebuild", from_fn_async(shutdown::rebuild) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "update", from_fn_async(update::update_system) .with_metadata("sync_db", Value::Bool(true)) - .with_custom_display_fn::(|handle, result| { - Ok(update::display_update_result(handle.params, result)) - }) - .with_remote_cli::(), + .no_cli(), + ) + .subcommand( + "update", + 
from_fn_async(update::cli_update_system).no_display(), ) .subcommand( "update-firmware", - from_fn_async(firmware::update_firmware) - .with_custom_display_fn::(|_handle, result| { - Ok(firmware::display_firmware_update_result(result)) - }) - .with_remote_cli::(), + from_fn_async(|_: RpcContext| async { + if let Some(firmware) = firmware::check_for_firmware_update().await? { + firmware::update_firmware(firmware).await?; + Ok::<_, Error>(RequiresReboot(true)) + } else { + Ok(RequiresReboot(false)) + } + }) + .with_custom_display_fn(|_handle, result| { + Ok(firmware::display_firmware_update_result(result)) + }) + .with_call_remote::(), + ) + .subcommand( + "set-smtp", + from_fn_async(system::set_system_smtp) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "clear-smtp", + from_fn_async(system::clear_system_smtp) + .no_display() + .with_call_remote::(), ) } -pub fn package() -> ParentHandler { +pub fn package() -> ParentHandler { ParentHandler::new() .subcommand( "action", from_fn_async(action::action) .with_display_serializable() - .with_custom_display_fn::(|handle, result| { + .with_custom_display_fn(|handle, result| { Ok(action::display_action_result(handle.params, result)) }) - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "install", @@ -190,54 +255,69 @@ pub fn package() -> ParentHandler { .with_metadata("sync_db", Value::Bool(true)) .no_cli(), ) - .subcommand("sideload", from_fn_async(install::sideload).no_cli()) + .subcommand( + "sideload", + from_fn_async(install::sideload) + .with_metadata("get_session", Value::Bool(true)) + .no_cli(), + ) .subcommand("install", from_fn_async(install::cli_install).no_display()) .subcommand( "uninstall", from_fn_async(install::uninstall) .with_metadata("sync_db", Value::Bool(true)) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "list", from_fn_async(install::list) .with_display_serializable() - .with_remote_cli::(), + .with_call_remote::(), ) - .subcommand("config", 
config::config()) + .subcommand( + "installed-version", + from_fn_async(install::installed_version) + .with_display_serializable() + .with_call_remote::(), + ) + .subcommand("config", config::config::()) .subcommand( "start", from_fn_async(control::start) .with_metadata("sync_db", Value::Bool(true)) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "stop", from_fn_async(control::stop) .with_metadata("sync_db", Value::Bool(true)) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "restart", from_fn_async(control::restart) .with_metadata("sync_db", Value::Bool(true)) .no_display() - .with_remote_cli::(), + .with_call_remote::(), + ) + .subcommand("logs", logs::package_logs()) + .subcommand( + "logs", + from_fn_async(logs::cli_logs::).no_display(), ) - .subcommand("logs", logs::logs()) .subcommand( "properties", from_fn_async(properties::properties) - .with_custom_display_fn::(|_handle, result| { + .with_custom_display_fn(|_handle, result| { Ok(properties::display_properties(result)) }) - .with_remote_cli::(), + .with_call_remote::(), ) - .subcommand("dependency", dependencies::dependency()) - .subcommand("backup", backup::package_backup()) + .subcommand("dependency", dependencies::dependency::()) + .subcommand("backup", backup::package_backup::()) .subcommand("connect", from_fn_async(service::connect_rpc).no_cli()) .subcommand( "connect", @@ -245,32 +325,76 @@ pub fn package() -> ParentHandler { ) } -pub fn diagnostic_api() -> ParentHandler { +pub fn diagnostic_api() -> ParentHandler { ParentHandler::new() - .subcommand( + .subcommand::( "git-info", from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), ) - .subcommand("echo", from_fn(echo).with_remote_cli::()) - .subcommand("diagnostic", diagnostic::diagnostic()) + .subcommand( + "echo", + from_fn(echo::).with_call_remote::(), + ) + .subcommand( + "state", + from_fn(|_: DiagnosticContext| Ok::<_, Error>(ApiState::Error)) + 
.with_metadata("authenticated", Value::Bool(false)) + .with_call_remote::(), + ) + .subcommand("diagnostic", diagnostic::diagnostic::()) } -pub fn setup_api() -> ParentHandler { +pub fn init_api() -> ParentHandler { ParentHandler::new() - .subcommand( + .subcommand::( "git-info", from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), ) - .subcommand("echo", from_fn(echo).with_remote_cli::()) - .subcommand("setup", setup::setup()) + .subcommand( + "echo", + from_fn(echo::).with_call_remote::(), + ) + .subcommand( + "state", + from_fn(|_: InitContext| Ok::<_, Error>(ApiState::Initializing)) + .with_metadata("authenticated", Value::Bool(false)) + .with_call_remote::(), + ) + .subcommand("init", init::init_api::()) } -pub fn install_api() -> ParentHandler { +pub fn setup_api() -> ParentHandler { ParentHandler::new() - .subcommand( + .subcommand::( "git-info", from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), ) - .subcommand("echo", from_fn(echo).with_remote_cli::()) - .subcommand("install", os_install::install()) + .subcommand( + "echo", + from_fn(echo::).with_call_remote::(), + ) + .subcommand("setup", setup::setup::()) +} + +pub fn install_api() -> ParentHandler { + ParentHandler::new() + .subcommand::( + "git-info", + from_fn(version::git_info).with_metadata("authenticated", Value::Bool(false)), + ) + .subcommand( + "echo", + from_fn(echo::).with_call_remote::(), + ) + .subcommand("install", os_install::install::()) +} + +pub fn expanded_api() -> ParentHandler { + main_api() + .subcommand("init", from_fn_blocking(developer::init).no_display()) + .subcommand("pubkey", from_fn_blocking(developer::pubkey)) + .subcommand("diagnostic", diagnostic::diagnostic::()) + .subcommand("setup", setup::setup::()) + .subcommand("install", os_install::install::()) + .subcommand("registry", registry::registry_api::()) } diff --git a/core/startos/src/logs.rs b/core/startos/src/logs.rs index 1cd84c331..9cf234f5f 100644 --- 
a/core/startos/src/logs.rs +++ b/core/startos/src/logs.rs @@ -1,16 +1,23 @@ +use std::convert::Infallible; use std::ops::{Deref, DerefMut}; use std::process::Stdio; +use std::str::FromStr; use std::time::{Duration, UNIX_EPOCH}; use axum::extract::ws::{self, WebSocket}; use chrono::{DateTime, Utc}; -use clap::Parser; +use clap::builder::ValueParserFactory; +use clap::{Args, FromArgMatches, Parser}; use color_eyre::eyre::eyre; use futures::stream::BoxStream; -use futures::{FutureExt, Stream, StreamExt, TryStreamExt}; +use futures::{Future, FutureExt, Stream, StreamExt, TryStreamExt}; +use itertools::Itertools; use models::PackageId; use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{command, from_fn_async, CallRemote, Empty, HandlerExt, ParentHandler}; +use rpc_toolkit::{ + from_fn_async, CallRemote, Context, Empty, HandlerArgs, HandlerExt, HandlerFor, ParentHandler, +}; +use serde::de::{self, DeserializeOwned}; use serde::{Deserialize, Serialize}; use tokio::io::{AsyncBufReadExt, BufReader}; use tokio::process::{Child, Command}; @@ -19,14 +26,17 @@ use tokio_tungstenite::tungstenite::Message; use tracing::instrument; use crate::context::{CliContext, RpcContext}; -use crate::core::rpc_continuations::{RequestGuid, RpcContinuation}; use crate::error::ResultExt; +use crate::lxc::ContainerId; use crate::prelude::*; +use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations}; +use crate::util::clap::FromStrParser; use crate::util::serde::Reversible; +use crate::util::Invoke; #[pin_project::pin_project] pub struct LogStream { - _child: Child, + _child: Option, #[pin] entries: BoxStream<'static, Result>, } @@ -112,13 +122,15 @@ pub struct LogResponse { #[serde(rename_all = "camelCase")] pub struct LogFollowResponse { start_cursor: Option, - guid: RequestGuid, + guid: Guid, } #[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] pub struct LogEntry { timestamp: DateTime, message: String, + boot_id: String, } impl 
std::fmt::Display for LogEntry { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { @@ -141,6 +153,8 @@ pub struct JournalctlEntry { pub message: String, #[serde(rename = "__CURSOR")] pub cursor: String, + #[serde(rename = "_BOOT_ID")] + pub boot_id: String, } impl JournalctlEntry { fn log_entry(self) -> Result<(String, LogEntry), Error> { @@ -151,6 +165,7 @@ impl JournalctlEntry { UNIX_EPOCH + Duration::from_micros(self.timestamp.parse::()?), ), message: self.message, + boot_id: self.boot_id, }, )) } @@ -169,7 +184,13 @@ fn deserialize_log_message<'de, D: serde::de::Deserializer<'de>>( where E: serde::de::Error, { - Ok(v.trim().to_owned()) + Ok(v.to_owned()) + } + fn visit_string(self, v: String) -> Result + where + E: de::Error, + { + Ok(v) } fn visit_unit(self) -> Result where @@ -187,7 +208,7 @@ fn deserialize_log_message<'de, D: serde::de::Deserializer<'de>>( .flatten() .collect::, _>>()?, ) - .map(|s| s.trim().to_owned()) + .map(|s| s.to_owned()) .map_err(serde::de::Error::custom) } } @@ -200,12 +221,11 @@ fn deserialize_log_message<'de, D: serde::de::Deserializer<'de>>( /// --user-unit=UNIT Show logs from the specified user unit)) /// System: Unit is startd, but we also filter on the comm /// Container: Filtering containers, like podman/docker is done by filtering on the CONTAINER_NAME -#[derive(Debug)] +#[derive(Debug, Clone)] pub enum LogSource { Kernel, Unit(&'static str), - System, - Container(PackageId), + Container(ContainerId), } pub const SYSTEM_UNIT: &str = "startd"; @@ -213,177 +233,307 @@ pub const SYSTEM_UNIT: &str = "startd"; #[derive(Deserialize, Serialize, Parser)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] -pub struct LogsParam { +pub struct PackageIdParams { id: PackageId, +} + +#[derive(Debug, Clone)] +pub enum BootIdentifier { + Index(i32), + Id(String), +} +impl FromStr for BootIdentifier { + type Err = Infallible; + fn from_str(s: &str) -> Result { + Ok(match s.parse() { + Ok(i) => 
Self::Index(i), + Err(_) => Self::Id(s.to_owned()), + }) + } +} +impl ValueParserFactory for BootIdentifier { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + Self::Parser::new() + } +} +impl Serialize for BootIdentifier { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + match self { + Self::Index(i) => serializer.serialize_i32(*i), + Self::Id(i) => serializer.serialize_str(i), + } + } +} +impl<'de> Deserialize<'de> for BootIdentifier { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + struct Visitor; + impl<'de> de::Visitor<'de> for Visitor { + type Value = BootIdentifier; + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { + write!(formatter, "a string or integer") + } + fn visit_str(self, v: &str) -> Result + where + E: de::Error, + { + Ok(Self::Value::Id(v.to_owned())) + } + fn visit_string(self, v: String) -> Result + where + E: de::Error, + { + Ok(Self::Value::Id(v)) + } + fn visit_i64(self, v: i64) -> Result + where + E: de::Error, + { + Ok(Self::Value::Index(v as i32)) + } + fn visit_f64(self, v: f64) -> Result + where + E: de::Error, + { + Ok(Self::Value::Index(v as i32)) + } + fn visit_u64(self, v: u64) -> Result + where + E: de::Error, + { + Ok(Self::Value::Index(v as i32)) + } + } + deserializer.deserialize_any(Visitor) + } +} +impl From for String { + fn from(value: BootIdentifier) -> Self { + match value { + BootIdentifier::Index(i) => i.to_string(), + BootIdentifier::Id(i) => i, + } + } +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct LogsParams { + #[command(flatten)] + #[serde(flatten)] + extra: Extra, #[arg(short = 'l', long = "limit")] limit: Option, - #[arg(short = 'c', long = "cursor")] + #[arg(short = 'c', long = "cursor", conflicts_with = "follow")] cursor: Option, - #[arg(short = 'B', long = "before")] + #[arg(short = 'b', long = 
"boot")] + #[serde(default)] + boot: Option, + #[arg(short = 'B', long = "before", conflicts_with = "follow")] #[serde(default)] before: bool, +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct CliLogsParams { + #[command(flatten)] + #[serde(flatten)] + rpc_params: LogsParams, #[arg(short = 'f', long = "follow")] #[serde(default)] follow: bool, } -pub fn logs() -> ParentHandler { - ParentHandler::::new() +#[allow(private_bounds)] +pub fn logs< + C: Context + AsRef, + Extra: FromArgMatches + Serialize + DeserializeOwned + Args + Send + Sync + 'static, +>( + source: impl for<'a> LogSourceFn<'a, C, Extra>, +) -> ParentHandler> { + ParentHandler::new() .root_handler( - from_fn_async(cli_logs) - .no_display() - .with_inherited(|params, _| params), - ) - .root_handler( - from_fn_async(logs_follow) + logs_nofollow::(source.clone()) .with_inherited(|params, _| params) .no_cli(), ) .subcommand( "follow", - from_fn_async(logs_follow) + logs_follow::(source) .with_inherited(|params, _| params) .no_cli(), ) } -pub async fn cli_logs( - ctx: CliContext, - _: Empty, - LogsParam { - id, - limit, - cursor, - before, - follow, - }: LogsParam, -) -> Result<(), RpcError> { - if follow { - if cursor.is_some() { - return Err(RpcError::from(Error::new( - eyre!("The argument '--cursor ' cannot be used with '--follow'"), - crate::ErrorKind::InvalidRequest, - ))); - } - if before { - return Err(RpcError::from(Error::new( - eyre!("The argument '--before' cannot be used with '--follow'"), - crate::ErrorKind::InvalidRequest, - ))); - } - cli_logs_generic_follow(ctx, "package.logs.follow", Some(id), limit).await - } else { - cli_logs_generic_nofollow(ctx, "package.logs", Some(id), limit, cursor, before).await - } -} -pub async fn logs_nofollow( - _ctx: (), - _: Empty, - LogsParam { - id, - limit, - cursor, - before, + +pub async fn cli_logs( + HandlerArgs { + context: ctx, + parent_method, + method, + params: 
CliLogsParams { rpc_params, follow }, .. - }: LogsParam, -) -> Result { - fetch_logs(LogSource::Container(id), limit, cursor, before).await -} -pub async fn logs_follow( - ctx: RpcContext, - _: Empty, - LogsParam { id, limit, .. }: LogsParam, -) -> Result { - follow_logs(ctx, LogSource::Container(id), limit).await -} + }: HandlerArgs>, +) -> Result<(), RpcError> +where + CliContext: CallRemote, + Extra: FromArgMatches + Args + Serialize + Send + Sync, +{ + let method = parent_method + .into_iter() + .chain(method) + .chain(follow.then_some("follow")) + .join("."); -pub async fn cli_logs_generic_nofollow( - ctx: CliContext, - method: &str, - id: Option, - limit: Option, - cursor: Option, - before: bool, -) -> Result<(), RpcError> { - let res = from_value::( - ctx.call_remote( - method, - imbl_value::json!({ - "id": id, - "limit": limit, - "cursor": cursor, - "before": before, - }), - ) - .await?, - )?; + if follow { + let res = from_value::( + ctx.call_remote::(&method, to_value(&rpc_params)?) + .await?, + )?; - for entry in res.entries.iter() { - println!("{}", entry); - } + let mut stream = ctx.ws_continuation(res.guid).await?; + while let Some(log) = stream.try_next().await? { + if let Message::Text(log) = log { + println!("{}", serde_json::from_str::(&log)?); + } + } + } else { + let res = from_value::( + ctx.call_remote::(&method, to_value(&rpc_params)?) + .await?, + )?; - Ok(()) -} - -pub async fn cli_logs_generic_follow( - ctx: CliContext, - method: &str, - id: Option, - limit: Option, -) -> Result<(), RpcError> { - let res = from_value::( - ctx.call_remote( - method, - imbl_value::json!({ - "id": id, - "limit": limit, - }), - ) - .await?, - )?; - - let mut stream = ctx.ws_continuation(res.guid).await?; - while let Some(log) = stream.try_next().await? 
{ - if let Message::Text(log) = log { - println!("{}", serde_json::from_str::(&log)?); + for entry in res.entries.iter() { + println!("{}", entry); } } Ok(()) } +trait LogSourceFn<'a, Context, Extra>: Clone + Send + Sync + 'static { + type Fut: Future> + Send + 'a; + fn call(&self, ctx: &'a Context, extra: Extra) -> Self::Fut; +} + +impl<'a, C: Context, Extra, F, Fut> LogSourceFn<'a, C, Extra> for F +where + F: Fn(&'a C, Extra) -> Fut + Clone + Send + Sync + 'static, + Fut: Future> + Send + 'a, +{ + type Fut = Fut; + fn call(&self, ctx: &'a C, extra: Extra) -> Self::Fut { + self(ctx, extra) + } +} + +fn logs_nofollow( + f: impl for<'a> LogSourceFn<'a, C, Extra>, +) -> impl HandlerFor, Ok = LogResponse, Err = Error> +where + C: Context, + Extra: FromArgMatches + Args + Send + Sync + 'static, +{ + from_fn_async( + move |HandlerArgs { + context, + inherited_params: + LogsParams { + extra, + limit, + cursor, + boot, + before, + }, + .. + }: HandlerArgs>| { + let f = f.clone(); + async move { + fetch_logs( + f.call(&context, extra).await?, + limit, + cursor, + boot.map(String::from), + before, + ) + .await + } + }, + ) +} + +fn logs_follow< + C: Context + AsRef, + Extra: FromArgMatches + Args + Send + Sync + 'static, +>( + f: impl for<'a> LogSourceFn<'a, C, Extra>, +) -> impl HandlerFor< + C, + Params = Empty, + InheritedParams = LogsParams, + Ok = LogFollowResponse, + Err = Error, +> { + from_fn_async( + move |HandlerArgs { + context, + inherited_params: + LogsParams { + extra, limit, boot, .. + }, + .. 
+ }: HandlerArgs>| { + let f = f.clone(); + async move { + let src = f.call(&context, extra).await?; + follow_logs(context, src, limit, boot.map(String::from)).await + } + }, + ) +} + +async fn get_package_id( + ctx: &RpcContext, + PackageIdParams { id }: PackageIdParams, +) -> Result { + let container_id = ctx + .services + .get(&id) + .await + .as_ref() + .map(|x| x.container_id()) + .ok_or_else(|| { + Error::new( + eyre!("No service found with id: {}", id), + ErrorKind::NotFound, + ) + })??; + Ok(LogSource::Container(container_id)) +} + +pub fn package_logs() -> ParentHandler> { + logs::(get_package_id) +} + pub async fn journalctl( id: LogSource, limit: usize, cursor: Option<&str>, + boot: Option<&str>, before: bool, follow: bool, ) -> Result { - let mut cmd = Command::new("journalctl"); - cmd.kill_on_drop(true); + let mut cmd = gen_journalctl_command(&id); - cmd.arg("--output=json"); - cmd.arg("--output-fields=MESSAGE"); - cmd.arg(format!("-n{}", limit)); - match id { - LogSource::Kernel => { - cmd.arg("-k"); - } - LogSource::Unit(id) => { - cmd.arg("-u"); - cmd.arg(id); - } - LogSource::System => { - cmd.arg("-u"); - cmd.arg(SYSTEM_UNIT); - cmd.arg(format!("_COMM={}", SYSTEM_UNIT)); - } - LogSource::Container(id) => { - #[cfg(not(feature = "docker"))] - cmd.arg(format!("SYSLOG_IDENTIFIER={}.embassy", id)); - #[cfg(feature = "docker")] - cmd.arg(format!("CONTAINER_NAME={}.embassy", id)); - } - }; + cmd.arg(format!("--lines={}", limit)); let cursor_formatted = format!("--after-cursor={}", cursor.unwrap_or("")); if cursor.is_some() { @@ -392,33 +542,91 @@ pub async fn journalctl( cmd.arg("--reverse"); } } - if follow { - cmd.arg("--follow"); + + if let Some(boot) = boot { + cmd.arg(format!("--boot={boot}")); + } else { + cmd.arg("--boot=all"); } - let mut child = cmd.stdout(Stdio::piped()).spawn()?; - let out = BufReader::new( - child - .stdout - .take() - .ok_or_else(|| Error::new(eyre!("No stdout available"), crate::ErrorKind::Journald))?, - ); + let 
deserialized_entries = String::from_utf8(cmd.invoke(ErrorKind::Journald).await?)? + .lines() + .map(serde_json::from_str::) + .collect::, _>>() + .with_kind(ErrorKind::Deserialization)?; - let journalctl_entries = LinesStream::new(out.lines()); + if follow { + let mut follow_cmd = gen_journalctl_command(&id); + follow_cmd.arg("-f"); + if let Some(last) = deserialized_entries.last() { + follow_cmd.arg(format!("--after-cursor={}", last.cursor)); + follow_cmd.arg("--lines=all"); + } else { + follow_cmd.arg("--lines=0"); + } + let mut child = follow_cmd.stdout(Stdio::piped()).spawn()?; + let out = + BufReader::new(child.stdout.take().ok_or_else(|| { + Error::new(eyre!("No stdout available"), crate::ErrorKind::Journald) + })?); - let deserialized_entries = journalctl_entries - .map_err(|e| Error::new(e, crate::ErrorKind::Journald)) - .and_then(|s| { - futures::future::ready( - serde_json::from_str::(&s) - .with_kind(crate::ErrorKind::Deserialization), - ) - }); + let journalctl_entries = LinesStream::new(out.lines()); - Ok(LogStream { - _child: child, - entries: deserialized_entries.boxed(), - }) + let follow_deserialized_entries = journalctl_entries + .map_err(|e| Error::new(e, crate::ErrorKind::Journald)) + .and_then(|s| { + futures::future::ready( + serde_json::from_str::(&s) + .with_kind(crate::ErrorKind::Deserialization), + ) + }); + + let entries = futures::stream::iter(deserialized_entries) + .map(Ok) + .chain(follow_deserialized_entries) + .boxed(); + Ok(LogStream { + _child: Some(child), + entries, + }) + } else { + let entries = futures::stream::iter(deserialized_entries).map(Ok).boxed(); + + Ok(LogStream { + _child: None, + entries, + }) + } +} + +fn gen_journalctl_command(id: &LogSource) -> Command { + let mut cmd = match id { + LogSource::Container(container_id) => { + let mut cmd = Command::new("lxc-attach"); + cmd.arg(format!("{}", container_id)) + .arg("--") + .arg("journalctl"); + cmd + } + _ => Command::new("journalctl"), + }; + cmd.kill_on_drop(true); 
+ + cmd.arg("--output=json"); + cmd.arg("--output-fields=MESSAGE"); + match id { + LogSource::Kernel => { + cmd.arg("-k"); + } + LogSource::Unit(id) => { + cmd.arg("-u"); + cmd.arg(id); + } + LogSource::Container(_container_id) => { + cmd.arg("-u").arg("container-runtime.service"); + } + }; + cmd } #[instrument(skip_all)] @@ -426,10 +634,12 @@ pub async fn fetch_logs( id: LogSource, limit: Option, cursor: Option, + boot: Option, before: bool, ) -> Result { let limit = limit.unwrap_or(50); - let mut stream = journalctl(id, limit, cursor.as_deref(), before, false).await?; + let mut stream = + journalctl(id, limit, cursor.as_deref(), boot.as_deref(), before, false).await?; let mut entries = Vec::with_capacity(limit); let mut start_cursor = None; @@ -469,13 +679,14 @@ pub async fn fetch_logs( } #[instrument(skip_all)] -pub async fn follow_logs( - ctx: RpcContext, +pub async fn follow_logs>( + ctx: Context, id: LogSource, limit: Option, + boot: Option, ) -> Result { let limit = limit.unwrap_or(50); - let mut stream = journalctl(id, limit, None, false, true).await?; + let mut stream = journalctl(id, limit, None, boot.as_deref(), false, true).await?; let mut start_cursor = None; let mut first_entry = None; @@ -491,24 +702,25 @@ pub async fn follow_logs( first_entry = Some(entry); } - let guid = RequestGuid::new(); - ctx.add_continuation( - guid.clone(), - RpcContinuation::ws( - Box::new(move |socket| { - ws_handler(first_entry, stream, socket) - .map(|x| match x { - Ok(_) => (), - Err(e) => { - tracing::error!("Error in log stream: {}", e); - } - }) - .boxed() - }), - Duration::from_secs(30), - ), - ) - .await; + let guid = Guid::new(); + ctx.as_ref() + .add( + guid.clone(), + RpcContinuation::ws( + Box::new(move |socket| { + ws_handler(first_entry, stream, socket) + .map(|x| match x { + Ok(_) => (), + Err(e) => { + tracing::error!("Error in log stream: {}", e); + } + }) + .boxed() + }), + Duration::from_secs(30), + ), + ) + .await; Ok(LogFollowResponse { start_cursor, 
guid }) } diff --git a/core/startos/src/lxc/dev.rs b/core/startos/src/lxc/dev.rs new file mode 100644 index 000000000..61dd8e598 --- /dev/null +++ b/core/startos/src/lxc/dev.rs @@ -0,0 +1,112 @@ +use std::ops::Deref; + +use clap::Parser; +use rpc_toolkit::{ + from_fn_async, CallRemoteHandler, Context, Empty, HandlerArgs, HandlerExt, HandlerFor, + ParentHandler, +}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::context::{CliContext, RpcContext}; +use crate::lxc::{ContainerId, LxcConfig}; +use crate::prelude::*; +use crate::rpc_continuations::Guid; + +pub fn lxc() -> ParentHandler { + ParentHandler::new() + .subcommand( + "create", + from_fn_async(create).with_call_remote::(), + ) + .subcommand( + "list", + from_fn_async(list) + .with_custom_display_fn(|_, res| { + use prettytable::*; + let mut table = table!([bc => "GUID"]); + for guid in res { + table.add_row(row![&*guid]); + } + table.printstd(); + Ok(()) + }) + .with_call_remote::(), + ) + .subcommand( + "remove", + from_fn_async(remove) + .no_display() + .with_call_remote::(), + ) + .subcommand("connect", from_fn_async(connect_rpc).no_cli()) + .subcommand("connect", from_fn_async(connect_rpc_cli).no_display()) +} + +pub async fn create(ctx: RpcContext) -> Result { + let container = ctx.lxc_manager.create(None, LxcConfig::default()).await?; + let guid = container.guid.deref().clone(); + ctx.dev.lxc.lock().await.insert(guid.clone(), container); + Ok(guid) +} + +pub async fn list(ctx: RpcContext) -> Result, Error> { + Ok(ctx.dev.lxc.lock().await.keys().cloned().collect()) +} + +#[derive(Deserialize, Serialize, Parser, TS)] +pub struct RemoveParams { + #[ts(type = "string")] + pub guid: ContainerId, +} + +pub async fn remove(ctx: RpcContext, RemoveParams { guid }: RemoveParams) -> Result<(), Error> { + if let Some(container) = ctx.dev.lxc.lock().await.remove(&guid) { + container.exit().await?; + } + Ok(()) +} + +#[derive(Deserialize, Serialize, Parser, TS)] +pub struct ConnectParams { + 
#[ts(type = "string")] + pub guid: ContainerId, +} + +pub async fn connect_rpc( + ctx: RpcContext, + ConnectParams { guid }: ConnectParams, +) -> Result { + super::connect( + &ctx, + ctx.dev.lxc.lock().await.get(&guid).ok_or_else(|| { + Error::new(eyre!("No container with guid: {guid}"), ErrorKind::NotFound) + })?, + ) + .await +} + +pub async fn connect_rpc_cli( + HandlerArgs { + context, + parent_method, + method, + params, + inherited_params, + raw_params, + }: HandlerArgs, +) -> Result<(), Error> { + let ctx = context.clone(); + let guid = CallRemoteHandler::::new(from_fn_async(connect_rpc)) + .handle_async(HandlerArgs { + context, + parent_method, + method, + params: rpc_toolkit::util::Flat(params, Empty {}), + inherited_params, + raw_params, + }) + .await?; + + super::connect_cli(&ctx, guid).await +} diff --git a/core/startos/src/lxc/mod.rs b/core/startos/src/lxc/mod.rs index 0ebefdd96..b8ce9c703 100644 --- a/core/startos/src/lxc/mod.rs +++ b/core/startos/src/lxc/mod.rs @@ -1,21 +1,17 @@ use std::collections::BTreeSet; use std::net::Ipv4Addr; -use std::ops::Deref; use std::path::Path; use std::sync::{Arc, Weak}; use std::time::Duration; -use clap::Parser; -use futures::{AsyncWriteExt, FutureExt, StreamExt}; +use clap::builder::ValueParserFactory; +use futures::{AsyncWriteExt, StreamExt}; use imbl_value::{InOMap, InternedString}; -use rpc_toolkit::yajrc::{RpcError, RpcResponse}; -use rpc_toolkit::{ - from_fn_async, AnyContext, CallRemoteHandler, GenericRpcMethod, Handler, HandlerArgs, - HandlerExt, ParentHandler, RpcRequest, -}; +use models::InvalidId; +use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{GenericRpcMethod, RpcRequest, RpcResponse}; use rustyline_async::{ReadlineEvent, SharedWriter}; use serde::{Deserialize, Serialize}; -use tokio::fs::File; use tokio::io::{AsyncBufReadExt, BufReader}; use tokio::process::Command; use tokio::sync::Mutex; @@ -23,36 +19,80 @@ use tokio::time::Instant; use ts_rs::TS; use crate::context::{CliContext, RpcContext}; 
-use crate::core::rpc_continuations::{RequestGuid, RpcContinuation}; use crate::disk::mount::filesystem::bind::Bind; use crate::disk::mount::filesystem::block_dev::BlockDev; use crate::disk::mount::filesystem::idmapped::IdMapped; use crate::disk::mount::filesystem::overlayfs::OverlayGuard; -use crate::disk::mount::filesystem::ReadWrite; -use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; +use crate::disk::mount::filesystem::{MountType, ReadOnly, ReadWrite}; +use crate::disk::mount::guard::{GenericMountGuard, MountGuard, TmpMountGuard}; use crate::disk::mount::util::unmount; use crate::prelude::*; +use crate::rpc_continuations::{Guid, RpcContinuation}; +use crate::util::clap::FromStrParser; +use crate::util::io::open_file; use crate::util::rpc_client::UnixRpcClient; use crate::util::{new_guid, Invoke}; +#[cfg(feature = "dev")] +pub mod dev; + const LXC_CONTAINER_DIR: &str = "/var/lib/lxc"; const RPC_DIR: &str = "media/startos/rpc"; // must not be absolute path pub const CONTAINER_RPC_SERVER_SOCKET: &str = "service.sock"; // must not be absolute path pub const HOST_RPC_SERVER_SOCKET: &str = "host.sock"; // must not be absolute path const CONTAINER_DHCP_TIMEOUT: Duration = Duration::from_secs(30); -pub struct LxcManager { - containers: Mutex>>, +#[derive( + Clone, Debug, Serialize, Deserialize, Default, PartialEq, Eq, PartialOrd, Ord, Hash, TS, +)] +#[ts(type = "string")] +pub struct ContainerId(InternedString); +impl std::ops::Deref for ContainerId { + type Target = str; + fn deref(&self) -> &Self::Target { + &self.0 + } } +impl std::fmt::Display for ContainerId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", &*self.0) + } +} +impl TryFrom<&str> for ContainerId { + type Error = InvalidId; + fn try_from(value: &str) -> Result { + Ok(ContainerId(InternedString::intern(value))) + } +} +impl std::str::FromStr for ContainerId { + type Err = InvalidId; + fn from_str(s: &str) -> Result { + Self::try_from(s) + } +} 
+impl ValueParserFactory for ContainerId { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} + +#[derive(Default)] +pub struct LxcManager { + containers: Mutex>>, +} + impl LxcManager { pub fn new() -> Self { - Self { - containers: Default::default(), - } + Self::default() } - pub async fn create(self: &Arc, config: LxcConfig) -> Result { - let container = LxcContainer::new(self, config).await?; + pub async fn create( + self: &Arc, + log_mount: Option<&Path>, + config: LxcConfig, + ) -> Result { + let container = LxcContainer::new(self, log_mount, config).await?; let mut guard = self.containers.lock().await; *guard = std::mem::take(&mut *guard) .into_iter() @@ -69,7 +109,7 @@ impl LxcManager { .await .iter() .filter_map(|g| g.upgrade()) - .map(|g| (&*g).clone()), + .map(|g| (*g).clone()), ); for container in String::from_utf8( Command::new("lxc-ls") @@ -80,10 +120,14 @@ impl LxcManager { .lines() .map(|s| s.trim()) { - if !expected.contains(container) { + if !expected.contains(&ContainerId::try_from(container)?) 
{ let rootfs_path = Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs"); if tokio::fs::metadata(&rootfs_path).await.is_ok() { - unmount(Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs")).await?; + unmount( + Path::new(LXC_CONTAINER_DIR).join(container).join("rootfs"), + true, + ) + .await?; if tokio_stream::wrappers::ReadDirStream::new( tokio::fs::read_dir(&rootfs_path).await?, ) @@ -111,15 +155,21 @@ impl LxcManager { pub struct LxcContainer { manager: Weak, - rootfs: OverlayGuard, - guid: Arc, + rootfs: OverlayGuard, + pub guid: Arc, rpc_bind: TmpMountGuard, + log_mount: Option, config: LxcConfig, exited: bool, } impl LxcContainer { - async fn new(manager: &Arc, config: LxcConfig) -> Result { + async fn new( + manager: &Arc, + log_mount: Option<&Path>, + config: LxcConfig, + ) -> Result { let guid = new_guid(); + let machine_id = hex::encode(rand::random::<[u8; 16]>()); let container_dir = Path::new(LXC_CONTAINER_DIR).join(&*guid); tokio::fs::create_dir_all(&container_dir).await?; tokio::fs::write( @@ -136,15 +186,20 @@ impl LxcContainer { .invoke(ErrorKind::Filesystem) .await?; let rootfs = OverlayGuard::mount( - &IdMapped::new( - BlockDev::new("/usr/lib/startos/container-runtime/rootfs.squashfs"), - 0, - 100000, - 65536, - ), + TmpMountGuard::mount( + &IdMapped::new( + BlockDev::new("/usr/lib/startos/container-runtime/rootfs.squashfs"), + 0, + 100000, + 65536, + ), + ReadOnly, + ) + .await?, &rootfs_dir, ) .await?; + tokio::fs::write(rootfs_dir.join("etc/machine-id"), format!("{machine_id}\n")).await?; tokio::fs::write(rootfs_dir.join("etc/hostname"), format!("{guid}\n")).await?; Command::new("sed") .arg("-i") @@ -166,6 +221,20 @@ impl LxcContainer { .arg(rpc_bind.path()) .invoke(ErrorKind::Filesystem) .await?; + let log_mount = if let Some(path) = log_mount { + let log_mount_point = rootfs_dir.join("var/log/journal").join(machine_id); + let log_mount = + MountGuard::mount(&Bind::new(path), &log_mount_point, MountType::ReadWrite).await?; + 
Command::new("chown") + // This was needed as 100999 because the group id of journald + .arg("100000:100999") + .arg(&log_mount_point) + .invoke(crate::ErrorKind::Filesystem) + .await?; + Some(log_mount) + } else { + None + }; Command::new("lxc-start") .arg("-d") .arg("--name") @@ -175,10 +244,11 @@ impl LxcContainer { Ok(Self { manager: Arc::downgrade(manager), rootfs, - guid: Arc::new(guid), + guid: Arc::new(ContainerId::try_from(&*guid)?), rpc_bind, config, exited: false, + log_mount, }) } @@ -188,11 +258,12 @@ impl LxcContainer { pub async fn ip(&self) -> Result { let start = Instant::now(); + let guid: &str = &self.guid; loop { let output = String::from_utf8( Command::new("lxc-info") .arg("--name") - .arg(&*self.guid) + .arg(guid) .arg("-iH") .invoke(ErrorKind::Docker) .await?, @@ -217,28 +288,27 @@ impl LxcContainer { #[instrument(skip_all)] pub async fn exit(mut self) -> Result<(), Error> { + Command::new("lxc-stop") + .arg("--name") + .arg(&**self.guid) + .invoke(ErrorKind::Lxc) + .await?; self.rpc_bind.take().unmount().await?; + if let Some(log_mount) = self.log_mount.take() { + log_mount.unmount(true).await?; + } self.rootfs.take().unmount(true).await?; let rootfs_path = self.rootfs_dir(); - let err_path = rootfs_path.join("var/log/containerRuntime.err"); - if tokio::fs::metadata(&err_path).await.is_ok() { - let mut lines = BufReader::new(File::open(&err_path).await?).lines(); - while let Some(line) = lines.next_line().await? { - let container = &**self.guid; - tracing::error!(container, "{}", line); - } - } - if tokio::fs::metadata(&rootfs_path).await.is_ok() { - if tokio_stream::wrappers::ReadDirStream::new(tokio::fs::read_dir(&rootfs_path).await?) + if tokio::fs::metadata(&rootfs_path).await.is_ok() + && tokio_stream::wrappers::ReadDirStream::new(tokio::fs::read_dir(&rootfs_path).await?) 
.count() .await > 0 - { - return Err(Error::new( - eyre!("rootfs is not empty, refusing to delete"), - ErrorKind::InvalidRequest, - )); - } + { + return Err(Error::new( + eyre!("rootfs is not empty, refusing to delete"), + ErrorKind::InvalidRequest, + )); } Command::new("lxc-destroy") .arg("--force") @@ -281,7 +351,7 @@ impl Drop for LxcContainer { if let Err(e) = async { let err_path = rootfs.path().join("var/log/containerRuntime.err"); if tokio::fs::metadata(&err_path).await.is_ok() { - let mut lines = BufReader::new(File::open(&err_path).await?).lines(); + let mut lines = BufReader::new(open_file(&err_path).await?).lines(); while let Some(line) = lines.next_line().await? { let container = &**guid; tracing::error!(container, "{}", line); @@ -310,90 +380,16 @@ impl Drop for LxcContainer { #[derive(Default, Serialize)] pub struct LxcConfig {} - -pub fn lxc() -> ParentHandler { - ParentHandler::new() - .subcommand( - "create", - from_fn_async(create).with_remote_cli::(), - ) - .subcommand( - "list", - from_fn_async(list) - .with_custom_display_fn::(|_, res| { - use prettytable::*; - let mut table = table!([bc => "GUID"]); - for guid in res { - table.add_row(row![&*guid]); - } - table.printstd(); - Ok(()) - }) - .with_remote_cli::(), - ) - .subcommand( - "remove", - from_fn_async(remove) - .no_display() - .with_remote_cli::(), - ) - .subcommand("connect", from_fn_async(connect_rpc).no_cli()) - .subcommand("connect", from_fn_async(connect_rpc_cli).no_display()) -} - -pub async fn create(ctx: RpcContext) -> Result { - let container = ctx.lxc_manager.create(LxcConfig::default()).await?; - let guid = container.guid.deref().clone(); - ctx.dev.lxc.lock().await.insert(guid.clone(), container); - Ok(guid) -} - -pub async fn list(ctx: RpcContext) -> Result, Error> { - Ok(ctx.dev.lxc.lock().await.keys().cloned().collect()) -} - -#[derive(Deserialize, Serialize, Parser, TS)] -pub struct RemoveParams { - #[ts(type = "string")] - pub guid: InternedString, -} - -pub async fn 
remove(ctx: RpcContext, RemoveParams { guid }: RemoveParams) -> Result<(), Error> { - if let Some(container) = ctx.dev.lxc.lock().await.remove(&guid) { - container.exit().await?; - } - Ok(()) -} - -#[derive(Deserialize, Serialize, Parser, TS)] -pub struct ConnectParams { - #[ts(type = "string")] - pub guid: InternedString, -} - -pub async fn connect_rpc( - ctx: RpcContext, - ConnectParams { guid }: ConnectParams, -) -> Result { - connect( - &ctx, - ctx.dev.lxc.lock().await.get(&guid).ok_or_else(|| { - Error::new(eyre!("No container with guid: {guid}"), ErrorKind::NotFound) - })?, - ) - .await -} - -pub async fn connect(ctx: &RpcContext, container: &LxcContainer) -> Result { +pub async fn connect(ctx: &RpcContext, container: &LxcContainer) -> Result { use axum::extract::ws::Message; let rpc = container.connect_rpc(Some(Duration::from_secs(30))).await?; - let guid = RequestGuid::new(); - ctx.add_continuation( - guid.clone(), - RpcContinuation::ws( - Box::new(|mut ws| { - async move { + let guid = Guid::new(); + ctx.rpc_continuations + .add( + guid.clone(), + RpcContinuation::ws( + |mut ws| async move { if let Err(e) = async { loop { match ws.next().await { @@ -413,11 +409,8 @@ pub async fn connect(ctx: &RpcContext, container: &LxcContainer) -> Result { - id, - result, - }) - .with_kind(ErrorKind::Serialization)?, + serde_json::to_string(&RpcResponse { id, result }) + .with_kind(ErrorKind::Serialization)?, )) .await .with_kind(ErrorKind::Network)?; @@ -435,17 +428,15 @@ pub async fn connect(ctx: &RpcContext, container: &LxcContainer) -> Result Result<(), Error> { +pub async fn connect_cli(ctx: &CliContext, guid: Guid) -> Result<(), Error> { use futures::SinkExt; use tokio_tungstenite::tungstenite::Message; @@ -502,7 +493,7 @@ pub async fn connect_cli(ctx: &CliContext, guid: RequestGuid) -> Result<(), Erro if let Some((method, rest)) = command.split_first() { let mut params = InOMap::new(); for arg in rest { - if let Some((name, value)) = arg.split_once("=") { + if let 
Some((name, value)) = arg.split_once('=') { params.insert(InternedString::intern(name), if value.is_empty() { Value::Null } else if let Ok(v) = serde_json::from_str(value) { @@ -553,14 +544,3 @@ pub async fn connect_cli(ctx: &CliContext, guid: RequestGuid) -> Result<(), Erro Ok(()) } - -pub async fn connect_rpc_cli( - handle_args: HandlerArgs, -) -> Result<(), Error> { - let ctx = handle_args.context.clone(); - let guid = CallRemoteHandler::::new(from_fn_async(connect_rpc)) - .handle_async(handle_args) - .await?; - - connect_cli(&ctx, guid).await -} diff --git a/core/startos/src/middleware/auth.rs b/core/startos/src/middleware/auth.rs index 30ae56744..9b04afb38 100644 --- a/core/startos/src/middleware/auth.rs +++ b/core/startos/src/middleware/auth.rs @@ -23,7 +23,7 @@ use tokio::sync::Mutex; use crate::context::RpcContext; use crate::prelude::*; -pub const LOCAL_AUTH_COOKIE_PATH: &str = "/run/embassy/rpc.authcookie"; +pub const LOCAL_AUTH_COOKIE_PATH: &str = "/run/startos/rpc.authcookie"; #[derive(Deserialize, Serialize)] #[serde(rename_all = "camelCase")] @@ -48,19 +48,14 @@ impl HasLoggedOutSessions { .into_iter() .map(|s| s.as_logout_session_id()) .collect(); - ctx.open_authed_websockets - .lock() - .await - .retain(|session, sockets| { - if to_log_out.contains(session.hashed()) { - for socket in std::mem::take(sockets) { - let _ = socket.send(()); - } - false - } else { - true - } - }); + for sid in &to_log_out { + ctx.open_authed_continuations.kill(sid) + } + ctx.ephemeral_sessions.mutate(|s| { + for sid in &to_log_out { + s.0.remove(sid); + } + }); ctx.db .mutate(|db| { let sessions = db.as_private_mut().as_sessions_mut(); @@ -120,20 +115,29 @@ impl HasValidSession { ctx: &RpcContext, ) -> Result { let session_hash = session_token.hashed(); - ctx.db - .mutate(|db| { - db.as_private_mut() - .as_sessions_mut() - .as_idx_mut(session_hash) - .ok_or_else(|| { - Error::new(eyre!("UNAUTHORIZED"), crate::ErrorKind::Authorization) - })? 
- .mutate(|s| { - s.last_active = Utc::now(); - Ok(()) - }) - }) - .await?; + if !ctx.ephemeral_sessions.mutate(|s| { + if let Some(session) = s.0.get_mut(session_hash) { + session.last_active = Utc::now(); + true + } else { + false + } + }) { + ctx.db + .mutate(|db| { + db.as_private_mut() + .as_sessions_mut() + .as_idx_mut(session_hash) + .ok_or_else(|| { + Error::new(eyre!("UNAUTHORIZED"), crate::ErrorKind::Authorization) + })? + .mutate(|s| { + s.last_active = Utc::now(); + Ok(()) + }) + }) + .await?; + } Ok(Self(SessionType::Session(session_token))) } @@ -161,7 +165,7 @@ impl HashSessionToken { pub fn new() -> Self { Self::from_token(InternedString::intern( base32::encode( - base32::Alphabet::RFC4648 { padding: false }, + base32::Alphabet::Rfc4648 { padding: false }, &rand::random::<[u8; 16]>(), ) .to_lowercase(), @@ -210,7 +214,7 @@ impl HashSessionToken { hasher.update(token.as_bytes()); InternedString::intern( base32::encode( - base32::Alphabet::RFC4648 { padding: false }, + base32::Alphabet::Rfc4648 { padding: false }, hasher.finalize().as_slice(), ) .to_lowercase(), @@ -245,7 +249,6 @@ impl Borrow for HashSessionToken { } #[derive(Deserialize)] -#[serde(rename_all = "camelCase")] pub struct Metadata { #[serde(default = "const_true")] authenticated: bool, @@ -274,7 +277,6 @@ impl Auth { } } } -#[async_trait::async_trait] impl Middleware for Auth { type Metadata = Metadata; async fn process_http_request( @@ -306,7 +308,7 @@ impl Middleware for Auth { }); } if let Some(user_agent) = self.user_agent.as_ref().and_then(|h| h.to_str().ok()) { - request.params["user-agent"] = Value::String(Arc::new(user_agent.to_owned())) + request.params["__auth_userAgent"] = Value::String(Arc::new(user_agent.to_owned())) // TODO: will this panic? 
} } else if metadata.authenticated { @@ -318,7 +320,7 @@ impl Middleware for Auth { }) } Ok(HasValidSession(SessionType::Session(s))) if metadata.get_session => { - request.params["session"] = + request.params["__auth_session"] = Value::String(Arc::new(s.hashed().deref().to_owned())); // TODO: will this panic? } diff --git a/core/startos/src/middleware/cors.rs b/core/startos/src/middleware/cors.rs index 60a472cdd..a8c406a8a 100644 --- a/core/startos/src/middleware/cors.rs +++ b/core/startos/src/middleware/cors.rs @@ -44,8 +44,7 @@ impl Cors { } } } -#[async_trait::async_trait] -impl Middleware for Cors { +impl Middleware for Cors { type Metadata = Empty; async fn process_http_request( &mut self, diff --git a/core/startos/src/middleware/db.rs b/core/startos/src/middleware/db.rs index b8cdaa231..4e5f0e037 100644 --- a/core/startos/src/middleware/db.rs +++ b/core/startos/src/middleware/db.rs @@ -7,7 +7,6 @@ use serde::Deserialize; use crate::context::RpcContext; #[derive(Deserialize)] -#[serde(rename_all = "camelCase")] pub struct Metadata { #[serde(default)] sync_db: bool, @@ -23,7 +22,6 @@ impl SyncDb { } } -#[async_trait::async_trait] impl Middleware for SyncDb { type Metadata = Metadata; async fn process_rpc_request( @@ -38,10 +36,11 @@ impl Middleware for SyncDb { async fn process_http_response(&mut self, context: &RpcContext, response: &mut Response) { if let Err(e) = async { if self.sync_db { - response.headers_mut().append( - "X-Patch-Sequence", - HeaderValue::from_str(&context.db.sequence().await.to_string())?, - ); + let id = context.db.sequence().await; + response + .headers_mut() + .append("X-Patch-Sequence", HeaderValue::from_str(&id.to_string())?); + context.sync_db.send_replace(id); } Ok::<_, InvalidHeaderValue>(()) } diff --git a/core/startos/src/middleware/diagnostic.rs b/core/startos/src/middleware/diagnostic.rs deleted file mode 100644 index f779d632f..000000000 --- a/core/startos/src/middleware/diagnostic.rs +++ /dev/null @@ -1,43 +0,0 @@ -use 
rpc_toolkit::yajrc::RpcMethod; -use rpc_toolkit::{Empty, Middleware, RpcRequest, RpcResponse}; - -use crate::context::DiagnosticContext; -use crate::prelude::*; - -#[derive(Clone)] -pub struct DiagnosticMode { - method: Option, -} -impl DiagnosticMode { - pub fn new() -> Self { - Self { method: None } - } -} - -#[async_trait::async_trait] -impl Middleware for DiagnosticMode { - type Metadata = Empty; - async fn process_rpc_request( - &mut self, - _: &DiagnosticContext, - _: Self::Metadata, - request: &mut RpcRequest, - ) -> Result<(), RpcResponse> { - self.method = Some(request.method.as_str().to_owned()); - Ok(()) - } - async fn process_rpc_response(&mut self, _: &DiagnosticContext, response: &mut RpcResponse) { - if let Err(e) = &mut response.result { - if e.code == -32601 { - *e = Error::new( - eyre!( - "{} is not available on the Diagnostic API", - self.method.as_ref().map(|s| s.as_str()).unwrap_or_default() - ), - crate::ErrorKind::DiagnosticMode, - ) - .into(); - } - } - } -} diff --git a/core/startos/src/middleware/mod.rs b/core/startos/src/middleware/mod.rs index 3af0cb5a4..3438dc3db 100644 --- a/core/startos/src/middleware/mod.rs +++ b/core/startos/src/middleware/mod.rs @@ -1,4 +1,3 @@ pub mod auth; pub mod cors; pub mod db; -pub mod diagnostic; diff --git a/core/startos/src/net/dhcp.rs b/core/startos/src/net/dhcp.rs index b0d538e81..ffcb9774b 100644 --- a/core/startos/src/net/dhcp.rs +++ b/core/startos/src/net/dhcp.rs @@ -3,7 +3,7 @@ use std::net::IpAddr; use clap::Parser; use futures::TryStreamExt; -use rpc_toolkit::{from_fn_async, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tokio::sync::RwLock; use ts_rs::TS; @@ -53,12 +53,12 @@ pub async fn init_ips() -> Result, Error> { } // #[command(subcommands(update))] -pub fn dhcp() -> ParentHandler { +pub fn dhcp() -> ParentHandler { ParentHandler::new().subcommand( "update", from_fn_async::<_, _, (), Error, 
(RpcContext, UpdateParams)>(update) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } #[derive(Deserialize, Serialize, Parser, TS)] diff --git a/core/startos/src/net/dns.rs b/core/startos/src/net/dns.rs index ba69b6c16..090e845b0 100644 --- a/core/startos/src/net/dns.rs +++ b/core/startos/src/net/dns.rs @@ -34,7 +34,7 @@ struct Resolver { impl Resolver { async fn resolve(&self, name: &Name) -> Option> { match name.iter().next_back() { - Some(b"embassy") => { + Some(b"embassy") | Some(b"startos") => { if let Some(pkg) = name.iter().rev().skip(1).next() { if let Some(ip) = self.services.read().await.get(&Some( std::str::from_utf8(pkg) diff --git a/core/startos/src/net/host/address.rs b/core/startos/src/net/host/address.rs index cb3b485f6..9b16441ce 100644 --- a/core/startos/src/net/host/address.rs +++ b/core/startos/src/net/host/address.rs @@ -1,8 +1,14 @@ +use std::fmt; +use std::str::FromStr; + +use imbl_value::InternedString; use serde::{Deserialize, Serialize}; use torut::onion::OnionAddressV3; use ts_rs::TS; -#[derive(Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, TS)] +use crate::prelude::*; + +#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, TS)] #[serde(rename_all = "camelCase")] #[serde(tag = "kind")] #[ts(export)] @@ -11,4 +17,32 @@ pub enum HostAddress { #[ts(type = "string")] address: OnionAddressV3, }, + Domain { + #[ts(type = "string")] + address: InternedString, + }, +} + +impl FromStr for HostAddress { + type Err = Error; + fn from_str(s: &str) -> Result { + if let Some(addr) = s.strip_suffix(".onion") { + Ok(HostAddress::Onion { + address: addr + .parse::() + .with_kind(ErrorKind::ParseUrl)?, + }) + } else { + Ok(HostAddress::Domain { address: s.into() }) + } + } +} + +impl fmt::Display for HostAddress { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match self { + Self::Onion { address } => write!(f, "{address}"), + Self::Domain { address } => write!(f, "{address}"), + } 
+ } } diff --git a/core/startos/src/net/host/binding.rs b/core/startos/src/net/host/binding.rs index 8301821f5..76dd04059 100644 --- a/core/startos/src/net/host/binding.rs +++ b/core/startos/src/net/host/binding.rs @@ -1,4 +1,3 @@ -use imbl_value::InternedString; use serde::{Deserialize, Serialize}; use ts_rs::TS; @@ -11,17 +10,31 @@ use crate::prelude::*; #[ts(export)] pub struct BindInfo { pub options: BindOptions, - pub assigned_lan_port: Option, + pub lan: LanInfo, +} +#[derive(Clone, Copy, Debug, Deserialize, Serialize, TS, PartialEq, Eq, PartialOrd, Ord)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct LanInfo { + pub assigned_port: Option, + pub assigned_ssl_port: Option, } impl BindInfo { pub fn new(available_ports: &mut AvailablePorts, options: BindOptions) -> Result { - let mut assigned_lan_port = None; - if options.add_ssl.is_some() || options.secure.is_some() { - assigned_lan_port = Some(available_ports.alloc()?); + let mut assigned_port = None; + let mut assigned_ssl_port = None; + if options.secure.is_some() { + assigned_port = Some(available_ports.alloc()?); + } + if options.add_ssl.is_some() { + assigned_ssl_port = Some(available_ports.alloc()?); } Ok(Self { options, - assigned_lan_port, + lan: LanInfo { + assigned_port, + assigned_ssl_port, + }, }) } pub fn update( @@ -29,29 +42,38 @@ impl BindInfo { available_ports: &mut AvailablePorts, options: BindOptions, ) -> Result { - let Self { - mut assigned_lan_port, - .. - } = self; - if options.add_ssl.is_some() || options.secure.is_some() { - assigned_lan_port = if let Some(port) = assigned_lan_port.take() { + let Self { mut lan, .. } = self; + if options + .secure + .map_or(false, |s| !(s.ssl && options.add_ssl.is_some())) + // doesn't make sense to have 2 listening ports, both with ssl + { + lan.assigned_port = if let Some(port) = lan.assigned_port.take() { Some(port) } else { Some(available_ports.alloc()?) 
}; } else { - if let Some(port) = assigned_lan_port.take() { + if let Some(port) = lan.assigned_port.take() { available_ports.free([port]); } } - Ok(Self { - options, - assigned_lan_port, - }) + if options.add_ssl.is_some() { + lan.assigned_ssl_port = if let Some(port) = lan.assigned_ssl_port.take() { + Some(port) + } else { + Some(available_ports.alloc()?) + }; + } else { + if let Some(port) = lan.assigned_ssl_port.take() { + available_ports.free([port]); + } + } + Ok(Self { options, lan }) } } -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize, TS)] #[ts(export)] #[serde(rename_all = "camelCase")] pub struct Security { @@ -62,8 +84,6 @@ pub struct Security { #[serde(rename_all = "camelCase")] #[ts(export)] pub struct BindOptions { - #[ts(type = "string | null")] - pub scheme: Option, pub preferred_external_port: u16, pub add_ssl: Option, pub secure: Option, @@ -73,11 +93,8 @@ pub struct BindOptions { #[serde(rename_all = "camelCase")] #[ts(export)] pub struct AddSslOptions { - #[ts(type = "string | null")] - pub scheme: Option, pub preferred_external_port: u16, // #[serde(default)] // pub add_x_forwarded_headers: bool, // TODO - #[serde(default)] - pub alpn: AlpnInfo, + pub alpn: Option, } diff --git a/core/startos/src/net/host/mod.rs b/core/startos/src/net/host/mod.rs index 2d50df15a..175fe3e83 100644 --- a/core/startos/src/net/host/mod.rs +++ b/core/startos/src/net/host/mod.rs @@ -1,13 +1,15 @@ use std::collections::{BTreeMap, BTreeSet}; use imbl_value::InternedString; -use models::HostId; +use models::{HostId, PackageId}; use serde::{Deserialize, Serialize}; use ts_rs::TS; +use crate::db::model::DatabaseModel; use crate::net::forward::AvailablePorts; use crate::net::host::address::HostAddress; use crate::net::host::binding::{BindInfo, BindOptions}; +use crate::net::service_interface::HostnameInfo; use crate::prelude::*; pub mod address; @@ -21,7 +23,8 @@ pub struct Host { pub 
kind: HostKind, pub bindings: BTreeMap, pub addresses: BTreeSet, - pub primary: Option, + /// COMPUTED: NetService::update + pub hostname_info: BTreeMap>, // internal port -> Hostnames } impl AsRef for Host { fn as_ref(&self) -> &Host { @@ -34,9 +37,13 @@ impl Host { kind, bindings: BTreeMap::new(), addresses: BTreeSet::new(), - primary: None, + hostname_info: BTreeMap::new(), } } + pub fn addresses(&self) -> impl Iterator { + // TODO: handle primary + self.addresses.iter() + } } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] @@ -51,9 +58,9 @@ pub enum HostKind { #[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] #[model = "Model"] #[ts(export)] -pub struct HostInfo(BTreeMap); +pub struct Hosts(pub BTreeMap); -impl Map for HostInfo { +impl Map for Hosts { type Key = HostId; type Value = Host; fn key_str(key: &Self::Key) -> Result, Error> { @@ -64,25 +71,66 @@ impl Map for HostInfo { } } -impl Model { +pub fn host_for<'a>( + db: &'a mut DatabaseModel, + package_id: &PackageId, + host_id: &HostId, + host_kind: HostKind, +) -> Result<&'a mut Model, Error> { + fn host_info<'a>( + db: &'a mut DatabaseModel, + package_id: &PackageId, + ) -> Result<&'a mut Model, Error> { + Ok::<_, Error>( + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(package_id) + .or_not_found(package_id)? + .as_hosts_mut(), + ) + } + let tor_key = if host_info(db, package_id)?.as_idx(host_id).is_none() { + Some( + db.as_private_mut() + .as_key_store_mut() + .as_onion_mut() + .new_key()?, + ) + } else { + None + }; + host_info(db, package_id)?.upsert(host_id, || { + let mut h = Host::new(host_kind); + h.addresses.insert(HostAddress::Onion { + address: tor_key + .or_not_found("generated tor key")? 
+ .public() + .get_onion_address(), + }); + Ok(h) + }) +} + +impl Model { + pub fn set_kind(&mut self, kind: HostKind) -> Result<(), Error> { + match (self.as_kind().de()?, kind) { + (HostKind::Multi, HostKind::Multi) => Ok(()), + } + } pub fn add_binding( &mut self, available_ports: &mut AvailablePorts, - kind: HostKind, - id: &HostId, internal_port: u16, options: BindOptions, ) -> Result<(), Error> { - self.upsert(id, || Host::new(kind))? - .as_bindings_mut() - .mutate(|b| { - let info = if let Some(info) = b.remove(&internal_port) { - info.update(available_ports, options)? - } else { - BindInfo::new(available_ports, options)? - }; - b.insert(internal_port, info); - Ok(()) - }) // TODO: handle host kind change + self.as_bindings_mut().mutate(|b| { + let info = if let Some(info) = b.remove(&internal_port) { + info.update(available_ports, options)? + } else { + BindInfo::new(available_ports, options)? + }; + b.insert(internal_port, info); + Ok(()) + }) } } diff --git a/core/startos/src/net/mod.rs b/core/startos/src/net/mod.rs index aaf019e66..e55da4206 100644 --- a/core/startos/src/net/mod.rs +++ b/core/startos/src/net/mod.rs @@ -1,4 +1,4 @@ -use rpc_toolkit::ParentHandler; +use rpc_toolkit::{Context, ParentHandler}; pub mod dhcp; pub mod dns; @@ -18,8 +18,8 @@ pub mod wifi; pub const PACKAGE_CERT_PATH: &str = "/var/lib/embassy/ssl"; -pub fn net() -> ParentHandler { +pub fn net() -> ParentHandler { ParentHandler::new() - .subcommand("tor", tor::tor()) - .subcommand("dhcp", dhcp::dhcp()) + .subcommand("tor", tor::tor::()) + .subcommand("dhcp", dhcp::dhcp::()) } diff --git a/core/startos/src/net/net_controller.rs b/core/startos/src/net/net_controller.rs index a4c9ea507..b7a8022b4 100644 --- a/core/startos/src/net/net_controller.rs +++ b/core/startos/src/net/net_controller.rs @@ -4,41 +4,39 @@ use std::sync::{Arc, Weak}; use color_eyre::eyre::eyre; use imbl::OrdMap; -use lazy_format::lazy_format; +use imbl_value::InternedString; use models::{HostId, OptionExt, 
PackageId}; -use patch_db::PatchDb; use torut::onion::{OnionAddressV3, TorSecretKeyV3}; use tracing::instrument; -use crate::db::prelude::PatchDbExt; +use crate::db::model::Database; use crate::error::ErrorCollection; use crate::hostname::Hostname; use crate::net::dns::DnsController; use crate::net::forward::LanPortForwardController; use crate::net::host::address::HostAddress; -use crate::net::host::binding::{AddSslOptions, BindOptions}; -use crate::net::host::{Host, HostKind}; +use crate::net::host::binding::{AddSslOptions, BindOptions, LanInfo}; +use crate::net::host::{host_for, Host, HostKind}; +use crate::net::service_interface::{HostnameInfo, IpHostname, OnionHostname}; use crate::net::tor::TorController; use crate::net::vhost::{AlpnInfo, VHostController}; +use crate::prelude::*; use crate::util::serde::MaybeUtf8String; -use crate::{Error, HOST_IP}; +use crate::HOST_IP; -pub struct NetController { - db: PatchDb, - pub(super) tor: TorController, - pub(super) vhost: VHostController, - pub(super) dns: DnsController, - pub(super) forward: LanPortForwardController, - pub(super) os_bindings: Vec>, +pub struct PreInitNetController { + pub db: TypedPatchDb, + tor: TorController, + vhost: VHostController, + os_bindings: Vec>, + server_hostnames: Vec>, } - -impl NetController { +impl PreInitNetController { #[instrument(skip_all)] pub async fn init( - db: PatchDb, + db: TypedPatchDb, tor_control: SocketAddr, tor_socks: SocketAddr, - dns_bind: &[SocketAddr], hostname: &Hostname, os_tor_key: TorSecretKeyV3, ) -> Result { @@ -46,9 +44,8 @@ impl NetController { db: db.clone(), tor: TorController::new(tor_control, tor_socks), vhost: VHostController::new(db), - dns: DnsController::init(dns_bind).await?, - forward: LanPortForwardController::new(), os_bindings: Vec::new(), + server_hostnames: Vec::new(), }; res.add_os_bindings(hostname, os_tor_key).await?; Ok(res) @@ -64,64 +61,34 @@ impl NetController { MaybeUtf8String("h2".into()), ])); - // Internal DNS - self.vhost - .add( - 
Some("embassy".into()), - 443, - ([127, 0, 0, 1], 80).into(), - alpn.clone(), - ) - .await?; - self.os_bindings - .push(self.dns.add(None, HOST_IP.into()).await?); + self.server_hostnames = vec![ + // LAN IP + None, + // Internal DNS + Some("embassy".into()), + Some("startos".into()), + // localhost + Some("localhost".into()), + Some(hostname.no_dot_host_name()), + // LAN mDNS + Some(hostname.local_domain_name()), + ]; - // LAN IP - self.os_bindings.push( - self.vhost - .add(None, 443, ([127, 0, 0, 1], 80).into(), alpn.clone()) - .await?, - ); - - // localhost - self.os_bindings.push( - self.vhost - .add( - Some("localhost".into()), - 443, - ([127, 0, 0, 1], 80).into(), - alpn.clone(), - ) - .await?, - ); - self.os_bindings.push( - self.vhost - .add( - Some(hostname.no_dot_host_name()), - 443, - ([127, 0, 0, 1], 80).into(), - alpn.clone(), - ) - .await?, - ); - - // LAN mDNS - self.os_bindings.push( - self.vhost - .add( - Some(hostname.local_domain_name()), - 443, - ([127, 0, 0, 1], 80).into(), - alpn.clone(), - ) - .await?, - ); + for hostname in self.server_hostnames.iter().cloned() { + self.os_bindings.push( + self.vhost + .add(hostname, 443, ([127, 0, 0, 1], 80).into(), alpn.clone()) + .await?, + ); + } // Tor self.os_bindings.push( self.vhost .add( - Some(tor_key.public().get_onion_address().to_string()), + Some(InternedString::from_display( + &tor_key.public().get_onion_address(), + )), 443, ([127, 0, 0, 1], 80).into(), alpn.clone(), @@ -142,6 +109,42 @@ impl NetController { Ok(()) } +} + +pub struct NetController { + db: TypedPatchDb, + pub(super) tor: TorController, + pub(super) vhost: VHostController, + pub(super) dns: DnsController, + pub(super) forward: LanPortForwardController, + pub(super) os_bindings: Vec>, + pub(super) server_hostnames: Vec>, +} + +impl NetController { + pub async fn init( + PreInitNetController { + db, + tor, + vhost, + os_bindings, + server_hostnames, + }: PreInitNetController, + dns_bind: &[SocketAddr], + ) -> Result { + let mut 
res = Self { + db, + tor, + vhost, + dns: DnsController::init(dns_bind).await?, + forward: LanPortForwardController::new(), + os_bindings, + server_hostnames, + }; + res.os_bindings + .push(res.dns.add(None, HOST_IP.into()).await?); + Ok(res) + } #[instrument(skip_all)] pub async fn create_service( @@ -162,9 +165,17 @@ impl NetController { } } -#[derive(Default)] +#[derive(Default, Debug)] struct HostBinds { - lan: BTreeMap, Arc<()>)>, + lan: BTreeMap< + u16, + ( + LanInfo, + Option, + BTreeSet, + Vec>, + ), + >, tor: BTreeMap, Vec>)>, } @@ -193,190 +204,316 @@ impl NetService { internal_port: u16, options: BindOptions, ) -> Result<(), Error> { - let id_ref = &id; + dbg!("bind", &kind, &id, internal_port, &options); let pkg_id = &self.id; let host = self .net_controller()? .db - .mutate(|d| { - let mut ports = d.as_private().as_available_ports().de()?; - let hosts = d - .as_public_mut() - .as_package_data_mut() - .as_idx_mut(pkg_id) - .or_not_found(pkg_id)? - .as_hosts_mut(); - hosts.add_binding(&mut ports, kind, &id, internal_port, options)?; - let host = hosts - .as_idx(&id) - .or_not_found(lazy_format!("Host {id_ref} for {pkg_id}"))? 
- .de()?; - d.as_private_mut().as_available_ports_mut().ser(&ports)?; + .mutate(|db| { + let mut ports = db.as_private().as_available_ports().de()?; + let host = host_for(db, pkg_id, &id, kind)?; + host.add_binding(&mut ports, internal_port, options)?; + let host = host.de()?; + db.as_private_mut().as_available_ports_mut().ser(&ports)?; Ok(host) }) .await?; self.update(id, host).await } - async fn update(&mut self, id: HostId, host: Host) -> Result<(), Error> { + pub async fn clear_bindings(&mut self) -> Result<(), Error> { let ctrl = self.net_controller()?; - let binds = { - if !self.binds.contains_key(&id) { - self.binds.insert(id.clone(), Default::default()); - } - self.binds.get_mut(&id).unwrap() - }; - if true - // TODO: if should listen lan - { - for (port, bind) in &host.bindings { - let old_lan_bind = binds.lan.remove(port); - let old_lan_port = old_lan_bind.as_ref().map(|(external, _, _)| *external); - let lan_bind = old_lan_bind.filter(|(external, ssl, _)| { - ssl == &bind.options.add_ssl - && bind.assigned_lan_port.as_ref() == Some(external) - }); // only keep existing binding if relevant details match - if let Some(external) = bind.assigned_lan_port { - let new_lan_bind = if let Some(b) = lan_bind { - b - } else { - if let Some(ssl) = &bind.options.add_ssl { - let rc = ctrl - .vhost - .add( - None, - external, - (self.ip, *port).into(), - if bind.options.secure.as_ref().map_or(false, |s| s.ssl) { - Ok(()) - } else { - Err(ssl.alpn.clone()) - }, - ) - .await?; - (*port, Some(ssl.clone()), rc) - } else { - let rc = ctrl.forward.add(external, (self.ip, *port).into()).await?; - (*port, None, rc) - } - }; - binds.lan.insert(*port, new_lan_bind); + let mut errors = ErrorCollection::new(); + for (_, binds) in std::mem::take(&mut self.binds) { + for (_, (lan, _, hostnames, rc)) in binds.lan { + drop(rc); + if let Some(external) = lan.assigned_ssl_port { + for hostname in ctrl.server_hostnames.iter().cloned() { + ctrl.vhost.gc(hostname, external).await?; + } + 
for hostname in hostnames { + ctrl.vhost.gc(Some(hostname), external).await?; + } } - if let Some(external) = old_lan_port { - ctrl.vhost.gc(None, external).await?; + if let Some(external) = lan.assigned_port { ctrl.forward.gc(external).await?; } } - let mut removed = BTreeSet::new(); - let mut removed_ssl = BTreeSet::new(); - binds.lan.retain(|internal, (external, ssl, _)| { - if host.bindings.contains_key(internal) { - true - } else { - if ssl.is_some() { - removed_ssl.insert(*external); - } else { - removed.insert(*external); - } - false - } - }); - for external in removed { - ctrl.forward.gc(external).await?; - } - for external in removed_ssl { - ctrl.vhost.gc(None, external).await?; + for (addr, (_, rcs)) in binds.tor { + drop(rcs); + errors.handle(ctrl.tor.gc(Some(addr), None).await); } } - let tor_binds: OrdMap = host - .bindings - .iter() - .flat_map(|(internal, info)| { - let non_ssl = ( - info.options.preferred_external_port, - SocketAddr::from((self.ip, *internal)), - ); - if let (Some(ssl), Some(ssl_internal)) = - (&info.options.add_ssl, info.assigned_lan_port) - { - itertools::Either::Left( - [ - ( - ssl.preferred_external_port, - SocketAddr::from(([127, 0, 0, 1], ssl_internal)), - ), - non_ssl, - ] - .into_iter(), - ) + std::mem::take(&mut self.dns); + errors.handle(ctrl.dns.gc(Some(self.id.clone()), self.ip).await); + errors.into_result() + } + + async fn update(&mut self, id: HostId, host: Host) -> Result<(), Error> { + let ctrl = self.net_controller()?; + let mut hostname_info = BTreeMap::new(); + let binds = self.binds.entry(id.clone()).or_default(); + + let peek = ctrl.db.peek().await; + + // LAN + let server_info = peek.as_public().as_server_info(); + let ip_info = server_info.as_ip_info().de()?; + let hostname = server_info.as_hostname().de()?; + for (port, bind) in &host.bindings { + let old_lan_bind = binds.lan.remove(port); + let lan_bind = old_lan_bind + .as_ref() + .filter(|(external, ssl, _, _)| { + ssl == &bind.options.add_ssl && 
bind.lan == *external + }) + .cloned(); // only keep existing binding if relevant details match + if bind.lan.assigned_port.is_some() || bind.lan.assigned_ssl_port.is_some() { + let new_lan_bind = if let Some(b) = lan_bind { + b } else { - itertools::Either::Right([non_ssl].into_iter()) + let mut rcs = Vec::with_capacity(2 + host.addresses.len()); + let mut hostnames = BTreeSet::new(); + if let Some(ssl) = &bind.options.add_ssl { + let external = bind + .lan + .assigned_ssl_port + .or_not_found("assigned ssl port")?; + let target = (self.ip, *port).into(); + let connect_ssl = if let Some(alpn) = ssl.alpn.clone() { + Err(alpn) + } else { + if bind.options.secure.as_ref().map_or(false, |s| s.ssl) { + Ok(()) + } else { + Err(AlpnInfo::Reflect) + } + }; + for hostname in ctrl.server_hostnames.iter().cloned() { + rcs.push( + ctrl.vhost + .add(hostname, external, target, connect_ssl.clone()) + .await?, + ); + } + for address in host.addresses() { + match address { + HostAddress::Onion { address } => { + let hostname = InternedString::from_display(address); + if hostnames.insert(hostname.clone()) { + rcs.push( + ctrl.vhost + .add( + Some(hostname), + external, + target, + connect_ssl.clone(), + ) + .await?, + ); + } + } + HostAddress::Domain { address } => { + if hostnames.insert(address.clone()) { + rcs.push( + ctrl.vhost + .add( + Some(address.clone()), + external, + target, + connect_ssl.clone(), + ) + .await?, + ); + } + } + } + } + } + if let Some(security) = bind.options.secure { + if bind.options.add_ssl.is_some() && security.ssl { + // doesn't make sense to have 2 listening ports, both with ssl + } else { + let external = + bind.lan.assigned_port.or_not_found("assigned lan port")?; + rcs.push(ctrl.forward.add(external, (self.ip, *port).into()).await?); + } + } + (bind.lan, bind.options.add_ssl.clone(), hostnames, rcs) + }; + let mut bind_hostname_info: Vec = + hostname_info.remove(port).unwrap_or_default(); + for (interface, ip_info) in &ip_info { + 
bind_hostname_info.push(HostnameInfo::Ip { + network_interface_id: interface.clone(), + public: false, + hostname: IpHostname::Local { + value: format!("{hostname}.local"), + port: new_lan_bind.0.assigned_port, + ssl_port: new_lan_bind.0.assigned_ssl_port, + }, + }); + if let Some(ipv4) = ip_info.ipv4 { + bind_hostname_info.push(HostnameInfo::Ip { + network_interface_id: interface.clone(), + public: false, + hostname: IpHostname::Ipv4 { + value: ipv4, + port: new_lan_bind.0.assigned_port, + ssl_port: new_lan_bind.0.assigned_ssl_port, + }, + }); + } + if let Some(ipv6) = ip_info.ipv6 { + bind_hostname_info.push(HostnameInfo::Ip { + network_interface_id: interface.clone(), + public: false, + hostname: IpHostname::Ipv6 { + value: ipv6, + port: new_lan_bind.0.assigned_port, + ssl_port: new_lan_bind.0.assigned_ssl_port, + }, + }); + } } - }) - .collect(); - let mut keep_tor_addrs = BTreeSet::new(); - for addr in match host.kind { - HostKind::Multi => { - // itertools::Either::Left( - host.addresses.iter() - // ) - } // HostKind::Single | HostKind::Static => itertools::Either::Right(&host.primary), - } { - match addr { - HostAddress::Onion { address } => { - keep_tor_addrs.insert(address); - let old_tor_bind = binds.tor.remove(address); - let tor_bind = old_tor_bind.filter(|(ports, _)| ports == &tor_binds); - let new_tor_bind = if let Some(tor_bind) = tor_bind { - tor_bind - } else { - let key = ctrl - .db - .peek() - .await - .into_private() - .into_key_store() - .into_onion() - .get_key(address)?; - let rcs = ctrl - .tor - .add(key, tor_binds.clone().into_iter().collect()) - .await?; - (tor_binds.clone(), rcs) - }; - binds.tor.insert(address.clone(), new_tor_bind); + hostname_info.insert(*port, bind_hostname_info); + binds.lan.insert(*port, new_lan_bind); + } + if let Some((lan, _, hostnames, _)) = old_lan_bind { + if let Some(external) = lan.assigned_ssl_port { + for hostname in ctrl.server_hostnames.iter().cloned() { + ctrl.vhost.gc(hostname, external).await?; + } + 
for hostname in hostnames { + ctrl.vhost.gc(Some(hostname), external).await?; + } + } + if let Some(external) = lan.assigned_port { + ctrl.forward.gc(external).await?; } } } + let mut removed = BTreeSet::new(); + binds.lan.retain(|internal, (external, _, hostnames, _)| { + if host.bindings.contains_key(internal) { + true + } else { + removed.insert((*external, std::mem::take(hostnames))); + + false + } + }); + for (lan, hostnames) in removed { + if let Some(external) = lan.assigned_ssl_port { + for hostname in ctrl.server_hostnames.iter().cloned() { + ctrl.vhost.gc(hostname, external).await?; + } + for hostname in hostnames { + ctrl.vhost.gc(Some(hostname), external).await?; + } + } + if let Some(external) = lan.assigned_port { + ctrl.forward.gc(external).await?; + } + } + + struct TorHostnamePorts { + non_ssl: Option, + ssl: Option, + } + let mut tor_hostname_ports = BTreeMap::::new(); + let mut tor_binds = OrdMap::::new(); + for (internal, info) in &host.bindings { + tor_binds.insert( + info.options.preferred_external_port, + SocketAddr::from((self.ip, *internal)), + ); + if let (Some(ssl), Some(ssl_internal)) = + (&info.options.add_ssl, info.lan.assigned_ssl_port) + { + tor_binds.insert( + ssl.preferred_external_port, + SocketAddr::from(([127, 0, 0, 1], ssl_internal)), + ); + tor_hostname_ports.insert( + *internal, + TorHostnamePorts { + non_ssl: Some(info.options.preferred_external_port) + .filter(|p| *p != ssl.preferred_external_port), + ssl: Some(ssl.preferred_external_port), + }, + ); + } else { + tor_hostname_ports.insert( + *internal, + TorHostnamePorts { + non_ssl: Some(info.options.preferred_external_port), + ssl: None, + }, + ); + } + } + + let mut keep_tor_addrs = BTreeSet::new(); + for tor_addr in host.addresses().filter_map(|a| { + if let HostAddress::Onion { address } = a { + Some(address) + } else { + None + } + }) { + keep_tor_addrs.insert(tor_addr); + let old_tor_bind = binds.tor.remove(tor_addr); + let tor_bind = old_tor_bind.filter(|(ports, _)| 
ports == &tor_binds); + let new_tor_bind = if let Some(tor_bind) = tor_bind { + tor_bind + } else { + let key = peek + .as_private() + .as_key_store() + .as_onion() + .get_key(tor_addr)?; + let rcs = ctrl + .tor + .add(key, tor_binds.clone().into_iter().collect()) + .await?; + (tor_binds.clone(), rcs) + }; + for (internal, ports) in &tor_hostname_ports { + let mut bind_hostname_info = hostname_info.remove(internal).unwrap_or_default(); + bind_hostname_info.push(HostnameInfo::Onion { + hostname: OnionHostname { + value: tor_addr.to_string(), + port: ports.non_ssl, + ssl_port: ports.ssl, + }, + }); + hostname_info.insert(*internal, bind_hostname_info); + } + binds.tor.insert(tor_addr.clone(), new_tor_bind); + } for addr in binds.tor.keys() { if !keep_tor_addrs.contains(addr) { ctrl.tor.gc(Some(addr.clone()), None).await?; } } + self.net_controller()? + .db + .mutate(|db| { + host_for(db, &self.id, &id, host.kind)? + .as_hostname_info_mut() + .ser(&hostname_info) + }) + .await?; Ok(()) } pub async fn remove_all(mut self) -> Result<(), Error> { self.shutdown = true; - let mut errors = ErrorCollection::new(); if let Some(ctrl) = Weak::upgrade(&self.controller) { - for (_, binds) in std::mem::take(&mut self.binds) { - for (_, (external, ssl, rc)) in binds.lan { - drop(rc); - if ssl.is_some() { - errors.handle(ctrl.vhost.gc(None, external).await); - } else { - errors.handle(ctrl.forward.gc(external).await); - } - } - for (addr, (_, rcs)) in binds.tor { - drop(rcs); - errors.handle(ctrl.tor.gc(Some(addr), None).await); - } - } - std::mem::take(&mut self.dns); - errors.handle(ctrl.dns.gc(Some(self.id.clone()), self.ip).await); - errors.into_result() + self.clear_bindings().await?; + drop(ctrl); + Ok(()) } else { tracing::warn!("NetService dropped after NetController is shutdown"); Err(Error::new( @@ -387,7 +524,30 @@ impl NetService { } pub fn get_ip(&self) -> Ipv4Addr { - self.ip.to_owned() + self.ip + } + + pub fn get_lan_port(&self, host_id: HostId, internal_port: u16) 
-> Result { + let host_id_binds = self.binds.get_key_value(&host_id); + match host_id_binds { + Some((_, binds)) => { + if let Some((lan, _, _, _)) = binds.lan.get(&internal_port) { + Ok(*lan) + } else { + Err(Error::new( + eyre!( + "Internal Port {} not found in NetService binds", + internal_port + ), + crate::ErrorKind::NotFound, + )) + } + } + None => Err(Error::new( + eyre!("HostID {} not found in NetService binds", host_id), + crate::ErrorKind::NotFound, + )), + } } } diff --git a/core/startos/src/net/refresher.html b/core/startos/src/net/refresher.html new file mode 100644 index 000000000..445c6b5be --- /dev/null +++ b/core/startos/src/net/refresher.html @@ -0,0 +1,11 @@ + + + StartOS: Loading... + + + + Loading... + + \ No newline at end of file diff --git a/core/startos/src/net/service_interface.rs b/core/startos/src/net/service_interface.rs index 9a4659cfd..dbe228ef2 100644 --- a/core/startos/src/net/service_interface.rs +++ b/core/startos/src/net/service_interface.rs @@ -1,51 +1,30 @@ use std::net::{Ipv4Addr, Ipv6Addr}; +use imbl_value::InternedString; use models::{HostId, ServiceInterfaceId}; use serde::{Deserialize, Serialize}; use ts_rs::TS; -use crate::net::host::binding::BindOptions; -use crate::net::host::HostKind; -use crate::prelude::*; - -#[derive(Clone, Debug, Deserialize, Serialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -pub struct ServiceInterfaceWithHostInfo { - #[serde(flatten)] - pub service_interface: ServiceInterface, - pub host_info: ExportedHostInfo, -} - -#[derive(Clone, Debug, Deserialize, Serialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -pub struct ExportedHostInfo { - pub id: HostId, - pub kind: HostKind, - pub hostnames: Vec, -} - #[derive(Clone, Debug, Deserialize, Serialize, TS)] #[ts(export)] #[serde(rename_all = "camelCase")] #[serde(rename_all_fields = "camelCase")] #[serde(tag = "kind")] -pub enum ExportedHostnameInfo { +pub enum HostnameInfo { Ip { network_interface_id: String, public: 
bool, - hostname: ExportedIpHostname, + hostname: IpHostname, }, Onion { - hostname: ExportedOnionHostname, + hostname: OnionHostname, }, } #[derive(Clone, Debug, Deserialize, Serialize, TS)] #[ts(export)] #[serde(rename_all = "camelCase")] -pub struct ExportedOnionHostname { +pub struct OnionHostname { pub value: String, pub port: Option, pub ssl_port: Option, @@ -56,7 +35,7 @@ pub struct ExportedOnionHostname { #[serde(rename_all = "camelCase")] #[serde(rename_all_fields = "camelCase")] #[serde(tag = "kind")] -pub enum ExportedIpHostname { +pub enum IpHostname { Ipv4 { value: Ipv4Addr, port: Option, @@ -110,6 +89,10 @@ pub enum ServiceInterfaceType { pub struct AddressInfo { pub username: Option, pub host_id: HostId, - pub bind_options: BindOptions, + pub internal_port: u16, + #[ts(type = "string | null")] + pub scheme: Option, + #[ts(type = "string | null")] + pub ssl_scheme: Option, pub suffix: String, } diff --git a/core/startos/src/net/ssl.rs b/core/startos/src/net/ssl.rs index 245881c55..29bcd9652 100644 --- a/core/startos/src/net/ssl.rs +++ b/core/startos/src/net/ssl.rs @@ -1,13 +1,13 @@ -use std::cmp::Ordering; +use std::cmp::{min, Ordering}; use std::collections::{BTreeMap, BTreeSet}; use std::net::IpAddr; use std::path::Path; -use std::time::{SystemTime, UNIX_EPOCH}; +use std::time::{Duration, SystemTime, UNIX_EPOCH}; use futures::FutureExt; use imbl_value::InternedString; use libc::time_t; -use openssl::asn1::{Asn1Integer, Asn1Time}; +use openssl::asn1::{Asn1Integer, Asn1Time, Asn1TimeRef}; use openssl::bn::{BigNum, MsbOption}; use openssl::ec::{EcGroup, EcKey}; use openssl::hash::MessageDigest; @@ -17,7 +17,7 @@ use openssl::x509::{X509Builder, X509Extension, X509NameBuilder, X509}; use openssl::*; use patch_db::HasModel; use serde::{Deserialize, Serialize}; -use tokio::sync::Mutex; +use tokio::time::Instant; use tracing::instrument; use crate::account::AccountInfo; @@ -127,12 +127,18 @@ impl Model { } } -#[derive(Debug, Clone, Deserialize, Serialize)] 
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)] pub struct CertData { pub keys: PKeyPair, pub certs: CertPair, } +impl CertData { + pub fn expiration(&self) -> Result { + self.certs.expiration() + } +} +#[derive(Debug, Clone, PartialEq, Eq)] pub struct FullchainCertData { pub root: X509, pub int: X509, @@ -145,6 +151,16 @@ impl FullchainCertData { pub fn fullchain_nistp256(&self) -> Vec<&X509> { vec![&self.leaf.certs.nistp256, &self.int, &self.root] } + pub fn expiration(&self) -> Result { + [ + asn1_time_to_system_time(self.root.not_after())?, + asn1_time_to_system_time(self.int.not_after())?, + self.leaf.expiration()?, + ] + .into_iter() + .min() + .ok_or_else(|| Error::new(eyre!("unreachable"), ErrorKind::Unknown)) + } } static CERTIFICATE_VERSION: i32 = 2; // X509 version 3 is actually encoded as '2' in the cert because fuck you. @@ -156,6 +172,26 @@ fn unix_time(time: SystemTime) -> time_t { .unwrap_or_default() } +lazy_static::lazy_static! { + static ref ASN1_UNIX_EPOCH: Asn1Time = Asn1Time::from_unix(0).unwrap(); +} + +fn asn1_time_to_system_time(time: &Asn1TimeRef) -> Result { + let diff = time.diff(&**ASN1_UNIX_EPOCH)?; + let mut res = UNIX_EPOCH; + if diff.days >= 0 { + res += Duration::from_secs(diff.days as u64 * 86400); + } else { + res -= Duration::from_secs((-1 * diff.days) as u64 * 86400); + } + if diff.secs >= 0 { + res += Duration::from_secs(diff.secs as u64); + } else { + res -= Duration::from_secs((-1 * diff.secs) as u64); + } + Ok(res) +} + #[derive(Debug, Clone, Deserialize, Serialize)] pub struct PKeyPair { #[serde(with = "crate::util::serde::pem")] @@ -163,6 +199,12 @@ pub struct PKeyPair { #[serde(with = "crate::util::serde::pem")] pub nistp256: PKey, } +impl PartialEq for PKeyPair { + fn eq(&self, other: &Self) -> bool { + self.ed25519.public_eq(&other.ed25519) && self.nistp256.public_eq(&other.nistp256) + } +} +impl Eq for PKeyPair {} #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)] pub 
struct CertPair { @@ -171,6 +213,14 @@ pub struct CertPair { #[serde(with = "crate::util::serde::pem")] pub nistp256: X509, } +impl CertPair { + pub fn expiration(&self) -> Result { + Ok(min( + asn1_time_to_system_time(self.ed25519.not_after())?, + asn1_time_to_system_time(self.nistp256.not_after())?, + )) + } +} pub async fn root_ca_start_time() -> Result { Ok(if check_time_is_synchronized().await? { diff --git a/core/startos/src/net/static_server.rs b/core/startos/src/net/static_server.rs index fa71672b3..f1da91851 100644 --- a/core/startos/src/net/static_server.rs +++ b/core/startos/src/net/static_server.rs @@ -1,6 +1,8 @@ -use std::fs::Metadata; +use std::cmp::min; use std::future::Future; +use std::io::Cursor; use std::path::{Path, PathBuf}; +use std::sync::Arc; use std::time::UNIX_EPOCH; use async_compression::tokio::bufread::GzipEncoder; @@ -9,33 +11,50 @@ use axum::extract::{self as x, Request}; use axum::response::Response; use axum::routing::{any, get, post}; use axum::Router; +use base64::display::Base64Display; use digest::Digest; use futures::future::ready; -use http::header::ACCEPT_ENCODING; +use http::header::{ + ACCEPT_ENCODING, ACCEPT_RANGES, CACHE_CONTROL, CONNECTION, CONTENT_ENCODING, CONTENT_LENGTH, + CONTENT_RANGE, CONTENT_TYPE, ETAG, RANGE, +}; use http::request::Parts as RequestParts; -use http::{HeaderMap, Method, StatusCode}; +use http::{HeaderValue, Method, StatusCode}; +use imbl_value::InternedString; use include_dir::Dir; use new_mime_guess::MimeGuess; use openssl::hash::MessageDigest; use openssl::x509::X509; -use rpc_toolkit::Server; -use tokio::fs::File; -use tokio::io::BufReader; +use rpc_toolkit::{Context, HttpServer, Server}; +use sqlx::query; +use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeekExt, BufReader}; use tokio_util::io::ReaderStream; +use url::Url; -use crate::context::{DiagnosticContext, InstallContext, RpcContext, SetupContext}; -use crate::core::rpc_continuations::RequestGuid; -use crate::db::subscribe; +use 
crate::context::{DiagnosticContext, InitContext, InstallContext, RpcContext, SetupContext}; use crate::hostname::Hostname; +use crate::install::PKG_ARCHIVE_DIR; use crate::middleware::auth::{Auth, HasValidSession}; use crate::middleware::cors::Cors; use crate::middleware::db::SyncDb; -use crate::middleware::diagnostic::DiagnosticMode; -use crate::{diagnostic_api, install_api, main_api, setup_api, Error, ErrorKind, ResultExt}; +use crate::prelude::*; +use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment; +use crate::rpc_continuations::{Guid, RpcContinuations}; +use crate::s9pk::merkle_archive::source::http::HttpSource; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; +use crate::s9pk::merkle_archive::source::FileSource; +use crate::s9pk::S9pk; +use crate::util::io::open_file; +use crate::util::net::SyncBody; +use crate::util::serde::BASE64; +use crate::{diagnostic_api, init_api, install_api, main_api, setup_api}; const NOT_FOUND: &[u8] = b"Not Found"; const METHOD_NOT_ALLOWED: &[u8] = b"Method Not Allowed"; const NOT_AUTHORIZED: &[u8] = b"Not Authorized"; +const INTERNAL_SERVER_ERROR: &[u8] = b"Internal Server Error"; + +const PROXY_STRIP_HEADERS: &[&str] = &["cookie", "host", "origin", "referer", "user-agent"]; #[cfg(all(feature = "daemon", not(feature = "test")))] const EMBEDDED_UIS: Dir<'_> = @@ -43,12 +62,9 @@ const EMBEDDED_UIS: Dir<'_> = #[cfg(not(all(feature = "daemon", not(feature = "test"))))] const EMBEDDED_UIS: Dir<'_> = Dir::new("", &[]); -const PROXY_STRIP_HEADERS: &[&str] = &["cookie", "host", "origin", "referer", "user-agent"]; - #[derive(Clone)] pub enum UiMode { Setup, - Diag, Install, Main, } @@ -57,131 +73,49 @@ impl UiMode { fn path(&self, path: &str) -> PathBuf { match self { Self::Setup => Path::new("setup-wizard").join(path), - Self::Diag => Path::new("diagnostic-ui").join(path), Self::Install => Path::new("install-wizard").join(path), Self::Main => Path::new("ui").join(path), } } } -pub 
fn setup_ui_file_router(ctx: SetupContext) -> Router { - Router::new() - .route_service( - "/rpc/*path", - post(Server::new(move || ready(Ok(ctx.clone())), setup_api()).middleware(Cors::new())), - ) - .fallback(any(|request: Request| async move { - alt_ui(request, UiMode::Setup) - .await - .unwrap_or_else(server_error) - })) -} - -pub fn diag_ui_file_router(ctx: DiagnosticContext) -> Router { +pub fn rpc_router>( + ctx: C, + server: HttpServer, +) -> Router { Router::new() + .route("/rpc/*path", post(server)) .route( - "/rpc/*path", - post( - Server::new(move || ready(Ok(ctx.clone())), diagnostic_api()) - .middleware(Cors::new()) - .middleware(DiagnosticMode::new()), - ), - ) - .fallback(any(|request: Request| async move { - alt_ui(request, UiMode::Diag) - .await - .unwrap_or_else(server_error) - })) -} - -pub fn install_ui_file_router(ctx: InstallContext) -> Router { - Router::new() - .route("/rpc/*path", { - let ctx = ctx.clone(); - post(Server::new(move || ready(Ok(ctx.clone())), install_api()).middleware(Cors::new())) - }) - .fallback(any(|request: Request| async move { - alt_ui(request, UiMode::Install) - .await - .unwrap_or_else(server_error) - })) -} - -pub fn main_ui_server_router(ctx: RpcContext) -> Router { - Router::new() - .route("/rpc/*path", { - let ctx = ctx.clone(); - post( - Server::new(move || ready(Ok(ctx.clone())), main_api()) - .middleware(Cors::new()) - .middleware(Auth::new()) - .middleware(SyncDb::new()), - ) - }) - .route( - "/ws/db", - any({ - let ctx = ctx.clone(); - move |headers: HeaderMap, ws: x::WebSocketUpgrade| async move { - subscribe(ctx, headers, ws) - .await - .unwrap_or_else(server_error) - } - }), - ) - .route( - "/ws/rpc/*path", + "/ws/rpc/:guid", get({ let ctx = ctx.clone(); - move |x::Path(path): x::Path, + move |x::Path(guid): x::Path, ws: axum::extract::ws::WebSocketUpgrade| async move { - match RequestGuid::from(&path) { - None => { - tracing::debug!("No Guid Path"); - bad_request() - } - Some(guid) => match 
ctx.get_ws_continuation_handler(&guid).await { - Some(cont) => ws.on_upgrade(cont), - _ => not_found(), - }, + match AsRef::::as_ref(&ctx).get_ws_handler(&guid).await { + Some(cont) => ws.on_upgrade(cont), + _ => not_found(), } } }), ) .route( - "/rest/rpc/*path", + "/rest/rpc/:guid", any({ let ctx = ctx.clone(); - move |request: x::Request| async move { - let path = request - .uri() - .path() - .strip_prefix("/rest/rpc/") - .unwrap_or_default(); - match RequestGuid::from(&path) { - None => { - tracing::debug!("No Guid Path"); - bad_request() - } - Some(guid) => match ctx.get_rest_continuation_handler(&guid).await { - None => not_found(), - Some(cont) => cont(request).await.unwrap_or_else(server_error), - }, + move |x::Path(guid): x::Path, request: x::Request| async move { + match AsRef::::as_ref(&ctx).get_rest_handler(&guid).await { + None => not_found(), + Some(cont) => cont(request).await.unwrap_or_else(server_error), } } }), ) - .fallback(any(move |request: Request| async move { - main_start_os_ui(request, ctx) - .await - .unwrap_or_else(server_error) - })) } -async fn alt_ui(req: Request, ui_mode: UiMode) -> Result { +fn serve_ui(req: Request, ui_mode: UiMode) -> Result { let (request_parts, _body) = req.into_parts(); match &request_parts.method { - &Method::GET => { + &Method::GET | &Method::HEAD => { let uri_path = ui_mode.path( request_parts .uri @@ -195,9 +129,7 @@ async fn alt_ui(req: Request, ui_mode: UiMode) -> Result { .or_else(|| EMBEDDED_UIS.get_file(&*ui_mode.path("index.html"))); if let Some(file) = file { - FileData::from_embedded(&request_parts, file) - .into_response(&request_parts) - .await + FileData::from_embedded(&request_parts, file)?.into_response(&request_parts) } else { Ok(not_found()) } @@ -206,116 +138,231 @@ async fn alt_ui(req: Request, ui_mode: UiMode) -> Result { } } +pub fn setup_ui_router(ctx: SetupContext) -> Router { + rpc_router( + ctx.clone(), + Server::new(move || ready(Ok(ctx.clone())), setup_api()).middleware(Cors::new()), 
+ ) + .fallback(any(|request: Request| async move { + serve_ui(request, UiMode::Setup).unwrap_or_else(server_error) + })) +} + +pub fn diagnostic_ui_router(ctx: DiagnosticContext) -> Router { + rpc_router( + ctx.clone(), + Server::new(move || ready(Ok(ctx.clone())), diagnostic_api()).middleware(Cors::new()), + ) + .fallback(any(|request: Request| async move { + serve_ui(request, UiMode::Main).unwrap_or_else(server_error) + })) +} + +pub fn install_ui_router(ctx: InstallContext) -> Router { + rpc_router( + ctx.clone(), + Server::new(move || ready(Ok(ctx.clone())), install_api()).middleware(Cors::new()), + ) + .fallback(any(|request: Request| async move { + serve_ui(request, UiMode::Install).unwrap_or_else(server_error) + })) +} + +pub fn init_ui_router(ctx: InitContext) -> Router { + rpc_router( + ctx.clone(), + Server::new(move || ready(Ok(ctx.clone())), init_api()).middleware(Cors::new()), + ) + .fallback(any(|request: Request| async move { + serve_ui(request, UiMode::Main).unwrap_or_else(server_error) + })) +} + +pub fn main_ui_router(ctx: RpcContext) -> Router { + rpc_router(ctx.clone(), { + let ctx = ctx.clone(); + Server::new(move || ready(Ok(ctx.clone())), main_api::()) + .middleware(Cors::new()) + .middleware(Auth::new()) + .middleware(SyncDb::new()) + }) + .route("/proxy/:url", { + let ctx = ctx.clone(); + any(move |x::Path(url): x::Path, request: Request| { + let ctx = ctx.clone(); + async move { + proxy_request(ctx, request, url) + .await + .unwrap_or_else(server_error) + } + }) + }) + .nest("/s9pk", s9pk_router(ctx.clone())) + .route( + "/static/local-root-ca.crt", + get(move || { + let ctx = ctx.clone(); + async move { + let account = ctx.account.read().await; + cert_send(&account.root_ca_cert, &account.hostname) + } + }), + ) + .fallback(any(|request: Request| async move { + serve_ui(request, UiMode::Main).unwrap_or_else(server_error) + })) +} + +pub fn refresher() -> Router { + Router::new().fallback(get(|request: Request| async move { + let res = 
include_bytes!("./refresher.html"); + FileData { + data: Body::from(&res[..]), + content_range: None, + e_tag: None, + encoding: None, + len: Some(res.len() as u64), + mime: Some("text/html".into()), + digest: None, + } + .into_response(&request.into_parts().0) + .unwrap_or_else(server_error) + })) +} + +async fn proxy_request(ctx: RpcContext, request: Request, url: String) -> Result { + if_authorized(&ctx, request, |mut request| async { + for header in PROXY_STRIP_HEADERS { + request.headers_mut().remove(*header); + } + *request.uri_mut() = url.parse()?; + let request = request.map(|b| reqwest::Body::wrap_stream(SyncBody::from(b))); + let response = ctx.client.execute(request.try_into()?).await?; + Ok(Response::from(response).map(|b| Body::new(b))) + }) + .await +} + +fn s9pk_router(ctx: RpcContext) -> Router { + Router::new() + .route("/installed/:s9pk", { + let ctx = ctx.clone(); + any( + |x::Path(s9pk): x::Path, request: Request| async move { + if_authorized(&ctx, request, |request| async { + let (parts, _) = request.into_parts(); + match FileData::from_path( + &parts, + &ctx.datadir + .join(PKG_ARCHIVE_DIR) + .join("installed") + .join(s9pk), + ) + .await? + { + Some(file) => file.into_response(&parts), + None => Ok(not_found()), + } + }) + .await + .unwrap_or_else(server_error) + }, + ) + }) + .route("/installed/:s9pk/*path", { + let ctx = ctx.clone(); + any( + |x::Path((s9pk, path)): x::Path<(String, PathBuf)>, + x::RawQuery(query): x::RawQuery, + request: Request| async move { + if_authorized(&ctx, request, |request| async { + let s9pk = S9pk::deserialize( + &MultiCursorFile::from( + open_file( + ctx.datadir + .join(PKG_ARCHIVE_DIR) + .join("installed") + .join(s9pk), + ) + .await?, + ), + query + .as_deref() + .map(MerkleArchiveCommitment::from_query) + .and_then(|a| a.transpose()) + .transpose()? + .as_ref(), + ) + .await?; + let (parts, _) = request.into_parts(); + match FileData::from_s9pk(&parts, &s9pk, &path).await? 
{ + Some(file) => file.into_response(&parts), + None => Ok(not_found()), + } + }) + .await + .unwrap_or_else(server_error) + }, + ) + }) + .route( + "/proxy/:url/*path", + any( + |x::Path((url, path)): x::Path<(Url, PathBuf)>, + x::RawQuery(query): x::RawQuery, + request: Request| async move { + if_authorized(&ctx, request, |request| async { + let s9pk = S9pk::deserialize( + &Arc::new(HttpSource::new(ctx.client.clone(), url).await?), + query + .as_deref() + .map(MerkleArchiveCommitment::from_query) + .and_then(|a| a.transpose()) + .transpose()? + .as_ref(), + ) + .await?; + let (parts, _) = request.into_parts(); + match FileData::from_s9pk(&parts, &s9pk, &path).await? { + Some(file) => file.into_response(&parts), + None => Ok(not_found()), + } + }) + .await + .unwrap_or_else(server_error) + }, + ), + ) +} + async fn if_authorized< - F: FnOnce() -> Fut, - Fut: Future> + Send + Sync, + F: FnOnce(Request) -> Fut, + Fut: Future> + Send, >( ctx: &RpcContext, - parts: &RequestParts, + request: Request, f: F, ) -> Result { - if let Err(e) = HasValidSession::from_header(parts.headers.get(http::header::COOKIE), ctx).await + if let Err(e) = + HasValidSession::from_header(request.headers().get(http::header::COOKIE), ctx).await { - un_authorized(e, parts.uri.path()) + Ok(unauthorized(e, request.uri().path())) } else { - f().await + f(request).await } } -async fn main_start_os_ui(req: Request, ctx: RpcContext) -> Result { - let (request_parts, _body) = req.into_parts(); - match ( - &request_parts.method, - request_parts - .uri - .path() - .strip_prefix('/') - .unwrap_or(request_parts.uri.path()) - .split_once('/'), - ) { - (&Method::GET, Some(("public", path))) => { - todo!("pull directly from s9pk") - } - (&Method::GET, Some(("proxy", target))) => { - if_authorized(&ctx, &request_parts, || async { - let target = urlencoding::decode(target)?; - let res = ctx - .client - .get(target.as_ref()) - .headers( - request_parts - .headers - .iter() - .filter(|(h, _)| { - 
!PROXY_STRIP_HEADERS - .iter() - .any(|bad| h.as_str().eq_ignore_ascii_case(bad)) - }) - .flat_map(|(h, v)| { - Some(( - reqwest::header::HeaderName::from_lowercase( - h.as_str().as_bytes(), - ) - .ok()?, - reqwest::header::HeaderValue::from_bytes(v.as_bytes()).ok()?, - )) - }) - .collect(), - ) - .send() - .await - .with_kind(crate::ErrorKind::Network)?; - let mut hres = Response::builder().status(res.status().as_u16()); - for (h, v) in res.headers().clone() { - if let Some(h) = h { - hres = hres.header(h.to_string(), v.as_bytes()); - } - } - hres.body(Body::from_stream(res.bytes_stream())) - .with_kind(crate::ErrorKind::Network) - }) - .await - } - (&Method::GET, Some(("eos", "local.crt"))) => { - let account = ctx.account.read().await; - cert_send(&account.root_ca_cert, &account.hostname) - } - (&Method::GET, _) => { - let uri_path = UiMode::Main.path( - request_parts - .uri - .path() - .strip_prefix('/') - .unwrap_or(request_parts.uri.path()), - ); - - let file = EMBEDDED_UIS - .get_file(&*uri_path) - .or_else(|| EMBEDDED_UIS.get_file(&*UiMode::Main.path("index.html"))); - - if let Some(file) = file { - FileData::from_embedded(&request_parts, file) - .into_response(&request_parts) - .await - } else { - Ok(not_found()) - } - } - _ => Ok(method_not_allowed()), - } -} - -fn un_authorized(err: Error, path: &str) -> Result { +pub fn unauthorized(err: Error, path: &str) -> Response { tracing::warn!("unauthorized for {} @{:?}", err, path); tracing::debug!("{:?}", err); - Ok(Response::builder() + Response::builder() .status(StatusCode::UNAUTHORIZED) .body(NOT_AUTHORIZED.into()) - .unwrap()) + .unwrap() } /// HTTP status code 404 -fn not_found() -> Response { +pub fn not_found() -> Response { Response::builder() .status(StatusCode::NOT_FOUND) .body(NOT_FOUND.into()) @@ -323,21 +370,23 @@ fn not_found() -> Response { } /// HTTP status code 405 -fn method_not_allowed() -> Response { +pub fn method_not_allowed() -> Response { Response::builder() 
.status(StatusCode::METHOD_NOT_ALLOWED) .body(METHOD_NOT_ALLOWED.into()) .unwrap() } -fn server_error(err: Error) -> Response { +pub fn server_error(err: Error) -> Response { + tracing::error!("internal server error: {}", err); + tracing::debug!("{:?}", err); Response::builder() .status(StatusCode::INTERNAL_SERVER_ERROR) - .body(err.to_string().into()) + .body(INTERNAL_SERVER_ERROR.into()) .unwrap() } -fn bad_request() -> Response { +pub fn bad_request() -> Response { Response::builder() .status(StatusCode::BAD_REQUEST) .body(Body::empty()) @@ -351,7 +400,7 @@ fn cert_send(cert: &X509, hostname: &Hostname) -> Result { .header( http::header::ETAG, base32::encode( - base32::Alphabet::RFC4648 { padding: false }, + base32::Alphabet::Rfc4648 { padding: false }, &*cert.digest(MessageDigest::sha256())?, ) .to_lowercase(), @@ -366,53 +415,156 @@ fn cert_send(cert: &X509, hostname: &Hostname) -> Result { .with_kind(ErrorKind::Network) } +fn parse_range(header: &HeaderValue, len: u64) -> Result<(u64, u64, u64), Error> { + let r = header + .to_str() + .with_kind(ErrorKind::Network)? + .trim() + .strip_prefix("bytes=") + .ok_or_else(|| Error::new(eyre!("invalid range units"), ErrorKind::InvalidRequest))?; + + if r.contains(",") { + return Err(Error::new( + eyre!("multi-range requests are unsupported"), + ErrorKind::InvalidRequest, + )); + } + if let Some((start, end)) = r.split_once("-").map(|(s, e)| (s.trim(), e.trim())) { + Ok(( + if start.is_empty() { + 0u64 + } else { + start.parse()? 
+ }, + if end.is_empty() { + len - 1 + } else { + min(end.parse()?, len - 1) + }, + len, + )) + } else { + Ok((len - r.trim().parse::()?, len - 1, len)) + } +} + struct FileData { data: Body, len: Option, + content_range: Option<(u64, u64, u64)>, encoding: Option<&'static str>, - e_tag: String, - mime: Option, + e_tag: Option, + mime: Option, + digest: Option<(&'static str, Vec)>, } impl FileData { - fn from_embedded(req: &RequestParts, file: &'static include_dir::File<'static>) -> Self { + fn from_embedded( + req: &RequestParts, + file: &'static include_dir::File<'static>, + ) -> Result { let path = file.path(); - let (encoding, data) = req - .headers - .get_all(ACCEPT_ENCODING) - .into_iter() - .filter_map(|h| h.to_str().ok()) - .flat_map(|s| s.split(",")) - .filter_map(|s| s.split(";").next()) - .map(|s| s.trim()) - .fold((None, file.contents()), |acc, e| { - if let Some(file) = (e == "br") - .then_some(()) - .and_then(|_| EMBEDDED_UIS.get_file(format!("{}.br", path.display()))) - { - (Some("br"), file.contents()) - } else if let Some(file) = (e == "gzip" && acc.0 != Some("br")) - .then_some(()) - .and_then(|_| EMBEDDED_UIS.get_file(format!("{}.gz", path.display()))) - { - (Some("gzip"), file.contents()) - } else { - acc - } - }); + let (encoding, data, len, content_range) = if let Some(range) = req.headers.get(RANGE) { + let data = file.contents(); + let (start, end, size) = parse_range(range, data.len() as u64)?; + let encoding = req + .headers + .get_all(ACCEPT_ENCODING) + .into_iter() + .filter_map(|h| h.to_str().ok()) + .flat_map(|s| s.split(",")) + .filter_map(|s| s.split(";").next()) + .map(|s| s.trim()) + .any(|e| e == "gzip") + .then_some("gzip"); + let data = if start > end { + &[] + } else { + &data[(start as usize)..=(end as usize)] + }; + let (len, data) = if encoding == Some("gzip") { + ( + None, + Body::from_stream(ReaderStream::new(GzipEncoder::new(Cursor::new(data)))), + ) + } else { + (Some(data.len() as u64), Body::from(data)) + }; + 
(encoding, data, len, Some((start, end, size))) + } else { + let (encoding, data) = req + .headers + .get_all(ACCEPT_ENCODING) + .into_iter() + .filter_map(|h| h.to_str().ok()) + .flat_map(|s| s.split(",")) + .filter_map(|s| s.split(";").next()) + .map(|s| s.trim()) + .fold((None, file.contents()), |acc, e| { + if let Some(file) = (e == "br") + .then_some(()) + .and_then(|_| EMBEDDED_UIS.get_file(format!("{}.br", path.display()))) + { + (Some("br"), file.contents()) + } else if let Some(file) = (e == "gzip" && acc.0 != Some("br")) + .then_some(()) + .and_then(|_| EMBEDDED_UIS.get_file(format!("{}.gz", path.display()))) + { + (Some("gzip"), file.contents()) + } else { + acc + } + }); + (encoding, Body::from(data), Some(data.len() as u64), None) + }; - Self { - len: Some(data.len() as u64), + Ok(Self { + len, encoding, - data: data.into(), - e_tag: e_tag(path, None), + content_range, + data: if req.method == Method::HEAD { + Body::empty() + } else { + data + }, + e_tag: file.metadata().map(|metadata| { + e_tag( + path, + format!( + "{}", + metadata + .modified() + .duration_since(UNIX_EPOCH) + .map(|d| d.as_secs() as i64) + .unwrap_or_else(|e| e.duration().as_secs() as i64 * -1), + ) + .as_bytes(), + ) + }), mime: MimeGuess::from_path(path) .first() - .map(|m| m.essence_str().to_owned()), + .map(|m| m.essence_str().into()), + digest: None, + }) + } + + fn encode( + encoding: &mut Option<&str>, + data: R, + len: u64, + ) -> (Option, Body) { + if *encoding == Some("gzip") { + ( + None, + Body::from_stream(ReaderStream::new(GzipEncoder::new(BufReader::new(data)))), + ) + } else { + *encoding = None; + (Some(len), Body::from_stream(ReaderStream::new(data))) } } - async fn from_path(req: &RequestParts, path: &Path) -> Result { - let encoding = req + async fn from_path(req: &RequestParts, path: &Path) -> Result, Error> { + let mut encoding = req .headers .get_all(ACCEPT_ENCODING) .into_iter() @@ -423,75 +575,169 @@ impl FileData { .any(|e| e == "gzip") .then_some("gzip"); 
- let file = File::open(path) - .await - .with_ctx(|_| (ErrorKind::Filesystem, path.display().to_string()))?; + if tokio::fs::metadata(path).await.is_err() { + return Ok(None); + } + + let mut file = open_file(path).await?; + let metadata = file .metadata() .await .with_ctx(|_| (ErrorKind::Filesystem, path.display().to_string()))?; - let e_tag = e_tag(path, Some(&metadata)); + let content_range = req + .headers + .get(RANGE) + .map(|r| parse_range(r, metadata.len())) + .transpose()?; - let (len, data) = if encoding == Some("gzip") { - ( - None, - Body::from_stream(ReaderStream::new(GzipEncoder::new(BufReader::new(file)))), + let e_tag = Some(e_tag( + path, + format!( + "{}", + metadata + .modified()? + .duration_since(UNIX_EPOCH) + .map(|d| d.as_secs() as i64) + .unwrap_or_else(|e| e.duration().as_secs() as i64 * -1) ) + .as_bytes(), + )); + + let (len, data) = if let Some((start, end, _)) = content_range { + let len = end + 1 - start; + file.seek(std::io::SeekFrom::Start(start)).await?; + Self::encode(&mut encoding, file.take(len), len) } else { - ( - Some(metadata.len()), - Body::from_stream(ReaderStream::new(file)), - ) + Self::encode(&mut encoding, file, metadata.len()) }; - Ok(Self { - data, + Ok(Some(Self { + data: if req.method == Method::HEAD { + Body::empty() + } else { + data + }, len, + content_range, encoding, e_tag, mime: MimeGuess::from_path(path) .first() - .map(|m| m.essence_str().to_owned()), - }) + .map(|m| m.essence_str().into()), + digest: None, + })) } - async fn into_response(self, req: &RequestParts) -> Result { + async fn from_s9pk( + req: &RequestParts, + s9pk: &S9pk, + path: &Path, + ) -> Result, Error> { + let mut encoding = req + .headers + .get_all(ACCEPT_ENCODING) + .into_iter() + .filter_map(|h| h.to_str().ok()) + .flat_map(|s| s.split(",")) + .filter_map(|s| s.split(";").next()) + .map(|s| s.trim()) + .any(|e| e == "gzip") + .then_some("gzip"); + + let Some(file) = s9pk.as_archive().contents().get_path(path) else { + return Ok(None); 
+ }; + let Some(contents) = file.as_file() else { + return Ok(None); + }; + let (digest, len) = if let Some((hash, len)) = file.hash() { + (Some(("blake3", hash.as_bytes().to_vec())), len) + } else { + (None, contents.size().await?) + }; + + let content_range = req + .headers + .get(RANGE) + .map(|r| parse_range(r, len)) + .transpose()?; + + let (len, data) = if let Some((start, end, _)) = content_range { + let len = end + 1 - start; + Self::encode(&mut encoding, contents.slice(start, len).await?, len) + } else { + Self::encode(&mut encoding, contents.reader().await?.take(len), len) + }; + + Ok(Some(Self { + data: if req.method == Method::HEAD { + Body::empty() + } else { + data + }, + len, + content_range, + encoding, + e_tag: None, + mime: MimeGuess::from_path(path) + .first() + .map(|m| m.essence_str().into()), + digest, + })) + } + + fn into_response(self, req: &RequestParts) -> Result { let mut builder = Response::builder(); if let Some(mime) = self.mime { - builder = builder.header(http::header::CONTENT_TYPE, &*mime); + builder = builder.header(CONTENT_TYPE, &*mime); + } + if let Some(e_tag) = &self.e_tag { + builder = builder + .header(ETAG, &**e_tag) + .header(CACHE_CONTROL, "public, max-age=21000000, immutable"); + } + + builder = builder.header(ACCEPT_RANGES, "bytes"); + if let Some((start, end, size)) = self.content_range { + builder = builder + .header(CONTENT_RANGE, format!("bytes {start}-{end}/{size}")) + .status(StatusCode::PARTIAL_CONTENT); + } + + if let Some((algorithm, digest)) = self.digest { + builder = builder.header( + "Repr-Digest", + format!("{algorithm}=:{}:", Base64Display::new(&digest, &BASE64)), + ); } - builder = builder.header(http::header::ETAG, &*self.e_tag); - builder = builder.header( - http::header::CACHE_CONTROL, - "public, max-age=21000000, immutable", - ); if req .headers - .get_all(http::header::CONNECTION) + .get_all(CONNECTION) .iter() .flat_map(|s| s.to_str().ok()) .flat_map(|s| s.split(",")) .any(|s| s.trim() == 
"keep-alive") { - builder = builder.header(http::header::CONNECTION, "keep-alive"); + builder = builder.header(CONNECTION, "keep-alive"); } - if req - .headers - .get("if-none-match") - .and_then(|h| h.to_str().ok()) - == Some(self.e_tag.as_ref()) + if self.e_tag.is_some() + && req + .headers + .get("if-none-match") + .and_then(|h| h.to_str().ok()) + == self.e_tag.as_deref() { - builder = builder.status(StatusCode::NOT_MODIFIED); - builder.body(Body::empty()) + builder.status(StatusCode::NOT_MODIFIED).body(Body::empty()) } else { if let Some(len) = self.len { - builder = builder.header(http::header::CONTENT_LENGTH, len); + builder = builder.header(CONTENT_LENGTH, len); } if let Some(encoding) = self.encoding { - builder = builder.header(http::header::CONTENT_ENCODING, encoding); + builder = builder.header(CONTENT_ENCODING, encoding); } builder.body(self.data) @@ -500,24 +746,17 @@ impl FileData { } } -fn e_tag(path: &Path, metadata: Option<&Metadata>) -> String { +lazy_static::lazy_static! 
{ + static ref INSTANCE_NONCE: u64 = rand::random(); +} + +fn e_tag(path: &Path, modified: impl AsRef<[u8]>) -> String { let mut hasher = sha2::Sha256::new(); hasher.update(format!("{:?}", path).as_bytes()); - if let Some(modified) = metadata.and_then(|m| m.modified().ok()) { - hasher.update( - format!( - "{}", - modified - .duration_since(UNIX_EPOCH) - .unwrap_or_default() - .as_secs() - ) - .as_bytes(), - ); - } + hasher.update(modified.as_ref()); let res = hasher.finalize(); format!( "\"{}\"", - base32::encode(base32::Alphabet::RFC4648 { padding: false }, res.as_slice()).to_lowercase() + base32::encode(base32::Alphabet::Rfc4648 { padding: false }, res.as_slice()).to_lowercase() ) } diff --git a/core/startos/src/net/tor.rs b/core/startos/src/net/tor.rs index dfca0fc0b..24b8ddb02 100644 --- a/core/startos/src/net/tor.rs +++ b/core/startos/src/net/tor.rs @@ -12,8 +12,7 @@ use helpers::NonDetachingJoinHandle; use itertools::Itertools; use lazy_static::lazy_static; use regex::Regex; -use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{command, from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tokio::net::TcpStream; use tokio::process::Command; @@ -25,10 +24,7 @@ use tracing::instrument; use ts_rs::TS; use crate::context::{CliContext, RpcContext}; -use crate::logs::{ - cli_logs_generic_follow, cli_logs_generic_nofollow, fetch_logs, follow_logs, journalctl, - LogFollowResponse, LogResponse, LogSource, -}; +use crate::logs::{journalctl, LogSource, LogsParams}; use crate::prelude::*; use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; use crate::util::Invoke; @@ -86,23 +82,27 @@ lazy_static! 
{ static ref PROGRESS_REGEX: Regex = Regex::new("PROGRESS=([0-9]+)").unwrap(); } -pub fn tor() -> ParentHandler { +pub fn tor() -> ParentHandler { ParentHandler::new() .subcommand( "list-services", from_fn_async(list_services) .with_display_serializable() - .with_custom_display_fn::(|handle, result| { + .with_custom_display_fn(|handle, result| { Ok(display_services(handle.params, result)) }) - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand("logs", logs()) + .subcommand( + "logs", + from_fn_async(crate::logs::cli_logs::).no_display(), + ) .subcommand( "reset", from_fn_async(reset) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } #[derive(Deserialize, Serialize, Parser, TS)] @@ -143,89 +143,10 @@ pub async fn list_services(ctx: RpcContext, _: Empty) -> Result, - #[arg(short = 'c', long = "cursor")] - cursor: Option, - #[arg(short = 'B', long = "before")] - #[serde(default)] - before: bool, - #[arg(short = 'f', long = "follow")] - #[serde(default)] - follow: bool, -} - -pub fn logs() -> ParentHandler { - ParentHandler::new() - .root_handler( - from_fn_async(cli_logs) - .no_display() - .with_inherited(|params, _| params), - ) - .root_handler( - from_fn_async(logs_nofollow) - .with_inherited(|params, _| params) - .no_cli(), - ) - .subcommand( - "follow", - from_fn_async(logs_follow) - .with_inherited(|params, _| params) - .no_cli(), - ) -} -pub async fn cli_logs( - ctx: CliContext, - _: Empty, - LogsParams { - limit, - cursor, - before, - follow, - }: LogsParams, -) -> Result<(), RpcError> { - if follow { - if cursor.is_some() { - return Err(RpcError::from(Error::new( - eyre!("The argument '--cursor ' cannot be used with '--follow'"), - crate::ErrorKind::InvalidRequest, - ))); - } - if before { - return Err(RpcError::from(Error::new( - eyre!("The argument '--before' cannot be used with '--follow'"), - crate::ErrorKind::InvalidRequest, - ))); - } - cli_logs_generic_follow(ctx, "net.tor.logs.follow", None, limit).await - } else { - 
cli_logs_generic_nofollow(ctx, "net.tor.logs", None, limit, cursor, before).await - } -} -pub async fn logs_nofollow( - _: AnyContext, - _: Empty, - LogsParams { - limit, - cursor, - before, - .. - }: LogsParams, -) -> Result { - fetch_logs(LogSource::Unit(SYSTEMD_UNIT), limit, cursor, before).await -} - -pub async fn logs_follow( - ctx: RpcContext, - _: Empty, - LogsParams { limit, .. }: LogsParams, -) -> Result { - follow_logs(ctx, LogSource::Unit(SYSTEMD_UNIT), limit).await +pub fn logs() -> ParentHandler { + crate::logs::logs::(|_: &RpcContext, _| async { + Ok(LogSource::Unit(SYSTEMD_UNIT)) + }) } fn event_handler(_event: AsyncEvent<'static>) -> BoxFuture<'static, Result<(), ConnError>> { @@ -384,7 +305,15 @@ async fn torctl( .invoke(ErrorKind::Tor) .await?; - let logs = journalctl(LogSource::Unit(SYSTEMD_UNIT), 0, None, false, true).await?; + let logs = journalctl( + LogSource::Unit(SYSTEMD_UNIT), + 0, + None, + Some("0"), + false, + true, + ) + .await?; let mut tcp_stream = None; for _ in 0..60 { diff --git a/core/startos/src/net/utils.rs b/core/startos/src/net/utils.rs index 6de319a5e..9cba8a0cd 100644 --- a/core/startos/src/net/utils.rs +++ b/core/startos/src/net/utils.rs @@ -112,24 +112,6 @@ pub async fn find_eth_iface() -> Result { )) } -#[pin_project::pin_project] -pub struct SingleAccept(Option); -impl SingleAccept { - pub fn new(conn: T) -> Self { - Self(Some(conn)) - } -} -// impl axum_server::accept::Accept for SingleAccept { -// type Conn = T; -// type Error = Infallible; -// fn poll_accept( -// self: std::pin::Pin<&mut Self>, -// _cx: &mut std::task::Context<'_>, -// ) -> std::task::Poll>> { -// std::task::Poll::Ready(self.project().0.take().map(Ok)) -// } -// } - pub struct TcpListeners { listeners: Vec, } diff --git a/core/startos/src/net/vhost.rs b/core/startos/src/net/vhost.rs index 46838ed51..9fc7c8384 100644 --- a/core/startos/src/net/vhost.rs +++ b/core/startos/src/net/vhost.rs @@ -1,13 +1,19 @@ use std::collections::BTreeMap; use 
std::net::{IpAddr, Ipv6Addr, SocketAddr}; +use std::str::FromStr; use std::sync::{Arc, Weak}; use std::time::Duration; +use axum::body::Body; +use axum::extract::Request; +use axum::response::Response; use color_eyre::eyre::eyre; use helpers::NonDetachingJoinHandle; +use http::Uri; use imbl_value::InternedString; use models::ResultExt; use serde::{Deserialize, Serialize}; +use tokio::io::AsyncWriteExt; use tokio::net::{TcpListener, TcpStream}; use tokio::sync::{Mutex, RwLock}; use tokio_rustls::rustls::pki_types::{ @@ -19,18 +25,20 @@ use tokio_rustls::{LazyConfigAcceptor, TlsConnector}; use tracing::instrument; use ts_rs::TS; +use crate::db::model::Database; +use crate::net::static_server::server_error; use crate::prelude::*; -use crate::util::io::{BackTrackingReader, TimeoutStream}; +use crate::util::io::BackTrackingIO; use crate::util::serde::MaybeUtf8String; // not allowed: <=1024, >=32768, 5355, 5432, 9050, 6010, 9051, 5353 pub struct VHostController { - db: PatchDb, + db: TypedPatchDb, servers: Mutex>, } impl VHostController { - pub fn new(db: PatchDb) -> Self { + pub fn new(db: TypedPatchDb) -> Self { Self { db, servers: Mutex::new(BTreeMap::new()), @@ -39,7 +47,7 @@ impl VHostController { #[instrument(skip_all)] pub async fn add( &self, - hostname: Option, + hostname: Option, external: u16, target: SocketAddr, connect_ssl: Result<(), AlpnInfo>, // Ok: yes, connect using ssl, pass through alpn; Err: connect tcp, use provided strategy for alpn @@ -63,7 +71,7 @@ impl VHostController { Ok(rc?) 
} #[instrument(skip_all)] - pub async fn gc(&self, hostname: Option, external: u16) -> Result<(), Error> { + pub async fn gc(&self, hostname: Option, external: u16) -> Result<(), Error> { let mut writable = self.servers.lock().await; if let Some(server) = writable.remove(&external) { server.gc(hostname).await?; @@ -95,12 +103,12 @@ impl Default for AlpnInfo { } struct VHostServer { - mapping: Weak, BTreeMap>>>>, + mapping: Weak, BTreeMap>>>>, _thread: NonDetachingJoinHandle<()>, } impl VHostServer { #[instrument(skip_all)] - async fn new(port: u16, db: PatchDb) -> Result { + async fn new(port: u16, db: TypedPatchDb) -> Result { // check if port allowed let listener = TcpListener::bind(SocketAddr::new(Ipv6Addr::UNSPECIFIED.into(), port)) .await @@ -111,11 +119,18 @@ impl VHostServer { _thread: tokio::spawn(async move { loop { match listener.accept().await { - Ok((stream, sock_addr)) => { - let stream = - Box::pin(TimeoutStream::new(stream, Duration::from_secs(300))); - let mut stream = BackTrackingReader::new(stream); - stream.start_buffering(); + Ok((stream, _)) => { + if let Err(e) = socket2::SockRef::from(&stream).set_tcp_keepalive( + &socket2::TcpKeepalive::new() + .with_time(Duration::from_secs(900)) + .with_interval(Duration::from_secs(60)) + .with_retries(5), + ) { + tracing::error!("Failed to set tcp keepalive: {e}"); + tracing::debug!("{e:?}"); + } + + let mut stream = BackTrackingIO::new(stream); let mapping = mapping.clone(); let db = db.clone(); tokio::spawn(async move { @@ -128,42 +143,44 @@ impl VHostServer { { Ok(a) => a, Err(_) => { - // stream.rewind(); - // return hyper::server::Server::builder( - // SingleAccept::new(stream), - // ) - // .serve(make_service_fn(|_| async { - // Ok::<_, Infallible>(service_fn(|req| async move { - // let host = req - // .headers() - // .get(http::header::HOST) - // .and_then(|host| host.to_str().ok()); - // let uri = Uri::from_parts({ - // let mut parts = - // req.uri().to_owned().into_parts(); - // parts.authority = 
host - // .map(FromStr::from_str) - // .transpose()?; - // parts - // })?; - // Response::builder() - // .status( - // http::StatusCode::TEMPORARY_REDIRECT, - // ) - // .header( - // http::header::LOCATION, - // uri.to_string(), - // ) - // .body(Body::default()) - // })) - // })) - // .await - // .with_kind(crate::ErrorKind::Network); - todo!() + stream.rewind(); + return hyper_util::server::conn::auto::Builder::new(hyper_util::rt::TokioExecutor::new()) + .serve_connection( + hyper_util::rt::TokioIo::new(stream), + hyper_util::service::TowerToHyperService::new(axum::Router::new().fallback( + axum::routing::method_routing::any(move |req: Request| async move { + match async move { + let host = req + .headers() + .get(http::header::HOST) + .and_then(|host| host.to_str().ok()); + let uri = Uri::from_parts({ + let mut parts = req.uri().to_owned().into_parts(); + parts.scheme = Some("https".parse()?); + parts.authority = host.map(FromStr::from_str).transpose()?; + parts + })?; + Response::builder() + .status(http::StatusCode::TEMPORARY_REDIRECT) + .header(http::header::LOCATION, uri.to_string()) + .body(Body::default()) + }.await { + Ok(a) => a, + Err(e) => { + tracing::warn!("Error redirecting http request on ssl port: {e}"); + tracing::error!("{e:?}"); + server_error(Error::new(e, ErrorKind::Network)) + } + } + }), + )), + ) + .await + .map_err(|e| Error::new(color_eyre::eyre::Report::msg(e), ErrorKind::Network)); } }; let target_name = - mid.client_hello().server_name().map(|s| s.to_owned()); + mid.client_hello().server_name().map(|s| s.into()); let target = { let mapping = mapping.read().await; mapping @@ -192,12 +209,23 @@ impl VHostServer { let mut tcp_stream = TcpStream::connect(target.addr).await?; let hostnames = target_name - .as_ref() .into_iter() - .map(InternedString::intern) - .chain(std::iter::once(InternedString::from_display( - &sock_addr.ip(), - ))) + .chain( + db.peek() + .await + .into_public() + .into_server_info() + .into_ip_info() + 
.into_entries()? + .into_iter() + .flat_map(|(_, ips)| [ + ips.as_ipv4().de().map(|ip| ip.map(IpAddr::V4)), + ips.as_ipv6().de().map(|ip| ip.map(IpAddr::V6)) + ]) + .filter_map(|a| a.transpose()) + .map(|a| a.map(|ip| InternedString::from_display(&ip))) + .collect::, _>>()?, + ) .collect(); let key = db .mutate(|v| { @@ -286,8 +314,12 @@ impl VHostServer { ) .await .with_kind(crate::ErrorKind::OpenSsl)?; + let mut accept = mid.into_stream(Arc::new(cfg)); + let io = accept.get_mut().unwrap(); + let buffered = io.stop_buffering(); + io.write_all(&buffered).await?; let mut tls_stream = - match mid.into_stream(Arc::new(cfg)).await { + match accept.await { Ok(a) => a, Err(e) => { tracing::trace!( "VHostController: failed to accept TLS connection on port {port}: {e}"); @@ -295,7 +327,6 @@ impl VHostServer { return Ok(()) } }; - tls_stream.get_mut().0.stop_buffering(); tokio::io::copy_bidirectional( &mut tls_stream, &mut target_stream, @@ -308,8 +339,12 @@ impl VHostServer { { cfg.alpn_protocols.push(proto.into()); } + let mut accept = mid.into_stream(Arc::new(cfg)); + let io = accept.get_mut().unwrap(); + let buffered = io.stop_buffering(); + io.write_all(&buffered).await?; let mut tls_stream = - match mid.into_stream(Arc::new(cfg)).await { + match accept.await { Ok(a) => a, Err(e) => { tracing::trace!( "VHostController: failed to accept TLS connection on port {port}: {e}"); @@ -317,7 +352,6 @@ impl VHostServer { return Ok(()) } }; - tls_stream.get_mut().0.stop_buffering(); tokio::io::copy_bidirectional( &mut tls_stream, &mut tcp_stream, @@ -326,8 +360,12 @@ impl VHostServer { } Err(AlpnInfo::Specified(alpn)) => { cfg.alpn_protocols = alpn.into_iter().map(|a| a.0).collect(); + let mut accept = mid.into_stream(Arc::new(cfg)); + let io = accept.get_mut().unwrap(); + let buffered = io.stop_buffering(); + io.write_all(&buffered).await?; let mut tls_stream = - match mid.into_stream(Arc::new(cfg)).await { + match accept.await { Ok(a) => a, Err(e) => { tracing::trace!( 
"VHostController: failed to accept TLS connection on port {port}: {e}"); @@ -335,7 +373,6 @@ impl VHostServer { return Ok(()) } }; - tls_stream.get_mut().0.stop_buffering(); tokio::io::copy_bidirectional( &mut tls_stream, &mut tcp_stream, @@ -376,7 +413,11 @@ impl VHostServer { .into(), }) } - async fn add(&self, hostname: Option, target: TargetInfo) -> Result, Error> { + async fn add( + &self, + hostname: Option, + target: TargetInfo, + ) -> Result, Error> { if let Some(mapping) = Weak::upgrade(&self.mapping) { let mut writable = mapping.write().await; let mut targets = writable.remove(&hostname).unwrap_or_default(); @@ -395,7 +436,7 @@ impl VHostServer { )) } } - async fn gc(&self, hostname: Option) -> Result<(), Error> { + async fn gc(&self, hostname: Option) -> Result<(), Error> { if let Some(mapping) = Weak::upgrade(&self.mapping) { let mut writable = mapping.write().await; let mut targets = writable.remove(&hostname).unwrap_or_default(); diff --git a/core/startos/src/net/web_server.rs b/core/startos/src/net/web_server.rs index a89aae92f..a9cfdf046 100644 --- a/core/startos/src/net/web_server.rs +++ b/core/startos/src/net/web_server.rs @@ -1,23 +1,84 @@ +use std::convert::Infallible; use std::net::SocketAddr; +use std::task::Poll; use std::time::Duration; +use axum::extract::Request; use axum::Router; use axum_server::Handle; +use bytes::Bytes; +use futures::future::ready; +use futures::FutureExt; use helpers::NonDetachingJoinHandle; -use tokio::sync::oneshot; +use tokio::sync::{oneshot, watch}; -use crate::context::{DiagnosticContext, InstallContext, RpcContext, SetupContext}; +use crate::context::{DiagnosticContext, InitContext, InstallContext, RpcContext, SetupContext}; use crate::net::static_server::{ - diag_ui_file_router, install_ui_file_router, main_ui_server_router, setup_ui_file_router, + diagnostic_ui_router, init_ui_router, install_ui_router, main_ui_router, refresher, + setup_ui_router, }; -use crate::Error; +use crate::prelude::*; + 
+#[derive(Clone)] +pub struct SwappableRouter(watch::Sender); +impl SwappableRouter { + pub fn new(router: Router) -> Self { + Self(watch::channel(router).0) + } + pub fn swap(&self, router: Router) { + let _ = self.0.send_replace(router); + } +} + +#[derive(Clone)] +pub struct SwappableRouterService(watch::Receiver); +impl tower_service::Service> for SwappableRouterService +where + B: axum::body::HttpBody + Send + 'static, + B::Error: Into, +{ + type Response = >>::Response; + type Error = >>::Error; + type Future = >>::Future; + #[inline] + fn poll_ready(&mut self, cx: &mut std::task::Context<'_>) -> Poll> { + let mut changed = self.0.changed().boxed(); + if changed.poll_unpin(cx).is_ready() { + return Poll::Ready(Ok(())); + } + drop(changed); + tower_service::Service::>::poll_ready(&mut self.0.borrow().clone(), cx) + } + fn call(&mut self, req: Request) -> Self::Future { + self.0.borrow().clone().call(req) + } +} + +impl tower_service::Service for SwappableRouter { + type Response = SwappableRouterService; + type Error = Infallible; + type Future = futures::future::Ready>; + #[inline] + fn poll_ready( + &mut self, + _: &mut std::task::Context<'_>, + ) -> std::task::Poll> { + Poll::Ready(Ok(())) + } + fn call(&mut self, _: T) -> Self::Future { + ready(Ok(SwappableRouterService(self.0.subscribe()))) + } +} pub struct WebServer { shutdown: oneshot::Sender<()>, + router: SwappableRouter, thread: NonDetachingJoinHandle<()>, } impl WebServer { - pub fn new(bind: SocketAddr, router: Router) -> Self { + pub fn new(bind: SocketAddr) -> Self { + let router = SwappableRouter::new(refresher()); + let thread_router = router.clone(); let (shutdown, shutdown_recv) = oneshot::channel(); let thread = NonDetachingJoinHandle::from(tokio::spawn(async move { let handle = Handle::new(); @@ -25,14 +86,18 @@ impl WebServer { server.http_builder().http1().preserve_header_case(true); server.http_builder().http1().title_case_headers(true); - if let (Err(e), _) = 
tokio::join!(server.serve(router.into_make_service()), async { + if let (Err(e), _) = tokio::join!(server.serve(thread_router), async { let _ = shutdown_recv.await; handle.graceful_shutdown(Some(Duration::from_secs(0))); }) { tracing::error!("Spawning hyper server error: {}", e); } })); - Self { shutdown, thread } + Self { + shutdown, + router, + thread, + } } pub async fn shutdown(self) { @@ -40,19 +105,27 @@ impl WebServer { self.thread.await.unwrap() } - pub fn main(bind: SocketAddr, ctx: RpcContext) -> Result { - Ok(Self::new(bind, main_ui_server_router(ctx))) + pub fn serve_router(&mut self, router: Router) { + self.router.swap(router) } - pub fn setup(bind: SocketAddr, ctx: SetupContext) -> Result { - Ok(Self::new(bind, setup_ui_file_router(ctx))) + pub fn serve_main(&mut self, ctx: RpcContext) { + self.serve_router(main_ui_router(ctx)) } - pub fn diagnostic(bind: SocketAddr, ctx: DiagnosticContext) -> Result { - Ok(Self::new(bind, diag_ui_file_router(ctx))) + pub fn serve_setup(&mut self, ctx: SetupContext) { + self.serve_router(setup_ui_router(ctx)) } - pub fn install(bind: SocketAddr, ctx: InstallContext) -> Result { - Ok(Self::new(bind, install_ui_file_router(ctx))) + pub fn serve_diagnostic(&mut self, ctx: DiagnosticContext) { + self.serve_router(diagnostic_ui_router(ctx)) + } + + pub fn serve_install(&mut self, ctx: InstallContext) { + self.serve_router(install_ui_router(ctx)) + } + + pub fn serve_init(&mut self, ctx: InitContext) { + self.serve_router(init_ui_router(ctx)) } } diff --git a/core/startos/src/net/wifi.rs b/core/startos/src/net/wifi.rs index 9de177bfe..298fad71f 100644 --- a/core/startos/src/net/wifi.rs +++ b/core/startos/src/net/wifi.rs @@ -8,7 +8,7 @@ use clap::Parser; use isocountry::CountryCode; use lazy_static::lazy_static; use regex::Regex; -use rpc_toolkit::{command, from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, 
Serialize}; use tokio::process::Command; use tokio::sync::RwLock; @@ -16,6 +16,9 @@ use tracing::instrument; use ts_rs::TS; use crate::context::{CliContext, RpcContext}; +use crate::db::model::public::WifiInfo; +use crate::db::model::Database; +use crate::net::utils::find_wifi_iface; use crate::prelude::*; use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; use crate::util::Invoke; @@ -34,57 +37,55 @@ pub fn wifi_manager(ctx: &RpcContext) -> Result<&WifiManager, Error> { } } -pub fn wifi() -> ParentHandler { +pub fn wifi() -> ParentHandler { ParentHandler::new() .subcommand( "add", from_fn_async(add) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "connect", from_fn_async(connect) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "delete", from_fn_async(delete) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "get", from_fn_async(get) .with_display_serializable() - .with_custom_display_fn::(|handle, result| { + .with_custom_display_fn(|handle, result| { Ok(display_wifi_info(handle.params, result)) }) - .with_remote_cli::(), + .with_call_remote::(), ) - .subcommand("country", country()) - .subcommand("available", available()) + .subcommand("country", country::()) + .subcommand("available", available::()) } -pub fn available() -> ParentHandler { +pub fn available() -> ParentHandler { ParentHandler::new().subcommand( "get", from_fn_async(get_available) .with_display_serializable() - .with_custom_display_fn::(|handle, result| { - Ok(display_wifi_list(handle.params, result)) - }) - .with_remote_cli::(), + .with_custom_display_fn(|handle, result| Ok(display_wifi_list(handle.params, result))) + .with_call_remote::(), ) } -pub fn country() -> ParentHandler { +pub fn country() -> ParentHandler { ParentHandler::new().subcommand( "set", from_fn_async(set_country) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } @@ -111,7 +112,7 @@ pub async fn 
add(ctx: RpcContext, AddParams { ssid, password }: AddParams) -> Re )); } async fn add_procedure( - db: PatchDb, + db: TypedPatchDb, wifi_manager: WifiManager, ssid: &Ssid, password: &Psk, @@ -137,6 +138,18 @@ pub async fn add(ctx: RpcContext, AddParams { ssid, password }: AddParams) -> Re ErrorKind::Wifi, )); } + ctx.db + .mutate(|db| { + db.as_public_mut() + .as_server_info_mut() + .as_wifi_mut() + .as_ssids_mut() + .mutate(|s| { + s.insert(ssid); + Ok(()) + }) + }) + .await?; Ok(()) } #[derive(Deserialize, Serialize, Parser, TS)] @@ -156,7 +169,7 @@ pub async fn connect(ctx: RpcContext, SsidParams { ssid }: SsidParams) -> Result )); } async fn connect_procedure( - db: PatchDb, + db: TypedPatchDb, wifi_manager: WifiManager, ssid: &Ssid, ) -> Result<(), Error> { @@ -190,6 +203,17 @@ pub async fn connect(ctx: RpcContext, SsidParams { ssid }: SsidParams) -> Result ErrorKind::Wifi, )); } + + ctx.db + .mutate(|db| { + let wifi = db.as_public_mut().as_server_info_mut().as_wifi_mut(); + wifi.as_ssids_mut().mutate(|s| { + s.insert(ssid.clone()); + Ok(()) + })?; + wifi.as_selected_mut().ser(&Some(ssid)) + }) + .await?; Ok(()) } @@ -215,11 +239,23 @@ pub async fn delete(ctx: RpcContext, SsidParams { ssid }: SsidParams) -> Result< } wpa_supplicant.remove_network(ctx.db.clone(), &ssid).await?; + + ctx.db + .mutate(|db| { + let wifi = db.as_public_mut().as_server_info_mut().as_wifi_mut(); + wifi.as_ssids_mut().mutate(|s| { + s.remove(&ssid.0); + Ok(()) + })?; + wifi.as_selected_mut() + .map_mutate(|s| Ok(s.filter(|s| s == &ssid.0))) + }) + .await?; Ok(()) } #[derive(serde::Serialize, serde::Deserialize)] #[serde(rename_all = "camelCase")] -pub struct WiFiInfo { +pub struct WifiListInfo { ssids: HashMap, connected: Option, country: Option, @@ -228,7 +264,7 @@ pub struct WiFiInfo { } #[derive(serde::Serialize, serde::Deserialize, Clone)] #[serde(rename_all = "camelCase")] -pub struct WifiListInfo { +pub struct WifiListInfoLow { strength: SignalStrength, security: Vec, } @@ 
-239,8 +275,8 @@ pub struct WifiListOut { strength: SignalStrength, security: Vec, } -pub type WifiList = HashMap; -fn display_wifi_info(params: WithIoFormat, info: WiFiInfo) { +pub type WifiList = HashMap; +fn display_wifi_info(params: WithIoFormat, info: WifiListInfo) { use prettytable::*; if let Some(format) = params.format { @@ -330,7 +366,7 @@ fn display_wifi_list(params: WithIoFormat, info: Vec) { // #[command(display(display_wifi_info))] #[instrument(skip_all)] -pub async fn get(ctx: RpcContext, _: Empty) -> Result { +pub async fn get(ctx: RpcContext, _: Empty) -> Result { let wifi_manager = wifi_manager(&ctx)?; let wpa_supplicant = wifi_manager.read().await; let (list_networks, current_res, country_res, ethernet_res, signal_strengths) = tokio::join!( @@ -368,7 +404,7 @@ pub async fn get(ctx: RpcContext, _: Empty) -> Result { }) .collect(); let current = current_res?; - Ok(WiFiInfo { + Ok(WifiListInfo { ssids, connected: current, country: country_res?, @@ -477,7 +513,7 @@ impl SignalStrength { } #[derive(Debug, Clone)] -pub struct WifiInfo { +pub struct WifiInfoLow { ssid: Ssid, device: Option, } @@ -604,7 +640,7 @@ impl WpaCli { Ok(()) } #[instrument(skip_all)] - pub async fn list_networks_low(&self) -> Result, Error> { + pub async fn list_networks_low(&self) -> Result, Error> { let r = Command::new("nmcli") .arg("-t") .arg("c") @@ -623,13 +659,13 @@ impl WpaCli { if !connection_type.contains("wireless") { return None; } - let info = WifiInfo { + let info = WifiInfoLow { ssid: name, device: device.map(|x| x.to_owned()), }; Some((uuid, info)) }) - .collect::>()) + .collect::>()) } #[instrument(skip_all)] @@ -652,7 +688,7 @@ impl WpaCli { values.next()?.split(' ').map(|x| x.to_owned()).collect(); Some(( ssid, - WifiListInfo { + WifiListInfoLow { strength: signal, security, }, @@ -681,12 +717,13 @@ impl WpaCli { Ok(()) } - pub async fn save_config(&mut self, db: PatchDb) -> Result<(), Error> { + pub async fn save_config(&mut self, db: TypedPatchDb) -> 
Result<(), Error> { let new_country = self.get_country_low().await?; db.mutate(|d| { d.as_public_mut() .as_server_info_mut() - .as_last_wifi_region_mut() + .as_wifi_mut() + .as_last_region_mut() .ser(&new_country) }) .await @@ -720,7 +757,11 @@ impl WpaCli { .collect()) } #[instrument(skip_all)] - pub async fn select_network(&mut self, db: PatchDb, ssid: &Ssid) -> Result { + pub async fn select_network( + &mut self, + db: TypedPatchDb, + ssid: &Ssid, + ) -> Result { let m_id = self.check_active_network(ssid).await?; match m_id { None => Err(Error::new( @@ -772,7 +813,11 @@ impl WpaCli { } } #[instrument(skip_all)] - pub async fn remove_network(&mut self, db: PatchDb, ssid: &Ssid) -> Result { + pub async fn remove_network( + &mut self, + db: TypedPatchDb, + ssid: &Ssid, + ) -> Result { let found_networks = self.find_networks(ssid).await?; if found_networks.is_empty() { return Ok(true); @@ -786,7 +831,7 @@ impl WpaCli { #[instrument(skip_all)] pub async fn set_add_network( &mut self, - db: PatchDb, + db: TypedPatchDb, ssid: &Ssid, psk: &Psk, ) -> Result<(), Error> { @@ -795,7 +840,12 @@ impl WpaCli { Ok(()) } #[instrument(skip_all)] - pub async fn add_network(&mut self, db: PatchDb, ssid: &Ssid, psk: &Psk) -> Result<(), Error> { + pub async fn add_network( + &mut self, + db: TypedPatchDb, + ssid: &Ssid, + psk: &Psk, + ) -> Result<(), Error> { self.add_network_low(ssid, psk).await?; self.save_config(db).await?; Ok(()) @@ -837,9 +887,12 @@ impl TypedValueParser for CountryCodeParser { #[instrument(skip_all)] pub async fn synchronize_wpa_supplicant_conf>( main_datadir: P, - wifi_iface: &str, - last_country_code: &Option, + wifi: &mut WifiInfo, ) -> Result<(), Error> { + wifi.interface = find_wifi_iface().await?; + let Some(wifi_iface) = &wifi.interface else { + return Ok(()); + }; let persistent = main_datadir.as_ref().join("system-connections"); tracing::debug!("persistent: {:?}", persistent); // let supplicant = Path::new("/etc/wpa_supplicant.conf"); @@ -863,7 +916,7 
@@ pub async fn synchronize_wpa_supplicant_conf>( .arg("up") .invoke(ErrorKind::Wifi) .await?; - if let Some(last_country_code) = last_country_code { + if let Some(last_country_code) = wifi.last_region { tracing::info!("Setting the region"); let _ = Command::new("iw") .arg("reg") diff --git a/core/startos/src/notifications.rs b/core/startos/src/notifications.rs index 8a5732462..c99ffb356 100644 --- a/core/startos/src/notifications.rs +++ b/core/startos/src/notifications.rs @@ -8,7 +8,7 @@ use clap::Parser; use color_eyre::eyre::eyre; use imbl_value::InternedString; use models::PackageId; -use rpc_toolkit::{command, from_fn_async, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tracing::instrument; use ts_rs::TS; @@ -21,31 +21,31 @@ use crate::util::clap::FromStrParser; use crate::util::serde::HandlerExtSerde; // #[command(subcommands(list, delete, delete_before, create))] -pub fn notification() -> ParentHandler { +pub fn notification() -> ParentHandler { ParentHandler::new() .subcommand( "list", from_fn_async(list) .with_display_serializable() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "delete", from_fn_async(delete) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "delete-before", from_fn_async(delete_before) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "create", from_fn_async(create) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } @@ -74,6 +74,7 @@ pub async fn list( .as_notifications() .as_entries()? .into_iter() + .rev() .take(limit); let notifs = records .into_iter() @@ -97,6 +98,7 @@ pub async fn list( .as_entries()? 
.into_iter() .filter(|(id, _)| *id < before) + .rev() .take(limit); records .into_iter() diff --git a/core/startos/src/os_install/gpt.rs b/core/startos/src/os_install/gpt.rs index 4139b4cf2..01703083b 100644 --- a/core/startos/src/os_install/gpt.rs +++ b/core/startos/src/os_install/gpt.rs @@ -87,7 +87,7 @@ pub async fn partition(disk: &DiskInfo, overwrite: bool) -> Result gpt::partition_types::LINUX_ROOT_X64, "aarch64" => gpt::partition_types::LINUX_ROOT_ARM_64, _ => gpt::partition_types::LINUX_FS, diff --git a/core/startos/src/os_install/mod.rs b/core/startos/src/os_install/mod.rs index 0abd1d23e..3d80f6cbd 100644 --- a/core/startos/src/os_install/mod.rs +++ b/core/startos/src/os_install/mod.rs @@ -3,7 +3,7 @@ use std::path::{Path, PathBuf}; use clap::Parser; use color_eyre::eyre::eyre; use models::Error; -use rpc_toolkit::{command, from_fn_async, AnyContext, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tokio::process::Command; use ts_rs::TS; @@ -13,11 +13,15 @@ use crate::context::{CliContext, InstallContext}; use crate::disk::mount::filesystem::bind::Bind; use crate::disk::mount::filesystem::block_dev::BlockDev; use crate::disk::mount::filesystem::efivarfs::EfiVarFs; +use crate::disk::mount::filesystem::overlayfs::OverlayFs; use crate::disk::mount::filesystem::{MountType, ReadWrite}; use crate::disk::mount::guard::{GenericMountGuard, MountGuard, TmpMountGuard}; use crate::disk::util::{DiskInfo, PartitionTable}; use crate::disk::OsPartitionInfo; -use crate::net::utils::{find_eth_iface, find_wifi_iface}; +use crate::net::utils::find_eth_iface; +use crate::prelude::*; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; +use crate::util::io::{open_file, TmpDir}; use crate::util::serde::IoFormat; use crate::util::Invoke; use crate::ARCH; @@ -25,33 +29,33 @@ use crate::ARCH; mod gpt; mod mbr; -pub fn install() -> ParentHandler { +pub fn install() -> 
ParentHandler { ParentHandler::new() - .subcommand("disk", disk()) + .subcommand("disk", disk::()) .subcommand( "execute", - from_fn_async(execute) + from_fn_async(execute::) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "reboot", from_fn_async(reboot) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } -pub fn disk() -> ParentHandler { +pub fn disk() -> ParentHandler { ParentHandler::new().subcommand( "list", from_fn_async(list) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) } -pub async fn list() -> Result, Error> { +pub async fn list(_: InstallContext) -> Result, Error> { let skip = match async { Ok::<_, Error>( Path::new( @@ -122,8 +126,8 @@ pub struct ExecuteParams { overwrite: bool, } -pub async fn execute( - _: AnyContext, +pub async fn execute( + _: C, ExecuteParams { logicalname, mut overwrite, @@ -140,10 +144,26 @@ pub async fn execute( ) })?; let eth_iface = find_eth_iface().await?; - let wifi_iface = find_wifi_iface().await?; overwrite |= disk.guid.is_none() && disk.partitions.iter().all(|p| p.guid.is_none()); + if !overwrite + && (disk + .guid + .as_ref() + .map_or(false, |g| g.starts_with("EMBASSY_")) + || disk + .partitions + .iter() + .flat_map(|p| p.guid.as_ref()) + .any(|g| g.starts_with("EMBASSY_"))) + { + return Err(Error::new( + eyre!("installing over versions before 0.3.6 is unsupported"), + ErrorKind::InvalidRequest, + )); + } + let part_info = partition(&mut disk, overwrite).await?; if let Some(efi) = &part_info.efi { @@ -216,58 +236,120 @@ pub async fn execute( .arg("rootfs") .invoke(crate::ErrorKind::DiskManagement) .await?; - let rootfs = TmpMountGuard::mount(&BlockDev::new(&part_info.root), ReadWrite).await?; + + let config_path = rootfs.path().join("config"); + if tokio::fs::metadata("/tmp/config.bak").await.is_ok() { + if tokio::fs::metadata(&config_path).await.is_ok() { + tokio::fs::remove_dir_all(&config_path).await?; + } Command::new("cp") .arg("-r") 
.arg("/tmp/config.bak") - .arg(rootfs.path().join("config")) + .arg(&config_path) .invoke(crate::ErrorKind::Filesystem) .await?; } else { - tokio::fs::create_dir(rootfs.path().join("config")).await?; + tokio::fs::create_dir_all(&config_path).await?; } - tokio::fs::create_dir(rootfs.path().join("next")).await?; - let current = rootfs.path().join("current"); - tokio::fs::create_dir(¤t).await?; - tokio::fs::create_dir(current.join("boot")).await?; - let boot = MountGuard::mount( + let images_path = rootfs.path().join("images"); + tokio::fs::create_dir_all(&images_path).await?; + let image_path = images_path + .join(hex::encode( + &MultiCursorFile::from(open_file("/run/live/medium/live/filesystem.squashfs").await?) + .blake3_mmap() + .await? + .as_bytes()[..16], + )) + .with_extension("rootfs"); + tokio::fs::copy("/run/live/medium/live/filesystem.squashfs", &image_path).await?; + // TODO: check hash of fs + let unsquash_target = TmpDir::new().await?; + let bootfs = MountGuard::mount( &BlockDev::new(&part_info.boot), - current.join("boot"), + unsquash_target.join("boot"), ReadWrite, ) .await?; - - let efi = if let Some(efi) = &part_info.efi { - Some(MountGuard::mount(&BlockDev::new(efi), current.join("boot/efi"), ReadWrite).await?) 
- } else { - None - }; - Command::new("unsquashfs") .arg("-n") .arg("-f") .arg("-d") - .arg(¤t) + .arg(&*unsquash_target) .arg("/run/live/medium/live/filesystem.squashfs") + .arg("boot") .invoke(crate::ErrorKind::Filesystem) .await?; + bootfs.unmount(true).await?; + unsquash_target.delete().await?; + Command::new("ln") + .arg("-rsf") + .arg(&image_path) + .arg(config_path.join("current.rootfs")) + .invoke(ErrorKind::DiskManagement) + .await?; tokio::fs::write( rootfs.path().join("config/config.yaml"), IoFormat::Yaml.to_vec(&ServerConfig { os_partitions: Some(part_info.clone()), ethernet_interface: Some(eth_iface), - wifi_interface: wifi_iface, ..Default::default() })?, ) .await?; + let lower = TmpMountGuard::mount(&BlockDev::new(&image_path), MountType::ReadOnly).await?; + let work = config_path.join("work"); + let upper = config_path.join("overlay"); + let overlay = + TmpMountGuard::mount(&OverlayFs::new(&lower.path(), &upper, &work), ReadWrite).await?; + + let boot = MountGuard::mount( + &BlockDev::new(&part_info.boot), + overlay.path().join("boot"), + ReadWrite, + ) + .await?; + let efi = if let Some(efi) = &part_info.efi { + Some( + MountGuard::mount( + &BlockDev::new(efi), + overlay.path().join("boot/efi"), + ReadWrite, + ) + .await?, + ) + } else { + None + }; + let start_os_fs = MountGuard::mount( + &Bind::new(rootfs.path()), + overlay.path().join("media/startos/root"), + MountType::ReadOnly, + ) + .await?; + let dev = MountGuard::mount(&Bind::new("/dev"), overlay.path().join("dev"), ReadWrite).await?; + let proc = + MountGuard::mount(&Bind::new("/proc"), overlay.path().join("proc"), ReadWrite).await?; + let sys = MountGuard::mount(&Bind::new("/sys"), overlay.path().join("sys"), ReadWrite).await?; + let efivarfs = if tokio::fs::metadata("/sys/firmware/efi").await.is_ok() { + Some( + MountGuard::mount( + &EfiVarFs, + overlay.path().join("sys/firmware/efi/efivars"), + ReadWrite, + ) + .await?, + ) + } else { + None + }; + tokio::fs::write( - 
current.join("etc/fstab"), + overlay.path().join("etc/fstab"), format!( include_str!("fstab.template"), boot = part_info.boot.display(), @@ -282,46 +364,24 @@ pub async fn execute( .await?; Command::new("chroot") - .arg(¤t) + .arg(overlay.path()) .arg("systemd-machine-id-setup") .invoke(crate::ErrorKind::Systemd) .await?; Command::new("chroot") - .arg(¤t) + .arg(overlay.path()) .arg("ssh-keygen") .arg("-A") .invoke(crate::ErrorKind::OpenSsh) .await?; - let start_os_fs = MountGuard::mount( - &Bind::new(rootfs.path()), - current.join("media/embassy/embassyfs"), - MountType::ReadOnly, - ) - .await?; - let dev = MountGuard::mount(&Bind::new("/dev"), current.join("dev"), ReadWrite).await?; - let proc = MountGuard::mount(&Bind::new("/proc"), current.join("proc"), ReadWrite).await?; - let sys = MountGuard::mount(&Bind::new("/sys"), current.join("sys"), ReadWrite).await?; - let efivarfs = if tokio::fs::metadata("/sys/firmware/efi").await.is_ok() { - Some( - MountGuard::mount( - &EfiVarFs, - current.join("sys/firmware/efi/efivars"), - ReadWrite, - ) - .await?, - ) - } else { - None - }; - let mut install = Command::new("chroot"); - install.arg(¤t).arg("grub-install"); + install.arg(overlay.path()).arg("grub-install"); if tokio::fs::metadata("/sys/firmware/efi").await.is_err() { install.arg("--target=i386-pc"); } else { - match *ARCH { + match ARCH { "x86_64" => install.arg("--target=x86_64-efi"), "aarch64" => install.arg("--target=arm64-efi"), _ => &mut install, @@ -333,7 +393,7 @@ pub async fn execute( .await?; Command::new("chroot") - .arg(¤t) + .arg(overlay.path()) .arg("update-grub2") .invoke(crate::ErrorKind::Grub) .await?; @@ -348,7 +408,13 @@ pub async fn execute( efi.unmount(false).await?; } boot.unmount(false).await?; + + overlay.unmount().await?; + tokio::fs::remove_dir_all(&work).await?; + lower.unmount().await?; + rootfs.unmount().await?; + Ok(()) } diff --git a/core/startos/src/progress.rs b/core/startos/src/progress.rs index eec637575..cc3257132 100644 --- 
a/core/startos/src/progress.rs +++ b/core/startos/src/progress.rs @@ -1,28 +1,33 @@ use std::panic::UnwindSafe; -use std::sync::Arc; use std::time::Duration; -use futures::Future; +use futures::future::pending; +use futures::stream::BoxStream; +use futures::{Future, FutureExt, StreamExt, TryFutureExt}; +use helpers::NonDetachingJoinHandle; use imbl_value::{InOMap, InternedString}; use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; use itertools::Itertools; use serde::{Deserialize, Serialize}; use tokio::io::{AsyncSeek, AsyncWrite}; -use tokio::sync::{mpsc, watch}; +use tokio::sync::watch; use ts_rs::TS; -use crate::db::model::DatabaseModel; +use crate::db::model::{Database, DatabaseModel}; use crate::prelude::*; lazy_static::lazy_static! { static ref SPINNER: ProgressStyle = ProgressStyle::with_template("{spinner} {msg}...").unwrap(); - static ref PERCENTAGE: ProgressStyle = ProgressStyle::with_template("{msg} {percent}% {wide_bar} [{bytes}/{total_bytes}] [{binary_bytes_per_sec} {eta}]").unwrap(); + static ref PERCENTAGE: ProgressStyle = ProgressStyle::with_template("{msg} {percent}% {wide_bar} [{human_pos}/{human_len}] [{per_sec} {eta}]").unwrap(); + static ref PERCENTAGE_BYTES: ProgressStyle = ProgressStyle::with_template("{msg} {percent}% {wide_bar} [{binary_bytes}/{binary_total_bytes}] [{binary_bytes_per_sec} {eta}]").unwrap(); + static ref STEPS: ProgressStyle = ProgressStyle::with_template("{spinner} {wide_msg} [{human_pos}/?] [{per_sec} {elapsed}]").unwrap(); static ref BYTES: ProgressStyle = ProgressStyle::with_template("{spinner} {wide_msg} [{bytes}/?] 
[{binary_bytes_per_sec} {elapsed}]").unwrap(); } #[derive(Debug, Clone, Copy, Deserialize, Serialize, PartialEq, Eq, PartialOrd, Ord, TS)] #[serde(untagged)] pub enum Progress { + NotStarted(()), Complete(bool), Progress { #[ts(type = "number")] @@ -33,10 +38,13 @@ pub enum Progress { } impl Progress { pub fn new() -> Self { - Progress::Complete(false) + Progress::NotStarted(()) } - pub fn update_bar(self, bar: &ProgressBar) { + pub fn update_bar(self, bar: &ProgressBar, bytes: bool) { match self { + Self::NotStarted(()) => { + bar.set_style(SPINNER.clone()); + } Self::Complete(false) => { bar.set_style(SPINNER.clone()); bar.tick(); @@ -45,7 +53,11 @@ impl Progress { bar.finish(); } Self::Progress { done, total: None } => { - bar.set_style(BYTES.clone()); + if bytes { + bar.set_style(BYTES.clone()); + } else { + bar.set_style(STEPS.clone()); + } bar.set_position(done); bar.tick(); } @@ -53,16 +65,26 @@ impl Progress { done, total: Some(total), } => { - bar.set_style(PERCENTAGE.clone()); + if bytes { + bar.set_style(PERCENTAGE_BYTES.clone()); + } else { + bar.set_style(PERCENTAGE.clone()); + } bar.set_position(done); bar.set_length(total); bar.tick(); } } } + pub fn start(&mut self) { + *self = match *self { + Self::NotStarted(()) => Self::Complete(false), + a => a, + }; + } pub fn set_done(&mut self, done: u64) { *self = match *self { - Self::Complete(false) => Self::Progress { done, total: None }, + Self::Complete(false) | Self::NotStarted(()) => Self::Progress { done, total: None }, Self::Progress { mut done, total } => { if let Some(total) = total { if done > total { @@ -76,7 +98,7 @@ impl Progress { } pub fn set_total(&mut self, total: u64) { *self = match *self { - Self::Complete(false) => Self::Progress { + Self::Complete(false) | Self::NotStarted(()) => Self::Progress { done: 0, total: Some(total), }, @@ -104,12 +126,15 @@ impl Progress { pub fn complete(&mut self) { *self = Self::Complete(true); } + pub fn is_complete(&self) -> bool { + matches!(self, 
Self::Complete(true)) + } } impl std::ops::Add for Progress { type Output = Self; fn add(self, rhs: u64) -> Self::Output { match self { - Self::Complete(false) => Self::Progress { + Self::Complete(false) | Self::NotStarted(()) => Self::Progress { done: rhs, total: None, }, @@ -155,39 +180,23 @@ impl FullProgress { } } +#[derive(Clone)] pub struct FullProgressTracker { - overall: Arc>, - overall_recv: watch::Receiver, - phases: InOMap>, - new_phase: ( - mpsc::UnboundedSender<(InternedString, watch::Receiver)>, - mpsc::UnboundedReceiver<(InternedString, watch::Receiver)>, - ), + overall: watch::Sender, + phases: watch::Sender>>, } impl FullProgressTracker { pub fn new() -> Self { - let (overall, overall_recv) = watch::channel(Progress::new()); - Self { - overall: Arc::new(overall), - overall_recv, - phases: InOMap::new(), - new_phase: mpsc::unbounded_channel(), - } + let (overall, _) = watch::channel(Progress::new()); + let (phases, _) = watch::channel(InOMap::new()); + Self { overall, phases } } - fn fill_phases(&mut self) -> bool { - let mut changed = false; - while let Ok((name, phase)) = self.new_phase.1.try_recv() { - self.phases.insert(name, phase); - changed = true; - } - changed - } - pub fn snapshot(&mut self) -> FullProgress { - self.fill_phases(); + pub fn snapshot(&self) -> FullProgress { FullProgress { overall: *self.overall.borrow(), phases: self .phases + .borrow() .iter() .map(|(name, progress)| NamedProgress { name: name.clone(), @@ -196,29 +205,76 @@ impl FullProgressTracker { .collect(), } } - pub async fn changed(&mut self) { - if self.fill_phases() { - return; - } - let phases = self - .phases - .iter_mut() - .map(|(_, p)| Box::pin(p.changed())) - .collect_vec(); - tokio::select! 
{ - _ = self.overall_recv.changed() => (), - _ = futures::future::select_all(phases) => (), - } - } - pub fn handle(&self) -> FullProgressTrackerHandle { - FullProgressTrackerHandle { - overall: self.overall.clone(), - new_phase: self.new_phase.0.clone(), + pub fn stream(&self, min_interval: Option) -> BoxStream<'static, FullProgress> { + struct StreamState { + overall: watch::Receiver, + phases_recv: watch::Receiver>>, + phases: InOMap>, } + let mut overall = self.overall.subscribe(); + overall.mark_changed(); // make sure stream starts with a value + let phases_recv = self.phases.subscribe(); + let phases = phases_recv.borrow().clone(); + let state = StreamState { + overall, + phases_recv, + phases, + }; + futures::stream::unfold( + state, + move |StreamState { + mut overall, + mut phases_recv, + mut phases, + }| async move { + let changed = phases + .iter_mut() + .map(|(_, p)| async move { p.changed().or_else(|_| pending()).await }.boxed()) + .chain([overall.changed().boxed()]) + .chain([phases_recv.changed().boxed()]) + .map(|fut| fut.map(|r| r.unwrap_or_default())) + .collect_vec(); + if let Some(min_interval) = min_interval { + tokio::join!( + tokio::time::sleep(min_interval), + futures::future::select_all(changed), + ); + } else { + futures::future::select_all(changed).await; + } + + for (name, phase) in &*phases_recv.borrow_and_update() { + if !phases.contains_key(name) { + phases.insert(name.clone(), phase.clone()); + } + } + + let o = *overall.borrow_and_update(); + + Some(( + FullProgress { + overall: o, + phases: phases + .iter_mut() + .map(|(name, progress)| NamedProgress { + name: name.clone(), + progress: *progress.borrow_and_update(), + }) + .collect(), + }, + StreamState { + overall, + phases_recv, + phases, + }, + )) + }, + ) + .boxed() } pub fn sync_to_db( - mut self, - db: PatchDb, + &self, + db: TypedPatchDb, deref: DerefFn, min_interval: Option, ) -> impl Future> + 'static @@ -226,14 +282,14 @@ impl FullProgressTracker { DerefFn: Fn(&mut 
DatabaseModel) -> Option<&mut Model> + 'static, for<'a> &'a DerefFn: UnwindSafe + Send, { + let mut stream = self.stream(min_interval); async move { - loop { - let progress = self.snapshot(); + while let Some(progress) = stream.next().await { if db .mutate(|v| { if let Some(p) = deref(v) { p.ser(&progress)?; - Ok(false) + Ok(progress.overall.is_complete()) } else { Ok(true) } @@ -242,25 +298,23 @@ impl FullProgressTracker { { break; } - tokio::join!(self.changed(), async { - if let Some(interval) = min_interval { - tokio::time::sleep(interval).await - } else { - futures::future::ready(()).await - } - }); } Ok(()) } } -} - -#[derive(Clone)] -pub struct FullProgressTrackerHandle { - overall: Arc>, - new_phase: mpsc::UnboundedSender<(InternedString, watch::Receiver)>, -} -impl FullProgressTrackerHandle { + pub fn progress_bar_task(&self, name: &str) -> NonDetachingJoinHandle<()> { + let mut stream = self.stream(None); + let mut bar = PhasedProgressBar::new(name); + tokio::spawn(async move { + while let Some(progress) = stream.next().await { + bar.update(&progress); + if progress.overall.is_complete() { + break; + } + } + }) + .into() + } pub fn add_phase( &self, name: InternedString, @@ -271,7 +325,9 @@ impl FullProgressTrackerHandle { .send_modify(|o| o.add_total(overall_contribution)); } let (send, recv) = watch::channel(Progress::new()); - let _ = self.new_phase.send((name, recv)); + self.phases.send_modify(|p| { + p.insert(name, recv); + }); PhaseProgressTrackerHandle { overall: self.overall.clone(), overall_contribution, @@ -285,7 +341,7 @@ impl FullProgressTrackerHandle { } pub struct PhaseProgressTrackerHandle { - overall: Arc>, + overall: watch::Sender, overall_contribution: Option, contributed: u64, progress: watch::Sender, @@ -308,6 +364,9 @@ impl PhaseProgressTrackerHandle { } } } + pub fn start(&mut self) { + self.progress.send_modify(|p| p.start()); + } pub fn set_done(&mut self, done: u64) { self.progress.send_modify(|p| p.set_done(done)); 
self.update_overall(); @@ -324,6 +383,12 @@ impl PhaseProgressTrackerHandle { self.progress.send_modify(|p| p.complete()); self.update_overall(); } + pub fn writer(self, writer: W) -> ProgressTrackerWriter { + ProgressTrackerWriter { + writer, + progress: self, + } + } } impl std::ops::AddAssign for PhaseProgressTrackerHandle { fn add_assign(&mut self, rhs: u64) { @@ -435,7 +500,7 @@ impl PhasedProgressBar { ); } } - progress.overall.update_bar(&self.overall); + progress.overall.update_bar(&self.overall, false); for (name, bar) in self.phases.iter() { if let Some(progress) = progress.phases.iter().find_map(|p| { if &p.name == name { @@ -444,7 +509,7 @@ impl PhasedProgressBar { None } }) { - progress.update_bar(bar); + progress.update_bar(bar, true); } } } diff --git a/core/startos/src/properties.rs b/core/startos/src/properties.rs index 5aa8a01d4..e24b14965 100644 --- a/core/startos/src/properties.rs +++ b/core/startos/src/properties.rs @@ -1,7 +1,6 @@ use clap::Parser; -use imbl_value::Value; +use imbl_value::{json, Value}; use models::PackageId; -use rpc_toolkit::command; use serde::{Deserialize, Serialize}; use crate::context::RpcContext; @@ -24,7 +23,10 @@ pub async fn properties( PropertiesParam { id }: PropertiesParam, ) -> Result { match &*ctx.services.get(&id).await { - Some(service) => service.properties().await, + Some(service) => Ok(json!({ + "version": 2, + "data": service.properties().await? 
+ })), None => Err(Error::new( eyre!("Could not find a service with id {id}"), ErrorKind::NotFound, diff --git a/core/startos/src/registry/admin.rs b/core/startos/src/registry/admin.rs index 50f83dc5c..8125580a4 100644 --- a/core/startos/src/registry/admin.rs +++ b/core/startos/src/registry/admin.rs @@ -1,234 +1,256 @@ +use std::collections::BTreeMap; use std::path::PathBuf; -use std::time::Duration; use clap::Parser; -use color_eyre::eyre::eyre; -use console::style; -use futures::StreamExt; -use indicatif::{ProgressBar, ProgressStyle}; -use reqwest::{header, Body, Client, Url}; -use rpc_toolkit::command; +use itertools::Itertools; +use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use ts_rs::TS; use crate::context::CliContext; -use crate::s9pk::S9pk; -use crate::{Error, ErrorKind}; +use crate::prelude::*; +use crate::registry::context::RegistryContext; +use crate::registry::signer::sign::AnyVerifyingKey; +use crate::registry::signer::{ContactInfo, SignerInfo}; +use crate::registry::RegistryDatabase; +use crate::rpc_continuations::Guid; +use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; -async fn registry_user_pass(location: &str) -> Result<(Url, String, String), Error> { - let mut url = Url::parse(location)?; - let user = url.username().to_string(); - let pass = url.password().map(str::to_string); - if user.is_empty() || url.path() != "/" { - return Err(Error::new( - eyre!("{location:?} is not like \"https://user@registry.example.com/\""), - ErrorKind::ParseUrl, - )); +pub fn admin_api() -> ParentHandler { + ParentHandler::new() + .subcommand("signer", signers_api::()) + .subcommand("add", from_fn_async(add_admin).no_cli()) + .subcommand("add", from_fn_async(cli_add_admin).no_display()) + .subcommand( + "list", + from_fn_async(list_admins) + .with_display_serializable() + .with_custom_display_fn(|handle, result| Ok(display_signers(handle.params, result))) + 
.with_call_remote::(), + ) +} + +fn signers_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "list", + from_fn_async(list_signers) + .with_metadata("admin", Value::Bool(true)) + .with_display_serializable() + .with_custom_display_fn(|handle, result| Ok(display_signers(handle.params, result))) + .with_call_remote::(), + ) + .subcommand( + "add", + from_fn_async(add_signer) + .with_metadata("admin", Value::Bool(true)) + .no_cli(), + ) + .subcommand("add", from_fn_async(cli_add_signer)) +} + +impl Model> { + pub fn get_signer(&self, key: &AnyVerifyingKey) -> Result { + self.as_entries()? + .into_iter() + .map(|(guid, s)| Ok::<_, Error>((guid, s.as_keys().de()?))) + .filter_ok(|(_, s)| s.contains(key)) + .next() + .transpose()? + .map(|(a, _)| a) + .ok_or_else(|| Error::new(eyre!("unknown signer"), ErrorKind::Authorization)) } - let _ = url.set_username(""); - let _ = url.set_password(None); - let pass = match pass { - Some(p) => p, - None => { - let pass_prompt = format!("{} Password for {user}: ", style("?").yellow()); - tokio::task::spawn_blocking(move || rpassword::prompt_password(pass_prompt)) - .await - .unwrap()? + pub fn get_signer_info(&self, key: &AnyVerifyingKey) -> Result<(Guid, SignerInfo), Error> { + self.as_entries()? + .into_iter() + .map(|(guid, s)| Ok::<_, Error>((guid, s.de()?))) + .filter_ok(|(_, s)| s.keys.contains(key)) + .next() + .transpose()? + .ok_or_else(|| Error::new(eyre!("unknown signer"), ErrorKind::Authorization)) + } + + pub fn add_signer(&mut self, signer: &SignerInfo) -> Result { + if let Some((guid, s)) = self + .as_entries()? + .into_iter() + .map(|(guid, s)| Ok::<_, Error>((guid, s.de()?))) + .filter_ok(|(_, s)| !s.keys.is_disjoint(&signer.keys)) + .next() + .transpose()? 
+ { + return Err(Error::new( + eyre!( + "A signer {} ({}) already exists with a matching key", + guid, + s.name + ), + ErrorKind::InvalidRequest, + )); } - }; - Ok((url, user.to_string(), pass.to_string())) -} - -#[derive(serde::Serialize, Debug)] -struct Package { - id: String, - version: String, - arches: Option>, -} - -async fn do_index( - httpc: &Client, - mut url: Url, - user: &str, - pass: &str, - pkg: &Package, -) -> Result<(), Error> { - url.set_path("/admin/v0/index"); - let req = httpc - .post(url) - .header(header::ACCEPT, "text/plain") - .basic_auth(user, Some(pass)) - .json(pkg) - .build()?; - let res = httpc.execute(req).await?; - if !res.status().is_success() { - let info = res.text().await?; - return Err(Error::new(eyre!("{}", info), ErrorKind::Registry)); + let id = Guid::new(); + self.insert(&id, signer)?; + Ok(id) } - Ok(()) } -async fn do_upload( - httpc: &Client, - mut url: Url, - user: &str, - pass: &str, - pkg_id: &str, - body: Body, -) -> Result<(), Error> { - url.set_path("/admin/v0/upload"); - let req = httpc - .post(url) - .header(header::ACCEPT, "text/plain") - .query(&[("id", pkg_id)]) - .basic_auth(user, Some(pass)) - .body(body) - .build()?; - let res = httpc.execute(req).await?; - if !res.status().is_success() { - let info = res.text().await?; - return Err(Error::new(eyre!("{}", info), ErrorKind::Registry)); +pub async fn list_signers(ctx: RegistryContext) -> Result, Error> { + ctx.db.peek().await.into_index().into_signers().de() +} + +pub fn display_signers(params: WithIoFormat, signers: BTreeMap) { + use prettytable::*; + + if let Some(format) = params.format { + return display_serializable(format, signers); } - Ok(()) + + let mut table = Table::new(); + table.add_row(row![bc => + "ID", + "NAME", + "CONTACT", + "KEYS", + ]); + for (id, info) in signers { + table.add_row(row![ + id.as_ref(), + &info.name, + &info.contact.into_iter().join("\n"), + &info.keys.into_iter().join("\n"), + ]); + } + table.print_tty(false).unwrap(); } 
-#[derive(Deserialize, Serialize, Parser)] -#[serde(rename_all = "camelCase")] +pub async fn add_signer(ctx: RegistryContext, signer: SignerInfo) -> Result { + ctx.db + .mutate(|db| db.as_index_mut().as_signers_mut().add_signer(&signer)) + .await +} + +#[derive(Debug, Deserialize, Serialize, Parser)] #[command(rename_all = "kebab-case")] -pub struct PublishParams { - location: String, - path: PathBuf, - #[arg(name = "no-verify", long = "no-verify")] - no_verify: bool, - #[arg(name = "no-upload", long = "no-upload")] - no_upload: bool, - #[arg(name = "no-index", long = "no-index")] - no_index: bool, +#[serde(rename_all = "camelCase")] +pub struct CliAddSignerParams { + #[arg(long = "name", short = 'n')] + pub name: String, + #[arg(long = "contact", short = 'c')] + pub contact: Vec, + #[arg(long = "key")] + pub keys: Vec, + pub database: Option, } -pub async fn publish( - _: CliContext, - PublishParams { - location, - no_index, - no_upload, - no_verify, - path, - }: PublishParams, -) -> Result<(), Error> { - // Prepare for progress bars. - let bytes_bar_style = - ProgressStyle::with_template("{percent}% {wide_bar} [{bytes}/{total_bytes}] [{eta}]") - .unwrap(); - let plain_line_style = - ProgressStyle::with_template("{prefix:.bold.dim} {wide_msg}...").unwrap(); - let spinner_line_style = - ProgressStyle::with_template("{prefix:.bold.dim} {spinner} {wide_msg}...").unwrap(); - - // Read the file to get manifest information and check validity.. - // Open file right away so it can not change out from under us. 
- let file = tokio::fs::File::open(&path).await?; - - let manifest = if no_verify { - let pb = ProgressBar::new(1) - .with_style(spinner_line_style.clone()) - .with_prefix("[1/3]") - .with_message("Querying s9pk"); - pb.enable_steady_tick(Duration::from_millis(200)); - let s9pk = S9pk::open(&path, None).await?; - let m = s9pk.as_manifest().clone(); - pb.set_style(plain_line_style.clone()); - pb.abandon(); - m - } else { - let pb = ProgressBar::new(1) - .with_style(spinner_line_style.clone()) - .with_prefix("[1/3]") - .with_message("Verifying s9pk"); - pb.enable_steady_tick(Duration::from_millis(200)); - let s9pk = S9pk::open(&path, None).await?; - // s9pk.validate().await?; - todo!(); - let m = s9pk.as_manifest().clone(); - pb.set_style(plain_line_style.clone()); - pb.abandon(); - m +pub async fn cli_add_signer( + HandlerArgs { + context: ctx, + parent_method, + method, + params: + CliAddSignerParams { + name, + contact, + keys, + database, + }, + .. + }: HandlerArgs, +) -> Result { + let signer = SignerInfo { + name, + contact, + keys: keys.into_iter().collect(), }; - let pkg = Package { - id: manifest.id.to_string(), - version: manifest.version.to_string(), - arches: manifest.hardware_requirements.arch.clone(), - }; - println!("{} id = {}", style(">").green(), pkg.id); - println!("{} version = {}", style(">").green(), pkg.version); - if let Some(arches) = &pkg.arches { - println!("{} arches = {:?}", style(">").green(), arches); + if let Some(database) = database { + TypedPatchDb::::load(PatchDb::open(database).await?) + .await? + .mutate(|db| db.as_index_mut().as_signers_mut().add_signer(&signer)) + .await } else { - println!( - "{} No architecture listed in hardware_requirements", - style(">").red() - ); + from_value( + ctx.call_remote::( + &parent_method.into_iter().chain(method).join("."), + to_value(&signer)?, + ) + .await?, + ) } +} - // Process the url and get the user's password. 
- let (registry, user, pass) = registry_user_pass(&location).await?; +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct AddAdminParams { + pub signer: Guid, +} - // Now prepare a stream of the file which will show a progress bar as it is consumed. - let file_size = file.metadata().await?.len(); - let file_stream = tokio_util::io::ReaderStream::new(file); - ProgressBar::new(0) - .with_style(plain_line_style.clone()) - .with_prefix("[2/3]") - .with_message("Uploading s9pk") - .abandon(); - let pb = ProgressBar::new(file_size).with_style(bytes_bar_style.clone()); - let stream_pb = pb.clone(); - let file_stream = file_stream.inspect(move |bytes| { - if let Ok(bytes) = bytes { - stream_pb.inc(bytes.len() as u64); - } - }); +pub async fn add_admin( + ctx: RegistryContext, + AddAdminParams { signer }: AddAdminParams, +) -> Result<(), Error> { + ctx.db + .mutate(|db| { + ensure_code!( + db.as_index().as_signers().contains_key(&signer)?, + ErrorKind::InvalidRequest, + "unknown signer {signer}" + ); + db.as_admins_mut().mutate(|a| Ok(a.insert(signer)))?; + Ok(()) + }) + .await +} - let httpc = Client::builder().build().unwrap(); - // And upload! - if no_upload { - println!("{} Skipping upload", style(">").yellow()); +#[derive(Debug, Deserialize, Serialize, Parser)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +pub struct CliAddAdminParams { + pub signer: Guid, + pub database: Option, +} + +pub async fn cli_add_admin( + HandlerArgs { + context: ctx, + parent_method, + method, + params: CliAddAdminParams { signer, database }, + .. + }: HandlerArgs, +) -> Result<(), Error> { + if let Some(database) = database { + TypedPatchDb::::load(PatchDb::open(database).await?) + .await? 
+ .mutate(|db| { + ensure_code!( + db.as_index().as_signers().contains_key(&signer)?, + ErrorKind::InvalidRequest, + "unknown signer {signer}" + ); + db.as_admins_mut().mutate(|a| Ok(a.insert(signer)))?; + Ok(()) + }) + .await?; } else { - do_upload( - &httpc, - registry.clone(), - &user, - &pass, - &pkg.id, - Body::wrap_stream(file_stream), + ctx.call_remote::( + &parent_method.into_iter().chain(method).join("."), + to_value(&AddAdminParams { signer })?, ) .await?; } - pb.finish_and_clear(); - - // Also index, so it will show up in the registry. - let pb = ProgressBar::new(0) - .with_style(spinner_line_style.clone()) - .with_prefix("[3/3]") - .with_message("Indexing registry"); - pb.enable_steady_tick(Duration::from_millis(200)); - if no_index { - println!("{} Skipping index", style(">").yellow()); - } else { - do_index(&httpc, registry.clone(), &user, &pass, &pkg).await?; - } - pb.set_style(plain_line_style.clone()); - pb.abandon(); - - // All done - if !no_index { - println!( - "{} Package {} is now published to {}", - style(">").green(), - pkg.id, - registry - ); - } Ok(()) } + +pub async fn list_admins(ctx: RegistryContext) -> Result, Error> { + let db = ctx.db.peek().await; + let admins = db.as_admins().de()?; + Ok(db + .into_index() + .into_signers() + .de()? 
+ .into_iter() + .filter(|(id, _)| admins.contains(id)) + .collect()) +} diff --git a/core/startos/src/registry/asset.rs b/core/startos/src/registry/asset.rs new file mode 100644 index 000000000..fb6dd59fc --- /dev/null +++ b/core/startos/src/registry/asset.rs @@ -0,0 +1,113 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use chrono::{DateTime, Utc}; +use helpers::NonDetachingJoinHandle; +use reqwest::Client; +use serde::{Deserialize, Serialize}; +use tokio::io::AsyncWrite; +use ts_rs::TS; +use url::Url; + +use crate::prelude::*; +use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment; +use crate::registry::signer::commitment::{Commitment, Digestable}; +use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey}; +use crate::registry::signer::AcceptSigners; +use crate::s9pk::merkle_archive::source::http::HttpSource; +use crate::s9pk::merkle_archive::source::{ArchiveSource, Section}; +use crate::s9pk::S9pk; +use crate::upload::UploadingFile; + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct RegistryAsset { + #[ts(type = "string")] + pub published_at: DateTime, + #[ts(type = "string")] + pub url: Url, + pub commitment: Commitment, + pub signatures: HashMap, +} +impl RegistryAsset { + pub fn all_signers(&self) -> AcceptSigners { + AcceptSigners::All( + self.signatures + .keys() + .cloned() + .map(AcceptSigners::Signer) + .collect(), + ) + } +} +impl RegistryAsset { + pub fn validate(&self, context: &str, mut accept: AcceptSigners) -> Result<&Commitment, Error> { + for (signer, signature) in &self.signatures { + accept.process_signature(signer, &self.commitment, context, signature)?; + } + accept.try_accept()?; + Ok(&self.commitment) + } +} +impl Commitment<&'a HttpSource>> RegistryAsset { + pub async fn download( + &self, + client: Client, + dst: &mut (impl AsyncWrite + Unpin + Send + ?Sized), + ) -> Result<(), Error> { + self.commitment + 
.copy_to(&HttpSource::new(client, self.url.clone()).await?, dst) + .await + } +} +impl RegistryAsset { + pub async fn deserialize_s9pk( + &self, + client: Client, + ) -> Result>>, Error> { + S9pk::deserialize( + &Arc::new(HttpSource::new(client, self.url.clone()).await?), + Some(&self.commitment), + ) + .await + } + pub async fn deserialize_s9pk_buffered( + &self, + client: Client, + ) -> Result>>, Error> { + S9pk::deserialize( + &Arc::new(BufferedHttpSource::new(client, self.url.clone()).await?), + Some(&self.commitment), + ) + .await + } +} + +pub struct BufferedHttpSource { + _download: NonDetachingJoinHandle<()>, + file: UploadingFile, +} +impl BufferedHttpSource { + pub async fn new(client: Client, url: Url) -> Result { + let (mut handle, file) = UploadingFile::new().await?; + let response = client.get(url).send().await?; + Ok(Self { + _download: tokio::spawn(async move { handle.download(response).await }).into(), + file, + }) + } +} +impl ArchiveSource for BufferedHttpSource { + type FetchReader = ::FetchReader; + type FetchAllReader = ::FetchAllReader; + async fn size(&self) -> Option { + self.file.size().await + } + async fn fetch_all(&self) -> Result { + self.file.fetch_all().await + } + async fn fetch(&self, position: u64, size: u64) -> Result { + self.file.fetch(position, size).await + } +} diff --git a/core/startos/src/registry/auth.rs b/core/startos/src/registry/auth.rs new file mode 100644 index 000000000..4707bf809 --- /dev/null +++ b/core/startos/src/registry/auth.rs @@ -0,0 +1,222 @@ +use std::collections::BTreeMap; +use std::sync::Arc; +use std::time::{Duration, Instant, SystemTime, UNIX_EPOCH}; + +use axum::body::Body; +use axum::extract::Request; +use axum::response::Response; +use chrono::Utc; +use http::HeaderValue; +use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{Middleware, RpcRequest, RpcResponse}; +use serde::{Deserialize, Serialize}; +use tokio::io::AsyncWriteExt; +use tokio::sync::Mutex; +use ts_rs::TS; +use url::Url; + +use 
crate::prelude::*; +use crate::registry::context::RegistryContext; +use crate::registry::signer::commitment::request::RequestCommitment; +use crate::registry::signer::commitment::Commitment; +use crate::registry::signer::sign::{ + AnySignature, AnySigningKey, AnyVerifyingKey, SignatureScheme, +}; +use crate::util::serde::Base64; + +pub const AUTH_SIG_HEADER: &str = "X-StartOS-Registry-Auth-Sig"; + +#[derive(Deserialize)] +pub struct Metadata { + #[serde(default)] + admin: bool, + #[serde(default)] + get_signer: bool, +} + +#[derive(Clone)] +pub struct Auth { + nonce_cache: Arc>>, // for replay protection + signer: Option>, +} +impl Auth { + pub fn new() -> Self { + Self { + nonce_cache: Arc::new(Mutex::new(BTreeMap::new())), + signer: None, + } + } + async fn handle_nonce(&mut self, nonce: u64) -> Result<(), Error> { + let mut cache = self.nonce_cache.lock().await; + if cache.values().any(|n| *n == nonce) { + return Err(Error::new( + eyre!("replay attack detected"), + ErrorKind::Authorization, + )); + } + while let Some(entry) = cache.first_entry() { + if entry.key().elapsed() > Duration::from_secs(60) { + entry.remove_entry(); + } else { + break; + } + } + Ok(()) + } +} + +#[derive(Serialize, Deserialize, TS)] +pub struct RegistryAdminLogRecord { + pub timestamp: String, + pub name: String, + #[ts(type = "{ id: string | number | null; method: string; params: any }")] + pub request: RpcRequest, + pub key: AnyVerifyingKey, +} + +pub struct SignatureHeader { + pub commitment: RequestCommitment, + pub signer: AnyVerifyingKey, + pub signature: AnySignature, +} +impl SignatureHeader { + pub fn to_header(&self) -> HeaderValue { + let mut url: Url = "http://localhost".parse().unwrap(); + self.commitment.append_query(&mut url); + url.query_pairs_mut() + .append_pair("signer", &self.signer.to_string()); + url.query_pairs_mut() + .append_pair("signature", &self.signature.to_string()); + HeaderValue::from_str(url.query().unwrap_or_default()).unwrap() + } + pub fn 
from_header(header: &HeaderValue) -> Result { + let query: BTreeMap<_, _> = form_urlencoded::parse(header.as_bytes()).collect(); + Ok(Self { + commitment: RequestCommitment::from_query(&header)?, + signer: query.get("signer").or_not_found("signer")?.parse()?, + signature: query.get("signature").or_not_found("signature")?.parse()?, + }) + } + pub fn sign(signer: &AnySigningKey, body: &[u8], context: &str) -> Result { + let timestamp = SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|d| d.as_secs() as i64) + .unwrap_or_else(|e| e.duration().as_secs() as i64 * -1); + let nonce = rand::random(); + let commitment = RequestCommitment { + timestamp, + nonce, + size: body.len() as u64, + blake3: Base64(*blake3::hash(body).as_bytes()), + }; + let signature = signer + .scheme() + .sign_commitment(&signer, &commitment, context)?; + Ok(Self { + commitment, + signer: signer.verifying_key(), + signature, + }) + } +} + +impl Middleware for Auth { + type Metadata = Metadata; + async fn process_http_request( + &mut self, + ctx: &RegistryContext, + request: &mut Request, + ) -> Result<(), Response> { + if request.headers().contains_key(AUTH_SIG_HEADER) { + self.signer = Some( + async { + let SignatureHeader { + commitment, + signer, + signature, + } = SignatureHeader::from_header( + request + .headers() + .get(AUTH_SIG_HEADER) + .or_not_found("missing X-StartOS-Registry-Auth-Sig") + .with_kind(ErrorKind::InvalidRequest)?, + )?; + + signer.scheme().verify_commitment( + &signer, + &commitment, + &ctx.hostname, + &signature, + )?; + + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|d| d.as_secs() as i64) + .unwrap_or_else(|e| e.duration().as_secs() as i64 * -1); + if (now - commitment.timestamp).abs() > 30 { + return Err(Error::new( + eyre!("timestamp not within 30s of now"), + ErrorKind::InvalidSignature, + )); + } + self.handle_nonce(commitment.nonce).await?; + + let mut body = Vec::with_capacity(commitment.size as usize); + commitment.copy_to(request, &mut 
body).await?; + *request.body_mut() = Body::from(body); + + Ok(signer) + } + .await + .map_err(RpcError::from), + ); + } + Ok(()) + } + async fn process_rpc_request( + &mut self, + ctx: &RegistryContext, + metadata: Self::Metadata, + request: &mut RpcRequest, + ) -> Result<(), RpcResponse> { + async move { + let signer = self.signer.take().transpose()?; + if metadata.get_signer { + if let Some(signer) = &signer { + request.params["__auth_signer"] = to_value(signer)?; + } + } + if metadata.admin { + let signer = signer + .ok_or_else(|| Error::new(eyre!("UNAUTHORIZED"), ErrorKind::Authorization))?; + let db = ctx.db.peek().await; + let (guid, admin) = db.as_index().as_signers().get_signer_info(&signer)?; + if db.into_admins().de()?.contains(&guid) { + let mut log = tokio::fs::OpenOptions::new() + .create(true) + .append(true) + .open(ctx.datadir.join("admin.log")) + .await?; + log.write_all( + (serde_json::to_string(&RegistryAdminLogRecord { + timestamp: Utc::now().to_rfc3339(), + name: admin.name, + request: request.clone(), + key: signer, + }) + .with_kind(ErrorKind::Serialization)? 
+ + "\n") + .as_bytes(), + ) + .await?; + } else { + return Err(Error::new(eyre!("UNAUTHORIZED"), ErrorKind::Authorization)); + } + } + + Ok(()) + } + .await + .map_err(|e| RpcResponse::from_result(Err(e))) + } +} diff --git a/core/startos/src/registry/context.rs b/core/startos/src/registry/context.rs new file mode 100644 index 000000000..d3eaf3691 --- /dev/null +++ b/core/startos/src/registry/context.rs @@ -0,0 +1,289 @@ +use std::net::{Ipv4Addr, SocketAddr}; +use std::ops::Deref; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use clap::Parser; +use imbl_value::InternedString; +use patch_db::PatchDb; +use reqwest::{Client, Proxy}; +use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{CallRemote, Context, Empty}; +use serde::{Deserialize, Serialize}; +use sqlx::PgPool; +use tokio::sync::broadcast::Sender; +use tracing::instrument; +use url::Url; + +use crate::context::config::{ContextConfig, CONFIG_PATH}; +use crate::context::{CliContext, RpcContext}; +use crate::prelude::*; +use crate::registry::auth::{SignatureHeader, AUTH_SIG_HEADER}; +use crate::registry::device_info::{DeviceInfo, DEVICE_INFO_HEADER}; +use crate::registry::signer::sign::AnySigningKey; +use crate::registry::RegistryDatabase; +use crate::rpc_continuations::RpcContinuations; + +#[derive(Debug, Clone, Default, Deserialize, Serialize, Parser)] +#[serde(rename_all = "kebab-case")] +#[command(rename_all = "kebab-case")] +pub struct RegistryConfig { + #[arg(short = 'c', long = "config")] + pub config: Option, + #[arg(short = 'l', long = "listen")] + pub listen: Option, + #[arg(short = 'h', long = "hostname")] + pub hostname: Option, + #[arg(short = 'p', long = "tor-proxy")] + pub tor_proxy: Option, + #[arg(short = 'd', long = "datadir")] + pub datadir: Option, + #[arg(short = 'u', long = "pg-connection-url")] + pub pg_connection_url: Option, +} +impl ContextConfig for RegistryConfig { + fn next(&mut self) -> Option { + self.config.take() + } + fn merge_with(&mut self, other: Self) { + 
self.listen = self.listen.take().or(other.listen); + self.hostname = self.hostname.take().or(other.hostname); + self.tor_proxy = self.tor_proxy.take().or(other.tor_proxy); + self.datadir = self.datadir.take().or(other.datadir); + } +} + +impl RegistryConfig { + pub fn load(mut self) -> Result { + let path = self.next(); + self.load_path_rec(path)?; + self.load_path_rec(Some(CONFIG_PATH))?; + Ok(self) + } +} + +pub struct RegistryContextSeed { + pub hostname: InternedString, + pub listen: SocketAddr, + pub db: TypedPatchDb, + pub datadir: PathBuf, + pub rpc_continuations: RpcContinuations, + pub client: Client, + pub shutdown: Sender<()>, + pub pool: Option, +} + +#[derive(Clone)] +pub struct RegistryContext(Arc); +impl RegistryContext { + #[instrument(skip_all)] + pub async fn init(config: &RegistryConfig) -> Result { + let (shutdown, _) = tokio::sync::broadcast::channel(1); + let datadir = config + .datadir + .as_deref() + .unwrap_or_else(|| Path::new("/var/lib/startos")) + .to_owned(); + if tokio::fs::metadata(&datadir).await.is_err() { + tokio::fs::create_dir_all(&datadir).await?; + } + let db_path = datadir.join("registry.db"); + let db = TypedPatchDb::::load_or_init( + PatchDb::open(&db_path).await?, + || async { Ok(Default::default()) }, + ) + .await?; + let tor_proxy_url = config + .tor_proxy + .clone() + .map(Ok) + .unwrap_or_else(|| "socks5h://localhost:9050".parse())?; + let pool: Option = match &config.pg_connection_url { + Some(url) => match PgPool::connect(url.as_str()).await { + Ok(pool) => Some(pool), + Err(_) => None, + }, + None => None, + }; + Ok(Self(Arc::new(RegistryContextSeed { + hostname: config + .hostname + .as_ref() + .ok_or_else(|| { + Error::new( + eyre!("missing required configuration: hostname"), + ErrorKind::NotFound, + ) + })? 
+ .clone(), + listen: config + .listen + .unwrap_or(SocketAddr::new(Ipv4Addr::LOCALHOST.into(), 5959)), + db, + datadir, + rpc_continuations: RpcContinuations::new(), + client: Client::builder() + .proxy(Proxy::custom(move |url| { + if url.host_str().map_or(false, |h| h.ends_with(".onion")) { + Some(tor_proxy_url.clone()) + } else { + None + } + })) + .build() + .with_kind(crate::ErrorKind::ParseUrl)?, + shutdown, + pool, + }))) + } +} +impl AsRef for RegistryContext { + fn as_ref(&self) -> &RpcContinuations { + &self.rpc_continuations + } +} + +impl Context for RegistryContext {} +impl Deref for RegistryContext { + type Target = RegistryContextSeed; + fn deref(&self) -> &Self::Target { + &*self.0 + } +} + +#[derive(Debug, Deserialize, Serialize, Parser)] +pub struct RegistryUrlParams { + pub registry: Url, +} + +impl CallRemote for CliContext { + async fn call_remote( + &self, + mut method: &str, + params: Value, + _: Empty, + ) -> Result { + use reqwest::header::{ACCEPT, CONTENT_LENGTH, CONTENT_TYPE}; + use reqwest::Method; + use rpc_toolkit::yajrc::{GenericRpcMethod, Id, RpcRequest}; + use rpc_toolkit::RpcResponse; + + let url = self + .registry_url + .clone() + .ok_or_else(|| Error::new(eyre!("`--registry` required"), ErrorKind::InvalidRequest))?; + method = method.strip_prefix("registry.").unwrap_or(method); + + let rpc_req = RpcRequest { + id: Some(Id::Number(0.into())), + method: GenericRpcMethod::<_, _, Value>::new(method), + params, + }; + let body = serde_json::to_vec(&rpc_req)?; + let host = url.host().or_not_found("registry hostname")?.to_string(); + let res = self + .client + .request(Method::POST, url) + .header(CONTENT_TYPE, "application/json") + .header(ACCEPT, "application/json") + .header(CONTENT_LENGTH, body.len()) + .header( + AUTH_SIG_HEADER, + SignatureHeader::sign( + &AnySigningKey::Ed25519(self.developer_key()?.clone()), + &body, + &host, + )? 
+ .to_header(), + ) + .body(body) + .send() + .await?; + + if !res.status().is_success() { + let status = res.status(); + let txt = res.text().await?; + let mut res = Err(Error::new( + eyre!("{}", status.canonical_reason().unwrap_or(status.as_str())), + ErrorKind::Network, + )); + if !txt.is_empty() { + res = res.with_ctx(|_| (ErrorKind::Network, txt)); + } + return res.map_err(From::from); + } + + match res + .headers() + .get(CONTENT_TYPE) + .and_then(|v| v.to_str().ok()) + { + Some("application/json") => { + serde_json::from_slice::(&*res.bytes().await?) + .with_kind(ErrorKind::Deserialization)? + .result + } + _ => Err(Error::new(eyre!("unknown content type"), ErrorKind::Network).into()), + } + } +} + +impl CallRemote for RpcContext { + async fn call_remote( + &self, + mut method: &str, + params: Value, + RegistryUrlParams { registry }: RegistryUrlParams, + ) -> Result { + use reqwest::header::{ACCEPT, CONTENT_LENGTH, CONTENT_TYPE}; + use reqwest::Method; + use rpc_toolkit::yajrc::{GenericRpcMethod, Id, RpcRequest}; + use rpc_toolkit::RpcResponse; + + let url = registry.join("rpc/v0")?; + method = method.strip_prefix("registry.").unwrap_or(method); + + let rpc_req = RpcRequest { + id: Some(Id::Number(0.into())), + method: GenericRpcMethod::<_, _, Value>::new(method), + params, + }; + let body = serde_json::to_vec(&rpc_req)?; + let res = self + .client + .request(Method::POST, url) + .header(CONTENT_TYPE, "application/json") + .header(ACCEPT, "application/json") + .header(CONTENT_LENGTH, body.len()) + .header(DEVICE_INFO_HEADER, DeviceInfo::from(self).to_header_value()) + .body(body) + .send() + .await?; + + if !res.status().is_success() { + let status = res.status(); + let txt = res.text().await?; + let mut res = Err(Error::new( + eyre!("{}", status.canonical_reason().unwrap_or(status.as_str())), + ErrorKind::Network, + )); + if !txt.is_empty() { + res = res.with_ctx(|_| (ErrorKind::Network, txt)); + } + return res.map_err(From::from); + } + + match res + 
.headers() + .get(CONTENT_TYPE) + .and_then(|v| v.to_str().ok()) + { + Some("application/json") => { + serde_json::from_slice::(&*res.bytes().await?) + .with_kind(ErrorKind::Deserialization)? + .result + } + _ => Err(Error::new(eyre!("unknown content type"), ErrorKind::Network).into()), + } + } +} diff --git a/core/startos/src/registry/db.rs b/core/startos/src/registry/db.rs new file mode 100644 index 000000000..df39604f1 --- /dev/null +++ b/core/startos/src/registry/db.rs @@ -0,0 +1,171 @@ +use std::path::PathBuf; + +use clap::Parser; +use itertools::Itertools; +use patch_db::json_ptr::{JsonPointer, ROOT}; +use patch_db::Dump; +use rpc_toolkit::yajrc::RpcError; +use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; +use tracing::instrument; +use ts_rs::TS; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::registry::context::RegistryContext; +use crate::registry::RegistryDatabase; +use crate::util::serde::{apply_expr, HandlerExtSerde}; + +pub fn db_api() -> ParentHandler { + ParentHandler::new() + .subcommand("dump", from_fn_async(cli_dump).with_display_serializable()) + .subcommand( + "dump", + from_fn_async(dump) + .with_metadata("admin", Value::Bool(true)) + .no_cli(), + ) + .subcommand("apply", from_fn_async(cli_apply).no_display()) + .subcommand( + "apply", + from_fn_async(apply) + .with_metadata("admin", Value::Bool(true)) + .no_cli(), + ) +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct CliDumpParams { + #[arg(long = "pointer", short = 'p')] + pointer: Option, + path: Option, +} + +#[instrument(skip_all)] +async fn cli_dump( + HandlerArgs { + context, + parent_method, + method, + params: CliDumpParams { pointer, path }, + .. 
+ }: HandlerArgs, +) -> Result { + let dump = if let Some(path) = path { + PatchDb::open(path).await?.dump(&ROOT).await + } else { + let method = parent_method.into_iter().chain(method).join("."); + from_value::( + context + .call_remote::(&method, imbl_value::json!({ "pointer": pointer })) + .await?, + )? + }; + + Ok(dump) +} + +#[derive(Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct DumpParams { + #[arg(long = "pointer", short = 'p')] + #[ts(type = "string | null")] + pointer: Option, +} + +pub async fn dump(ctx: RegistryContext, DumpParams { pointer }: DumpParams) -> Result { + Ok(ctx + .db + .dump(&pointer.as_ref().map_or(ROOT, |p| p.borrowed())) + .await) +} + +#[derive(Deserialize, Serialize, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct CliApplyParams { + expr: String, + path: Option, +} + +#[instrument(skip_all)] +async fn cli_apply( + HandlerArgs { + context, + parent_method, + method, + params: CliApplyParams { expr, path }, + .. + }: HandlerArgs, +) -> Result<(), RpcError> { + if let Some(path) = path { + PatchDb::open(path) + .await? + .apply_function(|db| { + let res = apply_expr( + serde_json::to_value(patch_db::Value::from(db)) + .with_kind(ErrorKind::Deserialization)? 
+ .into(), + &expr, + )?; + + Ok::<_, Error>(( + to_value( + &serde_json::from_value::(res.clone().into()).with_ctx( + |_| { + ( + crate::ErrorKind::Deserialization, + "result does not match database model", + ) + }, + )?, + )?, + (), + )) + }) + .await?; + } else { + let method = parent_method.into_iter().chain(method).join("."); + context + .call_remote::(&method, imbl_value::json!({ "expr": expr })) + .await?; + } + + Ok(()) +} + +#[derive(Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +pub struct ApplyParams { + expr: String, + path: Option, +} + +pub async fn apply( + ctx: RegistryContext, + ApplyParams { expr, .. }: ApplyParams, +) -> Result<(), Error> { + ctx.db + .mutate(|db| { + let res = apply_expr( + serde_json::to_value(patch_db::Value::from(db.clone())) + .with_kind(ErrorKind::Deserialization)? + .into(), + &expr, + )?; + + db.ser( + &serde_json::from_value::(res.clone().into()).with_ctx(|_| { + ( + crate::ErrorKind::Deserialization, + "result does not match database model", + ) + })?, + ) + }) + .await +} diff --git a/core/startos/src/registry/device_info.rs b/core/startos/src/registry/device_info.rs new file mode 100644 index 000000000..172348a10 --- /dev/null +++ b/core/startos/src/registry/device_info.rs @@ -0,0 +1,193 @@ +use std::collections::BTreeMap; +use std::convert::identity; +use std::ops::Deref; + +use axum::extract::Request; +use axum::response::Response; +use exver::{Version, VersionRange}; +use http::HeaderValue; +use imbl_value::InternedString; +use rpc_toolkit::{Middleware, RpcRequest, RpcResponse}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; +use url::Url; + +use crate::context::RpcContext; +use crate::prelude::*; +use crate::registry::context::RegistryContext; +use crate::util::VersionString; +use crate::version::VersionT; + +pub const DEVICE_INFO_HEADER: &str = "X-StartOS-Device-Info"; + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] 
+#[serde(rename_all = "camelCase")] +pub struct DeviceInfo { + pub os: OsInfo, + pub hardware: HardwareInfo, +} +impl From<&RpcContext> for DeviceInfo { + fn from(value: &RpcContext) -> Self { + Self { + os: OsInfo::from(value), + hardware: HardwareInfo::from(value), + } + } +} +impl DeviceInfo { + pub fn to_header_value(&self) -> HeaderValue { + let mut url: Url = "http://localhost".parse().unwrap(); + url.query_pairs_mut() + .append_pair("os.version", &self.os.version.to_string()) + .append_pair("os.compat", &self.os.compat.to_string()) + .append_pair("os.platform", &*self.os.platform) + .append_pair("hardware.arch", &*self.hardware.arch) + .append_pair("hardware.ram", &self.hardware.ram.to_string()); + + for (class, products) in &self.hardware.devices { + for product in products { + url.query_pairs_mut() + .append_pair(&format!("hardware.device.{}", class), product); + } + } + + HeaderValue::from_str(url.query().unwrap_or_default()).unwrap() + } + pub fn from_header_value(header: &HeaderValue) -> Result { + let query: BTreeMap<_, _> = form_urlencoded::parse(header.as_bytes()).collect(); + Ok(Self { + os: OsInfo { + version: query + .get("os.version") + .or_not_found("os.version")? + .parse()?, + compat: query.get("os.compat").or_not_found("os.compat")?.parse()?, + platform: query + .get("os.platform") + .or_not_found("os.platform")? + .deref() + .into(), + }, + hardware: HardwareInfo { + arch: query + .get("hardware.arch") + .or_not_found("hardware.arch")? + .parse()?, + ram: query + .get("hardware.ram") + .or_not_found("hardware.ram")? 
+ .parse()?, + devices: identity(query) + .split_off("hardware.device.") + .into_iter() + .filter_map(|(k, v)| { + k.strip_prefix("hardware.device.") + .map(|k| (k.into(), v.into_owned())) + }) + .fold(BTreeMap::new(), |mut acc, (k, v)| { + let mut devs = acc.remove(&k).unwrap_or_default(); + devs.push(v); + acc.insert(k, devs); + acc + }), + }, + }) + } +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +pub struct OsInfo { + #[ts(as = "VersionString")] + pub version: Version, + #[ts(type = "string")] + pub compat: VersionRange, + #[ts(type = "string")] + pub platform: InternedString, +} +impl From<&RpcContext> for OsInfo { + fn from(_: &RpcContext) -> Self { + Self { + version: crate::version::Current::new().semver(), + compat: crate::version::Current::new().compat().clone(), + platform: InternedString::intern(&*crate::PLATFORM), + } + } +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +pub struct HardwareInfo { + #[ts(type = "string")] + pub arch: InternedString, + #[ts(type = "number")] + pub ram: u64, + #[ts(as = "BTreeMap::>")] + pub devices: BTreeMap>, +} + +impl From<&RpcContext> for HardwareInfo { + fn from(value: &RpcContext) -> Self { + Self { + arch: InternedString::intern(crate::ARCH), + ram: value.hardware.ram, + devices: value + .hardware + .devices + .iter() + .fold(BTreeMap::new(), |mut acc, dev| { + let mut devs = acc.remove(dev.class()).unwrap_or_default(); + devs.push(dev.product().to_owned()); + acc.insert(dev.class().into(), devs); + acc + }), + } + } +} + +#[derive(Deserialize)] +pub struct Metadata { + #[serde(default)] + get_device_info: bool, +} + +#[derive(Clone)] +pub struct DeviceInfoMiddleware { + device_info: Option, +} +impl DeviceInfoMiddleware { + pub fn new() -> Self { + Self { device_info: None } + } +} + +impl Middleware for DeviceInfoMiddleware { + type Metadata = Metadata; + async fn process_http_request( + &mut self, + _: &RegistryContext, 
+ request: &mut Request, + ) -> Result<(), Response> { + self.device_info = request.headers_mut().remove(DEVICE_INFO_HEADER); + Ok(()) + } + async fn process_rpc_request( + &mut self, + _: &RegistryContext, + metadata: Self::Metadata, + request: &mut RpcRequest, + ) -> Result<(), RpcResponse> { + async move { + if metadata.get_device_info { + if let Some(device_info) = &self.device_info { + request.params["__device_info"] = + to_value(&DeviceInfo::from_header_value(device_info)?)?; + } + } + + Ok::<_, Error>(()) + } + .await + .map_err(|e| RpcResponse::from_result(Err(e))) + } +} diff --git a/core/startos/src/registry/marketplace.rs b/core/startos/src/registry/marketplace.rs deleted file mode 100644 index 82ead81cb..000000000 --- a/core/startos/src/registry/marketplace.rs +++ /dev/null @@ -1,101 +0,0 @@ -use base64::Engine; -use clap::Parser; -use color_eyre::eyre::eyre; -use reqwest::{StatusCode, Url}; -use rpc_toolkit::{command, from_fn_async, HandlerExt, ParentHandler}; -use serde::{Deserialize, Serialize}; -use serde_json::Value; -use ts_rs::TS; - -use crate::context::{CliContext, RpcContext}; -use crate::version::VersionT; -use crate::{Error, ResultExt}; - -pub fn marketplace() -> ParentHandler { - ParentHandler::new().subcommand("get", from_fn_async(get).with_remote_cli::()) -} - -pub fn with_query_params(ctx: RpcContext, mut url: Url) -> Url { - url.query_pairs_mut() - .append_pair( - "os.version", - &crate::version::Current::new().semver().to_string(), - ) - .append_pair( - "os.compat", - &crate::version::Current::new().compat().to_string(), - ) - .append_pair("os.arch", &*crate::PLATFORM) - .append_pair("hardware.arch", &*crate::ARCH) - .append_pair("hardware.ram", &ctx.hardware.ram.to_string()); - - for hw in &ctx.hardware.devices { - url.query_pairs_mut() - .append_pair(&format!("hardware.device.{}", hw.class()), hw.product()); - } - - url -} - -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = 
"kebab-case")] -pub struct GetParams { - #[ts(type = "string")] - url: Url, -} - -pub async fn get(ctx: RpcContext, GetParams { url }: GetParams) -> Result { - let mut response = ctx - .client - .get(with_query_params(ctx.clone(), url)) - .send() - .await - .with_kind(crate::ErrorKind::Network)?; - let status = response.status(); - if status.is_success() { - match response - .headers_mut() - .remove("Content-Type") - .as_ref() - .and_then(|h| h.to_str().ok()) - .and_then(|h| h.split(";").next()) - .map(|h| h.trim()) - { - Some("application/json") => response - .json() - .await - .with_kind(crate::ErrorKind::Deserialization), - Some("text/plain") => Ok(Value::String( - response - .text() - .await - .with_kind(crate::ErrorKind::Registry)?, - )), - Some(ctype) => Ok(Value::String(format!( - "data:{};base64,{}", - ctype, - base64::engine::general_purpose::URL_SAFE.encode( - &response - .bytes() - .await - .with_kind(crate::ErrorKind::Registry)? - ) - ))), - _ => Err(Error::new( - eyre!("missing Content-Type"), - crate::ErrorKind::Registry, - )), - } - } else { - let message = response.text().await.with_kind(crate::ErrorKind::Network)?; - Err(Error::new( - eyre!("{}", message), - match status { - StatusCode::BAD_REQUEST => crate::ErrorKind::InvalidRequest, - StatusCode::NOT_FOUND => crate::ErrorKind::NotFound, - _ => crate::ErrorKind::Registry, - }, - )) - } -} diff --git a/core/startos/src/registry/metrics-db/registry-sqlx-data.sh b/core/startos/src/registry/metrics-db/registry-sqlx-data.sh new file mode 100755 index 000000000..2b24873a4 --- /dev/null +++ b/core/startos/src/registry/metrics-db/registry-sqlx-data.sh @@ -0,0 +1,25 @@ +#!/bin/bash + +cd "$(dirname "${BASH_SOURCE[0]}")" +TMP_DIR=$(mktemp -d) +mkdir $TMP_DIR/pgdata +docker run -d --rm --name=tmp_postgres -e POSTGRES_PASSWORD=password -v $TMP_DIR/pgdata:/var/lib/postgresql/data postgres + +( + set -e + ctr=0 + until docker exec tmp_postgres psql -U postgres 2> /dev/null || [ $ctr -ge 5 ]; do + ctr=$[ctr + 1] 
+ sleep 5; + done + + PG_IP=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' tmp_postgres) + + cat "./registry_schema.sql" | docker exec -i tmp_postgres psql -U postgres -d postgres -f- + cd ../../.. + DATABASE_URL=postgres://postgres:password@$PG_IP/postgres PLATFORM=$(uname -m) cargo sqlx prepare -- --lib --profile=test --workspace + echo "Subscript Complete" +) + +docker stop tmp_postgres +sudo rm -rf $TMP_DIR diff --git a/core/startos/src/registry/metrics-db/registry_schema.sql b/core/startos/src/registry/metrics-db/registry_schema.sql new file mode 100644 index 000000000..abd9f4ea6 --- /dev/null +++ b/core/startos/src/registry/metrics-db/registry_schema.sql @@ -0,0 +1,828 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 14.12 (Ubuntu 14.12-0ubuntu0.22.04.1) +-- Dumped by pg_dump version 14.12 (Ubuntu 14.12-0ubuntu0.22.04.1) + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- Name: admin; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.admin ( + id character varying NOT NULL, + created_at timestamp with time zone NOT NULL, + pass_hash character varying NOT NULL, + deleted_at timestamp with time zone +); + + +ALTER TABLE public.admin OWNER TO alpha_admin; + +-- +-- Name: admin_pkgs; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.admin_pkgs ( + id bigint NOT NULL, + admin character varying NOT NULL, + pkg_id character varying NOT NULL +); + + +ALTER TABLE public.admin_pkgs OWNER TO alpha_admin; + +-- +-- Name: admin_pkgs_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin 
+-- + +CREATE SEQUENCE public.admin_pkgs_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.admin_pkgs_id_seq OWNER TO alpha_admin; + +-- +-- Name: admin_pkgs_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.admin_pkgs_id_seq OWNED BY public.admin_pkgs.id; + + +-- +-- Name: category; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.category ( + id bigint NOT NULL, + created_at timestamp with time zone NOT NULL, + name character varying NOT NULL, + description character varying NOT NULL, + priority bigint DEFAULT 0 NOT NULL +); + + +ALTER TABLE public.category OWNER TO alpha_admin; + +-- +-- Name: category_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.category_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.category_id_seq OWNER TO alpha_admin; + +-- +-- Name: category_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.category_id_seq OWNED BY public.category.id; + + +-- +-- Name: eos_hash; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.eos_hash ( + id bigint NOT NULL, + version character varying NOT NULL, + hash character varying NOT NULL +); + + +ALTER TABLE public.eos_hash OWNER TO alpha_admin; + +-- +-- Name: eos_hash_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.eos_hash_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.eos_hash_id_seq OWNER TO alpha_admin; + +-- +-- Name: eos_hash_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.eos_hash_id_seq OWNED BY public.eos_hash.id; + + +-- +-- Name: error_log_record; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.error_log_record ( + id bigint NOT NULL, + 
created_at timestamp with time zone NOT NULL, + epoch character varying NOT NULL, + commit_hash character varying NOT NULL, + source_file character varying NOT NULL, + line bigint NOT NULL, + target character varying NOT NULL, + level character varying NOT NULL, + message character varying NOT NULL, + incidents bigint NOT NULL +); + + +ALTER TABLE public.error_log_record OWNER TO alpha_admin; + +-- +-- Name: error_log_record_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.error_log_record_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.error_log_record_id_seq OWNER TO alpha_admin; + +-- +-- Name: error_log_record_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.error_log_record_id_seq OWNED BY public.error_log_record.id; + + +-- +-- Name: metric; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.metric ( + id bigint NOT NULL, + created_at timestamp with time zone NOT NULL, + version character varying NOT NULL, + pkg_id character varying NOT NULL +); + + +ALTER TABLE public.metric OWNER TO alpha_admin; + +-- +-- Name: metric_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.metric_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.metric_id_seq OWNER TO alpha_admin; + +-- +-- Name: metric_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.metric_id_seq OWNED BY public.metric.id; + + +-- +-- Name: os_version; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.os_version ( + id bigint NOT NULL, + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone NOT NULL, + number character varying NOT NULL, + headline character varying NOT NULL, + release_notes character varying NOT NULL, + arch character varying +); + + +ALTER 
TABLE public.os_version OWNER TO alpha_admin; + +-- +-- Name: os_version_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.os_version_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.os_version_id_seq OWNER TO alpha_admin; + +-- +-- Name: os_version_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.os_version_id_seq OWNED BY public.os_version.id; + + +-- +-- Name: persistent_migration; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.persistent_migration ( + id integer NOT NULL, + version integer NOT NULL, + label character varying, + "timestamp" timestamp with time zone NOT NULL +); + + +ALTER TABLE public.persistent_migration OWNER TO alpha_admin; + +-- +-- Name: persistent_migration_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.persistent_migration_id_seq + AS integer + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.persistent_migration_id_seq OWNER TO alpha_admin; + +-- +-- Name: persistent_migration_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.persistent_migration_id_seq OWNED BY public.persistent_migration.id; + + +-- +-- Name: pkg_category; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.pkg_category ( + id bigint NOT NULL, + created_at timestamp with time zone NOT NULL, + category_id bigint NOT NULL, + pkg_id character varying NOT NULL +); + + +ALTER TABLE public.pkg_category OWNER TO alpha_admin; + +-- +-- Name: pkg_dependency; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.pkg_dependency ( + id bigint NOT NULL, + created_at timestamp with time zone NOT NULL, + pkg_id character varying NOT NULL, + pkg_version character varying NOT NULL, + dep_id character varying NOT NULL, + dep_version_range character 
varying NOT NULL +); + + +ALTER TABLE public.pkg_dependency OWNER TO alpha_admin; + +-- +-- Name: pkg_dependency_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.pkg_dependency_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.pkg_dependency_id_seq OWNER TO alpha_admin; + +-- +-- Name: pkg_dependency_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.pkg_dependency_id_seq OWNED BY public.pkg_dependency.id; + + +-- +-- Name: pkg_record; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.pkg_record ( + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone, + pkg_id character varying NOT NULL, + hidden boolean DEFAULT false NOT NULL +); + + +ALTER TABLE public.pkg_record OWNER TO alpha_admin; + +-- +-- Name: service_category_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.service_category_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.service_category_id_seq OWNER TO alpha_admin; + +-- +-- Name: service_category_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.service_category_id_seq OWNED BY public.pkg_category.id; + + +-- +-- Name: upload; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.upload ( + id bigint NOT NULL, + uploader character varying NOT NULL, + pkg_id character varying NOT NULL, + pkg_version character varying NOT NULL, + created_at timestamp with time zone NOT NULL +); + + +ALTER TABLE public.upload OWNER TO alpha_admin; + +-- +-- Name: upload_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.upload_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.upload_id_seq OWNER TO alpha_admin; + +-- +-- Name: 
upload_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.upload_id_seq OWNED BY public.upload.id; + + +-- +-- Name: user_activity; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.user_activity ( + id bigint NOT NULL, + created_at timestamp with time zone NOT NULL, + server_id character varying NOT NULL, + os_version character varying, + arch character varying +); + + +ALTER TABLE public.user_activity OWNER TO alpha_admin; + +-- +-- Name: user_activity_id_seq; Type: SEQUENCE; Schema: public; Owner: alpha_admin +-- + +CREATE SEQUENCE public.user_activity_id_seq + START WITH 1 + INCREMENT BY 1 + NO MINVALUE + NO MAXVALUE + CACHE 1; + + +ALTER TABLE public.user_activity_id_seq OWNER TO alpha_admin; + +-- +-- Name: user_activity_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: alpha_admin +-- + +ALTER SEQUENCE public.user_activity_id_seq OWNED BY public.user_activity.id; + + +-- +-- Name: version; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.version ( + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone, + number character varying NOT NULL, + release_notes character varying NOT NULL, + os_version character varying NOT NULL, + pkg_id character varying NOT NULL, + title character varying NOT NULL, + desc_short character varying NOT NULL, + desc_long character varying NOT NULL, + icon_type character varying NOT NULL, + deprecated_at timestamp with time zone +); + + +ALTER TABLE public.version OWNER TO alpha_admin; + +-- +-- Name: version_platform; Type: TABLE; Schema: public; Owner: alpha_admin +-- + +CREATE TABLE public.version_platform ( + created_at timestamp with time zone NOT NULL, + updated_at timestamp with time zone, + pkg_id character varying NOT NULL, + version_number character varying NOT NULL, + arch character varying NOT NULL, + ram bigint, + device jsonb +); + + +ALTER TABLE public.version_platform OWNER TO 
alpha_admin; + +-- +-- Name: admin_pkgs id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.admin_pkgs ALTER COLUMN id SET DEFAULT nextval('public.admin_pkgs_id_seq'::regclass); + + +-- +-- Name: category id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.category ALTER COLUMN id SET DEFAULT nextval('public.category_id_seq'::regclass); + + +-- +-- Name: eos_hash id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.eos_hash ALTER COLUMN id SET DEFAULT nextval('public.eos_hash_id_seq'::regclass); + + +-- +-- Name: error_log_record id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.error_log_record ALTER COLUMN id SET DEFAULT nextval('public.error_log_record_id_seq'::regclass); + + +-- +-- Name: metric id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.metric ALTER COLUMN id SET DEFAULT nextval('public.metric_id_seq'::regclass); + + +-- +-- Name: os_version id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.os_version ALTER COLUMN id SET DEFAULT nextval('public.os_version_id_seq'::regclass); + + +-- +-- Name: persistent_migration id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.persistent_migration ALTER COLUMN id SET DEFAULT nextval('public.persistent_migration_id_seq'::regclass); + + +-- +-- Name: pkg_category id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_category ALTER COLUMN id SET DEFAULT nextval('public.service_category_id_seq'::regclass); + + +-- +-- Name: pkg_dependency id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_dependency ALTER COLUMN id SET DEFAULT nextval('public.pkg_dependency_id_seq'::regclass); + + +-- +-- Name: upload id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.upload ALTER COLUMN id SET 
DEFAULT nextval('public.upload_id_seq'::regclass); + + +-- +-- Name: user_activity id; Type: DEFAULT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.user_activity ALTER COLUMN id SET DEFAULT nextval('public.user_activity_id_seq'::regclass); + + +-- +-- Name: admin admin_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.admin + ADD CONSTRAINT admin_pkey PRIMARY KEY (id); + + +-- +-- Name: admin_pkgs admin_pkgs_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.admin_pkgs + ADD CONSTRAINT admin_pkgs_pkey PRIMARY KEY (id); + + +-- +-- Name: category category_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.category + ADD CONSTRAINT category_pkey PRIMARY KEY (id); + + +-- +-- Name: eos_hash eos_hash_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.eos_hash + ADD CONSTRAINT eos_hash_pkey PRIMARY KEY (id); + + +-- +-- Name: error_log_record error_log_record_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.error_log_record + ADD CONSTRAINT error_log_record_pkey PRIMARY KEY (id); + + +-- +-- Name: metric metric_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.metric + ADD CONSTRAINT metric_pkey PRIMARY KEY (id); + + +-- +-- Name: os_version os_version_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.os_version + ADD CONSTRAINT os_version_pkey PRIMARY KEY (id); + + +-- +-- Name: persistent_migration persistent_migration_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.persistent_migration + ADD CONSTRAINT persistent_migration_pkey PRIMARY KEY (id); + + +-- +-- Name: pkg_category pkg_category_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_category + ADD CONSTRAINT pkg_category_pkey PRIMARY 
KEY (id); + + +-- +-- Name: pkg_dependency pkg_dependency_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_dependency + ADD CONSTRAINT pkg_dependency_pkey PRIMARY KEY (id); + + +-- +-- Name: admin_pkgs unique_admin_pkg; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.admin_pkgs + ADD CONSTRAINT unique_admin_pkg UNIQUE (pkg_id, admin); + + +-- +-- Name: error_log_record unique_log_record; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.error_log_record + ADD CONSTRAINT unique_log_record UNIQUE (epoch, commit_hash, source_file, line, target, level, message); + + +-- +-- Name: category unique_name; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.category + ADD CONSTRAINT unique_name UNIQUE (name); + + +-- +-- Name: pkg_category unique_pkg_category; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_category + ADD CONSTRAINT unique_pkg_category UNIQUE (pkg_id, category_id); + + +-- +-- Name: pkg_dependency unique_pkg_dep_version; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_dependency + ADD CONSTRAINT unique_pkg_dep_version UNIQUE (pkg_id, pkg_version, dep_id); + + +-- +-- Name: eos_hash unique_version; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.eos_hash + ADD CONSTRAINT unique_version UNIQUE (version); + + +-- +-- Name: upload upload_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.upload + ADD CONSTRAINT upload_pkey PRIMARY KEY (id); + + +-- +-- Name: user_activity user_activity_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.user_activity + ADD CONSTRAINT user_activity_pkey PRIMARY KEY (id); + + +-- +-- Name: version version_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY 
public.version + ADD CONSTRAINT version_pkey PRIMARY KEY (pkg_id, number); + + +-- +-- Name: version_platform version_platform_pkey; Type: CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.version_platform + ADD CONSTRAINT version_platform_pkey PRIMARY KEY (pkg_id, version_number, arch); + + +-- +-- Name: category_name_idx; Type: INDEX; Schema: public; Owner: alpha_admin +-- + +CREATE UNIQUE INDEX category_name_idx ON public.category USING btree (name); + + +-- +-- Name: pkg_record_pkg_id_idx; Type: INDEX; Schema: public; Owner: alpha_admin +-- + +CREATE UNIQUE INDEX pkg_record_pkg_id_idx ON public.pkg_record USING btree (pkg_id); + + +-- +-- Name: version_number_idx; Type: INDEX; Schema: public; Owner: alpha_admin +-- + +CREATE INDEX version_number_idx ON public.version USING btree (number); + + +-- +-- Name: admin_pkgs admin_pkgs_admin_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.admin_pkgs + ADD CONSTRAINT admin_pkgs_admin_fkey FOREIGN KEY (admin) REFERENCES public.admin(id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- Name: metric metric_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.metric + ADD CONSTRAINT metric_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- Name: pkg_category pkg_category_category_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_category + ADD CONSTRAINT pkg_category_category_id_fkey FOREIGN KEY (category_id) REFERENCES public.category(id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- Name: pkg_category pkg_category_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_category + ADD CONSTRAINT pkg_category_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- Name: 
pkg_dependency pkg_dependency_dep_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_dependency + ADD CONSTRAINT pkg_dependency_dep_id_fkey FOREIGN KEY (dep_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- Name: pkg_dependency pkg_dependency_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.pkg_dependency + ADD CONSTRAINT pkg_dependency_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- Name: upload upload_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.upload + ADD CONSTRAINT upload_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- Name: upload upload_uploader_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.upload + ADD CONSTRAINT upload_uploader_fkey FOREIGN KEY (uploader) REFERENCES public.admin(id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- Name: version version_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.version + ADD CONSTRAINT version_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- Name: version_platform version_platform_pkg_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: alpha_admin +-- + +ALTER TABLE ONLY public.version_platform + ADD CONSTRAINT version_platform_pkg_id_fkey FOREIGN KEY (pkg_id) REFERENCES public.pkg_record(pkg_id) ON UPDATE RESTRICT ON DELETE RESTRICT; + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/core/startos/src/registry/mod.rs b/core/startos/src/registry/mod.rs index 27f541f1d..d34ebb841 100644 --- a/core/startos/src/registry/mod.rs +++ b/core/startos/src/registry/mod.rs @@ -1,2 +1,160 @@ +use 
std::collections::{BTreeMap, BTreeSet}; + +use axum::Router; +use futures::future::ready; +use imbl_value::InternedString; +use models::DataUrl; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler, Server}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::context::CliContext; +use crate::middleware::cors::Cors; +use crate::net::static_server::{bad_request, not_found, server_error}; +use crate::net::web_server::WebServer; +use crate::prelude::*; +use crate::registry::auth::Auth; +use crate::registry::context::RegistryContext; +use crate::registry::device_info::DeviceInfoMiddleware; +use crate::registry::os::index::OsIndex; +use crate::registry::package::index::{Category, PackageIndex}; +use crate::registry::signer::SignerInfo; +use crate::rpc_continuations::Guid; +use crate::util::serde::HandlerExtSerde; + pub mod admin; -pub mod marketplace; +pub mod asset; +pub mod auth; +pub mod context; +pub mod db; +pub mod device_info; +pub mod os; +pub mod package; +pub mod signer; + +#[derive(Debug, Default, Deserialize, Serialize, HasModel)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +pub struct RegistryDatabase { + pub admins: BTreeSet, + pub index: FullIndex, +} +impl RegistryDatabase {} + +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct FullIndex { + pub name: Option, + pub icon: Option>, + pub package: PackageIndex, + pub os: OsIndex, + pub signers: BTreeMap, +} + +pub async fn get_full_index(ctx: RegistryContext) -> Result { + ctx.db.peek().await.into_index().de() +} + +#[derive(Debug, Default, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct RegistryInfo { + pub name: Option, + pub icon: Option>, + #[ts(as = "BTreeMap::")] + pub categories: BTreeMap, +} + +pub async fn get_info(ctx: RegistryContext) -> Result { + let peek = ctx.db.peek().await.into_index(); + Ok(RegistryInfo { + 
name: peek.as_name().de()?, + icon: peek.as_icon().de()?, + categories: peek.as_package().as_categories().de()?, + }) +} + +pub fn registry_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "index", + from_fn_async(get_full_index) + .with_display_serializable() + .with_call_remote::(), + ) + .subcommand( + "info", + from_fn_async(get_info) + .with_display_serializable() + .with_call_remote::(), + ) + .subcommand("os", os::os_api::()) + .subcommand("package", package::package_api::()) + .subcommand("admin", admin::admin_api::()) + .subcommand("db", db::db_api::()) +} + +pub fn registry_router(ctx: RegistryContext) -> Router { + use axum::extract as x; + use axum::routing::{any, get, post}; + Router::new() + .route("/rpc/*path", { + let ctx = ctx.clone(); + post( + Server::new(move || ready(Ok(ctx.clone())), registry_api()) + .middleware(Cors::new()) + .middleware(Auth::new()) + .middleware(DeviceInfoMiddleware::new()), + ) + }) + .route( + "/ws/rpc/*path", + get({ + let ctx = ctx.clone(); + move |x::Path(path): x::Path, + ws: axum::extract::ws::WebSocketUpgrade| async move { + match Guid::from(&path) { + None => { + tracing::debug!("No Guid Path"); + bad_request() + } + Some(guid) => match ctx.rpc_continuations.get_ws_handler(&guid).await { + Some(cont) => ws.on_upgrade(cont), + _ => not_found(), + }, + } + } + }), + ) + .route( + "/rest/rpc/*path", + any({ + let ctx = ctx.clone(); + move |request: x::Request| async move { + let path = request + .uri() + .path() + .strip_prefix("/rest/rpc/") + .unwrap_or_default(); + match Guid::from(&path) { + None => { + tracing::debug!("No Guid Path"); + bad_request() + } + Some(guid) => match ctx.rpc_continuations.get_rest_handler(&guid).await { + None => not_found(), + Some(cont) => cont(request).await.unwrap_or_else(server_error), + }, + } + } + }), + ) +} + +impl WebServer { + pub fn serve_registry(&mut self, ctx: RegistryContext) { + self.serve_router(registry_router(ctx)) + } +} diff --git 
a/core/startos/src/registry/os/asset/add.rs b/core/startos/src/registry/os/asset/add.rs new file mode 100644 index 000000000..d609063ea --- /dev/null +++ b/core/startos/src/registry/os/asset/add.rs @@ -0,0 +1,257 @@ +use std::collections::{BTreeMap, HashMap}; +use std::panic::UnwindSafe; +use std::path::PathBuf; + +use chrono::Utc; +use clap::Parser; +use exver::Version; +use imbl_value::InternedString; +use itertools::Itertools; +use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; +use url::Url; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::progress::FullProgressTracker; +use crate::registry::asset::RegistryAsset; +use crate::registry::context::RegistryContext; +use crate::registry::os::index::OsVersionInfo; +use crate::registry::os::SIG_CONTEXT; +use crate::registry::signer::commitment::blake3::Blake3Commitment; +use crate::registry::signer::sign::ed25519::Ed25519; +use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme}; +use crate::s9pk::merkle_archive::hash::VerifyingWriter; +use crate::s9pk::merkle_archive::source::http::HttpSource; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; +use crate::s9pk::merkle_archive::source::ArchiveSource; +use crate::util::io::open_file; +use crate::util::serde::Base64; + +pub fn add_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "iso", + from_fn_async(add_iso) + .with_metadata("get_signer", Value::Bool(true)) + .no_cli(), + ) + .subcommand( + "img", + from_fn_async(add_img) + .with_metadata("get_signer", Value::Bool(true)) + .no_cli(), + ) + .subcommand( + "squashfs", + from_fn_async(add_squashfs) + .with_metadata("get_signer", Value::Bool(true)) + .no_cli(), + ) +} + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct AddAssetParams { + #[ts(type = "string")] + pub version: Version, + 
#[ts(type = "string")] + pub platform: InternedString, + #[ts(type = "string")] + pub url: Url, + #[serde(rename = "__auth_signer")] + #[ts(skip)] + pub signer: AnyVerifyingKey, + pub signature: AnySignature, + pub commitment: Blake3Commitment, +} + +async fn add_asset( + ctx: RegistryContext, + AddAssetParams { + version, + platform, + url, + signer, + signature, + commitment, + }: AddAssetParams, + accessor: impl FnOnce( + &mut Model, + ) -> &mut Model>> + + UnwindSafe + + Send, +) -> Result<(), Error> { + signer + .scheme() + .verify_commitment(&signer, &commitment, SIG_CONTEXT, &signature)?; + ctx.db + .mutate(|db| { + let signer_guid = db.as_index().as_signers().get_signer(&signer)?; + if db + .as_index() + .as_os() + .as_versions() + .as_idx(&version) + .or_not_found(&version)? + .as_authorized() + .de()? + .contains(&signer_guid) + { + accessor( + db.as_index_mut() + .as_os_mut() + .as_versions_mut() + .as_idx_mut(&version) + .or_not_found(&version)?, + ) + .upsert(&platform, || { + Ok(RegistryAsset { + published_at: Utc::now(), + url, + commitment: commitment.clone(), + signatures: HashMap::new(), + }) + })? 
+ .mutate(|s| { + if s.commitment != commitment { + Err(Error::new( + eyre!("commitment does not match"), + ErrorKind::InvalidSignature, + )) + } else { + s.signatures.insert(signer, signature); + Ok(()) + } + })?; + Ok(()) + } else { + Err(Error::new(eyre!("UNAUTHORIZED"), ErrorKind::Authorization)) + } + }) + .await?; + + Ok(()) +} + +pub async fn add_iso(ctx: RegistryContext, params: AddAssetParams) -> Result<(), Error> { + add_asset(ctx, params, |m| m.as_iso_mut()).await +} + +pub async fn add_img(ctx: RegistryContext, params: AddAssetParams) -> Result<(), Error> { + add_asset(ctx, params, |m| m.as_img_mut()).await +} + +pub async fn add_squashfs(ctx: RegistryContext, params: AddAssetParams) -> Result<(), Error> { + add_asset(ctx, params, |m| m.as_squashfs_mut()).await +} + +#[derive(Debug, Deserialize, Serialize, Parser)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +pub struct CliAddAssetParams { + #[arg(short = 'p', long = "platform")] + pub platform: InternedString, + #[arg(short = 'v', long = "version")] + pub version: Version, + pub file: PathBuf, + pub url: Url, +} + +pub async fn cli_add_asset( + HandlerArgs { + context: ctx, + parent_method, + method, + params: + CliAddAssetParams { + platform, + version, + file: path, + url, + }, + .. 
+ }: HandlerArgs, +) -> Result<(), Error> { + let ext = match path.extension().and_then(|e| e.to_str()) { + Some("iso") => "iso", + Some("img") => "img", + Some("squashfs") => "squashfs", + _ => { + return Err(Error::new( + eyre!("Unknown extension"), + ErrorKind::InvalidRequest, + )) + } + }; + + let file = MultiCursorFile::from(open_file(&path).await?); + + let progress = FullProgressTracker::new(); + let mut sign_phase = progress.add_phase(InternedString::intern("Signing File"), Some(10)); + let mut verify_phase = progress.add_phase(InternedString::intern("Verifying URL"), Some(100)); + let mut index_phase = progress.add_phase( + InternedString::intern("Adding File to Registry Index"), + Some(1), + ); + + let progress_task = + progress.progress_bar_task(&format!("Adding {} to registry...", path.display())); + + sign_phase.start(); + let blake3 = file.blake3_mmap().await?; + let size = file + .size() + .await + .ok_or_else(|| Error::new(eyre!("failed to read file metadata"), ErrorKind::Filesystem))?; + let commitment = Blake3Commitment { + hash: Base64(*blake3.as_bytes()), + size, + }; + let signature = AnySignature::Ed25519(Ed25519.sign_commitment( + ctx.developer_key()?, + &commitment, + SIG_CONTEXT, + )?); + sign_phase.complete(); + + verify_phase.start(); + let src = HttpSource::new(ctx.client.clone(), url.clone()).await?; + if let Some(size) = src.size().await { + verify_phase.set_total(size); + } + let mut writer = verify_phase.writer(VerifyingWriter::new( + tokio::io::sink(), + Some((blake3::Hash::from_bytes(*commitment.hash), commitment.size)), + )); + src.copy_all_to(&mut writer).await?; + let (verifier, mut verify_phase) = writer.into_inner(); + verifier.verify().await?; + verify_phase.complete(); + + index_phase.start(); + ctx.call_remote::( + &parent_method + .into_iter() + .chain(method) + .chain([ext]) + .join("."), + imbl_value::json!({ + "platform": platform, + "version": version, + "url": &url, + "signature": signature, + "commitment": 
commitment, + }), + ) + .await?; + index_phase.complete(); + + progress.complete(); + + progress_task.await.with_kind(ErrorKind::Unknown)?; + + Ok(()) +} diff --git a/core/startos/src/registry/os/asset/get.rs b/core/startos/src/registry/os/asset/get.rs new file mode 100644 index 000000000..ad0010dca --- /dev/null +++ b/core/startos/src/registry/os/asset/get.rs @@ -0,0 +1,174 @@ +use std::collections::BTreeMap; +use std::panic::UnwindSafe; +use std::path::{Path, PathBuf}; + +use clap::Parser; +use exver::Version; +use helpers::AtomicFile; +use imbl_value::{json, InternedString}; +use itertools::Itertools; +use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::progress::FullProgressTracker; +use crate::registry::asset::RegistryAsset; +use crate::registry::context::RegistryContext; +use crate::registry::os::index::OsVersionInfo; +use crate::registry::os::SIG_CONTEXT; +use crate::registry::signer::commitment::blake3::Blake3Commitment; +use crate::registry::signer::commitment::Commitment; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; +use crate::util::io::open_file; + +pub fn get_api() -> ParentHandler { + ParentHandler::new() + .subcommand("iso", from_fn_async(get_iso).no_cli()) + .subcommand("iso", from_fn_async(cli_get_os_asset).no_display()) + .subcommand("img", from_fn_async(get_img).no_cli()) + .subcommand("img", from_fn_async(cli_get_os_asset).no_display()) + .subcommand("squashfs", from_fn_async(get_squashfs).no_cli()) + .subcommand("squashfs", from_fn_async(cli_get_os_asset).no_display()) +} + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetOsAssetParams { + #[ts(type = "string")] + pub version: Version, + #[ts(type = "string")] + pub platform: InternedString, +} + +async fn get_os_asset( + ctx: RegistryContext, + 
GetOsAssetParams { version, platform }: GetOsAssetParams, + accessor: impl FnOnce( + &Model, + ) -> &Model>> + + UnwindSafe + + Send, +) -> Result, Error> { + accessor( + ctx.db + .peek() + .await + .as_index() + .as_os() + .as_versions() + .as_idx(&version) + .or_not_found(&version)?, + ) + .as_idx(&platform) + .or_not_found(&platform)? + .de() +} + +pub async fn get_iso( + ctx: RegistryContext, + params: GetOsAssetParams, +) -> Result, Error> { + get_os_asset(ctx, params, |info| info.as_iso()).await +} + +pub async fn get_img( + ctx: RegistryContext, + params: GetOsAssetParams, +) -> Result, Error> { + get_os_asset(ctx, params, |info| info.as_img()).await +} + +pub async fn get_squashfs( + ctx: RegistryContext, + params: GetOsAssetParams, +) -> Result, Error> { + get_os_asset(ctx, params, |info| info.as_squashfs()).await +} + +#[derive(Debug, Deserialize, Serialize, Parser)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +pub struct CliGetOsAssetParams { + pub version: Version, + pub platform: InternedString, + #[arg(long = "download", short = 'd')] + pub download: Option, + #[arg( + long = "reverify", + short = 'r', + help = "verify the hash of the file a second time after download" + )] + pub reverify: bool, +} + +async fn cli_get_os_asset( + HandlerArgs { + context: ctx, + parent_method, + method, + params: + CliGetOsAssetParams { + version, + platform, + download, + reverify, + }, + .. 
+ }: HandlerArgs, +) -> Result, Error> { + let res = from_value::>( + ctx.call_remote::( + &parent_method.into_iter().chain(method).join("."), + json!({ + "version": version, + "platform": platform, + }), + ) + .await?, + )?; + + res.validate(SIG_CONTEXT, res.all_signers())?; + + if let Some(download) = download { + let mut file = AtomicFile::new(&download, None::<&Path>) + .await + .with_kind(ErrorKind::Filesystem)?; + + let progress = FullProgressTracker::new(); + let mut download_phase = + progress.add_phase(InternedString::intern("Downloading File"), Some(100)); + download_phase.set_total(res.commitment.size); + let reverify_phase = if reverify { + Some(progress.add_phase(InternedString::intern("Reverifying File"), Some(10))) + } else { + None + }; + + let progress_task = progress.progress_bar_task("Downloading..."); + + download_phase.start(); + let mut download_writer = download_phase.writer(&mut *file); + res.download(ctx.client.clone(), &mut download_writer) + .await?; + let (_, mut download_phase) = download_writer.into_inner(); + file.save().await.with_kind(ErrorKind::Filesystem)?; + download_phase.complete(); + + if let Some(mut reverify_phase) = reverify_phase { + reverify_phase.start(); + res.commitment + .check(&MultiCursorFile::from(open_file(download).await?)) + .await?; + reverify_phase.complete(); + } + + progress.complete(); + + progress_task.await.with_kind(ErrorKind::Unknown)?; + } + + Ok(res) +} diff --git a/core/startos/src/registry/os/asset/mod.rs b/core/startos/src/registry/os/asset/mod.rs new file mode 100644 index 000000000..ec9d6cae7 --- /dev/null +++ b/core/startos/src/registry/os/asset/mod.rs @@ -0,0 +1,14 @@ +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; + +pub mod add; +pub mod get; +pub mod sign; + +pub fn asset_api() -> ParentHandler { + ParentHandler::new() + .subcommand("add", add::add_api::()) + .subcommand("add", from_fn_async(add::cli_add_asset).no_display()) + .subcommand("sign", sign::sign_api::()) + 
.subcommand("sign", from_fn_async(sign::cli_sign_asset).no_display()) + .subcommand("get", get::get_api::()) +} diff --git a/core/startos/src/registry/os/asset/sign.rs b/core/startos/src/registry/os/asset/sign.rs new file mode 100644 index 000000000..18b603daf --- /dev/null +++ b/core/startos/src/registry/os/asset/sign.rs @@ -0,0 +1,221 @@ +use std::collections::BTreeMap; +use std::panic::UnwindSafe; +use std::path::PathBuf; + +use clap::Parser; +use exver::Version; +use imbl_value::InternedString; +use itertools::Itertools; +use rpc_toolkit::{from_fn_async, Context, HandlerArgs, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::progress::FullProgressTracker; +use crate::registry::asset::RegistryAsset; +use crate::registry::context::RegistryContext; +use crate::registry::os::index::OsVersionInfo; +use crate::registry::os::SIG_CONTEXT; +use crate::registry::signer::commitment::blake3::Blake3Commitment; +use crate::registry::signer::sign::ed25519::Ed25519; +use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme}; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; +use crate::s9pk::merkle_archive::source::ArchiveSource; +use crate::util::io::open_file; +use crate::util::serde::Base64; + +pub fn sign_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "iso", + from_fn_async(sign_iso) + .with_metadata("get_signer", Value::Bool(true)) + .no_cli(), + ) + .subcommand( + "img", + from_fn_async(sign_img) + .with_metadata("get_signer", Value::Bool(true)) + .no_cli(), + ) + .subcommand( + "squashfs", + from_fn_async(sign_squashfs) + .with_metadata("get_signer", Value::Bool(true)) + .no_cli(), + ) +} + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct SignAssetParams { + #[ts(type = "string")] + version: Version, + #[ts(type = "string")] + platform: 
InternedString, + #[ts(skip)] + #[serde(rename = "__auth_signer")] + signer: AnyVerifyingKey, + signature: AnySignature, +} + +async fn sign_asset( + ctx: RegistryContext, + SignAssetParams { + version, + platform, + signer, + signature, + }: SignAssetParams, + accessor: impl FnOnce( + &mut Model, + ) -> &mut Model>> + + UnwindSafe + + Send, +) -> Result<(), Error> { + ctx.db + .mutate(|db| { + let guid = db.as_index().as_signers().get_signer(&signer)?; + if !db + .as_index() + .as_os() + .as_versions() + .as_idx(&version) + .or_not_found(&version)? + .as_authorized() + .de()? + .contains(&guid) + { + return Err(Error::new( + eyre!("signer {guid} is not authorized"), + ErrorKind::Authorization, + )); + } + + accessor( + db.as_index_mut() + .as_os_mut() + .as_versions_mut() + .as_idx_mut(&version) + .or_not_found(&version)?, + ) + .as_idx_mut(&platform) + .or_not_found(&platform)? + .mutate(|s| { + signer.scheme().verify_commitment( + &signer, + &s.commitment, + SIG_CONTEXT, + &signature, + )?; + s.signatures.insert(signer, signature); + Ok(()) + })?; + + Ok(()) + }) + .await +} + +pub async fn sign_iso(ctx: RegistryContext, params: SignAssetParams) -> Result<(), Error> { + sign_asset(ctx, params, |m| m.as_iso_mut()).await +} + +pub async fn sign_img(ctx: RegistryContext, params: SignAssetParams) -> Result<(), Error> { + sign_asset(ctx, params, |m| m.as_img_mut()).await +} + +pub async fn sign_squashfs(ctx: RegistryContext, params: SignAssetParams) -> Result<(), Error> { + sign_asset(ctx, params, |m| m.as_squashfs_mut()).await +} + +#[derive(Debug, Deserialize, Serialize, Parser)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +pub struct CliSignAssetParams { + #[arg(short = 'p', long = "platform")] + pub platform: InternedString, + #[arg(short = 'v', long = "version")] + pub version: Version, + pub file: PathBuf, +} + +pub async fn cli_sign_asset( + HandlerArgs { + context: ctx, + parent_method, + method, + params: + CliSignAssetParams { 
+ platform, + version, + file: path, + }, + .. + }: HandlerArgs, +) -> Result<(), Error> { + let ext = match path.extension().and_then(|e| e.to_str()) { + Some("iso") => "iso", + Some("img") => "img", + Some("squashfs") => "squashfs", + _ => { + return Err(Error::new( + eyre!("Unknown extension"), + ErrorKind::InvalidRequest, + )) + } + }; + + let file = MultiCursorFile::from(open_file(&path).await?); + + let progress = FullProgressTracker::new(); + let mut sign_phase = progress.add_phase(InternedString::intern("Signing File"), Some(10)); + let mut index_phase = progress.add_phase( + InternedString::intern("Adding Signature to Registry Index"), + Some(1), + ); + + let progress_task = + progress.progress_bar_task(&format!("Adding {} to registry...", path.display())); + + sign_phase.start(); + let blake3 = file.blake3_mmap().await?; + let size = file + .size() + .await + .ok_or_else(|| Error::new(eyre!("failed to read file metadata"), ErrorKind::Filesystem))?; + let commitment = Blake3Commitment { + hash: Base64(*blake3.as_bytes()), + size, + }; + let signature = AnySignature::Ed25519(Ed25519.sign_commitment( + ctx.developer_key()?, + &commitment, + SIG_CONTEXT, + )?); + sign_phase.complete(); + + index_phase.start(); + ctx.call_remote::( + &parent_method + .into_iter() + .chain(method) + .chain([ext]) + .join("."), + imbl_value::json!({ + "platform": platform, + "version": version, + "signature": signature, + }), + ) + .await?; + index_phase.complete(); + + progress.complete(); + + progress_task.await.with_kind(ErrorKind::Unknown)?; + + Ok(()) +} diff --git a/core/startos/src/registry/os/index.rs b/core/startos/src/registry/os/index.rs new file mode 100644 index 000000000..b61cb8f96 --- /dev/null +++ b/core/startos/src/registry/os/index.rs @@ -0,0 +1,57 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use exver::{Version, VersionRange}; +use imbl_value::InternedString; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::prelude::*; +use 
crate::registry::asset::RegistryAsset; +use crate::registry::context::RegistryContext; +use crate::registry::signer::commitment::blake3::Blake3Commitment; +use crate::rpc_continuations::Guid; + +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct OsIndex { + pub versions: OsVersionInfoMap, +} + +#[derive(Debug, Default, Deserialize, Serialize, TS)] +pub struct OsVersionInfoMap( + #[ts(as = "BTreeMap::")] pub BTreeMap, +); +impl Map for OsVersionInfoMap { + type Key = Version; + type Value = OsVersionInfo; + fn key_str(key: &Self::Key) -> Result, Error> { + Ok(InternedString::from_display(key)) + } + fn key_string(key: &Self::Key) -> Result { + Ok(InternedString::from_display(key)) + } +} + +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct OsVersionInfo { + pub headline: String, + pub release_notes: String, + #[ts(type = "string")] + pub source_version: VersionRange, + pub authorized: BTreeSet, + #[ts(as = "BTreeMap::>")] + pub iso: BTreeMap>, // platform (i.e. x86_64-nonfree) -> asset + #[ts(as = "BTreeMap::>")] + pub squashfs: BTreeMap>, // platform (i.e. x86_64-nonfree) -> asset + #[ts(as = "BTreeMap::>")] + pub img: BTreeMap>, // platform (i.e. 
raspberrypi) -> asset +} + +pub async fn get_os_index(ctx: RegistryContext) -> Result { + ctx.db.peek().await.into_index().into_os().de() +} diff --git a/core/startos/src/registry/os/mod.rs b/core/startos/src/registry/os/mod.rs new file mode 100644 index 000000000..64ce44eaf --- /dev/null +++ b/core/startos/src/registry/os/mod.rs @@ -0,0 +1,22 @@ +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; + +use crate::context::CliContext; +use crate::util::serde::HandlerExtSerde; + +pub const SIG_CONTEXT: &str = "startos"; + +pub mod asset; +pub mod index; +pub mod version; + +pub fn os_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "index", + from_fn_async(index::get_os_index) + .with_display_serializable() + .with_call_remote::(), + ) + .subcommand("asset", asset::asset_api::()) + .subcommand("version", version::version_api::()) +} diff --git a/core/startos/src/registry/os/version/mod.rs b/core/startos/src/registry/os/version/mod.rs new file mode 100644 index 000000000..4c0568a80 --- /dev/null +++ b/core/startos/src/registry/os/version/mod.rs @@ -0,0 +1,207 @@ +use std::collections::BTreeMap; + +use chrono::Utc; +use clap::Parser; +use exver::{Version, VersionRange}; +use itertools::Itertools; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; +use sqlx::query; +use ts_rs::TS; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::registry::context::RegistryContext; +use crate::registry::os::index::OsVersionInfo; +use crate::registry::signer::sign::AnyVerifyingKey; +use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; + +pub mod signer; + +pub fn version_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "add", + from_fn_async(add_version) + .with_metadata("admin", Value::Bool(true)) + .with_metadata("get_signer", Value::Bool(true)) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "remove", + 
from_fn_async(remove_version) + .with_metadata("admin", Value::Bool(true)) + .no_display() + .with_call_remote::(), + ) + .subcommand("signer", signer::signer_api::()) + .subcommand( + "get", + from_fn_async(get_version) + .with_display_serializable() + .with_custom_display_fn(|handle, result| { + Ok(display_version_info(handle.params, result)) + }) + .with_call_remote::(), + ) +} + +#[derive(Debug, Deserialize, Serialize, Parser, TS)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct AddVersionParams { + #[ts(type = "string")] + pub version: Version, + pub headline: String, + pub release_notes: String, + #[ts(type = "string")] + pub source_version: VersionRange, + #[arg(skip)] + #[ts(skip)] + #[serde(rename = "__auth_signer")] + pub signer: Option, +} + +pub async fn add_version( + ctx: RegistryContext, + AddVersionParams { + version, + headline, + release_notes, + source_version, + signer, + }: AddVersionParams, +) -> Result<(), Error> { + ctx.db + .mutate(|db| { + let signer = signer + .map(|s| db.as_index().as_signers().get_signer(&s)) + .transpose()?; + db.as_index_mut() + .as_os_mut() + .as_versions_mut() + .upsert(&version, || Ok(OsVersionInfo::default()))? 
+ .mutate(|i| { + i.headline = headline; + i.release_notes = release_notes; + i.source_version = source_version; + i.authorized.extend(signer); + Ok(()) + }) + }) + .await +} + +#[derive(Debug, Deserialize, Serialize, Parser, TS)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct RemoveVersionParams { + #[ts(type = "string")] + pub version: Version, +} + +pub async fn remove_version( + ctx: RegistryContext, + RemoveVersionParams { version }: RemoveVersionParams, +) -> Result<(), Error> { + ctx.db + .mutate(|db| { + db.as_index_mut() + .as_os_mut() + .as_versions_mut() + .remove(&version)?; + Ok(()) + }) + .await +} + +#[derive(Debug, Deserialize, Serialize, Parser, TS)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetOsVersionParams { + #[ts(type = "string | null")] + #[arg(long = "src")] + pub source: Option, + #[ts(type = "string | null")] + #[arg(long = "target")] + pub target: Option, + #[ts(type = "string | null")] + #[arg(long = "id")] + server_id: Option, + #[ts(type = "string | null")] + #[arg(long = "arch")] + arch: Option, +} + +pub async fn get_version( + ctx: RegistryContext, + GetOsVersionParams { + source, + target, + server_id, + arch, + }: GetOsVersionParams, +) -> Result, Error> { + if let (Some(pool), Some(server_id), Some(arch)) = (&ctx.pool, server_id, arch) { + let created_at = Utc::now(); + + query!( + "INSERT INTO user_activity (created_at, server_id, arch) VALUES ($1, $2, $3)", + created_at, + server_id, + arch + ) + .execute(pool) + .await?; + } + let target = target.unwrap_or(VersionRange::Any); + ctx.db + .peek() + .await + .into_index() + .into_os() + .into_versions() + .into_entries()? 
+ .into_iter() + .map(|(v, i)| i.de().map(|i| (v, i))) + .filter_ok(|(version, info)| { + version.satisfies(&target) + && source + .as_ref() + .map_or(true, |s| s.satisfies(&info.source_version)) + }) + .collect() +} + +pub fn display_version_info(params: WithIoFormat, info: BTreeMap) { + use prettytable::*; + + if let Some(format) = params.format { + return display_serializable(format, info); + } + + let mut table = Table::new(); + table.add_row(row![bc => + "VERSION", + "HEADLINE", + "RELEASE NOTES", + "ISO PLATFORMS", + "IMG PLATFORMS", + "SQUASHFS PLATFORMS", + ]); + for (version, info) in &info { + table.add_row(row![ + &version.to_string(), + &info.headline, + &info.release_notes, + &info.iso.keys().into_iter().join(", "), + &info.img.keys().into_iter().join(", "), + &info.squashfs.keys().into_iter().join(", "), + ]); + } + table.print_tty(false).unwrap(); +} diff --git a/core/startos/src/registry/os/version/signer.rs b/core/startos/src/registry/os/version/signer.rs new file mode 100644 index 000000000..51f7c6719 --- /dev/null +++ b/core/startos/src/registry/os/version/signer.rs @@ -0,0 +1,132 @@ +use std::collections::BTreeMap; + +use clap::Parser; +use exver::Version; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::registry::admin::display_signers; +use crate::registry::context::RegistryContext; +use crate::registry::signer::SignerInfo; +use crate::rpc_continuations::Guid; +use crate::util::serde::HandlerExtSerde; + +pub fn signer_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "add", + from_fn_async(add_version_signer) + .with_metadata("admin", Value::Bool(true)) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "remove", + from_fn_async(remove_version_signer) + .with_metadata("admin", Value::Bool(true)) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "list", + 
from_fn_async(list_version_signers) + .with_display_serializable() + .with_custom_display_fn(|handle, result| Ok(display_signers(handle.params, result))) + .with_call_remote::(), + ) +} + +#[derive(Debug, Deserialize, Serialize, Parser, TS)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct VersionSignerParams { + #[ts(type = "string")] + pub version: Version, + pub signer: Guid, +} + +pub async fn add_version_signer( + ctx: RegistryContext, + VersionSignerParams { version, signer }: VersionSignerParams, +) -> Result<(), Error> { + ctx.db + .mutate(|db| { + ensure_code!( + db.as_index().as_signers().contains_key(&signer)?, + ErrorKind::InvalidRequest, + "unknown signer {signer}" + ); + + db.as_index_mut() + .as_os_mut() + .as_versions_mut() + .as_idx_mut(&version) + .or_not_found(&version)? + .as_authorized_mut() + .mutate(|s| Ok(s.insert(signer)))?; + + Ok(()) + }) + .await +} + +pub async fn remove_version_signer( + ctx: RegistryContext, + VersionSignerParams { version, signer }: VersionSignerParams, +) -> Result<(), Error> { + ctx.db + .mutate(|db| { + if !db + .as_index_mut() + .as_os_mut() + .as_versions_mut() + .as_idx_mut(&version) + .or_not_found(&version)? + .as_authorized_mut() + .mutate(|s| Ok(s.remove(&signer)))? + { + return Err(Error::new( + eyre!("signer {signer} is not authorized to sign for v{version}"), + ErrorKind::NotFound, + )); + } + + Ok(()) + }) + .await +} + +#[derive(Debug, Deserialize, Serialize, Parser, TS)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct ListVersionSignersParams { + #[ts(type = "string")] + pub version: Version, +} + +pub async fn list_version_signers( + ctx: RegistryContext, + ListVersionSignersParams { version }: ListVersionSignersParams, +) -> Result, Error> { + let db = ctx.db.peek().await; + db.as_index() + .as_os() + .as_versions() + .as_idx(&version) + .or_not_found(&version)? + .as_authorized() + .de()? 
+ .into_iter() + .filter_map(|guid| { + db.as_index() + .as_signers() + .as_idx(&guid) + .map(|s| s.de().map(|s| (guid, s))) + }) + .collect() +} diff --git a/core/startos/src/registry/package/add.rs b/core/startos/src/registry/package/add.rs new file mode 100644 index 000000000..c52f06ac0 --- /dev/null +++ b/core/startos/src/registry/package/add.rs @@ -0,0 +1,159 @@ +use std::path::PathBuf; +use std::sync::Arc; + +use clap::Parser; +use imbl_value::InternedString; +use itertools::Itertools; +use rpc_toolkit::HandlerArgs; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; +use url::Url; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::progress::{FullProgressTracker, ProgressTrackerWriter}; +use crate::registry::context::RegistryContext; +use crate::registry::package::index::PackageVersionInfo; +use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment; +use crate::registry::signer::sign::ed25519::Ed25519; +use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme}; +use crate::s9pk::merkle_archive::source::http::HttpSource; +use crate::s9pk::merkle_archive::source::ArchiveSource; +use crate::s9pk::v2::SIG_CONTEXT; +use crate::s9pk::S9pk; +use crate::util::io::TrackingIO; + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct AddPackageParams { + #[ts(type = "string")] + pub url: Url, + #[ts(skip)] + #[serde(rename = "__auth_signer")] + pub uploader: AnyVerifyingKey, + pub commitment: MerkleArchiveCommitment, + pub signature: AnySignature, +} + +pub async fn add_package( + ctx: RegistryContext, + AddPackageParams { + url, + uploader, + commitment, + signature, + }: AddPackageParams, +) -> Result<(), Error> { + uploader + .scheme() + .verify_commitment(&uploader, &commitment, SIG_CONTEXT, &signature)?; + let peek = ctx.db.peek().await; + let uploader_guid = peek.as_index().as_signers().get_signer(&uploader)?; + let s9pk = S9pk::deserialize( 
+ &Arc::new(HttpSource::new(ctx.client.clone(), url.clone()).await?), + Some(&commitment), + ) + .await?; + + let manifest = s9pk.as_manifest(); + + let mut info = PackageVersionInfo::from_s9pk(&s9pk, url).await?; + if !info.s9pk.signatures.contains_key(&uploader) { + info.s9pk.signatures.insert(uploader.clone(), signature); + } + + ctx.db + .mutate(|db| { + if db.as_admins().de()?.contains(&uploader_guid) + || db + .as_index() + .as_package() + .as_packages() + .as_idx(&manifest.id) + .or_not_found(&manifest.id)? + .as_authorized() + .de()? + .contains(&uploader_guid) + { + let package = db + .as_index_mut() + .as_package_mut() + .as_packages_mut() + .upsert(&manifest.id, || Ok(Default::default()))?; + package.as_versions_mut().insert(&manifest.version, &info)?; + + Ok(()) + } else { + Err(Error::new(eyre!("UNAUTHORIZED"), ErrorKind::Authorization)) + } + }) + .await +} + +#[derive(Debug, Deserialize, Serialize, Parser)] +#[command(rename_all = "kebab-case")] +#[serde(rename_all = "camelCase")] +pub struct CliAddPackageParams { + pub file: PathBuf, + pub url: Url, +} + +pub async fn cli_add_package( + HandlerArgs { + context: ctx, + parent_method, + method, + params: CliAddPackageParams { file, url }, + .. 
+ }: HandlerArgs, +) -> Result<(), Error> { + let s9pk = S9pk::open(&file, None).await?; + + let progress = FullProgressTracker::new(); + let mut sign_phase = progress.add_phase(InternedString::intern("Signing File"), Some(1)); + let mut verify_phase = progress.add_phase(InternedString::intern("Verifying URL"), Some(100)); + let mut index_phase = progress.add_phase( + InternedString::intern("Adding File to Registry Index"), + Some(1), + ); + + let progress_task = + progress.progress_bar_task(&format!("Adding {} to registry...", file.display())); + + sign_phase.start(); + let commitment = s9pk.as_archive().commitment().await?; + let signature = Ed25519.sign_commitment(ctx.developer_key()?, &commitment, SIG_CONTEXT)?; + sign_phase.complete(); + + verify_phase.start(); + let source = HttpSource::new(ctx.client.clone(), url.clone()).await?; + let len = source.size().await; + let mut src = S9pk::deserialize(&Arc::new(source), Some(&commitment)).await?; + if let Some(len) = len { + verify_phase.set_total(len); + } + let mut verify_writer = ProgressTrackerWriter::new(tokio::io::sink(), verify_phase); + src.serialize(&mut TrackingIO::new(0, &mut verify_writer), true) + .await?; + let (_, mut verify_phase) = verify_writer.into_inner(); + verify_phase.complete(); + + index_phase.start(); + ctx.call_remote::( + &parent_method.into_iter().chain(method).join("."), + imbl_value::json!({ + "url": &url, + "signature": AnySignature::Ed25519(signature), + "commitment": commitment, + }), + ) + .await?; + index_phase.complete(); + + progress.complete(); + + progress_task.await.with_kind(ErrorKind::Unknown)?; + + Ok(()) +} diff --git a/core/startos/src/registry/package/get.rs b/core/startos/src/registry/package/get.rs new file mode 100644 index 000000000..cae1289a9 --- /dev/null +++ b/core/startos/src/registry/package/get.rs @@ -0,0 +1,387 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use clap::{Parser, ValueEnum}; +use exver::{ExtendedVersion, VersionRange}; +use 
imbl_value::InternedString; +use itertools::Itertools; +use models::PackageId; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +use crate::prelude::*; +use crate::registry::context::RegistryContext; +use crate::registry::device_info::DeviceInfo; +use crate::registry::package::index::{PackageIndex, PackageVersionInfo}; +use crate::util::serde::{display_serializable, WithIoFormat}; +use crate::util::VersionString; + +#[derive( + Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS, ValueEnum, +)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub enum PackageDetailLevel { + None, + Short, + Full, +} +impl Default for PackageDetailLevel { + fn default() -> Self { + Self::Short + } +} + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct PackageInfoShort { + pub release_notes: String, +} + +#[derive(Debug, Deserialize, Serialize, TS, Parser)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +#[ts(export)] +pub struct GetPackageParams { + pub id: Option, + #[ts(type = "string | null")] + pub version: Option, + pub source_version: Option, + #[ts(skip)] + #[arg(skip)] + #[serde(rename = "__device_info")] + pub device_info: Option, + #[serde(default)] + #[arg(default_value = "none")] + pub other_versions: PackageDetailLevel, +} + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetPackageResponse { + #[ts(type = "string[]")] + pub categories: BTreeSet, + pub best: BTreeMap, + #[serde(skip_serializing_if = "Option::is_none")] + #[ts(optional)] + pub other_versions: Option>, +} +impl GetPackageResponse { + pub fn tables(&self) -> Vec { + use prettytable::*; + + let mut res = Vec::with_capacity(self.best.len()); + + for (version, info) in &self.best { + let mut table = info.table(version); + + let lesser_versions: BTreeMap<_, _> = self + .other_versions + .as_ref() + .into_iter() + .flatten() + 
.filter(|(v, _)| ***v < **version) + .collect(); + + if !lesser_versions.is_empty() { + table.add_row(row![bc => "OLDER VERSIONS"]); + table.add_row(row![bc => "VERSION", "RELEASE NOTES"]); + for (version, info) in lesser_versions { + table.add_row(row![AsRef::::as_ref(version), &info.release_notes]); + } + } + + res.push(table); + } + + res + } +} + +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetPackageResponseFull { + #[ts(type = "string[]")] + pub categories: BTreeSet, + pub best: BTreeMap, + pub other_versions: BTreeMap, +} +impl GetPackageResponseFull { + pub fn tables(&self) -> Vec { + let mut res = Vec::with_capacity(self.best.len()); + + let all: BTreeMap<_, _> = self.best.iter().chain(self.other_versions.iter()).collect(); + + for (version, info) in all { + res.push(info.table(version)); + } + + res + } +} + +pub type GetPackagesResponse = BTreeMap; +pub type GetPackagesResponseFull = BTreeMap; + +fn get_matching_models<'a>( + db: &'a Model, + GetPackageParams { + id, + source_version, + device_info, + .. + }: &GetPackageParams, +) -> Result)>, Error> { + if let Some(id) = id { + if let Some(pkg) = db.as_packages().as_idx(id) { + vec![(id.clone(), pkg)] + } else { + vec![] + } + } else { + db.as_packages().as_entries()? + } + .iter() + .map(|(k, v)| { + Ok(v.as_versions() + .as_entries()? + .into_iter() + .map(|(v, info)| { + Ok::<_, Error>( + if source_version.as_ref().map_or(Ok(true), |source_version| { + Ok::<_, Error>( + source_version.satisfies( + &info + .as_source_version() + .de()? + .unwrap_or(VersionRange::any()), + ), + ) + })? && device_info + .as_ref() + .map_or(Ok(true), |device_info| info.works_for_device(device_info))? 
+ { + Some((k.clone(), ExtendedVersion::from(v), info)) + } else { + None + }, + ) + }) + .flatten_ok()) + }) + .flatten_ok() + .map(|res| res.and_then(|a| a)) + .collect() +} + +pub async fn get_package(ctx: RegistryContext, params: GetPackageParams) -> Result { + use patch_db::ModelExt; + + let peek = ctx.db.peek().await; + let mut best: BTreeMap>> = + Default::default(); + let mut other: BTreeMap>> = + Default::default(); + for (id, version, info) in get_matching_models(&peek.as_index().as_package(), ¶ms)? { + let package_best = best.entry(id.clone()).or_default(); + let package_other = other.entry(id.clone()).or_default(); + if params + .version + .as_ref() + .map_or(true, |v| version.satisfies(v)) + && package_best.keys().all(|k| !(**k > version)) + { + for worse_version in package_best + .keys() + .filter(|k| ***k < version) + .cloned() + .collect_vec() + { + if let Some(info) = package_best.remove(&worse_version) { + package_other.insert(worse_version, info); + } + } + package_best.insert(version.into(), info); + } else { + package_other.insert(version.into(), info); + } + } + if let Some(id) = params.id { + let categories = peek + .as_index() + .as_package() + .as_packages() + .as_idx(&id) + .map(|p| p.as_categories().de()) + .transpose()? 
+ .unwrap_or_default(); + let best = best + .remove(&id) + .unwrap_or_default() + .into_iter() + .map(|(k, v)| v.de().map(|v| (k, v))) + .try_collect()?; + let other = other.remove(&id).unwrap_or_default(); + match params.other_versions { + PackageDetailLevel::None => to_value(&GetPackageResponse { + categories, + best, + other_versions: None, + }), + PackageDetailLevel::Short => to_value(&GetPackageResponse { + categories, + best, + other_versions: Some( + other + .into_iter() + .map(|(k, v)| from_value(v.as_value().clone()).map(|v| (k, v))) + .try_collect()?, + ), + }), + PackageDetailLevel::Full => to_value(&GetPackageResponseFull { + categories, + best, + other_versions: other + .into_iter() + .map(|(k, v)| v.de().map(|v| (k, v))) + .try_collect()?, + }), + } + } else { + match params.other_versions { + PackageDetailLevel::None => to_value( + &best + .into_iter() + .map(|(id, best)| { + let categories = peek + .as_index() + .as_package() + .as_packages() + .as_idx(&id) + .map(|p| p.as_categories().de()) + .transpose()? + .unwrap_or_default(); + Ok::<_, Error>(( + id, + GetPackageResponse { + categories, + best: best + .into_iter() + .map(|(k, v)| v.de().map(|v| (k, v))) + .try_collect()?, + other_versions: None, + }, + )) + }) + .try_collect::<_, GetPackagesResponse, _>()?, + ), + PackageDetailLevel::Short => to_value( + &best + .into_iter() + .map(|(id, best)| { + let categories = peek + .as_index() + .as_package() + .as_packages() + .as_idx(&id) + .map(|p| p.as_categories().de()) + .transpose()? 
+ .unwrap_or_default(); + let other = other.remove(&id).unwrap_or_default(); + Ok::<_, Error>(( + id, + GetPackageResponse { + categories, + best: best + .into_iter() + .map(|(k, v)| v.de().map(|v| (k, v))) + .try_collect()?, + other_versions: Some( + other + .into_iter() + .map(|(k, v)| { + from_value(v.as_value().clone()).map(|v| (k, v)) + }) + .try_collect()?, + ), + }, + )) + }) + .try_collect::<_, GetPackagesResponse, _>()?, + ), + PackageDetailLevel::Full => to_value( + &best + .into_iter() + .map(|(id, best)| { + let categories = peek + .as_index() + .as_package() + .as_packages() + .as_idx(&id) + .map(|p| p.as_categories().de()) + .transpose()? + .unwrap_or_default(); + let other = other.remove(&id).unwrap_or_default(); + Ok::<_, Error>(( + id, + GetPackageResponseFull { + categories, + best: best + .into_iter() + .map(|(k, v)| v.de().map(|v| (k, v))) + .try_collect()?, + other_versions: other + .into_iter() + .map(|(k, v)| v.de().map(|v| (k, v))) + .try_collect()?, + }, + )) + }) + .try_collect::<_, GetPackagesResponseFull, _>()?, + ), + } + } +} + +pub fn display_package_info( + params: WithIoFormat, + info: Value, +) -> Result<(), Error> { + if let Some(format) = params.format { + display_serializable(format, info); + return Ok(()); + } + + if let Some(_) = params.rest.id { + if params.rest.other_versions == PackageDetailLevel::Full { + for table in from_value::(info)?.tables() { + table.print_tty(false)?; + println!(); + } + } else { + for table in from_value::(info)?.tables() { + table.print_tty(false)?; + println!(); + } + } + } else { + if params.rest.other_versions == PackageDetailLevel::Full { + for (_, package) in from_value::(info)? { + for table in package.tables() { + table.print_tty(false)?; + println!(); + } + } + } else { + for (_, package) in from_value::(info)? 
{ + for table in package.tables() { + table.print_tty(false)?; + println!(); + } + } + } + } + Ok(()) +} diff --git a/core/startos/src/registry/package/index.rs b/core/startos/src/registry/package/index.rs new file mode 100644 index 000000000..12a17f634 --- /dev/null +++ b/core/startos/src/registry/package/index.rs @@ -0,0 +1,202 @@ +use std::collections::{BTreeMap, BTreeSet}; + +use chrono::Utc; +use exver::{Version, VersionRange}; +use imbl_value::InternedString; +use models::{DataUrl, PackageId, VersionString}; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; +use url::Url; + +use crate::prelude::*; +use crate::registry::asset::RegistryAsset; +use crate::registry::context::RegistryContext; +use crate::registry::device_info::DeviceInfo; +use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment; +use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey}; +use crate::rpc_continuations::Guid; +use crate::s9pk::git_hash::GitHash; +use crate::s9pk::manifest::{Alerts, Description, HardwareRequirements}; +use crate::s9pk::merkle_archive::source::FileSource; +use crate::s9pk::S9pk; + +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct PackageIndex { + #[ts(as = "BTreeMap::")] + pub categories: BTreeMap, + pub packages: BTreeMap, +} + +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct PackageInfo { + pub authorized: BTreeSet, + pub versions: BTreeMap, + #[ts(type = "string[]")] + pub categories: BTreeSet, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct Category { + pub name: String, + pub description: Description, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub 
struct DependencyMetadata { + #[ts(type = "string | null")] + pub title: Option, + pub icon: Option>, + pub description: Option, + pub optional: bool, +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct PackageVersionInfo { + #[ts(type = "string")] + pub title: InternedString, + pub icon: DataUrl<'static>, + pub description: Description, + pub release_notes: String, + #[ts(type = "string")] + pub git_hash: GitHash, + #[ts(type = "string")] + pub license: InternedString, + #[ts(type = "string")] + pub wrapper_repo: Url, + #[ts(type = "string")] + pub upstream_repo: Url, + #[ts(type = "string")] + pub support_site: Url, + #[ts(type = "string")] + pub marketing_site: Url, + #[ts(type = "string | null")] + pub donation_url: Option, + pub alerts: Alerts, + pub dependency_metadata: BTreeMap, + #[ts(type = "string")] + pub os_version: Version, + pub hardware_requirements: HardwareRequirements, + #[ts(type = "string | null")] + pub source_version: Option, + pub s9pk: RegistryAsset, +} +impl PackageVersionInfo { + pub async fn from_s9pk(s9pk: &S9pk, url: Url) -> Result { + let manifest = s9pk.as_manifest(); + let mut dependency_metadata = BTreeMap::new(); + for (id, info) in &manifest.dependencies.0 { + let metadata = s9pk.dependency_metadata(id).await?; + dependency_metadata.insert( + id.clone(), + DependencyMetadata { + title: metadata.map(|m| m.title), + icon: s9pk.dependency_icon_data_url(id).await?, + description: info.description.clone(), + optional: info.optional, + }, + ); + } + Ok(Self { + title: manifest.title.clone(), + icon: s9pk.icon_data_url().await?, + description: manifest.description.clone(), + release_notes: manifest.release_notes.clone(), + git_hash: manifest.git_hash.clone().or_not_found("git hash")?, + license: manifest.license.clone(), + wrapper_repo: manifest.wrapper_repo.clone(), + upstream_repo: manifest.upstream_repo.clone(), + support_site: 
manifest.support_site.clone(), + marketing_site: manifest.marketing_site.clone(), + donation_url: manifest.donation_url.clone(), + alerts: manifest.alerts.clone(), + dependency_metadata, + os_version: manifest.os_version.clone(), + hardware_requirements: manifest.hardware_requirements.clone(), + source_version: None, // TODO + s9pk: RegistryAsset { + published_at: Utc::now(), + url, + commitment: s9pk.as_archive().commitment().await?, + signatures: [( + AnyVerifyingKey::Ed25519(s9pk.as_archive().signer()), + AnySignature::Ed25519(s9pk.as_archive().signature().await?), + )] + .into_iter() + .collect(), + }, + }) + } + pub fn table(&self, version: &VersionString) -> prettytable::Table { + use prettytable::*; + + let mut table = Table::new(); + + table.add_row(row![bc => &self.title]); + table.add_row(row![br -> "VERSION", AsRef::::as_ref(version)]); + table.add_row(row![br -> "RELEASE NOTES", &self.release_notes]); + table.add_row(row![br -> "ABOUT", &textwrap::wrap(&self.description.short, 80).join("\n")]); + table.add_row(row![ + br -> "DESCRIPTION", + &textwrap::wrap(&self.description.long, 80).join("\n") + ]); + table.add_row(row![br -> "GIT HASH", AsRef::::as_ref(&self.git_hash)]); + table.add_row(row![br -> "LICENSE", &self.license]); + table.add_row(row![br -> "PACKAGE REPO", &self.wrapper_repo.to_string()]); + table.add_row(row![br -> "SERVICE REPO", &self.upstream_repo.to_string()]); + table.add_row(row![br -> "WEBSITE", &self.marketing_site.to_string()]); + table.add_row(row![br -> "SUPPORT", &self.support_site.to_string()]); + + table + } +} +impl Model { + pub fn works_for_device(&self, device_info: &DeviceInfo) -> Result { + if !self.as_os_version().de()?.satisfies(&device_info.os.compat) { + return Ok(false); + } + let hw = self.as_hardware_requirements().de()?; + if let Some(arch) = hw.arch { + if !arch.contains(&device_info.hardware.arch) { + return Ok(false); + } + } + if let Some(ram) = hw.ram { + if device_info.hardware.ram < ram { + return 
Ok(false); + } + } + for (class, regex) in hw.device { + if !device_info + .hardware + .devices + .get(&*class) + .unwrap_or(&Vec::new()) + .iter() + .any(|product| regex.as_ref().is_match(product)) + { + return Ok(false); + } + } + + Ok(true) + } +} + +pub async fn get_package_index(ctx: RegistryContext) -> Result { + ctx.db.peek().await.into_index().into_package().de() +} diff --git a/core/startos/src/registry/package/mod.rs b/core/startos/src/registry/package/mod.rs new file mode 100644 index 000000000..cb2d317f9 --- /dev/null +++ b/core/startos/src/registry/package/mod.rs @@ -0,0 +1,36 @@ +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; + +use crate::context::CliContext; +use crate::prelude::*; +use crate::util::serde::HandlerExtSerde; + +pub mod add; +pub mod get; +pub mod index; + +pub fn package_api() -> ParentHandler { + ParentHandler::new() + .subcommand( + "index", + from_fn_async(index::get_package_index) + .with_display_serializable() + .with_call_remote::(), + ) + .subcommand( + "add", + from_fn_async(add::add_package) + .with_metadata("get_signer", Value::Bool(true)) + .no_cli(), + ) + .subcommand("add", from_fn_async(add::cli_add_package).no_display()) + .subcommand( + "get", + from_fn_async(get::get_package) + .with_metadata("get_device_info", Value::Bool(true)) + .with_display_serializable() + .with_custom_display_fn(|handle, result| { + get::display_package_info(handle.params, result) + }) + .with_call_remote::(), + ) +} diff --git a/core/startos/src/registry/signer/commitment/blake3.rs b/core/startos/src/registry/signer/commitment/blake3.rs new file mode 100644 index 000000000..d99e68c16 --- /dev/null +++ b/core/startos/src/registry/signer/commitment/blake3.rs @@ -0,0 +1,50 @@ +use blake3::Hash; +use digest::Update; +use serde::{Deserialize, Serialize}; +use tokio::io::AsyncWrite; +use ts_rs::TS; + +use crate::prelude::*; +use crate::registry::signer::commitment::{Commitment, Digestable}; +use 
crate::s9pk::merkle_archive::hash::VerifyingWriter; +use crate::s9pk::merkle_archive::source::ArchiveSource; +use crate::util::io::{ParallelBlake3Writer, TrackingIO}; +use crate::util::serde::Base64; +use crate::CAP_10_MiB; + +#[derive(Clone, Debug, Deserialize, Serialize, HasModel, PartialEq, Eq, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct Blake3Commitment { + pub hash: Base64<[u8; 32]>, + #[ts(type = "number")] + pub size: u64, +} +impl Digestable for Blake3Commitment { + fn update(&self, digest: &mut D) { + digest.update(&*self.hash); + digest.update(&u64::to_be_bytes(self.size)); + } +} +impl<'a, Resource: ArchiveSource> Commitment<&'a Resource> for Blake3Commitment { + async fn create(resource: &'a Resource) -> Result { + let mut hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB)); + resource.copy_all_to(&mut hasher).await?; + Ok(Self { + size: hasher.position(), + hash: Base64(*hasher.into_inner().finalize().await?.as_bytes()), + }) + } + async fn copy_to( + &self, + resource: &'a Resource, + writer: W, + ) -> Result<(), Error> { + let mut hasher = + VerifyingWriter::new(writer, Some((Hash::from_bytes(*self.hash), self.size))); + resource.copy_to(0, self.size, &mut hasher).await?; + hasher.verify().await?; + Ok(()) + } +} diff --git a/core/startos/src/registry/signer/commitment/merkle_archive.rs b/core/startos/src/registry/signer/commitment/merkle_archive.rs new file mode 100644 index 000000000..1b9d7d1e0 --- /dev/null +++ b/core/startos/src/registry/signer/commitment/merkle_archive.rs @@ -0,0 +1,127 @@ +use digest::Update; +use serde::{Deserialize, Serialize}; +use tokio::io::AsyncWrite; +use ts_rs::TS; + +use crate::prelude::*; +use crate::registry::signer::commitment::{Commitment, Digestable}; +use crate::s9pk::merkle_archive::source::FileSource; +use crate::s9pk::merkle_archive::MerkleArchive; +use crate::s9pk::S9pk; +use crate::util::io::TrackingIO; +use crate::util::serde::Base64; + 
+#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct MerkleArchiveCommitment { + pub root_sighash: Base64<[u8; 32]>, + #[ts(type = "number")] + pub root_maxsize: u64, +} +impl MerkleArchiveCommitment { + pub fn from_query(query: &str) -> Result, Error> { + let mut root_sighash = None; + let mut root_maxsize = None; + for (k, v) in form_urlencoded::parse(dbg!(query).as_bytes()) { + match &*k { + "rootSighash" => { + root_sighash = Some(dbg!(v).parse()?); + } + "rootMaxsize" => { + root_maxsize = Some(v.parse()?); + } + _ => (), + } + } + if root_sighash.is_some() || root_maxsize.is_some() { + Ok(Some(Self { + root_sighash: root_sighash + .or_not_found("rootSighash required if rootMaxsize specified") + .with_kind(ErrorKind::InvalidRequest)?, + root_maxsize: root_maxsize + .or_not_found("rootMaxsize required if rootSighash specified") + .with_kind(ErrorKind::InvalidRequest)?, + })) + } else { + Ok(None) + } + } +} +impl Digestable for MerkleArchiveCommitment { + fn update(&self, digest: &mut D) { + digest.update(&*self.root_sighash); + digest.update(&u64::to_be_bytes(self.root_maxsize)); + } +} +impl<'a, S: FileSource + Clone> Commitment<&'a MerkleArchive> for MerkleArchiveCommitment { + async fn create(resource: &'a MerkleArchive) -> Result { + resource.commitment().await + } + async fn check(&self, resource: &'a MerkleArchive) -> Result<(), Error> { + let MerkleArchiveCommitment { + root_sighash, + root_maxsize, + } = resource.commitment().await?; + if root_sighash != self.root_sighash { + return Err(Error::new( + eyre!("merkle root mismatch"), + ErrorKind::InvalidSignature, + )); + } + if root_maxsize > self.root_maxsize { + return Err(Error::new( + eyre!("merkle root directory max size too large"), + ErrorKind::InvalidSignature, + )); + } + Ok(()) + } + async fn copy_to( + &self, + resource: &'a MerkleArchive, + writer: W, + ) -> Result<(), Error> { + self.check(resource).await?; 
+ resource + .serialize(&mut TrackingIO::new(0, writer), true) + .await + } +} + +impl<'a, S: FileSource + Clone> Commitment<&'a S9pk> for MerkleArchiveCommitment { + async fn create(resource: &'a S9pk) -> Result { + resource.as_archive().commitment().await + } + async fn check(&self, resource: &'a S9pk) -> Result<(), Error> { + let MerkleArchiveCommitment { + root_sighash, + root_maxsize, + } = resource.as_archive().commitment().await?; + if root_sighash != self.root_sighash { + return Err(Error::new( + eyre!("merkle root mismatch"), + ErrorKind::InvalidSignature, + )); + } + if root_maxsize > self.root_maxsize { + return Err(Error::new( + eyre!("merkle root directory max size too large"), + ErrorKind::InvalidSignature, + )); + } + Ok(()) + } + async fn copy_to( + &self, + resource: &'a S9pk, + writer: W, + ) -> Result<(), Error> { + self.check(resource).await?; + resource + .clone() + .serialize(&mut TrackingIO::new(0, writer), true) + .await + } +} diff --git a/core/startos/src/registry/signer/commitment/mod.rs b/core/startos/src/registry/signer/commitment/mod.rs new file mode 100644 index 000000000..b85e02a4e --- /dev/null +++ b/core/startos/src/registry/signer/commitment/mod.rs @@ -0,0 +1,25 @@ +use digest::Update; +use futures::Future; +use tokio::io::AsyncWrite; + +use crate::prelude::*; + +pub mod blake3; +pub mod merkle_archive; +pub mod request; + +pub trait Digestable { + fn update(&self, digest: &mut D); +} + +pub trait Commitment: Sized + Digestable { + fn create(resource: Resource) -> impl Future> + Send; + fn copy_to( + &self, + resource: Resource, + writer: W, + ) -> impl Future> + Send; + fn check(&self, resource: Resource) -> impl Future> + Send { + self.copy_to(resource, tokio::io::sink()) + } +} diff --git a/core/startos/src/registry/signer/commitment/request.rs b/core/startos/src/registry/signer/commitment/request.rs new file mode 100644 index 000000000..e5bb776bf --- /dev/null +++ b/core/startos/src/registry/signer/commitment/request.rs @@ 
-0,0 +1,103 @@ +use std::collections::BTreeMap; +use std::time::{SystemTime, UNIX_EPOCH}; + +use axum::body::Body; +use axum::extract::Request; +use digest::Update; +use futures::TryStreamExt; +use http::HeaderValue; +use serde::{Deserialize, Serialize}; +use tokio::io::AsyncWrite; +use tokio_util::io::StreamReader; +use ts_rs::TS; +use url::Url; + +use crate::prelude::*; +use crate::registry::signer::commitment::{Commitment, Digestable}; +use crate::s9pk::merkle_archive::hash::VerifyingWriter; +use crate::util::serde::Base64; + +#[derive(Clone, Debug, Deserialize, Serialize, HasModel, PartialEq, Eq, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct RequestCommitment { + #[ts(type = "number")] + pub timestamp: i64, + #[ts(type = "number")] + pub nonce: u64, + #[ts(type = "number")] + pub size: u64, + pub blake3: Base64<[u8; 32]>, +} +impl RequestCommitment { + pub fn append_query(&self, url: &mut Url) { + url.query_pairs_mut() + .append_pair("timestamp", &self.timestamp.to_string()) + .append_pair("nonce", &self.nonce.to_string()) + .append_pair("size", &self.size.to_string()) + .append_pair("blake3", &self.blake3.to_string()); + } + pub fn from_query(query: &HeaderValue) -> Result { + let query: BTreeMap<_, _> = form_urlencoded::parse(query.as_bytes()).collect(); + Ok(Self { + timestamp: query.get("timestamp").or_not_found("timestamp")?.parse()?, + nonce: query.get("nonce").or_not_found("nonce")?.parse()?, + size: query.get("size").or_not_found("size")?.parse()?, + blake3: query.get("blake3").or_not_found("blake3")?.parse()?, + }) + } +} +impl Digestable for RequestCommitment { + fn update(&self, digest: &mut D) { + digest.update(&i64::to_be_bytes(self.timestamp)); + digest.update(&u64::to_be_bytes(self.nonce)); + digest.update(&u64::to_be_bytes(self.size)); + digest.update(&*self.blake3); + } +} +impl<'a> Commitment<&'a mut Request> for RequestCommitment { + async fn create(resource: &'a mut Request) -> Result { + use 
http_body_util::BodyExt; + + let body = std::mem::replace(resource.body_mut(), Body::empty()) + .collect() + .await + .with_kind(ErrorKind::Network)? + .to_bytes(); + let res = Self { + timestamp: SystemTime::now() + .duration_since(UNIX_EPOCH) + .map(|d| d.as_secs() as i64) + .unwrap_or_else(|e| e.duration().as_secs() as i64 * -1), + nonce: rand::random(), + size: body.len() as u64, + blake3: Base64(*blake3::hash(&*body).as_bytes()), + }; + *resource.body_mut() = Body::from(body); + Ok(res) + } + async fn copy_to( + &self, + resource: &'a mut Request, + writer: W, + ) -> Result<(), Error> { + use tokio::io::AsyncReadExt; + + let mut body = StreamReader::new( + std::mem::replace(resource.body_mut(), Body::empty()) + .into_data_stream() + .map_err(std::io::Error::other), + ) + .take(self.size); + + let mut writer = VerifyingWriter::new( + writer, + Some((blake3::Hash::from_bytes(*self.blake3), self.size)), + ); + tokio::io::copy(&mut body, &mut writer).await?; + writer.verify().await?; + + Ok(()) + } +} diff --git a/core/startos/src/registry/signer/mod.rs b/core/startos/src/registry/signer/mod.rs new file mode 100644 index 000000000..99b23b88e --- /dev/null +++ b/core/startos/src/registry/signer/mod.rs @@ -0,0 +1,154 @@ +use std::collections::HashSet; +use std::str::FromStr; + +use clap::builder::ValueParserFactory; +use itertools::Itertools; +use serde::{Deserialize, Serialize}; +use ts_rs::TS; +use url::Url; + +use crate::prelude::*; +use crate::registry::signer::commitment::Digestable; +use crate::registry::signer::sign::{AnySignature, AnyVerifyingKey, SignatureScheme}; +use crate::util::clap::FromStrParser; + +pub mod commitment; +pub mod sign; + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct SignerInfo { + pub name: String, + pub contact: Vec, + pub keys: HashSet, +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] 
+// TODO: better types +pub enum ContactInfo { + Email(String), + Matrix(String), + Website(#[ts(type = "string")] Url), +} +impl std::fmt::Display for ContactInfo { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Email(e) => write!(f, "mailto:{e}"), + Self::Matrix(m) => write!(f, "https://matrix.to/#/{m}"), + Self::Website(w) => write!(f, "{w}"), + } + } +} +impl FromStr for ContactInfo { + type Err = Error; + fn from_str(s: &str) -> Result { + Ok(if let Some(s) = s.strip_prefix("mailto:") { + Self::Email(s.to_owned()) + } else if let Some(s) = s.strip_prefix("https://matrix.to/#/") { + Self::Matrix(s.to_owned()) + } else { + Self::Website(s.parse()?) + }) + } +} +impl ValueParserFactory for ContactInfo { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + Self::Parser::new() + } +} + +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub enum AcceptSigners { + #[serde(skip)] + Accepted, + Signer(AnyVerifyingKey), + Any(Vec), + All(Vec), +} +impl AcceptSigners { + const fn null() -> Self { + Self::Any(Vec::new()) + } + pub fn flatten(self) -> Self { + match self { + Self::Any(mut s) | Self::All(mut s) if s.len() == 1 => s.swap_remove(0).flatten(), + s => s, + } + } + pub fn accepted(&self) -> bool { + match self { + Self::Accepted => true, + _ => false, + } + } + pub fn try_accept(self) -> Result<(), Error> { + if self.accepted() { + Ok(()) + } else { + Err(Error::new( + eyre!("signer(s) not accepted"), + ErrorKind::InvalidSignature, + )) + } + } + pub fn process_signature( + &mut self, + signer: &AnyVerifyingKey, + commitment: &impl Digestable, + context: &str, + signature: &AnySignature, + ) -> Result<(), Error> { + let mut res = Ok(()); + let new = match std::mem::replace(self, Self::null()) { + Self::Accepted => Self::Accepted, + Self::Signer(s) => { + if &s == signer { + res = signer + .scheme() + .verify_commitment(signer, commitment, 
context, signature); + Self::Accepted + } else { + Self::Signer(s) + } + } + Self::All(mut s) => { + res = s + .iter_mut() + .map(|s| s.process_signature(signer, commitment, context, signature)) + .collect(); + if s.iter().all(|s| s.accepted()) { + Self::Accepted + } else { + Self::All(s) + } + } + Self::Any(mut s) => { + match s + .iter_mut() + .map(|s| { + s.process_signature(signer, commitment, context, signature)?; + Ok(s) + }) + .filter_ok(|s| s.accepted()) + .next() + { + Some(Ok(s)) => std::mem::replace(s, Self::null()), + Some(Err(e)) => { + res = Err(e); + Self::Any(s) + } + None => Self::Any(s), + } + } + }; + *self = new; + res + } +} diff --git a/core/startos/src/registry/signer/sign/ed25519.rs b/core/startos/src/registry/signer/sign/ed25519.rs new file mode 100644 index 000000000..3ec4c136e --- /dev/null +++ b/core/startos/src/registry/signer/sign/ed25519.rs @@ -0,0 +1,34 @@ +use ed25519_dalek::{Signature, SigningKey, VerifyingKey}; +use sha2::Sha512; + +use crate::prelude::*; +use crate::registry::signer::sign::SignatureScheme; + +pub struct Ed25519; +impl SignatureScheme for Ed25519 { + type SigningKey = SigningKey; + type VerifyingKey = VerifyingKey; + type Signature = Signature; + type Digest = Sha512; + fn new_digest(&self) -> Self::Digest { + ::new() + } + fn sign( + &self, + key: &Self::SigningKey, + digest: Self::Digest, + context: &str, + ) -> Result { + Ok(key.sign_prehashed(digest, Some(context.as_bytes()))?) 
+ } + fn verify( + &self, + key: &Self::VerifyingKey, + digest: Self::Digest, + context: &str, + signature: &Self::Signature, + ) -> Result<(), Error> { + key.verify_prehashed_strict(digest, Some(context.as_bytes()), signature)?; + Ok(()) + } +} diff --git a/core/startos/src/registry/signer/sign/mod.rs b/core/startos/src/registry/signer/sign/mod.rs new file mode 100644 index 000000000..a29109864 --- /dev/null +++ b/core/startos/src/registry/signer/sign/mod.rs @@ -0,0 +1,347 @@ +use std::fmt::Display; +use std::str::FromStr; + +use ::ed25519::pkcs8::BitStringRef; +use clap::builder::ValueParserFactory; +use der::referenced::OwnedToRef; +use pkcs8::der::AnyRef; +use pkcs8::{PrivateKeyInfo, SubjectPublicKeyInfo}; +use serde::{Deserialize, Serialize}; +use sha2::Sha512; +use ts_rs::TS; + +use crate::prelude::*; +use crate::registry::signer::commitment::Digestable; +use crate::registry::signer::sign::ed25519::Ed25519; +use crate::util::clap::FromStrParser; +use crate::util::serde::{deserialize_from_str, serialize_display}; + +pub mod ed25519; + +pub trait SignatureScheme { + type SigningKey; + type VerifyingKey; + type Signature; + type Digest: digest::Update; + fn new_digest(&self) -> Self::Digest; + fn sign( + &self, + key: &Self::SigningKey, + digest: Self::Digest, + context: &str, + ) -> Result; + fn sign_commitment( + &self, + key: &Self::SigningKey, + commitment: &C, + context: &str, + ) -> Result { + let mut digest = self.new_digest(); + commitment.update(&mut digest); + self.sign(key, digest, context) + } + fn verify( + &self, + key: &Self::VerifyingKey, + digest: Self::Digest, + context: &str, + signature: &Self::Signature, + ) -> Result<(), Error>; + fn verify_commitment( + &self, + key: &Self::VerifyingKey, + commitment: &C, + context: &str, + signature: &Self::Signature, + ) -> Result<(), Error> { + let mut digest = self.new_digest(); + commitment.update(&mut digest); + self.verify(key, digest, context, signature) + } +} + +pub enum AnyScheme { + 
Ed25519(Ed25519), +} +impl From for AnyScheme { + fn from(value: Ed25519) -> Self { + Self::Ed25519(value) + } +} +impl SignatureScheme for AnyScheme { + type SigningKey = AnySigningKey; + type VerifyingKey = AnyVerifyingKey; + type Signature = AnySignature; + type Digest = AnyDigest; + fn new_digest(&self) -> Self::Digest { + match self { + Self::Ed25519(s) => AnyDigest::Sha512(s.new_digest()), + } + } + fn sign( + &self, + key: &Self::SigningKey, + digest: Self::Digest, + context: &str, + ) -> Result { + match (self, key, digest) { + (Self::Ed25519(s), AnySigningKey::Ed25519(key), AnyDigest::Sha512(digest)) => { + Ok(AnySignature::Ed25519(s.sign(key, digest, context)?)) + } + _ => Err(Error::new( + eyre!("mismatched signature algorithm"), + ErrorKind::InvalidSignature, + )), + } + } + fn verify( + &self, + key: &Self::VerifyingKey, + digest: Self::Digest, + context: &str, + signature: &Self::Signature, + ) -> Result<(), Error> { + match (self, key, digest, signature) { + ( + Self::Ed25519(s), + AnyVerifyingKey::Ed25519(key), + AnyDigest::Sha512(digest), + AnySignature::Ed25519(signature), + ) => s.verify(key, digest, context, signature), + _ => Err(Error::new( + eyre!("mismatched signature algorithm"), + ErrorKind::InvalidSignature, + )), + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, TS)] +#[ts(export, type = "string")] +pub enum AnySigningKey { + Ed25519(::SigningKey), +} +impl AnySigningKey { + pub fn scheme(&self) -> AnyScheme { + match self { + Self::Ed25519(_) => AnyScheme::Ed25519(Ed25519), + } + } + pub fn verifying_key(&self) -> AnyVerifyingKey { + match self { + Self::Ed25519(k) => AnyVerifyingKey::Ed25519(k.into()), + } + } +} +impl<'a> TryFrom> for AnySigningKey { + type Error = pkcs8::Error; + fn try_from(value: PrivateKeyInfo<'a>) -> Result { + if value.algorithm == ed25519_dalek::pkcs8::ALGORITHM_ID { + Ok(Self::Ed25519(ed25519_dalek::SigningKey::try_from(value)?)) + } else { + Err(pkcs8::spki::Error::OidUnknown { + oid: value.algorithm.oid, + 
} + .into()) + } + } +} +impl pkcs8::EncodePrivateKey for AnySigningKey { + fn to_pkcs8_der(&self) -> pkcs8::Result { + match self { + Self::Ed25519(s) => s.to_pkcs8_der(), + } + } +} +impl FromStr for AnySigningKey { + type Err = Error; + fn from_str(s: &str) -> Result { + use pkcs8::DecodePrivateKey; + Self::from_pkcs8_pem(s).with_kind(ErrorKind::Deserialization) + } +} +impl Display for AnySigningKey { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use pkcs8::EncodePrivateKey; + f.write_str( + &self + .to_pkcs8_pem(pkcs8::LineEnding::LF) + .map_err(|_| std::fmt::Error)?, + ) + } +} +impl<'de> Deserialize<'de> for AnySigningKey { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + deserialize_from_str(deserializer) + } +} +impl Serialize for AnySigningKey { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serialize_display(self, serializer) + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash, TS)] +#[ts(export, type = "string")] +pub enum AnyVerifyingKey { + Ed25519(::VerifyingKey), +} +impl AnyVerifyingKey { + pub fn scheme(&self) -> AnyScheme { + match self { + Self::Ed25519(_) => AnyScheme::Ed25519(Ed25519), + } + } +} +impl<'a> TryFrom, BitStringRef<'a>>> for AnyVerifyingKey { + type Error = pkcs8::spki::Error; + fn try_from( + value: SubjectPublicKeyInfo, BitStringRef<'a>>, + ) -> Result { + if value.algorithm == ed25519_dalek::pkcs8::ALGORITHM_ID { + Ok(Self::Ed25519(ed25519_dalek::VerifyingKey::try_from(value)?)) + } else { + Err(pkcs8::spki::Error::OidUnknown { + oid: value.algorithm.oid, + }) + } + } +} +impl pkcs8::EncodePublicKey for AnyVerifyingKey { + fn to_public_key_der(&self) -> pkcs8::spki::Result { + match self { + Self::Ed25519(s) => s.to_public_key_der(), + } + } +} +impl FromStr for AnyVerifyingKey { + type Err = Error; + fn from_str(s: &str) -> Result { + use pkcs8::DecodePublicKey; + 
Self::from_public_key_pem(s).with_kind(ErrorKind::Deserialization) + } +} +impl Display for AnyVerifyingKey { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use pkcs8::EncodePublicKey; + f.write_str( + &self + .to_public_key_pem(pkcs8::LineEnding::LF) + .map_err(|_| std::fmt::Error)?, + ) + } +} +impl<'de> Deserialize<'de> for AnyVerifyingKey { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + deserialize_from_str(deserializer) + } +} +impl Serialize for AnyVerifyingKey { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serialize_display(self, serializer) + } +} +impl ValueParserFactory for AnyVerifyingKey { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + Self::Parser::new() + } +} + +#[derive(Clone, Debug)] +pub enum AnyDigest { + Sha512(Sha512), +} +impl digest::Update for AnyDigest { + fn update(&mut self, data: &[u8]) { + match self { + Self::Sha512(d) => digest::Update::update(d, data), + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, TS)] +#[ts(export, type = "string")] +pub enum AnySignature { + Ed25519(::Signature), +} +impl FromStr for AnySignature { + type Err = Error; + fn from_str(s: &str) -> Result { + use der::DecodePem; + + #[derive(der::Sequence)] + struct AnySignatureDer { + alg: pkcs8::spki::AlgorithmIdentifierOwned, + sig: der::asn1::OctetString, + } + impl der::pem::PemLabel for AnySignatureDer { + const PEM_LABEL: &'static str = "SIGNATURE"; + } + + let der = AnySignatureDer::from_pem(s.as_bytes()).with_kind(ErrorKind::Deserialization)?; + if der.alg.oid == ed25519_dalek::pkcs8::ALGORITHM_ID.oid + && der.alg.parameters.owned_to_ref() == ed25519_dalek::pkcs8::ALGORITHM_ID.parameters + { + Ok(Self::Ed25519( + ed25519_dalek::Signature::from_slice(der.sig.as_bytes()) + .with_kind(ErrorKind::Deserialization)?, + )) + } else { + Err(pkcs8::spki::Error::OidUnknown { oid: der.alg.oid }) + .with_kind(ErrorKind::Deserialization) 
+ } + } +} +impl Display for AnySignature { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + use der::EncodePem; + + #[derive(der::Sequence)] + struct AnySignatureDer<'a> { + alg: pkcs8::AlgorithmIdentifierRef<'a>, + sig: der::asn1::OctetString, + } + impl<'a> der::pem::PemLabel for AnySignatureDer<'a> { + const PEM_LABEL: &'static str = "SIGNATURE"; + } + f.write_str( + &match self { + Self::Ed25519(s) => AnySignatureDer { + alg: ed25519_dalek::pkcs8::ALGORITHM_ID, + sig: der::asn1::OctetString::new(s.to_bytes()).map_err(|_| std::fmt::Error)?, + }, + } + .to_pem(der::pem::LineEnding::LF) + .map_err(|_| std::fmt::Error)?, + ) + } +} +impl<'de> Deserialize<'de> for AnySignature { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + deserialize_from_str(deserializer) + } +} +impl Serialize for AnySignature { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serialize_display(self, serializer) + } +} diff --git a/core/startos/src/rpc_continuations.rs b/core/startos/src/rpc_continuations.rs new file mode 100644 index 000000000..043130b69 --- /dev/null +++ b/core/startos/src/rpc_continuations.rs @@ -0,0 +1,259 @@ +use std::collections::BTreeMap; +use std::pin::Pin; +use std::str::FromStr; +use std::sync::Mutex as SyncMutex; +use std::task::{Context, Poll}; +use std::time::Duration; + +use axum::extract::ws::WebSocket; +use axum::extract::Request; +use axum::response::Response; +use clap::builder::ValueParserFactory; +use futures::future::BoxFuture; +use futures::{Future, FutureExt}; +use helpers::TimedResource; +use imbl_value::InternedString; +use tokio::sync::{broadcast, Mutex as AsyncMutex}; +use ts_rs::TS; + +#[allow(unused_imports)] +use crate::prelude::*; +use crate::util::clap::FromStrParser; +use crate::util::new_guid; + +#[derive( + Debug, Clone, PartialEq, Eq, PartialOrd, Ord, serde::Serialize, serde::Deserialize, TS, +)] +#[ts(type = "string")] +pub struct 
Guid(InternedString); +impl Guid { + pub fn new() -> Self { + Self(new_guid()) + } + + pub fn from(r: &str) -> Option { + if r.len() != 32 { + return None; + } + for c in r.chars() { + if !(c >= 'A' && c <= 'Z' || c >= '2' && c <= '7') { + return None; + } + } + Some(Guid(InternedString::intern(r))) + } +} +impl Default for Guid { + fn default() -> Self { + Self::new() + } +} +impl AsRef for Guid { + fn as_ref(&self) -> &str { + self.0.as_ref() + } +} +impl FromStr for Guid { + type Err = Error; + fn from_str(s: &str) -> Result { + Self::from(s).ok_or_else(|| Error::new(eyre!("invalid guid"), ErrorKind::Deserialization)) + } +} +impl ValueParserFactory for Guid { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + Self::Parser::new() + } +} + +#[test] +fn parse_guid() { + println!("{:?}", Guid::from(&format!("{}", Guid::new()))) +} + +impl std::fmt::Display for Guid { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + self.0.fmt(f) + } +} + +pub struct RestFuture { + kill: Option>, + fut: BoxFuture<'static, Result>, +} +impl Future for RestFuture { + type Output = Result; + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + if self.kill.as_ref().map_or(false, |k| !k.is_empty()) { + Poll::Ready(Err(Error::new( + eyre!("session killed"), + ErrorKind::Authorization, + ))) + } else { + self.fut.poll_unpin(cx) + } + } +} +pub type RestHandler = Box RestFuture + Send>; + +pub struct WebSocketFuture { + kill: Option>, + fut: BoxFuture<'static, ()>, +} +impl Future for WebSocketFuture { + type Output = (); + fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll { + if self.kill.as_ref().map_or(false, |k| !k.is_empty()) { + Poll::Ready(()) + } else { + self.fut.poll_unpin(cx) + } + } +} +pub type WebSocketHandler = Box WebSocketFuture + Send>; + +pub enum RpcContinuation { + Rest(TimedResource), + WebSocket(TimedResource), +} +impl RpcContinuation { + pub fn rest(handler: F, timeout: Duration) -> Self + 
where + F: FnOnce(Request) -> Fut + Send + 'static, + Fut: Future> + Send + 'static, + { + RpcContinuation::Rest(TimedResource::new( + Box::new(|req| RestFuture { + kill: None, + fut: handler(req).boxed(), + }), + timeout, + )) + } + pub fn ws(handler: F, timeout: Duration) -> Self + where + F: FnOnce(WebSocket) -> Fut + Send + 'static, + Fut: Future + Send + 'static, + { + RpcContinuation::WebSocket(TimedResource::new( + Box::new(|ws| WebSocketFuture { + kill: None, + fut: handler(ws).boxed(), + }), + timeout, + )) + } + pub fn rest_authed(ctx: Ctx, session: T, handler: F, timeout: Duration) -> Self + where + Ctx: AsRef>, + T: Eq + Ord, + F: FnOnce(Request) -> Fut + Send + 'static, + Fut: Future> + Send + 'static, + { + let kill = Some(ctx.as_ref().subscribe_to_kill(session)); + RpcContinuation::Rest(TimedResource::new( + Box::new(|req| RestFuture { + kill, + fut: handler(req).boxed(), + }), + timeout, + )) + } + pub fn ws_authed(ctx: Ctx, session: T, handler: F, timeout: Duration) -> Self + where + Ctx: AsRef>, + T: Eq + Ord, + F: FnOnce(WebSocket) -> Fut + Send + 'static, + Fut: Future + Send + 'static, + { + let kill = Some(ctx.as_ref().subscribe_to_kill(session)); + RpcContinuation::WebSocket(TimedResource::new( + Box::new(|ws| WebSocketFuture { + kill, + fut: handler(ws).boxed(), + }), + timeout, + )) + } + pub fn is_timed_out(&self) -> bool { + match self { + RpcContinuation::Rest(a) => a.is_timed_out(), + RpcContinuation::WebSocket(a) => a.is_timed_out(), + } + } +} + +pub struct RpcContinuations(AsyncMutex>); +impl RpcContinuations { + pub fn new() -> Self { + RpcContinuations(AsyncMutex::new(BTreeMap::new())) + } + + #[instrument(skip_all)] + pub async fn clean(&self) { + let mut continuations = self.0.lock().await; + let mut to_remove = Vec::new(); + for (guid, cont) in &*continuations { + if cont.is_timed_out() { + to_remove.push(guid.clone()); + } + } + for guid in to_remove { + continuations.remove(&guid); + } + } + + #[instrument(skip_all)] + pub 
async fn add(&self, guid: Guid, handler: RpcContinuation) { + self.clean().await; + self.0.lock().await.insert(guid, handler); + } + + pub async fn get_ws_handler(&self, guid: &Guid) -> Option { + let mut continuations = self.0.lock().await; + if !matches!(continuations.get(guid), Some(RpcContinuation::WebSocket(_))) { + return None; + } + let Some(RpcContinuation::WebSocket(x)) = continuations.remove(guid) else { + return None; + }; + x.get().await + } + + pub async fn get_rest_handler(&self, guid: &Guid) -> Option { + let mut continuations: tokio::sync::MutexGuard<'_, BTreeMap> = + self.0.lock().await; + if !matches!(continuations.get(guid), Some(RpcContinuation::Rest(_))) { + return None; + } + let Some(RpcContinuation::Rest(x)) = continuations.remove(guid) else { + return None; + }; + x.get().await + } +} + +pub struct OpenAuthedContinuations(SyncMutex>>); +impl OpenAuthedContinuations +where + T: Eq + Ord, +{ + pub fn new() -> Self { + Self(SyncMutex::new(BTreeMap::new())) + } + pub fn kill(&self, session: &T) { + if let Some(channel) = self.0.lock().unwrap().remove(session) { + channel.send(()).ok(); + } + } + fn subscribe_to_kill(&self, session: T) -> broadcast::Receiver<()> { + let mut map = self.0.lock().unwrap(); + if let Some(send) = map.get(&session) { + send.subscribe() + } else { + let (send, recv) = broadcast::channel(1); + map.insert(session, send); + recv + } + } +} diff --git a/core/startos/src/s9pk/v1/git_hash.rs b/core/startos/src/s9pk/git_hash.rs similarity index 52% rename from core/startos/src/s9pk/v1/git_hash.rs rename to core/startos/src/s9pk/git_hash.rs index b2990a111..02f83bf4a 100644 --- a/core/startos/src/s9pk/v1/git_hash.rs +++ b/core/startos/src/s9pk/git_hash.rs @@ -1,24 +1,35 @@ use std::path::Path; -use crate::Error; +use tokio::process::Command; + +use crate::prelude::*; +use crate::util::Invoke; #[derive(Clone, Debug, serde::Serialize, serde::Deserialize)] pub struct GitHash(String); impl GitHash { pub async fn from_path(path: 
impl AsRef) -> Result { - let hash = tokio::process::Command::new("git") - .args(["describe", "--always", "--abbrev=40", "--dirty=-modified"]) - .current_dir(path) - .output() - .await?; - if !hash.status.success() { - return Err(Error::new( - color_eyre::eyre::eyre!("Could not get hash: {}", String::from_utf8(hash.stderr)?), - crate::ErrorKind::Filesystem, - )); + let mut hash = String::from_utf8( + Command::new("git") + .arg("rev-parse") + .arg("HEAD") + .current_dir(&path) + .invoke(ErrorKind::Git) + .await?, + )?; + if Command::new("git") + .arg("diff-index") + .arg("--quiet") + .arg("HEAD") + .arg("--") + .invoke(ErrorKind::Git) + .await + .is_err() + { + hash += "-modified"; } - Ok(GitHash(String::from_utf8(hash.stdout)?)) + Ok(GitHash(hash)) } } diff --git a/core/startos/src/s9pk/merkle_archive/directory_contents.rs b/core/startos/src/s9pk/merkle_archive/directory_contents.rs index c5373a31b..b39789222 100644 --- a/core/startos/src/s9pk/merkle_archive/directory_contents.rs +++ b/core/startos/src/s9pk/merkle_archive/directory_contents.rs @@ -3,6 +3,7 @@ use std::fmt::Debug; use std::path::{Path, PathBuf}; use std::sync::Arc; +use blake3::Hash; use futures::future::BoxFuture; use futures::FutureExt; use imbl::OrdMap; @@ -11,11 +12,12 @@ use itertools::Itertools; use tokio::io::AsyncRead; use crate::prelude::*; -use crate::s9pk::merkle_archive::hash::{Hash, HashWriter}; -use crate::s9pk::merkle_archive::sink::{Sink, TrackingWriter}; +use crate::s9pk::merkle_archive::sink::Sink; use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section}; use crate::s9pk::merkle_archive::write_queue::WriteQueue; use crate::s9pk::merkle_archive::{varint, Entry, EntryContents}; +use crate::util::io::{ParallelBlake3Writer, TrackingIO}; +use crate::CAP_10_MiB; #[derive(Clone)] pub struct DirectoryContents { @@ -150,12 +152,12 @@ impl DirectoryContents { Ok(()) } } -impl DirectoryContents> { +impl DirectoryContents> { #[instrument(skip_all)] pub fn 
deserialize<'a>( source: &'a S, header: &'a mut (impl AsyncRead + Unpin + Send), - sighash: Hash, + (sighash, max_size): (Hash, u64), ) -> BoxFuture<'a, Result> { async move { use tokio::io::AsyncReadExt; @@ -168,15 +170,20 @@ impl DirectoryContents> { header.read_exact(&mut size).await?; let size = u64::from_be_bytes(size); + ensure_code!( + size <= max_size, + ErrorKind::InvalidSignature, + "size is greater than signed" + ); + let mut toc_reader = source.fetch(position, size).await?; let len = varint::deserialize_varint(&mut toc_reader).await?; let mut entries = OrdMap::new(); for _ in 0..len { - entries.insert( - varint::deserialize_varstring(&mut toc_reader).await?.into(), - Entry::deserialize(source, &mut toc_reader).await?, - ); + let name = varint::deserialize_varstring(&mut toc_reader).await?; + let entry = Entry::deserialize(source.clone(), &mut toc_reader).await?; + entries.insert(name.into(), entry); } let res = Self { @@ -196,7 +203,7 @@ impl DirectoryContents> { .boxed() } } -impl DirectoryContents { +impl DirectoryContents { pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> { for k in self.keys().cloned().collect::>() { let path = Path::new(&*k); @@ -204,7 +211,10 @@ impl DirectoryContents { if !filter(path) { if v.hash.is_none() { return Err(Error::new( - eyre!("cannot filter out unhashed file, run `update_hashes` first"), + eyre!( + "cannot filter out unhashed file {}, run `update_hashes` first", + path.display() + ), ErrorKind::InvalidRequest, )); } @@ -233,7 +243,7 @@ impl DirectoryContents { #[instrument(skip_all)] pub fn sighash<'a>(&'a self) -> BoxFuture<'a, Result> { async move { - let mut hasher = TrackingWriter::new(0, HashWriter::new()); + let mut hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB)); let mut sig_contents = OrdMap::new(); for (name, entry) in &**self { sig_contents.insert(name.clone(), entry.to_missing().await?); @@ -244,7 +254,8 @@ impl DirectoryContents { } .serialize_toc(&mut 
WriteQueue::new(0), &mut hasher) .await?; - Ok(hasher.into_inner().finalize()) + let hash = hasher.into_inner().finalize().await?; + Ok(hash) } .boxed() } @@ -263,6 +274,21 @@ impl DirectoryContents { ((_, a), (_, b), _) if !a.as_contents().is_dir() && b.as_contents().is_dir() => { std::cmp::Ordering::Greater } + ((_, a), (_, b), _) + if a.as_contents().is_missing() && !b.as_contents().is_missing() => + { + std::cmp::Ordering::Greater + } + ((_, a), (_, b), _) + if !a.as_contents().is_missing() && b.as_contents().is_missing() => + { + std::cmp::Ordering::Less + } + ((n_a, a), (n_b, b), _) + if a.as_contents().is_missing() && b.as_contents().is_missing() => + { + n_a.cmp(n_b) + } ((a, _), (b, _), Some(sort_by)) => sort_by(&***a, &***b), _ => std::cmp::Ordering::Equal, }) { @@ -272,6 +298,7 @@ impl DirectoryContents { Ok(()) } + pub fn into_dyn(self) -> DirectoryContents { DirectoryContents { contents: self diff --git a/core/startos/src/s9pk/merkle_archive/expected.rs b/core/startos/src/s9pk/merkle_archive/expected.rs new file mode 100644 index 000000000..c9a2fd31b --- /dev/null +++ b/core/startos/src/s9pk/merkle_archive/expected.rs @@ -0,0 +1,105 @@ +use std::ffi::OsStr; +use std::path::Path; + +use crate::prelude::*; +use crate::s9pk::merkle_archive::directory_contents::DirectoryContents; +use crate::s9pk::merkle_archive::source::FileSource; +use crate::s9pk::merkle_archive::Entry; + +/// An object for tracking the files expected to be in an s9pk +pub struct Expected<'a, T> { + keep: DirectoryContents<()>, + dir: &'a DirectoryContents, +} +impl<'a, T> Expected<'a, T> { + pub fn new(dir: &'a DirectoryContents) -> Self { + Self { + keep: DirectoryContents::new(), + dir, + } + } +} +impl<'a, T: Clone> Expected<'a, T> { + pub fn check_file(&mut self, path: impl AsRef) -> Result<(), Error> { + if self + .dir + .get_path(path.as_ref()) + .and_then(|e| e.as_file()) + .is_some() + { + self.keep.insert_path(path, Entry::file(()))?; + Ok(()) + } else { + Err(Error::new( + 
eyre!("file {} missing from archive", path.as_ref().display()), + ErrorKind::ParseS9pk, + )) + } + } + pub fn check_stem( + &mut self, + path: impl AsRef, + mut valid_extension: impl FnMut(Option<&OsStr>) -> bool, + ) -> Result<(), Error> { + let (dir, stem) = + if let Some(parent) = path.as_ref().parent().filter(|p| *p != Path::new("")) { + ( + self.dir + .get_path(parent) + .and_then(|e| e.as_directory()) + .ok_or_else(|| { + Error::new( + eyre!("directory {} missing from archive", parent.display()), + ErrorKind::ParseS9pk, + ) + })?, + path.as_ref().strip_prefix(parent).unwrap(), + ) + } else { + (self.dir, path.as_ref()) + }; + let name = dir + .with_stem(&stem.as_os_str().to_string_lossy()) + .filter(|(_, e)| e.as_file().is_some()) + .try_fold( + Err(Error::new( + eyre!( + "file {} with valid extension missing from archive", + path.as_ref().display() + ), + ErrorKind::ParseS9pk, + )), + |acc, (name, _)| + if valid_extension(Path::new(&*name).extension()) { + match acc { + Ok(_) => Err(Error::new( + eyre!( + "more than one file matching {} with valid extension in archive", + path.as_ref().display() + ), + ErrorKind::ParseS9pk, + )), + Err(_) => Ok(Ok(name)) + } + } else { + Ok(acc) + } + )??; + self.keep + .insert_path(path.as_ref().with_file_name(name), Entry::file(()))?; + Ok(()) + } + pub fn into_filter(self) -> Filter { + Filter(self.keep) + } +} + +pub struct Filter(DirectoryContents<()>); +impl Filter { + pub fn keep_checked( + &self, + dir: &mut DirectoryContents, + ) -> Result<(), Error> { + dir.filter(|path| self.0.get_path(path).is_some()) + } +} diff --git a/core/startos/src/s9pk/merkle_archive/file_contents.rs b/core/startos/src/s9pk/merkle_archive/file_contents.rs index 7529fd2d0..c34193e31 100644 --- a/core/startos/src/s9pk/merkle_archive/file_contents.rs +++ b/core/startos/src/s9pk/merkle_archive/file_contents.rs @@ -1,9 +1,11 @@ +use blake3::Hash; use tokio::io::AsyncRead; use crate::prelude::*; -use crate::s9pk::merkle_archive::hash::{Hash, 
HashWriter}; -use crate::s9pk::merkle_archive::sink::{Sink, TrackingWriter}; +use crate::s9pk::merkle_archive::sink::Sink; use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section}; +use crate::util::io::{ParallelBlake3Writer, TrackingIO}; +use crate::CAP_10_MiB; #[derive(Debug, Clone)] pub struct FileContents(S); @@ -13,14 +15,14 @@ impl FileContents { } pub const fn header_size() -> u64 { 8 // position: u64 BE - + 8 // size: u64 BE } } impl FileContents> { #[instrument(skip_all)] pub async fn deserialize( - source: &S, + source: S, header: &mut (impl AsyncRead + Unpin + Send), + size: u64, ) -> Result { use tokio::io::AsyncReadExt; @@ -28,27 +30,22 @@ impl FileContents> { header.read_exact(&mut position).await?; let position = u64::from_be_bytes(position); - let mut size = [0u8; 8]; - header.read_exact(&mut size).await?; - let size = u64::from_be_bytes(size); - Ok(Self(source.section(position, size))) } } impl FileContents { - pub async fn hash(&self) -> Result { - let mut hasher = TrackingWriter::new(0, HashWriter::new()); + pub async fn hash(&self) -> Result<(Hash, u64), Error> { + let mut hasher = TrackingIO::new(0, ParallelBlake3Writer::new(CAP_10_MiB)); self.serialize_body(&mut hasher, None).await?; - Ok(hasher.into_inner().finalize()) + let size = hasher.position(); + let hash = hasher.into_inner().finalize().await?; + Ok((hash, size)) } #[instrument(skip_all)] pub async fn serialize_header(&self, position: u64, w: &mut W) -> Result { use tokio::io::AsyncWriteExt; - let size = self.0.size().await?; - w.write_all(&position.to_be_bytes()).await?; - w.write_all(&size.to_be_bytes()).await?; Ok(position) } @@ -56,21 +53,9 @@ impl FileContents { pub async fn serialize_body( &self, w: &mut W, - verify: Option, + verify: Option<(Hash, u64)>, ) -> Result<(), Error> { - let start = if verify.is_some() { - Some(w.current_position().await?) 
- } else { - None - }; self.0.copy_verify(w, verify).await?; - if let Some(start) = start { - ensure_code!( - w.current_position().await? - start == self.0.size().await?, - ErrorKind::Pack, - "FileSource::copy wrote a number of bytes that does not match FileSource::size" - ); - } Ok(()) } pub fn into_dyn(self) -> FileContents { diff --git a/core/startos/src/s9pk/merkle_archive/hash.rs b/core/startos/src/s9pk/merkle_archive/hash.rs index ae2829012..c7ad470e4 100644 --- a/core/startos/src/s9pk/merkle_archive/hash.rs +++ b/core/startos/src/s9pk/merkle_archive/hash.rs @@ -1,68 +1,56 @@ -pub use blake3::Hash; -use blake3::Hasher; +use std::task::Poll; + +use blake3::Hash; use tokio::io::AsyncWrite; +use tokio_util::either::Either; use crate::prelude::*; - -#[pin_project::pin_project] -pub struct HashWriter { - hasher: Hasher, -} -impl HashWriter { - pub fn new() -> Self { - Self { - hasher: Hasher::new(), - } - } - pub fn finalize(self) -> Hash { - self.hasher.finalize() - } -} -impl AsyncWrite for HashWriter { - fn poll_write( - self: std::pin::Pin<&mut Self>, - _cx: &mut std::task::Context<'_>, - buf: &[u8], - ) -> std::task::Poll> { - self.project().hasher.update(buf); - std::task::Poll::Ready(Ok(buf.len())) - } - fn poll_flush( - self: std::pin::Pin<&mut Self>, - _cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - std::task::Poll::Ready(Ok(())) - } - fn poll_shutdown( - self: std::pin::Pin<&mut Self>, - _cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - std::task::Poll::Ready(Ok(())) - } -} +use crate::util::io::{ParallelBlake3Writer, TeeWriter}; +use crate::CAP_10_MiB; #[pin_project::pin_project] pub struct VerifyingWriter { - verify: Option<(Hasher, Hash)>, + verify: Option<(Hash, u64)>, #[pin] - writer: W, + writer: Either, W>, } impl VerifyingWriter { - pub fn new(w: W, verify: Option) -> Self { + pub fn new(w: W, verify: Option<(Hash, u64)>) -> Self { Self { - verify: verify.map(|v| (Hasher::new(), v)), - writer: w, + writer: if 
verify.is_some() { + Either::Left(TeeWriter::new( + w, + ParallelBlake3Writer::new(CAP_10_MiB), + CAP_10_MiB, + )) + } else { + Either::Right(w) + }, + verify, } } - pub fn verify(self) -> Result { - if let Some((actual, expected)) = self.verify { - ensure_code!( - actual.finalize() == expected, - ErrorKind::InvalidSignature, - "hash sum does not match" - ); +} +impl VerifyingWriter { + pub async fn verify(self) -> Result { + match self.writer { + Either::Left(writer) => { + let (writer, actual) = writer.into_inner().await?; + if let Some((expected, remaining)) = self.verify { + ensure_code!( + actual.finalize().await? == expected, + ErrorKind::InvalidSignature, + "hash sum mismatch" + ); + ensure_code!( + remaining == 0, + ErrorKind::InvalidSignature, + "file size mismatch" + ); + } + Ok(writer) + } + Either::Right(writer) => Ok(writer), } - Ok(self.writer) } } impl AsyncWrite for VerifyingWriter { @@ -70,28 +58,35 @@ impl AsyncWrite for VerifyingWriter { self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, buf: &[u8], - ) -> std::task::Poll> { + ) -> Poll> { let this = self.project(); - match this.writer.poll_write(cx, buf) { - std::task::Poll::Ready(Ok(written)) => { - if let Some((h, _)) = this.verify { - h.update(&buf[..written]); - } - std::task::Poll::Ready(Ok(written)) + if let Some((_, remaining)) = this.verify { + if *remaining < buf.len() as u64 { + return Poll::Ready(Err(std::io::Error::other(eyre!( + "attempted to write more bytes than signed" + )))); + } + } + match this.writer.poll_write(cx, buf)? 
{ + Poll::Pending => Poll::Pending, + Poll::Ready(n) => { + if let Some((_, remaining)) = this.verify { + *remaining -= n as u64; + } + Poll::Ready(Ok(n)) } - a => a, } } fn poll_flush( self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { + ) -> Poll> { self.project().writer.poll_flush(cx) } fn poll_shutdown( self: std::pin::Pin<&mut Self>, cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { + ) -> Poll> { self.project().writer.poll_shutdown(cx) } } diff --git a/core/startos/src/s9pk/merkle_archive/mod.rs b/core/startos/src/s9pk/merkle_archive/mod.rs index afd00032a..3f30a4ce1 100644 --- a/core/startos/src/s9pk/merkle_archive/mod.rs +++ b/core/startos/src/s9pk/merkle_archive/mod.rs @@ -1,17 +1,25 @@ use std::path::Path; +use blake3::Hash; use ed25519_dalek::{Signature, SigningKey, VerifyingKey}; +use imbl_value::InternedString; +use sha2::{Digest, Sha512}; use tokio::io::AsyncRead; use crate::prelude::*; +use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment; +use crate::registry::signer::sign::ed25519::Ed25519; +use crate::registry::signer::sign::SignatureScheme; use crate::s9pk::merkle_archive::directory_contents::DirectoryContents; use crate::s9pk::merkle_archive::file_contents::FileContents; -use crate::s9pk::merkle_archive::hash::Hash; use crate::s9pk::merkle_archive::sink::Sink; use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section}; use crate::s9pk::merkle_archive::write_queue::WriteQueue; +use crate::util::serde::Base64; +use crate::CAP_1_MiB; pub mod directory_contents; +pub mod expected; pub mod file_contents; pub mod hash; pub mod sink; @@ -23,8 +31,8 @@ pub mod write_queue; #[derive(Debug, Clone)] enum Signer { - Signed(VerifyingKey, Signature), - Signer(SigningKey), + Signed(VerifyingKey, Signature, u64, InternedString), + Signer(SigningKey, InternedString), } #[derive(Debug, Clone)] @@ -33,22 +41,23 @@ pub struct MerkleArchive { contents: 
DirectoryContents, } impl MerkleArchive { - pub fn new(contents: DirectoryContents, signer: SigningKey) -> Self { + pub fn new(contents: DirectoryContents, signer: SigningKey, context: &str) -> Self { Self { - signer: Signer::Signer(signer), + signer: Signer::Signer(signer, context.into()), contents, } } pub fn signer(&self) -> VerifyingKey { match &self.signer { - Signer::Signed(k, _) => *k, - Signer::Signer(k) => k.verifying_key(), + Signer::Signed(k, _, _, _) => *k, + Signer::Signer(k, _) => k.verifying_key(), } } pub const fn header_size() -> u64 { 32 // pubkey + 64 // signature + 32 // sighash + + 8 // size + DirectoryContents::>::header_size() } pub fn contents(&self) -> &DirectoryContents { @@ -57,8 +66,8 @@ impl MerkleArchive { pub fn contents_mut(&mut self) -> &mut DirectoryContents { &mut self.contents } - pub fn set_signer(&mut self, key: SigningKey) { - self.signer = Signer::Signer(key); + pub fn set_signer(&mut self, key: SigningKey, context: &str) { + self.signer = Signer::Signer(key, context.into()); } pub fn sort_by( &mut self, @@ -67,11 +76,13 @@ impl MerkleArchive { self.contents.sort_by(sort_by) } } -impl MerkleArchive> { +impl MerkleArchive> { #[instrument(skip_all)] pub async fn deserialize( source: &S, + context: &str, header: &mut (impl AsyncRead + Unpin + Send), + commitment: Option<&MerkleArchiveCommitment>, ) -> Result { use tokio::io::AsyncReadExt; @@ -87,37 +98,94 @@ impl MerkleArchive> { header.read_exact(&mut sighash).await?; let sighash = Hash::from_bytes(sighash); - let contents = DirectoryContents::deserialize(source, header, sighash).await?; + let mut max_size = [0u8; 8]; + header.read_exact(&mut max_size).await?; + let max_size = u64::from_be_bytes(max_size); - pubkey.verify_strict(contents.sighash().await?.as_bytes(), &signature)?; + pubkey.verify_prehashed_strict( + Sha512::new_with_prefix(sighash.as_bytes()).chain_update(&u64::to_be_bytes(max_size)), + Some(context.as_bytes()), + &signature, + )?; + + if let 
Some(MerkleArchiveCommitment { + root_sighash, + root_maxsize, + }) = commitment + { + if sighash.as_bytes() != &**root_sighash { + return Err(Error::new( + eyre!("merkle root mismatch"), + ErrorKind::InvalidSignature, + )); + } + if max_size > *root_maxsize { + return Err(Error::new( + eyre!("root directory max size too large"), + ErrorKind::InvalidSignature, + )); + } + } else { + if max_size > CAP_1_MiB as u64 { + return Err(Error::new( + eyre!("root directory max size over 1MiB, cancelling download in case of DOS attack"), + ErrorKind::InvalidSignature, + )); + } + } + + let contents = DirectoryContents::deserialize(source, header, (sighash, max_size)).await?; Ok(Self { - signer: Signer::Signed(pubkey, signature), + signer: Signer::Signed(pubkey, signature, max_size, context.into()), contents, }) } } -impl MerkleArchive { +impl MerkleArchive { pub async fn update_hashes(&mut self, only_missing: bool) -> Result<(), Error> { self.contents.update_hashes(only_missing).await } pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> { self.contents.filter(filter) } + pub async fn commitment(&self) -> Result { + let root_maxsize = match self.signer { + Signer::Signed(_, _, s, _) => s, + _ => self.contents.toc_size(), + }; + let root_sighash = self.contents.sighash().await?; + Ok(MerkleArchiveCommitment { + root_sighash: Base64(*root_sighash.as_bytes()), + root_maxsize, + }) + } + pub async fn signature(&self) -> Result { + match &self.signer { + Signer::Signed(_, s, _, _) => Ok(*s), + Signer::Signer(k, context) => { + Ed25519.sign_commitment(k, &self.commitment().await?, context) + } + } + } #[instrument(skip_all)] pub async fn serialize(&self, w: &mut W, verify: bool) -> Result<(), Error> { use tokio::io::AsyncWriteExt; - let sighash = self.contents.sighash().await?; + let commitment = self.commitment().await?; let (pubkey, signature) = match &self.signer { - Signer::Signed(pubkey, signature) => (*pubkey, *signature), - Signer::Signer(s) => 
(s.into(), ed25519_dalek::Signer::sign(s, sighash.as_bytes())), + Signer::Signed(pubkey, signature, _, _) => (*pubkey, *signature), + Signer::Signer(s, context) => { + (s.into(), Ed25519.sign_commitment(s, &commitment, context)?) + } }; w.write_all(pubkey.as_bytes()).await?; w.write_all(&signature.to_bytes()).await?; - w.write_all(sighash.as_bytes()).await?; + w.write_all(&*commitment.root_sighash).await?; + w.write_all(&u64::to_be_bytes(commitment.root_maxsize)) + .await?; let mut next_pos = w.current_position().await?; next_pos += DirectoryContents::::header_size(); self.contents.serialize_header(next_pos, w).await?; @@ -137,7 +205,7 @@ impl MerkleArchive { #[derive(Debug, Clone)] pub struct Entry { - hash: Option, + hash: Option<(Hash, u64)>, contents: EntryContents, } impl Entry { @@ -150,7 +218,10 @@ impl Entry { pub fn file(source: S) -> Self { Self::new(EntryContents::File(FileContents::new(source))) } - pub fn hash(&self) -> Option { + pub fn directory(directory: DirectoryContents) -> Self { + Self::new(EntryContents::Directory(directory)) + } + pub fn hash(&self) -> Option<(Hash, u64)> { self.hash } pub fn as_contents(&self) -> &EntryContents { @@ -162,6 +233,10 @@ impl Entry { _ => None, } } + pub fn expect_file(&self) -> Result<&FileContents, Error> { + self.as_file() + .ok_or_else(|| Error::new(eyre!("not a file"), ErrorKind::ParseS9pk)) + } pub fn as_directory(&self) -> Option<&DirectoryContents> { match self.as_contents() { EntryContents::Directory(d) => Some(d), @@ -189,14 +264,14 @@ impl Entry { } pub fn header_size(&self) -> u64 { 32 // hash + + 8 // size: u64 BE + self.contents.header_size() } } -impl Entry {} -impl Entry> { +impl Entry> { #[instrument(skip_all)] pub async fn deserialize( - source: &S, + source: S, header: &mut (impl AsyncRead + Unpin + Send), ) -> Result { use tokio::io::AsyncReadExt; @@ -205,32 +280,31 @@ impl Entry> { header.read_exact(&mut hash).await?; let hash = Hash::from_bytes(hash); - let contents = 
EntryContents::deserialize(source, header, hash).await?; + let mut size = [0u8; 8]; + header.read_exact(&mut size).await?; + let size = u64::from_be_bytes(size); + + let contents = EntryContents::deserialize(source, header, (hash, size)).await?; Ok(Self { - hash: Some(hash), + hash: Some((hash, size)), contents, }) } } -impl Entry { +impl Entry { pub fn filter(&mut self, filter: impl Fn(&Path) -> bool) -> Result<(), Error> { if let EntryContents::Directory(d) = &mut self.contents { d.filter(filter)?; } Ok(()) } - pub async fn read_file_to_vec(&self) -> Result, Error> { - match self.as_contents() { - EntryContents::File(f) => Ok(f.to_vec(self.hash).await?), - EntryContents::Directory(_) => Err(Error::new( - eyre!("expected file, found directory"), - ErrorKind::ParseS9pk, - )), - EntryContents::Missing => { - Err(Error::new(eyre!("entry is missing"), ErrorKind::ParseS9pk)) - } + pub async fn update_hash(&mut self, only_missing: bool) -> Result<(), Error> { + if let EntryContents::Directory(d) = &mut self.contents { + d.update_hashes(only_missing).await?; } + self.hash = Some(self.contents.hash().await?); + Ok(()) } pub async fn to_missing(&self) -> Result { let hash = if let Some(hash) = self.hash { @@ -243,13 +317,6 @@ impl Entry { contents: EntryContents::Missing, }) } - pub async fn update_hash(&mut self, only_missing: bool) -> Result<(), Error> { - if let EntryContents::Directory(d) = &mut self.contents { - d.update_hashes(only_missing).await?; - } - self.hash = Some(self.contents.hash().await?); - Ok(()) - } #[instrument(skip_all)] pub async fn serialize_header( &self, @@ -258,12 +325,13 @@ impl Entry { ) -> Result, Error> { use tokio::io::AsyncWriteExt; - let hash = if let Some(hash) = self.hash { + let (hash, size) = if let Some(hash) = self.hash { hash } else { self.contents.hash().await? 
}; w.write_all(hash.as_bytes()).await?; + w.write_all(&u64::to_be_bytes(size)).await?; self.contents.serialize_header(position, w).await } pub fn into_dyn(self) -> Entry { @@ -273,6 +341,20 @@ impl Entry { } } } +impl Entry { + pub async fn read_file_to_vec(&self) -> Result, Error> { + match self.as_contents() { + EntryContents::File(f) => Ok(f.to_vec(self.hash).await?), + EntryContents::Directory(_) => Err(Error::new( + eyre!("expected file, found directory"), + ErrorKind::ParseS9pk, + )), + EntryContents::Missing => { + Err(Error::new(eyre!("entry is missing"), ErrorKind::ParseS9pk)) + } + } + } +} #[derive(Debug, Clone)] pub enum EntryContents { @@ -299,13 +381,16 @@ impl EntryContents { pub fn is_dir(&self) -> bool { matches!(self, &EntryContents::Directory(_)) } + pub fn is_missing(&self) -> bool { + matches!(self, &EntryContents::Missing) + } } -impl EntryContents> { +impl EntryContents> { #[instrument(skip_all)] pub async fn deserialize( - source: &S, + source: S, header: &mut (impl AsyncRead + Unpin + Send), - hash: Hash, + (hash, size): (Hash, u64), ) -> Result { use tokio::io::AsyncReadExt; @@ -313,9 +398,11 @@ impl EntryContents> { header.read_exact(&mut type_id).await?; match type_id[0] { 0 => Ok(Self::Missing), - 1 => Ok(Self::File(FileContents::deserialize(source, header).await?)), + 1 => Ok(Self::File( + FileContents::deserialize(source, header, size).await?, + )), 2 => Ok(Self::Directory( - DirectoryContents::deserialize(source, header, hash).await?, + DirectoryContents::deserialize(&source, header, (hash, size)).await?, )), id => Err(Error::new( eyre!("Unknown type id {id} found in MerkleArchive"), @@ -324,17 +411,26 @@ impl EntryContents> { } } } -impl EntryContents { - pub async fn hash(&self) -> Result { +impl EntryContents { + pub async fn hash(&self) -> Result<(Hash, u64), Error> { match self { Self::Missing => Err(Error::new( eyre!("Cannot compute hash of missing file"), ErrorKind::Pack, )), Self::File(f) => f.hash().await, - 
Self::Directory(d) => d.sighash().await, + Self::Directory(d) => Ok((d.sighash().await?, d.toc_size())), } } + pub fn into_dyn(self) -> EntryContents { + match self { + Self::Missing => EntryContents::Missing, + Self::File(f) => EntryContents::File(f.into_dyn()), + Self::Directory(d) => EntryContents::Directory(d.into_dyn()), + } + } +} +impl EntryContents { #[instrument(skip_all)] pub async fn serialize_header( &self, @@ -350,11 +446,4 @@ impl EntryContents { Self::Directory(d) => Some(d.serialize_header(position, w).await?), }) } - pub fn into_dyn(self) -> EntryContents { - match self { - Self::Missing => EntryContents::Missing, - Self::File(f) => EntryContents::File(f.into_dyn()), - Self::Directory(d) => EntryContents::Directory(d.into_dyn()), - } - } } diff --git a/core/startos/src/s9pk/merkle_archive/sink.rs b/core/startos/src/s9pk/merkle_archive/sink.rs index c71377808..5357eb2d6 100644 --- a/core/startos/src/s9pk/merkle_archive/sink.rs +++ b/core/startos/src/s9pk/merkle_archive/sink.rs @@ -1,6 +1,7 @@ use tokio::io::{AsyncSeek, AsyncWrite}; use crate::prelude::*; +use crate::util::io::TrackingIO; #[async_trait::async_trait] pub trait Sink: AsyncWrite + Unpin + Send { @@ -17,54 +18,8 @@ impl Sink for S { } #[async_trait::async_trait] -impl Sink for TrackingWriter { +impl Sink for TrackingIO { async fn current_position(&mut self) -> Result { - Ok(self.position) - } -} - -#[pin_project::pin_project] -pub struct TrackingWriter { - position: u64, - #[pin] - writer: W, -} -impl TrackingWriter { - pub fn new(start: u64, w: W) -> Self { - Self { - position: start, - writer: w, - } - } - pub fn into_inner(self) -> W { - self.writer - } -} -impl AsyncWrite for TrackingWriter { - fn poll_write( - self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - buf: &[u8], - ) -> std::task::Poll> { - let this = self.project(); - match this.writer.poll_write(cx, buf) { - std::task::Poll::Ready(Ok(written)) => { - *this.position += written as u64; - 
std::task::Poll::Ready(Ok(written)) - } - a => a, - } - } - fn poll_flush( - self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - self.project().writer.poll_flush(cx) - } - fn poll_shutdown( - self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - self.project().writer.poll_shutdown(cx) + Ok(self.position()) } } diff --git a/core/startos/src/s9pk/merkle_archive/source/http.rs b/core/startos/src/s9pk/merkle_archive/source/http.rs index 1cb9ba961..e58208277 100644 --- a/core/startos/src/s9pk/merkle_archive/source/http.rs +++ b/core/startos/src/s9pk/merkle_archive/source/http.rs @@ -1,23 +1,25 @@ +use std::collections::BTreeSet; +use std::pin::Pin; +use std::sync::{Arc, Mutex}; +use std::task::Poll; + use bytes::Bytes; -use futures::stream::BoxStream; -use futures::{StreamExt, TryStreamExt}; +use futures::{Stream, TryStreamExt}; use reqwest::header::{ACCEPT_RANGES, CONTENT_LENGTH, RANGE}; use reqwest::{Client, Url}; -use tokio::io::AsyncRead; +use tokio::io::{AsyncRead, AsyncReadExt, ReadBuf, Take}; use tokio_util::io::StreamReader; use crate::prelude::*; use crate::s9pk::merkle_archive::source::ArchiveSource; +use crate::util::io::TrackingIO; +use crate::util::Apply; -#[derive(Clone)] pub struct HttpSource { url: Url, client: Client, size: Option, - range_support: Result< - (), - (), // Arc>> - >, + range_support: Result<(), Arc>>>>, } impl HttpSource { pub async fn new(client: Client, url: Url) -> Result { @@ -32,7 +34,8 @@ impl HttpSource { .headers() .get(ACCEPT_RANGES) .and_then(|s| s.to_str().ok()) - == Some("bytes"); + == Some("bytes") + && false; let size = head .headers() .get(CONTENT_LENGTH) @@ -45,53 +48,142 @@ impl HttpSource { range_support: if range_support { Ok(()) } else { - Err(()) // Err(Arc::new(Mutex::new(None))) + Err(Arc::new(Mutex::new(BTreeSet::new()))) }, }) } } -#[async_trait::async_trait] impl ArchiveSource for HttpSource { - type Reader = HttpReader; + type 
FetchReader = HttpReader; + type FetchAllReader = StreamReader>, Bytes>; async fn size(&self) -> Option { self.size } - async fn fetch(&self, position: u64, size: u64) -> Result { - match self.range_support { - Ok(_) => Ok(HttpReader::Range(StreamReader::new(if size > 0 { - self.client - .get(self.url.clone()) - .header(RANGE, format!("bytes={}-{}", position, position + size - 1)) - .send() - .await - .with_kind(ErrorKind::Network)? - .error_for_status() - .with_kind(ErrorKind::Network)? - .bytes_stream() - .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)) - .boxed() - } else { - futures::stream::empty().boxed() - }))), - _ => todo!(), + async fn fetch_all(&self) -> Result { + Ok(StreamReader::new( + self.client + .get(self.url.clone()) + .send() + .await + .with_kind(ErrorKind::Network)? + .error_for_status() + .with_kind(ErrorKind::Network)? + .bytes_stream() + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)) + .apply(boxed), + )) + } + async fn fetch(&self, position: u64, size: u64) -> Result { + match &self.range_support { + Ok(_) => Ok(HttpReader::Range( + StreamReader::new(if size > 0 { + self.client + .get(self.url.clone()) + .header(RANGE, format!("bytes={}-{}", position, position + size - 1)) + .send() + .await + .with_kind(ErrorKind::Network)? + .error_for_status() + .with_kind(ErrorKind::Network)? + .bytes_stream() + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)) + .apply(boxed) + } else { + futures::stream::empty().apply(boxed) + }) + .take(size), + )), + Err(pool) => { + fn get_reader_for( + pool: &Arc>>>, + position: u64, + ) -> Option> { + let mut lock = pool.lock().unwrap(); + let pos = lock.range(..position).last()?.position(); + lock.take(&pos) + } + let reader = get_reader_for(pool, position); + let mut reader = if let Some(reader) = reader { + reader + } else { + TrackingIO::new( + 0, + StreamReader::new( + self.client + .get(self.url.clone()) + .send() + .await + .with_kind(ErrorKind::Network)? 
+ .error_for_status() + .with_kind(ErrorKind::Network)? + .bytes_stream() + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e)) + .apply(boxed), + ), + ) + }; + if reader.position() < position { + let to_skip = position - reader.position(); + tokio::io::copy(&mut (&mut reader).take(to_skip), &mut tokio::io::sink()) + .await?; + } + Ok(HttpReader::Rangeless { + pool: pool.clone(), + reader: Some(reader.take(size)), + }) + } } } } -#[pin_project::pin_project(project = HttpReaderProj)] +type BoxStream<'a, T> = Pin + Send + Sync + 'a>>; +fn boxed<'a, T>(stream: impl Stream + Send + Sync + 'a) -> BoxStream<'a, T> { + Box::pin(stream) +} +type HttpBodyReader = StreamReader>, Bytes>; + +#[pin_project::pin_project(project = HttpReaderProj, PinnedDrop)] pub enum HttpReader { - Range(#[pin] StreamReader>, Bytes>), - // Rangeless(#[pin] RangelessReader), + Range(#[pin] Take), + Rangeless { + pool: Arc>>>, + #[pin] + reader: Option>>, + }, } impl AsyncRead for HttpReader { fn poll_read( - self: std::pin::Pin<&mut Self>, + self: Pin<&mut Self>, cx: &mut std::task::Context<'_>, - buf: &mut tokio::io::ReadBuf<'_>, - ) -> std::task::Poll> { + buf: &mut ReadBuf<'_>, + ) -> Poll> { match self.project() { HttpReaderProj::Range(r) => r.poll_read(cx, buf), - // HttpReaderProj::Rangeless(r) => r.poll_read(cx, buf), + HttpReaderProj::Rangeless { mut reader, .. 
} => { + let mut finished = false; + if let Some(reader) = reader.as_mut().as_pin_mut() { + let start = buf.filled().len(); + futures::ready!(reader.poll_read(cx, buf)?); + finished = start == buf.filled().len(); + } + if finished { + reader.take(); + } + Poll::Ready(Ok(())) + } + } + } +} +#[pin_project::pinned_drop] +impl PinnedDrop for HttpReader { + fn drop(self: Pin<&mut Self>) { + match self.project() { + HttpReaderProj::Range(_) => (), + HttpReaderProj::Rangeless { pool, mut reader } => { + if let Some(reader) = reader.take() { + pool.lock().unwrap().insert(reader.into_inner()); + } + } } } } diff --git a/core/startos/src/s9pk/merkle_archive/source/mod.rs b/core/startos/src/s9pk/merkle_archive/source/mod.rs index 97c94b480..cc9623ab6 100644 --- a/core/startos/src/s9pk/merkle_archive/source/mod.rs +++ b/core/startos/src/s9pk/merkle_archive/source/mod.rs @@ -1,39 +1,100 @@ +use std::cmp::min; +use std::io::SeekFrom; +use std::ops::Deref; use std::path::PathBuf; use std::sync::Arc; use blake3::Hash; +use futures::future::BoxFuture; +use futures::{Future, FutureExt}; use tokio::fs::File; -use tokio::io::{AsyncRead, AsyncWrite}; +use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeekExt, AsyncWrite, Take}; use crate::prelude::*; use crate::s9pk::merkle_archive::hash::VerifyingWriter; +use crate::util::io::{open_file, TmpDir}; pub mod http; pub mod multi_cursor_file; -#[async_trait::async_trait] -pub trait FileSource: Clone + Send + Sync + Sized + 'static { +pub trait FileSource: Send + Sync + Sized + 'static { type Reader: AsyncRead + Unpin + Send; - async fn size(&self) -> Result; - async fn reader(&self) -> Result; + type SliceReader: AsyncRead + Unpin + Send; + fn size(&self) -> impl Future> + Send; + fn reader(&self) -> impl Future> + Send; + fn slice( + &self, + position: u64, + size: u64, + ) -> impl Future> + Send; + fn copy( + &self, + w: &mut W, + ) -> impl Future> + Send { + async move { + tokio::io::copy(&mut self.reader().await?, w).await?; + Ok(()) + } + 
} + fn copy_verify( + &self, + w: &mut W, + verify: Option<(Hash, u64)>, + ) -> impl Future> + Send { + async move { + let mut w = VerifyingWriter::new(w, verify); + tokio::io::copy(&mut self.reader().await?, &mut w).await?; + w.verify().await?; + Ok(()) + } + } + fn to_vec( + &self, + verify: Option<(Hash, u64)>, + ) -> impl Future, Error>> + Send { + fn to_vec( + src: &impl FileSource, + verify: Option<(Hash, u64)>, + ) -> BoxFuture, Error>> { + async move { + let mut vec = Vec::with_capacity(if let Some((_, size)) = &verify { + *size + } else { + src.size().await? + } as usize); + src.copy_verify(&mut vec, verify).await?; + Ok(vec) + } + .boxed() + } + to_vec(self, verify) + } +} + +impl FileSource for Arc { + type Reader = T::Reader; + type SliceReader = T::SliceReader; + async fn size(&self) -> Result { + self.deref().size().await + } + async fn reader(&self) -> Result { + self.deref().reader().await + } + async fn slice(&self, position: u64, size: u64) -> Result { + self.deref().slice(position, size).await + } async fn copy(&self, w: &mut W) -> Result<(), Error> { - tokio::io::copy(&mut self.reader().await?, w).await?; - Ok(()) + self.deref().copy(w).await } async fn copy_verify( &self, w: &mut W, - verify: Option, + verify: Option<(Hash, u64)>, ) -> Result<(), Error> { - let mut w = VerifyingWriter::new(w, verify); - tokio::io::copy(&mut self.reader().await?, &mut w).await?; - w.verify()?; - Ok(()) + self.deref().copy_verify(w, verify).await } - async fn to_vec(&self, verify: Option) -> Result, Error> { - let mut vec = Vec::with_capacity(self.size().await? 
as usize); - self.copy_verify(&mut vec, verify).await?; - Ok(vec) + async fn to_vec(&self, verify: Option<(Hash, u64)>) -> Result, Error> { + self.deref().to_vec(verify).await } } @@ -44,15 +105,18 @@ impl DynFileSource { Self(Arc::new(source)) } } -#[async_trait::async_trait] impl FileSource for DynFileSource { type Reader = Box; + type SliceReader = Box; async fn size(&self) -> Result { self.0.size().await } async fn reader(&self) -> Result { self.0.reader().await } + async fn slice(&self, position: u64, size: u64) -> Result { + self.0.slice(position, size).await + } async fn copy( &self, mut w: &mut W, @@ -62,11 +126,11 @@ impl FileSource for DynFileSource { async fn copy_verify( &self, mut w: &mut W, - verify: Option, + verify: Option<(Hash, u64)>, ) -> Result<(), Error> { self.0.copy_verify(&mut w, verify).await } - async fn to_vec(&self, verify: Option) -> Result, Error> { + async fn to_vec(&self, verify: Option<(Hash, u64)>) -> Result, Error> { self.0.to_vec(verify).await } } @@ -75,13 +139,18 @@ impl FileSource for DynFileSource { trait DynableFileSource: Send + Sync + 'static { async fn size(&self) -> Result; async fn reader(&self) -> Result, Error>; + async fn slice( + &self, + position: u64, + size: u64, + ) -> Result, Error>; async fn copy(&self, w: &mut (dyn AsyncWrite + Unpin + Send)) -> Result<(), Error>; async fn copy_verify( &self, w: &mut (dyn AsyncWrite + Unpin + Send), - verify: Option, + verify: Option<(Hash, u64)>, ) -> Result<(), Error>; - async fn to_vec(&self, verify: Option) -> Result, Error>; + async fn to_vec(&self, verify: Option<(Hash, u64)>) -> Result, Error>; } #[async_trait::async_trait] impl DynableFileSource for T { @@ -91,41 +160,58 @@ impl DynableFileSource for T { async fn reader(&self) -> Result, Error> { Ok(Box::new(FileSource::reader(self).await?)) } + async fn slice( + &self, + position: u64, + size: u64, + ) -> Result, Error> { + Ok(Box::new(FileSource::slice(self, position, size).await?)) + } async fn copy(&self, w: &mut 
(dyn AsyncWrite + Unpin + Send)) -> Result<(), Error> { FileSource::copy(self, w).await } async fn copy_verify( &self, w: &mut (dyn AsyncWrite + Unpin + Send), - verify: Option, + verify: Option<(Hash, u64)>, ) -> Result<(), Error> { FileSource::copy_verify(self, w, verify).await } - async fn to_vec(&self, verify: Option) -> Result, Error> { + async fn to_vec(&self, verify: Option<(Hash, u64)>) -> Result, Error> { FileSource::to_vec(self, verify).await } } -#[async_trait::async_trait] impl FileSource for PathBuf { type Reader = File; + type SliceReader = Take; async fn size(&self) -> Result { Ok(tokio::fs::metadata(self).await?.len()) } async fn reader(&self) -> Result { - Ok(File::open(self).await?) + Ok(open_file(self).await?) + } + async fn slice(&self, position: u64, size: u64) -> Result { + let mut r = FileSource::reader(self).await?; + r.seek(SeekFrom::Start(position)).await?; + Ok(r.take(size)) } } -#[async_trait::async_trait] impl FileSource for Arc<[u8]> { type Reader = std::io::Cursor; + type SliceReader = Take; async fn size(&self) -> Result { Ok(self.len() as u64) } async fn reader(&self) -> Result { Ok(std::io::Cursor::new(self.clone())) } + async fn slice(&self, position: u64, size: u64) -> Result { + let mut r = FileSource::reader(self).await?; + r.seek(SeekFrom::Start(position)).await?; + Ok(r.take(size)) + } async fn copy(&self, w: &mut W) -> Result<(), Error> { use tokio::io::AsyncWriteExt; @@ -134,35 +220,82 @@ impl FileSource for Arc<[u8]> { } } -#[async_trait::async_trait] -pub trait ArchiveSource: Clone + Send + Sync + Sized + 'static { - type Reader: AsyncRead + Unpin + Send; - async fn size(&self) -> Option { - None +pub trait ArchiveSource: Send + Sync + Sized + 'static { + type FetchReader: AsyncRead + Unpin + Send; + type FetchAllReader: AsyncRead + Unpin + Send; + fn size(&self) -> impl Future> + Send { + async { None } } - async fn fetch(&self, position: u64, size: u64) -> Result; - async fn copy_to( + fn fetch_all(&self) -> impl 
Future> + Send; + fn fetch( + &self, + position: u64, + size: u64, + ) -> impl Future> + Send; + fn copy_all_to( + &self, + w: &mut W, + ) -> impl Future> + Send { + async move { + tokio::io::copy(&mut self.fetch_all().await?, w).await?; + Ok(()) + } + } + fn copy_to( &self, position: u64, size: u64, w: &mut W, - ) -> Result<(), Error> { - tokio::io::copy(&mut self.fetch(position, size).await?, w).await?; - Ok(()) + ) -> impl Future> + Send { + async move { + tokio::io::copy(&mut self.fetch(position, size).await?, w).await?; + Ok(()) + } } - fn section(&self, position: u64, size: u64) -> Section { + fn section(self, position: u64, size: u64) -> Section { Section { - source: self.clone(), + source: self, position, size, } } } -#[async_trait::async_trait] +impl ArchiveSource for Arc { + type FetchReader = T::FetchReader; + type FetchAllReader = T::FetchAllReader; + async fn size(&self) -> Option { + self.deref().size().await + } + async fn fetch_all(&self) -> Result { + self.deref().fetch_all().await + } + async fn fetch(&self, position: u64, size: u64) -> Result { + self.deref().fetch(position, size).await + } + async fn copy_all_to( + &self, + w: &mut W, + ) -> Result<(), Error> { + self.deref().copy_all_to(w).await + } + async fn copy_to( + &self, + position: u64, + size: u64, + w: &mut W, + ) -> Result<(), Error> { + self.deref().copy_to(position, size, w).await + } +} + impl ArchiveSource for Arc<[u8]> { - type Reader = tokio::io::Take>; - async fn fetch(&self, position: u64, size: u64) -> Result { + type FetchReader = tokio::io::Take>; + type FetchAllReader = std::io::Cursor; + async fn fetch_all(&self) -> Result { + Ok(std::io::Cursor::new(self.clone())) + } + async fn fetch(&self, position: u64, size: u64) -> Result { use tokio::io::AsyncReadExt; let mut cur = std::io::Cursor::new(self.clone()); @@ -177,16 +310,108 @@ pub struct Section { position: u64, size: u64, } -#[async_trait::async_trait] impl FileSource for Section { - type Reader = S::Reader; + type 
Reader = S::FetchReader; + type SliceReader = S::FetchReader; async fn size(&self) -> Result { Ok(self.size) } async fn reader(&self) -> Result { self.source.fetch(self.position, self.size).await } + async fn slice(&self, position: u64, size: u64) -> Result { + self.source + .fetch(self.position + position, min(size, self.size)) + .await + } async fn copy(&self, w: &mut W) -> Result<(), Error> { self.source.copy_to(self.position, self.size, w).await } } + +pub type DynRead = Box; +pub fn into_dyn_read(r: R) -> DynRead { + Box::new(r) +} + +#[derive(Clone)] +pub struct TmpSource { + tmp_dir: Arc, + source: S, +} +impl TmpSource { + pub fn new(tmp_dir: Arc, source: S) -> Self { + Self { tmp_dir, source } + } + pub async fn gc(self) -> Result<(), Error> { + self.tmp_dir.gc().await + } +} +impl std::ops::Deref for TmpSource { + type Target = S; + fn deref(&self) -> &Self::Target { + &self.source + } +} +impl ArchiveSource for TmpSource { + type FetchReader = ::FetchReader; + type FetchAllReader = ::FetchAllReader; + async fn size(&self) -> Option { + self.source.size().await + } + async fn fetch_all(&self) -> Result { + self.source.fetch_all().await + } + async fn fetch(&self, position: u64, size: u64) -> Result { + self.source.fetch(position, size).await + } + async fn copy_all_to( + &self, + w: &mut W, + ) -> Result<(), Error> { + self.source.copy_all_to(w).await + } + async fn copy_to( + &self, + position: u64, + size: u64, + w: &mut W, + ) -> Result<(), Error> { + self.source.copy_to(position, size, w).await + } +} +impl From> for DynFileSource { + fn from(value: TmpSource) -> Self { + DynFileSource::new(value) + } +} + +impl FileSource for TmpSource { + type Reader = ::Reader; + type SliceReader = ::SliceReader; + async fn size(&self) -> Result { + self.source.size().await + } + async fn reader(&self) -> Result { + self.source.reader().await + } + async fn slice(&self, position: u64, size: u64) -> Result { + self.source.slice(position, size).await + } + async fn 
copy( + &self, + mut w: &mut W, + ) -> Result<(), Error> { + self.source.copy(&mut w).await + } + async fn copy_verify( + &self, + mut w: &mut W, + verify: Option<(Hash, u64)>, + ) -> Result<(), Error> { + self.source.copy_verify(&mut w, verify).await + } + async fn to_vec(&self, verify: Option<(Hash, u64)>) -> Result, Error> { + self.source.to_vec(verify).await + } +} diff --git a/core/startos/src/s9pk/merkle_archive/source/multi_cursor_file.rs b/core/startos/src/s9pk/merkle_archive/source/multi_cursor_file.rs index 7add68e6f..658f3f923 100644 --- a/core/startos/src/s9pk/merkle_archive/source/multi_cursor_file.rs +++ b/core/startos/src/s9pk/merkle_archive/source/multi_cursor_file.rs @@ -1,18 +1,36 @@ use std::io::SeekFrom; use std::os::fd::{AsRawFd, RawFd}; use std::path::{Path, PathBuf}; +use std::pin::Pin; use std::sync::Arc; +use std::task::Poll; use tokio::fs::File; -use tokio::io::{AsyncRead, AsyncReadExt}; +use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeek, ReadBuf, Take}; use tokio::sync::{Mutex, OwnedMutexGuard}; use crate::disk::mount::filesystem::loop_dev::LoopDev; use crate::prelude::*; use crate::s9pk::merkle_archive::source::{ArchiveSource, Section}; +use crate::util::io::open_file; -fn path_from_fd(fd: RawFd) -> PathBuf { - Path::new("/proc/self/fd").join(fd.to_string()) +fn path_from_fd(fd: RawFd) -> Result { + #[cfg(target_os = "linux")] + let path = Path::new("/proc/self/fd").join(fd.to_string()); + #[cfg(target_os = "macos")] // here be dragons + let path = unsafe { + let mut buf = [0u8; libc::PATH_MAX as usize]; + if libc::fcntl(fd, libc::F_GETPATH, buf.as_mut_ptr().cast::()) == -1 { + return Err(std::io::Error::last_os_error().into()); + } + Path::new( + &*std::ffi::CStr::from_bytes_until_nul(&buf) + .with_kind(ErrorKind::Utf8)? 
+ .to_string_lossy(), + ) + .to_owned() + }; + Ok(path) } #[derive(Clone)] @@ -21,15 +39,33 @@ pub struct MultiCursorFile { file: Arc>, } impl MultiCursorFile { - fn path(&self) -> PathBuf { + fn path(&self) -> Result { path_from_fd(self.fd) } pub async fn open(fd: &impl AsRawFd) -> Result { - let fd = fd.as_raw_fd(); - Ok(Self { - fd, - file: Arc::new(Mutex::new(File::open(path_from_fd(fd)).await?)), + let f = open_file(path_from_fd(fd.as_raw_fd())?).await?; + Ok(Self::from(f)) + } + pub async fn cursor(&self) -> Result { + Ok(FileCursor( + if let Ok(file) = self.file.clone().try_lock_owned() { + file + } else { + Arc::new(Mutex::new(open_file(self.path()?).await?)) + .try_lock_owned() + .expect("freshly created") + }, + )) + } + pub async fn blake3_mmap(&self) -> Result { + let path = self.path()?; + tokio::task::spawn_blocking(move || { + let mut hasher = blake3::Hasher::new(); + hasher.update_mmap_rayon(path)?; + Ok(hasher.finalize()) }) + .await + .with_kind(ErrorKind::Unknown)? } } impl From for MultiCursorFile { @@ -42,77 +78,69 @@ impl From for MultiCursorFile { } #[pin_project::pin_project] -pub struct FileSectionReader { - #[pin] - file: OwnedMutexGuard, - remaining: u64, -} -impl AsyncRead for FileSectionReader { +pub struct FileCursor(#[pin] OwnedMutexGuard); +impl AsyncRead for FileCursor { fn poll_read( - self: std::pin::Pin<&mut Self>, + self: Pin<&mut Self>, cx: &mut std::task::Context<'_>, - buf: &mut tokio::io::ReadBuf<'_>, - ) -> std::task::Poll> { + buf: &mut ReadBuf<'_>, + ) -> Poll> { let this = self.project(); - if *this.remaining == 0 { - return std::task::Poll::Ready(Ok(())); - } - let before = buf.filled().len() as u64; - let res = std::pin::Pin::new(&mut (&mut **this.file.get_mut()).take(*this.remaining)) - .poll_read(cx, buf); - *this.remaining = this - .remaining - .saturating_sub(buf.filled().len() as u64 - before); - res + Pin::new(&mut (&mut **this.0.get_mut())).poll_read(cx, buf) + } +} +impl AsyncSeek for FileCursor { + fn 
start_seek(self: Pin<&mut Self>, position: SeekFrom) -> std::io::Result<()> { + let this = self.project(); + Pin::new(&mut (&mut **this.0.get_mut())).start_seek(position) + } + fn poll_complete( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> Poll> { + let this = self.project(); + Pin::new(&mut (&mut **this.0.get_mut())).poll_complete(cx) + } +} +impl std::ops::Deref for FileCursor { + type Target = File; + fn deref(&self) -> &Self::Target { + &*self.0 + } +} +impl std::ops::DerefMut for FileCursor { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut *self.0 } } -#[async_trait::async_trait] impl ArchiveSource for MultiCursorFile { - type Reader = FileSectionReader; + type FetchReader = Take; + type FetchAllReader = FileCursor; async fn size(&self) -> Option { - tokio::fs::metadata(self.path()).await.ok().map(|m| m.len()) + tokio::fs::metadata(self.path().ok()?) + .await + .ok() + .map(|m| m.len()) } - async fn fetch(&self, position: u64, size: u64) -> Result { + async fn fetch_all(&self) -> Result { use tokio::io::AsyncSeekExt; - let mut file = if let Ok(file) = self.file.clone().try_lock_owned() { - file - } else { - #[cfg(target_os = "linux")] - let file = File::open(self.path()).await?; - #[cfg(target_os = "macos")] // here be dragons - let file = unsafe { - let mut buf = [0u8; libc::PATH_MAX as usize]; - if libc::fcntl( - self.fd, - libc::F_GETPATH, - buf.as_mut_ptr().cast::(), - ) == -1 - { - return Err(std::io::Error::last_os_error().into()); - } - File::open( - &*std::ffi::CStr::from_bytes_until_nul(&buf) - .with_kind(ErrorKind::Utf8)? - .to_string_lossy(), - ) - .await? 
- }; - Arc::new(Mutex::new(file)) - .try_lock_owned() - .expect("freshly created") - }; - file.seek(SeekFrom::Start(position)).await?; - Ok(Self::Reader { - file, - remaining: size, - }) + let mut file = self.cursor().await?; + file.0.seek(SeekFrom::Start(0)).await?; + Ok(file) + } + async fn fetch(&self, position: u64, size: u64) -> Result { + use tokio::io::AsyncSeekExt; + + let mut file = self.cursor().await?; + file.0.seek(SeekFrom::Start(position)).await?; + Ok(file.take(size)) } } impl From<&Section> for LoopDev { fn from(value: &Section) -> Self { - LoopDev::new(value.source.path(), value.position, value.size) + LoopDev::new(value.source.path().unwrap(), value.position, value.size) } } diff --git a/core/startos/src/s9pk/merkle_archive/test.rs b/core/startos/src/s9pk/merkle_archive/test.rs index 430ab4f31..861f3b04c 100644 --- a/core/startos/src/s9pk/merkle_archive/test.rs +++ b/core/startos/src/s9pk/merkle_archive/test.rs @@ -8,9 +8,9 @@ use ed25519_dalek::SigningKey; use crate::prelude::*; use crate::s9pk::merkle_archive::directory_contents::DirectoryContents; use crate::s9pk::merkle_archive::file_contents::FileContents; -use crate::s9pk::merkle_archive::sink::TrackingWriter; use crate::s9pk::merkle_archive::source::FileSource; use crate::s9pk::merkle_archive::{Entry, EntryContents, MerkleArchive}; +use crate::util::io::TrackingIO; /// Creates a MerkleArchive (a1) with the provided files at the provided paths. 
NOTE: later files can overwrite previous files/directories at the same path /// Tests: @@ -52,7 +52,7 @@ fn test(files: Vec<(PathBuf, String)>) -> Result<(), Error> { } } let key = SigningKey::generate(&mut rand::thread_rng()); - let mut a1 = MerkleArchive::new(root, key); + let mut a1 = MerkleArchive::new(root, key, "test"); tokio::runtime::Builder::new_current_thread() .enable_io() .build() @@ -60,10 +60,15 @@ fn test(files: Vec<(PathBuf, String)>) -> Result<(), Error> { .block_on(async move { a1.update_hashes(true).await?; let mut s1 = Vec::new(); - a1.serialize(&mut TrackingWriter::new(0, &mut s1), true) - .await?; + a1.serialize(&mut TrackingIO::new(0, &mut s1), true).await?; let s1: Arc<[u8]> = s1.into(); - let a2 = MerkleArchive::deserialize(&s1, &mut Cursor::new(s1.clone())).await?; + let a2 = MerkleArchive::deserialize( + &s1, + "test", + &mut Cursor::new(s1.clone()), + Some(&a1.commitment().await?), + ) + .await?; for (path, content) in check_set { match a2 @@ -88,8 +93,7 @@ fn test(files: Vec<(PathBuf, String)>) -> Result<(), Error> { } let mut s2 = Vec::new(); - a2.serialize(&mut TrackingWriter::new(0, &mut s2), true) - .await?; + a2.serialize(&mut TrackingIO::new(0, &mut s2), true).await?; let s2: Arc<[u8]> = s2.into(); ensure_code!(s1 == s2, ErrorKind::Pack, "s1 does not match s2"); diff --git a/core/startos/src/s9pk/merkle_archive/varint.rs b/core/startos/src/s9pk/merkle_archive/varint.rs index 479b488e6..f4f18d140 100644 --- a/core/startos/src/s9pk/merkle_archive/varint.rs +++ b/core/startos/src/s9pk/merkle_archive/varint.rs @@ -3,7 +3,7 @@ use tokio::io::{AsyncRead, AsyncWrite}; use crate::prelude::*; -/// Most-significant byte, == 0x80 +/// Most-significant bit, == 0x80 pub const MSB: u8 = 0b1000_0000; const MAX_STR_LEN: u64 = 1024 * 1024; // 1 MiB @@ -39,22 +39,20 @@ pub async fn serialize_varstring( Ok(()) } +const MAX_SIZE: usize = (std::mem::size_of::() * 8 + 7) / 7; + #[derive(Default)] struct VarIntProcessor { - buf: [u8; 10], - maxsize: 
usize, + buf: [u8; MAX_SIZE], i: usize, } impl VarIntProcessor { fn new() -> VarIntProcessor { - VarIntProcessor { - maxsize: (std::mem::size_of::() * 8 + 7) / 7, - ..VarIntProcessor::default() - } + Self::default() } fn push(&mut self, b: u8) -> Result<(), Error> { - if self.i >= self.maxsize { + if self.i >= MAX_SIZE { return Err(Error::new( eyre!("Unterminated varint"), ErrorKind::ParseS9pk, diff --git a/core/startos/src/s9pk/merkle_archive/write_queue.rs b/core/startos/src/s9pk/merkle_archive/write_queue.rs index 9496d5e83..4e1bb3a73 100644 --- a/core/startos/src/s9pk/merkle_archive/write_queue.rs +++ b/core/startos/src/s9pk/merkle_archive/write_queue.rs @@ -30,6 +30,8 @@ impl<'a, S: FileSource> WriteQueue<'a, S> { self.queue.push_back(entry); Ok(res) } +} +impl<'a, S: FileSource + Clone> WriteQueue<'a, S> { pub async fn serialize(&mut self, w: &mut W, verify: bool) -> Result<(), Error> { loop { let Some(next) = self.queue.pop_front() else { diff --git a/core/startos/src/s9pk/mod.rs b/core/startos/src/s9pk/mod.rs index 83924293a..a06218d40 100644 --- a/core/startos/src/s9pk/mod.rs +++ b/core/startos/src/s9pk/mod.rs @@ -1,39 +1,60 @@ +pub mod git_hash; pub mod merkle_archive; pub mod rpc; pub mod v1; pub mod v2; -use std::io::SeekFrom; -use std::path::Path; +use std::sync::Arc; -use tokio::fs::File; -use tokio::io::{AsyncReadExt, AsyncSeekExt}; +use tokio::io::{AsyncReadExt, AsyncSeek}; pub use v2::{manifest, S9pk}; -use crate::context::CliContext; use crate::prelude::*; +use crate::progress::FullProgressTracker; +use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource}; use crate::s9pk::v1::reader::S9pkReader; use crate::s9pk::v2::compat::MAGIC_AND_VERSION; +use crate::util::io::TmpDir; -pub async fn load(ctx: &CliContext, path: impl AsRef) -> Result { +pub async fn load( + source: S, + key: K, + progress: Option<&FullProgressTracker>, +) -> Result, Error> +where + S: ArchiveSource, + S::FetchAllReader: AsyncSeek + Sync, + K: FnOnce() -> Result, 
+{ // TODO: return s9pk const MAGIC_LEN: usize = MAGIC_AND_VERSION.len(); let mut magic = [0_u8; MAGIC_LEN]; - let mut file = tokio::fs::File::open(&path).await?; - file.read_exact(&mut magic).await?; - file.seek(SeekFrom::Start(0)).await?; + source.fetch(0, 3).await?.read_exact(&mut magic).await?; if magic == v2::compat::MAGIC_AND_VERSION { + let phase = if let Some(progress) = progress { + let mut phase = progress.add_phase( + "Converting Package to V2".into(), + Some(source.size().await.unwrap_or(60)), + ); + phase.start(); + Some(phase) + } else { + None + }; tracing::info!("Converting package to v2 s9pk"); - let new_path = path.as_ref().with_extension("compat.s9pk"); - S9pk::from_v1( - S9pkReader::from_reader(file, true).await?, - &new_path, - ctx.developer_key()?.clone(), + let tmp_dir = TmpDir::new().await?; + let s9pk = S9pk::from_v1( + S9pkReader::from_reader(source.fetch_all().await?, true).await?, + Arc::new(tmp_dir), + key()?, ) .await?; - tokio::fs::rename(&new_path, &path).await?; - file = tokio::fs::File::open(&path).await?; tracing::info!("Converted s9pk successfully"); + if let Some(mut phase) = phase { + phase.complete(); + } + Ok(s9pk.into_dyn()) + } else { + Ok(S9pk::deserialize(&Arc::new(source), None).await?.into_dyn()) } - Ok(file) } diff --git a/core/startos/src/s9pk/rpc.rs b/core/startos/src/s9pk/rpc.rs index 582a0a9d9..92f952077 100644 --- a/core/startos/src/s9pk/rpc.rs +++ b/core/startos/src/s9pk/rpc.rs @@ -1,32 +1,50 @@ -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use std::sync::Arc; use clap::Parser; -use itertools::Itertools; use models::ImageId; use rpc_toolkit::{from_fn_async, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; -use tokio::fs::File; -use tokio::process::Command; use ts_rs::TS; use crate::context::CliContext; use crate::prelude::*; use crate::s9pk::manifest::Manifest; -use crate::s9pk::merkle_archive::source::DynFileSource; -use crate::s9pk::merkle_archive::Entry; -use 
crate::s9pk::v2::compat::CONTAINER_TOOL; -use crate::s9pk::S9pk; -use crate::util::io::TmpDir; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; +use crate::s9pk::v2::pack::ImageConfig; +use crate::s9pk::v2::SIG_CONTEXT; +use crate::util::io::{create_file, open_file, TmpDir}; use crate::util::serde::{apply_expr, HandlerExtSerde}; -use crate::util::Invoke; +use crate::util::Apply; pub const SKIP_ENV: &[&str] = &["TERM", "container", "HOME", "HOSTNAME"]; -pub fn s9pk() -> ParentHandler { +pub fn s9pk() -> ParentHandler { ParentHandler::new() + .subcommand("pack", from_fn_async(super::v2::pack::pack).no_display()) + .subcommand( + "list-ingredients", + from_fn_async(super::v2::pack::list_ingredients).with_custom_display_fn( + |_, ingredients| { + ingredients + .into_iter() + .map(Some) + .apply(|i| itertools::intersperse(i, None)) + .for_each(|i| { + if let Some(p) = i { + print!("{}", p.display()) + } else { + print!(" ") + } + }); + println!(); + Ok(()) + }, + ), + ) .subcommand("edit", edit()) .subcommand("inspect", inspect()) + .subcommand("convert", from_fn_async(convert).no_display()) } #[derive(Deserialize, Serialize, Parser)] @@ -34,9 +52,9 @@ struct S9pkPath { s9pk: PathBuf, } -fn edit() -> ParentHandler { +fn edit() -> ParentHandler { let only_parent = |a, _| a; - ParentHandler::::new() + ParentHandler::new() .subcommand( "add-image", from_fn_async(add_image) @@ -51,15 +69,19 @@ fn edit() -> ParentHandler { ) } -fn inspect() -> ParentHandler { +fn inspect() -> ParentHandler { let only_parent = |a, _| a; - ParentHandler::::new() + ParentHandler::new() .subcommand( "file-tree", from_fn_async(file_tree) .with_inherited(only_parent) .with_display_serializable(), ) + .subcommand( + "cat", + from_fn_async(cat).with_inherited(only_parent).no_display(), + ) .subcommand( "manifest", from_fn_async(inspect_manifest) @@ -71,116 +93,33 @@ fn inspect() -> ParentHandler { #[derive(Deserialize, Serialize, Parser, TS)] struct AddImageParams { id: 
ImageId, - image: String, + #[command(flatten)] + config: ImageConfig, } async fn add_image( ctx: CliContext, - AddImageParams { id, image }: AddImageParams, + AddImageParams { id, config }: AddImageParams, S9pkPath { s9pk: s9pk_path }: S9pkPath, ) -> Result<(), Error> { - let tmpdir = TmpDir::new().await?; - let sqfs_path = tmpdir.join("image.squashfs"); - let arch = String::from_utf8( - Command::new(CONTAINER_TOOL) - .arg("run") - .arg("--rm") - .arg("--entrypoint") - .arg("uname") - .arg(&image) - .arg("-m") - .invoke(ErrorKind::Docker) - .await?, - )?; - let env = String::from_utf8( - Command::new(CONTAINER_TOOL) - .arg("run") - .arg("--rm") - .arg("--entrypoint") - .arg("env") - .arg(&image) - .invoke(ErrorKind::Docker) - .await?, - )? - .lines() - .filter(|l| { - l.trim() - .split_once("=") - .map_or(false, |(v, _)| !SKIP_ENV.contains(&v)) - }) - .join("\n") - + "\n"; - let workdir = Path::new( - String::from_utf8( - Command::new(CONTAINER_TOOL) - .arg("run") - .arg("--rm") - .arg("--entrypoint") - .arg("pwd") - .arg(&image) - .invoke(ErrorKind::Docker) - .await?, - )? - .trim(), + let mut s9pk = super::load( + MultiCursorFile::from(open_file(&s9pk_path).await?), + || ctx.developer_key().cloned(), + None, ) - .to_owned(); - let container_id = String::from_utf8( - Command::new(CONTAINER_TOOL) - .arg("create") - .arg(&image) - .invoke(ErrorKind::Docker) - .await?, - )?; - Command::new("bash") - .arg("-c") - .arg(format!( - "{CONTAINER_TOOL} export {container_id} | mksquashfs - {sqfs} -tar", - container_id = container_id.trim(), - sqfs = sqfs_path.display() - )) - .invoke(ErrorKind::Docker) - .await?; - Command::new(CONTAINER_TOOL) - .arg("rm") - .arg(container_id.trim()) - .invoke(ErrorKind::Docker) - .await?; - let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?) - .await? 
- .into_dyn(); - let archive = s9pk.as_archive_mut(); - archive.set_signer(ctx.developer_key()?.clone()); - archive.contents_mut().insert_path( - Path::new("images") - .join(arch.trim()) - .join(&id) - .with_extension("squashfs"), - Entry::file(DynFileSource::new(sqfs_path)), - )?; - archive.contents_mut().insert_path( - Path::new("images") - .join(arch.trim()) - .join(&id) - .with_extension("env"), - Entry::file(DynFileSource::new(Arc::from(Vec::from(env)))), - )?; - archive.contents_mut().insert_path( - Path::new("images") - .join(arch.trim()) - .join(&id) - .with_extension("json"), - Entry::file(DynFileSource::new(Arc::from( - serde_json::to_vec(&serde_json::json!({ - "workdir": workdir - })) - .with_kind(ErrorKind::Serialization)?, - ))), - )?; + .await?; + s9pk.as_manifest_mut().images.insert(id, config); + let tmp_dir = Arc::new(TmpDir::new().await?); + s9pk.load_images(tmp_dir.clone()).await?; + s9pk.validate_and_filter(None)?; let tmp_path = s9pk_path.with_extension("s9pk.tmp"); - let mut tmp_file = File::create(&tmp_path).await?; + let mut tmp_file = create_file(&tmp_path).await?; s9pk.serialize(&mut tmp_file, true).await?; + drop(s9pk); tmp_file.sync_all().await?; tokio::fs::rename(&tmp_path, &s9pk_path).await?; + tmp_dir.gc().await?; + Ok(()) } @@ -193,15 +132,20 @@ async fn edit_manifest( EditManifestParams { expression }: EditManifestParams, S9pkPath { s9pk: s9pk_path }: S9pkPath, ) -> Result { - let mut s9pk = S9pk::from_file(super::load(&ctx, &s9pk_path).await?).await?; + let mut s9pk = super::load( + MultiCursorFile::from(open_file(&s9pk_path).await?), + || ctx.developer_key().cloned(), + None, + ) + .await?; let old = serde_json::to_value(s9pk.as_manifest()).with_kind(ErrorKind::Serialization)?; *s9pk.as_manifest_mut() = serde_json::from_value(apply_expr(old.into(), &expression)?.into()) .with_kind(ErrorKind::Serialization)?; let manifest = s9pk.as_manifest().clone(); let tmp_path = s9pk_path.with_extension("s9pk.tmp"); - let mut tmp_file = 
File::create(&tmp_path).await?; + let mut tmp_file = create_file(&tmp_path).await?; s9pk.as_archive_mut() - .set_signer(ctx.developer_key()?.clone()); + .set_signer(ctx.developer_key()?.clone(), SIG_CONTEXT); s9pk.serialize(&mut tmp_file, true).await?; tmp_file.sync_all().await?; tokio::fs::rename(&tmp_path, &s9pk_path).await?; @@ -212,17 +156,76 @@ async fn edit_manifest( async fn file_tree( ctx: CliContext, _: Empty, - S9pkPath { s9pk }: S9pkPath, + S9pkPath { s9pk: s9pk_path }: S9pkPath, ) -> Result, Error> { - let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?; + let s9pk = super::load( + MultiCursorFile::from(open_file(&s9pk_path).await?), + || ctx.developer_key().cloned(), + None, + ) + .await?; Ok(s9pk.as_archive().contents().file_paths("")) } +#[derive(Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +struct CatParams { + file_path: PathBuf, +} +async fn cat( + ctx: CliContext, + CatParams { file_path }: CatParams, + S9pkPath { s9pk: s9pk_path }: S9pkPath, +) -> Result<(), Error> { + use crate::s9pk::merkle_archive::source::FileSource; + + let s9pk = super::load( + MultiCursorFile::from(open_file(&s9pk_path).await?), + || ctx.developer_key().cloned(), + None, + ) + .await?; + tokio::io::copy( + &mut s9pk + .as_archive() + .contents() + .get_path(&file_path) + .or_not_found(&file_path.display())? + .as_file() + .or_not_found(&file_path.display())? 
+ .reader() + .await?, + &mut tokio::io::stdout(), + ) + .await?; + Ok(()) +} + async fn inspect_manifest( ctx: CliContext, _: Empty, - S9pkPath { s9pk }: S9pkPath, + S9pkPath { s9pk: s9pk_path }: S9pkPath, ) -> Result { - let s9pk = S9pk::from_file(super::load(&ctx, &s9pk).await?).await?; + let s9pk = super::load( + MultiCursorFile::from(open_file(&s9pk_path).await?), + || ctx.developer_key().cloned(), + None, + ) + .await?; Ok(s9pk.as_manifest().clone()) } + +async fn convert(ctx: CliContext, S9pkPath { s9pk: s9pk_path }: S9pkPath) -> Result<(), Error> { + let mut s9pk = super::load( + MultiCursorFile::from(open_file(&s9pk_path).await?), + || ctx.developer_key().cloned(), + None, + ) + .await?; + let tmp_path = s9pk_path.with_extension("s9pk.tmp"); + s9pk.serialize(&mut create_file(&tmp_path).await?, true) + .await?; + tokio::fs::rename(tmp_path, s9pk_path).await?; + Ok(()) +} diff --git a/core/startos/src/s9pk/v1/docker.rs b/core/startos/src/s9pk/v1/docker.rs index 7f1507703..96c532479 100644 --- a/core/startos/src/s9pk/v1/docker.rs +++ b/core/startos/src/s9pk/v1/docker.rs @@ -1,4 +1,3 @@ -use std::borrow::Cow; use std::collections::BTreeSet; use std::io::SeekFrom; use std::path::Path; @@ -10,7 +9,7 @@ use tokio::io::{AsyncRead, AsyncSeek, AsyncSeekExt}; use tokio_tar::{Archive, Entry}; use crate::util::io::from_cbor_async_reader; -use crate::{Error, ErrorKind, ARCH}; +use crate::{Error, ErrorKind}; #[derive(Default, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] @@ -26,8 +25,8 @@ pub enum DockerReader { MultiArch(#[pin] Entry>), } impl DockerReader { - pub async fn new(mut rdr: R) -> Result { - let arch = if let Some(multiarch) = tokio_tar::Archive::new(&mut rdr) + pub async fn list_arches(rdr: &mut R) -> Result, Error> { + if let Some(multiarch) = tokio_tar::Archive::new(rdr) .entries()? .try_filter_map(|e| { async move { @@ -43,41 +42,37 @@ impl DockerReader { .await? 
{ let multiarch: DockerMultiArch = from_cbor_async_reader(multiarch).await?; - Some(if multiarch.available.contains(&**ARCH) { - Cow::Borrowed(&**ARCH) - } else { - Cow::Owned(multiarch.default) - }) + Ok(multiarch.available) } else { - None - }; + Err(Error::new( + eyre!("Single arch legacy s9pks not supported"), + ErrorKind::ParseS9pk, + )) + } + } + pub async fn new(mut rdr: R, arch: &str) -> Result { rdr.seek(SeekFrom::Start(0)).await?; - if let Some(arch) = arch { - if let Some(image) = tokio_tar::Archive::new(rdr) - .entries()? - .try_filter_map(|e| { - let arch = arch.clone(); - async move { - Ok(if &*e.path()? == Path::new(&format!("{}.tar", arch)) { - Some(e) - } else { - None - }) - } - .boxed() - }) - .try_next() - .await? - { - Ok(Self::MultiArch(image)) - } else { - Err(Error::new( - eyre!("Docker image section does not contain tarball for architecture"), - ErrorKind::ParseS9pk, - )) - } + if let Some(image) = tokio_tar::Archive::new(rdr) + .entries()? + .try_filter_map(|e| { + async move { + Ok(if &*e.path()? == Path::new(&format!("{}.tar", arch)) { + Some(e) + } else { + None + }) + } + .boxed() + }) + .try_next() + .await? 
+ { + Ok(Self::MultiArch(image)) } else { - Ok(Self::SingleArch(rdr)) + Err(Error::new( + eyre!("Docker image section does not contain tarball for architecture"), + ErrorKind::ParseS9pk, + )) } } } diff --git a/core/startos/src/s9pk/v1/manifest.rs b/core/startos/src/s9pk/v1/manifest.rs index ef346ad2b..4a9956f9f 100644 --- a/core/startos/src/s9pk/v1/manifest.rs +++ b/core/startos/src/s9pk/v1/manifest.rs @@ -1,37 +1,32 @@ use std::collections::BTreeMap; use std::path::{Path, PathBuf}; -use emver::VersionRange; -use imbl_value::InOMap; +use exver::{Version, VersionRange}; +use indexmap::IndexMap; pub use models::PackageId; -use models::VolumeId; +use models::{ActionId, HealthCheckId, ImageId, VolumeId}; use serde::{Deserialize, Serialize}; use url::Url; -use super::git_hash::GitHash; use crate::prelude::*; +use crate::s9pk::git_hash::GitHash; use crate::s9pk::manifest::{Alerts, Description, HardwareRequirements}; -use crate::util::Version; -use crate::version::{Current, VersionT}; +use crate::util::serde::{Duration, IoFormat}; -fn current_version() -> Version { - Current::new().semver().into() -} - -#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "camelCase")] -#[model = "Model"] +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] pub struct Manifest { - #[serde(default = "current_version")] pub eos_version: Version, pub id: PackageId, #[serde(default)] pub git_hash: Option, + pub title: String, + pub version: exver::emver::Version, + pub description: Description, #[serde(default)] pub assets: Assets, - pub title: String, - pub version: Version, - pub description: Description, + #[serde(default)] + pub build: Option>, pub release_notes: String, pub license: String, // type of license pub wrapper_repo: Url, @@ -41,10 +36,23 @@ pub struct Manifest { pub donation_url: Option, #[serde(default)] pub alerts: Alerts, + pub main: PackageProcedure, + pub health_checks: HealthChecks, + pub config: Option, + pub 
properties: Option, pub volumes: BTreeMap, + // #[serde(default)] + // pub interfaces: Interfaces, + // #[serde(default)] + pub backup: BackupActions, + #[serde(default)] + pub migrations: Migrations, + #[serde(default)] + pub actions: BTreeMap, + // #[serde(default)] + // pub permissions: Permissions, #[serde(default)] pub dependencies: BTreeMap, - pub config: Option>, #[serde(default)] pub replaces: Vec, @@ -53,8 +61,125 @@ pub struct Manifest { pub hardware_requirements: HardwareRequirements, } +impl Manifest { + pub fn package_procedures(&self) -> impl Iterator { + use std::iter::once; + let main = once(&self.main); + let cfg_get = self.config.as_ref().map(|a| &a.get).into_iter(); + let cfg_set = self.config.as_ref().map(|a| &a.set).into_iter(); + let props = self.properties.iter(); + let backups = vec![&self.backup.create, &self.backup.restore].into_iter(); + let migrations = self + .migrations + .to + .values() + .chain(self.migrations.from.values()); + let actions = self.actions.values().map(|a| &a.implementation); + main.chain(cfg_get) + .chain(cfg_set) + .chain(props) + .chain(backups) + .chain(migrations) + .chain(actions) + } +} + +#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] +#[serde(rename_all = "kebab-case")] +#[serde(tag = "type")] +#[model = "Model"] +pub enum PackageProcedure { + Docker(DockerProcedure), + Script(Value), +} + #[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "kebab-case")] +pub struct DockerProcedure { + pub image: ImageId, + #[serde(default)] + pub system: bool, + pub entrypoint: String, + #[serde(default)] + pub args: Vec, + #[serde(default)] + pub inject: bool, + #[serde(default)] + pub mounts: BTreeMap, + #[serde(default)] + pub io_format: Option, + #[serde(default)] + pub sigterm_timeout: Option, + #[serde(default)] + pub shm_size_mb: Option, // TODO: use postfix sizing? 
like 1k vs 1m vs 1g + #[serde(default)] + pub gpu_acceleration: bool, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct HealthChecks(pub BTreeMap); + +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct HealthCheck { + pub name: String, + pub success_message: Option, + #[serde(flatten)] + implementation: PackageProcedure, + pub timeout: Option, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct ConfigActions { + pub get: PackageProcedure, + pub set: PackageProcedure, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct BackupActions { + pub create: PackageProcedure, + pub restore: PackageProcedure, +} + +#[derive(Clone, Debug, Default, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct Migrations { + pub from: IndexMap, + pub to: IndexMap, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct Action { + pub name: String, + pub description: String, + #[serde(default)] + pub warning: Option, + pub implementation: PackageProcedure, + // pub allowed_statuses: Vec, + // #[serde(default)] + // pub input_spec: ConfigSpec, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct DepInfo { + pub version: VersionRange, + pub requirement: DependencyRequirement, + pub description: Option, + #[serde(default)] + pub config: Option, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] +pub struct DependencyConfig { + check: PackageProcedure, + auto_configure: PackageProcedure, +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +#[serde(rename_all = "kebab-case")] #[serde(tag = "type")] pub enum DependencyRequirement { OptIn { how: String }, @@ -67,17 +192,8 @@ impl DependencyRequirement { } } -#[derive(Clone, Debug, Deserialize, Serialize, HasModel)] -#[serde(rename_all = "camelCase")] -#[model = "Model"] -pub struct DepInfo { - pub 
version: VersionRange, - pub requirement: DependencyRequirement, - pub description: Option, -} - #[derive(Clone, Debug, Default, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] +#[serde(rename_all = "kebab-case")] pub struct Assets { #[serde(default)] pub license: Option, diff --git a/core/startos/src/s9pk/v1/mod.rs b/core/startos/src/s9pk/v1/mod.rs index 173921a75..9910d0adb 100644 --- a/core/startos/src/s9pk/v1/mod.rs +++ b/core/startos/src/s9pk/v1/mod.rs @@ -6,7 +6,6 @@ use ts_rs::TS; pub mod builder; pub mod docker; -pub mod git_hash; pub mod header; pub mod manifest; pub mod reader; diff --git a/core/startos/src/s9pk/v1/reader.rs b/core/startos/src/s9pk/v1/reader.rs index 82f62e1df..05f351343 100644 --- a/core/startos/src/s9pk/v1/reader.rs +++ b/core/startos/src/s9pk/v1/reader.rs @@ -1,3 +1,4 @@ +use std::collections::BTreeSet; use std::io::SeekFrom; use std::ops::Range; use std::path::Path; @@ -19,7 +20,8 @@ use super::header::{FileSection, Header, TableOfContents}; use super::SIG_CONTEXT; use crate::prelude::*; use crate::s9pk::v1::docker::DockerReader; -use crate::util::Version; +use crate::util::io::open_file; +use crate::util::VersionString; #[pin_project::pin_project] #[derive(Debug)] @@ -82,11 +84,11 @@ impl<'a, R: AsyncSeek + Unpin> AsyncSeek for ReadHandle<'a, R> { pub struct ImageTag { pub package_id: PackageId, pub image_id: ImageId, - pub version: Version, + pub version: VersionString, } impl ImageTag { #[instrument(skip_all)] - pub fn validate(&self, id: &PackageId, version: &Version) -> Result<(), Error> { + pub fn validate(&self, id: &PackageId, version: &VersionString) -> Result<(), Error> { if id != &self.package_id { return Err(Error::new( eyre!( @@ -149,17 +151,15 @@ pub struct S9pkReader>(path: P, check_sig: bool) -> Result { let p = path.as_ref(); - let rdr = File::open(p) - .await - .with_ctx(|_| (crate::error::ErrorKind::Filesystem, p.display().to_string()))?; + let rdr = open_file(p).await?; 
Self::from_reader(BufReader::new(rdr), check_sig).await } } impl S9pkReader { #[instrument(skip_all)] - pub async fn image_tags(&mut self) -> Result, Error> { - let mut tar = tokio_tar::Archive::new(self.docker_images().await?); + pub async fn image_tags(&mut self, arch: &str) -> Result, Error> { + let mut tar = tokio_tar::Archive::new(self.docker_images(arch).await?); let mut entries = tar.entries()?; while let Some(mut entry) = entries.try_next().await? { if &*entry.path()? != Path::new("manifest.json") { @@ -206,7 +206,7 @@ impl S9pkReader { ( Some(hash), Some(base32::encode( - base32::Alphabet::RFC4648 { padding: false }, + base32::Alphabet::Rfc4648 { padding: false }, hash.as_slice(), )), ) @@ -280,8 +280,15 @@ impl S9pkReader { self.read_handle(self.toc.icon).await } - pub async fn docker_images(&mut self) -> Result>, Error> { - DockerReader::new(self.read_handle(self.toc.docker_images).await?).await + pub async fn docker_arches(&mut self) -> Result, Error> { + DockerReader::list_arches(&mut self.read_handle(self.toc.docker_images).await?).await + } + + pub async fn docker_images( + &mut self, + arch: &str, + ) -> Result>, Error> { + DockerReader::new(self.read_handle(self.toc.docker_images).await?, arch).await } pub async fn assets(&mut self) -> Result, Error> { diff --git a/core/startos/src/s9pk/v2/compat.rs b/core/startos/src/s9pk/v2/compat.rs index 5a98538dc..22250419a 100644 --- a/core/startos/src/s9pk/v2/compat.rs +++ b/core/startos/src/s9pk/v2/compat.rs @@ -1,9 +1,9 @@ -use std::io::Cursor; -use std::path::{Path, PathBuf}; +use std::collections::{BTreeMap, BTreeSet}; +use std::path::Path; use std::sync::Arc; -use itertools::Itertools; -use tokio::fs::File; +use exver::ExtendedVersion; +use models::ImageId; use tokio::io::{AsyncRead, AsyncSeek, AsyncWriteExt}; use tokio::process::Command; @@ -11,242 +11,121 @@ use crate::dependencies::{DepInfo, Dependencies}; use crate::prelude::*; use crate::s9pk::manifest::Manifest; use 
crate::s9pk::merkle_archive::directory_contents::DirectoryContents; -use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; -use crate::s9pk::merkle_archive::source::{FileSource, Section}; +use crate::s9pk::merkle_archive::source::TmpSource; use crate::s9pk::merkle_archive::{Entry, MerkleArchive}; -use crate::s9pk::rpc::SKIP_ENV; -use crate::s9pk::v1::manifest::Manifest as ManifestV1; +use crate::s9pk::v1::manifest::{Manifest as ManifestV1, PackageProcedure}; use crate::s9pk::v1::reader::S9pkReader; -use crate::s9pk::v2::S9pk; -use crate::util::io::TmpDir; +use crate::s9pk::v2::pack::{ImageSource, PackSource, CONTAINER_TOOL}; +use crate::s9pk::v2::{S9pk, SIG_CONTEXT}; +use crate::util::io::{create_file, TmpDir}; use crate::util::Invoke; -use crate::ARCH; pub const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x01]; -#[cfg(not(feature = "docker"))] -pub const CONTAINER_TOOL: &str = "podman"; - -#[cfg(feature = "docker")] -pub const CONTAINER_TOOL: &str = "docker"; - -type DynRead = Box; -fn into_dyn_read(r: R) -> DynRead { - Box::new(r) -} - -#[derive(Clone)] -enum CompatSource { - Buffered(Arc<[u8]>), - File(PathBuf), -} -#[async_trait::async_trait] -impl FileSource for CompatSource { - type Reader = Box; - async fn size(&self) -> Result { - match self { - Self::Buffered(a) => Ok(a.len() as u64), - Self::File(f) => Ok(tokio::fs::metadata(f).await?.len()), - } - } - async fn reader(&self) -> Result { - match self { - Self::Buffered(a) => Ok(into_dyn_read(Cursor::new(a.clone()))), - Self::File(f) => Ok(into_dyn_read(File::open(f).await?)), - } - } -} - -impl S9pk> { +impl S9pk> { #[instrument(skip_all)] pub async fn from_v1( mut reader: S9pkReader, - destination: impl AsRef, + tmp_dir: Arc, signer: ed25519_dalek::SigningKey, ) -> Result { - let scratch_dir = TmpDir::new().await?; + Command::new(CONTAINER_TOOL) + .arg("run") + .arg("--rm") + .arg("--privileged") + .arg("tonistiigi/binfmt") + .arg("--install") + .arg("all") + 
.invoke(ErrorKind::Docker) + .await?; - let mut archive = DirectoryContents::::new(); + let mut archive = DirectoryContents::>::new(); // manifest.json let manifest_raw = reader.manifest().await?; let manifest = from_value::(manifest_raw.clone())?; let mut new_manifest = Manifest::from(manifest.clone()); + let images: BTreeMap = manifest + .package_procedures() + .filter_map(|p| { + if let PackageProcedure::Docker(p) = p { + Some((p.image.clone(), p.system)) + } else { + None + } + }) + .collect(); + // LICENSE.md let license: Arc<[u8]> = reader.license().await?.to_vec().await?.into(); archive.insert_path( "LICENSE.md", - Entry::file(CompatSource::Buffered(license.into())), + Entry::file(TmpSource::new( + tmp_dir.clone(), + PackSource::Buffered(license.into()), + )), )?; // instructions.md let instructions: Arc<[u8]> = reader.instructions().await?.to_vec().await?.into(); archive.insert_path( "instructions.md", - Entry::file(CompatSource::Buffered(instructions.into())), + Entry::file(TmpSource::new( + tmp_dir.clone(), + PackSource::Buffered(instructions.into()), + )), )?; // icon.md let icon: Arc<[u8]> = reader.icon().await?.to_vec().await?.into(); archive.insert_path( format!("icon.{}", manifest.assets.icon_type()), - Entry::file(CompatSource::Buffered(icon.into())), + Entry::file(TmpSource::new( + tmp_dir.clone(), + PackSource::Buffered(icon.into()), + )), )?; // images - let images_dir = scratch_dir.join("images"); - tokio::fs::create_dir_all(&images_dir).await?; - Command::new(CONTAINER_TOOL) - .arg("load") - .input(Some(&mut reader.docker_images().await?)) - .invoke(ErrorKind::Docker) - .await?; - #[derive(serde::Deserialize)] - #[serde(rename_all = "PascalCase")] - struct DockerImagesOut { - repository: Option, - tag: Option, - #[serde(default)] - names: Vec, - } - for image in { - #[cfg(feature = "docker")] - let images = std::str::from_utf8( - &Command::new(CONTAINER_TOOL) - .arg("images") - .arg("--format=json") - .invoke(ErrorKind::Docker) - .await?, - )? 
- .lines() - .map(|l| serde_json::from_str::(l)) - .collect::, _>>() - .with_kind(ErrorKind::Deserialization)? - .into_iter(); - #[cfg(not(feature = "docker"))] - let images = serde_json::from_slice::>( - &Command::new(CONTAINER_TOOL) - .arg("images") - .arg("--format=json") - .invoke(ErrorKind::Docker) - .await?, - ) - .with_kind(ErrorKind::Deserialization)? - .into_iter(); - images - } - .flat_map(|i| { - if let (Some(repository), Some(tag)) = (i.repository, i.tag) { - vec![format!("{repository}:{tag}")] - } else { - i.names - .into_iter() - .filter_map(|i| i.strip_prefix("docker.io/").map(|s| s.to_owned())) - .collect() - } - }) - .filter_map(|i| { - i.strip_suffix(&format!(":{}", manifest.version)) - .map(|s| s.to_owned()) - }) - .filter_map(|i| { - i.strip_prefix(&format!("start9/{}/", manifest.id)) - .map(|s| s.to_owned()) - }) { - new_manifest.images.push(image.parse()?); - let sqfs_path = images_dir.join(&image).with_extension("squashfs"); - let image_name = format!("start9/{}/{}:{}", manifest.id, image, manifest.version); - let id = String::from_utf8( - Command::new(CONTAINER_TOOL) - .arg("create") - .arg(&image_name) - .invoke(ErrorKind::Docker) - .await?, - )?; - let env = String::from_utf8( - Command::new(CONTAINER_TOOL) - .arg("run") - .arg("--rm") - .arg("--entrypoint") - .arg("env") - .arg(&image_name) - .invoke(ErrorKind::Docker) - .await?, - )? - .lines() - .filter(|l| { - l.trim() - .split_once("=") - .map_or(false, |(v, _)| !SKIP_ENV.contains(&v)) - }) - .join("\n") - + "\n"; - let workdir = Path::new( - String::from_utf8( - Command::new(CONTAINER_TOOL) - .arg("run") - .arg("--rm") - .arg("--entrypoint") - .arg("pwd") - .arg(&image_name) - .invoke(ErrorKind::Docker) - .await?, - )? 
- .trim(), - ) - .to_owned(); - Command::new("bash") - .arg("-c") - .arg(format!( - "{CONTAINER_TOOL} export {id} | mksquashfs - {sqfs} -tar", - id = id.trim(), - sqfs = sqfs_path.display() - )) - .invoke(ErrorKind::Docker) - .await?; + for arch in reader.docker_arches().await? { + let images_dir = tmp_dir.join("images").join(&arch); + tokio::fs::create_dir_all(&images_dir).await?; Command::new(CONTAINER_TOOL) - .arg("rm") - .arg(id.trim()) + .arg("load") + .input(Some(&mut reader.docker_images(&arch).await?)) .invoke(ErrorKind::Docker) .await?; - archive.insert_path( - Path::new("images") - .join(&*ARCH) - .join(&image) - .with_extension("squashfs"), - Entry::file(CompatSource::File(sqfs_path)), - )?; - archive.insert_path( - Path::new("images") - .join(&*ARCH) - .join(&image) - .with_extension("env"), - Entry::file(CompatSource::Buffered(Vec::from(env).into())), - )?; - archive.insert_path( - Path::new("images") - .join(&*ARCH) - .join(&image) - .with_extension("json"), - Entry::file(CompatSource::Buffered( - serde_json::to_vec(&serde_json::json!({ - "workdir": workdir - })) - .with_kind(ErrorKind::Serialization)? 
- .into(), - )), - )?; + for (image, system) in &images { + let mut image_config = new_manifest.images.remove(image).unwrap_or_default(); + image_config.arch.insert(arch.as_str().into()); + new_manifest.images.insert(image.clone(), image_config); + let image_name = if *system { + format!("start9/{}:latest", image) + } else { + format!("start9/{}/{}:{}", manifest.id, image, manifest.version) + }; + ImageSource::DockerTag(image_name.clone()) + .load( + tmp_dir.clone(), + &new_manifest.id, + &new_manifest.version, + image, + &arch, + &mut archive, + ) + .await?; + Command::new(CONTAINER_TOOL) + .arg("rmi") + .arg("-f") + .arg(&image_name) + .invoke(ErrorKind::Docker) + .await?; + } } - Command::new(CONTAINER_TOOL) - .arg("image") - .arg("prune") - .arg("-af") - .invoke(ErrorKind::Docker) - .await?; // assets - let asset_dir = scratch_dir.join("assets"); + let asset_dir = tmp_dir.join("assets"); tokio::fs::create_dir_all(&asset_dir).await?; tokio_tar::Archive::new(reader.assets().await?) .unpack(&asset_dir) @@ -264,22 +143,24 @@ impl S9pk> { .invoke(ErrorKind::Filesystem) .await?; archive.insert_path( - Path::new("assets").join(&asset_id), - Entry::file(CompatSource::File(sqfs_path)), + Path::new("assets") + .join(&asset_id) + .with_extension("squashfs"), + Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(sqfs_path))), )?; } // javascript - let js_dir = scratch_dir.join("javascript"); + let js_dir = tmp_dir.join("javascript"); let sqfs_path = js_dir.with_extension("squashfs"); tokio::fs::create_dir_all(&js_dir).await?; if let Some(mut scripts) = reader.scripts().await? 
{ - let mut js_file = File::create(js_dir.join("embassy.js")).await?; + let mut js_file = create_file(js_dir.join("embassy.js")).await?; tokio::io::copy(&mut scripts, &mut js_file).await?; js_file.sync_all().await?; } { - let mut js_file = File::create(js_dir.join("embassyManifest.json")).await?; + let mut js_file = create_file(js_dir.join("embassyManifest.json")).await?; js_file .write_all(&serde_json::to_vec(&manifest_raw).with_kind(ErrorKind::Serialization)?) .await?; @@ -292,29 +173,24 @@ impl S9pk> { .await?; archive.insert_path( Path::new("javascript.squashfs"), - Entry::file(CompatSource::File(sqfs_path)), + Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(sqfs_path))), )?; archive.insert_path( "manifest.json", - Entry::file(CompatSource::Buffered( - serde_json::to_vec::(&new_manifest) - .with_kind(ErrorKind::Serialization)? - .into(), + Entry::file(TmpSource::new( + tmp_dir.clone(), + PackSource::Buffered( + serde_json::to_vec::(&new_manifest) + .with_kind(ErrorKind::Serialization)? + .into(), + ), )), )?; - let mut s9pk = S9pk::new(MerkleArchive::new(archive, signer), None).await?; - let mut dest_file = File::create(destination.as_ref()).await?; - s9pk.serialize(&mut dest_file, false).await?; - dest_file.sync_all().await?; - - scratch_dir.delete().await?; - - Ok(S9pk::deserialize(&MultiCursorFile::from( - File::open(destination.as_ref()).await?, - )) - .await?) 
+ let mut res = S9pk::new(MerkleArchive::new(archive, signer, SIG_CONTEXT), None).await?; + res.as_archive_mut().update_hashes(true).await?; + Ok(res) } } @@ -323,18 +199,18 @@ impl From for Manifest { let default_url = value.upstream_repo.clone(); Self { id: value.id, - title: value.title, - version: value.version, + title: value.title.into(), + version: ExtendedVersion::from(value.version).into(), + satisfies: BTreeSet::new(), release_notes: value.release_notes, - license: value.license, - replaces: value.replaces, + license: value.license.into(), wrapper_repo: value.wrapper_repo, upstream_repo: value.upstream_repo, support_site: value.support_site.unwrap_or_else(|| default_url.clone()), marketing_site: value.marketing_site.unwrap_or_else(|| default_url.clone()), donation_url: value.donation_url, description: value.description, - images: Vec::new(), + images: BTreeMap::new(), assets: value .volumes .iter() @@ -358,6 +234,7 @@ impl From for Manifest { DepInfo { description: value.description, optional: !value.requirement.required(), + s9pk: None, }, ) }) diff --git a/core/startos/src/s9pk/v2/manifest.rs b/core/startos/src/s9pk/v2/manifest.rs index ea4524400..a10a65ddb 100644 --- a/core/startos/src/s9pk/v2/manifest.rs +++ b/core/startos/src/s9pk/v2/manifest.rs @@ -1,22 +1,28 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; +use std::path::Path; use color_eyre::eyre::eyre; +use exver::Version; use helpers::const_true; +use imbl_value::InternedString; pub use models::PackageId; -use models::{ImageId, VolumeId}; +use models::{mime, ImageId, VolumeId}; use serde::{Deserialize, Serialize}; use ts_rs::TS; use url::Url; use crate::dependencies::Dependencies; use crate::prelude::*; -use crate::s9pk::v1::git_hash::GitHash; +use crate::s9pk::git_hash::GitHash; +use crate::s9pk::merkle_archive::directory_contents::DirectoryContents; +use crate::s9pk::merkle_archive::expected::{Expected, Filter}; +use crate::s9pk::v2::pack::ImageConfig; use 
crate::util::serde::Regex; -use crate::util::Version; +use crate::util::VersionString; use crate::version::{Current, VersionT}; fn current_version() -> Version { - Current::new().semver().into() + Current::new().semver() } #[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)] @@ -25,13 +31,13 @@ fn current_version() -> Version { #[ts(export)] pub struct Manifest { pub id: PackageId, - pub title: String, #[ts(type = "string")] - pub version: Version, + pub title: InternedString, + pub version: VersionString, + pub satisfies: BTreeSet, pub release_notes: String, - pub license: String, // type of license - #[serde(default)] - pub replaces: Vec, + #[ts(type = "string")] + pub license: InternedString, // type of license #[ts(type = "string")] pub wrapper_repo: Url, #[ts(type = "string")] @@ -43,9 +49,9 @@ pub struct Manifest { #[ts(type = "string | null")] pub donation_url: Option, pub description: Description, - pub images: Vec, - pub assets: Vec, // TODO: AssetsId - pub volumes: Vec, + pub images: BTreeMap, + pub assets: BTreeSet, // TODO: AssetsId + pub volumes: BTreeSet, #[serde(default)] pub alerts: Alerts, #[serde(default)] @@ -61,16 +67,104 @@ pub struct Manifest { #[serde(default = "const_true")] pub has_config: bool, } +impl Manifest { + pub fn validate_for<'a, T: Clone>( + &self, + arch: Option<&str>, + archive: &'a DirectoryContents, + ) -> Result { + let mut expected = Expected::new(archive); + expected.check_file("manifest.json")?; + expected.check_stem("icon", |ext| { + ext.and_then(|e| e.to_str()) + .and_then(mime) + .map_or(false, |mime| mime.starts_with("image/")) + })?; + expected.check_file("LICENSE.md")?; + expected.check_file("instructions.md")?; + expected.check_file("javascript.squashfs")?; + for (dependency, _) in &self.dependencies.0 { + let dep_path = Path::new("dependencies").join(dependency); + let _ = expected.check_file(dep_path.join("metadata.json")); + let _ = expected.check_stem(dep_path.join("icon"), |ext| { + ext.and_then(|e| 
e.to_str()) + .and_then(mime) + .map_or(false, |mime| mime.starts_with("image/")) + }); + } + for assets in &self.assets { + expected.check_file(Path::new("assets").join(assets).with_extension("squashfs"))?; + } + for (image_id, config) in &self.images { + let mut check_arch = |arch: &str| { + let mut arch = arch; + if let Err(e) = expected.check_file( + Path::new("images") + .join(arch) + .join(image_id) + .with_extension("squashfs"), + ) { + if let Some(emulate_as) = &config.emulate_missing_as { + expected.check_file( + Path::new("images") + .join(arch) + .join(image_id) + .with_extension("squashfs"), + )?; + arch = &**emulate_as; + } else { + return Err(e); + } + } + expected.check_file( + Path::new("images") + .join(arch) + .join(image_id) + .with_extension("json"), + )?; + expected.check_file( + Path::new("images") + .join(arch) + .join(image_id) + .with_extension("env"), + )?; + Ok(()) + }; + if let Some(arch) = arch { + check_arch(arch)?; + } else if let Some(arches) = &self.hardware_requirements.arch { + for arch in arches { + check_arch(arch)?; + } + } else if let Some(arch) = config.emulate_missing_as.as_deref() { + if !config.arch.contains(arch) { + return Err(Error::new( + eyre!("`emulateMissingAs` must match an included `arch`"), + ErrorKind::ParseS9pk, + )); + } + for arch in &config.arch { + check_arch(&arch)?; + } + } else { + return Err(Error::new(eyre!("`emulateMissingAs` required for all images if no `arch` specified in `hardwareRequirements`"), ErrorKind::ParseS9pk)); + } + } + Ok(expected.into_filter()) + } +} #[derive(Clone, Debug, Default, Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] #[ts(export)] pub struct HardwareRequirements { #[serde(default)] - #[ts(type = "{ [key: string]: string }")] - device: BTreeMap, - ram: Option, - pub arch: Option>, + #[ts(type = "{ device?: string, processor?: string }")] + pub device: BTreeMap, + #[ts(type = "number | null")] + pub ram: Option, + #[ts(type = "string[] | null")] + pub arch: 
Option>, } #[derive(Clone, Debug, Deserialize, Serialize, TS)] diff --git a/core/startos/src/s9pk/v2/mod.rs b/core/startos/src/s9pk/v2/mod.rs index af1cd1c17..e012480af 100644 --- a/core/startos/src/s9pk/v2/mod.rs +++ b/core/startos/src/s9pk/v2/mod.rs @@ -6,19 +6,27 @@ use imbl_value::InternedString; use models::{mime, DataUrl, PackageId}; use tokio::fs::File; +use crate::dependencies::DependencyMetadata; use crate::prelude::*; +use crate::registry::signer::commitment::merkle_archive::MerkleArchiveCommitment; use crate::s9pk::manifest::Manifest; -use crate::s9pk::merkle_archive::file_contents::FileContents; use crate::s9pk::merkle_archive::sink::Sink; use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; -use crate::s9pk::merkle_archive::source::{ArchiveSource, DynFileSource, FileSource, Section}; +use crate::s9pk::merkle_archive::source::{ + ArchiveSource, DynFileSource, FileSource, Section, TmpSource, +}; use crate::s9pk::merkle_archive::{Entry, MerkleArchive}; -use crate::ARCH; +use crate::s9pk::v2::pack::{ImageSource, PackSource}; +use crate::util::io::{open_file, TmpDir}; +use crate::util::serde::IoFormat; const MAGIC_AND_VERSION: &[u8] = &[0x3b, 0x3b, 0x02]; +pub const SIG_CONTEXT: &str = "s9pk"; + pub mod compat; pub mod manifest; +pub mod pack; /** / @@ -26,51 +34,40 @@ pub mod manifest; ├── icon. ├── LICENSE.md ├── instructions.md + ├── dependencies + │ └── + │ ├── metadata.json + │ └── icon. 
├── javascript.squashfs ├── assets │ └── .squashfs (xN) └── images └── + ├── .json (xN) ├── .env (xN) └── .squashfs (xN) */ +// this sorts the s9pk to optimize such that the parts that are used first appear earlier in the s9pk +// this is useful for manipulating an s9pk while partially downloaded on a source that does not support +// random access fn priority(s: &str) -> Option { match s { "manifest.json" => Some(0), a if Path::new(a).file_stem() == Some(OsStr::new("icon")) => Some(1), "LICENSE.md" => Some(2), "instructions.md" => Some(3), - "javascript.squashfs" => Some(4), - "assets" => Some(5), - "images" => Some(6), + "dependencies" => Some(4), + "javascript.squashfs" => Some(5), + "assets" => Some(6), + "images" => Some(7), _ => None, } } -fn filter(p: &Path) -> bool { - match p.iter().count() { - 1 if p.file_name() == Some(OsStr::new("manifest.json")) => true, - 1 if p.file_stem() == Some(OsStr::new("icon")) => true, - 1 if p.file_name() == Some(OsStr::new("LICENSE.md")) => true, - 1 if p.file_name() == Some(OsStr::new("instructions.md")) => true, - 1 if p.file_name() == Some(OsStr::new("javascript.squashfs")) => true, - 1 if p.file_name() == Some(OsStr::new("assets")) => true, - 1 if p.file_name() == Some(OsStr::new("images")) => true, - 2 if p.parent() == Some(Path::new("assets")) => { - p.extension().map_or(false, |ext| ext == "squashfs") - } - 2 if p.parent() == Some(Path::new("images")) => p.file_name() == Some(OsStr::new(&*ARCH)), - 3 if p.parent() == Some(&*Path::new("images").join(&*ARCH)) => p - .extension() - .map_or(false, |ext| ext == "squashfs" || ext == "env"), - _ => false, - } -} - #[derive(Clone)] pub struct S9pk> { - manifest: Manifest, + pub manifest: Manifest, manifest_dirty: bool, archive: MerkleArchive, size: Option, @@ -94,7 +91,7 @@ impl S9pk { } } -impl S9pk { +impl S9pk { pub async fn new(archive: MerkleArchive, size: Option) -> Result { let manifest = extract_manifest(&archive).await?; Ok(Self { @@ -105,22 +102,21 @@ impl S9pk { }) 
} - pub async fn icon(&self) -> Result<(InternedString, FileContents), Error> { + pub fn validate_and_filter(&mut self, arch: Option<&str>) -> Result<(), Error> { + let filter = self.manifest.validate_for(arch, self.archive.contents())?; + filter.keep_checked(self.archive.contents_mut()) + } + + pub async fn icon(&self) -> Result<(InternedString, Entry), Error> { let mut best_icon = None; - for (path, icon) in self - .archive - .contents() - .with_stem("icon") - .filter(|(p, _)| { - Path::new(&*p) - .extension() - .and_then(|e| e.to_str()) - .and_then(mime) - .map_or(false, |e| e.starts_with("image/")) - }) - .filter_map(|(k, v)| v.into_file().map(|f| (k, f))) - { - let size = icon.size().await?; + for (path, icon) in self.archive.contents().with_stem("icon").filter(|(p, v)| { + Path::new(&*p) + .extension() + .and_then(|e| e.to_str()) + .and_then(mime) + .map_or(false, |e| e.starts_with("image/") && v.as_file().is_some()) + }) { + let size = icon.expect_file()?.size().await?; best_icon = match best_icon { Some((s, a)) if s >= size => Some((s, a)), _ => Some((size, (path, icon))), @@ -138,7 +134,75 @@ impl S9pk { .and_then(|e| e.to_str()) .and_then(mime) .unwrap_or("image/png"); - DataUrl::from_reader(mime, contents.reader().await?, Some(contents.size().await?)).await + Ok(DataUrl::from_vec( + mime, + contents.expect_file()?.to_vec(contents.hash()).await?, + )) + } + + pub async fn dependency_icon( + &self, + id: &PackageId, + ) -> Result)>, Error> { + let mut best_icon = None; + for (path, icon) in self + .archive + .contents() + .get_path(Path::new("dependencies").join(id)) + .and_then(|p| p.as_directory()) + .into_iter() + .flat_map(|d| { + d.with_stem("icon").filter(|(p, v)| { + Path::new(&*p) + .extension() + .and_then(|e| e.to_str()) + .and_then(mime) + .map_or(false, |e| e.starts_with("image/") && v.as_file().is_some()) + }) + }) + { + let size = icon.expect_file()?.size().await?; + best_icon = match best_icon { + Some((s, a)) if s >= size => Some((s, a)), + 
_ => Some((size, (path, icon))), + }; + } + Ok(best_icon.map(|(_, a)| a)) + } + + pub async fn dependency_icon_data_url( + &self, + id: &PackageId, + ) -> Result>, Error> { + let Some((name, contents)) = self.dependency_icon(id).await? else { + return Ok(None); + }; + let mime = Path::new(&*name) + .extension() + .and_then(|e| e.to_str()) + .and_then(mime) + .unwrap_or("image/png"); + Ok(Some(DataUrl::from_vec( + mime, + contents.expect_file()?.to_vec(contents.hash()).await?, + ))) + } + + pub async fn dependency_metadata( + &self, + id: &PackageId, + ) -> Result, Error> { + if let Some(entry) = self + .archive + .contents() + .get_path(Path::new("dependencies").join(id).join("metadata.json")) + { + Ok(Some(IoFormat::Json.from_slice( + &entry.expect_file()?.to_vec(entry.hash()).await?, + )?)) + } else { + Ok(None) + } } pub async fn serialize(&mut self, w: &mut W, verify: bool) -> Result<(), Error> { @@ -171,9 +235,38 @@ impl S9pk { } } -impl S9pk> { +impl> + FileSource + Clone> S9pk { + pub async fn load_images(&mut self, tmp_dir: Arc) -> Result<(), Error> { + let id = &self.manifest.id; + let version = &self.manifest.version; + for (image_id, image_config) in &mut self.manifest.images { + self.manifest_dirty = true; + for arch in &image_config.arch { + image_config + .source + .load( + tmp_dir.clone(), + id, + version, + image_id, + arch, + self.archive.contents_mut(), + ) + .await?; + } + image_config.source = ImageSource::Packed; + } + + Ok(()) + } +} + +impl S9pk> { #[instrument(skip_all)] - pub async fn deserialize(source: &S) -> Result { + pub async fn deserialize( + source: &S, + commitment: Option<&MerkleArchiveCommitment>, + ) -> Result { use tokio::io::AsyncReadExt; let mut header = source @@ -183,7 +276,7 @@ impl S9pk> { ) .await?; - let mut magic_version = [0u8; 3]; + let mut magic_version = [0u8; MAGIC_AND_VERSION.len()]; header.read_exact(&mut magic_version).await?; ensure_code!( &magic_version == MAGIC_AND_VERSION, @@ -191,9 +284,8 @@ impl S9pk> { 
"Invalid Magic or Unexpected Version" ); - let mut archive = MerkleArchive::deserialize(source, &mut header).await?; - - archive.filter(filter)?; + let mut archive = + MerkleArchive::deserialize(source, SIG_CONTEXT, &mut header, commitment).await?; archive.sort_by(|a, b| match (priority(a), priority(b)) { (Some(a), Some(b)) => a.cmp(&b), @@ -207,10 +299,10 @@ impl S9pk> { } impl S9pk { pub async fn from_file(file: File) -> Result { - Self::deserialize(&MultiCursorFile::from(file)).await + Self::deserialize(&MultiCursorFile::from(file), None).await } pub async fn open(path: impl AsRef, id: Option<&PackageId>) -> Result { - let res = Self::from_file(tokio::fs::File::open(path).await?).await?; + let res = Self::from_file(open_file(path).await?).await?; if let Some(id) = id { ensure_code!( &res.as_manifest().id == id, diff --git a/core/startos/src/s9pk/v2/pack.rs b/core/startos/src/s9pk/v2/pack.rs new file mode 100644 index 000000000..06a47b9d0 --- /dev/null +++ b/core/startos/src/s9pk/v2/pack.rs @@ -0,0 +1,731 @@ +use std::collections::BTreeSet; +use std::path::{Path, PathBuf}; +use std::sync::Arc; + +use clap::Parser; +use futures::future::{ready, BoxFuture}; +use futures::{FutureExt, TryStreamExt}; +use imbl_value::InternedString; +use models::{ImageId, PackageId, VersionString}; +use serde::{Deserialize, Serialize}; +use tokio::process::Command; +use tokio::sync::OnceCell; +use tokio_stream::wrappers::ReadDirStream; +use tracing::{debug, warn}; +use ts_rs::TS; + +use crate::context::CliContext; +use crate::dependencies::DependencyMetadata; +use crate::prelude::*; +use crate::rpc_continuations::Guid; +use crate::s9pk::manifest::Manifest; +use crate::s9pk::merkle_archive::directory_contents::DirectoryContents; +use crate::s9pk::merkle_archive::source::http::HttpSource; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; +use crate::s9pk::merkle_archive::source::{ + into_dyn_read, ArchiveSource, DynFileSource, DynRead, FileSource, TmpSource, 
+}; +use crate::s9pk::merkle_archive::{Entry, MerkleArchive}; +use crate::s9pk::v2::SIG_CONTEXT; +use crate::s9pk::S9pk; +use crate::util::io::{create_file, open_file, TmpDir}; +use crate::util::serde::IoFormat; +use crate::util::{new_guid, Invoke, PathOrUrl}; + +#[cfg(not(feature = "docker"))] +pub const CONTAINER_TOOL: &str = "podman"; +#[cfg(feature = "docker")] +pub const CONTAINER_TOOL: &str = "docker"; + +#[cfg(feature = "docker")] +pub const CONTAINER_DATADIR: &str = "/var/lib/docker"; +#[cfg(not(feature = "docker"))] +pub const CONTAINER_DATADIR: &str = "/var/lib/containers"; + +pub struct SqfsDir { + path: PathBuf, + tmpdir: Arc, + sqfs: OnceCell, +} +impl SqfsDir { + pub fn new(path: PathBuf, tmpdir: Arc) -> Self { + Self { + path, + tmpdir, + sqfs: OnceCell::new(), + } + } + async fn file(&self) -> Result<&MultiCursorFile, Error> { + self.sqfs + .get_or_try_init(|| async move { + let guid = Guid::new(); + let path = self.tmpdir.join(guid.as_ref()).with_extension("squashfs"); + let mut cmd = Command::new("mksquashfs"); + if self.path.extension().and_then(|s| s.to_str()) == Some("tar") { + cmd.arg("-tar"); + } + cmd.arg(&self.path) + .arg(&path) + .invoke(ErrorKind::Filesystem) + .await?; + Ok(MultiCursorFile::from( + open_file(&path) + .await + .with_ctx(|_| (ErrorKind::Filesystem, path.display()))?, + )) + }) + .await + } +} + +#[derive(Clone)] +pub enum PackSource { + Buffered(Arc<[u8]>), + File(PathBuf), + Squashfs(Arc), +} +impl FileSource for PackSource { + type Reader = DynRead; + type SliceReader = DynRead; + async fn size(&self) -> Result { + match self { + Self::Buffered(a) => Ok(a.len() as u64), + Self::File(f) => Ok(tokio::fs::metadata(f) + .await + .with_ctx(|_| (ErrorKind::Filesystem, f.display()))? + .len()), + Self::Squashfs(dir) => dir + .file() + .await + .with_ctx(|_| (ErrorKind::Filesystem, dir.path.display()))? 
+ .size() + .await + .or_not_found("file metadata"), + } + } + async fn reader(&self) -> Result { + match self { + Self::Buffered(a) => Ok(into_dyn_read(FileSource::reader(a).await?)), + Self::File(f) => Ok(into_dyn_read(FileSource::reader(f).await?)), + Self::Squashfs(dir) => dir.file().await?.fetch_all().await.map(into_dyn_read), + } + } + async fn slice(&self, position: u64, size: u64) -> Result { + match self { + Self::Buffered(a) => Ok(into_dyn_read(FileSource::slice(a, position, size).await?)), + Self::File(f) => Ok(into_dyn_read(FileSource::slice(f, position, size).await?)), + Self::Squashfs(dir) => dir + .file() + .await? + .fetch(position, size) + .await + .map(into_dyn_read), + } + } +} +impl From for DynFileSource { + fn from(value: PackSource) -> Self { + DynFileSource::new(value) + } +} + +#[derive(Deserialize, Serialize, Parser)] +pub struct PackParams { + pub path: Option, + #[arg(short = 'o', long = "output")] + pub output: Option, + #[arg(long = "javascript")] + pub javascript: Option, + #[arg(long = "icon")] + pub icon: Option, + #[arg(long = "license")] + pub license: Option, + #[arg(long = "instructions")] + pub instructions: Option, + #[arg(long = "assets")] + pub assets: Option, +} +impl PackParams { + fn path(&self) -> &Path { + self.path.as_deref().unwrap_or(Path::new(".")) + } + fn output(&self, id: &PackageId) -> PathBuf { + self.output + .as_ref() + .cloned() + .unwrap_or_else(|| self.path().join(id).with_extension("s9pk")) + } + fn javascript(&self) -> PathBuf { + self.javascript + .as_ref() + .cloned() + .unwrap_or_else(|| self.path().join("javascript")) + } + async fn icon(&self) -> Result { + if let Some(icon) = &self.icon { + Ok(icon.clone()) + } else { + ReadDirStream::new(tokio::fs::read_dir(self.path()).await?) 
+ .try_filter(|x| { + ready( + x.path() + .file_stem() + .map_or(false, |s| s.eq_ignore_ascii_case("icon")), + ) + }) + .map_err(Error::from) + .try_fold( + Err(Error::new(eyre!("icon not found"), ErrorKind::NotFound)), + |acc, x| async move { + match acc { + Ok(_) => Err(Error::new(eyre!("multiple icons found in working directory, please specify which to use with `--icon`"), ErrorKind::InvalidRequest)), + Err(e) => Ok({ + let path = x.path(); + if path + .file_stem() + .map_or(false, |s| s.eq_ignore_ascii_case("icon")) + { + Ok(path) + } else { + Err(e) + } + }), + } + }, + ) + .await? + } + } + async fn license(&self) -> Result { + if let Some(license) = &self.license { + Ok(license.clone()) + } else { + ReadDirStream::new(tokio::fs::read_dir(self.path()).await?) + .try_filter(|x| { + ready( + x.path() + .file_stem() + .map_or(false, |s| s.eq_ignore_ascii_case("license")), + ) + }) + .map_err(Error::from) + .try_fold( + Err(Error::new(eyre!("icon not found"), ErrorKind::NotFound)), + |acc, x| async move { + match acc { + Ok(_) => Err(Error::new(eyre!("multiple licenses found in working directory, please specify which to use with `--license`"), ErrorKind::InvalidRequest)), + Err(e) => Ok({ + let path = x.path(); + if path + .file_stem() + .map_or(false, |s| s.eq_ignore_ascii_case("license")) + { + Ok(path) + } else { + Err(e) + } + }), + } + }, + ) + .await? 
+ } + } + fn instructions(&self) -> PathBuf { + self.instructions + .as_ref() + .cloned() + .unwrap_or_else(|| self.path().join("instructions.md")) + } + fn assets(&self) -> PathBuf { + self.assets + .as_ref() + .cloned() + .unwrap_or_else(|| self.path().join("assets")) + } +} + +#[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct ImageConfig { + pub source: ImageSource, + #[ts(type = "string[]")] + pub arch: BTreeSet, + #[ts(type = "string | null")] + pub emulate_missing_as: Option, +} +impl Default for ImageConfig { + fn default() -> Self { + Self { + source: ImageSource::Packed, + arch: BTreeSet::new(), + emulate_missing_as: None, + } + } +} + +#[derive(Parser)] +struct CliImageConfig { + #[arg(long, conflicts_with("docker-tag"))] + docker_build: bool, + #[arg(long, requires("docker-build"))] + dockerfile: Option, + #[arg(long, requires("docker-build"))] + workdir: Option, + #[arg(long, conflicts_with_all(["dockerfile", "workdir"]))] + docker_tag: Option, + #[arg(long)] + arch: Vec, + #[arg(long)] + emulate_missing_as: Option, +} +impl TryFrom for ImageConfig { + type Error = clap::Error; + fn try_from(value: CliImageConfig) -> Result { + let res = Self { + source: if value.docker_build { + ImageSource::DockerBuild { + dockerfile: value.dockerfile, + workdir: value.workdir, + } + } else if let Some(tag) = value.docker_tag { + ImageSource::DockerTag(tag) + } else { + ImageSource::Packed + }, + arch: value.arch.into_iter().collect(), + emulate_missing_as: value.emulate_missing_as, + }; + res.emulate_missing_as + .as_ref() + .map(|a| { + if !res.arch.contains(a) { + Err(clap::Error::raw( + clap::error::ErrorKind::InvalidValue, + "`emulate-missing-as` must match one of the provided `arch`es", + )) + } else { + Ok(()) + } + }) + .transpose()?; + Ok(res) + } +} +impl clap::Args for ImageConfig { + fn augment_args(cmd: clap::Command) -> clap::Command { + CliImageConfig::augment_args(cmd) + } + fn 
augment_args_for_update(cmd: clap::Command) -> clap::Command { + CliImageConfig::augment_args_for_update(cmd) + } +} +impl clap::FromArgMatches for ImageConfig { + fn from_arg_matches(matches: &clap::ArgMatches) -> Result { + Self::try_from(CliImageConfig::from_arg_matches(matches)?) + } + fn update_from_arg_matches(&mut self, matches: &clap::ArgMatches) -> Result<(), clap::Error> { + *self = Self::try_from(CliImageConfig::from_arg_matches(matches)?)?; + Ok(()) + } +} + +#[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub enum ImageSource { + Packed, + #[serde(rename_all = "camelCase")] + DockerBuild { + workdir: Option, + dockerfile: Option, + }, + DockerTag(String), +} +impl ImageSource { + pub fn ingredients(&self) -> Vec { + match self { + Self::Packed => Vec::new(), + Self::DockerBuild { dockerfile, .. } => { + vec![dockerfile.clone().unwrap_or_else(|| "Dockerfile".into())] + } + Self::DockerTag(_) => Vec::new(), + } + } + #[instrument(skip_all)] + pub fn load<'a, S: From> + FileSource + Clone>( + &'a self, + tmp_dir: Arc, + id: &'a PackageId, + version: &'a VersionString, + image_id: &'a ImageId, + arch: &'a str, + into: &'a mut DirectoryContents, + ) -> BoxFuture<'a, Result<(), Error>> { + #[derive(Deserialize)] + #[serde(rename_all = "PascalCase")] + struct DockerImageConfig { + env: Vec, + #[serde(default)] + working_dir: PathBuf, + #[serde(default)] + user: String, + } + async move { + match self { + ImageSource::Packed => Ok(()), + ImageSource::DockerBuild { + workdir, + dockerfile, + } => { + let workdir = workdir.as_deref().unwrap_or(Path::new(".")); + let dockerfile = dockerfile + .clone() + .unwrap_or_else(|| workdir.join("Dockerfile")); + let docker_platform = if arch == "x86_64" { + "--platform=linux/amd64".to_owned() + } else if arch == "aarch64" { + "--platform=linux/arm64".to_owned() + } else { + format!("--platform=linux/{arch}") + }; + // docker buildx build ${path} -o 
type=image,name=start9/${id} + let tag = format!("start9/{id}/{image_id}:{}", new_guid()); + Command::new(CONTAINER_TOOL) + .arg("build") + .arg(workdir) + .arg("-f") + .arg(dockerfile) + .arg("-t") + .arg(&tag) + .arg(&docker_platform) + .arg("-o") + .arg("type=docker,dest=-") + .capture(false) + .pipe(Command::new(CONTAINER_TOOL).arg("load")) + .invoke(ErrorKind::Docker) + .await?; + ImageSource::DockerTag(tag.clone()) + .load(tmp_dir, id, version, image_id, arch, into) + .await?; + Command::new(CONTAINER_TOOL) + .arg("rmi") + .arg("-f") + .arg(&tag) + .invoke(ErrorKind::Docker) + .await?; + Ok(()) + } + ImageSource::DockerTag(tag) => { + let docker_platform = if arch == "x86_64" { + "--platform=linux/amd64".to_owned() + } else if arch == "aarch64" { + "--platform=linux/arm64".to_owned() + } else { + format!("--platform=linux/{arch}") + }; + let mut inspect_cmd = Command::new(CONTAINER_TOOL); + inspect_cmd + .arg("image") + .arg("inspect") + .arg("--format") + .arg("{{json .Config}}") + .arg(&tag); + let inspect_res = match inspect_cmd.invoke(ErrorKind::Docker).await { + Ok(a) => a, + Err(e) + if { + let msg = e.source.to_string(); + #[cfg(feature = "docker")] + let matches = msg.contains("No such image:"); + #[cfg(not(feature = "docker"))] + let matches = msg.contains(": image not known"); + matches + } => + { + Command::new(CONTAINER_TOOL) + .arg("pull") + .arg(&docker_platform) + .arg(tag) + .capture(false) + .invoke(ErrorKind::Docker) + .await?; + inspect_cmd.invoke(ErrorKind::Docker).await? 
+ } + Err(e) => return Err(e), + }; + let config = serde_json::from_slice::(&inspect_res) + .with_kind(ErrorKind::Deserialization)?; + let base_path = Path::new("images").join(arch).join(image_id); + into.insert_path( + base_path.with_extension("json"), + Entry::file( + TmpSource::new( + tmp_dir.clone(), + PackSource::Buffered( + serde_json::to_vec(&ImageMetadata { + workdir: if config.working_dir == Path::new("") { + "/".into() + } else { + config.working_dir + }, + user: if config.user.is_empty() { + "root".into() + } else { + config.user.into() + }, + }) + .with_kind(ErrorKind::Serialization)? + .into(), + ), + ) + .into(), + ), + )?; + into.insert_path( + base_path.with_extension("env"), + Entry::file( + TmpSource::new( + tmp_dir.clone(), + PackSource::Buffered(config.env.join("\n").into_bytes().into()), + ) + .into(), + ), + )?; + let dest = tmp_dir + .join(Guid::new().as_ref()) + .with_extension("squashfs"); + let container = String::from_utf8( + Command::new(CONTAINER_TOOL) + .arg("create") + .arg(&docker_platform) + .arg(&tag) + .invoke(ErrorKind::Docker) + .await?, + )?; + Command::new(CONTAINER_TOOL) + .arg("export") + .arg(container.trim()) + .pipe(Command::new("mksquashfs").arg("-").arg(&dest).arg("-tar")) + .capture(false) + .invoke(ErrorKind::Docker) + .await?; + Command::new(CONTAINER_TOOL) + .arg("rm") + .arg(container.trim()) + .invoke(ErrorKind::Docker) + .await?; + into.insert_path( + base_path.with_extension("squashfs"), + Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(dest)).into()), + )?; + + Ok(()) + } + } + } + .boxed() + } +} + +#[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct ImageMetadata { + pub workdir: PathBuf, + #[ts(type = "string")] + pub user: InternedString, +} + +#[instrument(skip_all)] +pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> { + let tmp_dir = Arc::new(TmpDir::new().await?); + let mut files = 
DirectoryContents::>::new(); + let js_dir = params.javascript(); + let manifest: Arc<[u8]> = Command::new("node") + .arg("-e") + .arg(format!( + "console.log(JSON.stringify(require('{}/index.js').manifest))", + js_dir.display() + )) + .invoke(ErrorKind::Javascript) + .await? + .into(); + files.insert( + "manifest.json".into(), + Entry::file(TmpSource::new( + tmp_dir.clone(), + PackSource::Buffered(manifest.clone()), + )), + ); + let icon = params.icon().await?; + let icon_ext = icon + .extension() + .or_not_found("icon file extension")? + .to_string_lossy(); + files.insert( + InternedString::from_display(&lazy_format!("icon.{}", icon_ext)), + Entry::file(TmpSource::new(tmp_dir.clone(), PackSource::File(icon))), + ); + files.insert( + "LICENSE.md".into(), + Entry::file(TmpSource::new( + tmp_dir.clone(), + PackSource::File(params.license().await?), + )), + ); + files.insert( + "instructions.md".into(), + Entry::file(TmpSource::new( + tmp_dir.clone(), + PackSource::File(params.instructions()), + )), + ); + files.insert( + "javascript.squashfs".into(), + Entry::file(TmpSource::new( + tmp_dir.clone(), + PackSource::Squashfs(Arc::new(SqfsDir::new(js_dir, tmp_dir.clone()))), + )), + ); + + let mut s9pk = S9pk::new( + MerkleArchive::new(files, ctx.developer_key()?.clone(), SIG_CONTEXT), + None, + ) + .await?; + + let assets_dir = params.assets(); + for assets in s9pk.as_manifest().assets.clone() { + s9pk.as_archive_mut().contents_mut().insert_path( + Path::new("assets").join(&assets).with_extension("squashfs"), + Entry::file(TmpSource::new( + tmp_dir.clone(), + PackSource::Squashfs(Arc::new(SqfsDir::new( + assets_dir.join(&assets), + tmp_dir.clone(), + ))), + )), + )?; + } + + s9pk.load_images(tmp_dir.clone()).await?; + + let mut to_insert = Vec::new(); + for (id, dependency) in &mut s9pk.as_manifest_mut().dependencies.0 { + if let Some(s9pk) = dependency.s9pk.take() { + let s9pk = match s9pk { + PathOrUrl::Path(path) => { + 
S9pk::deserialize(&MultiCursorFile::from(open_file(path).await?), None) + .await? + .into_dyn() + } + PathOrUrl::Url(url) => { + if url.scheme() == "http" || url.scheme() == "https" { + S9pk::deserialize( + &Arc::new(HttpSource::new(ctx.client.clone(), url).await?), + None, + ) + .await? + .into_dyn() + } else { + return Err(Error::new( + eyre!("unknown scheme: {}", url.scheme()), + ErrorKind::InvalidRequest, + )); + } + } + }; + let dep_path = Path::new("dependencies").join(id); + to_insert.push(( + dep_path.join("metadata.json"), + Entry::file(PackSource::Buffered( + IoFormat::Json + .to_vec(&DependencyMetadata { + title: s9pk.as_manifest().title.clone(), + })? + .into(), + )), + )); + let icon = s9pk.icon().await?; + to_insert.push(( + dep_path.join(&*icon.0), + Entry::file(PackSource::Buffered( + icon.1.expect_file()?.to_vec(icon.1.hash()).await?.into(), + )), + )); + } else { + warn!("no s9pk specified for {id}, leaving metadata empty"); + } + } + + s9pk.validate_and_filter(None)?; + + s9pk.serialize( + &mut create_file(params.output(&s9pk.as_manifest().id)).await?, + false, + ) + .await?; + + drop(s9pk); + + tmp_dir.gc().await?; + + Ok(()) +} + +#[instrument(skip_all)] +pub async fn list_ingredients(_: CliContext, params: PackParams) -> Result, Error> { + let js_path = params.javascript().join("index.js"); + let manifest: Manifest = match async { + serde_json::from_slice( + &Command::new("node") + .arg("-e") + .arg(format!( + "console.log(JSON.stringify(require('{}').manifest))", + js_path.display() + )) + .invoke(ErrorKind::Javascript) + .await?, + ) + .with_kind(ErrorKind::Deserialization) + } + .await + { + Ok(m) => m, + Err(e) => { + warn!("failed to load manifest: {e}"); + debug!("{e:?}"); + return Ok(vec![ + js_path, + params.icon().await?, + params.license().await?, + params.instructions(), + ]); + } + }; + let mut ingredients = vec![ + js_path, + params.icon().await?, + params.license().await?, + params.instructions(), + ]; + + for (_, dependency) in 
manifest.dependencies.0 { + if let Some(PathOrUrl::Path(p)) = dependency.s9pk { + ingredients.push(p); + } + } + + let assets_dir = params.assets(); + for assets in manifest.assets { + ingredients.push(assets_dir.join(assets)); + } + + for image in manifest.images.values() { + ingredients.extend(image.source.ingredients()); + } + + Ok(ingredients) +} diff --git a/core/startos/src/service/action.rs b/core/startos/src/service/action.rs new file mode 100644 index 000000000..6c5ac4eab --- /dev/null +++ b/core/startos/src/service/action.rs @@ -0,0 +1,64 @@ +use std::time::Duration; + +use models::{ActionId, ProcedureName}; + +use crate::action::ActionResult; +use crate::prelude::*; +use crate::rpc_continuations::Guid; +use crate::service::config::GetConfig; +use crate::service::dependencies::DependencyConfig; +use crate::service::{Service, ServiceActor}; +use crate::util::actor::background::BackgroundJobQueue; +use crate::util::actor::{ConflictBuilder, Handler}; + +pub(super) struct Action { + id: ActionId, + input: Value, +} +impl Handler for ServiceActor { + type Response = Result; + fn conflicts_with(_: &Action) -> ConflictBuilder { + ConflictBuilder::everything() + .except::() + .except::() + } + async fn handle( + &mut self, + id: Guid, + Action { + id: action_id, + input, + }: Action, + _: &BackgroundJobQueue, + ) -> Self::Response { + let container = &self.0.persistent_container; + container + .execute::( + id, + ProcedureName::RunAction(action_id), + input, + Some(Duration::from_secs(30)), + ) + .await + .with_kind(ErrorKind::Action) + } +} + +impl Service { + pub async fn action( + &self, + id: Guid, + action_id: ActionId, + input: Value, + ) -> Result { + self.actor + .send( + id, + Action { + id: action_id, + input, + }, + ) + .await? 
+ } +} diff --git a/core/startos/src/service/cli.rs b/core/startos/src/service/cli.rs index d3bdccd72..95add37fb 100644 --- a/core/startos/src/service/cli.rs +++ b/core/startos/src/service/cli.rs @@ -5,10 +5,11 @@ use clap::Parser; use imbl_value::Value; use once_cell::sync::OnceCell; use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{call_remote_socket, yajrc, CallRemote, Context}; +use rpc_toolkit::{call_remote_socket, yajrc, CallRemote, Context, Empty}; use tokio::runtime::Runtime; use crate::lxc::HOST_RPC_SERVER_SOCKET; +use crate::service::effects::context::EffectContext; #[derive(Debug, Default, Parser)] pub struct ContainerClientConfig { @@ -18,7 +19,7 @@ pub struct ContainerClientConfig { pub struct ContainerCliSeed { socket: PathBuf, - runtime: OnceCell, + runtime: OnceCell>, } #[derive(Clone)] @@ -28,29 +29,31 @@ impl ContainerCliContext { Self(Arc::new(ContainerCliSeed { socket: cfg .socket - .unwrap_or_else(|| Path::new("/").join(HOST_RPC_SERVER_SOCKET)), + .unwrap_or_else(|| Path::new("/media/startos/rpc").join(HOST_RPC_SERVER_SOCKET)), runtime: OnceCell::new(), })) } } impl Context for ContainerCliContext { - fn runtime(&self) -> tokio::runtime::Handle { - self.0 - .runtime - .get_or_init(|| { - tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .unwrap() - }) - .handle() - .clone() + fn runtime(&self) -> Option> { + Some( + self.0 + .runtime + .get_or_init(|| { + Arc::new( + tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(), + ) + }) + .clone(), + ) } } -#[async_trait::async_trait] -impl CallRemote for ContainerCliContext { - async fn call_remote(&self, method: &str, params: Value) -> Result { +impl CallRemote for ContainerCliContext { + async fn call_remote(&self, method: &str, params: Value, _: Empty) -> Result { call_remote_socket( tokio::net::UnixStream::connect(&self.0.socket) .await diff --git a/core/startos/src/service/config.rs b/core/startos/src/service/config.rs index 
e1294a465..faa70fc41 100644 --- a/core/startos/src/service/config.rs +++ b/core/startos/src/service/config.rs @@ -1,19 +1,78 @@ +use std::time::Duration; + use models::ProcedureName; +use crate::config::action::ConfigRes; use crate::config::ConfigureContext; use crate::prelude::*; -use crate::service::Service; +use crate::rpc_continuations::Guid; +use crate::service::dependencies::DependencyConfig; +use crate::service::{Service, ServiceActor}; +use crate::util::actor::background::BackgroundJobQueue; +use crate::util::actor::{ConflictBuilder, Handler}; +use crate::util::serde::NoOutput; + +pub(super) struct Configure(ConfigureContext); +impl Handler for ServiceActor { + type Response = Result<(), Error>; + fn conflicts_with(_: &Configure) -> ConflictBuilder { + ConflictBuilder::everything().except::() + } + async fn handle( + &mut self, + id: Guid, + Configure(ConfigureContext { timeout, config }): Configure, + _: &BackgroundJobQueue, + ) -> Self::Response { + let container = &self.0.persistent_container; + let package_id = &self.0.id; -impl Service { - pub async fn configure( - &self, - ConfigureContext { timeout, config }: ConfigureContext, - ) -> Result<(), Error> { - let container = &self.seed.persistent_container; container - .execute::(ProcedureName::SetConfig, to_value(&config)?, timeout) + .execute::(id, ProcedureName::SetConfig, to_value(&config)?, timeout) .await - .with_kind(ErrorKind::Action)?; + .with_kind(ErrorKind::ConfigRulesViolation)?; + self.0 + .ctx + .db + .mutate(move |db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(package_id) + .or_not_found(package_id)? 
+ .as_status_mut() + .as_configured_mut() + .ser(&true) + }) + .await?; Ok(()) } } + +pub(super) struct GetConfig; +impl Handler for ServiceActor { + type Response = Result; + fn conflicts_with(_: &GetConfig) -> ConflictBuilder { + ConflictBuilder::nothing().except::() + } + async fn handle(&mut self, id: Guid, _: GetConfig, _: &BackgroundJobQueue) -> Self::Response { + let container = &self.0.persistent_container; + container + .execute::( + id, + ProcedureName::GetConfig, + Value::Null, + Some(Duration::from_secs(30)), // TODO timeout + ) + .await + .with_kind(ErrorKind::ConfigRulesViolation) + } +} + +impl Service { + pub async fn configure(&self, id: Guid, ctx: ConfigureContext) -> Result<(), Error> { + self.actor.send(id, Configure(ctx)).await? + } + pub async fn get_config(&self, id: Guid) -> Result { + self.actor.send(id, GetConfig).await? + } +} diff --git a/core/startos/src/service/control.rs b/core/startos/src/service/control.rs index 88d66d97c..7c4bdf815 100644 --- a/core/startos/src/service/control.rs +++ b/core/startos/src/service/control.rs @@ -1,14 +1,22 @@ use crate::prelude::*; +use crate::rpc_continuations::Guid; +use crate::service::config::GetConfig; +use crate::service::dependencies::DependencyConfig; use crate::service::start_stop::StartStop; use crate::service::transition::TransitionKind; use crate::service::{Service, ServiceActor}; -use crate::util::actor::{BackgroundJobs, Handler}; +use crate::util::actor::background::BackgroundJobQueue; +use crate::util::actor::{ConflictBuilder, Handler}; -struct Start; -#[async_trait::async_trait] +pub(super) struct Start; impl Handler for ServiceActor { type Response = (); - async fn handle(&mut self, _: Start, _: &mut BackgroundJobs) -> Self::Response { + fn conflicts_with(_: &Start) -> ConflictBuilder { + ConflictBuilder::everything() + .except::() + .except::() + } + async fn handle(&mut self, _: Guid, _: Start, _: &BackgroundJobQueue) -> Self::Response { 
self.0.persistent_container.state.send_modify(|x| { x.desired_state = StartStop::Start; }); @@ -16,16 +24,20 @@ impl Handler for ServiceActor { } } impl Service { - pub async fn start(&self) -> Result<(), Error> { - self.actor.send(Start).await + pub async fn start(&self, id: Guid) -> Result<(), Error> { + self.actor.send(id, Start).await } } struct Stop; -#[async_trait::async_trait] impl Handler for ServiceActor { type Response = (); - async fn handle(&mut self, _: Stop, _: &mut BackgroundJobs) -> Self::Response { + fn conflicts_with(_: &Stop) -> ConflictBuilder { + ConflictBuilder::everything() + .except::() + .except::() + } + async fn handle(&mut self, _: Guid, _: Stop, _: &BackgroundJobQueue) -> Self::Response { let mut transition_state = None; self.0.persistent_container.state.send_modify(|x| { x.desired_state = StartStop::Stop; @@ -40,7 +52,7 @@ impl Handler for ServiceActor { } } impl Service { - pub async fn stop(&self) -> Result<(), Error> { - self.actor.send(Stop).await + pub async fn stop(&self, id: Guid) -> Result<(), Error> { + self.actor.send(id, Stop).await } } diff --git a/core/startos/src/service/dependencies.rs b/core/startos/src/service/dependencies.rs new file mode 100644 index 000000000..e8c6f07c4 --- /dev/null +++ b/core/startos/src/service/dependencies.rs @@ -0,0 +1,86 @@ +use std::time::Duration; + +use imbl_value::json; +use models::{PackageId, ProcedureName}; + +use crate::prelude::*; +use crate::rpc_continuations::Guid; +use crate::service::{Service, ServiceActor, ServiceActorSeed}; +use crate::util::actor::background::BackgroundJobQueue; +use crate::util::actor::{ConflictBuilder, Handler}; +use crate::Config; + +impl ServiceActorSeed { + async fn dependency_config( + &self, + id: Guid, + dependency_id: PackageId, + remote_config: Option, + ) -> Result, Error> { + let container = &self.persistent_container; + container + .sanboxed::>( + id.clone(), + ProcedureName::UpdateDependency(dependency_id.clone()), + json!({ + "queryResults": 
container + .execute::( + id, + ProcedureName::QueryDependency(dependency_id), + Value::Null, + Some(Duration::from_secs(30)), + ) + .await + .with_kind(ErrorKind::Dependency)?, + "remoteConfig": remote_config, + }), + Some(Duration::from_secs(30)), + ) + .await + .with_kind(ErrorKind::Dependency) + .map(|res| res.filter(|c| !c.is_empty() && Some(c) != remote_config.as_ref())) + } +} + +pub(super) struct DependencyConfig { + dependency_id: PackageId, + remote_config: Option, +} +impl Handler for ServiceActor { + type Response = Result, Error>; + fn conflicts_with(_: &DependencyConfig) -> ConflictBuilder { + ConflictBuilder::nothing() + } + async fn handle( + &mut self, + id: Guid, + DependencyConfig { + dependency_id, + remote_config, + }: DependencyConfig, + _: &BackgroundJobQueue, + ) -> Self::Response { + self.0 + .dependency_config(id, dependency_id, remote_config) + .await + } +} + +impl Service { + pub async fn dependency_config( + &self, + id: Guid, + dependency_id: PackageId, + remote_config: Option, + ) -> Result, Error> { + self.actor + .send( + id, + DependencyConfig { + dependency_id, + remote_config, + }, + ) + .await? 
+ } +} diff --git a/core/startos/src/service/effects/action.rs b/core/startos/src/service/effects/action.rs new file mode 100644 index 000000000..4719c6d3d --- /dev/null +++ b/core/startos/src/service/effects/action.rs @@ -0,0 +1,101 @@ +use std::collections::BTreeMap; + +use models::{ActionId, PackageId}; + +use crate::action::ActionResult; +use crate::db::model::package::ActionMetadata; +use crate::rpc_continuations::Guid; +use crate::service::effects::prelude::*; + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct ExportActionParams { + #[ts(optional)] + package_id: Option, + id: ActionId, + metadata: ActionMetadata, +} +pub async fn export_action(context: EffectContext, data: ExportActionParams) -> Result<(), Error> { + let context = context.deref()?; + let package_id = context.seed.id.clone(); + context + .seed + .ctx + .db + .mutate(|db| { + let model = db + .as_public_mut() + .as_package_data_mut() + .as_idx_mut(&package_id) + .or_not_found(&package_id)? + .as_actions_mut(); + let mut value = model.de()?; + value + .insert(data.id, data.metadata) + .map(|_| ()) + .unwrap_or_default(); + model.ser(&value) + }) + .await?; + Ok(()) +} + +pub async fn clear_actions(context: EffectContext) -> Result<(), Error> { + let context = context.deref()?; + let package_id = context.seed.id.clone(); + context + .seed + .ctx + .db + .mutate(|db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(&package_id) + .or_not_found(&package_id)? 
+ .as_actions_mut() + .ser(&BTreeMap::new()) + }) + .await?; + Ok(()) +} + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct ExecuteAction { + #[serde(default)] + #[ts(skip)] + procedure_id: Guid, + #[ts(optional)] + package_id: Option, + action_id: ActionId, + #[ts(type = "any")] + input: Value, +} +pub async fn execute_action( + context: EffectContext, + ExecuteAction { + procedure_id, + package_id, + action_id, + input, + }: ExecuteAction, +) -> Result { + let context = context.deref()?; + + if let Some(package_id) = package_id { + context + .seed + .ctx + .services + .get(&package_id) + .await + .as_ref() + .or_not_found(&package_id)? + .action(procedure_id, action_id, input) + .await + } else { + context.action(procedure_id, action_id, input).await + } +} diff --git a/core/startos/src/service/effects/callbacks.rs b/core/startos/src/service/effects/callbacks.rs new file mode 100644 index 000000000..1a9250aa8 --- /dev/null +++ b/core/startos/src/service/effects/callbacks.rs @@ -0,0 +1,311 @@ +use std::cmp::min; +use std::collections::{BTreeMap, BTreeSet}; +use std::sync::{Arc, Mutex, Weak}; +use std::time::{Duration, SystemTime}; + +use futures::future::join_all; +use helpers::NonDetachingJoinHandle; +use imbl::{vector, Vector}; +use imbl_value::InternedString; +use models::{HostId, PackageId, ServiceInterfaceId}; +use patch_db::json_ptr::JsonPointer; +use tracing::warn; + +use crate::net::ssl::FullchainCertData; +use crate::prelude::*; +use crate::service::effects::context::EffectContext; +use crate::service::effects::net::ssl::Algorithm; +use crate::service::rpc::CallbackHandle; +use crate::service::{Service, ServiceActorSeed}; +use crate::util::collections::EqMap; + +#[derive(Default)] +pub struct ServiceCallbacks(Mutex); + +#[derive(Default)] +struct ServiceCallbackMap { + get_service_interface: BTreeMap<(PackageId, ServiceInterfaceId), Vec>, + list_service_interfaces: BTreeMap>, + 
get_system_smtp: Vec, + get_host_info: BTreeMap<(PackageId, HostId), Vec>, + get_ssl_certificate: EqMap< + (BTreeSet, FullchainCertData, Algorithm), + (NonDetachingJoinHandle<()>, Vec), + >, + get_store: BTreeMap>>, +} + +impl ServiceCallbacks { + fn mutate(&self, f: impl FnOnce(&mut ServiceCallbackMap) -> T) -> T { + let mut this = self.0.lock().unwrap(); + f(&mut *this) + } + + pub fn gc(&self) { + self.mutate(|this| { + this.get_service_interface.retain(|_, v| { + v.retain(|h| h.handle.is_active() && h.seed.strong_count() > 0); + !v.is_empty() + }); + this.list_service_interfaces.retain(|_, v| { + v.retain(|h| h.handle.is_active() && h.seed.strong_count() > 0); + !v.is_empty() + }); + this.get_system_smtp + .retain(|h| h.handle.is_active() && h.seed.strong_count() > 0); + this.get_host_info.retain(|_, v| { + v.retain(|h| h.handle.is_active() && h.seed.strong_count() > 0); + !v.is_empty() + }); + this.get_ssl_certificate.retain(|_, (_, v)| { + v.retain(|h| h.handle.is_active() && h.seed.strong_count() > 0); + !v.is_empty() + }); + this.get_store.retain(|_, v| { + v.retain(|_, v| { + v.retain(|h| h.handle.is_active() && h.seed.strong_count() > 0); + !v.is_empty() + }); + !v.is_empty() + }); + }) + } + + pub(super) fn add_get_service_interface( + &self, + package_id: PackageId, + service_interface_id: ServiceInterfaceId, + handler: CallbackHandler, + ) { + self.mutate(|this| { + this.get_service_interface + .entry((package_id, service_interface_id)) + .or_default() + .push(handler); + }) + } + + #[must_use] + pub fn get_service_interface( + &self, + id: &(PackageId, ServiceInterfaceId), + ) -> Option { + self.mutate(|this| { + Some(CallbackHandlers( + this.get_service_interface.remove(id).unwrap_or_default(), + )) + .filter(|cb| !cb.0.is_empty()) + }) + } + + pub(super) fn add_list_service_interfaces( + &self, + package_id: PackageId, + handler: CallbackHandler, + ) { + self.mutate(|this| { + this.list_service_interfaces + .entry(package_id) + .or_default() + 
.push(handler); + }) + } + + #[must_use] + pub fn list_service_interfaces(&self, id: &PackageId) -> Option { + self.mutate(|this| { + Some(CallbackHandlers( + this.list_service_interfaces.remove(id).unwrap_or_default(), + )) + .filter(|cb| !cb.0.is_empty()) + }) + } + + pub(super) fn add_get_system_smtp(&self, handler: CallbackHandler) { + self.mutate(|this| { + this.get_system_smtp.push(handler); + }) + } + + #[must_use] + pub fn get_system_smtp(&self) -> Option { + self.mutate(|this| { + Some(CallbackHandlers(std::mem::take(&mut this.get_system_smtp))) + .filter(|cb| !cb.0.is_empty()) + }) + } + + pub(super) fn add_get_host_info( + &self, + package_id: PackageId, + host_id: HostId, + handler: CallbackHandler, + ) { + self.mutate(|this| { + this.get_host_info + .entry((package_id, host_id)) + .or_default() + .push(handler); + }) + } + + #[must_use] + pub fn get_host_info(&self, id: &(PackageId, HostId)) -> Option { + self.mutate(|this| { + Some(CallbackHandlers( + this.get_host_info.remove(id).unwrap_or_default(), + )) + .filter(|cb| !cb.0.is_empty()) + }) + } + + pub(super) fn add_get_ssl_certificate( + &self, + ctx: EffectContext, + hostnames: BTreeSet, + cert: FullchainCertData, + algorithm: Algorithm, + handler: CallbackHandler, + ) { + self.mutate(|this| { + this.get_ssl_certificate + .entry((hostnames.clone(), cert.clone(), algorithm)) + .or_insert_with(|| { + ( + tokio::spawn(async move { + if let Err(e) = async { + loop { + match cert + .expiration() + .ok() + .and_then(|e| e.duration_since(SystemTime::now()).ok()) + { + Some(d) => { + tokio::time::sleep(min(Duration::from_secs(86400), d)) + .await + } + _ => break, + } + } + let Ok(ctx) = ctx.deref() else { + return Ok(()); + }; + + if let Some((_, callbacks)) = + ctx.seed.ctx.callbacks.mutate(|this| { + this.get_ssl_certificate + .remove(&(hostnames, cert, algorithm)) + }) + { + CallbackHandlers(callbacks).call(vector![]).await?; + } + Ok::<_, Error>(()) + } + .await + { + tracing::error!( + "Error in 
callback handler for getSslCertificate: {e}" + ); + tracing::debug!("{e:?}"); + } + }) + .into(), + Vec::new(), + ) + }) + .1 + .push(handler); + }) + } + + pub(super) fn add_get_store( + &self, + package_id: PackageId, + path: JsonPointer, + handler: CallbackHandler, + ) { + self.mutate(|this| { + this.get_store + .entry(package_id) + .or_default() + .entry(path) + .or_default() + .push(handler) + }) + } + + #[must_use] + pub fn get_store( + &self, + package_id: &PackageId, + path: &JsonPointer, + ) -> Option { + self.mutate(|this| { + if let Some(watched) = this.get_store.get_mut(package_id) { + let mut res = Vec::new(); + watched.retain(|ptr, cbs| { + if ptr.starts_with(path) || path.starts_with(ptr) { + res.append(cbs); + false + } else { + true + } + }); + Some(CallbackHandlers(res)) + } else { + None + } + .filter(|cb| !cb.0.is_empty()) + }) + } +} + +pub struct CallbackHandler { + handle: CallbackHandle, + seed: Weak, +} +impl CallbackHandler { + pub fn new(service: &Service, handle: CallbackHandle) -> Self { + Self { + handle, + seed: Arc::downgrade(&service.seed), + } + } + pub async fn call(mut self, args: Vector) -> Result<(), Error> { + if let Some(seed) = self.seed.upgrade() { + seed.persistent_container + .callback(self.handle.take(), args) + .await?; + } + Ok(()) + } +} +impl Drop for CallbackHandler { + fn drop(&mut self) { + if self.handle.is_active() { + warn!("Callback handler dropped while still active!"); + } + } +} + +pub struct CallbackHandlers(Vec); +impl CallbackHandlers { + pub async fn call(self, args: Vector) -> Result<(), Error> { + let mut err = ErrorCollection::new(); + for res in join_all(self.0.into_iter().map(|cb| cb.call(args.clone()))).await { + err.handle(res); + } + err.into_result() + } +} + +pub(super) fn clear_callbacks(context: EffectContext) -> Result<(), Error> { + let context = context.deref()?; + context + .seed + .persistent_container + .state + .send_if_modified(|s| !std::mem::take(&mut s.callbacks).is_empty()); + 
context.seed.ctx.callbacks.gc(); + Ok(()) +} diff --git a/core/startos/src/service/effects/config.rs b/core/startos/src/service/effects/config.rs new file mode 100644 index 000000000..647d3e272 --- /dev/null +++ b/core/startos/src/service/effects/config.rs @@ -0,0 +1,53 @@ +use models::PackageId; + +use crate::service::effects::prelude::*; + +#[derive(Debug, Clone, Serialize, Deserialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetConfiguredParams { + #[ts(optional)] + package_id: Option, +} +pub async fn get_configured(context: EffectContext) -> Result { + let context = context.deref()?; + let peeked = context.seed.ctx.db.peek().await; + let package_id = &context.seed.id; + peeked + .as_public() + .as_package_data() + .as_idx(package_id) + .or_not_found(package_id)? + .as_status() + .as_configured() + .de() +} + +#[derive(Debug, Clone, Serialize, Deserialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct SetConfigured { + configured: bool, +} +pub async fn set_configured( + context: EffectContext, + SetConfigured { configured }: SetConfigured, +) -> Result<(), Error> { + let context = context.deref()?; + let package_id = &context.seed.id; + context + .seed + .ctx + .db + .mutate(|db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(package_id) + .or_not_found(package_id)? 
+ .as_status_mut() + .as_configured_mut() + .ser(&configured) + }) + .await?; + Ok(()) +} diff --git a/core/startos/src/service/effects/context.rs b/core/startos/src/service/effects/context.rs new file mode 100644 index 000000000..b97499332 --- /dev/null +++ b/core/startos/src/service/effects/context.rs @@ -0,0 +1,27 @@ +use std::sync::{Arc, Weak}; + +use rpc_toolkit::Context; + +use crate::prelude::*; +use crate::service::Service; + +#[derive(Clone)] +pub(in crate::service) struct EffectContext(Weak); +impl EffectContext { + pub fn new(service: Weak) -> Self { + Self(service) + } +} +impl Context for EffectContext {} +impl EffectContext { + pub(super) fn deref(&self) -> Result, Error> { + if let Some(seed) = Weak::upgrade(&self.0) { + Ok(seed) + } else { + Err(Error::new( + eyre!("Service has already been destroyed"), + ErrorKind::InvalidRequest, + )) + } + } +} diff --git a/core/startos/src/service/effects/control.rs b/core/startos/src/service/effects/control.rs new file mode 100644 index 000000000..6b3c6f8a0 --- /dev/null +++ b/core/startos/src/service/effects/control.rs @@ -0,0 +1,66 @@ +use std::str::FromStr; + +use clap::builder::ValueParserFactory; + +use crate::service::effects::prelude::*; +use crate::util::clap::FromStrParser; + +pub async fn restart( + context: EffectContext, + ProcedureId { procedure_id }: ProcedureId, +) -> Result<(), Error> { + let context = context.deref()?; + context.restart(procedure_id).await?; + Ok(()) +} + +pub async fn shutdown( + context: EffectContext, + ProcedureId { procedure_id }: ProcedureId, +) -> Result<(), Error> { + let context = context.deref()?; + context.stop(procedure_id).await?; + Ok(()) +} + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub enum SetMainStatusStatus { + Running, + Stopped, +} +impl FromStr for SetMainStatusStatus { + type Err = color_eyre::eyre::Report; + fn from_str(s: &str) -> Result { + match s { + "running" => Ok(Self::Running), + 
"stopped" => Ok(Self::Stopped), + _ => Err(eyre!("unknown status {s}")), + } + } +} +impl ValueParserFactory for SetMainStatusStatus { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} + +#[derive(Debug, Clone, Serialize, Deserialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct SetMainStatus { + status: SetMainStatusStatus, +} +pub async fn set_main_status( + context: EffectContext, + SetMainStatus { status }: SetMainStatus, +) -> Result<(), Error> { + let context = context.deref()?; + match status { + SetMainStatusStatus::Running => context.seed.started(), + SetMainStatusStatus::Stopped => context.seed.stopped(), + } + Ok(()) +} diff --git a/core/startos/src/service/effects/dependency.rs b/core/startos/src/service/effects/dependency.rs new file mode 100644 index 000000000..ad5ec2e9b --- /dev/null +++ b/core/startos/src/service/effects/dependency.rs @@ -0,0 +1,409 @@ +use std::collections::{BTreeMap, BTreeSet}; +use std::path::PathBuf; +use std::str::FromStr; + +use clap::builder::ValueParserFactory; +use exver::VersionRange; +use itertools::Itertools; +use models::{HealthCheckId, PackageId, VolumeId}; +use patch_db::json_ptr::JsonPointer; +use tokio::process::Command; + +use crate::db::model::package::{ + CurrentDependencies, CurrentDependencyInfo, CurrentDependencyKind, ManifestPreference, +}; +use crate::disk::mount::filesystem::bind::Bind; +use crate::disk::mount::filesystem::idmapped::IdMapped; +use crate::disk::mount::filesystem::{FileSystem, MountType}; +use crate::rpc_continuations::Guid; +use crate::service::effects::prelude::*; +use crate::status::health_check::HealthCheckResult; +use crate::util::clap::FromStrParser; +use crate::util::Invoke; +use crate::volume::data_dir; + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct MountTarget { + package_id: PackageId, + volume_id: VolumeId, + subpath: Option, + 
readonly: bool, +} +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct MountParams { + location: PathBuf, + target: MountTarget, +} +pub async fn mount( + context: EffectContext, + MountParams { + location, + target: + MountTarget { + package_id, + volume_id, + subpath, + readonly, + }, + }: MountParams, +) -> Result<(), Error> { + let context = context.deref()?; + let subpath = subpath.unwrap_or_default(); + let subpath = subpath.strip_prefix("/").unwrap_or(&subpath); + let source = data_dir(&context.seed.ctx.datadir, &package_id, &volume_id).join(subpath); + if tokio::fs::metadata(&source).await.is_err() { + tokio::fs::create_dir_all(&source).await?; + } + let location = location.strip_prefix("/").unwrap_or(&location); + let mountpoint = context + .seed + .persistent_container + .lxc_container + .get() + .or_not_found("lxc container")? + .rootfs_dir() + .join(location); + tokio::fs::create_dir_all(&mountpoint).await?; + Command::new("chown") + .arg("100000:100000") + .arg(&mountpoint) + .invoke(crate::ErrorKind::Filesystem) + .await?; + IdMapped::new(Bind::new(source), 0, 100000, 65536) + .mount( + mountpoint, + if readonly { + MountType::ReadOnly + } else { + MountType::ReadWrite + }, + ) + .await?; + + Ok(()) +} + +pub async fn get_installed_packages(context: EffectContext) -> Result, Error> { + context + .deref()? 
+ .seed + .ctx + .db + .peek() + .await + .into_public() + .into_package_data() + .keys() +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct ExposeForDependentsParams { + #[ts(type = "string[]")] + paths: Vec, +} +pub async fn expose_for_dependents( + context: EffectContext, + ExposeForDependentsParams { paths }: ExposeForDependentsParams, +) -> Result<(), Error> { + Ok(()) +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub enum DependencyKind { + Exists, + Running, +} +#[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase", tag = "kind")] +#[serde(rename_all_fields = "camelCase")] +#[ts(export)] +pub enum DependencyRequirement { + Running { + id: PackageId, + health_checks: BTreeSet, + #[ts(type = "string")] + version_range: VersionRange, + }, + Exists { + id: PackageId, + #[ts(type = "string")] + version_range: VersionRange, + }, +} +// filebrowser:exists,bitcoind:running:foo+bar+baz +impl FromStr for DependencyRequirement { + type Err = Error; + fn from_str(s: &str) -> Result { + match s.split_once(':') { + Some((id, "e")) | Some((id, "exists")) => Ok(Self::Exists { + id: id.parse()?, + version_range: "*".parse()?, // TODO + }), + Some((id, rest)) => { + let health_checks = match rest.split_once(':') { + Some(("r", rest)) | Some(("running", rest)) => rest + .split('+') + .map(|id| id.parse().map_err(Error::from)) + .collect(), + Some((kind, _)) => Err(Error::new( + eyre!("unknown dependency kind {kind}"), + ErrorKind::InvalidRequest, + )), + None => match rest { + "r" | "running" => Ok(BTreeSet::new()), + kind => Err(Error::new( + eyre!("unknown dependency kind {kind}"), + ErrorKind::InvalidRequest, + )), + }, + }?; + Ok(Self::Running { + id: id.parse()?, + health_checks, + version_range: "*".parse()?, // TODO + }) + } + None => Ok(Self::Running { + 
id: s.parse()?, + health_checks: BTreeSet::new(), + version_range: "*".parse()?, // TODO + }), + } + } +} +impl ValueParserFactory for DependencyRequirement { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} +#[derive(Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "camelCase")] +#[ts(export)] +pub struct SetDependenciesParams { + #[serde(default)] + procedure_id: Guid, + dependencies: Vec, +} +pub async fn set_dependencies( + context: EffectContext, + SetDependenciesParams { + procedure_id, + dependencies, + }: SetDependenciesParams, +) -> Result<(), Error> { + let context = context.deref()?; + let id = &context.seed.id; + + let mut deps = BTreeMap::new(); + for dependency in dependencies { + let (dep_id, kind, version_range) = match dependency { + DependencyRequirement::Exists { id, version_range } => { + (id, CurrentDependencyKind::Exists, version_range) + } + DependencyRequirement::Running { + id, + health_checks, + version_range, + } => ( + id, + CurrentDependencyKind::Running { health_checks }, + version_range, + ), + }; + let config_satisfied = + if let Some(dep_service) = &*context.seed.ctx.services.get(&dep_id).await { + context + .dependency_config( + procedure_id.clone(), + dep_id.clone(), + dep_service.get_config(procedure_id.clone()).await?.config, + ) + .await? + .is_none() + } else { + true + }; + let info = CurrentDependencyInfo { + title: context + .seed + .persistent_container + .s9pk + .dependency_metadata(&dep_id) + .await? + .map(|m| m.title), + icon: context + .seed + .persistent_container + .s9pk + .dependency_icon_data_url(&dep_id) + .await?, + kind, + version_range, + config_satisfied, + }; + deps.insert(dep_id, info); + } + context + .seed + .ctx + .db + .mutate(|db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(id) + .or_not_found(id)? 
+ .as_current_dependencies_mut() + .ser(&CurrentDependencies(deps)) + }) + .await +} + +pub async fn get_dependencies(context: EffectContext) -> Result, Error> { + let context = context.deref()?; + let id = &context.seed.id; + let db = context.seed.ctx.db.peek().await; + let data = db + .as_public() + .as_package_data() + .as_idx(id) + .or_not_found(id)? + .as_current_dependencies() + .de()?; + + data.0 + .into_iter() + .map(|(id, current_dependency_info)| { + let CurrentDependencyInfo { + version_range, + kind, + .. + } = current_dependency_info; + Ok::<_, Error>(match kind { + CurrentDependencyKind::Exists => { + DependencyRequirement::Exists { id, version_range } + } + CurrentDependencyKind::Running { health_checks } => { + DependencyRequirement::Running { + id, + health_checks, + version_range, + } + } + }) + }) + .try_collect() +} + +#[derive(Debug, Clone, Serialize, Deserialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct CheckDependenciesParam { + #[ts(optional)] + package_ids: Option>, +} +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct CheckDependenciesResult { + package_id: PackageId, + is_installed: bool, + is_running: bool, + config_satisfied: bool, + health_checks: BTreeMap, + #[ts(type = "string | null")] + version: Option, +} +pub async fn check_dependencies( + context: EffectContext, + CheckDependenciesParam { package_ids }: CheckDependenciesParam, +) -> Result, Error> { + let context = context.deref()?; + let db = context.seed.ctx.db.peek().await; + let current_dependencies = db + .as_public() + .as_package_data() + .as_idx(&context.seed.id) + .or_not_found(&context.seed.id)? 
+ .as_current_dependencies() + .de()?; + let package_ids: Vec<_> = package_ids + .unwrap_or_else(|| current_dependencies.0.keys().cloned().collect()) + .into_iter() + .filter_map(|x| { + let info = current_dependencies.0.get(&x)?; + Some((x, info)) + }) + .collect(); + let mut results = Vec::with_capacity(package_ids.len()); + + for (package_id, dependency_info) in package_ids { + let Some(package) = db.as_public().as_package_data().as_idx(&package_id) else { + results.push(CheckDependenciesResult { + package_id, + is_installed: false, + is_running: false, + config_satisfied: false, + health_checks: Default::default(), + version: None, + }); + continue; + }; + let manifest = package.as_state_info().as_manifest(ManifestPreference::New); + let installed_version = manifest.as_version().de()?.into_version(); + let satisfies = manifest.as_satisfies().de()?; + let version = Some(installed_version.clone()); + if ![installed_version] + .into_iter() + .chain(satisfies.into_iter().map(|v| v.into_version())) + .any(|v| v.satisfies(&dependency_info.version_range)) + { + results.push(CheckDependenciesResult { + package_id, + is_installed: false, + is_running: false, + config_satisfied: false, + health_checks: Default::default(), + version, + }); + continue; + } + let is_installed = true; + let status = package.as_status().as_main().de()?; + let is_running = if is_installed { + status.running() + } else { + false + }; + let health_checks = + if let CurrentDependencyKind::Running { health_checks } = &dependency_info.kind { + status + .health() + .cloned() + .unwrap_or_default() + .into_iter() + .filter(|(id, _)| health_checks.contains(id)) + .collect() + } else { + Default::default() + }; + results.push(CheckDependenciesResult { + package_id, + is_installed, + is_running, + config_satisfied: dependency_info.config_satisfied, + health_checks, + version, + }); + } + Ok(results) +} diff --git a/core/startos/src/service/effects/health.rs b/core/startos/src/service/effects/health.rs 
new file mode 100644 index 000000000..c8ef8fc4e --- /dev/null +++ b/core/startos/src/service/effects/health.rs @@ -0,0 +1,46 @@ +use models::HealthCheckId; + +use crate::service::effects::prelude::*; +use crate::status::health_check::HealthCheckResult; +use crate::status::MainStatus; + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct SetHealth { + id: HealthCheckId, + #[serde(flatten)] + result: HealthCheckResult, +} +pub async fn set_health( + context: EffectContext, + SetHealth { id, result }: SetHealth, +) -> Result<(), Error> { + let context = context.deref()?; + + let package_id = &context.seed.id; + context + .seed + .ctx + .db + .mutate(move |db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(package_id) + .or_not_found(package_id)? + .as_status_mut() + .as_main_mut() + .mutate(|main| { + match main { + &mut MainStatus::Running { ref mut health, .. } + | &mut MainStatus::BackingUp { ref mut health, .. 
} => { + health.insert(id, result); + } + _ => (), + } + Ok(()) + }) + }) + .await?; + Ok(()) +} diff --git a/core/startos/src/service/effects/image.rs b/core/startos/src/service/effects/image.rs new file mode 100644 index 000000000..af62047ed --- /dev/null +++ b/core/startos/src/service/effects/image.rs @@ -0,0 +1,164 @@ +use std::ffi::OsString; +use std::os::unix::process::CommandExt; +use std::path::{Path, PathBuf}; + +use models::ImageId; +use rpc_toolkit::Context; +use tokio::process::Command; + +use crate::disk::mount::filesystem::overlayfs::OverlayGuard; +use crate::rpc_continuations::Guid; +use crate::service::effects::prelude::*; +use crate::util::Invoke; + +#[derive(Debug, Clone, Serialize, Deserialize, Parser)] +pub struct ChrootParams { + #[arg(short = 'e', long = "env")] + env: Option, + #[arg(short = 'w', long = "workdir")] + workdir: Option, + #[arg(short = 'u', long = "user")] + user: Option, + path: PathBuf, + command: OsString, + args: Vec, +} +pub fn chroot( + _: C, + ChrootParams { + env, + workdir, + user, + path, + command, + args, + }: ChrootParams, +) -> Result<(), Error> { + let mut cmd = std::process::Command::new(command); + if let Some(env) = env { + for (k, v) in std::fs::read_to_string(env)? + .lines() + .map(|l| l.trim()) + .filter_map(|l| l.split_once("=")) + { + cmd.env(k, v); + } + } + nix::unistd::setsid().ok(); // https://stackoverflow.com/questions/25701333/os-setsid-operation-not-permitted + std::os::unix::fs::chroot(path)?; + if let Some(uid) = user.as_deref().and_then(|u| u.parse::().ok()) { + cmd.uid(uid); + } else if let Some(user) = user { + let (uid, gid) = std::fs::read_to_string("/etc/passwd")? + .lines() + .find_map(|l| { + let mut split = l.trim().split(":"); + if user != split.next()? 
{ + return None; + } + split.next(); // throw away x + Some((split.next()?.parse().ok()?, split.next()?.parse().ok()?)) + // uid gid + }) + .or_not_found(lazy_format!("{user} in /etc/passwd"))?; + cmd.uid(uid); + cmd.gid(gid); + }; + if let Some(workdir) = workdir { + cmd.current_dir(workdir); + } + cmd.args(args); + Err(cmd.exec().into()) +} + +#[derive(Debug, Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct DestroyOverlayedImageParams { + guid: Guid, +} +#[instrument(skip_all)] +pub async fn destroy_overlayed_image( + context: EffectContext, + DestroyOverlayedImageParams { guid }: DestroyOverlayedImageParams, +) -> Result<(), Error> { + let context = context.deref()?; + if let Some(overlay) = context + .seed + .persistent_container + .overlays + .lock() + .await + .remove(&guid) + { + overlay.unmount(true).await?; + } else { + tracing::warn!("Could not find a guard to remove on the destroy overlayed image; assumming that it already is removed and will be skipping"); + } + Ok(()) +} + +#[derive(Debug, Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct CreateOverlayedImageParams { + image_id: ImageId, +} +#[instrument(skip_all)] +pub async fn create_overlayed_image( + context: EffectContext, + CreateOverlayedImageParams { image_id }: CreateOverlayedImageParams, +) -> Result<(PathBuf, Guid), Error> { + let context = context.deref()?; + if let Some(image) = context + .seed + .persistent_container + .images + .get(&image_id) + .cloned() + { + let guid = Guid::new(); + let rootfs_dir = context + .seed + .persistent_container + .lxc_container + .get() + .ok_or_else(|| { + Error::new( + eyre!("PersistentContainer has been destroyed"), + ErrorKind::Incoherent, + ) + })? 
+ .rootfs_dir(); + let mountpoint = rootfs_dir + .join("media/startos/overlays") + .join(guid.as_ref()); + tokio::fs::create_dir_all(&mountpoint).await?; + let container_mountpoint = Path::new("/").join( + mountpoint + .strip_prefix(rootfs_dir) + .with_kind(ErrorKind::Incoherent)?, + ); + tracing::info!("Mounting overlay {guid} for {image_id}"); + let guard = OverlayGuard::mount(image, &mountpoint).await?; + Command::new("chown") + .arg("100000:100000") + .arg(&mountpoint) + .invoke(ErrorKind::Filesystem) + .await?; + tracing::info!("Mounted overlay {guid} for {image_id}"); + context + .seed + .persistent_container + .overlays + .lock() + .await + .insert(guid.clone(), guard); + Ok((container_mountpoint, guid)) + } else { + Err(Error::new( + eyre!("image {image_id} not found in s9pk"), + ErrorKind::NotFound, + )) + } +} diff --git a/core/startos/src/service/effects/mod.rs b/core/startos/src/service/effects/mod.rs new file mode 100644 index 000000000..91a12a4d1 --- /dev/null +++ b/core/startos/src/service/effects/mod.rs @@ -0,0 +1,174 @@ +use rpc_toolkit::{from_fn, from_fn_async, Context, HandlerExt, ParentHandler}; + +use crate::echo; +use crate::prelude::*; +use crate::service::cli::ContainerCliContext; +use crate::service::effects::context::EffectContext; + +mod action; +pub mod callbacks; +mod config; +pub mod context; +mod control; +mod dependency; +mod health; +mod image; +mod net; +mod prelude; +mod store; +mod system; + +pub fn handler() -> ParentHandler { + ParentHandler::new() + .subcommand("gitInfo", from_fn(|_: C| crate::version::git_info())) + .subcommand( + "echo", + from_fn(echo::).with_call_remote::(), + ) + // action + .subcommand( + "executeAction", + from_fn_async(action::execute_action).no_cli(), + ) + .subcommand( + "exportAction", + from_fn_async(action::export_action).no_cli(), + ) + .subcommand( + "clearActions", + from_fn_async(action::clear_actions).no_cli(), + ) + // callbacks + .subcommand( + "clearCallbacks", + 
from_fn(callbacks::clear_callbacks).no_cli(), + ) + // config + .subcommand( + "getConfigured", + from_fn_async(config::get_configured).no_cli(), + ) + .subcommand( + "setConfigured", + from_fn_async(config::set_configured) + .no_display() + .with_call_remote::(), + ) + // control + .subcommand( + "restart", + from_fn_async(control::restart) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "shutdown", + from_fn_async(control::shutdown) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "setMainStatus", + from_fn_async(control::set_main_status) + .no_display() + .with_call_remote::(), + ) + // dependency + .subcommand( + "setDependencies", + from_fn_async(dependency::set_dependencies) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "getDependencies", + from_fn_async(dependency::get_dependencies) + .no_display() + .with_call_remote::(), + ) + .subcommand( + "checkDependencies", + from_fn_async(dependency::check_dependencies) + .no_display() + .with_call_remote::(), + ) + .subcommand("mount", from_fn_async(dependency::mount).no_cli()) + .subcommand( + "getInstalledPackages", + from_fn_async(dependency::get_installed_packages).no_cli(), + ) + .subcommand( + "exposeForDependents", + from_fn_async(dependency::expose_for_dependents).no_cli(), + ) + // health + .subcommand("setHealth", from_fn_async(health::set_health).no_cli()) + // image + .subcommand( + "chroot", + from_fn(image::chroot::).no_display(), + ) + .subcommand( + "createOverlayedImage", + from_fn_async(image::create_overlayed_image) + .with_custom_display_fn(|_, (path, _)| Ok(println!("{}", path.display()))) + .with_call_remote::(), + ) + .subcommand( + "destroyOverlayedImage", + from_fn_async(image::destroy_overlayed_image).no_cli(), + ) + // net + .subcommand("bind", from_fn_async(net::bind::bind).no_cli()) + .subcommand( + "getServicePortForward", + from_fn_async(net::bind::get_service_port_forward).no_cli(), + ) + .subcommand( + "clearBindings", + 
from_fn_async(net::bind::clear_bindings).no_cli(), + ) + .subcommand( + "getHostInfo", + from_fn_async(net::host::get_host_info).no_cli(), + ) + .subcommand( + "getPrimaryUrl", + from_fn_async(net::host::get_primary_url).no_cli(), + ) + .subcommand( + "getContainerIp", + from_fn_async(net::info::get_container_ip).no_cli(), + ) + .subcommand( + "exportServiceInterface", + from_fn_async(net::interface::export_service_interface).no_cli(), + ) + .subcommand( + "getServiceInterface", + from_fn_async(net::interface::get_service_interface).no_cli(), + ) + .subcommand( + "listServiceInterfaces", + from_fn_async(net::interface::list_service_interfaces).no_cli(), + ) + .subcommand( + "clearServiceInterfaces", + from_fn_async(net::interface::clear_service_interfaces).no_cli(), + ) + .subcommand( + "getSslCertificate", + from_fn_async(net::ssl::get_ssl_certificate).no_cli(), + ) + .subcommand("getSslKey", from_fn_async(net::ssl::get_ssl_key).no_cli()) + // store + .subcommand("getStore", from_fn_async(store::get_store).no_cli()) + .subcommand("setStore", from_fn_async(store::set_store).no_cli()) + // system + .subcommand( + "getSystemSmtp", + from_fn_async(system::get_system_smtp).no_cli(), + ) + + // TODO Callbacks +} diff --git a/core/startos/src/service/effects/net/bind.rs b/core/startos/src/service/effects/net/bind.rs new file mode 100644 index 000000000..ba273323a --- /dev/null +++ b/core/startos/src/service/effects/net/bind.rs @@ -0,0 +1,56 @@ +use models::{HostId, PackageId}; + +use crate::net::host::binding::{BindOptions, LanInfo}; +use crate::net::host::HostKind; +use crate::service::effects::prelude::*; + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct BindParams { + kind: HostKind, + id: HostId, + internal_port: u16, + #[serde(flatten)] + options: BindOptions, +} +pub async fn bind( + context: EffectContext, + BindParams { + kind, + id, + internal_port, + options, + }: BindParams, +) -> Result<(), 
Error> { + let context = context.deref()?; + let mut svc = context.seed.persistent_container.net_service.lock().await; + svc.bind(kind, id, internal_port, options).await +} + +pub async fn clear_bindings(context: EffectContext) -> Result<(), Error> { + let context = context.deref()?; + let mut svc = context.seed.persistent_container.net_service.lock().await; + svc.clear_bindings().await?; + Ok(()) +} + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct GetServicePortForwardParams { + #[ts(optional)] + package_id: Option, + host_id: HostId, + internal_port: u32, +} +pub async fn get_service_port_forward( + context: EffectContext, + data: GetServicePortForwardParams, +) -> Result { + let internal_port = data.internal_port as u16; + + let context = context.deref()?; + let net_service = context.seed.persistent_container.net_service.lock().await; + net_service.get_lan_port(data.host_id, internal_port) +} diff --git a/core/startos/src/service/effects/net/host.rs b/core/startos/src/service/effects/net/host.rs new file mode 100644 index 000000000..d320e7fe9 --- /dev/null +++ b/core/startos/src/service/effects/net/host.rs @@ -0,0 +1,73 @@ +use models::{HostId, PackageId}; + +use crate::net::host::address::HostAddress; +use crate::net::host::Host; +use crate::service::effects::callbacks::CallbackHandler; +use crate::service::effects::prelude::*; +use crate::service::rpc::CallbackId; + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct GetPrimaryUrlParams { + #[ts(optional)] + package_id: Option, + host_id: HostId, + #[ts(optional)] + callback: Option, +} +pub async fn get_primary_url( + context: EffectContext, + GetPrimaryUrlParams { + package_id, + host_id, + callback, + }: GetPrimaryUrlParams, +) -> Result, Error> { + let context = context.deref()?; + let package_id = package_id.unwrap_or_else(|| context.seed.id.clone()); + + Ok(None) // TODO +} 
+ +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetHostInfoParams { + host_id: HostId, + #[ts(optional)] + package_id: Option, + #[ts(optional)] + callback: Option, +} +pub async fn get_host_info( + context: EffectContext, + GetHostInfoParams { + host_id, + package_id, + callback, + }: GetHostInfoParams, +) -> Result, Error> { + let context = context.deref()?; + let db = context.seed.ctx.db.peek().await; + let package_id = package_id.unwrap_or_else(|| context.seed.id.clone()); + + let res = db + .as_public() + .as_package_data() + .as_idx(&package_id) + .and_then(|m| m.as_hosts().as_idx(&host_id)) + .map(|m| m.de()) + .transpose()?; + + if let Some(callback) = callback { + let callback = callback.register(&context.seed.persistent_container); + context.seed.ctx.callbacks.add_get_host_info( + package_id, + host_id, + CallbackHandler::new(&context, callback), + ); + } + + Ok(res) +} diff --git a/core/startos/src/service/effects/net/info.rs b/core/startos/src/service/effects/net/info.rs new file mode 100644 index 000000000..c33a1a81e --- /dev/null +++ b/core/startos/src/service/effects/net/info.rs @@ -0,0 +1,9 @@ +use std::net::Ipv4Addr; + +use crate::service::effects::prelude::*; + +pub async fn get_container_ip(context: EffectContext) -> Result { + let context = context.deref()?; + let net_service = context.seed.persistent_container.net_service.lock().await; + Ok(net_service.get_ip()) +} diff --git a/core/startos/src/service/effects/net/interface.rs b/core/startos/src/service/effects/net/interface.rs new file mode 100644 index 000000000..e636e9b57 --- /dev/null +++ b/core/startos/src/service/effects/net/interface.rs @@ -0,0 +1,188 @@ +use std::collections::BTreeMap; + +use imbl::vector; +use models::{PackageId, ServiceInterfaceId}; + +use crate::net::service_interface::{AddressInfo, ServiceInterface, ServiceInterfaceType}; +use crate::service::effects::callbacks::CallbackHandler; +use 
crate::service::effects::prelude::*; +use crate::service::rpc::CallbackId; + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct ExportServiceInterfaceParams { + id: ServiceInterfaceId, + name: String, + description: String, + has_primary: bool, + disabled: bool, + masked: bool, + address_info: AddressInfo, + r#type: ServiceInterfaceType, +} +pub async fn export_service_interface( + context: EffectContext, + ExportServiceInterfaceParams { + id, + name, + description, + has_primary, + disabled, + masked, + address_info, + r#type, + }: ExportServiceInterfaceParams, +) -> Result<(), Error> { + let context = context.deref()?; + let package_id = context.seed.id.clone(); + + let service_interface = ServiceInterface { + id: id.clone(), + name, + description, + has_primary, + disabled, + masked, + address_info, + interface_type: r#type, + }; + + context + .seed + .ctx + .db + .mutate(|db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(&package_id) + .or_not_found(&package_id)? 
+ .as_service_interfaces_mut() + .insert(&id, &service_interface)?; + Ok(()) + }) + .await?; + if let Some(callbacks) = context + .seed + .ctx + .callbacks + .get_service_interface(&(package_id.clone(), id)) + { + callbacks.call(vector![]).await?; + } + if let Some(callbacks) = context + .seed + .ctx + .callbacks + .list_service_interfaces(&package_id) + { + callbacks.call(vector![]).await?; + } + + Ok(()) +} + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetServiceInterfaceParams { + #[ts(optional)] + package_id: Option, + service_interface_id: ServiceInterfaceId, + #[ts(optional)] + callback: Option, +} +pub async fn get_service_interface( + context: EffectContext, + GetServiceInterfaceParams { + package_id, + service_interface_id, + callback, + }: GetServiceInterfaceParams, +) -> Result, Error> { + let context = context.deref()?; + let package_id = package_id.unwrap_or_else(|| context.seed.id.clone()); + let db = context.seed.ctx.db.peek().await; + + let interface = db + .as_public() + .as_package_data() + .as_idx(&package_id) + .and_then(|m| m.as_service_interfaces().as_idx(&service_interface_id)) + .map(|m| m.de()) + .transpose()?; + + if let Some(callback) = callback { + let callback = callback.register(&context.seed.persistent_container); + context.seed.ctx.callbacks.add_get_service_interface( + package_id, + service_interface_id, + CallbackHandler::new(&context, callback), + ); + } + + Ok(interface) +} + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct ListServiceInterfacesParams { + #[ts(optional)] + package_id: Option, + #[ts(optional)] + callback: Option, +} +pub async fn list_service_interfaces( + context: EffectContext, + ListServiceInterfacesParams { + package_id, + callback, + }: ListServiceInterfacesParams, +) -> Result, Error> { + let context = context.deref()?; + let package_id = 
package_id.unwrap_or_else(|| context.seed.id.clone()); + + let res = context + .seed + .ctx + .db + .peek() + .await + .into_public() + .into_package_data() + .into_idx(&package_id) + .map(|m| m.into_service_interfaces().de()) + .transpose()? + .unwrap_or_default(); + + if let Some(callback) = callback { + let callback = callback.register(&context.seed.persistent_container); + context + .seed + .ctx + .callbacks + .add_list_service_interfaces(package_id, CallbackHandler::new(&context, callback)); + } + + Ok(res) +} + +pub async fn clear_service_interfaces(context: EffectContext) -> Result<(), Error> { + let context = context.deref()?; + let package_id = context.seed.id.clone(); + + context + .seed + .ctx + .db + .mutate(|db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(&package_id) + .or_not_found(&package_id)? + .as_service_interfaces_mut() + .ser(&Default::default()) + }) + .await +} diff --git a/core/startos/src/service/effects/net/mod.rs b/core/startos/src/service/effects/net/mod.rs new file mode 100644 index 000000000..cf13451a6 --- /dev/null +++ b/core/startos/src/service/effects/net/mod.rs @@ -0,0 +1,5 @@ +pub mod bind; +pub mod host; +pub mod info; +pub mod interface; +pub mod ssl; diff --git a/core/startos/src/service/effects/net/ssl.rs b/core/startos/src/service/effects/net/ssl.rs new file mode 100644 index 000000000..d37a2d241 --- /dev/null +++ b/core/startos/src/service/effects/net/ssl.rs @@ -0,0 +1,169 @@ +use std::collections::BTreeSet; + +use imbl_value::InternedString; +use itertools::Itertools; +use openssl::pkey::{PKey, Private}; + +use crate::service::effects::callbacks::CallbackHandler; +use crate::service::effects::prelude::*; +use crate::service::rpc::CallbackId; +use crate::util::serde::Pem; + +#[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize, TS, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub enum Algorithm { + Ecdsa, + Ed25519, +} + +#[derive(Debug, Clone, serde::Serialize, 
serde::Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetSslCertificateParams { + #[ts(type = "string[]")] + hostnames: BTreeSet, + #[ts(optional)] + algorithm: Option, //"ecdsa" | "ed25519" + #[ts(optional)] + callback: Option, +} +pub async fn get_ssl_certificate( + ctx: EffectContext, + GetSslCertificateParams { + hostnames, + algorithm, + callback, + }: GetSslCertificateParams, +) -> Result, Error> { + let context = ctx.deref()?; + let algorithm = algorithm.unwrap_or(Algorithm::Ecdsa); + + let cert = context + .seed + .ctx + .db + .mutate(|db| { + let errfn = |h: &str| Error::new(eyre!("unknown hostname: {h}"), ErrorKind::NotFound); + let entries = db.as_public().as_package_data().as_entries()?; + let packages = entries.iter().map(|(k, _)| k).collect::>(); + let allowed_hostnames = entries + .iter() + .map(|(_, m)| m.as_hosts().as_entries()) + .flatten_ok() + .map_ok(|(_, m)| m.as_addresses().de()) + .map(|a| a.and_then(|a| a)) + .flatten_ok() + .map_ok(|a| InternedString::from_display(&a)) + .try_collect::<_, BTreeSet<_>, _>()?; + for hostname in &hostnames { + if let Some(internal) = hostname + .strip_suffix(".embassy") + .or_else(|| hostname.strip_suffix(".startos")) + { + if !packages.contains(internal) { + return Err(errfn(&*hostname)); + } + } else { + if !allowed_hostnames.contains(hostname) { + return Err(errfn(&*hostname)); + } + } + } + db.as_private_mut() + .as_key_store_mut() + .as_local_certs_mut() + .cert_for(&hostnames) + }) + .await?; + let fullchain = match algorithm { + Algorithm::Ecdsa => cert.fullchain_nistp256(), + Algorithm::Ed25519 => cert.fullchain_ed25519(), + }; + + let res = fullchain + .into_iter() + .map(|c| c.to_pem()) + .map_ok(String::from_utf8) + .map(|a| Ok::<_, Error>(a??)) + .try_collect()?; + + if let Some(callback) = callback { + let callback = callback.register(&context.seed.persistent_container); + context.seed.ctx.callbacks.add_get_ssl_certificate( + ctx, + hostnames, + cert, + 
algorithm, + CallbackHandler::new(&context, callback), + ); + } + + Ok(res) +} + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetSslKeyParams { + #[ts(type = "string[]")] + hostnames: BTreeSet, + #[ts(optional)] + algorithm: Option, //"ecdsa" | "ed25519" +} +pub async fn get_ssl_key( + context: EffectContext, + GetSslKeyParams { + hostnames, + algorithm, + }: GetSslKeyParams, +) -> Result>, Error> { + let context = context.deref()?; + let package_id = &context.seed.id; + let algorithm = algorithm.unwrap_or(Algorithm::Ecdsa); + + let cert = context + .seed + .ctx + .db + .mutate(|db| { + let errfn = |h: &str| Error::new(eyre!("unknown hostname: {h}"), ErrorKind::NotFound); + let allowed_hostnames = db + .as_public() + .as_package_data() + .as_idx(package_id) + .into_iter() + .map(|m| m.as_hosts().as_entries()) + .flatten_ok() + .map_ok(|(_, m)| m.as_addresses().de()) + .map(|a| a.and_then(|a| a)) + .flatten_ok() + .map_ok(|a| InternedString::from_display(&a)) + .try_collect::<_, BTreeSet<_>, _>()?; + for hostname in &hostnames { + if let Some(internal) = hostname + .strip_suffix(".embassy") + .or_else(|| hostname.strip_suffix(".startos")) + { + if internal != &**package_id { + return Err(errfn(&*hostname)); + } + } else { + if !allowed_hostnames.contains(hostname) { + return Err(errfn(&*hostname)); + } + } + } + db.as_private_mut() + .as_key_store_mut() + .as_local_certs_mut() + .cert_for(&hostnames) + }) + .await?; + let key = match algorithm { + Algorithm::Ecdsa => cert.leaf.keys.nistp256, + Algorithm::Ed25519 => cert.leaf.keys.ed25519, + }; + + Ok(Pem(key)) +} diff --git a/core/startos/src/service/effects/prelude.rs b/core/startos/src/service/effects/prelude.rs new file mode 100644 index 000000000..2dc848c0c --- /dev/null +++ b/core/startos/src/service/effects/prelude.rs @@ -0,0 +1,16 @@ +pub use clap::Parser; +pub use serde::{Deserialize, Serialize}; +pub use ts_rs::TS; + +pub use 
crate::prelude::*; +use crate::rpc_continuations::Guid; +pub(super) use crate::service::effects::context::EffectContext; + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct ProcedureId { + #[serde(default)] + #[arg(default_value_t, long)] + pub procedure_id: Guid, +} diff --git a/core/startos/src/service/effects/store.rs b/core/startos/src/service/effects/store.rs new file mode 100644 index 000000000..ab4484ab6 --- /dev/null +++ b/core/startos/src/service/effects/store.rs @@ -0,0 +1,93 @@ +use imbl::vector; +use imbl_value::json; +use models::PackageId; +use patch_db::json_ptr::JsonPointer; + +use crate::service::effects::callbacks::CallbackHandler; +use crate::service::effects::prelude::*; +use crate::service::rpc::CallbackId; + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetStoreParams { + #[ts(optional)] + package_id: Option, + #[ts(type = "string")] + path: JsonPointer, + #[ts(optional)] + callback: Option, +} +pub async fn get_store( + context: EffectContext, + GetStoreParams { + package_id, + path, + callback, + }: GetStoreParams, +) -> Result { + let context = context.deref()?; + let peeked = context.seed.ctx.db.peek().await; + let package_id = package_id.unwrap_or(context.seed.id.clone()); + let value = peeked + .as_private() + .as_package_stores() + .as_idx(&package_id) + .or_not_found(&package_id)? + .de()?; + + if let Some(callback) = callback { + let callback = callback.register(&context.seed.persistent_container); + context.seed.ctx.callbacks.add_get_store( + package_id, + path.clone(), + CallbackHandler::new(&context, callback), + ); + } + + Ok(path + .get(&value) + .ok_or_else(|| Error::new(eyre!("Did not find value at path"), ErrorKind::NotFound))? 
+ .clone()) +} + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct SetStoreParams { + #[ts(type = "any")] + value: Value, + #[ts(type = "string")] + path: JsonPointer, +} +pub async fn set_store( + context: EffectContext, + SetStoreParams { value, path }: SetStoreParams, +) -> Result<(), Error> { + let context = context.deref()?; + let package_id = &context.seed.id; + context + .seed + .ctx + .db + .mutate(|db| { + let model = db + .as_private_mut() + .as_package_stores_mut() + .upsert(package_id, || Ok(json!({})))?; + let mut model_value = model.de()?; + if model_value.is_null() { + model_value = json!({}); + } + path.set(&mut model_value, value, true) + .with_kind(ErrorKind::ParseDbField)?; + model.ser(&model_value) + }) + .await?; + + if let Some(callbacks) = context.seed.ctx.callbacks.get_store(package_id, &path) { + callbacks.call(vector![]).await?; + } + + Ok(()) +} diff --git a/core/startos/src/service/effects/system.rs b/core/startos/src/service/effects/system.rs new file mode 100644 index 000000000..abf0a33c6 --- /dev/null +++ b/core/startos/src/service/effects/system.rs @@ -0,0 +1,39 @@ +use crate::service::effects::callbacks::CallbackHandler; +use crate::service::effects::prelude::*; +use crate::service::rpc::CallbackId; +use crate::system::SmtpValue; + +#[derive(Debug, Clone, Serialize, Deserialize, TS, Parser)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct GetSystemSmtpParams { + #[arg(skip)] + callback: Option, +} +pub async fn get_system_smtp( + context: EffectContext, + GetSystemSmtpParams { callback }: GetSystemSmtpParams, +) -> Result, Error> { + let context = context.deref()?; + let res = context + .seed + .ctx + .db + .peek() + .await + .into_public() + .into_server_info() + .into_smtp() + .de()?; + + if let Some(callback) = callback { + let callback = callback.register(&context.seed.persistent_container); + context + .seed + .ctx + .callbacks + 
.add_get_system_smtp(CallbackHandler::new(&context, callback)); + } + + Ok(res) +} diff --git a/core/startos/src/service/mod.rs b/core/startos/src/service/mod.rs index 95d507bfa..2beb5c9fa 100644 --- a/core/startos/src/service/mod.rs +++ b/core/startos/src/service/mod.rs @@ -1,44 +1,49 @@ -use std::sync::Arc; +use std::ops::Deref; +use std::sync::{Arc, Weak}; use std::time::Duration; use chrono::{DateTime, Utc}; use clap::Parser; use futures::future::BoxFuture; use imbl::OrdMap; -use models::{ActionId, HealthCheckId, PackageId, ProcedureName}; +use models::{HealthCheckId, PackageId, ProcedureName}; use persistent_container::PersistentContainer; -use rpc_toolkit::{from_fn_async, CallRemoteHandler, Empty, Handler, HandlerArgs}; +use rpc_toolkit::{from_fn_async, CallRemoteHandler, Empty, HandlerArgs, HandlerFor}; use serde::{Deserialize, Serialize}; +use service_actor::ServiceActor; use start_stop::StartStop; use tokio::sync::Notify; use ts_rs::TS; -use crate::action::ActionResult; -use crate::config::action::ConfigRes; use crate::context::{CliContext, RpcContext}; -use crate::core::rpc_continuations::RequestGuid; use crate::db::model::package::{ InstalledState, PackageDataEntry, PackageState, PackageStateMatchModelRef, UpdatingState, }; use crate::disk::mount::guard::GenericMountGuard; use crate::install::PKG_ARCHIVE_DIR; +use crate::lxc::ContainerId; use crate::prelude::*; use crate::progress::{NamedProgress, Progress}; +use crate::rpc_continuations::Guid; use crate::s9pk::S9pk; use crate::service::service_map::InstallProgressHandles; -use crate::service::transition::TransitionKind; use crate::status::health_check::HealthCheckResult; -use crate::status::MainStatus; -use crate::util::actor::{Actor, BackgroundJobs, SimpleActor}; -use crate::util::serde::Pem; +use crate::util::actor::concurrent::ConcurrentActor; +use crate::util::io::create_file; +use crate::util::serde::{NoOutput, Pem}; +use crate::util::Never; use crate::volume::data_dir; +mod action; pub mod cli; 
mod config; mod control; +mod dependencies; +pub mod effects; pub mod persistent_container; +mod properties; mod rpc; -pub mod service_effect_handler; +mod service_actor; pub mod service_map; mod start_stop; mod transition; @@ -63,13 +68,137 @@ pub enum LoadDisposition { Undo, } +pub struct ServiceRef(Arc); +impl ServiceRef { + pub fn weak(&self) -> Weak { + Arc::downgrade(&self.0) + } + pub async fn uninstall( + self, + target_version: Option, + ) -> Result<(), Error> { + self.seed + .persistent_container + .execute::( + Guid::new(), + ProcedureName::Uninit, + to_value(&target_version)?, + None, + ) // TODO timeout + .await?; + let id = self.seed.persistent_container.s9pk.as_manifest().id.clone(); + let ctx = self.seed.ctx.clone(); + self.shutdown().await?; + + if target_version.is_none() { + if let Some(pde) = ctx + .db + .mutate(|d| { + if let Some(pde) = d + .as_public_mut() + .as_package_data_mut() + .remove(&id)? + .map(|d| d.de()) + .transpose()? + { + d.as_private_mut().as_available_ports_mut().mutate(|p| { + p.free( + pde.hosts + .0 + .values() + .flat_map(|h| h.bindings.values()) + .flat_map(|b| { + b.lan + .assigned_port + .into_iter() + .chain(b.lan.assigned_ssl_port) + }), + ); + Ok(()) + })?; + Ok(Some(pde)) + } else { + Ok(None) + } + }) + .await? 
+ { + let state = pde.state_info.expect_removing()?; + for volume_id in &state.manifest.volumes { + let path = data_dir(&ctx.datadir, &state.manifest.id, volume_id); + if tokio::fs::metadata(&path).await.is_ok() { + tokio::fs::remove_dir_all(&path).await?; + } + } + let logs_dir = ctx.datadir.join("logs").join(&state.manifest.id); + if tokio::fs::metadata(&logs_dir).await.is_ok() { + tokio::fs::remove_dir_all(&logs_dir).await?; + } + let archive_path = ctx + .datadir + .join("archive") + .join("installed") + .join(&state.manifest.id); + if tokio::fs::metadata(&archive_path).await.is_ok() { + tokio::fs::remove_file(&archive_path).await?; + } + } + } + Ok(()) + } + pub async fn shutdown(self) -> Result<(), Error> { + if let Some((hdl, shutdown)) = self.seed.persistent_container.rpc_server.send_replace(None) + { + self.seed + .persistent_container + .rpc_client + .request(rpc::Exit, Empty {}) + .await?; + shutdown.shutdown(); + hdl.await.with_kind(ErrorKind::Cancelled)?; + } + let service = Arc::try_unwrap(self.0).map_err(|_| { + Error::new( + eyre!("ServiceActor held somewhere after actor shutdown"), + ErrorKind::Unknown, + ) + })?; + service + .actor + .shutdown(crate::util::actor::PendingMessageStrategy::FinishAll { timeout: None }) // TODO timeout + .await; + Arc::try_unwrap(service.seed) + .map_err(|_| { + Error::new( + eyre!("ServiceActorSeed held somewhere after actor shutdown"), + ErrorKind::Unknown, + ) + })? 
+ .persistent_container + .exit() + .await?; + Ok(()) + } +} +impl Deref for ServiceRef { + type Target = Service; + fn deref(&self) -> &Self::Target { + &*self.0 + } +} +impl From for ServiceRef { + fn from(value: Service) -> Self { + Self(Arc::new(value)) + } +} + pub struct Service { - actor: SimpleActor, + actor: ConcurrentActor, seed: Arc, } impl Service { #[instrument(skip_all)] - async fn new(ctx: RpcContext, s9pk: S9pk, start: StartStop) -> Result { + async fn new(ctx: RpcContext, s9pk: S9pk, start: StartStop) -> Result { let id = s9pk.as_manifest().id.clone(); let persistent_container = PersistentContainer::new( &ctx, s9pk, @@ -84,13 +213,17 @@ impl Service { ctx, synchronized: Arc::new(Notify::new()), }); - seed.persistent_container - .init(Arc::downgrade(&seed)) - .await?; - Ok(Self { - actor: SimpleActor::new(ServiceActor(seed.clone())), + let service: ServiceRef = Self { + actor: ConcurrentActor::new(ServiceActor(seed.clone())), seed, - }) + } + .into(); + service + .seed + .persistent_container + .init(service.weak()) + .await?; + Ok(service) } #[instrument(skip_all)] @@ -98,15 +231,14 @@ impl Service { ctx: &RpcContext, id: &PackageId, disposition: LoadDisposition, - ) -> Result, Error> { + ) -> Result, Error> { let handle_installed = { let ctx = ctx.clone(); move |s9pk: S9pk, i: Model| async move { for volume_id in &s9pk.as_manifest().volumes { - let tmp_path = - data_dir(&ctx.datadir, &s9pk.as_manifest().id.clone(), volume_id); - if tokio::fs::metadata(&tmp_path).await.is_err() { - tokio::fs::create_dir_all(&tmp_path).await?; + let path = data_dir(&ctx.datadir, &s9pk.as_manifest().id, volume_id); + if tokio::fs::metadata(&path).await.is_err() { + tokio::fs::create_dir_all(&path).await?; } } let start_stop = if i.as_status().as_main().de()?.running() { @@ -136,12 +268,13 @@ impl Service { tracing::error!("Error opening s9pk for install: {e}"); tracing::debug!("{e:?}") }) { - if let Ok(service) = Self::install(ctx.clone(), s9pk, None, None) - .await 
- .map_err(|e| { - tracing::error!("Error installing service: {e}"); - tracing::debug!("{e:?}") - }) + if let Ok(service) = + Self::install(ctx.clone(), s9pk, None, None::, None) + .await + .map_err(|e| { + tracing::error!("Error installing service: {e}"); + tracing::debug!("{e:?}") + }) { return Ok(Some(service)); } @@ -173,6 +306,7 @@ impl Service { ctx.clone(), s9pk, Some(s.as_manifest().as_version().de()?), + None::, None, ) .await @@ -190,8 +324,8 @@ impl Service { |db| { db.as_public_mut() .as_package_data_mut() - .as_idx_mut(&id) - .or_not_found(&id)? + .as_idx_mut(id) + .or_not_found(id)? .as_state_info_mut() .map_mutate(|s| { if let PackageState::Updating(UpdatingState { @@ -220,16 +354,12 @@ impl Service { tracing::debug!("{e:?}") }) { - if service - .uninstall(None) - .await - .map_err(|e| { + match ServiceRef::from(service).uninstall(None).await { + Err(e) => { tracing::error!("Error uninstalling service: {e}"); tracing::debug!("{e:?}") - }) - .is_ok() - { - return Ok(None); + } + Ok(()) => return Ok(None), } } } @@ -254,22 +384,49 @@ impl Service { pub async fn install( ctx: RpcContext, s9pk: S9pk, - src_version: Option, + mut src_version: Option, + recovery_source: Option, progress: Option, - ) -> Result { + ) -> Result { let manifest = s9pk.as_manifest().clone(); let developer_key = s9pk.as_archive().signer(); let icon = s9pk.icon_data_url().await?; let service = Self::new(ctx.clone(), s9pk, StartStop::Stop).await?; + if let Some(recovery_source) = recovery_source { + service + .actor + .send( + Guid::new(), + transition::restore::Restore { + path: recovery_source.path().to_path_buf(), + }, + ) + .await??; + recovery_source.unmount().await?; + src_version = Some( + service + .seed + .persistent_container + .s9pk + .as_manifest() + .version + .clone(), + ); + } service .seed .persistent_container - .execute(ProcedureName::Init, to_value(&src_version)?, None) // TODO timeout + .execute::( + Guid::new(), + ProcedureName::Init, + to_value(&src_version)?, 
+ None, + ) // TODO timeout .await .with_kind(ErrorKind::MigrationFailed)?; // TODO: handle cancellation if let Some(mut progress) = progress { progress.finalization_progress.complete(); - progress.progress_handle.complete(); + progress.progress.complete(); tokio::task::yield_now().await; } ctx.db @@ -279,6 +436,9 @@ impl Service { .as_package_data_mut() .as_idx_mut(&manifest.id) .or_not_found(&manifest.id)?; + if !manifest.has_config { + entry.as_status_mut().as_configured_mut().ser(&true)?; + } entry .as_state_info_mut() .ser(&PackageState::Installed(InstalledState { manifest }))?; @@ -286,107 +446,53 @@ impl Service { entry.as_icon_mut().ser(&icon)?; // TODO: marketplace url // TODO: dependency info + Ok(()) }) .await?; + Ok(service) } - pub async fn restore( - ctx: RpcContext, - s9pk: S9pk, - guard: impl GenericMountGuard, - progress: Option, - ) -> Result { - // TODO - Err(Error::new(eyre!("not yet implemented"), ErrorKind::Unknown)) - } - - pub async fn get_config(&self) -> Result { - let container = &self.seed.persistent_container; - container - .execute::( - ProcedureName::GetConfig, - Value::Null, - Some(Duration::from_secs(30)), // TODO timeout - ) - .await - .with_kind(ErrorKind::ConfigGen) - } - - // TODO DO the Action Get - - pub async fn action(&self, id: ActionId, input: Value) -> Result { - let container = &self.seed.persistent_container; - container - .execute::( - ProcedureName::RunAction(id), - input, - Some(Duration::from_secs(30)), - ) - .await - .with_kind(ErrorKind::Action) - } - pub async fn properties(&self) -> Result { - let container = &self.seed.persistent_container; - container - .execute::( - ProcedureName::Properties, - Value::Null, - Some(Duration::from_secs(30)), - ) - .await - .with_kind(ErrorKind::Unknown) - } - - pub async fn shutdown(self) -> Result<(), Error> { - self.actor - .shutdown(crate::util::actor::PendingMessageStrategy::FinishAll { timeout: None }) // TODO timeout - .await; - if let Some((hdl, shutdown)) = 
self.seed.persistent_container.rpc_server.send_replace(None) - { - self.seed - .persistent_container - .rpc_client - .request(rpc::Exit, Empty {}) - .await?; - shutdown.shutdown(); - hdl.await.with_kind(ErrorKind::Cancelled)?; - } - Arc::try_unwrap(self.seed) - .map_err(|_| { - Error::new( - eyre!("ServiceActorSeed held somewhere after actor shutdown"), - ErrorKind::Unknown, - ) - })? + #[instrument(skip_all)] + pub async fn backup(&self, guard: impl GenericMountGuard) -> Result<(), Error> { + let id = &self.seed.id; + let mut file = create_file(guard.path().join(id).with_extension("s9pk")).await?; + self.seed .persistent_container - .exit() + .s9pk + .clone() + .serialize(&mut file, true) + .await?; + drop(file); + self.actor + .send( + Guid::new(), + transition::backup::Backup { + path: guard.path().join("data"), + }, + ) + .await?? .await?; Ok(()) } - pub async fn uninstall(self, target_version: Option) -> Result<(), Error> { - self.seed + pub fn container_id(&self) -> Result { + let id = &self.seed.id; + let container_id = (*self + .seed .persistent_container - .execute(ProcedureName::Uninit, to_value(&target_version)?, None) // TODO timeout - .await?; - let id = self.seed.persistent_container.s9pk.as_manifest().id.clone(); - self.seed - .ctx - .db - .mutate(|d| d.as_public_mut().as_package_data_mut().remove(&id)) - .await?; - self.shutdown().await - } - pub async fn backup(&self, _guard: impl GenericMountGuard) -> Result { - // TODO - Err(Error::new(eyre!("not yet implemented"), ErrorKind::Unknown)) + .lxc_container + .get() + .or_not_found(format!("container for {id}"))? 
+ .guid) + .clone(); + Ok(container_id) } } #[derive(Debug, Clone)] -struct RunningStatus { +pub struct RunningStatus { health: OrdMap, started: DateTime, } @@ -423,82 +529,6 @@ impl ServiceActorSeed { }); } } -struct ServiceActor(Arc); - -impl Actor for ServiceActor { - fn init(&mut self, jobs: &mut BackgroundJobs) { - let seed = self.0.clone(); - jobs.add_job(async move { - let id = seed.id.clone(); - let mut current = seed.persistent_container.state.subscribe(); - - loop { - let kinds = dbg!(current.borrow().kinds()); - - if let Err(e) = async { - let main_status = match ( - kinds.transition_state, - kinds.desired_state, - kinds.running_status, - ) { - (Some(TransitionKind::Restarting), _, _) => MainStatus::Restarting, - (Some(TransitionKind::BackingUp), _, Some(status)) => { - MainStatus::BackingUp { - started: Some(status.started), - health: status.health.clone(), - } - } - (Some(TransitionKind::BackingUp), _, None) => MainStatus::BackingUp { - started: None, - health: OrdMap::new(), - }, - (None, StartStop::Stop, None) => MainStatus::Stopped, - (None, StartStop::Stop, Some(_)) => MainStatus::Stopping { - timeout: seed.persistent_container.stop().await?.into(), - }, - (None, StartStop::Start, Some(status)) => MainStatus::Running { - started: status.started, - health: status.health.clone(), - }, - (None, StartStop::Start, None) => { - seed.persistent_container.start().await?; - MainStatus::Starting - } - }; - seed.ctx - .db - .mutate(|d| { - if let Some(i) = d.as_public_mut().as_package_data_mut().as_idx_mut(&id) - { - i.as_status_mut().as_main_mut().ser(&main_status)?; - } - Ok(()) - }) - .await?; - - Ok::<_, Error>(()) - } - .await - { - tracing::error!("error synchronizing state of service: {e}"); - tracing::debug!("{e:?}"); - - seed.synchronized.notify_waiters(); - - tracing::error!("Retrying in {}s...", SYNC_RETRY_COOLDOWN_SECONDS); - tokio::time::sleep(Duration::from_secs(SYNC_RETRY_COOLDOWN_SECONDS)).await; - continue; - } - - 
seed.synchronized.notify_waiters(); - - tokio::select! { - _ = current.changed() => (), - } - } - }) - } -} #[derive(Deserialize, Serialize, Parser, TS)] pub struct ConnectParams { @@ -508,7 +538,7 @@ pub struct ConnectParams { pub async fn connect_rpc( ctx: RpcContext, ConnectParams { id }: ConnectParams, -) -> Result { +) -> Result { let id_ref = &id; crate::lxc::connect( &ctx, @@ -527,11 +557,25 @@ pub async fn connect_rpc( } pub async fn connect_rpc_cli( - handle_args: HandlerArgs, + HandlerArgs { + context, + parent_method, + method, + params, + inherited_params, + raw_params, + }: HandlerArgs, ) -> Result<(), Error> { - let ctx = handle_args.context.clone(); - let guid = CallRemoteHandler::::new(from_fn_async(connect_rpc)) - .handle_async(handle_args) + let ctx = context.clone(); + let guid = CallRemoteHandler::::new(from_fn_async(connect_rpc)) + .handle_async(HandlerArgs { + context, + parent_method, + method, + params: rpc_toolkit::util::Flat(params, Empty {}), + inherited_params, + raw_params, + }) .await?; crate::lxc::connect_cli(&ctx, guid).await diff --git a/core/startos/src/service/persistent_container.rs b/core/startos/src/service/persistent_container.rs index 038661ace..c81322719 100644 --- a/core/startos/src/service/persistent_container.rs +++ b/core/startos/src/service/persistent_container.rs @@ -1,37 +1,39 @@ -use std::collections::BTreeMap; +use std::collections::{BTreeMap, BTreeSet}; use std::path::Path; use std::sync::{Arc, Weak}; use std::time::Duration; use futures::future::ready; -use futures::Future; +use futures::{Future, FutureExt}; use helpers::NonDetachingJoinHandle; -use imbl_value::InternedString; -use models::{ProcedureName, VolumeId}; +use imbl::Vector; +use models::{ImageId, ProcedureName, VolumeId}; use rpc_toolkit::{Empty, Server, ShutdownHandle}; use serde::de::DeserializeOwned; -use tokio::fs::File; use tokio::process::Command; use tokio::sync::{oneshot, watch, Mutex, OnceCell}; use tracing::instrument; -use 
super::service_effect_handler::{service_effect_handler, EffectContext}; -use super::transition::{TransitionKind, TransitionState}; -use super::ServiceActorSeed; use crate::context::RpcContext; use crate::disk::mount::filesystem::bind::Bind; use crate::disk::mount::filesystem::idmapped::IdMapped; use crate::disk::mount::filesystem::loop_dev::LoopDev; use crate::disk::mount::filesystem::overlayfs::OverlayGuard; use crate::disk::mount::filesystem::{MountType, ReadOnly}; -use crate::disk::mount::guard::MountGuard; +use crate::disk::mount::guard::{GenericMountGuard, MountGuard}; use crate::lxc::{LxcConfig, LxcContainer, HOST_RPC_SERVER_SOCKET}; use crate::net::net_controller::NetService; use crate::prelude::*; +use crate::rpc_continuations::Guid; use crate::s9pk::merkle_archive::source::FileSource; use crate::s9pk::S9pk; +use crate::service::effects::context::EffectContext; +use crate::service::effects::handler; +use crate::service::rpc::{CallbackHandle, CallbackId, CallbackParams}; use crate::service::start_stop::StartStop; -use crate::service::{rpc, RunningStatus}; +use crate::service::transition::{TransitionKind, TransitionState}; +use crate::service::{rpc, RunningStatus, Service}; +use crate::util::io::create_file; use crate::util::rpc_client::UnixRpcClient; use crate::util::Invoke; use crate::volume::{asset_dir, data_dir}; @@ -39,12 +41,14 @@ use crate::ARCH; const RPC_CONNECT_TIMEOUT: Duration = Duration::from_secs(10); -struct ProcedureId(u64); - #[derive(Debug)] pub struct ServiceState { + // indicates whether the service container runtime has been initialized yet + pub(super) rt_initialized: bool, // This contains the start time and health check information for when the service is running. 
Note: Will be overwritting to the db, pub(super) running_status: Option, + // This tracks references to callbacks registered by the running service: + pub(super) callbacks: BTreeSet>, /// Setting this value causes the service actor to try to bring the service to the specified state. This is done in the background job created in ServiceActor::init pub(super) desired_state: StartStop, /// Override the current desired state for the service during a transition (this is protected by a guard that sets this value to null on drop) @@ -63,7 +67,9 @@ pub struct ServiceStateKinds { impl ServiceState { pub fn new(desired_state: StartStop) -> Self { Self { + rt_initialized: false, running_status: Default::default(), + callbacks: Default::default(), temp_desired_state: Default::default(), transition_state: Default::default(), desired_state, @@ -91,15 +97,27 @@ pub struct PersistentContainer { js_mount: MountGuard, volumes: BTreeMap, assets: BTreeMap, - pub(super) overlays: Arc>>, + pub(super) images: BTreeMap>, + pub(super) overlays: Arc>>>>, pub(super) state: Arc>, pub(super) net_service: Mutex, + destroyed: bool, } impl PersistentContainer { #[instrument(skip_all)] pub async fn new(ctx: &RpcContext, s9pk: S9pk, start: StartStop) -> Result { - let lxc_container = ctx.lxc_manager.create(LxcConfig::default()).await?; + let lxc_container = ctx + .lxc_manager + .create( + Some( + &ctx.datadir + .join("package-data/logs") + .join(&s9pk.as_manifest().id), + ), + LxcConfig::default(), + ) + .await?; let rpc_client = lxc_container.connect_rpc(Some(RPC_CONNECT_TIMEOUT)).await?; let js_mount = MountGuard::mount( &LoopDev::from( @@ -114,6 +132,7 @@ impl PersistentContainer { ReadOnly, ) .await?; + let mut volumes = BTreeMap::new(); for volume in &s9pk.as_manifest().volumes { let mountpoint = lxc_container @@ -151,44 +170,92 @@ impl PersistentContainer { .arg(&mountpoint) .invoke(crate::ErrorKind::Filesystem) .await?; + let s9pk_asset_path = 
Path::new("assets").join(asset).with_extension("squashfs"); + let sqfs = s9pk + .as_archive() + .contents() + .get_path(&s9pk_asset_path) + .and_then(|e| e.as_file()) + .or_not_found(s9pk_asset_path.display())?; assets.insert( asset.clone(), MountGuard::mount( - &Bind::new( - asset_dir( - &ctx.datadir, - &s9pk.as_manifest().id, - &s9pk.as_manifest().version, - ) - .join(asset), - ), + &IdMapped::new(LoopDev::from(&**sqfs), 0, 100000, 65536), mountpoint, MountType::ReadWrite, ) .await?, ); } + + let mut images = BTreeMap::new(); let image_path = lxc_container.rootfs_dir().join("media/startos/images"); tokio::fs::create_dir_all(&image_path).await?; - for image in &s9pk.as_manifest().images { + for (image, config) in &s9pk.as_manifest().images { + let mut arch = ARCH; + let mut sqfs_path = Path::new("images") + .join(arch) + .join(image) + .with_extension("squashfs"); + if !s9pk + .as_archive() + .contents() + .get_path(&sqfs_path) + .and_then(|e| e.as_file()) + .is_some() + { + arch = if let Some(arch) = config.emulate_missing_as.as_deref() { + arch + } else { + continue; + }; + sqfs_path = Path::new("images") + .join(arch) + .join(image) + .with_extension("squashfs"); + } + let sqfs = s9pk + .as_archive() + .contents() + .get_path(&sqfs_path) + .and_then(|e| e.as_file()) + .or_not_found(sqfs_path.display())?; + let mountpoint = image_path.join(image); + tokio::fs::create_dir_all(&mountpoint).await?; + Command::new("chown") + .arg("100000:100000") + .arg(&mountpoint) + .invoke(ErrorKind::Filesystem) + .await?; + images.insert( + image.clone(), + Arc::new( + MountGuard::mount( + &IdMapped::new(LoopDev::from(&**sqfs), 0, 100000, 65536), + &mountpoint, + ReadOnly, + ) + .await?, + ), + ); let env_filename = Path::new(image.as_ref()).with_extension("env"); if let Some(env) = s9pk .as_archive() .contents() - .get_path(Path::new("images").join(&*ARCH).join(&env_filename)) + .get_path(Path::new("images").join(arch).join(&env_filename)) .and_then(|e| e.as_file()) { - 
env.copy(&mut File::create(image_path.join(&env_filename)).await?) + env.copy(&mut create_file(image_path.join(&env_filename)).await?) .await?; } let json_filename = Path::new(image.as_ref()).with_extension("json"); if let Some(json) = s9pk .as_archive() .contents() - .get_path(Path::new("images").join(&*ARCH).join(&json_filename)) + .get_path(Path::new("images").join(arch).join(&json_filename)) .and_then(|e| e.as_file()) { - json.copy(&mut File::create(image_path.join(&json_filename)).await?) + json.copy(&mut create_file(image_path.join(&json_filename)).await?) .await?; } } @@ -205,19 +272,52 @@ impl PersistentContainer { js_mount, volumes, assets, + images, overlays: Arc::new(Mutex::new(BTreeMap::new())), state: Arc::new(watch::channel(ServiceState::new(start)).0), net_service: Mutex::new(net_service), + destroyed: false, }) } #[instrument(skip_all)] - pub async fn init(&self, seed: Weak) -> Result<(), Error> { + pub async fn mount_backup( + &self, + backup_path: impl AsRef, + mount_type: MountType, + ) -> Result { + let backup_path = backup_path.as_ref(); + let mountpoint = self + .lxc_container + .get() + .ok_or_else(|| { + Error::new( + eyre!("PersistentContainer has been destroyed"), + ErrorKind::Incoherent, + ) + })? 
+ .rootfs_dir() + .join("media/startos/backup"); + tokio::fs::create_dir_all(&mountpoint).await?; + Command::new("chown") + .arg("100000:100000") + .arg(mountpoint.as_os_str()) + .invoke(ErrorKind::Filesystem) + .await?; + tokio::fs::create_dir_all(backup_path).await?; + Command::new("chown") + .arg("100000:100000") + .arg(backup_path) + .invoke(ErrorKind::Filesystem) + .await?; + let bind = Bind::new(backup_path); + MountGuard::mount(&bind, &mountpoint, mount_type).await + } + + #[instrument(skip_all)] + pub async fn init(&self, seed: Weak) -> Result<(), Error> { let socket_server_context = EffectContext::new(seed); - let server = Server::new( - move || ready(Ok(socket_server_context.clone())), - service_effect_handler(), - ); + let server = Server::new(move || ready(Ok(socket_server_context.clone())), handler()); let path = self .lxc_container .get() @@ -231,7 +331,7 @@ impl PersistentContainer { .join(HOST_RPC_SERVER_SOCKET); let (send, recv) = oneshot::channel(); let handle = NonDetachingJoinHandle::from(tokio::spawn(async move { - let (shutdown, fut) = match async { + let chown_status = async { let res = server.run_unix(&path, |err| { tracing::error!("error on unix socket {}: {err}", path.display()) })?; @@ -241,9 +341,8 @@ impl PersistentContainer { .invoke(ErrorKind::Filesystem) .await?; Ok::<_, Error>(res) - } - .await - { + }; + let (shutdown, fut) = match chown_status.await { Ok((shutdown, fut)) => (Ok(shutdown), Some(fut)), Err(e) => (Err(e), None), }; @@ -273,19 +372,26 @@ impl PersistentContainer { self.rpc_client.request(rpc::Init, Empty {}).await?; + self.state.send_modify(|s| s.rt_initialized = true); + Ok(()) } #[instrument(skip_all)] - fn destroy(&mut self) -> impl Future> + 'static { + fn destroy(&mut self) -> Option> + 'static> { + if self.destroyed { + return None; + } let rpc_client = self.rpc_client.clone(); let rpc_server = self.rpc_server.send_replace(None); let js_mount = self.js_mount.take(); let volumes = std::mem::take(&mut 
self.volumes); let assets = std::mem::take(&mut self.assets); + let images = std::mem::take(&mut self.images); let overlays = self.overlays.clone(); let lxc_container = self.lxc_container.take(); - async move { + self.destroyed = true; + Some(async move { let mut errs = ErrorCollection::new(); if let Some((hdl, shutdown)) = rpc_server { errs.handle(rpc_client.request(rpc::Exit, Empty {}).await); @@ -301,43 +407,43 @@ impl PersistentContainer { for (_, overlay) in std::mem::take(&mut *overlays.lock().await) { errs.handle(overlay.unmount(true).await); } + for (_, images) in images { + errs.handle(images.unmount().await); + } errs.handle(js_mount.unmount(true).await); if let Some(lxc_container) = lxc_container { errs.handle(lxc_container.exit().await); } errs.into_result() - } + }) } #[instrument(skip_all)] pub async fn exit(mut self) -> Result<(), Error> { - self.destroy().await?; + if let Some(destroy) = self.destroy() { + dbg!(destroy.await)?; + } + tracing::info!("Service for {} exited", self.s9pk.as_manifest().id); Ok(()) } #[instrument(skip_all)] pub async fn start(&self) -> Result<(), Error> { - self.execute( - ProcedureName::StartMain, - Value::Null, - Some(Duration::from_secs(5)), // TODO - ) - .await?; + self.rpc_client.request(rpc::Start, Empty {}).await?; Ok(()) } #[instrument(skip_all)] - pub async fn stop(&self) -> Result { - let timeout: Option = self - .execute(ProcedureName::StopMain, Value::Null, None) - .await?; - Ok(timeout.map(|a| *a).unwrap_or(Duration::from_secs(30))) + pub async fn stop(&self) -> Result<(), Error> { + self.rpc_client.request(rpc::Stop, Empty {}).await?; + Ok(()) } #[instrument(skip_all)] pub async fn execute( &self, + id: Guid, name: ProcedureName, input: Value, timeout: Option, @@ -345,7 +451,7 @@ impl PersistentContainer { where O: DeserializeOwned, { - self._execute(name, input, timeout) + self._execute(id, name, input, timeout) .await .and_then(from_value) } @@ -353,6 +459,7 @@ impl PersistentContainer { 
#[instrument(skip_all)] pub async fn sanboxed( &self, + id: Guid, name: ProcedureName, input: Value, timeout: Option, @@ -360,21 +467,36 @@ impl PersistentContainer { where O: DeserializeOwned, { - self._sandboxed(name, input, timeout) + self._sandboxed(id, name, input, timeout) .await .and_then(from_value) } + #[instrument(skip_all)] + pub async fn callback(&self, handle: CallbackHandle, args: Vector) -> Result<(), Error> { + let mut params = None; + self.state.send_if_modified(|s| { + params = handle.params(&mut s.callbacks, args); + params.is_some() + }); + if let Some(params) = params { + self._callback(params).await?; + } + Ok(()) + } + #[instrument(skip_all)] async fn _execute( &self, + id: Guid, name: ProcedureName, input: Value, timeout: Option, ) -> Result { - let fut = self - .rpc_client - .request(rpc::Execute, rpc::ExecuteParams::new(name, input, timeout)); + let fut = self.rpc_client.request( + rpc::Execute, + rpc::ExecuteParams::new(id, name, input, timeout), + ); Ok(if let Some(timeout) = timeout { tokio::time::timeout(timeout, fut) @@ -388,13 +510,15 @@ impl PersistentContainer { #[instrument(skip_all)] async fn _sandboxed( &self, + id: Guid, name: ProcedureName, input: Value, timeout: Option, ) -> Result { - let fut = self - .rpc_client - .request(rpc::Sandbox, rpc::ExecuteParams::new(name, input, timeout)); + let fut = self.rpc_client.request( + rpc::Sandbox, + rpc::ExecuteParams::new(id, name, input, timeout), + ); Ok(if let Some(timeout) = timeout { tokio::time::timeout(timeout, fut) @@ -404,11 +528,18 @@ impl PersistentContainer { fut.await? 
}) } + + #[instrument(skip_all)] + async fn _callback(&self, params: CallbackParams) -> Result<(), Error> { + self.rpc_client.notify(rpc::Callback, params).await?; + Ok(()) + } } impl Drop for PersistentContainer { fn drop(&mut self) { - let destroy = self.destroy(); - tokio::spawn(async move { destroy.await.unwrap() }); + if let Some(destroy) = self.destroy() { + tokio::spawn(async move { destroy.await.unwrap() }); + } } } diff --git a/core/startos/src/service/properties.rs b/core/startos/src/service/properties.rs new file mode 100644 index 000000000..3f5201f1d --- /dev/null +++ b/core/startos/src/service/properties.rs @@ -0,0 +1,23 @@ +use std::time::Duration; + +use models::ProcedureName; + +use crate::prelude::*; +use crate::rpc_continuations::Guid; +use crate::service::Service; + +impl Service { + // TODO: leave here or switch to Actor Message? + pub async fn properties(&self) -> Result { + let container = &self.seed.persistent_container; + container + .execute::( + Guid::new(), + ProcedureName::Properties, + Value::Null, + Some(Duration::from_secs(30)), + ) + .await + .with_kind(ErrorKind::Unknown) + } +} diff --git a/core/startos/src/service/rpc.rs b/core/startos/src/service/rpc.rs index 65c8b98fe..25d8fb067 100644 --- a/core/startos/src/service/rpc.rs +++ b/core/startos/src/service/rpc.rs @@ -1,5 +1,8 @@ +use std::collections::BTreeSet; +use std::sync::{Arc, Weak}; use std::time::Duration; +use imbl::Vector; use imbl_value::Value; use models::ProcedureName; use rpc_toolkit::yajrc::RpcMethod; @@ -7,6 +10,9 @@ use rpc_toolkit::Empty; use ts_rs::TS; use crate::prelude::*; +use crate::rpc_continuations::Guid; +use crate::service::persistent_container::PersistentContainer; +use crate::util::Never; #[derive(Clone)] pub struct Init; @@ -26,6 +32,42 @@ impl serde::Serialize for Init { } } +#[derive(Clone)] +pub struct Start; +impl RpcMethod for Start { + type Params = Empty; + type Response = (); + fn as_str<'a>(&'a self) -> &'a str { + "start" + } +} +impl 
serde::Serialize for Start { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(self.as_str()) + } +} + +#[derive(Clone)] +pub struct Stop; +impl RpcMethod for Stop { + type Params = Empty; + type Response = (); + fn as_str<'a>(&'a self) -> &'a str { + "stop" + } +} +impl serde::Serialize for Stop { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(self.as_str()) + } +} + #[derive(Clone)] pub struct Exit; impl RpcMethod for Exit { @@ -46,14 +88,21 @@ impl serde::Serialize for Exit { #[derive(Clone, serde::Deserialize, serde::Serialize, TS)] pub struct ExecuteParams { + id: Guid, procedure: String, #[ts(type = "any")] input: Value, timeout: Option, } impl ExecuteParams { - pub fn new(procedure: ProcedureName, input: Value, timeout: Option) -> Self { + pub fn new( + id: Guid, + procedure: ProcedureName, + input: Value, + timeout: Option, + ) -> Self { Self { + id, procedure: procedure.js_function_name(), input, timeout: timeout.map(|d| d.as_millis()), @@ -96,3 +145,74 @@ impl serde::Serialize for Sandbox { serializer.serialize_str(self.as_str()) } } + +#[derive( + Clone, Copy, Debug, serde::Deserialize, serde::Serialize, TS, PartialEq, Eq, PartialOrd, Ord, +)] +#[ts(type = "number")] +pub struct CallbackId(u64); +impl CallbackId { + pub fn register(self, container: &PersistentContainer) -> CallbackHandle { + let this = Arc::new(self); + let res = Arc::downgrade(&this); + container + .state + .send_if_modified(|s| s.callbacks.insert(this)); + CallbackHandle(res) + } +} + +pub struct CallbackHandle(Weak); +impl CallbackHandle { + pub fn is_active(&self) -> bool { + self.0.strong_count() > 0 + } + pub fn params( + self, + registered: &mut BTreeSet>, + args: Vector, + ) -> Option { + if let Some(id) = self.0.upgrade() { + if let Some(strong) = registered.get(&id) { + if Arc::ptr_eq(strong, &id) { + registered.remove(&id); + return 
Some(CallbackParams::new(&*id, args)); + } + } + } + None + } + pub fn take(&mut self) -> Self { + Self(std::mem::take(&mut self.0)) + } +} + +#[derive(Clone, serde::Deserialize, serde::Serialize, TS)] +pub struct CallbackParams { + id: u64, + #[ts(type = "any[]")] + args: Vector, +} +impl CallbackParams { + fn new(id: &CallbackId, args: Vector) -> Self { + Self { id: id.0, args } + } +} + +#[derive(Clone)] +pub struct Callback; +impl RpcMethod for Callback { + type Params = CallbackParams; + type Response = Never; + fn as_str<'a>(&'a self) -> &'a str { + "callback" + } +} +impl serde::Serialize for Callback { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(self.as_str()) + } +} diff --git a/core/startos/src/service/service_actor.rs b/core/startos/src/service/service_actor.rs new file mode 100644 index 000000000..e6578264c --- /dev/null +++ b/core/startos/src/service/service_actor.rs @@ -0,0 +1,156 @@ +use std::sync::Arc; +use std::time::Duration; + +use imbl::OrdMap; + +use super::start_stop::StartStop; +use super::ServiceActorSeed; +use crate::prelude::*; +use crate::service::transition::TransitionKind; +use crate::service::SYNC_RETRY_COOLDOWN_SECONDS; +use crate::status::MainStatus; +use crate::util::actor::background::BackgroundJobQueue; +use crate::util::actor::Actor; + +#[derive(Clone)] +pub(super) struct ServiceActor(pub(super) Arc); + +enum ServiceActorLoopNext { + Wait, + DontWait, +} + +impl Actor for ServiceActor { + fn init(&mut self, jobs: &BackgroundJobQueue) { + let seed = self.0.clone(); + let mut current = seed.persistent_container.state.subscribe(); + jobs.add_job(async move { + let _ = current.wait_for(|s| s.rt_initialized).await; + + loop { + match service_actor_loop(¤t, &seed).await { + ServiceActorLoopNext::Wait => tokio::select! 
{ + _ = current.changed() => (), + }, + ServiceActorLoopNext::DontWait => (), + } + } + }) + } +} + +async fn service_actor_loop( + current: &tokio::sync::watch::Receiver, + seed: &Arc, +) -> ServiceActorLoopNext { + let id = &seed.id; + let kinds = current.borrow().kinds(); + if let Err(e) = async { + let main_status = match ( + kinds.transition_state, + kinds.desired_state, + kinds.running_status, + ) { + (Some(TransitionKind::Restarting), StartStop::Stop, Some(_)) => { + seed.persistent_container.stop().await?; + MainStatus::Restarting + } + (Some(TransitionKind::Restarting), StartStop::Start, _) => { + seed.persistent_container.start().await?; + MainStatus::Restarting + } + (Some(TransitionKind::Restarting), _, _) => MainStatus::Restarting, + (Some(TransitionKind::Restoring), _, _) => MainStatus::Restoring, + (Some(TransitionKind::BackingUp), StartStop::Stop, Some(status)) => { + seed.persistent_container.stop().await?; + MainStatus::BackingUp { + started: Some(status.started), + health: status.health.clone(), + } + } + (Some(TransitionKind::BackingUp), StartStop::Start, _) => { + seed.persistent_container.start().await?; + MainStatus::BackingUp { + started: None, + health: OrdMap::new(), + } + } + (Some(TransitionKind::BackingUp), _, _) => MainStatus::BackingUp { + started: None, + health: OrdMap::new(), + }, + (None, StartStop::Stop, None) => MainStatus::Stopped, + (None, StartStop::Stop, Some(_)) => { + let task_seed = seed.clone(); + seed.ctx + .db + .mutate(|d| { + if let Some(i) = d.as_public_mut().as_package_data_mut().as_idx_mut(&id) { + i.as_status_mut().as_main_mut().ser(&MainStatus::Stopping)?; + } + Ok(()) + }) + .await?; + task_seed.persistent_container.stop().await?; + MainStatus::Stopped + } + (None, StartStop::Start, Some(status)) => MainStatus::Running { + started: status.started, + health: status.health.clone(), + }, + (None, StartStop::Start, None) => { + seed.persistent_container.start().await?; + MainStatus::Starting + } + }; + seed.ctx + 
.db + .mutate(|d| { + if let Some(i) = d.as_public_mut().as_package_data_mut().as_idx_mut(&id) { + let previous = i.as_status().as_main().de()?; + let previous_health = previous.health(); + let previous_started = previous.started(); + let mut main_status = main_status; + match &mut main_status { + &mut MainStatus::Running { ref mut health, .. } + | &mut MainStatus::BackingUp { ref mut health, .. } => { + *health = previous_health.unwrap_or(health).clone(); + } + _ => (), + }; + match &mut main_status { + MainStatus::Running { + ref mut started, .. + } => { + *started = previous_started.unwrap_or(*started); + } + MainStatus::BackingUp { + ref mut started, .. + } => { + *started = previous_started.map(Some).unwrap_or(*started); + } + _ => (), + }; + i.as_status_mut().as_main_mut().ser(&main_status)?; + } + Ok(()) + }) + .await?; + + Ok::<_, Error>(()) + } + .await + { + tracing::error!("error synchronizing state of service: {e}"); + tracing::debug!("{e:?}"); + + seed.synchronized.notify_waiters(); + + tracing::error!("Retrying in {}s...", SYNC_RETRY_COOLDOWN_SECONDS); + tokio::time::sleep(Duration::from_secs(SYNC_RETRY_COOLDOWN_SECONDS)).await; + return ServiceActorLoopNext::DontWait; + } + seed.synchronized.notify_waiters(); + + ServiceActorLoopNext::Wait +} diff --git a/core/startos/src/service/service_effect_handler.rs b/core/startos/src/service/service_effect_handler.rs deleted file mode 100644 index 4041cbb98..000000000 --- a/core/startos/src/service/service_effect_handler.rs +++ /dev/null @@ -1,1170 +0,0 @@ -use std::collections::BTreeSet; -use std::ffi::OsString; -use std::net::Ipv4Addr; -use std::os::unix::process::CommandExt; -use std::path::{Path, PathBuf}; -use std::str::FromStr; -use std::sync::{Arc, Weak}; - -use clap::builder::ValueParserFactory; -use clap::Parser; -use emver::VersionRange; -use imbl::OrdMap; -use imbl_value::{json, InternedString}; -use models::{ActionId, HealthCheckId, HostId, ImageId, PackageId, VolumeId}; -use 
patch_db::json_ptr::JsonPointer; -use rpc_toolkit::{from_fn, from_fn_async, AnyContext, Context, Empty, HandlerExt, ParentHandler}; -use serde::{Deserialize, Serialize}; -use tokio::process::Command; -use ts_rs::TS; -use url::Url; - -use crate::db::model::package::{ - ActionMetadata, CurrentDependencies, CurrentDependencyInfo, CurrentDependencyKind, -}; -use crate::disk::mount::filesystem::idmapped::IdMapped; -use crate::disk::mount::filesystem::loop_dev::LoopDev; -use crate::disk::mount::filesystem::overlayfs::OverlayGuard; -use crate::net::host::binding::BindOptions; -use crate::net::host::HostKind; -use crate::prelude::*; -use crate::s9pk::rpc::SKIP_ENV; -use crate::service::cli::ContainerCliContext; -use crate::service::ServiceActorSeed; -use crate::status::health_check::HealthCheckResult; -use crate::status::MainStatus; -use crate::util::clap::FromStrParser; -use crate::util::{new_guid, Invoke}; -use crate::{echo, ARCH}; - -#[derive(Clone)] -pub(super) struct EffectContext(Weak); -impl EffectContext { - pub fn new(seed: Weak) -> Self { - Self(seed) - } -} -impl Context for EffectContext {} -impl EffectContext { - fn deref(&self) -> Result, Error> { - if let Some(seed) = Weak::upgrade(&self.0) { - Ok(seed) - } else { - Err(Error::new( - eyre!("Service has already been destroyed"), - ErrorKind::InvalidRequest, - )) - } - } -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -struct RpcData { - id: i64, - method: String, - params: Value, -} -pub fn service_effect_handler() -> ParentHandler { - ParentHandler::new() - .subcommand("gitInfo", from_fn(crate::version::git_info)) - .subcommand( - "echo", - from_fn(echo).with_remote_cli::(), - ) - .subcommand("chroot", from_fn(chroot).no_display()) - .subcommand("exists", from_fn_async(exists).no_cli()) - .subcommand("executeAction", from_fn_async(execute_action).no_cli()) - .subcommand("getConfigured", from_fn_async(get_configured).no_cli()) - .subcommand( - "stopped", - from_fn_async(stopped) - 
.no_display() - .with_remote_cli::(), - ) - .subcommand( - "running", - from_fn_async(running) - .no_display() - .with_remote_cli::(), - ) - .subcommand( - "restart", - from_fn_async(restart) - .no_display() - .with_remote_cli::(), - ) - .subcommand( - "shutdown", - from_fn_async(shutdown) - .no_display() - .with_remote_cli::(), - ) - .subcommand( - "setConfigured", - from_fn_async(set_configured) - .no_display() - .with_remote_cli::(), - ) - .subcommand( - "setMainStatus", - from_fn_async(set_main_status).with_remote_cli::(), - ) - .subcommand("setHealth", from_fn_async(set_health).no_cli()) - .subcommand("getStore", from_fn_async(get_store).no_cli()) - .subcommand("setStore", from_fn_async(set_store).no_cli()) - .subcommand( - "exposeForDependents", - from_fn_async(expose_for_dependents).no_cli(), - ) - .subcommand( - "createOverlayedImage", - from_fn_async(create_overlayed_image) - .with_custom_display_fn::(|_, (path, _)| { - Ok(println!("{}", path.display())) - }) - .with_remote_cli::(), - ) - .subcommand( - "destroyOverlayedImage", - from_fn_async(destroy_overlayed_image).no_cli(), - ) - .subcommand( - "getSslCertificate", - from_fn_async(get_ssl_certificate).no_cli(), - ) - .subcommand("getSslKey", from_fn_async(get_ssl_key).no_cli()) - .subcommand( - "getServiceInterface", - from_fn_async(get_service_interface).no_cli(), - ) - .subcommand("clearBindings", from_fn_async(clear_bindings).no_cli()) - .subcommand("bind", from_fn_async(bind).no_cli()) - .subcommand("getHostInfo", from_fn_async(get_host_info).no_cli()) - .subcommand( - "setDependencies", - from_fn_async(set_dependencies) - .no_display() - .with_remote_cli::(), - ) - .subcommand("getSystemSmtp", from_fn_async(get_system_smtp).no_cli()) - .subcommand("getContainerIp", from_fn_async(get_container_ip).no_cli()) - .subcommand( - "getServicePortForward", - from_fn_async(get_service_port_forward).no_cli(), - ) - .subcommand( - "clearServiceInterfaces", - from_fn_async(clear_network_interfaces).no_cli(), - 
) - .subcommand( - "exportServiceInterface", - from_fn_async(export_service_interface).no_cli(), - ) - .subcommand("getPrimaryUrl", from_fn_async(get_primary_url).no_cli()) - .subcommand( - "listServiceInterfaces", - from_fn_async(list_service_interfaces).no_cli(), - ) - .subcommand("removeAddress", from_fn_async(remove_address).no_cli()) - .subcommand("exportAction", from_fn_async(export_action).no_cli()) - .subcommand("removeAction", from_fn_async(remove_action).no_cli()) - .subcommand("reverseProxy", from_fn_async(reverse_proxy).no_cli()) - .subcommand("mount", from_fn_async(mount).no_cli()) - // TODO Callbacks -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct GetSystemSmtpParams { - callback: Callback, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct GetServicePortForwardParams { - #[ts(type = "string | null")] - package_id: Option, - internal_port: u32, -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct AddressInfo { - username: Option, - host_id: String, - bind_options: BindOptions, - suffix: String, -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -enum ServiceInterfaceType { - Ui, - P2p, - Api, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct ExportServiceInterfaceParams { - id: String, - name: String, - description: String, - has_primary: bool, - disabled: bool, - masked: bool, - address_info: AddressInfo, - r#type: ServiceInterfaceType, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct GetPrimaryUrlParams { - #[ts(type = "string | null")] - package_id: Option, - service_interface_id: 
String, - callback: Callback, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct ListServiceInterfacesParams { - #[ts(type = "string | null")] - package_id: Option, - callback: Callback, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct RemoveAddressParams { - id: String, -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct ExportActionParams { - #[ts(type = "string")] - id: ActionId, - metadata: ActionMetadata, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct RemoveActionParams { - #[ts(type = "string")] - id: ActionId, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct ReverseProxyBind { - ip: Option, - port: u32, - ssl: bool, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct ReverseProxyDestination { - ip: Option, - port: u32, - ssl: bool, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct ReverseProxyHttp { - #[ts(type = "null | {[key: string]: string}")] - headers: Option>, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct ReverseProxyParams { - bind: ReverseProxyBind, - dst: ReverseProxyDestination, - http: ReverseProxyHttp, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct MountTarget { - #[ts(type = "string")] - package_id: PackageId, - #[ts(type = "string")] - volume_id: VolumeId, - subpath: Option, - readonly: bool, -} -#[derive(Debug, Clone, serde::Serialize, 
serde::Deserialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct MountParams { - location: String, - target: MountTarget, -} -async fn get_system_smtp( - context: EffectContext, - data: GetSystemSmtpParams, -) -> Result { - todo!() -} -async fn get_container_ip(context: EffectContext, _: Empty) -> Result { - match context.0.upgrade() { - Some(c) => { - let net_service = c.persistent_container.net_service.lock().await; - Ok(net_service.get_ip()) - } - None => Err(Error::new( - eyre!("Upgrade on Weak resulted in a None variant"), - crate::ErrorKind::NotFound, - )), - } -} -async fn get_service_port_forward( - context: EffectContext, - data: GetServicePortForwardParams, -) -> Result { - todo!() -} -async fn clear_network_interfaces(context: EffectContext, _: Empty) -> Result { - todo!() -} -async fn export_service_interface( - context: EffectContext, - data: ExportServiceInterfaceParams, -) -> Result { - todo!() -} -async fn get_primary_url( - context: EffectContext, - data: GetPrimaryUrlParams, -) -> Result { - todo!() -} -async fn list_service_interfaces( - context: EffectContext, - data: ListServiceInterfacesParams, -) -> Result { - todo!() -} -async fn remove_address(context: EffectContext, data: RemoveAddressParams) -> Result { - todo!() -} -async fn export_action(context: EffectContext, data: ExportActionParams) -> Result<(), Error> { - let context = context.deref()?; - let package_id = context.id.clone(); - context - .ctx - .db - .mutate(|db| { - let model = db - .as_public_mut() - .as_package_data_mut() - .as_idx_mut(&package_id) - .or_not_found(&package_id)? 
- .as_actions_mut(); - let mut value = model.de()?; - value - .insert(data.id, data.metadata) - .map(|_| ()) - .unwrap_or_default(); - model.ser(&value) - }) - .await?; - Ok(()) -} -async fn remove_action(context: EffectContext, data: RemoveActionParams) -> Result<(), Error> { - let context = context.deref()?; - let package_id = context.id.clone(); - context - .ctx - .db - .mutate(|db| { - let model = db - .as_public_mut() - .as_package_data_mut() - .as_idx_mut(&package_id) - .or_not_found(&package_id)? - .as_actions_mut(); - let mut value = model.de()?; - value.remove(&data.id).map(|_| ()).unwrap_or_default(); - model.ser(&value) - }) - .await?; - Ok(()) -} -async fn reverse_proxy(context: EffectContext, data: ReverseProxyParams) -> Result { - todo!() -} -async fn mount(context: EffectContext, data: MountParams) -> Result { - todo!() -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[ts(export)] -struct Callback(#[ts(type = "() => void")] i64); - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -enum GetHostInfoParamsKind { - Multi, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct GetHostInfoParams { - kind: Option, - service_interface_id: String, - #[ts(type = "string | null")] - package_id: Option, - callback: Callback, -} -async fn get_host_info( - _: AnyContext, - GetHostInfoParams { .. }: GetHostInfoParams, -) -> Result { - todo!() -} - -async fn clear_bindings(context: EffectContext, _: Empty) -> Result { - todo!() -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct BindParams { - kind: HostKind, - id: HostId, - internal_port: u16, - #[serde(flatten)] - options: BindOptions, -} -async fn bind(_: AnyContext, BindParams { .. 
}: BindParams) -> Result { - todo!() -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct GetServiceInterfaceParams { - #[ts(type = "string | null")] - package_id: Option, - service_interface_id: String, - callback: Callback, -} -async fn get_service_interface( - _: AnyContext, - GetServiceInterfaceParams { - callback, - package_id, - service_interface_id, - }: GetServiceInterfaceParams, -) -> Result { - // TODO @Dr_Bonez - Ok(json!({ - "id": service_interface_id, - "name": service_interface_id, - "description": "This is a fake", - "hasPrimary": true, - "disabled": false, - "masked": false, - "addressInfo": json!({ - "username": Value::Null, - "hostId": "HostId?", - "options": json!({ - "scheme": Value::Null, - "preferredExternalPort": 80, - "addSsl":Value::Null, - "secure": false, - "ssl": false - }), - "suffix": "http" - }), - "type": "api" - })) -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct ChrootParams { - #[arg(short = 'e', long = "env")] - env: Option, - #[arg(short = 'w', long = "workdir")] - workdir: Option, - #[arg(short = 'u', long = "user")] - user: Option, - path: PathBuf, - #[ts(type = "string")] - command: OsString, - #[ts(type = "string[]")] - args: Vec, -} -fn chroot( - _: AnyContext, - ChrootParams { - env, - workdir, - user, - path, - command, - args, - }: ChrootParams, -) -> Result<(), Error> { - let mut cmd = std::process::Command::new(command); - if let Some(env) = env { - for (k, v) in std::fs::read_to_string(env)? - .lines() - .map(|l| l.trim()) - .filter_map(|l| l.split_once("=")) - .filter(|(k, _)| !SKIP_ENV.contains(&k)) - { - cmd.env(k, v); - } - } - std::os::unix::fs::chroot(path)?; - if let Some(uid) = user.as_deref().and_then(|u| u.parse::().ok()) { - cmd.uid(uid); - } else if let Some(user) = user { - let (uid, gid) = std::fs::read_to_string("/etc/passwd")? 
- .lines() - .find_map(|l| { - let mut split = l.trim().split(":"); - if user != split.next()? { - return None; - } - split.next(); // throw away x - Some((split.next()?.parse().ok()?, split.next()?.parse().ok()?)) - // uid gid - }) - .or_not_found(lazy_format!("{user} in /etc/passwd"))?; - cmd.uid(uid); - cmd.gid(gid); - }; - if let Some(workdir) = workdir { - cmd.current_dir(workdir); - } - cmd.args(args); - Err(cmd.exec().into()) -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -enum Algorithm { - Ecdsa, - Ed25519, -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct GetSslCertificateParams { - package_id: Option, - host_id: String, - algorithm: Option, //"ecdsa" | "ed25519" -} - -async fn get_ssl_certificate( - context: EffectContext, - GetSslCertificateParams { - package_id, - algorithm, - host_id, - }: GetSslCertificateParams, -) -> Result { - let fake = include_str!("./fake.cert.pem"); - Ok(json!([fake, fake, fake])) -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct GetSslKeyParams { - package_id: Option, - host_id: String, - algorithm: Option, -} - -async fn get_ssl_key( - context: EffectContext, - GetSslKeyParams { - package_id, - host_id, - algorithm, - }: GetSslKeyParams, -) -> Result { - let fake = include_str!("./fake.cert.key"); - Ok(json!(fake)) -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct GetStoreParams { - #[ts(type = "string | null")] - package_id: Option, - #[ts(type = "string")] - path: JsonPointer, -} - -async fn get_store( - context: EffectContext, - GetStoreParams { package_id, path }: GetStoreParams, -) -> Result { - let context = context.deref()?; - let peeked = context.ctx.db.peek().await; - let package_id = 
package_id.unwrap_or(context.id.clone()); - let value = peeked - .as_private() - .as_package_stores() - .as_idx(&package_id) - .or_not_found(&package_id)? - .de()?; - - Ok(path - .get(&value) - .ok_or_else(|| Error::new(eyre!("Did not find value at path"), ErrorKind::NotFound))? - .clone()) -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct SetStoreParams { - #[ts(type = "any")] - value: Value, - #[ts(type = "string")] - path: JsonPointer, -} - -async fn set_store( - context: EffectContext, - SetStoreParams { value, path }: SetStoreParams, -) -> Result<(), Error> { - let context = context.deref()?; - let package_id = context.id.clone(); - context - .ctx - .db - .mutate(|db| { - let model = db - .as_private_mut() - .as_package_stores_mut() - .upsert(&package_id, || Box::new(json!({})))?; - let mut model_value = model.de()?; - if model_value.is_null() { - model_value = json!({}); - } - path.set(&mut model_value, value, true) - .with_kind(ErrorKind::ParseDbField)?; - model.ser(&model_value) - }) - .await?; - Ok(()) -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct ExposeForDependentsParams { - #[ts(type = "string[]")] - paths: Vec, -} - -async fn expose_for_dependents( - context: EffectContext, - ExposeForDependentsParams { paths }: ExposeForDependentsParams, -) -> Result<(), Error> { - Ok(()) -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -struct ParamsPackageId { - #[ts(type = "string")] - package_id: PackageId, -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "camelCase")] -#[ts(export)] -struct ParamsMaybePackageId { - #[ts(type = "string | null")] - package_id: Option, -} - -async fn exists(context: EffectContext, params: 
ParamsPackageId) -> Result { - let context = context.deref()?; - let peeked = context.ctx.db.peek().await; - let package = peeked - .as_public() - .as_package_data() - .as_idx(¶ms.package_id) - .is_some(); - Ok(json!(package)) -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct ExecuteAction { - #[ts(type = "string | null")] - service_id: Option, - #[ts(type = "string")] - action_id: ActionId, - #[ts(type = "any")] - input: Value, -} -async fn execute_action( - context: EffectContext, - ExecuteAction { - action_id, - input, - service_id, - }: ExecuteAction, -) -> Result { - let context = context.deref()?; - let package_id = service_id.clone().unwrap_or_else(|| context.id.clone()); - let service = context.ctx.services.get(&package_id).await; - let service = service.as_ref().ok_or_else(|| { - Error::new( - eyre!("Could not find package {package_id}"), - ErrorKind::Unknown, - ) - })?; - - Ok(json!(service.action(action_id, input).await?)) -} -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)] -#[serde(rename_all = "camelCase")] -struct FromService {} -async fn get_configured(context: EffectContext, _: Empty) -> Result { - let context = context.deref()?; - let peeked = context.ctx.db.peek().await; - let package_id = &context.id; - let package = peeked - .as_public() - .as_package_data() - .as_idx(package_id) - .or_not_found(package_id)? - .as_status() - .as_configured() - .de()?; - Ok(json!(package)) -} - -async fn stopped(context: EffectContext, params: ParamsMaybePackageId) -> Result { - let context = context.deref()?; - let peeked = context.ctx.db.peek().await; - let package_id = params.package_id.unwrap_or_else(|| context.id.clone()); - let package = peeked - .as_public() - .as_package_data() - .as_idx(&package_id) - .or_not_found(&package_id)? 
- .as_status() - .as_main() - .de()?; - Ok(json!(matches!(package, MainStatus::Stopped))) -} -async fn running(context: EffectContext, params: ParamsPackageId) -> Result { - dbg!("Starting the running {params:?}"); - let context = context.deref()?; - let peeked = context.ctx.db.peek().await; - let package_id = params.package_id; - let package = peeked - .as_public() - .as_package_data() - .as_idx(&package_id) - .or_not_found(&package_id)? - .as_status() - .as_main() - .de()?; - Ok(json!(matches!(package, MainStatus::Running { .. }))) -} - -async fn restart(context: EffectContext, _: Empty) -> Result { - let context = context.deref()?; - let service = context.ctx.services.get(&context.id).await; - let service = service.as_ref().ok_or_else(|| { - Error::new( - eyre!("Could not find package {}", context.id), - ErrorKind::Unknown, - ) - })?; - service.restart().await?; - Ok(json!(())) -} - -async fn shutdown(context: EffectContext, _: Empty) -> Result { - let context = context.deref()?; - let service = context.ctx.services.get(&context.id).await; - let service = service.as_ref().ok_or_else(|| { - Error::new( - eyre!("Could not find package {}", context.id), - ErrorKind::Unknown, - ) - })?; - service.stop().await?; - Ok(json!(())) -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "camelCase")] -#[ts(export)] -struct SetConfigured { - configured: bool, -} -async fn set_configured(context: EffectContext, params: SetConfigured) -> Result { - let context = context.deref()?; - let package_id = &context.id; - context - .ctx - .db - .mutate(|db| { - db.as_public_mut() - .as_package_data_mut() - .as_idx_mut(package_id) - .or_not_found(package_id)? 
- .as_status_mut() - .as_configured_mut() - .ser(¶ms.configured) - }) - .await?; - Ok(json!(())) -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -enum Status { - Running, - Stopped, -} -impl FromStr for Status { - type Err = color_eyre::eyre::Report; - fn from_str(s: &str) -> Result { - match s { - "running" => Ok(Self::Running), - "stopped" => Ok(Self::Stopped), - _ => Err(eyre!("unknown status {s}")), - } - } -} -impl ValueParserFactory for Status { - type Parser = FromStrParser; - fn value_parser() -> Self::Parser { - FromStrParser::new() - } -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "camelCase")] -#[ts(export)] -struct SetMainStatus { - status: Status, -} -async fn set_main_status(context: EffectContext, params: SetMainStatus) -> Result { - dbg!(format!("Status for main will be is {params:?}")); - let context = context.deref()?; - match params.status { - Status::Running => context.started(), - Status::Stopped => context.stopped(), - } - Ok(Value::Null) -} - -#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -struct SetHealth { - id: HealthCheckId, - #[serde(flatten)] - result: HealthCheckResult, -} - -async fn set_health( - context: EffectContext, - SetHealth { id, result }: SetHealth, -) -> Result { - let context = context.deref()?; - - let package_id = &context.id; - context - .ctx - .db - .mutate(move |db| { - db.as_public_mut() - .as_package_data_mut() - .as_idx_mut(package_id) - .or_not_found(package_id)? - .as_status_mut() - .as_main_mut() - .mutate(|main| { - match main { - &mut MainStatus::Running { ref mut health, .. } - | &mut MainStatus::BackingUp { ref mut health, .. 
} => { - health.insert(id, result); - } - _ => (), - } - Ok(()) - }) - }) - .await?; - Ok(json!(())) -} -#[derive(serde::Deserialize, serde::Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "camelCase")] -#[ts(export)] -pub struct DestroyOverlayedImageParams { - #[ts(type = "string")] - guid: InternedString, -} - -#[instrument(skip_all)] -pub async fn destroy_overlayed_image( - ctx: EffectContext, - DestroyOverlayedImageParams { guid }: DestroyOverlayedImageParams, -) -> Result<(), Error> { - let ctx = ctx.deref()?; - if ctx - .persistent_container - .overlays - .lock() - .await - .remove(&guid) - .is_none() - { - tracing::warn!("Could not find a guard to remove on the destroy overlayed image; assumming that it already is removed and will be skipping"); - } - Ok(()) -} -#[derive(serde::Deserialize, serde::Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "camelCase")] -#[ts(export)] -pub struct CreateOverlayedImageParams { - #[ts(type = "string")] - image_id: ImageId, -} - -#[instrument(skip_all)] -pub async fn create_overlayed_image( - ctx: EffectContext, - CreateOverlayedImageParams { image_id }: CreateOverlayedImageParams, -) -> Result<(PathBuf, InternedString), Error> { - let ctx = ctx.deref()?; - let path = Path::new("images") - .join(*ARCH) - .join(&image_id) - .with_extension("squashfs"); - if let Some(image) = ctx - .persistent_container - .s9pk - .as_archive() - .contents() - .get_path(dbg!(&path)) - .and_then(|e| e.as_file()) - { - let guid = new_guid(); - let rootfs_dir = ctx - .persistent_container - .lxc_container - .get() - .ok_or_else(|| { - Error::new( - eyre!("PersistentContainer has been destroyed"), - ErrorKind::Incoherent, - ) - })? 
- .rootfs_dir(); - let mountpoint = rootfs_dir.join("media/startos/overlays").join(&*guid); - tokio::fs::create_dir_all(&mountpoint).await?; - Command::new("chown") - .arg("100000:100000") - .arg(&mountpoint) - .invoke(ErrorKind::Filesystem) - .await?; - let container_mountpoint = Path::new("/").join( - mountpoint - .strip_prefix(rootfs_dir) - .with_kind(ErrorKind::Incoherent)?, - ); - tracing::info!("Mounting overlay {guid} for {image_id}"); - let guard = OverlayGuard::mount( - &IdMapped::new(LoopDev::from(&**image), 0, 100000, 65536), - mountpoint, - ) - .await?; - tracing::info!("Mounted overlay {guid} for {image_id}"); - ctx.persistent_container - .overlays - .lock() - .await - .insert(guid.clone(), guard); - Ok((container_mountpoint, guid)) - } else { - Err(Error::new( - eyre!("image {image_id} not found in s9pk"), - ErrorKind::NotFound, - )) - } -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] -#[serde(rename_all = "camelCase")] -#[ts(export)] -enum DependencyKind { - Exists, - Running, -} - -#[derive(Debug, Clone, Deserialize, Serialize, TS)] -#[serde(rename_all = "camelCase", tag = "kind")] -#[ts(export)] -enum DependencyRequirement { - #[serde(rename_all = "camelCase")] - Running { - #[ts(type = "string")] - id: PackageId, - #[ts(type = "string[]")] - health_checks: BTreeSet, - #[ts(type = "string")] - version_spec: VersionRange, - #[ts(type = "string")] - registry_url: Url, - }, - #[serde(rename_all = "camelCase")] - Exists { - #[ts(type = "string")] - id: PackageId, - #[ts(type = "string")] - version_spec: VersionRange, - #[ts(type = "string")] - registry_url: Url, - }, -} -// filebrowser:exists,bitcoind:running:foo+bar+baz -impl FromStr for DependencyRequirement { - type Err = Error; - fn from_str(s: &str) -> Result { - match s.split_once(':') { - Some((id, "e")) | Some((id, "exists")) => Ok(Self::Exists { - id: id.parse()?, - registry_url: "".parse()?, // TODO - version_spec: "*".parse()?, // TODO - }), - 
Some((id, rest)) => { - let health_checks = match rest.split_once(':') { - Some(("r", rest)) | Some(("running", rest)) => rest - .split('+') - .map(|id| id.parse().map_err(Error::from)) - .collect(), - Some((kind, _)) => Err(Error::new( - eyre!("unknown dependency kind {kind}"), - ErrorKind::InvalidRequest, - )), - None => match rest { - "r" | "running" => Ok(BTreeSet::new()), - kind => Err(Error::new( - eyre!("unknown dependency kind {kind}"), - ErrorKind::InvalidRequest, - )), - }, - }?; - Ok(Self::Running { - id: id.parse()?, - health_checks, - registry_url: "".parse()?, // TODO - version_spec: "*".parse()?, // TODO - }) - } - None => Ok(Self::Running { - id: s.parse()?, - health_checks: BTreeSet::new(), - registry_url: "".parse()?, // TODO - version_spec: "*".parse()?, // TODO - }), - } - } -} -impl ValueParserFactory for DependencyRequirement { - type Parser = FromStrParser; - fn value_parser() -> Self::Parser { - FromStrParser::new() - } -} - -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "camelCase")] -#[ts(export)] -struct SetDependenciesParams { - dependencies: Vec, -} - -async fn set_dependencies( - ctx: EffectContext, - SetDependenciesParams { dependencies }: SetDependenciesParams, -) -> Result<(), Error> { - let ctx = ctx.deref()?; - let id = &ctx.id; - ctx.ctx - .db - .mutate(|db| { - let dependencies = CurrentDependencies( - dependencies - .into_iter() - .map(|dependency| match dependency { - DependencyRequirement::Exists { - id, - registry_url, - version_spec, - } => ( - id, - CurrentDependencyInfo { - kind: CurrentDependencyKind::Exists, - registry_url, - version_spec, - icon: todo!(), - title: todo!(), - }, - ), - DependencyRequirement::Running { - id, - health_checks, - registry_url, - version_spec, - } => ( - id, - CurrentDependencyInfo { - kind: CurrentDependencyKind::Running { health_checks }, - registry_url, - version_spec, - icon: todo!(), - title: todo!(), - }, - ), - }) - 
.collect(), - ); - db.as_public_mut() - .as_package_data_mut() - .as_idx_mut(id) - .or_not_found(id)? - .as_current_dependencies_mut() - .ser(&dependencies) - }) - .await -} diff --git a/core/startos/src/service/service_map.rs b/core/startos/src/service/service_map.rs index 934497eb9..90223216c 100644 --- a/core/startos/src/service/service_map.rs +++ b/core/startos/src/service/service_map.rs @@ -18,56 +18,60 @@ use crate::disk::mount::guard::GenericMountGuard; use crate::install::PKG_ARCHIVE_DIR; use crate::notifications::{notify, NotificationLevel}; use crate::prelude::*; -use crate::progress::{ - FullProgressTracker, FullProgressTrackerHandle, PhaseProgressTrackerHandle, - ProgressTrackerWriter, -}; +use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle, ProgressTrackerWriter}; use crate::s9pk::manifest::PackageId; use crate::s9pk::merkle_archive::source::FileSource; use crate::s9pk::S9pk; -use crate::service::{LoadDisposition, Service}; +use crate::service::{LoadDisposition, Service, ServiceRef}; use crate::status::{MainStatus, Status}; use crate::util::serde::Pem; pub type DownloadInstallFuture = BoxFuture<'static, Result>; pub type InstallFuture = BoxFuture<'static, Result<(), Error>>; -pub(super) struct InstallProgressHandles { - pub(super) finalization_progress: PhaseProgressTrackerHandle, - pub(super) progress_handle: FullProgressTrackerHandle, +pub struct InstallProgressHandles { + pub finalization_progress: PhaseProgressTrackerHandle, + pub progress: FullProgressTracker, } /// This is the structure to contain all the services #[derive(Default)] -pub struct ServiceMap(Mutex>>>>); +pub struct ServiceMap(Mutex>>>>); impl ServiceMap { - async fn entry(&self, id: &PackageId) -> Arc>> { - self.0 - .lock() - .await - .entry(id.clone()) + async fn entry(&self, id: &PackageId) -> Arc>> { + let mut lock = self.0.lock().await; + lock.entry(id.clone()) .or_insert_with(|| Arc::new(RwLock::new(None))) .clone() } #[instrument(skip_all)] - pub async fn 
get(&self, id: &PackageId) -> OwnedRwLockReadGuard> { + pub async fn get(&self, id: &PackageId) -> OwnedRwLockReadGuard> { self.entry(id).await.read_owned().await } #[instrument(skip_all)] - pub async fn get_mut(&self, id: &PackageId) -> OwnedRwLockWriteGuard> { + pub async fn get_mut(&self, id: &PackageId) -> OwnedRwLockWriteGuard> { self.entry(id).await.write_owned().await } #[instrument(skip_all)] - pub async fn init(&self, ctx: &RpcContext) -> Result<(), Error> { - for id in ctx.db.peek().await.as_public().as_package_data().keys()? { + pub async fn init( + &self, + ctx: &RpcContext, + mut progress: PhaseProgressTrackerHandle, + ) -> Result<(), Error> { + progress.start(); + let ids = ctx.db.peek().await.as_public().as_package_data().keys()?; + progress.set_total(ids.len() as u64); + for id in ids { if let Err(e) = self.load(ctx, &id, LoadDisposition::Retry).await { tracing::error!("Error loading installed package as service: {e}"); tracing::debug!("{e:?}"); } + progress += 1; } + progress.complete(); Ok(()) } @@ -84,18 +88,26 @@ impl ServiceMap { shutdown_err = service.shutdown().await; } // TODO: retry on error? 
- *service = Service::load(ctx, id, disposition).await?; + *service = Service::load(ctx, id, disposition).await?.map(From::from); shutdown_err?; Ok(()) } #[instrument(skip_all)] - pub async fn install( + pub async fn install( &self, ctx: RpcContext, - mut s9pk: S9pk, + s9pk: F, recovery_source: Option, - ) -> Result { + progress: Option, + ) -> Result + where + F: FnOnce() -> Fut, + Fut: Future, Error>>, + S: FileSource + Clone, + { + let mut s9pk = s9pk().await?; + s9pk.validate_and_filter(ctx.s9pk_arch)?; let manifest = s9pk.as_manifest().clone(); let id = manifest.id.clone(); let icon = s9pk.icon_data_url().await?; @@ -113,23 +125,22 @@ impl ServiceMap { }; let size = s9pk.size(); - let mut progress = FullProgressTracker::new(); + let progress = progress.unwrap_or_else(|| FullProgressTracker::new()); let download_progress_contribution = size.unwrap_or(60); - let progress_handle = progress.handle(); - let mut download_progress = progress_handle.add_phase( + let mut download_progress = progress.add_phase( InternedString::intern("Download"), Some(download_progress_contribution), ); if let Some(size) = size { download_progress.set_total(size); } - let mut finalization_progress = progress_handle.add_phase( + let mut finalization_progress = progress.add_phase( InternedString::intern(op_name), Some(download_progress_contribution / 2), ); let restoring = recovery_source.is_some(); - let mut reload_guard = ServiceReloadGuard::new(ctx.clone(), id.clone(), op_name); + let mut reload_guard = ServiceRefReloadGuard::new(ctx.clone(), id.clone(), op_name); reload_guard .handle(ctx.db.mutate({ @@ -165,9 +176,8 @@ impl ServiceMap { status: Status { configured: false, main: MainStatus::Stopped, - dependency_config_errors: Default::default(), }, - marketplace_url: None, + registry: None, developer_key: Pem::new(developer_key), icon, last_backup: None, @@ -196,7 +206,7 @@ impl ServiceMap { let deref_id = id.clone(); let sync_progress_task = - 
NonDetachingJoinHandle::from(tokio::spawn(progress.sync_to_db( + NonDetachingJoinHandle::from(tokio::spawn(progress.clone().sync_to_db( ctx.db.clone(), move |v| { v.as_public_mut() @@ -208,6 +218,7 @@ impl ServiceMap { Some(Duration::from_millis(100)), ))); + download_progress.start(); let mut progress_writer = ProgressTrackerWriter::new( crate::util::io::create_file(&download_path).await?, download_progress, @@ -231,6 +242,7 @@ impl ServiceMap { .await?; Ok(reload_guard .handle_last(async move { + finalization_progress.start(); let s9pk = S9pk::open(&installed_path, Some(&id)).await?; let prev = if let Some(service) = service.take() { ensure_code!( @@ -248,42 +260,31 @@ impl ServiceMap { service .uninstall(Some(s9pk.as_manifest().version.clone())) .await?; - finalization_progress.complete(); - progress_handle.complete(); + progress.complete(); Some(version) } else { None }; - if let Some(recovery_source) = recovery_source { - *service = Some( - Service::restore( - ctx, - s9pk, - recovery_source, - Some(InstallProgressHandles { - finalization_progress, - progress_handle, - }), - ) - .await?, - ); - } else { - *service = Some( - Service::install( - ctx, - s9pk, - prev, - Some(InstallProgressHandles { - finalization_progress, - progress_handle, - }), - ) - .await?, - ); - } + *service = Some( + Service::install( + ctx, + s9pk, + prev, + recovery_source, + Some(InstallProgressHandles { + finalization_progress, + progress, + }), + ) + .await? 
+ .into(), + ); + drop(service); + sync_progress_task.await.map_err(|_| { Error::new(eyre!("progress sync task panicked"), ErrorKind::Unknown) })??; + Ok(()) }) .boxed()) @@ -294,9 +295,14 @@ impl ServiceMap { /// This is ran during the cleanup, so when we are uninstalling the service #[instrument(skip_all)] pub async fn uninstall(&self, ctx: &RpcContext, id: &PackageId) -> Result<(), Error> { - if let Some(service) = self.get_mut(id).await.take() { - ServiceReloadGuard::new(ctx.clone(), id.clone(), "Uninstall") - .handle_last(service.uninstall(None)) + let mut guard = self.get_mut(id).await; + if let Some(service) = guard.take() { + ServiceRefReloadGuard::new(ctx.clone(), id.clone(), "Uninstall") + .handle_last(async move { + let res = service.uninstall(None).await; + drop(guard); + res + }) .await?; } Ok(()) @@ -322,17 +328,17 @@ impl ServiceMap { } } -pub struct ServiceReloadGuard(Option); -impl Drop for ServiceReloadGuard { +pub struct ServiceRefReloadGuard(Option); +impl Drop for ServiceRefReloadGuard { fn drop(&mut self) { if let Some(info) = self.0.take() { tokio::spawn(info.reload(None)); } } } -impl ServiceReloadGuard { +impl ServiceRefReloadGuard { pub fn new(ctx: RpcContext, id: PackageId, operation: &'static str) -> Self { - Self(Some(ServiceReloadInfo { ctx, id, operation })) + Self(Some(ServiceRefReloadInfo { ctx, id, operation })) } pub async fn handle( @@ -361,12 +367,12 @@ impl ServiceReloadGuard { } } -struct ServiceReloadInfo { +struct ServiceRefReloadInfo { ctx: RpcContext, id: PackageId, operation: &'static str, } -impl ServiceReloadInfo { +impl ServiceRefReloadInfo { async fn reload(self, error: Option) -> Result<(), Error> { self.ctx .services diff --git a/core/startos/src/service/start_stop.rs b/core/startos/src/service/start_stop.rs index bc24574ac..178176023 100644 --- a/core/startos/src/service/start_stop.rs +++ b/core/startos/src/service/start_stop.rs @@ -15,6 +15,7 @@ impl From for StartStop { fn from(value: MainStatus) -> Self { match 
value { MainStatus::Stopped => StartStop::Stop, + MainStatus::Restoring => StartStop::Stop, MainStatus::Restarting => StartStop::Start, MainStatus::Stopping { .. } => StartStop::Stop, MainStatus::Starting => StartStop::Start, diff --git a/core/startos/src/service/transition/backup.rs b/core/startos/src/service/transition/backup.rs index 8b1378917..d8606f534 100644 --- a/core/startos/src/service/transition/backup.rs +++ b/core/startos/src/service/transition/backup.rs @@ -1 +1,92 @@ +use std::path::PathBuf; +use std::sync::Arc; +use futures::future::BoxFuture; +use futures::FutureExt; +use models::ProcedureName; + +use super::TempDesiredRestore; +use crate::disk::mount::filesystem::ReadWrite; +use crate::prelude::*; +use crate::rpc_continuations::Guid; +use crate::service::config::GetConfig; +use crate::service::dependencies::DependencyConfig; +use crate::service::transition::{TransitionKind, TransitionState}; +use crate::service::ServiceActor; +use crate::util::actor::background::BackgroundJobQueue; +use crate::util::actor::{ConflictBuilder, Handler}; +use crate::util::future::RemoteCancellable; + +pub(in crate::service) struct Backup { + pub path: PathBuf, +} +impl Handler for ServiceActor { + type Response = Result>, Error>; + fn conflicts_with(_: &Backup) -> ConflictBuilder { + ConflictBuilder::everything() + .except::() + .except::() + } + async fn handle( + &mut self, + id: Guid, + backup: Backup, + jobs: &BackgroundJobQueue, + ) -> Self::Response { + // So Need a handle to just a single field in the state + let temp: TempDesiredRestore = TempDesiredRestore::new(&self.0.persistent_container.state); + let mut current = self.0.persistent_container.state.subscribe(); + let path = backup.path.clone(); + let seed = self.0.clone(); + + let transition = RemoteCancellable::new(async move { + temp.stop(); + current + .wait_for(|s| s.running_status.is_none()) + .await + .with_kind(ErrorKind::Unknown)?; + + let backup_guard = seed + .persistent_container + 
.mount_backup(path, ReadWrite) + .await?; + seed.persistent_container + .execute(id, ProcedureName::CreateBackup, Value::Null, None) + .await?; + backup_guard.unmount(true).await?; + + if temp.restore().is_start() { + current + .wait_for(|s| s.running_status.is_some()) + .await + .with_kind(ErrorKind::Unknown)?; + } + drop(temp); + Ok::<_, Arc>(()) + }); + let cancel_handle = transition.cancellation_handle(); + let transition = transition.shared(); + let job_transition = transition.clone(); + jobs.add_job(job_transition.map(|_| ())); + + let mut old = None; + self.0.persistent_container.state.send_modify(|s| { + old = std::mem::replace( + &mut s.transition_state, + Some(TransitionState { + kind: TransitionKind::BackingUp, + cancel_handle, + }), + ) + }); + if let Some(t) = old { + t.abort().await; + } + Ok(transition + .map(|r| { + r.ok_or_else(|| Error::new(eyre!("Backup canceled"), ErrorKind::Cancelled))? + .map_err(|e| e.clone_output()) + }) + .boxed()) + } +} diff --git a/core/startos/src/service/transition/mod.rs b/core/startos/src/service/transition/mod.rs index af62ccc1c..a6a41073b 100644 --- a/core/startos/src/service/transition/mod.rs +++ b/core/startos/src/service/transition/mod.rs @@ -5,16 +5,18 @@ use tokio::sync::watch; use super::persistent_container::ServiceState; use crate::service::start_stop::StartStop; -use crate::util::actor::BackgroundJobs; +use crate::util::actor::background::BackgroundJobQueue; use crate::util::future::{CancellationHandle, RemoteCancellable}; pub mod backup; pub mod restart; +pub mod restore; #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum TransitionKind { BackingUp, Restarting, + Restoring, } /// Used only in the manager/mod and is used to keep track of the state of the manager during the @@ -41,7 +43,7 @@ impl TransitionState { fn new( task: impl Future + Send + 'static, kind: TransitionKind, - jobs: &mut BackgroundJobs, + jobs: &BackgroundJobQueue, ) -> Self { let task = RemoteCancellable::new(task); 
let cancel_handle = task.cancellation_handle(); @@ -59,23 +61,28 @@ impl Drop for TransitionState { } #[derive(Debug, Clone)] -pub struct TempDesiredState(pub(super) Arc>); -impl TempDesiredState { +pub struct TempDesiredRestore(pub(super) Arc>, StartStop); +impl TempDesiredRestore { pub fn new(state: &Arc>) -> Self { - Self(state.clone()) + Self(state.clone(), state.borrow().desired_state) } pub fn stop(&self) { self.0 .send_modify(|s| s.temp_desired_state = Some(StartStop::Stop)); } - pub fn start(&self) { + pub fn restore(&self) -> StartStop { + let restore_state = self.1; self.0 - .send_modify(|s| s.temp_desired_state = Some(StartStop::Start)); + .send_modify(|s| s.temp_desired_state = Some(restore_state)); + restore_state } } -impl Drop for TempDesiredState { +impl Drop for TempDesiredRestore { fn drop(&mut self) { - self.0.send_modify(|s| s.temp_desired_state = None); + self.0.send_modify(|s| { + s.temp_desired_state.take(); + s.transition_state.take(); + }); } } // impl Deref for TempDesiredState { diff --git a/core/startos/src/service/transition/restart.rs b/core/startos/src/service/transition/restart.rs index 9c82d0282..108e232ad 100644 --- a/core/startos/src/service/transition/restart.rs +++ b/core/startos/src/service/transition/restart.rs @@ -1,33 +1,59 @@ -use std::sync::Arc; - use futures::FutureExt; +use super::TempDesiredRestore; use crate::prelude::*; +use crate::rpc_continuations::Guid; +use crate::service::config::GetConfig; +use crate::service::dependencies::DependencyConfig; use crate::service::transition::{TransitionKind, TransitionState}; use crate::service::{Service, ServiceActor}; -use crate::util::actor::{BackgroundJobs, Handler}; +use crate::util::actor::background::BackgroundJobQueue; +use crate::util::actor::{ConflictBuilder, Handler}; use crate::util::future::RemoteCancellable; -use super::TempDesiredState; - -struct Restart; -#[async_trait::async_trait] +pub(super) struct Restart; impl Handler for ServiceActor { type Response = (); - 
async fn handle(&mut self, _: Restart, jobs: &mut BackgroundJobs) -> Self::Response { + fn conflicts_with(_: &Restart) -> ConflictBuilder { + ConflictBuilder::everything() + .except::() + .except::() + } + async fn handle(&mut self, _: Guid, _: Restart, jobs: &BackgroundJobQueue) -> Self::Response { // So Need a handle to just a single field in the state - let temp = TempDesiredState::new(&self.0.persistent_container.state); + let temp = TempDesiredRestore::new(&self.0.persistent_container.state); let mut current = self.0.persistent_container.state.subscribe(); - let transition = RemoteCancellable::new(async move { - temp.stop(); - current.wait_for(|s| s.running_status.is_none()).await; - temp.start(); - current.wait_for(|s| s.running_status.is_some()).await; - drop(temp); - }); + let state = self.0.persistent_container.state.clone(); + let transition = RemoteCancellable::new( + async move { + temp.stop(); + current + .wait_for(|s| s.running_status.is_none()) + .await + .with_kind(ErrorKind::Unknown)?; + if temp.restore().is_start() { + current + .wait_for(|s| s.running_status.is_some()) + .await + .with_kind(ErrorKind::Unknown)?; + } + drop(temp); + state.send_modify(|s| { + s.transition_state.take(); + }); + Ok::<_, Error>(()) + } + .map(|x| { + if let Err(err) = x { + tracing::debug!("{:?}", err); + tracing::warn!("{}", err); + } + }), + ); let cancel_handle = transition.cancellation_handle(); - jobs.add_job(transition.map(|_| ())); - let notified = self.0.synchronized.notified(); + let transition = transition.shared(); + let job_transition = transition.clone(); + jobs.add_job(job_transition.map(|_| ())); let mut old = None; self.0.persistent_container.state.send_modify(|s| { @@ -42,11 +68,14 @@ impl Handler for ServiceActor { if let Some(t) = old { t.abort().await; } - notified.await + if transition.await.is_none() { + tracing::warn!("Service {} has been cancelled", &self.0.id); + } } } impl Service { - pub async fn restart(&self) -> Result<(), Error> { - 
self.actor.send(Restart).await + #[instrument(skip_all)] + pub async fn restart(&self, id: Guid) -> Result<(), Error> { + self.actor.send(id, Restart).await } } diff --git a/core/startos/src/service/transition/restore.rs b/core/startos/src/service/transition/restore.rs new file mode 100644 index 000000000..1c4020ea4 --- /dev/null +++ b/core/startos/src/service/transition/restore.rs @@ -0,0 +1,80 @@ +use std::path::PathBuf; + +use futures::FutureExt; +use models::ProcedureName; + +use crate::disk::mount::filesystem::ReadOnly; +use crate::prelude::*; +use crate::rpc_continuations::Guid; +use crate::service::transition::{TransitionKind, TransitionState}; +use crate::service::ServiceActor; +use crate::util::actor::background::BackgroundJobQueue; +use crate::util::actor::{ConflictBuilder, Handler}; +use crate::util::future::RemoteCancellable; + +pub(in crate::service) struct Restore { + pub path: PathBuf, +} +impl Handler for ServiceActor { + type Response = Result<(), Error>; + fn conflicts_with(_: &Restore) -> ConflictBuilder { + ConflictBuilder::everything() + } + async fn handle( + &mut self, + id: Guid, + restore: Restore, + jobs: &BackgroundJobQueue, + ) -> Self::Response { + // So Need a handle to just a single field in the state + let path = restore.path.clone(); + let seed = self.0.clone(); + + let state = self.0.persistent_container.state.clone(); + let transition = RemoteCancellable::new( + async move { + let backup_guard = seed + .persistent_container + .mount_backup(path, ReadOnly) + .await?; + seed.persistent_container + .execute(id, ProcedureName::RestoreBackup, Value::Null, None) + .await?; + backup_guard.unmount(true).await?; + + state.send_modify(|s| { + s.transition_state.take(); + }); + Ok::<_, Error>(()) + } + .map(|x| { + if let Err(err) = dbg!(x) { + tracing::debug!("{:?}", err); + tracing::warn!("{}", err); + } + }), + ); + let cancel_handle = transition.cancellation_handle(); + let transition = transition.shared(); + let job_transition = 
transition.clone(); + jobs.add_job(job_transition.map(|_| ())); + + let mut old = None; + self.0.persistent_container.state.send_modify(|s| { + old = std::mem::replace( + &mut s.transition_state, + Some(TransitionState { + kind: TransitionKind::Restoring, + cancel_handle, + }), + ) + }); + if let Some(t) = old { + t.abort().await; + } + match transition.await { + None => Err(Error::new(eyre!("Restoring canceled"), ErrorKind::Unknown)), + Some(x) => Ok(x), + } + } +} diff --git a/core/startos/src/setup.rs b/core/startos/src/setup.rs index 9120544e3..642dd5476 100644 --- a/core/startos/src/setup.rs +++ b/core/startos/src/setup.rs @@ -1,43 +1,44 @@ +use std::collections::BTreeMap; use std::path::{Path, PathBuf}; use std::sync::Arc; use std::time::Duration; use color_eyre::eyre::eyre; use josekit::jwk::Jwk; -use openssl::x509::X509; use patch_db::json_ptr::ROOT; use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{from_fn_async, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; -use tokio::fs::File; use tokio::io::AsyncWriteExt; use tokio::try_join; -use torut::onion::OnionAddressV3; use tracing::instrument; use ts_rs::TS; use crate::account::AccountInfo; use crate::backup::restore::recover_full_embassy; use crate::backup::target::BackupTargetFS; +use crate::context::rpc::InitRpcContextPhases; use crate::context::setup::SetupResult; -use crate::context::SetupContext; +use crate::context::{RpcContext, SetupContext}; use crate::db::model::Database; use crate::disk::fsck::RepairStrategy; use crate::disk::main::DEFAULT_PASSWORD; use crate::disk::mount::filesystem::cifs::Cifs; use crate::disk::mount::filesystem::ReadWrite; use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; -use crate::disk::util::{pvscan, recovery_info, DiskInfo, EmbassyOsRecoveryInfo}; +use crate::disk::util::{pvscan, recovery_info, DiskInfo, StartOsRecoveryInfo}; use crate::disk::REPAIR_DISK_PATH; -use 
crate::hostname::Hostname; -use crate::init::{init, InitResult}; +use crate::init::{init, InitPhases, InitResult}; +use crate::net::net_controller::PreInitNetController; use crate::net::ssl::root_ca_start_time; use crate::prelude::*; +use crate::progress::{FullProgress, PhaseProgressTrackerHandle}; +use crate::rpc_continuations::Guid; use crate::util::crypto::EncryptedWire; -use crate::util::io::{dir_copy, dir_size, Counter}; +use crate::util::io::{create_file, dir_copy, dir_size, Counter}; use crate::{Error, ErrorKind, ResultExt}; -pub fn setup() -> ParentHandler { +pub fn setup() -> ParentHandler { ParentHandler::new() .subcommand( "status", @@ -45,10 +46,10 @@ pub fn setup() -> ParentHandler { .with_metadata("authenticated", Value::Bool(false)) .no_cli(), ) - .subcommand("disk", disk()) + .subcommand("disk", disk::()) .subcommand("attach", from_fn_async(attach).no_cli()) .subcommand("execute", from_fn_async(execute).no_cli()) - .subcommand("cifs", cifs()) + .subcommand("cifs", cifs::()) .subcommand("complete", from_fn_async(complete).no_cli()) .subcommand( "get-pubkey", @@ -59,7 +60,7 @@ pub fn setup() -> ParentHandler { .subcommand("exit", from_fn_async(exit).no_cli()) } -pub fn disk() -> ParentHandler { +pub fn disk() -> ParentHandler { ParentHandler::new().subcommand( "list", from_fn_async(list_disks) @@ -75,10 +76,12 @@ pub async fn list_disks(ctx: SetupContext) -> Result, Error> { async fn setup_init( ctx: &SetupContext, password: Option, -) -> Result<(Hostname, OnionAddressV3, X509), Error> { - let InitResult { db } = init(&ctx.config).await?; + init_phases: InitPhases, +) -> Result<(AccountInfo, PreInitNetController), Error> { + let InitResult { net_ctrl } = init(&ctx.config, init_phases).await?; - let account = db + let account = net_ctrl + .db .mutate(|m| { let mut account = AccountInfo::load(m)?; if let Some(password) = password { @@ -93,15 +96,12 @@ async fn setup_init( }) .await?; - Ok(( - account.hostname, - 
account.tor_key.public().get_onion_address(), - account.root_ca_cert, - )) + Ok((account, net_ctrl)) } #[derive(Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] +#[ts(export)] pub struct AttachParams { #[serde(rename = "startOsPassword")] password: Option, @@ -110,25 +110,20 @@ pub struct AttachParams { pub async fn attach( ctx: SetupContext, - AttachParams { password, guid }: AttachParams, -) -> Result<(), Error> { - let mut status = ctx.setup_status.write().await; - if status.is_some() { - return Err(Error::new( - eyre!("Setup already in progress"), - ErrorKind::InvalidRequest, - )); - } - *status = Some(Ok(SetupStatus { - bytes_transferred: 0, - total_bytes: None, - complete: false, - })); - drop(status); - tokio::task::spawn(async move { - if let Err(e) = async { + AttachParams { + password, + guid: disk_guid, + }: AttachParams, +) -> Result { + let setup_ctx = ctx.clone(); + ctx.run_setup(|| async move { + let progress = &setup_ctx.progress; + let mut disk_phase = progress.add_phase("Opening data drive".into(), Some(10)); + let init_phases = InitPhases::new(&progress); + let rpc_ctx_phases = InitRpcContextPhases::new(&progress); + let password: Option = match password { - Some(a) => match a.decrypt(&*ctx) { + Some(a) => match a.decrypt(&setup_ctx) { a @ Some(_) => a, None => { return Err(Error::new( @@ -139,15 +134,17 @@ pub async fn attach( }, None => None, }; + + disk_phase.start(); let requires_reboot = crate::disk::main::import( - &*guid, - &ctx.datadir, + &*disk_guid, + &setup_ctx.datadir, if tokio::fs::metadata(REPAIR_DISK_PATH).await.is_ok() { RepairStrategy::Aggressive } else { RepairStrategy::Preen }, - if guid.ends_with("_UNENC") { None } else { Some(DEFAULT_PASSWORD) }, + if disk_guid.ends_with("_UNENC") { None } else { Some(DEFAULT_PASSWORD) }, ) .await?; if tokio::fs::metadata(REPAIR_DISK_PATH).await.is_ok() { @@ -156,7 +153,7 @@ pub async fn attach( .with_ctx(|_| (ErrorKind::Filesystem, REPAIR_DISK_PATH))?; } if requires_reboot.0 { 
- crate::disk::main::export(&*guid, &ctx.datadir).await?; + crate::disk::main::export(&*disk_guid, &setup_ctx.datadir).await?; return Err(Error::new( eyre!( "Errors were corrected with your disk, but the server must be restarted in order to proceed" @@ -164,37 +161,48 @@ pub async fn attach( ErrorKind::DiskManagement, )); } - let (hostname, tor_addr, root_ca) = setup_init(&ctx, password).await?; - *ctx.setup_result.write().await = Some((guid, SetupResult { - tor_address: format!("https://{}", tor_addr), - lan_address: hostname.lan_address(), - root_ca: String::from_utf8(root_ca.to_pem()?)?, - })); - *ctx.setup_status.write().await = Some(Ok(SetupStatus { - bytes_transferred: 0, - total_bytes: None, - complete: true, - })); - Ok(()) - }.await { - tracing::error!("Error Setting Up Embassy: {}", e); - tracing::debug!("{:?}", e); - *ctx.setup_status.write().await = Some(Err(e.into())); - } - }); - Ok(()) + disk_phase.complete(); + + let (account, net_ctrl) = setup_init(&setup_ctx, password, init_phases).await?; + + let rpc_ctx = RpcContext::init(&setup_ctx.config, disk_guid, Some(net_ctrl), rpc_ctx_phases).await?; + + Ok(((&account).try_into()?, rpc_ctx)) + })?; + + Ok(ctx.progress().await) } -#[derive(Debug, Clone, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] -pub struct SetupStatus { - pub bytes_transferred: u64, - pub total_bytes: Option, - pub complete: bool, +#[ts(export)] +#[serde(tag = "status")] +pub enum SetupStatusRes { + Complete(SetupResult), + Running(SetupProgress), } -pub async fn status(ctx: SetupContext) -> Result, RpcError> { - ctx.setup_status.read().await.clone().transpose() +#[derive(Debug, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct SetupProgress { + pub progress: FullProgress, + pub guid: Guid, +} + +pub async fn status(ctx: SetupContext) -> Result, Error> { + if let Some(res) = ctx.result.get() { + match res { + Ok((res, _)) => 
Ok(Some(SetupStatusRes::Complete(res.clone()))), + Err(e) => Err(e.clone_output()), + } + } else { + if ctx.task.initialized() { + Ok(Some(SetupStatusRes::Running(ctx.progress().await))) + } else { + Ok(None) + } + } } /// We want to be able to get a secret, a shared private key with the frontend @@ -202,17 +210,18 @@ pub async fn status(ctx: SetupContext) -> Result, RpcError> /// without knowing the password over clearnet. We use the public key shared across the network /// since it is fine to share the public, and encrypt against the public. pub async fn get_pubkey(ctx: SetupContext) -> Result { - let secret = ctx.as_ref().clone(); + let secret = AsRef::::as_ref(&ctx).clone(); let pub_key = secret.to_public_key()?; Ok(pub_key) } -pub fn cifs() -> ParentHandler { +pub fn cifs() -> ParentHandler { ParentHandler::new().subcommand("verify", from_fn_async(verify_cifs).no_cli()) } #[derive(Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] +#[ts(export)] pub struct VerifyCifsParams { hostname: String, path: PathBuf, @@ -229,8 +238,8 @@ pub async fn verify_cifs( username, password, }: VerifyCifsParams, -) -> Result { - let password: Option = password.map(|x| x.decrypt(&*ctx)).flatten(); +) -> Result, Error> { + let password: Option = password.map(|x| x.decrypt(&ctx)).flatten(); let guard = TmpMountGuard::mount( &Cifs { hostname, @@ -243,127 +252,98 @@ pub async fn verify_cifs( .await?; let start_os = recovery_info(guard.path()).await?; guard.unmount().await?; - start_os.ok_or_else(|| Error::new(eyre!("No Backup Found"), crate::ErrorKind::NotFound)) + if start_os.is_empty() { + return Err(Error::new( + eyre!("No Backup Found"), + crate::ErrorKind::NotFound, + )); + } + Ok(start_os) } #[derive(Debug, Deserialize, Serialize, TS)] #[serde(tag = "type")] #[serde(rename_all = "camelCase")] -pub enum RecoverySource { - Migrate { guid: String }, - Backup { target: BackupTargetFS }, +#[serde(rename_all_fields = "camelCase")] +pub enum RecoverySource { + Migrate { + 
guid: String, + }, + Backup { + target: BackupTargetFS, + password: Password, + server_id: String, + }, } #[derive(Deserialize, Serialize, TS)] #[serde(rename_all = "camelCase")] -pub struct ExecuteParams { +#[ts(export)] +pub struct SetupExecuteParams { start_os_logicalname: PathBuf, start_os_password: EncryptedWire, - recovery_source: Option, - recovery_password: Option, + recovery_source: Option>, } // #[command(rpc_only)] pub async fn execute( ctx: SetupContext, - ExecuteParams { + SetupExecuteParams { start_os_logicalname, start_os_password, recovery_source, - recovery_password, - }: ExecuteParams, -) -> Result<(), Error> { - let start_os_password = match start_os_password.decrypt(&*ctx) { + }: SetupExecuteParams, +) -> Result { + let start_os_password = match start_os_password.decrypt(&ctx) { Some(a) => a, None => { return Err(Error::new( - color_eyre::eyre::eyre!("Couldn't decode embassy-password"), + color_eyre::eyre::eyre!("Couldn't decode startOsPassword"), crate::ErrorKind::Unknown, )) } }; - let recovery_password: Option = match recovery_password { - Some(a) => match a.decrypt(&*ctx) { - Some(a) => Some(a), - None => { - return Err(Error::new( - color_eyre::eyre::eyre!("Couldn't decode recovery-password"), + let recovery = match recovery_source { + Some(RecoverySource::Backup { + target, + password, + server_id, + }) => Some(RecoverySource::Backup { + target, + password: password.decrypt(&ctx).ok_or_else(|| { + Error::new( + color_eyre::eyre::eyre!("Couldn't decode recoveryPassword"), crate::ErrorKind::Unknown, - )) - } - }, + ) + })?, + server_id, + }), + Some(RecoverySource::Migrate { guid }) => Some(RecoverySource::Migrate { guid }), None => None, }; - let mut status = ctx.setup_status.write().await; - if status.is_some() { - return Err(Error::new( - eyre!("Setup already in progress"), - ErrorKind::InvalidRequest, - )); - } - *status = Some(Ok(SetupStatus { - bytes_transferred: 0, - total_bytes: None, - complete: false, - })); - drop(status); - 
tokio::task::spawn({ - async move { - let ctx = ctx.clone(); - match execute_inner( - ctx.clone(), - start_os_logicalname, - start_os_password, - recovery_source, - recovery_password, - ) - .await - { - Ok((guid, hostname, tor_addr, root_ca)) => { - tracing::info!("Setup Complete!"); - *ctx.setup_result.write().await = Some(( - guid, - SetupResult { - tor_address: format!("https://{}", tor_addr), - lan_address: hostname.lan_address(), - root_ca: String::from_utf8( - root_ca.to_pem().expect("failed to serialize root ca"), - ) - .expect("invalid pem string"), - }, - )); - *ctx.setup_status.write().await = Some(Ok(SetupStatus { - bytes_transferred: 0, - total_bytes: None, - complete: true, - })); - } - Err(e) => { - tracing::error!("Error Setting Up Server: {}", e); - tracing::debug!("{:?}", e); - *ctx.setup_status.write().await = Some(Err(e.into())); - } - } - } - }); - Ok(()) + + let setup_ctx = ctx.clone(); + ctx.run_setup(|| execute_inner(setup_ctx, start_os_logicalname, start_os_password, recovery))?; + + Ok(ctx.progress().await) } #[instrument(skip_all)] // #[command(rpc_only)] pub async fn complete(ctx: SetupContext) -> Result { - let (guid, setup_result) = if let Some((guid, setup_result)) = &*ctx.setup_result.read().await { - (guid.clone(), setup_result.clone()) - } else { - return Err(Error::new( + match ctx.result.get() { + Some(Ok((res, ctx))) => { + let mut guid_file = create_file("/media/startos/config/disk.guid").await?; + guid_file.write_all(ctx.disk_guid.as_bytes()).await?; + guid_file.sync_all().await?; + Ok(res.clone()) + } + Some(Err(e)) => Err(e.clone_output()), + None => Err(Error::new( eyre!("setup.execute has not completed successfully"), crate::ErrorKind::InvalidRequest, - )); - }; - let mut guid_file = File::create("/media/embassy/config/disk.guid").await?; - guid_file.write_all(guid.as_bytes()).await?; - guid_file.sync_all().await?; - Ok(setup_result) + )), + } } #[instrument(skip_all)] @@ -378,9 +358,23 @@ pub async fn execute_inner( ctx: 
SetupContext, start_os_logicalname: PathBuf, start_os_password: String, - recovery_source: Option, - recovery_password: Option, -) -> Result<(Arc, Hostname, OnionAddressV3, X509), Error> { + recovery_source: Option>, +) -> Result<(SetupResult, RpcContext), Error> { + let progress = &ctx.progress; + let mut disk_phase = progress.add_phase("Formatting data drive".into(), Some(10)); + let restore_phase = match recovery_source.as_ref() { + Some(RecoverySource::Backup { .. }) => { + Some(progress.add_phase("Restoring backup".into(), Some(100))) + } + Some(RecoverySource::Migrate { .. }) => { + Some(progress.add_phase("Transferring data".into(), Some(100))) + } + None => None, + }; + let init_phases = InitPhases::new(&progress); + let rpc_ctx_phases = InitRpcContextPhases::new(&progress); + + disk_phase.start(); let encryption_password = if ctx.disable_encryption { None } else { @@ -402,68 +396,107 @@ pub async fn execute_inner( encryption_password, ) .await?; + disk_phase.complete(); - if let Some(RecoverySource::Backup { target }) = recovery_source { - recover(ctx, guid, start_os_password, target, recovery_password).await - } else if let Some(RecoverySource::Migrate { guid: old_guid }) = recovery_source { - migrate(ctx, guid, &old_guid, start_os_password).await - } else { - let (hostname, tor_addr, root_ca) = fresh_setup(&ctx, &start_os_password).await?; - Ok((guid, hostname, tor_addr, root_ca)) + let progress = SetupExecuteProgress { + init_phases, + restore_phase, + rpc_ctx_phases, + }; + + match recovery_source { + Some(RecoverySource::Backup { + target, + password, + server_id, + }) => { + recover( + &ctx, + guid, + start_os_password, + target, + server_id, + password, + progress, + ) + .await + } + Some(RecoverySource::Migrate { guid: old_guid }) => { + migrate(&ctx, guid, &old_guid, start_os_password, progress).await + } + None => fresh_setup(&ctx, guid, &start_os_password, progress).await, } } +pub struct SetupExecuteProgress { + pub init_phases: InitPhases, + 
pub restore_phase: Option, + pub rpc_ctx_phases: InitRpcContextPhases, +} + async fn fresh_setup( ctx: &SetupContext, + guid: Arc, start_os_password: &str, -) -> Result<(Hostname, OnionAddressV3, X509), Error> { + SetupExecuteProgress { + init_phases, + rpc_ctx_phases, + .. + }: SetupExecuteProgress, +) -> Result<(SetupResult, RpcContext), Error> { let account = AccountInfo::new(start_os_password, root_ca_start_time().await?)?; let db = ctx.db().await?; db.put(&ROOT, &Database::init(&account)?).await?; drop(db); - init(&ctx.config).await?; - Ok(( - account.hostname, - account.tor_key.public().get_onion_address(), - account.root_ca_cert, - )) + + let InitResult { net_ctrl } = init(&ctx.config, init_phases).await?; + + let rpc_ctx = RpcContext::init(&ctx.config, guid, Some(net_ctrl), rpc_ctx_phases).await?; + + Ok(((&account).try_into()?, rpc_ctx)) } #[instrument(skip_all)] async fn recover( - ctx: SetupContext, + ctx: &SetupContext, guid: Arc, start_os_password: String, recovery_source: BackupTargetFS, - recovery_password: Option, -) -> Result<(Arc, Hostname, OnionAddressV3, X509), Error> { + server_id: String, + recovery_password: String, + progress: SetupExecuteProgress, +) -> Result<(SetupResult, RpcContext), Error> { let recovery_source = TmpMountGuard::mount(&recovery_source, ReadWrite).await?; recover_full_embassy( ctx, guid.clone(), start_os_password, recovery_source, - recovery_password, + &server_id, + &recovery_password, + progress, ) .await } #[instrument(skip_all)] async fn migrate( - ctx: SetupContext, + ctx: &SetupContext, guid: Arc, old_guid: &str, start_os_password: String, -) -> Result<(Arc, Hostname, OnionAddressV3, X509), Error> { - *ctx.setup_status.write().await = Some(Ok(SetupStatus { - bytes_transferred: 0, - total_bytes: None, - complete: false, - })); + SetupExecuteProgress { + init_phases, + restore_phase, + rpc_ctx_phases, + }: SetupExecuteProgress, +) -> Result<(SetupResult, RpcContext), Error> { + let mut restore_phase = 
restore_phase.or_not_found("restore progress")?; + restore_phase.start(); let _ = crate::disk::main::import( &old_guid, - "/media/embassy/migrate", + "/media/startos/migrate", RepairStrategy::Preen, if guid.ends_with("_UNENC") { None @@ -473,9 +506,9 @@ async fn migrate( ) .await?; - let main_transfer_args = ("/media/embassy/migrate/main/", "/embassy-data/main/"); + let main_transfer_args = ("/media/startos/migrate/main/", "/embassy-data/main/"); let package_data_transfer_args = ( - "/media/embassy/migrate/package-data/", + "/media/startos/migrate/package-data/", "/embassy-data/package-data/", ); @@ -500,20 +533,12 @@ async fn migrate( res = async { loop { tokio::time::sleep(Duration::from_secs(1)).await; - *ctx.setup_status.write().await = Some(Ok(SetupStatus { - bytes_transferred: 0, - total_bytes: Some(main_transfer_size.load() + package_data_transfer_size.load()), - complete: false, - })); + restore_phase.set_total(main_transfer_size.load() + package_data_transfer_size.load()); } } => res, }; - *ctx.setup_status.write().await = Some(Ok(SetupStatus { - bytes_transferred: 0, - total_bytes: Some(size), - complete: false, - })); + restore_phase.set_total(size); let main_transfer_progress = Counter::new(0, ordering); let package_data_transfer_progress = Counter::new(0, ordering); @@ -529,18 +554,17 @@ async fn migrate( res = async { loop { tokio::time::sleep(Duration::from_secs(1)).await; - *ctx.setup_status.write().await = Some(Ok(SetupStatus { - bytes_transferred: main_transfer_progress.load() + package_data_transfer_progress.load(), - total_bytes: Some(size), - complete: false, - })); + restore_phase.set_done(main_transfer_progress.load() + package_data_transfer_progress.load()); } } => res, } - let (hostname, tor_addr, root_ca) = setup_init(&ctx, Some(start_os_password)).await?; + crate::disk::main::export(&old_guid, "/media/startos/migrate").await?; + restore_phase.complete(); - crate::disk::main::export(&old_guid, "/media/embassy/migrate").await?; + let 
(account, net_ctrl) = setup_init(&ctx, Some(start_os_password), init_phases).await?; - Ok((guid, hostname, tor_addr, root_ca)) + let rpc_ctx = RpcContext::init(&ctx.config, guid, Some(net_ctrl), rpc_ctx_phases).await?; + + Ok(((&account).try_into()?, rpc_ctx)) } diff --git a/core/startos/src/sound.rs b/core/startos/src/sound.rs index 8dc78357c..8cedd78ce 100644 --- a/core/startos/src/sound.rs +++ b/core/startos/src/sound.rs @@ -10,12 +10,12 @@ use crate::util::{FileLock, Invoke}; use crate::{Error, ErrorKind}; lazy_static::lazy_static! { - static ref SEMITONE_K: f64 = 2f64.powf(1f64 / 12f64); - static ref A_4: f64 = 440f64; - static ref C_0: f64 = *A_4 / SEMITONE_K.powf(9f64) / 2f64.powf(4f64); + static ref SEMITONE_K: f64 = 2f64.powf(1.0 / 12.0); + static ref A_4: f64 = 440.0; + static ref C_0: f64 = *A_4 / SEMITONE_K.powf(9.0) / 2_f64.powf(4.0); } -pub const SOUND_LOCK_FILE: &str = "/etc/embassy/sound.lock"; +pub const SOUND_LOCK_FILE: &str = "/run/startos/sound.lock"; struct SoundInterface { guard: Option, diff --git a/core/startos/src/ssh.rs b/core/startos/src/ssh.rs index 787d54056..b97fba3e4 100644 --- a/core/startos/src/ssh.rs +++ b/core/startos/src/ssh.rs @@ -5,7 +5,7 @@ use clap::builder::ValueParserFactory; use clap::Parser; use color_eyre::eyre::eyre; use imbl_value::InternedString; -use rpc_toolkit::{command, from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; +use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Serialize}; use tracing::instrument; use ts_rs::TS; @@ -13,6 +13,7 @@ use ts_rs::TS; use crate::context::{CliContext, RpcContext}; use crate::prelude::*; use crate::util::clap::FromStrParser; +use crate::util::io::create_file; use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; pub const SSH_AUTHORIZED_KEYS_FILE: &str = "/home/start9/.ssh/authorized_keys"; @@ -79,28 +80,28 @@ impl std::str::FromStr for SshPubKey { } // #[command(subcommands(add, delete, 
list,))] -pub fn ssh() -> ParentHandler { +pub fn ssh() -> ParentHandler { ParentHandler::new() .subcommand( "add", from_fn_async(add) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "delete", from_fn_async(delete) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "list", from_fn_async(list) .with_display_serializable() - .with_custom_display_fn::(|handle, result| { + .with_custom_display_fn(|handle, result| { Ok(display_all_ssh_keys(handle.params, result)) }) - .with_remote_cli::(), + .with_call_remote::(), ) } @@ -229,7 +230,7 @@ pub async fn sync_keys>(keys: &SshKeys, dest: P) -> Result<(), Er if tokio::fs::metadata(ssh_dir).await.is_err() { tokio::fs::create_dir_all(ssh_dir).await?; } - let mut f = tokio::fs::File::create(dest).await?; + let mut f = create_file(dest).await?; for key in keys.0.values() { f.write_all(key.0.to_key_format().as_bytes()).await?; f.write_all(b"\n").await?; diff --git a/core/startos/src/status/health_check.rs b/core/startos/src/status/health_check.rs index cd5616527..90b20f8c5 100644 --- a/core/startos/src/status/health_check.rs +++ b/core/startos/src/status/health_check.rs @@ -1,7 +1,12 @@ +use std::str::FromStr; + +use clap::builder::ValueParserFactory; pub use models::HealthCheckId; use serde::{Deserialize, Serialize}; use ts_rs::TS; +use crate::util::clap::FromStrParser; + #[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, TS)] #[serde(rename_all = "camelCase")] pub struct HealthCheckResult { @@ -9,6 +14,45 @@ pub struct HealthCheckResult { #[serde(flatten)] pub kind: HealthCheckResultKind, } +// healthCheckName:kind:message OR healthCheckName:kind +impl FromStr for HealthCheckResult { + type Err = color_eyre::eyre::Report; + fn from_str(s: &str) -> Result { + let from_parts = |name: &str, kind: &str, message: Option<&str>| { + let message = message.map(|x| x.to_string()); + let kind = match kind { + "success" => HealthCheckResultKind::Success { message }, + 
"disabled" => HealthCheckResultKind::Disabled { message }, + "starting" => HealthCheckResultKind::Starting { message }, + "loading" => HealthCheckResultKind::Loading { + message: message.unwrap_or_default(), + }, + "failure" => HealthCheckResultKind::Failure { + message: message.unwrap_or_default(), + }, + _ => return Err(color_eyre::eyre::eyre!("Invalid health check kind")), + }; + Ok(Self { + name: name.to_string(), + kind, + }) + }; + let parts = s.split(':').collect::>(); + match &*parts { + [name, kind, message] => from_parts(name, kind, Some(message)), + [name, kind] => from_parts(name, kind, None), + _ => Err(color_eyre::eyre::eyre!( + "Could not match the shape of the result ${parts:?}" + )), + } + } +} +impl ValueParserFactory for HealthCheckResult { + type Parser = FromStrParser; + fn value_parser() -> Self::Parser { + FromStrParser::new() + } +} #[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, TS)] #[serde(rename_all = "camelCase")] diff --git a/core/startos/src/status/mod.rs b/core/startos/src/status/mod.rs index 2faa90e79..c1d3a36ad 100644 --- a/core/startos/src/status/mod.rs +++ b/core/startos/src/status/mod.rs @@ -1,14 +1,13 @@ -use std::collections::BTreeMap; +use std::{collections::BTreeMap, sync::Arc}; use chrono::{DateTime, Utc}; use imbl::OrdMap; -use models::PackageId; use serde::{Deserialize, Serialize}; use ts_rs::TS; use self::health_check::HealthCheckId; -use crate::prelude::*; use crate::status::health_check::HealthCheckResult; +use crate::{prelude::*, util::GeneralGuard}; pub mod health_check; #[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS)] @@ -18,23 +17,6 @@ pub mod health_check; pub struct Status { pub configured: bool, pub main: MainStatus, - #[serde(default)] - pub dependency_config_errors: DependencyConfigErrors, -} - -#[derive(Clone, Debug, Deserialize, Serialize, HasModel, Default, TS)] -#[model = "Model"] -#[ts(export)] -pub struct DependencyConfigErrors(pub BTreeMap); -impl Map for 
DependencyConfigErrors { - type Key = PackageId; - type Value = String; - fn key_str(key: &Self::Key) -> Result, Error> { - Ok(key) - } - fn key_string(key: &Self::Key) -> Result { - Ok(key.clone().into()) - } } #[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq, TS)] @@ -43,10 +25,8 @@ impl Map for DependencyConfigErrors { pub enum MainStatus { Stopped, Restarting, - #[serde(rename_all = "camelCase")] - Stopping { - timeout: crate::util::serde::Duration, - }, + Restoring, + Stopping, Starting, #[serde(rename_all = "camelCase")] Running { @@ -72,6 +52,7 @@ impl MainStatus { started: Some(_), .. } => true, MainStatus::Stopped + | MainStatus::Restoring | MainStatus::Stopping { .. } | MainStatus::Restarting | MainStatus::BackingUp { started: None, .. } => false, @@ -93,6 +74,7 @@ impl MainStatus { MainStatus::Running { started, .. } => Some(*started), MainStatus::BackingUp { started, .. } => *started, MainStatus::Stopped => None, + MainStatus::Restoring => None, MainStatus::Restarting => None, MainStatus::Stopping { .. } => None, MainStatus::Starting { .. } => None, @@ -102,11 +84,24 @@ impl MainStatus { let (started, health) = match self { MainStatus::Starting { .. } => (Some(Utc::now()), Default::default()), MainStatus::Running { started, health } => (Some(started.clone()), health.clone()), - MainStatus::Stopped | MainStatus::Stopping { .. } | MainStatus::Restarting => { - (None, Default::default()) - } + MainStatus::Stopped + | MainStatus::Stopping { .. } + | MainStatus::Restoring + | MainStatus::Restarting => (None, Default::default()), MainStatus::BackingUp { .. } => return self.clone(), }; MainStatus::BackingUp { started, health } } + + pub fn health(&self) -> Option<&OrdMap> { + match self { + MainStatus::Running { health, .. } => Some(health), + MainStatus::BackingUp { health, .. } => Some(health), + MainStatus::Stopped + | MainStatus::Restoring + | MainStatus::Stopping { .. } + | MainStatus::Restarting => None, + MainStatus::Starting { .. 
} => None, + } + } } diff --git a/core/startos/src/system.rs b/core/startos/src/system.rs index 1a851fd2e..7af94588b 100644 --- a/core/startos/src/system.rs +++ b/core/startos/src/system.rs @@ -5,8 +5,8 @@ use chrono::Utc; use clap::Parser; use color_eyre::eyre::eyre; use futures::FutureExt; -use rpc_toolkit::yajrc::RpcError; -use rpc_toolkit::{command, from_fn_async, AnyContext, Empty, HandlerExt, ParentHandler}; +use imbl::vector; +use rpc_toolkit::{from_fn_async, Context, Empty, HandlerExt, ParentHandler}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use tokio::process::Command; use tokio::sync::broadcast::Receiver; @@ -16,33 +16,31 @@ use ts_rs::TS; use crate::context::{CliContext, RpcContext}; use crate::disk::util::{get_available, get_used}; -use crate::logs::{ - cli_logs_generic_follow, cli_logs_generic_nofollow, fetch_logs, follow_logs, LogFollowResponse, - LogResponse, LogSource, -}; +use crate::logs::{LogSource, LogsParams, SYSTEM_UNIT}; use crate::prelude::*; +use crate::rpc_continuations::RpcContinuations; use crate::shutdown::Shutdown; use crate::util::cpupower::{get_available_governors, set_governor, Governor}; +use crate::util::io::open_file; use crate::util::serde::{display_serializable, HandlerExtSerde, WithIoFormat}; use crate::util::Invoke; -use crate::{Error, ErrorKind, ResultExt}; -pub fn experimental() -> ParentHandler { +pub fn experimental() -> ParentHandler { ParentHandler::new() .subcommand( "zram", from_fn_async(zram) .no_display() - .with_remote_cli::(), + .with_call_remote::(), ) .subcommand( "governor", from_fn_async(governor) .with_display_serializable() - .with_custom_display_fn::(|handle, result| { + .with_custom_display_fn(|handle, result| { Ok(display_governor_info(handle.params, result)) }) - .with_remote_cli::(), + .with_call_remote::(), ) } @@ -230,173 +228,13 @@ pub async fn time(ctx: RpcContext, _: Empty) -> Result { uptime: ctx.start_time.elapsed().as_secs(), }) } -#[derive(Deserialize, Serialize, Parser, 
TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct LogsParams { - #[arg(short = 'l', long = "limit")] - #[ts(type = "number | null")] - limit: Option, - #[arg(short = 'c', long = "cursor")] - cursor: Option, - #[arg(short = 'B', long = "before")] - #[serde(default)] - before: bool, - #[arg(short = 'f', long = "follow")] - #[serde(default)] - follow: bool, + +pub fn logs>() -> ParentHandler { + crate::logs::logs(|_: &C, _| async { Ok(LogSource::Unit(SYSTEM_UNIT)) }) } -pub fn logs() -> ParentHandler { - ParentHandler::new() - .root_handler( - from_fn_async(cli_logs) - .no_display() - .with_inherited(|params, _| params), - ) - .root_handler( - from_fn_async(logs_nofollow) - .with_inherited(|params, _| params) - .no_cli(), - ) - .subcommand( - "follow", - from_fn_async(logs_follow) - .with_inherited(|params, _| params) - .no_cli(), - ) -} - -pub async fn cli_logs( - ctx: CliContext, - _: Empty, - LogsParams { - limit, - cursor, - before, - follow, - }: LogsParams, -) -> Result<(), RpcError> { - if follow { - if cursor.is_some() { - return Err(RpcError::from(Error::new( - eyre!("The argument '--cursor ' cannot be used with '--follow'"), - crate::ErrorKind::InvalidRequest, - ))); - } - if before { - return Err(RpcError::from(Error::new( - eyre!("The argument '--before' cannot be used with '--follow'"), - crate::ErrorKind::InvalidRequest, - ))); - } - cli_logs_generic_follow(ctx, "server.logs.follow", None, limit).await - } else { - cli_logs_generic_nofollow(ctx, "server.logs", None, limit, cursor, before).await - } -} -pub async fn logs_nofollow( - _ctx: AnyContext, - _: Empty, - LogsParams { - limit, - cursor, - before, - .. - }: LogsParams, -) -> Result { - fetch_logs(LogSource::System, limit, cursor, before).await -} - -pub async fn logs_follow( - ctx: RpcContext, - _: Empty, - LogsParams { limit, .. 
}: LogsParams, -) -> Result { - follow_logs(ctx, LogSource::System, limit).await -} -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct KernelLogsParams { - #[arg(short = 'l', long = "limit")] - #[ts(type = "number | null")] - limit: Option, - #[arg(short = 'c', long = "cursor")] - cursor: Option, - #[arg(short = 'B', long = "before")] - #[serde(default)] - before: bool, - #[arg(short = 'f', long = "follow")] - #[serde(default)] - follow: bool, -} -pub fn kernel_logs() -> ParentHandler { - ParentHandler::new() - .root_handler( - from_fn_async(cli_kernel_logs) - .no_display() - .with_inherited(|params, _| params), - ) - .root_handler( - from_fn_async(kernel_logs_nofollow) - .with_inherited(|params, _| params) - .no_cli(), - ) - .subcommand( - "follow", - from_fn_async(kernel_logs_follow) - .with_inherited(|params, _| params) - .no_cli(), - ) -} -pub async fn cli_kernel_logs( - ctx: CliContext, - _: Empty, - KernelLogsParams { - limit, - cursor, - before, - follow, - }: KernelLogsParams, -) -> Result<(), RpcError> { - if follow { - if cursor.is_some() { - return Err(RpcError::from(Error::new( - eyre!("The argument '--cursor ' cannot be used with '--follow'"), - crate::ErrorKind::InvalidRequest, - ))); - } - if before { - return Err(RpcError::from(Error::new( - eyre!("The argument '--before' cannot be used with '--follow'"), - crate::ErrorKind::InvalidRequest, - ))); - } - cli_logs_generic_follow(ctx, "server.kernel-logs.follow", None, limit).await - } else { - cli_logs_generic_nofollow(ctx, "server.kernel-logs", None, limit, cursor, before).await - } -} -pub async fn kernel_logs_nofollow( - _ctx: AnyContext, - _: Empty, - KernelLogsParams { - limit, - cursor, - before, - .. - }: KernelLogsParams, -) -> Result { - fetch_logs(LogSource::Kernel, limit, cursor, before).await -} - -pub async fn kernel_logs_follow( - ctx: RpcContext, - _: Empty, - KernelLogsParams { limit, .. 
}: KernelLogsParams, -) -> Result { - follow_logs(ctx, LogSource::Kernel, limit).await +pub fn kernel_logs>() -> ParentHandler { + crate::logs::logs(|_: &C, _| async { Ok(LogSource::Kernel) }) } #[derive(Serialize, Deserialize)] @@ -821,7 +659,7 @@ impl ProcStat { async fn get_proc_stat() -> Result { use tokio::io::AsyncBufReadExt; let mut cpu_line = String::new(); - let _n = tokio::io::BufReader::new(tokio::fs::File::open("/proc/stat").await?) + let _n = tokio::io::BufReader::new(open_file("/proc/stat").await?) .read_line(&mut cpu_line) .await?; let stats: Vec = cpu_line @@ -987,6 +825,51 @@ async fn get_disk_info() -> Result { }) } +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct SmtpValue { + #[arg(long)] + pub server: String, + #[arg(long)] + pub port: u16, + #[arg(long)] + pub from: String, + #[arg(long)] + pub login: String, + #[arg(long)] + pub password: Option, +} +pub async fn set_system_smtp(ctx: RpcContext, smtp: SmtpValue) -> Result<(), Error> { + let smtp = Some(smtp); + ctx.db + .mutate(|db| { + db.as_public_mut() + .as_server_info_mut() + .as_smtp_mut() + .ser(&smtp) + }) + .await?; + if let Some(callbacks) = ctx.callbacks.get_system_smtp() { + callbacks.call(vector![to_value(&smtp)?]).await?; + } + Ok(()) +} +pub async fn clear_system_smtp(ctx: RpcContext) -> Result<(), Error> { + ctx.db + .mutate(|db| { + db.as_public_mut() + .as_server_info_mut() + .as_smtp_mut() + .ser(&None) + }) + .await?; + if let Some(callbacks) = ctx.callbacks.get_system_smtp() { + callbacks.call(vector![Value::Null]).await?; + } + Ok(()) +} + #[tokio::test] #[ignore] pub async fn test_get_temp() { diff --git a/core/startos/src/update/latest_information.rs b/core/startos/src/update/latest_information.rs deleted file mode 100644 index e897dd40a..000000000 --- a/core/startos/src/update/latest_information.rs +++ /dev/null @@ -1,23 +0,0 @@ -use std::collections::HashMap; - -use 
emver::Version; -use serde::{Deserialize, Serialize}; -use serde_with::{serde_as, DisplayFromStr}; - -#[serde_as] -#[derive(Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -pub struct LatestInformation { - release_notes: HashMap, - headline: String, - #[serde_as(as = "DisplayFromStr")] - pub version: Version, -} - -/// Captured from https://beta-registry-0-3.start9labs.com/eos/latest 2021-09-24 -#[test] -fn latest_information_from_server() { - let data_from_server = r#"{"release-notes":{"0.3.0":"This major software release encapsulates the optimal performance, security, and management enhancments to the embassyOS experience."},"headline":"Major embassyOS release","version":"0.3.0"}"#; - let latest_information: LatestInformation = serde_json::from_str(data_from_server).unwrap(); - assert_eq!(latest_information.version.minor(), 3); -} diff --git a/core/startos/src/update/mod.rs b/core/startos/src/update/mod.rs index 05303182d..51d8d77ae 100644 --- a/core/startos/src/update/mod.rs +++ b/core/startos/src/update/mod.rs @@ -1,46 +1,67 @@ -use std::path::PathBuf; -use std::sync::atomic::{AtomicBool, Ordering}; +use std::collections::BTreeMap; +use std::path::Path; +use std::time::Duration; -use clap::Parser; +use clap::{ArgAction, Parser}; use color_eyre::eyre::{eyre, Result}; -use emver::Version; -use helpers::{Rsync, RsyncOptions}; -use lazy_static::lazy_static; +use exver::{Version, VersionRange}; +use futures::TryStreamExt; +use helpers::{AtomicFile, NonDetachingJoinHandle}; +use imbl_value::json; +use itertools::Itertools; +use patch_db::json_ptr::JsonPointer; use reqwest::Url; -use rpc_toolkit::command; +use rpc_toolkit::HandlerArgs; use serde::{Deserialize, Serialize}; use tokio::process::Command; -use tokio_stream::StreamExt; use tracing::instrument; use ts_rs::TS; -use crate::context::RpcContext; -use crate::db::model::public::UpdateProgress; +use crate::context::{CliContext, RpcContext}; use crate::disk::mount::filesystem::bind::Bind; -use 
crate::disk::mount::filesystem::ReadWrite; -use crate::disk::mount::guard::MountGuard; +use crate::disk::mount::filesystem::block_dev::BlockDev; +use crate::disk::mount::filesystem::efivarfs::{self, EfiVarFs}; +use crate::disk::mount::filesystem::overlayfs::OverlayGuard; +use crate::disk::mount::filesystem::MountType; +use crate::disk::mount::guard::{GenericMountGuard, MountGuard, TmpMountGuard}; use crate::notifications::{notify, NotificationLevel}; use crate::prelude::*; -use crate::registry::marketplace::with_query_params; +use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle, PhasedProgressBar}; +use crate::registry::asset::RegistryAsset; +use crate::registry::context::{RegistryContext, RegistryUrlParams}; +use crate::registry::os::index::OsVersionInfo; +use crate::registry::os::SIG_CONTEXT; +use crate::registry::signer::commitment::blake3::Blake3Commitment; +use crate::registry::signer::commitment::Commitment; +use crate::rpc_continuations::{Guid, RpcContinuation}; +use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; use crate::sound::{ CIRCLE_OF_5THS_SHORT, UPDATE_FAILED_1, UPDATE_FAILED_2, UPDATE_FAILED_3, UPDATE_FAILED_4, }; -use crate::update::latest_information::LatestInformation; +use crate::util::net::WebSocketExt; use crate::util::Invoke; -use crate::{Error, ErrorKind, ResultExt, PLATFORM}; - -mod latest_information; - -lazy_static! 
{ - static ref UPDATED: AtomicBool = AtomicBool::new(false); -} +use crate::PLATFORM; #[derive(Deserialize, Serialize, Parser, TS)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct UpdateSystemParams { #[ts(type = "string")] - marketplace_url: Url, + registry: Url, + #[ts(type = "string | null")] + #[arg(long = "to")] + target: Option, + #[arg(long = "no-progress", action = ArgAction::SetFalse)] + #[serde(default)] + progress: bool, +} + +#[derive(Deserialize, Serialize, TS)] +pub struct UpdateSystemRes { + #[ts(type = "string | null")] + target: Option, + #[ts(type = "string | null")] + progress: Option, } /// An user/ daemon would call this to update the system to the latest version and do the updates available, @@ -48,16 +69,134 @@ pub struct UpdateSystemParams { #[instrument(skip_all)] pub async fn update_system( ctx: RpcContext, - UpdateSystemParams { marketplace_url }: UpdateSystemParams, -) -> Result { - if UPDATED.load(Ordering::SeqCst) { - return Ok(UpdateResult::NoUpdates); + UpdateSystemParams { + target, + registry, + progress, + }: UpdateSystemParams, +) -> Result { + if ctx + .db + .peek() + .await + .into_public() + .into_server_info() + .into_status_info() + .into_updated() + .de()? + { + return Err(Error::new(eyre!("Server was already updated. 
Please restart your device before attempting to update again."), ErrorKind::InvalidRequest)); } - Ok(if maybe_do_update(ctx, marketplace_url).await?.is_some() { - UpdateResult::Updating + let target = + maybe_do_update(ctx.clone(), registry, target.unwrap_or(VersionRange::Any)).await?; + let progress = if progress && target.is_some() { + let guid = Guid::new(); + ctx.clone() + .rpc_continuations + .add( + guid.clone(), + RpcContinuation::ws( + |mut ws| async move { + if let Err(e) = async { + let mut sub = ctx + .db + .subscribe( + "/public/serverInfo/statusInfo/updateProgress" + .parse::() + .with_kind(ErrorKind::Database)?, + ) + .await; + while { + let progress = ctx + .db + .peek() + .await + .into_public() + .into_server_info() + .into_status_info() + .into_update_progress() + .de()?; + ws.send(axum::extract::ws::Message::Text( + serde_json::to_string(&progress) + .with_kind(ErrorKind::Serialization)?, + )) + .await + .with_kind(ErrorKind::Network)?; + progress.is_some() + } { + sub.recv().await; + } + + ws.normal_close("complete").await?; + + Ok::<_, Error>(()) + } + .await + { + tracing::error!("Error returning progress of update: {e}"); + tracing::debug!("{e:?}") + } + }, + Duration::from_secs(30), + ), + ) + .await; + Some(guid) } else { - UpdateResult::NoUpdates - }) + None + }; + Ok(UpdateSystemRes { target, progress }) +} + +pub async fn cli_update_system( + HandlerArgs { + context, + parent_method, + method, + raw_params, + .. 
+ }: HandlerArgs, +) -> Result<(), Error> { + let res = from_value::( + context + .call_remote::( + &parent_method.into_iter().chain(method).join("."), + raw_params, + ) + .await?, + )?; + match res.target { + None => println!("No updates available"), + Some(v) => { + if let Some(progress) = res.progress { + let mut ws = context.ws_continuation(progress).await?; + let mut progress = PhasedProgressBar::new(&format!("Updating to v{v}...")); + let mut prev = None; + while let Some(msg) = ws.try_next().await.with_kind(ErrorKind::Network)? { + if let tokio_tungstenite::tungstenite::Message::Text(msg) = msg { + if let Some(snap) = + serde_json::from_str(&msg).with_kind(ErrorKind::Deserialization)? + { + progress.update(&snap); + prev = Some(snap); + } else { + break; + } + } + } + if let Some(mut prev) = prev { + for phase in &mut prev.phases { + phase.progress.complete(); + } + prev.overall.complete(); + progress.update(&prev); + } + } else { + println!("Updating to v{v}...") + } + } + } + Ok(()) } /// What is the status of the updates? @@ -80,30 +219,49 @@ pub fn display_update_result(_: UpdateSystemParams, status: UpdateResult) { } #[instrument(skip_all)] -async fn maybe_do_update(ctx: RpcContext, marketplace_url: Url) -> Result, Error> { +async fn maybe_do_update( + ctx: RpcContext, + registry: Url, + target: VersionRange, +) -> Result, Error> { let peeked = ctx.db.peek().await; - let latest_version: Version = ctx - .client - .get(with_query_params( - ctx.clone(), - format!("{}/eos/v0/latest", marketplace_url,).parse()?, - )) - .send() - .await - .with_kind(ErrorKind::Network)? - .json::() - .await - .with_kind(ErrorKind::Network)? 
- .version; let current_version = peeked.as_public().as_server_info().as_version().de()?; - if latest_version < *current_version { + let mut available = from_value::>( + ctx.call_remote_with::( + "os.version.get", + json!({ + "source": current_version, + "target": target, + }), + RegistryUrlParams { registry }, + ) + .await?, + )?; + let Some((target_version, asset)) = available + .pop_last() + .and_then(|(v, mut info)| info.squashfs.remove(&**PLATFORM).map(|a| (v, a))) + else { return Ok(None); + }; + if !target_version.satisfies(&target) { + return Err(Error::new( + eyre!("got back version from registry that does not satisfy {target}"), + ErrorKind::Registry, + )); } - let eos_url = EosUrl { - base: marketplace_url, - version: latest_version, - }; + asset.validate(SIG_CONTEXT, asset.all_signers())?; + + let progress = FullProgressTracker::new(); + let prune_phase = progress.add_phase("Pruning Old OS Images".into(), Some(2)); + let mut download_phase = progress.add_phase("Downloading File".into(), Some(100)); + download_phase.set_total(asset.commitment.size); + let reverify_phase = progress.add_phase("Reverifying File".into(), Some(10)); + let sync_boot_phase = progress.add_phase("Syncing Boot Files".into(), Some(1)); + let finalize_phase = progress.add_phase("Finalizing Update".into(), Some(1)); + + let start_progress = progress.snapshot(); + let status = ctx .db .mutate(|db| { @@ -115,10 +273,7 @@ async fn maybe_do_update(ctx: RpcContext, marketplace_url: Url) -> Result