diff --git a/.claude/settings.json b/.claude/settings.json index ce5d2734a..0967ef424 100644 --- a/.claude/settings.json +++ b/.claude/settings.json @@ -1,5 +1 @@ -{ - "attribution": { - "commit": "" - } -} +{} diff --git a/.github/workflows/startos-iso.yaml b/.github/workflows/startos-iso.yaml index be64862ce..40dec852b 100644 --- a/.github/workflows/startos-iso.yaml +++ b/.github/workflows/startos-iso.yaml @@ -25,10 +25,13 @@ on: - ALL - x86_64 - x86_64-nonfree + - x86_64-nvidia - aarch64 - aarch64-nonfree + - aarch64-nvidia # - raspberrypi - riscv64 + - riscv64-nonfree deploy: type: choice description: Deploy @@ -65,10 +68,13 @@ jobs: fromJson('{ "x86_64": ["x86_64"], "x86_64-nonfree": ["x86_64"], + "x86_64-nvidia": ["x86_64"], "aarch64": ["aarch64"], "aarch64-nonfree": ["aarch64"], + "aarch64-nvidia": ["aarch64"], "raspberrypi": ["aarch64"], "riscv64": ["riscv64"], + "riscv64-nonfree": ["riscv64"], "ALL": ["x86_64", "aarch64", "riscv64"] }')[github.event.inputs.platform || 'ALL'] }} @@ -125,7 +131,7 @@ jobs: format( '[ ["{0}"], - ["x86_64", "x86_64-nonfree", "aarch64", "aarch64-nonfree", "riscv64"] + ["x86_64", "x86_64-nonfree", "x86_64-nvidia", "aarch64", "aarch64-nonfree", "aarch64-nvidia", "riscv64", "riscv64-nonfree"] ]', github.event.inputs.platform || 'ALL' ) @@ -139,18 +145,24 @@ jobs: fromJson('{ "x86_64": "ubuntu-latest", "x86_64-nonfree": "ubuntu-latest", + "x86_64-nvidia": "ubuntu-latest", "aarch64": "ubuntu-24.04-arm", "aarch64-nonfree": "ubuntu-24.04-arm", + "aarch64-nvidia": "ubuntu-24.04-arm", "raspberrypi": "ubuntu-24.04-arm", "riscv64": "ubuntu-24.04-arm", + "riscv64-nonfree": "ubuntu-24.04-arm", }')[matrix.platform], fromJson('{ "x86_64": "buildjet-8vcpu-ubuntu-2204", "x86_64-nonfree": "buildjet-8vcpu-ubuntu-2204", + "x86_64-nvidia": "buildjet-8vcpu-ubuntu-2204", "aarch64": "buildjet-8vcpu-ubuntu-2204-arm", "aarch64-nonfree": "buildjet-8vcpu-ubuntu-2204-arm", + "aarch64-nvidia": "buildjet-8vcpu-ubuntu-2204-arm", "raspberrypi": 
"buildjet-8vcpu-ubuntu-2204-arm", "riscv64": "buildjet-8vcpu-ubuntu-2204", + "riscv64-nonfree": "buildjet-8vcpu-ubuntu-2204", }')[matrix.platform] ) )[github.event.inputs.runner == 'fast'] @@ -161,10 +173,13 @@ jobs: fromJson('{ "x86_64": "x86_64", "x86_64-nonfree": "x86_64", + "x86_64-nvidia": "x86_64", "aarch64": "aarch64", "aarch64-nonfree": "aarch64", + "aarch64-nvidia": "aarch64", "raspberrypi": "aarch64", "riscv64": "riscv64", + "riscv64-nonfree": "riscv64", }')[matrix.platform] }} steps: diff --git a/.gitignore b/.gitignore index 4207eb792..10d8b5424 100644 --- a/.gitignore +++ b/.gitignore @@ -21,4 +21,4 @@ secrets.db /build/lib/firmware tmp web/.i18n-checked -agents/USER.md +docs/USER.md diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md new file mode 100644 index 000000000..967978a54 --- /dev/null +++ b/ARCHITECTURE.md @@ -0,0 +1,101 @@ +# Architecture + +StartOS is an open-source Linux distribution for running personal servers. It manages discovery, installation, network configuration, backups, and health monitoring of self-hosted services. 
+ +## Tech Stack + +- Backend: Rust (async/Tokio, Axum web framework) +- Frontend: Angular 20 + TypeScript + TaigaUI +- Container runtime: Node.js/TypeScript with LXC +- Database/State: Patch-DB (git submodule) - storage layer with reactive frontend sync +- API: JSON-RPC via rpc-toolkit (see `core/rpc-toolkit.md`) +- Auth: Password + session cookie, public/private key signatures, local authcookie (see `core/src/middleware/auth/`) + +## Project Structure + +```bash +/ +├── assets/ # Screenshots for README +├── build/ # Auxiliary files and scripts for deployed images +├── container-runtime/ # Node.js program managing package containers +├── core/ # Rust backend: API, daemon (startd), CLI (start-cli) +├── debian/ # Debian package maintainer scripts +├── image-recipe/ # Scripts for building StartOS images +├── patch-db/ # (submodule) Diff-based data store for frontend sync +├── sdk/ # TypeScript SDK for building StartOS packages +└── web/ # Web UIs (Angular) +``` + +## Components + +- **`core/`** — Rust backend daemon. Produces a single binary `startbox` that is symlinked as `startd` (main daemon), `start-cli` (CLI), `start-container` (runs inside LXC containers), `registrybox` (package registry), and `tunnelbox` (VPN/tunnel). Handles all backend logic: RPC API, service lifecycle, networking (DNS, ACME, WiFi, Tor, WireGuard), backups, and database state management. See [core/ARCHITECTURE.md](core/ARCHITECTURE.md). + +- **`web/`** — Angular 20 + TypeScript workspace using Taiga UI. Contains three applications (admin UI, setup wizard, VPN management) and two shared libraries (common components/services, marketplace). Communicates with the backend exclusively via JSON-RPC. See [web/ARCHITECTURE.md](web/ARCHITECTURE.md). + +- **`container-runtime/`** — Node.js runtime that runs inside each service's LXC container. Loads the service's JavaScript from its S9PK package and manages subcontainers. Communicates with the host daemon via JSON-RPC over Unix socket. 
See [container-runtime/CLAUDE.md](container-runtime/CLAUDE.md). + +- **`sdk/`** — TypeScript SDK for packaging services for StartOS (`@start9labs/start-sdk`). Split into `base/` (core types, ABI definitions, effects interface, consumed by web as `@start9labs/start-sdk-base`) and `package/` (full SDK for service developers, consumed by container-runtime as `@start9labs/start-sdk`). + +- **`patch-db/`** — Git submodule providing diff-based state synchronization. Uses CBOR encoding. Backend mutations produce diffs that are pushed to the frontend via WebSocket, enabling reactive UI updates without polling. See [patch-db repo](https://github.com/Start9Labs/patch-db). + +## Build Pipeline + +Components have a strict dependency chain. Changes flow in one direction: + +``` +Rust (core/) + → cargo test exports ts-rs types to core/bindings/ + → rsync copies to sdk/base/lib/osBindings/ + → SDK build produces baseDist/ and dist/ + → web/ consumes baseDist/ (via @start9labs/start-sdk-base) + → container-runtime/ consumes dist/ (via @start9labs/start-sdk) +``` + +Key make targets along this chain: + +| Step | Command | What it does | +|---|---|---| +| 1 | `cargo check -p start-os` | Verify Rust compiles | +| 2 | `make ts-bindings` | Export ts-rs types → rsync to SDK | +| 3 | `cd sdk && make baseDist dist` | Build SDK packages | +| 4 | `cd web && npm run check` | Type-check Angular projects | +| 5 | `cd container-runtime && npm run check` | Type-check runtime | + +**Important**: Editing `sdk/base/lib/osBindings/*.ts` alone is NOT sufficient — you must rebuild the SDK bundle (step 3) before web/container-runtime can see the changes. + +## Cross-Layer Verification + +When making changes across multiple layers (Rust, SDK, web, container-runtime), verify in this order: + +1. **Rust**: `cargo check -p start-os` — verifies core compiles +2. 
**TS bindings**: `make ts-bindings` — regenerates TypeScript types from Rust `#[ts(export)]` structs + - Runs `./core/build/build-ts.sh` to export ts-rs types to `core/bindings/` + - Syncs `core/bindings/` → `sdk/base/lib/osBindings/` via rsync + - If you manually edit files in `sdk/base/lib/osBindings/`, you must still rebuild the SDK (step 3) +3. **SDK bundle**: `cd sdk && make baseDist dist` — compiles SDK source into packages + - `baseDist/` is consumed by `/web` (via `@start9labs/start-sdk-base`) + - `dist/` is consumed by `/container-runtime` (via `@start9labs/start-sdk`) + - Web and container-runtime reference the **built** SDK, not source files +4. **Web type check**: `cd web && npm run check` — type-checks all Angular projects +5. **Container runtime type check**: `cd container-runtime && npm run check` — type-checks the runtime + +## Data Flow: Backend to Frontend + +StartOS uses Patch-DB for reactive state synchronization: + +1. The backend mutates state via `db.mutate()`, producing CBOR diffs +2. Diffs are pushed to the frontend over a persistent WebSocket connection +3. The frontend applies diffs to its local state copy and notifies observers +4. Components watch specific database paths via `PatchDB.watch$()`, receiving updates reactively + +This means the UI is always eventually consistent with the backend — after any mutating API call, the frontend waits for the corresponding PatchDB diff before resolving, so the UI reflects the result immediately. 
+ +## Further Reading + +- [core/ARCHITECTURE.md](core/ARCHITECTURE.md) — Rust backend architecture +- [web/ARCHITECTURE.md](web/ARCHITECTURE.md) — Angular frontend architecture +- [container-runtime/CLAUDE.md](container-runtime/CLAUDE.md) — Container runtime details +- [core/rpc-toolkit.md](core/rpc-toolkit.md) — JSON-RPC handler patterns +- [core/s9pk-structure.md](core/s9pk-structure.md) — S9PK package format +- [docs/exver.md](docs/exver.md) — Extended versioning format +- [docs/VERSION_BUMP.md](docs/VERSION_BUMP.md) — Version bumping guide diff --git a/CLAUDE.md b/CLAUDE.md index 22d94db31..7464695cf 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -2,142 +2,55 @@ This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. -## Project Overview +## Architecture -StartOS is an open-source Linux distribution for running personal servers. It manages discovery, installation, network configuration, backups, and health monitoring of self-hosted services. +See [ARCHITECTURE.md](ARCHITECTURE.md) for the full system architecture, component map, build pipeline, and cross-layer verification order. -**Tech Stack:** -- Backend: Rust (async/Tokio, Axum web framework) -- Frontend: Angular 20 + TypeScript + TaigaUI -- Container runtime: Node.js/TypeScript with LXC -- Database/State: Patch-DB (git submodule) - storage layer with reactive frontend sync -- API: JSON-RPC via rpc-toolkit (see `agents/rpc-toolkit.md`) -- Auth: Password + session cookie, public/private key signatures, local authcookie (see `core/src/middleware/auth/`) +Each major component has its own `CLAUDE.md` with detailed guidance: `core/`, `web/`, `container-runtime/`, `sdk/`. ## Build & Development See [CONTRIBUTING.md](CONTRIBUTING.md) for: + - Environment setup and requirements - Build commands and make targets - Testing and formatting commands - Environment variables **Quick reference:** + ```bash . 
./devmode.sh # Enable dev mode make update-startbox REMOTE=start9@ # Fastest iteration (binary + UI) make test-core # Run Rust tests ``` -## Architecture +## Operating Rules -### Core (`/core`) -The Rust backend daemon. Main binaries: -- `startbox` - Main daemon (runs as `startd`) -- `start-cli` - CLI interface -- `start-container` - Runs inside LXC containers; communicates with host and manages subcontainers -- `registrybox` - Registry daemon -- `tunnelbox` - VPN/tunnel daemon - -**Key modules:** -- `src/context/` - Context types (RpcContext, CliContext, InitContext, DiagnosticContext) -- `src/service/` - Service lifecycle management with actor pattern (`service_actor.rs`) -- `src/db/model/` - Patch-DB models (`public.rs` synced to frontend, `private.rs` backend-only) -- `src/net/` - Networking (DNS, ACME, WiFi, Tor via Arti, WireGuard) -- `src/s9pk/` - S9PK package format (merkle archive) -- `src/registry/` - Package registry management - -**RPC Pattern:** See `agents/rpc-toolkit.md` - -### Web (`/web`) -Angular projects sharing common code: -- `projects/ui/` - Main admin interface -- `projects/setup-wizard/` - Initial setup -- `projects/start-tunnel/` - VPN management UI -- `projects/shared/` - Common library (API clients, components) -- `projects/marketplace/` - Service discovery - -**Development:** -```bash -cd web -npm ci -npm run start:ui # Dev server with mocks -npm run build:ui # Production build -npm run check # Type check all projects -``` - -### Container Runtime (`/container-runtime`) -Node.js runtime that manages service containers via RPC. See `RPCSpec.md` for protocol. - -**Container Architecture:** -``` -LXC Container (uniform base for all services) -└── systemd - └── container-runtime.service - └── Loads /usr/lib/startos/package/index.js (from s9pk javascript.squashfs) - └── Package JS launches subcontainers (from images in s9pk) -``` - -The container runtime communicates with the host via JSON-RPC over Unix socket. 
Package JavaScript must export functions conforming to the `ABI` type defined in `sdk/base/lib/types.ts`. - -**`/media/startos/` directory (mounted by host into container):** - -| Path | Description | -|------|-------------| -| `volumes//` | Package data volumes (id-mapped, persistent) | -| `assets/` | Read-only assets from s9pk `assets.squashfs` | -| `images//` | Container images (squashfs, used for subcontainers) | -| `images/.env` | Environment variables for image | -| `images/.json` | Image metadata | -| `backup/` | Backup mount point (mounted during backup operations) | -| `rpc/service.sock` | RPC socket (container runtime listens here) | -| `rpc/host.sock` | Host RPC socket (for effects callbacks to host) | - -**S9PK Structure:** See `agents/s9pk-structure.md` - -### SDK (`/sdk`) -TypeScript SDK for packaging services (`@start9labs/start-sdk`). - -- `base/` - Core types, ABI definitions, effects interface (`@start9labs/start-sdk-base`) -- `package/` - Full SDK for package developers, re-exports base - -### Patch-DB (`/patch-db`) -Git submodule providing diff-based state synchronization. Changes to `db/model/public.rs` automatically sync to the frontend. - -**Key patterns:** -- `db.peek().await` - Get a read-only snapshot of the database state -- `db.mutate(|db| { ... 
}).await` - Apply mutations atomically, returns `MutateResult` -- `#[derive(HasModel)]` - Derive macro for types stored in the database, generates typed accessors - -**Generated accessor types** (from `HasModel` derive): -- `as_field()` - Immutable reference: `&Model` -- `as_field_mut()` - Mutable reference: `&mut Model` -- `into_field()` - Owned value: `Model` - -**`Model` APIs** (from `db/prelude.rs`): -- `.de()` - Deserialize to `T` -- `.ser(&value)` - Serialize from `T` -- `.mutate(|v| ...)` - Deserialize, mutate, reserialize -- For maps: `.keys()`, `.as_idx(&key)`, `.as_idx_mut(&key)`, `.insert()`, `.remove()`, `.contains_key()` +- Always verify cross-layer changes using the order described in [ARCHITECTURE.md](ARCHITECTURE.md#cross-layer-verification) +- Check component-level CLAUDE.md files for component-specific conventions. ALWAYS read it before operating on that component. +- Follow existing patterns before inventing new ones +- Always use `make` recipes when they exist for testing builds rather than manually invoking build commands ## Supplementary Documentation -The `agents/` directory contains detailed documentation for AI assistants: +The `docs/` directory contains cross-cutting documentation for AI assistants: - `TODO.md` - Pending tasks for AI agents (check this first, remove items when completed) - `USER.md` - Current user identifier (gitignored, see below) -- `rpc-toolkit.md` - JSON-RPC patterns and handler configuration -- `core-rust-patterns.md` - Common utilities and patterns for Rust code in `/core` (guard pattern, mount guards, etc.) -- `s9pk-structure.md` - S9PK package format structure -- `i18n-patterns.md` - Internationalization key conventions and usage in `/core` +- `exver.md` - Extended versioning format (used across core, sdk, and web) +- `VERSION_BUMP.md` - Guide for bumping the StartOS version across the codebase + +Component-specific docs live alongside their code (e.g., `core/rpc-toolkit.md`, `core/i18n-patterns.md`). 
### Session Startup On startup: -1. **Check for `agents/USER.md`** - If it doesn't exist, prompt the user for their name/identifier and create it. This file is gitignored since it varies per developer. +1. **Check for `docs/USER.md`** - If it doesn't exist, prompt the user for their name/identifier and create it. This file is gitignored since it varies per developer. + +2. **Check `docs/TODO.md` for relevant tasks** - Show TODOs that either: -2. **Check `agents/TODO.md` for relevant tasks** - Show TODOs that either: - Have no `@username` tag (relevant to everyone) - Are tagged with the current user's identifier diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index f8bafdff4..739568e40 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,37 +1,45 @@ # Contributing to StartOS -This guide is for contributing to the StartOS. If you are interested in packaging a service for StartOS, visit the [service packaging guide](https://docs.start9.com/latest/packaging-guide/). If you are interested in promoting, providing technical support, creating tutorials, or helping in other ways, please visit the [Start9 website](https://start9.com/contribute). +This guide is for contributing to the StartOS. If you are interested in packaging a service for StartOS, visit the [service packaging guide](https://github.com/Start9Labs/ai-service-packaging). If you are interested in promoting, providing technical support, creating tutorials, or helping in other ways, please visit the [Start9 website](https://start9.com/contribute). 
## Collaboration -- [Matrix](https://matrix.to/#/#community-dev:matrix.start9labs.com) -- [Telegram](https://t.me/start9_labs/47471) +- [Matrix](https://matrix.to/#/#dev-startos:matrix.start9labs.com) -## Project Structure - -```bash -/ -├── assets/ # Screenshots for README -├── build/ # Auxiliary files and scripts for deployed images -├── container-runtime/ # Node.js program managing package containers -├── core/ # Rust backend: API, daemon (startd), CLI (start-cli) -├── debian/ # Debian package maintainer scripts -├── image-recipe/ # Scripts for building StartOS images -├── patch-db/ # (submodule) Diff-based data store for frontend sync -├── sdk/ # TypeScript SDK for building StartOS packages -└── web/ # Web UIs (Angular) -``` - -See component READMEs for details: -- [`core`](core/README.md) -- [`web`](web/README.md) -- [`build`](build/README.md) -- [`patch-db`](https://github.com/Start9Labs/patch-db) +For project structure and system architecture, see [ARCHITECTURE.md](ARCHITECTURE.md). ## Environment Setup +### Installing Dependencies (Debian/Ubuntu) + +> Debian/Ubuntu is the only officially supported build environment. +> MacOS has limited build capabilities and Windows requires [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install). 
+ ```sh -git clone https://github.com/Start9Labs/start-os.git --recurse-submodules +sudo apt update +sudo apt install -y ca-certificates curl gpg build-essential +curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg +echo "deb [arch=$(dpkg-architecture -q DEB_HOST_ARCH) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian bookworm stable" | sudo tee /etc/apt/sources.list.d/docker.list +sudo apt update +sudo apt install -y sed grep gawk jq gzip brotli containerd.io docker-ce docker-ce-cli docker-compose-plugin qemu-user-static binfmt-support squashfs-tools git debspawn rsync b3sum +sudo mkdir -p /etc/debspawn/ +echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml +sudo usermod -aG docker $USER +sudo su $USER +docker run --privileged --rm tonistiigi/binfmt --install all +docker buildx create --use +curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # proceed with default installation +curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash +source ~/.bashrc +nvm install 24 +nvm use 24 +nvm alias default 24 # this prevents your machine from reverting back to another version +``` + +### Cloning the Repository + +```sh +git clone --recursive https://github.com/Start9Labs/start-os.git --branch next/major cd start-os ``` @@ -63,19 +71,21 @@ This project uses [GNU Make](https://www.gnu.org/software/make/) to build its co ### Environment Variables -| Variable | Description | -|----------|-------------| -| `PLATFORM` | Target platform: `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `riscv64`, `raspberrypi` | -| `ENVIRONMENT` | Hyphen-separated feature flags (see below) | -| `PROFILE` | Build profile: `release` (default) or `dev` | -| `GIT_BRANCH_AS_HASH` | Set to `1` to use git branch name as version hash (avoids rebuilds) | +| Variable | Description | +| -------------------- | 
--------------------------------------------------------------------------------------------------- | +| `PLATFORM` | Target platform: `x86_64`, `x86_64-nonfree`, `aarch64`, `aarch64-nonfree`, `riscv64`, `raspberrypi` | +| `ENVIRONMENT` | Hyphen-separated feature flags (see below) | +| `PROFILE` | Build profile: `release` (default) or `dev` | +| `GIT_BRANCH_AS_HASH` | Set to `1` to use git branch name as version hash (avoids rebuilds) | **ENVIRONMENT flags:** + - `dev` - Enables password SSH before setup, skips frontend compression - `unstable` - Enables assertions and debugging with performance penalty - `console` - Enables tokio-console for async debugging **Platform notes:** + - `-nonfree` variants include proprietary firmware and drivers - `raspberrypi` includes non-free components by necessity - Platform is remembered between builds if not specified @@ -84,46 +94,72 @@ This project uses [GNU Make](https://www.gnu.org/software/make/) to build its co #### Building -| Target | Description | -|--------|-------------| -| `iso` | Create full `.iso` image (not for raspberrypi) | -| `img` | Create full `.img` image (raspberrypi only) | -| `deb` | Build Debian package | -| `all` | Build all Rust binaries | -| `uis` | Build all web UIs | -| `ui` | Build main UI only | -| `ts-bindings` | Generate TypeScript bindings from Rust types | +| Target | Description | +| ------------- | ---------------------------------------------- | +| `iso` | Create full `.iso` image (not for raspberrypi) | +| `img` | Create full `.img` image (raspberrypi only) | +| `deb` | Build Debian package | +| `all` | Build all Rust binaries | +| `uis` | Build all web UIs | +| `ui` | Build main UI only | +| `ts-bindings` | Generate TypeScript bindings from Rust types | #### Deploying to Device For devices on the same network: -| Target | Description | -|--------|-------------| -| `update-startbox REMOTE=start9@` | Deploy binary + UI only (fastest) | -| `update-deb REMOTE=start9@` | Deploy full Debian 
package | -| `update REMOTE=start9@` | OTA-style update | -| `reflash REMOTE=start9@` | Reflash as if using live ISO | -| `update-overlay REMOTE=start9@` | Deploy to in-memory overlay (reverts on reboot) | +| Target | Description | +| ------------------------------------ | ----------------------------------------------- | +| `update-startbox REMOTE=start9@` | Deploy binary + UI only (fastest) | +| `update-deb REMOTE=start9@` | Deploy full Debian package | +| `update REMOTE=start9@` | OTA-style update | +| `reflash REMOTE=start9@` | Reflash as if using live ISO | +| `update-overlay REMOTE=start9@` | Deploy to in-memory overlay (reverts on reboot) | For devices on different networks (uses [magic-wormhole](https://github.com/magic-wormhole/magic-wormhole)): -| Target | Description | -|--------|-------------| -| `wormhole` | Send startbox binary | -| `wormhole-deb` | Send Debian package | -| `wormhole-squashfs` | Send squashfs image | +| Target | Description | +| ------------------- | -------------------- | +| `wormhole` | Send startbox binary | +| `wormhole-deb` | Send Debian package | +| `wormhole-squashfs` | Send squashfs image | + +### Creating a VM + +Install virt-manager: + +```sh +sudo apt update +sudo apt install -y virt-manager +sudo usermod -aG libvirt $USER +sudo su $USER +virt-manager +``` + +Follow the screenshot walkthrough in [`assets/create-vm/`](assets/create-vm/) to create a new virtual machine. Key steps: + +1. Create a new virtual machine +2. Browse for the ISO — create a storage pool pointing to your `results/` directory +3. Select "Generic or unknown OS" +4. Set memory and CPUs +5. 
Create a disk and name the VM + +Build an ISO first: + +```sh +PLATFORM=$(uname -m) ENVIRONMENT=dev make iso +``` #### Other -| Target | Description | -|--------|-------------| -| `format` | Run code formatting (Rust nightly required) | -| `test` | Run all automated tests | -| `test-core` | Run Rust tests | -| `test-sdk` | Run SDK tests | -| `test-container-runtime` | Run container runtime tests | -| `clean` | Delete all compiled artifacts | +| Target | Description | +| ------------------------ | ------------------------------------------- | +| `format` | Run code formatting (Rust nightly required) | +| `test` | Run all automated tests | +| `test-core` | Run Rust tests | +| `test-sdk` | Run SDK tests | +| `test-container-runtime` | Run container runtime tests | +| `clean` | Delete all compiled artifacts | ## Testing @@ -156,15 +192,18 @@ Run the formatters before committing. Configuration is handled by `rustfmt.toml` ### Documentation & Comments **Rust:** + - Add doc comments (`///`) to public APIs, structs, and non-obvious functions - Use `//` comments sparingly for complex logic that isn't self-evident - Prefer self-documenting code (clear naming, small functions) over comments **TypeScript:** + - Document exported functions and complex types with JSDoc - Keep comments focused on "why" rather than "what" **General:** + - Don't add comments that just restate the code - Update or remove comments when code changes - TODOs should include context: `// TODO(username): reason` @@ -182,6 +221,7 @@ Use [Conventional Commits](https://www.conventionalcommits.org/): ``` **Types:** + - `feat` - New feature - `fix` - Bug fix - `docs` - Documentation only @@ -191,10 +231,10 @@ Use [Conventional Commits](https://www.conventionalcommits.org/): - `chore` - Build process, dependencies, etc. 
**Examples:** + ``` feat(web): add dark mode toggle fix(core): resolve race condition in service startup docs: update CONTRIBUTING.md with style guidelines refactor(sdk): simplify package validation logic ``` - diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md deleted file mode 100644 index b56d1756a..000000000 --- a/DEVELOPMENT.md +++ /dev/null @@ -1,134 +0,0 @@ -# Setting up your development environment on Debian/Ubuntu - -A step-by-step guide - -> This is the only officially supported build environment. -> MacOS has limited build capabilities and Windows requires [WSL2](https://learn.microsoft.com/en-us/windows/wsl/install) - -## Installing dependencies - -Run the following commands one at a time - -```sh -sudo apt update -sudo apt install -y ca-certificates curl gpg build-essential -curl -fsSL https://download.docker.com/linux/debian/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg -echo "deb [arch=$(dpkg-architecture -q DEB_HOST_ARCH) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/debian bookworm stable" | sudo tee /etc/apt/sources.list.d/docker.list -sudo apt update -sudo apt install -y sed grep gawk jq gzip brotli containerd.io docker-ce docker-ce-cli docker-compose-plugin qemu-user-static binfmt-support squashfs-tools git debspawn rsync b3sum -sudo mkdir -p /etc/debspawn/ -echo "AllowUnsafePermissions=true" | sudo tee /etc/debspawn/global.toml -sudo usermod -aG docker $USER -sudo su $USER -docker run --privileged --rm tonistiigi/binfmt --install all -docker buildx create --use -curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh # proceed with default installation -curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/master/install.sh | bash -source ~/.bashrc -nvm install 24 -nvm use 24 -nvm alias default 24 # this prevents your machine from reverting back to another version -``` - -## Cloning the repository - -```sh -git clone --recursive 
https://github.com/Start9Labs/start-os.git --branch next/major -cd start-os -``` - -## Building an ISO - -```sh -PLATFORM=$(uname -m) ENVIRONMENT=dev make iso -``` - -This will build an ISO for your current architecture. If you are building to run on an architecture other than the one you are currently on, replace `$(uname -m)` with the correct platform for the device (one of `aarch64`, `aarch64-nonfree`, `x86_64`, `x86_64-nonfree`, `raspberrypi`) - -## Creating a VM - -### Install virt-manager - -```sh -sudo apt update -sudo apt install -y virt-manager -sudo usermod -aG libvirt $USER -sudo su $USER -``` - -### Launch virt-manager - -```sh -virt-manager -``` - -### Create new virtual machine - -![Select "Create a new virtual machine"](assets/create-vm/step-1.png) -![Click "Forward"](assets/create-vm/step-2.png) -![Click "Browse"](assets/create-vm/step-3.png) -![Click "+"](assets/create-vm/step-4.png) - -#### make sure to set "Target Path" to the path to your results directory in start-os - -![Create storage pool](assets/create-vm/step-5.png) -![Select storage pool](assets/create-vm/step-6.png) -![Select ISO](assets/create-vm/step-7.png) -![Select "Generic or unknown OS" and click "Forward"](assets/create-vm/step-8.png) -![Set Memory and CPUs](assets/create-vm/step-9.png) -![Create disk](assets/create-vm/step-10.png) -![Name VM](assets/create-vm/step-11.png) -![Create network](assets/create-vm/step-12.png) - -## Updating a VM - -The fastest way to update a VM to your latest code depends on what you changed: - -### UI or startd: - -```sh -PLATFORM=$(uname -m) ENVIRONMENT=dev make update-startbox REMOTE=start9@ -``` - -### Container runtime or debian dependencies: - -```sh -PLATFORM=$(uname -m) ENVIRONMENT=dev make update-deb REMOTE=start9@ -``` - -### Image recipe: - -```sh -PLATFORM=$(uname -m) ENVIRONMENT=dev make update-squashfs REMOTE=start9@ -``` - ---- - -If the device you are building for is not available via ssh, it is also possible to use `magic-wormhole` to 
send the relevant files. - -### Prerequisites: - -```sh -sudo apt update -sudo apt install -y magic-wormhole -``` - -As before, the fastest way to update a VM to your latest code depends on what you changed. Each of the following commands will return a command to paste into the shell of the device you would like to upgrade. - -### UI or startd: - -```sh -PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole -``` - -### Container runtime or debian dependencies: - -```sh -PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole-deb -``` - -### Image recipe: - -```sh -PLATFORM=$(uname -m) ENVIRONMENT=dev make wormhole-squashfs -``` diff --git a/Makefile b/Makefile index 7d3e5dbf9..7ab474909 100644 --- a/Makefile +++ b/Makefile @@ -7,7 +7,7 @@ GIT_HASH_FILE := $(shell ./build/env/check-git-hash.sh) VERSION_FILE := $(shell ./build/env/check-version.sh) BASENAME := $(shell PROJECT=startos ./build/env/basename.sh) PLATFORM := $(shell if [ -f $(PLATFORM_FILE) ]; then cat $(PLATFORM_FILE); else echo unknown; fi) -ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g'; fi) +ARCH := $(shell if [ "$(PLATFORM)" = "raspberrypi" ]; then echo aarch64; elif [ "$(PLATFORM)" = "rockchip64" ]; then echo aarch64; else echo $(PLATFORM) | sed 's/-nonfree$$//g; s/-nvidia$$//g'; fi) RUST_ARCH := $(shell if [ "$(ARCH)" = "riscv64" ]; then echo riscv64gc; else echo $(ARCH); fi) REGISTRY_BASENAME := $(shell PROJECT=start-registry PLATFORM=$(ARCH) ./build/env/basename.sh) TUNNEL_BASENAME := $(shell PROJECT=start-tunnel PLATFORM=$(ARCH) ./build/env/basename.sh) @@ -139,6 +139,11 @@ install-tunnel: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox $(call mkdir,$(DESTDIR)/usr/lib/startos/scripts) $(call cp,build/lib/scripts/forward-port,$(DESTDIR)/usr/lib/startos/scripts/forward-port) + $(call mkdir,$(DESTDIR)/etc/apt/sources.list.d) + $(call cp,apt/start9.list,$(DESTDIR)/etc/apt/sources.list.d/start9.list) + $(call 
mkdir,$(DESTDIR)/usr/share/keyrings) + $(call cp,apt/start9.gpg,$(DESTDIR)/usr/share/keyrings/start9.gpg) + core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/tunnelbox: $(CORE_SRC) $(ENVIRONMENT_FILE) $(GIT_HASH_FILE) web/dist/static/start-tunnel/index.html ARCH=$(ARCH) PROFILE=$(PROFILE) ./core/build/build-tunnelbox.sh @@ -236,9 +241,9 @@ update-startbox: core/target/$(RUST_ARCH)-unknown-linux-musl/$(PROFILE)/startbox update-deb: results/$(BASENAME).deb # better than update, but only available from debian @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi $(call ssh,'sudo /usr/lib/startos/scripts/chroot-and-upgrade --create') - $(call mkdir,/media/startos/next/tmp/startos-deb) - $(call cp,results/$(BASENAME).deb,/media/startos/next/tmp/startos-deb/$(BASENAME).deb) - $(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y --reinstall /tmp/startos-deb/$(BASENAME).deb"') + $(call mkdir,/media/startos/next/var/tmp/startos-deb) + $(call cp,results/$(BASENAME).deb,/media/startos/next/var/tmp/startos-deb/$(BASENAME).deb) + $(call ssh,'sudo /media/startos/next/usr/lib/startos/scripts/chroot-and-upgrade --no-sync "apt-get install -y --reinstall /var/tmp/startos-deb/$(BASENAME).deb"') update-squashfs: results/$(BASENAME).squashfs @if [ -z "$(REMOTE)" ]; then >&2 echo "Must specify REMOTE" && false; fi @@ -278,7 +283,7 @@ core/bindings/index.ts: $(call ls-files, core) $(ENVIRONMENT_FILE) rm -rf core/bindings ./core/build/build-ts.sh ls core/bindings/*.ts | sed 's/core\/bindings\/\([^.]*\)\.ts/export { \1 } from ".\/\1";/g' | grep -v '"./index"' | tee core/bindings/index.ts - npm --prefix sdk exec -- prettier --config ./sdk/base/package.json -w ./core/bindings/*.ts + npm --prefix sdk/base exec -- prettier --config=./sdk/base/package.json -w './core/bindings/**/*.ts' touch core/bindings/index.ts sdk/dist/package.json sdk/baseDist/package.json: $(call ls-files, sdk) sdk/base/lib/osBindings/index.ts 
diff --git a/README.md b/README.md index 0b74a055d..2e42cc63b 100644 --- a/README.md +++ b/README.md @@ -7,76 +7,64 @@ - + Static Badge X (formerly Twitter) Follow - - Static Badge - - - Static Badge - Static Badge - + Static Badge Website -
-
-

- Welcome to the era of Sovereign Computing -

-

- StartOS is an open source Linux distribution optimized for running a personal server. It facilitates the discovery, installation, network configuration, service configuration, data backup, dependency management, and health monitoring of self-hosted software services. -

-
-
-

-StartOS -

-
-## Running StartOS -> [!WARNING] -> StartOS is in beta. It lacks features. It doesn't always work perfectly. Start9 servers are not plug and play. Using them properly requires some effort and patience. Please do not use StartOS or purchase a server if you are unable or unwilling to follow instructions and learn new concepts. +## What is StartOS? -### 💰 Buy a Start9 server -This is the most convenient option. Simply [buy a server](https://store.start9.com) from Start9 and plug it in. +StartOS is an open-source Linux distribution for running a personal server. It handles discovery, installation, network configuration, data backup, dependency management, and health monitoring of self-hosted services. -### 👷 Build your own server -This option is easier than you might imagine, and there are 4 reasons why you might prefer it: -1. You already have hardware -1. You want to save on shipping costs -1. You prefer not to divulge your physical address -1. You just like building things +**Tech stack:** Rust backend (Tokio/Axum), Angular frontend, Node.js container runtime with LXC, and a custom diff-based database ([Patch-DB](https://github.com/Start9Labs/patch-db)) for reactive state synchronization. -To pursue this option, follow one of our [DIY guides](https://start9.com/latest/diy). +Services run in isolated LXC containers, packaged as [S9PKs](https://github.com/Start9Labs/start-os/blob/master/core/s9pk-structure.md) — a signed, merkle-archived format that supports partial downloads and cryptographic verification. -## ❤️ Contributing -There are multiple ways to contribute: work directly on StartOS, package a service for the marketplace, or help with documentation and guides. To learn more about contributing, see [here](https://start9.com/contribute/). +## What can you do with it? -To report security issues, please email our security team - security@start9.com. 
+StartOS lets you self-host services that would otherwise depend on third-party cloud providers — giving you full ownership of your data and infrastructure. -## 🌎 Marketplace -There are dozens of services available for StartOS, and new ones are being added all the time. Check out the full list of available services [here](https://marketplace.start9.com/marketplace). To read more about the Marketplace ecosystem, check out this [blog post](https://blog.start9.com/start9-marketplace-strategy/) +Browse available services on the [Start9 Marketplace](https://marketplace.start9.com/), including: -## 🖥️ User Interface Screenshots +- **Bitcoin & Lightning** — Run a full Bitcoin node, Lightning node, BTCPay Server, and other payment infrastructure +- **Communication** — Self-host Matrix, SimpleX, or other messaging platforms +- **Cloud Storage** — Run Nextcloud, Vaultwarden, and other productivity tools -

-StartOS Marketplace -StartOS Community Registry -StartOS NextCloud Service -StartOS BTCPay Service -StartOS System Settings -StartOS System Settings -StartOS System Settings -StartOS System Settings -

+Services are added by the community. If a service you want isn't available, you can [package it yourself](https://github.com/Start9Labs/ai-service-packaging/). + +## Getting StartOS + +### Buy a Start9 server + +The easiest path. [Buy a server](https://store.start9.com) from Start9 and plug it in. + +### Build your own + +Follow the [install guide](https://docs.start9.com/start-os/installing.html) to install StartOS on your own hardware. . Reasons to go this route: + +1. You already have compatible hardware +2. You want to save on shipping costs +3. You prefer not to share your physical address +4. You enjoy building things + +### Build from source + +See [CONTRIBUTING.md](CONTRIBUTING.md) for environment setup, build instructions, and development workflow. + +## Contributing + +There are multiple ways to contribute: work directly on StartOS, package a service for the marketplace, or help with documentation and guides. See [CONTRIBUTING.md](CONTRIBUTING.md) or visit [start9.com/contribute](https://start9.com/contribute/). + +To report security issues, email [security@start9.com](mailto:security@start9.com). diff --git a/agents/TODO.md b/agents/TODO.md deleted file mode 100644 index 70124aa74..000000000 --- a/agents/TODO.md +++ /dev/null @@ -1,9 +0,0 @@ -# AI Agent TODOs - -Pending tasks for AI agents. Remove items when completed. 
- -## Unreviewed CLAUDE.md Sections - -- [ ] Architecture - Web (`/web`) - @MattDHill - - diff --git a/apt/start9.gpg b/apt/start9.gpg new file mode 100644 index 000000000..bb3bc2d3d Binary files /dev/null and b/apt/start9.gpg differ diff --git a/apt/start9.list b/apt/start9.list new file mode 100644 index 000000000..1cb2ef390 --- /dev/null +++ b/apt/start9.list @@ -0,0 +1 @@ +deb [arch=amd64,arm64,riscv64 signed-by=/usr/share/keyrings/start9.gpg] https://start9-debs.nyc3.cdn.digitaloceanspaces.com stable main diff --git a/assets/StartOS.png b/assets/StartOS.png deleted file mode 100644 index 6552848e8..000000000 Binary files a/assets/StartOS.png and /dev/null differ diff --git a/assets/btcpay.png b/assets/btcpay.png deleted file mode 100644 index 9ceb2f1ca..000000000 Binary files a/assets/btcpay.png and /dev/null differ diff --git a/assets/c-lightning.png b/assets/c-lightning.png deleted file mode 100644 index f0dcf660e..000000000 Binary files a/assets/c-lightning.png and /dev/null differ diff --git a/assets/community.png b/assets/community.png deleted file mode 100644 index b0ec06d37..000000000 Binary files a/assets/community.png and /dev/null differ diff --git a/assets/logs.png b/assets/logs.png deleted file mode 100644 index 39788c0ff..000000000 Binary files a/assets/logs.png and /dev/null differ diff --git a/assets/nextcloud.png b/assets/nextcloud.png deleted file mode 100644 index 4dd4e7c31..000000000 Binary files a/assets/nextcloud.png and /dev/null differ diff --git a/assets/registry.png b/assets/registry.png deleted file mode 100644 index 4660a3c75..000000000 Binary files a/assets/registry.png and /dev/null differ diff --git a/assets/system.png b/assets/system.png deleted file mode 100644 index d24ba6dea..000000000 Binary files a/assets/system.png and /dev/null differ diff --git a/assets/welcome.png b/assets/welcome.png deleted file mode 100644 index b3857383b..000000000 Binary files a/assets/welcome.png and /dev/null differ diff --git 
a/build/apt/publish-deb.sh b/build/apt/publish-deb.sh new file mode 100755 index 000000000..b049e2796 --- /dev/null +++ b/build/apt/publish-deb.sh @@ -0,0 +1,138 @@ +#!/bin/bash +# +# Publish .deb files to an S3-hosted apt repository. +# +# Usage: publish-deb.sh [ ...] +# +# Environment variables: +# GPG_PRIVATE_KEY - Armored GPG private key (imported if set) +# GPG_KEY_ID - GPG key ID for signing +# S3_ACCESS_KEY - S3 access key +# S3_SECRET_KEY - S3 secret key +# S3_ENDPOINT - S3 endpoint (default: https://nyc3.digitaloceanspaces.com) +# S3_BUCKET - S3 bucket name (default: start9-debs) +# SUITE - Apt suite name (default: stable) +# COMPONENT - Apt component name (default: main) + +set -e + +if [ $# -eq 0 ]; then + echo "Usage: $0 [...]" >&2 + exit 1 +fi + +BUCKET="${S3_BUCKET:-start9-debs}" +ENDPOINT="${S3_ENDPOINT:-https://nyc3.digitaloceanspaces.com}" +SUITE="${SUITE:-stable}" +COMPONENT="${COMPONENT:-main}" +REPO_DIR="$(mktemp -d)" + +cleanup() { + rm -rf "$REPO_DIR" +} +trap cleanup EXIT + +# Import GPG key if provided +if [ -n "$GPG_PRIVATE_KEY" ]; then + echo "$GPG_PRIVATE_KEY" | gpg --batch --import 2>/dev/null +fi + +# Configure s3cmd +if [ -n "$S3_ACCESS_KEY" ] && [ -n "$S3_SECRET_KEY" ]; then + S3CMD_CONFIG="$(mktemp)" + cat > "$S3CMD_CONFIG" </dev/null || true + +# Collect all .deb files from arguments +DEB_FILES=() +for arg in "$@"; do + if [ -d "$arg" ]; then + while IFS= read -r -d '' f; do + DEB_FILES+=("$f") + done < <(find "$arg" -name '*.deb' -print0) + elif [ -f "$arg" ]; then + DEB_FILES+=("$arg") + else + echo "Warning: $arg is not a file or directory, skipping" >&2 + fi +done + +if [ ${#DEB_FILES[@]} -eq 0 ]; then + echo "No .deb files found" >&2 + exit 1 +fi + +# Copy each deb to the pool, renaming to standard format +for deb in "${DEB_FILES[@]}"; do + PKG_NAME="$(dpkg-deb --field "$deb" Package)" + POOL_DIR="$REPO_DIR/pool/${COMPONENT}/${PKG_NAME:0:1}/${PKG_NAME}" + mkdir -p "$POOL_DIR" + cp "$deb" "$POOL_DIR/" + dpkg-name -o 
"$POOL_DIR/$(basename "$deb")" 2>/dev/null || true + echo "Added: $(basename "$deb") -> pool/${COMPONENT}/${PKG_NAME:0:1}/${PKG_NAME}/" +done + +# Generate Packages indices for each architecture +for arch in amd64 arm64 riscv64; do + BINARY_DIR="$REPO_DIR/dists/${SUITE}/${COMPONENT}/binary-${arch}" + mkdir -p "$BINARY_DIR" + ( + cd "$REPO_DIR" + dpkg-scanpackages --arch "$arch" pool/ > "$BINARY_DIR/Packages" + gzip -k -f "$BINARY_DIR/Packages" + ) + echo "Generated Packages index for ${arch}" +done + +# Generate Release file +( + cd "$REPO_DIR/dists/${SUITE}" + apt-ftparchive release \ + -o "APT::FTPArchive::Release::Origin=Start9" \ + -o "APT::FTPArchive::Release::Label=Start9" \ + -o "APT::FTPArchive::Release::Suite=${SUITE}" \ + -o "APT::FTPArchive::Release::Codename=${SUITE}" \ + -o "APT::FTPArchive::Release::Architectures=amd64 arm64 riscv64" \ + -o "APT::FTPArchive::Release::Components=${COMPONENT}" \ + . > Release +) +echo "Generated Release file" + +# Sign if GPG key is available +if [ -n "$GPG_KEY_ID" ]; then + ( + cd "$REPO_DIR/dists/${SUITE}" + gpg --default-key "$GPG_KEY_ID" --batch --yes --detach-sign -o Release.gpg Release + gpg --default-key "$GPG_KEY_ID" --batch --yes --clearsign -o InRelease Release + ) + echo "Signed Release file with key ${GPG_KEY_ID}" +else + echo "Warning: GPG_KEY_ID not set, Release file is unsigned" >&2 +fi + +# Upload to S3 +echo "Uploading to s3://${BUCKET}/ ..." +s3 sync --acl-public --no-mime-magic "$REPO_DIR/" "s3://${BUCKET}/" + +[ -n "$S3CMD_CONFIG" ] && rm -f "$S3CMD_CONFIG" +echo "Done." 
diff --git a/build/dpkg-deps/depends b/build/dpkg-deps/depends index b50e5168b..da2012ae2 100644 --- a/build/dpkg-deps/depends +++ b/build/dpkg-deps/depends @@ -55,6 +55,7 @@ socat sqlite3 squashfs-tools squashfs-tools-ng +ssl-cert sudo systemd systemd-resolved diff --git a/build/dpkg-deps/dev.depends b/build/dpkg-deps/dev.depends new file mode 100644 index 000000000..40918a966 --- /dev/null +++ b/build/dpkg-deps/dev.depends @@ -0,0 +1 @@ ++ nmap \ No newline at end of file diff --git a/build/dpkg-deps/generate.sh b/build/dpkg-deps/generate.sh index ffb80dce3..b7a4925b2 100755 --- a/build/dpkg-deps/generate.sh +++ b/build/dpkg-deps/generate.sh @@ -12,6 +12,10 @@ fi if [[ "$PLATFORM" =~ -nonfree$ ]]; then FEATURES+=("nonfree") fi +if [[ "$PLATFORM" =~ -nvidia$ ]]; then + FEATURES+=("nonfree") + FEATURES+=("nvidia") +fi feature_file_checker=' /^#/ { next } diff --git a/build/dpkg-deps/nonfree.depends b/build/dpkg-deps/nonfree.depends index 73d021d02..484c14249 100644 --- a/build/dpkg-deps/nonfree.depends +++ b/build/dpkg-deps/nonfree.depends @@ -4,7 +4,4 @@ + firmware-iwlwifi + firmware-libertas + firmware-misc-nonfree -+ firmware-realtek -+ nvidia-container-toolkit -# + nvidia-driver -# + nvidia-kernel-dkms \ No newline at end of file ++ firmware-realtek \ No newline at end of file diff --git a/build/dpkg-deps/nvidia.depends b/build/dpkg-deps/nvidia.depends new file mode 100644 index 000000000..ad0324664 --- /dev/null +++ b/build/dpkg-deps/nvidia.depends @@ -0,0 +1 @@ ++ nvidia-container-toolkit diff --git a/build/image-recipe/build.sh b/build/image-recipe/build.sh index 5f48d9b55..eb5b7fff2 100755 --- a/build/image-recipe/build.sh +++ b/build/image-recipe/build.sh @@ -34,14 +34,14 @@ fi IMAGE_BASENAME=startos-${VERSION_FULL}_${IB_TARGET_PLATFORM} BOOTLOADERS=grub-efi -if [ "$IB_TARGET_PLATFORM" = "x86_64" ] || [ "$IB_TARGET_PLATFORM" = "x86_64-nonfree" ]; then +if [ "$IB_TARGET_PLATFORM" = "x86_64" ] || [ "$IB_TARGET_PLATFORM" = "x86_64-nonfree" ] || [ 
"$IB_TARGET_PLATFORM" = "x86_64-nvidia" ]; then IB_TARGET_ARCH=amd64 QEMU_ARCH=x86_64 BOOTLOADERS=grub-efi,syslinux -elif [ "$IB_TARGET_PLATFORM" = "aarch64" ] || [ "$IB_TARGET_PLATFORM" = "aarch64-nonfree" ] || [ "$IB_TARGET_PLATFORM" = "raspberrypi" ] || [ "$IB_TARGET_PLATFORM" = "rockchip64" ]; then +elif [ "$IB_TARGET_PLATFORM" = "aarch64" ] || [ "$IB_TARGET_PLATFORM" = "aarch64-nonfree" ] || [ "$IB_TARGET_PLATFORM" = "aarch64-nvidia" ] || [ "$IB_TARGET_PLATFORM" = "raspberrypi" ] || [ "$IB_TARGET_PLATFORM" = "rockchip64" ]; then IB_TARGET_ARCH=arm64 QEMU_ARCH=aarch64 -elif [ "$IB_TARGET_PLATFORM" = "riscv64" ]; then +elif [ "$IB_TARGET_PLATFORM" = "riscv64" ] || [ "$IB_TARGET_PLATFORM" = "riscv64-nonfree" ]; then IB_TARGET_ARCH=riscv64 QEMU_ARCH=riscv64 else @@ -60,9 +60,13 @@ mkdir -p $prep_results_dir cd $prep_results_dir NON_FREE= -if [[ "${IB_TARGET_PLATFORM}" =~ -nonfree$ ]] || [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then +if [[ "${IB_TARGET_PLATFORM}" =~ -nonfree$ ]] || [[ "${IB_TARGET_PLATFORM}" =~ -nvidia$ ]] || [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ]; then NON_FREE=1 fi +NVIDIA= +if [[ "${IB_TARGET_PLATFORM}" =~ -nvidia$ ]]; then + NVIDIA=1 +fi IMAGE_TYPE=iso if [ "${IB_TARGET_PLATFORM}" = "raspberrypi" ] || [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then IMAGE_TYPE=img @@ -177,7 +181,7 @@ if [ "${IB_TARGET_PLATFORM}" = "rockchip64" ]; then echo "deb https://apt.armbian.com/ ${IB_SUITE} main" > config/archives/armbian.list fi -if [ "$NON_FREE" = 1 ]; then +if [ "$NVIDIA" = 1 ]; then curl -fsSL https://nvidia.github.io/libnvidia-container/gpgkey | gpg --dearmor -o config/archives/nvidia-container-toolkit.key curl -s -L https://nvidia.github.io/libnvidia-container/stable/deb/nvidia-container-toolkit.list \ | sed 's#deb https://#deb [signed-by=/etc/apt/trusted.gpg.d/nvidia-container-toolkit.key.gpg] https://#g' \ @@ -205,11 +209,11 @@ cat > config/hooks/normal/9000-install-startos.hook.chroot << EOF set -e -if [ "${NON_FREE}" = "1" ] && [ 
"${IB_TARGET_PLATFORM}" != "raspberrypi" ]; then +if [ "${NVIDIA}" = "1" ]; then # install a specific NVIDIA driver version # ---------------- configuration ---------------- - NVIDIA_DRIVER_VERSION="\${NVIDIA_DRIVER_VERSION:-580.119.02}" + NVIDIA_DRIVER_VERSION="\${NVIDIA_DRIVER_VERSION:-580.126.09}" BASE_URL="https://download.nvidia.com/XFree86/Linux-${QEMU_ARCH}" @@ -259,12 +263,15 @@ if [ "${NON_FREE}" = "1" ] && [ "${IB_TARGET_PLATFORM}" != "raspberrypi" ]; then echo "[nvidia-hook] Running NVIDIA installer for kernel \${KVER}" >&2 - sh "\${RUN_PATH}" \ + if ! sh "\${RUN_PATH}" \ --silent \ --kernel-name="\${KVER}" \ --no-x-check \ --no-nouveau-check \ - --no-runlevel-check + --no-runlevel-check; then + cat /var/log/nvidia-installer.log + exit 1 + fi # Rebuild module metadata echo "[nvidia-hook] Running depmod for \${KVER}" >&2 diff --git a/build/lib/scripts/forward-port b/build/lib/scripts/forward-port index 705c1e6a7..3084de7de 100755 --- a/build/lib/scripts/forward-port +++ b/build/lib/scripts/forward-port @@ -5,7 +5,7 @@ if [ -z "$sip" ] || [ -z "$dip" ] || [ -z "$dprefix" ] || [ -z "$sport" ] || [ - exit 1 fi -NAME="F$(echo "$sip:$sport -> $dip/$dprefix:$dport" | sha256sum | head -c 15)" +NAME="F$(echo "$sip:$sport -> $dip/$dprefix:$dport ${src_subnet:-any}" | sha256sum | head -c 15)" for kind in INPUT FORWARD ACCEPT; do if ! iptables -C $kind -j "${NAME}_${kind}" 2> /dev/null; then @@ -13,7 +13,7 @@ for kind in INPUT FORWARD ACCEPT; do iptables -A $kind -j "${NAME}_${kind}" fi done -for kind in PREROUTING INPUT OUTPUT POSTROUTING; do +for kind in PREROUTING OUTPUT POSTROUTING; do if ! 
iptables -t nat -C $kind -j "${NAME}_${kind}" 2> /dev/null; then iptables -t nat -N "${NAME}_${kind}" 2> /dev/null iptables -t nat -A $kind -j "${NAME}_${kind}" @@ -26,7 +26,7 @@ trap 'err=1' ERR for kind in INPUT FORWARD ACCEPT; do iptables -F "${NAME}_${kind}" 2> /dev/null done -for kind in PREROUTING INPUT OUTPUT POSTROUTING; do +for kind in PREROUTING OUTPUT POSTROUTING; do iptables -t nat -F "${NAME}_${kind}" 2> /dev/null done if [ "$UNDO" = 1 ]; then @@ -36,20 +36,37 @@ if [ "$UNDO" = 1 ]; then fi # DNAT: rewrite destination for incoming packets (external traffic) -iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport" -iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport" +# When src_subnet is set, only forward traffic from that subnet (private forwards) +if [ -n "$src_subnet" ]; then + iptables -t nat -A ${NAME}_PREROUTING -s "$src_subnet" -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport" + iptables -t nat -A ${NAME}_PREROUTING -s "$src_subnet" -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport" + # Also allow containers on the bridge subnet to reach this forward + if [ -n "$bridge_subnet" ]; then + iptables -t nat -A ${NAME}_PREROUTING -s "$bridge_subnet" -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport" + iptables -t nat -A ${NAME}_PREROUTING -s "$bridge_subnet" -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport" + fi +else + iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport" + iptables -t nat -A ${NAME}_PREROUTING -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport" +fi # DNAT: rewrite destination for locally-originated packets (hairpin from host itself) iptables -t nat -A ${NAME}_OUTPUT -d "$sip" -p tcp --dport "$sport" -j DNAT --to-destination "$dip:$dport" iptables -t nat -A 
${NAME}_OUTPUT -d "$sip" -p udp --dport "$sport" -j DNAT --to-destination "$dip:$dport" -# MASQUERADE: rewrite source for all forwarded traffic to the destination -# This ensures responses are routed back through the host regardless of source IP -iptables -t nat -A ${NAME}_POSTROUTING -d "$dip" -p tcp --dport "$dport" -j MASQUERADE -iptables -t nat -A ${NAME}_POSTROUTING -d "$dip" -p udp --dport "$dport" -j MASQUERADE - # Allow new connections to be forwarded to the destination iptables -A ${NAME}_FORWARD -d $dip -p tcp --dport $dport -m state --state NEW -j ACCEPT iptables -A ${NAME}_FORWARD -d $dip -p udp --dport $dport -m state --state NEW -j ACCEPT -exit $err \ No newline at end of file +# NAT hairpin: masquerade traffic from the bridge subnet or host to the DNAT +# target, so replies route back through the host for proper NAT reversal. +# Container-to-container hairpin (source is on the bridge subnet) +if [ -n "$bridge_subnet" ]; then + iptables -t nat -A ${NAME}_POSTROUTING -s "$bridge_subnet" -d "$dip" -p tcp --dport "$dport" -j MASQUERADE + iptables -t nat -A ${NAME}_POSTROUTING -s "$bridge_subnet" -d "$dip" -p udp --dport "$dport" -j MASQUERADE +fi +# Host-to-container hairpin (host connects to its own gateway IP, source is sip) +iptables -t nat -A ${NAME}_POSTROUTING -s "$sip" -d "$dip" -p tcp --dport "$dport" -j MASQUERADE +iptables -t nat -A ${NAME}_POSTROUTING -s "$sip" -d "$dip" -p udp --dport "$dport" -j MASQUERADE + +exit $err diff --git a/build/lib/scripts/upgrade b/build/lib/scripts/upgrade index 36651a9cd..60c1b5556 100755 --- a/build/lib/scripts/upgrade +++ b/build/lib/scripts/upgrade @@ -62,7 +62,7 @@ fi chroot /media/startos/next bash -e << "EOF" if [ -f /boot/grub/grub.cfg ]; then - grub-install /dev/$(eval $(lsblk -o MOUNTPOINT,PKNAME -P | grep 'MOUNTPOINT="/media/startos/root"') && echo $PKNAME) + grub-install --no-nvram /dev/$(eval $(lsblk -o MOUNTPOINT,PKNAME -P | grep 'MOUNTPOINT="/media/startos/root"') && echo $PKNAME) update-grub fi 
diff --git a/build/manage-release.sh b/build/manage-release.sh new file mode 100755 index 000000000..bd98dfbd1 --- /dev/null +++ b/build/manage-release.sh @@ -0,0 +1,332 @@ +#!/bin/bash + +set -e + +REPO="Start9Labs/start-os" +REGISTRY="https://alpha-registry-x.start9.com" +S3_BUCKET="s3://startos-images" +S3_CDN="https://startos-images.nyc3.cdn.digitaloceanspaces.com" +START9_GPG_KEY="2D63C217" + +ARCHES="aarch64 aarch64-nonfree aarch64-nvidia riscv64 riscv64-nonfree x86_64 x86_64-nonfree x86_64-nvidia" +CLI_ARCHES="aarch64 riscv64 x86_64" + +require_version() { + if [ -z "$VERSION" ]; then + >&2 echo '$VERSION required' + exit 2 + fi +} + +release_dir() { + echo "$HOME/Downloads/v$VERSION" +} + +ensure_release_dir() { + local dir + dir=$(release_dir) + if [ "$CLEAN" = "1" ]; then + rm -rf "$dir" + fi + mkdir -p "$dir" + cd "$dir" +} + +enter_release_dir() { + local dir + dir=$(release_dir) + if [ ! -d "$dir" ]; then + >&2 echo "Release directory $dir does not exist. Run 'download' or 'pull' first." + exit 1 + fi + cd "$dir" +} + +cli_target_for() { + local arch=$1 os=$2 + local pair="${arch}-${os}" + if [ "$pair" = "riscv64-linux" ]; then + echo "riscv64gc-unknown-linux-musl" + elif [ "$pair" = "riscv64-macos" ]; then + return 1 + elif [ "$os" = "linux" ]; then + echo "${arch}-unknown-linux-musl" + elif [ "$os" = "macos" ]; then + echo "${arch}-apple-darwin" + fi +} + +release_files() { + for file in *.iso *.squashfs *.deb; do + [ -f "$file" ] && echo "$file" + done + for file in start-cli_*; do + [[ "$file" == *.asc ]] && continue + [ -f "$file" ] && echo "$file" + done +} + +resolve_gh_user() { + GH_USER=${GH_USER:-$(gh api user -q .login 2>/dev/null || true)} + GH_GPG_KEY=$(git config user.signingkey 2>/dev/null || true) +} + +# --- Subcommands --- + +cmd_download() { + require_version + ensure_release_dir + + if [ -n "$RUN_ID" ]; then + for arch in $ARCHES; do + while ! 
gh run download -R $REPO "$RUN_ID" -n "$arch.squashfs" -D "$(pwd)"; do sleep 1; done + done + for arch in $ARCHES; do + while ! gh run download -R $REPO "$RUN_ID" -n "$arch.iso" -D "$(pwd)"; do sleep 1; done + done + fi + + if [ -n "$ST_RUN_ID" ]; then + for arch in $CLI_ARCHES; do + while ! gh run download -R $REPO "$ST_RUN_ID" -n "start-tunnel_$arch.deb" -D "$(pwd)"; do sleep 1; done + done + fi + + if [ -n "$CLI_RUN_ID" ]; then + for arch in $CLI_ARCHES; do + for os in linux macos; do + local target + target=$(cli_target_for "$arch" "$os") || continue + while ! gh run download -R $REPO "$CLI_RUN_ID" -n "start-cli_$target" -D "$(pwd)"; do sleep 1; done + mv start-cli "start-cli_${arch}-${os}" + done + done + fi +} + +cmd_pull() { + require_version + ensure_release_dir + + echo "Downloading release assets from tag v$VERSION..." + + # Download debs and CLI binaries from the GH release + for file in $(gh release view -R $REPO "v$VERSION" --json assets -q '.assets[].name' | grep -E '\.(deb)$|^start-cli_'); do + gh release download -R $REPO "v$VERSION" -p "$file" -D "$(pwd)" --clobber + done + + # Download ISOs and squashfs from S3 CDN + for arch in $ARCHES; do + for ext in squashfs iso; do + # Get the actual filename from the GH release asset list or body + local filename + filename=$(gh release view -R $REPO "v$VERSION" --json assets -q ".assets[].name" | grep "_${arch}\\.${ext}$" || true) + if [ -z "$filename" ]; then + filename=$(gh release view -R $REPO "v$VERSION" --json body -q .body | grep -oP "[^ ]*_${arch}\\.${ext}" | head -1 || true) + fi + if [ -n "$filename" ]; then + echo "Downloading $filename from S3..." 
+ curl -fSL -o "$filename" "$S3_CDN/v$VERSION/$filename" + fi + done + done +} + +cmd_register() { + require_version + enter_release_dir + start-cli --registry=$REGISTRY registry os version add "$VERSION" "v$VERSION" '' ">=0.3.5 <=$VERSION" +} + +cmd_upload() { + require_version + enter_release_dir + + for file in $(release_files); do + gh release upload -R $REPO "v$VERSION" "$file" + done + for file in *.iso *.squashfs; do + s3cmd put -P "$file" "$S3_BUCKET/v$VERSION/$file" + done +} + +cmd_index() { + require_version + enter_release_dir + + for arch in $ARCHES; do + for file in *_"$arch".squashfs *_"$arch".iso; do + start-cli --registry=$REGISTRY registry os asset add --platform="$arch" --version="$VERSION" "$file" "$S3_CDN/v$VERSION/$file" + done + done +} + +cmd_sign() { + require_version + enter_release_dir + resolve_gh_user + + for file in $(release_files); do + gpg -u $START9_GPG_KEY --detach-sign --armor -o "${file}.start9.asc" "$file" + if [ -n "$GH_USER" ] && [ -n "$GH_GPG_KEY" ]; then + gpg -u "$GH_GPG_KEY" --detach-sign --armor -o "${file}.${GH_USER}.asc" "$file" + fi + done + + gpg --export -a $START9_GPG_KEY > start9.key.asc + if [ -n "$GH_USER" ] && [ -n "$GH_GPG_KEY" ]; then + gpg --export -a "$GH_GPG_KEY" > "${GH_USER}.key.asc" + else + >&2 echo 'Warning: could not determine GitHub user or GPG signing key, skipping personal signature' + fi + tar -czvf signatures.tar.gz *.asc + + gh release upload -R $REPO "v$VERSION" signatures.tar.gz --clobber +} + +cmd_cosign() { + require_version + enter_release_dir + resolve_gh_user + + if [ -z "$GH_USER" ] || [ -z "$GH_GPG_KEY" ]; then + >&2 echo 'Error: could not determine GitHub user or GPG signing key' + >&2 echo "Set GH_USER and/or configure git user.signingkey" + exit 1 + fi + + echo "Downloading existing signatures..." + gh release download -R $REPO "v$VERSION" -p "signatures.tar.gz" -D "$(pwd)" --clobber + tar -xzf signatures.tar.gz + + echo "Adding personal signatures as $GH_USER..." 
+ for file in $(release_files); do + gpg -u "$GH_GPG_KEY" --detach-sign --armor -o "${file}.${GH_USER}.asc" "$file" + done + + gpg --export -a "$GH_GPG_KEY" > "${GH_USER}.key.asc" + + echo "Re-packing signatures..." + tar -czvf signatures.tar.gz *.asc + + gh release upload -R $REPO "v$VERSION" signatures.tar.gz --clobber + echo "Done. Personal signatures for $GH_USER added to v$VERSION." +} + +cmd_notes() { + require_version + enter_release_dir + + cat << EOF +# ISO Downloads + +- [x86_64/AMD64]($S3_CDN/v$VERSION/$(ls *_x86_64-nonfree.iso)) +- [x86_64/AMD64 + NVIDIA]($S3_CDN/v$VERSION/$(ls *_x86_64-nvidia.iso)) +- [x86_64/AMD64-slim (FOSS-only)]($S3_CDN/v$VERSION/$(ls *_x86_64.iso) "Without proprietary software or drivers") +- [aarch64/ARM64]($S3_CDN/v$VERSION/$(ls *_aarch64-nonfree.iso)) +- [aarch64/ARM64 + NVIDIA]($S3_CDN/v$VERSION/$(ls *_aarch64-nvidia.iso)) +- [aarch64/ARM64-slim (FOSS-Only)]($S3_CDN/v$VERSION/$(ls *_aarch64.iso) "Without proprietary software or drivers") +- [RISCV64 (RVA23)]($S3_CDN/v$VERSION/$(ls *_riscv64-nonfree.iso)) +- [RISCV64 (RVA23)-slim (FOSS-only)]($S3_CDN/v$VERSION/$(ls *_riscv64.iso) "Without proprietary software or drivers") + +EOF + cat << 'EOF' +# StartOS Checksums + +## SHA-256 +``` +EOF + sha256sum *.iso *.squashfs + cat << 'EOF' +``` + +## BLAKE-3 +``` +EOF + b3sum *.iso *.squashfs + cat << 'EOF' +``` + +# Start-Tunnel Checksums + +## SHA-256 +``` +EOF + sha256sum start-tunnel*.deb + cat << 'EOF' +``` + +## BLAKE-3 +``` +EOF + b3sum start-tunnel*.deb + cat << 'EOF' +``` + +# start-cli Checksums + +## SHA-256 +``` +EOF + release_files | grep '^start-cli_' | xargs sha256sum + cat << 'EOF' +``` + +## BLAKE-3 +``` +EOF + release_files | grep '^start-cli_' | xargs b3sum + cat << 'EOF' +``` +EOF +} + +cmd_full_release() { + cmd_download + cmd_register + cmd_upload + cmd_index + cmd_sign + cmd_notes +} + +usage() { + cat << 'EOF' +Usage: manage-release.sh + +Subcommands: + download Download artifacts from GitHub Actions runs + 
Requires: RUN_ID, ST_RUN_ID, CLI_RUN_ID (any combination) + pull Download an existing release from the GH tag and S3 + register Register the version in the Start9 registry + upload Upload artifacts to GitHub Releases and S3 + index Add assets to the registry index + sign Sign all artifacts with Start9 org key (+ personal key if available) + and upload signatures.tar.gz + cosign Add personal GPG signature to an existing release's signatures + (requires 'pull' first so you can verify assets before signing) + notes Print release notes with download links and checksums + full-release Run: download → register → upload → index → sign → notes + +Environment variables: + VERSION (required) Release version + RUN_ID GitHub Actions run ID for OS images (download subcommand) + ST_RUN_ID GitHub Actions run ID for start-tunnel (download subcommand) + CLI_RUN_ID GitHub Actions run ID for start-cli (download subcommand) + GH_USER Override GitHub username (default: autodetected via gh cli) + CLEAN Set to 1 to wipe and recreate the release directory +EOF +} + +case "${1:-}" in + download) cmd_download ;; + pull) cmd_pull ;; + register) cmd_register ;; + upload) cmd_upload ;; + index) cmd_index ;; + sign) cmd_sign ;; + cosign) cmd_cosign ;; + notes) cmd_notes ;; + full-release) cmd_full_release ;; + *) usage; exit 1 ;; +esac diff --git a/build/upload-ota.sh b/build/upload-ota.sh deleted file mode 100755 index 74e1b42aa..000000000 --- a/build/upload-ota.sh +++ /dev/null @@ -1,142 +0,0 @@ -#!/bin/bash - -if [ -z "$VERSION" ]; then - >&2 echo '$VERSION required' - exit 2 -fi - -set -e - -if [ "$SKIP_DL" != "1" ]; then - if [ "$SKIP_CLEAN" != "1" ]; then - rm -rf ~/Downloads/v$VERSION - mkdir ~/Downloads/v$VERSION - cd ~/Downloads/v$VERSION - fi - - if [ -n "$RUN_ID" ]; then - for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do - while ! 
gh run download -R Start9Labs/start-os $RUN_ID -n $arch.squashfs -D $(pwd); do sleep 1; done - done - for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do - while ! gh run download -R Start9Labs/start-os $RUN_ID -n $arch.iso -D $(pwd); do sleep 1; done - done - fi - - if [ -n "$ST_RUN_ID" ]; then - for arch in aarch64 riscv64 x86_64; do - while ! gh run download -R Start9Labs/start-os $ST_RUN_ID -n start-tunnel_$arch.deb -D $(pwd); do sleep 1; done - done - fi - - if [ -n "$CLI_RUN_ID" ]; then - for arch in aarch64 riscv64 x86_64; do - for os in linux macos; do - pair=${arch}-${os} - if [ "${pair}" = "riscv64-linux" ]; then - target=riscv64gc-unknown-linux-musl - elif [ "${pair}" = "riscv64-macos" ]; then - continue - elif [ "${os}" = "linux" ]; then - target="${arch}-unknown-linux-musl" - elif [ "${os}" = "macos" ]; then - target="${arch}-apple-darwin" - fi - while ! gh run download -R Start9Labs/start-os $CLI_RUN_ID -n start-cli_$target -D $(pwd); do sleep 1; done - mv start-cli "start-cli_${pair}" - done - done - fi -else - cd ~/Downloads/v$VERSION -fi - -start-cli --registry=https://alpha-registry-x.start9.com registry os version add $VERSION "v$VERSION" '' ">=0.3.5 <=$VERSION" - -if [ "$SKIP_UL" = "2" ]; then - exit 2 -elif [ "$SKIP_UL" != "1" ]; then - for file in *.deb start-cli_*; do - gh release upload -R Start9Labs/start-os v$VERSION $file - done - for file in *.iso *.squashfs; do - s3cmd put -P $file s3://startos-images/v$VERSION/$file - done -fi - -if [ "$SKIP_INDEX" != "1" ]; then - for arch in aarch64 aarch64-nonfree riscv64 x86_64 x86_64-nonfree; do - for file in *_$arch.squashfs *_$arch.iso; do - start-cli --registry=https://alpha-registry-x.start9.com registry os asset add --platform=$arch --version=$VERSION $file https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$file - done - done -fi - -for file in *.iso *.squashfs *.deb start-cli_*; do - gpg -u 7CFFDA41CA66056A --detach-sign --armor -o "${file}.asc" "$file" -done 
- -gpg --export -a 7CFFDA41CA66056A > dr-bonez.key.asc -tar -czvf signatures.tar.gz *.asc - -gh release upload -R Start9Labs/start-os v$VERSION signatures.tar.gz - -cat << EOF -# ISO Downloads - -- [x86_64/AMD64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64-nonfree.iso)) -- [x86_64/AMD64-slim (FOSS-only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_x86_64.iso) "Without proprietary software or drivers") -- [aarch64/ARM64](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64-nonfree.iso)) -- [aarch64/ARM64-slim (FOSS-Only)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_aarch64.iso) "Without proprietary software or drivers") -- [RISCV64 (RVA23)](https://startos-images.nyc3.cdn.digitaloceanspaces.com/v$VERSION/$(ls *_riscv64.iso)) - -EOF -cat << 'EOF' -# StartOS Checksums - -## SHA-256 -``` -EOF -sha256sum *.iso *.squashfs -cat << 'EOF' -``` - -## BLAKE-3 -``` -EOF -b3sum *.iso *.squashfs -cat << 'EOF' -``` - -# Start-Tunnel Checksums - -## SHA-256 -``` -EOF -sha256sum start-tunnel*.deb -cat << 'EOF' -``` - -## BLAKE-3 -``` -EOF -b3sum start-tunnel*.deb -cat << 'EOF' -``` - -# start-cli Checksums - -## SHA-256 -``` -EOF -sha256sum start-cli_* -cat << 'EOF' -``` - -## BLAKE-3 -``` -EOF -b3sum start-cli_* -cat << 'EOF' -``` -EOF \ No newline at end of file diff --git a/container-runtime/CLAUDE.md b/container-runtime/CLAUDE.md new file mode 100644 index 000000000..f0c40840b --- /dev/null +++ b/container-runtime/CLAUDE.md @@ -0,0 +1,32 @@ +# Container Runtime — Node.js Service Manager + +Node.js runtime that manages service containers via JSON-RPC. See `RPCSpec.md` in this directory for the full RPC protocol. 
+ +## Architecture + +``` +LXC Container (uniform base for all services) +└── systemd + └── container-runtime.service + └── Loads /usr/lib/startos/package/index.js (from s9pk javascript.squashfs) + └── Package JS launches subcontainers (from images in s9pk) +``` + +The container runtime communicates with the host via JSON-RPC over Unix socket. Package JavaScript must export functions conforming to the `ABI` type defined in `sdk/base/lib/types.ts`. + +## `/media/startos/` Directory (mounted by host into container) + +| Path | Description | +| -------------------- | ----------------------------------------------------- | +| `volumes//` | Package data volumes (id-mapped, persistent) | +| `assets/` | Read-only assets from s9pk `assets.squashfs` | +| `images//` | Container images (squashfs, used for subcontainers) | +| `images/.env` | Environment variables for image | +| `images/.json` | Image metadata | +| `backup/` | Backup mount point (mounted during backup operations) | +| `rpc/service.sock` | RPC socket (container runtime listens here) | +| `rpc/host.sock` | Host RPC socket (for effects callbacks to host) | + +## S9PK Structure + +See `../core/s9pk-structure.md` for the S9PK package format. diff --git a/container-runtime/RPCSpec.md b/container-runtime/RPCSpec.md index 7c43467ba..57ff31348 100644 --- a/container-runtime/RPCSpec.md +++ b/container-runtime/RPCSpec.md @@ -139,8 +139,8 @@ Evaluate a script in the runtime context. Used for debugging. 
The `execute` and `sandbox` methods route to procedures based on the `procedure` path: -| Procedure | Description | -|-----------|-------------| -| `/backup/create` | Create a backup | +| Procedure | Description | +| -------------------------- | ---------------------------- | +| `/backup/create` | Create a backup | | `/actions/{name}/getInput` | Get input spec for an action | -| `/actions/{name}/run` | Run an action with input | +| `/actions/{name}/run` | Run an action with input | diff --git a/container-runtime/__mocks__/mime.js b/container-runtime/__mocks__/mime.js new file mode 100644 index 000000000..d2f6ff46b --- /dev/null +++ b/container-runtime/__mocks__/mime.js @@ -0,0 +1,30 @@ +// Mock for ESM-only mime package — Jest's module loader doesn't support require(esm) +const types = { + ".png": "image/png", + ".jpg": "image/jpeg", + ".jpeg": "image/jpeg", + ".gif": "image/gif", + ".svg": "image/svg+xml", + ".webp": "image/webp", + ".ico": "image/x-icon", + ".json": "application/json", + ".js": "application/javascript", + ".html": "text/html", + ".css": "text/css", + ".txt": "text/plain", + ".md": "text/markdown", +} + +module.exports = { + default: { + getType(path) { + const ext = "." + path.split(".").pop() + return types[ext] || null + }, + getExtension(type) { + const entry = Object.entries(types).find(([, v]) => v === type) + return entry ? 
entry[0].slice(1) : null + }, + }, + __esModule: true, +} diff --git a/container-runtime/jest.config.js b/container-runtime/jest.config.js index f499f03f9..1e9bb209a 100644 --- a/container-runtime/jest.config.js +++ b/container-runtime/jest.config.js @@ -5,4 +5,7 @@ module.exports = { testEnvironment: "node", rootDir: "./src/", modulePathIgnorePatterns: ["./dist/"], + moduleNameMapper: { + "^mime$": "/../__mocks__/mime.js", + }, } diff --git a/container-runtime/package-lock.json b/container-runtime/package-lock.json index 1e0af6585..7ff3f092f 100644 --- a/container-runtime/package-lock.json +++ b/container-runtime/package-lock.json @@ -19,7 +19,6 @@ "lodash.merge": "^4.6.2", "mime": "^4.0.7", "node-fetch": "^3.1.0", - "ts-matches": "^6.3.2", "tslib": "^2.5.3", "typescript": "^5.1.3", "yaml": "^2.3.1" @@ -38,7 +37,7 @@ }, "../sdk/dist": { "name": "@start9labs/start-sdk", - "version": "0.4.0-beta.48", + "version": "0.4.0-beta.55", "license": "MIT", "dependencies": { "@iarna/toml": "^3.0.0", @@ -49,8 +48,9 @@ "ini": "^5.0.0", "isomorphic-fetch": "^3.0.0", "mime": "^4.0.7", - "ts-matches": "^6.3.2", - "yaml": "^2.7.1" + "yaml": "^2.7.1", + "zod": "^4.3.6", + "zod-deep-partial": "^1.2.0" }, "devDependencies": { "@types/jest": "^29.4.0", @@ -6494,12 +6494,6 @@ } } }, - "node_modules/ts-matches": { - "version": "6.3.2", - "resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-6.3.2.tgz", - "integrity": "sha512-UhSgJymF8cLd4y0vV29qlKVCkQpUtekAaujXbQVc729FezS8HwqzepqvtjzQ3HboatIqN/Idor85O2RMwT7lIQ==", - "license": "MIT" - }, "node_modules/tslib": { "version": "2.8.1", "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz", diff --git a/container-runtime/package.json b/container-runtime/package.json index 583f90f6c..7ded801d3 100644 --- a/container-runtime/package.json +++ b/container-runtime/package.json @@ -28,7 +28,6 @@ "lodash.merge": "^4.6.2", "mime": "^4.0.7", "node-fetch": "^3.1.0", - "ts-matches": "^6.3.2", "tslib": "^2.5.3", "typescript": 
"^5.1.3", "yaml": "^2.3.1" diff --git a/container-runtime/src/Adapters/EffectCreator.ts b/container-runtime/src/Adapters/EffectCreator.ts index 44c5d40b2..c244347eb 100644 --- a/container-runtime/src/Adapters/EffectCreator.ts +++ b/container-runtime/src/Adapters/EffectCreator.ts @@ -3,33 +3,39 @@ import { types as T, utils, VersionRange, + z, } from "@start9labs/start-sdk" import * as net from "net" -import { object, string, number, literals, some, unknown } from "ts-matches" import { Effects } from "../Models/Effects" import { CallbackHolder } from "../Models/CallbackHolder" import { asError } from "@start9labs/start-sdk/base/lib/util" -const matchRpcError = object({ - error: object({ - code: number, - message: string, - data: some( - string, - object({ - details: string, - debug: string.nullable().optional(), - }), - ) +const matchRpcError = z.object({ + error: z.object({ + code: z.number(), + message: z.string(), + data: z + .union([ + z.string(), + z.object({ + details: z.string(), + debug: z.string().nullable().optional(), + }), + ]) .nullable() .optional(), }), }) -const testRpcError = matchRpcError.test -const testRpcResult = object({ - result: unknown, -}).test -type RpcError = typeof matchRpcError._TYPE +function testRpcError(v: unknown): v is RpcError { + return matchRpcError.safeParse(v).success +} +const matchRpcResult = z.object({ + result: z.unknown(), +}) +function testRpcResult(v: unknown): v is z.infer { + return matchRpcResult.safeParse(v).success +} +type RpcError = z.infer const SOCKET_PATH = "/media/startos/rpc/host.sock" let hostSystemId = 0 @@ -71,7 +77,7 @@ const rpcRoundFor = "Error in host RPC:", utils.asError({ method, params, error: res.error }), ) - if (string.test(res.error.data)) { + if (typeof res.error.data === "string") { message += ": " + res.error.data console.error(`Details: ${res.error.data}`) } else { @@ -253,6 +259,14 @@ export function makeEffects(context: EffectContext): Effects { callback: 
context.callbacks?.addCallback(options.callback) || null, }) as ReturnType }, + getOutboundGateway( + ...[options]: Parameters + ) { + return rpcRound("get-outbound-gateway", { + ...options, + callback: context.callbacks?.addCallback(options.callback) || null, + }) as ReturnType + }, listServiceInterfaces( ...[options]: Parameters ) { @@ -316,6 +330,31 @@ export function makeEffects(context: EffectContext): Effects { T.Effects["setDataVersion"] > }, + plugin: { + url: { + register( + ...[options]: Parameters + ) { + return rpcRound("plugin.url.register", options) as ReturnType< + T.Effects["plugin"]["url"]["register"] + > + }, + exportUrl( + ...[options]: Parameters + ) { + return rpcRound("plugin.url.export-url", options) as ReturnType< + T.Effects["plugin"]["url"]["exportUrl"] + > + }, + clearUrls( + ...[options]: Parameters + ) { + return rpcRound("plugin.url.clear-urls", options) as ReturnType< + T.Effects["plugin"]["url"]["clearUrls"] + > + }, + }, + }, } if (context.callbacks?.onLeaveContext) self.onLeaveContext(() => { diff --git a/container-runtime/src/Adapters/RpcListener.ts b/container-runtime/src/Adapters/RpcListener.ts index 5567dd979..f9dd0fac2 100644 --- a/container-runtime/src/Adapters/RpcListener.ts +++ b/container-runtime/src/Adapters/RpcListener.ts @@ -1,25 +1,13 @@ // @ts-check import * as net from "net" -import { - object, - some, - string, - literal, - array, - number, - matches, - any, - shape, - anyOf, - literals, -} from "ts-matches" import { ExtendedVersion, types as T, utils, VersionRange, + z, } from "@start9labs/start-sdk" import * as fs from "fs" @@ -29,89 +17,92 @@ import { jsonPath, unNestPath } from "../Models/JsonPath" import { System } from "../Interfaces/System" import { makeEffects } from "./EffectCreator" type MaybePromise = T | Promise -export const matchRpcResult = anyOf( - object({ result: any }), - object({ - error: object({ - code: number, - message: string, - data: object({ - details: string.optional(), - debug: 
any.optional(), - }) +export const matchRpcResult = z.union([ + z.object({ result: z.any() }), + z.object({ + error: z.object({ + code: z.number(), + message: z.string(), + data: z + .object({ + details: z.string().optional(), + debug: z.any().optional(), + }) .nullable() .optional(), }), }), -) +]) -export type RpcResult = typeof matchRpcResult._TYPE +export type RpcResult = z.infer type SocketResponse = ({ jsonrpc: "2.0"; id: IdType } & RpcResult) | null const SOCKET_PARENT = "/media/startos/rpc" const SOCKET_PATH = "/media/startos/rpc/service.sock" const jsonrpc = "2.0" as const -const isResult = object({ result: any }).test +const isResultSchema = z.object({ result: z.any() }) +const isResult = (v: unknown): v is z.infer => + isResultSchema.safeParse(v).success -const idType = some(string, number, literal(null)) +const idType = z.union([z.string(), z.number(), z.literal(null)]) type IdType = null | string | number | undefined -const runType = object({ +const runType = z.object({ id: idType.optional(), - method: literal("execute"), - params: object({ - id: string, - procedure: string, - input: any, - timeout: number.nullable().optional(), + method: z.literal("execute"), + params: z.object({ + id: z.string(), + procedure: z.string(), + input: z.any(), + timeout: z.number().nullable().optional(), }), }) -const sandboxRunType = object({ +const sandboxRunType = z.object({ id: idType.optional(), - method: literal("sandbox"), - params: object({ - id: string, - procedure: string, - input: any, - timeout: number.nullable().optional(), + method: z.literal("sandbox"), + params: z.object({ + id: z.string(), + procedure: z.string(), + input: z.any(), + timeout: z.number().nullable().optional(), }), }) -const callbackType = object({ - method: literal("callback"), - params: object({ - id: number, - args: array, +const callbackType = z.object({ + method: z.literal("callback"), + params: z.object({ + id: z.number(), + args: z.array(z.unknown()), }), }) -const initType = 
object({ +const initType = z.object({ id: idType.optional(), - method: literal("init"), - params: object({ - id: string, - kind: literals("install", "update", "restore").nullable(), + method: z.literal("init"), + params: z.object({ + id: z.string(), + kind: z.enum(["install", "update", "restore"]).nullable(), }), }) -const startType = object({ +const startType = z.object({ id: idType.optional(), - method: literal("start"), + method: z.literal("start"), }) -const stopType = object({ +const stopType = z.object({ id: idType.optional(), - method: literal("stop"), + method: z.literal("stop"), }) -const exitType = object({ +const exitType = z.object({ id: idType.optional(), - method: literal("exit"), - params: object({ - id: string, - target: string.nullable(), + method: z.literal("exit"), + params: z.object({ + id: z.string(), + target: z.string().nullable(), }), }) -const evalType = object({ +const evalType = z.object({ id: idType.optional(), - method: literal("eval"), - params: object({ - script: string, + method: z.literal("eval"), + params: z.object({ + script: z.string(), }), }) @@ -144,7 +135,9 @@ const handleRpc = (id: IdType, result: Promise) => }, })) -const hasId = object({ id: idType }).test +const hasIdSchema = z.object({ id: idType }) +const hasId = (v: unknown): v is z.infer => + hasIdSchema.safeParse(v).success export class RpcListener { shouldExit = false unixSocketServer = net.createServer(async (server) => {}) @@ -246,40 +239,52 @@ export class RpcListener { } private dealWithInput(input: unknown): MaybePromise { - return matches(input) - .when(runType, async ({ id, params }) => { + const parsed = z.object({ method: z.string() }).safeParse(input) + if (!parsed.success) { + console.warn( + `Couldn't parse the following input ${JSON.stringify(input)}`, + ) + return { + jsonrpc, + id: (input as any)?.id, + error: { + code: -32602, + message: "invalid params", + data: { + details: JSON.stringify(input), + }, + }, + } + } + + switch (parsed.data.method) { + 
case "execute": { + const { id, params } = runType.parse(input) const system = this.system - const procedure = jsonPath.unsafeCast(params.procedure) - const { input, timeout, id: eventId } = params - const result = this.getResult( - procedure, - system, - eventId, - timeout, - input, - ) + const procedure = jsonPath.parse(params.procedure) + const { input: inp, timeout, id: eventId } = params + const result = this.getResult(procedure, system, eventId, timeout, inp) return handleRpc(id, result) - }) - .when(sandboxRunType, async ({ id, params }) => { + } + case "sandbox": { + const { id, params } = sandboxRunType.parse(input) const system = this.system - const procedure = jsonPath.unsafeCast(params.procedure) - const { input, timeout, id: eventId } = params - const result = this.getResult( - procedure, - system, - eventId, - timeout, - input, - ) + const procedure = jsonPath.parse(params.procedure) + const { input: inp, timeout, id: eventId } = params + const result = this.getResult(procedure, system, eventId, timeout, inp) return handleRpc(id, result) - }) - .when(callbackType, async ({ params: { id, args } }) => { + } + case "callback": { + const { + params: { id, args }, + } = callbackType.parse(input) this.callCallback(id, args) return null - }) - .when(startType, async ({ id }) => { + } + case "start": { + const { id } = startType.parse(input) const callbacks = this.callbacks?.getChild("main") || this.callbacks?.child("main") const effects = makeEffects({ @@ -290,8 +295,9 @@ export class RpcListener { id, this.system.start(effects).then((result) => ({ result })), ) - }) - .when(stopType, async ({ id }) => { + } + case "stop": { + const { id } = stopType.parse(input) return handleRpc( id, this.system.stop().then((result) => { @@ -300,8 +306,9 @@ export class RpcListener { return { result } }), ) - }) - .when(exitType, async ({ id, params }) => { + } + case "exit": { + const { id, params } = exitType.parse(input) return handleRpc( id, (async () => { @@ -323,8 
+330,9 @@ export class RpcListener { } })().then((result) => ({ result })), ) - }) - .when(initType, async ({ id, params }) => { + } + case "init": { + const { id, params } = initType.parse(input) return handleRpc( id, (async () => { @@ -349,8 +357,9 @@ export class RpcListener { } })().then((result) => ({ result })), ) - }) - .when(evalType, async ({ id, params }) => { + } + case "eval": { + const { id, params } = evalType.parse(input) return handleRpc( id, (async () => { @@ -375,41 +384,28 @@ export class RpcListener { } })(), ) - }) - .when( - shape({ id: idType.optional(), method: string }), - ({ id, method }) => ({ + } + default: { + const { id, method } = z + .object({ id: idType.optional(), method: z.string() }) + .passthrough() + .parse(input) + return { jsonrpc, id, error: { code: -32601, - message: `Method not found`, + message: "Method not found", data: { details: method, }, }, - }), - ) - - .defaultToLazy(() => { - console.warn( - `Couldn't parse the following input ${JSON.stringify(input)}`, - ) - return { - jsonrpc, - id: (input as any)?.id, - error: { - code: -32602, - message: "invalid params", - data: { - details: JSON.stringify(input), - }, - }, } - }) + } + } } private getResult( - procedure: typeof jsonPath._TYPE, + procedure: z.infer, system: System, eventId: string, timeout: number | null | undefined, @@ -437,6 +433,7 @@ export class RpcListener { return system.getActionInput( effects, procedures[2], + input?.prefill ?? 
null, timeout || null, ) case procedures[1] === "actions" && procedures[3] === "run": @@ -448,26 +445,18 @@ export class RpcListener { ) } } - })().then(ensureResultTypeShape, (error) => - matches(error) - .when( - object({ - error: string, - code: number.defaultTo(0), - }), - (error) => ({ - error: { - code: error.code, - message: error.error, - }, - }), - ) - .defaultToLazy(() => ({ - error: { - code: 0, - message: String(error), - }, - })), - ) + })().then(ensureResultTypeShape, (error) => { + const errorSchema = z.object({ + error: z.string(), + code: z.number().default(0), + }) + const parsed = errorSchema.safeParse(error) + if (parsed.success) { + return { + error: { code: parsed.data.code, message: parsed.data.error }, + } + } + return { error: { code: 0, message: String(error) } } + }) } } diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts index 029483212..674f17556 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/DockerProcedureContainer.ts @@ -2,7 +2,7 @@ import * as fs from "fs/promises" import * as cp from "child_process" import { SubContainer, types as T } from "@start9labs/start-sdk" import { promisify } from "util" -import { DockerProcedure, VolumeId } from "../../../Models/DockerProcedure" +import { DockerProcedure } from "../../../Models/DockerProcedure" import { Volume } from "./matchVolume" import { CommandOptions, @@ -28,7 +28,7 @@ export class DockerProcedureContainer extends Drop { effects: T.Effects, packageId: string, data: DockerProcedure, - volumes: { [id: VolumeId]: Volume }, + volumes: { [id: string]: Volume }, name: string, options: { subcontainer?: SubContainer } = {}, ) { @@ -47,7 +47,7 @@ export class DockerProcedureContainer extends Drop { effects: T.Effects, packageId: string, data: 
DockerProcedure, - volumes: { [id: VolumeId]: Volume }, + volumes: { [id: string]: Volume }, name: string, ) { const subcontainer = await SubContainerOwned.of( @@ -64,7 +64,7 @@ export class DockerProcedureContainer extends Drop { ? `${subcontainer.rootfs}${mounts[mount]}` : `${subcontainer.rootfs}/${mounts[mount]}` await fs.mkdir(path, { recursive: true }) - const volumeMount = volumes[mount] + const volumeMount: Volume = volumes[mount] if (volumeMount.type === "data") { await subcontainer.mount( Mounts.of().mountVolume({ @@ -82,18 +82,15 @@ export class DockerProcedureContainer extends Drop { }), ) } else if (volumeMount.type === "certificate") { + const hostInfo = await effects.getHostInfo({ + hostId: volumeMount["interface-id"], + }) const hostnames = [ `${packageId}.embassy`, ...new Set( - Object.values( - ( - await effects.getHostInfo({ - hostId: volumeMount["interface-id"], - }) - )?.hostnameInfo || {}, - ) - .flatMap((h) => h) - .flatMap((h) => (h.kind === "onion" ? [h.hostname.value] : [])), + Object.values(hostInfo?.bindings || {}) + .flatMap((b) => b.addresses.available) + .map((h) => h.hostname), ).values(), ] const certChain = await effects.getSslCertificate({ diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts index c2f84c19f..15a97178d 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/index.ts @@ -15,26 +15,11 @@ import { System } from "../../../Interfaces/System" import { matchManifest, Manifest } from "./matchManifest" import * as childProcess from "node:child_process" import { DockerProcedureContainer } from "./DockerProcedureContainer" +import { DockerProcedure } from "../../../Models/DockerProcedure" import { promisify } from "node:util" import * as U from "./oldEmbassyTypes" import { MainLoop } from "./MainLoop" -import { - matches, - boolean, - dictionary, - 
literal, - literals, - object, - string, - unknown, - any, - tuple, - number, - anyOf, - deferred, - Parser, - array, -} from "ts-matches" +import { z } from "@start9labs/start-sdk" import { AddSslOptions } from "@start9labs/start-sdk/base/lib/osBindings" import { BindOptionsByProtocol, @@ -57,6 +42,15 @@ function todo(): never { throw new Error("Not implemented") } +/** + * Local type for procedure values from the manifest. + * The manifest's zod schemas use ZodTypeAny casts that produce `unknown` in zod v4. + * This type restores the expected shape for type-safe property access. + */ +type Procedure = + | (DockerProcedure & { type: "docker" }) + | { type: "script"; args: unknown[] | null } + const MANIFEST_LOCATION = "/usr/lib/startos/package/embassyManifest.json" export const EMBASSY_JS_LOCATION = "/usr/lib/startos/package/embassy.js" @@ -65,26 +59,24 @@ const configFile = FileHelper.json( base: new Volume("embassy"), subpath: "config.json", }, - matches.any, + z.any(), ) const dependsOnFile = FileHelper.json( { base: new Volume("embassy"), subpath: "dependsOn.json", }, - dictionary([string, array(string)]), + z.record(z.string(), z.array(z.string())), ) -const matchResult = object({ - result: any, +const matchResult = z.object({ + result: z.any(), }) -const matchError = object({ - error: string, +const matchError = z.object({ + error: z.string(), }) -const matchErrorCode = object<{ - "error-code": [number, string] | readonly [number, string] -}>({ - "error-code": tuple(number, string), +const matchErrorCode = z.object({ + "error-code": z.tuple([z.number(), z.string()]), }) const assertNever = ( @@ -96,29 +88,34 @@ const assertNever = ( /** Should be changing the type for specific properties, and this is mostly a transformation for the old return types to the newer one. 
*/ +function isMatchResult(a: unknown): a is z.infer { + return matchResult.safeParse(a).success +} +function isMatchError(a: unknown): a is z.infer { + return matchError.safeParse(a).success +} +function isMatchErrorCode(a: unknown): a is z.infer { + return matchErrorCode.safeParse(a).success +} const fromReturnType = (a: U.ResultType): A => { - if (matchResult.test(a)) { + if (isMatchResult(a)) { return a.result } - if (matchError.test(a)) { + if (isMatchError(a)) { console.info({ passedErrorStack: new Error().stack, error: a.error }) throw { error: a.error } } - if (matchErrorCode.test(a)) { + if (isMatchErrorCode(a)) { const [code, message] = a["error-code"] throw { error: message, code } } - return assertNever(a) + return assertNever(a as never) } -const matchSetResult = object({ - "depends-on": dictionary([string, array(string)]) - .nullable() - .optional(), - dependsOn: dictionary([string, array(string)]) - .nullable() - .optional(), - signal: literals( +const matchSetResult = z.object({ + "depends-on": z.record(z.string(), z.array(z.string())).nullable().optional(), + dependsOn: z.record(z.string(), z.array(z.string())).nullable().optional(), + signal: z.enum([ "SIGTERM", "SIGHUP", "SIGINT", @@ -151,7 +148,7 @@ const matchSetResult = object({ "SIGPWR", "SIGSYS", "SIGINFO", - ), + ]), }) type OldGetConfigRes = { @@ -233,33 +230,29 @@ const asProperty = (x: PackagePropertiesV2): PropertiesReturn => Object.fromEntries( Object.entries(x).map(([key, value]) => [key, asProperty_(value)]), ) -const [matchPackageProperties, setMatchPackageProperties] = - deferred() -const matchPackagePropertyObject: Parser = - object({ - value: matchPackageProperties, - type: literal("object"), - description: string, - }) +const matchPackagePropertyObject: z.ZodType = z.object({ + value: z.lazy(() => matchPackageProperties), + type: z.literal("object"), + description: z.string(), +}) -const matchPackagePropertyString: Parser = - object({ - type: literal("string"), - description: 
string.nullable().optional(), - value: string, - copyable: boolean.nullable().optional(), - qr: boolean.nullable().optional(), - masked: boolean.nullable().optional(), - }) -setMatchPackageProperties( - dictionary([ - string, - anyOf(matchPackagePropertyObject, matchPackagePropertyString), - ]), +const matchPackagePropertyString: z.ZodType = z.object({ + type: z.literal("string"), + description: z.string().nullable().optional(), + value: z.string(), + copyable: z.boolean().nullable().optional(), + qr: z.boolean().nullable().optional(), + masked: z.boolean().nullable().optional(), +}) +const matchPackageProperties: z.ZodType = z.lazy(() => + z.record( + z.string(), + z.union([matchPackagePropertyObject, matchPackagePropertyString]), + ), ) -const matchProperties = object({ - version: literal(2), +const matchProperties = z.object({ + version: z.literal(2), data: matchPackageProperties, }) @@ -303,7 +296,7 @@ export class SystemForEmbassy implements System { }) const manifestData = await fs.readFile(manifestLocation, "utf-8") return new SystemForEmbassy( - matchManifest.unsafeCast(JSON.parse(manifestData)), + matchManifest.parse(JSON.parse(manifestData)), moduleCode, ) } @@ -389,7 +382,9 @@ export class SystemForEmbassy implements System { delete this.currentRunning if (currentRunning) { await currentRunning.clean({ - timeout: fromDuration(this.manifest.main["sigterm-timeout"] || "30s"), + timeout: fromDuration( + (this.manifest.main["sigterm-timeout"] as any) || "30s", + ), }) } } @@ -510,6 +505,7 @@ export class SystemForEmbassy implements System { async getActionInput( effects: Effects, actionId: string, + _prefill: Record | null, timeoutMs: number | null, ): Promise { if (actionId === "config") { @@ -622,7 +618,7 @@ export class SystemForEmbassy implements System { effects: Effects, timeoutMs: number | null, ): Promise { - const backup = this.manifest.backup.create + const backup = this.manifest.backup.create as Procedure if (backup.type === "docker") { const 
commands = [backup.entrypoint, ...backup.args] const container = await DockerProcedureContainer.of( @@ -655,7 +651,7 @@ export class SystemForEmbassy implements System { encoding: "utf-8", }) .catch((_) => null) - const restoreBackup = this.manifest.backup.restore + const restoreBackup = this.manifest.backup.restore as Procedure if (restoreBackup.type === "docker") { const commands = [restoreBackup.entrypoint, ...restoreBackup.args] const container = await DockerProcedureContainer.of( @@ -688,7 +684,7 @@ export class SystemForEmbassy implements System { effects: Effects, timeoutMs: number | null, ): Promise { - const config = this.manifest.config?.get + const config = this.manifest.config?.get as Procedure | undefined if (!config) return { spec: {} } if (config.type === "docker") { const commands = [config.entrypoint, ...config.args] @@ -730,7 +726,7 @@ export class SystemForEmbassy implements System { ) await updateConfig(effects, this.manifest, spec, newConfig) await configFile.write(effects, newConfig) - const setConfigValue = this.manifest.config?.set + const setConfigValue = this.manifest.config?.set as Procedure | undefined if (!setConfigValue) return if (setConfigValue.type === "docker") { const commands = [ @@ -745,7 +741,7 @@ export class SystemForEmbassy implements System { this.manifest.volumes, `Set Config - ${commands.join(" ")}`, ) - const answer = matchSetResult.unsafeCast( + const answer = matchSetResult.parse( JSON.parse( (await container.execFail(commands, timeoutMs)).stdout.toString(), ), @@ -758,7 +754,7 @@ export class SystemForEmbassy implements System { const method = moduleCode.setConfig if (!method) throw new Error("Expecting that the method setConfig exists") - const answer = matchSetResult.unsafeCast( + const answer = matchSetResult.parse( await method( polyfillEffects(effects, this.manifest), newConfig as U.Config, @@ -787,7 +783,11 @@ export class SystemForEmbassy implements System { const requiredDeps = { ...Object.fromEntries( 
Object.entries(this.manifest.dependencies ?? {}) - .filter(([k, v]) => v?.requirement.type === "required") + .filter( + ([k, v]) => + (v?.requirement as { type: string } | undefined)?.type === + "required", + ) .map((x) => [x[0], []]) || [], ), } @@ -855,7 +855,7 @@ export class SystemForEmbassy implements System { } if (migration) { - const [_, procedure] = migration + const [_, procedure] = migration as readonly [unknown, Procedure] if (procedure.type === "docker") { const commands = [procedure.entrypoint, ...procedure.args] const container = await DockerProcedureContainer.of( @@ -893,7 +893,10 @@ export class SystemForEmbassy implements System { effects: Effects, timeoutMs: number | null, ): Promise { - const setConfigValue = this.manifest.properties + const setConfigValue = this.manifest.properties as + | Procedure + | null + | undefined if (!setConfigValue) throw new Error("There is no properties") if (setConfigValue.type === "docker") { const commands = [setConfigValue.entrypoint, ...setConfigValue.args] @@ -904,7 +907,7 @@ export class SystemForEmbassy implements System { this.manifest.volumes, `Properties - ${commands.join(" ")}`, ) - const properties = matchProperties.unsafeCast( + const properties = matchProperties.parse( JSON.parse( (await container.execFail(commands, timeoutMs)).stdout.toString(), ), @@ -915,7 +918,7 @@ export class SystemForEmbassy implements System { const method = moduleCode.properties if (!method) throw new Error("Expecting that the method properties exists") - const properties = matchProperties.unsafeCast( + const properties = matchProperties.parse( await method(polyfillEffects(effects, this.manifest)).then( fromReturnType, ), @@ -930,7 +933,8 @@ export class SystemForEmbassy implements System { formData: unknown, timeoutMs: number | null, ): Promise { - const actionProcedure = this.manifest.actions?.[actionId]?.implementation + const actionProcedure = this.manifest.actions?.[actionId] + ?.implementation as Procedure | undefined 
const toActionResult = ({ message, value, @@ -997,7 +1001,9 @@ export class SystemForEmbassy implements System { oldConfig: unknown, timeoutMs: number | null, ): Promise { - const actionProcedure = this.manifest.dependencies?.[id]?.config?.check + const actionProcedure = this.manifest.dependencies?.[id]?.config?.check as + | Procedure + | undefined if (!actionProcedure) return { message: "Action not found", value: null } if (actionProcedure.type === "docker") { const commands = [ @@ -1089,40 +1095,50 @@ export class SystemForEmbassy implements System { } } -const matchPointer = object({ - type: literal("pointer"), +const matchPointer = z.object({ + type: z.literal("pointer"), }) -const matchPointerPackage = object({ - subtype: literal("package"), - target: literals("tor-key", "tor-address", "lan-address"), - "package-id": string, - interface: string, +const matchPointerPackage = z.object({ + subtype: z.literal("package"), + target: z.enum(["tor-key", "tor-address", "lan-address"]), + "package-id": z.string(), + interface: z.string(), }) -const matchPointerConfig = object({ - subtype: literal("package"), - target: literals("config"), - "package-id": string, - selector: string, - multi: boolean, +const matchPointerConfig = z.object({ + subtype: z.literal("package"), + target: z.enum(["config"]), + "package-id": z.string(), + selector: z.string(), + multi: z.boolean(), }) -const matchSpec = object({ - spec: object, +const matchSpec = z.object({ + spec: z.record(z.string(), z.unknown()), }) -const matchVariants = object({ variants: dictionary([string, unknown]) }) +const matchVariants = z.object({ variants: z.record(z.string(), z.unknown()) }) +function isMatchPointer(v: unknown): v is z.infer { + return matchPointer.safeParse(v).success +} +function isMatchSpec(v: unknown): v is z.infer { + return matchSpec.safeParse(v).success +} +function isMatchVariants(v: unknown): v is z.infer { + return matchVariants.safeParse(v).success +} function cleanSpecOfPointers(mutSpec: 
T): T { - if (!object.test(mutSpec)) return mutSpec + if (typeof mutSpec !== "object" || mutSpec === null) return mutSpec for (const key in mutSpec) { const value = mutSpec[key] - if (matchSpec.test(value)) value.spec = cleanSpecOfPointers(value.spec) - if (matchVariants.test(value)) + if (isMatchSpec(value)) + value.spec = cleanSpecOfPointers(value.spec) as Record + if (isMatchVariants(value)) value.variants = Object.fromEntries( Object.entries(value.variants).map(([key, value]) => [ key, cleanSpecOfPointers(value), ]), ) - if (!matchPointer.test(value)) continue + if (!isMatchPointer(value)) continue delete mutSpec[key] // // if (value.target === ) } @@ -1244,12 +1260,8 @@ async function updateConfig( ? "" : catchFn( () => - (specValue.target === "lan-address" - ? filled.addressInfo!.filter({ kind: "mdns" }) || - filled.addressInfo!.onion - : filled.addressInfo!.onion || - filled.addressInfo!.filter({ kind: "mdns" }) - ).hostnames[0].hostname.value, + filled.addressInfo!.filter({ kind: "mdns" })!.hostnames[0] + .hostname, ) || "" mutConfigValue[key] = url } @@ -1272,7 +1284,7 @@ function extractServiceInterfaceId(manifest: Manifest, specInterface: string) { } async function convertToNewConfig(value: OldGetConfigRes) { try { - const valueSpec: OldConfigSpec = matchOldConfigSpec.unsafeCast(value.spec) + const valueSpec: OldConfigSpec = matchOldConfigSpec.parse(value.spec) const spec = transformConfigSpec(valueSpec) if (!value.config) return { spec, config: null } const config = transformOldConfigToNew(valueSpec, value.config) ?? 
null diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.test.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.test.ts index 3730dd3b6..6173f00d0 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.test.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.test.ts @@ -4,9 +4,9 @@ import synapseManifest from "./__fixtures__/synapseManifest" describe("matchManifest", () => { test("gittea", () => { - matchManifest.unsafeCast(giteaManifest) + matchManifest.parse(giteaManifest) }) test("synapse", () => { - matchManifest.unsafeCast(synapseManifest) + matchManifest.parse(synapseManifest) }) }) diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts index 8c680cbd6..f3fe101eb 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchManifest.ts @@ -1,126 +1,121 @@ -import { - object, - literal, - string, - array, - boolean, - dictionary, - literals, - number, - unknown, - some, - every, -} from "ts-matches" +import { z } from "@start9labs/start-sdk" import { matchVolume } from "./matchVolume" import { matchDockerProcedure } from "../../../Models/DockerProcedure" -const matchJsProcedure = object({ - type: literal("script"), - args: array(unknown).nullable().optional().defaultTo([]), +const matchJsProcedure = z.object({ + type: z.literal("script"), + args: z.array(z.unknown()).nullable().optional().default([]), }) -const matchProcedure = some(matchDockerProcedure, matchJsProcedure) -export type Procedure = typeof matchProcedure._TYPE +const matchProcedure = z.union([matchDockerProcedure, matchJsProcedure]) +export type Procedure = z.infer -const matchAction = object({ - name: string, - description: string, - warning: string.nullable().optional(), +const matchAction = 
z.object({ + name: z.string(), + description: z.string(), + warning: z.string().nullable().optional(), implementation: matchProcedure, - "allowed-statuses": array(literals("running", "stopped")), - "input-spec": unknown.nullable().optional(), + "allowed-statuses": z.array(z.enum(["running", "stopped"])), + "input-spec": z.unknown().nullable().optional(), }) -export const matchManifest = object({ - id: string, - title: string, - version: string, +export const matchManifest = z.object({ + id: z.string(), + title: z.string(), + version: z.string(), main: matchDockerProcedure, - assets: object({ - assets: string.nullable().optional(), - scripts: string.nullable().optional(), - }) + assets: z + .object({ + assets: z.string().nullable().optional(), + scripts: z.string().nullable().optional(), + }) .nullable() .optional(), - "health-checks": dictionary([ - string, - every( + "health-checks": z.record( + z.string(), + z.intersection( matchProcedure, - object({ - name: string, - ["success-message"]: string.nullable().optional(), + z.object({ + name: z.string(), + "success-message": z.string().nullable().optional(), }), ), - ]), - config: object({ - get: matchProcedure, - set: matchProcedure, - }) + ), + config: z + .object({ + get: matchProcedure, + set: matchProcedure, + }) .nullable() .optional(), properties: matchProcedure.nullable().optional(), - volumes: dictionary([string, matchVolume]), - interfaces: dictionary([ - string, - object({ - name: string, - description: string, - "tor-config": object({ - "port-mapping": dictionary([string, string]), - }) + volumes: z.record(z.string(), matchVolume), + interfaces: z.record( + z.string(), + z.object({ + name: z.string(), + description: z.string(), + "tor-config": z + .object({ + "port-mapping": z.record(z.string(), z.string()), + }) .nullable() .optional(), - "lan-config": dictionary([ - string, - object({ - ssl: boolean, - internal: number, - }), - ]) + "lan-config": z + .record( + z.string(), + z.object({ + ssl: 
z.boolean(), + internal: z.number(), + }), + ) .nullable() .optional(), - ui: boolean, - protocols: array(string), + ui: z.boolean(), + protocols: z.array(z.string()), }), - ]), - backup: object({ + ), + backup: z.object({ create: matchProcedure, restore: matchProcedure, }), - migrations: object({ - to: dictionary([string, matchProcedure]), - from: dictionary([string, matchProcedure]), - }) + migrations: z + .object({ + to: z.record(z.string(), matchProcedure), + from: z.record(z.string(), matchProcedure), + }) .nullable() .optional(), - dependencies: dictionary([ - string, - object({ - version: string, - requirement: some( - object({ - type: literal("opt-in"), - how: string, - }), - object({ - type: literal("opt-out"), - how: string, - }), - object({ - type: literal("required"), - }), - ), - description: string.nullable().optional(), - config: object({ - check: matchProcedure, - "auto-configure": matchProcedure, + dependencies: z.record( + z.string(), + z + .object({ + version: z.string(), + requirement: z.union([ + z.object({ + type: z.literal("opt-in"), + how: z.string(), + }), + z.object({ + type: z.literal("opt-out"), + how: z.string(), + }), + z.object({ + type: z.literal("required"), + }), + ]), + description: z.string().nullable().optional(), + config: z + .object({ + check: matchProcedure, + "auto-configure": matchProcedure, + }) + .nullable() + .optional(), }) - .nullable() - .optional(), - }) .nullable() .optional(), - ]), + ), - actions: dictionary([string, matchAction]), + actions: z.record(z.string(), matchAction), }) -export type Manifest = typeof matchManifest._TYPE +export type Manifest = z.infer diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchVolume.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchVolume.ts index baffbdd12..ba17d007b 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchVolume.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/matchVolume.ts @@ -1,32 
+1,32 @@ -import { object, literal, string, boolean, some } from "ts-matches" +import { z } from "@start9labs/start-sdk" -const matchDataVolume = object({ - type: literal("data"), - readonly: boolean.optional(), +const matchDataVolume = z.object({ + type: z.literal("data"), + readonly: z.boolean().optional(), }) -const matchAssetVolume = object({ - type: literal("assets"), +const matchAssetVolume = z.object({ + type: z.literal("assets"), }) -const matchPointerVolume = object({ - type: literal("pointer"), - "package-id": string, - "volume-id": string, - path: string, - readonly: boolean, +const matchPointerVolume = z.object({ + type: z.literal("pointer"), + "package-id": z.string(), + "volume-id": z.string(), + path: z.string(), + readonly: z.boolean(), }) -const matchCertificateVolume = object({ - type: literal("certificate"), - "interface-id": string, +const matchCertificateVolume = z.object({ + type: z.literal("certificate"), + "interface-id": z.string(), }) -const matchBackupVolume = object({ - type: literal("backup"), - readonly: boolean, +const matchBackupVolume = z.object({ + type: z.literal("backup"), + readonly: z.boolean(), }) -export const matchVolume = some( +export const matchVolume = z.union([ matchDataVolume, matchAssetVolume, matchPointerVolume, matchCertificateVolume, matchBackupVolume, -) -export type Volume = typeof matchVolume._TYPE +]) +export type Volume = z.infer diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.test.ts b/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.test.ts index f29ff94de..cebcb595b 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.test.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.test.ts @@ -12,43 +12,43 @@ import nostrConfig2 from "./__fixtures__/nostrConfig2" describe("transformConfigSpec", () => { test("matchOldConfigSpec(embassyPages.homepage.variants[web-page])", () => { - 
matchOldConfigSpec.unsafeCast( + matchOldConfigSpec.parse( fixtureEmbassyPagesConfig.homepage.variants["web-page"], ) }) test("matchOldConfigSpec(embassyPages)", () => { - matchOldConfigSpec.unsafeCast(fixtureEmbassyPagesConfig) + matchOldConfigSpec.parse(fixtureEmbassyPagesConfig) }) test("transformConfigSpec(embassyPages)", () => { - const spec = matchOldConfigSpec.unsafeCast(fixtureEmbassyPagesConfig) + const spec = matchOldConfigSpec.parse(fixtureEmbassyPagesConfig) expect(transformConfigSpec(spec)).toMatchSnapshot() }) test("matchOldConfigSpec(RTL.nodes)", () => { - matchOldValueSpecList.unsafeCast(fixtureRTLConfig.nodes) + matchOldValueSpecList.parse(fixtureRTLConfig.nodes) }) test("matchOldConfigSpec(RTL)", () => { - matchOldConfigSpec.unsafeCast(fixtureRTLConfig) + matchOldConfigSpec.parse(fixtureRTLConfig) }) test("transformConfigSpec(RTL)", () => { - const spec = matchOldConfigSpec.unsafeCast(fixtureRTLConfig) + const spec = matchOldConfigSpec.parse(fixtureRTLConfig) expect(transformConfigSpec(spec)).toMatchSnapshot() }) test("transformConfigSpec(searNXG)", () => { - const spec = matchOldConfigSpec.unsafeCast(searNXG) + const spec = matchOldConfigSpec.parse(searNXG) expect(transformConfigSpec(spec)).toMatchSnapshot() }) test("transformConfigSpec(bitcoind)", () => { - const spec = matchOldConfigSpec.unsafeCast(bitcoind) + const spec = matchOldConfigSpec.parse(bitcoind) expect(transformConfigSpec(spec)).toMatchSnapshot() }) test("transformConfigSpec(nostr)", () => { - const spec = matchOldConfigSpec.unsafeCast(nostr) + const spec = matchOldConfigSpec.parse(nostr) expect(transformConfigSpec(spec)).toMatchSnapshot() }) test("transformConfigSpec(nostr2)", () => { - const spec = matchOldConfigSpec.unsafeCast(nostrConfig2) + const spec = matchOldConfigSpec.parse(nostrConfig2) expect(transformConfigSpec(spec)).toMatchSnapshot() }) }) diff --git a/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.ts 
b/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.ts index 77d611c95..22e56bc6b 100644 --- a/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.ts +++ b/container-runtime/src/Adapters/Systems/SystemForEmbassy/transformConfigSpec.ts @@ -1,19 +1,4 @@ -import { IST } from "@start9labs/start-sdk" -import { - dictionary, - object, - anyOf, - string, - literals, - array, - number, - boolean, - Parser, - deferred, - every, - nill, - literal, -} from "ts-matches" +import { IST, z } from "@start9labs/start-sdk" export function transformConfigSpec(oldSpec: OldConfigSpec): IST.InputSpec { return Object.entries(oldSpec).reduce((inputSpec, [key, oldVal]) => { @@ -82,7 +67,7 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): IST.InputSpec { name: oldVal.name, description: oldVal.description || null, warning: oldVal.warning || null, - spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(oldVal.spec)), + spec: transformConfigSpec(matchOldConfigSpec.parse(oldVal.spec)), } } else if (oldVal.type === "string") { newVal = { @@ -121,7 +106,7 @@ export function transformConfigSpec(oldSpec: OldConfigSpec): IST.InputSpec { ...obj, [id]: { name: oldVal.tag["variant-names"][id] || id, - spec: transformConfigSpec(matchOldConfigSpec.unsafeCast(spec)), + spec: transformConfigSpec(matchOldConfigSpec.parse(spec)), }, }), {} as Record, @@ -153,7 +138,7 @@ export function transformOldConfigToNew( if (isObject(val)) { newVal = transformOldConfigToNew( - matchOldConfigSpec.unsafeCast(val.spec), + matchOldConfigSpec.parse(val.spec), config[key], ) } @@ -172,7 +157,7 @@ export function transformOldConfigToNew( newVal = { selection, value: transformOldConfigToNew( - matchOldConfigSpec.unsafeCast(val.variants[selection]), + matchOldConfigSpec.parse(val.variants[selection]), config[key], ), } @@ -183,10 +168,7 @@ export function transformOldConfigToNew( if (isObjectList(val)) { newVal = (config[key] as object[]).map((obj) => - 
transformOldConfigToNew( - matchOldConfigSpec.unsafeCast(val.spec.spec), - obj, - ), + transformOldConfigToNew(matchOldConfigSpec.parse(val.spec.spec), obj), ) } else if (isUnionList(val)) return obj } @@ -212,7 +194,7 @@ export function transformNewConfigToOld( if (isObject(val)) { newVal = transformNewConfigToOld( - matchOldConfigSpec.unsafeCast(val.spec), + matchOldConfigSpec.parse(val.spec), config[key], ) } @@ -221,7 +203,7 @@ export function transformNewConfigToOld( newVal = { [val.tag.id]: config[key].selection, ...transformNewConfigToOld( - matchOldConfigSpec.unsafeCast(val.variants[config[key].selection]), + matchOldConfigSpec.parse(val.variants[config[key].selection]), config[key].value, ), } @@ -230,10 +212,7 @@ export function transformNewConfigToOld( if (isList(val)) { if (isObjectList(val)) { newVal = (config[key] as object[]).map((obj) => - transformNewConfigToOld( - matchOldConfigSpec.unsafeCast(val.spec.spec), - obj, - ), + transformNewConfigToOld(matchOldConfigSpec.parse(val.spec.spec), obj), ) } else if (isUnionList(val)) return obj } @@ -337,9 +316,7 @@ function getListSpec( default: oldVal.default as Record[], spec: { type: "object", - spec: transformConfigSpec( - matchOldConfigSpec.unsafeCast(oldVal.spec.spec), - ), + spec: transformConfigSpec(matchOldConfigSpec.parse(oldVal.spec.spec)), uniqueBy: oldVal.spec["unique-by"] || null, displayAs: oldVal.spec["display-as"] || null, }, @@ -393,211 +370,281 @@ function isUnionList( } export type OldConfigSpec = Record -const [_matchOldConfigSpec, setMatchOldConfigSpec] = deferred() -export const matchOldConfigSpec = _matchOldConfigSpec as Parser< - unknown, - OldConfigSpec -> -export const matchOldDefaultString = anyOf( - string, - object({ charset: string, len: number }), +export const matchOldConfigSpec: z.ZodType = z.lazy(() => + z.record(z.string(), matchOldValueSpec), ) -type OldDefaultString = typeof matchOldDefaultString._TYPE +export const matchOldDefaultString = z.union([ + z.string(), + 
z.object({ charset: z.string(), len: z.number() }), +]) +type OldDefaultString = z.infer -export const matchOldValueSpecString = object({ - type: literals("string"), - name: string, - masked: boolean.nullable().optional(), - copyable: boolean.nullable().optional(), - nullable: boolean.nullable().optional(), - placeholder: string.nullable().optional(), - pattern: string.nullable().optional(), - "pattern-description": string.nullable().optional(), +export const matchOldValueSpecString = z.object({ + type: z.enum(["string"]), + name: z.string(), + masked: z.boolean().nullable().optional(), + copyable: z.boolean().nullable().optional(), + nullable: z.boolean().nullable().optional(), + placeholder: z.string().nullable().optional(), + pattern: z.string().nullable().optional(), + "pattern-description": z.string().nullable().optional(), default: matchOldDefaultString.nullable().optional(), - textarea: boolean.nullable().optional(), - description: string.nullable().optional(), - warning: string.nullable().optional(), + textarea: z.boolean().nullable().optional(), + description: z.string().nullable().optional(), + warning: z.string().nullable().optional(), }) -export const matchOldValueSpecNumber = object({ - type: literals("number"), - nullable: boolean, - name: string, - range: string, - integral: boolean, - default: number.nullable().optional(), - description: string.nullable().optional(), - warning: string.nullable().optional(), - units: string.nullable().optional(), - placeholder: anyOf(number, string).nullable().optional(), +export const matchOldValueSpecNumber = z.object({ + type: z.enum(["number"]), + nullable: z.boolean(), + name: z.string(), + range: z.string(), + integral: z.boolean(), + default: z.number().nullable().optional(), + description: z.string().nullable().optional(), + warning: z.string().nullable().optional(), + units: z.string().nullable().optional(), + placeholder: z.union([z.number(), z.string()]).nullable().optional(), }) -type OldValueSpecNumber = 
typeof matchOldValueSpecNumber._TYPE +type OldValueSpecNumber = z.infer -export const matchOldValueSpecBoolean = object({ - type: literals("boolean"), - default: boolean, - name: string, - description: string.nullable().optional(), - warning: string.nullable().optional(), +export const matchOldValueSpecBoolean = z.object({ + type: z.enum(["boolean"]), + default: z.boolean(), + name: z.string(), + description: z.string().nullable().optional(), + warning: z.string().nullable().optional(), }) -type OldValueSpecBoolean = typeof matchOldValueSpecBoolean._TYPE +type OldValueSpecBoolean = z.infer -const matchOldValueSpecObject = object({ - type: literals("object"), - spec: _matchOldConfigSpec, - name: string, - description: string.nullable().optional(), - warning: string.nullable().optional(), +type OldValueSpecObject = { + type: "object" + spec: OldConfigSpec + name: string + description?: string | null + warning?: string | null +} +const matchOldValueSpecObject: z.ZodType = z.object({ + type: z.enum(["object"]), + spec: z.lazy(() => matchOldConfigSpec), + name: z.string(), + description: z.string().nullable().optional(), + warning: z.string().nullable().optional(), }) -type OldValueSpecObject = typeof matchOldValueSpecObject._TYPE -const matchOldValueSpecEnum = object({ - values: array(string), - "value-names": dictionary([string, string]), - type: literals("enum"), - default: string, - name: string, - description: string.nullable().optional(), - warning: string.nullable().optional(), +const matchOldValueSpecEnum = z.object({ + values: z.array(z.string()), + "value-names": z.record(z.string(), z.string()), + type: z.enum(["enum"]), + default: z.string(), + name: z.string(), + description: z.string().nullable().optional(), + warning: z.string().nullable().optional(), }) -type OldValueSpecEnum = typeof matchOldValueSpecEnum._TYPE +type OldValueSpecEnum = z.infer -const matchOldUnionTagSpec = object({ - id: string, // The name of the field containing one of the union 
variants - "variant-names": dictionary([string, string]), // The name of each variant - name: string, - description: string.nullable().optional(), - warning: string.nullable().optional(), +const matchOldUnionTagSpec = z.object({ + id: z.string(), // The name of the field containing one of the union variants + "variant-names": z.record(z.string(), z.string()), // The name of each variant + name: z.string(), + description: z.string().nullable().optional(), + warning: z.string().nullable().optional(), }) -const matchOldValueSpecUnion = object({ - type: literals("union"), +type OldValueSpecUnion = { + type: "union" + tag: z.infer + variants: Record + default: string +} +const matchOldValueSpecUnion: z.ZodType = z.object({ + type: z.enum(["union"]), tag: matchOldUnionTagSpec, - variants: dictionary([string, _matchOldConfigSpec]), - default: string, + variants: z.record( + z.string(), + z.lazy(() => matchOldConfigSpec), + ), + default: z.string(), }) -type OldValueSpecUnion = typeof matchOldValueSpecUnion._TYPE -const [matchOldUniqueBy, setOldUniqueBy] = deferred() type OldUniqueBy = | null | string | { any: OldUniqueBy[] } | { all: OldUniqueBy[] } -setOldUniqueBy( - anyOf( - nill, - string, - object({ any: array(matchOldUniqueBy) }), - object({ all: array(matchOldUniqueBy) }), - ), +const matchOldUniqueBy: z.ZodType = z.lazy(() => + z.union([ + z.null(), + z.string(), + z.object({ any: z.array(matchOldUniqueBy) }), + z.object({ all: z.array(matchOldUniqueBy) }), + ]), ) -const matchOldListValueSpecObject = object({ - spec: _matchOldConfigSpec, // this is a mapped type of the config object at this level, replacing the object's values with specs on those values - "unique-by": matchOldUniqueBy.nullable().optional(), // indicates whether duplicates can be permitted in the list - "display-as": string.nullable().optional(), // this should be a handlebars template which can make use of the entire config which corresponds to 'spec' -}) -const matchOldListValueSpecUnion = 
object({ +type OldListValueSpecObject = { + spec: OldConfigSpec + "unique-by"?: OldUniqueBy | null + "display-as"?: string | null +} +const matchOldListValueSpecObject: z.ZodType = z.object( + { + spec: z.lazy(() => matchOldConfigSpec), // this is a mapped type of the config object at this level, replacing the object's values with specs on those values + "unique-by": matchOldUniqueBy.nullable().optional(), // indicates whether duplicates can be permitted in the list + "display-as": z.string().nullable().optional(), // this should be a handlebars template which can make use of the entire config which corresponds to 'spec' + }, +) +type OldListValueSpecUnion = { + "unique-by"?: OldUniqueBy | null + "display-as"?: string | null + tag: z.infer + variants: Record +} +const matchOldListValueSpecUnion: z.ZodType = z.object({ "unique-by": matchOldUniqueBy.nullable().optional(), - "display-as": string.nullable().optional(), + "display-as": z.string().nullable().optional(), tag: matchOldUnionTagSpec, - variants: dictionary([string, _matchOldConfigSpec]), + variants: z.record( + z.string(), + z.lazy(() => matchOldConfigSpec), + ), }) -const matchOldListValueSpecString = object({ - masked: boolean.nullable().optional(), - copyable: boolean.nullable().optional(), - pattern: string.nullable().optional(), - "pattern-description": string.nullable().optional(), - placeholder: string.nullable().optional(), +const matchOldListValueSpecString = z.object({ + masked: z.boolean().nullable().optional(), + copyable: z.boolean().nullable().optional(), + pattern: z.string().nullable().optional(), + "pattern-description": z.string().nullable().optional(), + placeholder: z.string().nullable().optional(), }) -const matchOldListValueSpecEnum = object({ - values: array(string), - "value-names": dictionary([string, string]), +const matchOldListValueSpecEnum = z.object({ + values: z.array(z.string()), + "value-names": z.record(z.string(), z.string()), }) -const matchOldListValueSpecNumber = 
object({ - range: string, - integral: boolean, - units: string.nullable().optional(), - placeholder: anyOf(number, string).nullable().optional(), +const matchOldListValueSpecNumber = z.object({ + range: z.string(), + integral: z.boolean(), + units: z.string().nullable().optional(), + placeholder: z.union([z.number(), z.string()]).nullable().optional(), }) +type OldValueSpecListBase = { + type: "list" + range: string + default: string[] | number[] | OldDefaultString[] | Record[] + name: string + description?: string | null + warning?: string | null +} + +type OldValueSpecList = OldValueSpecListBase & + ( + | { subtype: "string"; spec: z.infer } + | { subtype: "enum"; spec: z.infer } + | { subtype: "object"; spec: OldListValueSpecObject } + | { subtype: "number"; spec: z.infer } + | { subtype: "union"; spec: OldListValueSpecUnion } + ) + // represents a spec for a list -export const matchOldValueSpecList = every( - object({ - type: literals("list"), - range: string, // '[0,1]' (inclusive) OR '[0,*)' (right unbounded), normal math rules - default: anyOf( - array(string), - array(number), - array(matchOldDefaultString), - array(object), - ), - name: string, - description: string.nullable().optional(), - warning: string.nullable().optional(), - }), - anyOf( - object({ - subtype: literals("string"), - spec: matchOldListValueSpecString, +export const matchOldValueSpecList: z.ZodType = + z.intersection( + z.object({ + type: z.enum(["list"]), + range: z.string(), // '[0,1]' (inclusive) OR '[0,*)' (right unbounded), normal math rules + default: z.union([ + z.array(z.string()), + z.array(z.number()), + z.array(matchOldDefaultString), + z.array(z.object({}).passthrough()), + ]), + name: z.string(), + description: z.string().nullable().optional(), + warning: z.string().nullable().optional(), }), - object({ - subtype: literals("enum"), - spec: matchOldListValueSpecEnum, - }), - object({ - subtype: literals("object"), - spec: matchOldListValueSpecObject, - }), - object({ - 
subtype: literals("number"), - spec: matchOldListValueSpecNumber, - }), - object({ - subtype: literals("union"), - spec: matchOldListValueSpecUnion, - }), - ), -) -type OldValueSpecList = typeof matchOldValueSpecList._TYPE + z.union([ + z.object({ + subtype: z.enum(["string"]), + spec: matchOldListValueSpecString, + }), + z.object({ + subtype: z.enum(["enum"]), + spec: matchOldListValueSpecEnum, + }), + z.object({ + subtype: z.enum(["object"]), + spec: matchOldListValueSpecObject, + }), + z.object({ + subtype: z.enum(["number"]), + spec: matchOldListValueSpecNumber, + }), + z.object({ + subtype: z.enum(["union"]), + spec: matchOldListValueSpecUnion, + }), + ]), + ) as unknown as z.ZodType -const matchOldValueSpecPointer = every( - object({ - type: literal("pointer"), - }), - anyOf( - object({ - subtype: literal("package"), - target: literals("tor-key", "tor-address", "lan-address"), - "package-id": string, - interface: string, - }), - object({ - subtype: literal("package"), - target: literals("config"), - "package-id": string, - selector: string, - multi: boolean, - }), - ), +type OldValueSpecPointer = { + type: "pointer" +} & ( + | { + subtype: "package" + target: "tor-key" | "tor-address" | "lan-address" + "package-id": string + interface: string + } + | { + subtype: "package" + target: "config" + "package-id": string + selector: string + multi: boolean + } ) -type OldValueSpecPointer = typeof matchOldValueSpecPointer._TYPE +const matchOldValueSpecPointer: z.ZodType = z.intersection( + z.object({ + type: z.literal("pointer"), + }), + z.union([ + z.object({ + subtype: z.literal("package"), + target: z.enum(["tor-key", "tor-address", "lan-address"]), + "package-id": z.string(), + interface: z.string(), + }), + z.object({ + subtype: z.literal("package"), + target: z.enum(["config"]), + "package-id": z.string(), + selector: z.string(), + multi: z.boolean(), + }), + ]), +) as unknown as z.ZodType -export const matchOldValueSpec = anyOf( +type OldValueSpecString = 
z.infer + +type OldValueSpec = + | OldValueSpecString + | OldValueSpecNumber + | OldValueSpecBoolean + | OldValueSpecObject + | OldValueSpecEnum + | OldValueSpecList + | OldValueSpecUnion + | OldValueSpecPointer + +export const matchOldValueSpec: z.ZodType = z.union([ matchOldValueSpecString, matchOldValueSpecNumber, matchOldValueSpecBoolean, - matchOldValueSpecObject, + matchOldValueSpecObject as z.ZodType, matchOldValueSpecEnum, - matchOldValueSpecList, - matchOldValueSpecUnion, - matchOldValueSpecPointer, -) -type OldValueSpec = typeof matchOldValueSpec._TYPE - -setMatchOldConfigSpec(dictionary([string, matchOldValueSpec])) + matchOldValueSpecList as z.ZodType, + matchOldValueSpecUnion as z.ZodType, + matchOldValueSpecPointer as z.ZodType, +]) export class Range { min?: number diff --git a/container-runtime/src/Adapters/Systems/SystemForStartOs.ts b/container-runtime/src/Adapters/Systems/SystemForStartOs.ts index 837946ca0..3b0d767ed 100644 --- a/container-runtime/src/Adapters/Systems/SystemForStartOs.ts +++ b/container-runtime/src/Adapters/Systems/SystemForStartOs.ts @@ -47,11 +47,12 @@ export class SystemForStartOs implements System { getActionInput( effects: Effects, id: string, + prefill: Record | null, timeoutMs: number | null, ): Promise { const action = this.abi.actions.get(id) if (!action) throw new Error(`Action ${id} not found`) - return action.getInput({ effects }) + return action.getInput({ effects, prefill }) } runAction( effects: Effects, diff --git a/container-runtime/src/Interfaces/System.ts b/container-runtime/src/Interfaces/System.ts index 1bda6afd7..a6b9d4fef 100644 --- a/container-runtime/src/Interfaces/System.ts +++ b/container-runtime/src/Interfaces/System.ts @@ -33,6 +33,7 @@ export type System = { getActionInput( effects: Effects, actionId: string, + prefill: Record | null, timeoutMs: number | null, ): Promise diff --git a/container-runtime/src/Models/DockerProcedure.ts b/container-runtime/src/Models/DockerProcedure.ts index 
5bcd9d5a9..8392fa5ad 100644 --- a/container-runtime/src/Models/DockerProcedure.ts +++ b/container-runtime/src/Models/DockerProcedure.ts @@ -1,41 +1,19 @@ -import { - object, - literal, - string, - boolean, - array, - dictionary, - literals, - number, - Parser, - some, -} from "ts-matches" +import { z } from "@start9labs/start-sdk" import { matchDuration } from "./Duration" -const VolumeId = string -const Path = string - -export type VolumeId = string -export type Path = string -export const matchDockerProcedure = object({ - type: literal("docker"), - image: string, - system: boolean.optional(), - entrypoint: string, - args: array(string).defaultTo([]), - mounts: dictionary([VolumeId, Path]).optional(), - "io-format": literals( - "json", - "json-pretty", - "yaml", - "cbor", - "toml", - "toml-pretty", - ) +export const matchDockerProcedure = z.object({ + type: z.literal("docker"), + image: z.string(), + system: z.boolean().optional(), + entrypoint: z.string(), + args: z.array(z.string()).default([]), + mounts: z.record(z.string(), z.string()).optional(), + "io-format": z + .enum(["json", "json-pretty", "yaml", "cbor", "toml", "toml-pretty"]) .nullable() .optional(), - "sigterm-timeout": some(number, matchDuration).onMismatch(30), - inject: boolean.defaultTo(false), + "sigterm-timeout": z.union([z.number(), matchDuration]).catch(30), + inject: z.boolean().default(false), }) -export type DockerProcedure = typeof matchDockerProcedure._TYPE +export type DockerProcedure = z.infer diff --git a/container-runtime/src/Models/Duration.ts b/container-runtime/src/Models/Duration.ts index 5f61c362a..8d6ca3da2 100644 --- a/container-runtime/src/Models/Duration.ts +++ b/container-runtime/src/Models/Duration.ts @@ -1,11 +1,11 @@ -import { string } from "ts-matches" +import { z } from "@start9labs/start-sdk" export type TimeUnit = "d" | "h" | "s" | "ms" | "m" | "µs" | "ns" export type Duration = `${number}${TimeUnit}` const durationRegex = /^([0-9]*(\.[0-9]+)?)(ns|µs|ms|s|m|d)$/ 
-export const matchDuration = string.refine(isDuration) +export const matchDuration = z.string().refine(isDuration) export function isDuration(value: string): value is Duration { return durationRegex.test(value) } diff --git a/container-runtime/src/Models/JsonPath.ts b/container-runtime/src/Models/JsonPath.ts index d101836da..4748e7505 100644 --- a/container-runtime/src/Models/JsonPath.ts +++ b/container-runtime/src/Models/JsonPath.ts @@ -1,10 +1,10 @@ -import { literals, some, string } from "ts-matches" +import { z } from "@start9labs/start-sdk" type NestedPath = `/${A}/${string}/${B}` type NestedPaths = NestedPath<"actions", "run" | "getInput"> // prettier-ignore -type UnNestPaths = - A extends `${infer A}/${infer B}` ? [...UnNestPaths, ... UnNestPaths] : +type UnNestPaths = + A extends `${infer A}/${infer B}` ? [...UnNestPaths, ... UnNestPaths] : [A] export function unNestPath(a: A): UnNestPaths { @@ -17,14 +17,14 @@ function isNestedPath(path: string): path is NestedPaths { return true return false } -export const jsonPath = some( - literals( +export const jsonPath = z.union([ + z.enum([ "/packageInit", "/packageUninit", "/backup/create", "/backup/restore", - ), - string.refine(isNestedPath, "isNestedPath"), -) + ]), + z.string().refine(isNestedPath), +]) -export type JsonPath = typeof jsonPath._TYPE +export type JsonPath = z.infer diff --git a/container-runtime/src/index.ts b/container-runtime/src/index.ts index e1e473183..05b0ef601 100644 --- a/container-runtime/src/index.ts +++ b/container-runtime/src/index.ts @@ -1,5 +1,4 @@ import { RpcListener } from "./Adapters/RpcListener" -import { SystemForEmbassy } from "./Adapters/Systems/SystemForEmbassy" import { AllGetDependencies } from "./Interfaces/AllGetDependencies" import { getSystem } from "./Adapters/Systems" @@ -7,6 +6,18 @@ const getDependencies: AllGetDependencies = { system: getSystem, } +process.on("unhandledRejection", (reason) => { + if ( + reason instanceof Error && + "muteUnhandled" in reason && 
+ reason.muteUnhandled + ) { + // mute + } else { + console.error("Unhandled promise rejection", reason) + } +}) + for (let s of ["SIGTERM", "SIGINT", "SIGHUP"]) { process.on(s, (s) => { console.log(`Caught ${s}`) diff --git a/container-runtime/update-image-local.sh b/container-runtime/update-image-local.sh index 20dc7a9ef..14df9e325 100755 --- a/container-runtime/update-image-local.sh +++ b/container-runtime/update-image-local.sh @@ -16,6 +16,6 @@ case $ARCH in esac docker run --rm $USE_TTY --platform=$DOCKER_PLATFORM -eARCH --privileged -v "$(pwd):/root/start-os" start9/build-env /root/start-os/container-runtime/update-image.sh -if [ "$(ls -nd "rootfs.${ARCH}.squashfs" | awk '{ print $3 }')" != "$UID" ]; then +if [ "$(ls -nd "container-runtime/rootfs.${ARCH}.squashfs" | awk '{ print $3 }')" != "$UID" ]; then docker run --rm $USE_TTY -v "$(pwd):/root/start-os" start9/build-env chown -R $UID:$UID /root/start-os/container-runtime fi \ No newline at end of file diff --git a/core/ARCHITECTURE.md b/core/ARCHITECTURE.md new file mode 100644 index 000000000..f895715b2 --- /dev/null +++ b/core/ARCHITECTURE.md @@ -0,0 +1,72 @@ +# Core Architecture + +The Rust backend daemon for StartOS. 
+ +## Binaries + +The crate produces a single binary `startbox` that is symlinked under different names for different behavior: + +- `startbox` / `startd` — Main daemon +- `start-cli` — CLI interface +- `start-container` — Runs inside LXC containers; communicates with host and manages subcontainers +- `registrybox` — Registry daemon +- `tunnelbox` — VPN/tunnel daemon + +## Crate Structure + +- `startos` — Core library that supports building `startbox` +- `helpers` — Utility functions used across both `startos` and `js-engine` +- `models` — Types shared across `startos`, `js-engine`, and `helpers` + +## Key Modules + +- `src/context/` — Context types (RpcContext, CliContext, InitContext, DiagnosticContext) +- `src/service/` — Service lifecycle management with actor pattern (`service_actor.rs`) +- `src/db/model/` — Patch-DB models (`public.rs` synced to frontend, `private.rs` backend-only) +- `src/net/` — Networking (DNS, ACME, WiFi, Tor via Arti, WireGuard) +- `src/s9pk/` — S9PK package format (merkle archive) +- `src/registry/` — Package registry management + +## RPC Pattern + +The API is JSON-RPC (not REST). All endpoints are RPC methods organized in a hierarchical command structure using [rpc-toolkit](https://github.com/Start9Labs/rpc-toolkit). Handlers are registered in a tree of `ParentHandler` nodes, with four handler types: `from_fn_async` (standard), `from_fn_async_local` (non-Send), `from_fn` (sync), and `from_fn_blocking` (blocking). Metadata like `.with_about()` drives middleware and documentation. + +See [rpc-toolkit.md](rpc-toolkit.md) for full handler patterns and configuration. + +## Patch-DB Patterns + +Patch-DB provides diff-based state synchronization. Changes to `db/model/public.rs` automatically sync to the frontend. + +**Key patterns:** +- `db.peek().await` — Get a read-only snapshot of the database state +- `db.mutate(|db| { ... 
}).await` — Apply mutations atomically, returns `MutateResult` +- `#[derive(HasModel)]` — Derive macro for types stored in the database, generates typed accessors + +**Generated accessor types** (from `HasModel` derive): +- `as_field()` — Immutable reference: `&Model` +- `as_field_mut()` — Mutable reference: `&mut Model` +- `into_field()` — Owned value: `Model` + +**`Model` APIs** (from `db/prelude.rs`): +- `.de()` — Deserialize to `T` +- `.ser(&value)` — Serialize from `T` +- `.mutate(|v| ...)` — Deserialize, mutate, reserialize +- For maps: `.keys()`, `.as_idx(&key)`, `.as_idx_mut(&key)`, `.insert()`, `.remove()`, `.contains_key()` + +See [patchdb.md](patchdb.md) for `TypedDbWatch` construction, API, and usage patterns. + +## i18n + +See [i18n-patterns.md](i18n-patterns.md) for internationalization key conventions and the `t!()` macro. + +## Rust Utilities & Patterns + +See [core-rust-patterns.md](core-rust-patterns.md) for common utilities (Invoke trait, Guard pattern, mount guards, Apply trait, etc.). + +## Related Documentation + +- [rpc-toolkit.md](rpc-toolkit.md) — JSON-RPC handler patterns +- [patchdb.md](patchdb.md) — Patch-DB watch patterns and TypedDbWatch +- [i18n-patterns.md](i18n-patterns.md) — Internationalization conventions +- [core-rust-patterns.md](core-rust-patterns.md) — Common Rust utilities +- [s9pk-structure.md](s9pk-structure.md) — S9PK package format diff --git a/core/CLAUDE.md b/core/CLAUDE.md new file mode 100644 index 000000000..dbb053348 --- /dev/null +++ b/core/CLAUDE.md @@ -0,0 +1,27 @@ +# Core — Rust Backend + +The Rust backend daemon for StartOS. + +## Architecture + +See [ARCHITECTURE.md](ARCHITECTURE.md) for binaries, modules, Patch-DB patterns, and related documentation. + +See [CONTRIBUTING.md](CONTRIBUTING.md) for how to add RPC endpoints, TS-exported types, and i18n keys. 
+ +## Quick Reference + +```bash +cargo check -p start-os # Type check +make test-core # Run tests +make ts-bindings # Regenerate TS types after changing #[ts(export)] structs +cd sdk && make baseDist dist # Rebuild SDK after ts-bindings +``` + +## Operating Rules + +- Always run `cargo check -p start-os` after modifying Rust code +- When adding RPC endpoints, follow the patterns in [rpc-toolkit.md](rpc-toolkit.md) +- When modifying `#[ts(export)]` types, regenerate bindings and rebuild the SDK (see [ARCHITECTURE.md](../ARCHITECTURE.md#build-pipeline)) +- When adding i18n keys, add all 5 locales in `core/locales/i18n.yaml` (see [i18n-patterns.md](i18n-patterns.md)) +- When using DB watches, follow the `TypedDbWatch` patterns in [patchdb.md](patchdb.md) +- **Always use `.invoke(ErrorKind::...)` instead of `.status()` when running CLI commands** via `tokio::process::Command`. The `Invoke` trait (from `crate::util::Invoke`) captures stdout/stderr and checks exit codes properly. Using `.status()` leaks stderr directly to system logs, creating noise. For check-then-act patterns (e.g. `iptables -C`), use `.invoke(...).await.is_ok()` / `.is_err()` instead of `.status().await.map_or(false, |s| s.success())`. diff --git a/core/CONTRIBUTING.md b/core/CONTRIBUTING.md new file mode 100644 index 000000000..63abdafc8 --- /dev/null +++ b/core/CONTRIBUTING.md @@ -0,0 +1,49 @@ +# Contributing to Core + +For general environment setup, cloning, and build system, see the root [CONTRIBUTING.md](../CONTRIBUTING.md). 
+ +## Prerequisites + +- [Rust](https://rustup.rs) (nightly for formatting) +- [rust-analyzer](https://rust-analyzer.github.io/) recommended +- [Docker](https://docs.docker.com/get-docker/) (for cross-compilation via `rust-zig-builder` container) + +## Common Commands + +```bash +cargo check -p start-os # Type check +cargo test --features=test # Run tests (or: make test-core) +make format # Format with nightly rustfmt +cd core && cargo test --features=test # Run a specific test +``` + +## Adding a New RPC Endpoint + +1. Define a params struct with `#[derive(Deserialize, Serialize)]` +2. Choose a handler type (`from_fn_async` for most cases) +3. Write the handler function: `async fn my_handler(ctx: RpcContext, params: MyParams) -> Result` +4. Register it in the appropriate `ParentHandler` tree +5. If params/response should be available in TypeScript, add `#[derive(TS)]` and `#[ts(export)]` + +See [rpc-toolkit.md](rpc-toolkit.md) for full handler patterns and all four handler types. + +## Adding TS-Exported Types + +When a Rust type needs to be available in TypeScript (for the web frontend or SDK): + +1. Add `ts_rs::TS` to the derive list and `#[ts(export)]` to the struct/enum +2. Use `#[serde(rename_all = "camelCase")]` for JS-friendly field names +3. For types that don't implement TS (like `DateTime`, `exver::Version`), use `#[ts(type = "string")]` overrides +4. For `u64` fields that should be JS `number` (not `bigint`), use `#[ts(type = "number")]` +5. Run `make ts-bindings` to regenerate — files appear in `core/bindings/` then sync to `sdk/base/lib/osBindings/` +6. Rebuild the SDK: `cd sdk && make baseDist dist` + +## Adding i18n Keys + +1. Add the key to `core/locales/i18n.yaml` with all 5 language translations +2. Use the `t!("your.key.name")` macro in Rust code +3. Follow existing namespace conventions — match the module path where the key is used +4. Use kebab-case for multi-word segments +5. 
Translations are validated at compile time + +See [i18n-patterns.md](i18n-patterns.md) for full conventions. diff --git a/core/Cargo.lock b/core/Cargo.lock index 484974f9f..739b886cf 100644 --- a/core/Cargo.lock +++ b/core/Cargo.lock @@ -26,22 +26,10 @@ dependencies = [ "cfg-if", "cipher 0.3.0", "cpufeatures", - "ctr 0.8.0", + "ctr", "opaque-debug", ] -[[package]] -name = "aes" -version = "0.8.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" -dependencies = [ - "cfg-if", - "cipher 0.4.4", - "cpufeatures", - "zeroize", -] - [[package]] name = "ahash" version = "0.7.8" @@ -95,52 +83,6 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" -[[package]] -name = "amplify" -version = "4.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f7fb4ac7c881e54a8e7015e399b6112a2a5bc958b6c89ac510840ff20273b31" -dependencies = [ - "amplify_derive", - "amplify_num", - "ascii", - "getrandom 0.2.17", - "getrandom 0.3.4", - "wasm-bindgen", -] - -[[package]] -name = "amplify_derive" -version = "4.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a6309e6b8d89b36b9f959b7a8fa093583b94922a0f6438a24fb08936de4d428" -dependencies = [ - "amplify_syn", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "amplify_num" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99bcb75a2982047f733547042fc3968c0f460dfcf7d90b90dea3b2744580e9ad" -dependencies = [ - "wasm-bindgen", -] - -[[package]] -name = "amplify_syn" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7736fb8d473c0d83098b5bac44df6a561e20470375cd8bcae30516dc889fd62a" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = 
"android_system_properties" version = "0.1.5" @@ -150,12 +92,6 @@ dependencies = [ "libc", ] -[[package]] -name = "anes" -version = "0.1.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" - [[package]] name = "ansi-regex" version = "0.1.0" @@ -217,33 +153,24 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.100" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" +checksum = "5f0e0fee31ef5ed1ba1316088939cea399010ed7731dba877ed44aeb407a75ea" [[package]] name = "ar_archive_writer" -version = "0.2.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0c269894b6fe5e9d7ada0cf69b5bf847ff35bc25fc271f08e1d080fce80339a" +checksum = "7eb93bbb63b9c227414f6eb3a0adfddca591a8ce1e9b60661bb08969b87e340b" dependencies = [ - "object 0.32.2", -] - -[[package]] -name = "arbitrary" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3d036a3c4ab069c7b410a2ce876bd74808d2d0888a82667669f8e783a898bf1" -dependencies = [ - "derive_arbitrary", + "object", ] [[package]] name = "arc-swap" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d03449bb8ca2cc2ef70869af31463d1ae5ccc8fa3e334b307203fbf815207e" +checksum = "9ded5f9a03ac8f24d1b8a25101ee812cd32cdc8c50a4c50237de2c4915850e73" dependencies = [ "rustversion", ] @@ -272,62 +199,6 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" -[[package]] -name = "arti-client" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "async-trait", - "cfg-if", - 
"derive-deftly", - "derive_builder_fork_arti", - "derive_more 2.1.1", - "educe", - "fs-mistrust", - "futures", - "hostname-validator", - "humantime", - "humantime-serde", - "libc", - "once_cell", - "postage", - "rand 0.9.2", - "safelog", - "serde", - "thiserror 2.0.17", - "time", - "tor-async-utils", - "tor-basic-utils", - "tor-chanmgr", - "tor-circmgr", - "tor-config", - "tor-config-path", - "tor-dirmgr", - "tor-error", - "tor-guardmgr", - "tor-hsclient", - "tor-hscrypto", - "tor-hsservice", - "tor-keymgr", - "tor-linkspec", - "tor-llcrypto", - "tor-memquota", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-protover", - "tor-rtcompat", - "tracing", - "void", -] - -[[package]] -name = "ascii" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d92bec98840b8f03a5ff5413de5293bfcd8bf96467cf5452609f939ec6f5de16" - [[package]] name = "ascii-canvas" version = "3.0.0" @@ -343,7 +214,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5493c3bedbacf7fd7382c6346bbd66687d12bbaad3a89a2d2c303ee6cf20b048" dependencies = [ - "asn1-rs-derive 0.5.1", + "asn1-rs-derive", "asn1-rs-impl", "displaydoc", "nom 7.1.3", @@ -353,21 +224,6 @@ dependencies = [ "time", ] -[[package]] -name = "asn1-rs" -version = "0.7.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56624a96882bb8c26d61312ae18cb45868e5a9992ea73c58e45c3101e56a1e60" -dependencies = [ - "asn1-rs-derive 0.6.0", - "asn1-rs-impl", - "displaydoc", - "nom 7.1.3", - "num-traits", - "rusticata-macros", - "thiserror 2.0.17", -] - [[package]] name = "asn1-rs-derive" version = "0.5.1" @@ -376,19 +232,7 @@ checksum = "965c2d33e53cb6b267e148a4cb0760bc01f4904c1cd4bb4002a085bb016d1490" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", - "synstructure", -] - -[[package]] -name = "asn1-rs-derive" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3109e49b1e4909e9db6515a30c633684d68cdeaa252f215214cb4fa1a5bfee2c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", + "syn 2.0.115", "synstructure", ] @@ -400,15 +244,9 @@ checksum = "7b18050c2cd6fe86c3a76584ef5e0baf286d038cda203eb6223df2cc413565f7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] -[[package]] -name = "assert_matches" -version = "1.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b34d609dfbaf33d6889b2b7106d3ca345eacad44200913df5ba02bfd31d2ba9" - [[package]] name = "async-acme" version = "0.6.0" @@ -468,13 +306,12 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.37" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d10e4f991a553474232bc0a31799f6d24b034a84c0971d80d2e2f78b2e576e40" +checksum = "68650b7df54f0293fd061972a0fb05aaf4fc0879d3b3d21a638a182c5c543b9f" dependencies = [ "compression-codecs", "compression-core", - "futures-io", "pin-project-lite", "tokio", ] @@ -493,21 +330,6 @@ dependencies = [ "slab", ] -[[package]] -name = "async-global-executor" -version = "2.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" -dependencies = [ - "async-channel 2.5.0", - "async-executor", - "async-io", - "async-lock", - "blocking", - "futures-lite", - "once_cell", -] - [[package]] name = "async-io" version = "2.6.0" @@ -537,18 +359,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "async-native-tls" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9343dc5acf07e79ff82d0c37899f079db3534d99f189a1837c8e549c99405bec" -dependencies = [ - "futures-util", - "native-tls", - "thiserror 1.0.69", - "url", -] - [[package]] name = "async-process" version = "2.5.0" @@ -575,7 +385,7 @@ checksum = "3b43422f69d8ff38f95f1b2bb76517c91589a924d1559a0e935d7c8ce0274c11" 
dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -596,33 +406,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "async-std" -version = "1.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c8e079a4ab67ae52b7403632e4618815d6db36d2a010cfe41b02c1b1578f93b" -dependencies = [ - "async-channel 1.9.0", - "async-global-executor", - "async-io", - "async-lock", - "async-process", - "crossbeam-utils", - "futures-channel", - "futures-core", - "futures-io", - "futures-lite", - "gloo-timers", - "kv-log-macro", - "log", - "memchr", - "once_cell", - "pin-project-lite", - "pin-utils", - "slab", - "wasm-bindgen-futures", -] - [[package]] name = "async-stream" version = "0.3.6" @@ -642,7 +425,7 @@ checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -659,36 +442,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", -] - -[[package]] -name = "async_executors" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a982d2f86de6137cc05c9db9a915a19886c97911f9790d04f174cede74be01a5" -dependencies = [ - "async-std", - "blanket", - "futures-core", - "futures-task", - "futures-util", - "pin-project", - "rustc_version", - "tokio", -] - -[[package]] -name = "asynchronous-codec" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a860072022177f903e59730004fb5dc13db9275b79bb2aef7ba8ce831956c233" -dependencies = [ - "bytes", - "futures-sink", - "futures-util", - "memchr", - "pin-project-lite", + "syn 2.0.115", ] [[package]] @@ -700,21 +454,6 @@ dependencies = [ "num-traits", ] -[[package]] -name = "atomic" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba" - -[[package]] -name = "atomic" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a89cbf775b137e9b968e67227ef7f775587cde3fd31b0d8599dbd0f598a48340" -dependencies = [ - "bytemuck", -] - [[package]] name = "atomic-waker" version = "1.1.2" @@ -729,9 +468,9 @@ checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8" [[package]] name = "aws-lc-rs" -version = "1.15.3" +version = "1.15.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e84ce723ab67259cfeb9877c6a639ee9eb7a27b28123abd71db7f0d5d0cc9d86" +checksum = "7b7b6141e96a8c160799cc2d5adecd5cbbe5054cb8c7c4af53da0f83bb7ad256" dependencies = [ "aws-lc-sys", "zeroize", @@ -739,9 +478,9 @@ dependencies = [ [[package]] name = "aws-lc-sys" -version = "0.36.0" +version = "0.37.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a442ece363113bd4bd4c8b18977a7798dd4d3c3383f34fb61936960e8f4ad8" +checksum = "b092fe214090261288111db7a2b2c2118e5a7f30dc2569f1732c4069a6840549" dependencies = [ "cc", "cmake", @@ -814,9 +553,9 @@ dependencies = [ "cfg-if", "libc", "miniz_oxide", - "object 0.37.3", + "object", "rustc-demangle", - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -845,12 +584,6 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4c7f02d4ea65f2c1853089ffd8d2787bdbc63de2f0d29dedbcf8ccdfa0ccd4cf" -[[package]] -name = "base32" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23ce669cd6c8588f79e15cf450314f9638f967fc5770ff1c7c1deb0925ea7cfa" - [[package]] name = "base32" version = "0.5.1" @@ -863,12 +596,6 @@ version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1adf9755786e27479693dedd3271691a92b5e242ab139cacb9fb8e7fb5381111" -[[package]] -name = "base64" -version = "0.13.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" - [[package]] name = "base64" version = "0.21.7" @@ -999,15 +726,6 @@ dependencies = [ "wyz 0.5.1", ] -[[package]] -name = "blake2" -version = "0.10.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46502ad458c9a52b69d4d4d32775c788b7a1b85e8bc9d482d92250fc0e3f8efe" -dependencies = [ - "digest 0.10.7", -] - [[package]] name = "blake2b_simd" version = "1.0.4" @@ -1035,17 +753,6 @@ dependencies = [ "rayon-core", ] -[[package]] -name = "blanket" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0b121a9fe0df916e362fb3271088d071159cdf11db0e4182d02152850756eff" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - [[package]] name = "block" version = "0.1.6" @@ -1058,7 +765,6 @@ version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4152116fd6e9dadb291ae18fc1ec3575ed6d84c29642d97890f4b4a3417297e4" dependencies = [ - "block-padding", "generic-array", ] @@ -1071,12 +777,6 @@ dependencies = [ "generic-array", ] -[[package]] -name = "block-padding" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d696c370c750c948ada61c69a0ee2cbbb9c50b1019ddb86d9317157a99c2cae" - [[package]] name = "blocking" version = "1.6.2" @@ -1090,12 +790,6 @@ dependencies = [ "piper", ] -[[package]] -name = "bounded-vec-deque" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2225b558afc76c596898f5f1b3fc35cfce0eb1b13635cbd7d1b2a7177dc10ccd" - [[package]] name = "brotli" version = "8.0.2" @@ -1124,7 +818,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "63044e1ae8e69f3b5a92c736ca6269b8d12fa7efe39bf34ddb06d102cf0e2cab" dependencies = [ "memchr", - "regex-automata 0.4.13", "serde", ] @@ -1134,17 +827,11 @@ version = 
"3.19.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5dd9dc738b7a8311c7ade152424974d8115f2cdad61e8dab8dac9f2362298510" -[[package]] -name = "by_address" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64fa3c856b712db6612c019f14756e64e4bcea13337a6b33b696333a9eaa2d06" - [[package]] name = "bytemuck" -version = "1.24.0" +version = "1.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fbdf580320f38b612e485521afda1ee26d10cc9884efaaa750d383e13e3c5f4" +checksum = "c8efb64bd706a16a1bdde310ae86b351e4d21550d98d056f22f8a7f7a2183fec" [[package]] name = "byteorder" @@ -1160,26 +847,15 @@ checksum = "8f1fe948ff07f4bd06c30984e69f5b4899c516a3ef74f34df92a2df2ab535495" [[package]] name = "bytes" -version = "1.11.0" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3" - -[[package]] -name = "caret" -version = "0.5.3" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" - -[[package]] -name = "cast" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" +checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33" [[package]] name = "cc" -version = "1.2.53" +version = "1.2.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755d2fce177175ffca841e9a06afdb2c4ab0f593d53b4dee48147dfaade85932" +checksum = "47b26a0954ae34af09b50f0de26458fa95369a0d478d8236d3f93082b219bd29" dependencies = [ "find-msvc-tools", "jobserver", @@ -1222,7 +898,7 @@ dependencies = [ "num-traits", "serde", "wasm-bindgen", - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -1279,14 +955,13 @@ checksum = 
"773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad" dependencies = [ "crypto-common", "inout", - "zeroize", ] [[package]] name = "clap" -version = "4.5.54" +version = "4.5.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6e6ff9dcd79cff5cd969a17a545d79e84ab086e444102a591e288a8aa3ce394" +checksum = "63be97961acde393029492ce0be7a1af7e323e6bae9511ebfac33751be5e6806" dependencies = [ "clap_builder", "clap_derive", @@ -1294,34 +969,34 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.54" +version = "4.5.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa42cf4d2b7a41bc8f663a7cab4031ebafa1bf3875705bfaf8466dc60ab52c00" +checksum = "7f13174bda5dfd69d7e947827e5af4b0f2f94a4a3ee92912fba07a66150f21e2" dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim 0.11.1", + "strsim", "terminal_size", ] [[package]] name = "clap_derive" -version = "4.5.49" +version = "4.5.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0b5487afeab2deb2ff4e03a807ad1a03ac532ff5a2cee5d86884440c7f7671" +checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] name = "clap_lex" -version = "0.7.7" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e64b0cc0439b12df2fa678eae89a1c56a529fd067a9115f7827f1fffd22b32" +checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831" [[package]] name = "clipboard-win" @@ -1342,17 +1017,6 @@ dependencies = [ "cc", ] -[[package]] -name = "coarsetime" -version = "0.1.37" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e58eb270476aa4fc7843849f8a35063e8743b4dbcdf6dd0f8ea0886980c204c2" -dependencies = [ - "libc", - "wasix", - "wasm-bindgen", -] - [[package]] name = "color-eyre" version = "0.6.5" @@ -1414,7 +1078,6 @@ 
dependencies = [ "brotli", "compression-core", "flate2", - "liblzma", "memchr", "zstd", "zstd-safe", @@ -1520,12 +1183,6 @@ version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d52eff69cd5e647efe296129160853a42795992097e8af39800e1060caeea9b" -[[package]] -name = "convert_case" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" - [[package]] name = "convert_case" version = "0.6.0" @@ -1555,15 +1212,6 @@ dependencies = [ "version_check", ] -[[package]] -name = "cookie-factory" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9885fa71e26b8ab7855e2ec7cae6e9b380edff76cd052e07c683a0319d51b3a2" -dependencies = [ - "futures", -] - [[package]] name = "cookie_store" version = "0.22.0" @@ -1667,52 +1315,6 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "criterion" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2b12d017a929603d80db1831cd3a24082f8137ce19c69e6447f54f5fc8d692f" -dependencies = [ - "anes", - "cast", - "ciborium", - "clap", - "criterion-plot", - "is-terminal", - "itertools 0.10.5", - "num-traits", - "once_cell", - "oorandom", - "plotters", - "rayon", - "regex", - "serde", - "serde_derive", - "serde_json", - "tinytemplate", - "walkdir", -] - -[[package]] -name = "criterion-cycles-per-byte" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1029452fa751c93f8834962dd74807d69f0a6c7624d5b06625b393aeb6a14fc2" -dependencies = [ - "cfg-if", - "criterion", -] - -[[package]] -name = "criterion-plot" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b50826342786a51a89e2da3a28f1c32b06e387201bc2d19791f622c673706b1" -dependencies = [ - "cast", - "itertools 0.10.5", -] - [[package]] name = "critical-section" version = "1.2.0" @@ 
-1770,7 +1372,7 @@ checksum = "d8b9f2e4c67f833b660cdb0a3523065869fb35570177239812ed4c905aeff87b" dependencies = [ "bitflags 2.10.0", "crossterm_winapi", - "derive_more 2.1.1", + "derive_more", "document-features", "futures-core", "mio", @@ -1810,31 +1412,21 @@ dependencies = [ [[package]] name = "crypto-common" -version = "0.1.7" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ "generic-array", "typenum", ] -[[package]] -name = "crypto-mac" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25fab6889090c8133f3deb8f73ba3c65a7f456f66436fc012a1b1e272b1e103e" -dependencies = [ - "generic-array", - "subtle", -] - [[package]] name = "csscolorparser" version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fda6aace1fbef3aa217b27f4c8d7d071ef2a70a5ca51050b1f17d40299d3f16" dependencies = [ - "phf 0.11.3", + "phf", ] [[package]] @@ -1867,15 +1459,6 @@ dependencies = [ "cipher 0.3.0", ] -[[package]] -name = "ctr" -version = "0.9.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0369ee1ad671834580515889b80f2ea915f23b8be8d0daa4bbaf2ac5c7590835" -dependencies = [ - "cipher 0.4.4", -] - [[package]] name = "curve25519-dalek" version = "3.2.0" @@ -1913,77 +1496,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", -] - -[[package]] -name = "darling" -version = "0.14.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b750cb3417fd1b327431a470f388520309479ab0bf5e323505daf0290cd3850" -dependencies = [ - "darling_core 0.14.4", - "darling_macro 0.14.4", -] - -[[package]] -name = "darling" -version = "0.21.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" -dependencies = [ - "darling_core 0.21.3", - "darling_macro 0.21.3", -] - -[[package]] -name = "darling_core" -version = "0.14.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.10.0", - "syn 1.0.109", -] - -[[package]] -name = "darling_core" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" -dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim 0.11.1", - "syn 2.0.114", -] - -[[package]] -name = "darling_macro" -version = "0.14.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4aab4dbc9f7611d8b55048a3a16d2d010c2c8334e46304b40ac1cc14bf3b48e" -dependencies = [ - "darling_core 0.14.4", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "darling_macro" -version = "0.21.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" -dependencies = [ - "darling_core 0.21.3", - "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -2010,7 +1523,7 @@ version = "9.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cd0a5c643689626bec213c4d8bd4d96acc8ffdb4ad4bb6bc16abf27d5f4b553" dependencies = [ - "asn1-rs 0.6.2", + "asn1-rs", "displaydoc", "nom 7.1.3", "num-bigint", @@ -2018,20 +1531,6 @@ dependencies = [ "rusticata-macros", ] -[[package]] -name = "der-parser" -version = "10.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07da5016415d5a3c4dd39b11ed26f915f52fc4e0dc197d87908bc916e51bc1a6" -dependencies = [ - "asn1-rs 0.7.1", - 
"cookie-factory", - "displaydoc", - "nom 7.1.3", - "num-traits", - "rusticata-macros", -] - [[package]] name = "der_derive" version = "0.7.3" @@ -2040,100 +1539,16 @@ checksum = "8034092389675178f570469e6c3b0465d3d30b4505c294a6550db47f3c17ad18" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] name = "deranged" -version = "0.5.5" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ececcb659e7ba858fb4f10388c250a7252eb0a27373f1a72b8748afdd248e587" +checksum = "cc3dc5ad92c2e2d1c193bbbbdf2ea477cb81331de4f3103f267ca18368b988c4" dependencies = [ "powerfmt", - "serde_core", -] - -[[package]] -name = "derive-deftly" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957bb73a3a9c0bbcac67e129b81954661b3cfcb9e28873d8441f91b54852e77a" -dependencies = [ - "derive-deftly-macros", - "heck 0.5.0", -] - -[[package]] -name = "derive-deftly-macros" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ea41269bd490d251b9eca50ccb43117e641cc68b129849757c15ece88fe0574" -dependencies = [ - "heck 0.5.0", - "indexmap 2.13.0", - "itertools 0.14.0", - "proc-macro-crate", - "proc-macro2", - "quote", - "sha3 0.10.8", - "strum", - "syn 2.0.114", - "void", -] - -[[package]] -name = "derive_arbitrary" -version = "1.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e567bd82dcff979e4b03460c307b3cdc9e96fde3d73bed1496d2bc75d9dd62a" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "derive_builder_core_fork_arti" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24c1b715c79be6328caa9a5e1a387a196ea503740f0722ec3dd8f67a9e72314d" -dependencies = [ - "darling 0.14.4", - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "derive_builder_fork_arti" -version = "0.11.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3eae24d595f4d0ecc90a9a5a6d11c2bd8dafe2375ec4a1ec63250e5ade7d228" -dependencies = [ - "derive_builder_macro_fork_arti", -] - -[[package]] -name = "derive_builder_macro_fork_arti" -version = "0.11.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69887769a2489cd946bf782eb2b1bb2cb7bc88551440c94a765d4f040c08ebf3" -dependencies = [ - "derive_builder_core_fork_arti", - "syn 1.0.109", -] - -[[package]] -name = "derive_more" -version = "0.99.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6edb4b64a43d977b8e99788fe3a04d483834fba1215a7e02caa415b626497f7f" -dependencies = [ - "convert_case 0.4.0", - "proc-macro2", - "quote", - "rustc_version", - "syn 2.0.114", ] [[package]] @@ -2155,8 +1570,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version", - "syn 2.0.114", - "unicode-xid", + "syn 2.0.115", ] [[package]] @@ -2186,24 +1600,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "directories" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16f5094c54661b38d03bd7e50df373292118db60b585c08a411c6d840017fe7d" -dependencies = [ - "dirs-sys", -] - -[[package]] -name = "dirs" -version = "6.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3e8aa94d75141228480295a7d0e7feb620b1a5ad9f12bc40be62411e38cce4e" -dependencies = [ - "dirs-sys", -] - [[package]] name = "dirs-next" version = "2.0.0" @@ -2214,18 +1610,6 @@ dependencies = [ "dirs-sys-next", ] -[[package]] -name = "dirs-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e01a3366d27ee9890022452ee61b2b63a67e6f13f58900b651ff5665f0bb1fab" -dependencies = [ - "libc", - "option-ext", - "redox_users 0.5.2", - "windows-sys 0.61.2", -] - [[package]] name = "dirs-sys-next" version = "0.1.2" @@ -2233,7 +1617,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d" dependencies = [ "libc", - "redox_users 0.4.6", + "redox_users", "winapi", ] @@ -2245,7 +1629,7 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -2271,7 +1655,7 @@ checksum = "6e39034cee21a2f5bbb66ba0e3689819c4bb5d00382a282006e802a7ffa6c41d" dependencies = [ "cfg-if", "libc", - "socket2 0.6.1", + "socket2 0.6.2", "windows-sys 0.60.2", ] @@ -2296,12 +1680,6 @@ version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75b325c5dbd37f80359721ad39aca5a29fb04c89279657cffdda8736d0c0b9d2" -[[package]] -name = "downcast-rs" -version = "2.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "117240f60069e65410b3ae1bb213295bd828f707b5bec6596a1afc8793ce0cbc" - [[package]] name = "dunce" version = "1.0.5" @@ -2314,33 +1692,6 @@ version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0881ea181b1df73ff77ffaaf9c7544ecc11e82fba9b5f27b262a3c73a332555" -[[package]] -name = "dynasm" -version = "3.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f7d4c414c94bc830797115b8e5f434d58e7e80cb42ba88508c14bc6ea270625" -dependencies = [ - "bitflags 2.10.0", - "byteorder", - "lazy_static", - "proc-macro-error2", - "proc-macro2", - "quote", - "syn 2.0.114", -] - -[[package]] -name = "dynasmrt" -version = "3.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "602f7458a3859195fb840e6e0cce5f4330dd9dfbfece0edaf31fe427af346f55" -dependencies = [ - "byteorder", - "dynasm", - "fnv", - "memmap2 0.9.9", -] - [[package]] name = "ecdsa" version = "0.16.9" @@ -2397,7 +1748,6 @@ checksum = "70e796c081cee67dc755e1a36a0a172b897fab85fc3f6bc48307991f64e4eca9" dependencies = [ 
"curve25519-dalek 4.1.3", "ed25519 2.2.3", - "merlin", "rand_core 0.6.4", "serde", "sha2 0.10.9", @@ -2406,18 +1756,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "educe" -version = "0.4.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0042ff8246a363dbe77d2ceedb073339e85a804b9a47636c6e016a9a32c05f" -dependencies = [ - "enum-ordinalize", - "proc-macro2", - "quote", - "syn 1.0.109", -] - [[package]] name = "either" version = "1.15.0" @@ -2477,9 +1815,9 @@ dependencies = [ [[package]] name = "ena" -version = "0.14.3" +version = "0.14.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d248bdd43ce613d87415282f69b9bb99d947d290b10962dd6c56233312c2ad5" +checksum = "eabffdaee24bd1bf95c5ef7cec31260444317e72ea56c4c91750e8b7ee58d5f1" dependencies = [ "log", ] @@ -2514,20 +1852,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.114", -] - -[[package]] -name = "enum-ordinalize" -version = "3.1.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf1fa3f06bbff1ea5b1a9c7b14aa992a39657db60a2759457328d7e058f49ee" -dependencies = [ - "num-bigint", - "num-traits", - "proc-macro2", - "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -2548,7 +1873,7 @@ checksum = "67c78a4d8fdf9953a5c9d458f9efe940fd97a0cab0941c075a813ac594733827" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -2557,18 +1882,6 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" -[[package]] -name = "equix" -version = "0.2.5" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "arrayvec 0.7.6", - "hashx", - "num-traits", - "thiserror 2.0.17", - "visibility", -] - [[package]] name = "errno" version = "0.3.14" @@ -2645,18 +1958,6 @@ dependencies 
= [ "once_cell", ] -[[package]] -name = "fallible-iterator" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649" - -[[package]] -name = "fallible-streaming-iterator" -version = "0.1.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a" - [[package]] name = "fastrand" version = "2.3.0" @@ -2688,19 +1989,6 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "28dea519a9695b9977216879a3ebfddf92f1c08c05d984f8996aecd6ecdc811d" -[[package]] -name = "figment" -version = "0.10.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cb01cd46b0cf372153850f4c6c272d9cbea2da513e07538405148f95bd789f3" -dependencies = [ - "atomic 0.6.1", - "serde", - "toml 0.8.23", - "uncased", - "version_check", -] - [[package]] name = "filedescriptor" version = "0.8.3" @@ -2714,27 +2002,20 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.26" +version = "0.2.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc0505cd1b6fa6580283f6bdf70a73fcf4aba1184038c90902b92b3dd0df63ed" +checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db" dependencies = [ "cfg-if", "libc", "libredox", - "windows-sys 0.60.2", ] [[package]] name = "find-msvc-tools" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db" - -[[package]] -name = "fixed-capacity-vec" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b31a14f5ee08ed1a40e1252b35af18bed062e3f39b69aab34decde36bc43e40" +checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582" [[package]] name = "fixedbitset" @@ -2744,20 +2025,14 @@ 
checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.1.8" +version = "1.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b375d6465b98090a5f25b1c7703f3859783755aa9a80433b36e0379a3ec2f369" +checksum = "843fba2746e448b37e26a819579957415c8cef339bf08564fe8b7ddbd959573c" dependencies = [ "crc32fast", "miniz_oxide", ] -[[package]] -name = "fluid-let" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "749cff877dc1af878a0b31a41dd221a753634401ea0ef2f87b62d3171522485a" - [[package]] name = "fnv" version = "1.0.7" @@ -2809,20 +2084,6 @@ dependencies = [ "itertools 0.8.2", ] -[[package]] -name = "fs-mistrust" -version = "0.10.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "derive_builder_fork_arti", - "dirs", - "libc", - "pwd-grp", - "serde", - "thiserror 2.0.17", - "walkdir", -] - [[package]] name = "fs2" version = "0.4.3" @@ -2839,36 +2100,6 @@ version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" -[[package]] -name = "fslock" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04412b8935272e3a9bae6f48c7bfff74c2911f60525404edfdd28e49884c3bfb" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "fslock-arti-fork" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b21bd626aaab7b904b20bef6d9e06298914a0c8d9fb8b010483766b2e532791" -dependencies = [ - "libc", - "winapi", -] - -[[package]] -name = "fslock-guard" -version = "0.2.4" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "fslock-arti-fork", - "thiserror 2.0.17", - "winapi", -] 
- [[package]] name = "funty" version = "1.1.0" @@ -2961,7 +2192,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -2975,17 +2206,6 @@ dependencies = [ "rustls-pki-types", ] -[[package]] -name = "futures-rustls" -version = "0.26.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb" -dependencies = [ - "futures-io", - "rustls 0.23.36", - "rustls-pki-types", -] - [[package]] name = "futures-sink" version = "0.3.31" @@ -3027,9 +2247,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.7" +version = "0.14.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85649ca51fd72272d7821adaf274ad91c288277713d9c18820d8499a7ff69e9a" +checksum = "4bb6743198531e02858aeaea5398fcc883e71851fcbcb5a2f773e2fb6cb1edf2" dependencies = [ "typenum", "version_check", @@ -3043,7 +2263,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "75cec8bb4d3d32542cfcb9517f78366b52c17931e30d7ee1682c13686c19cee7" dependencies = [ "futures", - "futures-rustls 0.25.1", + "futures-rustls", "hyper", "log", "serde", @@ -3103,6 +2323,19 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "getrandom" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "139ef39800118c7683f2fd3c98c1b23c09ae076556b435f8e9064ae108aaeeec" +dependencies = [ + "cfg-if", + "libc", + "r-efi", + "wasip2", + "wasip3", +] + [[package]] name = "gimli" version = "0.32.3" @@ -3115,12 +2348,6 @@ version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0cc23270f6e1808e30a928bdc84dea0b9b4136a8bc82338574f23baf47bbd280" -[[package]] -name = "glob-match" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9985c9503b412198aa4197559e9a318524ebc4519c229bfa05a535828c950b9d" - [[package]] name = "globset" version = "0.4.18" @@ -3130,8 +2357,8 @@ dependencies = [ "aho-corasick", "bstr", "log", - "regex-automata 0.4.13", - "regex-syntax 0.8.8", + "regex-automata 0.4.14", + "regex-syntax 0.8.9", ] [[package]] @@ -3145,18 +2372,6 @@ dependencies = [ "walkdir", ] -[[package]] -name = "gloo-timers" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" -dependencies = [ - "futures-channel", - "futures-core", - "js-sys", - "wasm-bindgen", -] - [[package]] name = "gpt" version = "4.1.0" @@ -3180,18 +2395,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "growable-bloom-filter" -version = "2.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d174ccb4ba660d431329e7f0797870d0a4281e36353ec4b4a3c5eab6c2cfb6f1" -dependencies = [ - "serde", - "serde_bytes", - "serde_derive", - "xxhash-rust", -] - [[package]] name = "h2" version = "0.4.13" @@ -3292,20 +2495,6 @@ dependencies = [ "hashbrown 0.15.5", ] -[[package]] -name = "hashx" -version = "0.3.4" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "arrayvec 0.7.6", - "blake2", - "dynasmrt", - "fixed-capacity-vec", - "hex", - "rand_core 0.9.5", - "thiserror 2.0.17", -] - [[package]] name = "hdrhistogram" version = "7.5.4" @@ -3362,7 +2551,7 @@ dependencies = [ "rand 0.9.2", "ring", "serde", - "thiserror 2.0.17", + "thiserror 2.0.18", "tinyvec", "tokio", "tracing", @@ -3386,7 +2575,7 @@ dependencies = [ "resolv-conf", "serde", "smallvec", - "thiserror 2.0.17", + "thiserror 2.0.18", "tokio", "tracing", ] @@ -3408,7 +2597,7 @@ dependencies = [ "ipnet", "prefix-trie", "serde", - "thiserror 2.0.17", + "thiserror 2.0.18", "time", "tokio", "tokio-util", @@ -3427,17 +2616,7 @@ version = "0.12.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "7b5f8eb2ad728638ea2c7d47a21db23b7b58a72ed6a38256b8a1849f15fbbdf7" dependencies = [ - "hmac 0.12.1", -] - -[[package]] -name = "hmac" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a2a2320eb7ec0ebe8da8f744d7812d9fc4cb4d09344ac01898dbcb6a20ae69b" -dependencies = [ - "crypto-mac", - "digest 0.9.0", + "hmac", ] [[package]] @@ -3466,15 +2645,9 @@ checksum = "617aaa3557aef3810a6369d0a99fac8a080891b68bd9f9812a1eeda0c0730cbd" dependencies = [ "cfg-if", "libc", - "windows-link 0.2.1", + "windows-link", ] -[[package]] -name = "hostname-validator" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f558a64ac9af88b5ba400d99b579451af0d39c6d360980045b91aac966d705e2" - [[package]] name = "http" version = "1.4.0" @@ -3526,16 +2699,6 @@ version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" -[[package]] -name = "humantime-serde" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57a3db5ea5923d99402c94e9feb261dc5ee9b4efa158b0315f788cf549cc200c" -dependencies = [ - "humantime", - "serde", -] - [[package]] name = "hyper" version = "1.8.1" @@ -3573,7 +2736,7 @@ dependencies = [ "tokio", "tokio-rustls 0.26.4", "tower-service", - "webpki-roots 1.0.5", + "webpki-roots 1.0.6", ] [[package]] @@ -3607,14 +2770,13 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.19" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727805d60e7938b76b826a6ef209eb70eaa1812794f9424d4a4e2d740662df5f" +checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0" dependencies = [ "base64 0.22.1", "bytes", "futures-channel", - "futures-core", "futures-util", "http", "http-body", @@ -3623,7 +2785,7 @@ dependencies = [ 
"libc", "percent-encoding", "pin-project-lite", - "socket2 0.6.1", + "socket2 0.6.2", "system-configuration", "tokio", "tower-service", @@ -3633,9 +2795,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.64" +version = "0.1.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" +checksum = "e31bc9ad994ba00e440a8aa5c9ef0ec67d5cb5e5cb0cc7f8b744a35b389cc470" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -3643,7 +2805,7 @@ dependencies = [ "js-sys", "log", "wasm-bindgen", - "windows-core 0.62.2", + "windows-core", ] [[package]] @@ -3736,6 +2898,12 @@ dependencies = [ "zerovec", ] +[[package]] +name = "id-arena" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d3067d79b975e8844ca9eb072e16b31c3c1c36928edf9c6789548c524d0d954" + [[package]] name = "id-pool" version = "0.2.2" @@ -3758,12 +2926,6 @@ dependencies = [ "rustc-hash", ] -[[package]] -name = "ident_case" -version = "1.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" - [[package]] name = "idna" version = "1.1.0" @@ -3795,7 +2957,7 @@ dependencies = [ "globset", "log", "memchr", - "regex-automata 0.4.13", + "regex-automata 0.4.14", "same-file", "walkdir", "winapi-util", @@ -3839,9 +3001,9 @@ dependencies = [ [[package]] name = "imbl-value" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef2a5f88a75295785a3b4a752db1d45a3b83b9f3a4b13dc70a5aaa6c16d859b3" +checksum = "2722c61df925c481ef6e78c66c451c8ff8514430ec9bacfa02613d8c126205dd" dependencies = [ "imbl", "serde", @@ -3852,8 +3014,8 @@ dependencies = [ [[package]] name = "imbl-value" -version = "0.4.3" -source = "git+https://github.com/Start9Labs/imbl-value.git#27f9bb38cd87290ce4732a2ef3034ea1c7340560" +version = 
"0.4.4" +source = "git+https://github.com/Start9Labs/imbl-value.git#ffb4901d55c7771489599b21314c08663328c8c2" dependencies = [ "imbl", "serde", @@ -3894,7 +3056,6 @@ checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" dependencies = [ "autocfg", "hashbrown 0.12.3", - "serde", ] [[package]] @@ -3973,15 +3134,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "inventory" -version = "0.3.21" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc61209c082fbeb19919bee74b176221b27223e27b65d781eb91af24eb1fb46e" -dependencies = [ - "rustversion", -] - [[package]] name = "ipconfig" version = "0.3.2" @@ -4180,7 +3332,7 @@ dependencies = [ "regex", "serde", "serde_json", - "thiserror 2.0.17", + "thiserror 2.0.18", "time", ] @@ -4198,7 +3350,7 @@ dependencies = [ name = "json-patch" version = "0.2.7-alpha.0" dependencies = [ - "imbl-value 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "imbl-value 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "json-ptr", "serde", ] @@ -4208,9 +3360,9 @@ name = "json-ptr" version = "0.1.0" dependencies = [ "imbl", - "imbl-value 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "imbl-value 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "serde", - "thiserror 2.0.17", + "thiserror 2.0.18", ] [[package]] @@ -4218,51 +3370,12 @@ name = "jsonpath_lib" version = "0.3.0" source = "git+https://github.com/Start9Labs/jsonpath.git#1cacbd64afa2e1941a21fef06bad14317ba92f30" dependencies = [ - "imbl-value 0.4.3 (git+https://github.com/Start9Labs/imbl-value.git)", + "imbl-value 0.4.4 (git+https://github.com/Start9Labs/imbl-value.git)", "log", "serde", "serde_json", ] -[[package]] -name = "k12" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4dc5fdb62af2f520116927304f15d25b3c2667b4817b90efdc045194c912c54" -dependencies = [ - "digest 0.10.7", - "sha3 0.10.8", -] - -[[package]] -name = 
"keccak" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" -dependencies = [ - "cpufeatures", -] - -[[package]] -name = "kqueue" -version = "1.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a" -dependencies = [ - "kqueue-sys", - "libc", -] - -[[package]] -name = "kqueue-sys" -version = "1.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b" -dependencies = [ - "bitflags 1.3.2", - "libc", -] - [[package]] name = "kv" version = "0.24.0" @@ -4278,15 +3391,6 @@ dependencies = [ "toml 0.5.11", ] -[[package]] -name = "kv-log-macro" -version = "1.0.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0de8b303297635ad57c9f5059fd9cee7a47f8e8daa09df0fcd07dd39fb22977f" -dependencies = [ - "log", -] - [[package]] name = "lalrpop" version = "0.20.2" @@ -4301,7 +3405,7 @@ dependencies = [ "petgraph", "pico-args", "regex", - "regex-syntax 0.8.8", + "regex-syntax 0.8.9", "string_cache", "term", "tiny-keccak", @@ -4315,7 +3419,7 @@ version = "0.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "507460a910eb7b32ee961886ff48539633b788a36b65692b95f225b844c82553" dependencies = [ - "regex-automata 0.4.13", + "regex-automata 0.4.14", ] [[package]] @@ -4349,6 +3453,12 @@ dependencies = [ "spin", ] +[[package]] +name = "leb128fmt" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2" + [[package]] name = "lettre" version = "0.11.19" @@ -4372,7 +3482,7 @@ dependencies = [ "quoted_printable", "rustls 0.23.36", "rustls-platform-verifier", - "socket2 0.6.1", + "socket2 0.6.2", "tokio", "tokio-rustls 0.26.4", "url", @@ 
-4393,9 +3503,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.180" +version = "0.2.181" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc" +checksum = "459427e2af2b9c839b132acb702a1c654d95e10f8c326bfc2ad11310e458b1c5" [[package]] name = "libloading" @@ -4404,34 +3514,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" dependencies = [ "cfg-if", - "windows-link 0.2.1", -] - -[[package]] -name = "liblzma" -version = "0.4.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73c36d08cad03a3fbe2c4e7bb3a9e84c57e4ee4135ed0b065cade3d98480c648" -dependencies = [ - "liblzma-sys", -] - -[[package]] -name = "liblzma-sys" -version = "0.4.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01b9596486f6d60c3bbe644c0e1be1aa6ccc472ad630fe8927b456973d7cb736" -dependencies = [ - "cc", - "libc", - "pkg-config", + "windows-link", ] [[package]] name = "libm" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de" +checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981" [[package]] name = "libmimalloc-sys" @@ -4451,18 +3541,7 @@ checksum = "3d0b95e02c851351f877147b7deea7b1afb1df71b63aa5f8270716e0c5720616" dependencies = [ "bitflags 2.10.0", "libc", - "redox_syscall 0.7.0", -] - -[[package]] -name = "libsqlite3-sys" -version = "0.35.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "133c182a6a2c87864fe97778797e46c7e999672690dc9fa3ee8e241aa4a9c13f" -dependencies = [ - "cc", - "pkg-config", - "vcpkg", + "redox_syscall 0.7.1", ] [[package]] @@ -4519,9 +3598,6 @@ name = "log" version = "0.4.29" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897" -dependencies = [ - "value-bag", -] [[package]] name = "lru-slab" @@ -4563,7 +3639,7 @@ dependencies = [ "bitvec 1.0.1", "serde", "serde-big-array", - "thiserror 2.0.17", + "thiserror 2.0.18", ] [[package]] @@ -4578,9 +3654,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.6" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273" +checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79" [[package]] name = "memmap2" @@ -4618,18 +3694,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "merlin" -version = "3.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58c38e2799fc0978b65dfff8023ec7843e2330bb462f19198840b34b6582397d" -dependencies = [ - "byteorder", - "keccak", - "rand_core 0.6.4", - "zeroize", -] - [[package]] name = "miette" version = "7.6.0" @@ -4657,7 +3721,7 @@ checksum = "db5b29714e950dbb20d5e6f74f9dcec4edbcc1067bb7f8ed198c097b8c1a818b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -4705,9 +3769,9 @@ dependencies = [ [[package]] name = "moka" -version = "0.12.12" +version = "0.12.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3dec6bd31b08944e08b58fd99373893a6c17054d6f3ea5006cc894f4f4eee2a" +checksum = "b4ac832c50ced444ef6be0767a008b02c106a909ba79d1d830501e94b96f6b7e" dependencies = [ "crossbeam-channel", "crossbeam-epoch", @@ -4732,9 +3796,9 @@ dependencies = [ [[package]] name = "native-tls" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +checksum = "6cdede44f9a69cab2899a2049e2c3bd49bf911a157f6a3353d4a91c61abbce44" dependencies = 
[ "libc", "log", @@ -4854,38 +3918,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "notify" -version = "8.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" -dependencies = [ - "bitflags 2.10.0", - "inotify", - "kqueue", - "libc", - "log", - "mio", - "notify-types", - "walkdir", - "windows-sys 0.60.2", -] - -[[package]] -name = "notify-types" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e0826a989adedc2a244799e823aece04662b66609d96af8dff7ac6df9a8925d" - -[[package]] -name = "ntapi" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c70f219e21142367c70c0b30c6a9e3a14d55b4d12a204d897fbec83a0363f081" -dependencies = [ - "winapi", -] - [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -4947,9 +3979,9 @@ dependencies = [ [[package]] name = "num-conv" -version = "0.1.0" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9" +checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050" [[package]] name = "num-integer" @@ -5021,7 +4053,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -5050,25 +4082,6 @@ dependencies = [ "objc_id", ] -[[package]] -name = "objc2-core-foundation" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a180dd8642fa45cdb7dd721cd4c11b1cadd4929ce112ebd8b9f5803cc79d536" -dependencies = [ - "bitflags 2.10.0", -] - -[[package]] -name = "objc2-io-kit" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33fafba39597d6dc1fb709123dfa8289d39406734be322956a69f0931c73bb15" -dependencies = [ - "libc", - "objc2-core-foundation", -] - [[package]] name = "objc_id" 
version = "0.1.1" @@ -5078,15 +4091,6 @@ dependencies = [ "objc", ] -[[package]] -name = "object" -version = "0.32.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441" -dependencies = [ - "memchr", -] - [[package]] name = "object" version = "0.37.3" @@ -5102,7 +4106,7 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8d8034d9489cdaf79228eb9f6a3b8d7bb32ba00d6645ebd48eef4077ceb5bd9" dependencies = [ - "asn1-rs 0.6.2", + "asn1-rs", ] [[package]] @@ -5121,14 +4125,6 @@ version = "1.70.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe" -[[package]] -name = "oneshot-fused-workaround" -version = "0.2.3" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "futures", -] - [[package]] name = "onig" version = "6.5.1" @@ -5151,12 +4147,6 @@ dependencies = [ "pkg-config", ] -[[package]] -name = "oorandom" -version = "11.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6790f58c7ff633d8771f42965289203411a5e5c68388703c06e14f24770b41e" - [[package]] name = "opaque-debug" version = "0.3.1" @@ -5199,7 +4189,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -5210,15 +4200,15 @@ checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" [[package]] name = "openssl-probe" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f50d9b3dabb09ecd771ad0aa242ca6894994c130308ca3d7684634df8037391" +checksum = "7c87def4c32ab89d880effc9e097653c8da5d6ef28e6b539d313baaacfbafcbe" [[package]] name = "openssl-src" -version = 
"300.5.4+3.5.4" +version = "300.5.5+3.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507b3792995dae9b0df8a1c1e3771e8418b7c2d9f0baeba32e6fe8b06c7cb72" +checksum = "3f1787d533e03597a7934fd0a765f0d28e94ecc5fb7789f8053b1e699a56f709" dependencies = [ "cc", ] @@ -5236,21 +4226,6 @@ dependencies = [ "vcpkg", ] -[[package]] -name = "option-ext" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" - -[[package]] -name = "ordered-float" -version = "2.10.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" -dependencies = [ - "num-traits", -] - [[package]] name = "ordered-stream" version = "0.2.0" @@ -5261,15 +4236,6 @@ dependencies = [ "pin-project-lite", ] -[[package]] -name = "os_str_bytes" -version = "6.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" -dependencies = [ - "memchr", -] - [[package]] name = "overload" version = "0.1.1" @@ -5371,15 +4337,9 @@ dependencies = [ "libc", "redox_syscall 0.5.18", "smallvec", - "windows-link 0.2.1", + "windows-link", ] -[[package]] -name = "paste" -version = "1.0.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a" - [[package]] name = "patch-db" version = "0.1.0" @@ -5388,7 +4348,7 @@ dependencies = [ "fd-lock-rs", "futures", "imbl", - "imbl-value 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "imbl-value 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "json-patch", "json-ptr", "lazy_static", @@ -5396,7 +4356,7 @@ dependencies = [ "patch-db-macro", "serde", "serde_cbor 0.11.1", - "thiserror 2.0.17", + "thiserror 2.0.18", "tokio", "tracing", "tracing-error", @@ 
-5428,7 +4388,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2" dependencies = [ "digest 0.10.7", - "hmac 0.12.1", + "hmac", ] [[package]] @@ -5458,9 +4418,9 @@ checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c9eb05c21a464ea704b53158d358a31e6425db2f63a1a7312268b05fe2b75f7" +checksum = "e0848c601009d37dfa3430c4666e147e49cdcf1b92ecd3e63657d8a5f19da662" dependencies = [ "memchr", "ucd-trie", @@ -5468,9 +4428,9 @@ dependencies = [ [[package]] name = "pest_derive" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f9dbced329c441fa79d80472764b1a2c7e57123553b8519b36663a2fb234ed" +checksum = "11f486f1ea21e6c10ed15d5a7c77165d0ee443402f0780849d1768e7d9d6fe77" dependencies = [ "pest", "pest_generator", @@ -5478,22 +4438,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bb96d5051a78f44f43c8f712d8e810adb0ebf923fc9ed2655a7f66f63ba8ee5" +checksum = "8040c4647b13b210a963c1ed407c1ff4fdfa01c31d6d2a098218702e6664f94f" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] name = "pest_meta" -version = "2.8.5" +version = "2.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "602113b5b5e8621770cfd490cfd90b9f84ab29bd2b0e49ad83eb6d186cef2365" +checksum = "89815c69d36021a140146f26659a81d6c2afa33d216d736dd4be5381a7362220" dependencies = [ "pest", "sha2 0.10.9", @@ -5515,19 +4475,8 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078" 
dependencies = [ - "phf_macros 0.11.3", - "phf_shared 0.11.3", -] - -[[package]] -name = "phf" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1562dc717473dbaa4c1f85a36410e03c047b2e7df7f45ee938fbef64ae7fadf" -dependencies = [ - "phf_macros 0.13.1", - "phf_shared 0.13.1", - "serde", + "phf_macros", + "phf_shared", ] [[package]] @@ -5536,44 +4485,21 @@ version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d" dependencies = [ - "phf_shared 0.11.3", + "phf_shared", "rand 0.8.5", ] -[[package]] -name = "phf_generator" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "135ace3a761e564ec88c03a77317a7c6b80bb7f7135ef2544dbe054243b89737" -dependencies = [ - "fastrand", - "phf_shared 0.13.1", -] - [[package]] name = "phf_macros" version = "0.11.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216" dependencies = [ - "phf_generator 0.11.3", - "phf_shared 0.11.3", + "phf_generator", + "phf_shared", "proc-macro2", "quote", - "syn 2.0.114", -] - -[[package]] -name = "phf_macros" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef" -dependencies = [ - "phf_generator 0.13.1", - "phf_shared 0.13.1", - "proc-macro2", - "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -5585,15 +4511,6 @@ dependencies = [ "siphasher", ] -[[package]] -name = "phf_shared" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e57fef6bc5981e38c2ce2d63bfa546861309f875b8a75f092d1d54ae2d64f266" -dependencies = [ - "siphasher", -] - [[package]] name = "pico-args" version = "0.5.0" @@ -5617,7 +4534,7 @@ checksum = 
"6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -5683,34 +4600,6 @@ dependencies = [ "time", ] -[[package]] -name = "plotters" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" -dependencies = [ - "num-traits", - "plotters-backend", - "plotters-svg", - "wasm-bindgen", - "web-sys", -] - -[[package]] -name = "plotters-backend" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" - -[[package]] -name = "plotters-svg" -version = "0.3.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" -dependencies = [ - "plotters-backend", -] - [[package]] name = "polling" version = "3.11.0" @@ -5727,9 +4616,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f89776e4d69bb58bc6993e99ffa1d11f228b839984854c7daeb5d37f87cbe950" +checksum = "c33a9471896f1c69cecef8d20cbe2f7accd12527ce60845ff44c153bb2a21b49" [[package]] name = "portable-pty" @@ -5739,7 +4628,7 @@ checksum = "b4a596a2b3d2752d94f51fac2d4a96737b8705dddd311a32b9af47211f08671e" dependencies = [ "anyhow", "bitflags 1.3.2", - "downcast-rs 1.2.1", + "downcast-rs", "filedescriptor", "lazy_static", "libc", @@ -5752,21 +4641,6 @@ dependencies = [ "winreg 0.10.1", ] -[[package]] -name = "postage" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af3fb618632874fb76937c2361a7f22afd393c982a2165595407edc75b06d3c1" -dependencies = [ - "atomic 0.5.3", - "crossbeam-queue", - "futures", - "parking_lot 0.12.5", - "pin-project", - "static_assertions", - 
"thiserror 1.0.69", -] - [[package]] name = "potential_utf" version = "0.1.4" @@ -5817,6 +4691,16 @@ dependencies = [ "yansi", ] +[[package]] +name = "prettyplease" +version = "0.2.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" +dependencies = [ + "proc-macro2", + "syn 2.0.115", +] + [[package]] name = "prettytable-rs" version = "0.10.0" @@ -5840,17 +4724,6 @@ dependencies = [ "elliptic-curve", ] -[[package]] -name = "priority-queue" -version = "2.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93980406f12d9f8140ed5abe7155acb10bb1e69ea55c88960b9c2f117445ef96" -dependencies = [ - "equivalent", - "indexmap 2.13.0", - "serde", -] - [[package]] name = "proc-macro-crate" version = "3.4.0" @@ -5860,33 +4733,11 @@ dependencies = [ "toml_edit 0.23.10+spec-1.0.0", ] -[[package]] -name = "proc-macro-error-attr2" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96de42df36bb9bba5542fe9f1a054b8cc87e172759a1868aa05c1f3acc89dfc5" -dependencies = [ - "proc-macro2", - "quote", -] - -[[package]] -name = "proc-macro-error2" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11ec05c52be0a07b08061f7dd003e7d7092e0472bc731b4af7bb1ef876109802" -dependencies = [ - "proc-macro-error-attr2", - "proc-macro2", - "quote", - "syn 2.0.114", -] - [[package]] name = "proc-macro2" -version = "1.0.105" +version = "1.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7" +checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934" dependencies = [ "unicode-ident", ] @@ -5917,9 +4768,9 @@ dependencies = [ [[package]] name = "proptest" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"bee689443a2bd0a16ab0348b52ee43e3b2d1b1f931c8aa5c9f8de4c86fbe8c40" +checksum = "37566cb3fdacef14c0737f9546df7cfeadbfbc9fef10991038bf5015d0c80532" dependencies = [ "bit-set 0.8.0", "bit-vec 0.8.0", @@ -5928,7 +4779,7 @@ dependencies = [ "rand 0.9.2", "rand_chacha 0.9.0", "rand_xorshift", - "regex-syntax 0.8.8", + "regex-syntax 0.8.9", "rusty-fork", "tempfile", "unarray", @@ -5942,7 +4793,7 @@ checksum = "fb6dc647500e84a25a85b100e76c85b8ace114c209432dc174f20aac11d4ed6c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -5965,7 +4816,7 @@ dependencies = [ "itertools 0.14.0", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -5985,9 +4836,9 @@ checksum = "33cb294fe86a74cbcf50d4445b37da762029549ebeea341421c7c70370f86cac" [[package]] name = "psm" -version = "0.1.28" +version = "0.1.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d11f2fedc3b7dafdc2851bc52f277377c5473d378859be234bc7ebb593144d01" +checksum = "3852766467df634d74f0b2d7819bf8dc483a0eb2e3b0f50f756f9cfe8b0d18d8" dependencies = [ "ar_archive_writer", "cc", @@ -6012,18 +4863,6 @@ dependencies = [ "psl-types", ] -[[package]] -name = "pwd-grp" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e2023f41b5fcb7c30eb5300a5733edfaa9e0e0d502d51b586f65633fd39e40c" -dependencies = [ - "derive-deftly", - "libc", - "paste", - "thiserror 2.0.17", -] - [[package]] name = "pxfm" version = "0.1.27" @@ -6070,8 +4909,8 @@ dependencies = [ "quinn-udp", "rustc-hash", "rustls 0.23.36", - "socket2 0.6.1", - "thiserror 2.0.17", + "socket2 0.6.2", + "thiserror 2.0.18", "tokio", "tracing", "web-time", @@ -6092,7 +4931,7 @@ dependencies = [ "rustls 0.23.36", "rustls-pki-types", "slab", - "thiserror 2.0.17", + "thiserror 2.0.18", "tinyvec", "tracing", "web-time", @@ -6107,16 +4946,16 @@ dependencies = [ "cfg_aliases 0.2.1", "libc", "once_cell", - "socket2 0.6.1", + "socket2 0.6.2", "tracing", 
"windows-sys 0.60.2", ] [[package]] name = "quote" -version = "1.0.43" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a" +checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4" dependencies = [ "proc-macro2", ] @@ -6171,7 +5010,7 @@ dependencies = [ "strum_macros", "syntect", "textwrap", - "thiserror 2.0.17", + "thiserror 2.0.18", "tokio", "tracing", "tracing-appender", @@ -6294,17 +5133,6 @@ dependencies = [ "rand_core 0.5.1", ] -[[package]] -name = "rand_jitter" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16df48f071248e67b8fc5e866d9448d45c08ad8b672baaaf796e2f15e606ff0" -dependencies = [ - "libc", - "rand_core 0.9.5", - "winapi", -] - [[package]] name = "rand_xorshift" version = "0.4.0" @@ -6323,16 +5151,6 @@ dependencies = [ "rand_core 0.9.5", ] -[[package]] -name = "rayon" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368f01d005bf8fd9b1206fb6fa653e6c4a81ceb1466406b81792d87c5677a58f" -dependencies = [ - "either", - "rayon-core", -] - [[package]] name = "rayon-core" version = "1.13.0" @@ -6355,15 +5173,6 @@ dependencies = [ "yasna", ] -[[package]] -name = "rdrand" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d92195228612ac8eed47adbc2ed0f04e513a4ccb98175b6f2bd04d963b533655" -dependencies = [ - "rand_core 0.6.4", -] - [[package]] name = "redox_syscall" version = "0.2.16" @@ -6384,9 +5193,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f3fe0889e69e2ae9e41f4d6c4c0181701d00e4697b356fb1f74173a5e0ee27" +checksum = "35985aa610addc02e24fc232012c86fd11f14111180f902b67e2d5331f8ebf2b" dependencies = [ "bitflags 2.10.0", ] @@ -6402,47 +5211,16 @@ dependencies 
= [ "thiserror 1.0.69", ] -[[package]] -name = "redox_users" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4e608c6638b9c18977b00b475ac1f28d14e84b27d8d42f70e0bf1e3dec127ac" -dependencies = [ - "getrandom 0.2.17", - "libredox", - "thiserror 2.0.17", -] - -[[package]] -name = "ref-cast" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f354300ae66f76f1c85c5f84693f0ce81d747e2c3f21a45fef496d89c960bf7d" -dependencies = [ - "ref-cast-impl", -] - -[[package]] -name = "ref-cast-impl" -version = "1.0.25" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7186006dcb21920990093f30e3dea63b7d6e977bf1256be20c3563a5db070da" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - [[package]] name = "regex" -version = "1.12.2" +version = "1.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843bc0191f75f3e22651ae5f1e72939ab2f72a4bc30fa80a066bd66edefc24d4" +checksum = "e10754a14b9137dd7b1e3e5b0493cc9171fdd105e0ab477f51b72e7f3ac0e276" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.13", - "regex-syntax 0.8.8", + "regex-automata 0.4.14", + "regex-syntax 0.8.9", ] [[package]] @@ -6456,13 +5234,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5276caf25ac86c8d810222b3dbb938e512c55c6831a10f3e6ed1c93b84041f1c" +checksum = "6e1dd4122fc1595e8162618945476892eefca7b88c52820e74af6262213cae8f" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.8", + "regex-syntax 0.8.9", ] [[package]] @@ -6473,9 +5251,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7a2d987857b319362043e95f5353c0535c1f58eec5336fdfcf626430af7def58" +checksum = "a96887878f22d7bad8a3b6dc5b7440e0ada9a245242924394987b21cf2210a4c" [[package]] name = "reqwest" @@ -6523,7 +5301,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 1.0.5", + "webpki-roots 1.0.6", ] [[package]] @@ -6544,18 +5322,13 @@ version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e061d1b48cb8d38042de4ae0a7a6401009d6143dc80d2e2d6f31f0bdd6470c7" -[[package]] -name = "retry-error" -version = "0.6.5" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" - [[package]] name = "rfc6979" version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f8dd2a808d456c4a54e300a23e9f5a67e122c3024119acbfd73e3bf664491cb2" dependencies = [ - "hmac 0.12.1", + "hmac", "subtle", ] @@ -6596,7 +5369,7 @@ dependencies = [ "futures", "http", "http-body-util", - "imbl-value 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "imbl-value 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "itertools 0.14.0", "lazy_format", "lazy_static", @@ -6606,7 +5379,7 @@ dependencies = [ "serde", "serde_cbor 0.11.2", "serde_json", - "thiserror 2.0.17", + "thiserror 2.0.18", "tokio", "tokio-stream", "url", @@ -6644,21 +5417,6 @@ dependencies = [ "windows-sys 0.52.0", ] -[[package]] -name = "rusqlite" -version = "0.37.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "165ca6e57b20e1351573e3729b958bc62f0e48025386970b6e4d29e7a7e71f3f" -dependencies = [ - "bitflags 2.10.0", - "fallible-iterator", - "fallible-streaming-iterator", - "hashlink", - "libsqlite3-sys", - "smallvec", - "time", -] - [[package]] name = "rust-argon2" version = "3.0.0" @@ -6699,7 +5457,7 @@ dependencies = [ "serde", "serde_json", "serde_yaml", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -6817,7 +5575,7 @@ 
version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63" dependencies = [ - "openssl-probe 0.2.0", + "openssl-probe 0.2.1", "rustls-pki-types", "schannel", "security-framework 3.5.1", @@ -6912,21 +5670,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a50f4cf475b65d88e057964e0e9bb1f0aa9bbb2036dc65c64596b42932536984" - -[[package]] -name = "safelog" -version = "0.4.8" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "derive_more 2.1.1", - "educe", - "either", - "fluid-let", - "thiserror 2.0.17", -] +checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f" [[package]] name = "same-file" @@ -6937,15 +5683,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "sanitize-filename" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc984f4f9ceb736a7bb755c3e3bd17dc56370af2600c9780dcc48c66453da34d" -dependencies = [ - "regex", -] - [[package]] name = "schannel" version = "0.1.28" @@ -6955,30 +5692,6 @@ dependencies = [ "windows-sys 0.61.2", ] -[[package]] -name = "schemars" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4cd191f9397d57d581cddd31014772520aa448f65ef991055d7f61582c65165f" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - -[[package]] -name = "schemars" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54e910108742c57a770f492731f99be216a52fadd361b06c8fb59d74ccc267d2" -dependencies = [ - "dyn-clone", - "ref-cast", - "serde", - "serde_json", -] - [[package]] name = "scoped-tls" version = "1.0.1" @@ -7070,26 +5783,6 @@ dependencies = [ "serde", ] -[[package]] 
-name = "serde-value" -version = "0.7.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" -dependencies = [ - "ordered-float", - "serde", -] - -[[package]] -name = "serde_bytes" -version = "0.11.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a5d440709e79d88e51ac01c4b72fc6cb7314017bb7da9eeff678aa94c10e3ea8" -dependencies = [ - "serde", - "serde_core", -] - [[package]] name = "serde_cbor" version = "0.11.1" @@ -7125,17 +5818,7 @@ checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", -] - -[[package]] -name = "serde_ignored" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115dffd5f3853e06e746965a20dcbae6ee747ae30b543d91b0e089668bb07798" -dependencies = [ - "serde", - "serde_core", + "syn 2.0.115", ] [[package]] @@ -7182,7 +5865,7 @@ checksum = "175ee3e80ae9982737ca543e96133087cbd9a485eecc3bc4de9c1a37b47ea59c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -7215,37 +5898,6 @@ dependencies = [ "serde", ] -[[package]] -name = "serde_with" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fa237f2807440d238e0364a218270b98f767a00d3dada77b1c53ae88940e2e7" -dependencies = [ - "base64 0.22.1", - "chrono", - "hex", - "indexmap 1.9.3", - "indexmap 2.13.0", - "schemars 0.9.0", - "schemars 1.2.0", - "serde_core", - "serde_json", - "serde_with_macros", - "time", -] - -[[package]] -name = "serde_with_macros" -version = "3.16.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52a8e3ca0ca629121f70ab50f95249e5a6f925cc0f6ffe8256c45b728875706c" -dependencies = [ - "darling 0.21.3", - "proc-macro2", - "quote", - "syn 2.0.114", -] - [[package]] name = "serde_yaml" version = "0.9.34+deprecated" @@ -7332,28 
+5984,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "sha3" -version = "0.9.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f81199417d4e5de3f04b1e871023acea7389672c4135918f05aa9cbf2f2fa809" -dependencies = [ - "block-buffer 0.9.0", - "digest 0.9.0", - "keccak", - "opaque-debug", -] - -[[package]] -name = "sha3" -version = "0.10.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75872d278a8f37ef87fa0ddbda7802605cb18344497949862c0d4dcb291eba60" -dependencies = [ - "digest 0.10.7", - "keccak", -] - [[package]] name = "sharded-slab" version = "0.1.7" @@ -7379,17 +6009,6 @@ version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc6fe69c597f9c37bfeeeeeb33da3530379845f10be461a66d16d03eca2ded77" -[[package]] -name = "shellexpand" -version = "3.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b1fdf65dd6331831494dd616b30351c38e96e45921a27745cf98490458b90bb" -dependencies = [ - "bstr", - "dirs", - "os_str_bytes", -] - [[package]] name = "shlex" version = "1.3.0" @@ -7457,15 +6076,15 @@ checksum = "c11532d9d241904f095185f35dcdaf930b1427a94d5b01d7002d74ba19b44cc4" [[package]] name = "siphasher" -version = "1.0.1" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d" +checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e" [[package]] name = "slab" -version = "0.4.11" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ae44ef20feb57a68b23d846850f861394c2e02dc425a50098ae8c90267589" +checksum = "0c790de23124f9ab44544d7ac05d60440adc586479ce501c1d6d7da3cd8c9cf5" [[package]] name = "sled" @@ -7483,28 +6102,6 @@ dependencies = [ "parking_lot 0.11.2", ] -[[package]] -name = "slotmap" -version = "1.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bdd58c3c93c3d278ca835519292445cb4b0d4dc59ccfdf7ceadaab3f8aeb4038" -dependencies = [ - "serde", - "version_check", -] - -[[package]] -name = "slotmap-careful" -version = "0.2.5" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "paste", - "serde", - "slotmap", - "thiserror 2.0.17", - "void", -] - [[package]] name = "smallstr" version = "0.3.1" @@ -7570,9 +6167,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881" +checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0" dependencies = [ "libc", "windows-sys 0.60.2", @@ -7587,7 +6184,7 @@ dependencies = [ "async-trait", "bytes", "percent-encoding", - "thiserror 2.0.17", + "thiserror 2.0.18", "tokio", ] @@ -7646,7 +6243,7 @@ dependencies = [ "serde_json", "sha2 0.10.9", "smallvec", - "thiserror 2.0.17", + "thiserror 2.0.18", "tokio", "tokio-stream", "tracing", @@ -7664,7 +6261,7 @@ dependencies = [ "quote", "sqlx-core", "sqlx-macros-core", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -7685,7 +6282,7 @@ dependencies = [ "sha2 0.10.9", "sqlx-core", "sqlx-postgres", - "syn 2.0.114", + "syn 2.0.115", "tokio", "url", ] @@ -7708,7 +6305,7 @@ dependencies = [ "futures-util", "hex", "hkdf", - "hmac 0.12.1", + "hmac", "home", "itoa", "log", @@ -7722,7 +6319,7 @@ dependencies = [ "smallvec", "sqlx-core", "stringprep", - "thiserror 2.0.17", + "thiserror 2.0.18", "tracing", "whoami", ] @@ -7749,8 +6346,8 @@ dependencies = [ "proc-macro2", "quote", "regex-syntax 0.6.29", - "strsim 0.11.1", - "syn 2.0.114", + "strsim", + "syn 2.0.115", "unicode-width 0.1.14", ] @@ -7804,9 +6401,9 @@ checksum = "6ce2be8dc25455e1f91df71bfa12ad37d7af1092ae736f3a6cd0e37bc7810596" 
[[package]] name = "stacker" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1f8b29fb42aafcea4edeeb6b2f2d7ecd0d969c48b4cf0d2e64aafc471dd6e59" +checksum = "08d74a23609d509411d10e2176dc2a4346e3b4aea2e7b1869f19fdedbc71c013" dependencies = [ "cc", "cfg-if", @@ -7817,17 +6414,16 @@ dependencies = [ [[package]] name = "start-os" -version = "0.4.0-alpha.19" +version = "0.4.0-alpha.20" dependencies = [ - "aes 0.7.5", - "arti-client", + "aes", "async-acme", "async-compression", "async-stream", "async-trait", "axum", "backtrace-on-stack-overflow", - "base32 0.5.1", + "base32", "base64 0.22.1", "base64ct", "basic-cookies", @@ -7842,7 +6438,6 @@ dependencies = [ "const_format", "cookie", "cookie_store", - "curve25519-dalek 4.1.3", "der", "digest 0.10.7", "divrem", @@ -7858,7 +6453,7 @@ dependencies = [ "hashing-serializer", "hex", "hickory-server", - "hmac 0.12.1", + "hmac", "http", "http-body-util", "hyper", @@ -7866,7 +6461,7 @@ dependencies = [ "id-pool", "iddqd", "imbl", - "imbl-value 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "imbl-value 0.4.4 (registry+https://github.com/rust-lang/crates.io-index)", "include_dir", "indexmap 2.13.0", "indicatif", @@ -7917,7 +6512,6 @@ dependencies = [ "rpc-toolkit", "rust-argon2", "rust-i18n", - "safelog", "semver", "serde", "serde_json", @@ -7925,7 +6519,7 @@ dependencies = [ "sha-crypt", "sha2 0.10.9", "signal-hook", - "socket2 0.6.1", + "socket2 0.6.2", "socks5-impl", "sqlx", "sscanf", @@ -7933,22 +6527,14 @@ dependencies = [ "tar", "termion", "textwrap", - "thiserror 2.0.17", + "thiserror 2.0.18", "tokio", "tokio-rustls 0.26.4", "tokio-stream", "tokio-tar", "tokio-tungstenite 0.26.2", "tokio-util", - "toml 0.9.11+spec-1.1.0", - "tor-cell", - "tor-hscrypto", - "tor-hsservice", - "tor-keymgr", - "tor-llcrypto", - "tor-proto", - "tor-rtcompat", - "torut", + "toml 0.9.12+spec-1.1.0", "tower-service", "tracing", "tracing-error", @@ -7977,7 +6563,7 
@@ checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f" dependencies = [ "new_debug_unreachable", "parking_lot 0.12.5", - "phf_shared 0.11.3", + "phf_shared", "precomputed-hash", ] @@ -8010,12 +6596,6 @@ dependencies = [ "vte", ] -[[package]] -name = "strsim" -version = "0.10.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" - [[package]] name = "strsim" version = "0.11.1" @@ -8027,9 +6607,6 @@ name = "strum" version = "0.27.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af23d6f6c1a224baef9d3f61e287d2761385a5b88fdab4eb4c6f11aeb54c4bcf" -dependencies = [ - "strum_macros", -] [[package]] name = "strum_macros" @@ -8040,7 +6617,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -8083,9 +6660,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.114" +version = "2.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4d107df263a3013ef9b1879b0df87d706ff80f65a86ea879bd9c31f9b307c2a" +checksum = "6e614ed320ac28113fa64972c4262d5dbc89deacdfd00c34a3e4cea073243c12" dependencies = [ "proc-macro2", "quote", @@ -8109,7 +6686,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -8124,34 +6701,20 @@ dependencies = [ "once_cell", "onig", "plist", - "regex-syntax 0.8.8", + "regex-syntax 0.8.9", "serde", "serde_derive", "serde_json", - "thiserror 2.0.17", + "thiserror 2.0.18", "walkdir", "yaml-rust", ] -[[package]] -name = "sysinfo" -version = "0.36.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "252800745060e7b9ffb7b2badbd8b31cfa4aa2e61af879d0a3bf2a317c20217d" -dependencies = [ - "libc", - "memchr", - "ntapi", - "objc2-core-foundation", - "objc2-io-kit", - "windows", -] - 
[[package]] name = "system-configuration" -version = "0.6.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +checksum = "a13f3d0daba03132c0aa9767f98351b3488edc2c100cda2d2ec2b04f3d8d3c8b" dependencies = [ "bitflags 2.10.0", "core-foundation 0.9.4", @@ -8193,12 +6756,12 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.24.0" +version = "3.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "655da9c7eb6305c55742045d5a8d2037996d61d8de95806335c7c86ce0f82e9c" +checksum = "0136791f7c95b1f6dd99f9cc786b91bb81c3800b639b3478e561ddb7be95e5f1" dependencies = [ "fastrand", - "getrandom 0.3.4", + "getrandom 0.4.1", "once_cell", "rustix 1.1.3", "windows-sys 0.61.2", @@ -8266,11 +6829,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.17" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8" +checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4" dependencies = [ - "thiserror-impl 2.0.17", + "thiserror-impl 2.0.18", ] [[package]] @@ -8281,18 +6844,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] name = "thiserror-impl" -version = "2.0.17" +version = "2.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913" +checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -8306,9 +6869,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.45" +version = "0.3.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f9e442fc33d7fdb45aa9bfeb312c095964abdf596f7567261062b2a7107aaabd" +checksum = "743bd48c283afc0388f9b8827b976905fb217ad9e647fae3a379a9283c4def2c" dependencies = [ "deranged", "itoa", @@ -8321,15 +6884,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b36ee98fd31ec7426d599183e8fe26932a8dc1fb76ddb6214d05493377d34ca" +checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca" [[package]] name = "time-macros" -version = "0.2.25" +version = "0.2.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71e552d1249bf61ac2a52db88179fd0673def1e1ad8243a00d9ec9ed71fee3dd" +checksum = "2e70e4c5a0e0a8a4823ad65dfe1a6930e4f4d756dcd9dd7939022b5e8c501215" dependencies = [ "num-conv", "time-core", @@ -8351,20 +6914,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42d3e9c45c09de15d06dd8acf5f4e0e399e85927b7f00711024eb7ae10fa4869" dependencies = [ "displaydoc", - "serde_core", "zerovec", ] -[[package]] -name = "tinytemplate" -version = "1.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4d6b5f19ff7664e8c98d03e2139cb510db9b0a60b55f8e8709b689d939b6bc" -dependencies = [ - "serde", - "serde_json", -] - [[package]] name = "tinyvec" version = "1.10.0" @@ -8392,7 +6944,7 @@ dependencies = [ "parking_lot 0.12.5", "pin-project-lite", "signal-hook-registry", - "socket2 0.6.1", + "socket2 0.6.2", "tokio-macros", "tracing", "windows-sys 0.61.2", @@ -8406,7 +6958,7 @@ checksum = "af407857209536a95c8e56f8231ef2c2e2aff839b22e07a1ffcbc617e9db9fa5" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -8500,7 +7052,6 @@ checksum = "9ae9cec805b01e8fc3fd2fe289f89149a9b66dd16786abd8b19cfa7b48cb0098" dependencies = [ "bytes", "futures-core", - "futures-io", "futures-sink", "pin-project-lite", "tokio", @@ -8529,9 +7080,9 @@ dependencies = 
[ [[package]] name = "toml" -version = "0.9.11+spec-1.1.0" +version = "0.9.12+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3afc9a848309fe1aaffaed6e1546a7a14de1f935dc9d89d32afd9a44bab7c46" +checksum = "cf92845e79fc2e2def6a5d828f0801e29a2f8acc037becc5ab08595c7d5e9863" dependencies = [ "indexmap 2.13.0", "serde_core", @@ -8588,9 +7139,9 @@ dependencies = [ [[package]] name = "toml_parser" -version = "1.0.6+spec-1.1.0" +version = "1.0.8+spec-1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3198b4b0a8e11f09dd03e133c0280504d0801269e9afa46362ffde1cbeebf44" +checksum = "0742ff5ff03ea7e67c8ae6c93cac239e0d9784833362da3f9a9c1da8dfefcbdc" dependencies = [ "winnow", ] @@ -8609,9 +7160,9 @@ checksum = "ab16f14aed21ee8bfd8ec22513f7287cd4a91aa92e44edfe2c17ddd004e92607" [[package]] name = "tonic" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb7613188ce9f7df5bfe185db26c5814347d110db17920415cf2fbcad85e7203" +checksum = "a286e33f82f8a1ee2df63f4fa35c0becf4a85a0cb03091a15fd7bf0b402dc94a" dependencies = [ "async-trait", "axum", @@ -8626,7 +7177,7 @@ dependencies = [ "hyper-util", "percent-encoding", "pin-project", - "socket2 0.6.1", + "socket2 0.6.2", "sync_wrapper", "tokio", "tokio-stream", @@ -8638,1010 +7189,15 @@ dependencies = [ [[package]] name = "tonic-prost" -version = "0.14.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66bd50ad6ce1252d87ef024b3d64fe4c3cf54a86fb9ef4c631fdd0ded7aeaa67" +checksum = "d6c55a2d6a14174563de34409c9f92ff981d006f56da9c6ecd40d9d4a31500b0" dependencies = [ "bytes", "prost", "tonic", ] -[[package]] -name = "tor-async-utils" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "derive-deftly", - "educe", - "futures", - 
"oneshot-fused-workaround", - "pin-project", - "postage", - "thiserror 2.0.17", - "void", -] - -[[package]] -name = "tor-basic-utils" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "derive_more 2.1.1", - "hex", - "itertools 0.14.0", - "libc", - "paste", - "rand 0.9.2", - "rand_chacha 0.9.0", - "serde", - "slab", - "smallvec", - "thiserror 2.0.17", -] - -[[package]] -name = "tor-bytes" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "bytes", - "derive-deftly", - "digest 0.10.7", - "educe", - "getrandom 0.3.4", - "safelog", - "thiserror 2.0.17", - "tor-error", - "tor-llcrypto", - "zeroize", -] - -[[package]] -name = "tor-cell" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "bitflags 2.10.0", - "bytes", - "caret", - "derive-deftly", - "derive_more 2.1.1", - "educe", - "itertools 0.14.0", - "paste", - "rand 0.9.2", - "smallvec", - "thiserror 2.0.17", - "tor-basic-utils", - "tor-bytes", - "tor-cert", - "tor-error", - "tor-hscrypto", - "tor-linkspec", - "tor-llcrypto", - "tor-memquota", - "tor-protover", - "tor-units", - "void", -] - -[[package]] -name = "tor-cert" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "caret", - "derive_builder_fork_arti", - "derive_more 2.1.1", - "digest 0.10.7", - "thiserror 2.0.17", - "tor-bytes", - "tor-checkable", - "tor-llcrypto", -] - -[[package]] -name = "tor-chanmgr" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "async-trait", - "caret", - 
"derive_builder_fork_arti", - "derive_more 2.1.1", - "educe", - "futures", - "oneshot-fused-workaround", - "postage", - "rand 0.9.2", - "safelog", - "serde", - "thiserror 2.0.17", - "tor-async-utils", - "tor-basic-utils", - "tor-cell", - "tor-config", - "tor-error", - "tor-keymgr", - "tor-linkspec", - "tor-llcrypto", - "tor-memquota", - "tor-netdir", - "tor-proto", - "tor-rtcompat", - "tor-socksproto", - "tor-units", - "tracing", - "void", -] - -[[package]] -name = "tor-checkable" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "humantime", - "signature 2.2.0", - "thiserror 2.0.17", - "tor-llcrypto", -] - -[[package]] -name = "tor-circmgr" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "async-trait", - "bounded-vec-deque", - "cfg-if", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more 2.1.1", - "downcast-rs 2.0.2", - "dyn-clone", - "educe", - "futures", - "humantime-serde", - "itertools 0.14.0", - "once_cell", - "oneshot-fused-workaround", - "pin-project", - "rand 0.9.2", - "retry-error", - "safelog", - "serde", - "static_assertions", - "thiserror 2.0.17", - "tor-async-utils", - "tor-basic-utils", - "tor-cell", - "tor-chanmgr", - "tor-config", - "tor-error", - "tor-guardmgr", - "tor-linkspec", - "tor-memquota", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-protover", - "tor-relay-selection", - "tor-rtcompat", - "tor-units", - "tracing", - "void", - "weak-table", -] - -[[package]] -name = "tor-config" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "cfg-if", - "derive-deftly", - "derive_builder_fork_arti", - "educe", - "either", - "figment", - "fs-mistrust", - 
"futures", - "itertools 0.14.0", - "notify", - "paste", - "postage", - "regex", - "serde", - "serde-value", - "serde_ignored", - "strum", - "thiserror 2.0.17", - "toml 0.9.11+spec-1.1.0", - "tor-basic-utils", - "tor-error", - "tor-rtcompat", - "tracing", - "void", -] - -[[package]] -name = "tor-config-path" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "directories", - "serde", - "shellexpand", - "thiserror 2.0.17", - "tor-error", - "tor-general-addr", -] - -[[package]] -name = "tor-consdiff" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "digest 0.10.7", - "hex", - "thiserror 2.0.17", - "tor-llcrypto", -] - -[[package]] -name = "tor-dirclient" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "async-compression", - "base64ct", - "derive_more 2.1.1", - "futures", - "hex", - "http", - "httparse", - "httpdate", - "itertools 0.14.0", - "memchr", - "thiserror 2.0.17", - "tor-circmgr", - "tor-error", - "tor-hscrypto", - "tor-linkspec", - "tor-llcrypto", - "tor-netdoc", - "tor-proto", - "tor-rtcompat", - "tracing", -] - -[[package]] -name = "tor-dirmgr" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "async-trait", - "base64ct", - "derive_builder_fork_arti", - "derive_more 2.1.1", - "digest 0.10.7", - "educe", - "event-listener 5.4.1", - "fs-mistrust", - "fslock", - "futures", - "hex", - "humantime", - "humantime-serde", - "itertools 0.14.0", - "memmap2 0.9.9", - "oneshot-fused-workaround", - "paste", - "postage", - "rand 0.9.2", - "rusqlite", - "safelog", - "scopeguard", - "serde", - "serde_json", - "signature 
2.2.0", - "static_assertions", - "strum", - "thiserror 2.0.17", - "time", - "tor-async-utils", - "tor-basic-utils", - "tor-checkable", - "tor-circmgr", - "tor-config", - "tor-consdiff", - "tor-dirclient", - "tor-error", - "tor-guardmgr", - "tor-llcrypto", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-protover", - "tor-rtcompat", - "tracing", -] - -[[package]] -name = "tor-error" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "derive_more 2.1.1", - "futures", - "paste", - "retry-error", - "static_assertions", - "strum", - "thiserror 2.0.17", - "tracing", - "void", -] - -[[package]] -name = "tor-general-addr" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "arbitrary", - "derive_more 2.1.1", - "thiserror 2.0.17", - "void", -] - -[[package]] -name = "tor-guardmgr" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "base64ct", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more 2.1.1", - "dyn-clone", - "educe", - "futures", - "humantime", - "humantime-serde", - "itertools 0.14.0", - "num_enum", - "oneshot-fused-workaround", - "pin-project", - "postage", - "rand 0.9.2", - "safelog", - "serde", - "strum", - "thiserror 2.0.17", - "tor-async-utils", - "tor-basic-utils", - "tor-config", - "tor-error", - "tor-linkspec", - "tor-llcrypto", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-relay-selection", - "tor-rtcompat", - "tor-units", - "tracing", -] - -[[package]] -name = "tor-hsclient" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "async-trait", 
- "derive-deftly", - "derive_more 2.1.1", - "educe", - "either", - "futures", - "itertools 0.14.0", - "oneshot-fused-workaround", - "postage", - "rand 0.9.2", - "retry-error", - "safelog", - "slotmap-careful", - "strum", - "thiserror 2.0.17", - "tor-async-utils", - "tor-basic-utils", - "tor-bytes", - "tor-cell", - "tor-checkable", - "tor-circmgr", - "tor-config", - "tor-dirclient", - "tor-error", - "tor-hscrypto", - "tor-keymgr", - "tor-linkspec", - "tor-llcrypto", - "tor-memquota", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-protover", - "tor-rtcompat", - "tracing", -] - -[[package]] -name = "tor-hscrypto" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "cipher 0.4.4", - "data-encoding", - "derive-deftly", - "derive_more 2.1.1", - "digest 0.10.7", - "equix", - "hex", - "humantime", - "itertools 0.14.0", - "paste", - "rand 0.9.2", - "safelog", - "serde", - "signature 2.2.0", - "subtle", - "thiserror 2.0.17", - "tor-basic-utils", - "tor-bytes", - "tor-error", - "tor-key-forge", - "tor-llcrypto", - "tor-memquota", - "tor-units", - "void", - "zeroize", -] - -[[package]] -name = "tor-hsservice" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "async-trait", - "base64ct", - "cfg-if", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more 2.1.1", - "digest 0.10.7", - "educe", - "fs-mistrust", - "futures", - "growable-bloom-filter", - "hex", - "humantime", - "itertools 0.14.0", - "k12", - "once_cell", - "oneshot-fused-workaround", - "postage", - "rand 0.9.2", - "rand_core 0.9.5", - "retry-error", - "safelog", - "serde", - "serde_with", - "strum", - "thiserror 2.0.17", - "tor-async-utils", - "tor-basic-utils", - "tor-bytes", - "tor-cell", - "tor-circmgr", - "tor-config", - "tor-config-path", - 
"tor-dirclient", - "tor-error", - "tor-hscrypto", - "tor-keymgr", - "tor-linkspec", - "tor-llcrypto", - "tor-log-ratelim", - "tor-netdir", - "tor-netdoc", - "tor-persist", - "tor-proto", - "tor-protover", - "tor-relay-selection", - "tor-rtcompat", - "tracing", - "void", -] - -[[package]] -name = "tor-key-forge" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "derive-deftly", - "derive_more 2.1.1", - "downcast-rs 2.0.2", - "paste", - "rand 0.9.2", - "signature 2.2.0", - "ssh-key", - "thiserror 2.0.17", - "tor-bytes", - "tor-cert", - "tor-checkable", - "tor-error", - "tor-llcrypto", -] - -[[package]] -name = "tor-keymgr" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "arrayvec 0.7.6", - "cfg-if", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more 2.1.1", - "downcast-rs 2.0.2", - "dyn-clone", - "fs-mistrust", - "glob-match", - "humantime", - "inventory", - "itertools 0.14.0", - "rand 0.9.2", - "safelog", - "serde", - "signature 2.2.0", - "ssh-key", - "thiserror 2.0.17", - "tor-basic-utils", - "tor-bytes", - "tor-config", - "tor-config-path", - "tor-error", - "tor-hscrypto", - "tor-key-forge", - "tor-llcrypto", - "tor-persist", - "tracing", - "visibility", - "walkdir", - "zeroize", -] - -[[package]] -name = "tor-linkspec" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "base64ct", - "by_address", - "caret", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more 2.1.1", - "hex", - "itertools 0.14.0", - "safelog", - "serde", - "serde_with", - "strum", - "thiserror 2.0.17", - "tor-basic-utils", - "tor-bytes", - "tor-config", - "tor-llcrypto", - "tor-memquota", - "tor-protover", -] - -[[package]] 
-name = "tor-llcrypto" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "aes 0.8.4", - "base64ct", - "ctr 0.9.2", - "curve25519-dalek 4.1.3", - "der-parser 10.0.0", - "derive-deftly", - "derive_more 2.1.1", - "digest 0.10.7", - "ed25519-dalek 2.2.0", - "educe", - "getrandom 0.3.4", - "hex", - "rand 0.9.2", - "rand_chacha 0.9.0", - "rand_core 0.6.4", - "rand_core 0.9.5", - "rand_jitter", - "rdrand", - "rsa", - "safelog", - "serde", - "sha1", - "sha2 0.10.9", - "sha3 0.10.8", - "signature 2.2.0", - "subtle", - "thiserror 2.0.17", - "tor-memquota", - "visibility", - "x25519-dalek", - "zeroize", -] - -[[package]] -name = "tor-log-ratelim" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "futures", - "humantime", - "thiserror 2.0.17", - "tor-error", - "tor-rtcompat", - "tracing", - "weak-table", -] - -[[package]] -name = "tor-memquota" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "cfg-if", - "derive-deftly", - "derive_more 2.1.1", - "dyn-clone", - "educe", - "futures", - "itertools 0.14.0", - "paste", - "pin-project", - "serde", - "slotmap-careful", - "static_assertions", - "sysinfo", - "thiserror 2.0.17", - "tor-async-utils", - "tor-basic-utils", - "tor-config", - "tor-error", - "tor-log-ratelim", - "tor-rtcompat", - "tracing", - "void", -] - -[[package]] -name = "tor-netdir" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "async-trait", - "bitflags 2.10.0", - "derive_more 2.1.1", - "digest 0.10.7", - "futures", - "hex", - "humantime", - "itertools 0.14.0", - "num_enum", - "rand 0.9.2", - "serde", - 
"static_assertions", - "strum", - "thiserror 2.0.17", - "time", - "tor-basic-utils", - "tor-error", - "tor-hscrypto", - "tor-linkspec", - "tor-llcrypto", - "tor-netdoc", - "tor-protover", - "tor-units", - "tracing", - "typed-index-collections", -] - -[[package]] -name = "tor-netdoc" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "base64ct", - "bitflags 2.10.0", - "cipher 0.4.4", - "derive-deftly", - "derive_builder_fork_arti", - "derive_more 2.1.1", - "digest 0.10.7", - "educe", - "hex", - "humantime", - "itertools 0.14.0", - "memchr", - "paste", - "phf 0.13.1", - "rand 0.9.2", - "serde", - "serde_with", - "signature 2.2.0", - "smallvec", - "strum", - "subtle", - "thiserror 2.0.17", - "time", - "tinystr", - "tor-basic-utils", - "tor-bytes", - "tor-cell", - "tor-cert", - "tor-checkable", - "tor-error", - "tor-hscrypto", - "tor-linkspec", - "tor-llcrypto", - "tor-protover", - "tor-units", - "void", - "weak-table", - "zeroize", -] - -[[package]] -name = "tor-persist" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "derive-deftly", - "derive_more 2.1.1", - "filetime", - "fs-mistrust", - "fslock", - "fslock-guard", - "futures", - "itertools 0.14.0", - "oneshot-fused-workaround", - "paste", - "sanitize-filename", - "serde", - "serde_json", - "thiserror 2.0.17", - "time", - "tor-async-utils", - "tor-basic-utils", - "tor-error", - "tracing", - "void", -] - -[[package]] -name = "tor-proto" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "asynchronous-codec", - "bitvec 1.0.1", - "bytes", - "caret", - "cfg-if", - "cipher 0.4.4", - "coarsetime", - "criterion-cycles-per-byte", - "derive-deftly", - 
"derive_builder_fork_arti", - "derive_more 2.1.1", - "digest 0.10.7", - "educe", - "futures", - "futures-util", - "hkdf", - "hmac 0.12.1", - "itertools 0.14.0", - "oneshot-fused-workaround", - "pin-project", - "postage", - "rand 0.9.2", - "rand_core 0.9.5", - "safelog", - "slotmap-careful", - "smallvec", - "static_assertions", - "subtle", - "sync_wrapper", - "thiserror 2.0.17", - "tokio", - "tokio-util", - "tor-async-utils", - "tor-basic-utils", - "tor-bytes", - "tor-cell", - "tor-cert", - "tor-checkable", - "tor-config", - "tor-error", - "tor-hscrypto", - "tor-linkspec", - "tor-llcrypto", - "tor-log-ratelim", - "tor-memquota", - "tor-protover", - "tor-rtcompat", - "tor-rtmock", - "tor-units", - "tracing", - "typenum", - "visibility", - "void", - "zeroize", -] - -[[package]] -name = "tor-protover" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "caret", - "paste", - "serde_with", - "thiserror 2.0.17", - "tor-bytes", -] - -[[package]] -name = "tor-relay-selection" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "rand 0.9.2", - "serde", - "tor-basic-utils", - "tor-linkspec", - "tor-netdir", - "tor-netdoc", -] - -[[package]] -name = "tor-rtcompat" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "arbitrary", - "async-io", - "async-native-tls", - "async-std", - "async-trait", - "async_executors", - "asynchronous-codec", - "coarsetime", - "derive_more 2.1.1", - "dyn-clone", - "educe", - "futures", - "futures-rustls 0.26.0", - "hex", - "libc", - "native-tls", - "paste", - "pin-project", - "rustls-pki-types", - "rustls-webpki 0.103.9", - "thiserror 2.0.17", - "tokio", - "tokio-util", - "tor-error", - "tor-general-addr", - 
"tracing", - "void", -] - -[[package]] -name = "tor-rtmock" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "assert_matches", - "async-trait", - "derive-deftly", - "derive_more 2.1.1", - "educe", - "futures", - "humantime", - "itertools 0.14.0", - "oneshot-fused-workaround", - "pin-project", - "priority-queue", - "slotmap-careful", - "strum", - "thiserror 2.0.17", - "tor-error", - "tor-general-addr", - "tor-rtcompat", - "tracing", - "tracing-test", - "void", -] - -[[package]] -name = "tor-socksproto" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "amplify", - "caret", - "derive-deftly", - "educe", - "safelog", - "subtle", - "thiserror 2.0.17", - "tor-bytes", - "tor-error", -] - -[[package]] -name = "tor-units" -version = "0.33.0" -source = "git+https://github.com/Start9Labs/arti.git?branch=patch%2Fdisable-exit#24730694701a83432d791d80802db8bda0699700" -dependencies = [ - "derive-deftly", - "derive_more 2.1.1", - "serde", - "thiserror 2.0.17", - "tor-memquota", -] - -[[package]] -name = "torut" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "99febc413f26cf855b3a309c5872edff5c31e0ffe9c2fce5681868761df36f69" -dependencies = [ - "base32 0.4.0", - "base64 0.13.1", - "derive_more 0.99.20", - "ed25519-dalek 1.0.1", - "hex", - "hmac 0.11.0", - "rand 0.7.3", - "serde", - "serde_derive", - "sha2 0.9.9", - "sha3 0.9.1", - "tokio", -] - [[package]] name = "tower" version = "0.5.3" @@ -9710,7 +7266,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "786d480bce6247ab75f005b14ae1624ad978d3029d9113f0a22fa1ac773faeaf" dependencies = [ "crossbeam-channel", - "thiserror 2.0.17", + "thiserror 2.0.18", "time", "tracing-subscriber", ] @@ -9723,7 +7279,7 @@ checksum = 
"7490cfa5ec963746568740651ac6781f701c9c5ea257c58e057f3ba8cf69e8da" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -9786,27 +7342,6 @@ dependencies = [ "tracing-log", ] -[[package]] -name = "tracing-test" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "557b891436fe0d5e0e363427fc7f217abf9ccd510d5136549847bdcbcd011d68" -dependencies = [ - "tracing-core", - "tracing-subscriber", - "tracing-test-macro", -] - -[[package]] -name = "tracing-test-macro" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" -dependencies = [ - "quote", - "syn 2.0.114", -] - [[package]] name = "triomphe" version = "0.1.15" @@ -9842,7 +7377,7 @@ checksum = "c88cc88fd23b5a04528f3a8436024f20010a16ec18eb23c164b1242f65860130" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", "termcolor", ] @@ -9860,7 +7395,7 @@ dependencies = [ "native-tls", "rand 0.9.2", "sha1", - "thiserror 2.0.17", + "thiserror 2.0.18", "url", "utf-8", ] @@ -9878,7 +7413,7 @@ dependencies = [ "log", "rand 0.9.2", "sha1", - "thiserror 2.0.17", + "thiserror 2.0.18", "utf-8", ] @@ -9899,17 +7434,7 @@ checksum = "076a02dc54dd46795c2e9c8282ed40bcfb1e22747e955de9389a1de28190fb26" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", -] - -[[package]] -name = "typed-index-collections" -version = "3.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5318ee4ce62a4e948a33915574021a7a953d83e84fba6e25c72ffcfd7dad35ff" -dependencies = [ - "bincode 2.0.1", - "serde", + "syn 2.0.115", ] [[package]] @@ -9941,15 +7466,6 @@ version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eaea85b334db583fe3274d12b4cd1880032beab409c0d774be044d4480ab9a94" -[[package]] -name = "uncased" -version = "0.9.10" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "e1b88fcfe09e89d3866a5c11019378088af2d24c3fbd4f0543f96b479ec90697" -dependencies = [ - "version_check", -] - [[package]] name = "unicase" version = "2.9.0" @@ -9964,9 +7480,9 @@ checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5" [[package]] name = "unicode-ident" -version = "1.0.22" +version = "1.0.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9312f7c4f6ff9069b165498234ce8be658059c6728633667c526e27dc2cf1df5" +checksum = "537dd038a89878be9b64dd4bd1b260315c1bb94f4d784956b81e27a088d9a09e" [[package]] name = "unicode-linebreak" @@ -10070,9 +7586,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.19.0" +version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2e054861b4bd027cd373e18e8d8d8e6548085000e41290d95ce0c373a654b4a" +checksum = "ee48d38b119b0cd71fe4141b30f5ba9c7c5d9f4e7a3a8b4a674e4b6ef789976f" dependencies = [ "getrandom 0.3.4", "js-sys", @@ -10086,12 +7602,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" -[[package]] -name = "value-bag" -version = "1.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba6f5989077681266825251a52748b8c1d8a4ad098cc37e440103d0ea717fc0" - [[package]] name = "vcpkg" version = "0.2.15" @@ -10110,17 +7620,6 @@ version = "0.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "051eb1abcf10076295e815102942cc58f9d5e3b4560e46e53c21e8ff6f3af7b1" -[[package]] -name = "visibility" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d674d135b4a8c1d7e813e2f8d1c9a58308aee4a680323066025e53132218bd91" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.114", -] - [[package]] name = 
"visit-rs" version = "0.1.9" @@ -10141,15 +7640,9 @@ checksum = "2a3bfb04fd13da4fc8df24709b7a0949667f43c63691d9fecddf1d3be8af5099" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] -[[package]] -name = "void" -version = "1.0.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" - [[package]] name = "vte" version = "0.14.1" @@ -10208,21 +7701,21 @@ dependencies = [ "wit-bindgen", ] +[[package]] +name = "wasip3" +version = "0.4.0+wasi-0.3.0-rc-2026-01-06" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5428f8bf88ea5ddc08faddef2ac4a67e390b88186c703ce6dbd955e1c145aca5" +dependencies = [ + "wit-bindgen", +] + [[package]] name = "wasite" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" -[[package]] -name = "wasix" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1757e0d1f8456693c7e5c6c629bdb54884e032aa0bb53c155f6a39f94440d332" -dependencies = [ - "wasi 0.11.1+wasi-snapshot-preview1", -] - [[package]] name = "wasm-bindgen" version = "0.2.108" @@ -10269,7 +7762,7 @@ dependencies = [ "bumpalo", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", "wasm-bindgen-shared", ] @@ -10282,6 +7775,28 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "wasm-encoder" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "990065f2fe63003fe337b932cfb5e3b80e0b4d0f5ff650e6985b1048f62c8319" +dependencies = [ + "leb128fmt", + "wasmparser", +] + +[[package]] +name = "wasm-metadata" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909" +dependencies = [ + "anyhow", + "indexmap 2.13.0", + "wasm-encoder", + 
"wasmparser", +] + [[package]] name = "wasm-streams" version = "0.4.2" @@ -10295,6 +7810,18 @@ dependencies = [ "web-sys", ] +[[package]] +name = "wasmparser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe" +dependencies = [ + "bitflags 2.10.0", + "hashbrown 0.15.5", + "indexmap 2.13.0", + "semver", +] + [[package]] name = "wayland-client" version = "0.29.5" @@ -10302,7 +7829,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f3b068c05a039c9f755f881dc50f01732214f5685e379829759088967c46715" dependencies = [ "bitflags 1.3.2", - "downcast-rs 1.2.1", + "downcast-rs", "libc", "nix 0.24.3", "scoped-tls", @@ -10368,12 +7895,6 @@ dependencies = [ "pkg-config", ] -[[package]] -name = "weak-table" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "323f4da9523e9a669e1eaf9c6e763892769b1d38c623913647bfdc1532fe4549" - [[package]] name = "web-sys" version = "0.3.85" @@ -10396,9 +7917,9 @@ dependencies = [ [[package]] name = "webpki-root-certs" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36a29fc0408b113f68cf32637857ab740edfafdf460c326cd2afaa2d84cc05dc" +checksum = "804f18a4ac2676ffb4e8b5b5fa9ae38af06df08162314f96a68d2a363e21a8ca" dependencies = [ "rustls-pki-types", ] @@ -10409,14 +7930,14 @@ version = "0.26.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "521bc38abb08001b01866da9f51eb7c5d647a19260e00054a8c7fd5f9e57f7a9" dependencies = [ - "webpki-roots 1.0.5", + "webpki-roots 1.0.6", ] [[package]] name = "webpki-roots" -version = "1.0.5" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12bed680863276c63889429bfd6cab3b99943659923822de1c8a39c49e4d722c" +checksum = "22cfaf3c063993ff62e73cb4311efde4db1efb31ab78a3e5c457939ad5cc0bed" 
dependencies = [ "rustls-pki-types", ] @@ -10489,41 +8010,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows" -version = "0.61.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9babd3a767a4c1aef6900409f85f5d53ce2544ccdfaa86dad48c91782c6d6893" -dependencies = [ - "windows-collections", - "windows-core 0.61.2", - "windows-future", - "windows-link 0.1.3", - "windows-numerics", -] - -[[package]] -name = "windows-collections" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3beeceb5e5cfd9eb1d76b381630e82c4241ccd0d27f1a39ed41b2760b255c5e8" -dependencies = [ - "windows-core 0.61.2", -] - -[[package]] -name = "windows-core" -version = "0.61.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" -dependencies = [ - "windows-implement", - "windows-interface", - "windows-link 0.1.3", - "windows-result 0.3.4", - "windows-strings 0.4.2", -] - [[package]] name = "windows-core" version = "0.62.2" @@ -10532,20 +8018,9 @@ checksum = "b8e83a14d34d0623b51dce9581199302a221863196a1dde71a7663a4c2be9deb" dependencies = [ "windows-implement", "windows-interface", - "windows-link 0.2.1", - "windows-result 0.4.1", - "windows-strings 0.5.1", -] - -[[package]] -name = "windows-future" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc6a41e98427b19fe4b73c550f060b59fa592d7d686537eebf9385621bfbad8e" -dependencies = [ - "windows-core 0.61.2", - "windows-link 0.1.3", - "windows-threading", + "windows-link", + "windows-result", + "windows-strings", ] [[package]] @@ -10556,7 +8031,7 @@ checksum = "053e2e040ab57b9dc951b72c264860db7eb3b0200ba345b4e4c3b14f67855ddf" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] 
[[package]] @@ -10567,49 +8042,24 @@ checksum = "3f316c4a2570ba26bbec722032c4099d8c8bc095efccdc15688708623367e358" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] -[[package]] -name = "windows-link" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" - [[package]] name = "windows-link" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" -[[package]] -name = "windows-numerics" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9150af68066c4c5c07ddc0ce30421554771e528bde427614c61038bc2c92c2b1" -dependencies = [ - "windows-core 0.61.2", - "windows-link 0.1.3", -] - [[package]] name = "windows-registry" version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "02752bf7fbdcce7f2a27a742f798510f3e5ad88dbe84871e5168e2120c3d5720" dependencies = [ - "windows-link 0.2.1", - "windows-result 0.4.1", - "windows-strings 0.5.1", -] - -[[package]] -name = "windows-result" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" -dependencies = [ - "windows-link 0.1.3", + "windows-link", + "windows-result", + "windows-strings", ] [[package]] @@ -10618,16 +8068,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7781fa89eaf60850ac3d2da7af8e5242a5ea78d1a11c49bf2910bb5a73853eb5" dependencies = [ - "windows-link 0.2.1", -] - -[[package]] -name = "windows-strings" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" -dependencies = [ - "windows-link 0.1.3", + "windows-link", ] [[package]] @@ -10636,7 
+8077,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7837d08f69c77cf6b07689544538e017c1bfcf57e34b4c0ff58e6c2cd3b37091" dependencies = [ - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -10690,7 +8131,7 @@ version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -10745,7 +8186,7 @@ version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows-link 0.2.1", + "windows-link", "windows_aarch64_gnullvm 0.53.1", "windows_aarch64_msvc 0.53.1", "windows_i686_gnu 0.53.1", @@ -10756,15 +8197,6 @@ dependencies = [ "windows_x86_64_msvc 0.53.1", ] -[[package]] -name = "windows-threading" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b66463ad2e0ea3bbf808b7f1d371311c80e115c0b71d60efc142cafbcfb057a6" -dependencies = [ - "windows-link 0.1.3", -] - [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -10978,6 +8410,88 @@ name = "wit-bindgen" version = "0.51.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7249219f66ced02969388cf2bb044a09756a083d0fab1e566056b04d9fbcaa5" +dependencies = [ + "wit-bindgen-rust-macro", +] + +[[package]] +name = "wit-bindgen-core" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ea61de684c3ea68cb082b7a88508a8b27fcc8b797d738bfc99a82facf1d752dc" +dependencies = [ + "anyhow", + "heck 0.5.0", + "wit-parser", +] + +[[package]] +name = "wit-bindgen-rust" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21" +dependencies = [ + "anyhow", + 
"heck 0.5.0", + "indexmap 2.13.0", + "prettyplease", + "syn 2.0.115", + "wasm-metadata", + "wit-bindgen-core", + "wit-component", +] + +[[package]] +name = "wit-bindgen-rust-macro" +version = "0.51.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c0f9bfd77e6a48eccf51359e3ae77140a7f50b1e2ebfe62422d8afdaffab17a" +dependencies = [ + "anyhow", + "prettyplease", + "proc-macro2", + "quote", + "syn 2.0.115", + "wit-bindgen-core", + "wit-bindgen-rust", +] + +[[package]] +name = "wit-component" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2" +dependencies = [ + "anyhow", + "bitflags 2.10.0", + "indexmap 2.13.0", + "log", + "serde", + "serde_derive", + "serde_json", + "wasm-encoder", + "wasm-metadata", + "wasmparser", + "wit-parser", +] + +[[package]] +name = "wit-parser" +version = "0.244.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736" +dependencies = [ + "anyhow", + "id-arena", + "indexmap 2.13.0", + "log", + "semver", + "serde", + "serde_derive", + "serde_json", + "unicode-xid", + "wasmparser", +] [[package]] name = "writeable" @@ -11049,9 +8563,9 @@ version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fcbc162f30700d6f3f82a24bf7cc62ffe7caea42c0b2cba8bf7f3ae50cf51f69" dependencies = [ - "asn1-rs 0.6.2", + "asn1-rs", "data-encoding", - "der-parser 9.0.0", + "der-parser", "lazy_static", "nom 7.1.3", "oid-registry", @@ -11166,15 +8680,15 @@ checksum = "b659052874eb698efe5b9e8cf382204678a0086ebf46982b79d6ca3182927e5d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", "synstructure", ] [[package]] name = "zbus" -version = "5.13.1" +version = "5.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"17f79257df967b6779afa536788657777a0001f5b42524fcaf5038d4344df40b" +checksum = "1bfeff997a0aaa3eb20c4652baf788d2dfa6d2839a0ead0b3ff69ce2f9c4bdd1" dependencies = [ "async-broadcast", "async-executor", @@ -11207,14 +8721,14 @@ dependencies = [ [[package]] name = "zbus_macros" -version = "5.13.1" +version = "5.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aad23e2d2f91cae771c7af7a630a49e755f1eb74f8a46e9f6d5f7a146edf5a37" +checksum = "0bbd5a90dbe8feee5b13def448427ae314ccd26a49cac47905cafefb9ff846f1" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", "zbus_names", "zvariant", "zvariant_utils", @@ -11233,22 +8747,22 @@ dependencies = [ [[package]] name = "zerocopy" -version = "0.8.33" +version = "0.8.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "668f5168d10b9ee831de31933dc111a459c97ec93225beb307aed970d1372dfd" +checksum = "db6d35d663eadb6c932438e763b262fe1a70987f9ae936e60158176d710cae4a" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.33" +version = "0.8.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c7962b26b0a8685668b671ee4b54d007a67d4eaf05fda79ac0ecf41e32270f1" +checksum = "4122cd3169e94605190e77839c9a40d40ed048d305bfdc146e7df40ab0f3e517" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -11268,7 +8782,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", "synstructure", ] @@ -11289,7 +8803,7 @@ checksum = "85a5b4158499876c763cb03bc4e49185d3cccbabb15b33c627f7884f43db852e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] @@ -11309,7 +8823,6 @@ version = "0.11.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"6c28719294829477f525be0186d13efa9a3c602f7ec202ca9e353d310fb9a002" dependencies = [ - "serde", "yoke", "zerofrom", "zerovec-derive", @@ -11323,14 +8836,14 @@ checksum = "eadce39539ca5cb3985590102671f2567e659fca9666581ad3411d59207951f3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", ] [[package]] name = "zmij" -version = "1.0.14" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd8f3f50b848df28f887acb68e41201b5aea6bc8a8dacc00fb40635ff9a72fea" +checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa" [[package]] name = "zstd" @@ -11362,9 +8875,9 @@ dependencies = [ [[package]] name = "zvariant" -version = "5.9.1" +version = "5.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "326aaed414f04fe839777b4c443d4e94c74e7b3621093bd9c5e649ac8aa96543" +checksum = "68b64ef4f40c7951337ddc7023dd03528a57a3ce3408ee9da5e948bd29b232c4" dependencies = [ "endi", "enumflags2", @@ -11376,14 +8889,14 @@ dependencies = [ [[package]] name = "zvariant_derive" -version = "5.9.1" +version = "5.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba44e1f8f4da9e6e2d25d2a60b116ef8b9d0be174a7685e55bb12a99866279a7" +checksum = "484d5d975eb7afb52cc6b929c13d3719a20ad650fea4120e6310de3fc55e415c" dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.114", + "syn 2.0.115", "zvariant_utils", ] @@ -11396,6 +8909,6 @@ dependencies = [ "proc-macro2", "quote", "serde", - "syn 2.0.114", + "syn 2.0.115", "winnow", ] diff --git a/core/Cargo.toml b/core/Cargo.toml index 0256ac433..9937dfaa1 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -15,7 +15,7 @@ license = "MIT" name = "start-os" readme = "README.md" repository = "https://github.com/Start9Labs/start-os" -version = "0.4.0-alpha.19" # VERSION_BUMP +version = "0.4.0-alpha.20" # VERSION_BUMP [lib] name = "startos" @@ -42,17 +42,6 @@ name = "tunnelbox" path = 
"src/main/tunnelbox.rs" [features] -arti = [ - "arti-client", - "safelog", - "tor-cell", - "tor-hscrypto", - "tor-hsservice", - "tor-keymgr", - "tor-llcrypto", - "tor-proto", - "tor-rtcompat", -] beta = [] console = ["console-subscriber", "tokio/tracing"] default = [] @@ -62,16 +51,6 @@ unstable = ["backtrace-on-stack-overflow"] [dependencies] aes = { version = "0.7.5", features = ["ctr"] } -arti-client = { version = "0.33", features = [ - "compression", - "ephemeral-keystore", - "experimental-api", - "onion-service-client", - "onion-service-service", - "rustls", - "static", - "tokio", -], default-features = false, git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } async-acme = { version = "0.6.0", git = "https://github.com/dr-bonez/async-acme.git", features = [ "use_rustls", "use_tokio", @@ -100,7 +79,6 @@ console-subscriber = { version = "0.5.0", optional = true } const_format = "0.2.34" cookie = "0.18.0" cookie_store = "0.22.0" -curve25519-dalek = "4.1.3" der = { version = "0.7.9", features = ["derive", "pem"] } digest = "0.10.7" divrem = "1.0.0" @@ -216,7 +194,6 @@ rpassword = "7.2.0" rust-argon2 = "3.0.0" rust-i18n = "3.1.5" rpc-toolkit = { git = "https://github.com/Start9Labs/rpc-toolkit.git" } -safelog = { version = "0.4.8", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } semver = { version = "1.0.20", features = ["serde"] } serde = { version = "1.0", features = ["derive", "rc"] } serde_cbor = { package = "ciborium", version = "0.2.1" } @@ -244,23 +221,6 @@ tokio-stream = { version = "0.1.14", features = ["io-util", "net", "sync"] } tokio-tar = { git = "https://github.com/dr-bonez/tokio-tar.git" } tokio-tungstenite = { version = "0.26.2", features = ["native-tls", "url"] } tokio-util = { version = "0.7.9", features = ["io"] } -tor-cell = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } 
-tor-hscrypto = { version = "0.33", features = [ - "full", -], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } -tor-hsservice = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } -tor-keymgr = { version = "0.33", features = [ - "ephemeral-keystore", -], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } -tor-llcrypto = { version = "0.33", features = [ - "full", -], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } -tor-proto = { version = "0.33", git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } -tor-rtcompat = { version = "0.33", features = [ - "rustls", - "tokio", -], git = "https://github.com/Start9Labs/arti.git", branch = "patch/disable-exit", optional = true } -torut = "0.2.1" tower-service = "0.3.3" tracing = "0.1.39" tracing-error = "0.2.0" diff --git a/core/README.md b/core/README.md index 1a66748f6..c4f97a2c3 100644 --- a/core/README.md +++ b/core/README.md @@ -22,9 +22,7 @@ several different names for different behavior: - `start-sdk`: This is a CLI tool that aids in building and packaging services you wish to deploy to StartOS -## Questions +## Documentation -If you have questions about how various pieces of the backend system work. 
Open -an issue and tag the following people - -- dr-bonez +- [ARCHITECTURE.md](ARCHITECTURE.md) — Backend architecture, modules, and patterns +- [CONTRIBUTING.md](CONTRIBUTING.md) — How to contribute to core diff --git a/core/build/build-ts.sh b/core/build/build-ts.sh index ad808310a..9bed2f19f 100755 --- a/core/build/build-ts.sh +++ b/core/build/build-ts.sh @@ -7,11 +7,11 @@ source ./builder-alias.sh set -ea shopt -s expand_aliases -PROFILE=${PROFILE:-release} +PROFILE=${PROFILE:-debug} if [ "${PROFILE}" = "release" ]; then BUILD_FLAGS="--release" else - if [ "$PROFILE" != "debug"]; then + if [ "$PROFILE" != "debug" ]; then >&2 echo "Unknown profile $PROFILE: falling back to debug..." PROFILE=debug fi @@ -38,7 +38,7 @@ if [[ "${ENVIRONMENT}" =~ (^|-)console($|-) ]]; then fi echo "FEATURES=\"$FEATURES\"" echo "RUSTFLAGS=\"$RUSTFLAGS\"" -rust-zig-builder cargo test --manifest-path=./core/Cargo.toml $BUILD_FLAGS --features test,$FEATURES --locked 'export_bindings_' +rust-zig-builder cargo test --manifest-path=./core/Cargo.toml --lib $BUILD_FLAGS --features test,$FEATURES --locked 'export_bindings_' if [ "$(ls -nd "core/bindings" | awk '{ print $3 }')" != "$UID" ]; then rust-zig-builder sh -c "chown -R $UID:$UID core/target && chown -R $UID:$UID core/bindings && chown -R $UID:$UID /usr/local/cargo" fi \ No newline at end of file diff --git a/agents/core-rust-patterns.md b/core/core-rust-patterns.md similarity index 100% rename from agents/core-rust-patterns.md rename to core/core-rust-patterns.md diff --git a/agents/i18n-patterns.md b/core/i18n-patterns.md similarity index 100% rename from agents/i18n-patterns.md rename to core/i18n-patterns.md diff --git a/core/locales/i18n.yaml b/core/locales/i18n.yaml index 7f6cd150e..45a75fdc9 100644 --- a/core/locales/i18n.yaml +++ b/core/locales/i18n.yaml @@ -197,6 +197,13 @@ setup.transferring-data: fr_FR: "Transfert de données" pl_PL: "Przesyłanie danych" +setup.password-required: + en_US: "Password is required for fresh 
setup" + de_DE: "Passwort ist für die Ersteinrichtung erforderlich" + es_ES: "Se requiere contraseña para la configuración inicial" + fr_FR: "Le mot de passe est requis pour la première configuration" + pl_PL: "Hasło jest wymagane do nowej konfiguracji" + # system.rs system.governor-not-available: en_US: "Governor %{governor} not available" @@ -994,6 +1001,27 @@ disk.mount.binding: fr_FR: "Liaison de %{src} à %{dst}" pl_PL: "Wiązanie %{src} do %{dst}" +hostname.empty: + en_US: "Hostname cannot be empty" + de_DE: "Der Hostname darf nicht leer sein" + es_ES: "El nombre de host no puede estar vacío" + fr_FR: "Le nom d'hôte ne peut pas être vide" + pl_PL: "Nazwa hosta nie może być pusta" + +hostname.invalid-character: + en_US: "Invalid character in hostname: %{char}" + de_DE: "Ungültiges Zeichen im Hostnamen: %{char}" + es_ES: "Carácter no válido en el nombre de host: %{char}" + fr_FR: "Caractère invalide dans le nom d'hôte : %{char}" + pl_PL: "Nieprawidłowy znak w nazwie hosta: %{char}" + +hostname.must-provide-name-or-hostname: + en_US: "Must provide at least one of: name, hostname" + de_DE: "Es muss mindestens eines angegeben werden: name, hostname" + es_ES: "Se debe proporcionar al menos uno de: name, hostname" + fr_FR: "Vous devez fournir au moins l'un des éléments suivants : name, hostname" + pl_PL: "Należy podać co najmniej jedno z: name, hostname" + # init.rs init.running-preinit: en_US: "Running preinit.sh" @@ -1243,6 +1271,21 @@ backup.target.cifs.target-not-found-id: fr_FR: "ID de cible de sauvegarde %{id} non trouvé" pl_PL: "Nie znaleziono ID celu kopii zapasowej %{id}" +# service/effects/net/plugin.rs +net.plugin.manifest-missing-plugin: + en_US: "manifest does not declare the \"%{plugin}\" plugin" + de_DE: "Manifest deklariert das Plugin \"%{plugin}\" nicht" + es_ES: "el manifiesto no declara el plugin \"%{plugin}\"" + fr_FR: "le manifeste ne déclare pas le plugin \"%{plugin}\"" + pl_PL: "manifest nie deklaruje wtyczki \"%{plugin}\"" + 
+net.plugin.binding-not-found: + en_US: "binding not found: %{binding}" + de_DE: "Bindung nicht gefunden: %{binding}" + es_ES: "enlace no encontrado: %{binding}" + fr_FR: "liaison introuvable : %{binding}" + pl_PL: "powiązanie nie znalezione: %{binding}" + # net/ssl.rs net.ssl.unreachable: en_US: "unreachable" @@ -1790,6 +1833,28 @@ registry.package.remove-mirror.unauthorized: fr_FR: "Non autorisé" pl_PL: "Brak autoryzacji" +# registry/package/index.rs +registry.package.index.metadata-mismatch: + en_US: "package metadata mismatch: remove the existing version first, then re-add" + de_DE: "Paketmetadaten stimmen nicht überein: vorhandene Version zuerst entfernen, dann erneut hinzufügen" + es_ES: "discrepancia de metadatos del paquete: elimine la versión existente primero, luego vuelva a agregarla" + fr_FR: "discordance des métadonnées du paquet : supprimez d'abord la version existante, puis ajoutez-la à nouveau" + pl_PL: "niezgodność metadanych pakietu: najpierw usuń istniejącą wersję, a następnie dodaj ponownie" + +registry.package.index.icon-mismatch: + en_US: "package icon mismatch: remove the existing version first, then re-add" + de_DE: "Paketsymbol stimmt nicht überein: vorhandene Version zuerst entfernen, dann erneut hinzufügen" + es_ES: "discrepancia del icono del paquete: elimine la versión existente primero, luego vuelva a agregarla" + fr_FR: "discordance de l'icône du paquet : supprimez d'abord la version existante, puis ajoutez-la à nouveau" + pl_PL: "niezgodność ikony pakietu: najpierw usuń istniejącą wersję, a następnie dodaj ponownie" + +registry.package.index.dependency-metadata-mismatch: + en_US: "dependency metadata mismatch: remove the existing version first, then re-add" + de_DE: "Abhängigkeitsmetadaten stimmen nicht überein: vorhandene Version zuerst entfernen, dann erneut hinzufügen" + es_ES: "discrepancia de metadatos de dependencia: elimine la versión existente primero, luego vuelva a agregarla" + fr_FR: "discordance des métadonnées de 
dépendance : supprimez d'abord la version existante, puis ajoutez-la à nouveau" + pl_PL: "niezgodność metadanych zależności: najpierw usuń istniejącą wersję, a następnie dodaj ponownie" + # registry/package/get.rs registry.package.get.version-not-found: en_US: "Could not find a version of %{id} that satisfies %{version}" @@ -3087,7 +3152,7 @@ help.arg.smtp-from: fr_FR: "Adresse de l'expéditeur" pl_PL: "Adres nadawcy e-mail" -help.arg.smtp-login: +help.arg.smtp-username: en_US: "SMTP authentication username" de_DE: "SMTP-Authentifizierungsbenutzername" es_ES: "Nombre de usuario de autenticación SMTP" @@ -3108,13 +3173,20 @@ help.arg.smtp-port: fr_FR: "Port du serveur SMTP" pl_PL: "Port serwera SMTP" -help.arg.smtp-server: +help.arg.smtp-host: en_US: "SMTP server hostname" de_DE: "SMTP-Server-Hostname" es_ES: "Nombre de host del servidor SMTP" fr_FR: "Nom d'hôte du serveur SMTP" pl_PL: "Nazwa hosta serwera SMTP" +help.arg.smtp-security: + en_US: "Connection security mode (starttls or tls)" + de_DE: "Verbindungssicherheitsmodus (starttls oder tls)" + es_ES: "Modo de seguridad de conexión (starttls o tls)" + fr_FR: "Mode de sécurité de connexion (starttls ou tls)" + pl_PL: "Tryb zabezpieczeń połączenia (starttls lub tls)" + help.arg.smtp-to: en_US: "Email recipient address" de_DE: "E-Mail-Empfängeradresse" @@ -3612,6 +3684,13 @@ help.arg.s9pk-file-path: fr_FR: "Chemin vers le fichier de paquet s9pk" pl_PL: "Ścieżka do pliku pakietu s9pk" +help.arg.s9pk-file-paths: + en_US: "Paths to s9pk package files" + de_DE: "Pfade zu s9pk-Paketdateien" + es_ES: "Rutas a los archivos de paquete s9pk" + fr_FR: "Chemins vers les fichiers de paquet s9pk" + pl_PL: "Ścieżki do plików pakietów s9pk" + help.arg.session-ids: en_US: "Session identifiers" de_DE: "Sitzungskennungen" @@ -3935,6 +4014,13 @@ about.allow-gateway-infer-inbound-access-from-wan: fr_FR: "Permettre à cette passerelle de déduire si elle a un accès entrant depuis le WAN en fonction de son adresse IPv4" pl_PL: "Pozwól tej 
bramce wywnioskować, czy ma dostęp przychodzący z WAN na podstawie adresu IPv4" +about.apply-available-update: + en_US: "Apply available update" + de_DE: "Verfügbares Update anwenden" + es_ES: "Aplicar actualización disponible" + fr_FR: "Appliquer la mise à jour disponible" + pl_PL: "Zastosuj dostępną aktualizację" + about.calculate-blake3-hash-for-file: en_US: "Calculate blake3 hash for a file" de_DE: "Blake3-Hash für eine Datei berechnen" @@ -3949,6 +4035,20 @@ about.cancel-install-package: fr_FR: "Annuler l'installation d'un paquet" pl_PL: "Anuluj instalację pakietu" +about.check-dns-configuration: + en_US: "Check DNS configuration for a gateway" + de_DE: "DNS-Konfiguration für ein Gateway prüfen" + es_ES: "Verificar la configuración DNS de un gateway" + fr_FR: "Vérifier la configuration DNS d'une passerelle" + pl_PL: "Sprawdź konfigurację DNS bramy" + +about.check-for-updates: + en_US: "Check for available updates" + de_DE: "Nach verfügbaren Updates suchen" + es_ES: "Buscar actualizaciones disponibles" + fr_FR: "Vérifier les mises à jour disponibles" + pl_PL: "Sprawdź dostępne aktualizacje" + about.check-update-startos: en_US: "Check a given registry for StartOS updates and update if available" de_DE: "Ein bestimmtes Registry auf StartOS-Updates prüfen und bei Verfügbarkeit aktualisieren" @@ -4887,6 +4987,13 @@ about.publish-s9pk: fr_FR: "Publier s9pk dans le bucket S3 et indexer dans le registre" pl_PL: "Opublikuj s9pk do bucketu S3 i zindeksuj w rejestrze" +about.select-s9pk-for-device: + en_US: "Select the best compatible s9pk for a target device" + de_DE: "Das beste kompatible s9pk für ein Zielgerät auswählen" + es_ES: "Seleccionar el s9pk más compatible para un dispositivo destino" + fr_FR: "Sélectionner le meilleur s9pk compatible pour un appareil cible" + pl_PL: "Wybierz najlepiej kompatybilny s9pk dla urządzenia docelowego" + about.rebuild-service-container: en_US: "Rebuild service container" de_DE: "Dienst-Container neu erstellen" @@ -5139,6 +5246,13 
@@ about.set-country: fr_FR: "Définir le pays" pl_PL: "Ustaw kraj" +about.set-hostname: + en_US: "Set the server hostname" + de_DE: "Den Server-Hostnamen festlegen" + es_ES: "Establecer el nombre de host del servidor" + fr_FR: "Définir le nom d'hôte du serveur" + pl_PL: "Ustaw nazwę hosta serwera" + about.set-gateway-enabled-for-binding: en_US: "Set gateway enabled for binding" de_DE: "Gateway für Bindung aktivieren" diff --git a/core/patchdb.md b/core/patchdb.md new file mode 100644 index 000000000..b35bf221e --- /dev/null +++ b/core/patchdb.md @@ -0,0 +1,105 @@ +# Patch-DB Patterns + +## Model and HasModel + +Types stored in the database derive `HasModel`, which generates typed accessor methods on `Model`: + +```rust +#[derive(Debug, Deserialize, Serialize, HasModel)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +pub struct ServerInfo { + pub version: Version, + pub network: NetworkInfo, + // ... +} +``` + +**Generated accessors** (one per field): +- `as_version()` — `&Model` +- `as_version_mut()` — `&mut Model` +- `into_version()` — `Model` + +**`Model` APIs:** +- `.de()` — Deserialize to `T` +- `.ser(&value)` — Serialize from `T` +- `.mutate(|v| ...)` — Deserialize, mutate, reserialize +- For maps: `.keys()`, `.as_idx(&key)`, `.insert()`, `.remove()`, `.contains_key()` + +## Database Access + +```rust +// Read-only snapshot +let snap = db.peek().await; +let version = snap.as_public().as_server_info().as_version().de()?; + +// Atomic mutation +db.mutate(|db| { + db.as_public_mut().as_server_info_mut().as_version_mut().ser(&new_version)?; + Ok(()) +}).await; +``` + +## TypedDbWatch + +Watch a JSON pointer path for changes and deserialize as a typed value. Requires `T: HasModel`. 
+ +### Construction + +```rust +use patch_db::json_ptr::JsonPointer; + +let ptr: JsonPointer = "/public/serverInfo".parse().unwrap(); +let mut watch = db.watch(ptr).await.typed::(); +``` + +### API + +- `watch.peek()?.de()?` — Get current value as `T` +- `watch.changed().await?` — Wait until the watched path changes +- `watch.peek()?.as_field().de()?` — Access nested fields via `HasModel` accessors + +### Usage Patterns + +**Wait for a condition, then proceed:** + +```rust +// Wait for DB version to match current OS version +let current = Current::default().semver(); +let mut watch = db + .watch("/public/serverInfo".parse().unwrap()) + .await + .typed::(); +loop { + let server_info = watch.peek()?.de()?; + if server_info.version == current { + break; + } + watch.changed().await?; +} +``` + +**React to changes in a loop:** + +```rust +// From net_controller.rs — react to host changes +let mut watch = db + .watch("/public/serverInfo/network/host".parse().unwrap()) + .await + .typed::(); +loop { + if let Err(e) = watch.changed().await { + tracing::error!("DB watch disconnected: {e}"); + break; + } + let host = watch.peek()?.de()?; + // ... process host ... +} +``` + +### Real Examples + +- `net_controller.rs:469` — Watch `Hosts` for package network changes +- `net_controller.rs:493` — Watch `Host` for main UI network changes +- `service_actor.rs:37` — Watch `StatusInfo` for service state transitions +- `gateway.rs:1212` — Wait for DB migrations to complete before syncing diff --git a/agents/rpc-toolkit.md b/core/rpc-toolkit.md similarity index 96% rename from agents/rpc-toolkit.md rename to core/rpc-toolkit.md index 933c345b6..a1499dc29 100644 --- a/agents/rpc-toolkit.md +++ b/core/rpc-toolkit.md @@ -21,6 +21,14 @@ pub async fn my_handler(ctx: RpcContext, params: MyParams) -> Result Result { + // ... +} +``` + ### `from_fn_async_local` - Non-thread-safe async handlers For async functions that are not `Send` (cannot be safely moved between threads). 
Use when working with non-thread-safe types. @@ -181,9 +189,9 @@ pub struct MyParams { ### Adding a New RPC Endpoint -1. Define params struct with `Deserialize, Serialize, Parser, TS` +1. Define params struct with `Deserialize, Serialize, Parser, TS` (skip if no params needed) 2. Choose handler type based on sync/async and thread-safety -3. Write handler function taking `(Context, Params) -> Result` +3. Write handler function taking `(Context, Params) -> Result` (omit Params if none needed) 4. Add to parent handler with appropriate extensions (display modifiers before `with_about`) 5. TypeScript types auto-generated via `make ts-bindings` diff --git a/agents/s9pk-structure.md b/core/s9pk-structure.md similarity index 100% rename from agents/s9pk-structure.md rename to core/s9pk-structure.md diff --git a/core/src/account.rs b/core/src/account.rs index d583c95f7..f80cc951c 100644 --- a/core/src/account.rs +++ b/core/src/account.rs @@ -6,9 +6,8 @@ use openssl::pkey::{PKey, Private}; use openssl::x509::X509; use crate::db::model::DatabaseModel; -use crate::hostname::{Hostname, generate_hostname, generate_id}; +use crate::hostname::{ServerHostnameInfo, generate_hostname, generate_id}; use crate::net::ssl::{gen_nistp256, make_root_cert}; -use crate::net::tor::TorSecretKey; use crate::prelude::*; use crate::util::serde::Pem; @@ -24,21 +23,27 @@ fn hash_password(password: &str) -> Result { #[derive(Clone)] pub struct AccountInfo { pub server_id: String, - pub hostname: Hostname, + pub hostname: ServerHostnameInfo, pub password: String, - pub tor_keys: Vec, pub root_ca_key: PKey, pub root_ca_cert: X509, pub ssh_key: ssh_key::PrivateKey, pub developer_key: ed25519_dalek::SigningKey, } impl AccountInfo { - pub fn new(password: &str, start_time: SystemTime) -> Result { + pub fn new( + password: &str, + start_time: SystemTime, + hostname: Option, + ) -> Result { let server_id = generate_id(); - let hostname = generate_hostname(); - let tor_key = vec![TorSecretKey::generate()]; 
+ let hostname = if let Some(h) = hostname { + h + } else { + ServerHostnameInfo::from_hostname(generate_hostname()) + }; let root_ca_key = gen_nistp256()?; - let root_ca_cert = make_root_cert(&root_ca_key, &hostname, start_time)?; + let root_ca_cert = make_root_cert(&root_ca_key, &hostname.hostname, start_time)?; let ssh_key = ssh_key::PrivateKey::from(ssh_key::private::Ed25519Keypair::random( &mut ssh_key::rand_core::OsRng::default(), )); @@ -48,7 +53,6 @@ impl AccountInfo { server_id, hostname, password: hash_password(password)?, - tor_keys: tor_key, root_ca_key, root_ca_cert, ssh_key, @@ -58,20 +62,9 @@ impl AccountInfo { pub fn load(db: &DatabaseModel) -> Result { let server_id = db.as_public().as_server_info().as_id().de()?; - let hostname = Hostname(db.as_public().as_server_info().as_hostname().de()?); + let hostname = ServerHostnameInfo::load(db.as_public().as_server_info())?; let password = db.as_private().as_password().de()?; let key_store = db.as_private().as_key_store(); - let tor_addrs = db - .as_public() - .as_server_info() - .as_network() - .as_host() - .as_onions() - .de()?; - let tor_keys = tor_addrs - .into_iter() - .map(|tor_addr| key_store.as_onion().get_key(&tor_addr)) - .collect::>()?; let cert_store = key_store.as_local_certs(); let root_ca_key = cert_store.as_root_key().de()?.0; let root_ca_cert = cert_store.as_root_cert().de()?.0; @@ -82,7 +75,6 @@ impl AccountInfo { server_id, hostname, password, - tor_keys, root_ca_key, root_ca_cert, ssh_key, @@ -93,21 +85,10 @@ impl AccountInfo { pub fn save(&self, db: &mut DatabaseModel) -> Result<(), Error> { let server_info = db.as_public_mut().as_server_info_mut(); server_info.as_id_mut().ser(&self.server_id)?; - server_info.as_hostname_mut().ser(&self.hostname.0)?; + self.hostname.save(server_info)?; server_info .as_pubkey_mut() .ser(&self.ssh_key.public_key().to_openssh()?)?; - server_info - .as_network_mut() - .as_host_mut() - .as_onions_mut() - .ser( - &self - .tor_keys - .iter() - .map(|tor_key| 
tor_key.onion_address()) - .collect(), - )?; server_info.as_password_hash_mut().ser(&self.password)?; db.as_private_mut().as_password_mut().ser(&self.password)?; db.as_private_mut() @@ -117,9 +98,6 @@ impl AccountInfo { .as_developer_key_mut() .ser(Pem::new_ref(&self.developer_key))?; let key_store = db.as_private_mut().as_key_store_mut(); - for tor_key in &self.tor_keys { - key_store.as_onion_mut().insert_key(tor_key)?; - } let cert_store = key_store.as_local_certs_mut(); if cert_store.as_root_cert().de()?.0 != self.root_ca_cert { cert_store @@ -145,14 +123,8 @@ impl AccountInfo { pub fn hostnames(&self) -> impl IntoIterator + Send + '_ { [ - self.hostname.no_dot_host_name(), - self.hostname.local_domain_name(), + (*self.hostname.hostname).clone(), + self.hostname.hostname.local_domain_name(), ] - .into_iter() - .chain( - self.tor_keys - .iter() - .map(|k| InternedString::from_display(&k.onion_address())), - ) } } diff --git a/core/src/action.rs b/core/src/action.rs index 1bd1986a2..5fa65c961 100644 --- a/core/src/action.rs +++ b/core/src/action.rs @@ -67,6 +67,10 @@ pub struct GetActionInputParams { pub package_id: PackageId, #[arg(help = "help.arg.action-id")] pub action_id: ActionId, + #[ts(type = "Record | null")] + #[serde(default)] + #[arg(skip)] + pub prefill: Option, } #[instrument(skip_all)] @@ -75,6 +79,7 @@ pub async fn get_action_input( GetActionInputParams { package_id, action_id, + prefill, }: GetActionInputParams, ) -> Result, Error> { ctx.services @@ -82,7 +87,7 @@ pub async fn get_action_input( .await .as_ref() .or_not_found(lazy_format!("Manager for {}", package_id))? 
- .get_action_input(Guid::new(), action_id) + .get_action_input(Guid::new(), action_id, prefill.unwrap_or(Value::Null)) .await } @@ -271,6 +276,7 @@ pub fn display_action_result( } #[derive(Deserialize, Serialize, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct RunActionParams { pub package_id: PackageId, @@ -362,6 +368,7 @@ pub async fn run_action( } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct ClearTaskParams { diff --git a/core/src/auth.rs b/core/src/auth.rs index f145149a8..402d5d9c2 100644 --- a/core/src/auth.rs +++ b/core/src/auth.rs @@ -418,6 +418,7 @@ impl AsLogoutSessionId for KillSessionId { } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct KillParams { @@ -435,6 +436,7 @@ pub async fn kill( } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct ResetPasswordParams { diff --git a/core/src/backup/backup_bulk.rs b/core/src/backup/backup_bulk.rs index 6a2d10dfd..722498f3c 100644 --- a/core/src/backup/backup_bulk.rs +++ b/core/src/backup/backup_bulk.rs @@ -30,6 +30,7 @@ use crate::util::serde::IoFormat; use crate::version::VersionT; #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct BackupParams { @@ -270,9 +271,9 @@ async fn perform_backup( package_backups.insert( id.clone(), PackageBackupInfo { - os_version: manifest.as_os_version().de()?, + os_version: manifest.as_metadata().as_os_version().de()?, version: manifest.as_version().de()?, - title: manifest.as_title().de()?, + title: manifest.as_metadata().as_title().de()?, timestamp: Utc::now(), }, ); @@ -337,7 +338,7 @@ async fn perform_backup( let timestamp = Utc::now(); backup_guard.unencrypted_metadata.version = 
crate::version::Current::default().semver().into(); - backup_guard.unencrypted_metadata.hostname = ctx.account.peek(|a| a.hostname.clone()); + backup_guard.unencrypted_metadata.hostname = ctx.account.peek(|a| a.hostname.hostname.clone()); backup_guard.unencrypted_metadata.timestamp = timestamp.clone(); backup_guard.metadata.version = crate::version::Current::default().semver().into(); backup_guard.metadata.timestamp = Some(timestamp); diff --git a/core/src/backup/mod.rs b/core/src/backup/mod.rs index 3e231afe2..ce2e72f3d 100644 --- a/core/src/backup/mod.rs +++ b/core/src/backup/mod.rs @@ -2,6 +2,7 @@ use std::collections::BTreeMap; use rpc_toolkit::{Context, HandlerExt, ParentHandler, from_fn_async}; use serde::{Deserialize, Serialize}; +use ts_rs::TS; use crate::PackageId; use crate::context::CliContext; @@ -13,19 +14,22 @@ pub mod os; pub mod restore; pub mod target; -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, TS)] +#[ts(export)] pub struct BackupReport { server: ServerBackupReport, packages: BTreeMap, } -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, TS)] +#[ts(export)] pub struct ServerBackupReport { attempted: bool, error: Option, } -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, TS)] +#[ts(export)] pub struct PackageBackupReport { pub error: Option, } diff --git a/core/src/backup/os.rs b/core/src/backup/os.rs index 380772b77..0d0f79a03 100644 --- a/core/src/backup/os.rs +++ b/core/src/backup/os.rs @@ -6,10 +6,8 @@ use serde::{Deserialize, Serialize}; use ssh_key::private::Ed25519Keypair; use crate::account::AccountInfo; -use crate::hostname::{Hostname, generate_hostname, generate_id}; -use crate::net::tor::TorSecretKey; +use crate::hostname::{ServerHostname, ServerHostnameInfo, generate_hostname, generate_id}; use crate::prelude::*; -use crate::util::crypto::ed25519_expand_key; use crate::util::serde::{Base32, Base64, Pem}; pub struct OsBackup { @@ 
-29,10 +27,12 @@ impl<'de> Deserialize<'de> for OsBackup { .map_err(serde::de::Error::custom)?, 1 => patch_db::value::from_value::(tagged.rest) .map_err(serde::de::Error::custom)? - .project(), + .project() + .map_err(serde::de::Error::custom)?, 2 => patch_db::value::from_value::(tagged.rest) .map_err(serde::de::Error::custom)? - .project(), + .project() + .map_err(serde::de::Error::custom)?, v => { return Err(serde::de::Error::custom(&format!( "Unknown backup version {v}" @@ -77,7 +77,7 @@ impl OsBackupV0 { Ok(OsBackup { account: AccountInfo { server_id: generate_id(), - hostname: generate_hostname(), + hostname: ServerHostnameInfo::from_hostname(generate_hostname()), password: Default::default(), root_ca_key: self.root_ca_key.0, root_ca_cert: self.root_ca_cert.0, @@ -85,10 +85,6 @@ impl OsBackupV0 { &mut ssh_key::rand_core::OsRng::default(), ssh_key::Algorithm::Ed25519, )?, - tor_keys: TorSecretKey::from_bytes(self.tor_key.0) - .ok() - .into_iter() - .collect(), developer_key: ed25519_dalek::SigningKey::generate( &mut ssh_key::rand_core::OsRng::default(), ), @@ -110,23 +106,19 @@ struct OsBackupV1 { ui: Value, // JSON Value } impl OsBackupV1 { - fn project(self) -> OsBackup { - OsBackup { + fn project(self) -> Result { + Ok(OsBackup { account: AccountInfo { server_id: self.server_id, - hostname: Hostname(self.hostname), + hostname: ServerHostnameInfo::from_hostname(ServerHostname::new(self.hostname)?), password: Default::default(), root_ca_key: self.root_ca_key.0, root_ca_cert: self.root_ca_cert.0, ssh_key: ssh_key::PrivateKey::from(Ed25519Keypair::from_seed(&self.net_key.0)), - tor_keys: TorSecretKey::from_bytes(ed25519_expand_key(&self.net_key.0)) - .ok() - .into_iter() - .collect(), developer_key: ed25519_dalek::SigningKey::from_bytes(&self.net_key), }, ui: self.ui, - } + }) } } @@ -140,34 +132,31 @@ struct OsBackupV2 { root_ca_key: Pem>, // PEM Encoded OpenSSL Key root_ca_cert: Pem, // PEM Encoded OpenSSL X509 Certificate ssh_key: Pem, // PEM Encoded OpenSSH 
Key - tor_keys: Vec, // Base64 Encoded Ed25519 Expanded Secret Key compat_s9pk_key: Pem, // PEM Encoded ED25519 Key ui: Value, // JSON Value } impl OsBackupV2 { - fn project(self) -> OsBackup { - OsBackup { + fn project(self) -> Result { + Ok(OsBackup { account: AccountInfo { server_id: self.server_id, - hostname: Hostname(self.hostname), + hostname: ServerHostnameInfo::from_hostname(ServerHostname::new(self.hostname)?), password: Default::default(), root_ca_key: self.root_ca_key.0, root_ca_cert: self.root_ca_cert.0, ssh_key: self.ssh_key.0, - tor_keys: self.tor_keys, developer_key: self.compat_s9pk_key.0, }, ui: self.ui, - } + }) } fn unproject(backup: &OsBackup) -> Self { Self { server_id: backup.account.server_id.clone(), - hostname: backup.account.hostname.0.clone(), + hostname: (*backup.account.hostname.hostname).clone(), root_ca_key: Pem(backup.account.root_ca_key.clone()), root_ca_cert: Pem(backup.account.root_ca_cert.clone()), ssh_key: Pem(backup.account.ssh_key.clone()), - tor_keys: backup.account.tor_keys.clone(), compat_s9pk_key: Pem(backup.account.developer_key.clone()), ui: backup.ui.clone(), } diff --git a/core/src/backup/restore.rs b/core/src/backup/restore.rs index 6f5d78eac..bc96d8823 100644 --- a/core/src/backup/restore.rs +++ b/core/src/backup/restore.rs @@ -17,6 +17,7 @@ use crate::db::model::Database; use crate::disk::mount::backup::BackupMountGuard; use crate::disk::mount::filesystem::ReadWrite; use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; +use crate::hostname::ServerHostnameInfo; use crate::init::init; use crate::prelude::*; use crate::progress::ProgressUnits; @@ -30,6 +31,7 @@ use crate::{PLATFORM, PackageId}; #[derive(Deserialize, Serialize, Parser, TS)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] +#[ts(export)] pub struct RestorePackageParams { #[arg(help = "help.arg.package-ids")] pub ids: Vec, @@ -84,11 +86,12 @@ pub async fn restore_packages_rpc( pub async fn recover_full_server( ctx: 
&SetupContext, disk_guid: InternedString, - password: String, + password: Option, recovery_source: TmpMountGuard, server_id: &str, recovery_password: &str, kiosk: Option, + hostname: Option, SetupExecuteProgress { init_phases, restore_phase, @@ -107,12 +110,18 @@ pub async fn recover_full_server( .with_ctx(|_| (ErrorKind::Filesystem, os_backup_path.display().to_string()))?, )?; - os_backup.account.password = argon2::hash_encoded( - password.as_bytes(), - &rand::random::<[u8; 16]>()[..], - &argon2::Config::rfc9106_low_mem(), - ) - .with_kind(ErrorKind::PasswordHashGeneration)?; + if let Some(password) = password { + os_backup.account.password = argon2::hash_encoded( + password.as_bytes(), + &rand::random::<[u8; 16]>()[..], + &argon2::Config::rfc9106_low_mem(), + ) + .with_kind(ErrorKind::PasswordHashGeneration)?; + } + + if let Some(h) = hostname { + os_backup.account.hostname = h; + } let kiosk = Some(kiosk.unwrap_or(true)).filter(|_| &*PLATFORM != "raspberrypi"); sync_kiosk(kiosk).await?; @@ -182,7 +191,7 @@ pub async fn recover_full_server( Ok(( SetupResult { - hostname: os_backup.account.hostname, + hostname: os_backup.account.hostname.hostname, root_ca: Pem(os_backup.account.root_ca_cert), needs_restart: ctx.install_rootfs.peek(|a| a.is_some()), }, diff --git a/core/src/backup/target/cifs.rs b/core/src/backup/target/cifs.rs index 0c22536a5..f5fa74862 100644 --- a/core/src/backup/target/cifs.rs +++ b/core/src/backup/target/cifs.rs @@ -36,7 +36,8 @@ impl Map for CifsTargets { } } -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct CifsBackupTarget { hostname: String, @@ -72,9 +73,10 @@ pub fn cifs() -> ParentHandler { } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] -pub struct AddParams { +pub struct CifsAddParams { #[arg(help = "help.arg.cifs-hostname")] pub hostname: String, 
#[arg(help = "help.arg.cifs-path")] @@ -87,12 +89,12 @@ pub struct AddParams { pub async fn add( ctx: RpcContext, - AddParams { + CifsAddParams { hostname, path, username, password, - }: AddParams, + }: CifsAddParams, ) -> Result, Error> { let cifs = Cifs { hostname, @@ -131,9 +133,10 @@ pub async fn add( } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] -pub struct UpdateParams { +pub struct CifsUpdateParams { #[arg(help = "help.arg.backup-target-id")] pub id: BackupTargetId, #[arg(help = "help.arg.cifs-hostname")] @@ -148,13 +151,13 @@ pub struct UpdateParams { pub async fn update( ctx: RpcContext, - UpdateParams { + CifsUpdateParams { id, hostname, path, username, password, - }: UpdateParams, + }: CifsUpdateParams, ) -> Result, Error> { let id = if let BackupTargetId::Cifs { id } = id { id @@ -207,14 +210,18 @@ pub async fn update( } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] -pub struct RemoveParams { +pub struct CifsRemoveParams { #[arg(help = "help.arg.backup-target-id")] pub id: BackupTargetId, } -pub async fn remove(ctx: RpcContext, RemoveParams { id }: RemoveParams) -> Result<(), Error> { +pub async fn remove( + ctx: RpcContext, + CifsRemoveParams { id }: CifsRemoveParams, +) -> Result<(), Error> { let id = if let BackupTargetId::Cifs { id } = id { id } else { diff --git a/core/src/backup/target/mod.rs b/core/src/backup/target/mod.rs index a5956eb65..86a006cdd 100644 --- a/core/src/backup/target/mod.rs +++ b/core/src/backup/target/mod.rs @@ -34,7 +34,8 @@ use crate::util::{FromStrParser, VersionString}; pub mod cifs; -#[derive(Debug, Deserialize, Serialize)] +#[derive(Debug, Deserialize, Serialize, TS)] +#[ts(export)] #[serde(tag = "type")] #[serde(rename_all = "camelCase")] pub enum BackupTarget { @@ -49,7 +50,7 @@ pub enum BackupTarget { } #[derive(Debug, PartialEq, Eq, 
PartialOrd, Ord, Clone, TS)] -#[ts(type = "string")] +#[ts(export, type = "string")] pub enum BackupTargetId { Disk { logicalname: PathBuf }, Cifs { id: u32 }, @@ -111,6 +112,7 @@ impl Serialize for BackupTargetId { } #[derive(Debug, Deserialize, Serialize, TS)] +#[ts(export)] #[serde(tag = "type")] #[serde(rename_all = "camelCase")] pub enum BackupTargetFS { @@ -210,20 +212,26 @@ pub async fn list(ctx: RpcContext) -> Result>, pub package_backups: BTreeMap, } -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct PackageBackupInfo { pub title: InternedString, pub version: VersionString, + #[ts(type = "string")] pub os_version: Version, + #[ts(type = "string")] pub timestamp: DateTime, } @@ -265,6 +273,7 @@ fn display_backup_info(params: WithIoFormat, info: BackupInfo) -> Re } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct InfoParams { @@ -387,6 +396,7 @@ pub async fn mount( } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct UmountParams { diff --git a/core/src/bins/start_init.rs b/core/src/bins/start_init.rs index 48e65f5af..5c53a6e0c 100644 --- a/core/src/bins/start_init.rs +++ b/core/src/bins/start_init.rs @@ -9,7 +9,7 @@ use crate::disk::fsck::RepairStrategy; use crate::disk::main::DEFAULT_PASSWORD; use crate::firmware::{check_for_firmware_update, update_firmware}; use crate::init::{InitPhases, STANDBY_MODE_PATH}; -use crate::net::gateway::UpgradableListener; +use crate::net::gateway::WildcardListener; use crate::net::web_server::WebServer; use crate::prelude::*; use crate::progress::FullProgressTracker; @@ -19,7 +19,7 @@ use crate::{DATA_DIR, PLATFORM}; #[instrument(skip_all)] async fn setup_or_init( - server: &mut WebServer, + server: &mut WebServer, config: 
&ServerConfig, ) -> Result, Error> { if let Some(firmware) = check_for_firmware_update() @@ -204,7 +204,7 @@ async fn setup_or_init( #[instrument(skip_all)] pub async fn main( - server: &mut WebServer, + server: &mut WebServer, config: &ServerConfig, ) -> Result, Error> { if &*PLATFORM == "raspberrypi" && tokio::fs::metadata(STANDBY_MODE_PATH).await.is_ok() { diff --git a/core/src/bins/startd.rs b/core/src/bins/startd.rs index f4a7784f4..314d3dc7a 100644 --- a/core/src/bins/startd.rs +++ b/core/src/bins/startd.rs @@ -12,7 +12,7 @@ use tracing::instrument; use crate::context::config::ServerConfig; use crate::context::rpc::InitRpcContextPhases; use crate::context::{DiagnosticContext, InitContext, RpcContext}; -use crate::net::gateway::{BindTcp, SelfContainedNetworkInterfaceListener, UpgradableListener}; +use crate::net::gateway::WildcardListener; use crate::net::static_server::refresher; use crate::net::web_server::{Acceptor, WebServer}; use crate::prelude::*; @@ -23,7 +23,7 @@ use crate::util::logger::LOGGER; #[instrument(skip_all)] async fn inner_main( - server: &mut WebServer, + server: &mut WebServer, config: &ServerConfig, ) -> Result, Error> { let rpc_ctx = if !tokio::fs::metadata("/run/startos/initialized") @@ -70,7 +70,8 @@ async fn inner_main( }; let (rpc_ctx, shutdown) = async { - crate::hostname::sync_hostname(&rpc_ctx.account.peek(|a| a.hostname.clone())).await?; + crate::hostname::sync_hostname(&rpc_ctx.account.peek(|a| a.hostname.hostname.clone())) + .await?; let mut shutdown_recv = rpc_ctx.shutdown.subscribe(); @@ -147,10 +148,7 @@ pub fn main(args: impl IntoIterator) { .build() .expect(&t!("bins.startd.failed-to-initialize-runtime")); let res = rt.block_on(async { - let mut server = WebServer::new( - Acceptor::bind_upgradable(SelfContainedNetworkInterfaceListener::bind(BindTcp, 80)), - refresher(), - ); + let mut server = WebServer::new(Acceptor::new(WildcardListener::new(80)?), refresher()); match inner_main(&mut server, &config).await { Ok(a) => { 
server.shutdown().await; diff --git a/core/src/bins/tunnel.rs b/core/src/bins/tunnel.rs index 97fb818ea..07db8f671 100644 --- a/core/src/bins/tunnel.rs +++ b/core/src/bins/tunnel.rs @@ -7,13 +7,13 @@ use clap::Parser; use futures::FutureExt; use rpc_toolkit::CliApp; use rust_i18n::t; +use tokio::net::TcpListener; use tokio::signal::unix::signal; use tracing::instrument; use visit_rs::Visit; use crate::context::CliContext; use crate::context::config::ClientConfig; -use crate::net::gateway::{Bind, BindTcp}; use crate::net::tls::TlsListener; use crate::net::web_server::{Accept, Acceptor, MetadataVisitor, WebServer}; use crate::prelude::*; @@ -57,7 +57,12 @@ async fn inner_main(config: &TunnelConfig) -> Result<(), Error> { if !a.contains_key(&key) { match (|| { Ok::<_, Error>(TlsListener::new( - BindTcp.bind(addr)?, + TcpListener::from_std( + mio::net::TcpListener::bind(addr) + .with_kind(ErrorKind::Network)? + .into(), + ) + .with_kind(ErrorKind::Network)?, TunnelCertHandler { db: https_db.clone(), crypto_provider: Arc::new(tokio_rustls::rustls::crypto::ring::default_provider()), diff --git a/core/src/context/rpc.rs b/core/src/context/rpc.rs index a59d60236..f1fb6343d 100644 --- a/core/src/context/rpc.rs +++ b/core/src/context/rpc.rs @@ -10,7 +10,6 @@ use std::time::Duration; use chrono::{TimeDelta, Utc}; use imbl::OrdMap; use imbl_value::InternedString; -use itertools::Itertools; use josekit::jwk::Jwk; use reqwest::{Client, Proxy}; use rpc_toolkit::yajrc::RpcError; @@ -25,7 +24,6 @@ use crate::account::AccountInfo; use crate::auth::Sessions; use crate::context::config::ServerConfig; use crate::db::model::Database; -use crate::db::model::package::TaskSeverity; use crate::disk::OsPartitionInfo; use crate::disk::mount::filesystem::bind::Bind; use crate::disk::mount::filesystem::block_dev::BlockDev; @@ -34,7 +32,7 @@ use crate::disk::mount::guard::MountGuard; use crate::init::{InitResult, check_time_is_synchronized}; use crate::install::PKG_ARCHIVE_DIR; use 
crate::lxc::LxcManager; -use crate::net::gateway::UpgradableListener; +use crate::net::gateway::WildcardListener; use crate::net::net_controller::{NetController, NetService}; use crate::net::socks::DEFAULT_SOCKS_LISTEN; use crate::net::utils::{find_eth_iface, find_wifi_iface}; @@ -44,7 +42,6 @@ use crate::prelude::*; use crate::progress::{FullProgressTracker, PhaseProgressTrackerHandle}; use crate::rpc_continuations::{Guid, OpenAuthedContinuations, RpcContinuations}; use crate::service::ServiceMap; -use crate::service::action::update_tasks; use crate::service::effects::callbacks::ServiceCallbacks; use crate::service::effects::subcontainer::NVIDIA_OVERLAY_PATH; use crate::shutdown::Shutdown; @@ -53,7 +50,7 @@ use crate::util::future::NonDetachingJoinHandle; use crate::util::io::{TmpDir, delete_file}; use crate::util::lshw::LshwDevice; use crate::util::sync::{SyncMutex, SyncRwLock, Watch}; -use crate::{ActionId, DATA_DIR, PLATFORM, PackageId}; +use crate::{DATA_DIR, PLATFORM, PackageId}; pub struct RpcContextSeed { is_closed: AtomicBool, @@ -114,7 +111,6 @@ pub struct CleanupInitPhases { cleanup_sessions: PhaseProgressTrackerHandle, init_services: PhaseProgressTrackerHandle, prune_s9pks: PhaseProgressTrackerHandle, - check_tasks: PhaseProgressTrackerHandle, } impl CleanupInitPhases { pub fn new(handle: &FullProgressTracker) -> Self { @@ -122,7 +118,6 @@ impl CleanupInitPhases { cleanup_sessions: handle.add_phase("Cleaning up sessions".into(), Some(1)), init_services: handle.add_phase("Initializing services".into(), Some(10)), prune_s9pks: handle.add_phase("Pruning S9PKs".into(), Some(1)), - check_tasks: handle.add_phase("Checking action requests".into(), Some(1)), } } } @@ -132,7 +127,7 @@ pub struct RpcContext(Arc); impl RpcContext { #[instrument(skip_all)] pub async fn init( - webserver: &WebServerAcceptorSetter, + webserver: &WebServerAcceptorSetter, config: &ServerConfig, disk_guid: InternedString, init_result: Option, @@ -165,16 +160,15 @@ impl RpcContext { { 
(net_ctrl, os_net_service) } else { - let net_ctrl = - Arc::new(NetController::init(db.clone(), &account.hostname, socks_proxy).await?); - webserver.try_upgrade(|a| net_ctrl.net_iface.watcher.upgrade_listener(a))?; + let net_ctrl = Arc::new(NetController::init(db.clone(), socks_proxy).await?); + webserver.send_modify(|wl| wl.set_ip_info(net_ctrl.net_iface.watcher.subscribe())); let os_net_service = net_ctrl.os_bindings().await?; (net_ctrl, os_net_service) }; init_net_ctrl.complete(); tracing::info!("{}", t!("context.rpc.initialized-net-controller")); - if PLATFORM.ends_with("-nonfree") { + if PLATFORM.ends_with("-nvidia") { if let Err(e) = Command::new("nvidia-smi") .invoke(ErrorKind::ParseSysInfo) .await @@ -412,7 +406,6 @@ impl RpcContext { mut cleanup_sessions, mut init_services, mut prune_s9pks, - mut check_tasks, }: CleanupInitPhases, ) -> Result<(), Error> { cleanup_sessions.start(); @@ -504,76 +497,6 @@ impl RpcContext { } prune_s9pks.complete(); - check_tasks.start(); - let mut action_input: OrdMap> = OrdMap::new(); - let tasks: BTreeSet<_> = peek - .as_public() - .as_package_data() - .as_entries()? - .into_iter() - .map(|(_, pde)| { - Ok(pde - .as_tasks() - .as_entries()? 
- .into_iter() - .map(|(_, r)| { - let t = r.as_task(); - Ok::<_, Error>(if t.as_input().transpose_ref().is_some() { - Some((t.as_package_id().de()?, t.as_action_id().de()?)) - } else { - None - }) - }) - .filter_map_ok(|a| a)) - }) - .flatten_ok() - .map(|a| a.and_then(|a| a)) - .try_collect()?; - let procedure_id = Guid::new(); - for (package_id, action_id) in tasks { - if let Some(service) = self.services.get(&package_id).await.as_ref() { - if let Some(input) = service - .get_action_input(procedure_id.clone(), action_id.clone()) - .await - .log_err() - .flatten() - .and_then(|i| i.value) - { - action_input - .entry(package_id) - .or_default() - .insert(action_id, input); - } - } - } - - self.db - .mutate(|db| { - for (package_id, action_input) in &action_input { - for (action_id, input) in action_input { - for (_, pde) in db.as_public_mut().as_package_data_mut().as_entries_mut()? { - pde.as_tasks_mut().mutate(|tasks| { - Ok(update_tasks(tasks, package_id, action_id, input, false)) - })?; - } - } - } - for (_, pde) in db.as_public_mut().as_package_data_mut().as_entries_mut()? { - if pde - .as_tasks() - .de()? 
- .into_iter() - .any(|(_, t)| t.active && t.task.severity == TaskSeverity::Critical) - { - pde.as_status_info_mut().stop()?; - } - } - Ok(()) - }) - .await - .result?; - check_tasks.complete(); - Ok(()) } pub async fn call_remote( diff --git a/core/src/context/setup.rs b/core/src/context/setup.rs index bbfee9862..d4d0bb9de 100644 --- a/core/src/context/setup.rs +++ b/core/src/context/setup.rs @@ -19,8 +19,8 @@ use crate::MAIN_DATA; use crate::context::RpcContext; use crate::context::config::ServerConfig; use crate::disk::mount::guard::{MountGuard, TmpMountGuard}; -use crate::hostname::Hostname; -use crate::net::gateway::UpgradableListener; +use crate::hostname::ServerHostname; +use crate::net::gateway::WildcardListener; use crate::net::web_server::{WebServer, WebServerAcceptorSetter}; use crate::prelude::*; use crate::progress::FullProgressTracker; @@ -45,13 +45,13 @@ lazy_static::lazy_static! { #[ts(export)] pub struct SetupResult { #[ts(type = "string")] - pub hostname: Hostname, + pub hostname: ServerHostname, pub root_ca: Pem, pub needs_restart: bool, } pub struct SetupContextSeed { - pub webserver: WebServerAcceptorSetter, + pub webserver: WebServerAcceptorSetter, pub config: SyncMutex, pub disable_encryption: bool, pub progress: FullProgressTracker, @@ -70,7 +70,7 @@ pub struct SetupContext(Arc); impl SetupContext { #[instrument(skip_all)] pub fn init( - webserver: &WebServer, + webserver: &WebServer, config: ServerConfig, ) -> Result { let (shutdown, _) = tokio::sync::broadcast::channel(1); diff --git a/core/src/control.rs b/core/src/control.rs index f5d39d288..63b4ea889 100644 --- a/core/src/control.rs +++ b/core/src/control.rs @@ -8,6 +8,7 @@ use crate::prelude::*; use crate::{Error, PackageId}; #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct ControlParams { diff --git a/core/src/db/model/mod.rs b/core/src/db/model/mod.rs index 64a9ae4c6..05fc8502d 100644 --- 
a/core/src/db/model/mod.rs +++ b/core/src/db/model/mod.rs @@ -45,7 +45,12 @@ impl Database { .collect(), ssh_privkey: Pem(account.ssh_key.clone()), ssh_pubkeys: SshKeys::new(), - available_ports: AvailablePorts::new(), + available_ports: { + let mut ports = AvailablePorts::new(); + ports.set_ssl(80, false); + ports.set_ssl(443, true); + ports + }, sessions: Sessions::new(), notifications: Notifications::new(), cifs: CifsTargets::new(), diff --git a/core/src/db/model/package.rs b/core/src/db/model/package.rs index 70c33a360..68c9282b4 100644 --- a/core/src/db/model/package.rs +++ b/core/src/db/model/package.rs @@ -18,7 +18,7 @@ use crate::s9pk::manifest::{LocaleString, Manifest}; use crate::status::StatusInfo; use crate::util::DataUrl; use crate::util::serde::{Pem, is_partial_of}; -use crate::{ActionId, HealthCheckId, HostId, PackageId, ReplayId, ServiceInterfaceId}; +use crate::{ActionId, GatewayId, HealthCheckId, HostId, PackageId, ReplayId, ServiceInterfaceId}; #[derive(Debug, Default, Deserialize, Serialize, TS)] #[ts(export)] @@ -381,6 +381,10 @@ pub struct PackageDataEntry { pub hosts: Hosts, #[ts(type = "string[]")] pub store_exposed_dependents: Vec, + #[ts(type = "string | null")] + pub outbound_gateway: Option, + #[serde(default)] + pub plugin: PackagePlugin, } impl AsRef for PackageDataEntry { fn as_ref(&self) -> &PackageDataEntry { @@ -388,6 +392,21 @@ impl AsRef for PackageDataEntry { } } +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] +pub struct PackagePlugin { + pub url: Option, +} + +#[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct UrlPluginRegistration { + pub table_action: ActionId, +} + #[derive(Debug, Clone, Default, Deserialize, Serialize, TS)] #[ts(export)] pub struct CurrentDependencies(pub BTreeMap); diff --git a/core/src/db/model/public.rs b/core/src/db/model/public.rs index 
20c5bc390..dac5faf11 100644 --- a/core/src/db/model/public.rs +++ b/core/src/db/model/public.rs @@ -13,6 +13,7 @@ use openssl::hash::MessageDigest; use patch_db::{HasModel, Value}; use serde::{Deserialize, Serialize}; use ts_rs::TS; +use url::Url; use crate::account::AccountInfo; use crate::db::DbAccessByKey; @@ -20,8 +21,9 @@ use crate::db::model::Database; use crate::db::model::package::AllPackageData; use crate::net::acme::AcmeProvider; use crate::net::host::Host; -use crate::net::host::binding::{AddSslOptions, BindInfo, BindOptions, NetInfo}; -use crate::net::utils::ipv6_is_local; +use crate::net::host::binding::{ + AddSslOptions, BindInfo, BindOptions, Bindings, DerivedAddressInfo, NetInfo, +}; use crate::net::vhost::AlpnInfo; use crate::prelude::*; use crate::progress::FullProgress; @@ -57,42 +59,43 @@ impl Public { platform: get_platform(), id: account.server_id.clone(), version: Current::default().semver(), - hostname: account.hostname.no_dot_host_name(), + name: account.hostname.name.clone(), + hostname: (*account.hostname.hostname).clone(), last_backup: None, package_version_compat: Current::default().compat().clone(), post_init_migration_todos: BTreeMap::new(), network: NetworkInfo { host: Host { - bindings: [( - 80, - BindInfo { - enabled: false, - options: BindOptions { - preferred_external_port: 80, - add_ssl: Some(AddSslOptions { - preferred_external_port: 443, - add_x_forwarded_headers: false, - alpn: Some(AlpnInfo::Specified(vec![ - MaybeUtf8String("h2".into()), - MaybeUtf8String("http/1.1".into()), - ])), - }), - secure: None, + bindings: Bindings( + [( + 80, + BindInfo { + enabled: false, + options: BindOptions { + preferred_external_port: 80, + add_ssl: Some(AddSslOptions { + preferred_external_port: 443, + add_x_forwarded_headers: false, + alpn: Some(AlpnInfo::Specified(vec![ + MaybeUtf8String("h2".into()), + MaybeUtf8String("http/1.1".into()), + ])), + }), + secure: None, + }, + net: NetInfo { + assigned_port: None, + assigned_ssl_port: 
Some(443), + }, + addresses: DerivedAddressInfo::default(), }, - net: NetInfo { - assigned_port: None, - assigned_ssl_port: Some(443), - private_disabled: OrdSet::new(), - public_enabled: OrdSet::new(), - }, - }, - )] - .into_iter() - .collect(), - onions: account.tor_keys.iter().map(|k| k.onion_address()).collect(), + )] + .into_iter() + .collect(), + ), public_domains: BTreeMap::new(), - private_domains: BTreeSet::new(), - hostname_info: BTreeMap::new(), + private_domains: BTreeMap::new(), + port_forwards: BTreeSet::new(), }, wifi: WifiInfo { enabled: true, @@ -117,6 +120,7 @@ impl Public { acme }, dns: Default::default(), + default_outbound: None, }, status_info: ServerStatus { backup_progress: None, @@ -141,6 +145,7 @@ impl Public { zram: true, governor: None, smtp: None, + ifconfig_url: default_ifconfig_url(), ram: 0, devices: Vec::new(), kiosk, @@ -162,19 +167,21 @@ fn get_platform() -> InternedString { (&*PLATFORM).into() } +pub fn default_ifconfig_url() -> Url { + "https://ifconfig.co".parse().unwrap() +} + #[derive(Debug, Deserialize, Serialize, HasModel, TS)] #[serde(rename_all = "camelCase")] #[model = "Model"] #[ts(export)] pub struct ServerInfo { #[serde(default = "get_arch")] - #[ts(type = "string")] pub arch: InternedString, #[serde(default = "get_platform")] - #[ts(type = "string")] pub platform: InternedString, pub id: String, - #[ts(type = "string")] + pub name: InternedString, pub hostname: InternedString, #[ts(type = "string")] pub version: Version, @@ -198,6 +205,9 @@ pub struct ServerInfo { pub zram: bool, pub governor: Option, pub smtp: Option, + #[serde(default = "default_ifconfig_url")] + #[ts(type = "string")] + pub ifconfig_url: Url, #[ts(type = "number")] pub ram: u64, pub devices: Vec, @@ -220,6 +230,9 @@ pub struct NetworkInfo { pub acme: BTreeMap, #[serde(default)] pub dns: DnsSettings, + #[serde(default)] + #[ts(type = "string | null")] + pub default_outbound: Option, } #[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] 
@@ -239,41 +252,12 @@ pub struct DnsSettings { #[ts(export)] pub struct NetworkInterfaceInfo { pub name: Option, - pub public: Option, pub secure: Option, pub ip_info: Option>, + #[serde(default, rename = "type")] + pub gateway_type: Option, } impl NetworkInterfaceInfo { - pub fn public(&self) -> bool { - self.public.unwrap_or_else(|| { - !self.ip_info.as_ref().map_or(true, |ip_info| { - let ip4s = ip_info - .subnets - .iter() - .filter_map(|ipnet| { - if let IpAddr::V4(ip4) = ipnet.addr() { - Some(ip4) - } else { - None - } - }) - .collect::>(); - if !ip4s.is_empty() { - return ip4s - .iter() - .all(|ip4| ip4.is_loopback() || ip4.is_private() || ip4.is_link_local()); - } - ip_info.subnets.iter().all(|ipnet| { - if let IpAddr::V6(ip6) = ipnet.addr() { - ipv6_is_local(ip6) - } else { - true - } - }) - }) - }) - } - pub fn secure(&self) -> bool { self.secure.unwrap_or(false) } @@ -310,6 +294,28 @@ pub enum NetworkInterfaceType { Loopback, } +#[derive( + Clone, + Copy, + Debug, + Default, + PartialEq, + Eq, + PartialOrd, + Ord, + Deserialize, + Serialize, + TS, + clap::ValueEnum, +)] +#[ts(export)] +#[serde(rename_all = "kebab-case")] +pub enum GatewayType { + #[default] + InboundOutbound, + OutboundOnly, +} + #[derive(Debug, Deserialize, Serialize, HasModel, TS)] #[serde(rename_all = "camelCase")] #[model = "Model"] diff --git a/core/src/dependencies.rs b/core/src/dependencies.rs index 73627075b..ce7991193 100644 --- a/core/src/dependencies.rs +++ b/core/src/dependencies.rs @@ -45,7 +45,7 @@ impl TS for DepInfo { "DepInfo".into() } fn inline() -> String { - "{ description: string | null, optional: boolean } & MetadataSrc".into() + "{ description: LocaleString | null, optional: boolean } & MetadataSrc".into() } fn inline_flattened() -> String { Self::inline() @@ -54,7 +54,8 @@ impl TS for DepInfo { where Self: 'static, { - v.visit::() + v.visit::(); + v.visit::(); } fn output_path() -> Option<&'static std::path::Path> { Some(Path::new("DepInfo.ts")) diff --git 
a/core/src/disk/util.rs b/core/src/disk/util.rs index fff4264a5..9cf2b6882 100644 --- a/core/src/disk/util.rs +++ b/core/src/disk/util.rs @@ -19,7 +19,7 @@ use super::mount::filesystem::block_dev::BlockDev; use super::mount::guard::TmpMountGuard; use crate::disk::OsPartitionInfo; use crate::disk::mount::guard::GenericMountGuard; -use crate::hostname::Hostname; +use crate::hostname::ServerHostname; use crate::prelude::*; use crate::util::Invoke; use crate::util::serde::IoFormat; @@ -43,22 +43,28 @@ pub struct DiskInfo { pub guid: Option, } -#[derive(Clone, Debug, Deserialize, Serialize)] +#[derive(Clone, Debug, Deserialize, Serialize, ts_rs::TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct PartitionInfo { pub logicalname: PathBuf, pub label: Option, + #[ts(type = "number")] pub capacity: u64, + #[ts(type = "number | null")] pub used: Option, pub start_os: BTreeMap, pub guid: Option, } -#[derive(Clone, Debug, Default, Deserialize, Serialize)] +#[derive(Clone, Debug, Default, Deserialize, Serialize, ts_rs::TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct StartOsRecoveryInfo { - pub hostname: Hostname, + pub hostname: ServerHostname, + #[ts(type = "string")] pub version: exver::Version, + #[ts(type = "string")] pub timestamp: DateTime, pub password_hash: Option, pub wrapped_key: Option, diff --git a/core/src/error.rs b/core/src/error.rs index dba631303..55b4494b1 100644 --- a/core/src/error.rs +++ b/core/src/error.rs @@ -3,6 +3,7 @@ use std::fmt::{Debug, Display}; use axum::http::StatusCode; use axum::http::uri::InvalidUri; use color_eyre::eyre::eyre; +use imbl_value::InternedString; use num_enum::TryFromPrimitive; use patch_db::Value; use rpc_toolkit::reqwest; @@ -42,11 +43,11 @@ pub enum ErrorKind { ParseUrl = 19, DiskNotAvailable = 20, BlockDevice = 21, - InvalidOnionAddress = 22, + // InvalidOnionAddress = 22, Pack = 23, ValidateS9pk = 24, DiskCorrupted = 25, // Remove - Tor = 26, + // Tor = 26, ConfigGen = 27, ParseNumber = 28, 
Database = 29, @@ -126,11 +127,11 @@ impl ErrorKind { ParseUrl => t!("error.parse-url"), DiskNotAvailable => t!("error.disk-not-available"), BlockDevice => t!("error.block-device"), - InvalidOnionAddress => t!("error.invalid-onion-address"), + // InvalidOnionAddress => t!("error.invalid-onion-address"), Pack => t!("error.pack"), ValidateS9pk => t!("error.validate-s9pk"), DiskCorrupted => t!("error.disk-corrupted"), // Remove - Tor => t!("error.tor"), + // Tor => t!("error.tor"), ConfigGen => t!("error.config-gen"), ParseNumber => t!("error.parse-number"), Database => t!("error.database"), @@ -204,17 +205,12 @@ pub struct Error { impl Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}: {:#}", &self.kind.as_str(), self.source) + write!(f, "{}: {}", &self.kind.as_str(), self.display_src()) } } impl Debug for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}: {:?}", - &self.kind.as_str(), - self.debug.as_ref().unwrap_or(&self.source) - ) + write!(f, "{}: {}", &self.kind.as_str(), self.display_dbg()) } } impl Error { @@ -235,8 +231,13 @@ impl Error { } pub fn clone_output(&self) -> Self { Error { - source: eyre!("{}", self.source), - debug: self.debug.as_ref().map(|e| eyre!("{e}")), + source: eyre!("{:#}", self.source), + debug: Some( + self.debug + .as_ref() + .map(|e| eyre!("{e}")) + .unwrap_or_else(|| eyre!("{:?}", self.source)), + ), kind: self.kind, info: self.info.clone(), task: None, @@ -257,6 +258,30 @@ impl Error { self.task.take(); self } + + pub fn display_src(&self) -> impl Display { + struct D<'a>(&'a Error); + impl<'a> Display for D<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:#}", self.0.source) + } + } + D(self) + } + + pub fn display_dbg(&self) -> impl Display { + struct D<'a>(&'a Error); + impl<'a> Display for D<'a> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + if let 
Some(debug) = &self.0.debug { + write!(f, "{}", debug) + } else { + write!(f, "{:?}", self.0.source) + } + } + } + D(self) + } } impl axum::response::IntoResponse for Error { fn into_response(self) -> axum::response::Response { @@ -370,17 +395,6 @@ impl From for Error { Error::new(e, kind) } } -#[cfg(feature = "arti")] -impl From for Error { - fn from(e: arti_client::Error) -> Self { - Error::new(e, ErrorKind::Tor) - } -} -impl From for Error { - fn from(e: torut::control::ConnError) -> Self { - Error::new(e, ErrorKind::Tor) - } -} impl From for Error { fn from(e: zbus::Error) -> Self { Error::new(e, ErrorKind::DBus) @@ -444,9 +458,11 @@ impl Debug for ErrorData { impl std::error::Error for ErrorData {} impl From for ErrorData { fn from(value: Error) -> Self { + let details = value.display_src().to_string(); + let debug = value.display_dbg().to_string(); Self { - details: value.to_string(), - debug: format!("{:?}", value), + details, + debug, info: value.info, } } @@ -634,13 +650,10 @@ impl ResultExt for Result { fn with_ctx (ErrorKind, D), D: Display>(self, f: F) -> Result { self.map_err(|e| { let (kind, ctx) = f(&e); + let ctx = InternedString::from_display(&ctx); let source = e.source; - let with_ctx = format!("{ctx}: {source}"); - let source = source.wrap_err(with_ctx); - let debug = e.debug.map(|e| { - let with_ctx = format!("{ctx}: {e}"); - e.wrap_err(with_ctx) - }); + let source = source.wrap_err(ctx.clone()); + let debug = e.debug.map(|e| e.wrap_err(ctx)); Error { kind, source, diff --git a/core/src/hostname.rs b/core/src/hostname.rs index 5c88bdcec..0113afe75 100644 --- a/core/src/hostname.rs +++ b/core/src/hostname.rs @@ -1,25 +1,58 @@ +use clap::Parser; use imbl_value::InternedString; use lazy_format::lazy_format; -use rand::{Rng, rng}; +use serde::{Deserialize, Serialize}; use tokio::process::Command; use tracing::instrument; +use ts_rs::TS; +use crate::context::RpcContext; +use crate::db::model::public::ServerInfo; +use crate::prelude::*; use 
crate::util::Invoke; -use crate::{Error, ErrorKind}; -#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)] -pub struct Hostname(pub InternedString); -lazy_static::lazy_static! { - static ref ADJECTIVES: Vec = include_str!("./assets/adjectives.txt").lines().map(|x| x.to_string()).collect(); - static ref NOUNS: Vec = include_str!("./assets/nouns.txt").lines().map(|x| x.to_string()).collect(); -} -impl AsRef for Hostname { - fn as_ref(&self) -> &str { +#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize, ts_rs::TS)] +#[ts(type = "string")] +pub struct ServerHostname(InternedString); +impl std::ops::Deref for ServerHostname { + type Target = InternedString; + fn deref(&self) -> &Self::Target { &self.0 } } +impl AsRef for ServerHostname { + fn as_ref(&self) -> &str { + &***self + } +} + +impl ServerHostname { + fn validate(&self) -> Result<(), Error> { + if self.0.is_empty() { + return Err(Error::new( + eyre!("{}", t!("hostname.empty")), + ErrorKind::InvalidRequest, + )); + } + if let Some(c) = self + .0 + .chars() + .find(|c| !(c.is_ascii_alphanumeric() || c == &'-') || c.is_ascii_uppercase()) + { + return Err(Error::new( + eyre!("{}", t!("hostname.invalid-character", char = c)), + ErrorKind::InvalidRequest, + )); + } + Ok(()) + } + + pub fn new(hostname: InternedString) -> Result { + let res = Self(hostname); + res.validate()?; + Ok(res) + } -impl Hostname { pub fn lan_address(&self) -> InternedString { InternedString::from_display(&lazy_format!("https://{}.local", self.0)) } @@ -28,17 +61,135 @@ impl Hostname { InternedString::from_display(&lazy_format!("{}.local", self.0)) } - pub fn no_dot_host_name(&self) -> InternedString { - self.0.clone() + pub fn load(server_info: &Model) -> Result { + Ok(Self(server_info.as_hostname().de()?)) + } + + pub fn save(&self, server_info: &mut Model) -> Result<(), Error> { + server_info.as_hostname_mut().ser(&**self) } } -pub fn generate_hostname() -> Hostname { - let mut rng = rng(); - let 
adjective = &ADJECTIVES[rng.random_range(0..ADJECTIVES.len())]; - let noun = &NOUNS[rng.random_range(0..NOUNS.len())]; - Hostname(InternedString::from_display(&lazy_format!( - "{adjective}-{noun}" +#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize, ts_rs::TS)] +#[ts(type = "string")] +pub struct ServerHostnameInfo { + pub name: InternedString, + pub hostname: ServerHostname, +} + +lazy_static::lazy_static! { + static ref ADJECTIVES: Vec = include_str!("./assets/adjectives.txt").lines().map(|x| x.to_string()).collect(); + static ref NOUNS: Vec = include_str!("./assets/nouns.txt").lines().map(|x| x.to_string()).collect(); +} +impl AsRef for ServerHostnameInfo { + fn as_ref(&self) -> &str { + &self.hostname + } +} + +fn normalize(s: &str) -> InternedString { + let mut prev_was_dash = true; + let mut normalized = s + .chars() + .filter_map(|c| { + if c.is_alphanumeric() { + prev_was_dash = false; + Some(c.to_ascii_lowercase()) + } else if (c == '-' || c.is_whitespace()) && !prev_was_dash { + prev_was_dash = true; + Some('-') + } else { + None + } + }) + .collect::(); + while normalized.ends_with('-') { + normalized.pop(); + } + if normalized.len() < 4 { + generate_hostname().0 + } else { + normalized.into() + } +} + +fn denormalize(s: &str) -> InternedString { + let mut cap = true; + s.chars() + .map(|c| { + if c == '-' { + cap = true; + ' ' + } else if cap { + cap = false; + c.to_ascii_uppercase() + } else { + c + } + }) + .collect::() + .into() +} + +impl ServerHostnameInfo { + pub fn new( + name: Option, + hostname: Option, + ) -> Result { + Self::new_opt(name, hostname) + .map(|h| h.unwrap_or_else(|| ServerHostnameInfo::from_hostname(generate_hostname()))) + } + + pub fn new_opt( + name: Option, + hostname: Option, + ) -> Result, Error> { + let name = name.filter(|n| !n.is_empty()); + let hostname = hostname.filter(|h| !h.is_empty()); + Ok(match (name, hostname) { + (Some(name), Some(hostname)) => Some(ServerHostnameInfo { + name, + hostname: 
ServerHostname::new(hostname)?, + }), + (Some(name), None) => Some(ServerHostnameInfo::from_name(name)), + (None, Some(hostname)) => Some(ServerHostnameInfo::from_hostname(ServerHostname::new( + hostname, + )?)), + (None, None) => None, + }) + } + + pub fn from_hostname(hostname: ServerHostname) -> Self { + Self { + name: denormalize(&**hostname), + hostname, + } + } + + pub fn from_name(name: InternedString) -> Self { + Self { + hostname: ServerHostname(normalize(&*name)), + name, + } + } + + pub fn load(server_info: &Model) -> Result { + Ok(Self { + name: server_info.as_name().de()?, + hostname: ServerHostname::load(server_info)?, + }) + } + + pub fn save(&self, server_info: &mut Model) -> Result<(), Error> { + server_info.as_name_mut().ser(&self.name)?; + self.hostname.save(server_info) + } +} + +pub fn generate_hostname() -> ServerHostname { + let num = rand::random::(); + ServerHostname(InternedString::from_display(&lazy_format!( + "startos-{num:04x}" ))) } @@ -48,17 +199,17 @@ pub fn generate_id() -> String { } #[instrument(skip_all)] -pub async fn get_current_hostname() -> Result { +pub async fn get_current_hostname() -> Result { let out = Command::new("hostname") .invoke(ErrorKind::ParseSysInfo) .await?; let out_string = String::from_utf8(out)?; - Ok(Hostname(out_string.trim().into())) + Ok(out_string.trim().into()) } #[instrument(skip_all)] -pub async fn set_hostname(hostname: &Hostname) -> Result<(), Error> { - let hostname = &*hostname.0; +pub async fn set_hostname(hostname: &ServerHostname) -> Result<(), Error> { + let hostname = &***hostname; Command::new("hostnamectl") .arg("--static") .arg("set-hostname") @@ -77,7 +228,7 @@ pub async fn set_hostname(hostname: &Hostname) -> Result<(), Error> { } #[instrument(skip_all)] -pub async fn sync_hostname(hostname: &Hostname) -> Result<(), Error> { +pub async fn sync_hostname(hostname: &ServerHostname) -> Result<(), Error> { set_hostname(hostname).await?; Command::new("systemctl") .arg("restart") @@ -86,3 
+237,54 @@ pub async fn sync_hostname(hostname: &Hostname) -> Result<(), Error> { .await?; Ok(()) } + +#[derive(Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[command(rename_all = "kebab-case")] +#[ts(export)] +pub struct SetServerHostnameParams { + name: Option, + hostname: Option, +} + +pub async fn set_hostname_rpc( + ctx: RpcContext, + SetServerHostnameParams { name, hostname }: SetServerHostnameParams, +) -> Result<(), Error> { + let name = name.filter(|n| !n.is_empty()); + let hostname = hostname + .filter(|h| !h.is_empty()) + .map(ServerHostname::new) + .transpose()?; + if name.is_none() && hostname.is_none() { + return Err(Error::new( + eyre!("{}", t!("hostname.must-provide-name-or-hostname")), + ErrorKind::InvalidRequest, + )); + }; + let info = ctx + .db + .mutate(|db| { + let server_info = db.as_public_mut().as_server_info_mut(); + if let Some(name) = name { + server_info.as_name_mut().ser(&name)?; + } + if let Some(hostname) = &hostname { + hostname.save(server_info)?; + } + ServerHostnameInfo::load(server_info) + }) + .await + .result?; + ctx.account.mutate(|a| a.hostname = info.clone()); + if let Some(h) = hostname { + sync_hostname(&h).await?; + } + + Ok(()) +} + +#[test] +fn test_generate_hostname() { + assert_eq!(dbg!(generate_hostname().0).len(), 12); +} diff --git a/core/src/init.rs b/core/src/init.rs index 39680015e..8b6a91625 100644 --- a/core/src/init.rs +++ b/core/src/init.rs @@ -18,9 +18,9 @@ use crate::context::{CliContext, InitContext, RpcContext}; use crate::db::model::Database; use crate::db::model::public::ServerStatus; use crate::developer::OS_DEVELOPER_KEY_PATH; -use crate::hostname::Hostname; +use crate::hostname::ServerHostname; use crate::middleware::auth::local::LocalAuthContext; -use crate::net::gateway::UpgradableListener; +use crate::net::gateway::WildcardListener; use crate::net::net_controller::{NetController, NetService}; use crate::net::socks::DEFAULT_SOCKS_LISTEN; use 
crate::net::utils::find_wifi_iface; @@ -144,7 +144,7 @@ pub async fn run_script>(path: P, mut progress: PhaseProgressTrac #[instrument(skip_all)] pub async fn init( - webserver: &WebServerAcceptorSetter, + webserver: &WebServerAcceptorSetter, cfg: &ServerConfig, InitPhases { preinit, @@ -191,15 +191,16 @@ pub async fn init( .arg(OS_DEVELOPER_KEY_PATH) .invoke(ErrorKind::Filesystem) .await?; + let hostname = ServerHostname::load(peek.as_public().as_server_info())?; crate::ssh::sync_keys( - &Hostname(peek.as_public().as_server_info().as_hostname().de()?), + &hostname, &peek.as_private().as_ssh_privkey().de()?, &peek.as_private().as_ssh_pubkeys().de()?, SSH_DIR, ) .await?; crate::ssh::sync_keys( - &Hostname(peek.as_public().as_server_info().as_hostname().de()?), + &hostname, &peek.as_private().as_ssh_privkey().de()?, &Default::default(), "/root/.ssh", @@ -211,14 +212,9 @@ pub async fn init( start_net.start(); let net_ctrl = Arc::new( - NetController::init( - db.clone(), - &account.hostname, - cfg.socks_listen.unwrap_or(DEFAULT_SOCKS_LISTEN), - ) - .await?, + NetController::init(db.clone(), cfg.socks_listen.unwrap_or(DEFAULT_SOCKS_LISTEN)).await?, ); - webserver.try_upgrade(|a| net_ctrl.net_iface.watcher.upgrade_listener(a))?; + webserver.send_modify(|wl| wl.set_ip_info(net_ctrl.net_iface.watcher.subscribe())); let os_net_service = net_ctrl.os_bindings().await?; start_net.complete(); diff --git a/core/src/install/mod.rs b/core/src/install/mod.rs index 9267be2d5..80687c0c2 100644 --- a/core/src/install/mod.rs +++ b/core/src/install/mod.rs @@ -177,6 +177,7 @@ pub async fn install( } #[derive(Deserialize, Serialize, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct SideloadParams { #[ts(skip)] @@ -185,6 +186,7 @@ pub struct SideloadParams { } #[derive(Deserialize, Serialize, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct SideloadResponse { pub upload: Guid, @@ -284,6 +286,7 @@ pub async fn sideload( } #[derive(Debug, Clone, Deserialize, 
Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct CancelInstallParams { @@ -521,6 +524,7 @@ pub async fn cli_install( } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct UninstallParams { diff --git a/core/src/lib.rs b/core/src/lib.rs index d7cfc79b4..10913503d 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -25,6 +25,9 @@ pub fn platform_to_arch(platform: &str) -> &str { if let Some(arch) = platform.strip_suffix("-nonfree") { return arch; } + if let Some(arch) = platform.strip_suffix("-nvidia") { + return arch; + } match platform { "raspberrypi" | "rockchip64" => "aarch64", _ => platform, @@ -268,6 +271,18 @@ pub fn server() -> ParentHandler { .with_about("about.display-time-uptime") .with_call_remote::(), ) + .subcommand( + "device-info", + ParentHandler::>::new().root_handler( + from_fn_async(system::device_info) + .with_display_serializable() + .with_custom_display_fn(|handle, result| { + system::display_device_info(handle.params, result) + }) + .with_about("about.get-device-info") + .with_call_remote::(), + ), + ) .subcommand( "experimental", system::experimental::().with_about("about.commands-experimental"), @@ -377,6 +392,20 @@ pub fn server() -> ParentHandler { "host", net::host::server_host_api::().with_about("about.commands-host-system-ui"), ) + .subcommand( + "set-hostname", + from_fn_async(hostname::set_hostname_rpc) + .no_display() + .with_about("about.set-hostname") + .with_call_remote::(), + ) + .subcommand( + "set-ifconfig-url", + from_fn_async(system::set_ifconfig_url) + .no_display() + .with_about("about.set-ifconfig-url") + .with_call_remote::(), + ) .subcommand( "set-keyboard", from_fn_async(system::set_keyboard) @@ -548,4 +577,12 @@ pub fn package() -> ParentHandler { "host", net::host::host_api::().with_about("about.manage-network-hosts-package"), ) + .subcommand( + 
"set-outbound-gateway", + from_fn_async(net::gateway::set_outbound_gateway) + .with_metadata("sync_db", Value::Bool(true)) + .no_display() + .with_about("about.set-outbound-gateway-package") + .with_call_remote::(), + ) } diff --git a/core/src/logs.rs b/core/src/logs.rs index 737907bb1..7c72415f5 100644 --- a/core/src/logs.rs +++ b/core/src/logs.rs @@ -24,6 +24,7 @@ use tokio::process::{Child, Command}; use tokio_stream::wrappers::LinesStream; use tokio_tungstenite::tungstenite::Message; use tracing::instrument; +use ts_rs::TS; use crate::PackageId; use crate::context::{CliContext, RpcContext}; @@ -109,23 +110,28 @@ async fn ws_handler( } } -#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] +#[derive(serde::Serialize, serde::Deserialize, Debug, Clone, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct LogResponse { + #[ts(as = "Vec")] pub entries: Reversible, start_cursor: Option, end_cursor: Option, } -#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] +#[derive(serde::Serialize, serde::Deserialize, Debug, Clone, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct LogFollowResponse { start_cursor: Option, guid: Guid, } -#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)] +#[derive(serde::Serialize, serde::Deserialize, Debug, Clone, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct LogEntry { + #[ts(type = "string")] timestamp: DateTime, message: String, boot_id: String, @@ -321,14 +327,17 @@ impl From for String { } } -#[derive(Deserialize, Serialize, Parser)] +#[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export, concrete(Extra = Empty), bound = "")] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct LogsParams { #[command(flatten)] #[serde(flatten)] + #[ts(skip)] extra: Extra, #[arg(short = 'l', long = "limit", help = "help.arg.log-limit")] + #[ts(optional)] limit: Option, #[arg( short = 'c', @@ -336,9 +345,11 @@ pub struct LogsParams { 
conflicts_with = "follow", help = "help.arg.log-cursor" )] + #[ts(optional)] cursor: Option, #[arg(short = 'b', long = "boot", help = "help.arg.log-boot")] #[serde(default)] + #[ts(optional, type = "number | string")] boot: Option, #[arg( short = 'B', diff --git a/core/src/lxc/config.template b/core/src/lxc/config.template index a85b700e4..4f87fce08 100644 --- a/core/src/lxc/config.template +++ b/core/src/lxc/config.template @@ -17,3 +17,6 @@ lxc.net.0.link = lxcbr0 lxc.net.0.flags = up lxc.rootfs.options = rshared + +# Environment +lxc.environment = LANG={lang} diff --git a/core/src/lxc/mod.rs b/core/src/lxc/mod.rs index baffb041e..b8ac615e5 100644 --- a/core/src/lxc/mod.rs +++ b/core/src/lxc/mod.rs @@ -174,10 +174,15 @@ impl LxcContainer { config: LxcConfig, ) -> Result { let guid = new_guid(); + let lang = std::env::var("LANG").unwrap_or_else(|_| "C.UTF-8".into()); let machine_id = hex::encode(rand::random::<[u8; 16]>()); let container_dir = Path::new(LXC_CONTAINER_DIR).join(&*guid); tokio::fs::create_dir_all(&container_dir).await?; - let config_str = format!(include_str!("./config.template"), guid = &*guid); + let config_str = format!( + include_str!("./config.template"), + guid = &*guid, + lang = &lang, + ); tokio::fs::write(container_dir.join("config"), config_str).await?; let rootfs_dir = container_dir.join("rootfs"); let rootfs = OverlayGuard::mount( @@ -215,6 +220,13 @@ impl LxcContainer { 100000, ) .await?; + write_file_owned_atomic( + rootfs_dir.join("etc/default/locale"), + format!("LANG={lang}\n"), + 100000, + 100000, + ) + .await?; Command::new("sed") .arg("-i") .arg(format!("s/LXC_NAME/{guid}/g")) diff --git a/core/src/middleware/auth/session.rs b/core/src/middleware/auth/session.rs index 2ff67d372..66ef0ffe4 100644 --- a/core/src/middleware/auth/session.rs +++ b/core/src/middleware/auth/session.rs @@ -20,9 +20,6 @@ use crate::context::RpcContext; use crate::middleware::auth::DbContext; use crate::prelude::*; use 
crate::rpc_continuations::OpenAuthedContinuations; -use crate::util::Invoke; -use crate::util::io::{create_file_mod, read_file_to_string}; -use crate::util::serde::{BASE64, const_true}; use crate::util::sync::SyncMutex; pub trait SessionAuthContext: DbContext { diff --git a/core/src/middleware/auth/signature.rs b/core/src/middleware/auth/signature.rs index 22af16182..1536ea4a1 100644 --- a/core/src/middleware/auth/signature.rs +++ b/core/src/middleware/auth/signature.rs @@ -71,7 +71,7 @@ impl SignatureAuthContext for RpcContext { .as_network() .as_host() .as_private_domains() - .de() + .keys() .map(|k| k.into_iter()) .transpose(), ) diff --git a/core/src/net/acme.rs b/core/src/net/acme.rs index 8054715af..056e77e4f 100644 --- a/core/src/net/acme.rs +++ b/core/src/net/acme.rs @@ -461,7 +461,8 @@ impl ValueParserFactory for AcmeProvider { } } -#[derive(Deserialize, Serialize, Parser)] +#[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] pub struct InitAcmeParams { #[arg(long, help = "help.arg.acme-provider")] pub provider: AcmeProvider, @@ -486,7 +487,8 @@ pub async fn init( Ok(()) } -#[derive(Deserialize, Serialize, Parser)] +#[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] pub struct RemoveAcmeParams { #[arg(long, help = "help.arg.acme-provider")] pub provider: AcmeProvider, diff --git a/core/src/net/dns.rs b/core/src/net/dns.rs index 84c5cb3a4..1083b74dc 100644 --- a/core/src/net/dns.rs +++ b/core/src/net/dns.rs @@ -10,8 +10,9 @@ use color_eyre::eyre::eyre; use futures::{FutureExt, StreamExt, TryStreamExt}; use hickory_server::authority::{AuthorityObject, Catalog, MessageResponseBuilder}; use hickory_server::proto::op::{Header, ResponseCode}; -use hickory_server::proto::rr::{LowerName, Name, Record, RecordType}; -use hickory_server::resolver::config::{ResolverConfig, ResolverOpts}; +use hickory_server::proto::rr::{Name, Record, RecordType}; +use hickory_server::proto::xfer::Protocol; +use hickory_server::resolver::config::{NameServerConfig, 
ResolverConfig, ResolverOpts}; use hickory_server::server::{Request, RequestHandler, ResponseHandler, ResponseInfo}; use hickory_server::store::forwarder::{ForwardAuthority, ForwardConfig}; use hickory_server::{ServerFuture, resolver as hickory_resolver}; @@ -25,6 +26,7 @@ use serde::{Deserialize, Serialize}; use tokio::net::{TcpListener, UdpSocket}; use tokio::sync::RwLock; use tracing::instrument; +use ts_rs::TS; use crate::context::{CliContext, RpcContext}; use crate::db::model::Database; @@ -93,7 +95,8 @@ pub fn dns_api() -> ParentHandler { ) } -#[derive(Deserialize, Serialize, Parser)] +#[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] pub struct QueryDnsParams { #[arg(help = "help.arg.fqdn")] pub fqdn: InternedString, @@ -133,7 +136,8 @@ pub fn query_dns( .map_err(Error::from) } -#[derive(Deserialize, Serialize, Parser)] +#[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] pub struct SetStaticDnsParams { #[arg(help = "help.arg.dns-servers")] pub servers: Option>, @@ -203,6 +207,7 @@ pub async fn dump_table( struct ResolveMap { private_domains: BTreeMap>, services: BTreeMap, BTreeMap>>, + challenges: BTreeMap)>, } pub struct DnsController { @@ -237,22 +242,60 @@ impl Resolver { let mut prev = crate::util::serde::hash_serializable::(&( ResolverConfig::new(), ResolverOpts::default(), + Option::>::None, )) .unwrap_or_default(); loop { - if let Err(e) = async { - let mut stream = file_string_stream("/run/systemd/resolve/resolv.conf") - .filter_map(|a| futures::future::ready(a.transpose())) - .boxed(); - while let Some(conf) = stream.try_next().await? 
{ - let (config, mut opts) = - hickory_resolver::system_conf::parse_resolv_conf(conf) - .with_kind(ErrorKind::ParseSysInfo)?; - opts.timeout = Duration::from_secs(30); + let res: Result<(), Error> = async { + let mut file_stream = + file_string_stream("/run/systemd/resolve/resolv.conf") + .filter_map(|a| futures::future::ready(a.transpose())) + .boxed(); + let mut static_sub = db + .subscribe( + "/public/serverInfo/network/dns/staticServers" + .parse() + .unwrap(), + ) + .await; + let mut last_config: Option<(ResolverConfig, ResolverOpts)> = None; + loop { + let got_file = tokio::select! { + res = file_stream.try_next() => { + let conf = res? + .ok_or_else(|| Error::new( + eyre!("resolv.conf stream ended"), + ErrorKind::Network, + ))?; + let (config, mut opts) = + hickory_resolver::system_conf::parse_resolv_conf(conf) + .with_kind(ErrorKind::ParseSysInfo)?; + opts.timeout = Duration::from_secs(30); + last_config = Some((config, opts)); + true + } + _ = static_sub.recv() => false, + }; + let Some((ref config, ref opts)) = last_config else { + continue; + }; + let static_servers: Option> = db + .peek() + .await + .as_public() + .as_server_info() + .as_network() + .as_dns() + .as_static_servers() + .de()?; let hash = crate::util::serde::hash_serializable::( - &(&config, &opts), + &(config, opts, &static_servers), )?; - if hash != prev { + if hash == prev { + prev = hash; + continue; + } + if got_file { db.mutate(|db| { db.as_public_mut() .as_server_info_mut() @@ -271,44 +314,52 @@ impl Resolver { }) .await .result?; - let auth: Vec> = vec![Arc::new( - ForwardAuthority::builder_tokio(ForwardConfig { - name_servers: from_value(Value::Array( - config - .name_servers() - .into_iter() - .skip(4) - .map(to_value) - .collect::>()?, - ))?, - options: Some(opts), + } + let forward_servers = if let Some(servers) = &static_servers { + servers + .iter() + .flat_map(|addr| { + [ + NameServerConfig::new(*addr, Protocol::Udp), + NameServerConfig::new(*addr, Protocol::Tcp), + ] }) - 
.build() - .map_err(|e| Error::new(eyre!("{e}"), ErrorKind::Network))?, - )]; - { - let mut guard = tokio::time::timeout( - Duration::from_secs(10), - catalog.write(), - ) - .await - .map_err(|_| { - Error::new( - eyre!("{}", t!("net.dns.timeout-updating-catalog")), - ErrorKind::Timeout, - ) - })?; - guard.upsert(Name::root().into(), auth); - drop(guard); - } + .map(|n| to_value(&n)) + .collect::>()? + } else { + config + .name_servers() + .into_iter() + .skip(4) + .map(to_value) + .collect::>()? + }; + let auth: Vec> = vec![Arc::new( + ForwardAuthority::builder_tokio(ForwardConfig { + name_servers: from_value(Value::Array(forward_servers))?, + options: Some(opts.clone()), + }) + .build() + .map_err(|e| Error::new(eyre!("{e}"), ErrorKind::Network))?, + )]; + { + let mut guard = + tokio::time::timeout(Duration::from_secs(10), catalog.write()) + .await + .map_err(|_| { + Error::new( + eyre!("{}", t!("net.dns.timeout-updating-catalog")), + ErrorKind::Timeout, + ) + })?; + guard.upsert(Name::root().into(), auth); + drop(guard); } prev = hash; } - - Ok::<_, Error>(()) } - .await - { + .await; + if let Err(e) = res { tracing::error!("{e}"); tracing::debug!("{e:?}"); tokio::time::sleep(Duration::from_secs(1)).await; @@ -399,7 +450,41 @@ impl RequestHandler for Resolver { match async { let req = request.request_info()?; let query = req.query; - if let Some(ip) = self.resolve(query.name().borrow(), req.src.ip()) { + let name = query.name(); + + if STARTOS.zone_of(name) && query.query_type() == RecordType::TXT { + let name_str = + InternedString::intern(name.to_lowercase().to_utf8().trim_end_matches('.')); + if let Some(txt_value) = self.resolve.mutate(|r| { + r.challenges.retain(|_, (_, weak)| weak.strong_count() > 0); + r.challenges.remove(&name_str).map(|(val, _)| val) + }) { + let mut header = Header::response_from_request(request.header()); + header.set_recursion_available(true); + return response_handle + .send_response( + 
MessageResponseBuilder::from_message_request(&*request).build( + header, + &[Record::from_rdata( + query.name().to_owned().into(), + 0, + hickory_server::proto::rr::RData::TXT( + hickory_server::proto::rr::rdata::TXT::new(vec![ + txt_value.to_string(), + ]), + ), + )], + [], + [], + [], + ), + ) + .await + .map(Some); + } + } + + if let Some(ip) = self.resolve(name, req.src.ip()) { match query.query_type() { RecordType::A => { let mut header = Header::response_from_request(request.header()); @@ -615,6 +700,34 @@ impl DnsController { } } + pub fn add_challenge( + &self, + domain: InternedString, + value: InternedString, + ) -> Result, Error> { + if let Some(resolve) = Weak::upgrade(&self.resolve) { + resolve.mutate(|writable| { + let entry = writable + .challenges + .entry(domain) + .or_insert_with(|| (value.clone(), Weak::new())); + let rc = if let Some(rc) = Weak::upgrade(&entry.1) { + rc + } else { + let new = Arc::new(()); + *entry = (value, Arc::downgrade(&new)); + new + }; + Ok(rc) + }) + } else { + Err(Error::new( + eyre!("{}", t!("net.dns.server-thread-exited")), + crate::ErrorKind::Network, + )) + } + } + pub fn gc_private_domains<'a, BK: Ord + 'a>( &self, domains: impl IntoIterator + 'a, diff --git a/core/src/net/forward.rs b/core/src/net/forward.rs index b18ed7f1b..067b6b484 100644 --- a/core/src/net/forward.rs +++ b/core/src/net/forward.rs @@ -4,44 +4,90 @@ use std::sync::{Arc, Weak}; use std::time::Duration; use futures::channel::oneshot; -use id_pool::IdPool; use iddqd::{IdOrdItem, IdOrdMap}; use imbl::OrdMap; +use ipnet::{IpNet, Ipv4Net}; +use rand::Rng; use rpc_toolkit::{Context, HandlerArgs, HandlerExt, ParentHandler, from_fn_async}; use serde::{Deserialize, Serialize}; use tokio::process::Command; use tokio::sync::mpsc; -use crate::GatewayId; use crate::context::{CliContext, RpcContext}; use crate::db::model::public::NetworkInterfaceInfo; -use crate::net::gateway::{DynInterfaceFilter, InterfaceFilter}; use crate::prelude::*; use 
crate::util::Invoke; use crate::util::future::NonDetachingJoinHandle; use crate::util::serde::{HandlerExtSerde, display_serializable}; use crate::util::sync::Watch; +use crate::{GatewayId, HOST_IP}; pub const START9_BRIDGE_IFACE: &str = "lxcbr0"; -pub const FIRST_DYNAMIC_PRIVATE_PORT: u16 = 49152; +const EPHEMERAL_PORT_START: u16 = 49152; +// vhost.rs:89 — not allowed: <=1024, >=32768, 5355, 5432, 9050, 6010, 9051, 5353 +const RESTRICTED_PORTS: &[u16] = &[5353, 5355, 5432, 6010, 9050, 9051]; + +fn is_restricted(port: u16) -> bool { + port <= 1024 || RESTRICTED_PORTS.contains(&port) +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +pub struct ForwardRequirements { + pub public_gateways: BTreeSet, + pub private_ips: BTreeSet, + pub secure: bool, +} + +impl std::fmt::Display for ForwardRequirements { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "ForwardRequirements {{ public: {:?}, private: {:?}, secure: {} }}", + self.public_gateways, self.private_ips, self.secure + ) + } +} #[derive(Debug, Deserialize, Serialize)] -pub struct AvailablePorts(IdPool); +pub struct AvailablePorts(BTreeMap); impl AvailablePorts { pub fn new() -> Self { - Self(IdPool::new_ranged(FIRST_DYNAMIC_PRIVATE_PORT..u16::MAX)) + Self(BTreeMap::new()) } - pub fn alloc(&mut self) -> Result { - self.0.request_id().ok_or_else(|| { - Error::new( - eyre!("{}", t!("net.forward.no-dynamic-ports-available")), - ErrorKind::Network, - ) - }) + pub fn alloc(&mut self, ssl: bool) -> Result { + let mut rng = rand::rng(); + for _ in 0..1000 { + let port = rng.random_range(EPHEMERAL_PORT_START..u16::MAX); + if !self.0.contains_key(&port) { + self.0.insert(port, ssl); + return Ok(port); + } + } + Err(Error::new( + eyre!("{}", t!("net.forward.no-dynamic-ports-available")), + ErrorKind::Network, + )) + } + /// Try to allocate a specific port. Returns Some(port) if available, None if taken/restricted. 
+ pub fn try_alloc(&mut self, port: u16, ssl: bool) -> Option { + if is_restricted(port) || self.0.contains_key(&port) { + return None; + } + self.0.insert(port, ssl); + Some(port) + } + + pub fn set_ssl(&mut self, port: u16, ssl: bool) { + self.0.insert(port, ssl); + } + + /// Returns whether a given allocated port is SSL. + pub fn is_ssl(&self, port: u16) -> bool { + self.0.get(&port).copied().unwrap_or(false) } pub fn free(&mut self, ports: impl IntoIterator) { for port in ports { - self.0.return_id(port).unwrap_or_default(); + self.0.remove(&port); } } } @@ -61,10 +107,10 @@ pub fn forward_api() -> ParentHandler { } let mut table = Table::new(); - table.add_row(row![bc => "FROM", "TO", "FILTER"]); + table.add_row(row![bc => "FROM", "TO", "REQS"]); for (external, target) in res.0 { - table.add_row(row![external, target.target, target.filter]); + table.add_row(row![external, target.target, target.reqs]); } table.print_tty(false)?; @@ -79,6 +125,7 @@ struct ForwardMapping { source: SocketAddrV4, target: SocketAddrV4, target_prefix: u8, + src_filter: Option, rc: Weak<()>, } @@ -93,9 +140,10 @@ impl PortForwardState { source: SocketAddrV4, target: SocketAddrV4, target_prefix: u8, + src_filter: Option, ) -> Result, Error> { if let Some(existing) = self.mappings.get_mut(&source) { - if existing.target == target { + if existing.target == target && existing.src_filter == src_filter { if let Some(existing_rc) = existing.rc.upgrade() { return Ok(existing_rc); } else { @@ -104,21 +152,28 @@ impl PortForwardState { return Ok(rc); } } else { - // Different target, need to remove old and add new + // Different target or src_filter, need to remove old and add new if let Some(mapping) = self.mappings.remove(&source) { - unforward(mapping.source, mapping.target, mapping.target_prefix).await?; + unforward( + mapping.source, + mapping.target, + mapping.target_prefix, + mapping.src_filter.as_ref(), + ) + .await?; } } } let rc = Arc::new(()); - forward(source, target, 
target_prefix).await?; + forward(source, target, target_prefix, src_filter.as_ref()).await?; self.mappings.insert( source, ForwardMapping { source, target, target_prefix, + src_filter, rc: Arc::downgrade(&rc), }, ); @@ -136,7 +191,13 @@ impl PortForwardState { for source in to_remove { if let Some(mapping) = self.mappings.remove(&source) { - unforward(mapping.source, mapping.target, mapping.target_prefix).await?; + unforward( + mapping.source, + mapping.target, + mapping.target_prefix, + mapping.src_filter.as_ref(), + ) + .await?; } } Ok(()) @@ -157,9 +218,14 @@ impl Drop for PortForwardState { let mappings = std::mem::take(&mut self.mappings); tokio::spawn(async move { for (_, mapping) in mappings { - unforward(mapping.source, mapping.target, mapping.target_prefix) - .await - .log_err(); + unforward( + mapping.source, + mapping.target, + mapping.target_prefix, + mapping.src_filter.as_ref(), + ) + .await + .log_err(); } }); } @@ -171,6 +237,7 @@ enum PortForwardCommand { source: SocketAddrV4, target: SocketAddrV4, target_prefix: u8, + src_filter: Option, respond: oneshot::Sender, Error>>, }, Gc { @@ -191,7 +258,13 @@ pub async fn add_iptables_rule(nat: bool, undo: bool, args: &[&str]) -> Result<( if nat { cmd.arg("-t").arg("nat"); } - if undo != !cmd.arg("-C").args(args).status().await?.success() { + let exists = cmd + .arg("-C") + .args(args) + .invoke(ErrorKind::Network) + .await + .is_ok(); + if undo != !exists { let mut cmd = Command::new("iptables"); if nat { cmd.arg("-t").arg("nat"); @@ -257,9 +330,12 @@ impl PortForwardController { source, target, target_prefix, + src_filter, respond, } => { - let result = state.add_forward(source, target, target_prefix).await; + let result = state + .add_forward(source, target, target_prefix, src_filter) + .await; respond.send(result).ok(); } PortForwardCommand::Gc { respond } => { @@ -284,6 +360,7 @@ impl PortForwardController { source: SocketAddrV4, target: SocketAddrV4, target_prefix: u8, + src_filter: Option, ) -> 
Result, Error> { let (send, recv) = oneshot::channel(); self.req @@ -291,6 +368,7 @@ impl PortForwardController { source, target, target_prefix, + src_filter, respond: send, }) .map_err(err_has_exited)?; @@ -321,14 +399,14 @@ struct InterfaceForwardRequest { external: u16, target: SocketAddrV4, target_prefix: u8, - filter: DynInterfaceFilter, + reqs: ForwardRequirements, rc: Arc<()>, } #[derive(Clone)] struct InterfaceForwardEntry { external: u16, - filter: BTreeMap)>, + targets: BTreeMap)>, // Maps source SocketAddr -> strong reference for the forward created in PortForwardController forwards: BTreeMap>, } @@ -346,7 +424,7 @@ impl InterfaceForwardEntry { fn new(external: u16) -> Self { Self { external, - filter: BTreeMap::new(), + targets: BTreeMap::new(), forwards: BTreeMap::new(), } } @@ -358,28 +436,37 @@ impl InterfaceForwardEntry { ) -> Result<(), Error> { let mut keep = BTreeSet::::new(); - for (iface, info) in ip_info.iter() { - if let Some((target, target_prefix)) = self - .filter - .iter() - .filter(|(_, (_, _, rc))| rc.strong_count() > 0) - .find(|(filter, _)| filter.filter(iface, info)) - .map(|(_, (target, target_prefix, _))| (*target, *target_prefix)) - { - if let Some(ip_info) = &info.ip_info { - for addr in ip_info.subnets.iter().filter_map(|net| { - if let IpAddr::V4(ip) = net.addr() { - Some(SocketAddrV4::new(ip, self.external)) - } else { - None + for (gw_id, info) in ip_info.iter() { + if let Some(ip_info) = &info.ip_info { + for subnet in ip_info.subnets.iter() { + if let IpAddr::V4(ip) = subnet.addr() { + let addr = SocketAddrV4::new(ip, self.external); + if keep.contains(&addr) { + continue; } - }) { - keep.insert(addr); - if !self.forwards.contains_key(&addr) { - let rc = port_forward - .add_forward(addr, target, target_prefix) + + for (reqs, (target, target_prefix, rc)) in self.targets.iter() { + if rc.strong_count() == 0 { + continue; + } + if !reqs.secure && !info.secure() { + continue; + } + + let src_filter = if 
reqs.public_gateways.contains(gw_id) { + None + } else if reqs.private_ips.contains(&IpAddr::V4(ip)) { + Some(subnet.trunc()) + } else { + continue; + }; + + keep.insert(addr); + let fwd_rc = port_forward + .add_forward(addr, *target, *target_prefix, src_filter) .await?; - self.forwards.insert(addr, rc); + self.forwards.insert(addr, fwd_rc); + break; } } } @@ -398,7 +485,7 @@ impl InterfaceForwardEntry { external, target, target_prefix, - filter, + reqs, mut rc, }: InterfaceForwardRequest, ip_info: &OrdMap, @@ -412,8 +499,8 @@ impl InterfaceForwardEntry { } let entry = self - .filter - .entry(filter) + .targets + .entry(reqs) .or_insert_with(|| (target, target_prefix, Arc::downgrade(&rc))); if entry.0 != target { entry.0 = target; @@ -436,7 +523,7 @@ impl InterfaceForwardEntry { ip_info: &OrdMap, port_forward: &PortForwardController, ) -> Result<(), Error> { - self.filter.retain(|_, (_, _, rc)| rc.strong_count() > 0); + self.targets.retain(|_, (_, _, rc)| rc.strong_count() > 0); self.update(ip_info, port_forward).await } @@ -495,7 +582,7 @@ pub struct ForwardTable(pub BTreeMap); pub struct ForwardTarget { pub target: SocketAddrV4, pub target_prefix: u8, - pub filter: String, + pub reqs: String, } impl From<&InterfaceForwardState> for ForwardTable { @@ -506,16 +593,16 @@ impl From<&InterfaceForwardState> for ForwardTable { .iter() .flat_map(|entry| { entry - .filter + .targets .iter() .filter(|(_, (_, _, rc))| rc.strong_count() > 0) - .map(|(filter, (target, target_prefix, _))| { + .map(|(reqs, (target, target_prefix, _))| { ( entry.external, ForwardTarget { target: *target, target_prefix: *target_prefix, - filter: format!("{:#?}", filter), + reqs: format!("{reqs}"), }, ) }) @@ -534,16 +621,6 @@ enum InterfaceForwardCommand { DumpTable(oneshot::Sender), } -#[test] -fn test() { - use crate::net::gateway::SecureFilter; - - assert_ne!( - false.into_dyn(), - SecureFilter { secure: false }.into_dyn().into_dyn() - ); -} - pub struct InterfacePortForwardController { req: 
mpsc::UnboundedSender, _thread: NonDetachingJoinHandle<()>, @@ -593,7 +670,7 @@ impl InterfacePortForwardController { pub async fn add( &self, external: u16, - filter: DynInterfaceFilter, + reqs: ForwardRequirements, target: SocketAddrV4, target_prefix: u8, ) -> Result, Error> { @@ -605,7 +682,7 @@ impl InterfacePortForwardController { external, target, target_prefix, - filter, + reqs, rc, }, send, @@ -637,15 +714,25 @@ async fn forward( source: SocketAddrV4, target: SocketAddrV4, target_prefix: u8, + src_filter: Option<&IpNet>, ) -> Result<(), Error> { - Command::new("/usr/lib/startos/scripts/forward-port") - .env("sip", source.ip().to_string()) + let mut cmd = Command::new("/usr/lib/startos/scripts/forward-port"); + cmd.env("sip", source.ip().to_string()) .env("dip", target.ip().to_string()) .env("dprefix", target_prefix.to_string()) .env("sport", source.port().to_string()) .env("dport", target.port().to_string()) - .invoke(ErrorKind::Network) - .await?; + .env( + "bridge_subnet", + Ipv4Net::new(HOST_IP.into(), 24) + .with_kind(ErrorKind::ParseNetAddress)? 
+ .trunc() + .to_string(), + ); + if let Some(subnet) = src_filter { + cmd.env("src_subnet", subnet.to_string()); + } + cmd.invoke(ErrorKind::Network).await?; Ok(()) } @@ -653,15 +740,18 @@ async fn unforward( source: SocketAddrV4, target: SocketAddrV4, target_prefix: u8, + src_filter: Option<&IpNet>, ) -> Result<(), Error> { - Command::new("/usr/lib/startos/scripts/forward-port") - .env("UNDO", "1") + let mut cmd = Command::new("/usr/lib/startos/scripts/forward-port"); + cmd.env("UNDO", "1") .env("sip", source.ip().to_string()) .env("dip", target.ip().to_string()) .env("dprefix", target_prefix.to_string()) .env("sport", source.port().to_string()) - .env("dport", target.port().to_string()) - .invoke(ErrorKind::Network) - .await?; + .env("dport", target.port().to_string()); + if let Some(subnet) = src_filter { + cmd.env("src_subnet", subnet.to_string()); + } + cmd.invoke(ErrorKind::Network).await?; Ok(()) } diff --git a/core/src/net/gateway.rs b/core/src/net/gateway.rs index 6079efd76..49bf35a23 100644 --- a/core/src/net/gateway.rs +++ b/core/src/net/gateway.rs @@ -1,17 +1,14 @@ -use std::any::Any; use std::collections::{BTreeMap, BTreeSet, HashMap}; -use std::fmt; use std::future::Future; -use std::net::{IpAddr, Ipv4Addr, SocketAddr, SocketAddrV6}; -use std::sync::{Arc, Weak}; -use std::task::{Poll, ready}; -use std::time::Duration; +use std::net::{IpAddr, Ipv4Addr, Ipv6Addr, SocketAddr}; +use std::sync::Arc; +use std::task::Poll; +use std::time::{Duration, Instant}; use clap::Parser; -use futures::future::Either; use futures::{FutureExt, Stream, StreamExt, TryStreamExt}; use imbl::{OrdMap, OrdSet}; -use imbl_value::InternedString; +use imbl_value::{InternedString, Value}; use ipnet::IpNet; use itertools::Itertools; use nix::net::if_::if_nametoindex; @@ -23,6 +20,7 @@ use tokio::net::TcpListener; use tokio::process::Command; use tokio::sync::oneshot; use ts_rs::TS; +use url::Url; use visit_rs::{Visit, VisitFields}; use zbus::proxy::{PropertyChanged, PropertyStream, 
SignalStream}; use zbus::zvariant::{ @@ -30,21 +28,21 @@ use zbus::zvariant::{ }; use zbus::{Connection, proxy}; -use crate::GatewayId; use crate::context::{CliContext, RpcContext}; use crate::db::model::Database; use crate::db::model::public::{IpInfo, NetworkInterfaceInfo, NetworkInterfaceType}; use crate::net::forward::START9_BRIDGE_IFACE; use crate::net::gateway::device::DeviceProxy; -use crate::net::utils::ipv6_is_link_local; -use crate::net::web_server::{Accept, AcceptStream, Acceptor, MetadataVisitor}; +use crate::net::host::all_hosts; +use crate::net::web_server::{Accept, AcceptStream, MetadataVisitor, TcpMetadata}; use crate::prelude::*; use crate::util::Invoke; use crate::util::collections::OrdMapIterMut; use crate::util::future::{NonDetachingJoinHandle, Until}; use crate::util::io::open_file; use crate::util::serde::{HandlerExtSerde, display_serializable}; -use crate::util::sync::{SyncMutex, Watch}; +use crate::util::sync::Watch; +use crate::{GatewayId, PackageId}; pub fn gateway_api() -> ParentHandler { ParentHandler::new() @@ -60,7 +58,7 @@ pub fn gateway_api() -> ParentHandler { } let mut table = Table::new(); - table.add_row(row![bc => "INTERFACE", "TYPE", "PUBLIC", "ADDRESSES", "WAN IP"]); + table.add_row(row![bc => "INTERFACE", "TYPE", "ADDRESSES", "WAN IP"]); for (iface, info) in res { table.add_row(row![ iface, @@ -68,7 +66,6 @@ pub fn gateway_api() -> ParentHandler { .as_ref() .and_then(|ip_info| ip_info.device_type) .map_or_else(|| "UNKNOWN".to_owned(), |ty| format!("{ty:?}")), - info.public(), info.ip_info.as_ref().map_or_else( || "".to_owned(), |ip_info| ip_info @@ -98,22 +95,6 @@ pub fn gateway_api() -> ParentHandler { .with_about("about.show-gateways-startos-can-listen-on") .with_call_remote::(), ) - .subcommand( - "set-public", - from_fn_async(set_public) - .with_metadata("sync_db", Value::Bool(true)) - .no_display() - .with_about("about.indicate-gateway-inbound-access-from-wan") - .with_call_remote::(), - ) - .subcommand( - "unset-public", 
- from_fn_async(unset_public) - .with_metadata("sync_db", Value::Bool(true)) - .no_display() - .with_about("about.allow-gateway-infer-inbound-access-from-wan") - .with_call_remote::(), - ) .subcommand( "forget", from_fn_async(forget_iface) @@ -130,6 +111,28 @@ pub fn gateway_api() -> ParentHandler { .with_about("about.rename-gateway") .with_call_remote::(), ) + .subcommand( + "check-port", + from_fn_async(check_port) + .with_display_serializable() + .with_about("about.check-port-reachability") + .with_call_remote::(), + ) + .subcommand( + "check-dns", + from_fn_async(check_dns) + .with_display_serializable() + .with_about("about.check-dns-configuration") + .with_call_remote::(), + ) + .subcommand( + "set-default-outbound", + from_fn_async(set_default_outbound) + .with_metadata("sync_db", Value::Bool(true)) + .no_display() + .with_about("about.set-default-outbound-gateway") + .with_call_remote::(), + ) } async fn list_interfaces( @@ -139,40 +142,7 @@ async fn list_interfaces( } #[derive(Debug, Clone, Deserialize, Serialize, Parser, TS)] -struct NetworkInterfaceSetPublicParams { - #[arg(help = "help.arg.gateway-id")] - gateway: GatewayId, - #[arg(help = "help.arg.is-public")] - public: Option, -} - -async fn set_public( - ctx: RpcContext, - NetworkInterfaceSetPublicParams { gateway, public }: NetworkInterfaceSetPublicParams, -) -> Result<(), Error> { - ctx.net_controller - .net_iface - .set_public(&gateway, Some(public.unwrap_or(true))) - .await -} - -#[derive(Debug, Clone, Deserialize, Serialize, Parser, TS)] -struct UnsetPublicParams { - #[arg(help = "help.arg.gateway-id")] - gateway: GatewayId, -} - -async fn unset_public( - ctx: RpcContext, - UnsetPublicParams { gateway }: UnsetPublicParams, -) -> Result<(), Error> { - ctx.net_controller - .net_iface - .set_public(&gateway, None) - .await -} - -#[derive(Debug, Clone, Deserialize, Serialize, Parser, TS)] +#[ts(export)] struct ForgetGatewayParams { #[arg(help = "help.arg.gateway-id")] gateway: GatewayId, @@ -186,6 
+156,7 @@ async fn forget_iface( } #[derive(Debug, Clone, Deserialize, Serialize, Parser, TS)] +#[ts(export)] struct RenameGatewayParams { #[arg(help = "help.arg.gateway-id")] id: GatewayId, @@ -200,6 +171,260 @@ async fn set_name( ctx.net_controller.net_iface.set_name(&id, name).await } +#[derive(Debug, Clone, Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct CheckPortParams { + #[arg(help = "help.arg.port")] + port: u16, + #[arg(help = "help.arg.gateway-id")] + gateway: GatewayId, +} + +#[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct CheckPortRes { + pub ip: Ipv4Addr, + pub port: u16, + pub open_externally: bool, + pub open_internally: bool, + pub hairpinning: bool, +} + +#[derive(Debug, Clone, Deserialize, Serialize)] +#[serde(rename_all = "camelCase")] +pub struct IfconfigPortRes { + pub ip: Ipv4Addr, + pub port: u16, + pub reachable: bool, +} + +async fn check_port( + ctx: RpcContext, + CheckPortParams { port, gateway }: CheckPortParams, +) -> Result { + let db = ctx.db.peek().await; + let base_url = db.as_public().as_server_info().as_ifconfig_url().de()?; + let gateways = db + .as_public() + .as_server_info() + .as_network() + .as_gateways() + .de()?; + let gw_info = gateways + .get(&gateway) + .ok_or_else(|| Error::new(eyre!("unknown gateway: {gateway}"), ErrorKind::NotFound))?; + let ip_info = gw_info.ip_info.as_ref().ok_or_else(|| { + Error::new( + eyre!("gateway {gateway} has no IP info"), + ErrorKind::NotFound, + ) + })?; + + let internal_ips = ip_info + .subnets + .iter() + .map(|i| i.addr()) + .filter(|a| a.is_ipv4()) + .map(|a| SocketAddr::new(a, port)) + .collect::>(); + + let open_internally = tokio::time::timeout( + Duration::from_secs(5), + tokio::net::TcpStream::connect(&*internal_ips), + ) + .await + .map_or(false, |r| r.is_ok()); + + let client = reqwest::Client::builder(); + #[cfg(target_os = "linux")] + let client = 
client.interface(gateway.as_str()); + let url = base_url + .join(&format!("/port/{port}")) + .with_kind(ErrorKind::ParseUrl)?; + let IfconfigPortRes { + ip, + port, + reachable: open_externally, + } = client + .build()? + .get(url) + .timeout(Duration::from_secs(10)) + .send() + .await? + .error_for_status()? + .json() + .await?; + + let hairpinning = tokio::time::timeout( + Duration::from_secs(5), + tokio::net::TcpStream::connect(SocketAddr::new(ip.into(), port)), + ) + .await + .map_or(false, |r| r.is_ok()); + + Ok(CheckPortRes { + ip, + port, + open_externally, + open_internally, + hairpinning, + }) +} + +#[derive(Debug, Clone, Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct CheckDnsParams { + #[arg(help = "help.arg.gateway-id")] + gateway: GatewayId, +} + +async fn check_dns( + ctx: RpcContext, + CheckDnsParams { gateway }: CheckDnsParams, +) -> Result { + use hickory_server::proto::xfer::Protocol; + use hickory_server::resolver::Resolver; + use hickory_server::resolver::config::{NameServerConfig, ResolverConfig, ResolverOpts}; + use hickory_server::resolver::name_server::TokioConnectionProvider; + + let ip_info = ctx.net_controller.net_iface.watcher.ip_info(); + let gw_info = ip_info + .get(&gateway) + .ok_or_else(|| Error::new(eyre!("unknown gateway: {gateway}"), ErrorKind::NotFound))?; + let gw_ip_info = gw_info.ip_info.as_ref().ok_or_else(|| { + Error::new( + eyre!("gateway {gateway} has no IP info"), + ErrorKind::NotFound, + ) + })?; + + for dns_ip in &gw_ip_info.dns_servers { + // Case 1: DHCP DNS == server IP → immediate success + if gw_ip_info.subnets.iter().any(|s| s.addr() == *dns_ip) { + return Ok(true); + } + + // Case 2: DHCP DNS is on LAN but not the server → TXT challenge check + if gw_ip_info.subnets.iter().any(|s| s.contains(dns_ip)) { + let nonce = rand::random::(); + let challenge_domain = InternedString::intern(format!("_dns-check-{nonce}.startos")); + let challenge_value = + 
InternedString::intern(crate::rpc_continuations::Guid::new().as_ref()); + + let _guard = ctx + .net_controller + .dns + .add_challenge(challenge_domain.clone(), challenge_value.clone())?; + + let mut config = ResolverConfig::new(); + config.add_name_server(NameServerConfig::new( + SocketAddr::new(*dns_ip, 53), + Protocol::Udp, + )); + config.add_name_server(NameServerConfig::new( + SocketAddr::new(*dns_ip, 53), + Protocol::Tcp, + )); + let mut opts = ResolverOpts::default(); + opts.timeout = Duration::from_secs(5); + opts.attempts = 1; + + let resolver = + Resolver::builder_with_config(config, TokioConnectionProvider::default()) + .with_options(opts) + .build(); + let txt_lookup = resolver.txt_lookup(&*challenge_domain).await; + + return Ok(match txt_lookup { + Ok(lookup) => lookup.iter().any(|txt| { + txt.iter() + .any(|data| data.as_ref() == challenge_value.as_bytes()) + }), + Err(_) => false, + }); + } + } + + // Case 3: No DNS servers in subnet → failure + Ok(false) +} + +#[derive(Debug, Clone, Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +struct SetDefaultOutboundParams { + #[arg(help = "help.arg.gateway-id")] + gateway: Option, +} + +async fn set_default_outbound( + ctx: RpcContext, + SetDefaultOutboundParams { gateway }: SetDefaultOutboundParams, +) -> Result<(), Error> { + if let Some(ref gw) = gateway { + let ip_info = ctx.net_controller.net_iface.watcher.ip_info(); + let info = ip_info + .get(gw) + .ok_or_else(|| Error::new(eyre!("unknown gateway: {gw}"), ErrorKind::NotFound))?; + ensure_code!( + info.ip_info.is_some(), + ErrorKind::InvalidRequest, + "gateway {gw} is not connected" + ); + } + ctx.db + .mutate(|db| { + db.as_public_mut() + .as_server_info_mut() + .as_network_mut() + .as_default_outbound_mut() + .ser(&gateway) + }) + .await + .result +} + +#[derive(Debug, Clone, Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct SetOutboundGatewayParams { + 
#[arg(help = "help.arg.package-id")] + package: PackageId, + #[arg(help = "help.arg.gateway-id")] + gateway: Option, +} + +pub async fn set_outbound_gateway( + ctx: RpcContext, + SetOutboundGatewayParams { package, gateway }: SetOutboundGatewayParams, +) -> Result<(), Error> { + if let Some(ref gw) = gateway { + let ip_info = ctx.net_controller.net_iface.watcher.ip_info(); + let info = ip_info + .get(gw) + .ok_or_else(|| Error::new(eyre!("unknown gateway: {gw}"), ErrorKind::NotFound))?; + ensure_code!( + info.ip_info.is_some(), + ErrorKind::InvalidRequest, + "gateway {gw} is not connected" + ); + } + ctx.db + .mutate(|db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(&package) + .or_not_found(&package)? + .as_outbound_gateway_mut() + .ser(&gateway) + }) + .await + .result +} + #[proxy( interface = "org.freedesktop.NetworkManager", default_service = "org.freedesktop.NetworkManager", @@ -423,6 +648,7 @@ impl<'a> StubStream<'a> for SignalStream<'a> { async fn watcher( watch_ip_info: Watch>, watch_activation: Watch>, + db: Option>, ) { loop { let res: Result<(), Error> = async { @@ -496,6 +722,7 @@ async fn watcher( device_proxy.clone(), iface.clone(), &watch_ip_info, + db.as_ref(), ))); ifaces.insert(iface); } @@ -526,33 +753,85 @@ async fn watcher( } } -async fn get_wan_ipv4(iface: &str) -> Result, Error> { +async fn get_wan_ipv4(iface: &str, base_url: &Url) -> Result, Error> { let client = reqwest::Client::builder(); #[cfg(target_os = "linux")] let client = client.interface(iface); - Ok(client + let url = base_url.join("/ip").with_kind(ErrorKind::ParseUrl)?; + let text = client .build()? - .get("https://ip4only.me/api/") + .get(url) .timeout(Duration::from_secs(10)) .send() .await? .error_for_status()? .text() - .await? - .split(",") - .skip(1) - .next() - .filter(|s| !s.is_empty()) - .map(|s| s.parse()) - .transpose()?) 
+ .await?; + let trimmed = text.trim(); + if trimmed.is_empty() { + return Ok(None); + } + Ok(Some(trimmed.parse()?)) } -#[instrument(skip(connection, device_proxy, write_to))] +struct PolicyRoutingCleanup { + table_id: u32, + iface: String, +} +impl Drop for PolicyRoutingCleanup { + fn drop(&mut self) { + let table_str = self.table_id.to_string(); + let iface = std::mem::take(&mut self.iface); + tokio::spawn(async move { + Command::new("ip") + .arg("rule") + .arg("del") + .arg("fwmark") + .arg(&table_str) + .arg("lookup") + .arg(&table_str) + .arg("priority") + .arg("50") + .invoke(ErrorKind::Network) + .await + .log_err(); + Command::new("ip") + .arg("route") + .arg("flush") + .arg("table") + .arg(&table_str) + .invoke(ErrorKind::Network) + .await + .log_err(); + Command::new("iptables") + .arg("-t") + .arg("mangle") + .arg("-D") + .arg("PREROUTING") + .arg("-i") + .arg(&iface) + .arg("-m") + .arg("conntrack") + .arg("--ctstate") + .arg("NEW") + .arg("-j") + .arg("CONNMARK") + .arg("--set-mark") + .arg(&table_str) + .invoke(ErrorKind::Network) + .await + .log_err(); + }); + } +} + +#[instrument(skip(connection, device_proxy, write_to, db))] async fn watch_ip( connection: &Connection, device_proxy: device::DeviceProxy<'_>, iface: GatewayId, write_to: &Watch>, + db: Option<&TypedPatchDb>, ) -> Result<(), Error> { let mut until = Until::new() .with_stream( @@ -572,12 +851,14 @@ async fn watch_ip( .with_stream(device_proxy.receive_ip6_config_changed().await.stub()) .with_async_fn(|| { async { - tokio::time::sleep(Duration::from_secs(300)).await; + tokio::time::sleep(Duration::from_secs(600)).await; Ok(()) } .fuse() }); + let mut prev_attempt: Option = None; + loop { until .run(async { @@ -661,133 +942,37 @@ async fn watch_ip( None }; + // Policy routing: track per-interface table for cleanup on scope exit + let policy_table_id = if !matches!( + device_type, + Some(NetworkInterfaceType::Bridge | NetworkInterfaceType::Loopback) + ) { + if_nametoindex(iface.as_str()) + 
.map(|idx| 1000 + idx) + .log_err() + } else { + None + }; + let policy_guard: Option = + policy_table_id.map(|t| PolicyRoutingCleanup { + table_id: t, + iface: iface.as_str().to_owned(), + }); + loop { until - .run(async { - let addresses = ip4_proxy - .address_data() - .await? - .into_iter() - .chain(ip6_proxy.address_data().await?) - .collect_vec(); - let lan_ip = [ - Some(ip4_proxy.gateway().await?) - .filter(|g| !g.is_empty()) - .and_then(|g| g.parse::().log_err()), - Some(ip6_proxy.gateway().await?) - .filter(|g| !g.is_empty()) - .and_then(|g| g.parse::().log_err()), - ] - .into_iter() - .filter_map(|a| a) - .collect(); - let mut ntp_servers = OrdSet::new(); - let mut dns_servers = OrdSet::new(); - if let Some(dhcp4_proxy) = &dhcp4_proxy { - let dhcp = dhcp4_proxy.options().await?; - if let Some(ntp) = dhcp.ntp_servers { - ntp_servers.extend( - ntp.split_whitespace() - .map(InternedString::intern), - ); - } - if let Some(dns) = dhcp.domain_name_servers { - dns_servers.extend( - dns.split_ascii_whitespace() - .filter_map(|s| { - s.parse::().log_err() - }) - .collect::>(), - ); - } - } - let scope_id = if_nametoindex(iface.as_str()) - .with_kind(ErrorKind::Network)?; - let subnets: OrdSet = addresses - .into_iter() - .map(IpNet::try_from) - .try_collect()?; - // let tables = ip4_proxy.route_data().await?.into_iter().filter_map(|d|d.table).collect::>(); - // if !tables.is_empty() { - // let rules = String::from_utf8(Command::new("ip").arg("rule").arg("list").invoke(ErrorKind::Network).await?)?; - // for table in tables { - // for subnet in subnets.iter().filter(|s| s.addr().is_ipv4()) { - // let subnet_string = subnet.trunc().to_string(); - // let rule = ["from", &subnet_string, "lookup", &table.to_string()]; - // if !rules.contains(&rule.join(" ")) { - // if rules.contains(&rule[..2].join(" ")) { - // Command::new("ip").arg("rule").arg("del").args(&rule[..2]).invoke(ErrorKind::Network).await?; - // } - // 
Command::new("ip").arg("rule").arg("add").args(rule).invoke(ErrorKind::Network).await?; - // } - // } - // } - // } - let wan_ip = if !subnets.is_empty() - && !matches!( - device_type, - Some( - NetworkInterfaceType::Bridge - | NetworkInterfaceType::Loopback - ) - ) { - match get_wan_ipv4(iface.as_str()).await { - Ok(a) => a, - Err(e) => { - tracing::error!( - "{}", - t!("net.gateway.failed-to-determine-wan-ip", iface = iface.to_string(), error = e.to_string()) - ); - tracing::debug!("{e:?}"); - None - } - } - } else { - None - }; - let mut ip_info = IpInfo { - name: name.clone(), - scope_id, - device_type, - subnets, - lan_ip, - wan_ip, - ntp_servers, - dns_servers, - }; - - write_to.send_if_modified( - |m: &mut OrdMap| { - let (name, public, secure, prev_wan_ip) = m - .get(&iface) - .map_or((None, None, None, None), |i| { - ( - i.name.clone(), - i.public, - i.secure, - i.ip_info - .as_ref() - .and_then(|i| i.wan_ip), - ) - }); - ip_info.wan_ip = ip_info.wan_ip.or(prev_wan_ip); - let ip_info = Arc::new(ip_info); - m.insert( - iface.clone(), - NetworkInterfaceInfo { - name, - public, - secure, - ip_info: Some(ip_info.clone()), - }, - ) - .filter(|old| &old.ip_info == &Some(ip_info)) - .is_none() - }, - ); - - Ok::<_, Error>(()) - }) + .run(poll_ip_info( + &ip4_proxy, + &ip6_proxy, + &dhcp4_proxy, + &policy_guard, + &iface, + &mut prev_attempt, + db, + write_to, + device_type, + &name, + )) .await?; } }) @@ -798,6 +983,319 @@ async fn watch_ip( } } +async fn apply_policy_routing( + guard: &PolicyRoutingCleanup, + iface: &GatewayId, + lan_ip: &OrdSet, +) -> Result<(), Error> { + let table_id = guard.table_id; + let table_str = table_id.to_string(); + + let ipv4_gateway: Option = lan_ip + .iter() + .find_map(|ip| match ip { + IpAddr::V4(v4) => Some(v4), + _ => None, + }) + .copied(); + + // Flush and rebuild per-interface routing table. 
+ // Clone all non-default routes from the main table so that LAN IPs on + // other subnets remain reachable when the priority-75 catch-all overrides + // default routing, then replace the default route with this interface's. + Command::new("ip") + .arg("route") + .arg("flush") + .arg("table") + .arg(&table_str) + .invoke(ErrorKind::Network) + .await + .log_err(); + if let Ok(main_routes) = Command::new("ip") + .arg("route") + .arg("show") + .arg("table") + .arg("main") + .invoke(ErrorKind::Network) + .await + .and_then(|b| String::from_utf8(b).with_kind(ErrorKind::Utf8)) + { + for line in main_routes.lines() { + let line = line.trim(); + if line.is_empty() || line.starts_with("default") { + continue; + } + let mut cmd = Command::new("ip"); + cmd.arg("route").arg("add"); + for part in line.split_whitespace() { + // Skip status flags that appear in route output but + // are not valid for `ip route add`. + if part == "linkdown" || part == "dead" { + continue; + } + cmd.arg(part); + } + cmd.arg("table").arg(&table_str); + cmd.invoke(ErrorKind::Network).await.log_err(); + } + } + // Add default route via this interface's gateway + { + let mut cmd = Command::new("ip"); + cmd.arg("route").arg("add").arg("default"); + if let Some(gw) = ipv4_gateway { + cmd.arg("via").arg(gw.to_string()); + } + cmd.arg("dev") + .arg(iface.as_str()) + .arg("table") + .arg(&table_str); + if ipv4_gateway.is_none() { + cmd.arg("scope").arg("link"); + } + cmd.invoke(ErrorKind::Network).await.log_err(); + } + + // Ensure global CONNMARK restore rules in mangle PREROUTING (forwarded + // packets) and OUTPUT (locally-generated replies). Both are needed: + // PREROUTING handles DNAT-forwarded traffic, OUTPUT handles replies from + // locally-bound listeners (e.g. vhost). The `-m mark --mark 0` condition + // ensures we only restore when the packet has no existing fwmark, + // preserving marks set by WireGuard on encapsulation packets. 
+ for chain in ["PREROUTING", "OUTPUT"] { + if Command::new("iptables") + .arg("-t") + .arg("mangle") + .arg("-C") + .arg(chain) + .arg("-m") + .arg("mark") + .arg("--mark") + .arg("0") + .arg("-j") + .arg("CONNMARK") + .arg("--restore-mark") + .invoke(ErrorKind::Network) + .await + .is_err() + { + Command::new("iptables") + .arg("-t") + .arg("mangle") + .arg("-I") + .arg(chain) + .arg("1") + .arg("-m") + .arg("mark") + .arg("--mark") + .arg("0") + .arg("-j") + .arg("CONNMARK") + .arg("--restore-mark") + .invoke(ErrorKind::Network) + .await + .log_err(); + } + } + + // Mark NEW connections arriving on this interface with its routing + // table ID via conntrack mark + if Command::new("iptables") + .arg("-t") + .arg("mangle") + .arg("-C") + .arg("PREROUTING") + .arg("-i") + .arg(iface.as_str()) + .arg("-m") + .arg("conntrack") + .arg("--ctstate") + .arg("NEW") + .arg("-j") + .arg("CONNMARK") + .arg("--set-mark") + .arg(&table_str) + .invoke(ErrorKind::Network) + .await + .is_err() + { + Command::new("iptables") + .arg("-t") + .arg("mangle") + .arg("-A") + .arg("PREROUTING") + .arg("-i") + .arg(iface.as_str()) + .arg("-m") + .arg("conntrack") + .arg("--ctstate") + .arg("NEW") + .arg("-j") + .arg("CONNMARK") + .arg("--set-mark") + .arg(&table_str) + .invoke(ErrorKind::Network) + .await + .log_err(); + } + + // Ensure fwmark-based ip rule for this interface's table + let rules_output = String::from_utf8( + Command::new("ip") + .arg("rule") + .arg("list") + .invoke(ErrorKind::Network) + .await?, + )?; + if !rules_output + .lines() + .any(|l| l.contains("fwmark") && l.contains(&format!("lookup {table_id}"))) + { + Command::new("ip") + .arg("rule") + .arg("add") + .arg("fwmark") + .arg(&table_str) + .arg("lookup") + .arg(&table_str) + .arg("priority") + .arg("50") + .invoke(ErrorKind::Network) + .await + .log_err(); + } + + Ok(()) +} + +async fn poll_ip_info( + ip4_proxy: &Ip4ConfigProxy<'_>, + ip6_proxy: &Ip6ConfigProxy<'_>, + dhcp4_proxy: &Option>, + policy_guard: 
&Option, + iface: &GatewayId, + prev_attempt: &mut Option, + db: Option<&TypedPatchDb>, + write_to: &Watch>, + device_type: Option, + name: &InternedString, +) -> Result<(), Error> { + let addresses = ip4_proxy + .address_data() + .await? + .into_iter() + .chain(ip6_proxy.address_data().await?) + .collect_vec(); + let lan_ip: OrdSet = [ + Some(ip4_proxy.gateway().await?) + .filter(|g| !g.is_empty()) + .and_then(|g| g.parse::().log_err()), + Some(ip6_proxy.gateway().await?) + .filter(|g| !g.is_empty()) + .and_then(|g| g.parse::().log_err()), + ] + .into_iter() + .filter_map(|a| a) + .collect(); + let mut ntp_servers = OrdSet::new(); + let mut dns_servers = OrdSet::new(); + if let Some(dhcp4_proxy) = dhcp4_proxy { + let dhcp = dhcp4_proxy.options().await?; + if let Some(ntp) = dhcp.ntp_servers { + ntp_servers.extend(ntp.split_whitespace().map(InternedString::intern)); + } + if let Some(dns) = dhcp.domain_name_servers { + dns_servers.extend( + dns.split_ascii_whitespace() + .filter_map(|s| s.parse::().log_err()) + .collect::>(), + ); + } + } + let scope_id = if_nametoindex(iface.as_str()).with_kind(ErrorKind::Network)?; + let subnets: OrdSet = addresses.into_iter().map(IpNet::try_from).try_collect()?; + + // Policy routing: ensure replies exit the same interface they arrived on, + // eliminating the need for MASQUERADE. 
+ if let Some(guard) = policy_guard { + apply_policy_routing(guard, iface, &lan_ip).await?; + } + + let ifconfig_url = if let Some(db) = db { + db.peek() + .await + .as_public() + .as_server_info() + .as_ifconfig_url() + .de() + .unwrap_or_else(|_| crate::db::model::public::default_ifconfig_url()) + } else { + crate::db::model::public::default_ifconfig_url() + }; + let wan_ip = if prev_attempt.map_or(true, |i| i.elapsed() > Duration::from_secs(300)) + && !subnets.is_empty() + && !matches!( + device_type, + Some(NetworkInterfaceType::Bridge | NetworkInterfaceType::Loopback) + ) { + *prev_attempt = Some(Instant::now()); + match get_wan_ipv4(iface.as_str(), &ifconfig_url).await { + Ok(a) => a, + Err(e) => { + tracing::error!( + "{}", + t!( + "net.gateway.failed-to-determine-wan-ip", + iface = iface.to_string(), + error = e.to_string() + ) + ); + tracing::debug!("{e:?}"); + None + } + } + } else { + None + }; + let mut ip_info = IpInfo { + name: name.clone(), + scope_id, + device_type, + subnets, + lan_ip, + wan_ip, + ntp_servers, + dns_servers, + }; + + write_to.send_if_modified(|m: &mut OrdMap| { + let (name, secure, gateway_type, prev_wan_ip) = + m.get(iface).map_or((None, None, None, None), |i| { + ( + i.name.clone(), + i.secure, + i.gateway_type, + i.ip_info.as_ref().and_then(|i| i.wan_ip), + ) + }); + ip_info.wan_ip = ip_info.wan_ip.or(prev_wan_ip); + let ip_info = Arc::new(ip_info); + m.insert( + iface.clone(), + NetworkInterfaceInfo { + name, + secure, + ip_info: Some(ip_info.clone()), + gateway_type, + }, + ) + .filter(|old| &old.ip_info == &Some(ip_info)) + .is_none() + }); + + Ok(()) +} + #[instrument(skip(_connection, device_proxy, watch_activation))] async fn watch_activated( _connection: &Connection, @@ -838,12 +1336,12 @@ pub struct NetworkInterfaceWatcher { activated: Watch>, ip_info: Watch>, _watcher: NonDetachingJoinHandle<()>, - listeners: SyncMutex>>, } impl NetworkInterfaceWatcher { pub fn new( seed: impl Future> + Send + Sync + 'static, 
watch_activated: impl IntoIterator, + db: TypedPatchDb, ) -> Self { let ip_info = Watch::new(OrdMap::new()); let activated = Watch::new(watch_activated.into_iter().map(|k| (k, false)).collect()); @@ -855,10 +1353,9 @@ impl NetworkInterfaceWatcher { if !seed.is_empty() { ip_info.send_replace(seed); } - watcher(ip_info, activated).await + watcher(ip_info, activated, Some(db)).await }) .into(), - listeners: SyncMutex::new(BTreeMap::new()), } } @@ -885,51 +1382,6 @@ impl NetworkInterfaceWatcher { pub fn ip_info(&self) -> OrdMap { self.ip_info.read() } - - pub fn bind(&self, bind: B, port: u16) -> Result, Error> { - let arc = Arc::new(()); - self.listeners.mutate(|l| { - if l.get(&port).filter(|w| w.strong_count() > 0).is_some() { - return Err(Error::new( - std::io::Error::from_raw_os_error(libc::EADDRINUSE), - ErrorKind::Network, - )); - } - l.insert(port, Arc::downgrade(&arc)); - Ok(()) - })?; - let ip_info = self.ip_info.clone_unseen(); - Ok(NetworkInterfaceListener { - _arc: arc, - ip_info, - listeners: ListenerMap::new(bind, port), - }) - } - - pub fn upgrade_listener( - &self, - SelfContainedNetworkInterfaceListener { - mut listener, - .. 
- }: SelfContainedNetworkInterfaceListener, - ) -> Result, Error> { - let port = listener.listeners.port; - let arc = &listener._arc; - self.listeners.mutate(|l| { - if l.get(&port).filter(|w| w.strong_count() > 0).is_some() { - return Err(Error::new( - std::io::Error::from_raw_os_error(libc::EADDRINUSE), - ErrorKind::Network, - )); - } - l.insert(port, Arc::downgrade(arc)); - Ok(()) - })?; - let ip_info = self.ip_info.clone_unseen(); - ip_info.mark_changed(); - listener.change_ip_info_source(ip_info); - Ok(listener) - } } pub struct NetworkInterfaceController { @@ -949,7 +1401,13 @@ impl NetworkInterfaceController { .as_server_info_mut() .as_network_mut() .as_gateways_mut() - .ser(info) + .ser(info)?; + let hostname = crate::hostname::ServerHostname::load(db.as_public().as_server_info())?; + let ports = db.as_private().as_available_ports().de()?; + for host in all_hosts(db) { + host?.update_addresses(&hostname, info, &ports)?; + } + Ok(()) }) .await .result?; @@ -1004,6 +1462,187 @@ impl NetworkInterfaceController { Ok(()) } + + async fn apply_default_outbound( + default_outbound: &Option, + ip_info: &OrdMap, + ) { + // 1. Snapshot existing rules at priorities 74 and 75. 
+ // Priority 74: fwmark-based exemptions (WireGuard encap packets) + // Priority 75: catch-all routing to the chosen gateway's table + let (existing_74, existing_75) = match async { + let output = String::from_utf8( + Command::new("ip") + .arg("rule") + .arg("show") + .invoke(ErrorKind::Network) + .await?, + )?; + let mut fwmarks_74 = BTreeSet::::new(); + let mut tables_75 = BTreeSet::::new(); + for line in output.lines() { + let line = line.trim(); + if let Some(rest) = line.strip_prefix("74:") { + if let Some(pos) = rest.find("fwmark ") { + let after = &rest[pos + 7..]; + let token = after.split_whitespace().next().unwrap_or(""); + if let Ok(v) = + u32::from_str_radix(token.strip_prefix("0x").unwrap_or(token), 16) + { + fwmarks_74.insert(v); + } + } + } else if let Some(rest) = line.strip_prefix("75:") { + if let Some(pos) = rest.find("lookup ") { + let after = &rest[pos + 7..]; + let token = after.split_whitespace().next().unwrap_or(""); + if let Ok(v) = token.parse::() { + tables_75.insert(v); + } + } + } + } + Ok::<_, Error>((fwmarks_74, tables_75)) + } + .await + { + Ok(v) => v, + Err(e) => { + tracing::error!("failed to snapshot outbound rules: {e}"); + (BTreeSet::new(), BTreeSet::new()) + } + }; + + // 2. Compute desired rules + let mut desired_74 = BTreeSet::::new(); + let mut desired_75 = BTreeSet::::new(); + + if let Some(gw_id) = default_outbound { + let connected = ip_info + .get(gw_id) + .map_or(false, |info| info.ip_info.is_some()); + if !connected { + if ip_info.contains_key(gw_id) { + tracing::warn!("default outbound gateway {gw_id} is not connected"); + } else { + tracing::warn!("default outbound gateway {gw_id} not found in ip_info"); + } + } else { + match if_nametoindex(gw_id.as_str()) { + Ok(idx) => { + let table_id = 1000 + idx; + desired_75.insert(table_id); + + // Exempt ALL active WireGuard interfaces' encapsulation packets. 
+ // Our priority-75 catch-all would otherwise swallow their encap + // traffic before NM's fwmark rules at priority 31610 can route + // it correctly. + for (iface_id, iface_info) in ip_info { + let Some(ref ip) = iface_info.ip_info else { + continue; + }; + if ip.device_type != Some(NetworkInterfaceType::Wireguard) { + continue; + } + match Command::new("wg") + .arg("show") + .arg(iface_id.as_str()) + .arg("fwmark") + .invoke(ErrorKind::Network) + .await + { + Ok(output) => { + let fwmark_hex = + String::from_utf8_lossy(&output).trim().to_owned(); + if fwmark_hex.is_empty() || fwmark_hex == "off" { + continue; + } + match u32::from_str_radix( + fwmark_hex.strip_prefix("0x").unwrap_or(&fwmark_hex), + 16, + ) { + Ok(v) => { + desired_74.insert(v); + } + Err(e) => { + tracing::error!( + "failed to parse WireGuard fwmark '{fwmark_hex}' for {iface_id}: {e}" + ); + } + } + } + Err(e) => { + tracing::error!( + "failed to read WireGuard fwmark for {iface_id}: {e}" + ); + } + } + } + } + Err(e) => { + tracing::error!("failed to get ifindex for {gw_id}: {e}"); + } + } + } + } + + // 3. Add rules in desired set but not in existing set + for fwmark in desired_74.difference(&existing_74) { + Command::new("ip") + .arg("rule") + .arg("add") + .arg("fwmark") + .arg(fwmark.to_string()) + .arg("lookup") + .arg("main") + .arg("priority") + .arg("74") + .invoke(ErrorKind::Network) + .await + .log_err(); + } + for table in desired_75.difference(&existing_75) { + Command::new("ip") + .arg("rule") + .arg("add") + .arg("table") + .arg(table.to_string()) + .arg("priority") + .arg("75") + .invoke(ErrorKind::Network) + .await + .log_err(); + } + + // 4. 
Delete rules in existing set but not in desired set + for fwmark in existing_74.difference(&desired_74) { + Command::new("ip") + .arg("rule") + .arg("del") + .arg("fwmark") + .arg(fwmark.to_string()) + .arg("lookup") + .arg("main") + .arg("priority") + .arg("74") + .invoke(ErrorKind::Network) + .await + .log_err(); + } + for table in existing_75.difference(&desired_75) { + Command::new("ip") + .arg("rule") + .arg("del") + .arg("table") + .arg(table.to_string()) + .arg("priority") + .arg("75") + .invoke(ErrorKind::Network) + .await + .log_err(); + } + } + pub fn new(db: TypedPatchDb) -> Self { let (seeded_send, seeded) = oneshot::channel(); let watcher = NetworkInterfaceWatcher::new( @@ -1042,6 +1681,7 @@ impl NetworkInterfaceController { } }, [InternedString::from_static(START9_BRIDGE_IFACE).into()], + db.clone(), ); let mut ip_info_watch = watcher.subscribe(); ip_info_watch.mark_seen(); @@ -1051,10 +1691,28 @@ impl NetworkInterfaceController { _sync: tokio::spawn(async move { let res: Result<(), Error> = async { let mut ip_info = seeded.await.ok(); + let mut outbound_sub = db + .subscribe( + "/public/serverInfo/network/defaultOutbound" + .parse::>() + .unwrap(), + ) + .await; loop { if let Err(e) = async { - if let Some(ip_info) = ip_info { - Self::sync(&db, &ip_info).boxed().await?; + if let Some(ref ip_info) = ip_info { + Self::sync(&db, ip_info).boxed().await?; + } + if let Some(ref ip_info) = ip_info { + let default_outbound: Option = db + .peek() + .await + .as_public() + .as_server_info() + .as_network() + .as_default_outbound() + .de()?; + Self::apply_default_outbound(&default_outbound, ip_info).await; } Ok::<_, Error>(()) @@ -1068,8 +1726,12 @@ impl NetworkInterfaceController { tracing::debug!("{e:?}"); } - let _ = ip_info_watch.changed().await; - ip_info = Some(ip_info_watch.read()); + tokio::select! 
{ + _ = ip_info_watch.changed() => { + ip_info = Some(ip_info_watch.read()); + } + _ = outbound_sub.recv() => {} + } } } .await; @@ -1085,43 +1747,6 @@ impl NetworkInterfaceController { } } - pub async fn set_public( - &self, - interface: &GatewayId, - public: Option, - ) -> Result<(), Error> { - let mut sub = self - .db - .subscribe( - "/public/serverInfo/network/gateways" - .parse::>() - .with_kind(ErrorKind::Database)?, - ) - .await; - let mut err = None; - let changed = self.watcher.ip_info.send_if_modified(|ip_info| { - let prev = std::mem::replace( - &mut match ip_info.get_mut(interface).or_not_found(interface) { - Ok(a) => a, - Err(e) => { - err = Some(e); - return false; - } - } - .public, - public, - ); - prev != public - }); - if let Some(e) = err { - return Err(e); - } - if changed { - sub.recv().await; - } - Ok(()) - } - pub async fn forget(&self, interface: &GatewayId) -> Result<(), Error> { let mut sub = self .db @@ -1237,235 +1862,6 @@ impl NetworkInterfaceController { } } -pub trait InterfaceFilter: Any + Clone + std::fmt::Debug + Eq + Ord + Send + Sync { - fn filter(&self, id: &GatewayId, info: &NetworkInterfaceInfo) -> bool; - fn eq(&self, other: &dyn Any) -> bool { - Some(self) == other.downcast_ref::() - } - fn cmp(&self, other: &dyn Any) -> std::cmp::Ordering { - match (self as &dyn Any).type_id().cmp(&other.type_id()) { - std::cmp::Ordering::Equal => { - std::cmp::Ord::cmp(self, other.downcast_ref::().unwrap()) - } - ord => ord, - } - } - fn as_any(&self) -> &dyn Any { - self - } - fn into_dyn(self) -> DynInterfaceFilter { - DynInterfaceFilter::new(self) - } -} - -impl InterfaceFilter for bool { - fn filter(&self, _: &GatewayId, _: &NetworkInterfaceInfo) -> bool { - *self - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct TypeFilter(pub NetworkInterfaceType); -impl InterfaceFilter for TypeFilter { - fn filter(&self, _: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - info.ip_info.as_ref().and_then(|i| 
i.device_type) == Some(self.0) - } -} - -#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct IdFilter(pub GatewayId); -impl InterfaceFilter for IdFilter { - fn filter(&self, id: &GatewayId, _: &NetworkInterfaceInfo) -> bool { - id == &self.0 - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct PublicFilter { - pub public: bool, -} -impl InterfaceFilter for PublicFilter { - fn filter(&self, _: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - self.public == info.public() - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct SecureFilter { - pub secure: bool, -} -impl InterfaceFilter for SecureFilter { - fn filter(&self, _: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - self.secure || info.secure() - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct AndFilter(pub A, pub B); -impl InterfaceFilter for AndFilter { - fn filter(&self, id: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - self.0.filter(id, info) && self.1.filter(id, info) - } -} - -#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] -pub struct OrFilter(pub A, pub B); -impl InterfaceFilter for OrFilter { - fn filter(&self, id: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - self.0.filter(id, info) || self.1.filter(id, info) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct AnyFilter(pub BTreeSet); -impl InterfaceFilter for AnyFilter { - fn filter(&self, id: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - self.0.iter().any(|f| InterfaceFilter::filter(f, id, info)) - } -} - -#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] -pub struct AllFilter(pub BTreeSet); -impl InterfaceFilter for AllFilter { - fn filter(&self, id: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - self.0.iter().all(|f| InterfaceFilter::filter(f, id, info)) - } -} - -pub trait DynInterfaceFilterT: std::fmt::Debug + Any + Send + Sync { - fn filter(&self, id: 
&GatewayId, info: &NetworkInterfaceInfo) -> bool; - fn eq(&self, other: &dyn Any) -> bool; - fn cmp(&self, other: &dyn Any) -> std::cmp::Ordering; - fn as_any(&self) -> &dyn Any; -} -impl DynInterfaceFilterT for T { - fn filter(&self, id: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - InterfaceFilter::filter(self, id, info) - } - fn eq(&self, other: &dyn Any) -> bool { - InterfaceFilter::eq(self, other) - } - fn cmp(&self, other: &dyn Any) -> std::cmp::Ordering { - InterfaceFilter::cmp(self, other) - } - fn as_any(&self) -> &dyn Any { - InterfaceFilter::as_any(self) - } -} - -#[test] -fn test_interface_filter_eq() { - let dyn_t = true.into_dyn(); - assert!(DynInterfaceFilterT::eq( - &dyn_t, - DynInterfaceFilterT::as_any(&true), - )) -} - -#[derive(Clone, Debug)] -pub struct DynInterfaceFilter(Arc); -impl InterfaceFilter for DynInterfaceFilter { - fn filter(&self, id: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - self.0.filter(id, info) - } - fn eq(&self, other: &dyn Any) -> bool { - self.0.eq(other) - } - fn cmp(&self, other: &dyn Any) -> std::cmp::Ordering { - self.0.cmp(other) - } - fn as_any(&self) -> &dyn Any { - self.0.as_any() - } - fn into_dyn(self) -> DynInterfaceFilter { - self - } -} -impl DynInterfaceFilter { - fn new(value: T) -> Self { - Self(Arc::new(value)) - } -} -impl PartialEq for DynInterfaceFilter { - fn eq(&self, other: &Self) -> bool { - DynInterfaceFilterT::eq(&*self.0, DynInterfaceFilterT::as_any(&*other.0)) - } -} -impl Eq for DynInterfaceFilter {} -impl PartialOrd for DynInterfaceFilter { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.0.cmp(other.0.as_any())) - } -} -impl Ord for DynInterfaceFilter { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.0.cmp(other.0.as_any()) - } -} - -struct ListenerMap { - prev_filter: DynInterfaceFilter, - bind: B, - port: u16, - listeners: BTreeMap, -} -impl ListenerMap { - fn new(bind: B, port: u16) -> Self { - Self { - prev_filter: false.into_dyn(), - bind, - port, 
- listeners: BTreeMap::new(), - } - } - - #[instrument(skip(self))] - fn update( - &mut self, - ip_info: &OrdMap, - filter: &impl InterfaceFilter, - ) -> Result<(), Error> { - let mut keep = BTreeSet::::new(); - for (_, info) in ip_info - .iter() - .filter(|(id, info)| filter.filter(*id, *info)) - { - if let Some(ip_info) = &info.ip_info { - for ipnet in &ip_info.subnets { - let addr = match ipnet.addr() { - IpAddr::V6(ip6) => SocketAddrV6::new( - ip6, - self.port, - 0, - if ipv6_is_link_local(ip6) { - ip_info.scope_id - } else { - 0 - }, - ) - .into(), - ip => SocketAddr::new(ip, self.port), - }; - keep.insert(addr); - if !self.listeners.contains_key(&addr) { - self.listeners.insert(addr, self.bind.bind(addr)?); - } - } - } - } - self.listeners.retain(|key, _| keep.contains(key)); - self.prev_filter = filter.clone().into_dyn(); - Ok(()) - } - fn poll_accept( - &mut self, - cx: &mut std::task::Context<'_>, - ) -> Poll::Metadata, AcceptStream), Error>> { - let (metadata, stream) = ready!(self.listeners.poll_accept(cx)?); - Poll::Ready(Ok((metadata.key, metadata.inner, stream))) - } -} - pub fn lookup_info_by_addr( ip_info: &OrdMap, addr: SocketAddr, @@ -1477,28 +1873,6 @@ pub fn lookup_info_by_addr( }) } -pub trait Bind { - type Accept: Accept; - fn bind(&mut self, addr: SocketAddr) -> Result; -} - -#[derive(Clone, Copy, Default)] -pub struct BindTcp; -impl Bind for BindTcp { - type Accept = TcpListener; - fn bind(&mut self, addr: SocketAddr) -> Result { - TcpListener::from_std( - mio::net::TcpListener::bind(addr) - .with_kind(ErrorKind::Network)? 
- .into(), - ) - .with_kind(ErrorKind::Network) - } -} - -pub trait FromGatewayInfo { - fn from_gateway_info(id: &GatewayId, info: &NetworkInterfaceInfo) -> Self; -} #[derive(Clone, Debug)] pub struct GatewayInfo { pub id: GatewayId, @@ -1509,212 +1883,88 @@ impl Visit for GatewayInfo { visitor.visit(self) } } -impl FromGatewayInfo for GatewayInfo { - fn from_gateway_info(id: &GatewayId, info: &NetworkInterfaceInfo) -> Self { - Self { - id: id.clone(), - info: info.clone(), - } - } -} -pub struct NetworkInterfaceListener { - pub ip_info: Watch>, - listeners: ListenerMap, - _arc: Arc<()>, -} -impl NetworkInterfaceListener { - pub(super) fn new( - mut ip_info: Watch>, - bind: B, - port: u16, - ) -> Self { - ip_info.mark_unseen(); - Self { - ip_info, - listeners: ListenerMap::new(bind, port), - _arc: Arc::new(()), - } - } - - pub fn port(&self) -> u16 { - self.listeners.port - } - - #[cfg_attr(feature = "unstable", inline(never))] - pub fn poll_accept( - &mut self, - cx: &mut std::task::Context<'_>, - filter: &impl InterfaceFilter, - ) -> Poll::Metadata, AcceptStream), Error>> { - while self.ip_info.poll_changed(cx).is_ready() - || !DynInterfaceFilterT::eq(&self.listeners.prev_filter, filter.as_any()) - { - self.ip_info - .peek_and_mark_seen(|ip_info| self.listeners.update(ip_info, filter))?; - } - let (addr, inner, stream) = ready!(self.listeners.poll_accept(cx)?); - Poll::Ready(Ok(( - self.ip_info - .peek(|ip_info| { - lookup_info_by_addr(ip_info, addr) - .map(|(id, info)| M::from_gateway_info(id, info)) - }) - .or_not_found(lazy_format!("gateway for {addr}"))?, - inner, - stream, - ))) - } - - pub fn change_ip_info_source( - &mut self, - mut ip_info: Watch>, - ) { - ip_info.mark_unseen(); - self.ip_info = ip_info; - } - - pub async fn accept( - &mut self, - filter: &impl InterfaceFilter, - ) -> Result<(M, ::Metadata, AcceptStream), Error> { - futures::future::poll_fn(|cx| self.poll_accept(cx, filter)).await - } - - pub fn check_filter(&self) -> impl 
FnOnce(SocketAddr, &DynInterfaceFilter) -> bool + 'static { - let ip_info = self.ip_info.clone(); - move |addr, filter| { - ip_info.peek(|i| { - lookup_info_by_addr(i, addr).map_or(false, |(id, info)| { - InterfaceFilter::filter(filter, id, info) - }) - }) - } - } -} - -#[derive(VisitFields)] -pub struct NetworkInterfaceListenerAcceptMetadata { - pub inner: ::Metadata, +/// Metadata for connections accepted by WildcardListener or VHostBindListener. +#[derive(Clone, Debug, VisitFields)] +pub struct NetworkInterfaceListenerAcceptMetadata { + pub inner: TcpMetadata, pub info: GatewayInfo, } -impl fmt::Debug for NetworkInterfaceListenerAcceptMetadata { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - f.debug_struct("NetworkInterfaceListenerAcceptMetadata") - .field("inner", &self.inner) - .field("info", &self.info) - .finish() - } -} -impl Clone for NetworkInterfaceListenerAcceptMetadata -where - ::Metadata: Clone, -{ - fn clone(&self) -> Self { - Self { - inner: self.inner.clone(), - info: self.info.clone(), - } - } -} -impl Visit for NetworkInterfaceListenerAcceptMetadata -where - B: Bind, - ::Metadata: Visit + Clone + Send + Sync + 'static, - V: MetadataVisitor, -{ +impl Visit for NetworkInterfaceListenerAcceptMetadata { fn visit(&self, visitor: &mut V) -> V::Result { self.visit_fields(visitor).collect() } } -impl Accept for NetworkInterfaceListener { - type Metadata = NetworkInterfaceListenerAcceptMetadata; +/// A simple TCP listener on 0.0.0.0:port that looks up GatewayInfo from the +/// connection's local address on each accepted connection. +pub struct WildcardListener { + listener: TcpListener, + ip_info: Watch>, + /// Handle to the self-contained watcher task started in `new()`. + /// Dropped (and thus aborted) when `set_ip_info` replaces the ip_info source. 
+ _watcher: Option>, +} +impl WildcardListener { + pub fn new(port: u16) -> Result { + let listener = TcpListener::from_std( + mio::net::TcpListener::bind(SocketAddr::new(IpAddr::V6(Ipv6Addr::UNSPECIFIED), port)) + .with_kind(ErrorKind::Network)? + .into(), + ) + .with_kind(ErrorKind::Network)?; + let ip_info = Watch::new(OrdMap::new()); + let watcher_handle = + tokio::spawn(watcher(ip_info.clone(), Watch::new(BTreeMap::new()), None)).into(); + Ok(Self { + listener, + ip_info, + _watcher: Some(watcher_handle), + }) + } + + /// Replace the ip_info source with the one from the NetworkInterfaceController. + /// Aborts the self-contained watcher task. + pub fn set_ip_info(&mut self, ip_info: Watch>) { + self.ip_info = ip_info; + self._watcher = None; + } +} +impl Accept for WildcardListener { + type Metadata = NetworkInterfaceListenerAcceptMetadata; fn poll_accept( &mut self, cx: &mut std::task::Context<'_>, ) -> Poll> { - NetworkInterfaceListener::poll_accept(self, cx, &true).map(|res| { - res.map(|(info, inner, stream)| { - ( - NetworkInterfaceListenerAcceptMetadata { inner, info }, - stream, - ) - }) - }) - } -} - -pub struct SelfContainedNetworkInterfaceListener { - _watch_thread: NonDetachingJoinHandle<()>, - listener: NetworkInterfaceListener, -} -impl SelfContainedNetworkInterfaceListener { - pub fn bind(bind: B, port: u16) -> Self { - let ip_info = Watch::new(OrdMap::new()); - let _watch_thread = - tokio::spawn(watcher(ip_info.clone(), Watch::new(BTreeMap::new()))).into(); - Self { - _watch_thread, - listener: NetworkInterfaceListener::new(ip_info, bind, port), + if let Poll::Ready((stream, peer_addr)) = TcpListener::poll_accept(&self.listener, cx)? 
{ + if let Err(e) = socket2::SockRef::from(&stream).set_keepalive(true) { + tracing::error!("Failed to set tcp keepalive: {e}"); + tracing::debug!("{e:?}"); + } + let local_addr = stream.local_addr()?; + let info = self + .ip_info + .peek(|ip_info| { + lookup_info_by_addr(ip_info, local_addr).map(|(id, info)| GatewayInfo { + id: id.clone(), + info: info.clone(), + }) + }) + .unwrap_or_else(|| GatewayInfo { + id: InternedString::from_static("").into(), + info: NetworkInterfaceInfo::default(), + }); + return Poll::Ready(Ok(( + NetworkInterfaceListenerAcceptMetadata { + inner: TcpMetadata { + local_addr, + peer_addr, + }, + info, + }, + Box::pin(stream), + ))); } + Poll::Pending } } -impl Accept for SelfContainedNetworkInterfaceListener { - type Metadata = as Accept>::Metadata; - fn poll_accept( - &mut self, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - Accept::poll_accept(&mut self.listener, cx) - } -} - -pub type UpgradableListener = - Option, NetworkInterfaceListener>>; - -impl Acceptor> -where - B: Bind + Send + Sync + 'static, - B::Accept: Send + Sync, -{ - pub fn bind_upgradable(listener: SelfContainedNetworkInterfaceListener) -> Self { - Self::new(Some(Either::Left(listener))) - } -} - -#[test] -fn test_filter() { - use crate::net::host::binding::NetInfo; - let wg1 = "wg1".parse::().unwrap(); - assert!(!InterfaceFilter::filter( - &AndFilter( - NetInfo { - private_disabled: [wg1.clone()].into_iter().collect(), - public_enabled: Default::default(), - assigned_port: None, - assigned_ssl_port: None, - }, - AndFilter(IdFilter(wg1.clone()), PublicFilter { public: false }), - ) - .into_dyn(), - &wg1, - &NetworkInterfaceInfo { - name: None, - public: None, - secure: None, - ip_info: Some(Arc::new(IpInfo { - name: "".into(), - scope_id: 3, - device_type: Some(NetworkInterfaceType::Wireguard), - subnets: ["10.59.0.2/24".parse::().unwrap()] - .into_iter() - .collect(), - lan_ip: Default::default(), - wan_ip: None, - ntp_servers: Default::default(), - 
dns_servers: Default::default(), - })), - }, - )); -} diff --git a/core/src/net/host/address.rs b/core/src/net/host/address.rs index 9c60ababe..0a69d0427 100644 --- a/core/src/net/host/address.rs +++ b/core/src/net/host/address.rs @@ -10,46 +10,29 @@ use ts_rs::TS; use crate::GatewayId; use crate::context::{CliContext, RpcContext}; use crate::db::model::DatabaseModel; +use crate::hostname::ServerHostname; use crate::net::acme::AcmeProvider; use crate::net::host::{HostApiKind, all_hosts}; -use crate::net::tor::OnionAddress; use crate::prelude::*; use crate::util::serde::{HandlerExtSerde, display_serializable}; #[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(rename_all = "kebab-case")] -#[serde(rename_all_fields = "camelCase")] -#[serde(tag = "kind")] -pub enum HostAddress { - Onion { - address: OnionAddress, - }, - Domain { - address: InternedString, - public: Option, - private: bool, - }, +#[serde(rename_all = "camelCase")] +pub struct HostAddress { + pub address: InternedString, + pub public: Option, + pub private: Option>, } #[derive(Debug, Clone, Deserialize, Serialize, TS)] +#[ts(export)] pub struct PublicDomainConfig { pub gateway: GatewayId, pub acme: Option, } fn handle_duplicates(db: &mut DatabaseModel) -> Result<(), Error> { - let mut onions = BTreeSet::::new(); let mut domains = BTreeSet::::new(); - let check_onion = |onions: &mut BTreeSet, onion: OnionAddress| { - if onions.contains(&onion) { - return Err(Error::new( - eyre!("onion address {onion} is already in use"), - ErrorKind::InvalidRequest, - )); - } - onions.insert(onion); - Ok(()) - }; let check_domain = |domains: &mut BTreeSet, domain: InternedString| { if domains.contains(&domain) { return Err(Error::new( @@ -68,35 +51,27 @@ fn handle_duplicates(db: &mut DatabaseModel) -> Result<(), Error> { not_in_use.push(host); continue; } - for onion in host.as_onions().de()? 
{ - check_onion(&mut onions, onion)?; - } let public = host.as_public_domains().keys()?; for domain in &public { check_domain(&mut domains, domain.clone())?; } - for domain in host.as_private_domains().de()? { + for domain in host.as_private_domains().keys()? { if !public.contains(&domain) { check_domain(&mut domains, domain)?; } } } for host in not_in_use { - host.as_onions_mut() - .mutate(|o| Ok(o.retain(|o| !onions.contains(o))))?; host.as_public_domains_mut() .mutate(|d| Ok(d.retain(|d, _| !domains.contains(d))))?; host.as_private_domains_mut() - .mutate(|d| Ok(d.retain(|d| !domains.contains(d))))?; + .mutate(|d| Ok(d.retain(|d, _| !domains.contains(d))))?; - for onion in host.as_onions().de()? { - check_onion(&mut onions, onion)?; - } let public = host.as_public_domains().keys()?; for domain in &public { check_domain(&mut domains, domain.clone())?; } - for domain in host.as_private_domains().de()? { + for domain in host.as_private_domains().keys()? { if !public.contains(&domain) { check_domain(&mut domains, domain)?; } @@ -159,29 +134,6 @@ pub fn address_api() ) .with_inherited(Kind::inheritance), ) - .subcommand( - "onion", - ParentHandler::::new() - .subcommand( - "add", - from_fn_async(add_onion::) - .with_metadata("sync_db", Value::Bool(true)) - .with_inherited(|_, a| a) - .no_display() - .with_about("about.add-address-to-host") - .with_call_remote::(), - ) - .subcommand( - "remove", - from_fn_async(remove_onion::) - .with_metadata("sync_db", Value::Bool(true)) - .with_inherited(|_, a| a) - .no_display() - .with_about("about.remove-address-from-host") - .with_call_remote::(), - ) - .with_inherited(Kind::inheritance), - ) .subcommand( "list", from_fn_async(list_addresses::) @@ -196,35 +148,7 @@ pub fn address_api() } let mut table = Table::new(); - table.add_row(row![bc => "ADDRESS", "PUBLIC", "ACME PROVIDER"]); - for address in &res { - match address { - HostAddress::Onion { address } => { - table.add_row(row![address, true, "N/A"]); - } - 
HostAddress::Domain { - address, - public: Some(PublicDomainConfig { gateway, acme }), - private, - } => { - table.add_row(row![ - address, - &format!( - "{} ({gateway})", - if *private { "YES" } else { "ONLY" } - ), - acme.as_ref().map(|a| a.0.as_str()).unwrap_or("NONE") - ]); - } - HostAddress::Domain { - address, - public: None, - .. - } => { - table.add_row(row![address, &format!("NO"), "N/A"]); - } - } - } + todo!("find a good way to represent this"); table.print_tty(false)?; @@ -235,7 +159,8 @@ pub fn address_api() ) } -#[derive(Deserialize, Serialize, Parser)] +#[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] pub struct AddPublicDomainParams { #[arg(help = "help.arg.fqdn")] pub fqdn: InternedString, @@ -271,11 +196,14 @@ pub async fn add_public_domain( Kind::host_for(&inheritance, db)? .as_public_domains_mut() .insert(&fqdn, &PublicDomainConfig { acme, gateway })?; - handle_duplicates(db) + handle_duplicates(db)?; + let hostname = ServerHostname::load(db.as_public().as_server_info())?; + let gateways = db.as_public().as_server_info().as_network().as_gateways().de()?; + let ports = db.as_private().as_available_ports().de()?; + Kind::host_for(&inheritance, db)?.update_addresses(&hostname, &gateways, &ports) }) .await .result?; - Kind::sync_host(&ctx, inheritance).await?; tokio::task::spawn_blocking(|| { crate::net::dns::query_dns(ctx, crate::net::dns::QueryDnsParams { fqdn }) @@ -284,7 +212,8 @@ pub async fn add_public_domain( .with_kind(ErrorKind::Unknown)? } -#[derive(Deserialize, Serialize, Parser)] +#[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] pub struct RemoveDomainParams { #[arg(help = "help.arg.fqdn")] pub fqdn: InternedString, @@ -299,36 +228,55 @@ pub async fn remove_public_domain( .mutate(|db| { Kind::host_for(&inheritance, db)? 
.as_public_domains_mut() - .remove(&fqdn) + .remove(&fqdn)?; + let hostname = ServerHostname::load(db.as_public().as_server_info())?; + let gateways = db + .as_public() + .as_server_info() + .as_network() + .as_gateways() + .de()?; + let ports = db.as_private().as_available_ports().de()?; + Kind::host_for(&inheritance, db)?.update_addresses(&hostname, &gateways, &ports) }) .await .result?; - Kind::sync_host(&ctx, inheritance).await?; Ok(()) } -#[derive(Deserialize, Serialize, Parser)] +#[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] pub struct AddPrivateDomainParams { #[arg(help = "help.arg.fqdn")] pub fqdn: InternedString, + pub gateway: GatewayId, } pub async fn add_private_domain( ctx: RpcContext, - AddPrivateDomainParams { fqdn }: AddPrivateDomainParams, + AddPrivateDomainParams { fqdn, gateway }: AddPrivateDomainParams, inheritance: Kind::Inheritance, ) -> Result<(), Error> { ctx.db .mutate(|db| { Kind::host_for(&inheritance, db)? .as_private_domains_mut() - .mutate(|d| Ok(d.insert(fqdn)))?; - handle_duplicates(db) + .upsert(&fqdn, || Ok(BTreeSet::new()))? + .mutate(|d| Ok(d.insert(gateway)))?; + handle_duplicates(db)?; + let hostname = ServerHostname::load(db.as_public().as_server_info())?; + let gateways = db + .as_public() + .as_server_info() + .as_network() + .as_gateways() + .de()?; + let ports = db.as_private().as_available_ports().de()?; + Kind::host_for(&inheritance, db)?.update_addresses(&hostname, &gateways, &ports) }) .await .result?; - Kind::sync_host(&ctx, inheritance).await?; Ok(()) } @@ -342,60 +290,19 @@ pub async fn remove_private_domain( .mutate(|db| { Kind::host_for(&inheritance, db)? 
.as_private_domains_mut() - .mutate(|d| Ok(d.remove(&domain))) + .mutate(|d| Ok(d.remove(&domain)))?; + let hostname = ServerHostname::load(db.as_public().as_server_info())?; + let gateways = db + .as_public() + .as_server_info() + .as_network() + .as_gateways() + .de()?; + let ports = db.as_private().as_available_ports().de()?; + Kind::host_for(&inheritance, db)?.update_addresses(&hostname, &gateways, &ports) }) .await .result?; - Kind::sync_host(&ctx, inheritance).await?; - - Ok(()) -} - -#[derive(Deserialize, Serialize, Parser)] -pub struct OnionParams { - #[arg(help = "help.arg.onion-address")] - pub onion: String, -} - -pub async fn add_onion( - ctx: RpcContext, - OnionParams { onion }: OnionParams, - inheritance: Kind::Inheritance, -) -> Result<(), Error> { - let onion = onion.parse::()?; - ctx.db - .mutate(|db| { - db.as_private().as_key_store().as_onion().get_key(&onion)?; - - Kind::host_for(&inheritance, db)? - .as_onions_mut() - .mutate(|a| Ok(a.insert(onion)))?; - handle_duplicates(db) - }) - .await - .result?; - - Kind::sync_host(&ctx, inheritance).await?; - - Ok(()) -} - -pub async fn remove_onion( - ctx: RpcContext, - OnionParams { onion }: OnionParams, - inheritance: Kind::Inheritance, -) -> Result<(), Error> { - let onion = onion.parse::()?; - ctx.db - .mutate(|db| { - Kind::host_for(&inheritance, db)? 
- .as_onions_mut() - .mutate(|a| Ok(a.remove(&onion))) - }) - .await - .result?; - - Kind::sync_host(&ctx, inheritance).await?; Ok(()) } diff --git a/core/src/net/host/binding.rs b/core/src/net/host/binding.rs index 8862e2bda..54020d865 100644 --- a/core/src/net/host/binding.rs +++ b/core/src/net/host/binding.rs @@ -1,23 +1,23 @@ use std::collections::{BTreeMap, BTreeSet}; +use std::net::SocketAddr; use std::str::FromStr; use clap::Parser; use clap::builder::ValueParserFactory; -use imbl::OrdSet; use rpc_toolkit::{Context, Empty, HandlerArgs, HandlerExt, ParentHandler, from_fn_async}; use serde::{Deserialize, Serialize}; use ts_rs::TS; +use crate::HostId; use crate::context::{CliContext, RpcContext}; -use crate::db::model::public::NetworkInterfaceInfo; +use crate::db::prelude::Map; use crate::net::forward::AvailablePorts; -use crate::net::gateway::InterfaceFilter; use crate::net::host::HostApiKind; +use crate::net::service_interface::HostnameInfo; use crate::net::vhost::AlpnInfo; use crate::prelude::*; use crate::util::FromStrParser; use crate::util::serde::{HandlerExtSerde, display_serializable}; -use crate::{GatewayId, HostId}; #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, TS)] #[ts(export)] @@ -45,25 +45,87 @@ impl FromStr for BindId { } } -#[derive(Debug, Deserialize, Serialize, TS)] +#[derive(Debug, Default, Clone, Deserialize, Serialize, TS, HasModel)] #[serde(rename_all = "camelCase")] #[ts(export)] +#[model = "Model"] +pub struct DerivedAddressInfo { + /// User override: enable these addresses (only for public IP & port) + pub enabled: BTreeSet, + /// User override: disable these addresses (only for domains and private IP & port) + pub disabled: BTreeSet<(InternedString, u16)>, + /// COMPUTED: NetServiceData::update — all possible addresses for this binding + pub available: BTreeSet, +} + +impl DerivedAddressInfo { + /// Returns addresses that are currently enabled after applying overrides. 
+ /// Default: public IPs are disabled, everything else is enabled. + /// Explicit `enabled`/`disabled` overrides take precedence. + pub fn enabled(&self) -> BTreeSet<&HostnameInfo> { + self.available + .iter() + .filter(|h| { + if h.public && h.metadata.is_ip() { + // Public IPs: disabled by default, explicitly enabled via SocketAddr + h.to_socket_addr().map_or( + true, // should never happen, but would rather see them if it does + |sa| self.enabled.contains(&sa), + ) + } else { + !self + .disabled + .contains(&(h.hostname.clone(), h.port.unwrap_or_default())) // disablable addresses will always have a port + } + }) + .collect() + } +} + +#[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] +#[model = "Model"] +#[ts(export)] +pub struct Bindings(pub BTreeMap); + +impl Map for Bindings { + type Key = u16; + type Value = BindInfo; + fn key_str(key: &Self::Key) -> Result, Error> { + Self::key_string(key) + } + fn key_string(key: &Self::Key) -> Result { + Ok(InternedString::from_display(key)) + } +} + +impl std::ops::Deref for Bindings { + type Target = BTreeMap; + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl std::ops::DerefMut for Bindings { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.0 + } +} + +#[derive(Debug, Deserialize, Serialize, HasModel, TS)] +#[serde(rename_all = "camelCase")] +#[model = "Model"] +#[ts(export)] pub struct BindInfo { pub enabled: bool, pub options: BindOptions, pub net: NetInfo, + pub addresses: DerivedAddressInfo, } #[derive(Clone, Debug, Deserialize, Serialize, TS, PartialEq, Eq, PartialOrd, Ord)] #[serde(rename_all = "camelCase")] #[ts(export)] pub struct NetInfo { - #[ts(as = "BTreeSet::")] - #[serde(default)] - pub private_disabled: OrdSet, - #[ts(as = "BTreeSet::")] - #[serde(default)] - pub public_enabled: OrdSet, pub assigned_port: Option, pub assigned_ssl_port: Option, } @@ -71,25 +133,28 @@ impl BindInfo { pub fn new(available_ports: &mut AvailablePorts, options: BindOptions) -> Result { 
let mut assigned_port = None; let mut assigned_ssl_port = None; - if options.add_ssl.is_some() { - assigned_ssl_port = Some(available_ports.alloc()?); + if let Some(ssl) = &options.add_ssl { + assigned_ssl_port = available_ports + .try_alloc(ssl.preferred_external_port, true) + .or_else(|| Some(available_ports.alloc(true).ok()?)); } if options .secure .map_or(true, |s| !(s.ssl && options.add_ssl.is_some())) { - assigned_port = Some(available_ports.alloc()?); + assigned_port = available_ports + .try_alloc(options.preferred_external_port, false) + .or_else(|| Some(available_ports.alloc(false).ok()?)); } Ok(Self { enabled: true, options, net: NetInfo { - private_disabled: OrdSet::new(), - public_enabled: OrdSet::new(), assigned_port, assigned_ssl_port, }, + addresses: DerivedAddressInfo::default(), }) } pub fn update( @@ -97,7 +162,11 @@ impl BindInfo { available_ports: &mut AvailablePorts, options: BindOptions, ) -> Result { - let Self { net: mut lan, .. } = self; + let Self { + net: mut lan, + addresses, + .. + } = self; if options .secure .map_or(true, |s| !(s.ssl && options.add_ssl.is_some())) @@ -105,19 +174,26 @@ impl BindInfo { { lan.assigned_port = if let Some(port) = lan.assigned_port.take() { Some(port) + } else if let Some(port) = + available_ports.try_alloc(options.preferred_external_port, false) + { + Some(port) } else { - Some(available_ports.alloc()?) + Some(available_ports.alloc(false)?) }; } else { if let Some(port) = lan.assigned_port.take() { available_ports.free([port]); } } - if options.add_ssl.is_some() { + if let Some(ssl) = &options.add_ssl { lan.assigned_ssl_port = if let Some(port) = lan.assigned_ssl_port.take() { Some(port) + } else if let Some(port) = available_ports.try_alloc(ssl.preferred_external_port, true) + { + Some(port) } else { - Some(available_ports.alloc()?) + Some(available_ports.alloc(true)?) 
}; } else { if let Some(port) = lan.assigned_ssl_port.take() { @@ -128,22 +204,13 @@ impl BindInfo { enabled: true, options, net: lan, + addresses, }) } pub fn disable(&mut self) { self.enabled = false; } } -impl InterfaceFilter for NetInfo { - fn filter(&self, id: &GatewayId, info: &NetworkInterfaceInfo) -> bool { - info.ip_info.is_some() - && if info.public() { - self.public_enabled.contains(id) - } else { - !self.private_disabled.contains(id) - } - } -} #[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize, TS)] #[ts(export)] @@ -188,7 +255,7 @@ pub fn binding() let mut table = Table::new(); table.add_row(row![bc => "INTERNAL PORT", "ENABLED", "EXTERNAL PORT", "EXTERNAL SSL PORT"]); - for (internal, info) in res { + for (internal, info) in res.iter() { table.add_row(row![ internal, info.enabled, @@ -213,12 +280,12 @@ pub fn binding() .with_call_remote::(), ) .subcommand( - "set-gateway-enabled", - from_fn_async(set_gateway_enabled::) + "set-address-enabled", + from_fn_async(set_address_enabled::) .with_metadata("sync_db", Value::Bool(true)) .with_inherited(Kind::inheritance) .no_display() - .with_about("about.set-gateway-enabled-for-binding") + .with_about("about.set-address-enabled-for-binding") .with_call_remote::(), ) } @@ -227,7 +294,7 @@ pub async fn list_bindings( ctx: RpcContext, _: Empty, inheritance: Kind::Inheritance, -) -> Result, Error> { +) -> Result { Kind::host_for(&inheritance, &mut ctx.db.peek().await)? 
.as_bindings() .de() @@ -236,50 +303,54 @@ pub async fn list_bindings( #[derive(Deserialize, Serialize, Parser, TS)] #[serde(rename_all = "camelCase")] #[ts(export)] -pub struct BindingGatewaySetEnabledParams { +pub struct BindingSetAddressEnabledParams { #[arg(help = "help.arg.internal-port")] internal_port: u16, - #[arg(help = "help.arg.gateway-id")] - gateway: GatewayId, + #[arg(long, help = "help.arg.address")] + address: String, #[arg(long, help = "help.arg.binding-enabled")] enabled: Option, } -pub async fn set_gateway_enabled( +pub async fn set_address_enabled( ctx: RpcContext, - BindingGatewaySetEnabledParams { + BindingSetAddressEnabledParams { internal_port, - gateway, + address, enabled, - }: BindingGatewaySetEnabledParams, + }: BindingSetAddressEnabledParams, inheritance: Kind::Inheritance, ) -> Result<(), Error> { let enabled = enabled.unwrap_or(true); - let gateway_public = ctx - .net_controller - .net_iface - .watcher - .ip_info() - .get(&gateway) - .or_not_found(&gateway)? - .public(); + let address: HostnameInfo = + serde_json::from_str(&address).with_kind(ErrorKind::Deserialization)?; ctx.db .mutate(|db| { Kind::host_for(&inheritance, db)? 
.as_bindings_mut() .mutate(|b| { - let net = &mut b.get_mut(&internal_port).or_not_found(internal_port)?.net; - if gateway_public { + let bind = b.get_mut(&internal_port).or_not_found(internal_port)?; + if address.public && address.metadata.is_ip() { + // Public IPs: toggle via SocketAddr in `enabled` set + let sa = address.to_socket_addr().ok_or_else(|| { + Error::new( + eyre!("cannot convert address to socket addr"), + ErrorKind::InvalidRequest, + ) + })?; if enabled { - net.public_enabled.insert(gateway); + bind.addresses.enabled.insert(sa); } else { - net.public_enabled.remove(&gateway); + bind.addresses.enabled.remove(&sa); } } else { + // Domains and private IPs: toggle via (host, port) in `disabled` set + let port = address.port.unwrap_or(if address.ssl { 443 } else { 80 }); + let key = (address.hostname.clone(), port); if enabled { - net.private_disabled.remove(&gateway); + bind.addresses.disabled.remove(&key); } else { - net.private_disabled.insert(gateway); + bind.addresses.disabled.insert(key); } } Ok(()) @@ -287,5 +358,5 @@ pub async fn set_gateway_enabled( }) .await .result?; - Kind::sync_host(&ctx, inheritance).await + Ok(()) } diff --git a/core/src/net/host/mod.rs b/core/src/net/host/mod.rs index 620991ca7..aff25ccd5 100644 --- a/core/src/net/host/mod.rs +++ b/core/src/net/host/mod.rs @@ -1,23 +1,26 @@ use std::collections::{BTreeMap, BTreeSet}; -use std::future::Future; +use std::net::{IpAddr, SocketAddrV4}; use std::panic::RefUnwindSafe; use clap::Parser; +use imbl::OrdMap; use imbl_value::InternedString; use itertools::Itertools; +use patch_db::DestructureMut; use rpc_toolkit::{Context, Empty, HandlerExt, OrEmpty, ParentHandler, from_fn_async}; use serde::{Deserialize, Serialize}; use ts_rs::TS; use crate::context::RpcContext; use crate::db::model::DatabaseModel; +use crate::db::model::public::{NetworkInterfaceInfo, NetworkInterfaceType}; +use crate::hostname::ServerHostname; use crate::net::forward::AvailablePorts; use 
crate::net::host::address::{HostAddress, PublicDomainConfig, address_api}; -use crate::net::host::binding::{BindInfo, BindOptions, binding}; -use crate::net::service_interface::HostnameInfo; -use crate::net::tor::OnionAddress; +use crate::net::host::binding::{BindInfo, BindOptions, Bindings, binding}; +use crate::net::service_interface::{HostnameInfo, HostnameMetadata}; use crate::prelude::*; -use crate::{HostId, PackageId}; +use crate::{GatewayId, HostId, PackageId}; pub mod address; pub mod binding; @@ -27,13 +30,23 @@ pub mod binding; #[model = "Model"] #[ts(export)] pub struct Host { - pub bindings: BTreeMap, - #[ts(type = "string[]")] - pub onions: BTreeSet, + pub bindings: Bindings, pub public_domains: BTreeMap, - pub private_domains: BTreeSet, - /// COMPUTED: NetService::update - pub hostname_info: BTreeMap>, // internal port -> Hostnames + pub private_domains: BTreeMap>, + /// COMPUTED: port forwarding rules needed on gateways for public addresses to work. + #[serde(default)] + pub port_forwards: BTreeSet, +} + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct PortForward { + #[ts(type = "string")] + pub src: SocketAddrV4, + #[ts(type = "string")] + pub dst: SocketAddrV4, + pub gateway: GatewayId, } impl AsRef for Host { @@ -46,31 +59,287 @@ impl Host { Self::default() } pub fn addresses<'a>(&'a self) -> impl Iterator + 'a { - self.onions + self.public_domains .iter() - .cloned() - .map(|address| HostAddress::Onion { address }) - .chain( - self.public_domains - .iter() - .map(|(address, config)| HostAddress::Domain { - address: address.clone(), - public: Some(config.clone()), - private: self.private_domains.contains(address), - }), - ) + .map(|(address, config)| HostAddress { + address: address.clone(), + public: Some(config.clone()), + private: self.private_domains.get(address).cloned(), + }) .chain( self.private_domains .iter() - .filter(|a| 
!self.public_domains.contains_key(*a)) - .map(|address| HostAddress::Domain { - address: address.clone(), + .filter(|(domain, _)| !self.public_domains.contains_key(*domain)) + .map(|(domain, gateways)| HostAddress { + address: domain.clone(), public: None, - private: true, + private: Some(gateways.clone()), }), ) } } +impl Model { + pub fn update_addresses( + &mut self, + mdns: &ServerHostname, + gateways: &OrdMap, + available_ports: &AvailablePorts, + ) -> Result<(), Error> { + let this = self.destructure_mut(); + + // ips + for (_, bind) in this.bindings.as_entries_mut()? { + let net = bind.as_net().de()?; + let opt = bind.as_options().de()?; + + // Preserve existing plugin-provided addresses across recomputation + let mut available = bind.as_addresses().as_available().de()?; + available.retain(|h| matches!(h.metadata, HostnameMetadata::Plugin { .. })); + for (gid, g) in gateways { + let Some(ip_info) = &g.ip_info else { + continue; + }; + let gateway_secure = g.secure(); + for subnet in &ip_info.subnets { + let host = InternedString::from_display(&subnet.addr()); + let metadata = if subnet.addr().is_ipv4() { + HostnameMetadata::Ipv4 { + gateway: gid.clone(), + } + } else { + HostnameMetadata::Ipv6 { + gateway: gid.clone(), + scope_id: ip_info.scope_id, + } + }; + if let Some(port) = net.assigned_port.filter(|_| { + opt.secure + .map_or(gateway_secure, |s| !(s.ssl && opt.add_ssl.is_some())) + }) { + available.insert(HostnameInfo { + ssl: opt.secure.map_or(false, |s| s.ssl), + public: false, + hostname: host.clone(), + port: Some(port), + metadata: metadata.clone(), + }); + } + if let Some(port) = net.assigned_ssl_port { + available.insert(HostnameInfo { + ssl: true, + public: false, + hostname: host.clone(), + port: Some(port), + metadata, + }); + } + } + if let Some(wan_ip) = &ip_info.wan_ip { + let host = InternedString::from_display(&wan_ip); + let metadata = HostnameMetadata::Ipv4 { + gateway: gid.clone(), + }; + if let Some(port) = 
net.assigned_port.filter(|_| { + opt.secure.map_or( + false, // the public internet is never secure + |s| !(s.ssl && opt.add_ssl.is_some()), + ) + }) { + available.insert(HostnameInfo { + ssl: opt.secure.map_or(false, |s| s.ssl), + public: true, + hostname: host.clone(), + port: Some(port), + metadata: metadata.clone(), + }); + } + if let Some(port) = net.assigned_ssl_port { + available.insert(HostnameInfo { + ssl: true, + public: true, + hostname: host.clone(), + port: Some(port), + metadata, + }); + } + } + } + + // mdns + let mdns_host = mdns.local_domain_name(); + let mdns_gateways: BTreeSet = gateways + .iter() + .filter(|(_, g)| { + matches!( + g.ip_info.as_ref().and_then(|i| i.device_type), + Some(NetworkInterfaceType::Ethernet | NetworkInterfaceType::Wireless) + ) + }) + .map(|(id, _)| id.clone()) + .collect(); + if let Some(port) = net.assigned_port.filter(|_| { + opt.secure + .map_or(true, |s| !(s.ssl && opt.add_ssl.is_some())) + }) { + let mdns_gateways = if opt.secure.is_some() { + mdns_gateways.clone() + } else { + mdns_gateways + .iter() + .filter(|g| gateways.get(*g).map_or(false, |g| g.secure())) + .cloned() + .collect() + }; + if !mdns_gateways.is_empty() { + available.insert(HostnameInfo { + ssl: opt.secure.map_or(false, |s| s.ssl), + public: false, + hostname: mdns_host.clone(), + port: Some(port), + metadata: HostnameMetadata::Mdns { + gateways: mdns_gateways, + }, + }); + } + } + if let Some(port) = net.assigned_ssl_port { + available.insert(HostnameInfo { + ssl: true, + public: false, + hostname: mdns_host, + port: Some(port), + metadata: HostnameMetadata::Mdns { + gateways: mdns_gateways, + }, + }); + } + + // public domains + for (domain, info) in this.public_domains.de()? 
{ + let metadata = HostnameMetadata::PublicDomain { + gateway: info.gateway.clone(), + }; + if let Some(port) = net.assigned_port.filter(|_| { + opt.secure.map_or( + false, // the public internet is never secure + |s| !(s.ssl && opt.add_ssl.is_some()), + ) + }) { + available.insert(HostnameInfo { + ssl: opt.secure.map_or(false, |s| s.ssl), + public: true, + hostname: domain.clone(), + port: Some(port), + metadata: metadata.clone(), + }); + } + if let Some(mut port) = net.assigned_ssl_port { + if let Some(preferred) = opt + .add_ssl + .as_ref() + .map(|s| s.preferred_external_port) + .filter(|p| available_ports.is_ssl(*p)) + { + port = preferred; + } + available.insert(HostnameInfo { + ssl: true, + public: true, + hostname: domain, + port: Some(port), + metadata, + }); + } + } + + // private domains + for (domain, domain_gateways) in this.private_domains.de()? { + if let Some(port) = net.assigned_port.filter(|_| { + opt.secure + .map_or(true, |s| !(s.ssl && opt.add_ssl.is_some())) + }) { + let gateways = if opt.secure.is_some() { + domain_gateways.clone() + } else { + domain_gateways + .iter() + .cloned() + .filter(|g| gateways.get(g).map_or(false, |g| g.secure())) + .collect() + }; + available.insert(HostnameInfo { + ssl: opt.secure.map_or(false, |s| s.ssl), + public: true, + hostname: domain.clone(), + port: Some(port), + metadata: HostnameMetadata::PrivateDomain { gateways }, + }); + } + if let Some(mut port) = net.assigned_ssl_port { + if let Some(preferred) = opt + .add_ssl + .as_ref() + .map(|s| s.preferred_external_port) + .filter(|p| available_ports.is_ssl(*p)) + { + port = preferred; + } + available.insert(HostnameInfo { + ssl: true, + public: true, + hostname: domain, + port: Some(port), + metadata: HostnameMetadata::PrivateDomain { + gateways: domain_gateways, + }, + }); + } + } + bind.as_addresses_mut().as_available_mut().ser(&available)?; + } + + // compute port forwards from available public addresses + let bindings: Bindings = this.bindings.de()?; + 
let mut port_forwards = BTreeSet::new(); + for bind in bindings.values() { + for addr in bind.addresses.enabled() { + if !addr.public { + continue; + } + let Some(port) = addr.port else { + continue; + }; + let gw_id = match &addr.metadata { + HostnameMetadata::Ipv4 { gateway } + | HostnameMetadata::PublicDomain { gateway } => gateway, + _ => continue, + }; + let Some(gw_info) = gateways.get(gw_id) else { + continue; + }; + let Some(ip_info) = &gw_info.ip_info else { + continue; + }; + let Some(wan_ip) = ip_info.wan_ip else { + continue; + }; + for subnet in &ip_info.subnets { + let IpAddr::V4(addr) = subnet.addr() else { + continue; + }; + port_forwards.insert(PortForward { + src: SocketAddrV4::new(wan_ip, port), + dst: SocketAddrV4::new(addr, port), + gateway: gw_id.clone(), + }); + } + } + } + this.port_forwards.ser(&port_forwards)?; + + Ok(()) + } +} #[derive(Debug, Default, Deserialize, Serialize, HasModel, TS)] #[model = "Model"] @@ -112,22 +381,7 @@ pub fn host_for<'a>( .as_hosts_mut(), ) } - let tor_key = if host_info(db, package_id)?.as_idx(host_id).is_none() { - Some( - db.as_private_mut() - .as_key_store_mut() - .as_onion_mut() - .new_key()?, - ) - } else { - None - }; - host_info(db, package_id)?.upsert(host_id, || { - let mut h = Host::new(); - h.onions - .insert(tor_key.or_not_found("generated tor key")?.onion_address()); - Ok(h) - }) + host_info(db, package_id)?.upsert(host_id, || Ok(Host::new())) } pub fn all_hosts(db: &mut DatabaseModel) -> impl Iterator, Error>> { @@ -185,10 +439,6 @@ pub trait HostApiKind: 'static { inheritance: &Self::Inheritance, db: &'a mut DatabaseModel, ) -> Result<&'a mut Model, Error>; - fn sync_host( - ctx: &RpcContext, - inheritance: Self::Inheritance, - ) -> impl Future> + Send; } pub struct ForPackage; impl HostApiKind for ForPackage { @@ -207,12 +457,6 @@ impl HostApiKind for ForPackage { ) -> Result<&'a mut Model, Error> { host_for(db, Some(package), host) } - async fn sync_host(ctx: &RpcContext, (package, host): 
Self::Inheritance) -> Result<(), Error> { - let service = ctx.services.get(&package).await; - let service_ref = service.as_ref().or_not_found(&package)?; - service_ref.sync_host(host).await?; - Ok(()) - } } pub struct ForServer; impl HostApiKind for ForServer { @@ -228,9 +472,6 @@ impl HostApiKind for ForServer { ) -> Result<&'a mut Model, Error> { host_for(db, None, &HostId::default()) } - async fn sync_host(ctx: &RpcContext, _: Self::Inheritance) -> Result<(), Error> { - ctx.os_net_service.sync_host(HostId::default()).await - } } pub fn host_api() -> ParentHandler { diff --git a/core/src/net/keys.rs b/core/src/net/keys.rs index 41e96eedd..077f56875 100644 --- a/core/src/net/keys.rs +++ b/core/src/net/keys.rs @@ -3,28 +3,21 @@ use serde::{Deserialize, Serialize}; use crate::account::AccountInfo; use crate::net::acme::AcmeCertStore; use crate::net::ssl::CertStore; -use crate::net::tor::OnionStore; use crate::prelude::*; #[derive(Debug, Deserialize, Serialize, HasModel)] #[model = "Model"] #[serde(rename_all = "camelCase")] pub struct KeyStore { - pub onion: OnionStore, pub local_certs: CertStore, #[serde(default)] pub acme: AcmeCertStore, } impl KeyStore { pub fn new(account: &AccountInfo) -> Result { - let mut res = Self { - onion: OnionStore::new(), + Ok(Self { local_certs: CertStore::new(account)?, acme: AcmeCertStore::new(), - }; - for tor_key in account.tor_keys.iter().cloned() { - res.onion.insert(tor_key); - } - Ok(res) + }) } } diff --git a/core/src/net/mod.rs b/core/src/net/mod.rs index f30c1383b..f199b3194 100644 --- a/core/src/net/mod.rs +++ b/core/src/net/mod.rs @@ -14,7 +14,6 @@ pub mod socks; pub mod ssl; pub mod static_server; pub mod tls; -pub mod tor; pub mod tunnel; pub mod utils; pub mod vhost; @@ -23,7 +22,6 @@ pub mod wifi; pub fn net_api() -> ParentHandler { ParentHandler::new() - .subcommand("tor", tor::tor_api::().with_about("about.tor-commands")) .subcommand( "acme", acme::acme_api::().with_about("about.setup-acme-certificate"), diff --git 
a/core/src/net/net_controller.rs b/core/src/net/net_controller.rs index dc46fde33..dc990ec06 100644 --- a/core/src/net/net_controller.rs +++ b/core/src/net/net_controller.rs @@ -3,59 +3,50 @@ use std::net::{IpAddr, Ipv4Addr, SocketAddr, SocketAddrV4}; use std::sync::{Arc, Weak}; use color_eyre::eyre::eyre; -use imbl::{OrdMap, vector}; use imbl_value::InternedString; -use ipnet::IpNet; +use nix::net::if_::if_nametoindex; +use patch_db::json_ptr::JsonPointer; +use tokio::process::Command; use tokio::sync::Mutex; use tokio::task::JoinHandle; use tokio_rustls::rustls::ClientConfig as TlsClientConfig; use tracing::instrument; use crate::db::model::Database; -use crate::db::model::public::NetworkInterfaceType; -use crate::error::ErrorCollection; -use crate::hostname::Hostname; +use crate::hostname::ServerHostname; use crate::net::dns::DnsController; -use crate::net::forward::{InterfacePortForwardController, START9_BRIDGE_IFACE, add_iptables_rule}; -use crate::net::gateway::{ - AndFilter, DynInterfaceFilter, IdFilter, InterfaceFilter, NetworkInterfaceController, OrFilter, - PublicFilter, SecureFilter, TypeFilter, +use crate::net::forward::{ + ForwardRequirements, InterfacePortForwardController, START9_BRIDGE_IFACE, add_iptables_rule, }; +use crate::net::gateway::NetworkInterfaceController; use crate::net::host::address::HostAddress; use crate::net::host::binding::{AddSslOptions, BindId, BindOptions}; use crate::net::host::{Host, Hosts, host_for}; -use crate::net::service_interface::{GatewayInfo, HostnameInfo, IpHostname, OnionHostname}; +use crate::net::service_interface::HostnameMetadata; use crate::net::socks::SocksController; -use crate::net::tor::{OnionAddress, TorController, TorSecretKey}; -use crate::net::utils::ipv6_is_local; use crate::net::vhost::{AlpnInfo, DynVHostTarget, ProxyTarget, VHostController}; use crate::prelude::*; use crate::service::effects::callbacks::ServiceCallbacks; +use crate::util::Invoke; use crate::util::serde::MaybeUtf8String; +use 
crate::util::sync::Watch; use crate::{GatewayId, HOST_IP, HostId, OptionExt, PackageId}; pub struct NetController { pub(crate) db: TypedPatchDb, - pub(super) tor: TorController, pub(super) vhost: VHostController, pub(super) tls_client_config: Arc, pub(crate) net_iface: Arc, pub(super) dns: DnsController, pub(super) forward: InterfacePortForwardController, pub(super) socks: SocksController, - pub(super) server_hostnames: Vec>, pub(crate) callbacks: Arc, } impl NetController { - pub async fn init( - db: TypedPatchDb, - hostname: &Hostname, - socks_listen: SocketAddr, - ) -> Result { + pub async fn init(db: TypedPatchDb, socks_listen: SocketAddr) -> Result { let net_iface = Arc::new(NetworkInterfaceController::new(db.clone())); - let tor = TorController::new()?; - let socks = SocksController::new(socks_listen, tor.clone())?; + let socks = SocksController::new(socks_listen)?; let crypto_provider = Arc::new(tokio_rustls::rustls::crypto::ring::default_provider()); let tls_client_config = Arc::new(crate::net::tls::client_config( crypto_provider.clone(), @@ -87,25 +78,12 @@ impl NetController { .await?; Ok(Self { db: db.clone(), - tor, vhost: VHostController::new(db.clone(), net_iface.clone(), crypto_provider), tls_client_config, dns: DnsController::init(db, &net_iface.watcher).await?, forward: InterfacePortForwardController::new(net_iface.watcher.subscribe()), net_iface, socks, - server_hostnames: vec![ - // LAN IP - None, - // Internal DNS - Some("embassy".into()), - Some("startos".into()), - // localhost - Some("localhost".into()), - Some(hostname.no_dot_host_name()), - // LAN mDNS - Some(hostname.local_domain_name()), - ], callbacks: Arc::new(ServiceCallbacks::default()), }) } @@ -165,10 +143,9 @@ impl NetController { #[derive(Default, Debug)] struct HostBinds { - forwards: BTreeMap)>, + forwards: BTreeMap)>, vhosts: BTreeMap<(Option, u16), (ProxyTarget, Arc<()>)>, private_dns: BTreeMap>, - tor: BTreeMap, Vec>)>, } pub struct NetServiceData { @@ -188,115 +165,72 @@ 
impl NetServiceData { }) } - async fn clear_bindings( - &mut self, - ctrl: &NetController, - except: BTreeSet, - ) -> Result<(), Error> { - if let Some(pkg_id) = &self.id { - let hosts = ctrl - .db - .mutate(|db| { - let mut res = Hosts::default(); - for (host_id, host) in db - .as_public_mut() - .as_package_data_mut() - .as_idx_mut(pkg_id) - .or_not_found(pkg_id)? - .as_hosts_mut() - .as_entries_mut()? - { - host.as_bindings_mut().mutate(|b| { - for (internal_port, info) in b { - if !except.contains(&BindId { - id: host_id.clone(), - internal_port: *internal_port, - }) { - info.disable(); - } - } - Ok(()) - })?; - res.0.insert(host_id, host.de()?); - } - Ok(res) - }) - .await - .result?; - let mut errors = ErrorCollection::new(); - for (id, host) in hosts.0 { - errors.handle(self.update(ctrl, id, host).await); - } - errors.into_result() - } else { - let host = ctrl - .db - .mutate(|db| { - let host = db - .as_public_mut() - .as_server_info_mut() - .as_network_mut() - .as_host_mut(); - host.as_bindings_mut().mutate(|b| { - for (internal_port, info) in b { - if !except.contains(&BindId { - id: HostId::default(), - internal_port: *internal_port, - }) { - info.disable(); - } - } - Ok(()) - })?; - host.de() - }) - .await - .result?; - self.update(ctrl, HostId::default(), host).await - } - } - async fn update(&mut self, ctrl: &NetController, id: HostId, host: Host) -> Result<(), Error> { - let mut forwards: BTreeMap = BTreeMap::new(); + let mut forwards: BTreeMap = BTreeMap::new(); let mut vhosts: BTreeMap<(Option, u16), ProxyTarget> = BTreeMap::new(); let mut private_dns: BTreeSet = BTreeSet::new(); - let mut tor: BTreeMap)> = - BTreeMap::new(); - let mut hostname_info: BTreeMap> = BTreeMap::new(); let binds = self.binds.entry(id.clone()).or_default(); - let peek = ctrl.db.peek().await; - - // LAN - let server_info = peek.as_public().as_server_info(); let net_ifaces = ctrl.net_iface.watcher.ip_info(); - let hostname = server_info.as_hostname().de()?; - for (port, bind) 
in &host.bindings { + let host_addresses: Vec<_> = host.addresses().collect(); + + // Collect private DNS entries (domains without public config) + for HostAddress { + address, public, .. + } in &host_addresses + { + if public.is_none() { + private_dns.insert(address.clone()); + } + } + + // ── Build controller entries from enabled addresses ── + for (port, bind) in host.bindings.iter() { if !bind.enabled { continue; } - if bind.net.assigned_port.is_some() || bind.net.assigned_ssl_port.is_some() { - let mut hostnames = BTreeSet::new(); - if let Some(ssl) = &bind.options.add_ssl { - let external = bind - .net - .assigned_ssl_port - .or_not_found("assigned ssl port")?; - let addr = (self.ip, *port).into(); - let connect_ssl = if let Some(alpn) = ssl.alpn.clone() { - Err(alpn) - } else { - if bind.options.secure.as_ref().map_or(false, |s| s.ssl) { - Ok(()) - } else { - Err(AlpnInfo::Reflect) - } - }; - for hostname in ctrl.server_hostnames.iter().cloned() { + if bind.net.assigned_port.is_none() && bind.net.assigned_ssl_port.is_none() { + continue; + } + + let enabled_addresses = bind.addresses.enabled(); + let addr: SocketAddr = (self.ip, *port).into(); + + // SSL vhosts + if let Some(ssl) = &bind.options.add_ssl { + let connect_ssl = if let Some(alpn) = ssl.alpn.clone() { + Err(alpn) + } else if bind.options.secure.as_ref().map_or(false, |s| s.ssl) { + Ok(()) + } else { + Err(AlpnInfo::Reflect) + }; + + if let Some(assigned_ssl_port) = bind.net.assigned_ssl_port { + // Collect private IPs from enabled private addresses' gateways + let server_private_ips: BTreeSet = enabled_addresses + .iter() + .filter(|a| !a.public) + .flat_map(|a| a.metadata.gateways()) + .filter_map(|gw| net_ifaces.get(gw).and_then(|info| info.ip_info.as_ref())) + .flat_map(|ip_info| ip_info.subnets.iter().map(|s| s.addr())) + .collect(); + + // Collect public gateways from enabled public IP addresses + let server_public_gateways: BTreeSet = enabled_addresses + .iter() + .filter(|a| a.public && 
a.metadata.is_ip()) + .flat_map(|a| a.metadata.gateways()) + .cloned() + .collect(); + + // * vhost (on assigned_ssl_port) + if !server_private_ips.is_empty() || !server_public_gateways.is_empty() { vhosts.insert( - (hostname, external), + (None, assigned_ssl_port), ProxyTarget { - filter: bind.net.clone().into_dyn(), + public: server_public_gateways.clone(), + private: server_private_ips.clone(), acme: None, addr, add_x_forwarded_headers: ssl.add_x_forwarded_headers, @@ -306,375 +240,86 @@ impl NetServiceData { }, ); } - for address in host.addresses() { - match address { - HostAddress::Onion { address } => { - let hostname = InternedString::from_display(&address); - if hostnames.insert(hostname.clone()) { - vhosts.insert( - (Some(hostname), external), - ProxyTarget { - filter: OrFilter( - TypeFilter(NetworkInterfaceType::Loopback), - IdFilter(GatewayId::from(InternedString::from( - START9_BRIDGE_IFACE, - ))), - ) - .into_dyn(), - acme: None, - addr, - add_x_forwarded_headers: ssl.add_x_forwarded_headers, - connect_ssl: connect_ssl - .clone() - .map(|_| ctrl.tls_client_config.clone()), - }, - ); // TODO: wrap onion ssl stream directly in tor ctrl - } - } - HostAddress::Domain { - address, - public, - private, - } => { - if hostnames.insert(address.clone()) { - let address = Some(address.clone()); - if ssl.preferred_external_port == 443 { - if let Some(public) = &public { - vhosts.insert( - (address.clone(), 5443), - ProxyTarget { - filter: AndFilter( - bind.net.clone(), - AndFilter( - IdFilter(public.gateway.clone()), - PublicFilter { public: false }, - ), - ) - .into_dyn(), - acme: public.acme.clone(), - addr, - add_x_forwarded_headers: ssl - .add_x_forwarded_headers, - connect_ssl: connect_ssl - .clone() - .map(|_| ctrl.tls_client_config.clone()), - }, - ); - vhosts.insert( - (address.clone(), 443), - ProxyTarget { - filter: AndFilter( - bind.net.clone(), - if private { - OrFilter( - IdFilter(public.gateway.clone()), - PublicFilter { public: false }, - ) - 
.into_dyn() - } else { - AndFilter( - IdFilter(public.gateway.clone()), - PublicFilter { public: true }, - ) - .into_dyn() - }, - ) - .into_dyn(), - acme: public.acme.clone(), - addr, - add_x_forwarded_headers: ssl - .add_x_forwarded_headers, - connect_ssl: connect_ssl - .clone() - .map(|_| ctrl.tls_client_config.clone()), - }, - ); - } else { - vhosts.insert( - (address.clone(), 443), - ProxyTarget { - filter: AndFilter( - bind.net.clone(), - PublicFilter { public: false }, - ) - .into_dyn(), - acme: None, - addr, - add_x_forwarded_headers: ssl - .add_x_forwarded_headers, - connect_ssl: connect_ssl - .clone() - .map(|_| ctrl.tls_client_config.clone()), - }, - ); - } - } else { - if let Some(public) = public { - vhosts.insert( - (address.clone(), external), - ProxyTarget { - filter: AndFilter( - bind.net.clone(), - if private { - OrFilter( - IdFilter(public.gateway.clone()), - PublicFilter { public: false }, - ) - .into_dyn() - } else { - IdFilter(public.gateway.clone()) - .into_dyn() - }, - ) - .into_dyn(), - acme: public.acme.clone(), - addr, - add_x_forwarded_headers: ssl - .add_x_forwarded_headers, - connect_ssl: connect_ssl - .clone() - .map(|_| ctrl.tls_client_config.clone()), - }, - ); - } else { - vhosts.insert( - (address.clone(), external), - ProxyTarget { - filter: AndFilter( - bind.net.clone(), - PublicFilter { public: false }, - ) - .into_dyn(), - acme: None, - addr, - add_x_forwarded_headers: ssl - .add_x_forwarded_headers, - connect_ssl: connect_ssl - .clone() - .map(|_| ctrl.tls_client_config.clone()), - }, - ); - } - } - } - } - } + } + + // Domain vhosts: group by (domain, ssl_port), merge public/private sets + for addr_info in &enabled_addresses { + if !addr_info.ssl { + continue; } - } - if bind - .options - .secure - .map_or(true, |s| !(s.ssl && bind.options.add_ssl.is_some())) - { - let external = bind.net.assigned_port.or_not_found("assigned lan port")?; - forwards.insert( - external, - ( - SocketAddrV4::new(self.ip, *port), - AndFilter( - 
SecureFilter { - secure: bind.options.secure.is_some(), - }, - bind.net.clone(), - ) - .into_dyn(), - ), - ); - } - let mut bind_hostname_info: Vec = - hostname_info.remove(port).unwrap_or_default(); - for (gateway_id, info) in net_ifaces - .iter() - .filter(|(_, info)| { - info.ip_info.as_ref().map_or(false, |i| { - !matches!(i.device_type, Some(NetworkInterfaceType::Bridge)) - }) - }) - .filter(|(id, info)| bind.net.filter(id, info)) - { - let gateway = GatewayInfo { - id: gateway_id.clone(), - name: info - .name - .clone() - .or_else(|| info.ip_info.as_ref().map(|i| i.name.clone())) - .unwrap_or_else(|| gateway_id.clone().into()), - public: info.public(), - }; - let port = bind.net.assigned_port.filter(|_| { - bind.options.secure.map_or(false, |s| { - !(s.ssl && bind.options.add_ssl.is_some()) || info.secure() - }) + match &addr_info.metadata { + HostnameMetadata::PublicDomain { .. } + | HostnameMetadata::PrivateDomain { .. } => {} + _ => continue, + } + let domain = &addr_info.hostname; + let domain_ssl_port = addr_info.port.unwrap_or(443); + let key = (Some(domain.clone()), domain_ssl_port); + let target = vhosts.entry(key).or_insert_with(|| ProxyTarget { + public: BTreeSet::new(), + private: BTreeSet::new(), + acme: host_addresses + .iter() + .find(|a| a.address == *domain) + .and_then(|a| a.public.as_ref()) + .and_then(|p| p.acme.clone()), + addr, + add_x_forwarded_headers: ssl.add_x_forwarded_headers, + connect_ssl: connect_ssl.clone().map(|_| ctrl.tls_client_config.clone()), }); - if !info.public() - && info.ip_info.as_ref().map_or(false, |i| { - i.device_type != Some(NetworkInterfaceType::Wireguard) - }) - { - bind_hostname_info.push(HostnameInfo::Ip { - gateway: gateway.clone(), - public: false, - hostname: IpHostname::Local { - value: InternedString::from_display(&{ - let hostname = &hostname; - lazy_format!("{hostname}.local") - }), - port, - ssl_port: bind.net.assigned_ssl_port, - }, - }); - } - for address in host.addresses() { - if let 
HostAddress::Domain { - address, - public, - private, - } = address - { - if public.is_none() { - private_dns.insert(address.clone()); - } - let private = private && !info.public(); - let public = - public.as_ref().map_or(false, |p| &p.gateway == gateway_id); - if public || private { - if bind - .options - .add_ssl - .as_ref() - .map_or(false, |ssl| ssl.preferred_external_port == 443) - { - bind_hostname_info.push(HostnameInfo::Ip { - gateway: gateway.clone(), - public, - hostname: IpHostname::Domain { - value: address.clone(), - port: None, - ssl_port: Some(443), - }, - }); - } else { - bind_hostname_info.push(HostnameInfo::Ip { - gateway: gateway.clone(), - public, - hostname: IpHostname::Domain { - value: address.clone(), - port, - ssl_port: bind.net.assigned_ssl_port, - }, - }); - } - } + if addr_info.public { + for gw in addr_info.metadata.gateways() { + target.public.insert(gw.clone()); } - } - if let Some(ip_info) = &info.ip_info { - let public = info.public(); - if let Some(wan_ip) = ip_info.wan_ip { - bind_hostname_info.push(HostnameInfo::Ip { - gateway: gateway.clone(), - public: true, - hostname: IpHostname::Ipv4 { - value: wan_ip, - port, - ssl_port: bind.net.assigned_ssl_port, - }, - }); - } - for ipnet in &ip_info.subnets { - match ipnet { - IpNet::V4(net) => { - if !public { - bind_hostname_info.push(HostnameInfo::Ip { - gateway: gateway.clone(), - public, - hostname: IpHostname::Ipv4 { - value: net.addr(), - port, - ssl_port: bind.net.assigned_ssl_port, - }, - }); + } else { + for gw in addr_info.metadata.gateways() { + if let Some(info) = net_ifaces.get(gw) { + if let Some(ip_info) = &info.ip_info { + for subnet in &ip_info.subnets { + target.private.insert(subnet.addr()); } } - IpNet::V6(net) => { - bind_hostname_info.push(HostnameInfo::Ip { - gateway: gateway.clone(), - public: public && !ipv6_is_local(net.addr()), - hostname: IpHostname::Ipv6 { - value: net.addr(), - scope_id: ip_info.scope_id, - port, - ssl_port: bind.net.assigned_ssl_port, - 
}, - }); - } } } } } - hostname_info.insert(*port, bind_hostname_info); } - } - struct TorHostnamePorts { - non_ssl: Option, - ssl: Option, - } - let mut tor_hostname_ports = BTreeMap::::new(); - let mut tor_binds = OrdMap::::new(); - for (internal, info) in &host.bindings { - if !info.enabled { - continue; - } - tor_binds.insert( - info.options.preferred_external_port, - SocketAddr::from((self.ip, *internal)), - ); - if let (Some(ssl), Some(ssl_internal)) = - (&info.options.add_ssl, info.net.assigned_ssl_port) + // Non-SSL forwards + if bind + .options + .secure + .map_or(true, |s| !(s.ssl && bind.options.add_ssl.is_some())) { - tor_binds.insert( - ssl.preferred_external_port, - SocketAddr::from(([127, 0, 0, 1], ssl_internal)), - ); - tor_hostname_ports.insert( - *internal, - TorHostnamePorts { - non_ssl: Some(info.options.preferred_external_port) - .filter(|p| *p != ssl.preferred_external_port), - ssl: Some(ssl.preferred_external_port), - }, - ); - } else { - tor_hostname_ports.insert( - *internal, - TorHostnamePorts { - non_ssl: Some(info.options.preferred_external_port), - ssl: None, - }, + let external = bind.net.assigned_port.or_not_found("assigned lan port")?; + let fwd_public: BTreeSet = enabled_addresses + .iter() + .filter(|a| a.public) + .flat_map(|a| a.metadata.gateways()) + .cloned() + .collect(); + let fwd_private: BTreeSet = enabled_addresses + .iter() + .filter(|a| !a.public) + .flat_map(|a| a.metadata.gateways()) + .filter_map(|gw| net_ifaces.get(gw).and_then(|i| i.ip_info.as_ref())) + .flat_map(|ip| ip.subnets.iter().map(|s| s.addr())) + .collect(); + forwards.insert( + external, + ( + SocketAddrV4::new(self.ip, *port), + ForwardRequirements { + public_gateways: fwd_public, + private_ips: fwd_private, + secure: bind.options.secure.is_some(), + }, + ), ); } } - for tor_addr in host.onions.iter() { - let key = peek - .as_private() - .as_key_store() - .as_onion() - .get_key(tor_addr)?; - tor.insert(key.onion_address(), (key, tor_binds.clone())); - 
for (internal, ports) in &tor_hostname_ports { - let mut bind_hostname_info = hostname_info.remove(internal).unwrap_or_default(); - bind_hostname_info.push(HostnameInfo::Onion { - hostname: OnionHostname { - value: InternedString::from_display(tor_addr), - port: ports.non_ssl, - ssl_port: ports.ssl, - }, - }); - hostname_info.insert(*internal, bind_hostname_info); - } - } - + // ── Phase 3: Reconcile ── let all = binds .forwards .keys() @@ -683,8 +328,8 @@ impl NetServiceData { .collect::>(); for external in all { let mut prev = binds.forwards.remove(&external); - if let Some((internal, filter)) = forwards.remove(&external) { - prev = prev.filter(|(i, f, _)| i == &internal && *f == filter); + if let Some((internal, reqs)) = forwards.remove(&external) { + prev = prev.filter(|(i, r, _)| i == &internal && *r == reqs); binds.forwards.insert( external, if let Some(prev) = prev { @@ -692,11 +337,11 @@ impl NetServiceData { } else { ( internal, - filter.clone(), + reqs.clone(), ctrl.forward .add( external, - filter, + reqs, internal, net_ifaces .iter() @@ -763,88 +408,6 @@ impl NetServiceData { } ctrl.dns.gc_private_domains(&rm)?; - let all = binds - .tor - .keys() - .chain(tor.keys()) - .cloned() - .collect::>(); - for onion in all { - let mut prev = binds.tor.remove(&onion); - if let Some((key, tor_binds)) = tor.remove(&onion).filter(|(_, b)| !b.is_empty()) { - prev = prev.filter(|(b, _)| b == &tor_binds); - binds.tor.insert( - onion, - if let Some(prev) = prev { - prev - } else { - let service = ctrl.tor.service(key)?; - let rcs = service.proxy_all(tor_binds.iter().map(|(k, v)| (*k, *v))); - (tor_binds, rcs) - }, - ); - } else { - if let Some((_, rc)) = prev { - drop(rc); - ctrl.tor.gc(Some(onion)).await?; - } - } - } - - let res = ctrl - .db - .mutate(|db| { - host_for(db, self.id.as_ref(), &id)? 
- .as_hostname_info_mut() - .ser(&hostname_info) - }) - .await; - res.result?; - if let Some(pkg_id) = self.id.as_ref() { - if res.revision.is_some() { - if let Some(cbs) = ctrl.callbacks.get_host_info(&(pkg_id.clone(), id)) { - cbs.call(vector![]).await?; - } - } - } - Ok(()) - } - - async fn update_all(&mut self) -> Result<(), Error> { - let ctrl = self.net_controller()?; - if let Some(id) = self.id.clone() { - for (host_id, host) in ctrl - .db - .peek() - .await - .as_public() - .as_package_data() - .as_idx(&id) - .or_not_found(&id)? - .as_hosts() - .as_entries()? - { - tracing::info!("Updating host {host_id} for {id}"); - self.update(&*ctrl, host_id.clone(), host.de()?).await?; - tracing::info!("Updated host {host_id} for {id}"); - } - } else { - tracing::info!("Updating host for Main UI"); - self.update( - &*ctrl, - HostId::default(), - ctrl.db - .peek() - .await - .as_public() - .as_server_info() - .as_network() - .as_host() - .de()?, - ) - .await?; - tracing::info!("Updated host for Main UI"); - } Ok(()) } } @@ -853,6 +416,7 @@ pub struct NetService { shutdown: bool, data: Arc>, sync_task: JoinHandle<()>, + synced: Watch, } impl NetService { fn dummy() -> Self { @@ -866,26 +430,206 @@ impl NetService { binds: BTreeMap::new(), })), sync_task: tokio::spawn(futures::future::ready(())), + synced: Watch::new(0u64), } } fn new(data: NetServiceData) -> Result { - let mut ip_info = data.net_controller()?.net_iface.watcher.subscribe(); + let ctrl = data.net_controller()?; + let pkg_id = data.id.clone(); + let db = ctrl.db.clone(); + drop(ctrl); + + let synced = Watch::new(0u64); + let synced_writer = synced.clone(); + + let ip = data.ip; let data = Arc::new(Mutex::new(data)); let thread_data = data.clone(); let sync_task = tokio::spawn(async move { - loop { - if let Err(e) = thread_data.lock().await.update_all().await { - tracing::error!("Failed to update network info: {e}"); - tracing::debug!("{e:?}"); + if let Some(ref id) = pkg_id { + let ptr: JsonPointer = 
format!("/public/packageData/{}/hosts", id).parse().unwrap(); + let mut watch = db.watch(ptr).await.typed::(); + + // Outbound gateway enforcement + let service_ip = ip.to_string(); + // Purge any stale rules from a previous instance + loop { + if Command::new("ip") + .arg("rule") + .arg("del") + .arg("from") + .arg(&service_ip) + .arg("priority") + .arg("100") + .invoke(ErrorKind::Network) + .await + .is_err() + { + break; + } + } + let mut outbound_sub = db + .subscribe( + format!("/public/packageData/{}/outboundGateway", id) + .parse::>() + .unwrap(), + ) + .await; + let ctrl_for_ip = thread_data.lock().await.net_controller().ok(); + let mut ip_info_watch = ctrl_for_ip + .as_ref() + .map(|c| c.net_iface.watcher.subscribe()); + if let Some(ref mut w) = ip_info_watch { + w.mark_seen(); + } + drop(ctrl_for_ip); + let mut current_outbound_table: Option = None; + + loop { + let (hosts_changed, outbound_changed) = tokio::select! { + res = watch.changed() => { + if let Err(e) = res { + tracing::error!("DB watch disconnected for {id}: {e}"); + break; + } + (true, false) + } + _ = outbound_sub.recv() => (false, true), + _ = async { + if let Some(ref mut w) = ip_info_watch { + w.changed().await; + } else { + std::future::pending::<()>().await; + } + } => (false, true), + }; + + // Handle host updates + if hosts_changed { + if let Err(e) = async { + let hosts = watch.peek()?.de()?; + let mut data = thread_data.lock().await; + let ctrl = data.net_controller()?; + for (host_id, host) in hosts.0 { + data.update(&*ctrl, host_id, host).await?; + } + Ok::<_, Error>(()) + } + .await + { + tracing::error!("Failed to update network info for {id}: {e}"); + tracing::debug!("{e:?}"); + } + } + + // Handle outbound gateway changes + if outbound_changed { + if let Err(e) = async { + // Remove old rule if any + if let Some(old_table) = current_outbound_table.take() { + let old_table_str = old_table.to_string(); + let _ = Command::new("ip") + .arg("rule") + .arg("del") + .arg("from") + 
.arg(&service_ip) + .arg("lookup") + .arg(&old_table_str) + .arg("priority") + .arg("100") + .invoke(ErrorKind::Network) + .await; + } + // Read current outbound gateway from DB + let outbound_gw: Option = db + .peek() + .await + .as_public() + .as_package_data() + .as_idx(id) + .map(|p| p.as_outbound_gateway().de().ok()) + .flatten() + .flatten(); + if let Some(gw_id) = outbound_gw { + // Look up table ID for this gateway + if let Some(table_id) = if_nametoindex(gw_id.as_str()) + .map(|idx| 1000 + idx) + .log_err() + { + let table_str = table_id.to_string(); + Command::new("ip") + .arg("rule") + .arg("add") + .arg("from") + .arg(&service_ip) + .arg("lookup") + .arg(&table_str) + .arg("priority") + .arg("100") + .invoke(ErrorKind::Network) + .await + .log_err(); + current_outbound_table = Some(table_id); + } + } + Ok::<_, Error>(()) + } + .await + { + tracing::error!("Failed to update outbound gateway for {id}: {e}"); + tracing::debug!("{e:?}"); + } + } + + synced_writer.send_modify(|v| *v += 1); + } + + // Cleanup outbound rule on task exit + if let Some(table_id) = current_outbound_table { + let table_str = table_id.to_string(); + let _ = Command::new("ip") + .arg("rule") + .arg("del") + .arg("from") + .arg(&service_ip) + .arg("lookup") + .arg(&table_str) + .arg("priority") + .arg("100") + .invoke(ErrorKind::Network) + .await; + } + } else { + let ptr: JsonPointer = "/public/serverInfo/network/host".parse().unwrap(); + let mut watch = db.watch(ptr).await.typed::(); + loop { + if let Err(e) = watch.changed().await { + tracing::error!("DB watch disconnected for Main UI: {e}"); + break; + } + if let Err(e) = async { + let host = watch.peek()?.de()?; + let mut data = thread_data.lock().await; + let ctrl = data.net_controller()?; + data.update(&*ctrl, HostId::default(), host).await?; + Ok::<_, Error>(()) + } + .await + { + tracing::error!("Failed to update network info for Main UI: {e}"); + tracing::debug!("{e:?}"); + } + synced_writer.send_modify(|v| *v += 1); } - 
ip_info.changed().await; } }); + Ok(Self { shutdown: false, data, sync_task, + synced, }) } @@ -895,60 +639,125 @@ impl NetService { internal_port: u16, options: BindOptions, ) -> Result<(), Error> { - let mut data = self.data.lock().await; - let pkg_id = &data.id; - let ctrl = data.net_controller()?; - let host = ctrl - .db + let (ctrl, pkg_id) = { + let data = self.data.lock().await; + (data.net_controller()?, data.id.clone()) + }; + ctrl.db .mutate(|db| { + let gateways = db + .as_public() + .as_server_info() + .as_network() + .as_gateways() + .de()?; + let hostname = ServerHostname::load(db.as_public().as_server_info())?; let mut ports = db.as_private().as_available_ports().de()?; let host = host_for(db, pkg_id.as_ref(), &id)?; host.add_binding(&mut ports, internal_port, options)?; - let host = host.de()?; + host.update_addresses(&hostname, &gateways, &ports)?; db.as_private_mut().as_available_ports_mut().ser(&ports)?; - Ok(host) + Ok(()) }) .await - .result?; - data.update(&*ctrl, id, host).await + .result } pub async fn clear_bindings(&self, except: BTreeSet) -> Result<(), Error> { - let mut data = self.data.lock().await; - let ctrl = data.net_controller()?; - data.clear_bindings(&*ctrl, except).await - } - - pub async fn update(&self, id: HostId, host: Host) -> Result<(), Error> { - let mut data = self.data.lock().await; - let ctrl = data.net_controller()?; - data.update(&*ctrl, id, host).await - } - - pub async fn sync_host(&self, id: HostId) -> Result<(), Error> { - let mut data = self.data.lock().await; - let ctrl = data.net_controller()?; - let host = host_for(&mut ctrl.db.peek().await, data.id.as_ref(), &id)?.de()?; - data.update(&*ctrl, id, host).await + let (ctrl, pkg_id) = { + let data = self.data.lock().await; + (data.net_controller()?, data.id.clone()) + }; + ctrl.db + .mutate(|db| { + let gateways = db + .as_public() + .as_server_info() + .as_network() + .as_gateways() + .de()?; + let hostname = 
ServerHostname::load(db.as_public().as_server_info())?; + let ports = db.as_private().as_available_ports().de()?; + if let Some(ref pkg_id) = pkg_id { + for (host_id, host) in db + .as_public_mut() + .as_package_data_mut() + .as_idx_mut(pkg_id) + .or_not_found(pkg_id)? + .as_hosts_mut() + .as_entries_mut()? + { + host.as_bindings_mut().mutate(|b| { + for (internal_port, info) in b.iter_mut() { + if !except.contains(&BindId { + id: host_id.clone(), + internal_port: *internal_port, + }) { + info.disable(); + } + } + Ok(()) + })?; + host.update_addresses(&hostname, &gateways, &ports)?; + } + } else { + let host = db + .as_public_mut() + .as_server_info_mut() + .as_network_mut() + .as_host_mut(); + host.as_bindings_mut().mutate(|b| { + for (internal_port, info) in b.iter_mut() { + if !except.contains(&BindId { + id: HostId::default(), + internal_port: *internal_port, + }) { + info.disable(); + } + } + Ok(()) + })?; + host.update_addresses(&hostname, &gateways, &ports)?; + } + Ok(()) + }) + .await + .result } pub async fn remove_all(mut self) -> Result<(), Error> { - self.sync_task.abort(); - let mut data = self.data.lock().await; - if let Some(ctrl) = Weak::upgrade(&data.controller) { - self.shutdown = true; - data.clear_bindings(&*ctrl, Default::default()).await?; - - drop(ctrl); - Ok(()) - } else { + if Weak::upgrade(&self.data.lock().await.controller).is_none() { self.shutdown = true; tracing::warn!("NetService dropped after NetController is shutdown"); - Err(Error::new( + return Err(Error::new( eyre!("NetController is shutdown"), crate::ErrorKind::Network, - )) + )); } + let current = self.synced.peek(|v| *v); + self.clear_bindings(Default::default()).await?; + let mut w = self.synced.clone(); + w.wait_for(|v| *v > current).await; + self.sync_task.abort(); + // Clean up any outbound gateway ip rules for this service + let service_ip = self.data.lock().await.ip.to_string(); + loop { + if Command::new("ip") + .arg("rule") + .arg("del") + .arg("from") + 
.arg(&service_ip) + .arg("priority") + .arg("100") + .invoke(ErrorKind::Network) + .await + .is_err() + { + break; + } + } + self.shutdown = true; + Ok(()) } pub async fn get_ip(&self) -> Ipv4Addr { diff --git a/core/src/net/service_interface.rs b/core/src/net/service_interface.rs index 499e1a321..7c4b294aa 100644 --- a/core/src/net/service_interface.rs +++ b/core/src/net/service_interface.rs @@ -1,36 +1,142 @@ -use std::net::{Ipv4Addr, Ipv6Addr}; +use std::collections::BTreeSet; +use std::net::SocketAddr; use imbl_value::InternedString; use serde::{Deserialize, Serialize}; use ts_rs::TS; -use crate::{GatewayId, HostId, ServiceInterfaceId}; +use crate::prelude::*; +use crate::{ActionId, GatewayId, HostId, PackageId, ServiceInterfaceId}; -#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] #[ts(export)] #[serde(rename_all = "camelCase")] +pub struct HostnameInfo { + pub ssl: bool, + pub public: bool, + pub hostname: InternedString, + pub port: Option, + pub metadata: HostnameMetadata, +} + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "kebab-case")] #[serde(rename_all_fields = "camelCase")] #[serde(tag = "kind")] -pub enum HostnameInfo { - Ip { - gateway: GatewayInfo, - public: bool, - hostname: IpHostname, +pub enum HostnameMetadata { + Ipv4 { + gateway: GatewayId, }, - Onion { - hostname: OnionHostname, + Ipv6 { + gateway: GatewayId, + scope_id: u32, + }, + Mdns { + gateways: BTreeSet, + }, + PrivateDomain { + gateways: BTreeSet, + }, + PublicDomain { + gateway: GatewayId, + }, + Plugin { + package_id: PackageId, + remove_action: Option, + overflow_actions: Vec, + #[ts(type = "unknown")] + #[serde(default)] + info: Value, }, } + impl HostnameInfo { + pub fn to_socket_addr(&self) -> Option { + let ip = self.hostname.parse().ok()?; + Some(SocketAddr::new(ip, self.port?)) + } + pub fn to_san_hostname(&self) 
-> InternedString { + self.hostname.clone() + } +} + +impl HostnameMetadata { + pub fn is_ip(&self) -> bool { + matches!(self, Self::Ipv4 { .. } | Self::Ipv6 { .. }) + } + + pub fn gateways(&self) -> Box + '_> { match self { - Self::Ip { hostname, .. } => hostname.to_san_hostname(), - Self::Onion { hostname } => hostname.to_san_hostname(), + Self::Ipv4 { gateway } + | Self::Ipv6 { gateway, .. } + | Self::PublicDomain { gateway } => Box::new(std::iter::once(gateway)), + Self::PrivateDomain { gateways } | Self::Mdns { gateways } => Box::new(gateways.iter()), + Self::Plugin { .. } => Box::new(std::iter::empty()), } } } -#[derive(Clone, Debug, Deserialize, Serialize, TS)] +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub struct PluginHostnameInfo { + pub package_id: Option, + pub host_id: HostId, + pub internal_port: u16, + pub ssl: bool, + pub public: bool, + #[ts(type = "string")] + pub hostname: InternedString, + pub port: Option, + #[ts(type = "unknown")] + #[serde(default)] + pub info: Value, +} + +impl PluginHostnameInfo { + /// Convert to a `HostnameInfo` with `Plugin` metadata, using the given plugin package ID. + pub fn to_hostname_info( + &self, + plugin_package: &PackageId, + remove_action: Option, + overflow_actions: Vec, + ) -> HostnameInfo { + HostnameInfo { + ssl: self.ssl, + public: self.public, + hostname: self.hostname.clone(), + port: self.port, + metadata: HostnameMetadata::Plugin { + package_id: plugin_package.clone(), + info: self.info.clone(), + remove_action, + overflow_actions, + }, + } + } + + /// Check if a `HostnameInfo` with Plugin metadata matches this `PluginHostnameInfo` + /// (comparing address fields only, not row_actions). + pub fn matches_hostname_info(&self, h: &HostnameInfo, plugin_package: &PackageId) -> bool { + match &h.metadata { + HostnameMetadata::Plugin { + package_id, info, .. 
+ } => { + package_id == plugin_package + && h.ssl == self.ssl + && h.public == self.public + && h.hostname == self.hostname + && h.port == self.port + && *info == self.info + } + _ => false, + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, TS)] #[ts(export)] #[serde(rename_all = "camelCase")] pub struct GatewayInfo { @@ -39,63 +145,6 @@ pub struct GatewayInfo { pub public: bool, } -#[derive(Clone, Debug, Deserialize, Serialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -pub struct OnionHostname { - #[ts(type = "string")] - pub value: InternedString, - pub port: Option, - pub ssl_port: Option, -} -impl OnionHostname { - pub fn to_san_hostname(&self) -> InternedString { - self.value.clone() - } -} - -#[derive(Clone, Debug, Deserialize, Serialize, TS)] -#[ts(export)] -#[serde(rename_all = "camelCase")] -#[serde(rename_all_fields = "camelCase")] -#[serde(tag = "kind")] -pub enum IpHostname { - Ipv4 { - value: Ipv4Addr, - port: Option, - ssl_port: Option, - }, - Ipv6 { - value: Ipv6Addr, - #[serde(default)] - scope_id: u32, - port: Option, - ssl_port: Option, - }, - Local { - #[ts(type = "string")] - value: InternedString, - port: Option, - ssl_port: Option, - }, - Domain { - #[ts(type = "string")] - value: InternedString, - port: Option, - ssl_port: Option, - }, -} -impl IpHostname { - pub fn to_san_hostname(&self) -> InternedString { - match self { - Self::Ipv4 { value, .. } => InternedString::from_display(value), - Self::Ipv6 { value, .. } => InternedString::from_display(value), - Self::Local { value, .. } => value.clone(), - Self::Domain { value, .. 
} => value.clone(), - } - } -} - #[derive(Clone, Debug, Deserialize, Serialize, TS)] #[ts(export)] #[serde(rename_all = "camelCase")] diff --git a/core/src/net/socks.rs b/core/src/net/socks.rs index 5d1be66f0..7f54a8010 100644 --- a/core/src/net/socks.rs +++ b/core/src/net/socks.rs @@ -8,7 +8,6 @@ use socks5_impl::server::{AuthAdaptor, ClientConnection, Server}; use tokio::net::{TcpListener, TcpStream}; use crate::HOST_IP; -use crate::net::tor::TorController; use crate::prelude::*; use crate::util::actor::background::BackgroundJobQueue; use crate::util::future::NonDetachingJoinHandle; @@ -22,7 +21,7 @@ pub struct SocksController { _thread: NonDetachingJoinHandle<()>, } impl SocksController { - pub fn new(listen: SocketAddr, tor: TorController) -> Result { + pub fn new(listen: SocketAddr) -> Result { Ok(Self { _thread: tokio::spawn(async move { let auth: AuthAdaptor<()> = Arc::new(NoAuth); @@ -45,7 +44,6 @@ impl SocksController { loop { match server.accept().await { Ok((stream, _)) => { - let tor = tor.clone(); bg.add_job(async move { if let Err(e) = async { match stream @@ -57,40 +55,6 @@ impl SocksController { .await .with_kind(ErrorKind::Network)? 
{ - ClientConnection::Connect( - reply, - Address::DomainAddress(domain, port), - ) if domain.ends_with(".onion") => { - if let Ok(mut target) = tor - .connect_onion(&domain.parse()?, port) - .await - { - let mut sock = reply - .reply( - Reply::Succeeded, - Address::unspecified(), - ) - .await - .with_kind(ErrorKind::Network)?; - tokio::io::copy_bidirectional( - &mut sock, - &mut target, - ) - .await - .with_kind(ErrorKind::Network)?; - } else { - let mut sock = reply - .reply( - Reply::HostUnreachable, - Address::unspecified(), - ) - .await - .with_kind(ErrorKind::Network)?; - sock.shutdown() - .await - .with_kind(ErrorKind::Network)?; - } - } ClientConnection::Connect(reply, addr) => { if let Ok(mut target) = match addr { Address::DomainAddress(domain, port) => { diff --git a/core/src/net/ssl.rs b/core/src/net/ssl.rs index 748abb493..284d224a2 100644 --- a/core/src/net/ssl.rs +++ b/core/src/net/ssl.rs @@ -33,7 +33,7 @@ use crate::SOURCE_DATE; use crate::account::AccountInfo; use crate::db::model::Database; use crate::db::{DbAccess, DbAccessMut}; -use crate::hostname::Hostname; +use crate::hostname::ServerHostname; use crate::init::check_time_is_synchronized; use crate::net::gateway::GatewayInfo; use crate::net::tls::TlsHandler; @@ -283,7 +283,7 @@ pub fn gen_nistp256() -> Result, Error> { #[instrument(skip_all)] pub fn make_root_cert( root_key: &PKey, - hostname: &Hostname, + hostname: &ServerHostname, start_time: SystemTime, ) -> Result { let mut builder = X509Builder::new()?; @@ -300,7 +300,8 @@ pub fn make_root_cert( builder.set_serial_number(&*rand_serial()?)?; let mut subject_name_builder = X509NameBuilder::new()?; - subject_name_builder.append_entry_by_text("CN", &format!("{} Local Root CA", &*hostname.0))?; + subject_name_builder + .append_entry_by_text("CN", &format!("{} Local Root CA", hostname.as_ref()))?; subject_name_builder.append_entry_by_text("O", "Start9")?; subject_name_builder.append_entry_by_text("OU", "StartOS")?; let subject_name = 
subject_name_builder.build(); diff --git a/core/src/net/static_server.rs b/core/src/net/static_server.rs index 08af326b6..0d1fb458c 100644 --- a/core/src/net/static_server.rs +++ b/core/src/net/static_server.rs @@ -9,14 +9,14 @@ use async_compression::tokio::bufread::GzipEncoder; use axum::Router; use axum::body::Body; use axum::extract::{self as x, Request}; -use axum::response::{IntoResponse, Redirect, Response}; +use axum::response::Response; use axum::routing::{any, get}; use base64::display::Base64Display; use digest::Digest; use futures::future::ready; use http::header::{ ACCEPT_ENCODING, ACCEPT_RANGES, CACHE_CONTROL, CONNECTION, CONTENT_ENCODING, CONTENT_LENGTH, - CONTENT_RANGE, CONTENT_TYPE, ETAG, HOST, RANGE, + CONTENT_RANGE, CONTENT_TYPE, ETAG, RANGE, }; use http::request::Parts as RequestParts; use http::{HeaderValue, Method, StatusCode}; @@ -31,13 +31,11 @@ use tokio_util::io::ReaderStream; use url::Url; use crate::context::{DiagnosticContext, InitContext, RpcContext, SetupContext}; -use crate::hostname::Hostname; +use crate::hostname::ServerHostname; use crate::middleware::auth::Auth; use crate::middleware::auth::session::ValidSessionToken; use crate::middleware::cors::Cors; use crate::middleware::db::SyncDb; -use crate::net::gateway::GatewayInfo; -use crate::net::tls::TlsHandshakeInfo; use crate::prelude::*; use crate::rpc_continuations::{Guid, RpcContinuations}; use crate::s9pk::S9pk; @@ -89,30 +87,6 @@ impl UiContext for RpcContext { .middleware(SyncDb::new()) } fn extend_router(self, router: Router) -> Router { - async fn https_redirect_if_public_http( - req: Request, - next: axum::middleware::Next, - ) -> Response { - if req - .extensions() - .get::() - .map_or(false, |p| p.info.public()) - && req.extensions().get::().is_none() - { - Redirect::temporary(&format!( - "https://{}{}", - req.headers() - .get(HOST) - .and_then(|s| s.to_str().ok()) - .unwrap_or("localhost"), - req.uri() - )) - .into_response() - } else { - next.run(req).await - } - } - 
router .route("/proxy/{url}", { let ctx = self.clone(); @@ -131,12 +105,12 @@ impl UiContext for RpcContext { get(move || { let ctx = self.clone(); async move { - ctx.account - .peek(|account| cert_send(&account.root_ca_cert, &account.hostname)) + ctx.account.peek(|account| { + cert_send(&account.root_ca_cert, &account.hostname.hostname) + }) } }), ) - .layer(axum::middleware::from_fn(https_redirect_if_public_http)) } } @@ -446,7 +420,7 @@ pub fn bad_request() -> Response { .unwrap() } -fn cert_send(cert: &X509, hostname: &Hostname) -> Result { +fn cert_send(cert: &X509, hostname: &ServerHostname) -> Result { let pem = cert.to_pem()?; Response::builder() .status(StatusCode::OK) @@ -462,7 +436,7 @@ fn cert_send(cert: &X509, hostname: &Hostname) -> Result { .header(http::header::CONTENT_LENGTH, pem.len()) .header( http::header::CONTENT_DISPOSITION, - format!("attachment; filename={}.crt", &hostname.0), + format!("attachment; filename={}.crt", hostname.as_ref()), ) .body(Body::from(pem)) .with_kind(ErrorKind::Network) diff --git a/core/src/net/tls.rs b/core/src/net/tls.rs index 99f36e838..3d8c1b1a4 100644 --- a/core/src/net/tls.rs +++ b/core/src/net/tls.rs @@ -1,5 +1,6 @@ use std::sync::Arc; use std::task::{Poll, ready}; +use std::time::Duration; use futures::future::BoxFuture; use futures::stream::FuturesUnordered; @@ -170,7 +171,7 @@ where let (metadata, stream) = ready!(self.accept.poll_accept(cx)?); let mut tls_handler = self.tls_handler.clone(); let mut fut = async move { - let res = async { + let res = match tokio::time::timeout(Duration::from_secs(15), async { let mut acceptor = LazyConfigAcceptor::new(Acceptor::default(), BackTrackingIO::new(stream)); let mut mid: tokio_rustls::StartHandshake> = @@ -233,14 +234,22 @@ where } Ok(None) - } - .await; + }) + .await + { + Ok(res) => res, + Err(_) => { + tracing::trace!("TLS handshake timed out"); + Ok(None) + } + }; (tls_handler, res) } .boxed(); match fut.poll_unpin(cx) { Poll::Pending => { in_progress.push(fut); 
+ cx.waker().wake_by_ref(); Poll::Pending } Poll::Ready((handler, res)) => { diff --git a/core/src/net/tor/arti.rs b/core/src/net/tor/arti.rs deleted file mode 100644 index c44d8d528..000000000 --- a/core/src/net/tor/arti.rs +++ /dev/null @@ -1,964 +0,0 @@ -use std::borrow::Cow; -use std::collections::{BTreeMap, BTreeSet}; -use std::net::SocketAddr; -use std::str::FromStr; -use std::sync::{Arc, Weak}; -use std::time::{Duration, Instant}; - -use arti_client::config::onion_service::OnionServiceConfigBuilder; -use arti_client::{TorClient, TorClientConfig}; -use base64::Engine; -use clap::Parser; -use color_eyre::eyre::eyre; -use futures::{FutureExt, StreamExt}; -use imbl_value::InternedString; -use itertools::Itertools; -use rpc_toolkit::{Context, Empty, HandlerExt, ParentHandler, from_fn_async}; -use serde::{Deserialize, Serialize}; -use tokio::io::{AsyncReadExt, AsyncWriteExt}; -use tokio::net::TcpStream; -use tokio::sync::Notify; -use tor_cell::relaycell::msg::Connected; -use tor_hscrypto::pk::{HsId, HsIdKeypair}; -use tor_hsservice::status::State as ArtiOnionServiceState; -use tor_hsservice::{HsNickname, RunningOnionService}; -use tor_keymgr::config::ArtiKeystoreKind; -use tor_proto::client::stream::IncomingStreamRequest; -use tor_rtcompat::tokio::TokioRustlsRuntime; -use ts_rs::TS; - -use crate::context::{CliContext, RpcContext}; -use crate::prelude::*; -use crate::util::actor::background::BackgroundJobQueue; -use crate::util::future::{NonDetachingJoinHandle, Until}; -use crate::util::io::ReadWriter; -use crate::util::serde::{ - BASE64, Base64, HandlerExtSerde, WithIoFormat, deserialize_from_str, display_serializable, - serialize_display, -}; -use crate::util::sync::{SyncMutex, SyncRwLock, Watch}; - -const BOOTSTRAP_PROGRESS_TIMEOUT: Duration = Duration::from_secs(300); -const HS_BOOTSTRAP_TIMEOUT: Duration = Duration::from_secs(300); -const RETRY_COOLDOWN: Duration = Duration::from_secs(15); -const HEALTH_CHECK_FAILURE_ALLOWANCE: usize = 5; -const 
HEALTH_CHECK_COOLDOWN: Duration = Duration::from_secs(120); - -#[derive(Debug, Clone, Copy)] -pub struct OnionAddress(pub HsId); -impl std::fmt::Display for OnionAddress { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - safelog::DisplayRedacted::fmt_unredacted(&self.0, f) - } -} -impl FromStr for OnionAddress { - type Err = Error; - fn from_str(s: &str) -> Result { - Ok(Self( - if s.ends_with(".onion") { - Cow::Borrowed(s) - } else { - Cow::Owned(format!("{s}.onion")) - } - .parse::() - .with_kind(ErrorKind::Tor)?, - )) - } -} -impl Serialize for OnionAddress { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - serialize_display(self, serializer) - } -} -impl<'de> Deserialize<'de> for OnionAddress { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - deserialize_from_str(deserializer) - } -} -impl PartialEq for OnionAddress { - fn eq(&self, other: &Self) -> bool { - self.0.as_ref() == other.0.as_ref() - } -} -impl Eq for OnionAddress {} -impl PartialOrd for OnionAddress { - fn partial_cmp(&self, other: &Self) -> Option { - self.0.as_ref().partial_cmp(other.0.as_ref()) - } -} -impl Ord for OnionAddress { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.0.as_ref().cmp(other.0.as_ref()) - } -} - -pub struct TorSecretKey(pub HsIdKeypair); -impl TorSecretKey { - pub fn onion_address(&self) -> OnionAddress { - OnionAddress(HsId::from(self.0.as_ref().public().to_bytes())) - } - pub fn from_bytes(bytes: [u8; 64]) -> Result { - Ok(Self( - tor_llcrypto::pk::ed25519::ExpandedKeypair::from_secret_key_bytes(bytes) - .ok_or_else(|| { - Error::new( - eyre!("{}", t!("net.tor.invalid-ed25519-key")), - ErrorKind::Tor, - ) - })? 
- .into(), - )) - } - pub fn generate() -> Self { - Self( - tor_llcrypto::pk::ed25519::ExpandedKeypair::from( - &tor_llcrypto::pk::ed25519::Keypair::generate(&mut rand::rng()), - ) - .into(), - ) - } -} -impl Clone for TorSecretKey { - fn clone(&self) -> Self { - Self(HsIdKeypair::from( - tor_llcrypto::pk::ed25519::ExpandedKeypair::from_secret_key_bytes( - self.0.as_ref().to_secret_key_bytes(), - ) - .unwrap(), - )) - } -} -impl std::fmt::Display for TorSecretKey { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!( - f, - "{}", - BASE64.encode(self.0.as_ref().to_secret_key_bytes()) - ) - } -} -impl FromStr for TorSecretKey { - type Err = Error; - fn from_str(s: &str) -> Result { - Self::from_bytes(Base64::<[u8; 64]>::from_str(s)?.0) - } -} -impl Serialize for TorSecretKey { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - serialize_display(self, serializer) - } -} -impl<'de> Deserialize<'de> for TorSecretKey { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - deserialize_from_str(deserializer) - } -} - -#[derive(Default, Deserialize, Serialize)] -pub struct OnionStore(BTreeMap); -impl Map for OnionStore { - type Key = OnionAddress; - type Value = TorSecretKey; - fn key_str(key: &Self::Key) -> Result, Error> { - Self::key_string(key) - } - fn key_string(key: &Self::Key) -> Result { - Ok(InternedString::from_display(key)) - } -} -impl OnionStore { - pub fn new() -> Self { - Self::default() - } - pub fn insert(&mut self, key: TorSecretKey) { - self.0.insert(key.onion_address(), key); - } -} -impl Model { - pub fn new_key(&mut self) -> Result { - let key = TorSecretKey::generate(); - self.insert(&key.onion_address(), &key)?; - Ok(key) - } - pub fn insert_key(&mut self, key: &TorSecretKey) -> Result<(), Error> { - self.insert(&key.onion_address(), &key) - } - pub fn get_key(&self, address: &OnionAddress) -> Result { - self.as_idx(address) - 
.or_not_found(lazy_format!("private key for {address}"))? - .de() - } -} -impl std::fmt::Debug for OnionStore { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - struct OnionStoreMap<'a>(&'a BTreeMap); - impl<'a> std::fmt::Debug for OnionStoreMap<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - #[derive(Debug)] - struct KeyFor(#[allow(unused)] OnionAddress); - let mut map = f.debug_map(); - for (k, v) in self.0 { - map.key(k); - map.value(&KeyFor(v.onion_address())); - } - map.finish() - } - } - f.debug_tuple("OnionStore") - .field(&OnionStoreMap(&self.0)) - .finish() - } -} - -pub fn tor_api() -> ParentHandler { - ParentHandler::new() - .subcommand( - "list-services", - from_fn_async(list_services) - .with_display_serializable() - .with_custom_display_fn(|handle, result| display_services(handle.params, result)) - .with_about("about.display-tor-v3-onion-addresses") - .with_call_remote::(), - ) - .subcommand( - "reset", - from_fn_async(reset) - .no_display() - .with_about("about.reset-tor-daemon") - .with_call_remote::(), - ) - .subcommand( - "key", - key::().with_about("about.manage-onion-service-key-store"), - ) -} - -pub fn key() -> ParentHandler { - ParentHandler::new() - .subcommand( - "generate", - from_fn_async(generate_key) - .with_about("about.generate-onion-service-key-add-to-store") - .with_call_remote::(), - ) - .subcommand( - "add", - from_fn_async(add_key) - .with_about("about.add-onion-service-key-to-store") - .with_call_remote::(), - ) - .subcommand( - "list", - from_fn_async(list_keys) - .with_custom_display_fn(|_, res| { - for addr in res { - println!("{addr}"); - } - Ok(()) - }) - .with_about("about.list-onion-services-with-keys-in-store") - .with_call_remote::(), - ) -} - -pub async fn generate_key(ctx: RpcContext) -> Result { - ctx.db - .mutate(|db| { - Ok(db - .as_private_mut() - .as_key_store_mut() - .as_onion_mut() - .new_key()? 
- .onion_address()) - }) - .await - .result -} - -#[derive(Deserialize, Serialize, Parser)] -pub struct AddKeyParams { - #[arg(help = "help.arg.onion-secret-key")] - pub key: Base64<[u8; 64]>, -} - -pub async fn add_key( - ctx: RpcContext, - AddKeyParams { key }: AddKeyParams, -) -> Result { - let key = TorSecretKey::from_bytes(key.0)?; - ctx.db - .mutate(|db| { - db.as_private_mut() - .as_key_store_mut() - .as_onion_mut() - .insert_key(&key) - }) - .await - .result?; - Ok(key.onion_address()) -} - -pub async fn list_keys(ctx: RpcContext) -> Result, Error> { - ctx.db - .peek() - .await - .into_private() - .into_key_store() - .into_onion() - .keys() -} - -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct ResetParams { - #[arg( - name = "wipe-state", - short = 'w', - long = "wipe-state", - help = "help.arg.wipe-tor-state" - )] - wipe_state: bool, -} - -pub async fn reset(ctx: RpcContext, ResetParams { wipe_state }: ResetParams) -> Result<(), Error> { - ctx.net_controller.tor.reset(wipe_state).await -} - -pub fn display_services( - params: WithIoFormat, - services: BTreeMap, -) -> Result<(), Error> { - use prettytable::*; - - if let Some(format) = params.format { - return display_serializable(format, services); - } - - let mut table = Table::new(); - table.add_row(row![bc => "ADDRESS", "STATE", "BINDINGS"]); - for (service, info) in services { - let row = row![ - &service.to_string(), - &format!("{:?}", info.state), - &info - .bindings - .into_iter() - .map(|(port, addr)| lazy_format!("{port} -> {addr}")) - .join("; ") - ]; - table.add_row(row); - } - table.print_tty(false)?; - Ok(()) -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "kebab-case")] -pub enum OnionServiceState { - Shutdown, - Bootstrapping, - DegradedReachable, - DegradedUnreachable, - Running, - Recovering, - Broken, -} -impl From for OnionServiceState { - fn from(value: ArtiOnionServiceState) -> Self 
{ - match value { - ArtiOnionServiceState::Shutdown => Self::Shutdown, - ArtiOnionServiceState::Bootstrapping => Self::Bootstrapping, - ArtiOnionServiceState::DegradedReachable => Self::DegradedReachable, - ArtiOnionServiceState::DegradedUnreachable => Self::DegradedUnreachable, - ArtiOnionServiceState::Running => Self::Running, - ArtiOnionServiceState::Recovering => Self::Recovering, - ArtiOnionServiceState::Broken => Self::Broken, - _ => unreachable!(), - } - } -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct OnionServiceInfo { - pub state: OnionServiceState, - pub bindings: BTreeMap, -} - -pub async fn list_services( - ctx: RpcContext, - _: Empty, -) -> Result, Error> { - ctx.net_controller.tor.list_services().await -} - -#[derive(Clone)] -pub struct TorController(Arc); -struct TorControllerInner { - client: Watch<(usize, TorClient)>, - _bootstrapper: NonDetachingJoinHandle<()>, - services: SyncMutex>, - reset: Arc, -} -impl TorController { - pub fn new() -> Result { - let mut config = TorClientConfig::builder(); - config - .storage() - .keystore() - .primary() - .kind(ArtiKeystoreKind::Ephemeral.into()); - let client = Watch::new(( - 0, - TorClient::with_runtime(TokioRustlsRuntime::current()?) - .config(config.build().with_kind(ErrorKind::Tor)?) 
- .local_resource_timeout(Duration::from_secs(0)) - .create_unbootstrapped()?, - )); - let reset = Arc::new(Notify::new()); - let bootstrapper_reset = reset.clone(); - let bootstrapper_client = client.clone(); - let bootstrapper = tokio::spawn(async move { - loop { - let (epoch, client): (usize, _) = bootstrapper_client.read(); - if let Err(e) = Until::new() - .with_async_fn(|| bootstrapper_reset.notified().map(Ok)) - .run(async { - let mut events = client.bootstrap_events(); - let bootstrap_fut = - client.bootstrap().map(|res| res.with_kind(ErrorKind::Tor)); - let failure_fut = async { - let mut prev_frac = 0_f32; - let mut prev_inst = Instant::now(); - while let Some(event) = - tokio::time::timeout(BOOTSTRAP_PROGRESS_TIMEOUT, events.next()) - .await - .with_kind(ErrorKind::Tor)? - { - if event.ready_for_traffic() { - return Ok::<_, Error>(()); - } - let frac = event.as_frac(); - if frac == prev_frac { - if prev_inst.elapsed() > BOOTSTRAP_PROGRESS_TIMEOUT { - return Err(Error::new( - eyre!( - "{}", - t!( - "net.tor.bootstrap-no-progress", - duration = crate::util::serde::Duration::from( - BOOTSTRAP_PROGRESS_TIMEOUT - ) - .to_string() - ) - ), - ErrorKind::Tor, - )); - } - } else { - prev_frac = frac; - prev_inst = Instant::now(); - } - } - futures::future::pending().await - }; - if let Err::<(), Error>(e) = tokio::select! 
{ - res = bootstrap_fut => res, - res = failure_fut => res, - } { - tracing::error!( - "{}", - t!("net.tor.bootstrap-error", error = e.to_string()) - ); - tracing::debug!("{e:?}"); - } else { - bootstrapper_client.send_modify(|_| ()); - - for _ in 0..HEALTH_CHECK_FAILURE_ALLOWANCE { - if let Err::<(), Error>(e) = async { - loop { - let (bg, mut runner) = BackgroundJobQueue::new(); - runner - .run_while(async { - const PING_BUF_LEN: usize = 8; - let key = TorSecretKey::generate(); - let onion = key.onion_address(); - let (hs, stream) = client - .launch_onion_service_with_hsid( - OnionServiceConfigBuilder::default() - .nickname( - onion - .to_string() - .trim_end_matches(".onion") - .parse::() - .with_kind(ErrorKind::Tor)?, - ) - .build() - .with_kind(ErrorKind::Tor)?, - key.clone().0, - ) - .with_kind(ErrorKind::Tor)?; - bg.add_job(async move { - if let Err(e) = async { - let mut stream = - tor_hsservice::handle_rend_requests( - stream, - ); - while let Some(req) = stream.next().await { - let mut stream = req - .accept(Connected::new_empty()) - .await - .with_kind(ErrorKind::Tor)?; - let mut buf = [0; PING_BUF_LEN]; - stream.read_exact(&mut buf).await?; - stream.write_all(&buf).await?; - stream.flush().await?; - stream.shutdown().await?; - } - Ok::<_, Error>(()) - } - .await - { - tracing::error!( - "{}", - t!( - "net.tor.health-error", - error = e.to_string() - ) - ); - tracing::debug!("{e:?}"); - } - }); - - tokio::time::timeout(HS_BOOTSTRAP_TIMEOUT, async { - let mut status = hs.status_events(); - while let Some(status) = status.next().await { - if status.state().is_fully_reachable() { - return Ok(()); - } - } - Err(Error::new( - eyre!( - "{}", - t!("net.tor.status-stream-ended") - ), - ErrorKind::Tor, - )) - }) - .await - .with_kind(ErrorKind::Tor)??; - - let mut stream = client - .connect((onion.to_string(), 8080)) - .await?; - let mut ping_buf = [0; PING_BUF_LEN]; - rand::fill(&mut ping_buf); - stream.write_all(&ping_buf).await?; - stream.flush().await?; - let 
mut ping_res = [0; PING_BUF_LEN]; - stream.read_exact(&mut ping_res).await?; - ensure_code!( - ping_buf == ping_res, - ErrorKind::Tor, - "ping buffer mismatch" - ); - stream.shutdown().await?; - - Ok::<_, Error>(()) - }) - .await?; - tokio::time::sleep(HEALTH_CHECK_COOLDOWN).await; - } - } - .await - { - tracing::error!( - "{}", - t!("net.tor.client-health-error", error = e.to_string()) - ); - tracing::debug!("{e:?}"); - } - } - tracing::error!( - "{}", - t!( - "net.tor.health-check-failed-recycling", - count = HEALTH_CHECK_FAILURE_ALLOWANCE - ) - ); - } - - Ok(()) - }) - .await - { - tracing::error!( - "{}", - t!("net.tor.bootstrapper-error", error = e.to_string()) - ); - tracing::debug!("{e:?}"); - } - if let Err::<(), Error>(e) = async { - tokio::time::sleep(RETRY_COOLDOWN).await; - bootstrapper_client.send(( - epoch.wrapping_add(1), - TorClient::with_runtime(TokioRustlsRuntime::current()?) - .config(config.build().with_kind(ErrorKind::Tor)?) - .local_resource_timeout(Duration::from_secs(0)) - .create_unbootstrapped_async() - .await?, - )); - tracing::debug!("TorClient recycled"); - Ok(()) - } - .await - { - tracing::error!( - "{}", - t!("net.tor.client-creation-error", error = e.to_string()) - ); - tracing::debug!("{e:?}"); - } - } - }) - .into(); - Ok(Self(Arc::new(TorControllerInner { - client, - _bootstrapper: bootstrapper, - services: SyncMutex::new(BTreeMap::new()), - reset, - }))) - } - - pub fn service(&self, key: TorSecretKey) -> Result { - self.0.services.mutate(|s| { - use std::collections::btree_map::Entry; - let addr = key.onion_address(); - match s.entry(addr) { - Entry::Occupied(e) => Ok(e.get().clone()), - Entry::Vacant(e) => Ok(e - .insert(OnionService::launch(self.0.client.clone(), key)?) 
- .clone()), - } - }) - } - - pub async fn gc(&self, addr: Option) -> Result<(), Error> { - if let Some(addr) = addr { - if let Some(s) = self.0.services.mutate(|s| { - let rm = if let Some(s) = s.get(&addr) { - !s.gc() - } else { - false - }; - if rm { s.remove(&addr) } else { None } - }) { - s.shutdown().await - } else { - Ok(()) - } - } else { - for s in self.0.services.mutate(|s| { - let mut rm = Vec::new(); - s.retain(|_, s| { - if s.gc() { - true - } else { - rm.push(s.clone()); - false - } - }); - rm - }) { - s.shutdown().await?; - } - Ok(()) - } - } - - pub async fn reset(&self, wipe_state: bool) -> Result<(), Error> { - self.0.reset.notify_waiters(); - Ok(()) - } - - pub async fn list_services(&self) -> Result, Error> { - Ok(self - .0 - .services - .peek(|s| s.iter().map(|(a, s)| (a.clone(), s.info())).collect())) - } - - pub async fn connect_onion( - &self, - addr: &OnionAddress, - port: u16, - ) -> Result, Error> { - if let Some(target) = self.0.services.peek(|s| { - s.get(addr).and_then(|s| { - s.0.bindings.peek(|b| { - b.get(&port).and_then(|b| { - b.iter() - .find(|(_, rc)| rc.strong_count() > 0) - .map(|(a, _)| *a) - }) - }) - }) - }) { - let tcp_stream = TcpStream::connect(target) - .await - .with_kind(ErrorKind::Network)?; - if let Err(e) = socket2::SockRef::from(&tcp_stream).set_keepalive(true) { - tracing::error!( - "{}", - t!("net.tor.failed-to-set-tcp-keepalive", error = e.to_string()) - ); - tracing::debug!("{e:?}"); - } - Ok(Box::new(tcp_stream)) - } else { - let mut client = self.0.client.clone(); - client - .wait_for(|(_, c)| c.bootstrap_status().ready_for_traffic()) - .await; - let stream = client - .read() - .1 - .connect((addr.to_string(), port)) - .await - .with_kind(ErrorKind::Tor)?; - Ok(Box::new(stream)) - } - } -} - -#[derive(Clone)] -pub struct OnionService(Arc); -struct OnionServiceData { - service: Arc>>>, - bindings: Arc>>>>, - _thread: NonDetachingJoinHandle<()>, -} -impl OnionService { - fn launch( - mut client: Watch<(usize, 
TorClient)>, - key: TorSecretKey, - ) -> Result { - let service = Arc::new(SyncMutex::new(None)); - let bindings = Arc::new(SyncRwLock::new(BTreeMap::< - u16, - BTreeMap>, - >::new())); - Ok(Self(Arc::new(OnionServiceData { - service: service.clone(), - bindings: bindings.clone(), - _thread: tokio::spawn(async move { - let (bg, mut runner) = BackgroundJobQueue::new(); - runner - .run_while(async { - loop { - if let Err(e) = async { - client.wait_for(|(_,c)| c.bootstrap_status().ready_for_traffic()).await; - let epoch = client.peek(|(e, c)| { - ensure_code!(c.bootstrap_status().ready_for_traffic(), ErrorKind::Tor, "TorClient recycled"); - Ok::<_, Error>(*e) - })?; - let addr = key.onion_address(); - let (new_service, stream) = client.peek(|(_, c)| { - c.launch_onion_service_with_hsid( - OnionServiceConfigBuilder::default() - .nickname( - addr - .to_string() - .trim_end_matches(".onion") - .parse::() - .with_kind(ErrorKind::Tor)?, - ) - .build() - .with_kind(ErrorKind::Tor)?, - key.clone().0, - ) - .with_kind(ErrorKind::Tor) - })?; - let mut status_stream = new_service.status_events(); - let mut status = new_service.status(); - if status.state().is_fully_reachable() { - tracing::debug!("{addr} is fully reachable"); - } else { - tracing::debug!("{addr} is not fully reachable"); - } - bg.add_job(async move { - while let Some(new_status) = status_stream.next().await { - if status.state().is_fully_reachable() && !new_status.state().is_fully_reachable() { - tracing::debug!("{addr} is no longer fully reachable"); - } else if !status.state().is_fully_reachable() && new_status.state().is_fully_reachable() { - tracing::debug!("{addr} is now fully reachable"); - } - status = new_status; - // TODO: health daemon? - } - }); - service.replace(Some(new_service)); - let mut stream = tor_hsservice::handle_rend_requests(stream); - while let Some(req) = tokio::select! 
{ - req = stream.next() => req, - _ = client.wait_for(|(e, _)| *e != epoch) => None - } { - bg.add_job({ - let bg = bg.clone(); - let bindings = bindings.clone(); - async move { - if let Err(e) = async { - let IncomingStreamRequest::Begin(begin) = - req.request() - else { - return req - .reject(tor_cell::relaycell::msg::End::new_with_reason( - tor_cell::relaycell::msg::EndReason::DONE, - )) - .await - .with_kind(ErrorKind::Tor); - }; - let Some(target) = bindings.peek(|b| { - b.get(&begin.port()).and_then(|a| { - a.iter() - .find(|(_, rc)| rc.strong_count() > 0) - .map(|(addr, _)| *addr) - }) - }) else { - return req - .reject(tor_cell::relaycell::msg::End::new_with_reason( - tor_cell::relaycell::msg::EndReason::DONE, - )) - .await - .with_kind(ErrorKind::Tor); - }; - bg.add_job(async move { - if let Err(e) = async { - let mut outgoing = - TcpStream::connect(target) - .await - .with_kind(ErrorKind::Network)?; - if let Err(e) = socket2::SockRef::from(&outgoing).set_keepalive(true) { - tracing::error!("{}", t!("net.tor.failed-to-set-tcp-keepalive", error = e.to_string())); - tracing::debug!("{e:?}"); - } - let mut incoming = req - .accept(Connected::new_empty()) - .await - .with_kind(ErrorKind::Tor)?; - if let Err(e) = - tokio::io::copy_bidirectional( - &mut outgoing, - &mut incoming, - ) - .await - { - tracing::trace!("Tor Stream Error: {e}"); - tracing::trace!("{e:?}"); - } - - Ok::<_, Error>(()) - } - .await - { - tracing::trace!("Tor Stream Error: {e}"); - tracing::trace!("{e:?}"); - } - }); - Ok::<_, Error>(()) - } - .await - { - tracing::trace!("Tor Request Error: {e}"); - tracing::trace!("{e:?}"); - } - } - }); - } - Ok::<_, Error>(()) - } - .await - { - tracing::error!("{}", t!("net.tor.client-error", error = e.to_string())); - tracing::debug!("{e:?}"); - } - } - }) - .await - }) - .into(), - }))) - } - - pub async fn proxy_all>>( - &self, - bindings: impl IntoIterator, - ) -> Result { - Ok(self.0.bindings.mutate(|b| { - bindings - .into_iter() - .map(|(port, 
target)| { - let entry = b.entry(port).or_default().entry(target).or_default(); - if let Some(rc) = entry.upgrade() { - rc - } else { - let rc = Arc::new(()); - *entry = Arc::downgrade(&rc); - rc - } - }) - .collect() - })) - } - - pub fn gc(&self) -> bool { - self.0.bindings.mutate(|b| { - b.retain(|_, targets| { - targets.retain(|_, rc| rc.strong_count() > 0); - !targets.is_empty() - }); - !b.is_empty() - }) - } - - pub async fn shutdown(self) -> Result<(), Error> { - self.0.service.replace(None); - self.0._thread.abort(); - Ok(()) - } - - pub fn state(&self) -> OnionServiceState { - self.0 - .service - .peek(|s| s.as_ref().map(|s| s.status().state().into())) - .unwrap_or(OnionServiceState::Bootstrapping) - } - - pub fn info(&self) -> OnionServiceInfo { - OnionServiceInfo { - state: self.state(), - bindings: self.0.bindings.peek(|b| { - b.iter() - .filter_map(|(port, b)| { - b.iter() - .find(|(_, rc)| rc.strong_count() > 0) - .map(|(addr, _)| (*port, *addr)) - }) - .collect() - }), - } - } -} diff --git a/core/src/net/tor/ctor.rs b/core/src/net/tor/ctor.rs deleted file mode 100644 index 1fc3485fe..000000000 --- a/core/src/net/tor/ctor.rs +++ /dev/null @@ -1,1092 +0,0 @@ -use std::collections::{BTreeMap, BTreeSet}; -use std::net::{Ipv4Addr, SocketAddr, SocketAddrV4}; -use std::str::FromStr; -use std::sync::atomic::AtomicBool; -use std::sync::{Arc, Weak}; -use std::time::Duration; - -use base64::Engine; -use clap::Parser; -use color_eyre::eyre::eyre; -use futures::future::BoxFuture; -use futures::{FutureExt, TryFutureExt, TryStreamExt}; -use imbl::OrdMap; -use imbl_value::InternedString; -use lazy_static::lazy_static; -use regex::Regex; -use rpc_toolkit::{Context, Empty, HandlerExt, ParentHandler, from_fn_async}; -use serde::{Deserialize, Serialize}; -use tokio::net::TcpStream; -use tokio::process::Command; -use tokio::sync::{mpsc, oneshot}; -use tokio::time::Instant; -use torut::control::{AsyncEvent, AuthenticatedConn, ConnError}; -use 
torut::onion::{OnionAddressV3, TorSecretKeyV3}; -use tracing::instrument; -use ts_rs::TS; - -use crate::context::{CliContext, RpcContext}; -use crate::logs::{LogSource, LogsParams, journalctl}; -use crate::prelude::*; -use crate::util::Invoke; -use crate::util::collections::ordmap_retain; -use crate::util::future::NonDetachingJoinHandle; -use crate::util::io::{ReadWriter, write_file_atomic}; -use crate::util::serde::{ - BASE64, Base64, HandlerExtSerde, WithIoFormat, deserialize_from_str, display_serializable, - serialize_display, -}; -use crate::util::sync::Watch; - -pub const SYSTEMD_UNIT: &str = "tor@default"; -const STARTING_HEALTH_TIMEOUT: u64 = 120; // 2min - -const TOR_CONTROL: SocketAddr = - SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 1, 1), 9051)); - -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub struct OnionAddress(OnionAddressV3); -impl std::fmt::Display for OnionAddress { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", self.0) - } -} -impl FromStr for OnionAddress { - type Err = Error; - fn from_str(s: &str) -> Result { - Ok(Self( - s.strip_suffix(".onion") - .unwrap_or(s) - .rsplit(".") - .next() - .unwrap_or(s) - .parse::() - .with_kind(ErrorKind::Tor)?, - )) - } -} -impl Serialize for OnionAddress { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - serialize_display(self, serializer) - } -} -impl<'de> Deserialize<'de> for OnionAddress { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - deserialize_from_str(deserializer) - } -} -impl Ord for OnionAddress { - fn cmp(&self, other: &Self) -> std::cmp::Ordering { - self.0.get_raw_bytes().cmp(&other.0.get_raw_bytes()) - } -} -impl PartialOrd for OnionAddress { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -#[derive(Clone, Debug, PartialEq, Eq)] -pub struct TorSecretKey(pub TorSecretKeyV3); -impl TorSecretKey { - pub fn onion_address(&self) -> 
OnionAddress { - OnionAddress(self.0.public().get_onion_address()) - } - pub fn from_bytes(bytes: [u8; 64]) -> Result { - Ok(Self(TorSecretKeyV3::from(bytes))) - } - pub fn generate() -> Self { - Self(TorSecretKeyV3::generate()) - } - pub fn is_valid(&self) -> bool { - let bytes = self.0.as_bytes()[..32].try_into().unwrap(); - curve25519_dalek::scalar::clamp_integer(bytes) == bytes - } -} -impl std::fmt::Display for TorSecretKey { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(f, "{}", BASE64.encode(self.0.as_bytes())) - } -} -impl FromStr for TorSecretKey { - type Err = Error; - fn from_str(s: &str) -> Result { - Self::from_bytes(Base64::<[u8; 64]>::from_str(s)?.0) - } -} -impl Serialize for TorSecretKey { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - serialize_display(self, serializer) - } -} -impl<'de> Deserialize<'de> for TorSecretKey { - fn deserialize(deserializer: D) -> Result - where - D: serde::Deserializer<'de>, - { - deserialize_from_str(deserializer) - } -} - -#[test] -fn test_generated_is_valid() { - for _ in 0..100 { - assert!(TorSecretKey::generate().is_valid()); - } -} - -#[test] -fn test_tor_key() { - // let key = crate::util::crypto::ed25519_expand_key( - // &hex::decode("c4b1a617bfdbcfb3f31e98c95542ce61718100e81cc6766eeebaa0dab42f0a93") - // .unwrap() - // .try_into() - // .unwrap(), - // ); - let key = - "4FpKpT4GZeEkUvH32AWMsndW+EG3XH46EmSFTh286G4AfG2U/Cc7y7L6k1dW5bl996QGDwe8gnaglq2hR2aD2w" - .parse::() - .unwrap(); - assert_eq!( - InternedString::from_display(&key.onion_address()), - InternedString::from("ja24lucrzgcusm72r2kmiujaa2g6b5o2w4wrwt5crfrhaz2qek5ozhqd.onion") - ); - eprintln!("{:?}", key.0.as_bytes()); - dbg!(key.to_string()); - dbg!(key.0.as_bytes()[0] & 0b111); - dbg!(key.onion_address()); - assert!(key.is_valid()); -} - -#[derive(Default, Deserialize, Serialize)] -pub struct OnionStore(BTreeMap); -impl Map for OnionStore { - type Key = OnionAddress; - type 
Value = TorSecretKey; - fn key_str(key: &Self::Key) -> Result, Error> { - Self::key_string(key) - } - fn key_string(key: &Self::Key) -> Result { - Ok(InternedString::from_display(key)) - } -} -impl OnionStore { - pub fn new() -> Self { - Self::default() - } - pub fn insert(&mut self, key: TorSecretKey) { - self.0.insert(key.onion_address(), key); - } -} -impl Model { - pub fn new_key(&mut self) -> Result { - let key = TorSecretKey::generate(); - self.insert_key(&key)?; - Ok(key) - } - pub fn insert_key(&mut self, key: &TorSecretKey) -> Result<(), Error> { - self.insert(&key.onion_address(), &key).map(|_| ()) - } - pub fn get_key(&self, address: &OnionAddress) -> Result { - self.as_idx(address) - .or_not_found(lazy_format!("private key for {address}"))? - .de() - } -} -impl std::fmt::Debug for OnionStore { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - struct OnionStoreMap<'a>(&'a BTreeMap); - impl<'a> std::fmt::Debug for OnionStoreMap<'a> { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - #[derive(Debug)] - struct KeyFor(#[allow(unused)] OnionAddress); - let mut map = f.debug_map(); - for (k, v) in self.0 { - map.key(k); - map.value(&KeyFor(v.onion_address())); - } - map.finish() - } - } - f.debug_tuple("OnionStore") - .field(&OnionStoreMap(&self.0)) - .finish() - } -} - -enum ErrorLogSeverity { - Fatal { wipe_state: bool }, - Unknown { wipe_state: bool }, -} - -lazy_static! 
{ - static ref LOG_REGEXES: Vec<(Regex, ErrorLogSeverity)> = vec![( - Regex::new("This could indicate a route manipulation attack, network overload, bad local network connectivity, or a bug\\.").unwrap(), - ErrorLogSeverity::Unknown { wipe_state: true } - ),( - Regex::new("died due to an invalid selected path").unwrap(), - ErrorLogSeverity::Fatal { wipe_state: false } - ),( - Regex::new("Tor has not observed any network activity for the past").unwrap(), - ErrorLogSeverity::Unknown { wipe_state: false } - )]; - static ref PROGRESS_REGEX: Regex = Regex::new("PROGRESS=([0-9]+)").unwrap(); -} - -pub fn tor_api() -> ParentHandler { - ParentHandler::new() - .subcommand( - "list-services", - from_fn_async(list_services) - .with_display_serializable() - .with_custom_display_fn(|handle, result| display_services(handle.params, result)) - .with_about("about.display-tor-v3-onion-addresses") - .with_call_remote::(), - ) - .subcommand("logs", logs().with_about("about.display-tor-logs")) - .subcommand( - "logs", - from_fn_async(crate::logs::cli_logs::) - .no_display() - .with_about("about.display-tor-logs"), - ) - .subcommand( - "reset", - from_fn_async(reset) - .no_display() - .with_about("about.reset-tor-daemon") - .with_call_remote::(), - ) - .subcommand( - "key", - key::().with_about("about.manage-onion-service-key-store"), - ) -} - -pub fn key() -> ParentHandler { - ParentHandler::new() - .subcommand( - "generate", - from_fn_async(generate_key) - .with_about("about.generate-onion-service-key-add-to-store") - .with_call_remote::(), - ) - .subcommand( - "add", - from_fn_async(add_key) - .with_about("about.add-onion-service-key-to-store") - .with_call_remote::(), - ) - .subcommand( - "list", - from_fn_async(list_keys) - .with_custom_display_fn(|_, res| { - for addr in res { - println!("{addr}"); - } - Ok(()) - }) - .with_about("about.list-onion-services-with-keys-in-store") - .with_call_remote::(), - ) -} - -pub async fn generate_key(ctx: RpcContext) -> Result { - ctx.db - 
.mutate(|db| { - Ok(db - .as_private_mut() - .as_key_store_mut() - .as_onion_mut() - .new_key()? - .onion_address()) - }) - .await - .result -} - -#[derive(Deserialize, Serialize, Parser)] -pub struct AddKeyParams { - #[arg(help = "help.arg.onion-secret-key")] - pub key: Base64<[u8; 64]>, -} - -pub async fn add_key( - ctx: RpcContext, - AddKeyParams { key }: AddKeyParams, -) -> Result { - let key = TorSecretKey::from_bytes(key.0)?; - ctx.db - .mutate(|db| { - db.as_private_mut() - .as_key_store_mut() - .as_onion_mut() - .insert_key(&key) - }) - .await - .result?; - Ok(key.onion_address()) -} - -pub async fn list_keys(ctx: RpcContext) -> Result, Error> { - ctx.db - .peek() - .await - .into_private() - .into_key_store() - .into_onion() - .keys() -} - -#[derive(Deserialize, Serialize, Parser, TS)] -#[serde(rename_all = "camelCase")] -#[command(rename_all = "kebab-case")] -pub struct ResetParams { - #[arg( - name = "wipe-state", - short = 'w', - long = "wipe-state", - help = "help.arg.wipe-tor-state" - )] - wipe_state: bool, - #[arg(help = "help.arg.reset-reason")] - reason: String, -} - -pub async fn reset( - ctx: RpcContext, - ResetParams { reason, wipe_state }: ResetParams, -) -> Result<(), Error> { - ctx.net_controller - .tor - .reset(wipe_state, Error::new(eyre!("{reason}"), ErrorKind::Tor)) - .await -} - -pub fn display_services( - params: WithIoFormat, - services: Vec, -) -> Result<(), Error> { - use prettytable::*; - - if let Some(format) = params.format { - return display_serializable(format, services); - } - - let mut table = Table::new(); - for service in services { - let row = row![&service.to_string()]; - table.add_row(row); - } - table.print_tty(false)?; - Ok(()) -} - -pub async fn list_services(ctx: RpcContext, _: Empty) -> Result, Error> { - ctx.net_controller.tor.list_services().await -} - -pub fn logs() -> ParentHandler { - crate::logs::logs::(|_: &RpcContext, _| async { - Ok(LogSource::Unit(SYSTEMD_UNIT)) - }) -} - -fn event_handler(_event: 
AsyncEvent<'static>) -> BoxFuture<'static, Result<(), ConnError>> { - async move { Ok(()) }.boxed() -} - -#[derive(Clone)] -pub struct TorController(Arc); -impl TorController { - const TOR_SOCKS: &[SocketAddr] = &[ - SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(127, 0, 0, 1), 9050)), - SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(10, 0, 3, 1), 9050)), - ]; - - pub fn new() -> Result { - Ok(TorController(Arc::new(TorControl::new( - TOR_CONTROL, - Self::TOR_SOCKS, - )))) - } - - pub fn service(&self, key: TorSecretKey) -> Result { - Ok(TorService { - services: self.0.services.clone(), - key, - }) - } - - pub async fn gc(&self, addr: Option) -> Result<(), Error> { - self.0.services.send_if_modified(|services| { - let mut changed = false; - let mut gc = |bindings: &mut OrdMap>>| { - ordmap_retain(bindings, |_, targets| { - let start_len = targets.len(); - targets.retain(|_, rc| rc.strong_count() > 0); - changed |= start_len != targets.len(); - !targets.is_empty() - }); - if bindings.is_empty() { - changed = true; - false - } else { - true - } - }; - if let Some(addr) = addr { - if !if let Some((_, bindings, needs_sync)) = services.get_mut(&addr) { - let keep = gc(bindings); - if !keep { - *needs_sync = Some(SyncState::Remove); - } - keep - } else { - true - } { - services.remove(&addr); - } - } else { - services.retain(|_, (_, bindings, _)| gc(bindings)); - } - changed - }); - Ok(()) - } - - pub async fn reset(&self, wipe_state: bool, context: Error) -> Result<(), Error> { - self.0 - .send - .send(TorCommand::Reset { - wipe_state, - context, - }) - .ok() - .ok_or_else(|| Error::new(eyre!("TorControl died"), ErrorKind::Tor)) - } - - pub async fn list_services(&self) -> Result, Error> { - let (reply, res) = oneshot::channel(); - self.0 - .send - .send(TorCommand::GetInfo { - query: "onions/current".into(), - reply, - }) - .ok() - .ok_or_else(|| Error::new(eyre!("TorControl died"), ErrorKind::Tor))?; - res.await - .ok() - .ok_or_else(|| Error::new(eyre!("TorControl 
died"), ErrorKind::Tor))?? - .lines() - .map(|l| l.trim()) - .filter(|l| !l.is_empty()) - .map(|l| l.parse::().with_kind(ErrorKind::Tor)) - .collect() - } - - pub async fn connect_onion( - &self, - addr: &OnionAddress, - port: u16, - ) -> Result, Error> { - if let Some(target) = self.0.services.peek(|s| { - s.get(addr).and_then(|(_, bindings, _)| { - bindings.get(&port).and_then(|b| { - b.iter() - .find(|(_, rc)| rc.strong_count() > 0) - .map(|(a, _)| *a) - }) - }) - }) { - tracing::debug!("Resolving {addr} internally to {target}"); - let tcp_stream = TcpStream::connect(target) - .await - .with_kind(ErrorKind::Network)?; - if let Err(e) = socket2::SockRef::from(&tcp_stream).set_keepalive(true) { - tracing::error!("Failed to set tcp keepalive: {e}"); - tracing::debug!("{e:?}"); - } - Ok(Box::new(tcp_stream)) - } else { - let mut stream = TcpStream::connect(Self::TOR_SOCKS[0]) - .await - .with_kind(ErrorKind::Tor)?; - if let Err(e) = socket2::SockRef::from(&stream).set_keepalive(true) { - tracing::error!("Failed to set tcp keepalive: {e}"); - tracing::debug!("{e:?}"); - } - socks5_impl::client::connect(&mut stream, (addr.to_string(), port), None) - .await - .with_kind(ErrorKind::Tor)?; - Ok(Box::new(stream)) - } - } -} - -#[derive(Clone, Copy, PartialEq, Eq)] -enum SyncState { - Add, - Update, - Remove, -} - -pub struct TorService { - services: Watch< - BTreeMap< - OnionAddress, - ( - TorSecretKey, - OrdMap>>, - Option, - ), - >, - >, - key: TorSecretKey, -} - -impl TorService { - pub fn proxy_all>>( - &self, - bindings: impl IntoIterator, - ) -> Rcs { - self.services.send_modify(|services| { - let (_, entry, needs_sync) = services - .entry(self.key.onion_address()) - .or_insert_with(|| (self.key.clone(), OrdMap::new(), Some(SyncState::Add))); - let rcs = bindings - .into_iter() - .map(|(external, target)| { - let binding = entry.entry(external).or_default(); - let target = binding.entry(target).or_default(); - let rc = if let Some(rc) = Weak::upgrade(&*target) { - 
rc - } else { - if needs_sync.is_none() { - *needs_sync = Some(SyncState::Update); - } - Arc::new(()) - }; - *target = Arc::downgrade(&rc); - rc - }) - .collect(); - - rcs - }) - } -} - -type AuthenticatedConnection = AuthenticatedConn< - TcpStream, - Box) -> BoxFuture<'static, Result<(), ConnError>> + Send + Sync>, ->; - -enum TorCommand { - GetInfo { - query: String, - reply: oneshot::Sender>, - }, - Reset { - wipe_state: bool, - context: Error, - }, -} - -#[instrument(skip_all)] -async fn torctl( - tor_control: SocketAddr, - tor_socks: &[SocketAddr], - recv: &mut mpsc::UnboundedReceiver, - services: &mut Watch< - BTreeMap< - OnionAddress, - ( - TorSecretKey, - OrdMap>>, - Option, - ), - >, - >, - wipe_state: &AtomicBool, - health_timeout: &mut Duration, -) -> Result<(), Error> { - let bootstrap = async { - if Command::new("systemctl") - .arg("is-active") - .arg("--quiet") - .arg("tor") - .invoke(ErrorKind::Tor) - .await - .is_ok() - { - Command::new("systemctl") - .arg("stop") - .arg("tor") - .invoke(ErrorKind::Tor) - .await?; - for _ in 0..30 { - if TcpStream::connect(tor_control).await.is_err() { - break; - } - tokio::time::sleep(Duration::from_secs(1)).await; - } - if TcpStream::connect(tor_control).await.is_ok() { - return Err(Error::new( - eyre!("Tor is failing to shut down"), - ErrorKind::Tor, - )); - } - } - if wipe_state.load(std::sync::atomic::Ordering::SeqCst) { - tokio::fs::remove_dir_all("/var/lib/tor").await?; - wipe_state.store(false, std::sync::atomic::Ordering::SeqCst); - } - - write_file_atomic("/etc/tor/torrc", { - use std::fmt::Write; - let mut conf = String::new(); - - for tor_socks in tor_socks { - writeln!(&mut conf, "SocksPort {tor_socks}").unwrap(); - } - writeln!( - &mut conf, - "ControlPort {tor_control}\nCookieAuthentication 1" - ) - .unwrap(); - conf - }) - .await?; - tokio::fs::create_dir_all("/var/lib/tor").await?; - Command::new("chown") - .arg("-R") - .arg("debian-tor") - .arg("/var/lib/tor") - .invoke(ErrorKind::Filesystem) - 
.await?; - Command::new("systemctl") - .arg("start") - .arg("tor") - .invoke(ErrorKind::Tor) - .await?; - - let mut tcp_stream = None; - for _ in 0..60 { - if let Ok(conn) = TcpStream::connect(tor_control).await { - tcp_stream = Some(conn); - break; - } - tokio::time::sleep(Duration::from_secs(1)).await; - } - let tcp_stream = tcp_stream.ok_or_else(|| { - Error::new(eyre!("Timed out waiting for tor to start"), ErrorKind::Tor) - })?; - tracing::info!("Tor is started"); - - if let Err(e) = socket2::SockRef::from(&tcp_stream).set_keepalive(true) { - tracing::error!("Failed to set tcp keepalive: {e}"); - tracing::debug!("{e:?}"); - } - - let mut conn = torut::control::UnauthenticatedConn::new(tcp_stream); - let auth = conn - .load_protocol_info() - .await? - .make_auth_data()? - .ok_or_else(|| eyre!("Cookie Auth Not Available")) - .with_kind(crate::ErrorKind::Tor)?; - conn.authenticate(&auth).await?; - let mut connection: AuthenticatedConnection = conn.into_authenticated().await; - connection.set_async_event_handler(Some(Box::new(|event| event_handler(event)))); - - let mut bootstrapped = false; - let mut last_increment = (String::new(), Instant::now()); - for _ in 0..300 { - match connection.get_info("status/bootstrap-phase").await { - Ok(a) => { - if a.contains("TAG=done") { - bootstrapped = true; - break; - } - if let Some(p) = PROGRESS_REGEX.captures(&a) { - if let Some(p) = p.get(1) { - if p.as_str() != &*last_increment.0 { - last_increment = (p.as_str().into(), Instant::now()); - } - } - } - } - Err(e) => { - let e = Error::from(e); - tracing::error!("{}", e); - tracing::debug!("{:?}", e); - } - } - if last_increment.1.elapsed() > Duration::from_secs(30) { - return Err(Error::new( - eyre!("Tor stuck bootstrapping at {}%", last_increment.0), - ErrorKind::Tor, - )); - } - tokio::time::sleep(Duration::from_secs(1)).await; - } - if !bootstrapped { - return Err(Error::new( - eyre!("Timed out waiting for tor to bootstrap"), - ErrorKind::Tor, - )); - } - Ok(connection) 
- }; - let pre_handler = async { - while let Some(command) = recv.recv().await { - match command { - TorCommand::GetInfo { reply, .. } => { - reply - .send(Err(Error::new( - eyre!("Tor has not finished bootstrapping..."), - ErrorKind::Tor, - ))) - .unwrap_or_default(); - } - TorCommand::Reset { - wipe_state: new_wipe_state, - context, - } => { - wipe_state.fetch_or(new_wipe_state, std::sync::atomic::Ordering::SeqCst); - return Err(context); - } - } - } - Ok(()) - }; - - let mut connection = tokio::select! { - res = bootstrap => res?, - res = pre_handler => return res, - }; - - let hck_key = TorSecretKeyV3::generate(); - connection - .add_onion_v3( - &hck_key, - false, - false, - false, - None, - &mut [(80, SocketAddr::from(([127, 0, 0, 1], 80)))].iter(), - ) - .await?; - - services.send_modify(|s| { - for (_, _, s) in s.values_mut() { - *s = Some(SyncState::Add); - } - }); - - let handler = async { - loop { - let recv = recv.recv(); - tokio::pin!(recv); - let mut changed = services.changed().boxed(); - - match futures::future::select(recv, &mut changed).await { - futures::future::Either::Left((Some(command), _)) => match command { - TorCommand::GetInfo { query, reply } => { - reply - .send(connection.get_info(&query).await.with_kind(ErrorKind::Tor)) - .unwrap_or_default(); - } - TorCommand::Reset { - wipe_state: new_wipe_state, - context, - } => { - wipe_state.fetch_or(new_wipe_state, std::sync::atomic::Ordering::SeqCst); - return Err(context); - } - }, - futures::future::Either::Left((None, _)) => break, - futures::future::Either::Right(_) => { - drop(changed); - let to_add = services.peek_and_mark_seen(|services| { - services - .iter() - .filter(|(_, (_, _, s))| s.is_some()) - .map(|(k, v)| (k.clone(), (*v).clone())) - .collect::>() - }); - - for (addr, (key, bindings, state)) in &to_add { - if matches!(state, Some(SyncState::Update) | Some(SyncState::Remove)) { - connection - .del_onion(&addr.0.get_address_without_dot_onion()) - .await - 
.with_kind(ErrorKind::Tor)?; - } - let bindings = bindings - .iter() - .filter_map(|(external, targets)| { - targets - .iter() - .find(|(_, rc)| rc.strong_count() > 0) - .map(|(target, _)| (*external, *target)) - }) - .collect::>(); - if !bindings.is_empty() { - connection - .add_onion_v3( - &key.0, - false, - false, - false, - None, - &mut bindings.iter(), - ) - .await?; - } - } - services.send_if_modified(|services| { - for (addr, (_, bindings_a, _)) in to_add { - if let Some((_, bindings_b, needs_sync)) = services.get_mut(&addr) { - if OrdMap::ptr_eq(&bindings_a, bindings_b) - || bindings_a.len() == bindings_b.len() - && bindings_a.iter().zip(bindings_b.iter()).all( - |((a_port, a), (b_port, b))| { - a_port == b_port - && a.len() == b.len() - && a.keys().zip(b.keys()).all(|(a, b)| a == b) - }, - ) - { - *needs_sync = None; - } else { - *needs_sync = Some(SyncState::Update); - } - } - } - false - }); - } - } - } - - Ok(()) - }; - let log_parser = async { - loop { - let mut logs = journalctl( - LogSource::Unit(SYSTEMD_UNIT), - Some(0), - None, - Some("0"), - false, - true, - ) - .await?; - while let Some(log) = logs.try_next().await? { - for (regex, severity) in &*LOG_REGEXES { - if regex.is_match(&log.message) { - let (check, wipe_state) = match severity { - ErrorLogSeverity::Fatal { wipe_state } => (false, *wipe_state), - ErrorLogSeverity::Unknown { wipe_state } => (true, *wipe_state), - }; - let addr = hck_key.public().get_onion_address().to_string(); - if !check - || TcpStream::connect(tor_socks) - .map_err(|e| Error::new(e, ErrorKind::Tor)) - .and_then(|mut tor_socks| async move { - tokio::time::timeout( - Duration::from_secs(30), - socks5_impl::client::connect( - &mut tor_socks, - (addr, 80), - None, - ) - .map_err(|e| Error::new(e, ErrorKind::Tor)), - ) - .map_err(|e| Error::new(e, ErrorKind::Tor)) - .await? 
- }) - .await - .with_ctx(|_| (ErrorKind::Tor, "Tor is confirmed to be down")) - .log_err() - .is_some() - { - if wipe_state { - Command::new("systemctl") - .arg("stop") - .arg("tor") - .invoke(ErrorKind::Tor) - .await?; - tokio::fs::remove_dir_all("/var/lib/tor").await?; - } - return Err(Error::new(eyre!("{}", log.message), ErrorKind::Tor)); - } - } - } - } - } - }; - let health_checker = async { - let mut last_success = Instant::now(); - loop { - tokio::time::sleep(Duration::from_secs(30)).await; - let addr = hck_key.public().get_onion_address().to_string(); - if TcpStream::connect(tor_socks) - .map_err(|e| Error::new(e, ErrorKind::Tor)) - .and_then(|mut tor_socks| async move { - tokio::time::timeout( - Duration::from_secs(30), - socks5_impl::client::connect(&mut tor_socks, (addr, 80), None) - .map_err(|e| Error::new(e, ErrorKind::Tor)), - ) - .map_err(|e| Error::new(e, ErrorKind::Tor)) - .await - }) - .await - .is_err() - { - if last_success.elapsed() > *health_timeout { - let err = Error::new( - eyre!( - "Tor health check failed for longer than current timeout ({health_timeout:?})" - ), - crate::ErrorKind::Tor, - ); - *health_timeout *= 2; - wipe_state.store(true, std::sync::atomic::Ordering::SeqCst); - return Err(err); - } - } else { - last_success = Instant::now(); - } - } - }; - - tokio::select! 
{ - res = handler => res?, - res = log_parser => res?, - res = health_checker => res?, - } - - Ok(()) -} - -struct TorControl { - _thread: NonDetachingJoinHandle<()>, - send: mpsc::UnboundedSender, - services: Watch< - BTreeMap< - OnionAddress, - ( - TorSecretKey, - OrdMap>>, - Option, - ), - >, - >, -} -impl TorControl { - pub fn new( - tor_control: SocketAddr, - tor_socks: impl AsRef<[SocketAddr]> + Send + 'static, - ) -> Self { - let (send, mut recv) = mpsc::unbounded_channel(); - let services = Watch::new(BTreeMap::new()); - let mut thread_services = services.clone(); - Self { - _thread: tokio::spawn(async move { - let wipe_state = AtomicBool::new(false); - let mut health_timeout = Duration::from_secs(STARTING_HEALTH_TIMEOUT); - loop { - if let Err(e) = torctl( - tor_control, - tor_socks.as_ref(), - &mut recv, - &mut thread_services, - &wipe_state, - &mut health_timeout, - ) - .await - { - tracing::error!("TorControl : {e}"); - tracing::debug!("{e:?}"); - } - tracing::info!("Restarting Tor"); - tokio::time::sleep(Duration::from_secs(1)).await; - } - }) - .into(), - send, - services, - } - } -} - -#[tokio::test] -#[ignore] -async fn test_connection() { - let mut conn = torut::control::UnauthenticatedConn::new( - TcpStream::connect(SocketAddr::from(([127, 0, 0, 1], 9051))) - .await - .unwrap(), - ); - let auth = conn - .load_protocol_info() - .await - .unwrap() - .make_auth_data() - .unwrap() - .ok_or_else(|| eyre!("Cookie Auth Not Available")) - .with_kind(crate::ErrorKind::Tor) - .unwrap(); - conn.authenticate(&auth).await.unwrap(); - let mut connection: AuthenticatedConn< - TcpStream, - fn(AsyncEvent<'static>) -> BoxFuture<'static, Result<(), ConnError>>, - > = conn.into_authenticated().await; - let tor_key = torut::onion::TorSecretKeyV3::generate(); - connection.get_conf("SocksPort").await.unwrap(); - connection - .add_onion_v3( - &tor_key, - false, - false, - false, - None, - &mut [(443_u16, SocketAddr::from(([127, 0, 0, 1], 8443)))].iter(), - ) - .await - 
.unwrap(); - connection - .del_onion( - &tor_key - .public() - .get_onion_address() - .get_address_without_dot_onion(), - ) - .await - .unwrap(); - connection - .add_onion_v3( - &tor_key, - false, - false, - false, - None, - &mut [(8443_u16, SocketAddr::from(([127, 0, 0, 1], 8443)))].iter(), - ) - .await - .unwrap(); -} diff --git a/core/src/net/tor/mod.rs b/core/src/net/tor/mod.rs deleted file mode 100644 index d4d5c8007..000000000 --- a/core/src/net/tor/mod.rs +++ /dev/null @@ -1,10 +0,0 @@ -#[cfg(feature = "arti")] -mod arti; - -#[cfg(not(feature = "arti"))] -mod ctor; - -#[cfg(feature = "arti")] -pub use arti::{OnionAddress, OnionStore, TorController, TorSecretKey, tor_api}; -#[cfg(not(feature = "arti"))] -pub use ctor::{OnionAddress, OnionStore, TorController, TorSecretKey, tor_api}; diff --git a/core/src/net/tunnel.rs b/core/src/net/tunnel.rs index f3b505850..da0f6d84c 100644 --- a/core/src/net/tunnel.rs +++ b/core/src/net/tunnel.rs @@ -8,7 +8,7 @@ use ts_rs::TS; use crate::GatewayId; use crate::context::{CliContext, RpcContext}; -use crate::db::model::public::{NetworkInterfaceInfo, NetworkInterfaceType}; +use crate::db::model::public::{GatewayType, NetworkInterfaceInfo, NetworkInterfaceType}; use crate::net::host::all_hosts; use crate::prelude::*; use crate::util::Invoke; @@ -32,14 +32,19 @@ pub fn tunnel_api() -> ParentHandler { } #[derive(Debug, Clone, Deserialize, Serialize, Parser, TS)] +#[serde(rename_all = "camelCase")] #[ts(export)] pub struct AddTunnelParams { #[arg(help = "help.arg.tunnel-name")] name: InternedString, #[arg(help = "help.arg.wireguard-config")] config: String, - #[arg(help = "help.arg.is-public")] - public: bool, + #[arg(help = "help.arg.gateway-type")] + #[serde(default, rename = "type")] + gateway_type: Option, + #[arg(help = "help.arg.set-as-default-outbound")] + #[serde(default)] + set_as_default_outbound: bool, } fn sanitize_config(config: &str) -> String { @@ -64,7 +69,8 @@ pub async fn add_tunnel( AddTunnelParams { name, 
config, - public, + gateway_type, + set_as_default_outbound, }: AddTunnelParams, ) -> Result { let ifaces = ctx.net_controller.net_iface.watcher.subscribe(); @@ -76,9 +82,9 @@ pub async fn add_tunnel( iface.clone(), NetworkInterfaceInfo { name: Some(name), - public: Some(public), secure: None, ip_info: None, + gateway_type, }, ); return true; @@ -120,6 +126,19 @@ pub async fn add_tunnel( sub.recv().await; + if set_as_default_outbound { + ctx.db + .mutate(|db| { + db.as_public_mut() + .as_server_info_mut() + .as_network_mut() + .as_default_outbound_mut() + .ser(&Some(iface.clone())) + }) + .await + .result?; + } + Ok(iface) } @@ -156,10 +175,19 @@ pub async fn remove_tunnel( ctx.db .mutate(|db| { + let hostname = crate::hostname::ServerHostname::load(db.as_public().as_server_info())?; + let gateways = db + .as_public() + .as_server_info() + .as_network() + .as_gateways() + .de()?; + let ports = db.as_private().as_available_ports().de()?; for host in all_hosts(db) { let host = host?; host.as_public_domains_mut() .mutate(|p| Ok(p.retain(|_, v| v.gateway != id)))?; + host.update_addresses(&hostname, &gateways, &ports)?; } Ok(()) @@ -171,14 +199,24 @@ pub async fn remove_tunnel( ctx.db .mutate(|db| { + let hostname = crate::hostname::ServerHostname::load(db.as_public().as_server_info())?; + let gateways = db + .as_public() + .as_server_info() + .as_network() + .as_gateways() + .de()?; + let ports = db.as_private().as_available_ports().de()?; for host in all_hosts(db) { let host = host?; - host.as_bindings_mut().mutate(|b| { - Ok(b.values_mut().for_each(|v| { - v.net.private_disabled.remove(&id); - v.net.public_enabled.remove(&id); - })) + host.as_private_domains_mut().mutate(|d| { + for gateways in d.values_mut() { + gateways.remove(&id); + } + d.retain(|_, gateways| !gateways.is_empty()); + Ok(()) })?; + host.update_addresses(&hostname, &gateways, &ports)?; } Ok(()) diff --git a/core/src/net/vhost.rs b/core/src/net/vhost.rs index 4996ca937..85054e62b 100644 --- 
a/core/src/net/vhost.rs +++ b/core/src/net/vhost.rs @@ -1,19 +1,19 @@ use std::any::Any; use std::collections::{BTreeMap, BTreeSet}; use std::fmt; -use std::net::{IpAddr, SocketAddr}; +use std::net::{IpAddr, SocketAddr, SocketAddrV6}; use std::sync::{Arc, Weak}; use std::task::{Poll, ready}; -use std::time::Duration; use async_acme::acme::ACME_TLS_ALPN_NAME; use color_eyre::eyre::eyre; use futures::FutureExt; use futures::future::BoxFuture; +use imbl::OrdMap; use imbl_value::{InOMap, InternedString}; use rpc_toolkit::{Context, HandlerArgs, HandlerExt, ParentHandler, from_fn}; use serde::{Deserialize, Serialize}; -use tokio::net::TcpStream; +use tokio::net::{TcpListener, TcpStream}; use tokio_rustls::TlsConnector; use tokio_rustls::rustls::crypto::CryptoProvider; use tokio_rustls::rustls::pki_types::ServerName; @@ -23,28 +23,28 @@ use tracing::instrument; use ts_rs::TS; use visit_rs::Visit; -use crate::ResultExt; use crate::context::{CliContext, RpcContext}; use crate::db::model::Database; -use crate::db::model::public::AcmeSettings; +use crate::db::model::public::{AcmeSettings, NetworkInterfaceInfo}; use crate::db::{DbAccessByKey, DbAccessMut}; use crate::net::acme::{ AcmeCertStore, AcmeProvider, AcmeTlsAlpnCache, AcmeTlsHandler, GetAcmeProvider, }; use crate::net::gateway::{ - AnyFilter, BindTcp, DynInterfaceFilter, GatewayInfo, InterfaceFilter, - NetworkInterfaceController, NetworkInterfaceListener, + GatewayInfo, NetworkInterfaceController, NetworkInterfaceListenerAcceptMetadata, }; use crate::net::ssl::{CertStore, RootCaTlsHandler}; use crate::net::tls::{ ChainedHandler, TlsHandlerWrapper, TlsListener, TlsMetadata, WrapTlsHandler, }; +use crate::net::utils::ipv6_is_link_local; use crate::net::web_server::{Accept, AcceptStream, ExtractVisitor, TcpMetadata, extract}; use crate::prelude::*; use crate::util::collections::EqSet; use crate::util::future::{NonDetachingJoinHandle, WeakFuture}; use crate::util::serde::{HandlerExtSerde, MaybeUtf8String, 
display_serializable}; use crate::util::sync::{SyncMutex, Watch}; +use crate::{GatewayId, ResultExt}; pub fn vhost_api() -> ParentHandler { ParentHandler::new().subcommand( @@ -93,7 +93,7 @@ pub struct VHostController { interfaces: Arc, crypto_provider: Arc, acme_cache: AcmeTlsAlpnCache, - servers: SyncMutex>>, + servers: SyncMutex>>, } impl VHostController { pub fn new( @@ -114,14 +114,22 @@ impl VHostController { &self, hostname: Option, external: u16, - target: DynVHostTarget, + target: DynVHostTarget, ) -> Result, Error> { self.servers.mutate(|writable| { let server = if let Some(server) = writable.remove(&external) { server } else { + let bind_reqs = Watch::new(VHostBindRequirements::default()); + let listener = VHostBindListener { + ip_info: self.interfaces.watcher.subscribe(), + port: external, + bind_reqs: bind_reqs.clone_unseen(), + listeners: BTreeMap::new(), + }; VHostServer::new( - self.interfaces.watcher.bind(BindTcp, external)?, + listener, + bind_reqs, self.db.clone(), self.crypto_provider.clone(), self.acme_cache.clone(), @@ -173,6 +181,142 @@ impl VHostController { } } +/// Union of all ProxyTargets' bind requirements for a VHostServer. +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct VHostBindRequirements { + pub public_gateways: BTreeSet, + pub private_ips: BTreeSet, +} + +fn compute_bind_reqs(mapping: &Mapping) -> VHostBindRequirements { + let mut reqs = VHostBindRequirements::default(); + for (_, targets) in mapping { + for (target, rc) in targets { + if rc.strong_count() > 0 { + let (pub_gw, priv_ip) = target.0.bind_requirements(); + reqs.public_gateways.extend(pub_gw); + reqs.private_ips.extend(priv_ip); + } + } + } + reqs +} + +/// Listener that manages its own TCP listeners with IP-level precision. +/// Binds ALL IPs of public gateways and ONLY matching private IPs. 
+pub struct VHostBindListener { + ip_info: Watch>, + port: u16, + bind_reqs: Watch, + listeners: BTreeMap, +} + +fn update_vhost_listeners( + listeners: &mut BTreeMap, + port: u16, + ip_info: &OrdMap, + reqs: &VHostBindRequirements, +) -> Result<(), Error> { + let mut keep = BTreeSet::::new(); + for (gw_id, info) in ip_info { + if let Some(ip_info) = &info.ip_info { + for ipnet in &ip_info.subnets { + let ip = ipnet.addr(); + let should_bind = + reqs.public_gateways.contains(gw_id) || reqs.private_ips.contains(&ip); + if should_bind { + let addr = match ip { + IpAddr::V6(ip6) => SocketAddrV6::new( + ip6, + port, + 0, + if ipv6_is_link_local(ip6) { + ip_info.scope_id + } else { + 0 + }, + ) + .into(), + ip => SocketAddr::new(ip, port), + }; + keep.insert(addr); + if let Some((_, existing_info)) = listeners.get_mut(&addr) { + *existing_info = GatewayInfo { + id: gw_id.clone(), + info: info.clone(), + }; + } else { + let tcp = TcpListener::from_std( + mio::net::TcpListener::bind(addr) + .with_kind(ErrorKind::Network)? 
+ .into(), + ) + .with_kind(ErrorKind::Network)?; + listeners.insert( + addr, + ( + tcp, + GatewayInfo { + id: gw_id.clone(), + info: info.clone(), + }, + ), + ); + } + } + } + } + } + listeners.retain(|key, _| keep.contains(key)); + Ok(()) +} + +impl Accept for VHostBindListener { + type Metadata = NetworkInterfaceListenerAcceptMetadata; + fn poll_accept( + &mut self, + cx: &mut std::task::Context<'_>, + ) -> Poll> { + // Update listeners when ip_info or bind_reqs change + while self.ip_info.poll_changed(cx).is_ready() || self.bind_reqs.poll_changed(cx).is_ready() + { + let reqs = self.bind_reqs.read_and_mark_seen(); + let listeners = &mut self.listeners; + let port = self.port; + self.ip_info.peek_and_mark_seen(|ip_info| { + update_vhost_listeners(listeners, port, ip_info, &reqs) + })?; + } + + // Poll each listener for incoming connections + for (&addr, (listener, gw_info)) in &self.listeners { + match listener.poll_accept(cx) { + Poll::Ready(Ok((stream, peer_addr))) => { + if let Err(e) = socket2::SockRef::from(&stream).set_keepalive(true) { + tracing::error!("Failed to set tcp keepalive: {e}"); + tracing::debug!("{e:?}"); + } + return Poll::Ready(Ok(( + NetworkInterfaceListenerAcceptMetadata { + inner: TcpMetadata { + local_addr: addr, + peer_addr, + }, + info: gw_info.clone(), + }, + Box::pin(stream), + ))); + } + Poll::Ready(Err(e)) => { + tracing::trace!("VHostBindListener accept error on {addr}: {e}"); + } + Poll::Pending => {} + } + } + Poll::Pending + } +} + pub trait VHostTarget: std::fmt::Debug + Eq { type PreprocessRes: Send + 'static; #[allow(unused_variables)] @@ -182,6 +326,10 @@ pub trait VHostTarget: std::fmt::Debug + Eq { fn acme(&self) -> Option<&AcmeProvider> { None } + /// Returns (public_gateways, private_ips) this target needs the listener to bind on. 
+ fn bind_requirements(&self) -> (BTreeSet, BTreeSet) { + (BTreeSet::new(), BTreeSet::new()) + } fn preprocess<'a>( &'a self, prev: ServerConfig, @@ -200,6 +348,7 @@ pub trait VHostTarget: std::fmt::Debug + Eq { pub trait DynVHostTargetT: std::fmt::Debug + Any { fn filter(&self, metadata: &::Metadata) -> bool; fn acme(&self) -> Option<&AcmeProvider>; + fn bind_requirements(&self) -> (BTreeSet, BTreeSet); fn preprocess<'a>( &'a self, prev: ServerConfig, @@ -224,6 +373,9 @@ impl + 'static> DynVHostTargetT for T { fn acme(&self) -> Option<&AcmeProvider> { VHostTarget::acme(self) } + fn bind_requirements(&self) -> (BTreeSet, BTreeSet) { + VHostTarget::bind_requirements(self) + } fn preprocess<'a>( &'a self, prev: ServerConfig, @@ -301,7 +453,8 @@ impl Preprocessed { #[derive(Clone)] pub struct ProxyTarget { - pub filter: DynInterfaceFilter, + pub public: BTreeSet, + pub private: BTreeSet, pub acme: Option, pub addr: SocketAddr, pub add_x_forwarded_headers: bool, @@ -309,7 +462,8 @@ pub struct ProxyTarget { } impl PartialEq for ProxyTarget { fn eq(&self, other: &Self) -> bool { - self.filter == other.filter + self.public == other.public + && self.private == other.private && self.acme == other.acme && self.addr == other.addr && self.connect_ssl.as_ref().map(Arc::as_ptr) @@ -320,7 +474,8 @@ impl Eq for ProxyTarget {} impl fmt::Debug for ProxyTarget { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("ProxyTarget") - .field("filter", &self.filter) + .field("public", &self.public) + .field("private", &self.private) .field("acme", &self.acme) .field("addr", &self.addr) .field("add_x_forwarded_headers", &self.add_x_forwarded_headers) @@ -340,16 +495,35 @@ where { type PreprocessRes = AcceptStream; fn filter(&self, metadata: &::Metadata) -> bool { - let info = extract::(metadata); - if info.is_none() { - tracing::warn!("No GatewayInfo on metadata"); + let gw = extract::(metadata); + let tcp = extract::(metadata); + let (Some(gw), Some(tcp)) = (gw, tcp) 
else { + return false; + }; + let Some(ip_info) = &gw.info.ip_info else { + return false; + }; + + let src = tcp.peer_addr.ip(); + // Public: source is outside all known subnets (direct internet) + let is_public = !ip_info.subnets.iter().any(|s| s.contains(&src)); + + if is_public { + self.public.contains(&gw.id) + } else { + // Private: accept if connection arrived on an interface with a matching IP + ip_info + .subnets + .iter() + .any(|s| self.private.contains(&s.addr())) } - info.as_ref() - .map_or(true, |i| self.filter.filter(&i.id, &i.info)) } fn acme(&self) -> Option<&AcmeProvider> { self.acme.as_ref() } + fn bind_requirements(&self) -> (BTreeSet, BTreeSet) { + (self.public.clone(), self.private.clone()) + } async fn preprocess<'a>( &'a self, mut prev: ServerConfig, @@ -518,6 +692,7 @@ where let (target, rc) = self.0.peek(|m| { m.get(&hello.server_name().map(InternedString::from)) + .or_else(|| m.get(&None)) .into_iter() .flatten() .filter(|(_, rc)| rc.strong_count() > 0) @@ -634,28 +809,15 @@ where struct VHostServer { mapping: Watch>, + bind_reqs: Watch, _thread: NonDetachingJoinHandle<()>, } -impl<'a> From<&'a BTreeMap, BTreeMap>>> for AnyFilter { - fn from(value: &'a BTreeMap, BTreeMap>>) -> Self { - Self( - value - .iter() - .flat_map(|(_, v)| { - v.iter() - .filter(|(_, r)| r.strong_count() > 0) - .map(|(t, _)| t.filter.clone()) - }) - .collect(), - ) - } -} - impl VHostServer { #[instrument(skip_all)] fn new( listener: A, + bind_reqs: Watch, db: TypedPatchDb, crypto_provider: Arc, acme_cache: AcmeTlsAlpnCache, @@ -679,6 +841,7 @@ impl VHostServer { let mapping = Watch::new(BTreeMap::new()); Self { mapping: mapping.clone(), + bind_reqs, _thread: tokio::spawn(async move { let mut listener = VHostListener(TlsListener::new( listener, @@ -729,6 +892,9 @@ impl VHostServer { targets.insert(target, Arc::downgrade(&rc)); writable.insert(hostname, targets); res = Ok(rc); + if changed { + self.update_bind_reqs(writable); + } changed }); if 
self.mapping.watcher_count() > 1 { @@ -752,9 +918,23 @@ impl VHostServer { if !targets.is_empty() { writable.insert(hostname, targets); } + if pre != post { + self.update_bind_reqs(writable); + } pre == post }); } + fn update_bind_reqs(&self, mapping: &Mapping) { + let new_reqs = compute_bind_reqs(mapping); + self.bind_reqs.send_if_modified(|reqs| { + if *reqs != new_reqs { + *reqs = new_reqs; + true + } else { + false + } + }); + } fn is_empty(&self) -> bool { self.mapping.peek(|m| m.is_empty()) } diff --git a/core/src/net/web_server.rs b/core/src/net/web_server.rs index 2ac5b035f..8ffe9deaa 100644 --- a/core/src/net/web_server.rs +++ b/core/src/net/web_server.rs @@ -366,28 +366,6 @@ where pub struct WebServerAcceptorSetter { acceptor: Watch, } -impl WebServerAcceptorSetter>> -where - A: Accept, - B: Accept, -{ - pub fn try_upgrade Result>(&self, f: F) -> Result<(), Error> { - let mut res = Ok(()); - self.acceptor.send_modify(|a| { - *a = match a.take() { - Some(Either::Left(a)) => match f(a) { - Ok(b) => Some(Either::Right(b)), - Err(e) => { - res = Err(e); - None - } - }, - x => x, - } - }); - res - } -} impl Deref for WebServerAcceptorSetter { type Target = Watch; fn deref(&self) -> &Self::Target { diff --git a/core/src/net/wifi.rs b/core/src/net/wifi.rs index 046ad612f..57ccbd107 100644 --- a/core/src/net/wifi.rs +++ b/core/src/net/wifi.rs @@ -85,6 +85,7 @@ pub fn wifi() -> ParentHandler { } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct SetWifiEnabledParams { @@ -150,16 +151,20 @@ pub fn country() -> ParentHandler { } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] -pub struct AddParams { +pub struct WifiAddParams { #[arg(help = "help.arg.wifi-ssid")] ssid: String, #[arg(help = "help.arg.wifi-password")] password: String, } #[instrument(skip_all)] -pub async fn add(ctx: 
RpcContext, AddParams { ssid, password }: AddParams) -> Result<(), Error> { +pub async fn add( + ctx: RpcContext, + WifiAddParams { ssid, password }: WifiAddParams, +) -> Result<(), Error> { let wifi_manager = ctx.wifi_manager.clone(); if !ssid.is_ascii() { return Err(Error::new( @@ -229,15 +234,19 @@ pub async fn add(ctx: RpcContext, AddParams { ssid, password }: AddParams) -> Re Ok(()) } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] -pub struct SsidParams { +pub struct WifiSsidParams { #[arg(help = "help.arg.wifi-ssid")] ssid: String, } #[instrument(skip_all)] -pub async fn connect(ctx: RpcContext, SsidParams { ssid }: SsidParams) -> Result<(), Error> { +pub async fn connect( + ctx: RpcContext, + WifiSsidParams { ssid }: WifiSsidParams, +) -> Result<(), Error> { let wifi_manager = ctx.wifi_manager.clone(); if !ssid.is_ascii() { return Err(Error::new( @@ -311,7 +320,7 @@ pub async fn connect(ctx: RpcContext, SsidParams { ssid }: SsidParams) -> Result } #[instrument(skip_all)] -pub async fn remove(ctx: RpcContext, SsidParams { ssid }: SsidParams) -> Result<(), Error> { +pub async fn remove(ctx: RpcContext, WifiSsidParams { ssid }: WifiSsidParams) -> Result<(), Error> { let wifi_manager = ctx.wifi_manager.clone(); if !ssid.is_ascii() { return Err(Error::new( @@ -359,11 +368,13 @@ pub async fn remove(ctx: RpcContext, SsidParams { ssid }: SsidParams) -> Result< .result?; Ok(()) } -#[derive(serde::Serialize, serde::Deserialize)] +#[derive(serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct WifiListInfo { ssids: HashMap, connected: Option, + #[ts(type = "string | null")] country: Option, ethernet: bool, available_wifi: Vec, @@ -374,7 +385,8 @@ pub struct WifiListInfoLow { strength: SignalStrength, security: Vec, } -#[derive(serde::Serialize, serde::Deserialize)] +#[derive(serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] 
#[serde(rename_all = "camelCase")] pub struct WifiListOut { ssid: Ssid, @@ -560,6 +572,7 @@ pub async fn get_available(ctx: RpcContext, _: Empty) -> Result } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct SetCountryParams { @@ -605,7 +618,7 @@ pub struct NetworkId(String); /// Ssid are the names of the wifis, usually human readable. #[derive( - Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize, + Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, serde::Serialize, serde::Deserialize, TS, )] pub struct Ssid(String); @@ -622,6 +635,7 @@ pub struct Ssid(String); Hash, serde::Serialize, serde::Deserialize, + TS, )] pub struct SignalStrength(u8); diff --git a/core/src/notifications.rs b/core/src/notifications.rs index 1d6c147ba..2e5ca39ac 100644 --- a/core/src/notifications.rs +++ b/core/src/notifications.rs @@ -75,6 +75,7 @@ pub fn notification() -> ParentHandler { } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct ListNotificationParams { @@ -140,6 +141,7 @@ pub async fn list( } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct ModifyNotificationParams { @@ -175,6 +177,7 @@ pub async fn remove( } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] pub struct ModifyNotificationBeforeParams { @@ -326,6 +329,7 @@ pub async fn create( } #[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub enum NotificationLevel { Success, @@ -396,26 +400,31 @@ impl Map for Notifications { } } -#[derive(Debug, Serialize, Deserialize, HasModel)] +#[derive(Debug, Serialize, Deserialize, HasModel, TS)] 
+#[ts(export)] #[serde(rename_all = "camelCase")] #[model = "Model"] pub struct Notification { pub package_id: Option, + #[ts(type = "string")] pub created_at: DateTime, pub code: u32, pub level: NotificationLevel, pub title: String, pub message: String, + #[ts(type = "any")] pub data: Value, #[serde(default = "const_true")] pub seen: bool, } -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct NotificationWithId { id: u32, #[serde(flatten)] + #[ts(flatten)] notification: Notification, } diff --git a/core/src/os_install/mod.rs b/core/src/os_install/mod.rs index 1e823c754..d121a1387 100644 --- a/core/src/os_install/mod.rs +++ b/core/src/os_install/mod.rs @@ -359,6 +359,7 @@ pub async fn install_os_to( "riscv64" => install.arg("--target=riscv64-efi"), _ => &mut install, }; + install.arg("--no-nvram"); } install .arg(disk_path) diff --git a/core/src/registry/package/get.rs b/core/src/registry/package/get.rs index 2d834bf87..b18ee54c1 100644 --- a/core/src/registry/package/get.rs +++ b/core/src/registry/package/get.rs @@ -579,14 +579,12 @@ fn check_matching_info_short() { use crate::s9pk::manifest::{Alerts, Description}; use crate::util::DataUrl; - let lang_map = |s: &str| { - LocaleString::LanguageMap([("en".into(), s.into())].into_iter().collect()) - }; + let lang_map = + |s: &str| LocaleString::LanguageMap([("en".into(), s.into())].into_iter().collect()); let info = PackageVersionInfo { metadata: PackageMetadata { title: "Test Package".into(), - icon: DataUrl::from_vec("image/png", vec![]), description: Description { short: lang_map("A short description"), long: lang_map("A longer description of the test package"), @@ -594,18 +592,19 @@ fn check_matching_info_short() { release_notes: lang_map("Initial release"), git_hash: None, license: "MIT".into(), - wrapper_repo: "https://github.com/example/wrapper".parse().unwrap(), + package_repo: 
"https://github.com/example/wrapper".parse().unwrap(), upstream_repo: "https://github.com/example/upstream".parse().unwrap(), - support_site: "https://example.com/support".parse().unwrap(), - marketing_site: "https://example.com".parse().unwrap(), + marketing_url: Some("https://example.com".parse().unwrap()), donation_url: None, - docs_url: None, + docs_urls: Vec::new(), alerts: Alerts::default(), - dependency_metadata: BTreeMap::new(), os_version: exver::Version::new([0, 3, 6], []), sdk_version: None, hardware_acceleration: false, + plugins: BTreeSet::new(), }, + icon: DataUrl::from_vec("image/png", vec![]), + dependency_metadata: BTreeMap::new(), source_version: None, s9pks: Vec::new(), }; diff --git a/core/src/registry/package/index.rs b/core/src/registry/package/index.rs index 64b83d5e4..e7848b13c 100644 --- a/core/src/registry/package/index.rs +++ b/core/src/registry/package/index.rs @@ -17,8 +17,11 @@ use crate::registry::device_info::DeviceInfo; use crate::rpc_continuations::Guid; use crate::s9pk::S9pk; use crate::s9pk::git_hash::GitHash; -use crate::s9pk::manifest::{Alerts, Description, HardwareRequirements, LocaleString}; +use crate::s9pk::manifest::{ + Alerts, Description, HardwareRequirements, LocaleString, current_version, +}; use crate::s9pk::merkle_archive::source::FileSource; +use crate::service::effects::plugin::PluginId; use crate::sign::commitment::merkle_archive::MerkleArchiveCommitment; use crate::sign::{AnySignature, AnyVerifyingKey}; use crate::util::{DataUrl, VersionString}; @@ -69,75 +72,44 @@ impl DependencyMetadata { } } -#[derive(Debug, Deserialize, Serialize, HasModel, TS, PartialEq)] +fn placeholder_url() -> Url { + "https://example.com".parse().unwrap() +} + +#[derive(Clone, Debug, Deserialize, Serialize, HasModel, TS, PartialEq)] #[serde(rename_all = "camelCase")] #[model = "Model"] pub struct PackageMetadata { #[ts(type = "string")] pub title: InternedString, - pub icon: DataUrl<'static>, pub description: Description, pub 
release_notes: LocaleString, pub git_hash: Option, #[ts(type = "string")] pub license: InternedString, #[ts(type = "string")] - pub wrapper_repo: Url, + #[serde(default = "placeholder_url")] // TODO: remove + pub package_repo: Url, #[ts(type = "string")] pub upstream_repo: Url, #[ts(type = "string")] - pub support_site: Url, - #[ts(type = "string")] - pub marketing_site: Url, + pub marketing_url: Option, #[ts(type = "string | null")] pub donation_url: Option, - #[ts(type = "string | null")] - pub docs_url: Option, + #[serde(default)] + #[ts(type = "string[]")] + pub docs_urls: Vec, + #[serde(default)] pub alerts: Alerts, - pub dependency_metadata: BTreeMap, + #[serde(default = "current_version")] #[ts(type = "string")] pub os_version: Version, #[ts(type = "string | null")] pub sdk_version: Option, #[serde(default)] pub hardware_acceleration: bool, -} -impl PackageMetadata { - pub async fn load(s9pk: &S9pk) -> Result { - let manifest = s9pk.as_manifest(); - let mut dependency_metadata = BTreeMap::new(); - for (id, info) in &manifest.dependencies.0 { - let metadata = s9pk.dependency_metadata(id).await?; - dependency_metadata.insert( - id.clone(), - DependencyMetadata { - title: metadata.map(|m| m.title), - icon: s9pk.dependency_icon_data_url(id).await?, - description: info.description.clone(), - optional: info.optional, - }, - ); - } - Ok(Self { - title: manifest.title.clone(), - icon: s9pk.icon_data_url().await?, - description: manifest.description.clone(), - release_notes: manifest.release_notes.clone(), - git_hash: manifest.git_hash.clone(), - license: manifest.license.clone(), - wrapper_repo: manifest.wrapper_repo.clone(), - upstream_repo: manifest.upstream_repo.clone(), - support_site: manifest.support_site.clone(), - marketing_site: manifest.marketing_site.clone(), - donation_url: manifest.donation_url.clone(), - docs_url: manifest.docs_url.clone(), - alerts: manifest.alerts.clone(), - dependency_metadata, - os_version: manifest.os_version.clone(), - 
sdk_version: manifest.sdk_version.clone(), - hardware_acceleration: manifest.hardware_acceleration.clone(), - }) - } + #[serde(default)] + pub plugins: BTreeSet, } #[derive(Debug, Deserialize, Serialize, HasModel, TS)] @@ -147,6 +119,8 @@ impl PackageMetadata { pub struct PackageVersionInfo { #[serde(flatten)] pub metadata: PackageMetadata, + pub icon: DataUrl<'static>, + pub dependency_metadata: BTreeMap, #[ts(type = "string | null")] pub source_version: Option, pub s9pks: Vec<(HardwareRequirements, RegistryAsset)>, @@ -156,11 +130,28 @@ impl PackageVersionInfo { s9pk: &S9pk, urls: Vec, ) -> Result { + let manifest = s9pk.as_manifest(); + let icon = s9pk.icon_data_url().await?; + let mut dependency_metadata = BTreeMap::new(); + for (id, info) in &manifest.dependencies.0 { + let dep_meta = s9pk.dependency_metadata(id).await?; + dependency_metadata.insert( + id.clone(), + DependencyMetadata { + title: dep_meta.map(|m| m.title), + icon: s9pk.dependency_icon_data_url(id).await?, + description: info.description.clone(), + optional: info.optional, + }, + ); + } Ok(Self { - metadata: PackageMetadata::load(s9pk).await?, + metadata: manifest.metadata.clone(), + icon, + dependency_metadata, source_version: None, // TODO s9pks: vec![( - s9pk.as_manifest().hardware_requirements.clone(), + manifest.hardware_requirements.clone(), RegistryAsset { published_at: Utc::now(), urls, @@ -176,6 +167,27 @@ impl PackageVersionInfo { }) } pub fn merge_with(&mut self, other: Self, replace_urls: bool) -> Result<(), Error> { + if self.metadata != other.metadata { + return Err(Error::new( + color_eyre::eyre::eyre!("{}", t!("registry.package.index.metadata-mismatch")), + ErrorKind::InvalidRequest, + )); + } + if self.icon != other.icon { + return Err(Error::new( + color_eyre::eyre::eyre!("{}", t!("registry.package.index.icon-mismatch")), + ErrorKind::InvalidRequest, + )); + } + if self.dependency_metadata != other.dependency_metadata { + return Err(Error::new( + color_eyre::eyre::eyre!( + 
"{}", + t!("registry.package.index.dependency-metadata-mismatch") + ), + ErrorKind::InvalidRequest, + )); + } for (hw_req, asset) in other.s9pks { if let Some((_, matching)) = self .s9pks @@ -221,10 +233,9 @@ impl PackageVersionInfo { ]); table.add_row(row![br -> "GIT HASH", self.metadata.git_hash.as_deref().unwrap_or("N/A")]); table.add_row(row![br -> "LICENSE", &self.metadata.license]); - table.add_row(row![br -> "PACKAGE REPO", &self.metadata.wrapper_repo.to_string()]); + table.add_row(row![br -> "PACKAGE REPO", &self.metadata.package_repo.to_string()]); table.add_row(row![br -> "SERVICE REPO", &self.metadata.upstream_repo.to_string()]); - table.add_row(row![br -> "WEBSITE", &self.metadata.marketing_site.to_string()]); - table.add_row(row![br -> "SUPPORT", &self.metadata.support_site.to_string()]); + table.add_row(row![br -> "WEBSITE", self.metadata.marketing_url.as_ref().map_or("N/A".to_owned(), |u| u.to_string())]); table } @@ -244,30 +255,7 @@ impl Model { } if let Some(hw) = &device_info.hardware { self.as_s9pks_mut().mutate(|s9pks| { - s9pks.retain(|(hw_req, _)| { - if let Some(arch) = &hw_req.arch { - if !arch.contains(&hw.arch) { - return false; - } - } - if let Some(ram) = hw_req.ram { - if hw.ram < ram { - return false; - } - } - if let Some(dev) = &hw.devices { - for device_filter in &hw_req.device { - if !dev - .iter() - .filter(|d| d.class() == &*device_filter.class) - .any(|d| device_filter.matches(d)) - { - return false; - } - } - } - true - }); + s9pks.retain(|(hw_req, _)| hw_req.is_compatible(hw)); if hw.devices.is_some() { s9pks.sort_by_key(|(req, _)| req.specificity_desc()); } else { @@ -287,19 +275,17 @@ impl Model { } if let Some(locale) = device_info.os.language.as_deref() { - let metadata = self.as_metadata_mut(); - metadata + self.as_metadata_mut() .as_alerts_mut() .mutate(|a| Ok(a.localize_for(locale)))?; - metadata - .as_dependency_metadata_mut() + self.as_dependency_metadata_mut() .as_entries_mut()? 
.into_iter() .try_for_each(|(_, d)| d.mutate(|d| Ok(d.localize_for(locale))))?; - metadata + self.as_metadata_mut() .as_description_mut() .mutate(|d| Ok(d.localize_for(locale)))?; - metadata + self.as_metadata_mut() .as_release_notes_mut() .mutate(|r| Ok(r.localize_for(locale)))?; } diff --git a/core/src/s9pk/rpc.rs b/core/src/s9pk/rpc.rs index 3c16d27ba..2fafd7e0c 100644 --- a/core/src/s9pk/rpc.rs +++ b/core/src/s9pk/rpc.rs @@ -3,16 +3,17 @@ use std::path::PathBuf; use std::sync::Arc; use clap::Parser; -use rpc_toolkit::{Empty, HandlerExt, ParentHandler, from_fn_async}; +use rpc_toolkit::{Empty, HandlerArgs, HandlerExt, ParentHandler, from_fn_async}; use serde::{Deserialize, Serialize}; use tokio::process::Command; use ts_rs::TS; use url::Url; use crate::ImageId; -use crate::context::CliContext; +use crate::context::{CliContext, RpcContext}; use crate::prelude::*; -use crate::s9pk::manifest::Manifest; +use crate::registry::device_info::DeviceInfo; +use crate::s9pk::manifest::{HardwareRequirements, Manifest}; use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; use crate::s9pk::v2::SIG_CONTEXT; use crate::s9pk::v2::pack::ImageConfig; @@ -70,6 +71,15 @@ pub fn s9pk() -> ParentHandler { .no_display() .with_about("about.publish-s9pk"), ) + .subcommand( + "select", + from_fn_async(select) + .with_custom_display_fn(|_, path: PathBuf| { + println!("{}", path.display()); + Ok(()) + }) + .with_about("about.select-s9pk-for-device"), + ) } #[derive(Deserialize, Serialize, Parser)] @@ -323,3 +333,97 @@ async fn publish(ctx: CliContext, S9pkPath { s9pk: s9pk_path }: S9pkPath) -> Res .await?; crate::registry::package::add::cli_add_package_impl(ctx, s9pk, vec![s3url], false).await } + +#[derive(Deserialize, Serialize, Parser)] +struct SelectParams { + #[arg(help = "help.arg.s9pk-file-paths")] + s9pks: Vec, +} + +async fn select( + HandlerArgs { + context, + params: SelectParams { s9pks }, + .. 
+ }: HandlerArgs, +) -> Result { + // Resolve file list: use provided paths or scan cwd for *.s9pk + let paths = if s9pks.is_empty() { + let mut found = Vec::new(); + let mut entries = tokio::fs::read_dir(".").await?; + while let Some(entry) = entries.next_entry().await? { + let path = entry.path(); + if path.extension().and_then(|e| e.to_str()) == Some("s9pk") { + found.push(path); + } + } + if found.is_empty() { + return Err(Error::new( + eyre!("no .s9pk files found in current directory"), + ErrorKind::NotFound, + )); + } + found + } else { + s9pks + }; + + // Fetch DeviceInfo from the target server + let device_info: DeviceInfo = from_value( + context + .call_remote::("server.device-info", imbl_value::json!({})) + .await?, + )?; + + // Filter and rank s9pk files by compatibility + let mut compatible: Vec<(PathBuf, HardwareRequirements)> = Vec::new(); + for path in &paths { + let s9pk = match super::S9pk::open(path, None).await { + Ok(s9pk) => s9pk, + Err(e) => { + tracing::warn!("skipping {}: {e}", path.display()); + continue; + } + }; + let manifest = s9pk.as_manifest(); + + // OS version check: package's required OS version must be in server's compat range + if !manifest + .metadata + .os_version + .satisfies(&device_info.os.compat) + { + continue; + } + + let hw_req = &manifest.hardware_requirements; + + if let Some(hw) = &device_info.hardware { + if !hw_req.is_compatible(hw) { + continue; + } + } + + compatible.push((path.clone(), hw_req.clone())); + } + + if compatible.is_empty() { + return Err(Error::new( + eyre!( + "no compatible s9pk found for device (arch: {}, os: {})", + device_info + .hardware + .as_ref() + .map(|h| h.arch.to_string()) + .unwrap_or_else(|| "unknown".into()), + device_info.os.version, + ), + ErrorKind::NotFound, + )); + } + + // Sort by specificity (most specific first) + compatible.sort_by_key(|(_, req)| req.specificity_desc()); + + Ok(compatible.into_iter().next().unwrap().0) +} diff --git a/core/src/s9pk/v2/compat.rs 
b/core/src/s9pk/v2/compat.rs index 837632fff..9b0add0bf 100644 --- a/core/src/s9pk/v2/compat.rs +++ b/core/src/s9pk/v2/compat.rs @@ -9,6 +9,7 @@ use tokio::process::Command; use crate::dependencies::{DepInfo, Dependencies}; use crate::prelude::*; +use crate::registry::package::index::PackageMetadata; use crate::s9pk::manifest::{DeviceFilter, LocaleString, Manifest}; use crate::s9pk::merkle_archive::directory_contents::DirectoryContents; use crate::s9pk::merkle_archive::source::TmpSource; @@ -195,20 +196,30 @@ impl TryFrom for Manifest { } Ok(Self { id: value.id, - title: format!("{} (Legacy)", value.title).into(), version: version.into(), satisfies: BTreeSet::new(), - release_notes: LocaleString::Translated(value.release_notes), can_migrate_from: VersionRange::any(), can_migrate_to: VersionRange::none(), - license: value.license.into(), - wrapper_repo: value.wrapper_repo, - upstream_repo: value.upstream_repo, - support_site: value.support_site.unwrap_or_else(|| default_url.clone()), - marketing_site: value.marketing_site.unwrap_or_else(|| default_url.clone()), - donation_url: value.donation_url, - docs_url: None, - description: value.description, + metadata: PackageMetadata { + title: format!("{} (Legacy)", value.title).into(), + release_notes: LocaleString::Translated(value.release_notes), + license: value.license.into(), + package_repo: value.wrapper_repo, + upstream_repo: value.upstream_repo, + marketing_url: Some(value.marketing_site.unwrap_or_else(|| default_url.clone())), + donation_url: value.donation_url, + docs_urls: Vec::new(), + description: value.description, + alerts: value.alerts, + git_hash: value.git_hash, + os_version: value.eos_version, + sdk_version: None, + hardware_acceleration: match value.main { + PackageProcedure::Docker(d) => d.gpu_acceleration, + PackageProcedure::Script(_) => false, + }, + plugins: BTreeSet::new(), + }, images: BTreeMap::new(), volumes: value .volumes @@ -217,7 +228,6 @@ impl TryFrom for Manifest { .map(|(id, _)| 
id.clone()) .chain([VolumeId::from_str("embassy").unwrap()]) .collect(), - alerts: value.alerts, dependencies: Dependencies( value .dependencies @@ -252,13 +262,6 @@ impl TryFrom for Manifest { }) .collect(), }, - git_hash: value.git_hash, - os_version: value.eos_version, - sdk_version: None, - hardware_acceleration: match value.main { - PackageProcedure::Docker(d) => d.gpu_acceleration, - PackageProcedure::Script(_) => false, - }, }) } } diff --git a/core/src/s9pk/v2/manifest.rs b/core/src/s9pk/v2/manifest.rs index 00e57f18c..bc31bec8f 100644 --- a/core/src/s9pk/v2/manifest.rs +++ b/core/src/s9pk/v2/manifest.rs @@ -7,12 +7,11 @@ use exver::{Version, VersionRange}; use imbl_value::{InOMap, InternedString}; use serde::{Deserialize, Serialize}; use ts_rs::TS; -use url::Url; pub use crate::PackageId; use crate::dependencies::Dependencies; use crate::prelude::*; -use crate::s9pk::git_hash::GitHash; +use crate::registry::package::index::PackageMetadata; use crate::s9pk::merkle_archive::directory_contents::DirectoryContents; use crate::s9pk::merkle_archive::expected::{Expected, Filter}; use crate::s9pk::v2::pack::ImageConfig; @@ -22,7 +21,7 @@ use crate::util::{FromStrParser, VersionString, mime}; use crate::version::{Current, VersionT}; use crate::{ImageId, VolumeId}; -fn current_version() -> Version { +pub(crate) fn current_version() -> Version { Current::default().semver() } @@ -32,46 +31,20 @@ fn current_version() -> Version { #[ts(export)] pub struct Manifest { pub id: PackageId, - #[ts(type = "string")] - pub title: InternedString, pub version: VersionString, pub satisfies: BTreeSet, - pub release_notes: LocaleString, #[ts(type = "string")] pub can_migrate_to: VersionRange, #[ts(type = "string")] pub can_migrate_from: VersionRange, - #[ts(type = "string")] - pub license: InternedString, // type of license - #[ts(type = "string")] - pub wrapper_repo: Url, - #[ts(type = "string")] - pub upstream_repo: Url, - #[ts(type = "string")] - pub support_site: Url, - #[ts(type 
= "string")] - pub marketing_site: Url, - #[ts(type = "string | null")] - pub donation_url: Option, - #[ts(type = "string | null")] - pub docs_url: Option, - pub description: Description, + #[serde(flatten)] + pub metadata: PackageMetadata, pub images: BTreeMap, pub volumes: BTreeSet, #[serde(default)] - pub alerts: Alerts, - #[serde(default)] pub dependencies: Dependencies, #[serde(default)] pub hardware_requirements: HardwareRequirements, - #[serde(default)] - pub hardware_acceleration: bool, - pub git_hash: Option, - #[serde(default = "current_version")] - #[ts(type = "string")] - pub os_version: Version, - #[ts(type = "string | null")] - pub sdk_version: Option, } impl Manifest { pub fn validate_for<'a, T: Clone>( @@ -181,6 +154,32 @@ pub struct HardwareRequirements { pub arch: Option>, } impl HardwareRequirements { + /// Returns true if this s9pk's hardware requirements are satisfied by the given hardware. + pub fn is_compatible(&self, hw: &crate::registry::device_info::HardwareInfo) -> bool { + if let Some(arch) = &self.arch { + if !arch.contains(&hw.arch) { + return false; + } + } + if let Some(ram) = self.ram { + if hw.ram < ram { + return false; + } + } + if let Some(devices) = &hw.devices { + for device_filter in &self.device { + if !devices + .iter() + .filter(|d| d.class() == &*device_filter.class) + .any(|d| device_filter.matches(d)) + { + return false; + } + } + } + true + } + /// returns a value that can be used as a sort key to get most specific requirements first pub fn specificity_desc(&self) -> (u32, u32, u64) { ( @@ -240,7 +239,7 @@ impl LocaleString { pub fn localize(&mut self) { self.localize_for(&*rust_i18n::locale()); } - pub fn localized(mut self) -> String { + pub fn localized(self) -> String { self.localized_for(&*rust_i18n::locale()) } } diff --git a/core/src/s9pk/v2/pack.rs b/core/src/s9pk/v2/pack.rs index 667241eca..cffd32d3d 100644 --- a/core/src/s9pk/v2/pack.rs +++ b/core/src/s9pk/v2/pack.rs @@ -685,7 +685,7 @@ pub async fn pack(ctx: 
CliContext, params: PackParams) -> Result<(), Error> { .await?; let manifest = s9pk.as_manifest_mut(); - manifest.git_hash = Some(GitHash::from_path(params.path()).await?); + manifest.metadata.git_hash = Some(GitHash::from_path(params.path()).await?); if !params.arch.is_empty() { let arches: BTreeSet = match manifest.hardware_requirements.arch.take() { Some(a) => params @@ -792,7 +792,7 @@ pub async fn pack(ctx: CliContext, params: PackParams) -> Result<(), Error> { } }; Some(( - LocaleString::Translated(s9pk.as_manifest().title.to_string()), + LocaleString::Translated(s9pk.as_manifest().metadata.title.to_string()), s9pk.icon_data_url().await?, )) } diff --git a/core/src/service/action.rs b/core/src/service/action.rs index 6bb69bf1d..e3367918f 100644 --- a/core/src/service/action.rs +++ b/core/src/service/action.rs @@ -17,6 +17,7 @@ use crate::{ActionId, PackageId, ReplayId}; pub(super) struct GetActionInput { id: ActionId, + prefill: Value, } impl Handler for ServiceActor { type Response = Result, Error>; @@ -26,7 +27,10 @@ impl Handler for ServiceActor { async fn handle( &mut self, id: Guid, - GetActionInput { id: action_id }: GetActionInput, + GetActionInput { + id: action_id, + prefill, + }: GetActionInput, _: &BackgroundJobQueue, ) -> Self::Response { let container = &self.0.persistent_container; @@ -34,7 +38,7 @@ impl Handler for ServiceActor { .execute::>( id, ProcedureName::GetActionInput(action_id), - Value::Null, + json!({ "prefill": prefill }), Some(Duration::from_secs(30)), ) .await @@ -47,6 +51,7 @@ impl Service { &self, id: Guid, action_id: ActionId, + prefill: Value, ) -> Result, Error> { if !self .seed @@ -67,7 +72,13 @@ impl Service { return Ok(None); } self.actor - .send(id, GetActionInput { id: action_id }) + .send( + id, + GetActionInput { + id: action_id, + prefill, + }, + ) .await? 
} } diff --git a/core/src/service/effects/action.rs b/core/src/service/effects/action.rs index adb6f2d74..e01fd0318 100644 --- a/core/src/service/effects/action.rs +++ b/core/src/service/effects/action.rs @@ -122,6 +122,10 @@ pub struct GetActionInputParams { package_id: Option, #[arg(help = "help.arg.action-id")] action_id: ActionId, + #[ts(type = "Record | null")] + #[serde(default)] + #[arg(skip)] + prefill: Option, } async fn get_action_input( context: EffectContext, @@ -129,9 +133,11 @@ async fn get_action_input( procedure_id, package_id, action_id, + prefill, }: GetActionInputParams, ) -> Result, Error> { let context = context.deref()?; + let prefill = prefill.unwrap_or(Value::Null); if let Some(package_id) = package_id { context @@ -142,16 +148,18 @@ async fn get_action_input( .await .as_ref() .or_not_found(&package_id)? - .get_action_input(procedure_id, action_id) + .get_action_input(procedure_id, action_id, prefill) .await } else { - context.get_action_input(procedure_id, action_id).await + context + .get_action_input(procedure_id, action_id, prefill) + .await } } #[derive(Debug, Clone, Serialize, Deserialize, TS, Parser)] #[serde(rename_all = "camelCase")] -#[ts(export)] +#[ts(export, rename = "EffectsRunActionParams")] pub struct RunActionParams { #[serde(default)] #[ts(skip)] @@ -243,11 +251,12 @@ async fn create_task( .get(&task.package_id) .await .as_ref() + .filter(|s| s.is_initialized()) { - let Some(prev) = service - .get_action_input(procedure_id.clone(), task.action_id.clone()) - .await? - else { + let prev = service + .get_action_input(procedure_id.clone(), task.action_id.clone(), Value::Null) + .await?; + let Some(prev) = prev else { return Err(Error::new( eyre!( "{}", @@ -270,7 +279,9 @@ async fn create_task( true } } else { - true // update when service is installed + // Service not installed or not yet initialized — assume active. + // Will be retested when service init completes (Service::recheck_tasks). 
+ true } } }, diff --git a/core/src/service/effects/callbacks.rs b/core/src/service/effects/callbacks.rs index c86773095..d30665c96 100644 --- a/core/src/service/effects/callbacks.rs +++ b/core/src/service/effects/callbacks.rs @@ -5,12 +5,16 @@ use std::time::{Duration, SystemTime}; use clap::Parser; use futures::future::join_all; -use imbl::{Vector, vector}; +use imbl::{OrdMap, Vector, vector}; use imbl_value::InternedString; +use patch_db::TypedDbWatch; +use patch_db::json_ptr::JsonPointer; use serde::{Deserialize, Serialize}; use tracing::warn; use ts_rs::TS; +use crate::db::model::Database; +use crate::db::model::public::NetworkInterfaceInfo; use crate::net::ssl::FullchainCertData; use crate::prelude::*; use crate::service::effects::context::EffectContext; @@ -19,7 +23,7 @@ use crate::service::rpc::{CallbackHandle, CallbackId}; use crate::service::{Service, ServiceActorSeed}; use crate::util::collections::EqMap; use crate::util::future::NonDetachingJoinHandle; -use crate::{HostId, PackageId, ServiceInterfaceId}; +use crate::{GatewayId, HostId, PackageId, ServiceInterfaceId}; #[derive(Default)] pub struct ServiceCallbacks(Mutex); @@ -29,7 +33,8 @@ struct ServiceCallbackMap { get_service_interface: BTreeMap<(PackageId, ServiceInterfaceId), Vec>, list_service_interfaces: BTreeMap>, get_system_smtp: Vec, - get_host_info: BTreeMap<(PackageId, HostId), Vec>, + get_host_info: + BTreeMap<(PackageId, HostId), (NonDetachingJoinHandle<()>, Vec)>, get_ssl_certificate: EqMap< (BTreeSet, FullchainCertData, Algorithm), (NonDetachingJoinHandle<()>, Vec), @@ -37,6 +42,7 @@ struct ServiceCallbackMap { get_status: BTreeMap>, get_container_ip: BTreeMap>, get_service_manifest: BTreeMap>, + get_outbound_gateway: BTreeMap, Vec)>, } impl ServiceCallbacks { @@ -57,7 +63,7 @@ impl ServiceCallbacks { }); this.get_system_smtp .retain(|h| h.handle.is_active() && h.seed.strong_count() > 0); - this.get_host_info.retain(|_, v| { + this.get_host_info.retain(|_, (_, v)| { v.retain(|h| 
h.handle.is_active() && h.seed.strong_count() > 0); !v.is_empty() }); @@ -73,6 +79,10 @@ impl ServiceCallbacks { v.retain(|h| h.handle.is_active() && h.seed.strong_count() > 0); !v.is_empty() }); + this.get_outbound_gateway.retain(|_, (_, v)| { + v.retain(|h| h.handle.is_active() && h.seed.strong_count() > 0); + !v.is_empty() + }); }) } @@ -141,29 +151,53 @@ impl ServiceCallbacks { } pub(super) fn add_get_host_info( - &self, + self: &Arc, + db: &TypedPatchDb, package_id: PackageId, host_id: HostId, handler: CallbackHandler, ) { self.mutate(|this| { this.get_host_info - .entry((package_id, host_id)) - .or_default() + .entry((package_id.clone(), host_id.clone())) + .or_insert_with(|| { + let ptr: JsonPointer = + format!("/public/packageData/{}/hosts/{}", package_id, host_id) + .parse() + .expect("valid json pointer"); + let db = db.clone(); + let callbacks = Arc::clone(self); + let key = (package_id, host_id); + ( + tokio::spawn(async move { + let mut sub = db.subscribe(ptr).await; + while sub.recv().await.is_some() { + if let Some(cbs) = callbacks.mutate(|this| { + this.get_host_info + .remove(&key) + .map(|(_, handlers)| CallbackHandlers(handlers)) + .filter(|cb| !cb.0.is_empty()) + }) { + if let Err(e) = cbs.call(vector![]).await { + tracing::error!("Error in host info callback: {e}"); + tracing::debug!("{e:?}"); + } + } + // entry was removed when we consumed handlers, + // so stop watching — a new subscription will be + // created if the service re-registers + break; + } + }) + .into(), + Vec::new(), + ) + }) + .1 .push(handler); }) } - #[must_use] - pub fn get_host_info(&self, id: &(PackageId, HostId)) -> Option { - self.mutate(|this| { - Some(CallbackHandlers( - this.get_host_info.remove(id).unwrap_or_default(), - )) - .filter(|cb| !cb.0.is_empty()) - }) - } - pub(super) fn add_get_ssl_certificate( &self, ctx: EffectContext, @@ -256,6 +290,61 @@ impl ServiceCallbacks { }) } + /// Register a callback for outbound gateway changes. 
+ pub(super) fn add_get_outbound_gateway( + self: &Arc, + package_id: PackageId, + mut outbound_gateway: TypedDbWatch>, + mut default_outbound: Option>>, + mut fallback: Option>>, + handler: CallbackHandler, + ) { + self.mutate(|this| { + this.get_outbound_gateway + .entry(package_id.clone()) + .or_insert_with(|| { + let callbacks = Arc::clone(self); + let key = package_id; + ( + tokio::spawn(async move { + tokio::select! { + _ = outbound_gateway.changed() => {} + _ = async { + if let Some(ref mut w) = default_outbound { + let _ = w.changed().await; + } else { + std::future::pending::<()>().await; + } + } => {} + _ = async { + if let Some(ref mut w) = fallback { + let _ = w.changed().await; + } else { + std::future::pending::<()>().await; + } + } => {} + } + if let Some(cbs) = callbacks.mutate(|this| { + this.get_outbound_gateway + .remove(&key) + .map(|(_, handlers)| CallbackHandlers(handlers)) + .filter(|cb| !cb.0.is_empty()) + }) { + if let Err(e) = cbs.call(vector![]).await { + tracing::error!("Error in outbound gateway callback: {e}"); + tracing::debug!("{e:?}"); + } + } + }) + .into(), + Vec::new(), + ) + }) + .1 + .push(handler); + }) + } + pub(super) fn add_get_service_manifest(&self, package_id: PackageId, handler: CallbackHandler) { self.mutate(|this| { this.get_service_manifest diff --git a/core/src/service/effects/mod.rs b/core/src/service/effects/mod.rs index 779a427ec..73d06467f 100644 --- a/core/src/service/effects/mod.rs +++ b/core/src/service/effects/mod.rs @@ -14,6 +14,7 @@ mod control; mod dependency; mod health; mod net; +pub mod plugin; mod prelude; pub mod subcontainer; mod system; @@ -142,6 +143,10 @@ pub fn handler() -> ParentHandler { "get-container-ip", from_fn_async(net::info::get_container_ip).no_cli(), ) + .subcommand( + "get-outbound-gateway", + from_fn_async(net::info::get_outbound_gateway).no_cli(), + ) .subcommand( "get-os-ip", from_fn(|_: C| Ok::<_, Error>(Ipv4Addr::from(HOST_IP))), @@ -167,6 +172,23 @@ pub fn handler() -> 
ParentHandler { from_fn_async(net::ssl::get_ssl_certificate).no_cli(), ) .subcommand("get-ssl-key", from_fn_async(net::ssl::get_ssl_key).no_cli()) + // plugin + .subcommand( + "plugin", + ParentHandler::::new().subcommand( + "url", + ParentHandler::::new() + .subcommand("register", from_fn_async(net::plugin::register).no_cli()) + .subcommand( + "export-url", + from_fn_async(net::plugin::export_url).no_cli(), + ) + .subcommand( + "clear-urls", + from_fn_async(net::plugin::clear_urls).no_cli(), + ), + ), + ) .subcommand( "set-data-version", from_fn_async(version::set_data_version) diff --git a/core/src/service/effects/net/host.rs b/core/src/service/effects/net/host.rs index ebd1b80c8..a20fcf189 100644 --- a/core/src/service/effects/net/host.rs +++ b/core/src/service/effects/net/host.rs @@ -29,6 +29,7 @@ pub async fn get_host_info( if let Some(callback) = callback { let callback = callback.register(&context.seed.persistent_container); context.seed.ctx.callbacks.add_get_host_info( + &context.seed.ctx.db, package_id.clone(), host_id.clone(), CallbackHandler::new(&context, callback), diff --git a/core/src/service/effects/net/info.rs b/core/src/service/effects/net/info.rs index f14ee72dc..ef8507e47 100644 --- a/core/src/service/effects/net/info.rs +++ b/core/src/service/effects/net/info.rs @@ -1,9 +1,16 @@ use std::net::Ipv4Addr; -use crate::PackageId; +use imbl::OrdMap; +use patch_db::TypedDbWatch; +use patch_db::json_ptr::JsonPointer; +use tokio::process::Command; + +use crate::db::model::public::NetworkInterfaceInfo; use crate::service::effects::callbacks::CallbackHandler; use crate::service::effects::prelude::*; use crate::service::rpc::CallbackId; +use crate::util::Invoke; +use crate::{GatewayId, PackageId}; #[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] #[serde(rename_all = "camelCase")] @@ -51,3 +58,116 @@ pub async fn get_container_ip( lxc.ip().await.map(Some) } } + +#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, TS)] 
+#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct GetOutboundGatewayParams { + #[ts(optional)] + callback: Option, +} + +pub async fn get_outbound_gateway( + context: EffectContext, + GetOutboundGatewayParams { callback }: GetOutboundGatewayParams, +) -> Result { + let context = context.deref()?; + let ctx = &context.seed.ctx; + + // Resolve the effective gateway; DB watches are created atomically + // with each read to avoid race conditions. + let (gw, pkg_watch, os_watch, gateways_watch) = + resolve_outbound_gateway(ctx, &context.seed.id).await?; + + if let Some(callback) = callback { + let callback = callback.register(&context.seed.persistent_container); + context.seed.ctx.callbacks.add_get_outbound_gateway( + context.seed.id.clone(), + pkg_watch, + os_watch, + gateways_watch, + CallbackHandler::new(&context, callback), + ); + } + + Ok(gw) +} + +async fn resolve_outbound_gateway( + ctx: &crate::context::RpcContext, + package_id: &PackageId, +) -> Result< + ( + GatewayId, + TypedDbWatch>, + Option>>, + Option>>, + ), + Error, +> { + // 1. Package-specific outbound gateway — subscribe before reading + let pkg_ptr: JsonPointer = format!("/public/packageData/{}/outboundGateway", package_id) + .parse() + .expect("valid json pointer"); + let mut pkg_watch = ctx.db.watch(pkg_ptr).await; + let pkg_gw: Option = imbl_value::from_value(pkg_watch.peek_and_mark_seen()?)?; + + if let Some(gw) = pkg_gw { + return Ok((gw, pkg_watch.typed(), None, None)); + } + + // 2. OS-level default outbound — subscribe before reading + let os_ptr: JsonPointer = "/public/serverInfo/network/defaultOutbound" + .parse() + .expect("valid json pointer"); + let mut os_watch = ctx.db.watch(os_ptr).await; + let default_outbound: Option = + imbl_value::from_value(os_watch.peek_and_mark_seen()?)?; + + if let Some(gw) = default_outbound { + return Ok((gw, pkg_watch.typed(), Some(os_watch.typed()), None)); + } + + // 3. 
Fall through to main routing table — watch gateways for changes + let gw_ptr: JsonPointer = "/public/serverInfo/network/gateways" + .parse() + .expect("valid json pointer"); + let mut gateways_watch = ctx.db.watch(gw_ptr).await; + gateways_watch.peek_and_mark_seen()?; + + let gw = default_route_interface().await?; + Ok(( + gw, + pkg_watch.typed(), + Some(os_watch.typed()), + Some(gateways_watch.typed()), + )) +} + +/// Parses `ip route show table main` for the default route's `dev` field. +async fn default_route_interface() -> Result { + let output = Command::new("ip") + .arg("route") + .arg("show") + .arg("table") + .arg("main") + .invoke(ErrorKind::Network) + .await?; + let text = String::from_utf8_lossy(&output); + for line in text.lines() { + if line.starts_with("default ") { + let mut parts = line.split_whitespace(); + while let Some(tok) = parts.next() { + if tok == "dev" { + if let Some(dev) = parts.next() { + return Ok(dev.parse().unwrap()); + } + } + } + } + } + Err(Error::new( + eyre!("no default route found in main routing table"), + ErrorKind::Network, + )) +} diff --git a/core/src/service/effects/net/mod.rs b/core/src/service/effects/net/mod.rs index cf13451a6..3d8ca091b 100644 --- a/core/src/service/effects/net/mod.rs +++ b/core/src/service/effects/net/mod.rs @@ -2,4 +2,5 @@ pub mod bind; pub mod host; pub mod info; pub mod interface; +pub mod plugin; pub mod ssl; diff --git a/core/src/service/effects/net/plugin.rs b/core/src/service/effects/net/plugin.rs new file mode 100644 index 000000000..7d98bb30c --- /dev/null +++ b/core/src/service/effects/net/plugin.rs @@ -0,0 +1,176 @@ +use std::collections::BTreeSet; +use std::sync::Arc; + +use crate::ActionId; +use crate::net::host::{all_hosts, host_for}; +use crate::net::service_interface::{HostnameMetadata, PluginHostnameInfo}; +use crate::service::Service; +use crate::service::effects::plugin::PluginId; +use crate::service::effects::prelude::*; + +fn require_url_plugin(context: &Arc) -> Result<(), Error> 
{ + if !context + .seed + .persistent_container + .s9pk + .as_manifest() + .metadata + .plugins + .contains(&PluginId::UrlV0) + { + return Err(Error::new( + eyre!( + "{}", + t!("net.plugin.manifest-missing-plugin", plugin = "url-v0") + ), + ErrorKind::InvalidRequest, + )); + } + Ok(()) +} + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct UrlPluginRegisterParams { + pub table_action: ActionId, +} + +pub async fn register( + context: EffectContext, + UrlPluginRegisterParams { table_action }: UrlPluginRegisterParams, +) -> Result<(), Error> { + use crate::db::model::package::UrlPluginRegistration; + + let context = context.deref()?; + require_url_plugin(&context)?; + let plugin_id = context.seed.id.clone(); + + context + .seed + .ctx + .db + .mutate(|db| { + db.as_public_mut() + .as_package_data_mut() + .as_idx_mut(&plugin_id) + .or_not_found(&plugin_id)? + .as_plugin_mut() + .as_url_mut() + .ser(&Some(UrlPluginRegistration { table_action }))?; + Ok(()) + }) + .await + .result?; + + Ok(()) +} + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct UrlPluginExportUrlParams { + pub hostname_info: PluginHostnameInfo, + pub remove_action: Option, + pub overflow_actions: Vec, +} + +pub async fn export_url( + context: EffectContext, + UrlPluginExportUrlParams { + hostname_info, + remove_action, + overflow_actions, + }: UrlPluginExportUrlParams, +) -> Result<(), Error> { + let context = context.deref()?; + require_url_plugin(&context)?; + let plugin_id = context.seed.id.clone(); + + let entry = hostname_info.to_hostname_info(&plugin_id, remove_action, overflow_actions); + + context + .seed + .ctx + .db + .mutate(|db| { + let host = host_for( + db, + hostname_info.package_id.as_ref(), + &hostname_info.host_id, + )?; + host.as_bindings_mut() + .as_idx_mut(&hostname_info.internal_port) + .or_not_found(t!( + "net.plugin.binding-not-found", + binding = 
format!( + "{}:{}:{}", + hostname_info.package_id.as_deref().unwrap_or("STARTOS"), + hostname_info.host_id, + hostname_info.internal_port + ) + ))? + .as_addresses_mut() + .as_available_mut() + .mutate(|available: &mut BTreeSet<_>| { + available.insert(entry); + Ok(()) + })?; + Ok(()) + }) + .await + .result?; + + Ok(()) +} + +#[derive(Debug, Clone, Serialize, Deserialize, TS)] +#[serde(rename_all = "camelCase")] +#[ts(export)] +pub struct UrlPluginClearUrlsParams { + pub except: BTreeSet, +} + +pub async fn clear_urls( + context: EffectContext, + UrlPluginClearUrlsParams { except }: UrlPluginClearUrlsParams, +) -> Result<(), Error> { + let context = context.deref()?; + require_url_plugin(&context)?; + let plugin_id = context.seed.id.clone(); + + context + .seed + .ctx + .db + .mutate(|db| { + for host in all_hosts(db) { + let host = host?; + for (_, bind) in host.as_bindings_mut().as_entries_mut()? { + bind.as_addresses_mut().as_available_mut().mutate( + |available: &mut BTreeSet<_>| { + available.retain(|h| { + match &h.metadata { + HostnameMetadata::Plugin { package_id, .. } + if package_id == &plugin_id => + { + // Keep if it matches any entry in the except list + except + .iter() + .any(|e| e.matches_hostname_info(h, &plugin_id)) + } + _ => true, + } + }); + Ok(()) + }, + )?; + } + } + Ok(()) + }) + .await + .result?; + + Ok(()) +} diff --git a/core/src/service/effects/net/ssl.rs b/core/src/service/effects/net/ssl.rs index fcd1dac9d..f19f4faf2 100644 --- a/core/src/service/effects/net/ssl.rs +++ b/core/src/service/effects/net/ssl.rs @@ -55,20 +55,18 @@ pub async fn get_ssl_certificate( .map(|(_, m)| m.as_hosts().as_entries()) .flatten_ok() .map_ok(|(_, m)| { - Ok(m.as_onions() - .de()? - .iter() - .map(InternedString::from_display) - .chain(m.as_public_domains().keys()?) - .chain(m.as_private_domains().de()?) + Ok(m.as_public_domains() + .keys()? + .into_iter() + .chain(m.as_private_domains().keys()?) .chain( - m.as_hostname_info() + m.as_bindings() .de()? 
.values() - .flatten() + .flat_map(|b| b.addresses.available.iter().cloned()) .map(|h| h.to_san_hostname()), ) - .collect::>()) + .collect::>()) }) .map(|a| a.and_then(|a| a)) .flatten_ok() @@ -181,20 +179,18 @@ pub async fn get_ssl_key( .map(|m| m.as_hosts().as_entries()) .flatten_ok() .map_ok(|(_, m)| { - Ok(m.as_onions() - .de()? - .iter() - .map(InternedString::from_display) - .chain(m.as_public_domains().keys()?) - .chain(m.as_private_domains().de()?) + Ok(m.as_public_domains() + .keys()? + .into_iter() + .chain(m.as_private_domains().keys()?) .chain( - m.as_hostname_info() + m.as_bindings() .de()? .values() - .flatten() + .flat_map(|b| b.addresses.available.iter().cloned()) .map(|h| h.to_san_hostname()), ) - .collect::>()) + .collect::>()) }) .map(|a| a.and_then(|a| a)) .flatten_ok() diff --git a/core/src/service/effects/plugin.rs b/core/src/service/effects/plugin.rs new file mode 100644 index 000000000..76f33e6e5 --- /dev/null +++ b/core/src/service/effects/plugin.rs @@ -0,0 +1,9 @@ +use serde::{Deserialize, Serialize}; +use ts_rs::TS; + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, TS)] +#[serde(rename_all = "kebab-case")] +#[ts(export)] +pub enum PluginId { + UrlV0, +} diff --git a/core/src/service/mod.rs b/core/src/service/mod.rs index 81aae3229..f17f2d266 100644 --- a/core/src/service/mod.rs +++ b/core/src/service/mod.rs @@ -16,7 +16,7 @@ use futures::{FutureExt, SinkExt, StreamExt, TryStreamExt}; use imbl_value::{InternedString, json}; use itertools::Itertools; use nix::sys::signal::Signal; -use persistent_container::{PersistentContainer, Subcontainer}; +use persistent_container::PersistentContainer; use rpc_toolkit::HandlerArgs; use rpc_toolkit::yajrc::RpcError; use serde::{Deserialize, Serialize}; @@ -52,7 +52,7 @@ use crate::util::serde::Pem; use crate::util::sync::SyncMutex; use crate::util::tui::choose; use crate::volume::data_dir; -use crate::{ActionId, CAP_1_KiB, DATA_DIR, HostId, ImageId, PackageId}; +use 
crate::{ActionId, CAP_1_KiB, DATA_DIR, ImageId, PackageId}; pub mod action; pub mod cli; @@ -215,6 +215,84 @@ pub struct Service { seed: Arc, } impl Service { + pub fn is_initialized(&self) -> bool { + self.seed.persistent_container.state.borrow().rt_initialized + } + + /// Re-evaluate all tasks that reference this service's actions. + /// Called after every service init to update task active state. + #[instrument(skip_all)] + async fn recheck_tasks(&self) -> Result<(), Error> { + let service_id = &self.seed.id; + let peek = self.seed.ctx.db.peek().await; + let mut action_input: BTreeMap = BTreeMap::new(); + let tasks: BTreeSet<_> = peek + .as_public() + .as_package_data() + .as_entries()? + .into_iter() + .map(|(_, pde)| { + Ok(pde + .as_tasks() + .as_entries()? + .into_iter() + .map(|(_, r)| { + let t = r.as_task(); + Ok::<_, Error>( + if t.as_package_id().de()? == *service_id + && t.as_input().transpose_ref().is_some() + { + Some(t.as_action_id().de()?) + } else { + None + }, + ) + }) + .filter_map_ok(|a| a)) + }) + .flatten_ok() + .map(|a| a.and_then(|a| a)) + .try_collect()?; + let procedure_id = Guid::new(); + for action_id in tasks { + if let Some(input) = self + .get_action_input(procedure_id.clone(), action_id.clone(), Value::Null) + .await + .log_err() + .flatten() + .and_then(|i| i.value) + { + action_input.insert(action_id, input); + } + } + self.seed + .ctx + .db + .mutate(|db| { + for (action_id, input) in &action_input { + for (_, pde) in db.as_public_mut().as_package_data_mut().as_entries_mut()? { + pde.as_tasks_mut().mutate(|tasks| { + Ok(update_tasks(tasks, service_id, action_id, input, false)) + })?; + } + } + for (_, pde) in db.as_public_mut().as_package_data_mut().as_entries_mut()? { + if pde + .as_tasks() + .de()? 
+ .into_iter() + .any(|(_, t)| t.active && t.task.severity == TaskSeverity::Critical) + { + pde.as_status_info_mut().stop()?; + } + } + Ok(()) + }) + .await + .result?; + Ok(()) + } + #[instrument(skip_all)] async fn new( ctx: RpcContext, @@ -263,6 +341,7 @@ impl Service { .persistent_container .init(service.weak(), procedure_id, init_kind) .await?; + service.recheck_tasks().await?; if let Some(recovery_guard) = recovery_guard { recovery_guard.unmount(true).await?; } @@ -489,70 +568,8 @@ impl Service { ) .await?; - if let Some(mut progress) = progress { - progress.finalization_progress.complete(); - progress.progress.complete(); - tokio::task::yield_now().await; - } - - let peek = ctx.db.peek().await; - let mut action_input: BTreeMap = BTreeMap::new(); - let tasks: BTreeSet<_> = peek - .as_public() - .as_package_data() - .as_entries()? - .into_iter() - .map(|(_, pde)| { - Ok(pde - .as_tasks() - .as_entries()? - .into_iter() - .map(|(_, r)| { - let t = r.as_task(); - Ok::<_, Error>( - if t.as_package_id().de()? == manifest.id - && t.as_input().transpose_ref().is_some() - { - Some(t.as_action_id().de()?) - } else { - None - }, - ) - }) - .filter_map_ok(|a| a)) - }) - .flatten_ok() - .map(|a| a.and_then(|a| a)) - .try_collect()?; - for action_id in tasks { - if peek - .as_public() - .as_package_data() - .as_idx(&manifest.id) - .or_not_found(&manifest.id)? - .as_actions() - .contains_key(&action_id)? - { - if let Some(input) = service - .get_action_input(procedure_id.clone(), action_id.clone()) - .await - .log_err() - .flatten() - .and_then(|i| i.value) - { - action_input.insert(action_id, input); - } - } - } ctx.db .mutate(|db| { - for (action_id, input) in &action_input { - for (_, pde) in db.as_public_mut().as_package_data_mut().as_entries_mut()? 
{ - pde.as_tasks_mut().mutate(|tasks| { - Ok(update_tasks(tasks, &manifest.id, action_id, input, false)) - })?; - } - } let entry = db .as_public_mut() .as_package_data_mut() @@ -587,12 +604,19 @@ impl Service { entry.as_developer_key_mut().ser(&Pem::new(developer_key))?; entry.as_icon_mut().ser(&icon)?; entry.as_registry_mut().ser(registry)?; + entry.as_status_info_mut().as_error_mut().ser(&None)?; Ok(()) }) .await .result?; + if let Some(mut progress) = progress { + progress.finalization_progress.complete(); + progress.progress.complete(); + tokio::task::yield_now().await; + } + // Trigger manifest callbacks after successful installation let manifest = service.seed.persistent_container.s9pk.as_manifest(); if let Some(callbacks) = ctx.callbacks.get_service_manifest(&manifest.id) { @@ -682,14 +706,6 @@ impl Service { memory_usage: MiB::from_MiB(used), }) } - - pub async fn sync_host(&self, host_id: HostId) -> Result<(), Error> { - self.seed - .persistent_container - .net_service - .sync_host(host_id) - .await - } } struct ServiceActorSeed { @@ -701,6 +717,7 @@ struct ServiceActorSeed { } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] pub struct RebuildParams { #[arg(help = "help.arg.package-id")] pub id: PackageId, @@ -1194,6 +1211,9 @@ pub async fn cli_attach( { Ok(a) => a, Err(e) => { + if e.kind != ErrorKind::InvalidRequest { + return Err(e); + } let prompt = e.to_string(); let options: Vec = from_value(e.info)?; let choice = choose(&prompt, &options).await?; @@ -1206,6 +1226,7 @@ pub async fn cli_attach( )?; let mut ws = context.ws_continuation(guid).await?; + print!("\r"); let (kill, thread_kill) = tokio::sync::oneshot::channel(); let (thread_send, recv) = tokio::sync::mpsc::channel(4 * CAP_1_KiB); let stdin_thread: NonDetachingJoinHandle<()> = tokio::task::spawn_blocking(move || { @@ -1234,18 +1255,6 @@ pub async fn cli_attach( let mut stderr = Some(stderr); loop { futures::select_biased! 
{ - // signal = tokio:: => { - // let exit = exit?; - // if current_out != "exit" { - // ws.send(Message::Text("exit".into())) - // .await - // .with_kind(ErrorKind::Network)?; - // current_out = "exit"; - // } - // ws.send(Message::Binary( - // i32::to_be_bytes(exit.into_raw()).to_vec() - // )).await.with_kind(ErrorKind::Network)?; - // } input = stdin.as_mut().map_or( futures::future::Either::Left(futures::future::pending()), |s| futures::future::Either::Right(s.recv()) diff --git a/core/src/service/persistent_container.rs b/core/src/service/persistent_container.rs index 7c73f7bf0..6bc52b4db 100644 --- a/core/src/service/persistent_container.rs +++ b/core/src/service/persistent_container.rs @@ -97,7 +97,7 @@ impl PersistentContainer { .join(&s9pk.as_manifest().id), ), LxcConfig { - hardware_acceleration: s9pk.manifest.hardware_acceleration, + hardware_acceleration: s9pk.manifest.metadata.hardware_acceleration, }, ) .await?; diff --git a/core/src/service/service_map.rs b/core/src/service/service_map.rs index fe8192d26..697578a7c 100644 --- a/core/src/service/service_map.rs +++ b/core/src/service/service_map.rs @@ -259,6 +259,8 @@ impl ServiceMap { service_interfaces: Default::default(), hosts: Default::default(), store_exposed_dependents: Default::default(), + outbound_gateway: None, + plugin: Default::default(), }, )?; }; diff --git a/core/src/service/uninstall.rs b/core/src/service/uninstall.rs index a924e8cb0..2f6515024 100644 --- a/core/src/service/uninstall.rs +++ b/core/src/service/uninstall.rs @@ -1,9 +1,12 @@ +use std::collections::BTreeSet; use std::path::Path; use imbl::vector; use crate::context::RpcContext; use crate::db::model::package::{InstalledState, InstallingInfo, InstallingState, PackageState}; +use crate::net::host::all_hosts; +use crate::net::service_interface::{HostnameInfo, HostnameMetadata}; use crate::prelude::*; use crate::volume::PKG_VOLUME_DIR; use crate::{DATA_DIR, PACKAGE_DATA, PackageId}; @@ -36,6 +39,24 @@ pub async fn cleanup(ctx: 
&RpcContext, id: &PackageId, soft: bool) -> Result<(), Ok(()) })?; d.as_private_mut().as_package_stores_mut().remove(&id)?; + // Remove plugin URLs exported by this package from all hosts + for host in all_hosts(d) { + let host = host?; + for (_, bind) in host.as_bindings_mut().as_entries_mut()? { + bind.as_addresses_mut().as_available_mut().mutate( + |available: &mut BTreeSet| { + available.retain(|h| { + !matches!( + &h.metadata, + HostnameMetadata::Plugin { package_id, .. } + if package_id == id + ) + }); + Ok(()) + }, + )?; + } + } Ok(Some(pde)) } else { Ok(None) diff --git a/core/src/setup.rs b/core/src/setup.rs index 95aa276a7..2166cfc08 100644 --- a/core/src/setup.rs +++ b/core/src/setup.rs @@ -31,6 +31,7 @@ use crate::disk::mount::filesystem::ReadWrite; use crate::disk::mount::filesystem::cifs::Cifs; use crate::disk::mount::guard::{GenericMountGuard, TmpMountGuard}; use crate::disk::util::{DiskInfo, StartOsRecoveryInfo, pvscan, recovery_info}; +use crate::hostname::ServerHostnameInfo; use crate::init::{InitPhases, InitResult, init}; use crate::net::ssl::root_ca_start_time; use crate::prelude::*; @@ -115,6 +116,7 @@ async fn setup_init( ctx: &SetupContext, password: Option, kiosk: Option, + hostname: Option, init_phases: InitPhases, ) -> Result<(AccountInfo, InitResult), Error> { let init_result = init(&ctx.webserver, &ctx.config.peek(|c| c.clone()), init_phases).await?; @@ -129,6 +131,9 @@ async fn setup_init( if let Some(password) = &password { account.set_password(password)?; } + if let Some(hostname) = hostname { + account.hostname = hostname; + } account.save(m)?; let info = m.as_public_mut().as_server_info_mut(); info.as_password_hash_mut().ser(&account.password)?; @@ -233,7 +238,8 @@ pub async fn attach( } disk_phase.complete(); - let (account, net_ctrl) = setup_init(&setup_ctx, password, kiosk, init_phases).await?; + let (account, net_ctrl) = + setup_init(&setup_ctx, password, kiosk, None, init_phases).await?; let rpc_ctx = RpcContext::init( 
&setup_ctx.webserver, @@ -246,7 +252,7 @@ pub async fn attach( Ok(( SetupResult { - hostname: account.hostname, + hostname: account.hostname.hostname, root_ca: Pem(account.root_ca_cert), needs_restart: setup_ctx.install_rootfs.peek(|a| a.is_some()), }, @@ -402,10 +408,12 @@ pub async fn setup_data_drive( #[ts(export)] pub struct SetupExecuteParams { guid: InternedString, - password: EncryptedWire, + password: Option, recovery_source: Option>, #[ts(optional)] kiosk: Option, + name: Option, + hostname: Option, } // #[command(rpc_only)] @@ -416,17 +424,20 @@ pub async fn execute( password, recovery_source, kiosk, + name, + hostname, }: SetupExecuteParams, ) -> Result { - let password = match password.decrypt(&ctx) { - Some(a) => a, - None => { - return Err(Error::new( - color_eyre::eyre::eyre!("{}", t!("setup.couldnt-decode-startos-password")), - crate::ErrorKind::Unknown, - )); - } - }; + let password = password + .map(|p| { + p.decrypt(&ctx).ok_or_else(|| { + Error::new( + color_eyre::eyre::eyre!("{}", t!("setup.couldnt-decode-startos-password")), + crate::ErrorKind::Unknown, + ) + }) + }) + .transpose()?; let recovery = match recovery_source { Some(RecoverySource::Backup { target, @@ -446,8 +457,10 @@ pub async fn execute( None => None, }; + let hostname = ServerHostnameInfo::new_opt(name, hostname)?; + let setup_ctx = ctx.clone(); - ctx.run_setup(move || execute_inner(setup_ctx, guid, password, recovery, kiosk))?; + ctx.run_setup(move || execute_inner(setup_ctx, guid, password, recovery, kiosk, hostname))?; Ok(ctx.progress().await) } @@ -462,7 +475,7 @@ pub async fn complete(ctx: SetupContext) -> Result { guid_file.sync_all().await?; Command::new("systemd-firstboot") .arg("--root=/media/startos/config/overlay/") - .arg(format!("--hostname={}", res.hostname.0)) + .arg(format!("--hostname={}", res.hostname.as_ref())) .invoke(ErrorKind::ParseSysInfo) .await?; Command::new("sync").invoke(ErrorKind::Filesystem).await?; @@ -533,9 +546,10 @@ pub async fn shutdown(ctx: 
SetupContext) -> Result<(), Error> { pub async fn execute_inner( ctx: SetupContext, guid: InternedString, - password: String, + password: Option, recovery_source: Option>, kiosk: Option, + hostname: Option, ) -> Result<(SetupResult, RpcContext), Error> { let progress = &ctx.progress; let restore_phase = match recovery_source.as_ref() { @@ -570,14 +584,30 @@ pub async fn execute_inner( server_id, recovery_password, kiosk, + hostname, progress, ) .await } Some(RecoverySource::Migrate { guid: old_guid }) => { - migrate(&ctx, guid, &old_guid, password, kiosk, progress).await + migrate(&ctx, guid, &old_guid, password, kiosk, hostname, progress).await + } + None => { + fresh_setup( + &ctx, + guid, + &password.ok_or_else(|| { + Error::new( + eyre!("{}", t!("setup.password-required")), + ErrorKind::InvalidRequest, + ) + })?, + kiosk, + hostname, + progress, + ) + .await } - None => fresh_setup(&ctx, guid, &password, kiosk, progress).await, } } @@ -592,13 +622,14 @@ async fn fresh_setup( guid: InternedString, password: &str, kiosk: Option, + hostname: Option, SetupExecuteProgress { init_phases, rpc_ctx_phases, .. 
}: SetupExecuteProgress, ) -> Result<(SetupResult, RpcContext), Error> { - let account = AccountInfo::new(password, root_ca_start_time().await)?; + let account = AccountInfo::new(password, root_ca_start_time().await, hostname)?; let db = ctx.db().await?; let kiosk = Some(kiosk.unwrap_or(true)).filter(|_| &*PLATFORM != "raspberrypi"); sync_kiosk(kiosk).await?; @@ -635,7 +666,7 @@ async fn fresh_setup( Ok(( SetupResult { - hostname: account.hostname, + hostname: account.hostname.hostname, root_ca: Pem(account.root_ca_cert), needs_restart: ctx.install_rootfs.peek(|a| a.is_some()), }, @@ -647,11 +678,12 @@ async fn fresh_setup( async fn recover( ctx: &SetupContext, guid: InternedString, - password: String, + password: Option, recovery_source: BackupTargetFS, server_id: String, recovery_password: String, kiosk: Option, + hostname: Option, progress: SetupExecuteProgress, ) -> Result<(SetupResult, RpcContext), Error> { let recovery_source = TmpMountGuard::mount(&recovery_source, ReadWrite).await?; @@ -663,6 +695,7 @@ async fn recover( &server_id, &recovery_password, kiosk, + hostname, progress, ) .await @@ -673,8 +706,9 @@ async fn migrate( ctx: &SetupContext, guid: InternedString, old_guid: &str, - password: String, + password: Option, kiosk: Option, + hostname: Option, SetupExecuteProgress { init_phases, restore_phase, @@ -753,7 +787,7 @@ async fn migrate( crate::disk::main::export(&old_guid, "/media/startos/migrate").await?; restore_phase.complete(); - let (account, net_ctrl) = setup_init(&ctx, Some(password), kiosk, init_phases).await?; + let (account, net_ctrl) = setup_init(&ctx, password, kiosk, hostname, init_phases).await?; let rpc_ctx = RpcContext::init( &ctx.webserver, @@ -766,7 +800,7 @@ async fn migrate( Ok(( SetupResult { - hostname: account.hostname, + hostname: account.hostname.hostname, root_ca: Pem(account.root_ca_cert), needs_restart: ctx.install_rootfs.peek(|a| a.is_some()), }, diff --git a/core/src/ssh.rs b/core/src/ssh.rs index 7d8b073b4..69602e806 
100644 --- a/core/src/ssh.rs +++ b/core/src/ssh.rs @@ -12,7 +12,7 @@ use tracing::instrument; use ts_rs::TS; use crate::context::{CliContext, RpcContext}; -use crate::hostname::Hostname; +use crate::hostname::ServerHostname; use crate::prelude::*; use crate::util::io::create_file; use crate::util::serde::{HandlerExtSerde, Pem, WithIoFormat, display_serializable}; @@ -58,7 +58,8 @@ impl ValueParserFactory for SshPubKey { } } -#[derive(serde::Serialize, serde::Deserialize)] +#[derive(serde::Serialize, serde::Deserialize, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct SshKeyResponse { pub alg: String, @@ -115,15 +116,19 @@ pub fn ssh() -> ParentHandler { } #[derive(Deserialize, Serialize, Parser, TS)] +#[ts(export)] #[serde(rename_all = "camelCase")] #[command(rename_all = "kebab-case")] -pub struct AddParams { +pub struct SshAddParams { #[arg(help = "help.arg.ssh-public-key")] key: SshPubKey, } #[instrument(skip_all)] -pub async fn add(ctx: RpcContext, AddParams { key }: AddParams) -> Result { +pub async fn add( + ctx: RpcContext, + SshAddParams { key }: SshAddParams, +) -> Result { let mut key = WithTimeData::new(key); let fingerprint = InternedString::intern(key.0.fingerprint_md5()); let (keys, res) = ctx @@ -150,9 +155,10 @@ pub async fn add(ctx: RpcContext, AddParams { key }: AddParams) -> Result Result<(), Error> { let keys = ctx .db @@ -235,7 +241,7 @@ pub async fn list(ctx: RpcContext) -> Result, Error> { #[instrument(skip_all)] pub async fn sync_keys>( - hostname: &Hostname, + hostname: &ServerHostname, privkey: &Pem, pubkeys: &SshKeys, ssh_dir: P, @@ -281,8 +287,8 @@ pub async fn sync_keys>( .to_openssh() .with_kind(ErrorKind::OpenSsh)? 
+ " start9@" - + &*hostname.0) - .as_bytes(), + + hostname.as_ref()) + .as_bytes(), ) .await?; f.write_all(b"\n").await?; diff --git a/core/src/system/mod.rs b/core/src/system/mod.rs index 3fe43797a..248990581 100644 --- a/core/src/system/mod.rs +++ b/core/src/system/mod.rs @@ -20,6 +20,7 @@ use crate::context::{CliContext, RpcContext}; use crate::disk::util::{get_available, get_used}; use crate::logs::{LogSource, LogsParams, SYSTEM_UNIT}; use crate::prelude::*; +use crate::registry::device_info::DeviceInfo; use crate::rpc_continuations::{Guid, RpcContinuation, RpcContinuations}; use crate::shutdown::Shutdown; use crate::util::Invoke; @@ -191,7 +192,9 @@ pub async fn governor( Ok(GovernorInfo { current, available }) } -#[derive(Serialize, Deserialize)] +#[derive(Serialize, Deserialize, TS)] +#[ts(export)] +#[serde(rename_all = "camelCase")] pub struct TimeInfo { now: String, uptime: u64, @@ -247,6 +250,64 @@ pub async fn time(ctx: RpcContext, _: Empty) -> Result { }) } +pub async fn device_info(ctx: RpcContext) -> Result { + DeviceInfo::load(&ctx).await +} + +pub fn display_device_info(params: WithIoFormat, info: DeviceInfo) -> Result<(), Error> { + use prettytable::*; + + if let Some(format) = params.format { + return display_serializable(format, info); + } + + let mut table = Table::new(); + table.add_row(row![br -> "PLATFORM", &*info.os.platform]); + table.add_row(row![br -> "OS VERSION", info.os.version.to_string()]); + table.add_row(row![br -> "OS COMPAT", info.os.compat.to_string()]); + if let Some(lang) = &info.os.language { + table.add_row(row![br -> "LANGUAGE", &**lang]); + } + if let Some(hw) = &info.hardware { + table.add_row(row![br -> "ARCH", &*hw.arch]); + table.add_row(row![br -> "RAM", format_ram(hw.ram)]); + if let Some(devices) = &hw.devices { + for dev in devices { + let (class, desc) = match dev { + crate::util::lshw::LshwDevice::Processor(p) => ( + "PROCESSOR", + p.product.as_deref().unwrap_or("unknown").to_string(), + ), + 
crate::util::lshw::LshwDevice::Display(d) => ( + "DISPLAY", + format!( + "{}{}", + d.product.as_deref().unwrap_or("unknown"), + d.driver + .as_deref() + .map(|drv| format!(" ({})", drv)) + .unwrap_or_default() + ), + ), + }; + table.add_row(row![br -> class, desc]); + } + } + } + table.print_tty(false)?; + Ok(()) +} + +fn format_ram(bytes: u64) -> String { + const GIB: u64 = 1024 * 1024 * 1024; + const MIB: u64 = 1024 * 1024; + if bytes >= GIB { + format!("{:.1} GiB", bytes as f64 / GIB as f64) + } else { + format!("{:.1} MiB", bytes as f64 / MIB as f64) + } +} + pub fn logs>() -> ParentHandler { crate::logs::logs(|_: &C, _| async { Ok(LogSource::Unit(SYSTEM_UNIT)) }) } @@ -331,6 +392,7 @@ pub struct MetricLeaf { } #[derive(Clone, Copy, Debug, PartialEq, PartialOrd, TS)] +#[ts(type = "{ value: string, unit: string }")] pub struct Celsius(f64); impl fmt::Display for Celsius { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -359,6 +421,7 @@ impl<'de> Deserialize<'de> for Celsius { } } #[derive(Clone, Debug, PartialEq, PartialOrd, TS)] +#[ts(type = "{ value: string, unit: string }")] pub struct Percentage(f64); impl Serialize for Percentage { fn serialize(&self, serializer: S) -> Result @@ -385,6 +448,7 @@ impl<'de> Deserialize<'de> for Percentage { } #[derive(Clone, Debug, TS)] +#[ts(type = "{ value: string, unit: string }")] pub struct MebiBytes(pub f64); impl Serialize for MebiBytes { fn serialize(&self, serializer: S) -> Result @@ -411,6 +475,7 @@ impl<'de> Deserialize<'de> for MebiBytes { } #[derive(Clone, Debug, PartialEq, PartialOrd, TS)] +#[ts(type = "{ value: string, unit: string }")] pub struct GigaBytes(f64); impl Serialize for GigaBytes { fn serialize(&self, serializer: S) -> Result @@ -490,6 +555,7 @@ pub async fn metrics(ctx: RpcContext) -> Result { #[derive(Deserialize, Serialize, Clone, Debug, TS)] #[serde(rename_all = "camelCase")] +#[ts(export)] pub struct MetricsFollowResponse { pub guid: Guid, pub metrics: Metrics, @@ -1042,20 +1108,36 @@ 
async fn get_disk_info() -> Result { }) } +#[derive( + Debug, Clone, Copy, Default, serde::Serialize, serde::Deserialize, TS, clap::ValueEnum, +)] +#[ts(export)] +#[serde(rename_all = "camelCase")] +pub enum SmtpSecurity { + #[default] + Starttls, + Tls, +} + #[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] #[ts(export)] #[serde(rename_all = "camelCase")] pub struct SmtpValue { - #[arg(long, help = "help.arg.smtp-server")] - pub server: String, + #[arg(long, help = "help.arg.smtp-host")] + #[serde(alias = "server")] + pub host: String, #[arg(long, help = "help.arg.smtp-port")] pub port: u16, #[arg(long, help = "help.arg.smtp-from")] pub from: String, - #[arg(long, help = "help.arg.smtp-login")] - pub login: String, + #[arg(long, help = "help.arg.smtp-username")] + #[serde(alias = "login")] + pub username: String, #[arg(long, help = "help.arg.smtp-password")] pub password: Option, + #[arg(long, help = "help.arg.smtp-security")] + #[serde(default)] + pub security: SmtpSecurity, } pub async fn set_system_smtp(ctx: RpcContext, smtp: SmtpValue) -> Result<(), Error> { let smtp = Some(smtp); @@ -1088,51 +1170,89 @@ pub async fn clear_system_smtp(ctx: RpcContext) -> Result<(), Error> { } Ok(()) } + +#[derive(Debug, Clone, Deserialize, Serialize, Parser)] +pub struct SetIfconfigUrlParams { + #[arg(help = "help.arg.ifconfig-url")] + pub url: url::Url, +} + +pub async fn set_ifconfig_url( + ctx: RpcContext, + SetIfconfigUrlParams { url }: SetIfconfigUrlParams, +) -> Result<(), Error> { + ctx.db + .mutate(|db| { + db.as_public_mut() + .as_server_info_mut() + .as_ifconfig_url_mut() + .ser(&url) + }) + .await + .result +} + #[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Parser, TS)] #[ts(export)] #[serde(rename_all = "camelCase")] pub struct TestSmtpParams { - #[arg(long, help = "help.arg.smtp-server")] - pub server: String, + #[arg(long, help = "help.arg.smtp-host")] + pub host: String, #[arg(long, help = "help.arg.smtp-port")] pub port: 
u16, #[arg(long, help = "help.arg.smtp-from")] pub from: String, #[arg(long, help = "help.arg.smtp-to")] pub to: String, - #[arg(long, help = "help.arg.smtp-login")] - pub login: String, + #[arg(long, help = "help.arg.smtp-username")] + pub username: String, #[arg(long, help = "help.arg.smtp-password")] pub password: String, + #[arg(long, help = "help.arg.smtp-security")] + #[serde(default)] + pub security: SmtpSecurity, } pub async fn test_smtp( _: RpcContext, TestSmtpParams { - server, + host, port, from, to, - login, + username, password, + security, }: TestSmtpParams, ) -> Result<(), Error> { use lettre::message::header::ContentType; use lettre::transport::smtp::authentication::Credentials; + use lettre::transport::smtp::client::{Tls, TlsParameters}; use lettre::{AsyncSmtpTransport, AsyncTransport, Message, Tokio1Executor}; - AsyncSmtpTransport::::relay(&server)? - .port(port) - .credentials(Credentials::new(login, password)) - .build() - .send( - Message::builder() - .from(from.parse()?) - .to(to.parse()?) - .subject("StartOS Test Email") - .header(ContentType::TEXT_PLAIN) - .body("This is a test email sent from your StartOS Server".to_owned())?, - ) - .await?; + let creds = Credentials::new(username, password); + let message = Message::builder() + .from(from.parse()?) + .to(to.parse()?) + .subject("StartOS Test Email") + .header(ContentType::TEXT_PLAIN) + .body("This is a test email sent from your StartOS Server".to_owned())?; + + let transport = match security { + SmtpSecurity::Starttls => AsyncSmtpTransport::::relay(&host)? + .port(port) + .credentials(creds) + .build(), + SmtpSecurity::Tls => { + let tls = TlsParameters::new(host.clone())?; + AsyncSmtpTransport::::relay(&host)? 
+ .port(port) + .tls(Tls::Wrapper(tls)) + .credentials(creds) + .build() + } + }; + + transport.send(message).await?; Ok(()) } @@ -1211,6 +1331,7 @@ pub async fn set_keyboard(ctx: RpcContext, options: KeyboardOptions) -> Result<( } #[derive(Debug, Clone, Deserialize, Serialize, TS, Parser)] +#[ts(export)] #[serde(rename_all = "camelCase")] pub struct SetLanguageParams { #[arg(help = "help.arg.language-code")] @@ -1231,9 +1352,15 @@ pub async fn save_language(language: &str) -> Result<(), Error> { "/media/startos/config/overlay/usr/lib/locale/locale-archive", ) .await?; + let locale_content = format!("LANG={language}.UTF-8\n"); write_file_atomic( "/media/startos/config/overlay/etc/default/locale", - format!("LANG={language}.UTF-8\n").as_bytes(), + locale_content.as_bytes(), + ) + .await?; + write_file_atomic( + "/media/startos/config/overlay/etc/locale.conf", + locale_content.as_bytes(), ) .await?; Ok(()) diff --git a/core/src/tunnel/api.rs b/core/src/tunnel/api.rs index b8f5fd693..10c2f21c2 100644 --- a/core/src/tunnel/api.rs +++ b/core/src/tunnel/api.rs @@ -53,6 +53,24 @@ pub fn tunnel_api() -> ParentHandler { .with_call_remote::(), ), ) + .subcommand( + "update", + ParentHandler::::new() + .subcommand( + "check", + from_fn_async(super::update::check_update) + .with_display_serializable() + .with_about("about.check-for-updates") + .with_call_remote::(), + ) + .subcommand( + "apply", + from_fn_async(super::update::apply_update) + .with_display_serializable() + .with_about("about.apply-available-update") + .with_call_remote::(), + ), + ) } #[derive(Deserialize, Serialize, Parser)] @@ -414,14 +432,11 @@ pub async fn show_config( i.iter().find_map(|(_, info)| { info.ip_info .as_ref() - .filter(|_| info.public()) - .iter() - .find_map(|info| info.subnets.iter().next()) - .copied() + .and_then(|ip_info| ip_info.wan_ip) + .map(IpAddr::from) }) }) .or_not_found("a public IP address")? 
- .addr() }; Ok(client .client_config( @@ -459,7 +474,10 @@ pub async fn add_forward( }) .map(|s| s.prefix_len()) .unwrap_or(32); - let rc = ctx.forward.add_forward(source, target, prefix).await?; + let rc = ctx + .forward + .add_forward(source, target, prefix, None) + .await?; ctx.active_forwards.mutate(|m| { m.insert(source, rc); }); diff --git a/core/src/tunnel/context.rs b/core/src/tunnel/context.rs index 5afac62ab..ac56eaa36 100644 --- a/core/src/tunnel/context.rs +++ b/core/src/tunnel/context.rs @@ -199,7 +199,7 @@ impl TunnelContext { }) .map(|s| s.prefix_len()) .unwrap_or(32); - active_forwards.insert(from, forward.add_forward(from, to, prefix).await?); + active_forwards.insert(from, forward.add_forward(from, to, prefix, None).await?); } Ok(Self(Arc::new(TunnelContextSeed { diff --git a/core/src/tunnel/mod.rs b/core/src/tunnel/mod.rs index 778a3272a..5d69de7c0 100644 --- a/core/src/tunnel/mod.rs +++ b/core/src/tunnel/mod.rs @@ -9,6 +9,7 @@ pub mod api; pub mod auth; pub mod context; pub mod db; +pub mod update; pub mod web; pub mod wg; diff --git a/core/src/tunnel/update.rs b/core/src/tunnel/update.rs new file mode 100644 index 000000000..fe4a2b748 --- /dev/null +++ b/core/src/tunnel/update.rs @@ -0,0 +1,102 @@ +use std::process::Stdio; + +use rpc_toolkit::Empty; +use serde::{Deserialize, Serialize}; +use tokio::process::Command; +use tracing::instrument; +use ts_rs::TS; + +use crate::prelude::*; +use crate::tunnel::context::TunnelContext; +use crate::util::Invoke; + +#[derive(Deserialize, Serialize, TS)] +#[serde(rename_all = "camelCase")] +pub struct TunnelUpdateResult { + /// "up-to-date", "update-available", or "updating" + pub status: String, + /// Currently installed version + pub installed: String, + /// Available candidate version + pub candidate: String, +} + +#[instrument(skip_all)] +pub async fn check_update(_ctx: TunnelContext, _: Empty) -> Result { + Command::new("apt-get") + .arg("update") + .invoke(ErrorKind::UpdateFailed) + .await?; + + let 
policy_output = Command::new("apt-cache") + .arg("policy") + .arg("start-tunnel") + .invoke(ErrorKind::UpdateFailed) + .await?; + + let policy_str = String::from_utf8_lossy(&policy_output).to_string(); + let installed = parse_version_field(&policy_str, "Installed:"); + let candidate = parse_version_field(&policy_str, "Candidate:"); + + let status = if installed == candidate { + "up-to-date" + } else { + "update-available" + }; + + Ok(TunnelUpdateResult { + status: status.to_string(), + installed: installed.unwrap_or_default(), + candidate: candidate.unwrap_or_default(), + }) +} + +#[instrument(skip_all)] +pub async fn apply_update(_ctx: TunnelContext, _: Empty) -> Result { + let policy_output = Command::new("apt-cache") + .arg("policy") + .arg("start-tunnel") + .invoke(ErrorKind::UpdateFailed) + .await?; + + let policy_str = String::from_utf8_lossy(&policy_output).to_string(); + let installed = parse_version_field(&policy_str, "Installed:"); + let candidate = parse_version_field(&policy_str, "Candidate:"); + + // Spawn in a separate cgroup via systemd-run so the process survives + // when the postinst script restarts start-tunneld.service. + // After the install completes, reboot the system. + // Uses --reinstall so the update applies even when versions match. 
+ Command::new("systemd-run") + .arg("--scope") + .arg("--") + .arg("sh") + .arg("-c") + .arg("apt-get install --reinstall -y start-tunnel && reboot") + .env("DEBIAN_FRONTEND", "noninteractive") + .stdin(Stdio::null()) + .stdout(Stdio::null()) + .stderr(Stdio::null()) + .spawn() + .with_kind(ErrorKind::UpdateFailed)?; + + Ok(TunnelUpdateResult { + status: "updating".to_string(), + installed: installed.unwrap_or_default(), + candidate: candidate.unwrap_or_default(), + }) +} + +fn parse_version_field(policy: &str, field: &str) -> Option { + policy + .lines() + .find(|l| l.trim().starts_with(field)) + .and_then(|l| l.split_whitespace().nth(1)) + .filter(|v| *v != "(none)") + .map(|s| s.to_string()) +} + +#[test] +fn export_bindings_tunnel_update() { + TunnelUpdateResult::export_all_to("bindings/tunnel").unwrap(); +} diff --git a/core/src/tunnel/web.rs b/core/src/tunnel/web.rs index fc17869fc..598f05fa7 100644 --- a/core/src/tunnel/web.rs +++ b/core/src/tunnel/web.rs @@ -18,7 +18,7 @@ use tokio_rustls::rustls::server::ClientHello; use ts_rs::TS; use crate::context::CliContext; -use crate::hostname::Hostname; +use crate::hostname::ServerHostname; use crate::net::ssl::{SANInfo, root_ca_start_time}; use crate::net::tls::TlsHandler; use crate::net::web_server::Accept; @@ -292,7 +292,7 @@ pub async fn generate_certificate( let root_key = crate::net::ssl::gen_nistp256()?; let root_cert = crate::net::ssl::make_root_cert( &root_key, - &Hostname("start-tunnel".into()), + &ServerHostname::new("start-tunnel".into())?, root_ca_start_time().await, )?; let int_key = crate::net::ssl::gen_nistp256()?; @@ -527,23 +527,23 @@ pub async fn init_web(ctx: CliContext) -> Result<(), Error> { " 2. Paste the following command (**DO NOT** click Return): pbcopy < ~/Desktop/ca.crt\n", " 3. Copy your Root CA (including -----BEGIN CERTIFICATE----- and -----END CERTIFICATE-----)\n", " 4. Back in Terminal, click Return. ca.crt is saved to your Desktop\n", - " 5. 
Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/mac/ca.html\n", + " 5. Complete by trusting your Root CA: https://docs.start9.com/device-guides/mac/ca.html\n", " - Linux\n", " 1. Open gedit, nano, or any editor\n", " 2. Copy/paste your Root CA (including -----BEGIN CERTIFICATE----- and -----END CERTIFICATE-----)\n", " 3. Name the file ca.crt and save as plaintext\n", - " 5. Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/linux/ca.html\n", + " 4. Complete by trusting your Root CA: https://docs.start9.com/device-guides/linux/ca.html\n", " - Windows\n", " 1. Open the Notepad app\n", " 2. Copy/paste your Root CA (including -----BEGIN CERTIFICATE----- and -----END CERTIFICATE-----)\n", " 3. Name the file ca.crt and save as plaintext\n", - " 5. Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/windows/ca.html\n", + " 4. Complete by trusting your Root CA: https://docs.start9.com/device-guides/windows/ca.html\n", " - Android/Graphene\n", " 1. Send the ca.crt file (created above) to yourself\n", - " 2. Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/android/ca.html\n", + " 2. Complete by trusting your Root CA: https://docs.start9.com/device-guides/android/ca.html\n", " - iOS\n", " 1. Send the ca.crt file (created above) to yourself\n", - " 2. Complete by trusting your Root CA: https://staging.docs.start9.com/device-guides/ios/ca.html\n", + " 2. 
Complete by trusting your Root CA: https://docs.start9.com/device-guides/ios/ca.html\n", )); return Ok(()); diff --git a/core/src/util/actor/background.rs b/core/src/util/actor/background.rs index 87a535f87..46147ca5c 100644 --- a/core/src/util/actor/background.rs +++ b/core/src/util/actor/background.rs @@ -1,5 +1,6 @@ use futures::future::BoxFuture; -use futures::{Future, FutureExt}; +use futures::stream::FuturesUnordered; +use futures::{Future, FutureExt, StreamExt}; use tokio::sync::mpsc; #[derive(Clone)] @@ -11,7 +12,7 @@ impl BackgroundJobQueue { Self(send), BackgroundJobRunner { recv, - jobs: Vec::new(), + jobs: FuturesUnordered::new(), }, ) } @@ -27,7 +28,7 @@ impl BackgroundJobQueue { pub struct BackgroundJobRunner { recv: mpsc::UnboundedReceiver>, - jobs: Vec>, + jobs: FuturesUnordered>, } impl BackgroundJobRunner { pub fn is_empty(&self) -> bool { @@ -43,19 +44,7 @@ impl Future for BackgroundJobRunner { while let std::task::Poll::Ready(Some(job)) = self.recv.poll_recv(cx) { self.jobs.push(job); } - let complete = self - .jobs - .iter_mut() - .enumerate() - .filter_map(|(i, f)| match f.poll_unpin(cx) { - std::task::Poll::Pending => None, - std::task::Poll::Ready(_) => Some(i), - }) - .collect::>(); - for idx in complete.into_iter().rev() { - #[allow(clippy::let_underscore_future)] - let _ = self.jobs.swap_remove(idx); - } + while let std::task::Poll::Ready(Some(())) = self.jobs.poll_next_unpin(cx) {} if self.jobs.is_empty() && self.recv.is_closed() { std::task::Poll::Ready(()) } else { diff --git a/core/src/version/mod.rs b/core/src/version/mod.rs index 5c2a2a2f3..4c17bc32f 100644 --- a/core/src/version/mod.rs +++ b/core/src/version/mod.rs @@ -59,8 +59,9 @@ mod v0_4_0_alpha_16; mod v0_4_0_alpha_17; mod v0_4_0_alpha_18; mod v0_4_0_alpha_19; +mod v0_4_0_alpha_20; -pub type Current = v0_4_0_alpha_19::Version; // VERSION_BUMP +pub type Current = v0_4_0_alpha_20::Version; // VERSION_BUMP impl Current { #[instrument(skip(self, db))] @@ -89,7 +90,13 @@ impl 
Current { .await .result?; } - Ordering::Equal => (), + Ordering::Equal => { + db.apply_function(|db| { + Ok::<_, Error>((to_value(&from_value::(db.clone())?)?, ())) + }) + .await + .result?; + } } Ok(()) } @@ -181,7 +188,8 @@ enum Version { V0_4_0_alpha_16(Wrapper), V0_4_0_alpha_17(Wrapper), V0_4_0_alpha_18(Wrapper), - V0_4_0_alpha_19(Wrapper), // VERSION_BUMP + V0_4_0_alpha_19(Wrapper), + V0_4_0_alpha_20(Wrapper), // VERSION_BUMP Other(exver::Version), } @@ -243,7 +251,8 @@ impl Version { Self::V0_4_0_alpha_16(v) => DynVersion(Box::new(v.0)), Self::V0_4_0_alpha_17(v) => DynVersion(Box::new(v.0)), Self::V0_4_0_alpha_18(v) => DynVersion(Box::new(v.0)), - Self::V0_4_0_alpha_19(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP + Self::V0_4_0_alpha_19(v) => DynVersion(Box::new(v.0)), + Self::V0_4_0_alpha_20(v) => DynVersion(Box::new(v.0)), // VERSION_BUMP Self::Other(v) => { return Err(Error::new( eyre!("unknown version {v}"), @@ -297,7 +306,8 @@ impl Version { Version::V0_4_0_alpha_16(Wrapper(x)) => x.semver(), Version::V0_4_0_alpha_17(Wrapper(x)) => x.semver(), Version::V0_4_0_alpha_18(Wrapper(x)) => x.semver(), - Version::V0_4_0_alpha_19(Wrapper(x)) => x.semver(), // VERSION_BUMP + Version::V0_4_0_alpha_19(Wrapper(x)) => x.semver(), + Version::V0_4_0_alpha_20(Wrapper(x)) => x.semver(), // VERSION_BUMP Version::Other(x) => x.clone(), } } diff --git a/core/src/version/update_details/v0_4_0.md b/core/src/version/update_details/v0_4_0.md index e546704ee..e3881a5fd 100644 --- a/core/src/version/update_details/v0_4_0.md +++ b/core/src/version/update_details/v0_4_0.md @@ -10,13 +10,13 @@ A server is not a toy. It is a critical component of the computing paradigm, and Start9 is paving new ground with StartOS, trying to create what most developers and IT professionals thought impossible; namely, an OS and user experience that affords a normal person the same independent control over their data and communications as an experienced Linux sysadmin. 
-The difficulty of our endeavor requires making mistakes; and our integrity and dedication to excellence require that we correct them. This means a willingness to discard bad ideas and broken parts, and if absolutely necessary, to tear it all down and start over. That is exactly what we did with StartOS v0.2.0 in 2020. It is what we did with StartOS v0.3.0 in 2022. And we are doing it now with StartOS v0.4.0 in 2025. +The difficulty of our endeavor requires making mistakes; and our integrity and dedication to excellence require that we correct them. This means a willingness to discard bad ideas and broken parts, and if absolutely necessary, to tear it all down and start over. That is exactly what we did with StartOS v0.2.0 in 2020. It is what we did with StartOS v0.3.0 in 2022. And we are doing it now with StartOS v0.4.0 in 2026. v0.4.0 is a complete rewrite of StartOS, almost nothing survived. After nearly six years of building StartOS, we believe that we have finally arrived at the correct architecture and foundation that will allow us to deliver on the promise of sovereign computing. ## Changelog -### Improved User interface +### New User interface We re-wrote the StartOS UI to be more performant, more intuitive, and better looking on both mobile and desktop. Enjoy. @@ -28,6 +28,10 @@ StartOS v0.4.0 supports multiple languages and also makes it easy to add more la Neither Docker nor Podman offer the reliability and flexibility needed for StartOS. Instead, v0.4.0 uses a nested container paradigm based on LXC for the outer container and Linux namespaces for sub containers. This architecture naturally supports multi container setups. +### Hardware Acceleration + +Services can take advantage of (and require) the presence of certain hardware modules, such as Nvidia GPUs, for transcoding or inference purposes. 
For example, StartOS and Ollama can run natively on The Nvidia DGX Spark and take full advantage of the hardware/firmware stack to perform local inference against open source models. + ### New S9PK archive format The S9PK archive format has been overhauled to allow for signature verification of partial downloads, and allow direct mounting of container images without unpacking the s9pk. @@ -80,13 +84,13 @@ The new start-fs fuse module unifies file system expectations for various platfo StartOS now uses Extended Versioning (Exver), which consists of three parts: (1) a Semver-compliant upstream version, (2) a Semver-compliant wrapper version, and (3) an optional "flavor" prefix. Flavors can be thought of as alternative implementations of services, where a user would only want one or the other installed, and data can feasibly be migrating between the two. Another common characteristic of flavors is that they satisfy the same API requirement of dependents, though this is not strictly necessary. A valid Exver looks something like this: `#knots:29.0:1.0-beta.1`. This would translate to "the first beta release of StartOS wrapper version 1.0 of Bitcoin Knots version 29.0". -### ACME +### Let's Encrypt -StartOS now supports using ACME protocol to automatically obtain SSL/TLS certificates from widely trusted certificate authorities, such as Let's Encrypt, for your public domains. This means people visiting your public websites and APIs will not need to download and trust your server's Root CA. +StartOS now supports Let's Encrypt to automatically obtain SSL/TLS certificates for public domains. This means people visiting your public websites and APIs will not need to download and trust your server's Root CA. ### Gateways -Gateways connect your server to the Internet. They process outbound traffic, and under certain conditions, they also permit inbound traffic. For example, your router is a gateway. 
It is now possible add gateways to StartOS, such as StartTunnel, in order to more granularly control how your installed services are exposed to the Internet. +Gateways connect your server to the Internet, facilitating inbound and outbound traffic. Your router is a gateway. It is now possible to add Wireguard VPN gateways to your server to control how devices outside the LAN connect to your server and how your server connects out to the Internet. ### Static DNS Servers diff --git a/core/src/version/v0_3_6_alpha_0.rs b/core/src/version/v0_3_6_alpha_0.rs index c048c97b2..fbae2fc2f 100644 --- a/core/src/version/v0_3_6_alpha_0.rs +++ b/core/src/version/v0_3_6_alpha_0.rs @@ -1,4 +1,4 @@ -use std::collections::{BTreeMap, BTreeSet}; +use std::collections::BTreeMap; use std::ffi::OsStr; use std::path::Path; @@ -21,19 +21,16 @@ use crate::backup::target::cifs::CifsTargets; use crate::context::RpcContext; use crate::disk::mount::filesystem::cifs::Cifs; use crate::disk::mount::util::unmount; -use crate::hostname::Hostname; +use crate::hostname::{ServerHostname, ServerHostnameInfo}; use crate::net::forward::AvailablePorts; -use crate::net::host::Host; use crate::net::keys::KeyStore; -use crate::net::tor::{OnionAddress, TorSecretKey}; use crate::notifications::Notifications; use crate::prelude::*; use crate::s9pk::merkle_archive::source::multi_cursor_file::MultiCursorFile; use crate::ssh::{SshKeys, SshPubKey}; use crate::util::Invoke; -use crate::util::crypto::ed25519_expand_key; use crate::util::serde::Pem; -use crate::{DATA_DIR, HostId, Id, PACKAGE_DATA, PackageId, ReplayId}; +use crate::{DATA_DIR, PACKAGE_DATA, PackageId, ReplayId}; lazy_static::lazy_static! 
{ static ref V0_3_6_alpha_0: exver::Version = exver::Version::new( @@ -146,12 +143,7 @@ pub struct Version; impl VersionT for Version { type Previous = v0_3_5_2::Version; - type PreUpRes = ( - AccountInfo, - SshKeys, - CifsTargets, - BTreeMap>, - ); + type PreUpRes = (AccountInfo, SshKeys, CifsTargets); fn semver(self) -> exver::Version { V0_3_6_alpha_0.clone() } @@ -166,21 +158,15 @@ impl VersionT for Version { let cifs = previous_cifs(&pg).await?; - let tor_keys = previous_tor_keys(&pg).await?; - Command::new("systemctl") .arg("stop") .arg("postgresql@*.service") .invoke(crate::ErrorKind::Database) .await?; - Ok((account, ssh_keys, cifs, tor_keys)) + Ok((account, ssh_keys, cifs)) } - fn up( - self, - db: &mut Value, - (account, ssh_keys, cifs, tor_keys): Self::PreUpRes, - ) -> Result { + fn up(self, db: &mut Value, (account, ssh_keys, cifs): Self::PreUpRes) -> Result { let prev_package_data = db["package-data"].clone(); let wifi = json!({ @@ -242,11 +228,7 @@ impl VersionT for Version { "ui": db["ui"], }); - let mut keystore = KeyStore::new(&account)?; - for key in tor_keys.values().flat_map(|v| v.values()) { - assert!(key.is_valid()); - keystore.onion.insert(key.clone()); - } + let keystore = KeyStore::new(&account)?; let private = { let mut value = json!({}); @@ -350,20 +332,6 @@ impl VersionT for Version { false }; - let onions = input[&*id]["installed"]["interface-addresses"] - .as_object() - .into_iter() - .flatten() - .filter_map(|(id, addrs)| { - addrs["tor-address"].as_str().map(|addr| { - Ok(( - HostId::from(Id::try_from(id.clone())?), - addr.parse::()?, - )) - }) - }) - .collect::, Error>>()?; - if let Err(e) = async { let package_s9pk = tokio::fs::File::open(path).await?; let file = MultiCursorFile::open(&package_s9pk).await?; @@ -381,11 +349,8 @@ impl VersionT for Version { .await? 
.await?; - let to_sync = ctx - .db + ctx.db .mutate(|db| { - let mut to_sync = BTreeSet::new(); - let package = db .as_public_mut() .as_package_data_mut() @@ -396,29 +361,11 @@ impl VersionT for Version { .as_tasks_mut() .remove(&ReplayId::from("needs-config"))?; } - for (id, onion) in onions { - package - .as_hosts_mut() - .upsert(&id, || Ok(Host::new()))? - .as_onions_mut() - .mutate(|o| { - o.clear(); - o.insert(onion); - Ok(()) - })?; - to_sync.insert(id); - } - Ok(to_sync) + Ok(()) }) .await .result?; - if let Some(service) = &*ctx.services.get(&id).await { - for host_id in to_sync { - service.sync_host(host_id.clone()).await?; - } - } - Ok::<_, Error>(()) } .await @@ -481,42 +428,15 @@ async fn previous_account_info(pg: &sqlx::Pool) -> Result>, _>("tor_key") - .with_ctx(|_| (ErrorKind::Database, "tor_key"))? - { - <[u8; 64]>::try_from(bytes).map_err(|e| { - Error::new( - eyre!("expected vec of len 64, got len {}", e.len()), - ErrorKind::ParseDbField, - ) - })? - } else { - ed25519_expand_key( - &<[u8; 32]>::try_from( - account_query - .try_get::, _>("network_key") - .with_kind(ErrorKind::Database)?, - ) - .map_err(|e| { - Error::new( - eyre!("expected vec of len 32, got len {}", e.len()), - ErrorKind::ParseDbField, - ) - })?, - ) - }, - )?], server_id: account_query .try_get("server_id") .with_ctx(|_| (ErrorKind::Database, "server_id"))?, - hostname: Hostname( + hostname: ServerHostnameInfo::from_hostname(ServerHostname::new( account_query .try_get::("hostname") .with_ctx(|_| (ErrorKind::Database, "hostname"))? 
.into(), - ), + )?), root_ca_key: PKey::private_key_from_pem( &account_query .try_get::("root_ca_key_pem") @@ -578,69 +498,3 @@ async fn previous_ssh_keys(pg: &sqlx::Pool) -> Result, -) -> Result>, Error> { - let mut res = BTreeMap::>::new(); - let net_key_query = sqlx::query(r#"SELECT * FROM network_keys"#) - .fetch_all(pg) - .await - .with_kind(ErrorKind::Database)?; - - for row in net_key_query { - let package_id: PackageId = row - .try_get::("package") - .with_ctx(|_| (ErrorKind::Database, "network_keys::package"))? - .parse()?; - let interface_id: HostId = row - .try_get::("interface") - .with_ctx(|_| (ErrorKind::Database, "network_keys::interface"))? - .parse()?; - let key = TorSecretKey::from_bytes(ed25519_expand_key( - &<[u8; 32]>::try_from( - row.try_get::, _>("key") - .with_ctx(|_| (ErrorKind::Database, "network_keys::key"))?, - ) - .map_err(|e| { - Error::new( - eyre!("expected vec of len 32, got len {}", e.len()), - ErrorKind::ParseDbField, - ) - })?, - ))?; - res.entry(package_id).or_default().insert(interface_id, key); - } - - let tor_key_query = sqlx::query(r#"SELECT * FROM tor"#) - .fetch_all(pg) - .await - .with_kind(ErrorKind::Database)?; - - for row in tor_key_query { - let package_id: PackageId = row - .try_get::("package") - .with_ctx(|_| (ErrorKind::Database, "tor::package"))? - .parse()?; - let interface_id: HostId = row - .try_get::("interface") - .with_ctx(|_| (ErrorKind::Database, "tor::interface"))? 
- .parse()?; - let key = TorSecretKey::from_bytes( - <[u8; 64]>::try_from( - row.try_get::, _>("key") - .with_ctx(|_| (ErrorKind::Database, "tor::key"))?, - ) - .map_err(|e| { - Error::new( - eyre!("expected vec of len 64, got len {}", e.len()), - ErrorKind::ParseDbField, - ) - })?, - )?; - res.entry(package_id).or_default().insert(interface_id, key); - } - - Ok(res) -} diff --git a/core/src/version/v0_3_6_alpha_10.rs b/core/src/version/v0_3_6_alpha_10.rs index 08543d9e1..8cbf902a9 100644 --- a/core/src/version/v0_3_6_alpha_10.rs +++ b/core/src/version/v0_3_6_alpha_10.rs @@ -8,7 +8,6 @@ use super::v0_3_5::V0_3_0_COMPAT; use super::{VersionT, v0_3_6_alpha_9}; use crate::GatewayId; use crate::net::host::address::PublicDomainConfig; -use crate::net::tor::OnionAddress; use crate::prelude::*; lazy_static::lazy_static! { @@ -22,7 +21,7 @@ lazy_static::lazy_static! { #[serde(rename_all = "camelCase")] #[serde(tag = "kind")] enum HostAddress { - Onion { address: OnionAddress }, + Onion { address: String }, Domain { address: InternedString }, } diff --git a/core/src/version/v0_3_6_alpha_7.rs b/core/src/version/v0_3_6_alpha_7.rs index 358ddaa49..7829a9480 100644 --- a/core/src/version/v0_3_6_alpha_7.rs +++ b/core/src/version/v0_3_6_alpha_7.rs @@ -50,7 +50,10 @@ impl VersionT for Version { async fn post_up(self, ctx: &RpcContext, _input: Value) -> Result<(), Error> { Command::new("systemd-firstboot") .arg("--root=/media/startos/config/overlay/") - .arg(ctx.account.peek(|a| format!("--hostname={}", a.hostname.0))) + .arg( + ctx.account + .peek(|a| format!("--hostname={}", a.hostname.hostname.as_ref())), + ) .invoke(ErrorKind::ParseSysInfo) .await?; Ok(()) diff --git a/core/src/version/v0_4_0_alpha_12.rs b/core/src/version/v0_4_0_alpha_12.rs index fa7e5a189..d998945a2 100644 --- a/core/src/version/v0_4_0_alpha_12.rs +++ b/core/src/version/v0_4_0_alpha_12.rs @@ -1,11 +1,7 @@ -use std::collections::BTreeSet; - use exver::{PreReleaseSegment, VersionRange}; -use 
imbl_value::InternedString; use super::v0_3_5::V0_3_0_COMPAT; use super::{VersionT, v0_4_0_alpha_11}; -use crate::net::tor::TorSecretKey; use crate::prelude::*; lazy_static::lazy_static! { @@ -33,48 +29,6 @@ impl VersionT for Version { } #[instrument(skip_all)] fn up(self, db: &mut Value, _: Self::PreUpRes) -> Result { - let mut err = None; - let onion_store = db["private"]["keyStore"]["onion"] - .as_object_mut() - .or_not_found("private.keyStore.onion")?; - onion_store.retain(|o, v| match from_value::(v.clone()) { - Ok(k) => k.is_valid() && &InternedString::from_display(&k.onion_address()) == o, - Err(e) => { - err = Some(e); - true - } - }); - if let Some(e) = err { - return Err(e); - } - let allowed_addresses = onion_store.keys().cloned().collect::>(); - let fix_host = |host: &mut Value| { - Ok::<_, Error>( - host["onions"] - .as_array_mut() - .or_not_found("host.onions")? - .retain(|addr| { - addr.as_str() - .map(|s| allowed_addresses.contains(s)) - .unwrap_or(false) - }), - ) - }; - for (_, pde) in db["public"]["packageData"] - .as_object_mut() - .or_not_found("public.packageData")? - .iter_mut() - { - for (_, host) in pde["hosts"] - .as_object_mut() - .or_not_found("public.packageData[].hosts")? - .iter_mut() - { - fix_host(host)?; - } - } - fix_host(&mut db["public"]["serverInfo"]["network"]["host"])?; - if db["private"]["keyStore"]["localCerts"].is_null() { db["private"]["keyStore"]["localCerts"] = db["private"]["keyStore"]["local_certs"].clone(); diff --git a/core/src/version/v0_4_0_alpha_20.rs b/core/src/version/v0_4_0_alpha_20.rs new file mode 100644 index 000000000..62b454bb1 --- /dev/null +++ b/core/src/version/v0_4_0_alpha_20.rs @@ -0,0 +1,361 @@ +use std::path::Path; + +use exver::{PreReleaseSegment, VersionRange}; +use imbl_value::json; + +use super::v0_3_5::V0_3_0_COMPAT; +use super::{VersionT, v0_4_0_alpha_19}; +use crate::context::RpcContext; +use crate::prelude::*; + +lazy_static::lazy_static! 
{ + static ref V0_4_0_alpha_20: exver::Version = exver::Version::new( + [0, 4, 0], + [PreReleaseSegment::String("alpha".into()), 20.into()] + ); +} + +#[derive(Clone, Copy, Debug, Default)] +pub struct Version; + +impl VersionT for Version { + type Previous = v0_4_0_alpha_19::Version; + type PreUpRes = (); + + async fn pre_up(self) -> Result { + Ok(()) + } + fn semver(self) -> exver::Version { + V0_4_0_alpha_20.clone() + } + fn compat(self) -> &'static VersionRange { + &V0_3_0_COMPAT + } + #[instrument(skip_all)] + fn up(self, db: &mut Value, _: Self::PreUpRes) -> Result { + // Extract onion migration data before removing it + let onion_store = db + .get("private") + .and_then(|p| p.get("keyStore")) + .and_then(|k| k.get("onion")) + .cloned() + .unwrap_or(Value::Object(Default::default())); + + let mut addresses = imbl::Vector::::new(); + + // Extract OS host onion addresses + if let Some(onions) = db + .get("public") + .and_then(|p| p.get("serverInfo")) + .and_then(|s| s.get("network")) + .and_then(|n| n.get("host")) + .and_then(|h| h.get("onions")) + .and_then(|o| o.as_array()) + { + for onion in onions { + if let Some(hostname) = onion.as_str() { + let key = onion_store + .get(hostname) + .and_then(|v| v.as_str()) + .unwrap_or_default(); + addresses.push_back(json!({ + "hostname": hostname, + "packageId": "STARTOS", + "hostId": "STARTOS", + "key": key, + })); + } + } + } + + // Extract package host onion addresses + if let Some(packages) = db + .get("public") + .and_then(|p| p.get("packageData")) + .and_then(|p| p.as_object()) + { + for (package_id, package) in packages.iter() { + if let Some(hosts) = package.get("hosts").and_then(|h| h.as_object()) { + for (host_id, host) in hosts.iter() { + if let Some(onions) = host.get("onions").and_then(|o| o.as_array()) { + for onion in onions { + if let Some(hostname) = onion.as_str() { + let key = onion_store + .get(hostname) + .and_then(|v| v.as_str()) + .unwrap_or_default(); + addresses.push_back(json!({ + "hostname": 
hostname, + "packageId": &**package_id, + "hostId": &**host_id, + "key": key, + })); + } + } + } + } + } + } + } + + let migration_data = json!({ + "addresses": addresses, + }); + + // Remove onions and tor-related fields from server host + if let Some(host) = db + .get_mut("public") + .and_then(|p| p.get_mut("serverInfo")) + .and_then(|s| s.get_mut("network")) + .and_then(|n| n.get_mut("host")) + .and_then(|h| h.as_object_mut()) + { + host.remove("onions"); + } + + // Remove onions from all package hosts + if let Some(packages) = db + .get_mut("public") + .and_then(|p| p.get_mut("packageData")) + .and_then(|p| p.as_object_mut()) + { + for (_, package) in packages.iter_mut() { + if let Some(hosts) = package.get_mut("hosts").and_then(|h| h.as_object_mut()) { + for (_, host) in hosts.iter_mut() { + if let Some(host_obj) = host.as_object_mut() { + host_obj.remove("onions"); + } + } + } + } + } + + // Remove onion store from private keyStore + if let Some(key_store) = db + .get_mut("private") + .and_then(|p| p.get_mut("keyStore")) + .and_then(|k| k.as_object_mut()) + { + key_store.remove("onion"); + } + + // Migrate server host: remove hostnameInfo, add addresses to bindings, clean net + migrate_host( + db.get_mut("public") + .and_then(|p| p.get_mut("serverInfo")) + .and_then(|s| s.get_mut("network")) + .and_then(|n| n.get_mut("host")), + ); + + // Migrate all package hosts + if let Some(packages) = db + .get_mut("public") + .and_then(|p| p.get_mut("packageData")) + .and_then(|p| p.as_object_mut()) + { + for (_, package) in packages.iter_mut() { + if let Some(hosts) = package.get_mut("hosts").and_then(|h| h.as_object_mut()) { + for (_, host) in hosts.iter_mut() { + migrate_host(Some(host)); + } + } + } + } + + // Migrate availablePorts from IdPool format to BTreeMap + // Rebuild from actual assigned ports in all bindings + migrate_available_ports(db); + + // Migrate SMTP: rename server->host, login->username, add security field + migrate_smtp(db); + + // Delete ui.name 
(moved to serverInfo.name) + if let Some(ui) = db + .get_mut("public") + .and_then(|p| p.get_mut("ui")) + .and_then(|u| u.as_object_mut()) + { + ui.remove("name"); + } + + // Generate serverInfo.name from serverInfo.hostname + if let Some(hostname) = db + .get("public") + .and_then(|p| p.get("serverInfo")) + .and_then(|s| s.get("hostname")) + .and_then(|h| h.as_str()) + .map(|s| s.to_owned()) + { + let name = denormalize_hostname(&hostname); + if let Some(server_info) = db + .get_mut("public") + .and_then(|p| p.get_mut("serverInfo")) + .and_then(|s| s.as_object_mut()) + { + server_info.insert("name".into(), Value::String(name.into())); + } + } + + Ok(migration_data) + } + + #[instrument(skip_all)] + async fn post_up(self, _ctx: &RpcContext, input: Value) -> Result<(), Error> { + let path = Path::new( + "/media/startos/data/package-data/volumes/tor/data/startos/onion-migration.json", + ); + + let json = serde_json::to_string(&input).with_kind(ErrorKind::Serialization)?; + + crate::util::io::write_file_atomic(path, json).await?; + + Ok(()) + } + fn down(self, _db: &mut Value) -> Result<(), Error> { + Ok(()) + } +} + +fn collect_ports_from_host(host: Option<&Value>, ports: &mut Value) { + let Some(bindings) = host + .and_then(|h| h.get("bindings")) + .and_then(|b| b.as_object()) + else { + return; + }; + for (_, binding) in bindings.iter() { + if let Some(net) = binding.get("net") { + if let Some(port) = net.get("assignedPort").and_then(|p| p.as_u64()) { + if let Some(obj) = ports.as_object_mut() { + obj.insert(port.to_string().into(), Value::from(false)); + } + } + if let Some(port) = net.get("assignedSslPort").and_then(|p| p.as_u64()) { + if let Some(obj) = ports.as_object_mut() { + obj.insert(port.to_string().into(), Value::from(true)); + } + } + } + } +} + +fn migrate_available_ports(db: &mut Value) { + let mut new_ports: Value = serde_json::json!({}).into(); + + // Collect from server host + let server_host = db + .get("public") + .and_then(|p| 
p.get("serverInfo")) + .and_then(|s| s.get("network")) + .and_then(|n| n.get("host")) + .cloned(); + collect_ports_from_host(server_host.as_ref(), &mut new_ports); + + // Collect from all package hosts + if let Some(packages) = db + .get("public") + .and_then(|p| p.get("packageData")) + .and_then(|p| p.as_object()) + { + for (_, package) in packages.iter() { + if let Some(hosts) = package.get("hosts").and_then(|h| h.as_object()) { + for (_, host) in hosts.iter() { + collect_ports_from_host(Some(host), &mut new_ports); + } + } + } + } + + // Replace private.availablePorts + if let Some(private) = db.get_mut("private").and_then(|p| p.as_object_mut()) { + private.insert("availablePorts".into(), new_ports); + } +} + +fn migrate_smtp(db: &mut Value) { + if let Some(smtp) = db + .get_mut("public") + .and_then(|p| p.get_mut("serverInfo")) + .and_then(|s| s.get_mut("smtp")) + .and_then(|s| s.as_object_mut()) + { + if let Some(server) = smtp.remove("server") { + smtp.insert("host".into(), server); + } + if let Some(login) = smtp.remove("login") { + smtp.insert("username".into(), login); + } + if !smtp.contains_key("security") { + smtp.insert("security".into(), json!("starttls")); + } + } +} + +fn denormalize_hostname(s: &str) -> String { + let mut cap = true; + s.chars() + .map(|c| { + if c == '-' { + cap = true; + ' ' + } else if cap { + cap = false; + c.to_ascii_uppercase() + } else { + c + } + }) + .collect() +} + +fn migrate_host(host: Option<&mut Value>) { + let Some(host) = host.and_then(|h| h.as_object_mut()) else { + return; + }; + + // Remove hostnameInfo from host + host.remove("hostnameInfo"); + + // Migrate privateDomains from array to object (BTreeSet -> BTreeMap<_, BTreeSet>) + if let Some(private_domains) = host + .get("privateDomains") + .and_then(|v| v.as_array()) + .cloned() + { + let mut new_pd: Value = serde_json::json!({}).into(); + for domain in private_domains { + if let Some(d) = domain.as_str() { + if let Some(obj) = new_pd.as_object_mut() { + 
obj.insert(d.into(), serde_json::json!([]).into()); + } + } + } + host.insert("privateDomains".into(), new_pd); + } + + // For each binding: add "addresses" field, remove gateway-level fields from "net" + if let Some(bindings) = host.get_mut("bindings").and_then(|b| b.as_object_mut()) { + for (_, binding) in bindings.iter_mut() { + if let Some(binding_obj) = binding.as_object_mut() { + // Add addresses if not present + if !binding_obj.contains_key("addresses") { + binding_obj.insert( + "addresses".into(), + serde_json::json!({ + "enabled": [], + "disabled": [], + "available": [] + }) + .into(), + ); + } + + // Remove gateway-level privateDisabled/publicEnabled from net + if let Some(net) = binding_obj.get_mut("net").and_then(|n| n.as_object_mut()) { + net.remove("privateDisabled"); + net.remove("publicEnabled"); + } + } + } + } +} diff --git a/debian/dpkg-build.sh b/debian/dpkg-build.sh index c8d1351d6..5e69fef49 100755 --- a/debian/dpkg-build.sh +++ b/debian/dpkg-build.sh @@ -7,10 +7,12 @@ cd "$(dirname "${BASH_SOURCE[0]}")/.." 
PROJECT=${PROJECT:-"startos"} BASENAME=${BASENAME:-"$(./build/env/basename.sh)"} VERSION=${VERSION:-$(cat ./build/env/VERSION.txt)} -if [ "$PLATFORM" = "x86_64" ] || [ "$PLATFORM" = "x86_64-nonfree" ]; then +if [ "$PLATFORM" = "x86_64" ] || [ "$PLATFORM" = "x86_64-nonfree" ] || [ "$PLATFORM" = "x86_64-nvidia" ]; then DEB_ARCH=amd64 -elif [ "$PLATFORM" = "aarch64" ] || [ "$PLATFORM" = "aarch64-nonfree" ] || [ "$PLATFORM" = "raspberrypi" ]; then +elif [ "$PLATFORM" = "aarch64" ] || [ "$PLATFORM" = "aarch64-nonfree" ] || [ "$PLATFORM" = "aarch64-nvidia" ] || [ "$PLATFORM" = "raspberrypi" ] || [ "$PLATFORM" = "rockchip64" ]; then DEB_ARCH=arm64 +elif [ "$PLATFORM" = "riscv64" ] || [ "$PLATFORM" = "riscv64-nonfree" ]; then + DEB_ARCH=riscv64 else DEB_ARCH="$PLATFORM" fi @@ -23,7 +25,7 @@ if [ "${PROJECT}" = "startos" ]; then else INSTALL_TARGET="install-${PROJECT#start-}" fi -make "${INSTALL_TARGET}" DESTDIR=dpkg-workdir/$BASENAME +make "${INSTALL_TARGET}" DESTDIR=dpkg-workdir/$BASENAME REMOTE= if [ -f dpkg-workdir/$BASENAME/usr/lib/$PROJECT/depends ]; then if [ -n "$DEPENDS" ]; then diff --git a/docs/TODO.md b/docs/TODO.md new file mode 100644 index 000000000..71465955f --- /dev/null +++ b/docs/TODO.md @@ -0,0 +1,40 @@ +# AI Agent TODOs + +Pending tasks for AI agents. Remove items when completed. + +## Features + +- [ ] Extract TS-exported types into a lightweight sub-crate for fast binding generation + + **Problem**: `make ts-bindings` compiles the entire `start-os` crate (with all dependencies: tokio, + axum, openssl, etc.) just to run test functions that serialize type definitions to `.ts` files. + Even in debug mode, this takes minutes. The generated output is pure type info — no runtime code + is needed. + + **Goal**: Generate TS bindings in seconds by isolating exported types in a small crate with minimal + dependencies. + + **Approach**: Create a `core/bindings-types/` sub-crate containing (or re-exporting) all 168 + `#[ts(export)]` types. 
This crate depends only on `serde`, `ts-rs`, `exver`, and other type-only + crates — not on tokio, axum, openssl, etc. Then `build-ts.sh` runs `cargo test -p bindings-types` + instead of `cargo test -p start-os`. + + **Challenge**: The exported types are scattered across `core/src/` and reference each other and + other crate types. Extracting them requires either moving the type definitions into the sub-crate + (and importing them back into `start-os`) or restructuring to share a common types crate. + +- [ ] Auto-configure port forwards via UPnP/NAT-PMP/PCP - @dr-bonez + + **Goal**: When a binding is marked public, automatically configure port forwards on the user's router + using UPnP, NAT-PMP, or PCP, instead of requiring manual router configuration. Fall back to + displaying manual instructions (the port forward mapping from patch-db) when auto-configuration is + unavailable or fails. + +- [ ] Use TLS-ALPN challenges for check-port when addSsl - @dr-bonez + + **Problem**: The `check_port` RPC in `core/src/net/gateway.rs` currently uses an external HTTP + service (`ifconfig_url`) to verify port reachability. This doesn't check whether the port is forwarded to the right place, just that it's open. there's nothing we can do about this if it's a raw forward, but if it goes through the ssl proxy we can do a better verification. + + **Goal**: When a binding has `addSsl` enabled, use TLS-ALPN-01 challenges to verify port + reachability instead of (or in addition to) the plain TCP check. This more accurately validates + that the SSL port is properly configured and reachable. 
diff --git a/agents/VERSION_BUMP.md b/docs/VERSION_BUMP.md similarity index 100% rename from agents/VERSION_BUMP.md rename to docs/VERSION_BUMP.md diff --git a/agents/exver.md b/docs/exver.md similarity index 100% rename from agents/exver.md rename to docs/exver.md diff --git a/sdk/CLAUDE.md b/sdk/CLAUDE.md new file mode 100644 index 000000000..d03111f86 --- /dev/null +++ b/sdk/CLAUDE.md @@ -0,0 +1,8 @@ +# SDK — TypeScript Service Packaging + +TypeScript SDK for packaging services for StartOS (`@start9labs/start-sdk`). + +## Structure + +- `base/` — Core types, ABI definitions, effects interface (`@start9labs/start-sdk-base`) +- `package/` — Full SDK for package developers, re-exports base diff --git a/sdk/Makefile b/sdk/Makefile index fc065466e..9370ab372 100644 --- a/sdk/Makefile +++ b/sdk/Makefile @@ -1,12 +1,12 @@ -PACKAGE_TS_FILES := $(shell git ls-files package/lib) package/lib/test/output.ts -BASE_TS_FILES := $(shell git ls-files base/lib) package/lib/test/output.ts +PACKAGE_TS_FILES := $(shell git ls-files package/lib) +BASE_TS_FILES := $(shell git ls-files base/lib) version = $(shell git tag --sort=committerdate | tail -1) .PHONY: test base/test package/test clean bundle fmt buildOutput check all: bundle -package/test: $(PACKAGE_TS_FILES) package/lib/test/output.ts package/node_modules base/node_modules +package/test: $(PACKAGE_TS_FILES) package/node_modules base/node_modules cd package && npm test base/test: $(BASE_TS_FILES) base/node_modules @@ -21,25 +21,39 @@ clean: rm -f package/lib/test/output.ts rm -rf package/node_modules -package/lib/test/output.ts: package/node_modules package/lib/test/makeOutput.ts package/scripts/oldSpecToBuilder.ts - cd package && npm run buildOutput - bundle: baseDist dist | test fmt touch dist base/lib/exver/exver.ts: base/node_modules base/lib/exver/exver.pegjs cd base && npm run peggy -baseDist: $(PACKAGE_TS_FILES) $(BASE_TS_FILES) base/package.json base/node_modules base/README.md base/LICENSE +baseDist: 
$(PACKAGE_TS_FILES) $(BASE_TS_FILES) base/package.json base/node_modules base/README.md base/LICENSE (cd base && npm run tsc) + # Copy hand-written .js/.d.ts pairs (no corresponding .ts source) into the output. + cd base/lib && find . -name '*.js' | while read f; do \ + base="$${f%.js}"; \ + if [ -f "$$base.d.ts" ] && [ ! -f "$$base.ts" ]; then \ + mkdir -p "../../baseDist/$$(dirname "$$f")"; \ + cp "$$f" "../../baseDist/$$f"; \ + cp "$$base.d.ts" "../../baseDist/$$base.d.ts"; \ + fi; \ + done rsync -ac base/node_modules baseDist/ cp base/package.json baseDist/package.json cp base/README.md baseDist/README.md cp base/LICENSE baseDist/LICENSE touch baseDist -dist: $(PACKAGE_TS_FILES) $(BASE_TS_FILES) package/package.json package/.npmignore package/node_modules package/README.md package/LICENSE +dist: $(PACKAGE_TS_FILES) $(BASE_TS_FILES) package/package.json package/.npmignore package/node_modules package/README.md package/LICENSE (cd package && npm run tsc) + cd base/lib && find . -name '*.js' | while read f; do \ + base="$${f%.js}"; \ + if [ -f "$$base.d.ts" ] && [ ! 
-f "$$base.ts" ]; then \ + mkdir -p "../../dist/base/lib/$$(dirname "$$f")"; \ + cp "$$f" "../../dist/base/lib/$$f"; \ + cp "$$base.d.ts" "../../dist/base/lib/$$base.d.ts"; \ + fi; \ + done rsync -ac package/node_modules dist/ cp package/.npmignore dist/.npmignore cp package/package.json dist/package.json @@ -73,7 +87,7 @@ base/node_modules: base/package-lock.json node_modules: package/node_modules base/node_modules publish: bundle package/package.json package/README.md package/LICENSE - cd dist && npm publish --access=public + cd dist && npm publish --access=public --tag=latest link: bundle cd dist && npm link diff --git a/sdk/base/lib/Effects.ts b/sdk/base/lib/Effects.ts index b27c14728..d3d0b8923 100644 --- a/sdk/base/lib/Effects.ts +++ b/sdk/base/lib/Effects.ts @@ -16,6 +16,7 @@ import { MountParams, StatusInfo, Manifest, + HostnameInfo, } from './osBindings' import { PackageId, @@ -23,6 +24,7 @@ import { ServiceInterfaceId, SmtpValue, ActionResult, + PluginHostnameInfo, } from './types' /** Used to reach out from the pure js runtime */ @@ -133,6 +135,8 @@ export type Effects = { }): Promise /** Returns the IP address of StartOS */ getOsIp(): Promise + /** Returns the effective outbound gateway for this service */ + getOutboundGateway(options: { callback?: () => void }): Promise // interface /** Creates an interface bound to a specific host and port to show to the user */ exportServiceInterface(options: ExportServiceInterfaceParams): Promise @@ -151,6 +155,18 @@ export type Effects = { clearServiceInterfaces(options: { except: ServiceInterfaceId[] }): Promise + + plugin: { + url: { + register(options: { tableAction: ActionId }): Promise + exportUrl(options: { + hostnameInfo: PluginHostnameInfo + removeAction: ActionId | null + overflowActions: ActionId[] + }): Promise + clearUrls(options: { except: PluginHostnameInfo[] }): Promise + } + } // ssl /** Returns a PEM encoded fullchain for the hostnames specified */ getSslCertificate: (options: { diff --git 
a/sdk/base/lib/actions/input/builder/inputSpec.ts b/sdk/base/lib/actions/input/builder/inputSpec.ts index 986d898f1..3050a19ac 100644 --- a/sdk/base/lib/actions/input/builder/inputSpec.ts +++ b/sdk/base/lib/actions/input/builder/inputSpec.ts @@ -2,21 +2,84 @@ import { ValueSpec } from '../inputSpecTypes' import { Value } from './value' import { _ } from '../../../util' import { Effects } from '../../../Effects' -import { Parser, object } from 'ts-matches' +import { z } from 'zod' +import { zodDeepPartial } from 'zod-deep-partial' import { DeepPartial } from '../../../types' +import { InputSpecTools, createInputSpecTools } from './inputSpecTools' -export type LazyBuildOptions = { +/** Options passed to a lazy builder function when resolving dynamic form field values. */ +export type LazyBuildOptions = { + /** The effects interface for runtime operations (e.g. reading files, querying state). */ effects: Effects + /** Previously saved form data to pre-fill the form with, or `null` for fresh creation. */ + prefill: DeepPartial | null } -export type LazyBuild = ( - options: LazyBuildOptions, +/** + * A function that lazily produces a value, potentially using effects and prefill data. + * Used by `dynamic*` variants of {@link Value} to compute form field options at runtime. + */ +export type LazyBuild = ( + options: LazyBuildOptions, ) => Promise | ExpectedOut +/** + * Defines which keys to keep when filtering an InputSpec. + * Use `true` to keep a field as-is, or a nested object to filter sub-fields of an object-typed field. + */ +export type FilterKeys = { + [K in keyof F]?: F[K] extends Record + ? boolean | FilterKeys + : boolean +} + +type RetainKey = { + [K in keyof T]: K extends keyof F + ? F[K] extends false + ? never + : K + : Default extends true + ? K + : never +}[keyof T] + +/** + * Computes the resulting type after applying a {@link FilterKeys} shape to a type. + */ +export type ApplyFilter = { + [K in RetainKey]: K extends keyof F + ? true extends F[K] + ? 
F[K] extends true + ? T[K] + : T[K] | undefined + : T[K] extends Record + ? F[K] extends FilterKeys + ? ApplyFilter + : undefined + : undefined + : Default extends true + ? T[K] + : undefined +} + +/** + * Computes the union of all valid key-path tuples through a nested type. + * Each tuple represents a path from root to a field, recursing into object-typed sub-fields. + */ +export type KeyPaths = { + [K in keyof T & string]: T[K] extends any[] + ? [K] + : T[K] extends Record + ? [K] | [K, ...KeyPaths] + : [K] +}[keyof T & string] + +/** Extracts the runtime type from an {@link InputSpec}. */ // prettier-ignore -export type ExtractInputSpecType, any>> = +export type ExtractInputSpecType, any>> = A extends InputSpec ? B : never +/** Extracts the static validation type from an {@link InputSpec}. */ export type ExtractInputSpecStaticValidatedAs< A extends InputSpec>, > = A extends InputSpec ? B : never @@ -25,11 +88,13 @@ export type ExtractInputSpecStaticValidatedAs< // A extends Record | InputSpec>, // > = A extends InputSpec ? DeepPartial : DeepPartial +/** Maps an object type to a record of {@link Value} entries for use with `InputSpec.of`. */ export type InputSpecOf> = { [K in keyof A]: Value } -export type MaybeLazyValues = LazyBuild | A +/** A value that is either directly provided or lazily computed via a {@link LazyBuild} function. */ +export type MaybeLazyValues = LazyBuild | A /** * InputSpecs are the specs that are used by the os input specification form for this service. 
* Here is an example of a simple input specification @@ -94,21 +159,28 @@ export class InputSpec< private readonly spec: { [K in keyof Type]: Value }, - public readonly validator: Parser, + public readonly validator: z.ZodType, ) {} public _TYPE: Type = null as any as Type public _PARTIAL: DeepPartial = null as any as DeepPartial - async build(options: LazyBuildOptions): Promise<{ + public readonly partialValidator: z.ZodType> = + zodDeepPartial(this.validator) as any + /** + * Builds the runtime form specification and combined Zod validator from this InputSpec's fields. + * + * @returns An object containing the resolved `spec` (field specs keyed by name) and a combined `validator` + */ + async build(options: LazyBuildOptions): Promise<{ spec: { [K in keyof Type]: ValueSpec } - validator: Parser + validator: z.ZodType }> { const answer = {} as { [K in keyof Type]: ValueSpec } const validator = {} as { - [K in keyof Type]: Parser + [K in keyof Type]: z.ZodType } for (const k in this.spec) { const built = await this.spec[k].build(options as any) @@ -117,22 +189,311 @@ export class InputSpec< } return { spec: answer, - validator: object(validator) as any, + validator: z.object(validator) as any, } } + /** + * Adds multiple fields to this spec at once, returning a new `InputSpec` with extended types. + * + * @param build - A record of {@link Value} entries, or a function receiving typed tools that returns one + */ + add>>( + build: AddSpec | ((tools: InputSpecTools) => AddSpec), + ): InputSpec< + Type & { + [K in keyof AddSpec]: AddSpec[K] extends Value + ? T + : never + }, + StaticValidatedAs & { + [K in keyof AddSpec]: AddSpec[K] extends Value + ? S + : never + } + > { + const addedValues = + build instanceof Function ? 
build(createInputSpecTools()) : build + const newSpec = { ...this.spec, ...addedValues } as any + const newValidator = z.object( + Object.fromEntries( + Object.entries(newSpec).map(([k, v]) => [ + k, + (v as Value).validator, + ]), + ), + ) + return new InputSpec(newSpec, newValidator as any) + } + + /** + * Returns a new InputSpec containing only the specified keys. + * Use `true` to keep a field as-is, or a nested object to filter sub-fields of object-typed fields. + * + * @example + * ```ts + * const full = InputSpec.of({ + * name: Value.text({ name: 'Name', required: true, default: null }), + * settings: Value.object({ name: 'Settings' }, InputSpec.of({ + * debug: Value.toggle({ name: 'Debug', default: false }), + * port: Value.number({ name: 'Port', required: true, default: 8080, integer: true }), + * })), + * }) + * const filtered = full.filter({ name: true, settings: { debug: true } }) + * ``` + */ + filter, Default extends boolean = false>( + keys: F, + keepByDefault?: Default, + ): InputSpec< + ApplyFilter & ApplyFilter, + ApplyFilter + > { + const newSpec: Record> = {} + for (const k of Object.keys(this.spec)) { + const filterVal = (keys as any)[k] + const value = (this.spec as any)[k] as Value | undefined + if (!value) continue + if (filterVal === true) { + newSpec[k] = value + } else if (typeof filterVal === 'object' && filterVal !== null) { + const objectMeta = value._objectSpec + if (objectMeta) { + const filteredInner = objectMeta.inputSpec.filter( + filterVal, + keepByDefault, + ) + newSpec[k] = Value.object(objectMeta.params, filteredInner) + } else { + newSpec[k] = value + } + } else if (keepByDefault && filterVal !== false) { + newSpec[k] = value + } + } + const newValidator = z.object( + Object.fromEntries( + Object.entries(newSpec).map(([k, v]) => [k, v.validator]), + ), + ) + return new InputSpec(newSpec as any, newValidator as any) as any + } + + /** + * Returns a new InputSpec with the specified keys disabled. 
+ * Use `true` to disable a field, or a nested object to disable sub-fields of object-typed fields. + * All fields remain in the spec — disabled fields simply cannot be edited by the user. + * + * @param keys - Which fields to disable, using the same shape as {@link FilterKeys} + * @param message - The reason the fields are disabled, displayed to the user + * + * @example + * ```ts + * const spec = InputSpec.of({ + * name: Value.text({ name: 'Name', required: true, default: null }), + * settings: Value.object({ name: 'Settings' }, InputSpec.of({ + * debug: Value.toggle({ name: 'Debug', default: false }), + * port: Value.number({ name: 'Port', required: true, default: 8080, integer: true }), + * })), + * }) + * const disabled = spec.disable({ name: true, settings: { debug: true } }, 'Managed by the system') + * ``` + */ + disable( + keys: FilterKeys, + message: string, + ): InputSpec { + const newSpec: Record> = {} + for (const k in this.spec) { + const filterVal = (keys as any)[k] + const value = (this.spec as any)[k] as Value + if (!filterVal) { + newSpec[k] = value + } else if (filterVal === true) { + newSpec[k] = value.withDisabled(message) + } else if (typeof filterVal === 'object' && filterVal !== null) { + const objectMeta = value._objectSpec + if (objectMeta) { + const disabledInner = objectMeta.inputSpec.disable(filterVal, message) + newSpec[k] = Value.object(objectMeta.params, disabledInner) + } else { + newSpec[k] = value.withDisabled(message) + } + } + } + const newValidator = z.object( + Object.fromEntries( + Object.entries(newSpec).map(([k, v]) => [k, v.validator]), + ), + ) + return new InputSpec(newSpec as any, newValidator as any) as any + } + + /** + * Resolves a key path to its corresponding display name path. + * Each key is mapped to the `name` property of its built {@link ValueSpec}. + * Recurses into `Value.object` sub-specs for nested paths. + * + * @param path - Typed tuple of field keys (e.g. 
`["settings", "debug"]`) + * @param options - Build options providing effects and prefill data + * @returns Array of display names (e.g. `["Settings", "Debug"]`) + */ + async namePath( + path: KeyPaths, + options: LazyBuildOptions, + ): Promise { + if (path.length === 0) return [] + const [key, ...rest] = path as [string, ...string[]] + const value = (this.spec as any)[key] as Value | undefined + if (!value) return [] + const built = await value.build(options as any) + const name = + 'name' in built.spec ? (built.spec as { name: string }).name : key + if (rest.length === 0) return [name] + const objectMeta = value._objectSpec + if (objectMeta) { + const innerNames = await objectMeta.inputSpec.namePath( + rest as any, + options, + ) + return [name, ...innerNames] + } + return [name] + } + + /** + * Resolves a key path to the description of the target field. + * Recurses into `Value.object` sub-specs for nested paths. + * + * @param path - Typed tuple of field keys (e.g. `["settings", "debug"]`) + * @param options - Build options providing effects and prefill data + * @returns The description string, or `null` if the field has no description or was not found + */ + async description( + path: KeyPaths, + options: LazyBuildOptions, + ): Promise { + if (path.length === 0) return null + const [key, ...rest] = path as [string, ...string[]] + const value = (this.spec as any)[key] as Value | undefined + if (!value) return null + if (rest.length === 0) { + const built = await value.build(options as any) + return 'description' in built.spec + ? (built.spec as { description: string | null }).description + : null + } + const objectMeta = value._objectSpec + if (objectMeta) { + return objectMeta.inputSpec.description(rest as any, options) + } + return null + } + + /** + * Returns a new InputSpec filtered to only include keys present in the given partial object. + * For nested `Value.object` fields, recurses into the partial value to filter sub-fields. 
+ * + * @param partial - A deep-partial object whose defined keys determine which fields to keep + */ + filterFromPartial( + partial: DeepPartial, + ): InputSpec< + DeepPartial & DeepPartial, + DeepPartial + > { + const newSpec: Record> = {} + for (const k of Object.keys(partial)) { + const value = (this.spec as any)[k] as Value | undefined + if (!value) continue + const objectMeta = value._objectSpec + if (objectMeta) { + const partialVal = (partial as any)[k] + if (typeof partialVal === 'object' && partialVal !== null) { + const filteredInner = + objectMeta.inputSpec.filterFromPartial(partialVal) + newSpec[k] = Value.object(objectMeta.params, filteredInner) + continue + } + } + newSpec[k] = value + } + const newValidator = z.object( + Object.fromEntries( + Object.entries(newSpec).map(([k, v]) => [k, v.validator]), + ), + ) + return new InputSpec(newSpec as any, newValidator as any) as any + } + + /** + * Returns a new InputSpec with fields disabled based on which keys are present in the given partial object. + * For nested `Value.object` fields, recurses into the partial value to disable sub-fields. + * All fields remain in the spec — disabled fields simply cannot be edited by the user. 
+ * + * @param partial - A deep-partial object whose defined keys determine which fields to disable + * @param message - The reason the fields are disabled, displayed to the user + */ + disableFromPartial( + partial: DeepPartial, + message: string, + ): InputSpec { + const newSpec: Record> = {} + for (const k in this.spec) { + const value = (this.spec as any)[k] as Value + if (!(k in (partial as any))) { + newSpec[k] = value + continue + } + const objectMeta = value._objectSpec + if (objectMeta) { + const partialVal = (partial as any)[k] + if (typeof partialVal === 'object' && partialVal !== null) { + const disabledInner = objectMeta.inputSpec.disableFromPartial( + partialVal, + message, + ) + newSpec[k] = Value.object(objectMeta.params, disabledInner) + continue + } + } + newSpec[k] = value.withDisabled(message) + } + const newValidator = z.object( + Object.fromEntries( + Object.entries(newSpec).map(([k, v]) => [k, v.validator]), + ), + ) + return new InputSpec(newSpec as any, newValidator as any) as any + } + + /** + * Creates an `InputSpec` from a plain record of {@link Value} entries. + * + * @example + * ```ts + * const spec = InputSpec.of({ + * username: Value.text({ name: 'Username', required: true, default: null }), + * verbose: Value.toggle({ name: 'Verbose Logging', default: false }), + * }) + * ``` + */ static of>>(spec: Spec) { - const validator = object( + const validator = z.object( Object.fromEntries( Object.entries(spec).map(([k, v]) => [k, v.validator]), ), ) return new InputSpec< { - [K in keyof Spec]: Spec[K] extends Value ? T : never + [K in keyof Spec]: Spec[K] extends Value + ? T + : never }, { - [K in keyof Spec]: Spec[K] extends Value ? T : never + [K in keyof Spec]: Spec[K] extends Value + ? 
T + : never } >(spec, validator as any) } diff --git a/sdk/base/lib/actions/input/builder/inputSpecTools.ts b/sdk/base/lib/actions/input/builder/inputSpecTools.ts new file mode 100644 index 000000000..04dc26a28 --- /dev/null +++ b/sdk/base/lib/actions/input/builder/inputSpecTools.ts @@ -0,0 +1,274 @@ +import { InputSpec, LazyBuild } from './inputSpec' +import { AsRequired, FileInfo, Value } from './value' +import { List } from './list' +import { UnionRes, UnionResStaticValidatedAs, Variants } from './variants' +import { + Pattern, + RandomString, + ValueSpecDatetime, + ValueSpecText, +} from '../inputSpecTypes' +import { DefaultString } from '../inputSpecTypes' +import { z } from 'zod' +import { ListValueSpecText } from '../inputSpecTypes' + +export interface InputSpecTools { + Value: BoundValue + Variants: typeof Variants + InputSpec: typeof InputSpec + List: BoundList +} + +export interface BoundValue { + // Static (non-dynamic) methods — no OuterType involved + toggle: typeof Value.toggle + text: typeof Value.text + textarea: typeof Value.textarea + number: typeof Value.number + color: typeof Value.color + datetime: typeof Value.datetime + select: typeof Value.select + multiselect: typeof Value.multiselect + object: typeof Value.object + file: typeof Value.file + list: typeof Value.list + hidden: typeof Value.hidden + union: typeof Value.union + + // Dynamic methods with OuterType pre-bound (last generic param removed) + dynamicToggle( + a: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: boolean + disabled?: false | string + }, + OuterType + >, + ): Value + + dynamicText( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: DefaultString | null + required: Required + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns?: Pattern[] + inputmode?: ValueSpecText['inputmode'] + disabled?: string | false + 
generate?: null | RandomString + }, + OuterType + >, + ): Value, string | null, OuterType> + + dynamicTextarea( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string | null + required: Required + minLength?: number | null + maxLength?: number | null + patterns?: Pattern[] + minRows?: number + maxRows?: number + placeholder?: string | null + disabled?: false | string + }, + OuterType + >, + ): Value, string | null, OuterType> + + dynamicNumber( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: number | null + required: Required + min?: number | null + max?: number | null + step?: number | null + integer: boolean + units?: string | null + placeholder?: string | null + disabled?: false | string + }, + OuterType + >, + ): Value, number | null, OuterType> + + dynamicColor( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string | null + required: Required + disabled?: false | string + }, + OuterType + >, + ): Value, string | null, OuterType> + + dynamicDatetime( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string | null + required: Required + inputmode?: ValueSpecDatetime['inputmode'] + min?: string | null + max?: string | null + disabled?: false | string + }, + OuterType + >, + ): Value, string | null, OuterType> + + dynamicSelect>( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string + values: Values + disabled?: false | string | string[] + }, + OuterType + >, + ): Value + + dynamicMultiselect>( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string[] + values: Values + minLength?: number | null + maxLength?: number | null + disabled?: false | string | string[] + }, + OuterType + >, + ): Value<(keyof Values & string)[], (keyof 
Values & string)[], OuterType> + + dynamicFile( + a: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + extensions: string[] + required: Required + }, + OuterType + >, + ): Value, FileInfo | null, OuterType> + + dynamicUnion< + VariantValues extends { + [K in string]: { + name: string + spec: InputSpec + } + }, + >( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + variants: Variants + default: keyof VariantValues & string + disabled: string[] | false | string + }, + OuterType + >, + ): Value, UnionRes, OuterType> + dynamicUnion< + StaticVariantValues extends { + [K in string]: { + name: string + spec: InputSpec + } + }, + VariantValues extends StaticVariantValues, + >( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + variants: Variants + default: keyof VariantValues & string + disabled: string[] | false | string + }, + OuterType + >, + validator: z.ZodType>, + ): Value< + UnionRes, + UnionResStaticValidatedAs, + OuterType + > + + dynamicHidden( + getParser: LazyBuild, OuterType>, + ): Value +} + +export interface BoundList { + text: typeof List.text + obj: typeof List.obj + dynamicText( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default?: string[] + minLength?: number | null + maxLength?: number | null + disabled?: false | string + generate?: null | RandomString + spec: { + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns?: Pattern[] + inputmode?: ListValueSpecText['inputmode'] + } + }, + OuterType + >, + ): List +} + +export function createInputSpecTools(): InputSpecTools { + return { + Value: Value as any as BoundValue, + Variants, + InputSpec, + List: List as any as BoundList, + } +} diff --git a/sdk/base/lib/actions/input/builder/list.ts b/sdk/base/lib/actions/input/builder/list.ts index 765775086..92b1a1f06 100644 
--- a/sdk/base/lib/actions/input/builder/list.ts +++ b/sdk/base/lib/actions/input/builder/list.ts @@ -7,18 +7,39 @@ import { ValueSpecList, ValueSpecListOf, } from '../inputSpecTypes' -import { Parser, arrayOf, string } from 'ts-matches' +import { z } from 'zod' -export class List { +/** + * Builder class for defining list-type form fields. + * + * A list presents an interface to add, remove, and reorder items. Items can be + * either text strings ({@link List.text}) or structured objects ({@link List.obj}). + * + * Used with {@link Value.list} to include a list field in an {@link InputSpec}. + */ +export class List< + Type extends StaticValidatedAs, + StaticValidatedAs = Type, + OuterType = unknown, +> { private constructor( - public build: LazyBuild<{ - spec: ValueSpecList - validator: Parser - }>, - public readonly validator: Parser, + public build: LazyBuild< + { + spec: ValueSpecList + validator: z.ZodType + }, + OuterType + >, + public readonly validator: z.ZodType, ) {} readonly _TYPE: Type = null as any + /** + * Creates a list of text input items. + * + * @param a - List-level options (name, description, min/max length, defaults) + * @param aSpec - Item-level options (patterns, input mode, masking, generation) + */ static text( a: { name: string @@ -62,7 +83,7 @@ export class List { generate?: null | RandomString }, ) { - const validator = arrayOf(string) + const validator = z.array(z.string()) return new List(() => { const spec = { type: 'text' as const, @@ -90,28 +111,32 @@ export class List { }, validator) } - static dynamicText( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - default?: string[] - minLength?: number | null - maxLength?: number | null - disabled?: false | string - generate?: null | RandomString - spec: { - masked?: boolean - placeholder?: string | null + /** Like {@link List.text} but options are resolved lazily at runtime via a builder function. 
*/ + static dynamicText( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default?: string[] minLength?: number | null maxLength?: number | null - patterns?: Pattern[] - inputmode?: ListValueSpecText['inputmode'] - } - }>, + disabled?: false | string + generate?: null | RandomString + spec: { + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns?: Pattern[] + inputmode?: ListValueSpecText['inputmode'] + } + }, + OuterType + >, ) { - const validator = arrayOf(string) - return new List(async (options) => { + const validator = z.array(z.string()) + return new List(async (options) => { const { spec: aSpec, ...a } = await getA(options) const spec = { type: 'text' as const, @@ -140,6 +165,12 @@ export class List { }, validator) } + /** + * Creates a list of structured object items, each defined by a nested {@link InputSpec}. + * + * @param a - List-level options (name, description, min/max length) + * @param aSpec - Item-level options (the nested spec, display expression, uniqueness constraint) + */ static obj< Type extends StaticValidatedAs, StaticValidatedAs extends Record, @@ -183,8 +214,8 @@ export class List { disabled: false, ...value, }, - validator: arrayOf(built.validator), + validator: z.array(built.validator), } - }, arrayOf(aSpec.spec.validator)) + }, z.array(aSpec.spec.validator)) } } diff --git a/sdk/base/lib/actions/input/builder/value.ts b/sdk/base/lib/actions/input/builder/value.ts index b211376cc..e916e9ecf 100644 --- a/sdk/base/lib/actions/input/builder/value.ts +++ b/sdk/base/lib/actions/input/builder/value.ts @@ -12,51 +12,69 @@ import { } from '../inputSpecTypes' import { DefaultString } from '../inputSpecTypes' import { _, once } from '../../../util' -import { - Parser, - any, - anyOf, - arrayOf, - boolean, - literal, - literals, - number, - object, - string, -} from 'ts-matches' +import { z } from 'zod' import { DeepPartial } from 
'../../../types' -export const fileInfoParser = object({ - path: string, - commitment: object({ hash: string, size: number }), +/** Zod schema for a file upload result — validates `{ path, commitment: { hash, size } }`. */ +export const fileInfoParser = z.object({ + path: z.string(), + commitment: z.object({ hash: z.string(), size: z.number() }), }) -export type FileInfo = typeof fileInfoParser._TYPE +/** The parsed result of a file upload, containing the file path and its content commitment (hash + size). */ +export type FileInfo = z.infer -type AsRequired = Required extends true +/** Conditional type: returns `T` if `Required` is `true`, otherwise `T | null`. */ +export type AsRequired = Required extends true ? T : T | null const testForAsRequiredParser = once( - () => object({ required: literal(true) }).test, + () => (v: unknown) => + z.object({ required: z.literal(true) }).safeParse(v).success, ) function asRequiredParser( - parser: Parser, + parser: z.ZodType, input: Input, -): Parser> { +): z.ZodType> { if (testForAsRequiredParser()(input)) return parser as any return parser.nullable() as any } -export class Value { +/** + * Core builder class for defining a single form field in a service configuration spec. + * + * Each static factory method (e.g. `Value.text()`, `Value.toggle()`, `Value.select()`) creates + * a typed `Value` instance representing a specific field type. Dynamic variants (e.g. `Value.dynamicText()`) + * allow the field options to be computed lazily at runtime. + * + * Use with {@link InputSpec} to compose complete form specifications. 
+ * + * @typeParam Type - The runtime type this field produces when filled in + * @typeParam StaticValidatedAs - The compile-time validated type (usually same as Type) + * @typeParam OuterType - The parent form's type context (used by dynamic variants) + */ +export class Value< + Type extends StaticValidatedAs, + StaticValidatedAs = Type, + OuterType = unknown, +> { protected constructor( - public build: LazyBuild<{ - spec: ValueSpec - validator: Parser - }>, - public readonly validator: Parser, + public build: LazyBuild< + { + spec: ValueSpec + validator: z.ZodType + }, + OuterType + >, + public readonly validator: z.ZodType, ) {} public _TYPE: Type = null as any as Type public _PARTIAL: DeepPartial = null as any as DeepPartial + /** @internal Used by {@link InputSpec.filter} to support nested filtering of object-typed fields. */ + _objectSpec?: { + inputSpec: InputSpec + params: { name: string; description?: string | null } + } /** * @description Displays a boolean toggle to enable/disable @@ -86,7 +104,7 @@ export class Value { */ immutable?: boolean }) { - const validator = boolean + const validator = z.boolean() return new Value( async () => ({ spec: { @@ -102,17 +120,21 @@ export class Value { validator, ) } - static dynamicToggle( - a: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - default: boolean - disabled?: false | string - }>, + /** Like {@link Value.toggle} but options are resolved lazily at runtime via a builder function. 
*/ + static dynamicToggle( + a: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: boolean + disabled?: false | string + }, + OuterType + >, ) { - const validator = boolean - return new Value( + const validator = z.boolean() + return new Value( async (options) => ({ spec: { description: null, @@ -202,7 +224,7 @@ export class Value { */ generate?: RandomString | null }) { - const validator = asRequiredParser(string, a) + const validator = asRequiredParser(z.string(), a) return new Value>( async () => ({ spec: { @@ -225,24 +247,28 @@ export class Value { validator, ) } - static dynamicText( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - default: DefaultString | null - required: Required - masked?: boolean - placeholder?: string | null - minLength?: number | null - maxLength?: number | null - patterns?: Pattern[] - inputmode?: ValueSpecText['inputmode'] - disabled?: string | false - generate?: null | RandomString - }>, + /** Like {@link Value.text} but options are resolved lazily at runtime via a builder function. */ + static dynamicText( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: DefaultString | null + required: Required + masked?: boolean + placeholder?: string | null + minLength?: number | null + maxLength?: number | null + patterns?: Pattern[] + inputmode?: ValueSpecText['inputmode'] + disabled?: string | false + generate?: null | RandomString + }, + OuterType + >, ) { - return new Value, string | null>( + return new Value, string | null, OuterType>( async (options) => { const a = await getA(options) return { @@ -261,10 +287,10 @@ export class Value { generate: a.generate ?? 
null, ...a, }, - validator: asRequiredParser(string, a), + validator: asRequiredParser(z.string(), a), } }, - string.nullable(), + z.string().nullable(), ) } /** @@ -323,7 +349,7 @@ export class Value { */ immutable?: boolean }) { - const validator = asRequiredParser(string, a) + const validator = asRequiredParser(z.string(), a) return new Value>(async () => { const built: ValueSpecTextarea = { description: null, @@ -342,23 +368,27 @@ export class Value { return { spec: built, validator } }, validator) } - static dynamicTextarea( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - default: string | null - required: Required - minLength?: number | null - maxLength?: number | null - patterns?: Pattern[] - minRows?: number - maxRows?: number - placeholder?: string | null - disabled?: false | string - }>, + /** Like {@link Value.textarea} but options are resolved lazily at runtime via a builder function. */ + static dynamicTextarea( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string | null + required: Required + minLength?: number | null + maxLength?: number | null + patterns?: Pattern[] + minRows?: number + maxRows?: number + placeholder?: string | null + disabled?: false | string + }, + OuterType + >, ) { - return new Value, string | null>( + return new Value, string | null, OuterType>( async (options) => { const a = await getA(options) return { @@ -376,10 +406,10 @@ export class Value { immutable: false, ...a, }, - validator: asRequiredParser(string, a), + validator: asRequiredParser(z.string(), a), } }, - string.nullable(), + z.string().nullable(), ) } /** @@ -440,7 +470,7 @@ export class Value { */ immutable?: boolean }) { - const validator = asRequiredParser(number, a) + const validator = asRequiredParser(z.number(), a) return new Value>( () => ({ spec: { @@ -461,23 +491,27 @@ export class Value { validator, ) } - static dynamicNumber( - getA: LazyBuild<{ - name: 
string - description?: string | null - warning?: string | null - default: number | null - required: Required - min?: number | null - max?: number | null - step?: number | null - integer: boolean - units?: string | null - placeholder?: string | null - disabled?: false | string - }>, + /** Like {@link Value.number} but options are resolved lazily at runtime via a builder function. */ + static dynamicNumber( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: number | null + required: Required + min?: number | null + max?: number | null + step?: number | null + integer: boolean + units?: string | null + placeholder?: string | null + disabled?: false | string + }, + OuterType + >, ) { - return new Value, number | null>( + return new Value, number | null, OuterType>( async (options) => { const a = await getA(options) return { @@ -494,10 +528,10 @@ export class Value { immutable: false, ...a, }, - validator: asRequiredParser(number, a), + validator: asRequiredParser(z.number(), a), } }, - number.nullable(), + z.number().nullable(), ) } /** @@ -536,7 +570,7 @@ export class Value { */ immutable?: boolean }) { - const validator = asRequiredParser(string, a) + const validator = asRequiredParser(z.string(), a) return new Value>( () => ({ spec: { @@ -553,17 +587,21 @@ export class Value { ) } - static dynamicColor( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - default: string | null - required: Required - disabled?: false | string - }>, + /** Like {@link Value.color} but options are resolved lazily at runtime via a builder function. 
*/ + static dynamicColor( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string | null + required: Required + disabled?: false | string + }, + OuterType + >, ) { - return new Value, string | null>( + return new Value, string | null, OuterType>( async (options) => { const a = await getA(options) return { @@ -575,10 +613,10 @@ export class Value { immutable: false, ...a, }, - validator: asRequiredParser(string, a), + validator: asRequiredParser(z.string(), a), } }, - string.nullable(), + z.string().nullable(), ) } /** @@ -627,7 +665,7 @@ export class Value { */ immutable?: boolean }) { - const validator = asRequiredParser(string, a) + const validator = asRequiredParser(z.string(), a) return new Value>( () => ({ spec: { @@ -647,20 +685,24 @@ export class Value { validator, ) } - static dynamicDatetime( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - default: string | null - required: Required - inputmode?: ValueSpecDatetime['inputmode'] - min?: string | null - max?: string | null - disabled?: false | string - }>, + /** Like {@link Value.datetime} but options are resolved lazily at runtime via a builder function. 
*/ + static dynamicDatetime( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string | null + required: Required + inputmode?: ValueSpecDatetime['inputmode'] + min?: string | null + max?: string | null + disabled?: false | string + }, + OuterType + >, ) { - return new Value, string | null>( + return new Value, string | null, OuterType>( async (options) => { const a = await getA(options) return { @@ -675,10 +717,10 @@ export class Value { immutable: false, ...a, }, - validator: asRequiredParser(string, a), + validator: asRequiredParser(z.string(), a), } }, - string.nullable(), + z.string().nullable(), ) } /** @@ -732,8 +774,12 @@ export class Value { */ immutable?: boolean }) { - const validator = anyOf( - ...Object.keys(a.values).map((x: keyof Values & string) => literal(x)), + const validator = z.union( + Object.keys(a.values).map((x: keyof Values & string) => z.literal(x)) as [ + z.ZodLiteral, + z.ZodLiteral, + ...z.ZodLiteral[], + ], ) return new Value( () => ({ @@ -750,34 +796,48 @@ export class Value { validator, ) } - static dynamicSelect>( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - default: string - values: Values - disabled?: false | string | string[] - }>, + /** Like {@link Value.select} but options are resolved lazily at runtime via a builder function. 
*/ + static dynamicSelect< + Values extends Record, + OuterType = unknown, + >( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string + values: Values + disabled?: false | string | string[] + }, + OuterType + >, ) { - return new Value(async (options) => { - const a = await getA(options) - return { - spec: { - description: null, - warning: null, - type: 'select' as const, - disabled: false, - immutable: false, - ...a, - }, - validator: anyOf( - ...Object.keys(a.values).map((x: keyof Values & string) => - literal(x), + return new Value( + async (options) => { + const a = await getA(options) + return { + spec: { + description: null, + warning: null, + type: 'select' as const, + disabled: false, + immutable: false, + ...a, + }, + validator: z.union( + Object.keys(a.values).map((x: keyof Values & string) => + z.literal(x), + ) as [ + z.ZodLiteral, + z.ZodLiteral, + ...z.ZodLiteral[], + ], ), - ), - } - }, string) + } + }, + z.string(), + ) } /** * @description Displays a select modal with checkboxes, allowing for multiple selections. @@ -831,8 +891,14 @@ export class Value { */ immutable?: boolean }) { - const validator = arrayOf( - literals(...(Object.keys(a.values) as any as [keyof Values & string])), + const validator = z.array( + z.union( + Object.keys(a.values).map((x) => z.literal(x)) as [ + z.ZodLiteral, + z.ZodLiteral, + ...z.ZodLiteral[], + ], + ), ) return new Value<(keyof Values & string)[]>( () => ({ @@ -851,19 +917,30 @@ export class Value { validator, ) } - static dynamicMultiselect>( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - default: string[] - values: Values - minLength?: number | null - maxLength?: number | null - disabled?: false | string | string[] - }>, + /** Like {@link Value.multiselect} but options are resolved lazily at runtime via a builder function. 
*/ + static dynamicMultiselect< + Values extends Record, + OuterType = unknown, + >( + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + default: string[] + values: Values + minLength?: number | null + maxLength?: number | null + disabled?: false | string | string[] + }, + OuterType + >, ) { - return new Value<(keyof Values & string)[], string[]>(async (options) => { + return new Value< + (keyof Values & string)[], + (keyof Values & string)[], + OuterType + >(async (options) => { const a = await getA(options) return { spec: { @@ -876,13 +953,17 @@ export class Value { immutable: false, ...a, }, - validator: arrayOf( - literals( - ...(Object.keys(a.values) as any as [keyof Values & string]), + validator: z.array( + z.union( + Object.keys(a.values).map((x) => z.literal(x)) as [ + z.ZodLiteral, + z.ZodLiteral, + ...z.ZodLiteral[], + ], ), ), } - }, arrayOf(string)) + }, z.array(z.string())) } /** * @description Display a collapsable grouping of additional fields, a "sub form". The second value is the inputSpec spec for the sub form. @@ -911,7 +992,7 @@ export class Value { }, spec: InputSpec, ) { - return new Value(async (options) => { + const value = new Value(async (options) => { const built = await spec.build(options as any) return { spec: { @@ -924,7 +1005,15 @@ export class Value { validator: built.validator, } }, spec.validator) + value._objectSpec = { inputSpec: spec, params: a } + return value } + /** + * Displays a file upload input field. + * + * @param a.extensions - Allowed file extensions (e.g. 
`[".pem", ".crt"]`) + * @param a.required - Whether a file must be selected + */ static file(a: { name: string description?: string | null @@ -948,30 +1037,35 @@ export class Value { asRequiredParser(fileInfoParser, a), ) } - static dynamicFile( - a: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - extensions: string[] - required: Required - }>, - ) { - return new Value, FileInfo | null>( - async (options) => { - const spec = { - type: 'file' as const, - description: null, - warning: null, - ...(await a(options)), - } - return { - spec, - validator: asRequiredParser(fileInfoParser, spec), - } + /** Like {@link Value.file} but options are resolved lazily at runtime via a builder function. */ + static dynamicFile( + a: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + extensions: string[] + required: Required }, - fileInfoParser.nullable(), - ) + OuterType + >, + ) { + return new Value< + AsRequired, + FileInfo | null, + OuterType + >(async (options) => { + const spec = { + type: 'file' as const, + description: null, + warning: null, + ...(await a(options)), + } + return { + spec, + validator: asRequiredParser(fileInfoParser, spec), + } + }, fileInfoParser.nullable()) } /** * @description Displays a dropdown, allowing for a single selection. Depending on the selection, a different object ("sub form") is presented. @@ -1029,7 +1123,7 @@ export class Value { }) { return new Value< typeof a.variants._TYPE, - typeof a.variants.validator._TYPE + typeof a.variants.validator._output >(async (options) => { const built = await a.variants.build(options as any) return { @@ -1046,6 +1140,7 @@ export class Value { } }, a.variants.validator) } + /** Like {@link Value.union} but options (including which variants are available) are resolved lazily at runtime. 
*/ static dynamicUnion< VariantValues extends { [K in string]: { @@ -1053,37 +1148,47 @@ export class Value { spec: InputSpec } }, + OuterType = unknown, >( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - variants: Variants - default: keyof VariantValues & string - disabled: string[] | false | string - }>, - ): Value, unknown> + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + variants: Variants + default: keyof VariantValues & string + disabled: string[] | false | string + }, + OuterType + >, + ): Value, UnionRes, OuterType> + /** Like {@link Value.union} but options are resolved lazily, with an explicit static validator type. */ static dynamicUnion< - VariantValues extends StaticVariantValues, StaticVariantValues extends { [K in string]: { name: string spec: InputSpec } }, + VariantValues extends StaticVariantValues, + OuterType = unknown, >( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - variants: Variants - default: keyof VariantValues & string - disabled: string[] | false | string - }>, - validator: Parser>, + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + variants: Variants + default: keyof VariantValues & string + disabled: string[] | false | string + }, + OuterType + >, + validator: z.ZodType>, ): Value< UnionRes, - UnionResStaticValidatedAs + UnionResStaticValidatedAs, + OuterType > static dynamicUnion< VariantValues extends { @@ -1092,35 +1197,40 @@ export class Value { spec: InputSpec } }, + OuterType = unknown, >( - getA: LazyBuild<{ - name: string - description?: string | null - warning?: string | null - variants: Variants - default: keyof VariantValues & string - disabled: string[] | false | string - }>, - validator: Parser = any, - ) { - return new Value, typeof validator._TYPE>( - async (options) => { - const newValues = await getA(options) - const built = await 
newValues.variants.build(options as any) - return { - spec: { - type: 'union' as const, - description: null, - warning: null, - ...newValues, - variants: built.spec, - immutable: false, - }, - validator: built.validator, - } + getA: LazyBuild< + { + name: string + description?: string | null + warning?: string | null + variants: Variants + default: keyof VariantValues & string + disabled: string[] | false | string }, - validator, - ) + OuterType + >, + validator: z.ZodType = z.any(), + ) { + return new Value< + UnionRes, + z.infer, + OuterType + >(async (options) => { + const newValues = await getA(options) + const built = await newValues.variants.build(options as any) + return { + spec: { + type: 'union' as const, + description: null, + warning: null, + ...newValues, + variants: built.spec, + immutable: false, + }, + validator: built.validator, + } + }, validator) } /** * @description Presents an interface to add/remove/edit items in a list. @@ -1196,10 +1306,10 @@ export class Value { hiddenExample: Value.hidden(), * ``` */ - static hidden(): Value - static hidden(parser: Parser): Value - static hidden(parser: Parser = any) { - return new Value(async () => { + static hidden(): Value + static hidden(parser: z.ZodType): Value + static hidden(parser: z.ZodType = z.any()) { + return new Value>(async () => { return { spec: { type: 'hidden' as const, @@ -1216,8 +1326,10 @@ export class Value { hiddenExample: Value.hidden(), * ``` */ - static dynamicHidden(getParser: LazyBuild>) { - return new Value(async (options) => { + static dynamicHidden( + getParser: LazyBuild, OuterType>, + ) { + return new Value(async (options) => { const validator = await getParser(options) return { spec: { @@ -1225,16 +1337,41 @@ export class Value { } as ValueSpecHidden, validator, } - }, any) + }, z.any()) } - map(fn: (value: StaticValidatedAs) => U): Value { - return new Value(async (effects) => { - const built = await this.build(effects) + /** + * Returns a new Value that produces the same 
field spec but with `disabled` set to the given message. + * The field remains in the form but cannot be edited by the user. + * + * @param message - The reason the field is disabled, displayed to the user + */ + withDisabled(message: string): Value { + const original = this + const v = new Value(async (options) => { + const built = await original.build(options) + return { + spec: { ...built.spec, disabled: message } as ValueSpec, + validator: built.validator, + } + }, this.validator) + v._objectSpec = this._objectSpec + return v + } + + /** + * Transforms the validated output value using a mapping function. + * The form field itself remains unchanged, but the value is transformed after validation. + * + * @param fn - A function to transform the validated value + */ + map(fn: (value: StaticValidatedAs) => U): Value { + return new Value(async (options) => { + const built = await this.build(options) return { spec: built.spec, - validator: built.validator.map(fn), + validator: built.validator.transform(fn), } - }, this.validator.map(fn)) + }, this.validator.transform(fn)) } } diff --git a/sdk/base/lib/actions/input/builder/variants.ts b/sdk/base/lib/actions/input/builder/variants.ts index 46818d893..e0784c746 100644 --- a/sdk/base/lib/actions/input/builder/variants.ts +++ b/sdk/base/lib/actions/input/builder/variants.ts @@ -6,8 +6,13 @@ import { ExtractInputSpecType, ExtractInputSpecStaticValidatedAs, } from './inputSpec' -import { Parser, any, anyOf, literal, object } from 'ts-matches' +import { z } from 'zod' +/** + * The runtime result type of a discriminated union form field. + * Contains `selection` (the chosen variant key), `value` (the variant's form data), + * and optionally `other` (partial data from previously selected variants). + */ export type UnionRes< VariantValues extends { [K in string]: { @@ -28,6 +33,7 @@ export type UnionRes< } }[K] +/** Like {@link UnionRes} but using the static (Zod-inferred) validated types. 
*/ export type UnionResStaticValidatedAs< VariantValues extends { [K in string]: { @@ -103,18 +109,26 @@ export class Variants< spec: InputSpec } }, + OuterType = unknown, > { private constructor( - public build: LazyBuild<{ - spec: ValueSpecUnion['variants'] - validator: Parser> - }>, - public readonly validator: Parser< - unknown, + public build: LazyBuild< + { + spec: ValueSpecUnion['variants'] + validator: z.ZodType> + }, + OuterType + >, + public readonly validator: z.ZodType< UnionResStaticValidatedAs >, ) {} readonly _TYPE: UnionRes = null as any + /** + * Creates a `Variants` instance from a record mapping variant keys to their display name and form spec. + * + * @param a - A record of `{ name: string, spec: InputSpec }` entries, one per variant + */ static of< VariantValues extends { [K in string]: { @@ -124,8 +138,7 @@ export class Variants< }, >(a: VariantValues) { const staticValidators = {} as { - [K in keyof VariantValues]: Parser< - unknown, + [K in keyof VariantValues]: z.ZodType< ExtractInputSpecStaticValidatedAs > } @@ -133,16 +146,20 @@ export class Variants< const value = a[key] staticValidators[key] = value.spec.validator } - const other = object( - Object.fromEntries( - Object.entries(staticValidators).map(([k, v]) => [k, any.optional()]), - ), - ).optional() + const other = z + .object( + Object.fromEntries( + Object.entries(staticValidators).map(([k, v]) => [ + k, + z.any().optional(), + ]), + ), + ) + .optional() return new Variants( async (options) => { const validators = {} as { - [K in keyof VariantValues]: Parser< - unknown, + [K in keyof VariantValues]: z.ZodType< ExtractInputSpecType > } @@ -161,32 +178,37 @@ export class Variants< } validators[key] = built.validator } - const other = object( - Object.fromEntries( - Object.entries(validators).map(([k, v]) => [k, any.optional()]), - ), - ).optional() + const other = z + .object( + Object.fromEntries( + Object.entries(validators).map(([k, v]) => [ + k, + z.any().optional(), + ]), + ), + 
) + .optional() return { spec: variants, - validator: anyOf( - ...Object.entries(validators).map(([k, v]) => - object({ - selection: literal(k), + validator: z.union( + Object.entries(validators).map(([k, v]) => + z.object({ + selection: z.literal(k), value: v, other, }), - ), + ) as [z.ZodObject, z.ZodObject, ...z.ZodObject[]], ) as any, } }, - anyOf( - ...Object.entries(staticValidators).map(([k, v]) => - object({ - selection: literal(k), + z.union( + Object.entries(staticValidators).map(([k, v]) => + z.object({ + selection: z.literal(k), value: v, other, }), - ), + ) as [z.ZodObject, z.ZodObject, ...z.ZodObject[]], ) as any, ) } diff --git a/sdk/base/lib/actions/input/inputSpecConstants.ts b/sdk/base/lib/actions/input/inputSpecConstants.ts index d6bc2a68b..e2992740a 100644 --- a/sdk/base/lib/actions/input/inputSpecConstants.ts +++ b/sdk/base/lib/actions/input/inputSpecConstants.ts @@ -5,42 +5,124 @@ import { Value } from './builder/value' import { Variants } from './builder/variants' /** - * Base SMTP settings, to be used by StartOS for system wide SMTP + * Creates an SMTP field spec with provider-specific defaults pre-filled. 
*/ -export const customSmtp: InputSpec = InputSpec.of< - InputSpecOf ->({ - server: Value.text({ - name: 'SMTP Server', - required: true, - default: null, - }), - port: Value.number({ - name: 'Port', - required: true, - default: 587, - min: 1, - max: 65535, - integer: true, - }), - from: Value.text({ - name: 'From Address', - required: true, - default: null, - placeholder: 'Example Name ', - inputmode: 'email', - patterns: [Patterns.emailWithName], - }), - login: Value.text({ - name: 'Login', - required: true, - default: null, - }), - password: Value.text({ - name: 'Password', - required: false, - default: null, - masked: true, +function smtpFields( + defaults: { + host?: string + port?: number + security?: 'starttls' | 'tls' + } = {}, +): InputSpec { + return InputSpec.of>({ + host: Value.text({ + name: 'Host', + required: true, + default: defaults.host ?? null, + placeholder: 'smtp.example.com', + }), + port: Value.number({ + name: 'Port', + required: true, + default: defaults.port ?? 587, + min: 1, + max: 65535, + integer: true, + }), + security: Value.select({ + name: 'Connection Security', + default: defaults.security ?? 'starttls', + values: { + starttls: 'STARTTLS', + tls: 'TLS', + }, + }), + from: Value.text({ + name: 'From Address', + required: true, + default: null, + placeholder: 'Example Name ', + patterns: [Patterns.emailWithName], + }), + username: Value.text({ + name: 'Username', + required: true, + default: null, + }), + password: Value.text({ + name: 'Password', + required: false, + default: null, + masked: true, + }), + }) +} + +/** + * Base SMTP settings with no provider-specific defaults. + */ +export const customSmtp = smtpFields() + +/** + * Provider presets for SMTP configuration. + * Each variant has SMTP fields pre-filled with the provider's recommended settings. 
+ */ +export const smtpProviderVariants = Variants.of({ + gmail: { + name: 'Gmail', + spec: smtpFields({ + host: 'smtp.gmail.com', + port: 587, + security: 'starttls', + }), + }, + ses: { + name: 'Amazon SES', + spec: smtpFields({ + host: 'email-smtp.us-east-1.amazonaws.com', + port: 587, + security: 'starttls', + }), + }, + sendgrid: { + name: 'SendGrid', + spec: smtpFields({ + host: 'smtp.sendgrid.net', + port: 587, + security: 'starttls', + }), + }, + mailgun: { + name: 'Mailgun', + spec: smtpFields({ + host: 'smtp.mailgun.org', + port: 587, + security: 'starttls', + }), + }, + protonmail: { + name: 'Proton Mail', + spec: smtpFields({ + host: 'smtp.protonmail.ch', + port: 587, + security: 'starttls', + }), + }, + other: { + name: 'Other', + spec: customSmtp, + }, +}) + +/** + * System SMTP settings with provider presets. + * Wraps smtpProviderVariants in a union for use by the system email settings page. + */ +export const systemSmtpSpec = InputSpec.of({ + provider: Value.union({ + name: 'Provider', + default: null as any, + variants: smtpProviderVariants, }), }) @@ -55,19 +137,24 @@ const smtpVariants = Variants.of({ 'A custom from address for this service. If not provided, the system from address will be used.', required: false, default: null, - placeholder: 'test@example.com', - inputmode: 'email', - patterns: [Patterns.email], + placeholder: 'Name ', + patterns: [Patterns.emailWithName], }), }), }, custom: { name: 'Custom Credentials', - spec: customSmtp, + spec: InputSpec.of({ + provider: Value.union({ + name: 'Provider', + default: null as any, + variants: smtpProviderVariants, + }), + }), }, }) /** - * For service inputSpec. Gives users 3 options for SMTP: (1) disabled, (2) use system SMTP settings, (3) use custom SMTP settings + * For service inputSpec. 
Gives users 3 options for SMTP: (1) disabled, (2) use system SMTP settings, (3) use custom SMTP settings with provider presets */ export const smtpInputSpec = Value.dynamicUnion(async ({ effects }) => { const smtp = await new GetSystemSmtp(effects).once() diff --git a/sdk/base/lib/actions/input/inputSpecTypes.ts b/sdk/base/lib/actions/input/inputSpecTypes.ts index 2fe5d7b79..d3437b370 100644 --- a/sdk/base/lib/actions/input/inputSpecTypes.ts +++ b/sdk/base/lib/actions/input/inputSpecTypes.ts @@ -1,4 +1,12 @@ +/** + * A record mapping field keys to their {@link ValueSpec} definitions. + * This is the root shape of a dynamic form specification — it defines the complete set + * of configurable fields for a service or action. + */ export type InputSpec = Record +/** + * The discriminator for all supported form field types. + */ export type ValueType = | 'text' | 'textarea' @@ -13,6 +21,7 @@ export type ValueType = | 'file' | 'union' | 'hidden' +/** Union of all concrete form field spec types. Discriminate on the `type` field. */ export type ValueSpec = ValueSpecOf /** core spec types. These types provide the metadata for performing validations */ // prettier-ignore @@ -32,37 +41,56 @@ export type ValueSpecOf = T extends "hidden" ? ValueSpecHidden : never +/** Spec for a single-line text input field. */ export type ValueSpecText = { + /** Display label for the field. */ name: string + /** Optional help text displayed below the field. */ description: string | null + /** Optional warning message displayed to the user. */ warning: string | null type: 'text' + /** Regex patterns used to validate the input value. */ patterns: Pattern[] + /** Minimum character length, or `null` for no minimum. */ minLength: number | null + /** Maximum character length, or `null` for no maximum. */ maxLength: number | null + /** Whether the field should obscure input (e.g. for passwords). */ masked: boolean + /** HTML input mode hint for mobile keyboards. 
*/ inputmode: 'text' | 'email' | 'tel' | 'url' + /** Placeholder text shown when the field is empty. */ placeholder: string | null + /** Whether the field must have a value. */ required: boolean + /** Default value, which may be a literal string or a {@link RandomString} generation spec. */ default: DefaultString | null + /** `false` if editable, or a string message explaining why the field is disabled. */ disabled: false | string + /** If set, provides a "generate" button that fills the field with a random string matching this spec. */ generate: null | RandomString + /** Whether the field value cannot be changed after initial configuration. */ immutable: boolean } +/** Spec for a multi-line textarea input field. */ export type ValueSpecTextarea = { name: string description: string | null warning: string | null type: 'textarea' + /** Regex patterns used to validate the input value. */ patterns: Pattern[] placeholder: string | null minLength: number | null maxLength: number | null + /** Minimum number of visible rows. */ minRows: number + /** Maximum number of visible rows before scrolling. */ maxRows: number required: boolean default: string | null @@ -70,12 +98,18 @@ export type ValueSpecTextarea = { immutable: boolean } +/** Spec for a numeric input field. */ export type ValueSpecNumber = { type: 'number' + /** Minimum allowed value, or `null` for unbounded. */ min: number | null + /** Maximum allowed value, or `null` for unbounded. */ max: number | null + /** Whether only whole numbers are accepted. */ integer: boolean + /** Step increment for the input spinner, or `null` for any precision. */ step: number | null + /** Display label for the unit (e.g. `"MB"`, `"seconds"`), shown next to the field. */ units: string | null placeholder: string | null name: string @@ -86,6 +120,7 @@ export type ValueSpecNumber = { disabled: false | string immutable: boolean } +/** Spec for a browser-native color picker field. 
*/ export type ValueSpecColor = { name: string description: string | null @@ -93,34 +128,44 @@ export type ValueSpecColor = { type: 'color' required: boolean + /** Default hex color string (e.g. `"#ff0000"`), or `null`. */ default: string | null disabled: false | string immutable: boolean } +/** Spec for a date, time, or datetime input field. */ export type ValueSpecDatetime = { name: string description: string | null warning: string | null type: 'datetime' required: boolean + /** Controls which kind of picker is displayed. */ inputmode: 'date' | 'time' | 'datetime-local' + /** Minimum selectable date/time as an ISO string, or `null`. */ min: string | null + /** Maximum selectable date/time as an ISO string, or `null`. */ max: string | null default: string | null disabled: false | string immutable: boolean } +/** Spec for a single-select field displayed as radio buttons in a modal. */ export type ValueSpecSelect = { + /** Map of option keys to display labels. */ values: Record name: string description: string | null warning: string | null type: 'select' default: string | null + /** `false` if all enabled, a string disabling the whole field, or an array of disabled option keys. */ disabled: false | string | string[] immutable: boolean } +/** Spec for a multi-select field displayed as checkboxes in a modal. */ export type ValueSpecMultiselect = { + /** Map of option keys to display labels. */ values: Record name: string @@ -128,12 +173,17 @@ export type ValueSpecMultiselect = { warning: string | null type: 'multiselect' + /** Minimum number of selections required, or `null`. */ minLength: number | null + /** Maximum number of selections allowed, or `null`. */ maxLength: number | null + /** `false` if all enabled, a string disabling the whole field, or an array of disabled option keys. */ disabled: false | string | string[] + /** Array of option keys selected by default. */ default: string[] immutable: boolean } +/** Spec for a boolean toggle (on/off switch). 
*/ export type ValueSpecToggle = { name: string description: string | null @@ -144,57 +194,81 @@ export type ValueSpecToggle = { disabled: false | string immutable: boolean } +/** + * Spec for a discriminated union field — displays a dropdown for variant selection, + * and each variant can have its own nested sub-form. + */ export type ValueSpecUnion = { name: string description: string | null warning: string | null type: 'union' + /** Map of variant keys to their display name and nested form spec. */ variants: Record< string, { + /** Display name for this variant in the dropdown. */ name: string + /** Nested form spec shown when this variant is selected. */ spec: InputSpec } > + /** `false` if all enabled, a string disabling the whole field, or an array of disabled variant keys. */ disabled: false | string | string[] default: string | null immutable: boolean } +/** Spec for a file upload input field. */ export type ValueSpecFile = { name: string description: string | null warning: string | null type: 'file' + /** Allowed file extensions (e.g. `[".pem", ".crt"]`). */ extensions: string[] required: boolean } +/** Spec for a collapsible grouping of nested fields (a "sub-form"). */ export type ValueSpecObject = { name: string description: string | null warning: string | null type: 'object' + /** The nested form spec containing this object's fields. */ spec: InputSpec } +/** Spec for a hidden field — not displayed to the user but included in the form data. */ export type ValueSpecHidden = { type: 'hidden' } +/** The two supported list item types. */ export type ListValueSpecType = 'text' | 'object' +/** Maps a {@link ListValueSpecType} to its concrete list item spec. */ // prettier-ignore -export type ListValueSpecOf = +export type ListValueSpecOf = T extends "text" ? ListValueSpecText : T extends "object" ? ListValueSpecObject : never +/** A list field spec — union of text-list and object-list variants. 
*/ export type ValueSpecList = ValueSpecListOf +/** + * Spec for a list field — an interface to add, remove, and edit items in an ordered collection. + * The `spec` field determines whether list items are text strings or structured objects. + */ export type ValueSpecListOf = { name: string description: string | null warning: string | null type: 'list' + /** The item spec — determines whether this is a list of text values or objects. */ spec: ListValueSpecOf + /** Minimum number of items, or `null` for no minimum. */ minLength: number | null + /** Maximum number of items, or `null` for no maximum. */ maxLength: number | null disabled: false | string + /** Default list items to populate on creation. */ default: | string[] | DefaultString[] @@ -203,10 +277,14 @@ export type ValueSpecListOf = { | readonly DefaultString[] | readonly Record[] } +/** A regex validation pattern with a human-readable description of what it enforces. */ export type Pattern = { + /** The regex pattern string (without delimiters). */ regex: string + /** A user-facing explanation shown when validation fails (e.g. `"Must be a valid email"`). */ description: string } +/** Spec for text items within a list field. */ export type ListValueSpecText = { type: 'text' patterns: Pattern[] @@ -218,13 +296,24 @@ export type ListValueSpecText = { inputmode: 'text' | 'email' | 'tel' | 'url' placeholder: string | null } +/** Spec for object items within a list field. */ export type ListValueSpecObject = { type: 'object' + /** The form spec for each object item. */ spec: InputSpec + /** Defines how uniqueness is determined among list items. */ uniqueBy: UniqueBy + /** An expression used to generate the display string for each item in the list summary (e.g. a key path). */ displayAs: string | null } +/** + * Describes how list items determine uniqueness. 
+ * - `null`: no uniqueness constraint + * - `string`: unique by a specific field key + * - `{ any: UniqueBy[] }`: unique if any of the sub-constraints match + * - `{ all: UniqueBy[] }`: unique if all sub-constraints match together + */ export type UniqueBy = | null | string @@ -234,12 +323,21 @@ export type UniqueBy = | { all: readonly UniqueBy[] | UniqueBy[] } +/** A default value that is either a literal string or a {@link RandomString} generation spec. */ export type DefaultString = string | RandomString +/** Spec for generating a random string — used for default passwords, API keys, etc. */ export type RandomString = { + /** The character set to draw from (e.g. `"a-zA-Z0-9"`). */ charset: string + /** The length of the generated string. */ len: number } -// sometimes the type checker needs just a little bit of help +/** + * Type guard that narrows a {@link ValueSpec} to a {@link ValueSpecListOf} of a specific item type. + * + * @param t - The value spec to check + * @param s - The list item type to narrow to (`"text"` or `"object"`) + */ export function isValueSpecListOf( t: ValueSpec, s: S, diff --git a/sdk/base/lib/actions/setupActions.ts b/sdk/base/lib/actions/setupActions.ts index d056be56e..87e1a9d48 100644 --- a/sdk/base/lib/actions/setupActions.ts +++ b/sdk/base/lib/actions/setupActions.ts @@ -3,7 +3,7 @@ import { ExtractInputSpecType } from './input/builder/inputSpec' import * as T from '../types' import { once } from '../util' import { InitScript } from '../inits' -import { Parser } from 'ts-matches' +import { z } from 'zod' type MaybeInputSpec = {} extends Type ? 
null : InputSpec export type Run> = (options: { @@ -13,12 +13,15 @@ export type Run> = (options: { }) => Promise<(T.ActionResult & { version: '1' }) | null | void | undefined> export type GetInput> = (options: { effects: T.Effects + prefill: T.DeepPartial | null }) => Promise> -export type MaybeFn = T | ((options: { effects: T.Effects }) => Promise) -function callMaybeFn( - maybeFn: MaybeFn, - options: { effects: T.Effects }, +export type MaybeFn = + | T + | ((options: Opts) => Promise) +function callMaybeFn( + maybeFn: MaybeFn, + options: Opts, ): Promise { if (maybeFn instanceof Function) { return maybeFn(options) @@ -51,12 +54,18 @@ export class Action> readonly _INPUT: Type = null as any as Type private prevInputSpec: Record< string, - { spec: T.inputSpecTypes.InputSpec; validator: Parser } + { spec: T.inputSpecTypes.InputSpec; validator: z.ZodType } > = {} private constructor( readonly id: Id, private readonly metadataFn: MaybeFn, - private readonly inputSpec: MaybeInputSpec, + private readonly inputSpec: MaybeFn< + MaybeInputSpec, + { + effects: T.Effects + prefill: unknown | null + } + >, private readonly getInputFn: GetInput, private readonly runFn: Run, ) {} @@ -66,7 +75,13 @@ export class Action> >( id: Id, metadata: MaybeFn>, - inputSpec: InputSpecType, + inputSpec: MaybeFn< + InputSpecType, + { + effects: T.Effects + prefill: unknown | null + } + >, getInput: GetInput>, run: Run>, ): Action> { @@ -104,12 +119,18 @@ export class Action> await options.effects.action.export({ id: this.id, metadata }) return metadata } - async getInput(options: { effects: T.Effects }): Promise { + async getInput(options: { + effects: T.Effects + prefill: T.DeepPartial | null + }): Promise { let spec = {} if (this.inputSpec) { - const built = await this.inputSpec.build(options) - this.prevInputSpec[options.effects.eventId!] 
= built - spec = built.spec + const inputSpec = await callMaybeFn(this.inputSpec, options) + const built = await inputSpec?.build(options) + if (built) { + this.prevInputSpec[options.effects.eventId!] = built + spec = built.spec + } } return { eventId: options.effects.eventId!, @@ -133,7 +154,7 @@ export class Action> `getActionInput has not been called for EventID ${options.effects.eventId}`, ) } - options.input = prevInputSpec.validator.unsafeCast(options.input) + options.input = prevInputSpec.validator.parse(options.input) spec = prevInputSpec.spec } return ( diff --git a/sdk/base/lib/exver/exver.ts b/sdk/base/lib/exver/exver.ts index 352178cca..94202fde1 100644 --- a/sdk/base/lib/exver/exver.ts +++ b/sdk/base/lib/exver/exver.ts @@ -1,3195 +1,2902 @@ /* eslint-disable */ -const peggyParser: { parse: any; SyntaxError: any; DefaultTracer?: any } = // Generated by Peggy 3.0.2. - // - // https://peggyjs.org/ - // @ts-ignore - (function () { - // @ts-ignore - 'use strict' - // @ts-ignore - function peg$subclass(child, parent) { - // @ts-ignore - function C() { - this.constructor = child - } - // @ts-ignore - C.prototype = parent.prototype - // @ts-ignore - child.prototype = new C() - } - // @ts-ignore - function peg$SyntaxError(message, expected, found, location) { - // @ts-ignore - var self = Error.call(this, message) - // istanbul ignore next Check is a necessary evil to support older environments - // @ts-ignore - if (Object.setPrototypeOf) { - // @ts-ignore - Object.setPrototypeOf(self, peg$SyntaxError.prototype) - } - // @ts-ignore - self.expected = expected - // @ts-ignore - self.found = found - // @ts-ignore - self.location = location - // @ts-ignore - self.name = 'SyntaxError' - // @ts-ignore - return self - } - - // @ts-ignore - peg$subclass(peg$SyntaxError, Error) - - // @ts-ignore - function peg$padEnd(str, targetLength, padString) { - // @ts-ignore - padString = padString || ' ' - // @ts-ignore - if (str.length > targetLength) { - return str - } - // 
@ts-ignore - targetLength -= str.length - // @ts-ignore - padString += padString.repeat(targetLength) - // @ts-ignore - return str + padString.slice(0, targetLength) - } - - // @ts-ignore - peg$SyntaxError.prototype.format = function (sources) { - // @ts-ignore - var str = 'Error: ' + this.message - // @ts-ignore - if (this.location) { - // @ts-ignore - var src = null - // @ts-ignore - var k - // @ts-ignore - for (k = 0; k < sources.length; k++) { - // @ts-ignore - if (sources[k].source === this.location.source) { - // @ts-ignore - src = sources[k].text.split(/\r\n|\n|\r/g) - // @ts-ignore - break - } - } - // @ts-ignore - var s = this.location.start - // @ts-ignore - var offset_s = - this.location.source && - typeof this.location.source.offset === 'function' - ? // @ts-ignore - this.location.source.offset(s) - : // @ts-ignore - s - // @ts-ignore - var loc = - this.location.source + ':' + offset_s.line + ':' + offset_s.column - // @ts-ignore - if (src) { - // @ts-ignore - var e = this.location.end - // @ts-ignore - var filler = peg$padEnd('', offset_s.line.toString().length, ' ') - // @ts-ignore - var line = src[s.line - 1] - // @ts-ignore - var last = s.line === e.line ? 
e.column : line.length + 1 - // @ts-ignore - var hatLen = last - s.column || 1 - // @ts-ignore - str += - '\n --> ' + - loc + - '\n' + - // @ts-ignore - filler + - ' |\n' + - // @ts-ignore - offset_s.line + - ' | ' + - line + - '\n' + - // @ts-ignore - filler + - ' | ' + - peg$padEnd('', s.column - 1, ' ') + - // @ts-ignore - peg$padEnd('', hatLen, '^') - // @ts-ignore - } else { - // @ts-ignore - str += '\n at ' + loc - } - } - // @ts-ignore - return str - } - - // @ts-ignore - peg$SyntaxError.buildMessage = function (expected, found) { - // @ts-ignore - var DESCRIBE_EXPECTATION_FNS = { - // @ts-ignore - literal: function (expectation) { - // @ts-ignore - return '"' + literalEscape(expectation.text) + '"' - }, - - // @ts-ignore - class: function (expectation) { - // @ts-ignore - var escapedParts = expectation.parts.map(function (part) { - // @ts-ignore - return Array.isArray(part) - ? // @ts-ignore - classEscape(part[0]) + '-' + classEscape(part[1]) - : // @ts-ignore - classEscape(part) - }) - - // @ts-ignore - return ( - '[' + - (expectation.inverted ? 
'^' : '') + - escapedParts.join('') + - ']' - ) - }, - - // @ts-ignore - any: function () { - // @ts-ignore - return 'any character' - }, - - // @ts-ignore - end: function () { - // @ts-ignore - return 'end of input' - }, - - // @ts-ignore - other: function (expectation) { - // @ts-ignore - return expectation.description - }, - } - - // @ts-ignore - function hex(ch) { - // @ts-ignore - return ch.charCodeAt(0).toString(16).toUpperCase() - } - - // @ts-ignore - function literalEscape(s) { - // @ts-ignore - return ( - s - // @ts-ignore - .replace(/\\/g, '\\\\') - // @ts-ignore - .replace(/"/g, '\\"') - // @ts-ignore - .replace(/\0/g, '\\0') - // @ts-ignore - .replace(/\t/g, '\\t') - // @ts-ignore - .replace(/\n/g, '\\n') - // @ts-ignore - .replace(/\r/g, '\\r') - // @ts-ignore - .replace(/[\x00-\x0F]/g, function (ch) { - return '\\x0' + hex(ch) - }) - // @ts-ignore - .replace(/[\x10-\x1F\x7F-\x9F]/g, function (ch) { - return '\\x' + hex(ch) - }) - ) - } - - // @ts-ignore - function classEscape(s) { - // @ts-ignore - return ( - s - // @ts-ignore - .replace(/\\/g, '\\\\') - // @ts-ignore - .replace(/\]/g, '\\]') - // @ts-ignore - .replace(/\^/g, '\\^') - // @ts-ignore - .replace(/-/g, '\\-') - // @ts-ignore - .replace(/\0/g, '\\0') - // @ts-ignore - .replace(/\t/g, '\\t') - // @ts-ignore - .replace(/\n/g, '\\n') - // @ts-ignore - .replace(/\r/g, '\\r') - // @ts-ignore - .replace(/[\x00-\x0F]/g, function (ch) { - return '\\x0' + hex(ch) - }) - // @ts-ignore - .replace(/[\x10-\x1F\x7F-\x9F]/g, function (ch) { - return '\\x' + hex(ch) - }) - ) - } - - // @ts-ignore - function describeExpectation(expectation) { - // @ts-ignore - return DESCRIBE_EXPECTATION_FNS[expectation.type](expectation) - } - - // @ts-ignore - function describeExpected(expected) { - // @ts-ignore - var descriptions = expected.map(describeExpectation) - // @ts-ignore - var i, j - - // @ts-ignore - descriptions.sort() - - // @ts-ignore - if (descriptions.length > 0) { - // @ts-ignore - for (i = 1, j = 1; 
i < descriptions.length; i++) { - // @ts-ignore - if (descriptions[i - 1] !== descriptions[i]) { - // @ts-ignore - descriptions[j] = descriptions[i] - // @ts-ignore - j++ - } - } - // @ts-ignore - descriptions.length = j - } - - // @ts-ignore - switch (descriptions.length) { - // @ts-ignore - case 1: - // @ts-ignore - return descriptions[0] - - // @ts-ignore - case 2: - // @ts-ignore - return descriptions[0] + ' or ' + descriptions[1] - - // @ts-ignore - default: - // @ts-ignore - return ( - descriptions.slice(0, -1).join(', ') + - // @ts-ignore - ', or ' + - // @ts-ignore - descriptions[descriptions.length - 1] - ) - } - } - - // @ts-ignore - function describeFound(found) { - // @ts-ignore - return found ? '"' + literalEscape(found) + '"' : 'end of input' - } - - // @ts-ignore - return ( - 'Expected ' + - describeExpected(expected) + - ' but ' + - describeFound(found) + - ' found.' - ) - } - - // @ts-ignore - function peg$parse(input, options) { - // @ts-ignore - options = options !== undefined ? 
options : {} - - // @ts-ignore - var peg$FAILED = {} - // @ts-ignore - var peg$source = options.grammarSource - - // @ts-ignore - var peg$startRuleFunctions = { - VersionRange: peg$parseVersionRange, - Or: peg$parseOr, - And: peg$parseAnd, - VersionRangeAtom: peg$parseVersionRangeAtom, - Parens: peg$parseParens, - Anchor: peg$parseAnchor, - VersionSpec: peg$parseVersionSpec, - FlavorAtom: peg$parseFlavorAtom, - Not: peg$parseNot, - Any: peg$parseAny, - None: peg$parseNone, - CmpOp: peg$parseCmpOp, - ExtendedVersion: peg$parseExtendedVersion, - EmverVersionRange: peg$parseEmverVersionRange, - EmverVersionRangeAtom: peg$parseEmverVersionRangeAtom, - EmverParens: peg$parseEmverParens, - EmverAnchor: peg$parseEmverAnchor, - EmverNot: peg$parseEmverNot, - Emver: peg$parseEmver, - Flavor: peg$parseFlavor, - FlavorString: peg$parseFlavorString, - String: peg$parseString, - Version: peg$parseVersion, - PreRelease: peg$parsePreRelease, - PreReleaseSegment: peg$parsePreReleaseSegment, - VersionNumber: peg$parseVersionNumber, - Digit: peg$parseDigit, - _: peg$parse_, - } - // @ts-ignore - var peg$startRuleFunction = peg$parseVersionRange - - // @ts-ignore - var peg$c0 = '||' - var peg$c1 = '&&' - var peg$c2 = '(' - var peg$c3 = ')' - var peg$c4 = ':' - var peg$c5 = '#' - var peg$c6 = '!' - var peg$c7 = '*' - var peg$c8 = '>=' - var peg$c9 = '<=' - var peg$c10 = '>' - var peg$c11 = '<' - var peg$c12 = '=' - var peg$c13 = '!=' - var peg$c14 = '^' - var peg$c15 = '~' - var peg$c16 = '.' 
- var peg$c17 = '-' - - var peg$r0 = /^[a-z]/ - var peg$r1 = /^[a-zA-Z]/ - var peg$r2 = /^[0-9]/ - var peg$r3 = /^[ \t\n\r]/ - - var peg$e0 = peg$literalExpectation('||', false) - var peg$e1 = peg$literalExpectation('&&', false) - var peg$e2 = peg$literalExpectation('(', false) - var peg$e3 = peg$literalExpectation(')', false) - var peg$e4 = peg$literalExpectation(':', false) - var peg$e5 = peg$literalExpectation('#', false) - var peg$e6 = peg$literalExpectation('!', false) - var peg$e7 = peg$literalExpectation('*', false) - var peg$e8 = peg$literalExpectation('>=', false) - var peg$e9 = peg$literalExpectation('<=', false) - var peg$e10 = peg$literalExpectation('>', false) - var peg$e11 = peg$literalExpectation('<', false) - var peg$e12 = peg$literalExpectation('=', false) - var peg$e13 = peg$literalExpectation('!=', false) - var peg$e14 = peg$literalExpectation('^', false) - var peg$e15 = peg$literalExpectation('~', false) - var peg$e16 = peg$literalExpectation('.', false) - var peg$e17 = peg$classExpectation([['a', 'z']], false, false) - var peg$e18 = peg$classExpectation( - [ - ['a', 'z'], - ['A', 'Z'], - ], - false, - false, - ) - var peg$e19 = peg$literalExpectation('-', false) - var peg$e20 = peg$classExpectation([['0', '9']], false, false) - var peg$e21 = peg$otherExpectation('whitespace') - var peg$e22 = peg$classExpectation([' ', '\t', '\n', '\r'], false, false) - // @ts-ignore - - var peg$f0 = function (expr) { - // @ts-ignore - return { type: 'Parens', expr } - } // @ts-ignore - - var peg$f1 = function (operator, version) { - // @ts-ignore - return { type: 'Anchor', operator, version } - } // @ts-ignore - - var peg$f2 = function (flavor, upstream, downstream) { - // @ts-ignore - return { - flavor: flavor || null, - upstream, - downstream: downstream - ? 
downstream[1] - : { number: [0], prerelease: [] }, - } - } // @ts-ignore - - var peg$f3 = function (flavor) { - // @ts-ignore - return { type: 'Flavor', flavor: flavor } - } // @ts-ignore - - var peg$f4 = function (value) { - // @ts-ignore - return { type: 'Not', value: value } - } // @ts-ignore - - var peg$f5 = function () { - // @ts-ignore - return { type: 'Any' } - } // @ts-ignore - - var peg$f6 = function () { - // @ts-ignore - return { type: 'None' } - } // @ts-ignore - - var peg$f7 = function () { - // @ts-ignore - return '>=' - } // @ts-ignore - - var peg$f8 = function () { - // @ts-ignore - return '<=' - } // @ts-ignore - - var peg$f9 = function () { - // @ts-ignore - return '>' - } // @ts-ignore - - var peg$f10 = function () { - // @ts-ignore - return '<' - } // @ts-ignore - - var peg$f11 = function () { - // @ts-ignore - return '=' - } // @ts-ignore - - var peg$f12 = function () { - // @ts-ignore - return '!=' - } // @ts-ignore - - var peg$f13 = function () { - // @ts-ignore - return '^' - } // @ts-ignore - - var peg$f14 = function () { - // @ts-ignore - return '~' - } // @ts-ignore - - var peg$f15 = function (flavor, upstream, downstream) { - // @ts-ignore - return { flavor: flavor || null, upstream, downstream } - } // @ts-ignore - - var peg$f16 = function (expr) { - // @ts-ignore - return { type: 'Parens', expr } - } // @ts-ignore - - var peg$f17 = function (operator, version) { - // @ts-ignore - return { type: 'Anchor', operator, version } - } // @ts-ignore - - var peg$f18 = function (value) { - // @ts-ignore - return { type: 'Not', value: value } - } // @ts-ignore - - var peg$f19 = function (major, minor, patch, revision) { - // @ts-ignore - return revision - } // @ts-ignore - - var peg$f20 = function (major, minor, patch, revision) { - // @ts-ignore - return { - // @ts-ignore - flavor: null, - // @ts-ignore - upstream: { - // @ts-ignore - number: [major, minor, patch], - // @ts-ignore - prerelease: [], - }, - // @ts-ignore - downstream: { - // 
@ts-ignore - number: [revision || 0], - // @ts-ignore - prerelease: [], - }, - } - } // @ts-ignore - - var peg$f21 = function (flavor) { - // @ts-ignore - return flavor - } // @ts-ignore - - var peg$f22 = function () { - // @ts-ignore - return text() - } // @ts-ignore - - var peg$f23 = function () { - // @ts-ignore - return text() - } // @ts-ignore - - var peg$f24 = function (number, prerelease) { - // @ts-ignore - return { - // @ts-ignore - number, - // @ts-ignore - prerelease: prerelease || [], - } - } // @ts-ignore - - var peg$f25 = function (first, rest) { - // @ts-ignore - return [first].concat(rest.map((r) => r[1])) - } // @ts-ignore - - var peg$f26 = function (segment) { - // @ts-ignore - return segment - } // @ts-ignore - - var peg$f27 = function (first, rest) { - // @ts-ignore - return [first].concat(rest.map((r) => r[1])) - } // @ts-ignore - - var peg$f28 = function () { - // @ts-ignore - return parseInt(text(), 10) - } - // @ts-ignore - var peg$currPos = 0 - // @ts-ignore - var peg$savedPos = 0 - // @ts-ignore - var peg$posDetailsCache = [{ line: 1, column: 1 }] - // @ts-ignore - var peg$maxFailPos = 0 - // @ts-ignore - var peg$maxFailExpected = [] - // @ts-ignore - var peg$silentFails = 0 - - // @ts-ignore - var peg$result - - // @ts-ignore - if ('startRule' in options) { - // @ts-ignore - if (!(options.startRule in peg$startRuleFunctions)) { - // @ts-ignore - throw new Error( - 'Can\'t start parsing from rule "' + options.startRule + '".', - ) - } - - // @ts-ignore - peg$startRuleFunction = peg$startRuleFunctions[options.startRule] - } - - // @ts-ignore - function text() { - // @ts-ignore - return input.substring(peg$savedPos, peg$currPos) - } - - // @ts-ignore - function offset() { - // @ts-ignore - return peg$savedPos - } - - // @ts-ignore - function range() { - // @ts-ignore - return { - // @ts-ignore - source: peg$source, - // @ts-ignore - start: peg$savedPos, - // @ts-ignore - end: peg$currPos, - } - } - - // @ts-ignore - function location() { - 
// @ts-ignore - return peg$computeLocation(peg$savedPos, peg$currPos) - } - - // @ts-ignore - function expected(description, location) { - // @ts-ignore - location = - location !== undefined - ? // @ts-ignore - location - : // @ts-ignore - peg$computeLocation(peg$savedPos, peg$currPos) - - // @ts-ignore - throw peg$buildStructuredError( - // @ts-ignore - [peg$otherExpectation(description)], - // @ts-ignore - input.substring(peg$savedPos, peg$currPos), - // @ts-ignore - location, - ) - } - - // @ts-ignore - function error(message, location) { - // @ts-ignore - location = - location !== undefined - ? // @ts-ignore - location - : // @ts-ignore - peg$computeLocation(peg$savedPos, peg$currPos) - - // @ts-ignore - throw peg$buildSimpleError(message, location) - } - - // @ts-ignore - function peg$literalExpectation(text, ignoreCase) { - // @ts-ignore - return { type: 'literal', text: text, ignoreCase: ignoreCase } - } - - // @ts-ignore - function peg$classExpectation(parts, inverted, ignoreCase) { - // @ts-ignore - return { - type: 'class', - parts: parts, - inverted: inverted, - ignoreCase: ignoreCase, - } - } - - // @ts-ignore - function peg$anyExpectation() { - // @ts-ignore - return { type: 'any' } - } - - // @ts-ignore - function peg$endExpectation() { - // @ts-ignore - return { type: 'end' } - } - - // @ts-ignore - function peg$otherExpectation(description) { - // @ts-ignore - return { type: 'other', description: description } - } - - // @ts-ignore - function peg$computePosDetails(pos) { - // @ts-ignore - var details = peg$posDetailsCache[pos] - // @ts-ignore - var p - - // @ts-ignore - if (details) { - // @ts-ignore - return details - // @ts-ignore - } else { - // @ts-ignore - p = pos - 1 - // @ts-ignore - while (!peg$posDetailsCache[p]) { - // @ts-ignore - p-- - } - - // @ts-ignore - details = peg$posDetailsCache[p] - // @ts-ignore - details = { - // @ts-ignore - line: details.line, - // @ts-ignore - column: details.column, - } - - // @ts-ignore - while (p < pos) 
{ - // @ts-ignore - if (input.charCodeAt(p) === 10) { - // @ts-ignore - details.line++ - // @ts-ignore - details.column = 1 - // @ts-ignore - } else { - // @ts-ignore - details.column++ - } - - // @ts-ignore - p++ - } - - // @ts-ignore - peg$posDetailsCache[pos] = details - - // @ts-ignore - return details - } - } - - // @ts-ignore - function peg$computeLocation(startPos, endPos, offset) { - // @ts-ignore - var startPosDetails = peg$computePosDetails(startPos) - // @ts-ignore - var endPosDetails = peg$computePosDetails(endPos) - - // @ts-ignore - var res = { - // @ts-ignore - source: peg$source, - // @ts-ignore - start: { - // @ts-ignore - offset: startPos, - // @ts-ignore - line: startPosDetails.line, - // @ts-ignore - column: startPosDetails.column, - }, - // @ts-ignore - end: { - // @ts-ignore - offset: endPos, - // @ts-ignore - line: endPosDetails.line, - // @ts-ignore - column: endPosDetails.column, - }, - } - // @ts-ignore - if (offset && peg$source && typeof peg$source.offset === 'function') { - // @ts-ignore - res.start = peg$source.offset(res.start) - // @ts-ignore - res.end = peg$source.offset(res.end) - } - // @ts-ignore - return res - } - - // @ts-ignore - function peg$fail(expected) { - // @ts-ignore - if (peg$currPos < peg$maxFailPos) { - return - } - - // @ts-ignore - if (peg$currPos > peg$maxFailPos) { - // @ts-ignore - peg$maxFailPos = peg$currPos - // @ts-ignore - peg$maxFailExpected = [] - } - - // @ts-ignore - peg$maxFailExpected.push(expected) - } - - // @ts-ignore - function peg$buildSimpleError(message, location) { - // @ts-ignore - return new peg$SyntaxError(message, null, null, location) - } - - // @ts-ignore - function peg$buildStructuredError(expected, found, location) { - // @ts-ignore - return new peg$SyntaxError( - // @ts-ignore - peg$SyntaxError.buildMessage(expected, found), - // @ts-ignore - expected, - // @ts-ignore - found, - // @ts-ignore - location, - ) - } - - // @ts-ignore - function // @ts-ignore - peg$parseVersionRange() { - 
// @ts-ignore - var s0, s1, s2, s3, s4, s5, s6, s7 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = peg$parseVersionRangeAtom() - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = [] - // @ts-ignore - s3 = peg$currPos - // @ts-ignore - s4 = peg$parse_() - // @ts-ignore - s5 = peg$currPos - // @ts-ignore - s6 = peg$parseOr() - // @ts-ignore - if (s6 === peg$FAILED) { - // @ts-ignore - s6 = peg$parseAnd() - } - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s7 = peg$parse_() - // @ts-ignore - s6 = [s6, s7] - // @ts-ignore - s5 = s6 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s5 - // @ts-ignore - s5 = peg$FAILED - } - // @ts-ignore - if (s5 === peg$FAILED) { - // @ts-ignore - s5 = null - } - // @ts-ignore - s6 = peg$parseVersionRangeAtom() - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s4 = [s4, s5, s6] - // @ts-ignore - s3 = s4 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - // @ts-ignore - while (s3 !== peg$FAILED) { - // @ts-ignore - s2.push(s3) - // @ts-ignore - s3 = peg$currPos - // @ts-ignore - s4 = peg$parse_() - // @ts-ignore - s5 = peg$currPos - // @ts-ignore - s6 = peg$parseOr() - // @ts-ignore - if (s6 === peg$FAILED) { - // @ts-ignore - s6 = peg$parseAnd() - } - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s7 = peg$parse_() - // @ts-ignore - s6 = [s6, s7] - // @ts-ignore - s5 = s6 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s5 - // @ts-ignore - s5 = peg$FAILED - } - // @ts-ignore - if (s5 === peg$FAILED) { - // @ts-ignore - s5 = null - } - // @ts-ignore - s6 = peg$parseVersionRangeAtom() - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s4 = [s4, s5, s6] - // @ts-ignore - s3 = s4 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - } - // @ts-ignore - s1 = [s1, s2] - // @ts-ignore - s0 = s1 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 
- // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseOr() { - // @ts-ignore - var s0 - - // @ts-ignore - if (input.substr(peg$currPos, 2) === peg$c0) { - // @ts-ignore - s0 = peg$c0 - // @ts-ignore - peg$currPos += 2 - // @ts-ignore - } else { - // @ts-ignore - s0 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e0) - } - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseAnd() { - // @ts-ignore - var s0 - - // @ts-ignore - if (input.substr(peg$currPos, 2) === peg$c1) { - // @ts-ignore - s0 = peg$c1 - // @ts-ignore - peg$currPos += 2 - // @ts-ignore - } else { - // @ts-ignore - s0 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e1) - } - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseVersionRangeAtom() { - // @ts-ignore - var s0 - - // @ts-ignore - s0 = peg$parseParens() - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$parseAnchor() - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$parseNot() - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$parseAny() - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$parseNone() - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$parseFlavorAtom() - } - } - } - } - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseParens() { - // @ts-ignore - var s0, s1, s2, s3, s4, s5 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 40) { - // @ts-ignore - s1 = peg$c2 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e2) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = peg$parse_() - // @ts-ignore - s3 = peg$parseVersionRange() - // 
@ts-ignore - if (s3 !== peg$FAILED) { - // @ts-ignore - s4 = peg$parse_() - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 41) { - // @ts-ignore - s5 = peg$c3 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s5 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e3) - } - } - // @ts-ignore - if (s5 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f0(s3) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseAnchor() { - // @ts-ignore - var s0, s1, s2, s3 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = peg$parseCmpOp() - // @ts-ignore - if (s1 === peg$FAILED) { - // @ts-ignore - s1 = null - } - // @ts-ignore - s2 = peg$parse_() - // @ts-ignore - s3 = peg$parseVersionSpec() - // @ts-ignore - if (s3 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f1(s1, s3) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseVersionSpec() { - // @ts-ignore - var s0, s1, s2, s3, s4, s5 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = peg$parseFlavor() - // @ts-ignore - if (s1 === peg$FAILED) { - // @ts-ignore - s1 = null - } - // @ts-ignore - s2 = peg$parseVersion() - // @ts-ignore - if (s2 !== peg$FAILED) { - // @ts-ignore - s3 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 58) { - // @ts-ignore - s4 = peg$c4 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s4 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - 
peg$fail(peg$e4) - } - } - // @ts-ignore - if (s4 !== peg$FAILED) { - // @ts-ignore - s5 = peg$parseVersion() - // @ts-ignore - if (s5 !== peg$FAILED) { - // @ts-ignore - s4 = [s4, s5] - // @ts-ignore - s3 = s4 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - // @ts-ignore - if (s3 === peg$FAILED) { - // @ts-ignore - s3 = null - } - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f2(s1, s2, s3) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseFlavorAtom() { - // @ts-ignore - var s0, s1, s2 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 35) { - // @ts-ignore - s1 = peg$c5 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e5) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = peg$parseFlavorString() - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f3(s2) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseNot() { - // @ts-ignore - var s0, s1, s2, s3 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 33) { - // @ts-ignore - s1 = peg$c6 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e6) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = peg$parse_() - // @ts-ignore - s3 = peg$parseVersionRangeAtom() - // @ts-ignore - if (s3 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // 
@ts-ignore - s0 = peg$f4(s3) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseAny() { - // @ts-ignore - var s0, s1 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 42) { - // @ts-ignore - s1 = peg$c7 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e7) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f5() - } - // @ts-ignore - s0 = s1 - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseNone() { - // @ts-ignore - var s0, s1 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 33) { - // @ts-ignore - s1 = peg$c6 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e6) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f6() - } - // @ts-ignore - s0 = s1 - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseCmpOp() { - // @ts-ignore - var s0, s1 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.substr(peg$currPos, 2) === peg$c8) { - // @ts-ignore - s1 = peg$c8 - // @ts-ignore - peg$currPos += 2 - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e8) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f7() - } - // @ts-ignore - s0 = s1 - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = 
peg$currPos - // @ts-ignore - if (input.substr(peg$currPos, 2) === peg$c9) { - // @ts-ignore - s1 = peg$c9 - // @ts-ignore - peg$currPos += 2 - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e9) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f8() - } - // @ts-ignore - s0 = s1 - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 62) { - // @ts-ignore - s1 = peg$c10 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e10) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f9() - } - // @ts-ignore - s0 = s1 - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 60) { - // @ts-ignore - s1 = peg$c11 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e11) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f10() - } - // @ts-ignore - s0 = s1 - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 61) { - // @ts-ignore - s1 = peg$c12 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e12) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f11() - } - // @ts-ignore - s0 = s1 - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.substr(peg$currPos, 2) === 
peg$c13) { - // @ts-ignore - s1 = peg$c13 - // @ts-ignore - peg$currPos += 2 - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e13) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f12() - } - // @ts-ignore - s0 = s1 - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 94) { - // @ts-ignore - s1 = peg$c14 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e14) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f13() - } - // @ts-ignore - s0 = s1 - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 126) { - // @ts-ignore - s1 = peg$c15 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e15) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f14() - } - // @ts-ignore - s0 = s1 - } - } - } - } - } - } - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseExtendedVersion() { - // @ts-ignore - var s0, s1, s2, s3, s4 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = peg$parseFlavor() - // @ts-ignore - if (s1 === peg$FAILED) { - // @ts-ignore - s1 = null - } - // @ts-ignore - s2 = peg$parseVersion() - // @ts-ignore - if (s2 !== peg$FAILED) { - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 58) { - // @ts-ignore - s3 = peg$c4 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s3 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e4) - } - } - // 
@ts-ignore - if (s3 !== peg$FAILED) { - // @ts-ignore - s4 = peg$parseVersion() - // @ts-ignore - if (s4 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f15(s1, s2, s4) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseEmverVersionRange() { - // @ts-ignore - var s0, s1, s2, s3, s4, s5, s6, s7 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = peg$parseEmverVersionRangeAtom() - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = [] - // @ts-ignore - s3 = peg$currPos - // @ts-ignore - s4 = peg$parse_() - // @ts-ignore - s5 = peg$currPos - // @ts-ignore - s6 = peg$parseOr() - // @ts-ignore - if (s6 === peg$FAILED) { - // @ts-ignore - s6 = peg$parseAnd() - } - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s7 = peg$parse_() - // @ts-ignore - s6 = [s6, s7] - // @ts-ignore - s5 = s6 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s5 - // @ts-ignore - s5 = peg$FAILED - } - // @ts-ignore - if (s5 === peg$FAILED) { - // @ts-ignore - s5 = null - } - // @ts-ignore - s6 = peg$parseEmverVersionRangeAtom() - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s4 = [s4, s5, s6] - // @ts-ignore - s3 = s4 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - // @ts-ignore - while (s3 !== peg$FAILED) { - // @ts-ignore - s2.push(s3) - // @ts-ignore - s3 = peg$currPos - // @ts-ignore - s4 = peg$parse_() - // @ts-ignore - s5 = peg$currPos - // @ts-ignore - s6 = peg$parseOr() - // @ts-ignore - if (s6 === peg$FAILED) { - // @ts-ignore - s6 = peg$parseAnd() - } - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s7 = 
peg$parse_() - // @ts-ignore - s6 = [s6, s7] - // @ts-ignore - s5 = s6 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s5 - // @ts-ignore - s5 = peg$FAILED - } - // @ts-ignore - if (s5 === peg$FAILED) { - // @ts-ignore - s5 = null - } - // @ts-ignore - s6 = peg$parseEmverVersionRangeAtom() - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s4 = [s4, s5, s6] - // @ts-ignore - s3 = s4 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - } - // @ts-ignore - s1 = [s1, s2] - // @ts-ignore - s0 = s1 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseEmverVersionRangeAtom() { - // @ts-ignore - var s0 - - // @ts-ignore - s0 = peg$parseEmverParens() - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$parseEmverAnchor() - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$parseEmverNot() - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$parseAny() - // @ts-ignore - if (s0 === peg$FAILED) { - // @ts-ignore - s0 = peg$parseNone() - } - } - } - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseEmverParens() { - // @ts-ignore - var s0, s1, s2, s3, s4, s5 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 40) { - // @ts-ignore - s1 = peg$c2 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e2) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = peg$parse_() - // @ts-ignore - s3 = peg$parseEmverVersionRange() - // @ts-ignore - if (s3 !== peg$FAILED) { - // @ts-ignore - s4 = peg$parse_() - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 41) { - // @ts-ignore - s5 = peg$c3 - // @ts-ignore - peg$currPos++ - // 
@ts-ignore - } else { - // @ts-ignore - s5 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e3) - } - } - // @ts-ignore - if (s5 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f16(s3) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseEmverAnchor() { - // @ts-ignore - var s0, s1, s2, s3 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = peg$parseCmpOp() - // @ts-ignore - if (s1 === peg$FAILED) { - // @ts-ignore - s1 = null - } - // @ts-ignore - s2 = peg$parse_() - // @ts-ignore - s3 = peg$parseEmver() - // @ts-ignore - if (s3 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f17(s1, s3) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseEmverNot() { - // @ts-ignore - var s0, s1, s2, s3 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 33) { - // @ts-ignore - s1 = peg$c6 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e6) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = peg$parse_() - // @ts-ignore - s3 = peg$parseEmverVersionRangeAtom() - // @ts-ignore - if (s3 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f18(s3) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = 
peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseEmver() { - // @ts-ignore - var s0, s1, s2, s3, s4, s5, s6, s7, s8 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = peg$parseDigit() - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 46) { - // @ts-ignore - s2 = peg$c16 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s2 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e16) - } - } - // @ts-ignore - if (s2 !== peg$FAILED) { - // @ts-ignore - s3 = peg$parseDigit() - // @ts-ignore - if (s3 !== peg$FAILED) { - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 46) { - // @ts-ignore - s4 = peg$c16 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s4 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e16) - } - } - // @ts-ignore - if (s4 !== peg$FAILED) { - // @ts-ignore - s5 = peg$parseDigit() - // @ts-ignore - if (s5 !== peg$FAILED) { - // @ts-ignore - s6 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 46) { - // @ts-ignore - s7 = peg$c16 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s7 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e16) - } - } - // @ts-ignore - if (s7 !== peg$FAILED) { - // @ts-ignore - s8 = peg$parseDigit() - // @ts-ignore - if (s8 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s6 - // @ts-ignore - s6 = peg$f19(s1, s3, s5, s8) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s6 - // @ts-ignore - s6 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s6 - // @ts-ignore - s6 = peg$FAILED - } - // @ts-ignore - if (s6 === peg$FAILED) { - // @ts-ignore - s6 = null - } - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f20(s1, s3, s5, s6) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // 
@ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseFlavor() { - // @ts-ignore - var s0, s1, s2, s3 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 35) { - // @ts-ignore - s1 = peg$c5 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e5) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = peg$parseFlavorString() - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 58) { - // @ts-ignore - s3 = peg$c4 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s3 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e4) - } - } - // @ts-ignore - if (s3 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f21(s2) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseFlavorString() { - // @ts-ignore - var s0, s1, s2 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = [] - // @ts-ignore - if (peg$r0.test(input.charAt(peg$currPos))) { - // @ts-ignore - s2 = input.charAt(peg$currPos) - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s2 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - 
peg$fail(peg$e17) - } - } - // @ts-ignore - while (s2 !== peg$FAILED) { - // @ts-ignore - s1.push(s2) - // @ts-ignore - if (peg$r0.test(input.charAt(peg$currPos))) { - // @ts-ignore - s2 = input.charAt(peg$currPos) - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s2 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e17) - } - } - } - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f22() - // @ts-ignore - s0 = s1 - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseString() { - // @ts-ignore - var s0, s1, s2 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = [] - // @ts-ignore - if (peg$r1.test(input.charAt(peg$currPos))) { - // @ts-ignore - s2 = input.charAt(peg$currPos) - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s2 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e18) - } - } - // @ts-ignore - if (s2 !== peg$FAILED) { - // @ts-ignore - while (s2 !== peg$FAILED) { - // @ts-ignore - s1.push(s2) - // @ts-ignore - if (peg$r1.test(input.charAt(peg$currPos))) { - // @ts-ignore - s2 = input.charAt(peg$currPos) - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s2 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e18) - } - } - } - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f23() - } - // @ts-ignore - s0 = s1 - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseVersion() { - // @ts-ignore - var s0, s1, s2 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = peg$parseVersionNumber() - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = peg$parsePreRelease() - // @ts-ignore - if (s2 === peg$FAILED) { - // @ts-ignore - s2 = null - } - // @ts-ignore - peg$savedPos = s0 - // 
@ts-ignore - s0 = peg$f24(s1, s2) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parsePreRelease() { - // @ts-ignore - var s0, s1, s2, s3, s4, s5, s6 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 45) { - // @ts-ignore - s1 = peg$c17 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e19) - } - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = peg$parsePreReleaseSegment() - // @ts-ignore - if (s2 !== peg$FAILED) { - // @ts-ignore - s3 = [] - // @ts-ignore - s4 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 46) { - // @ts-ignore - s5 = peg$c16 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s5 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e16) - } - } - // @ts-ignore - if (s5 !== peg$FAILED) { - // @ts-ignore - s6 = peg$parsePreReleaseSegment() - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s5 = [s5, s6] - // @ts-ignore - s4 = s5 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s4 - // @ts-ignore - s4 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s4 - // @ts-ignore - s4 = peg$FAILED - } - // @ts-ignore - while (s4 !== peg$FAILED) { - // @ts-ignore - s3.push(s4) - // @ts-ignore - s4 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 46) { - // @ts-ignore - s5 = peg$c16 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s5 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e16) - } - } - // @ts-ignore - if (s5 !== peg$FAILED) { - // @ts-ignore - s6 = peg$parsePreReleaseSegment() - // @ts-ignore - if (s6 !== peg$FAILED) { - // @ts-ignore - s5 = [s5, s6] - // @ts-ignore - 
s4 = s5 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s4 - // @ts-ignore - s4 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s4 - // @ts-ignore - s4 = peg$FAILED - } - } - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f25(s2, s3) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parsePreReleaseSegment() { - // @ts-ignore - var s0, s1, s2 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 46) { - // @ts-ignore - s1 = peg$c16 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e16) - } - } - // @ts-ignore - if (s1 === peg$FAILED) { - // @ts-ignore - s1 = null - } - // @ts-ignore - s2 = peg$parseDigit() - // @ts-ignore - if (s2 === peg$FAILED) { - // @ts-ignore - s2 = peg$parseString() - } - // @ts-ignore - if (s2 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f26(s2) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseVersionNumber() { - // @ts-ignore - var s0, s1, s2, s3, s4, s5 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = peg$parseDigit() - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - s2 = [] - // @ts-ignore - s3 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 46) { - // @ts-ignore - s4 = peg$c16 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s4 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e16) - } - } - // @ts-ignore - if (s4 !== peg$FAILED) { - // @ts-ignore - 
s5 = peg$parseDigit() - // @ts-ignore - if (s5 !== peg$FAILED) { - // @ts-ignore - s4 = [s4, s5] - // @ts-ignore - s3 = s4 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - // @ts-ignore - while (s3 !== peg$FAILED) { - // @ts-ignore - s2.push(s3) - // @ts-ignore - s3 = peg$currPos - // @ts-ignore - if (input.charCodeAt(peg$currPos) === 46) { - // @ts-ignore - s4 = peg$c16 - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s4 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e16) - } - } - // @ts-ignore - if (s4 !== peg$FAILED) { - // @ts-ignore - s5 = peg$parseDigit() - // @ts-ignore - if (s5 !== peg$FAILED) { - // @ts-ignore - s4 = [s4, s5] - // @ts-ignore - s3 = s4 - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s3 - // @ts-ignore - s3 = peg$FAILED - } - } - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s0 = peg$f27(s1, s2) - // @ts-ignore - } else { - // @ts-ignore - peg$currPos = s0 - // @ts-ignore - s0 = peg$FAILED - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parseDigit() { - // @ts-ignore - var s0, s1, s2 - - // @ts-ignore - s0 = peg$currPos - // @ts-ignore - s1 = [] - // @ts-ignore - if (peg$r2.test(input.charAt(peg$currPos))) { - // @ts-ignore - s2 = input.charAt(peg$currPos) - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s2 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e20) - } - } - // @ts-ignore - if (s2 !== peg$FAILED) { - // @ts-ignore - while (s2 !== peg$FAILED) { - // @ts-ignore - s1.push(s2) - // @ts-ignore - if (peg$r2.test(input.charAt(peg$currPos))) { - // @ts-ignore - s2 = input.charAt(peg$currPos) - // @ts-ignore - peg$currPos++ - 
// @ts-ignore - } else { - // @ts-ignore - s2 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e20) - } - } - } - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - } - // @ts-ignore - if (s1 !== peg$FAILED) { - // @ts-ignore - peg$savedPos = s0 - // @ts-ignore - s1 = peg$f28() - } - // @ts-ignore - s0 = s1 - - // @ts-ignore - return s0 - } - - // @ts-ignore - function // @ts-ignore - peg$parse_() { - // @ts-ignore - var s0, s1 - - // @ts-ignore - peg$silentFails++ - // @ts-ignore - s0 = [] - // @ts-ignore - if (peg$r3.test(input.charAt(peg$currPos))) { - // @ts-ignore - s1 = input.charAt(peg$currPos) - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e22) - } - } - // @ts-ignore - while (s1 !== peg$FAILED) { - // @ts-ignore - s0.push(s1) - // @ts-ignore - if (peg$r3.test(input.charAt(peg$currPos))) { - // @ts-ignore - s1 = input.charAt(peg$currPos) - // @ts-ignore - peg$currPos++ - // @ts-ignore - } else { - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e22) - } - } - } - // @ts-ignore - peg$silentFails-- - // @ts-ignore - s1 = peg$FAILED - // @ts-ignore - if (peg$silentFails === 0) { - peg$fail(peg$e21) - } - - // @ts-ignore - return s0 - } - - // @ts-ignore - peg$result = peg$startRuleFunction() - - // @ts-ignore - if (peg$result !== peg$FAILED && peg$currPos === input.length) { - // @ts-ignore - return peg$result - // @ts-ignore - } else { - // @ts-ignore - if (peg$result !== peg$FAILED && peg$currPos < input.length) { - // @ts-ignore - peg$fail(peg$endExpectation()) - } - - // @ts-ignore - throw peg$buildStructuredError( - // @ts-ignore - peg$maxFailExpected, - // @ts-ignore - peg$maxFailPos < input.length ? input.charAt(peg$maxFailPos) : null, - // @ts-ignore - peg$maxFailPos < input.length - ? 
// @ts-ignore - peg$computeLocation(peg$maxFailPos, peg$maxFailPos + 1) - : // @ts-ignore - peg$computeLocation(peg$maxFailPos, peg$maxFailPos), - ) +const peggyParser: {parse: any, SyntaxError: any, DefaultTracer?: any} = // Generated by Peggy 3.0.2. +// +// https://peggyjs.org/ +// @ts-ignore +(function() { +// @ts-ignore + "use strict"; + +// @ts-ignore +function peg$subclass(child, parent) { +// @ts-ignore + function C() { this.constructor = child; } +// @ts-ignore + C.prototype = parent.prototype; +// @ts-ignore + child.prototype = new C(); +} + +// @ts-ignore +function peg$SyntaxError(message, expected, found, location) { +// @ts-ignore + var self = Error.call(this, message); + // istanbul ignore next Check is a necessary evil to support older environments +// @ts-ignore + if (Object.setPrototypeOf) { +// @ts-ignore + Object.setPrototypeOf(self, peg$SyntaxError.prototype); + } +// @ts-ignore + self.expected = expected; +// @ts-ignore + self.found = found; +// @ts-ignore + self.location = location; +// @ts-ignore + self.name = "SyntaxError"; +// @ts-ignore + return self; +} + +// @ts-ignore +peg$subclass(peg$SyntaxError, Error); + +// @ts-ignore +function peg$padEnd(str, targetLength, padString) { +// @ts-ignore + padString = padString || " "; +// @ts-ignore + if (str.length > targetLength) { return str; } +// @ts-ignore + targetLength -= str.length; +// @ts-ignore + padString += padString.repeat(targetLength); +// @ts-ignore + return str + padString.slice(0, targetLength); +} + +// @ts-ignore +peg$SyntaxError.prototype.format = function(sources) { +// @ts-ignore + var str = "Error: " + this.message; +// @ts-ignore + if (this.location) { +// @ts-ignore + var src = null; +// @ts-ignore + var k; +// @ts-ignore + for (k = 0; k < sources.length; k++) { +// @ts-ignore + if (sources[k].source === this.location.source) { +// @ts-ignore + src = sources[k].text.split(/\r\n|\n|\r/g); +// @ts-ignore + break; } } +// @ts-ignore + var s = this.location.start; +// 
@ts-ignore + var offset_s = (this.location.source && (typeof this.location.source.offset === "function")) +// @ts-ignore + ? this.location.source.offset(s) +// @ts-ignore + : s; +// @ts-ignore + var loc = this.location.source + ":" + offset_s.line + ":" + offset_s.column; +// @ts-ignore + if (src) { +// @ts-ignore + var e = this.location.end; +// @ts-ignore + var filler = peg$padEnd("", offset_s.line.toString().length, ' '); +// @ts-ignore + var line = src[s.line - 1]; +// @ts-ignore + var last = s.line === e.line ? e.column : line.length + 1; +// @ts-ignore + var hatLen = (last - s.column) || 1; +// @ts-ignore + str += "\n --> " + loc + "\n" +// @ts-ignore + + filler + " |\n" +// @ts-ignore + + offset_s.line + " | " + line + "\n" +// @ts-ignore + + filler + " | " + peg$padEnd("", s.column - 1, ' ') +// @ts-ignore + + peg$padEnd("", hatLen, "^"); +// @ts-ignore + } else { +// @ts-ignore + str += "\n at " + loc; + } + } +// @ts-ignore + return str; +}; - // @ts-ignore +// @ts-ignore +peg$SyntaxError.buildMessage = function(expected, found) { +// @ts-ignore + var DESCRIBE_EXPECTATION_FNS = { +// @ts-ignore + literal: function(expectation) { +// @ts-ignore + return "\"" + literalEscape(expectation.text) + "\""; + }, + +// @ts-ignore + class: function(expectation) { +// @ts-ignore + var escapedParts = expectation.parts.map(function(part) { +// @ts-ignore + return Array.isArray(part) +// @ts-ignore + ? classEscape(part[0]) + "-" + classEscape(part[1]) +// @ts-ignore + : classEscape(part); + }); + +// @ts-ignore + return "[" + (expectation.inverted ? 
"^" : "") + escapedParts.join("") + "]"; + }, + +// @ts-ignore + any: function() { +// @ts-ignore + return "any character"; + }, + +// @ts-ignore + end: function() { +// @ts-ignore + return "end of input"; + }, + +// @ts-ignore + other: function(expectation) { +// @ts-ignore + return expectation.description; + } + }; + +// @ts-ignore + function hex(ch) { +// @ts-ignore + return ch.charCodeAt(0).toString(16).toUpperCase(); + } + +// @ts-ignore + function literalEscape(s) { +// @ts-ignore + return s +// @ts-ignore + .replace(/\\/g, "\\\\") +// @ts-ignore + .replace(/"/g, "\\\"") +// @ts-ignore + .replace(/\0/g, "\\0") +// @ts-ignore + .replace(/\t/g, "\\t") +// @ts-ignore + .replace(/\n/g, "\\n") +// @ts-ignore + .replace(/\r/g, "\\r") +// @ts-ignore + .replace(/[\x00-\x0F]/g, function(ch) { return "\\x0" + hex(ch); }) +// @ts-ignore + .replace(/[\x10-\x1F\x7F-\x9F]/g, function(ch) { return "\\x" + hex(ch); }); + } + +// @ts-ignore + function classEscape(s) { +// @ts-ignore + return s +// @ts-ignore + .replace(/\\/g, "\\\\") +// @ts-ignore + .replace(/\]/g, "\\]") +// @ts-ignore + .replace(/\^/g, "\\^") +// @ts-ignore + .replace(/-/g, "\\-") +// @ts-ignore + .replace(/\0/g, "\\0") +// @ts-ignore + .replace(/\t/g, "\\t") +// @ts-ignore + .replace(/\n/g, "\\n") +// @ts-ignore + .replace(/\r/g, "\\r") +// @ts-ignore + .replace(/[\x00-\x0F]/g, function(ch) { return "\\x0" + hex(ch); }) +// @ts-ignore + .replace(/[\x10-\x1F\x7F-\x9F]/g, function(ch) { return "\\x" + hex(ch); }); + } + +// @ts-ignore + function describeExpectation(expectation) { +// @ts-ignore + return DESCRIBE_EXPECTATION_FNS[expectation.type](expectation); + } + +// @ts-ignore + function describeExpected(expected) { +// @ts-ignore + var descriptions = expected.map(describeExpectation); +// @ts-ignore + var i, j; + +// @ts-ignore + descriptions.sort(); + +// @ts-ignore + if (descriptions.length > 0) { +// @ts-ignore + for (i = 1, j = 1; i < descriptions.length; i++) { +// @ts-ignore + if (descriptions[i - 
1] !== descriptions[i]) { +// @ts-ignore + descriptions[j] = descriptions[i]; +// @ts-ignore + j++; + } + } +// @ts-ignore + descriptions.length = j; + } + +// @ts-ignore + switch (descriptions.length) { +// @ts-ignore + case 1: +// @ts-ignore + return descriptions[0]; + +// @ts-ignore + case 2: +// @ts-ignore + return descriptions[0] + " or " + descriptions[1]; + +// @ts-ignore + default: +// @ts-ignore + return descriptions.slice(0, -1).join(", ") +// @ts-ignore + + ", or " +// @ts-ignore + + descriptions[descriptions.length - 1]; + } + } + +// @ts-ignore + function describeFound(found) { +// @ts-ignore + return found ? "\"" + literalEscape(found) + "\"" : "end of input"; + } + +// @ts-ignore + return "Expected " + describeExpected(expected) + " but " + describeFound(found) + " found."; +}; + +// @ts-ignore +function peg$parse(input, options) { +// @ts-ignore + options = options !== undefined ? options : {}; + +// @ts-ignore + var peg$FAILED = {}; +// @ts-ignore + var peg$source = options.grammarSource; + +// @ts-ignore + var peg$startRuleFunctions = { VersionRange: peg$parseVersionRange, Or: peg$parseOr, And: peg$parseAnd, VersionRangeAtom: peg$parseVersionRangeAtom, Parens: peg$parseParens, Anchor: peg$parseAnchor, VersionSpec: peg$parseVersionSpec, FlavorAtom: peg$parseFlavorAtom, Not: peg$parseNot, Any: peg$parseAny, None: peg$parseNone, CmpOp: peg$parseCmpOp, ExtendedVersion: peg$parseExtendedVersion, EmverVersionRange: peg$parseEmverVersionRange, EmverVersionRangeAtom: peg$parseEmverVersionRangeAtom, EmverParens: peg$parseEmverParens, EmverAnchor: peg$parseEmverAnchor, EmverNot: peg$parseEmverNot, Emver: peg$parseEmver, Flavor: peg$parseFlavor, FlavorString: peg$parseFlavorString, String: peg$parseString, Version: peg$parseVersion, PreRelease: peg$parsePreRelease, PreReleaseSegment: peg$parsePreReleaseSegment, VersionNumber: peg$parseVersionNumber, Digit: peg$parseDigit, _: peg$parse_ }; +// @ts-ignore + var peg$startRuleFunction = peg$parseVersionRange; + 
+// @ts-ignore + var peg$c0 = "||"; + var peg$c1 = "&&"; + var peg$c2 = "("; + var peg$c3 = ")"; + var peg$c4 = ":"; + var peg$c5 = "#"; + var peg$c6 = "!"; + var peg$c7 = "*"; + var peg$c8 = ">="; + var peg$c9 = "<="; + var peg$c10 = ">"; + var peg$c11 = "<"; + var peg$c12 = "="; + var peg$c13 = "!="; + var peg$c14 = "^"; + var peg$c15 = "~"; + var peg$c16 = "."; + var peg$c17 = "-"; + + var peg$r0 = /^[a-z]/; + var peg$r1 = /^[a-zA-Z]/; + var peg$r2 = /^[0-9]/; + var peg$r3 = /^[ \t\n\r]/; + + var peg$e0 = peg$literalExpectation("||", false); + var peg$e1 = peg$literalExpectation("&&", false); + var peg$e2 = peg$literalExpectation("(", false); + var peg$e3 = peg$literalExpectation(")", false); + var peg$e4 = peg$literalExpectation(":", false); + var peg$e5 = peg$literalExpectation("#", false); + var peg$e6 = peg$literalExpectation("!", false); + var peg$e7 = peg$literalExpectation("*", false); + var peg$e8 = peg$literalExpectation(">=", false); + var peg$e9 = peg$literalExpectation("<=", false); + var peg$e10 = peg$literalExpectation(">", false); + var peg$e11 = peg$literalExpectation("<", false); + var peg$e12 = peg$literalExpectation("=", false); + var peg$e13 = peg$literalExpectation("!=", false); + var peg$e14 = peg$literalExpectation("^", false); + var peg$e15 = peg$literalExpectation("~", false); + var peg$e16 = peg$literalExpectation(".", false); + var peg$e17 = peg$classExpectation([["a", "z"]], false, false); + var peg$e18 = peg$classExpectation([["a", "z"], ["A", "Z"]], false, false); + var peg$e19 = peg$literalExpectation("-", false); + var peg$e20 = peg$classExpectation([["0", "9"]], false, false); + var peg$e21 = peg$otherExpectation("whitespace"); + var peg$e22 = peg$classExpectation([" ", "\t", "\n", "\r"], false, false); +// @ts-ignore + + var peg$f0 = function(expr) {// @ts-ignore + return { type: "Parens", expr } };// @ts-ignore + + var peg$f1 = function(operator, version) {// @ts-ignore + return { type: "Anchor", operator, version } };// 
@ts-ignore + + var peg$f2 = function(flavor, upstream, downstream) {// @ts-ignore + return { flavor: flavor || null, upstream, downstream: downstream ? downstream[1] : { number: [0], prerelease: [] } } };// @ts-ignore + + var peg$f3 = function(flavor) {// @ts-ignore + return { type: "Flavor", flavor: flavor } };// @ts-ignore + + var peg$f4 = function(value) {// @ts-ignore + return { type: "Not", value: value }};// @ts-ignore + + var peg$f5 = function() {// @ts-ignore + return { type: "Any" } };// @ts-ignore + + var peg$f6 = function() {// @ts-ignore + return { type: "None" } };// @ts-ignore + + var peg$f7 = function() {// @ts-ignore + return ">="; };// @ts-ignore + + var peg$f8 = function() {// @ts-ignore + return "<="; };// @ts-ignore + + var peg$f9 = function() {// @ts-ignore + return ">"; };// @ts-ignore + + var peg$f10 = function() {// @ts-ignore + return "<"; };// @ts-ignore + + var peg$f11 = function() {// @ts-ignore + return "="; };// @ts-ignore + + var peg$f12 = function() {// @ts-ignore + return "!="; };// @ts-ignore + + var peg$f13 = function() {// @ts-ignore + return "^"; };// @ts-ignore + + var peg$f14 = function() {// @ts-ignore + return "~"; };// @ts-ignore + + var peg$f15 = function(flavor, upstream, downstream) { +// @ts-ignore + return { flavor: flavor || null, upstream, downstream } + };// @ts-ignore + + var peg$f16 = function(expr) {// @ts-ignore + return { type: "Parens", expr } };// @ts-ignore + + var peg$f17 = function(operator, version) {// @ts-ignore + return { type: "Anchor", operator, version } };// @ts-ignore + + var peg$f18 = function(value) {// @ts-ignore + return { type: "Not", value: value }};// @ts-ignore + + var peg$f19 = function(major, minor, patch, revision) {// @ts-ignore + return revision };// @ts-ignore + + var peg$f20 = function(major, minor, patch, revision) { +// @ts-ignore return { - SyntaxError: peg$SyntaxError, - parse: peg$parse, +// @ts-ignore + flavor: null, +// @ts-ignore + upstream: { +// @ts-ignore + number: 
[major, minor, patch], +// @ts-ignore + prerelease: [], + }, +// @ts-ignore + downstream: { +// @ts-ignore + number: [revision || 0], +// @ts-ignore + prerelease: [], + }, } - })() + };// @ts-ignore + + var peg$f21 = function(flavor) {// @ts-ignore + return flavor };// @ts-ignore + + var peg$f22 = function() {// @ts-ignore + return text() };// @ts-ignore + + var peg$f23 = function() {// @ts-ignore + return text(); };// @ts-ignore + + var peg$f24 = function(number, prerelease) { +// @ts-ignore + return { +// @ts-ignore + number, +// @ts-ignore + prerelease: prerelease || [] + }; + };// @ts-ignore + + var peg$f25 = function(first, rest) { +// @ts-ignore + return [first].concat(rest.map(r => r[1])); + };// @ts-ignore + + var peg$f26 = function(segment) { +// @ts-ignore + return segment; + };// @ts-ignore + + var peg$f27 = function(first, rest) { +// @ts-ignore + return [first].concat(rest.map(r => r[1])); + };// @ts-ignore + + var peg$f28 = function() {// @ts-ignore + return parseInt(text(), 10); }; +// @ts-ignore + var peg$currPos = 0; +// @ts-ignore + var peg$savedPos = 0; +// @ts-ignore + var peg$posDetailsCache = [{ line: 1, column: 1 }]; +// @ts-ignore + var peg$maxFailPos = 0; +// @ts-ignore + var peg$maxFailExpected = []; +// @ts-ignore + var peg$silentFails = 0; + +// @ts-ignore + var peg$result; + +// @ts-ignore + if ("startRule" in options) { +// @ts-ignore + if (!(options.startRule in peg$startRuleFunctions)) { +// @ts-ignore + throw new Error("Can't start parsing from rule \"" + options.startRule + "\"."); + } + +// @ts-ignore + peg$startRuleFunction = peg$startRuleFunctions[options.startRule]; + } + +// @ts-ignore + function text() { +// @ts-ignore + return input.substring(peg$savedPos, peg$currPos); + } + +// @ts-ignore + function offset() { +// @ts-ignore + return peg$savedPos; + } + +// @ts-ignore + function range() { +// @ts-ignore + return { +// @ts-ignore + source: peg$source, +// @ts-ignore + start: peg$savedPos, +// @ts-ignore + end: peg$currPos + 
}; + } + +// @ts-ignore + function location() { +// @ts-ignore + return peg$computeLocation(peg$savedPos, peg$currPos); + } + +// @ts-ignore + function expected(description, location) { +// @ts-ignore + location = location !== undefined +// @ts-ignore + ? location +// @ts-ignore + : peg$computeLocation(peg$savedPos, peg$currPos); + +// @ts-ignore + throw peg$buildStructuredError( +// @ts-ignore + [peg$otherExpectation(description)], +// @ts-ignore + input.substring(peg$savedPos, peg$currPos), +// @ts-ignore + location + ); + } + +// @ts-ignore + function error(message, location) { +// @ts-ignore + location = location !== undefined +// @ts-ignore + ? location +// @ts-ignore + : peg$computeLocation(peg$savedPos, peg$currPos); + +// @ts-ignore + throw peg$buildSimpleError(message, location); + } + +// @ts-ignore + function peg$literalExpectation(text, ignoreCase) { +// @ts-ignore + return { type: "literal", text: text, ignoreCase: ignoreCase }; + } + +// @ts-ignore + function peg$classExpectation(parts, inverted, ignoreCase) { +// @ts-ignore + return { type: "class", parts: parts, inverted: inverted, ignoreCase: ignoreCase }; + } + +// @ts-ignore + function peg$anyExpectation() { +// @ts-ignore + return { type: "any" }; + } + +// @ts-ignore + function peg$endExpectation() { +// @ts-ignore + return { type: "end" }; + } + +// @ts-ignore + function peg$otherExpectation(description) { +// @ts-ignore + return { type: "other", description: description }; + } + +// @ts-ignore + function peg$computePosDetails(pos) { +// @ts-ignore + var details = peg$posDetailsCache[pos]; +// @ts-ignore + var p; + +// @ts-ignore + if (details) { +// @ts-ignore + return details; +// @ts-ignore + } else { +// @ts-ignore + p = pos - 1; +// @ts-ignore + while (!peg$posDetailsCache[p]) { +// @ts-ignore + p--; + } + +// @ts-ignore + details = peg$posDetailsCache[p]; +// @ts-ignore + details = { +// @ts-ignore + line: details.line, +// @ts-ignore + column: details.column + }; + +// @ts-ignore + 
while (p < pos) { +// @ts-ignore + if (input.charCodeAt(p) === 10) { +// @ts-ignore + details.line++; +// @ts-ignore + details.column = 1; +// @ts-ignore + } else { +// @ts-ignore + details.column++; + } + +// @ts-ignore + p++; + } + +// @ts-ignore + peg$posDetailsCache[pos] = details; + +// @ts-ignore + return details; + } + } + +// @ts-ignore + function peg$computeLocation(startPos, endPos, offset) { +// @ts-ignore + var startPosDetails = peg$computePosDetails(startPos); +// @ts-ignore + var endPosDetails = peg$computePosDetails(endPos); + +// @ts-ignore + var res = { +// @ts-ignore + source: peg$source, +// @ts-ignore + start: { +// @ts-ignore + offset: startPos, +// @ts-ignore + line: startPosDetails.line, +// @ts-ignore + column: startPosDetails.column + }, +// @ts-ignore + end: { +// @ts-ignore + offset: endPos, +// @ts-ignore + line: endPosDetails.line, +// @ts-ignore + column: endPosDetails.column + } + }; +// @ts-ignore + if (offset && peg$source && (typeof peg$source.offset === "function")) { +// @ts-ignore + res.start = peg$source.offset(res.start); +// @ts-ignore + res.end = peg$source.offset(res.end); + } +// @ts-ignore + return res; + } + +// @ts-ignore + function peg$fail(expected) { +// @ts-ignore + if (peg$currPos < peg$maxFailPos) { return; } + +// @ts-ignore + if (peg$currPos > peg$maxFailPos) { +// @ts-ignore + peg$maxFailPos = peg$currPos; +// @ts-ignore + peg$maxFailExpected = []; + } + +// @ts-ignore + peg$maxFailExpected.push(expected); + } + +// @ts-ignore + function peg$buildSimpleError(message, location) { +// @ts-ignore + return new peg$SyntaxError(message, null, null, location); + } + +// @ts-ignore + function peg$buildStructuredError(expected, found, location) { +// @ts-ignore + return new peg$SyntaxError( +// @ts-ignore + peg$SyntaxError.buildMessage(expected, found), +// @ts-ignore + expected, +// @ts-ignore + found, +// @ts-ignore + location + ); + } + +// @ts-ignore + function // @ts-ignore +peg$parseVersionRange() { +// @ts-ignore 
+ var s0, s1, s2, s3, s4, s5, s6, s7; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = peg$parseVersionRangeAtom(); +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = []; +// @ts-ignore + s3 = peg$currPos; +// @ts-ignore + s4 = peg$parse_(); +// @ts-ignore + s5 = peg$currPos; +// @ts-ignore + s6 = peg$parseOr(); +// @ts-ignore + if (s6 === peg$FAILED) { +// @ts-ignore + s6 = peg$parseAnd(); + } +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s7 = peg$parse_(); +// @ts-ignore + s6 = [s6, s7]; +// @ts-ignore + s5 = s6; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s5; +// @ts-ignore + s5 = peg$FAILED; + } +// @ts-ignore + if (s5 === peg$FAILED) { +// @ts-ignore + s5 = null; + } +// @ts-ignore + s6 = peg$parseVersionRangeAtom(); +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s4 = [s4, s5, s6]; +// @ts-ignore + s3 = s4; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } +// @ts-ignore + while (s3 !== peg$FAILED) { +// @ts-ignore + s2.push(s3); +// @ts-ignore + s3 = peg$currPos; +// @ts-ignore + s4 = peg$parse_(); +// @ts-ignore + s5 = peg$currPos; +// @ts-ignore + s6 = peg$parseOr(); +// @ts-ignore + if (s6 === peg$FAILED) { +// @ts-ignore + s6 = peg$parseAnd(); + } +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s7 = peg$parse_(); +// @ts-ignore + s6 = [s6, s7]; +// @ts-ignore + s5 = s6; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s5; +// @ts-ignore + s5 = peg$FAILED; + } +// @ts-ignore + if (s5 === peg$FAILED) { +// @ts-ignore + s5 = null; + } +// @ts-ignore + s6 = peg$parseVersionRangeAtom(); +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s4 = [s4, s5, s6]; +// @ts-ignore + s3 = s4; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } + } +// @ts-ignore + s1 = [s1, s2]; +// @ts-ignore + s0 = s1; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = 
peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseOr() { +// @ts-ignore + var s0; + +// @ts-ignore + if (input.substr(peg$currPos, 2) === peg$c0) { +// @ts-ignore + s0 = peg$c0; +// @ts-ignore + peg$currPos += 2; +// @ts-ignore + } else { +// @ts-ignore + s0 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e0); } + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseAnd() { +// @ts-ignore + var s0; + +// @ts-ignore + if (input.substr(peg$currPos, 2) === peg$c1) { +// @ts-ignore + s0 = peg$c1; +// @ts-ignore + peg$currPos += 2; +// @ts-ignore + } else { +// @ts-ignore + s0 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e1); } + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseVersionRangeAtom() { +// @ts-ignore + var s0; + +// @ts-ignore + s0 = peg$parseParens(); +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$parseAnchor(); +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$parseNot(); +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$parseAny(); +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$parseNone(); +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$parseFlavorAtom(); + } + } + } + } + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseParens() { +// @ts-ignore + var s0, s1, s2, s3, s4, s5; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 40) { +// @ts-ignore + s1 = peg$c2; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e2); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = peg$parse_(); +// @ts-ignore + s3 = peg$parseVersionRange(); +// @ts-ignore + if (s3 !== peg$FAILED) { +// @ts-ignore + s4 
= peg$parse_(); +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 41) { +// @ts-ignore + s5 = peg$c3; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s5 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e3); } + } +// @ts-ignore + if (s5 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f0(s3); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseAnchor() { +// @ts-ignore + var s0, s1, s2, s3; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = peg$parseCmpOp(); +// @ts-ignore + if (s1 === peg$FAILED) { +// @ts-ignore + s1 = null; + } +// @ts-ignore + s2 = peg$parse_(); +// @ts-ignore + s3 = peg$parseVersionSpec(); +// @ts-ignore + if (s3 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f1(s1, s3); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseVersionSpec() { +// @ts-ignore + var s0, s1, s2, s3, s4, s5; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = peg$parseFlavor(); +// @ts-ignore + if (s1 === peg$FAILED) { +// @ts-ignore + s1 = null; + } +// @ts-ignore + s2 = peg$parseVersion(); +// @ts-ignore + if (s2 !== peg$FAILED) { +// @ts-ignore + s3 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 58) { +// @ts-ignore + s4 = peg$c4; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s4 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e4); } + } +// @ts-ignore + if (s4 !== peg$FAILED) { +// @ts-ignore + s5 = 
peg$parseVersion(); +// @ts-ignore + if (s5 !== peg$FAILED) { +// @ts-ignore + s4 = [s4, s5]; +// @ts-ignore + s3 = s4; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } +// @ts-ignore + if (s3 === peg$FAILED) { +// @ts-ignore + s3 = null; + } +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f2(s1, s2, s3); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseFlavorAtom() { +// @ts-ignore + var s0, s1, s2; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 35) { +// @ts-ignore + s1 = peg$c5; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e5); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = peg$parseFlavorString(); +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f3(s2); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseNot() { +// @ts-ignore + var s0, s1, s2, s3; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 33) { +// @ts-ignore + s1 = peg$c6; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e6); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = peg$parse_(); +// @ts-ignore + s3 = peg$parseVersionRangeAtom(); +// @ts-ignore + if (s3 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f4(s3); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = 
peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseAny() { +// @ts-ignore + var s0, s1; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 42) { +// @ts-ignore + s1 = peg$c7; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e7); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f5(); + } +// @ts-ignore + s0 = s1; + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseNone() { +// @ts-ignore + var s0, s1; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 33) { +// @ts-ignore + s1 = peg$c6; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e6); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f6(); + } +// @ts-ignore + s0 = s1; + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseCmpOp() { +// @ts-ignore + var s0, s1; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.substr(peg$currPos, 2) === peg$c8) { +// @ts-ignore + s1 = peg$c8; +// @ts-ignore + peg$currPos += 2; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e8); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f7(); + } +// @ts-ignore + s0 = s1; +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.substr(peg$currPos, 2) === peg$c9) { +// @ts-ignore + s1 = peg$c9; +// @ts-ignore + peg$currPos += 2; +// 
@ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e9); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f8(); + } +// @ts-ignore + s0 = s1; +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 62) { +// @ts-ignore + s1 = peg$c10; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e10); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f9(); + } +// @ts-ignore + s0 = s1; +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 60) { +// @ts-ignore + s1 = peg$c11; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e11); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f10(); + } +// @ts-ignore + s0 = s1; +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 61) { +// @ts-ignore + s1 = peg$c12; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e12); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f11(); + } +// @ts-ignore + s0 = s1; +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.substr(peg$currPos, 2) === peg$c13) { +// @ts-ignore + s1 = peg$c13; +// @ts-ignore + peg$currPos += 2; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { 
peg$fail(peg$e13); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f12(); + } +// @ts-ignore + s0 = s1; +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 94) { +// @ts-ignore + s1 = peg$c14; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e14); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f13(); + } +// @ts-ignore + s0 = s1; +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 126) { +// @ts-ignore + s1 = peg$c15; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e15); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f14(); + } +// @ts-ignore + s0 = s1; + } + } + } + } + } + } + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseExtendedVersion() { +// @ts-ignore + var s0, s1, s2, s3, s4; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = peg$parseFlavor(); +// @ts-ignore + if (s1 === peg$FAILED) { +// @ts-ignore + s1 = null; + } +// @ts-ignore + s2 = peg$parseVersion(); +// @ts-ignore + if (s2 !== peg$FAILED) { +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 58) { +// @ts-ignore + s3 = peg$c4; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s3 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e4); } + } +// @ts-ignore + if (s3 !== peg$FAILED) { +// @ts-ignore + s4 = peg$parseVersion(); +// @ts-ignore + if (s4 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f15(s1, s2, s4); +// @ts-ignore + 
} else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseEmverVersionRange() { +// @ts-ignore + var s0, s1, s2, s3, s4, s5, s6, s7; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = peg$parseEmverVersionRangeAtom(); +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = []; +// @ts-ignore + s3 = peg$currPos; +// @ts-ignore + s4 = peg$parse_(); +// @ts-ignore + s5 = peg$currPos; +// @ts-ignore + s6 = peg$parseOr(); +// @ts-ignore + if (s6 === peg$FAILED) { +// @ts-ignore + s6 = peg$parseAnd(); + } +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s7 = peg$parse_(); +// @ts-ignore + s6 = [s6, s7]; +// @ts-ignore + s5 = s6; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s5; +// @ts-ignore + s5 = peg$FAILED; + } +// @ts-ignore + if (s5 === peg$FAILED) { +// @ts-ignore + s5 = null; + } +// @ts-ignore + s6 = peg$parseEmverVersionRangeAtom(); +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s4 = [s4, s5, s6]; +// @ts-ignore + s3 = s4; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } +// @ts-ignore + while (s3 !== peg$FAILED) { +// @ts-ignore + s2.push(s3); +// @ts-ignore + s3 = peg$currPos; +// @ts-ignore + s4 = peg$parse_(); +// @ts-ignore + s5 = peg$currPos; +// @ts-ignore + s6 = peg$parseOr(); +// @ts-ignore + if (s6 === peg$FAILED) { +// @ts-ignore + s6 = peg$parseAnd(); + } +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s7 = peg$parse_(); +// @ts-ignore + s6 = [s6, s7]; +// @ts-ignore + s5 = s6; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s5; +// @ts-ignore + s5 = peg$FAILED; + } +// @ts-ignore + if (s5 === peg$FAILED) { +// @ts-ignore + s5 = 
null; + } +// @ts-ignore + s6 = peg$parseEmverVersionRangeAtom(); +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s4 = [s4, s5, s6]; +// @ts-ignore + s3 = s4; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } + } +// @ts-ignore + s1 = [s1, s2]; +// @ts-ignore + s0 = s1; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseEmverVersionRangeAtom() { +// @ts-ignore + var s0; + +// @ts-ignore + s0 = peg$parseEmverParens(); +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$parseEmverAnchor(); +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$parseEmverNot(); +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$parseAny(); +// @ts-ignore + if (s0 === peg$FAILED) { +// @ts-ignore + s0 = peg$parseNone(); + } + } + } + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseEmverParens() { +// @ts-ignore + var s0, s1, s2, s3, s4, s5; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 40) { +// @ts-ignore + s1 = peg$c2; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e2); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = peg$parse_(); +// @ts-ignore + s3 = peg$parseEmverVersionRange(); +// @ts-ignore + if (s3 !== peg$FAILED) { +// @ts-ignore + s4 = peg$parse_(); +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 41) { +// @ts-ignore + s5 = peg$c3; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s5 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e3); } + } +// @ts-ignore + if (s5 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f16(s3); +// @ts-ignore + } else 
{ +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseEmverAnchor() { +// @ts-ignore + var s0, s1, s2, s3; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = peg$parseCmpOp(); +// @ts-ignore + if (s1 === peg$FAILED) { +// @ts-ignore + s1 = null; + } +// @ts-ignore + s2 = peg$parse_(); +// @ts-ignore + s3 = peg$parseEmver(); +// @ts-ignore + if (s3 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f17(s1, s3); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseEmverNot() { +// @ts-ignore + var s0, s1, s2, s3; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 33) { +// @ts-ignore + s1 = peg$c6; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e6); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = peg$parse_(); +// @ts-ignore + s3 = peg$parseEmverVersionRangeAtom(); +// @ts-ignore + if (s3 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f18(s3); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseEmver() { +// @ts-ignore + var s0, s1, s2, s3, s4, s5, s6, s7, s8; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = peg$parseDigit(); +// @ts-ignore + if (s1 !== peg$FAILED) { 
+// @ts-ignore + if (input.charCodeAt(peg$currPos) === 46) { +// @ts-ignore + s2 = peg$c16; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s2 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e16); } + } +// @ts-ignore + if (s2 !== peg$FAILED) { +// @ts-ignore + s3 = peg$parseDigit(); +// @ts-ignore + if (s3 !== peg$FAILED) { +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 46) { +// @ts-ignore + s4 = peg$c16; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s4 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e16); } + } +// @ts-ignore + if (s4 !== peg$FAILED) { +// @ts-ignore + s5 = peg$parseDigit(); +// @ts-ignore + if (s5 !== peg$FAILED) { +// @ts-ignore + s6 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 46) { +// @ts-ignore + s7 = peg$c16; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s7 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e16); } + } +// @ts-ignore + if (s7 !== peg$FAILED) { +// @ts-ignore + s8 = peg$parseDigit(); +// @ts-ignore + if (s8 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s6; +// @ts-ignore + s6 = peg$f19(s1, s3, s5, s8); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s6; +// @ts-ignore + s6 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s6; +// @ts-ignore + s6 = peg$FAILED; + } +// @ts-ignore + if (s6 === peg$FAILED) { +// @ts-ignore + s6 = null; + } +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f20(s1, s3, s5, s6); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = 
peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseFlavor() { +// @ts-ignore + var s0, s1, s2, s3; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 35) { +// @ts-ignore + s1 = peg$c5; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e5); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = peg$parseFlavorString(); +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 58) { +// @ts-ignore + s3 = peg$c4; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s3 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e4); } + } +// @ts-ignore + if (s3 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f21(s2); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseFlavorString() { +// @ts-ignore + var s0, s1, s2; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = []; +// @ts-ignore + if (peg$r0.test(input.charAt(peg$currPos))) { +// @ts-ignore + s2 = input.charAt(peg$currPos); +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s2 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e17); } + } +// @ts-ignore + while (s2 !== peg$FAILED) { +// @ts-ignore + s1.push(s2); +// @ts-ignore + if (peg$r0.test(input.charAt(peg$currPos))) { +// @ts-ignore + s2 = input.charAt(peg$currPos); +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s2 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { 
peg$fail(peg$e17); } + } + } +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f22(); +// @ts-ignore + s0 = s1; + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseString() { +// @ts-ignore + var s0, s1, s2; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = []; +// @ts-ignore + if (peg$r1.test(input.charAt(peg$currPos))) { +// @ts-ignore + s2 = input.charAt(peg$currPos); +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s2 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e18); } + } +// @ts-ignore + if (s2 !== peg$FAILED) { +// @ts-ignore + while (s2 !== peg$FAILED) { +// @ts-ignore + s1.push(s2); +// @ts-ignore + if (peg$r1.test(input.charAt(peg$currPos))) { +// @ts-ignore + s2 = input.charAt(peg$currPos); +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s2 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e18); } + } + } +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f23(); + } +// @ts-ignore + s0 = s1; + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseVersion() { +// @ts-ignore + var s0, s1, s2; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = peg$parseVersionNumber(); +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = peg$parsePreRelease(); +// @ts-ignore + if (s2 === peg$FAILED) { +// @ts-ignore + s2 = null; + } +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f24(s1, s2); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parsePreRelease() { +// @ts-ignore + var s0, s1, s2, s3, s4, s5, s6; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 45) { +// 
@ts-ignore + s1 = peg$c17; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e19); } + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = peg$parsePreReleaseSegment(); +// @ts-ignore + if (s2 !== peg$FAILED) { +// @ts-ignore + s3 = []; +// @ts-ignore + s4 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 46) { +// @ts-ignore + s5 = peg$c16; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s5 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e16); } + } +// @ts-ignore + if (s5 !== peg$FAILED) { +// @ts-ignore + s6 = peg$parsePreReleaseSegment(); +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s5 = [s5, s6]; +// @ts-ignore + s4 = s5; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s4; +// @ts-ignore + s4 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s4; +// @ts-ignore + s4 = peg$FAILED; + } +// @ts-ignore + while (s4 !== peg$FAILED) { +// @ts-ignore + s3.push(s4); +// @ts-ignore + s4 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 46) { +// @ts-ignore + s5 = peg$c16; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s5 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e16); } + } +// @ts-ignore + if (s5 !== peg$FAILED) { +// @ts-ignore + s6 = peg$parsePreReleaseSegment(); +// @ts-ignore + if (s6 !== peg$FAILED) { +// @ts-ignore + s5 = [s5, s6]; +// @ts-ignore + s4 = s5; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s4; +// @ts-ignore + s4 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s4; +// @ts-ignore + s4 = peg$FAILED; + } + } +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f25(s2, s3); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } +// @ts-ignore + } else { +// 
@ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parsePreReleaseSegment() { +// @ts-ignore + var s0, s1, s2; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 46) { +// @ts-ignore + s1 = peg$c16; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e16); } + } +// @ts-ignore + if (s1 === peg$FAILED) { +// @ts-ignore + s1 = null; + } +// @ts-ignore + s2 = peg$parseDigit(); +// @ts-ignore + if (s2 === peg$FAILED) { +// @ts-ignore + s2 = peg$parseString(); + } +// @ts-ignore + if (s2 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f26(s2); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseVersionNumber() { +// @ts-ignore + var s0, s1, s2, s3, s4, s5; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = peg$parseDigit(); +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + s2 = []; +// @ts-ignore + s3 = peg$currPos; +// @ts-ignore + if (input.charCodeAt(peg$currPos) === 46) { +// @ts-ignore + s4 = peg$c16; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s4 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e16); } + } +// @ts-ignore + if (s4 !== peg$FAILED) { +// @ts-ignore + s5 = peg$parseDigit(); +// @ts-ignore + if (s5 !== peg$FAILED) { +// @ts-ignore + s4 = [s4, s5]; +// @ts-ignore + s3 = s4; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } +// @ts-ignore + while (s3 !== peg$FAILED) { +// @ts-ignore + s2.push(s3); +// @ts-ignore + s3 = peg$currPos; +// 
@ts-ignore + if (input.charCodeAt(peg$currPos) === 46) { +// @ts-ignore + s4 = peg$c16; +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s4 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e16); } + } +// @ts-ignore + if (s4 !== peg$FAILED) { +// @ts-ignore + s5 = peg$parseDigit(); +// @ts-ignore + if (s5 !== peg$FAILED) { +// @ts-ignore + s4 = [s4, s5]; +// @ts-ignore + s3 = s4; +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s3; +// @ts-ignore + s3 = peg$FAILED; + } + } +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s0 = peg$f27(s1, s2); +// @ts-ignore + } else { +// @ts-ignore + peg$currPos = s0; +// @ts-ignore + s0 = peg$FAILED; + } + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parseDigit() { +// @ts-ignore + var s0, s1, s2; + +// @ts-ignore + s0 = peg$currPos; +// @ts-ignore + s1 = []; +// @ts-ignore + if (peg$r2.test(input.charAt(peg$currPos))) { +// @ts-ignore + s2 = input.charAt(peg$currPos); +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s2 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e20); } + } +// @ts-ignore + if (s2 !== peg$FAILED) { +// @ts-ignore + while (s2 !== peg$FAILED) { +// @ts-ignore + s1.push(s2); +// @ts-ignore + if (peg$r2.test(input.charAt(peg$currPos))) { +// @ts-ignore + s2 = input.charAt(peg$currPos); +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s2 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e20); } + } + } +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; + } +// @ts-ignore + if (s1 !== peg$FAILED) { +// @ts-ignore + peg$savedPos = s0; +// @ts-ignore + s1 = peg$f28(); + } +// @ts-ignore + s0 = s1; + +// @ts-ignore + return s0; + } + +// @ts-ignore + function // @ts-ignore +peg$parse_() { +// @ts-ignore + var s0, s1; + 
+// @ts-ignore + peg$silentFails++; +// @ts-ignore + s0 = []; +// @ts-ignore + if (peg$r3.test(input.charAt(peg$currPos))) { +// @ts-ignore + s1 = input.charAt(peg$currPos); +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e22); } + } +// @ts-ignore + while (s1 !== peg$FAILED) { +// @ts-ignore + s0.push(s1); +// @ts-ignore + if (peg$r3.test(input.charAt(peg$currPos))) { +// @ts-ignore + s1 = input.charAt(peg$currPos); +// @ts-ignore + peg$currPos++; +// @ts-ignore + } else { +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e22); } + } + } +// @ts-ignore + peg$silentFails--; +// @ts-ignore + s1 = peg$FAILED; +// @ts-ignore + if (peg$silentFails === 0) { peg$fail(peg$e21); } + +// @ts-ignore + return s0; + } + +// @ts-ignore + peg$result = peg$startRuleFunction(); + +// @ts-ignore + if (peg$result !== peg$FAILED && peg$currPos === input.length) { +// @ts-ignore + return peg$result; +// @ts-ignore + } else { +// @ts-ignore + if (peg$result !== peg$FAILED && peg$currPos < input.length) { +// @ts-ignore + peg$fail(peg$endExpectation()); + } + +// @ts-ignore + throw peg$buildStructuredError( +// @ts-ignore + peg$maxFailExpected, +// @ts-ignore + peg$maxFailPos < input.length ? input.charAt(peg$maxFailPos) : null, +// @ts-ignore + peg$maxFailPos < input.length +// @ts-ignore + ? 
peg$computeLocation(peg$maxFailPos, peg$maxFailPos + 1) +// @ts-ignore + : peg$computeLocation(peg$maxFailPos, peg$maxFailPos) + ); + } +} + +// @ts-ignore + return { + SyntaxError: peg$SyntaxError, + parse: peg$parse + }; +})() export interface FilePosition { - offset: number - line: number - column: number + offset: number; + line: number; + column: number; } export interface FileRange { - start: FilePosition - end: FilePosition - source: string + start: FilePosition; + end: FilePosition; + source: string; } export interface LiteralExpectation { - type: 'literal' - text: string - ignoreCase: boolean + type: "literal"; + text: string; + ignoreCase: boolean; } export interface ClassParts extends Array {} export interface ClassExpectation { - type: 'class' - parts: ClassParts - inverted: boolean - ignoreCase: boolean + type: "class"; + parts: ClassParts; + inverted: boolean; + ignoreCase: boolean; } export interface AnyExpectation { - type: 'any' + type: "any"; } export interface EndExpectation { - type: 'end' + type: "end"; } export interface OtherExpectation { - type: 'other' - description: string + type: "other"; + description: string; } -export type Expectation = - | LiteralExpectation - | ClassExpectation - | AnyExpectation - | EndExpectation - | OtherExpectation +export type Expectation = LiteralExpectation | ClassExpectation | AnyExpectation | EndExpectation | OtherExpectation; declare class _PeggySyntaxError extends Error { - public static buildMessage( - expected: Expectation[], - found: string | null, - ): string - public message: string - public expected: Expectation[] - public found: string | null - public location: FileRange - public name: string - constructor( - message: string, - expected: Expectation[], - found: string | null, - location: FileRange, - ) - format( - sources: { - source?: any - text: string - }[], - ): string + public static buildMessage(expected: Expectation[], found: string | null): string; + public message: string; + public 
expected: Expectation[]; + public found: string | null; + public location: FileRange; + public name: string; + constructor(message: string, expected: Expectation[], found: string | null, location: FileRange); + format(sources: { + source?: any; + text: string; + }[]): string; } export interface TraceEvent { - type: string - rule: string - result?: any - location: FileRange -} + type: string; + rule: string; + result?: any; + location: FileRange; + } declare class _DefaultTracer { - private indentLevel: number - public trace(event: TraceEvent): void + private indentLevel: number; + public trace(event: TraceEvent): void; } -peggyParser.SyntaxError.prototype.name = 'PeggySyntaxError' +peggyParser.SyntaxError.prototype.name = "PeggySyntaxError"; export interface ParseOptions { - filename?: string - startRule?: - | 'VersionRange' - | 'Or' - | 'And' - | 'VersionRangeAtom' - | 'Parens' - | 'Anchor' - | 'VersionSpec' - | 'FlavorAtom' - | 'Not' - | 'Any' - | 'None' - | 'CmpOp' - | 'ExtendedVersion' - | 'EmverVersionRange' - | 'EmverVersionRangeAtom' - | 'EmverParens' - | 'EmverAnchor' - | 'EmverNot' - | 'Emver' - | 'Flavor' - | 'FlavorString' - | 'String' - | 'Version' - | 'PreRelease' - | 'PreReleaseSegment' - | 'VersionNumber' - | 'Digit' - | '_' - tracer?: any - [key: string]: any + filename?: string; + startRule?: "VersionRange" | "Or" | "And" | "VersionRangeAtom" | "Parens" | "Anchor" | "VersionSpec" | "FlavorAtom" | "Not" | "Any" | "None" | "CmpOp" | "ExtendedVersion" | "EmverVersionRange" | "EmverVersionRangeAtom" | "EmverParens" | "EmverAnchor" | "EmverNot" | "Emver" | "Flavor" | "FlavorString" | "String" | "Version" | "PreRelease" | "PreReleaseSegment" | "VersionNumber" | "Digit" | "_"; + tracer?: any; + [key: string]: any; } export type ParseFunction = ( - input: string, - options?: Options, -) => Options extends { startRule: infer StartRule } - ? StartRule extends 'VersionRange' - ? VersionRange - : StartRule extends 'Or' - ? Or - : StartRule extends 'And' - ? 
And - : StartRule extends 'VersionRangeAtom' - ? VersionRangeAtom - : StartRule extends 'Parens' - ? Parens - : StartRule extends 'Anchor' - ? Anchor - : StartRule extends 'VersionSpec' - ? VersionSpec - : StartRule extends 'FlavorAtom' - ? FlavorAtom - : StartRule extends 'Not' - ? Not - : StartRule extends 'Any' - ? Any - : StartRule extends 'None' - ? None - : StartRule extends 'CmpOp' - ? CmpOp - : StartRule extends 'ExtendedVersion' - ? ExtendedVersion - : StartRule extends 'EmverVersionRange' - ? EmverVersionRange - : StartRule extends 'EmverVersionRangeAtom' - ? EmverVersionRangeAtom - : StartRule extends 'EmverParens' - ? EmverParens - : StartRule extends 'EmverAnchor' - ? EmverAnchor - : StartRule extends 'EmverNot' - ? EmverNot - : StartRule extends 'Emver' - ? Emver - : StartRule extends 'Flavor' - ? Flavor - : StartRule extends 'FlavorString' - ? FlavorString - : StartRule extends 'String' - ? String_1 - : StartRule extends 'Version' - ? Version - : StartRule extends 'PreRelease' - ? PreRelease - : StartRule extends 'PreReleaseSegment' - ? PreReleaseSegment - : StartRule extends 'VersionNumber' - ? VersionNumber - : StartRule extends 'Digit' - ? Digit - : StartRule extends '_' - ? _ - : VersionRange - : VersionRange -export const parse: ParseFunction = peggyParser.parse + input: string, + options?: Options + ) => Options extends { startRule: infer StartRule } ? + StartRule extends "VersionRange" ? VersionRange : + StartRule extends "Or" ? Or : + StartRule extends "And" ? And : + StartRule extends "VersionRangeAtom" ? VersionRangeAtom : + StartRule extends "Parens" ? Parens : + StartRule extends "Anchor" ? Anchor : + StartRule extends "VersionSpec" ? VersionSpec : + StartRule extends "FlavorAtom" ? FlavorAtom : + StartRule extends "Not" ? Not : + StartRule extends "Any" ? Any : + StartRule extends "None" ? None : + StartRule extends "CmpOp" ? CmpOp : + StartRule extends "ExtendedVersion" ? ExtendedVersion : + StartRule extends "EmverVersionRange" ? 
EmverVersionRange : + StartRule extends "EmverVersionRangeAtom" ? EmverVersionRangeAtom : + StartRule extends "EmverParens" ? EmverParens : + StartRule extends "EmverAnchor" ? EmverAnchor : + StartRule extends "EmverNot" ? EmverNot : + StartRule extends "Emver" ? Emver : + StartRule extends "Flavor" ? Flavor : + StartRule extends "FlavorString" ? FlavorString : + StartRule extends "String" ? String_1 : + StartRule extends "Version" ? Version : + StartRule extends "PreRelease" ? PreRelease : + StartRule extends "PreReleaseSegment" ? PreReleaseSegment : + StartRule extends "VersionNumber" ? VersionNumber : + StartRule extends "Digit" ? Digit : + StartRule extends "_" ? _ : VersionRange + : VersionRange; +export const parse: ParseFunction = peggyParser.parse; -export const PeggySyntaxError = - peggyParser.SyntaxError as typeof _PeggySyntaxError +export const PeggySyntaxError = peggyParser.SyntaxError as typeof _PeggySyntaxError; -export type PeggySyntaxError = _PeggySyntaxError +export type PeggySyntaxError = _PeggySyntaxError; // These types were autogenerated by ts-pegjs export type VersionRange = [ VersionRangeAtom, - [_, [Or | And, _] | null, VersionRangeAtom][], -] -export type Or = '||' -export type And = '&&' -export type VersionRangeAtom = Parens | Anchor | Not | Any | None | FlavorAtom -export type Parens = { type: 'Parens'; expr: VersionRange } + [_, [Or | And, _] | null, VersionRangeAtom][] +]; +export type Or = "||"; +export type And = "&&"; +export type VersionRangeAtom = Parens | Anchor | Not | Any | None | FlavorAtom; +export type Parens = { type: "Parens"; expr: VersionRange }; export type Anchor = { - type: 'Anchor' - operator: CmpOp | null - version: VersionSpec -} + type: "Anchor"; + operator: CmpOp | null; + version: VersionSpec; +}; export type VersionSpec = { - flavor: NonNullable | null - upstream: Version - downstream: any -} -export type FlavorAtom = { type: 'Flavor'; flavor: FlavorString } -export type Not = { type: 'Not'; value: 
VersionRangeAtom } -export type Any = { type: 'Any' } -export type None = { type: 'None' } -export type CmpOp = '>=' | '<=' | '>' | '<' | '=' | '!=' | '^' | '~' + flavor: NonNullable | null; + upstream: Version; + downstream: any; +}; +export type FlavorAtom = { type: "Flavor"; flavor: FlavorString }; +export type Not = { type: "Not"; value: VersionRangeAtom }; +export type Any = { type: "Any" }; +export type None = { type: "None" }; +export type CmpOp = ">=" | "<=" | ">" | "<" | "=" | "!=" | "^" | "~"; export type ExtendedVersion = { - flavor: NonNullable | null - upstream: Version - downstream: Version -} + flavor: NonNullable | null; + upstream: Version; + downstream: Version; +}; export type EmverVersionRange = [ EmverVersionRangeAtom, - [_, [Or | And, _] | null, EmverVersionRangeAtom][], -] + [_, [Or | And, _] | null, EmverVersionRangeAtom][] +]; export type EmverVersionRangeAtom = | EmverParens | EmverAnchor | EmverNot | Any - | None -export type EmverParens = { type: 'Parens'; expr: EmverVersionRange } + | None; +export type EmverParens = { type: "Parens"; expr: EmverVersionRange }; export type EmverAnchor = { - type: 'Anchor' - operator: CmpOp | null - version: Emver -} -export type EmverNot = { type: 'Not'; value: EmverVersionRangeAtom } + type: "Anchor"; + operator: CmpOp | null; + version: Emver; +}; +export type EmverNot = { type: "Not"; value: EmverVersionRangeAtom }; export type Emver = { - flavor: null - upstream: { number: [Digit, Digit, Digit]; prerelease: [] } - downstream: { number: [0 | NonNullable]; prerelease: [] } -} -export type Flavor = FlavorString -export type FlavorString = string -export type String_1 = string + flavor: null; + upstream: { number: [Digit, Digit, Digit]; prerelease: [] }; + downstream: { number: [0 | NonNullable]; prerelease: [] }; +}; +export type Flavor = FlavorString; +export type FlavorString = string; +export type String_1 = string; export type Version = { - number: VersionNumber - prerelease: never[] | NonNullable 
-} -export type PreRelease = PreReleaseSegment[] -export type PreReleaseSegment = Digit | String_1 -export type VersionNumber = Digit[] -export type Digit = number -export type _ = string[] + number: VersionNumber; + prerelease: never[] | NonNullable; +}; +export type PreRelease = PreReleaseSegment[]; +export type PreReleaseSegment = Digit | String_1; +export type VersionNumber = Digit[]; +export type Digit = number; +export type _ = string[]; diff --git a/sdk/base/lib/exver/index.ts b/sdk/base/lib/exver/index.ts index 3eb818097..237de75a9 100644 --- a/sdk/base/lib/exver/index.ts +++ b/sdk/base/lib/exver/index.ts @@ -1,6 +1,17 @@ import { DeepMap } from 'deep-equality-data-structures' import * as P from './exver' +/** + * Compile-time utility type that validates a version string literal conforms to semver format. + * + * Resolves to `unknown` if valid, `never` if invalid. Used with {@link testTypeVersion}. + * + * @example + * ```ts + * type Valid = ValidateVersion<"1.2.3"> // unknown (valid) + * type Invalid = ValidateVersion<"-3"> // never (invalid) + * ``` + */ // prettier-ignore export type ValidateVersion = T extends `-${infer A}` ? never : @@ -9,12 +20,32 @@ T extends `${infer A}-${string}` ? ValidateVersion : T extends `${bigint}.${infer A}` ? ValidateVersion : never +/** + * Compile-time utility type that validates an extended version string literal. + * + * Extended versions have the format `upstream:downstream` or `#flavor:upstream:downstream`. + * + * @example + * ```ts + * type Valid = ValidateExVer<"1.2.3:0"> // valid + * type Flavored = ValidateExVer<"#bitcoin:1.0:0"> // valid + * type Bad = ValidateExVer<"1.2-3"> // never (invalid) + * ``` + */ // prettier-ignore export type ValidateExVer = T extends `#${string}:${infer A}:${infer B}` ? ValidateVersion & ValidateVersion : T extends `${infer A}:${infer B}` ? ValidateVersion & ValidateVersion : never +/** + * Validates a tuple of extended version string literals at compile time. 
+ * + * @example + * ```ts + * type Valid = ValidateExVers<["1.0:0", "2.0:0"]> // valid + * ``` + */ // prettier-ignore export type ValidateExVers = T extends [] ? unknown[] : @@ -460,6 +491,28 @@ class VersionRangeTable { } } +/** + * Represents a parsed version range expression used to match against {@link Version} or {@link ExtendedVersion} values. + * + * Version ranges support standard comparison operators (`=`, `>`, `<`, `>=`, `<=`, `!=`), + * caret (`^`) and tilde (`~`) ranges, boolean logic (`&&`, `||`, `!`), and flavor matching (`#flavor`). + * + * @example + * ```ts + * const range = VersionRange.parse(">=1.0.0:0 && <2.0.0:0") + * const version = ExtendedVersion.parse("1.5.0:0") + * console.log(range.satisfiedBy(version)) // true + * + * // Combine ranges with boolean logic + * const combined = VersionRange.and( + * VersionRange.parse(">=1.0:0"), + * VersionRange.parse("<3.0:0"), + * ) + * + * // Match a specific flavor + * const flavored = VersionRange.parse("#bitcoin") + * ``` + */ export class VersionRange { constructor(public atom: Anchor | And | Or | Not | P.Any | P.None | Flavor) {} @@ -488,6 +541,7 @@ export class VersionRange { } } + /** Serializes this version range back to its canonical string representation. */ toString(): string { switch (this.atom.type) { case 'Anchor': @@ -563,38 +617,69 @@ export class VersionRange { return result } + /** + * Parses a version range string into a `VersionRange`. + * + * @param range - A version range expression, e.g. `">=1.0.0:0 && <2.0.0:0"`, `"^1.2:0"`, `"*"` + * @returns The parsed `VersionRange` + * @throws If the string is not a valid version range expression + */ static parse(range: string): VersionRange { return VersionRange.parseRange( P.parse(range, { startRule: 'VersionRange' }), ) } + /** + * Creates a version range from a comparison operator and an {@link ExtendedVersion}. 
+ * + * @param operator - One of `"="`, `">"`, `"<"`, `">="`, `"<="`, `"!="`, `"^"`, `"~"` + * @param version - The version to compare against + */ static anchor(operator: P.CmpOp, version: ExtendedVersion) { return new VersionRange({ type: 'Anchor', operator, version }) } + /** + * Creates a version range that matches only versions with the specified flavor. + * + * @param flavor - The flavor string to match, or `null` for the default (unflavored) variant + */ static flavor(flavor: string | null) { return new VersionRange({ type: 'Flavor', flavor }) } + /** + * Parses a legacy "emver" format version range string. + * + * @param range - A version range in the legacy emver format + * @returns The parsed `VersionRange` + */ static parseEmver(range: string): VersionRange { return VersionRange.parseRange( P.parse(range, { startRule: 'EmverVersionRange' }), ) } + /** Returns the intersection of this range with another (logical AND). */ and(right: VersionRange) { return new VersionRange({ type: 'And', left: this, right }) } + /** Returns the union of this range with another (logical OR). */ or(right: VersionRange) { return new VersionRange({ type: 'Or', left: this, right }) } + /** Returns the negation of this range (logical NOT). */ not() { return new VersionRange({ type: 'Not', value: this }) } + /** + * Returns the logical AND (intersection) of multiple version ranges. + * Short-circuits on `none()` and skips `any()`. + */ static and(...xs: Array) { let y = VersionRange.any() for (let x of xs) { @@ -613,6 +698,10 @@ export class VersionRange { return y } + /** + * Returns the logical OR (union) of multiple version ranges. + * Short-circuits on `any()` and skips `none()`. + */ static or(...xs: Array) { let y = VersionRange.none() for (let x of xs) { @@ -631,14 +720,21 @@ export class VersionRange { return y } + /** Returns a version range that matches all versions (wildcard `*`). 
*/ static any() { return new VersionRange({ type: 'Any' }) } + /** Returns a version range that matches no versions (`!`). */ static none() { return new VersionRange({ type: 'None' }) } + /** + * Returns `true` if the given version satisfies this range. + * + * @param version - A {@link Version} or {@link ExtendedVersion} to test + */ satisfiedBy(version: Version | ExtendedVersion) { return version.satisfies(this) } @@ -714,29 +810,60 @@ export class VersionRange { } } + /** Returns `true` if any version exists that could satisfy this range. */ satisfiable(): boolean { return VersionRangeTable.collapse(this.tables()) !== false } + /** Returns `true` if this range and `other` share at least one satisfying version. */ intersects(other: VersionRange): boolean { return VersionRange.and(this, other).satisfiable() } + /** + * Returns a canonical (simplified) form of this range using minterm expansion. + * Useful for normalizing complex boolean expressions into a minimal representation. + */ normalize(): VersionRange { return VersionRangeTable.minterms(this.tables()) } } +/** + * Represents a semantic version number with numeric segments and optional prerelease identifiers. + * + * Follows semver precedence rules: numeric segments are compared left-to-right, + * and a version with prerelease identifiers has lower precedence than the same version without. + * + * @example + * ```ts + * const v = Version.parse("1.2.3") + * console.log(v.toString()) // "1.2.3" + * console.log(v.compare(Version.parse("1.3.0"))) // "less" + * + * const pre = Version.parse("2.0.0-beta.1") + * console.log(pre.compare(Version.parse("2.0.0"))) // "less" (prerelease < release) + * ``` + */ export class Version { constructor( + /** The numeric version segments (e.g. `[1, 2, 3]` for `"1.2.3"`). */ public number: number[], + /** Optional prerelease identifiers (e.g. `["beta", 1]` for `"-beta.1"`). */ public prerelease: (string | number)[], ) {} + /** Serializes this version to its string form (e.g. 
`"1.2.3"` or `"1.0.0-beta.1"`). */ toString(): string { return `${this.number.join('.')}${this.prerelease.length > 0 ? `-${this.prerelease.join('.')}` : ''}` } + /** + * Compares this version against another using semver precedence rules. + * + * @param other - The version to compare against + * @returns `'greater'`, `'equal'`, or `'less'` + */ compare(other: Version): 'greater' | 'equal' | 'less' { const numLen = Math.max(this.number.length, other.number.length) for (let i = 0; i < numLen; i++) { @@ -783,6 +910,11 @@ export class Version { return 'equal' } + /** + * Compares two versions, returning a numeric value suitable for use with `Array.sort()`. + * + * @returns `-1` if less, `0` if equal, `1` if greater + */ compareForSort(other: Version): -1 | 0 | 1 { switch (this.compare(other)) { case 'greater': @@ -794,11 +926,21 @@ export class Version { } } + /** + * Parses a version string into a `Version` instance. + * + * @param version - A semver-compatible string, e.g. `"1.2.3"` or `"1.0.0-beta.1"` + * @throws If the string is not a valid version + */ static parse(version: string): Version { const parsed = P.parse(version, { startRule: 'Version' }) return new Version(parsed.number, parsed.prerelease) } + /** + * Returns `true` if this version satisfies the given {@link VersionRange}. + * Internally treats this as an unflavored {@link ExtendedVersion} with downstream `0`. + */ satisfies(versionRange: VersionRange): boolean { return new ExtendedVersion(null, this, new Version([0], [])).satisfies( versionRange, @@ -806,18 +948,50 @@ export class Version { } } -// #flavor:0.1.2-beta.1:0 +/** + * Represents an extended version with an optional flavor, an upstream version, and a downstream version. + * + * The format is `#flavor:upstream:downstream` (e.g. `#bitcoin:1.2.3:0`) or `upstream:downstream` + * for unflavored versions. Flavors allow multiple variants of a package to coexist. + * + * - **flavor**: An optional string identifier for the variant (e.g. 
`"bitcoin"`, `"litecoin"`) + * - **upstream**: The version of the upstream software being packaged + * - **downstream**: The version of the StartOS packaging itself + * + * Versions with different flavors are incomparable (comparison returns `null`). + * + * @example + * ```ts + * const v = ExtendedVersion.parse("#bitcoin:1.2.3:0") + * console.log(v.flavor) // "bitcoin" + * console.log(v.upstream) // Version { number: [1, 2, 3] } + * console.log(v.downstream) // Version { number: [0] } + * console.log(v.toString()) // "#bitcoin:1.2.3:0" + * + * const range = VersionRange.parse(">=1.0.0:0") + * console.log(v.satisfies(range)) // true + * ``` + */ export class ExtendedVersion { constructor( + /** The flavor identifier (e.g. `"bitcoin"`), or `null` for unflavored versions. */ public flavor: string | null, + /** The upstream software version. */ public upstream: Version, + /** The downstream packaging version. */ public downstream: Version, ) {} + /** Serializes this extended version to its string form (e.g. `"#bitcoin:1.2.3:0"` or `"1.0.0:1"`). */ toString(): string { return `${this.flavor ? `#${this.flavor}:` : ''}${this.upstream.toString()}:${this.downstream.toString()}` } + /** + * Compares this extended version against another. + * + * @returns `'greater'`, `'equal'`, `'less'`, or `null` if the flavors differ (incomparable) + */ compare(other: ExtendedVersion): 'greater' | 'equal' | 'less' | null { if (this.flavor !== other.flavor) { return null @@ -829,6 +1003,10 @@ export class ExtendedVersion { return this.downstream.compare(other.downstream) } + /** + * Lexicographic comparison — compares flavors alphabetically first, then versions. + * Unlike {@link compare}, this never returns `null`: different flavors are ordered alphabetically. 
+ */ compareLexicographic(other: ExtendedVersion): 'greater' | 'equal' | 'less' { if ((this.flavor || '') > (other.flavor || '')) { return 'greater' @@ -839,6 +1017,10 @@ export class ExtendedVersion { } } + /** + * Returns a numeric comparison result suitable for use with `Array.sort()`. + * Uses lexicographic ordering (flavors sorted alphabetically, then by version). + */ compareForSort(other: ExtendedVersion): 1 | 0 | -1 { switch (this.compareLexicographic(other)) { case 'greater': @@ -850,26 +1032,37 @@ export class ExtendedVersion { } } + /** Returns `true` if this version is strictly greater than `other`. Returns `false` if flavors differ. */ greaterThan(other: ExtendedVersion): boolean { return this.compare(other) === 'greater' } + /** Returns `true` if this version is greater than or equal to `other`. Returns `false` if flavors differ. */ greaterThanOrEqual(other: ExtendedVersion): boolean { return ['greater', 'equal'].includes(this.compare(other) as string) } + /** Returns `true` if this version equals `other` (same flavor, upstream, and downstream). */ equals(other: ExtendedVersion): boolean { return this.compare(other) === 'equal' } + /** Returns `true` if this version is strictly less than `other`. Returns `false` if flavors differ. */ lessThan(other: ExtendedVersion): boolean { return this.compare(other) === 'less' } + /** Returns `true` if this version is less than or equal to `other`. Returns `false` if flavors differ. */ lessThanOrEqual(other: ExtendedVersion): boolean { return ['less', 'equal'].includes(this.compare(other) as string) } + /** + * Parses an extended version string into an `ExtendedVersion`. 
+ * + * @param extendedVersion - A string like `"1.2.3:0"` or `"#bitcoin:1.0.0:0"` + * @throws If the string is not a valid extended version + */ static parse(extendedVersion: string): ExtendedVersion { const parsed = P.parse(extendedVersion, { startRule: 'ExtendedVersion' }) return new ExtendedVersion( @@ -879,6 +1072,12 @@ export class ExtendedVersion { ) } + /** + * Parses a legacy "emver" format extended version string. + * + * @param extendedVersion - A version string in the legacy emver format + * @throws If the string is not a valid emver version (error message includes the input string) + */ static parseEmver(extendedVersion: string): ExtendedVersion { try { const parsed = P.parse(extendedVersion, { startRule: 'Emver' }) @@ -1014,8 +1213,29 @@ export class ExtendedVersion { } } +/** + * Compile-time type-checking helper that validates an extended version string literal. + * If the string is invalid, TypeScript will report a type error at the call site. + * + * @example + * ```ts + * testTypeExVer("1.2.3:0") // compiles + * testTypeExVer("#bitcoin:1.0:0") // compiles + * testTypeExVer("invalid") // type error + * ``` + */ export const testTypeExVer = (t: T & ValidateExVer) => t +/** + * Compile-time type-checking helper that validates a version string literal. + * If the string is invalid, TypeScript will report a type error at the call site. 
+ * + * @example + * ```ts + * testTypeVersion("1.2.3") // compiles + * testTypeVersion("-3") // type error + * ``` + */ export const testTypeVersion = (t: T & ValidateVersion) => t diff --git a/sdk/base/lib/index.ts b/sdk/base/lib/index.ts index 85d00a794..bcf37fe03 100644 --- a/sdk/base/lib/index.ts +++ b/sdk/base/lib/index.ts @@ -8,6 +8,6 @@ export * as types from './types' export * as T from './types' export * as yaml from 'yaml' export * as inits from './inits' -export * as matches from 'ts-matches' +export { z } from './zExport' export * as utils from './util' diff --git a/sdk/base/lib/inits/setupInit.ts b/sdk/base/lib/inits/setupInit.ts index 25f499e42..577ca0a27 100644 --- a/sdk/base/lib/inits/setupInit.ts +++ b/sdk/base/lib/inits/setupInit.ts @@ -2,21 +2,37 @@ import { VersionRange } from '../../../base/lib/exver' import * as T from '../../../base/lib/types' import { once } from '../util' +/** + * The reason a service's init function is being called: + * - `'install'` — first-time installation + * - `'update'` — after a package update + * - `'restore'` — after restoring from backup + * - `null` — regular startup (no special lifecycle event) + */ export type InitKind = 'install' | 'update' | 'restore' | null +/** Function signature for an init handler that runs during service startup. */ export type InitFn = ( effects: T.Effects, kind: Kind, ) => Promise +/** Object form of an init handler — implements an `init()` method. */ export interface InitScript { init(effects: T.Effects, kind: Kind): Promise } +/** Either an {@link InitScript} object or an {@link InitFn} function. */ export type InitScriptOrFn = | InitScript | InitFn +/** + * Composes multiple init handlers into a single `ExpectedExports.init`-compatible function. + * Handlers are executed sequentially in the order provided. 
+ * + * @param inits - One or more init handlers to compose + */ export function setupInit(...inits: InitScriptOrFn[]): T.ExpectedExports.init { return async (opts) => { for (const idx in inits) { @@ -42,6 +58,7 @@ export function setupInit(...inits: InitScriptOrFn[]): T.ExpectedExports.init { } } +/** Normalizes an {@link InitScriptOrFn} into an {@link InitScript} object. */ export function setupOnInit(onInit: InitScriptOrFn): InitScript { return 'init' in onInit ? onInit diff --git a/sdk/base/lib/inits/setupUninit.ts b/sdk/base/lib/inits/setupUninit.ts index 52d111fa5..fee005531 100644 --- a/sdk/base/lib/inits/setupUninit.ts +++ b/sdk/base/lib/inits/setupUninit.ts @@ -1,6 +1,9 @@ import { ExtendedVersion, VersionRange } from '../../../base/lib/exver' import * as T from '../../../base/lib/types' +/** + * Function signature for an uninit handler that runs during service shutdown/uninstall. + */ export type UninitFn = ( effects: T.Effects, /** @@ -13,6 +16,7 @@ export type UninitFn = ( target: VersionRange | ExtendedVersion | null, ) => Promise +/** Object form of an uninit handler — implements an `uninit()` method. */ export interface UninitScript { uninit( effects: T.Effects, @@ -27,8 +31,15 @@ export interface UninitScript { ): Promise } +/** Either a {@link UninitScript} object or a {@link UninitFn} function. */ export type UninitScriptOrFn = UninitScript | UninitFn +/** + * Composes multiple uninit handlers into a single `ExpectedExports.uninit`-compatible function. + * Handlers are executed sequentially in the order provided. + * + * @param uninits - One or more uninit handlers to compose + */ export function setupUninit( ...uninits: UninitScriptOrFn[] ): T.ExpectedExports.uninit { @@ -40,6 +51,7 @@ export function setupUninit( } } +/** Normalizes a {@link UninitScriptOrFn} into a {@link UninitScript} object. */ export function setupOnUninit(onUninit: UninitScriptOrFn): UninitScript { return 'uninit' in onUninit ? 
onUninit diff --git a/sdk/base/lib/interfaces/Host.ts b/sdk/base/lib/interfaces/Host.ts index 0ad4424ba..8842caf77 100644 --- a/sdk/base/lib/interfaces/Host.ts +++ b/sdk/base/lib/interfaces/Host.ts @@ -1,4 +1,4 @@ -import { object, string } from 'ts-matches' +import { z } from 'zod' import { Effects } from '../Effects' import { Origin } from './Origin' import { AddSslOptions, BindParams } from '../osBindings' @@ -69,9 +69,8 @@ export type BindOptionsByProtocol = | BindOptionsByKnownProtocol | (BindOptions & { protocol: null }) -const hasStringProtocol = object({ - protocol: string, -}).test +const hasStringProtocol = (v: unknown): v is { protocol: string } => + z.object({ protocol: z.string() }).safeParse(v).success export class MultiHost { constructor( diff --git a/sdk/base/lib/interfaces/setupExportedUrls.ts b/sdk/base/lib/interfaces/setupExportedUrls.ts new file mode 100644 index 000000000..f090747de --- /dev/null +++ b/sdk/base/lib/interfaces/setupExportedUrls.ts @@ -0,0 +1,28 @@ +import { Effects, PluginHostnameInfo } from '../types' + +export type SetExportedUrls = (opts: { effects: Effects }) => Promise +export type UpdateExportedUrls = (effects: Effects) => Promise +export type SetupExportedUrls = (fn: SetExportedUrls) => UpdateExportedUrls + +export const setupExportedUrls: SetupExportedUrls = (fn: SetExportedUrls) => { + return (async (effects: Effects) => { + const urls: PluginHostnameInfo[] = [] + await fn({ + effects: { + ...effects, + plugin: { + ...effects.plugin, + url: { + ...effects.plugin.url, + exportUrl: (params) => { + urls.push(params.hostnameInfo) + return effects.plugin.url.exportUrl(params) + }, + }, + }, + }, + }) + await effects.plugin.url.clearUrls({ except: urls }) + return null + }) as UpdateExportedUrls +} diff --git a/sdk/base/lib/osBindings/AddPrivateDomainParams.ts b/sdk/base/lib/osBindings/AddPrivateDomainParams.ts new file mode 100644 index 000000000..5eb1f3e50 --- /dev/null +++ 
b/sdk/base/lib/osBindings/AddPrivateDomainParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayId } from './GatewayId' + +export type AddPrivateDomainParams = { fqdn: string; gateway: GatewayId } diff --git a/sdk/base/lib/osBindings/AddPublicDomainParams.ts b/sdk/base/lib/osBindings/AddPublicDomainParams.ts new file mode 100644 index 000000000..3d7ddbdc1 --- /dev/null +++ b/sdk/base/lib/osBindings/AddPublicDomainParams.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { AcmeProvider } from './AcmeProvider' +import type { GatewayId } from './GatewayId' + +export type AddPublicDomainParams = { + fqdn: string + acme: AcmeProvider | null + gateway: GatewayId +} diff --git a/sdk/base/lib/osBindings/AddTunnelParams.ts b/sdk/base/lib/osBindings/AddTunnelParams.ts index 018b22bb5..639a4e36a 100644 --- a/sdk/base/lib/osBindings/AddTunnelParams.ts +++ b/sdk/base/lib/osBindings/AddTunnelParams.ts @@ -1,3 +1,9 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayType } from './GatewayType' -export type AddTunnelParams = { name: string; config: string; public: boolean } +export type AddTunnelParams = { + name: string + config: string + type: GatewayType | null + setAsDefaultOutbound: boolean +} diff --git a/sdk/base/lib/osBindings/BackupInfo.ts b/sdk/base/lib/osBindings/BackupInfo.ts new file mode 100644 index 000000000..bc36fecb1 --- /dev/null +++ b/sdk/base/lib/osBindings/BackupInfo.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { PackageBackupInfo } from './PackageBackupInfo' +import type { PackageId } from './PackageId' + +export type BackupInfo = { + version: string + timestamp: string | null + packageBackups: { [key: PackageId]: PackageBackupInfo } +} diff --git a/sdk/base/lib/osBindings/BackupParams.ts b/sdk/base/lib/osBindings/BackupParams.ts new file mode 100644 index 000000000..921f4ec22 --- /dev/null +++ b/sdk/base/lib/osBindings/BackupParams.ts @@ -0,0 +1,11 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { BackupTargetId } from './BackupTargetId' +import type { PackageId } from './PackageId' +import type { PasswordType } from './PasswordType' + +export type BackupParams = { + targetId: BackupTargetId + oldPassword: PasswordType | null + packageIds: Array | null + password: PasswordType +} diff --git a/sdk/base/lib/osBindings/BackupReport.ts b/sdk/base/lib/osBindings/BackupReport.ts new file mode 100644 index 000000000..9cc3f0576 --- /dev/null +++ b/sdk/base/lib/osBindings/BackupReport.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { PackageBackupReport } from './PackageBackupReport' +import type { PackageId } from './PackageId' +import type { ServerBackupReport } from './ServerBackupReport' + +export type BackupReport = { + server: ServerBackupReport + packages: { [key: PackageId]: PackageBackupReport } +} diff --git a/sdk/base/lib/osBindings/BackupTarget.ts b/sdk/base/lib/osBindings/BackupTarget.ts new file mode 100644 index 000000000..886222570 --- /dev/null +++ b/sdk/base/lib/osBindings/BackupTarget.ts @@ -0,0 +1,17 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { CifsBackupTarget } from './CifsBackupTarget' +import type { StartOsRecoveryInfo } from './StartOsRecoveryInfo' + +export type BackupTarget = + | { + type: 'disk' + vendor: string | null + model: string | null + logicalname: string + label: string | null + capacity: number + used: number | null + startOs: { [key: string]: StartOsRecoveryInfo } + guid: string | null + } + | ({ type: 'cifs' } & CifsBackupTarget) diff --git a/sdk/base/lib/osBindings/BackupTargetId.ts b/sdk/base/lib/osBindings/BackupTargetId.ts new file mode 100644 index 000000000..927dd2e1b --- /dev/null +++ b/sdk/base/lib/osBindings/BackupTargetId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type BackupTargetId = string diff --git a/sdk/base/lib/osBindings/BindInfo.ts b/sdk/base/lib/osBindings/BindInfo.ts index eba1fe446..d83c6948f 100644 --- a/sdk/base/lib/osBindings/BindInfo.ts +++ b/sdk/base/lib/osBindings/BindInfo.ts @@ -1,5 +1,11 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
import type { BindOptions } from './BindOptions' +import type { DerivedAddressInfo } from './DerivedAddressInfo' import type { NetInfo } from './NetInfo' -export type BindInfo = { enabled: boolean; options: BindOptions; net: NetInfo } +export type BindInfo = { + enabled: boolean + options: BindOptions + net: NetInfo + addresses: DerivedAddressInfo +} diff --git a/sdk/base/lib/osBindings/BindingGatewaySetEnabledParams.ts b/sdk/base/lib/osBindings/BindingSetAddressEnabledParams.ts similarity index 58% rename from sdk/base/lib/osBindings/BindingGatewaySetEnabledParams.ts rename to sdk/base/lib/osBindings/BindingSetAddressEnabledParams.ts index fb90cdaa7..2dfeff757 100644 --- a/sdk/base/lib/osBindings/BindingGatewaySetEnabledParams.ts +++ b/sdk/base/lib/osBindings/BindingSetAddressEnabledParams.ts @@ -1,8 +1,7 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { GatewayId } from './GatewayId' -export type BindingGatewaySetEnabledParams = { +export type BindingSetAddressEnabledParams = { internalPort: number - gateway: GatewayId + address: string enabled: boolean | null } diff --git a/sdk/base/lib/osBindings/Bindings.ts b/sdk/base/lib/osBindings/Bindings.ts new file mode 100644 index 000000000..ef921868e --- /dev/null +++ b/sdk/base/lib/osBindings/Bindings.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { BindInfo } from './BindInfo' + +export type Bindings = { [key: number]: BindInfo } diff --git a/sdk/base/lib/osBindings/CancelInstallParams.ts b/sdk/base/lib/osBindings/CancelInstallParams.ts new file mode 100644 index 000000000..28f73d5d7 --- /dev/null +++ b/sdk/base/lib/osBindings/CancelInstallParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { PackageId } from './PackageId' + +export type CancelInstallParams = { id: PackageId } diff --git a/sdk/base/lib/osBindings/Celsius.ts b/sdk/base/lib/osBindings/Celsius.ts index 298f97f5e..24b3380df 100644 --- a/sdk/base/lib/osBindings/Celsius.ts +++ b/sdk/base/lib/osBindings/Celsius.ts @@ -1,3 +1,3 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -export type Celsius = number +export type Celsius = { value: string; unit: string } diff --git a/sdk/base/lib/osBindings/CheckDnsParams.ts b/sdk/base/lib/osBindings/CheckDnsParams.ts new file mode 100644 index 000000000..992d0706a --- /dev/null +++ b/sdk/base/lib/osBindings/CheckDnsParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayId } from './GatewayId' + +export type CheckDnsParams = { gateway: GatewayId } diff --git a/sdk/base/lib/osBindings/CheckPortParams.ts b/sdk/base/lib/osBindings/CheckPortParams.ts new file mode 100644 index 000000000..63ab439a7 --- /dev/null +++ b/sdk/base/lib/osBindings/CheckPortParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayId } from './GatewayId' + +export type CheckPortParams = { port: number; gateway: GatewayId } diff --git a/sdk/base/lib/osBindings/CheckPortRes.ts b/sdk/base/lib/osBindings/CheckPortRes.ts new file mode 100644 index 000000000..d50296885 --- /dev/null +++ b/sdk/base/lib/osBindings/CheckPortRes.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type CheckPortRes = { + ip: string + port: number + openExternally: boolean + openInternally: boolean + hairpinning: boolean +} diff --git a/sdk/base/lib/osBindings/CifsAddParams.ts b/sdk/base/lib/osBindings/CifsAddParams.ts new file mode 100644 index 000000000..dad0b6bb9 --- /dev/null +++ b/sdk/base/lib/osBindings/CifsAddParams.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type CifsAddParams = { + hostname: string + path: string + username: string + password: string | null +} diff --git a/sdk/base/lib/osBindings/CifsBackupTarget.ts b/sdk/base/lib/osBindings/CifsBackupTarget.ts new file mode 100644 index 000000000..9620e6b52 --- /dev/null +++ b/sdk/base/lib/osBindings/CifsBackupTarget.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { StartOsRecoveryInfo } from './StartOsRecoveryInfo' + +export type CifsBackupTarget = { + hostname: string + path: string + username: string + mountable: boolean + startOs: { [key: string]: StartOsRecoveryInfo } +} diff --git a/sdk/base/lib/osBindings/CifsRemoveParams.ts b/sdk/base/lib/osBindings/CifsRemoveParams.ts new file mode 100644 index 000000000..161a24d80 --- /dev/null +++ b/sdk/base/lib/osBindings/CifsRemoveParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { BackupTargetId } from './BackupTargetId' + +export type CifsRemoveParams = { id: BackupTargetId } diff --git a/sdk/base/lib/osBindings/CifsUpdateParams.ts b/sdk/base/lib/osBindings/CifsUpdateParams.ts new file mode 100644 index 000000000..8529ee51d --- /dev/null +++ b/sdk/base/lib/osBindings/CifsUpdateParams.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { BackupTargetId } from './BackupTargetId' + +export type CifsUpdateParams = { + id: BackupTargetId + hostname: string + path: string + username: string + password: string | null +} diff --git a/sdk/base/lib/osBindings/ClearTaskParams.ts b/sdk/base/lib/osBindings/ClearTaskParams.ts new file mode 100644 index 000000000..6bdc3ae91 --- /dev/null +++ b/sdk/base/lib/osBindings/ClearTaskParams.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { PackageId } from './PackageId' +import type { ReplayId } from './ReplayId' + +export type ClearTaskParams = { + packageId: PackageId + replayId: ReplayId + force: boolean +} diff --git a/sdk/base/lib/osBindings/ControlParams.ts b/sdk/base/lib/osBindings/ControlParams.ts new file mode 100644 index 000000000..7f1a77a0f --- /dev/null +++ b/sdk/base/lib/osBindings/ControlParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { PackageId } from './PackageId' + +export type ControlParams = { id: PackageId } diff --git a/sdk/base/lib/osBindings/DepInfo.ts b/sdk/base/lib/osBindings/DepInfo.ts index cd8e482cc..07b0908e3 100644 --- a/sdk/base/lib/osBindings/DepInfo.ts +++ b/sdk/base/lib/osBindings/DepInfo.ts @@ -1,7 +1,8 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { LocaleString } from './LocaleString' import type { MetadataSrc } from './MetadataSrc' export type DepInfo = { - description: string | null + description: LocaleString | null optional: boolean } & MetadataSrc diff --git a/sdk/base/lib/osBindings/DerivedAddressInfo.ts b/sdk/base/lib/osBindings/DerivedAddressInfo.ts new file mode 100644 index 000000000..79fb49bda --- /dev/null +++ b/sdk/base/lib/osBindings/DerivedAddressInfo.ts @@ -0,0 +1,17 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { HostnameInfo } from './HostnameInfo' + +export type DerivedAddressInfo = { + /** + * User override: enable these addresses (only for public IP & port) + */ + enabled: Array + /** + * User override: disable these addresses (only for domains and private IP & port) + */ + disabled: Array<[string, number]> + /** + * COMPUTED: NetServiceData::update — all possible addresses for this binding + */ + available: Array +} diff --git a/sdk/base/lib/osBindings/EffectsRunActionParams.ts b/sdk/base/lib/osBindings/EffectsRunActionParams.ts new file mode 100644 index 000000000..eb93655ad --- /dev/null +++ b/sdk/base/lib/osBindings/EffectsRunActionParams.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { ActionId } from './ActionId' +import type { PackageId } from './PackageId' + +export type EffectsRunActionParams = { + packageId?: PackageId + actionId: ActionId + input: any +} diff --git a/sdk/base/lib/osBindings/ErrorData.ts b/sdk/base/lib/osBindings/ErrorData.ts index 3485b2f8b..fc2fa1e9a 100644 --- a/sdk/base/lib/osBindings/ErrorData.ts +++ b/sdk/base/lib/osBindings/ErrorData.ts @@ -1,3 +1,3 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-export type ErrorData = { details: string; debug: string } +export type ErrorData = { details: string; debug: string; info: unknown } diff --git a/sdk/base/lib/osBindings/ForgetGatewayParams.ts b/sdk/base/lib/osBindings/ForgetGatewayParams.ts new file mode 100644 index 000000000..ce453bdb2 --- /dev/null +++ b/sdk/base/lib/osBindings/ForgetGatewayParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayId } from './GatewayId' + +export type ForgetGatewayParams = { gateway: GatewayId } diff --git a/sdk/base/lib/osBindings/GatewayType.ts b/sdk/base/lib/osBindings/GatewayType.ts new file mode 100644 index 000000000..aa7a2d6ed --- /dev/null +++ b/sdk/base/lib/osBindings/GatewayType.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type GatewayType = 'inbound-outbound' | 'outbound-only' diff --git a/sdk/base/lib/osBindings/GetActionInputParams.ts b/sdk/base/lib/osBindings/GetActionInputParams.ts index 3142ca2f0..d38727ca8 100644 --- a/sdk/base/lib/osBindings/GetActionInputParams.ts +++ b/sdk/base/lib/osBindings/GetActionInputParams.ts @@ -2,4 +2,8 @@ import type { ActionId } from './ActionId' import type { PackageId } from './PackageId' -export type GetActionInputParams = { packageId?: PackageId; actionId: ActionId } +export type GetActionInputParams = { + packageId?: PackageId + actionId: ActionId + prefill: Record | null +} diff --git a/sdk/base/lib/osBindings/GetOutboundGatewayParams.ts b/sdk/base/lib/osBindings/GetOutboundGatewayParams.ts new file mode 100644 index 000000000..703fb4f08 --- /dev/null +++ b/sdk/base/lib/osBindings/GetOutboundGatewayParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { CallbackId } from './CallbackId' + +export type GetOutboundGatewayParams = { callback?: CallbackId } diff --git a/sdk/base/lib/osBindings/GigaBytes.ts b/sdk/base/lib/osBindings/GigaBytes.ts index 9e5e4ed7b..cb2250230 100644 --- a/sdk/base/lib/osBindings/GigaBytes.ts +++ b/sdk/base/lib/osBindings/GigaBytes.ts @@ -1,3 +1,3 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -export type GigaBytes = number +export type GigaBytes = { value: string; unit: string } diff --git a/sdk/base/lib/osBindings/Host.ts b/sdk/base/lib/osBindings/Host.ts index e34af2ae8..da048ff5f 100644 --- a/sdk/base/lib/osBindings/Host.ts +++ b/sdk/base/lib/osBindings/Host.ts @@ -1,15 +1,15 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { BindInfo } from './BindInfo' -import type { HostnameInfo } from './HostnameInfo' +import type { Bindings } from './Bindings' +import type { GatewayId } from './GatewayId' +import type { PortForward } from './PortForward' import type { PublicDomainConfig } from './PublicDomainConfig' export type Host = { - bindings: { [key: number]: BindInfo } - onions: string[] + bindings: Bindings publicDomains: { [key: string]: PublicDomainConfig } - privateDomains: Array + privateDomains: { [key: string]: Array } /** - * COMPUTED: NetService::update + * COMPUTED: port forwarding rules needed on gateways for public addresses to work. */ - hostnameInfo: { [key: number]: Array } + portForwards: Array } diff --git a/sdk/base/lib/osBindings/HostnameInfo.ts b/sdk/base/lib/osBindings/HostnameInfo.ts index f2bb5e226..ff08c833d 100644 --- a/sdk/base/lib/osBindings/HostnameInfo.ts +++ b/sdk/base/lib/osBindings/HostnameInfo.ts @@ -1,8 +1,10 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
-import type { GatewayInfo } from './GatewayInfo' -import type { IpHostname } from './IpHostname' -import type { OnionHostname } from './OnionHostname' +import type { HostnameMetadata } from './HostnameMetadata' -export type HostnameInfo = - | { kind: 'ip'; gateway: GatewayInfo; public: boolean; hostname: IpHostname } - | { kind: 'onion'; hostname: OnionHostname } +export type HostnameInfo = { + ssl: boolean + public: boolean + hostname: string + port: number | null + metadata: HostnameMetadata +} diff --git a/sdk/base/lib/osBindings/HostnameMetadata.ts b/sdk/base/lib/osBindings/HostnameMetadata.ts new file mode 100644 index 000000000..3fb9e7aa7 --- /dev/null +++ b/sdk/base/lib/osBindings/HostnameMetadata.ts @@ -0,0 +1,18 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { ActionId } from './ActionId' +import type { GatewayId } from './GatewayId' +import type { PackageId } from './PackageId' + +export type HostnameMetadata = + | { kind: 'ipv4'; gateway: GatewayId } + | { kind: 'ipv6'; gateway: GatewayId; scopeId: number } + | { kind: 'mdns'; gateways: Array } + | { kind: 'private-domain'; gateways: Array } + | { kind: 'public-domain'; gateway: GatewayId } + | { + kind: 'plugin' + packageId: PackageId + removeAction: ActionId | null + overflowActions: Array + info: unknown + } diff --git a/sdk/base/lib/osBindings/InfoParams.ts b/sdk/base/lib/osBindings/InfoParams.ts new file mode 100644 index 000000000..4e4aa388e --- /dev/null +++ b/sdk/base/lib/osBindings/InfoParams.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { BackupTargetId } from './BackupTargetId' + +export type InfoParams = { + targetId: BackupTargetId + serverId: string + password: string +} diff --git a/sdk/base/lib/osBindings/InitAcmeParams.ts b/sdk/base/lib/osBindings/InitAcmeParams.ts new file mode 100644 index 000000000..38a90e91e --- /dev/null +++ b/sdk/base/lib/osBindings/InitAcmeParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { AcmeProvider } from './AcmeProvider' + +export type InitAcmeParams = { provider: AcmeProvider; contact: Array } diff --git a/sdk/base/lib/osBindings/IpHostname.ts b/sdk/base/lib/osBindings/IpHostname.ts deleted file mode 100644 index 6f6be463f..000000000 --- a/sdk/base/lib/osBindings/IpHostname.ts +++ /dev/null @@ -1,23 +0,0 @@ -// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. - -export type IpHostname = - | { kind: 'ipv4'; value: string; port: number | null; sslPort: number | null } - | { - kind: 'ipv6' - value: string - scopeId: number - port: number | null - sslPort: number | null - } - | { - kind: 'local' - value: string - port: number | null - sslPort: number | null - } - | { - kind: 'domain' - value: string - port: number | null - sslPort: number | null - } diff --git a/sdk/base/lib/osBindings/KillParams.ts b/sdk/base/lib/osBindings/KillParams.ts new file mode 100644 index 000000000..fb0342f24 --- /dev/null +++ b/sdk/base/lib/osBindings/KillParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type KillParams = { ids: Array } diff --git a/sdk/base/lib/osBindings/OnionHostname.ts b/sdk/base/lib/osBindings/ListNotificationParams.ts similarity index 54% rename from sdk/base/lib/osBindings/OnionHostname.ts rename to sdk/base/lib/osBindings/ListNotificationParams.ts index 0bea8245e..814b97fc6 100644 --- a/sdk/base/lib/osBindings/OnionHostname.ts +++ b/sdk/base/lib/osBindings/ListNotificationParams.ts @@ -1,7 +1,6 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -export type OnionHostname = { - value: string - port: number | null - sslPort: number | null +export type ListNotificationParams = { + before: number | null + limit: number | null } diff --git a/sdk/base/lib/osBindings/LogEntry.ts b/sdk/base/lib/osBindings/LogEntry.ts new file mode 100644 index 000000000..b036b428d --- /dev/null +++ b/sdk/base/lib/osBindings/LogEntry.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type LogEntry = { timestamp: string; message: string; bootId: string } diff --git a/sdk/base/lib/osBindings/LogFollowResponse.ts b/sdk/base/lib/osBindings/LogFollowResponse.ts new file mode 100644 index 000000000..503cf414b --- /dev/null +++ b/sdk/base/lib/osBindings/LogFollowResponse.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Guid } from './Guid' + +export type LogFollowResponse = { startCursor: string | null; guid: Guid } diff --git a/sdk/base/lib/osBindings/LogResponse.ts b/sdk/base/lib/osBindings/LogResponse.ts new file mode 100644 index 000000000..be282cc8f --- /dev/null +++ b/sdk/base/lib/osBindings/LogResponse.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { LogEntry } from './LogEntry' + +export type LogResponse = { + entries: Array + startCursor: string | null + endCursor: string | null +} diff --git a/sdk/base/lib/osBindings/LogsParams.ts b/sdk/base/lib/osBindings/LogsParams.ts new file mode 100644 index 000000000..529ac06fd --- /dev/null +++ b/sdk/base/lib/osBindings/LogsParams.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type LogsParams = { + limit?: number + cursor?: string + boot?: number | string + before: boolean +} diff --git a/sdk/base/lib/osBindings/Manifest.ts b/sdk/base/lib/osBindings/Manifest.ts index 98af5c018..c962425eb 100644 --- a/sdk/base/lib/osBindings/Manifest.ts +++ b/sdk/base/lib/osBindings/Manifest.ts @@ -8,32 +8,33 @@ import type { ImageConfig } from './ImageConfig' import type { ImageId } from './ImageId' import type { LocaleString } from './LocaleString' import type { PackageId } from './PackageId' +import type { PluginId } from './PluginId' import type { Version } from './Version' import type { VolumeId } from './VolumeId' export type Manifest = { id: PackageId - title: string version: Version satisfies: Array - releaseNotes: LocaleString canMigrateTo: string canMigrateFrom: string - license: string - wrapperRepo: string - upstreamRepo: string - supportSite: string - marketingSite: string - donationUrl: string | null - docsUrl: string | null - description: Description images: { [key: ImageId]: ImageConfig } volumes: Array - alerts: Alerts dependencies: Dependencies hardwareRequirements: HardwareRequirements - hardwareAcceleration: boolean + title: string + description: Description + releaseNotes: LocaleString gitHash: GitHash | null + license: string + packageRepo: string + upstreamRepo: string + marketingUrl: string + donationUrl: string | null + docsUrls: string[] + alerts: Alerts osVersion: string sdkVersion: string | null + hardwareAcceleration: boolean + plugins: Array } diff 
--git a/sdk/base/lib/osBindings/MebiBytes.ts b/sdk/base/lib/osBindings/MebiBytes.ts index c10760d23..499aa88e9 100644 --- a/sdk/base/lib/osBindings/MebiBytes.ts +++ b/sdk/base/lib/osBindings/MebiBytes.ts @@ -1,3 +1,3 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -export type MebiBytes = number +export type MebiBytes = { value: string; unit: string } diff --git a/sdk/base/lib/osBindings/MetricsFollowResponse.ts b/sdk/base/lib/osBindings/MetricsFollowResponse.ts new file mode 100644 index 000000000..f93ca65b6 --- /dev/null +++ b/sdk/base/lib/osBindings/MetricsFollowResponse.ts @@ -0,0 +1,5 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Guid } from './Guid' +import type { Metrics } from './Metrics' + +export type MetricsFollowResponse = { guid: Guid; metrics: Metrics } diff --git a/sdk/base/lib/osBindings/ModifyNotificationBeforeParams.ts b/sdk/base/lib/osBindings/ModifyNotificationBeforeParams.ts new file mode 100644 index 000000000..a153b5101 --- /dev/null +++ b/sdk/base/lib/osBindings/ModifyNotificationBeforeParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ModifyNotificationBeforeParams = { before: number } diff --git a/sdk/base/lib/osBindings/ModifyNotificationParams.ts b/sdk/base/lib/osBindings/ModifyNotificationParams.ts new file mode 100644 index 000000000..5e14a76b1 --- /dev/null +++ b/sdk/base/lib/osBindings/ModifyNotificationParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type ModifyNotificationParams = { ids: number[] } diff --git a/sdk/base/lib/osBindings/NetInfo.ts b/sdk/base/lib/osBindings/NetInfo.ts index 4483f81b8..90abe2cd8 100644 --- a/sdk/base/lib/osBindings/NetInfo.ts +++ b/sdk/base/lib/osBindings/NetInfo.ts @@ -1,9 +1,6 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { GatewayId } from './GatewayId' export type NetInfo = { - privateDisabled: Array - publicEnabled: Array assignedPort: number | null assignedSslPort: number | null } diff --git a/sdk/base/lib/osBindings/NetworkInfo.ts b/sdk/base/lib/osBindings/NetworkInfo.ts index 5debd58d1..3acfb3851 100644 --- a/sdk/base/lib/osBindings/NetworkInfo.ts +++ b/sdk/base/lib/osBindings/NetworkInfo.ts @@ -13,4 +13,5 @@ export type NetworkInfo = { gateways: { [key: GatewayId]: NetworkInterfaceInfo } acme: { [key: AcmeProvider]: AcmeSettings } dns: DnsSettings + defaultOutbound: string | null } diff --git a/sdk/base/lib/osBindings/NetworkInterfaceInfo.ts b/sdk/base/lib/osBindings/NetworkInterfaceInfo.ts index dd3be99d9..a57f3c1e9 100644 --- a/sdk/base/lib/osBindings/NetworkInterfaceInfo.ts +++ b/sdk/base/lib/osBindings/NetworkInterfaceInfo.ts @@ -1,9 +1,10 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayType } from './GatewayType' import type { IpInfo } from './IpInfo' export type NetworkInterfaceInfo = { name: string | null - public: boolean | null secure: boolean | null ipInfo: IpInfo | null + type: GatewayType | null } diff --git a/sdk/base/lib/osBindings/Notification.ts b/sdk/base/lib/osBindings/Notification.ts new file mode 100644 index 000000000..5a4f8c3b8 --- /dev/null +++ b/sdk/base/lib/osBindings/Notification.ts @@ -0,0 +1,14 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { NotificationLevel } from './NotificationLevel' +import type { PackageId } from './PackageId' + +export type Notification = { + packageId: PackageId | null + createdAt: string + code: number + level: NotificationLevel + title: string + message: string + data: any + seen: boolean +} diff --git a/sdk/base/lib/osBindings/NotificationLevel.ts b/sdk/base/lib/osBindings/NotificationLevel.ts new file mode 100644 index 000000000..71110829d --- /dev/null +++ b/sdk/base/lib/osBindings/NotificationLevel.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type NotificationLevel = 'success' | 'info' | 'warning' | 'error' diff --git a/sdk/base/lib/osBindings/NotificationWithId.ts b/sdk/base/lib/osBindings/NotificationWithId.ts new file mode 100644 index 000000000..f61c98079 --- /dev/null +++ b/sdk/base/lib/osBindings/NotificationWithId.ts @@ -0,0 +1,15 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { NotificationLevel } from './NotificationLevel' +import type { PackageId } from './PackageId' + +export type NotificationWithId = { + id: number + packageId: PackageId | null + createdAt: string + code: number + level: NotificationLevel + title: string + message: string + data: any + seen: boolean +} diff --git a/sdk/base/lib/osBindings/PackageBackupInfo.ts b/sdk/base/lib/osBindings/PackageBackupInfo.ts new file mode 100644 index 000000000..51fc7464d --- /dev/null +++ b/sdk/base/lib/osBindings/PackageBackupInfo.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { Version } from './Version' + +export type PackageBackupInfo = { + title: string + version: Version + osVersion: string + timestamp: string +} diff --git a/sdk/base/lib/osBindings/PackageBackupReport.ts b/sdk/base/lib/osBindings/PackageBackupReport.ts new file mode 100644 index 000000000..067655b5f --- /dev/null +++ b/sdk/base/lib/osBindings/PackageBackupReport.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type PackageBackupReport = { error: string | null } diff --git a/sdk/base/lib/osBindings/PackageDataEntry.ts b/sdk/base/lib/osBindings/PackageDataEntry.ts index 05188c792..ac219040e 100644 --- a/sdk/base/lib/osBindings/PackageDataEntry.ts +++ b/sdk/base/lib/osBindings/PackageDataEntry.ts @@ -4,6 +4,7 @@ import type { ActionMetadata } from './ActionMetadata' import type { CurrentDependencies } from './CurrentDependencies' import type { DataUrl } from './DataUrl' import type { Hosts } from './Hosts' +import type { PackagePlugin } from './PackagePlugin' import type { PackageState } from './PackageState' import type { ReplayId } from './ReplayId' import type { ServiceInterface } from './ServiceInterface' @@ -25,4 +26,6 @@ export type PackageDataEntry = { serviceInterfaces: { [key: ServiceInterfaceId]: ServiceInterface } hosts: Hosts storeExposedDependents: string[] + outboundGateway: string | null + plugin: PackagePlugin } diff --git a/sdk/base/lib/osBindings/PackageInfoShort.ts b/sdk/base/lib/osBindings/PackageInfoShort.ts index 22c7fbea4..0bf858781 100644 --- a/sdk/base/lib/osBindings/PackageInfoShort.ts +++ b/sdk/base/lib/osBindings/PackageInfoShort.ts @@ -1,3 +1,4 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { LocaleString } from './LocaleString' -export type PackageInfoShort = { releaseNotes: string } +export type PackageInfoShort = { releaseNotes: LocaleString } diff --git a/sdk/base/lib/osBindings/PackagePlugin.ts b/sdk/base/lib/osBindings/PackagePlugin.ts new file mode 100644 index 000000000..2a72784f1 --- /dev/null +++ b/sdk/base/lib/osBindings/PackagePlugin.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { UrlPluginRegistration } from './UrlPluginRegistration' + +export type PackagePlugin = { url: UrlPluginRegistration | null } diff --git a/sdk/base/lib/osBindings/PackageVersionInfo.ts b/sdk/base/lib/osBindings/PackageVersionInfo.ts index e3a7c28b9..00a3f3052 100644 --- a/sdk/base/lib/osBindings/PackageVersionInfo.ts +++ b/sdk/base/lib/osBindings/PackageVersionInfo.ts @@ -8,26 +8,27 @@ import type { HardwareRequirements } from './HardwareRequirements' import type { LocaleString } from './LocaleString' import type { MerkleArchiveCommitment } from './MerkleArchiveCommitment' import type { PackageId } from './PackageId' +import type { PluginId } from './PluginId' import type { RegistryAsset } from './RegistryAsset' export type PackageVersionInfo = { + icon: DataUrl + dependencyMetadata: { [key: PackageId]: DependencyMetadata } sourceVersion: string | null s9pks: Array<[HardwareRequirements, RegistryAsset]> title: string - icon: DataUrl description: Description releaseNotes: LocaleString gitHash: GitHash | null license: string - wrapperRepo: string + packageRepo: string upstreamRepo: string - supportSite: string - marketingSite: string + marketingUrl: string donationUrl: string | null - docsUrl: string | null + docsUrls: string[] alerts: Alerts - dependencyMetadata: { [key: PackageId]: DependencyMetadata } osVersion: string sdkVersion: string | null hardwareAcceleration: boolean + plugins: Array } diff --git a/sdk/base/lib/osBindings/PartitionInfo.ts 
b/sdk/base/lib/osBindings/PartitionInfo.ts new file mode 100644 index 000000000..777199c3b --- /dev/null +++ b/sdk/base/lib/osBindings/PartitionInfo.ts @@ -0,0 +1,11 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { StartOsRecoveryInfo } from './StartOsRecoveryInfo' + +export type PartitionInfo = { + logicalname: string + label: string | null + capacity: number + used: number | null + startOs: { [key: string]: StartOsRecoveryInfo } + guid: string | null +} diff --git a/sdk/base/lib/osBindings/Percentage.ts b/sdk/base/lib/osBindings/Percentage.ts index aff21db40..35e7746e4 100644 --- a/sdk/base/lib/osBindings/Percentage.ts +++ b/sdk/base/lib/osBindings/Percentage.ts @@ -1,3 +1,3 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -export type Percentage = number +export type Percentage = { value: string; unit: string } diff --git a/sdk/base/lib/osBindings/PluginHostnameInfo.ts b/sdk/base/lib/osBindings/PluginHostnameInfo.ts new file mode 100644 index 000000000..46117fa81 --- /dev/null +++ b/sdk/base/lib/osBindings/PluginHostnameInfo.ts @@ -0,0 +1,14 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { HostId } from './HostId' +import type { PackageId } from './PackageId' + +export type PluginHostnameInfo = { + packageId: PackageId | null + hostId: HostId + internalPort: number + ssl: boolean + public: boolean + hostname: string + port: number | null + info: unknown +} diff --git a/sdk/base/lib/osBindings/PluginId.ts b/sdk/base/lib/osBindings/PluginId.ts new file mode 100644 index 000000000..9de22068e --- /dev/null +++ b/sdk/base/lib/osBindings/PluginId.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type PluginId = 'url-v0' diff --git a/sdk/base/lib/osBindings/PortForward.ts b/sdk/base/lib/osBindings/PortForward.ts new file mode 100644 index 000000000..c400acdfb --- /dev/null +++ b/sdk/base/lib/osBindings/PortForward.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayId } from './GatewayId' + +export type PortForward = { src: string; dst: string; gateway: GatewayId } diff --git a/sdk/base/lib/osBindings/QueryDnsParams.ts b/sdk/base/lib/osBindings/QueryDnsParams.ts new file mode 100644 index 000000000..a232d1cdd --- /dev/null +++ b/sdk/base/lib/osBindings/QueryDnsParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type QueryDnsParams = { fqdn: string } diff --git a/sdk/base/lib/osBindings/RebuildParams.ts b/sdk/base/lib/osBindings/RebuildParams.ts new file mode 100644 index 000000000..2a530ab1f --- /dev/null +++ b/sdk/base/lib/osBindings/RebuildParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { PackageId } from './PackageId' + +export type RebuildParams = { id: PackageId } diff --git a/sdk/base/lib/osBindings/RemoveAcmeParams.ts b/sdk/base/lib/osBindings/RemoveAcmeParams.ts new file mode 100644 index 000000000..36d1d1946 --- /dev/null +++ b/sdk/base/lib/osBindings/RemoveAcmeParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { AcmeProvider } from './AcmeProvider' + +export type RemoveAcmeParams = { provider: AcmeProvider } diff --git a/sdk/base/lib/osBindings/RemoveDomainParams.ts b/sdk/base/lib/osBindings/RemoveDomainParams.ts new file mode 100644 index 000000000..91ccf235f --- /dev/null +++ b/sdk/base/lib/osBindings/RemoveDomainParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type RemoveDomainParams = { fqdn: string } diff --git a/sdk/base/lib/osBindings/RenameGatewayParams.ts b/sdk/base/lib/osBindings/RenameGatewayParams.ts new file mode 100644 index 000000000..625870299 --- /dev/null +++ b/sdk/base/lib/osBindings/RenameGatewayParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayId } from './GatewayId' + +export type RenameGatewayParams = { id: GatewayId; name: string } diff --git a/sdk/base/lib/osBindings/ResetPasswordParams.ts b/sdk/base/lib/osBindings/ResetPasswordParams.ts new file mode 100644 index 000000000..750e84a45 --- /dev/null +++ b/sdk/base/lib/osBindings/ResetPasswordParams.ts @@ -0,0 +1,7 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { PasswordType } from './PasswordType' + +export type ResetPasswordParams = { + oldPassword: PasswordType | null + newPassword: PasswordType | null +} diff --git a/sdk/base/lib/osBindings/RestorePackageParams.ts b/sdk/base/lib/osBindings/RestorePackageParams.ts new file mode 100644 index 000000000..545d3e7ec --- /dev/null +++ b/sdk/base/lib/osBindings/RestorePackageParams.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { BackupTargetId } from './BackupTargetId' +import type { PackageId } from './PackageId' + +export type RestorePackageParams = { + ids: Array<PackageId> + targetId: BackupTargetId + password: string +} diff --git a/sdk/base/lib/osBindings/RunActionParams.ts b/sdk/base/lib/osBindings/RunActionParams.ts index 7dabffce6..66550838e 100644 --- a/sdk/base/lib/osBindings/RunActionParams.ts +++ b/sdk/base/lib/osBindings/RunActionParams.ts @@ -1,9 +1,11 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. import type { ActionId } from './ActionId' +import type { Guid } from './Guid' import type { PackageId } from './PackageId' export type RunActionParams = { - packageId?: PackageId + packageId: PackageId + eventId: Guid | null actionId: ActionId - input: any + input?: any } diff --git a/sdk/base/lib/osBindings/ServerBackupReport.ts b/sdk/base/lib/osBindings/ServerBackupReport.ts new file mode 100644 index 000000000..8b8a4d5c6 --- /dev/null +++ b/sdk/base/lib/osBindings/ServerBackupReport.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type ServerBackupReport = { attempted: boolean; error: string | null } diff --git a/sdk/base/lib/osBindings/ServerHostname.ts b/sdk/base/lib/osBindings/ServerHostname.ts new file mode 100644 index 000000000..d73f2dc89 --- /dev/null +++ b/sdk/base/lib/osBindings/ServerHostname.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+ +export type ServerHostname = string diff --git a/sdk/base/lib/osBindings/ServerInfo.ts b/sdk/base/lib/osBindings/ServerInfo.ts index 23ff7ab4b..a0eb98e0a 100644 --- a/sdk/base/lib/osBindings/ServerInfo.ts +++ b/sdk/base/lib/osBindings/ServerInfo.ts @@ -10,6 +10,7 @@ export type ServerInfo = { arch: string platform: string id: string + name: string hostname: string version: string packageVersionCompat: string @@ -25,6 +26,7 @@ export type ServerInfo = { zram: boolean governor: Governor | null smtp: SmtpValue | null + ifconfigUrl: string ram: number devices: Array kiosk: boolean | null diff --git a/sdk/base/lib/osBindings/SetCountryParams.ts b/sdk/base/lib/osBindings/SetCountryParams.ts new file mode 100644 index 000000000..0bd6fc337 --- /dev/null +++ b/sdk/base/lib/osBindings/SetCountryParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type SetCountryParams = { country: string } diff --git a/sdk/base/lib/osBindings/SetDefaultOutboundParams.ts b/sdk/base/lib/osBindings/SetDefaultOutboundParams.ts new file mode 100644 index 000000000..de66785ac --- /dev/null +++ b/sdk/base/lib/osBindings/SetDefaultOutboundParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayId } from './GatewayId' + +export type SetDefaultOutboundParams = { gateway: GatewayId | null } diff --git a/sdk/base/lib/osBindings/SetLanguageParams.ts b/sdk/base/lib/osBindings/SetLanguageParams.ts new file mode 100644 index 000000000..7ed420ee6 --- /dev/null +++ b/sdk/base/lib/osBindings/SetLanguageParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type SetLanguageParams = { language: string } diff --git a/sdk/base/lib/osBindings/SetOutboundGatewayParams.ts b/sdk/base/lib/osBindings/SetOutboundGatewayParams.ts new file mode 100644 index 000000000..717509d85 --- /dev/null +++ b/sdk/base/lib/osBindings/SetOutboundGatewayParams.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { GatewayId } from './GatewayId' +import type { PackageId } from './PackageId' + +export type SetOutboundGatewayParams = { + package: PackageId + gateway: GatewayId | null +} diff --git a/sdk/base/lib/osBindings/SetServerHostnameParams.ts b/sdk/base/lib/osBindings/SetServerHostnameParams.ts new file mode 100644 index 000000000..ac81ea3c4 --- /dev/null +++ b/sdk/base/lib/osBindings/SetServerHostnameParams.ts @@ -0,0 +1,6 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type SetServerHostnameParams = { + name: string | null + hostname: string | null +} diff --git a/sdk/base/lib/osBindings/SetStaticDnsParams.ts b/sdk/base/lib/osBindings/SetStaticDnsParams.ts new file mode 100644 index 000000000..df87b13e1 --- /dev/null +++ b/sdk/base/lib/osBindings/SetStaticDnsParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type SetStaticDnsParams = { servers: Array | null } diff --git a/sdk/base/lib/osBindings/SetWifiEnabledParams.ts b/sdk/base/lib/osBindings/SetWifiEnabledParams.ts new file mode 100644 index 000000000..3382e08f2 --- /dev/null +++ b/sdk/base/lib/osBindings/SetWifiEnabledParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type SetWifiEnabledParams = { enabled: boolean } diff --git a/sdk/base/lib/osBindings/SetupExecuteParams.ts b/sdk/base/lib/osBindings/SetupExecuteParams.ts index 0df094f0b..69f358c54 100644 --- a/sdk/base/lib/osBindings/SetupExecuteParams.ts +++ b/sdk/base/lib/osBindings/SetupExecuteParams.ts @@ -4,7 +4,9 @@ import type { RecoverySource } from './RecoverySource' export type SetupExecuteParams = { guid: string - password: EncryptedWire + password: EncryptedWire | null recoverySource: RecoverySource | null kiosk?: boolean + name: string | null + hostname: string | null } diff --git a/sdk/base/lib/osBindings/SideloadParams.ts b/sdk/base/lib/osBindings/SideloadParams.ts new file mode 100644 index 000000000..d7f67746a --- /dev/null +++ b/sdk/base/lib/osBindings/SideloadParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type SideloadParams = {} diff --git a/sdk/base/lib/osBindings/SideloadResponse.ts b/sdk/base/lib/osBindings/SideloadResponse.ts new file mode 100644 index 000000000..21f304615 --- /dev/null +++ b/sdk/base/lib/osBindings/SideloadResponse.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { Guid } from './Guid' + +export type SideloadResponse = { upload: Guid; progress: Guid } diff --git a/sdk/base/lib/osBindings/SignalStrength.ts b/sdk/base/lib/osBindings/SignalStrength.ts new file mode 100644 index 000000000..b7f72309d --- /dev/null +++ b/sdk/base/lib/osBindings/SignalStrength.ts @@ -0,0 +1,6 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +/** + * So a signal strength is a number between 0-100, I want the null option to be 0 since there is no signal + */ +export type SignalStrength = number diff --git a/sdk/base/lib/osBindings/SmtpSecurity.ts b/sdk/base/lib/osBindings/SmtpSecurity.ts new file mode 100644 index 000000000..a1199f03b --- /dev/null +++ b/sdk/base/lib/osBindings/SmtpSecurity.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type SmtpSecurity = 'starttls' | 'tls' diff --git a/sdk/base/lib/osBindings/SmtpValue.ts b/sdk/base/lib/osBindings/SmtpValue.ts index 5291d6602..66e5ff8f9 100644 --- a/sdk/base/lib/osBindings/SmtpValue.ts +++ b/sdk/base/lib/osBindings/SmtpValue.ts @@ -1,9 +1,11 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { SmtpSecurity } from './SmtpSecurity' export type SmtpValue = { - server: string + host: string port: number from: string - login: string + username: string password: string | null + security: SmtpSecurity } diff --git a/sdk/base/lib/osBindings/SshAddParams.ts b/sdk/base/lib/osBindings/SshAddParams.ts new file mode 100644 index 000000000..a5abbbc3b --- /dev/null +++ b/sdk/base/lib/osBindings/SshAddParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { SshPubKey } from './SshPubKey' + +export type SshAddParams = { key: SshPubKey } diff --git a/sdk/base/lib/osBindings/SshDeleteParams.ts b/sdk/base/lib/osBindings/SshDeleteParams.ts new file mode 100644 index 000000000..fe08546a0 --- /dev/null +++ b/sdk/base/lib/osBindings/SshDeleteParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+ +export type SshDeleteParams = { fingerprint: string } diff --git a/sdk/base/lib/osBindings/SshKeyResponse.ts b/sdk/base/lib/osBindings/SshKeyResponse.ts new file mode 100644 index 000000000..30d66aa4c --- /dev/null +++ b/sdk/base/lib/osBindings/SshKeyResponse.ts @@ -0,0 +1,8 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type SshKeyResponse = { + alg: string + fingerprint: string + hostname: string + createdAt: string +} diff --git a/sdk/base/lib/osBindings/SshPubKey.ts b/sdk/base/lib/osBindings/SshPubKey.ts new file mode 100644 index 000000000..cbfcae75f --- /dev/null +++ b/sdk/base/lib/osBindings/SshPubKey.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type SshPubKey = string diff --git a/sdk/base/lib/osBindings/Ssid.ts b/sdk/base/lib/osBindings/Ssid.ts new file mode 100644 index 000000000..17bf88f78 --- /dev/null +++ b/sdk/base/lib/osBindings/Ssid.ts @@ -0,0 +1,6 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +/** + * Ssid are the names of the wifis, usually human readable. + */ +export type Ssid = string diff --git a/sdk/base/lib/osBindings/StartOsRecoveryInfo.ts b/sdk/base/lib/osBindings/StartOsRecoveryInfo.ts new file mode 100644 index 000000000..7cb1aa5e7 --- /dev/null +++ b/sdk/base/lib/osBindings/StartOsRecoveryInfo.ts @@ -0,0 +1,10 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { ServerHostname } from './ServerHostname' + +export type StartOsRecoveryInfo = { + hostname: ServerHostname + version: string + timestamp: string + passwordHash: string | null + wrappedKey: string | null +} diff --git a/sdk/base/lib/osBindings/TestSmtpParams.ts b/sdk/base/lib/osBindings/TestSmtpParams.ts index e2d175f36..e3db51fe8 100644 --- a/sdk/base/lib/osBindings/TestSmtpParams.ts +++ b/sdk/base/lib/osBindings/TestSmtpParams.ts @@ -1,10 +1,12 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { SmtpSecurity } from './SmtpSecurity' export type TestSmtpParams = { - server: string + host: string port: number from: string to: string - login: string + username: string password: string + security: SmtpSecurity } diff --git a/sdk/base/lib/osBindings/TimeInfo.ts b/sdk/base/lib/osBindings/TimeInfo.ts new file mode 100644 index 000000000..e5429f14e --- /dev/null +++ b/sdk/base/lib/osBindings/TimeInfo.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type TimeInfo = { now: string; uptime: bigint } diff --git a/sdk/base/lib/osBindings/UmountParams.ts b/sdk/base/lib/osBindings/UmountParams.ts new file mode 100644 index 000000000..2923332a7 --- /dev/null +++ b/sdk/base/lib/osBindings/UmountParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { BackupTargetId } from './BackupTargetId' + +export type UmountParams = { targetId: BackupTargetId | null } diff --git a/sdk/base/lib/osBindings/UninstallParams.ts b/sdk/base/lib/osBindings/UninstallParams.ts new file mode 100644 index 000000000..7cc2100aa --- /dev/null +++ b/sdk/base/lib/osBindings/UninstallParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
+import type { PackageId } from './PackageId' + +export type UninstallParams = { id: PackageId; soft: boolean; force: boolean } diff --git a/sdk/base/lib/osBindings/UrlPluginClearUrlsParams.ts b/sdk/base/lib/osBindings/UrlPluginClearUrlsParams.ts new file mode 100644 index 000000000..72255710d --- /dev/null +++ b/sdk/base/lib/osBindings/UrlPluginClearUrlsParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { PluginHostnameInfo } from './PluginHostnameInfo' + +export type UrlPluginClearUrlsParams = { except: Array<PluginHostnameInfo> } diff --git a/sdk/base/lib/osBindings/UrlPluginExportUrlParams.ts b/sdk/base/lib/osBindings/UrlPluginExportUrlParams.ts new file mode 100644 index 000000000..51171357c --- /dev/null +++ b/sdk/base/lib/osBindings/UrlPluginExportUrlParams.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { ActionId } from './ActionId' +import type { PluginHostnameInfo } from './PluginHostnameInfo' + +export type UrlPluginExportUrlParams = { + hostnameInfo: PluginHostnameInfo + removeAction: ActionId | null + overflowActions: Array +} diff --git a/sdk/base/lib/osBindings/UrlPluginRegisterParams.ts b/sdk/base/lib/osBindings/UrlPluginRegisterParams.ts new file mode 100644 index 000000000..1a6e3e41b --- /dev/null +++ b/sdk/base/lib/osBindings/UrlPluginRegisterParams.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { ActionId } from './ActionId' + +export type UrlPluginRegisterParams = { tableAction: ActionId } diff --git a/sdk/base/lib/osBindings/UrlPluginRegistration.ts b/sdk/base/lib/osBindings/UrlPluginRegistration.ts new file mode 100644 index 000000000..03f03a11e --- /dev/null +++ b/sdk/base/lib/osBindings/UrlPluginRegistration.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { ActionId } from './ActionId' + +export type UrlPluginRegistration = { tableAction: ActionId } diff --git a/sdk/base/lib/osBindings/WifiAddParams.ts b/sdk/base/lib/osBindings/WifiAddParams.ts new file mode 100644 index 000000000..1dd729b35 --- /dev/null +++ b/sdk/base/lib/osBindings/WifiAddParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type WifiAddParams = { ssid: string; password: string } diff --git a/sdk/base/lib/osBindings/WifiListInfo.ts b/sdk/base/lib/osBindings/WifiListInfo.ts new file mode 100644 index 000000000..b1575d40b --- /dev/null +++ b/sdk/base/lib/osBindings/WifiListInfo.ts @@ -0,0 +1,12 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { SignalStrength } from './SignalStrength' +import type { Ssid } from './Ssid' +import type { WifiListOut } from './WifiListOut' + +export type WifiListInfo = { + ssids: { [key: Ssid]: SignalStrength } + connected: Ssid | null + country: string | null + ethernet: boolean + availableWifi: Array<WifiListOut> +} diff --git a/sdk/base/lib/osBindings/WifiListOut.ts b/sdk/base/lib/osBindings/WifiListOut.ts new file mode 100644 index 000000000..ed5612a0f --- /dev/null +++ b/sdk/base/lib/osBindings/WifiListOut.ts @@ -0,0 +1,9 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { SignalStrength } from './SignalStrength' +import type { Ssid } from './Ssid' + +export type WifiListOut = { + ssid: Ssid + strength: SignalStrength + security: Array +} diff --git a/sdk/base/lib/osBindings/WifiSsidParams.ts b/sdk/base/lib/osBindings/WifiSsidParams.ts new file mode 100644 index 000000000..6dc0a7467 --- /dev/null +++ b/sdk/base/lib/osBindings/WifiSsidParams.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type WifiSsidParams = { ssid: string } diff --git a/sdk/base/lib/osBindings/index.ts b/sdk/base/lib/osBindings/index.ts index 4d9049856..60dc64898 100644 --- a/sdk/base/lib/osBindings/index.ts +++ b/sdk/base/lib/osBindings/index.ts @@ -17,6 +17,8 @@ export { AddMirrorParams } from './AddMirrorParams' export { AddPackageParams } from './AddPackageParams' export { AddPackageSignerParams } from './AddPackageSignerParams' export { AddPackageToCategoryParams } from './AddPackageToCategoryParams' +export { AddPrivateDomainParams } from './AddPrivateDomainParams' +export { AddPublicDomainParams } from './AddPublicDomainParams' export { AddressInfo } from './AddressInfo' export { AddSslOptions } from './AddSslOptions' export { AddTunnelParams } from './AddTunnelParams' @@ -31,30 +33,46 @@ export { AnySigningKey } from './AnySigningKey' export { AnyVerifyingKey } from './AnyVerifyingKey' export { ApiState } from './ApiState' export { AttachParams } from './AttachParams' +export { BackupInfo } from './BackupInfo' +export { BackupParams } from './BackupParams' export { BackupProgress } from './BackupProgress' +export { BackupReport } from './BackupReport' export { BackupTargetFS } from './BackupTargetFS' +export { BackupTargetId } from './BackupTargetId' +export { BackupTarget } from './BackupTarget' export { Base64 } from './Base64' export { BindId } from './BindId' export { BindInfo } from './BindInfo' -export { BindingGatewaySetEnabledParams } from 
'./BindingGatewaySetEnabledParams' +export { BindingSetAddressEnabledParams } from './BindingSetAddressEnabledParams' +export { Bindings } from './Bindings' export { BindOptions } from './BindOptions' export { BindParams } from './BindParams' export { Blake3Commitment } from './Blake3Commitment' export { BlockDev } from './BlockDev' export { BuildArg } from './BuildArg' export { CallbackId } from './CallbackId' +export { CancelInstallParams } from './CancelInstallParams' export { Category } from './Category' export { Celsius } from './Celsius' export { CheckDependenciesParam } from './CheckDependenciesParam' export { CheckDependenciesResult } from './CheckDependenciesResult' +export { CheckDnsParams } from './CheckDnsParams' +export { CheckPortParams } from './CheckPortParams' +export { CheckPortRes } from './CheckPortRes' +export { CifsAddParams } from './CifsAddParams' +export { CifsBackupTarget } from './CifsBackupTarget' +export { CifsRemoveParams } from './CifsRemoveParams' export { Cifs } from './Cifs' +export { CifsUpdateParams } from './CifsUpdateParams' export { ClearActionsParams } from './ClearActionsParams' export { ClearBindingsParams } from './ClearBindingsParams' export { ClearCallbacksParams } from './ClearCallbacksParams' export { ClearServiceInterfacesParams } from './ClearServiceInterfacesParams' +export { ClearTaskParams } from './ClearTaskParams' export { ClearTasksParams } from './ClearTasksParams' export { CliSetIconParams } from './CliSetIconParams' export { ContactInfo } from './ContactInfo' +export { ControlParams } from './ControlParams' export { CreateSubcontainerFsParams } from './CreateSubcontainerFsParams' export { CreateTaskParams } from './CreateTaskParams' export { CurrentDependencies } from './CurrentDependencies' @@ -64,6 +82,7 @@ export { Dependencies } from './Dependencies' export { DependencyMetadata } from './DependencyMetadata' export { DependencyRequirement } from './DependencyRequirement' export { DepInfo } from 
'./DepInfo' +export { DerivedAddressInfo } from './DerivedAddressInfo' export { Description } from './Description' export { DesiredStatus } from './DesiredStatus' export { DestroySubcontainerFsParams } from './DestroySubcontainerFsParams' @@ -73,21 +92,25 @@ export { DomainSettings } from './DomainSettings' export { Duration } from './Duration' export { EchoParams } from './EchoParams' export { EditSignerParams } from './EditSignerParams' +export { EffectsRunActionParams } from './EffectsRunActionParams' export { EncryptedWire } from './EncryptedWire' export { ErrorData } from './ErrorData' export { EventId } from './EventId' export { ExportActionParams } from './ExportActionParams' export { ExportServiceInterfaceParams } from './ExportServiceInterfaceParams' export { FileType } from './FileType' +export { ForgetGatewayParams } from './ForgetGatewayParams' export { FullIndex } from './FullIndex' export { FullProgress } from './FullProgress' export { GatewayId } from './GatewayId' export { GatewayInfo } from './GatewayInfo' +export { GatewayType } from './GatewayType' export { GetActionInputParams } from './GetActionInputParams' export { GetContainerIpParams } from './GetContainerIpParams' export { GetHostInfoParams } from './GetHostInfoParams' export { GetOsAssetParams } from './GetOsAssetParams' export { GetOsVersionParams } from './GetOsVersionParams' +export { GetOutboundGatewayParams } from './GetOutboundGatewayParams' export { GetPackageParams } from './GetPackageParams' export { GetPackageResponseFull } from './GetPackageResponseFull' export { GetPackageResponse } from './GetPackageResponse' @@ -106,6 +129,7 @@ export { HardwareRequirements } from './HardwareRequirements' export { HealthCheckId } from './HealthCheckId' export { HostId } from './HostId' export { HostnameInfo } from './HostnameInfo' +export { HostnameMetadata } from './HostnameMetadata' export { Hosts } from './Hosts' export { Host } from './Host' export { IdMap } from './IdMap' @@ -113,20 
+137,27 @@ export { ImageConfig } from './ImageConfig' export { ImageId } from './ImageId' export { ImageMetadata } from './ImageMetadata' export { ImageSource } from './ImageSource' +export { InfoParams } from './InfoParams' +export { InitAcmeParams } from './InitAcmeParams' export { InitProgressRes } from './InitProgressRes' export { InstalledState } from './InstalledState' export { InstalledVersionParams } from './InstalledVersionParams' export { InstallingInfo } from './InstallingInfo' export { InstallingState } from './InstallingState' export { InstallParams } from './InstallParams' -export { IpHostname } from './IpHostname' export { IpInfo } from './IpInfo' export { KeyboardOptions } from './KeyboardOptions' +export { KillParams } from './KillParams' +export { ListNotificationParams } from './ListNotificationParams' export { ListPackageSignersParams } from './ListPackageSignersParams' export { ListServiceInterfacesParams } from './ListServiceInterfacesParams' export { ListVersionSignersParams } from './ListVersionSignersParams' export { LocaleString } from './LocaleString' +export { LogEntry } from './LogEntry' +export { LogFollowResponse } from './LogFollowResponse' export { LoginParams } from './LoginParams' +export { LogResponse } from './LogResponse' +export { LogsParams } from './LogsParams' export { LshwDevice } from './LshwDevice' export { LshwDisplay } from './LshwDisplay' export { LshwProcessor } from './LshwProcessor' @@ -138,9 +169,12 @@ export { MetadataSrc } from './MetadataSrc' export { Metadata } from './Metadata' export { MetricsCpu } from './MetricsCpu' export { MetricsDisk } from './MetricsDisk' +export { MetricsFollowResponse } from './MetricsFollowResponse' export { MetricsGeneral } from './MetricsGeneral' export { MetricsMemory } from './MetricsMemory' export { Metrics } from './Metrics' +export { ModifyNotificationBeforeParams } from './ModifyNotificationBeforeParams' +export { ModifyNotificationParams } from './ModifyNotificationParams' 
export { MountParams } from './MountParams' export { MountTarget } from './MountTarget' export { NamedHealthCheckResult } from './NamedHealthCheckResult' @@ -149,42 +183,60 @@ export { NetInfo } from './NetInfo' export { NetworkInfo } from './NetworkInfo' export { NetworkInterfaceInfo } from './NetworkInterfaceInfo' export { NetworkInterfaceType } from './NetworkInterfaceType' -export { OnionHostname } from './OnionHostname' +export { NotificationLevel } from './NotificationLevel' +export { Notification } from './Notification' +export { NotificationWithId } from './NotificationWithId' export { OsIndex } from './OsIndex' export { OsVersionInfoMap } from './OsVersionInfoMap' export { OsVersionInfo } from './OsVersionInfo' +export { PackageBackupInfo } from './PackageBackupInfo' +export { PackageBackupReport } from './PackageBackupReport' export { PackageDataEntry } from './PackageDataEntry' export { PackageDetailLevel } from './PackageDetailLevel' export { PackageId } from './PackageId' export { PackageIndex } from './PackageIndex' export { PackageInfoShort } from './PackageInfoShort' export { PackageInfo } from './PackageInfo' +export { PackagePlugin } from './PackagePlugin' export { PackageState } from './PackageState' export { PackageVersionInfo } from './PackageVersionInfo' +export { PartitionInfo } from './PartitionInfo' export { PasswordType } from './PasswordType' export { PathOrUrl } from './PathOrUrl' export { Pem } from './Pem' export { Percentage } from './Percentage' +export { PluginHostnameInfo } from './PluginHostnameInfo' +export { PluginId } from './PluginId' +export { PortForward } from './PortForward' export { Progress } from './Progress' export { ProgressUnits } from './ProgressUnits' export { PublicDomainConfig } from './PublicDomainConfig' export { Public } from './Public' +export { QueryDnsParams } from './QueryDnsParams' +export { RebuildParams } from './RebuildParams' export { RecoverySource } from './RecoverySource' export { RegistryAsset } 
from './RegistryAsset' export { RegistryInfo } from './RegistryInfo' +export { RemoveAcmeParams } from './RemoveAcmeParams' export { RemoveAdminParams } from './RemoveAdminParams' export { RemoveAssetParams } from './RemoveAssetParams' export { RemoveCategoryParams } from './RemoveCategoryParams' +export { RemoveDomainParams } from './RemoveDomainParams' export { RemoveMirrorParams } from './RemoveMirrorParams' export { RemovePackageFromCategoryParams } from './RemovePackageFromCategoryParams' export { RemovePackageParams } from './RemovePackageParams' export { RemovePackageSignerParams } from './RemovePackageSignerParams' export { RemoveTunnelParams } from './RemoveTunnelParams' export { RemoveVersionParams } from './RemoveVersionParams' +export { RenameGatewayParams } from './RenameGatewayParams' export { ReplayId } from './ReplayId' export { RequestCommitment } from './RequestCommitment' +export { ResetPasswordParams } from './ResetPasswordParams' +export { RestorePackageParams } from './RestorePackageParams' export { RunActionParams } from './RunActionParams' export { Security } from './Security' +export { ServerBackupReport } from './ServerBackupReport' +export { ServerHostname } from './ServerHostname' export { ServerInfo } from './ServerInfo' export { ServerSpecs } from './ServerSpecs' export { ServerStatus } from './ServerStatus' @@ -194,21 +246,38 @@ export { ServiceInterfaceType } from './ServiceInterfaceType' export { SessionList } from './SessionList' export { Sessions } from './Sessions' export { Session } from './Session' +export { SetCountryParams } from './SetCountryParams' export { SetDataVersionParams } from './SetDataVersionParams' +export { SetDefaultOutboundParams } from './SetDefaultOutboundParams' export { SetDependenciesParams } from './SetDependenciesParams' export { SetHealth } from './SetHealth' export { SetIconParams } from './SetIconParams' +export { SetLanguageParams } from './SetLanguageParams' export { SetMainStatusStatus } from 
'./SetMainStatusStatus' export { SetMainStatus } from './SetMainStatus' export { SetNameParams } from './SetNameParams' +export { SetOutboundGatewayParams } from './SetOutboundGatewayParams' +export { SetServerHostnameParams } from './SetServerHostnameParams' +export { SetStaticDnsParams } from './SetStaticDnsParams' export { SetupExecuteParams } from './SetupExecuteParams' export { SetupInfo } from './SetupInfo' export { SetupProgress } from './SetupProgress' export { SetupResult } from './SetupResult' export { SetupStatusRes } from './SetupStatusRes' +export { SetWifiEnabledParams } from './SetWifiEnabledParams' +export { SideloadParams } from './SideloadParams' +export { SideloadResponse } from './SideloadResponse' +export { SignalStrength } from './SignalStrength' export { SignAssetParams } from './SignAssetParams' export { SignerInfo } from './SignerInfo' +export { SmtpSecurity } from './SmtpSecurity' export { SmtpValue } from './SmtpValue' +export { SshAddParams } from './SshAddParams' +export { SshDeleteParams } from './SshDeleteParams' +export { SshKeyResponse } from './SshKeyResponse' +export { SshPubKey } from './SshPubKey' +export { Ssid } from './Ssid' +export { StartOsRecoveryInfo } from './StartOsRecoveryInfo' export { StartStop } from './StartStop' export { StatusInfo } from './StatusInfo' export { TaskCondition } from './TaskCondition' @@ -218,9 +287,20 @@ export { TaskSeverity } from './TaskSeverity' export { TaskTrigger } from './TaskTrigger' export { Task } from './Task' export { TestSmtpParams } from './TestSmtpParams' +export { TimeInfo } from './TimeInfo' +export { UmountParams } from './UmountParams' +export { UninstallParams } from './UninstallParams' export { UpdatingState } from './UpdatingState' +export { UrlPluginClearUrlsParams } from './UrlPluginClearUrlsParams' +export { UrlPluginExportUrlParams } from './UrlPluginExportUrlParams' +export { UrlPluginRegisterParams } from './UrlPluginRegisterParams' +export { UrlPluginRegistration } 
from './UrlPluginRegistration' export { VerifyCifsParams } from './VerifyCifsParams' export { VersionSignerParams } from './VersionSignerParams' export { Version } from './Version' export { VolumeId } from './VolumeId' +export { WifiAddParams } from './WifiAddParams' export { WifiInfo } from './WifiInfo' +export { WifiListInfo } from './WifiListInfo' +export { WifiListOut } from './WifiListOut' +export { WifiSsidParams } from './WifiSsidParams' diff --git a/sdk/base/lib/osBindings/tunnel/TunnelUpdateResult.ts b/sdk/base/lib/osBindings/tunnel/TunnelUpdateResult.ts new file mode 100644 index 000000000..c70dbff0b --- /dev/null +++ b/sdk/base/lib/osBindings/tunnel/TunnelUpdateResult.ts @@ -0,0 +1,16 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type TunnelUpdateResult = { + /** + * "up-to-date", "update-available", or "updating" + */ + status: string + /** + * Currently installed version + */ + installed: string + /** + * Available candidate version + */ + candidate: string +} diff --git a/sdk/base/lib/s9pk/index.ts b/sdk/base/lib/s9pk/index.ts index 1c3b74c6d..89f13ec59 100644 --- a/sdk/base/lib/s9pk/index.ts +++ b/sdk/base/lib/s9pk/index.ts @@ -12,6 +12,11 @@ import { FileContents } from './merkleArchive/fileContents' const magicAndVersion = new Uint8Array([59, 59, 2]) +/** + * Compares two `Uint8Array` instances byte-by-byte for equality. + * + * @returns `true` if both arrays have the same length and identical bytes + */ export function compare(a: Uint8Array, b: Uint8Array) { if (a.length !== b.length) return false for (let i = 0; i < a.length; i++) { @@ -20,12 +25,41 @@ export function compare(a: Uint8Array, b: Uint8Array) { return true } +/** + * Represents a parsed `.s9pk` package archive — the binary distribution format for StartOS services. 
+ * + * An `S9pk` wraps a verified {@link Manifest}, a {@link MerkleArchive} containing the package's + * assets (icon, license, dependency metadata), and the total archive size in bytes. + * + * @example + * ```ts + * const s9pk = await S9pk.deserialize(file, null) + * console.log(s9pk.manifest.id) // e.g. "bitcoind" + * console.log(s9pk.size) // archive size in bytes + * const icon = await s9pk.icon() // base64 data URL + * const license = await s9pk.license() + * ``` + */ export class S9pk { private constructor( + /** The parsed package manifest containing metadata, dependencies, and interface definitions. */ readonly manifest: Manifest, + /** The Merkle-verified archive containing the package's files. */ readonly archive: MerkleArchive, + /** The total size of the archive in bytes. */ readonly size: number, ) {} + /** + * Deserializes an `S9pk` from a `Blob` (e.g. a `File` from a browser file input). + * + * Validates the magic bytes and version header, then parses the Merkle archive structure. + * If a `commitment` is provided, the archive is cryptographically verified against it. + * + * @param source - The raw `.s9pk` file as a `Blob` + * @param commitment - An optional Merkle commitment to verify the archive against, or `null` to skip verification + * @returns A fully parsed `S9pk` instance + * @throws If the magic bytes are invalid or the archive fails verification + */ static async deserialize( source: Blob, commitment: MerkleArchiveCommitment | null, @@ -57,6 +91,14 @@ export class S9pk { return new S9pk(manifest, archive, source.size) } + /** + * Extracts the package icon from the archive and returns it as a base64-encoded data URL. + * + * Looks for a file named `icon.*` with an image MIME type (e.g. `icon.png`, `icon.svg`). + * + * @returns A data URL string like `"data:image/png;base64,..."` suitable for use in ``. 
+ * @throws If no icon file is found in the archive + */ async icon(): Promise { const iconName = Object.keys(this.archive.contents.contents).find( (name) => @@ -73,6 +115,12 @@ export class S9pk { ) } + /** + * Returns the metadata (e.g. `{ title }`) for a specific dependency by its package ID. + * + * @param id - The dependency's package identifier (e.g. `"bitcoind"`) + * @returns The dependency metadata object, or `null` if the dependency is not present in the archive + */ async dependencyMetadataFor(id: PackageId) { const entry = this.archive.contents.getPath([ 'dependencies', @@ -85,6 +133,12 @@ export class S9pk { ) as { title: string } } + /** + * Returns the icon for a specific dependency as a base64 data URL. + * + * @param id - The dependency's package identifier + * @returns A data URL string, or `null` if the dependency or its icon is not present + */ async dependencyIconFor(id: PackageId) { const dir = this.archive.contents.getPath(['dependencies', id]) if (!dir || !(dir.contents instanceof DirectoryContents)) return null @@ -101,6 +155,12 @@ export class S9pk { ) } + /** + * Returns a merged record of all dependency metadata (title, icon, description, optional flag) + * for every dependency declared in the manifest. + * + * @returns A record keyed by package ID, each containing `{ title, icon, description, optional }` + */ async dependencyMetadata() { return Object.fromEntries( await Promise.all( @@ -119,6 +179,12 @@ export class S9pk { ) } + /** + * Reads and returns the `LICENSE.md` file from the archive as a UTF-8 string. 
+ * + * @returns The full license text + * @throws If `LICENSE.md` is not found in the archive + */ async license(): Promise { const file = this.archive.contents.getPath(['LICENSE.md']) if (!file || !(file.contents instanceof FileContents)) diff --git a/sdk/base/lib/test/startosTypeValidation.test.ts b/sdk/base/lib/test/startosTypeValidation.test.ts index bcb9f6abb..f02336d7e 100644 --- a/sdk/base/lib/test/startosTypeValidation.test.ts +++ b/sdk/base/lib/test/startosTypeValidation.test.ts @@ -10,7 +10,7 @@ import { GetContainerIpParams, GetStatusParams, CreateTaskParams, - RunActionParams, + EffectsRunActionParams, SetDataVersionParams, SetMainStatus, GetServiceManifestParams, @@ -25,11 +25,15 @@ import { GetSslKeyParams } from '.././osBindings' import { GetServiceInterfaceParams } from '.././osBindings' import { SetDependenciesParams } from '.././osBindings' import { GetSystemSmtpParams } from '.././osBindings' +import { GetOutboundGatewayParams } from '.././osBindings' import { GetServicePortForwardParams } from '.././osBindings' import { ExportServiceInterfaceParams } from '.././osBindings' import { ListServiceInterfacesParams } from '.././osBindings' import { ExportActionParams } from '.././osBindings' import { MountParams } from '.././osBindings' +import { UrlPluginRegisterParams } from '.././osBindings' +import { UrlPluginExportUrlParams } from '.././osBindings' +import { UrlPluginClearUrlsParams } from '.././osBindings' import { StringObject } from '../util' import { ExtendedVersion, VersionRange } from '../exver' function typeEquality(_a: ExpectedType) {} @@ -56,7 +60,7 @@ describe('startosTypeValidation ', () => { clear: {} as ClearActionsParams, export: {} as ExportActionParams, getInput: {} as GetActionInputParams, - run: {} as RunActionParams, + run: {} as EffectsRunActionParams, createTask: {} as CreateTaskParams, clearTasks: {} as ClearTasksParams, }, @@ -80,6 +84,7 @@ describe('startosTypeValidation ', () => { getServiceManifest: {} as WithCallback, 
getSystemSmtp: {} as WithCallback, getContainerIp: {} as WithCallback, + getOutboundGateway: {} as WithCallback, getOsIp: undefined, getServicePortForward: {} as GetServicePortForwardParams, clearServiceInterfaces: {} as ClearServiceInterfacesParams, @@ -90,6 +95,13 @@ describe('startosTypeValidation ', () => { getDependencies: undefined, getStatus: {} as WithCallback, setMainStatus: {} as SetMainStatus, + plugin: { + url: { + register: {} as UrlPluginRegisterParams, + exportUrl: {} as UrlPluginExportUrlParams, + clearUrls: {} as UrlPluginClearUrlsParams, + }, + }, }) }) }) diff --git a/sdk/base/lib/types.ts b/sdk/base/lib/types.ts index 006e6dc66..5d6e3756f 100644 --- a/sdk/base/lib/types.ts +++ b/sdk/base/lib/types.ts @@ -1,4 +1,5 @@ export * as inputSpecTypes from './actions/input/inputSpecTypes' +import { InputSpec as InputSpecClass } from './actions/input/builder/inputSpec' import { DependencyRequirement, @@ -20,20 +21,32 @@ export { CurrentDependenciesResult, } from './dependencies/setupDependencies' +/** An object that can be built into a terminable daemon process. */ export type DaemonBuildable = { build(): Promise<{ term(): Promise }> } +/** The three categories of service network interfaces. */ export type ServiceInterfaceType = 'ui' | 'p2p' | 'api' +/** A Node.js signal name (e.g. `"SIGTERM"`, `"SIGKILL"`). */ export type Signals = NodeJS.Signals +/** The SIGTERM signal — used for graceful daemon termination. */ export const SIGTERM: Signals = 'SIGTERM' +/** The SIGKILL signal — used for forceful daemon termination. */ export const SIGKILL: Signals = 'SIGKILL' +/** Sentinel value (`-1`) indicating that no timeout should be applied. */ export const NO_TIMEOUT = -1 +/** A function that builds an absolute file path from a volume name and relative path. */ export type PathMaker = (options: { volume: string; path: string }) => string +/** A value that may or may not be wrapped in a `Promise`. 
*/ export type MaybePromise = Promise | A +/** + * Namespace defining the required exports for a StartOS service package. + * Every package must export implementations matching these types. + */ export namespace ExpectedExports { version: 1 @@ -62,10 +75,16 @@ export namespace ExpectedExports { target: ExtendedVersion | VersionRange | null }) => Promise + /** The package manifest describing the service's metadata, dependencies, and interfaces. */ export type manifest = Manifest + /** The map of user-invocable actions defined by this service. */ export type actions = Actions>> } +/** + * The complete ABI (Application Binary Interface) for a StartOS service package. + * Maps all required exports to their expected types. + */ export type ABI = { createBackup: ExpectedExports.createBackup main: ExpectedExports.main @@ -74,53 +93,82 @@ export type ABI = { manifest: ExpectedExports.manifest actions: ExpectedExports.actions } +/** A time value in milliseconds. */ export type TimeMs = number +/** A version string in string form. */ export type VersionString = string declare const DaemonProof: unique symbol +/** Opaque branded type proving that a daemon was started. Cannot be constructed directly. */ export type DaemonReceipt = { [DaemonProof]: never } +/** A running daemon with methods to wait for completion or terminate it. */ export type Daemon = { + /** Waits for the daemon to exit and returns its exit message. */ wait(): Promise + /** Terminates the daemon. */ term(): Promise [DaemonProof]: never } +/** The result status of a health check (extracted from `NamedHealthCheckResult`). */ export type HealthStatus = NamedHealthCheckResult['result'] +/** SMTP mail server configuration values. 
*/ export type SmtpValue = { - server: string + host: string port: number from: string - login: string + username: string password: string | null | undefined + security: 'starttls' | 'tls' } +/** + * Marker class indicating that a container should use its own built-in entrypoint + * rather than a custom command. Optionally accepts an override command array. + */ export class UseEntrypoint { readonly USE_ENTRYPOINT = 'USE_ENTRYPOINT' constructor(readonly overridCmd?: string[]) {} } +/** Type guard that checks if a {@link CommandType} is a {@link UseEntrypoint} instance. */ export function isUseEntrypoint( command: CommandType, ): command is UseEntrypoint { return typeof command === 'object' && 'USE_ENTRYPOINT' in command } -export type CommandType = string | [string, ...string[]] | UseEntrypoint +/** + * The ways to specify a command to run in a container: + * - A shell string (run via `sh -c`) + * - An explicit argv array + * - A {@link UseEntrypoint} to use the container's built-in entrypoint + */ +export type CommandType = + | string + | [string, ...string[]] + | readonly [string, ...string[]] + | UseEntrypoint +/** The return type from starting a daemon — provides `wait()` and `term()` controls. */ export type DaemonReturned = { + /** Waits for the daemon process to exit. */ wait(): Promise + /** Sends a signal to terminate the daemon. If it doesn't exit within `timeout` ms, sends SIGKILL. */ term(options?: { signal?: Signals; timeout?: number }): Promise } export declare const hostName: unique symbol -// asdflkjadsf.onion | 1.2.3.4 +/** A branded string type for hostnames (e.g. `.onion` addresses or IP addresses). */ export type Hostname = string & { [hostName]: never } +/** A string identifier for a service network interface. */ export type ServiceInterfaceId = string export { ServiceInterface } +/** Maps effect method names to their kebab-case RPC equivalents. */ export type EffectMethod = { [K in keyof T]-?: K extends string ? 
T[K] extends Function @@ -131,6 +179,7 @@ export type EffectMethod = { : never }[keyof T] +/** Options for rsync-based file synchronization (used in backup/restore). */ export type SyncOptions = { /** delete files that exist in the target directory, but not in the source directory */ delete: boolean @@ -156,51 +205,75 @@ export type Metadata = { mode: number } +/** Result type for setting a service's dependency configuration and restart signal. */ export type SetResult = { dependsOn: DependsOn signal: Signals } +/** A string identifier for a StartOS package (e.g. `"bitcoind"`). */ export type PackageId = string +/** A user-facing message string. */ export type Message = string +/** Whether a dependency needs to be actively running or merely installed. */ export type DependencyKind = 'running' | 'exists' +/** + * Maps package IDs to the health check IDs that must pass before this service considers + * the dependency satisfied. + */ export type DependsOn = { [packageId: string]: string[] | readonly string[] } +/** + * A typed error that can be displayed to the user. + * Either a plain error message string, or a structured error code with description. + */ export type KnownError = | { error: string } | { errorCode: [number, string] | readonly [number, string] } +/** An array of dependency requirements for a service. */ export type Dependencies = Array +/** Recursively makes all properties of `T` optional. */ export type DeepPartial = T extends [infer A, ...infer Rest] ? [DeepPartial, ...DeepPartial] : T extends {} ? { [P in keyof T]?: DeepPartial } : T +/** Recursively removes all `readonly` modifiers from `T`. */ export type DeepWritable = { -readonly [K in keyof T]: T[K] } +/** Casts a value to {@link DeepWritable} (identity at runtime, removes `readonly` at the type level). */ export function writable(value: T): DeepWritable { return value } +/** Recursively makes all properties of `T` readonly. 
*/ export type DeepReadonly = { readonly [P in keyof T]: DeepReadonly } +/** Casts a value to {@link DeepReadonly} (identity at runtime, adds `readonly` at the type level). */ export function readonly(value: T): DeepReadonly { return value } +/** Accepts either a mutable or deeply-readonly version of `T`. */ export type AllowReadonly = | T | { readonly [P in keyof T]: AllowReadonly } + +export type InputSpec< + Type extends StaticValidatedAs, + StaticValidatedAs extends Record = Type, +> = InputSpecClass diff --git a/sdk/base/lib/types/ManifestTypes.ts b/sdk/base/lib/types/ManifestTypes.ts index 3173ef9c2..4ba167430 100644 --- a/sdk/base/lib/types/ManifestTypes.ts +++ b/sdk/base/lib/types/ManifestTypes.ts @@ -21,22 +21,17 @@ export type SDKManifest = { * URL of the StartOS package repository * @example `https://github.com/Start9Labs/nextcloud-startos` */ - readonly wrapperRepo: string + readonly packageRepo: string /** * URL of the upstream service repository * @example `https://github.com/nextcloud/docker` */ readonly upstreamRepo: string - /** - * URL where users can get help using the upstream service - * @example `https://github.com/nextcloud/docker/issues` - */ - readonly supportSite: string /** * URL where users can learn more about the upstream service * @example `https://nextcloud.com` */ - readonly marketingSite: string + readonly marketingUrl: string /** * (optional) URL where users can donate to the upstream project * @example `https://nextcloud.com/contribute/` @@ -45,7 +40,7 @@ export type SDKManifest = { /** * URL where users can find instructions on how to use the service */ - readonly docsUrl: string + readonly docsUrls: string[] readonly description: { /** Short description to display on the marketplace list page. Max length 80 chars. 
*/ readonly short: T.LocaleString @@ -152,6 +147,11 @@ export type SDKManifest = { * @description Enable access to hardware acceleration devices (such as /dev/dri, or /dev/nvidia*) */ readonly hardwareAcceleration?: boolean + + /** + * @description Enable OS plugins + */ + readonly plugins?: T.PluginId[] } // this is hacky but idk a more elegant way diff --git a/sdk/base/lib/util/AbortedError.ts b/sdk/base/lib/util/AbortedError.ts new file mode 100644 index 000000000..ca9843a22 --- /dev/null +++ b/sdk/base/lib/util/AbortedError.ts @@ -0,0 +1,10 @@ +export class AbortedError extends Error { + readonly muteUnhandled = true as const + declare cause?: unknown + + constructor(message?: string, options?: { cause?: unknown }) { + super(message) + this.name = 'AbortedError' + if (options?.cause !== undefined) this.cause = options.cause + } +} diff --git a/sdk/base/lib/util/GetOutboundGateway.ts b/sdk/base/lib/util/GetOutboundGateway.ts new file mode 100644 index 000000000..460bb8b90 --- /dev/null +++ b/sdk/base/lib/util/GetOutboundGateway.ts @@ -0,0 +1,106 @@ +import { Effects } from '../Effects' +import { AbortedError } from './AbortedError' +import { DropGenerator, DropPromise } from './Drop' + +export class GetOutboundGateway { + constructor(readonly effects: Effects) {} + + /** + * Returns the effective outbound gateway. Reruns the context from which it has been called if the underlying value changes + */ + const() { + return this.effects.getOutboundGateway({ + callback: + this.effects.constRetry && + (() => this.effects.constRetry && this.effects.constRetry()), + }) + } + /** + * Returns the effective outbound gateway. 
Does nothing if the value changes + */ + once() { + return this.effects.getOutboundGateway({}) + } + + private async *watchGen(abort?: AbortSignal) { + const resolveCell = { resolve: () => {} } + this.effects.onLeaveContext(() => { + resolveCell.resolve() + }) + abort?.addEventListener('abort', () => resolveCell.resolve()) + while (this.effects.isInContext && !abort?.aborted) { + let callback: () => void = () => {} + const waitForNext = new Promise((resolve) => { + callback = resolve + resolveCell.resolve = resolve + }) + yield await this.effects.getOutboundGateway({ + callback: () => callback(), + }) + await waitForNext + } + return new Promise((_, rej) => rej(new AbortedError())) + } + + /** + * Watches the effective outbound gateway. Returns an async iterator that yields whenever the value changes + */ + watch(abort?: AbortSignal): AsyncGenerator { + const ctrl = new AbortController() + abort?.addEventListener('abort', () => ctrl.abort()) + return DropGenerator.of(this.watchGen(ctrl.signal), () => ctrl.abort()) + } + + /** + * Watches the effective outbound gateway. Takes a custom callback function to run whenever the value changes + */ + onChange( + callback: ( + value: string, + error?: Error, + ) => { cancel: boolean } | Promise<{ cancel: boolean }>, + ) { + ;(async () => { + const ctrl = new AbortController() + for await (const value of this.watch(ctrl.signal)) { + try { + const res = await callback(value) + if (res.cancel) { + ctrl.abort() + break + } + } catch (e) { + console.error( + 'callback function threw an error @ GetOutboundGateway.onChange', + e, + ) + } + } + })() + .catch((e) => callback('', e)) + .catch((e) => + console.error( + 'callback function threw an error @ GetOutboundGateway.onChange', + e, + ), + ) + } + + /** + * Watches the effective outbound gateway. 
Returns when the predicate is true + */ + waitFor(pred: (value: string) => boolean): Promise { + const ctrl = new AbortController() + return DropPromise.of( + Promise.resolve().then(async () => { + for await (const next of this.watchGen(ctrl.signal)) { + if (pred(next)) { + return next + } + } + return '' + }), + () => ctrl.abort(), + ) + } +} diff --git a/sdk/base/lib/util/GetSystemSmtp.ts b/sdk/base/lib/util/GetSystemSmtp.ts index 69e6c1279..03cedba6f 100644 --- a/sdk/base/lib/util/GetSystemSmtp.ts +++ b/sdk/base/lib/util/GetSystemSmtp.ts @@ -1,5 +1,6 @@ import { Effects } from '../Effects' import * as T from '../types' +import { AbortedError } from './AbortedError' import { DropGenerator, DropPromise } from './Drop' export class GetSystemSmtp { @@ -39,7 +40,7 @@ export class GetSystemSmtp { }) await waitForNext } - return new Promise((_, rej) => rej(new Error('aborted'))) + return new Promise((_, rej) => rej(new AbortedError())) } /** diff --git a/sdk/base/lib/util/asError.ts b/sdk/base/lib/util/asError.ts index 5f0a3884d..dddb4aafe 100644 --- a/sdk/base/lib/util/asError.ts +++ b/sdk/base/lib/util/asError.ts @@ -1,3 +1,11 @@ +/** + * Converts an unknown thrown value into an Error instance. + * If `e` is already an Error, wraps it; if a string, uses it as the message; + * otherwise JSON-serializes it as the error message. + * + * @param e - The unknown value to convert + * @returns An Error instance + */ export const asError = (e: unknown) => { if (e instanceof Error) { return new Error(e as any) diff --git a/sdk/base/lib/util/deepEqual.ts b/sdk/base/lib/util/deepEqual.ts index e64c23429..4c53807a9 100644 --- a/sdk/base/lib/util/deepEqual.ts +++ b/sdk/base/lib/util/deepEqual.ts @@ -1,7 +1,22 @@ -import { object } from 'ts-matches' - +/** + * Performs a deep structural equality check across all provided arguments. + * Returns true only if every argument is deeply equal to every other argument. + * Handles primitives, arrays, and plain objects recursively. 
+ * + * @param args - Two or more values to compare for deep equality + * @returns True if all arguments are deeply equal + * + * @example + * ```ts + * deepEqual({ a: 1 }, { a: 1 }) // true + * deepEqual([1, 2], [1, 2], [1, 2]) // true + * deepEqual({ a: 1 }, { a: 2 }) // false + * ``` + */ export function deepEqual(...args: unknown[]) { - const objects = args.filter(object.test) + const objects = args.filter( + (x): x is object => typeof x === 'object' && x !== null, + ) if (objects.length === 0) { for (const x of args) if (x !== args[0]) return false return true diff --git a/sdk/base/lib/util/deepMerge.ts b/sdk/base/lib/util/deepMerge.ts index f64a2ef50..0742acb73 100644 --- a/sdk/base/lib/util/deepMerge.ts +++ b/sdk/base/lib/util/deepMerge.ts @@ -1,3 +1,13 @@ +/** + * Computes the partial difference between two values. + * Returns `undefined` if the values are equal, or `{ diff }` containing only the changed parts. + * For arrays, the diff contains only items in `next` that have no deep-equal counterpart in `prev`. + * For objects, the diff contains only keys whose values changed. + * + * @param prev - The original value + * @param next - The updated value + * @returns An object containing the diff, or `undefined` if the values are equal + */ export function partialDiff( prev: T, next: T, @@ -46,6 +56,14 @@ export function partialDiff( } } +/** + * Deeply merges multiple values together. Objects are merged key-by-key recursively. + * Arrays are merged by appending items that are not already present (by deep equality). + * Primitives are resolved by taking the last argument. 
+ * + * @param args - The values to merge, applied left to right + * @returns The merged result + */ export function deepMerge(...args: unknown[]): unknown { const lastItem = (args as any)[args.length - 1] if (typeof lastItem !== 'object' || !lastItem) return lastItem diff --git a/sdk/base/lib/util/getDefaultString.ts b/sdk/base/lib/util/getDefaultString.ts index 7468c1e00..831292a09 100644 --- a/sdk/base/lib/util/getDefaultString.ts +++ b/sdk/base/lib/util/getDefaultString.ts @@ -1,6 +1,14 @@ import { DefaultString } from '../actions/input/inputSpecTypes' import { getRandomString } from './getRandomString' +/** + * Resolves a DefaultString spec into a concrete string value. + * If the spec is a plain string, returns it directly. + * If it is a random-string specification, generates a random string accordingly. + * + * @param defaultSpec - A string literal or a random-string generation spec + * @returns The resolved default string value + */ export function getDefaultString(defaultSpec: DefaultString): string { if (typeof defaultSpec === 'string') { return defaultSpec diff --git a/sdk/base/lib/util/getServiceInterface.ts b/sdk/base/lib/util/getServiceInterface.ts index 17186b7d7..60fb2a745 100644 --- a/sdk/base/lib/util/getServiceInterface.ts +++ b/sdk/base/lib/util/getServiceInterface.ts @@ -1,7 +1,14 @@ import { PackageId, ServiceInterfaceId, ServiceInterfaceType } from '../types' import { knownProtocols } from '../interfaces/Host' -import { AddressInfo, Host, Hostname, HostnameInfo } from '../types' +import { + AddressInfo, + DerivedAddressInfo, + Host, + Hostname, + HostnameInfo, +} from '../types' import { Effects } from '../Effects' +import { AbortedError } from './AbortedError' import { DropGenerator, DropPromise } from './Drop' import { IpAddress, IPV6_LINK_LOCAL } from './ip' import { deepEqual } from './deepEqual' @@ -20,7 +27,6 @@ export const getHostname = (url: string): Hostname | null => { } type FilterKinds = - | 'onion' | 'mdns' | 'domain' | 'ip' @@ 
-42,27 +48,26 @@ type VisibilityFilter = V extends 'public' | (HostnameInfo & { public: false }) | VisibilityFilter> : never -type KindFilter = K extends 'onion' - ? (HostnameInfo & { kind: 'onion' }) | KindFilter> - : K extends 'mdns' +type KindFilter = K extends 'mdns' + ? + | (HostnameInfo & { metadata: { kind: 'mdns' } }) + | KindFilter> + : K extends 'domain' ? - | (HostnameInfo & { kind: 'ip'; hostname: { kind: 'local' } }) - | KindFilter> - : K extends 'domain' + | (HostnameInfo & { metadata: { kind: 'private-domain' } }) + | (HostnameInfo & { metadata: { kind: 'public-domain' } }) + | KindFilter> + : K extends 'ipv4' ? - | (HostnameInfo & { kind: 'ip'; hostname: { kind: 'domain' } }) - | KindFilter> - : K extends 'ipv4' + | (HostnameInfo & { metadata: { kind: 'ipv4' } }) + | KindFilter> + : K extends 'ipv6' ? - | (HostnameInfo & { kind: 'ip'; hostname: { kind: 'ipv4' } }) - | KindFilter> - : K extends 'ipv6' - ? - | (HostnameInfo & { kind: 'ip'; hostname: { kind: 'ipv6' } }) - | KindFilter> - : K extends 'ip' - ? KindFilter | 'ipv4' | 'ipv6'> - : never + | (HostnameInfo & { metadata: { kind: 'ipv6' } }) + | KindFilter> + : K extends 'ip' + ? 
KindFilter | 'ipv4' | 'ipv6'> + : never type FilterReturnTy = F extends { visibility: infer V extends 'public' | 'private' @@ -90,10 +95,6 @@ const nonLocalFilter = { const publicFilter = { visibility: 'public', } as const -const onionFilter = { - kind: 'onion', -} as const - type Formats = 'hostname-info' | 'urlstring' | 'url' type FormatReturnTy< F extends Filter, @@ -109,10 +110,7 @@ type FormatReturnTy< export type Filled = { hostnames: HostnameInfo[] - toUrls: (h: HostnameInfo) => { - url: UrlString | null - sslUrl: UrlString | null - } + toUrl: (h: HostnameInfo) => UrlString format: ( format?: Format, @@ -124,7 +122,6 @@ export type Filled = { nonLocal: Filled public: Filled - onion: Filled } export type FilledAddressInfo = AddressInfo & Filled export type ServiceInterfaceFilled = { @@ -154,41 +151,29 @@ const unique = (values: A[]) => Array.from(new Set(values)) export const addressHostToUrl = ( { scheme, sslScheme, username, suffix }: AddressInfo, hostname: HostnameInfo, -): { url: UrlString | null; sslUrl: UrlString | null } => { - const res = [] - const fmt = (scheme: string | null, host: HostnameInfo, port: number) => { +): UrlString => { + const effectiveScheme = hostname.ssl ? sslScheme : scheme + let host: string + if (hostname.metadata.kind === 'ipv6') { + host = IPV6_LINK_LOCAL.contains(hostname.hostname) + ? `[${hostname.hostname}%${hostname.metadata.scopeId}]` + : `[${hostname.hostname}]` + } else { + host = hostname.hostname + } + let portStr = '' + if (hostname.port !== null) { const excludePort = - scheme && - scheme in knownProtocols && - port === knownProtocols[scheme as keyof typeof knownProtocols].defaultPort - let hostname - if (host.kind === 'onion') { - hostname = host.hostname.value - } else if (host.kind === 'ip') { - if (host.hostname.kind === 'domain') { - hostname = host.hostname.value - } else if (host.hostname.kind === 'ipv6') { - hostname = IPV6_LINK_LOCAL.contains(host.hostname.value) - ? 
`[${host.hostname.value}%${host.hostname.scopeId}]` - : `[${host.hostname.value}]` - } else { - hostname = host.hostname.value - } - } - return `${scheme ? `${scheme}://` : ''}${ - username ? `${username}@` : '' - }${hostname}${excludePort ? '' : `:${port}`}${suffix}` + effectiveScheme && + effectiveScheme in knownProtocols && + hostname.port === + knownProtocols[effectiveScheme as keyof typeof knownProtocols] + .defaultPort + if (!excludePort) portStr = `:${hostname.port}` } - let url = null - if (hostname.hostname.port !== null) { - url = fmt(scheme, hostname, hostname.hostname.port) - } - let sslUrl = null - if (hostname.hostname.sslPort !== null) { - sslUrl = fmt(sslScheme, hostname, hostname.hostname.sslPort) - } - - return { url, sslUrl } + return `${effectiveScheme ? `${effectiveScheme}://` : ''}${ + username ? `${username}@` : '' + }${host}${portStr}${suffix}` } function filterRec( @@ -201,13 +186,9 @@ function filterRec( hostnames = hostnames.filter((h) => invert !== pred(h)) } if (filter.visibility === 'public') - hostnames = hostnames.filter( - (h) => invert !== (h.kind === 'onion' || h.public), - ) + hostnames = hostnames.filter((h) => invert !== h.public) if (filter.visibility === 'private') - hostnames = hostnames.filter( - (h) => invert !== (h.kind !== 'onion' && !h.public), - ) + hostnames = hostnames.filter((h) => invert !== !h.public) if (filter.kind) { const kind = new Set( Array.isArray(filter.kind) ? 
filter.kind : [filter.kind], @@ -219,21 +200,17 @@ function filterRec( hostnames = hostnames.filter( (h) => invert !== - ((kind.has('onion') && h.kind === 'onion') || - (kind.has('mdns') && - h.kind === 'ip' && - h.hostname.kind === 'local') || + ((kind.has('mdns') && h.metadata.kind === 'mdns') || (kind.has('domain') && - h.kind === 'ip' && - h.hostname.kind === 'domain') || - (kind.has('ipv4') && h.kind === 'ip' && h.hostname.kind === 'ipv4') || - (kind.has('ipv6') && h.kind === 'ip' && h.hostname.kind === 'ipv6') || + (h.metadata.kind === 'private-domain' || + h.metadata.kind === 'public-domain')) || + (kind.has('ipv4') && h.metadata.kind === 'ipv4') || + (kind.has('ipv6') && h.metadata.kind === 'ipv6') || (kind.has('localhost') && - ['localhost', '127.0.0.1', '::1'].includes(h.hostname.value)) || + ['localhost', '127.0.0.1', '::1'].includes(h.hostname)) || (kind.has('link-local') && - h.kind === 'ip' && - h.hostname.kind === 'ipv6' && - IPV6_LINK_LOCAL.contains(IpAddress.parse(h.hostname.value)))), + h.metadata.kind === 'ipv6' && + IPV6_LINK_LOCAL.contains(IpAddress.parse(h.hostname)))), ) } @@ -242,16 +219,36 @@ function filterRec( return hostnames } +function isPublicIp(h: HostnameInfo): boolean { + return h.public && (h.metadata.kind === 'ipv4' || h.metadata.kind === 'ipv6') +} + +function enabledAddresses(addr: DerivedAddressInfo): HostnameInfo[] { + return addr.available.filter((h) => { + if (isPublicIp(h)) { + // Public IPs: disabled by default, explicitly enabled via SocketAddr string + if (h.port === null) return true + const sa = + h.metadata.kind === 'ipv6' + ? `[${h.hostname}]:${h.port}` + : `${h.hostname}:${h.port}` + return addr.enabled.includes(sa) + } else { + // Everything else: enabled by default, explicitly disabled via [hostname, port] tuple + return !addr.disabled.some( + ([hostname, port]) => hostname === h.hostname && port === (h.port ?? 
0), + ) + } + }) +} + export const filledAddress = ( host: Host, addressInfo: AddressInfo, ): FilledAddressInfo => { - const toUrls = addressHostToUrl.bind(null, addressInfo) - const toUrlArray = (h: HostnameInfo) => { - const u = toUrls(h) - return [u.url, u.sslUrl].filter((u) => u !== null) - } - const hostnames = host.hostnameInfo[addressInfo.internalPort] ?? [] + const toUrl = addressHostToUrl.bind(null, addressInfo) + const binding = host.bindings[addressInfo.internalPort] + const hostnames = binding ? enabledAddresses(binding.addresses) : [] function filledAddressFromHostnames( hostnames: HostnameInfo[], @@ -266,19 +263,14 @@ export const filledAddress = ( filterRec(hostnames, publicFilter, false), ), ) - const getOnion = once(() => - filledAddressFromHostnames( - filterRec(hostnames, onionFilter, false), - ), - ) return { ...addressInfo, hostnames, - toUrls, + toUrl, format: (format?: Format) => { let res: FormatReturnTy<{}, Format>[] = hostnames as any if (format === 'hostname-info') return res - const urls = hostnames.flatMap(toUrlArray) + const urls = hostnames.map(toUrl) if (format === 'url') res = urls.map((u) => new URL(u)) as any else res = urls as any return res @@ -294,9 +286,6 @@ export const filledAddress = ( get public(): Filled { return getPublic() }, - get onion(): Filled { - return getOnion() - }, } } @@ -406,7 +395,7 @@ export class GetServiceInterface { } await waitForNext } - return new Promise((_, rej) => rej(new Error('aborted'))) + return new Promise((_, rej) => rej(new AbortedError())) } /** diff --git a/sdk/base/lib/util/getServiceInterfaces.ts b/sdk/base/lib/util/getServiceInterfaces.ts index ea11d7c65..e6a745d56 100644 --- a/sdk/base/lib/util/getServiceInterfaces.ts +++ b/sdk/base/lib/util/getServiceInterfaces.ts @@ -1,5 +1,6 @@ import { Effects } from '../Effects' import { PackageId } from '../osBindings' +import { AbortedError } from './AbortedError' import { deepEqual } from './deepEqual' import { DropGenerator, DropPromise } from 
'./Drop' import { ServiceInterfaceFilled, filledAddress } from './getServiceInterface' @@ -105,7 +106,7 @@ export class GetServiceInterfaces { } await waitForNext } - return new Promise((_, rej) => rej(new Error('aborted'))) + return new Promise((_, rej) => rej(new AbortedError())) } /** diff --git a/sdk/base/lib/util/graph.ts b/sdk/base/lib/util/graph.ts index f2ba5ff09..7d2bda0d7 100644 --- a/sdk/base/lib/util/graph.ts +++ b/sdk/base/lib/util/graph.ts @@ -1,20 +1,41 @@ -import { boolean } from 'ts-matches' import { ExtendedVersion } from '../exver' +/** + * A vertex (node) in a directed graph, holding metadata and a list of connected edges. + * @typeParam VMetadata - The type of metadata stored on vertices + * @typeParam EMetadata - The type of metadata stored on edges + */ export type Vertex = { metadata: VMetadata edges: Array> } +/** + * A directed edge connecting two vertices, with its own metadata. + * @typeParam EMetadata - The type of metadata stored on edges + * @typeParam VMetadata - The type of metadata stored on the connected vertices + */ export type Edge = { metadata: EMetadata from: Vertex to: Vertex } +/** + * A directed graph data structure supporting vertex/edge management and graph traversal algorithms + * including breadth-first search, reverse BFS, and shortest path computation. + * + * @typeParam VMetadata - The type of metadata stored on vertices + * @typeParam EMetadata - The type of metadata stored on edges + */ export class Graph { private readonly vertices: Array> = [] constructor() {} + /** + * Serializes the graph to a JSON string for debugging. + * @param metadataRepr - Optional function to transform metadata values before serialization + * @returns A pretty-printed JSON string of the graph structure + */ dump( metadataRepr: (metadata: VMetadata | EMetadata) => any = (a) => a, ): string { @@ -31,6 +52,13 @@ export class Graph { 2, ) } + /** + * Adds a new vertex to the graph, optionally connecting it to existing vertices via edges. 
+ * @param metadata - The metadata to attach to the new vertex + * @param fromEdges - Edges pointing from existing vertices to this new vertex + * @param toEdges - Edges pointing from this new vertex to existing vertices + * @returns The newly created vertex + */ addVertex( metadata: VMetadata, fromEdges: Array, 'to'>>, @@ -61,6 +89,11 @@ export class Graph { this.vertices.push(vertex) return vertex } + /** + * Returns a generator that yields all vertices matching the predicate. + * @param predicate - A function to test each vertex + * @returns A generator of matching vertices + */ findVertex( predicate: (vertex: Vertex) => boolean, ): Generator, null> { @@ -75,6 +108,13 @@ export class Graph { } return gen() } + /** + * Adds a directed edge between two existing vertices. + * @param metadata - The metadata to attach to the edge + * @param from - The source vertex + * @param to - The destination vertex + * @returns The newly created edge + */ addEdge( metadata: EMetadata, from: Vertex, @@ -89,6 +129,11 @@ export class Graph { edge.to.edges.push(edge) return edge } + /** + * Performs a breadth-first traversal following outgoing edges from the starting vertex or vertices. + * @param from - A starting vertex, or a predicate to select multiple starting vertices + * @returns A generator yielding vertices in BFS order + */ breadthFirstSearch( from: | Vertex @@ -140,6 +185,11 @@ export class Graph { return rec(from) } } + /** + * Performs a reverse breadth-first traversal following incoming edges from the starting vertex or vertices. + * @param to - A starting vertex, or a predicate to select multiple starting vertices + * @returns A generator yielding vertices in reverse BFS order + */ reverseBreadthFirstSearch( to: | Vertex @@ -191,6 +241,12 @@ export class Graph { return rec(to) } } + /** + * Finds the shortest path (by edge count) between two vertices using BFS. 
+ * @param from - The starting vertex, or a predicate to select starting vertices + * @param to - The target vertex, or a predicate to identify target vertices + * @returns An array of edges forming the shortest path, or `null` if no path exists + */ shortestPath( from: | Vertex diff --git a/sdk/base/lib/util/inMs.ts b/sdk/base/lib/util/inMs.ts index bf03f78d4..e7a26e509 100644 --- a/sdk/base/lib/util/inMs.ts +++ b/sdk/base/lib/util/inMs.ts @@ -15,6 +15,21 @@ const digitsMs = (digits: string | null, multiplier: number) => { const divideBy = multiplier / Math.pow(10, digits.length - 1) return Math.round(value * divideBy) } +/** + * Converts a human-readable time string to milliseconds. + * Supports units: `ms`, `s`, `m`, `h`, `d`. If a number is passed, it is returned as-is. + * + * @param time - A time string (e.g. `"500ms"`, `"1.5s"`, `"2h"`) or a numeric millisecond value + * @returns The time in milliseconds, or `undefined` if `time` is falsy + * @throws Error if the string format is invalid + * + * @example + * ```ts + * inMs("2s") // 2000 + * inMs("1.5h") // 5400000 + * inMs(500) // 500 + * ``` + */ export const inMs = (time?: string | number) => { if (typeof time === 'number') return time if (!time) return undefined diff --git a/sdk/base/lib/util/index.ts b/sdk/base/lib/util/index.ts index 303cf5f73..bad134501 100644 --- a/sdk/base/lib/util/index.ts +++ b/sdk/base/lib/util/index.ts @@ -14,6 +14,7 @@ export { once } from './once' export { asError } from './asError' export * as Patterns from './patterns' export * from './typeHelpers' +export { GetOutboundGateway } from './GetOutboundGateway' export { GetSystemSmtp } from './GetSystemSmtp' export { Graph, Vertex } from './graph' export { inMs } from './inMs' @@ -21,5 +22,6 @@ export { splitCommand } from './splitCommand' export { nullIfEmpty } from './nullIfEmpty' export { deepMerge, partialDiff } from './deepMerge' export { deepEqual } from './deepEqual' +export { AbortedError } from './AbortedError' export * 
as regexes from './regexes' export { stringFromStdErrOut } from './stringFromStdErrOut' diff --git a/sdk/base/lib/util/ip.ts b/sdk/base/lib/util/ip.ts index 894d1f08b..ae152e4b6 100644 --- a/sdk/base/lib/util/ip.ts +++ b/sdk/base/lib/util/ip.ts @@ -1,3 +1,14 @@ +/** + * Represents an IPv4 or IPv6 address as raw octets with arithmetic and comparison operations. + * + * IPv4 addresses have 4 octets, IPv6 addresses have 16 octets. + * + * @example + * ```ts + * const ip = IpAddress.parse("192.168.1.1") + * const next = ip.add(1) // 192.168.1.2 + * ``` + */ export class IpAddress { private renderedOctets: number[] protected constructor( @@ -6,6 +17,13 @@ export class IpAddress { ) { this.renderedOctets = [...octets] } + /** + * Parses an IP address string into an IpAddress instance. + * Supports both IPv4 dotted-decimal and IPv6 colon-hex notation (including `::` shorthand). + * @param address - The IP address string to parse + * @returns A new IpAddress instance + * @throws Error if the address format is invalid + */ static parse(address: string): IpAddress { let octets if (address.includes(':')) { @@ -39,6 +57,12 @@ export class IpAddress { } return new IpAddress(octets, address) } + /** + * Creates an IpAddress from a raw octet array. + * @param octets - Array of 4 octets (IPv4) or 16 octets (IPv6), each 0-255 + * @returns A new IpAddress instance + * @throws Error if the octet array length is not 4 or 16, or any octet exceeds 255 + */ static fromOctets(octets: number[]) { if (octets.length == 4) { if (octets.some((o) => o > 255)) { @@ -66,15 +90,24 @@ export class IpAddress { throw new Error('invalid ip address') } } + /** Returns true if this is an IPv4 address (4 octets). */ isIpv4(): boolean { return this.octets.length === 4 } + /** Returns true if this is an IPv6 address (16 octets). */ isIpv6(): boolean { return this.octets.length === 16 } + /** Returns true if this is a public IPv4 address (not in any private range). 
*/ isPublic(): boolean { return this.isIpv4() && !PRIVATE_IPV4_RANGES.some((r) => r.contains(this)) } + /** + * Returns a new IpAddress incremented by `n`. + * @param n - The integer amount to add (fractional part is truncated) + * @returns A new IpAddress with the result + * @throws Error on overflow + */ add(n: number): IpAddress { let octets = [...this.octets] n = Math.floor(n) @@ -92,6 +125,12 @@ export class IpAddress { } return IpAddress.fromOctets(octets) } + /** + * Returns a new IpAddress decremented by `n`. + * @param n - The integer amount to subtract (fractional part is truncated) + * @returns A new IpAddress with the result + * @throws Error on underflow + */ sub(n: number): IpAddress { let octets = [...this.octets] n = Math.floor(n) @@ -109,6 +148,11 @@ export class IpAddress { } return IpAddress.fromOctets(octets) } + /** + * Compares this address to another, returning -1, 0, or 1. + * @param other - An IpAddress instance or string to compare against + * @returns -1 if this < other, 0 if equal, 1 if this > other + */ cmp(other: string | IpAddress): -1 | 0 | 1 { if (typeof other === 'string') other = IpAddress.parse(other) const len = Math.max(this.octets.length, other.octets.length) @@ -123,6 +167,7 @@ export class IpAddress { } return 0 } + /** The string representation of this IP address (e.g. `"192.168.1.1"` or `"::1"`). Cached and recomputed only when octets change. */ get address(): string { if ( this.renderedOctets.length === this.octets.length && @@ -160,6 +205,17 @@ export class IpAddress { } } +/** + * Represents an IP network (CIDR notation) combining an IP address with a prefix length. + * Extends IpAddress with network-specific operations like containment checks and broadcast calculation. 
+ * + * @example + * ```ts + * const net = IpNet.parse("192.168.1.0/24") + * net.contains("192.168.1.100") // true + * net.broadcast() // 192.168.1.255 + * ``` + */ export class IpNet extends IpAddress { private constructor( octets: number[], @@ -168,18 +224,35 @@ export class IpNet extends IpAddress { ) { super(octets, address) } + /** + * Creates an IpNet from an IpAddress and prefix length. + * @param ip - The base IP address + * @param prefix - The CIDR prefix length (0-32 for IPv4, 0-128 for IPv6) + * @returns A new IpNet instance + * @throws Error if prefix exceeds the address bit length + */ static fromIpPrefix(ip: IpAddress, prefix: number): IpNet { if (prefix > ip.octets.length * 8) { throw new Error('invalid prefix') } return new IpNet(ip.octets, prefix, ip.address) } + /** + * Parses a CIDR notation string (e.g. `"192.168.1.0/24"`) into an IpNet. + * @param ipnet - The CIDR string to parse + * @returns A new IpNet instance + */ static parse(ipnet: string): IpNet { const [address, prefixStr] = ipnet.split('/', 2) const ip = IpAddress.parse(address) const prefix = Number(prefixStr) return IpNet.fromIpPrefix(ip, prefix) } + /** + * Checks whether this network contains the given address or subnet. + * @param address - An IP address or subnet (string, IpAddress, or IpNet) + * @returns True if the address falls within this network's range + */ contains(address: string | IpAddress | IpNet): boolean { if (typeof address === 'string') address = IpAddress.parse(address) if (address instanceof IpNet && address.prefix < this.prefix) return false @@ -197,6 +270,7 @@ export class IpNet extends IpAddress { const mask = 255 ^ (255 >> prefix) return (this.octets[idx] & mask) === (address.octets[idx] & mask) } + /** Returns the network address (all host bits zeroed) for this subnet. 
*/ zero(): IpAddress { let octets: number[] = [] let prefix = this.prefix @@ -213,6 +287,7 @@ export class IpNet extends IpAddress { return IpAddress.fromOctets(octets) } + /** Returns the broadcast address (all host bits set to 1) for this subnet. */ broadcast(): IpAddress { let octets: number[] = [] let prefix = this.prefix @@ -229,11 +304,13 @@ export class IpNet extends IpAddress { return IpAddress.fromOctets(octets) } + /** The CIDR notation string for this network (e.g. `"192.168.1.0/24"`). */ get ipnet() { return `${this.address}/${this.prefix}` } } +/** All private IPv4 ranges: loopback (127.0.0.0/8), Class A (10.0.0.0/8), Class B (172.16.0.0/12), Class C (192.168.0.0/16). */ export const PRIVATE_IPV4_RANGES = [ IpNet.parse('127.0.0.0/8'), IpNet.parse('10.0.0.0/8'), @@ -241,8 +318,12 @@ export const PRIVATE_IPV4_RANGES = [ IpNet.parse('192.168.0.0/16'), ] +/** IPv4 loopback network (127.0.0.0/8). */ export const IPV4_LOOPBACK = IpNet.parse('127.0.0.0/8') +/** IPv6 loopback address (::1/128). */ export const IPV6_LOOPBACK = IpNet.parse('::1/128') +/** IPv6 link-local network (fe80::/10). */ export const IPV6_LINK_LOCAL = IpNet.parse('fe80::/10') +/** Carrier-Grade NAT (CGNAT) address range (100.64.0.0/10), per RFC 6598. */ export const CGNAT = IpNet.parse('100.64.0.0/10') diff --git a/sdk/base/lib/util/once.ts b/sdk/base/lib/util/once.ts index 5f689b0e1..98c2d91df 100644 --- a/sdk/base/lib/util/once.ts +++ b/sdk/base/lib/util/once.ts @@ -1,3 +1,16 @@ +/** + * Wraps a function so it is only executed once. Subsequent calls return the cached result. 
+ * + * @param fn - The function to execute at most once + * @returns A wrapper that lazily evaluates `fn` on first call and caches the result + * + * @example + * ```ts + * const getConfig = once(() => loadExpensiveConfig()) + * getConfig() // loads config + * getConfig() // returns cached result + * ``` + */ export function once(fn: () => B): () => B { let result: [B] | [] = [] return () => { diff --git a/sdk/base/lib/util/patterns.ts b/sdk/base/lib/util/patterns.ts index 59f7a863b..c55a36797 100644 --- a/sdk/base/lib/util/patterns.ts +++ b/sdk/base/lib/util/patterns.ts @@ -1,67 +1,68 @@ import { Pattern } from '../actions/input/inputSpecTypes' import * as regexes from './regexes' +/** Pattern for validating IPv6 addresses. */ export const ipv6: Pattern = { regex: regexes.ipv6.matches(), description: 'Must be a valid IPv6 address', } +/** Pattern for validating IPv4 addresses. */ export const ipv4: Pattern = { regex: regexes.ipv4.matches(), description: 'Must be a valid IPv4 address', } +/** Pattern for validating hostnames (RFC-compliant). */ export const hostname: Pattern = { regex: regexes.hostname.matches(), description: 'Must be a valid hostname', } +/** Pattern for validating `.local` mDNS hostnames. */ export const localHostname: Pattern = { regex: regexes.localHostname.matches(), description: 'Must be a valid ".local" hostname', } -export const torHostname: Pattern = { - regex: regexes.torHostname.matches(), - description: 'Must be a valid Tor (".onion") hostname', -} - +/** Pattern for validating HTTP/HTTPS URLs. */ export const url: Pattern = { regex: regexes.url.matches(), description: 'Must be a valid URL', } +/** Pattern for validating `.local` URLs (mDNS/LAN). 
*/ export const localUrl: Pattern = { regex: regexes.localUrl.matches(), description: 'Must be a valid ".local" URL', } -export const torUrl: Pattern = { - regex: regexes.torUrl.matches(), - description: 'Must be a valid Tor (".onion") URL', -} - +/** Pattern for validating ASCII-only strings (printable characters). */ export const ascii: Pattern = { regex: regexes.ascii.matches(), description: 'May only contain ASCII characters. See https://www.w3schools.com/charsets/ref_html_ascii.asp', } +/** Pattern for validating fully qualified domain names (FQDNs). */ export const domain: Pattern = { regex: regexes.domain.matches(), description: 'Must be a valid Fully Qualified Domain Name', } +/** Pattern for validating email addresses. */ export const email: Pattern = { regex: regexes.email.matches(), description: 'Must be a valid email address', } +/** Pattern for validating email addresses, optionally with a display name (e.g. `"John Doe "`). */ export const emailWithName: Pattern = { regex: regexes.emailWithName.matches(), description: 'Must be a valid email address, optionally with a name', } +/** Pattern for validating base64-encoded strings. */ export const base64: Pattern = { regex: regexes.base64.matches(), description: diff --git a/sdk/base/lib/util/regexes.ts b/sdk/base/lib/util/regexes.ts index 65213a7b3..3fa372d8c 100644 --- a/sdk/base/lib/util/regexes.ts +++ b/sdk/base/lib/util/regexes.ts @@ -1,3 +1,16 @@ +/** + * A wrapper around RegExp that supports composition into larger patterns. + * Provides helpers to produce anchored (full-match), grouped (sub-expression), + * and unanchored (contains) regex source strings. 
+ * + * @example + * ```ts + * const digit = new ComposableRegex(/\d+/) + * digit.matches() // "^\\d+$" + * digit.contains() // "\\d+" + * digit.asExpr() // "(\\d+)" + * ``` + */ export class ComposableRegex { readonly regex: RegExp constructor(regex: RegExp | string) { @@ -7,77 +20,94 @@ export class ComposableRegex { this.regex = new RegExp(regex) } } + /** Returns the regex source wrapped in a capturing group, suitable for embedding in a larger expression. */ asExpr(): string { return `(${this.regex.source})` } + /** Returns the regex source anchored with `^...$` for full-string matching. */ matches(): string { return `^${this.regex.source}$` } + /** Returns the raw regex source string for substring/containment matching. */ contains(): string { return this.regex.source } } +/** + * Escapes all regex special characters in a string so it can be used as a literal in a RegExp. + * @param str - The string to escape + * @returns The escaped string safe for regex interpolation + */ export const escapeLiteral = (str: string) => str.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') +/** Composable regex for matching IPv6 addresses (all standard forms including `::` shorthand). 
*/ // https://ihateregex.io/expr/ipv6/ export const ipv6 = new ComposableRegex( /(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))/, ) +/** Composable regex for matching IPv4 addresses in dotted-decimal notation. */ // https://ihateregex.io/expr/ipv4/ export const ipv4 = new ComposableRegex( /(\b25[0-5]|\b2[0-4][0-9]|\b[01]?[0-9][0-9]?)(\.(25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}/, ) +/** Composable regex for matching RFC-compliant hostnames. */ export const hostname = new ComposableRegex( /(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])/, ) +/** Composable regex for matching `.local` mDNS hostnames. */ export const localHostname = new ComposableRegex( /[-a-zA-Z0-9@:%._\+~#=]{1,256}\.local/, ) -export const torHostname = new ComposableRegex( - /[-a-zA-Z0-9@:%._\+~#=]{1,256}\.onion/, -) - +/** Composable regex for matching HTTP/HTTPS URLs. */ // https://ihateregex.io/expr/url/ export const url = new ComposableRegex( /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.[a-zA-Z0-9()]{1,6}\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)/, ) +/** Composable regex for matching `.local` URLs (mDNS/LAN). 
*/ export const localUrl = new ComposableRegex( /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.local\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)/, ) -export const torUrl = new ComposableRegex( - /https?:\/\/(www\.)?[-a-zA-Z0-9@:%._\+~#=]{1,256}\.onion\b([-a-zA-Z0-9()!@:%_\+.~#?&\/\/=]*)/, -) - +/** Composable regex for matching printable ASCII characters (space through tilde). */ // https://ihateregex.io/expr/ascii/ export const ascii = new ComposableRegex(/[ -~]*/) +/** Composable regex for matching fully qualified domain names. */ export const domain = new ComposableRegex(/[A-Za-z0-9.-]+\.[A-Za-z]{2,}/) +/** Composable regex for matching email addresses. */ // https://www.regular-expressions.info/email.html export const email = new ComposableRegex(`[A-Za-z0-9._%+-]+@${domain.asExpr()}`) +/** Composable regex for matching email addresses optionally preceded by a display name (e.g. `"Name "`). */ export const emailWithName = new ComposableRegex( `${email.asExpr()}|([^<]*<${email.asExpr()}>)`, ) +/** Composable regex for matching base64-encoded strings (no whitespace). */ //https://rgxdb.com/r/1NUN74O6 export const base64 = new ComposableRegex( /(?:[a-zA-Z0-9+\/]{4})*(?:|(?:[a-zA-Z0-9+\/]{3}=)|(?:[a-zA-Z0-9+\/]{2}==)|(?:[a-zA-Z0-9+\/]{1}===))/, ) +/** Composable regex for matching base64-encoded strings that may contain interspersed whitespace. */ //https://rgxdb.com/r/1NUN74O6 export const base64Whitespace = new ComposableRegex( /(?:([a-zA-Z0-9+\/]\s*){4})*(?:|(?:([a-zA-Z0-9+\/]\s*){3}=)|(?:([a-zA-Z0-9+\/]\s*){2}==)|(?:([a-zA-Z0-9+\/]\s*){1}===))/, ) +/** + * Creates a composable regex for matching PEM-encoded blocks with the given label. + * @param label - The PEM label (e.g. `"CERTIFICATE"`, `"RSA PRIVATE KEY"`) + * @returns A ComposableRegex matching `-----BEGIN = { [affine]: A } type NeverPossible = { [affine]: string } +/** + * Evaluates to `never` if `A` is `any`, otherwise resolves to `A`. 
+ * Useful for preventing `any` from silently propagating through generic constraints. + */ export type NoAny = NeverPossible extends A ? keyof NeverPossible extends keyof A ? never @@ -54,6 +80,14 @@ type Numbers = '0' | '1' | '2' | '3' | '4' | '5' | '6' | '7' | '8' | '9' type CapitalChars = CapitalLetters | Numbers +/** + * Converts a PascalCase or camelCase string type to kebab-case at the type level. + * + * @example + * ```ts + * type Result = ToKebab<"FooBar"> // "foo-bar" + * ``` + */ export type ToKebab = S extends string ? S extends `${infer Head}${CapitalChars}${infer Tail}` // string has a capital char somewhere ? Head extends '' // there is a capital char in the first position @@ -101,6 +135,7 @@ export type ToKebab = S extends string : S /* 'abc' */ : never +/** A generic object type with string keys and unknown values. */ export type StringObject = Record function test() { diff --git a/sdk/base/lib/zExport.d.ts b/sdk/base/lib/zExport.d.ts new file mode 100644 index 000000000..995cf7c9c --- /dev/null +++ b/sdk/base/lib/zExport.d.ts @@ -0,0 +1,14 @@ +import { z as _z } from 'zod' +import type { DeepPartial } from './types' + +type ZodDeepPartial = (a: _z.ZodType) => _z.ZodType> +type ZodDeepLoose = (a: _z.ZodType) => _z.ZodType + +declare module 'zod' { + namespace z { + const deepPartial: ZodDeepPartial + const deepLoose: ZodDeepLoose + } +} + +export { _z as z } diff --git a/sdk/base/lib/zExport.js b/sdk/base/lib/zExport.js new file mode 100644 index 000000000..c99abe324 --- /dev/null +++ b/sdk/base/lib/zExport.js @@ -0,0 +1,92 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); + +const zod_1 = require("zod"); +const zod_deep_partial_1 = require("zod-deep-partial"); + +// Recursively make all ZodObjects in a schema loose (preserve extra keys at every nesting level). +// Uses _zod.def.type duck-typing instead of instanceof to avoid issues with mismatched zod versions. 
+function deepLoose(schema) { + const def = schema._zod?.def; + if (!def) return schema; + let result; + switch (def.type) { + case "optional": + result = deepLoose(def.innerType).optional(); + break; + case "nullable": + result = deepLoose(def.innerType).nullable(); + break; + case "object": { + const newShape = {}; + for (const key in schema.shape) { + newShape[key] = deepLoose(schema.shape[key]); + } + result = zod_1.z.looseObject(newShape); + break; + } + case "array": + result = zod_1.z.array(deepLoose(def.element)); + break; + case "union": + result = zod_1.z.union(def.options.map((o) => deepLoose(o))); + break; + case "intersection": + result = zod_1.z.intersection(deepLoose(def.left), deepLoose(def.right)); + break; + case "record": + result = zod_1.z.record(def.keyType, deepLoose(def.valueType)); + break; + case "tuple": + result = zod_1.z.tuple(def.items.map((i) => deepLoose(i))); + break; + case "lazy": + result = zod_1.z.lazy(() => deepLoose(def.getter())); + break; + default: + return schema; + } + return result; +} + +// Add deepPartial and deepLoose to z at runtime +zod_1.z.deepPartial = (a) => + deepLoose((0, zod_deep_partial_1.zodDeepPartial)(a)); +zod_1.z.deepLoose = deepLoose; + +// Override z.object to produce loose objects by default (extra keys are preserved, not stripped). +const _origObject = zod_1.z.object; +const _patchedObject = (...args) => _origObject(...args).loose(); + +// In CJS (Node.js), patch the source module in require.cache where 'object' is a writable property; +// the CJS getter chain (index → external → schemas) then relays the patched version. +// We walk only the zod entry module's dependency tree and match by identity (=== origObject). +try { + const _zodModule = require.cache[require.resolve("zod")]; + for (const child of _zodModule?.children ?? []) { + for (const grandchild of child.children ?? 
[]) { + const desc = Object.getOwnPropertyDescriptor( + grandchild.exports, + "object", + ); + if (desc?.value === _origObject && desc.writable) { + grandchild.exports.object = _patchedObject; + } + } + } +} catch (_) { + // Not in CJS/Node environment (e.g. browser) — require.cache unavailable +} + +// z.object is a non-configurable getter on the zod namespace, so we can't override it directly. +// Shadow it by exporting a new object with _z as prototype and our patched object on the instance. +const z = Object.create(zod_1.z, { + object: { + value: _patchedObject, + writable: true, + configurable: true, + enumerable: true, + }, +}); + +exports.z = z; diff --git a/sdk/base/package-lock.json b/sdk/base/package-lock.json index f6949c84f..e96432bed 100644 --- a/sdk/base/package-lock.json +++ b/sdk/base/package-lock.json @@ -13,8 +13,9 @@ "deep-equality-data-structures": "^1.5.0", "isomorphic-fetch": "^3.0.0", "mime": "^4.0.7", - "ts-matches": "^6.3.2", - "yaml": "^2.7.1" + "yaml": "^2.7.1", + "zod": "^4.3.6", + "zod-deep-partial": "^1.2.0" }, "devDependencies": { "@types/jest": "^29.4.0", @@ -4626,12 +4627,6 @@ "node": ">=10" } }, - "node_modules/ts-matches": { - "version": "6.3.2", - "resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-6.3.2.tgz", - "integrity": "sha512-UhSgJymF8cLd4y0vV29qlKVCkQpUtekAaujXbQVc729FezS8HwqzepqvtjzQ3HboatIqN/Idor85O2RMwT7lIQ==", - "license": "MIT" - }, "node_modules/ts-morph": { "version": "18.0.0", "resolved": "https://registry.npmjs.org/ts-morph/-/ts-morph-18.0.0.tgz", @@ -5006,6 +5001,25 @@ "funding": { "url": "https://github.com/sponsors/sindresorhus" } + }, + "node_modules/zod": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-deep-partial": 
{ + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/zod-deep-partial/-/zod-deep-partial-1.2.0.tgz", + "integrity": "sha512-dXfte+/YN0aFYs0kMGz6xfPQWEYNaKz/LsbfxrbwL+oY3l/aR9HOBTyWCpHZ5AJXMGWKSq+0X0oVPpRliUFcjQ==", + "license": "MIT", + "peerDependencies": { + "zod": "^4.1.13" + } } } } diff --git a/sdk/base/package.json b/sdk/base/package.json index 1eda223bf..59d6d5455 100644 --- a/sdk/base/package.json +++ b/sdk/base/package.json @@ -24,11 +24,12 @@ "@iarna/toml": "^3.0.0", "@noble/curves": "^1.8.2", "@noble/hashes": "^1.7.2", + "deep-equality-data-structures": "^1.5.0", "isomorphic-fetch": "^3.0.0", "mime": "^4.0.7", - "ts-matches": "^6.3.2", "yaml": "^2.7.1", - "deep-equality-data-structures": "^1.5.0" + "zod": "^4.3.6", + "zod-deep-partial": "^1.2.0" }, "prettier": { "trailingComma": "all", diff --git a/sdk/package/lib/StartSdk.ts b/sdk/package/lib/StartSdk.ts index 9d1dc0164..755425ccd 100644 --- a/sdk/package/lib/StartSdk.ts +++ b/sdk/package/lib/StartSdk.ts @@ -6,16 +6,15 @@ import { ActionInfo, Actions, } from '../../base/lib/actions/setupActions' -import { - SyncOptions, - ServiceInterfaceId, - PackageId, - ServiceInterfaceType, - Effects, -} from '../../base/lib/types' +import { ServiceInterfaceType, Effects } from '../../base/lib/types' import * as patterns from '../../base/lib/util/patterns' -import { BackupSync, Backups } from './backup/Backups' -import { smtpInputSpec } from '../../base/lib/actions/input/inputSpecConstants' +import { Backups } from './backup/Backups' +import { + smtpInputSpec, + systemSmtpSpec, + customSmtp, + smtpProviderVariants, +} from '../../base/lib/actions/input/inputSpecConstants' import { Daemon, Daemons } from './mainFn/Daemons' import { checkPortListening } from './health/checkFns/checkPortListening' import { checkWebUrl, runHealthScript } from './health/checkFns' @@ -25,10 +24,11 @@ import { setupMain } from './mainFn' import { defaultTrigger } from './trigger/defaultTrigger' import { changeOnFirstSuccess, 
cooldownTrigger } from './trigger' import { setupServiceInterfaces } from '../../base/lib/interfaces/setupInterfaces' +import { setupExportedUrls } from '../../base/lib/interfaces/setupExportedUrls' import { successFailure } from './trigger/successFailure' import { MultiHost, Scheme } from '../../base/lib/interfaces/Host' import { ServiceInterfaceBuilder } from '../../base/lib/interfaces/ServiceInterfaceBuilder' -import { GetSystemSmtp } from './util' +import { GetOutboundGateway, GetSystemSmtp } from './util' import { nullIfEmpty } from './util' import { getServiceInterface, getServiceInterfaces } from './util' import { @@ -67,7 +67,8 @@ import { import { getOwnServiceInterfaces } from '../../base/lib/util/getServiceInterfaces' import { Volumes, createVolumes } from './util/Volume' -export const OSVersion = testTypeVersion('0.4.0-alpha.19') +/** The minimum StartOS version required by this SDK release */ +export const OSVersion = testTypeVersion('0.4.0-alpha.20') // prettier-ignore type AnyNeverCond = @@ -76,17 +77,51 @@ type AnyNeverCond = T extends [any, ...infer U] ? AnyNeverCond : never +/** + * The top-level SDK facade for building StartOS service packages. + * + * Use `StartSdk.of()` to create an uninitialized instance, then call `.withManifest()` + * to bind it to a manifest, and finally `.build()` to obtain the full toolkit of helpers + * for actions, daemons, backups, interfaces, health checks, and more. + * + * @typeParam Manifest - The service manifest type; starts as `never` until `.withManifest()` is called. + */ export class StartSdk { private constructor(readonly manifest: Manifest) {} + /** + * Create an uninitialized StartSdk instance. Call `.withManifest()` next. + * @returns A new StartSdk with no manifest bound. + */ static of() { return new StartSdk(null as never) } + /** + * Bind a manifest to the SDK, producing a typed SDK instance. 
+ * @param manifest - The service manifest definition + * @returns A new StartSdk instance parameterized by the given manifest type + */ withManifest(manifest: Manifest) { return new StartSdk(manifest) } + private ifPluginEnabled
- + {{ 'Package a service' | i18n }} @@ -86,7 +86,7 @@
- + {{ 'Package a service' | i18n }} diff --git a/web/projects/marketplace/src/pages/show/links.component.ts b/web/projects/marketplace/src/pages/show/links.component.ts index d551e897b..2454754c2 100644 --- a/web/projects/marketplace/src/pages/show/links.component.ts +++ b/web/projects/marketplace/src/pages/show/links.component.ts @@ -23,7 +23,7 @@ import { MarketplaceLinkComponent } from './link.component' class="item-pointer" /> {{ 'Links' | i18n }}
- @if (pkg().docsUrl; as docsUrl) { + @for (docsUrl of pkg().docsUrls; track $index) { } - @if (pkg().donationUrl; as donationUrl) { follow instructions diff --git a/web/projects/setup-wizard/src/app/pages/password.page.ts b/web/projects/setup-wizard/src/app/pages/password.page.ts index adef97f3a..6e8e7fcd7 100644 --- a/web/projects/setup-wizard/src/app/pages/password.page.ts +++ b/web/projects/setup-wizard/src/app/pages/password.page.ts @@ -8,7 +8,12 @@ import { ReactiveFormsModule, Validators, } from '@angular/forms' -import { ErrorService, i18nPipe, LoadingService } from '@start9labs/shared' +import { + ErrorService, + i18nPipe, + LoadingService, + normalizeHostname, +} from '@start9labs/shared' import { TuiAutoFocus, TuiMapperPipe, TuiValidator } from '@taiga-ui/cdk' import { TuiButton, @@ -31,31 +36,36 @@ import { StateService } from '../services/state.service'

{{ - isRequired - ? ('Set Master Password' | i18n) + isFresh + ? ('Set Up Your Server' | i18n) : ('Set New Password (Optional)' | i18n) }} - - {{ - isRequired - ? ('Make it good. Write it down.' | i18n) - : ('Skip to keep your existing password.' | i18n) - }} -

- + @if (isFresh) { + + + + + + @if (form.controls.name.value?.trim()) { +

{{ derivedHostname }}.local

+ } + } + + @@ -87,14 +97,14 @@ import { StateService } from '../services/state.service' - @if (!isRequired) { + @if (!isFresh) { } + @if (error) { +

{{ error }}

+ }
+ + + `, + providers: [ + tuiValidationErrorsProvider({ + required: 'This field is required', + minlength: 'Password must be at least 8 characters', + maxlength: 'Password cannot exceed 64 characters', + match: 'Passwords do not match', + }), + ], + changeDetection: ChangeDetectionStrategy.OnPush, + imports: [ + AsyncPipe, + ReactiveFormsModule, + TuiAutoFocus, + TuiButton, + TuiButtonLoading, + TuiError, + TuiFieldErrorPipe, + TuiForm, + TuiTextfield, + TuiValidator, + ], +}) +export class ChangePasswordDialog { + private readonly context = injectContext>() + private readonly api = inject(ApiService) + private readonly alerts = inject(TuiAlertService) + private readonly errorService = inject(ErrorService) + + protected readonly loading = signal(false) + protected readonly form = inject(NonNullableFormBuilder).group({ + password: [ + '', + [Validators.required, Validators.minLength(8), Validators.maxLength(64)], + ], + confirm: [ + '', + [Validators.required, Validators.minLength(8), Validators.maxLength(64)], + ], + }) + + protected readonly matchValidator = toSignal( + this.form.controls.password.valueChanges.pipe( + map( + (password): ValidatorFn => + ({ value }) => + value === password ? 
null : { match: true }, + ), + ), + { initialValue: Validators.nullValidator }, + ) + + protected readonly formInvalid = toSignal( + this.form.statusChanges.pipe(map(() => this.form.invalid)), + { initialValue: this.form.invalid }, + ) + + protected async onSave() { + this.loading.set(true) + + try { + await this.api.setPassword({ password: this.form.getRawValue().password }) + this.alerts + .open('Password changed', { label: 'Success', appearance: 'positive' }) + .subscribe() + this.context.$implicit.complete() + } catch (e: any) { + this.errorService.handleError(e) + } finally { + this.loading.set(false) + } + } +} + +export const CHANGE_PASSWORD = new PolymorpheusComponent(ChangePasswordDialog) diff --git a/web/projects/start-tunnel/src/app/routes/home/routes/settings/index.ts b/web/projects/start-tunnel/src/app/routes/home/routes/settings/index.ts index 4339af6a9..e2360e52f 100644 --- a/web/projects/start-tunnel/src/app/routes/home/routes/settings/index.ts +++ b/web/projects/start-tunnel/src/app/routes/home/routes/settings/index.ts @@ -1,142 +1,101 @@ -import { AsyncPipe } from '@angular/common' import { ChangeDetectionStrategy, Component, inject, signal, } from '@angular/core' -import { toSignal } from '@angular/core/rxjs-interop' -import { - NonNullableFormBuilder, - ReactiveFormsModule, - ValidatorFn, - Validators, -} from '@angular/forms' import { ErrorService } from '@start9labs/shared' -import { tuiMarkControlAsTouchedAndValidate, TuiValidator } from '@taiga-ui/cdk' -import { - TuiAlertService, - TuiAppearance, - TuiButton, - TuiError, - TuiTextfield, - TuiTitle, -} from '@taiga-ui/core' -import { - TuiButtonLoading, - TuiFieldErrorPipe, - tuiValidationErrorsProvider, -} from '@taiga-ui/kit' -import { TuiCard, TuiForm, TuiHeader } from '@taiga-ui/layout' -import { map } from 'rxjs' -import { ApiService } from 'src/app/services/api/api.service' +import { TuiAppearance, TuiButton, TuiTitle } from '@taiga-ui/core' +import { TuiDialogService } from 
'@taiga-ui/experimental' +import { TuiBadge, TuiButtonLoading } from '@taiga-ui/kit' +import { TuiCard, TuiCell } from '@taiga-ui/layout' +import { UpdateService } from 'src/app/services/update.service' + +import { CHANGE_PASSWORD } from './change-password' @Component({ template: ` -
-
-

- Settings - Change password -

-
- - - - - - - - - - -
- -
- +
+
+ + + Version + @if (update.hasUpdate()) { + + Update Available + + } + + Current: {{ update.installed() ?? '—' }} + + @if (update.hasUpdate()) { + + } @else { + + } +
+
+ + Change password + + +
+
`, - providers: [ - tuiValidationErrorsProvider({ - required: 'This field is required', - minlength: 'Password must be at least 8 characters', - maxlength: 'Password cannot exceed 64 characters', - match: 'Passwords do not match', - }), - ], changeDetection: ChangeDetectionStrategy.OnPush, imports: [ - ReactiveFormsModule, - AsyncPipe, TuiCard, - TuiForm, - TuiHeader, + TuiCell, TuiTitle, - TuiTextfield, - TuiError, - TuiFieldErrorPipe, TuiButton, TuiButtonLoading, - TuiValidator, + TuiBadge, TuiAppearance, ], }) export default class Settings { - private readonly api = inject(ApiService) - private readonly alerts = inject(TuiAlertService) + private readonly dialogs = inject(TuiDialogService) private readonly errorService = inject(ErrorService) - protected readonly loading = signal(false) - protected readonly form = inject(NonNullableFormBuilder).group({ - password: [ - '', - [Validators.required, Validators.minLength(8), Validators.maxLength(64)], - ], - confirm: [ - '', - [Validators.required, Validators.minLength(8), Validators.maxLength(64)], - ], - }) + protected readonly update = inject(UpdateService) + protected readonly checking = signal(false) + protected readonly applying = signal(false) - protected readonly matchValidator = toSignal( - this.form.controls.password.valueChanges.pipe( - map( - (password): ValidatorFn => - ({ value }) => - value === password ? 
null : { match: true }, - ), - ), - { initialValue: Validators.nullValidator }, - ) + protected onChangePassword(): void { + this.dialogs.open(CHANGE_PASSWORD, { label: 'Change Password' }).subscribe() + } - protected async onSave() { - if (this.form.invalid) { - tuiMarkControlAsTouchedAndValidate(this.form) - - return - } - - this.loading.set(true) + protected async onCheckUpdate() { + this.checking.set(true) try { - await this.api.setPassword({ password: this.form.getRawValue().password }) - this.alerts - .open('Password changed', { label: 'Success', appearance: 'positive' }) - .subscribe() - this.form.reset() + await this.update.checkUpdate() } catch (e: any) { this.errorService.handleError(e) } finally { - this.loading.set(false) + this.checking.set(false) + } + } + + protected async onApply() { + this.applying.set(true) + + try { + await this.update.applyUpdate() + } catch (e: any) { + this.errorService.handleError(e) + } finally { + this.applying.set(false) } } } diff --git a/web/projects/start-tunnel/src/app/services/api/api.service.ts b/web/projects/start-tunnel/src/app/services/api/api.service.ts index 3bd67ba16..401d7f43c 100644 --- a/web/projects/start-tunnel/src/app/services/api/api.service.ts +++ b/web/projects/start-tunnel/src/app/services/api/api.service.ts @@ -25,6 +25,9 @@ export abstract class ApiService { // forwards abstract addForward(params: AddForwardReq): Promise // port-forward.add abstract deleteForward(params: DeleteForwardReq): Promise // port-forward.remove + // update + abstract checkUpdate(): Promise // update.check + abstract applyUpdate(): Promise // update.apply } export type SubscribeRes = { @@ -62,3 +65,9 @@ export type AddForwardReq = { export type DeleteForwardReq = { source: string } + +export type TunnelUpdateResult = { + status: string + installed: string + candidate: string +} diff --git a/web/projects/start-tunnel/src/app/services/api/live-api.service.ts b/web/projects/start-tunnel/src/app/services/api/live-api.service.ts 
index 7c8ee6970..cabf8200f 100644 --- a/web/projects/start-tunnel/src/app/services/api/live-api.service.ts +++ b/web/projects/start-tunnel/src/app/services/api/live-api.service.ts @@ -16,6 +16,7 @@ import { DeleteSubnetReq, LoginReq, SubscribeRes, + TunnelUpdateResult, UpsertDeviceReq, UpsertSubnetReq, } from './api.service' @@ -103,6 +104,16 @@ export class LiveApiService extends ApiService { return this.rpcRequest({ method: 'port-forward.remove', params }) } + // update + + async checkUpdate(): Promise { + return this.rpcRequest({ method: 'update.check', params: {} }) + } + + async applyUpdate(): Promise { + return this.rpcRequest({ method: 'update.apply', params: {} }) + } + // private private async upsertSubnet(params: UpsertSubnetReq): Promise { diff --git a/web/projects/start-tunnel/src/app/services/api/mock-api.service.ts b/web/projects/start-tunnel/src/app/services/api/mock-api.service.ts index 7f35f6a05..6f82c597f 100644 --- a/web/projects/start-tunnel/src/app/services/api/mock-api.service.ts +++ b/web/projects/start-tunnel/src/app/services/api/mock-api.service.ts @@ -9,6 +9,7 @@ import { DeleteSubnetReq, LoginReq, SubscribeRes, + TunnelUpdateResult, UpsertDeviceReq, UpsertSubnetReq, } from './api.service' @@ -196,6 +197,24 @@ export class MockApiService extends ApiService { return null } + async checkUpdate(): Promise { + await pauseFor(1000) + return { + status: 'update-available', + installed: '0.4.0-alpha.19', + candidate: '0.4.0-alpha.20', + } + } + + async applyUpdate(): Promise { + await pauseFor(2000) + return { + status: 'updating', + installed: '0.4.0-alpha.19', + candidate: '0.4.0-alpha.20', + } + } + private async mockRevision(patch: Operation[]): Promise { const revision = { id: ++this.sequence, diff --git a/web/projects/start-tunnel/src/app/services/patch-db/data-model.ts b/web/projects/start-tunnel/src/app/services/patch-db/data-model.ts index f84546717..9df4fac6d 100644 --- a/web/projects/start-tunnel/src/app/services/patch-db/data-model.ts 
+++ b/web/projects/start-tunnel/src/app/services/patch-db/data-model.ts @@ -39,14 +39,14 @@ export const mockTunnelData: TunnelData = { }, }, portForwards: { - '69.1.1.42:443': '10.59.0.2:5443', + '69.1.1.42:443': '10.59.0.2:443', '69.1.1.42:3000': '10.59.0.2:3000', }, gateways: { eth0: { name: null, - public: null, secure: null, + type: null, ipInfo: { name: 'Wired Connection 1', scopeId: 1, diff --git a/web/projects/start-tunnel/src/app/services/update.service.ts b/web/projects/start-tunnel/src/app/services/update.service.ts new file mode 100644 index 000000000..861b5c057 --- /dev/null +++ b/web/projects/start-tunnel/src/app/services/update.service.ts @@ -0,0 +1,120 @@ +import { Component, computed, inject, Injectable, signal } from '@angular/core' +import { toObservable } from '@angular/core/rxjs-interop' +import { ErrorService } from '@start9labs/shared' +import { TuiLoader } from '@taiga-ui/core' +import { TuiDialogService } from '@taiga-ui/experimental' +import { PolymorpheusComponent } from '@taiga-ui/polymorpheus' +import { + catchError, + EMPTY, + filter, + from, + interval, + Subscription, + switchMap, + takeWhile, +} from 'rxjs' +import { ApiService, TunnelUpdateResult } from './api/api.service' +import { AuthService } from './auth.service' + +@Component({ + template: '', + imports: [TuiLoader], +}) +class UpdatingDialog { + protected readonly text = 'StartTunnel is updating...' +} + +@Injectable({ + providedIn: 'root', +}) +export class UpdateService { + private readonly api = inject(ApiService) + private readonly auth = inject(AuthService) + private readonly dialogs = inject(TuiDialogService) + private readonly errorService = inject(ErrorService) + + readonly result = signal(null) + readonly hasUpdate = computed( + () => this.result()?.status === 'update-available', + ) + readonly installed = computed(() => this.result()?.installed ?? null) + readonly candidate = computed(() => this.result()?.candidate ?? 
null) + + private polling = false + private updatingDialog: Subscription | null = null + + constructor() { + toObservable(this.auth.authenticated) + .pipe(filter(Boolean)) + .subscribe(() => this.initCheck()) + } + + async checkUpdate(): Promise { + const result = await this.api.checkUpdate() + this.setResult(result) + } + + async applyUpdate(): Promise { + const result = await this.api.applyUpdate() + this.setResult(result) + } + + private setResult(result: TunnelUpdateResult): void { + this.result.set(result) + + if (result.status === 'updating') { + this.showUpdatingDialog() + this.startPolling() + } else { + this.hideUpdatingDialog() + } + } + + private async initCheck(): Promise { + try { + await this.checkUpdate() + } catch (e: any) { + this.errorService.handleError(e) + } + } + + private startPolling(): void { + if (this.polling) return + this.polling = true + + interval(5000) + .pipe( + switchMap(() => + from(this.api.checkUpdate()).pipe(catchError(() => EMPTY)), + ), + takeWhile(result => result.status === 'updating', true), + ) + .subscribe({ + next: result => this.result.set(result), + complete: () => { + this.polling = false + this.hideUpdatingDialog() + }, + error: () => { + this.polling = false + this.hideUpdatingDialog() + }, + }) + } + + private showUpdatingDialog(): void { + if (this.updatingDialog) return + this.updatingDialog = this.dialogs + .open(new PolymorpheusComponent(UpdatingDialog), { + closable: false, + dismissible: false, + }) + .subscribe({ complete: () => (this.updatingDialog = null) }) + } + + private hideUpdatingDialog(): void { + this.updatingDialog?.unsubscribe() + this.updatingDialog = null + } +} diff --git a/web/projects/ui/src/app/components/backup-report.component.ts b/web/projects/ui/src/app/components/backup-report.component.ts index 84aaaf141..ffb629ba1 100644 --- a/web/projects/ui/src/app/components/backup-report.component.ts +++ b/web/projects/ui/src/app/components/backup-report.component.ts @@ -12,7 +12,7 @@ import { 
TuiCell } from '@taiga-ui/layout' import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' import { PatchDB } from 'patch-db-client' import { map } from 'rxjs' -import { BackupReport } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { getManifest } from '../utils/get-package-data' import { DataModel } from '../services/patch-db/data-model' @@ -60,7 +60,7 @@ export class BackupsReportModal { readonly data = injectContext< - TuiDialogContext + TuiDialogContext >().data readonly pkgTitles = toSignal( diff --git a/web/projects/ui/src/app/routes/diagnostic/logs.component.ts b/web/projects/ui/src/app/routes/diagnostic/logs.component.ts index 8682d48dc..7ba7b8e06 100644 --- a/web/projects/ui/src/app/routes/diagnostic/logs.component.ts +++ b/web/projects/ui/src/app/routes/diagnostic/logs.component.ts @@ -69,7 +69,7 @@ export default class LogsPage implements OnInit { private readonly api = inject(ApiService) private readonly errorService = inject(ErrorService) - startCursor?: string + startCursor?: string | null loading = false logs: string[] = [] scrollTop = 0 @@ -98,7 +98,7 @@ export default class LogsPage implements OnInit { try { const response = await this.api.diagnosticGetLogs({ - cursor: this.startCursor, + cursor: this.startCursor ?? 
undefined, before: !!this.startCursor, limit: 200, }) diff --git a/web/projects/ui/src/app/routes/login/ca-wizard/ca-wizard.component.html b/web/projects/ui/src/app/routes/login/ca-wizard/ca-wizard.component.html index 3d2b796b3..52c87c61c 100644 --- a/web/projects/ui/src/app/routes/login/ca-wizard/ca-wizard.component.html +++ b/web/projects/ui/src/app/routes/login/ca-wizard/ca-wizard.component.html @@ -46,7 +46,7 @@ tuiButton docsLink size="s" - path="/user-manual/trust-ca.html" + path="/start-os/user-manual/trust-ca.html" iconEnd="@tui.external-link" > {{ 'View instructions' | i18n }} diff --git a/web/projects/ui/src/app/routes/portal/components/header/menu.component.ts b/web/projects/ui/src/app/routes/portal/components/header/menu.component.ts index c3e9c47f0..d21e62e3d 100644 --- a/web/projects/ui/src/app/routes/portal/components/header/menu.component.ts +++ b/web/projects/ui/src/app/routes/portal/components/header/menu.component.ts @@ -50,7 +50,12 @@ import { ABOUT } from './about.component' - + {{ 'User manual' | i18n }} - @if (interface.address().masked) { - - } - @if (interface.address().ui) { + @if (address().ui) { - {{ 'Open' | i18n }} + {{ 'Open UI' | i18n }} } + @if (address().deletable) { + + } + @if (address().hostnameInfo.metadata.kind === 'public-domain') { + + } + @if (address().hostnameInfo.metadata.kind === 'private-domain') { + + } + @if ( + address().hostnameInfo.metadata.kind === 'ipv4' && + address().access === 'public' && + address().hostnameInfo.port !== null + ) { + + } @@ -75,30 +115,29 @@ import { InterfaceAddressItemComponent } from './item.component' > {{ 'Actions' | i18n }} - @if (interface.address().ui) { + @if (address().ui) { - {{ 'Open' | i18n }} + {{ 'Open UI' | i18n }} } - @if (interface.address().masked) { - - } + @@ -106,10 +145,53 @@ import { InterfaceAddressItemComponent } from './item.component' tuiOption new iconStart="@tui.copy" - (click)="copyService.copy(href())" + (click)="copyService.copy(address().url)" > {{ 'Copy 
URL' | i18n }} + @if (address().hostnameInfo.metadata.kind === 'public-domain') { + + } + @if (address().hostnameInfo.metadata.kind === 'private-domain') { + + } + @if ( + address().hostnameInfo.metadata.kind === 'ipv4' && + address().hostnameInfo.port !== null + ) { + + } + @if (address().deletable) { + + }
@@ -123,6 +205,11 @@ import { InterfaceAddressItemComponent } from './item.component' white-space: nowrap; } + .disabled { + pointer-events: none; + opacity: var(--tui-disabled-opacity); + } + .mobile { display: none; } @@ -136,40 +223,130 @@ import { InterfaceAddressItemComponent } from './item.component' display: block; } } - - :host-context(tbody.uncommon-hidden) { - .desktop { - height: 0; - visibility: hidden; - } - - .mobile { - display: none; - } - } `, imports: [TuiButton, TuiDropdown, TuiDataList, i18nPipe, TuiTextfield], providers: [tuiButtonOptionsProvider({ appearance: 'icon' })], changeDetection: ChangeDetectionStrategy.OnPush, }) export class AddressActionsComponent { - readonly isMobile = inject(TUI_IS_MOBILE) - readonly dialog = inject(DialogService) + private readonly isMobile = inject(TUI_IS_MOBILE) + private readonly dialog = inject(DialogService) + private readonly api = inject(ApiService) + private readonly loader = inject(LoadingService) + private readonly errorService = inject(ErrorService) + private readonly domainHealth = inject(DomainHealthService) readonly copyService = inject(CopyService) - readonly interface = inject(InterfaceAddressItemComponent) readonly open = signal(false) - readonly href = input.required() - readonly bullets = input.required() + readonly address = input.required() + readonly packageId = input('') + readonly value = input() readonly disabled = input.required() + readonly gatewayId = input('') showQR() { this.dialog .openComponent(new PolymorpheusComponent(QRModal), { size: 'auto', closeable: this.isMobile, - data: this.href(), + data: this.address().url, }) .subscribe() } + + async toggleEnabled() { + const addr = this.address() + const iface = this.value() + if (!iface) return + + const enabled = !addr.enabled + const addressJson = JSON.stringify(addr.hostnameInfo) + const loader = this.loader.open('Saving').subscribe() + + try { + if (this.packageId()) { + await this.api.pkgBindingSetAddressEnabled({ + 
internalPort: iface.addressInfo.internalPort, + address: addressJson, + enabled, + package: this.packageId(), + host: iface.addressInfo.hostId, + }) + } else { + await this.api.serverBindingSetAddressEnabled({ + internalPort: 80, + address: addressJson, + enabled, + }) + } + } catch (e: any) { + this.errorService.handleError(e) + } finally { + loader.unsubscribe() + } + } + + showDnsValidation() { + const port = this.address().hostnameInfo.port + if (port === null) return + this.domainHealth.showPublicDomainSetup( + this.address().hostnameInfo.hostname, + this.gatewayId(), + port, + ) + } + + showPrivateDnsValidation() { + this.domainHealth.showPrivateDomainSetup(this.gatewayId()) + } + + showPortForwardValidation() { + const port = this.address().hostnameInfo.port + if (port === null) return + this.domainHealth.showPortForwardSetup(this.gatewayId(), port) + } + + async deleteDomain() { + const addr = this.address() + const iface = this.value() + if (!iface) return + + const confirmed = await this.dialog + .openConfirm({ label: 'Are you sure?', size: 's' }) + .toPromise() + + if (!confirmed) return + + const loader = this.loader.open('Removing').subscribe() + + try { + const host = addr.hostnameInfo.hostname + + if (addr.hostnameInfo.metadata.kind === 'public-domain') { + if (this.packageId()) { + await this.api.pkgRemovePublicDomain({ + fqdn: host, + package: this.packageId(), + host: iface.addressInfo.hostId, + }) + } else { + await this.api.osUiRemovePublicDomain({ fqdn: host }) + } + } else if (addr.hostnameInfo.metadata.kind === 'private-domain') { + if (this.packageId()) { + await this.api.pkgRemovePrivateDomain({ + fqdn: host, + package: this.packageId(), + host: iface.addressInfo.hostId, + }) + } else { + await this.api.osUiRemovePrivateDomain({ fqdn: host }) + } + } + } catch (e: any) { + this.errorService.handleError(e) + } finally { + loader.unsubscribe() + } + } } diff --git 
a/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/addresses.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/addresses.component.ts index ae4a472fc..d314349b9 100644 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/addresses.component.ts +++ b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/addresses.component.ts @@ -1,117 +1,273 @@ -import { ChangeDetectionStrategy, Component, input } from '@angular/core' -import { i18nPipe } from '@start9labs/shared' -import { TuiButton } from '@taiga-ui/core' -import { TuiAccordion } from '@taiga-ui/experimental' -import { TuiElasticContainer, TuiSkeleton } from '@taiga-ui/kit' +import { + ChangeDetectionStrategy, + Component, + inject, + input, + signal, +} from '@angular/core' +import { ErrorService, i18nPipe, LoadingService } from '@start9labs/shared' +import { ISB, utils } from '@start9labs/start-sdk' +import { + TuiButton, + TuiDataList, + TuiDropdown, + TuiTextfield, +} from '@taiga-ui/core' +import { PatchDB } from 'patch-db-client' +import { firstValueFrom } from 'rxjs' +import { + FormComponent, + FormContext, +} from 'src/app/routes/portal/components/form.component' import { PlaceholderComponent } from 'src/app/routes/portal/components/placeholder.component' import { TableComponent } from 'src/app/routes/portal/components/table.component' - -import { MappedServiceInterface } from '../interface.service' +import { ApiService } from 'src/app/services/api/embassy-api.service' +import { FormDialogService } from 'src/app/services/form-dialog.service' +import { DataModel } from 'src/app/services/patch-db/data-model' +import { toAuthorityName } from 'src/app/utils/acme' +import { configBuilderToSpec } from 'src/app/utils/configBuilderToSpec' +import { + GatewayAddressGroup, + MappedServiceInterface, +} from '../interface.service' +import { DomainHealthService } from './domain-health.service' import { 
InterfaceAddressItemComponent } from './item.component' @Component({ - selector: 'section[addresses]', + selector: 'section[gatewayGroup]', template: ` -
{{ 'Addresses' | i18n }}
- - - - @for (address of addresses()?.common; track $index) { - - } @empty { - @if (addresses()) { - - - - } @else { - @for (_ of [0, 1]; track $index) { - - - - } - } - } - - @if (addresses()?.uncommon?.length && uncommon) { - - - - } - @for (address of addresses()?.uncommon; track $index) { - - } - - @if (addresses()?.uncommon?.length) { - - } -
- - {{ 'No addresses' | i18n }} - -
-
{{ 'Loading' | i18n }}
-
- -
-
+
+ {{ gatewayGroup().gatewayName }} + + + + +
+ + @for (address of gatewayGroup().addresses; track $index) { + + } @empty { + + + + } +
+ + {{ 'No addresses' | i18n }} + +
`, styles: ` :host ::ng-deep { - th:nth-child(2) { + th:first-child { width: 5rem; } - - th:nth-child(3) { - width: 4rem; - } - } - - .g-table:has(caption) { - border-bottom-left-radius: 0; - border-bottom-right-radius: 0; - } - - [tuiButton] { - width: 100%; - border-top-left-radius: 0; - border-top-right-radius: 0; - } - - :host-context(tui-root._mobile) { - [tuiButton] { - border-radius: var(--tui-radius-xs); - margin-block-end: 0.75rem; - } } `, host: { class: 'g-card' }, imports: [ - TuiSkeleton, TuiButton, + TuiDropdown, + TuiDataList, + TuiTextfield, TableComponent, PlaceholderComponent, i18nPipe, InterfaceAddressItemComponent, - TuiElasticContainer, ], changeDetection: ChangeDetectionStrategy.OnPush, }) export class InterfaceAddressesComponent { - readonly addresses = input.required< - MappedServiceInterface['addresses'] | undefined - >() + private readonly patch = inject>(PatchDB) + private readonly formDialog = inject(FormDialogService) + private readonly loader = inject(LoadingService) + private readonly errorService = inject(ErrorService) + private readonly api = inject(ApiService) + private readonly i18n = inject(i18nPipe) + private readonly domainHealth = inject(DomainHealthService) + + readonly gatewayGroup = input.required() + readonly packageId = input('') + readonly value = input() readonly isRunning = input.required() - uncommon = false + readonly addOpen = signal(false) + + async addPrivateDomain() { + this.formDialog.open>(FormComponent, { + label: 'New private domain', + size: 's', + data: { + spec: await configBuilderToSpec( + ISB.InputSpec.of({ + fqdn: ISB.Value.text({ + name: this.i18n.transform('Domain'), + description: this.i18n.transform( + 'Enter a fully qualified domain name. 
Since the domain is for private use, it can be any domain you want, even one you do not control.', + ), + required: true, + default: null, + patterns: [utils.Patterns.domain], + }), + }), + ), + buttons: [ + { + text: this.i18n.transform('Save')!, + handler: async (value: { fqdn: string }) => + this.savePrivateDomain(value.fqdn), + }, + ], + }, + }) + } + + async addPublicDomain() { + const iface = this.value() + if (!iface) return + + const network = await firstValueFrom( + this.patch.watch$('serverInfo', 'network'), + ) + + const authorities = Object.keys(network.acme).reduce< + Record + >( + (obj, url) => ({ + ...obj, + [url]: toAuthorityName(url), + }), + { local: toAuthorityName(null) }, + ) + + const addSpec = ISB.InputSpec.of({ + fqdn: ISB.Value.text({ + name: this.i18n.transform('Domain'), + description: this.i18n.transform( + 'Enter a fully qualified domain name. For example, if you control domain.com, you could enter domain.com or subdomain.domain.com or another.subdomain.domain.com.', + ), + required: true, + default: null, + patterns: [utils.Patterns.domain], + }).map(f => f.toLocaleLowerCase()), + ...(iface.addSsl + ? 
{ + authority: ISB.Value.select({ + name: this.i18n.transform('Certificate Authority'), + description: this.i18n.transform( + 'Select a Certificate Authority to issue SSL/TLS certificates for this domain', + ), + values: authorities, + default: Object.keys(network.acme)[0] || 'local', + }), + } + : {}), + }) + + this.formDialog.open(FormComponent, { + label: 'Add public domain', + size: 's', + data: { + spec: await configBuilderToSpec(addSpec), + buttons: [ + { + text: this.i18n.transform('Save')!, + handler: (input: typeof addSpec._TYPE) => + this.savePublicDomain(input.fqdn, input.authority), + }, + ], + }, + }) + } + + private async savePrivateDomain(fqdn: string): Promise { + const iface = this.value() + const gatewayId = this.gatewayGroup().gatewayId + const loader = this.loader.open('Saving').subscribe() + + try { + if (this.packageId()) { + await this.api.pkgAddPrivateDomain({ + fqdn, + gateway: gatewayId, + package: this.packageId(), + host: iface?.addressInfo.hostId || '', + }) + } else { + await this.api.osUiAddPrivateDomain({ fqdn, gateway: gatewayId }) + } + + await this.domainHealth.checkPrivateDomain(gatewayId) + + return true + } catch (e: any) { + this.errorService.handleError(e) + return false + } finally { + loader.unsubscribe() + } + } + + private async savePublicDomain( + fqdn: string, + authority?: 'local' | string, + ): Promise { + const iface = this.value() + const gatewayId = this.gatewayGroup().gatewayId + const loader = this.loader.open('Saving').subscribe() + + const params = { + fqdn, + gateway: gatewayId, + acme: !authority || authority === 'local' ? 
null : authority, + } + + try { + if (this.packageId()) { + await this.api.pkgAddPublicDomain({ + ...params, + package: this.packageId(), + host: iface?.addressInfo.hostId || '', + }) + } else { + await this.api.osUiAddPublicDomain(params) + } + + const port = this.gatewayGroup().addresses.find( + a => a.access === 'public' && a.hostnameInfo.port !== null, + )?.hostnameInfo.port + + if (port !== undefined && port !== null) { + await this.domainHealth.checkPublicDomain(fqdn, gatewayId, port) + } + + return true + } catch (e: any) { + this.errorService.handleError(e) + return false + } finally { + loader.unsubscribe() + } + } } diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/dns.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/dns.component.ts new file mode 100644 index 000000000..d5715cc33 --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/dns.component.ts @@ -0,0 +1,294 @@ +import { + ChangeDetectionStrategy, + Component, + computed, + inject, + signal, +} from '@angular/core' +import { FormsModule } from '@angular/forms' +import { ErrorService, i18nPipe } from '@start9labs/shared' +import { TuiButton, TuiDialogContext, TuiIcon, TuiLoader } from '@taiga-ui/core' +import { + TuiButtonLoading, + TuiSwitch, + tuiSwitchOptionsProvider, +} from '@taiga-ui/kit' +import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' +import { PortCheckIconComponent } from 'src/app/routes/portal/components/port-check-icon.component' +import { PortCheckWarningsComponent } from 'src/app/routes/portal/components/port-check-warnings.component' +import { TableComponent } from 'src/app/routes/portal/components/table.component' +import { ApiService } from 'src/app/services/api/embassy-api.service' +import { T } from '@start9labs/start-sdk' +import { parse } from 'tldts' + +export type DnsGateway = T.NetworkInterfaceInfo & { + id: string + ipInfo: T.IpInfo +} + 
+export type DomainValidationData = { + fqdn: string + gateway: DnsGateway + port: number + initialResults?: { dnsPass: boolean; portResult: T.CheckPortRes | null } +} + +@Component({ + selector: 'domain-validation', + template: ` + @let wanIp = context.data.gateway.ipInfo.wanIp || ('Error' | i18n); + @let gatewayName = + context.data.gateway.name || context.data.gateway.ipInfo.name; + +

{{ 'DNS' | i18n }}

+

+ {{ 'In your domain registrar for' | i18n }} {{ domain }}, + {{ 'create this DNS record' | i18n }} +

+ + @if (context.data.gateway.ipInfo.deviceType !== 'wireguard') { + + } + + + + + + + + + +
+ @if (dnsLoading()) { + + } @else if (dnsPass() === true) { + + } @else if (dnsPass() === false) { + + } @else { + + } + {{ ddns ? 'ALIAS' : 'A' }}*{{ ddns ? '[DDNS Address]' : wanIp }} + +
+ +

{{ 'Port Forwarding' | i18n }}

+

+ {{ 'In your gateway' | i18n }} "{{ gatewayName }}", + {{ 'create this port forwarding rule' | i18n }} +

+ + @let portRes = portResult(); + + + + + + + + +
+ + {{ context.data.port }}{{ context.data.port }} + +
+ + + + @if (!isManualMode) { +
+ + +
+ } + `, + styles: ` + label { + display: flex; + gap: 0.75rem; + align-items: center; + margin: 1rem 0; + } + + h2 { + margin: 2rem 0 0 0; + } + + p { + margin-top: 0.5rem; + } + + tui-icon { + font-size: 1.3rem; + vertical-align: text-bottom; + } + + .status { + width: 3.2rem; + } + + .padding-top { + padding-top: 2rem; + } + + td:last-child { + text-align: end; + } + + footer { + margin-top: 1.5rem; + } + + :host-context(tui-root._mobile) table { + thead { + display: table-header-group !important; + } + + tr { + display: table-row !important; + box-shadow: none !important; + } + + td, + th { + padding: 0.5rem 0.5rem !important; + font: var(--tui-font-text-s) !important; + color: var(--tui-text-primary) !important; + font-weight: normal !important; + } + + th { + font-weight: bold !important; + } + } + `, + providers: [ + tuiSwitchOptionsProvider({ + appearance: () => 'glass', + icon: () => '', + }), + ], + changeDetection: ChangeDetectionStrategy.OnPush, + imports: [ + TuiButton, + i18nPipe, + TableComponent, + TuiSwitch, + FormsModule, + TuiButtonLoading, + TuiIcon, + TuiLoader, + PortCheckIconComponent, + PortCheckWarningsComponent, + ], +}) +export class DomainValidationComponent { + private readonly errorService = inject(ErrorService) + private readonly api = inject(ApiService) + + readonly ddns = false + + readonly context = + injectContext>() + + readonly domain = + parse(this.context.data.fqdn).domain || this.context.data.fqdn + + readonly dnsLoading = signal(false) + readonly portLoading = signal(false) + readonly dnsPass = signal(undefined) + readonly portResult = signal(undefined) + + readonly allPass = computed(() => { + const result = this.portResult() + return ( + this.dnsPass() === true && + !!result?.openInternally && + !!result?.openExternally + ) + }) + + readonly isManualMode = !this.context.data.initialResults + + constructor() { + const initial = this.context.data.initialResults + if (initial) { + this.dnsPass.set(initial.dnsPass) + if 
(initial.portResult) this.portResult.set(initial.portResult) + } + } + + async testDns() { + this.dnsLoading.set(true) + + try { + const ip = await this.api.queryDns({ + fqdn: this.context.data.fqdn, + }) + + this.dnsPass.set(ip === this.context.data.gateway.ipInfo.wanIp) + } catch (e: any) { + this.errorService.handleError(e) + } finally { + this.dnsLoading.set(false) + } + } + + async testPort() { + this.portLoading.set(true) + + try { + const result = await this.api.checkPort({ + gateway: this.context.data.gateway.id, + port: this.context.data.port, + }) + + this.portResult.set(result) + } catch (e: any) { + this.errorService.handleError(e) + } finally { + this.portLoading.set(false) + } + } +} + +export const DOMAIN_VALIDATION = new PolymorpheusComponent( + DomainValidationComponent, +) diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/domain-health.service.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/domain-health.service.ts new file mode 100644 index 000000000..6d8713da7 --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/domain-health.service.ts @@ -0,0 +1,190 @@ +import { inject, Injectable } from '@angular/core' +import { DialogService, ErrorService } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' +import { PatchDB } from 'patch-db-client' +import { firstValueFrom } from 'rxjs' +import { ApiService } from 'src/app/services/api/embassy-api.service' +import { DataModel } from 'src/app/services/patch-db/data-model' +import { DOMAIN_VALIDATION, DnsGateway } from './dns.component' +import { PORT_FORWARD_VALIDATION } from './port-forward.component' +import { PRIVATE_DNS_VALIDATION } from './private-dns.component' + +@Injectable({ providedIn: 'root' }) +export class DomainHealthService { + private readonly patch = inject>(PatchDB) + private readonly dialog = inject(DialogService) + private readonly api = inject(ApiService) + private readonly 
errorService = inject(ErrorService) + + async checkPublicDomain( + fqdn: string, + gatewayId: string, + port: number, + ): Promise { + try { + const gateway = await this.getGatewayData(gatewayId) + if (!gateway) return + + const [dnsPass, portResult] = await Promise.all([ + this.api + .queryDns({ fqdn }) + .then(ip => ip === gateway.ipInfo.wanIp) + .catch(() => false), + this.api + .checkPort({ gateway: gatewayId, port }) + .catch((): null => null), + ]) + + const portOk = + !!portResult?.openInternally && + !!portResult?.openExternally && + !!portResult?.hairpinning + + if (!dnsPass || !portOk) { + setTimeout( + () => + this.openPublicDomainModal(fqdn, gateway, port, { + dnsPass, + portResult, + }), + 250, + ) + } + } catch (e: any) { + this.errorService.handleError(e) + } + } + + async checkPrivateDomain(gatewayId: string): Promise { + try { + const gateway = await this.getGatewayData(gatewayId) + if (!gateway) return + + const configured = await this.api + .checkDns({ gateway: gatewayId }) + .catch(() => false) + + if (!configured) { + setTimeout( + () => this.openPrivateDomainModal(gateway, { configured }), + 250, + ) + } + } catch (e: any) { + this.errorService.handleError(e) + } + } + + async showPublicDomainSetup( + fqdn: string, + gatewayId: string, + port: number, + ): Promise { + try { + const gateway = await this.getGatewayData(gatewayId) + if (!gateway) return + + this.openPublicDomainModal(fqdn, gateway, port) + } catch (e: any) { + this.errorService.handleError(e) + } + } + + async checkPortForward(gatewayId: string, port: number): Promise { + try { + const gateway = await this.getGatewayData(gatewayId) + if (!gateway) return + + const portResult = await this.api + .checkPort({ gateway: gatewayId, port }) + .catch((): null => null) + + const portOk = + !!portResult?.openInternally && + !!portResult?.openExternally && + !!portResult?.hairpinning + + if (!portOk) { + setTimeout( + () => this.openPortForwardModal(gateway, port, { portResult }), + 250, + 
) + } + } catch (e: any) { + this.errorService.handleError(e) + } + } + + async showPortForwardSetup(gatewayId: string, port: number): Promise { + try { + const gateway = await this.getGatewayData(gatewayId) + if (!gateway) return + + this.openPortForwardModal(gateway, port) + } catch (e: any) { + this.errorService.handleError(e) + } + } + + async showPrivateDomainSetup(gatewayId: string): Promise { + try { + const gateway = await this.getGatewayData(gatewayId) + if (!gateway) return + + this.openPrivateDomainModal(gateway) + } catch (e: any) { + this.errorService.handleError(e) + } + } + + private async getGatewayData(gatewayId: string): Promise { + const network = await firstValueFrom( + this.patch.watch$('serverInfo', 'network'), + ) + const gateway = network.gateways[gatewayId] + if (!gateway?.ipInfo) return null + return { id: gatewayId, ...gateway, ipInfo: gateway.ipInfo } + } + + private openPublicDomainModal( + fqdn: string, + gateway: DnsGateway, + port: number, + initialResults?: { dnsPass: boolean; portResult: T.CheckPortRes | null }, + ) { + this.dialog + .openComponent(DOMAIN_VALIDATION, { + label: 'Address Requirements', + size: 'm', + data: { fqdn, gateway, port, initialResults }, + }) + .subscribe() + } + + private openPortForwardModal( + gateway: DnsGateway, + port: number, + initialResults?: { portResult: T.CheckPortRes | null }, + ) { + this.dialog + .openComponent(PORT_FORWARD_VALIDATION, { + label: 'Address Requirements', + size: 'm', + data: { gateway, port, initialResults }, + }) + .subscribe() + } + + private openPrivateDomainModal( + gateway: DnsGateway, + initialResults?: { configured: boolean }, + ) { + this.dialog + .openComponent(PRIVATE_DNS_VALIDATION, { + label: 'Address Requirements', + size: 'm', + data: { gateway, initialResults }, + }) + .subscribe() + } +} diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/item.component.ts 
b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/item.component.ts index c212c26b9..689a709a4 100644 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/item.component.ts +++ b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/item.component.ts @@ -6,121 +6,127 @@ import { input, signal, } from '@angular/core' -import { DialogService, i18nKey, i18nPipe } from '@start9labs/shared' +import { ErrorService, i18nPipe, LoadingService } from '@start9labs/shared' import { TuiObfuscatePipe } from '@taiga-ui/cdk' import { TuiButton, TuiIcon } from '@taiga-ui/core' -import { TuiBadge } from '@taiga-ui/kit' -import { DisplayAddress } from '../interface.service' +import { FormsModule } from '@angular/forms' +import { TuiSwitch } from '@taiga-ui/kit' +import { ApiService } from 'src/app/services/api/embassy-api.service' +import { GatewayAddress, MappedServiceInterface } from '../interface.service' import { AddressActionsComponent } from './actions.component' +import { DomainHealthService } from './domain-health.service' @Component({ selector: 'tr[address]', + host: { + '[class._disabled]': '!address().enabled', + }, template: ` @if (address(); as address) { - -
- -
- - -
{{ address.type }}
- - -
- @if (address.access === 'public') { - - {{ 'public' | i18n }} - - } @else if (address.access === 'private') { - - {{ 'private' | i18n }} - - } @else { - - + {{ address.url | tuiObfuscate: recipe() }} + + @if (address.masked) { + }
- -
- {{ address.gatewayName || '-' }} -
- - -
- {{ address.url | tuiObfuscate: recipe() }} -
- } `, styles: ` :host { - white-space: nowrap; grid-template-columns: fit-content(10rem) 1fr 2rem 2rem; - - td:last-child { - padding-inline-start: 0; - } } - .info { - background: var(--tui-status-info); - - &::after { - mask-size: 1.5rem; - } + .type tui-icon { + font-size: 1.3rem; + margin-right: 0.7rem; + vertical-align: middle; } - :host-context(.uncommon-hidden) { - .wrapper { - height: 0; - visibility: hidden; - } + .url { + display: flex; + align-items: center; + gap: 0.25rem; - td, - & { - padding-block: 0 !important; - border: hidden; + span { + white-space: normal; + word-break: break-all; + display: -webkit-box; + -webkit-box-orient: vertical; + -webkit-line-clamp: 1; + overflow: hidden; } } - div { - white-space: normal; - word-break: break-all; - display: -webkit-box; - -webkit-box-orient: vertical; - -webkit-line-clamp: 1; - overflow: hidden; - } - :host-context(tui-root._mobile) { + padding-inline-start: 0.75rem !important; + + &::before { + content: ''; + position: absolute; + inset-inline-start: 0; + top: 0.25rem; + bottom: 0.25rem; + width: 4px; + background: var(--tui-status-positive); + border-radius: 2px; + } + + &._disabled::before { + background: var(--tui-background-neutral-1-hover); + } + td { width: auto !important; align-content: center; } td:first-child { - grid-area: 1 / 3 / 4 / 3; + display: none; } td:nth-child(2) { @@ -129,37 +135,104 @@ import { AddressActionsComponent } from './actions.component' color: var(--tui-text-primary); padding-inline-end: 0.5rem; } + + td:nth-child(3) { + grid-area: 2 / 1 / 2 / 3; + } + + td:nth-child(4) { + grid-area: 3 / 1 / 3 / 3; + } + + td:last-child { + grid-area: 1 / 3 / 4 / 5; + align-self: center; + justify-self: end; + } } `, imports: [ i18nPipe, AddressActionsComponent, - TuiBadge, - TuiObfuscatePipe, TuiButton, TuiIcon, + TuiObfuscatePipe, + TuiSwitch, + FormsModule, ], changeDetection: ChangeDetectionStrategy.OnPush, }) export class InterfaceAddressItemComponent { - private readonly dialogs 
= inject(DialogService) + private readonly api = inject(ApiService) + private readonly errorService = inject(ErrorService) + private readonly loader = inject(LoadingService) + private readonly domainHealth = inject(DomainHealthService) - readonly address = input.required() + readonly address = input.required() + readonly packageId = input('') + readonly value = input() readonly isRunning = input.required() + readonly gatewayId = input('') + readonly toggling = signal(false) readonly currentlyMasked = signal(true) readonly recipe = computed(() => this.address()?.masked && this.currentlyMasked() ? 'mask' : 'none', ) - viewDetails() { - this.dialogs - .openAlert( - `
    ${this.address() - .bullets.map(b => `
  • ${b}
  • `) - .join('')}
` as i18nKey, - { label: 'About this address' as i18nKey }, - ) - .subscribe() + async onToggleEnabled() { + const addr = this.address() + const iface = this.value() + if (!iface) return + + this.toggling.set(true) + const enabled = !addr.enabled + const addressJson = JSON.stringify(addr.hostnameInfo) + const loader = this.loader.open('Saving').subscribe() + + try { + if (this.packageId()) { + await this.api.pkgBindingSetAddressEnabled({ + internalPort: iface.addressInfo.internalPort, + address: addressJson, + enabled, + package: this.packageId(), + host: iface.addressInfo.hostId, + }) + } else { + await this.api.serverBindingSetAddressEnabled({ + internalPort: 80, + address: addressJson, + enabled, + }) + } + + if (enabled) { + const kind = addr.hostnameInfo.metadata.kind + if (kind === 'public-domain' && addr.hostnameInfo.port !== null) { + await this.domainHealth.checkPublicDomain( + addr.hostnameInfo.hostname, + this.gatewayId(), + addr.hostnameInfo.port, + ) + } else if (kind === 'private-domain') { + await this.domainHealth.checkPrivateDomain(this.gatewayId()) + } else if ( + kind === 'ipv4' && + addr.access === 'public' && + addr.hostnameInfo.port !== null + ) { + await this.domainHealth.checkPortForward( + this.gatewayId(), + addr.hostnameInfo.port, + ) + } + } + } catch (e: any) { + this.errorService.handleError(e) + } finally { + loader.unsubscribe() + this.toggling.set(false) + } } } diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/plugin.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/plugin.component.ts new file mode 100644 index 000000000..bd60a9a59 --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/plugin.component.ts @@ -0,0 +1,361 @@ +import { + ChangeDetectionStrategy, + Component, + inject, + input, + signal, +} from '@angular/core' +import { CopyService, DialogService, i18nPipe } from '@start9labs/shared' +import { T } from 
'@start9labs/start-sdk' +import { TUI_IS_MOBILE } from '@taiga-ui/cdk' +import { + TuiButton, + TuiDataList, + TuiDropdown, + TuiTextfield, +} from '@taiga-ui/core' +import { PolymorpheusComponent } from '@taiga-ui/polymorpheus' +import { PlaceholderComponent } from 'src/app/routes/portal/components/placeholder.component' +import { TableComponent } from 'src/app/routes/portal/components/table.component' +import { QRModal } from 'src/app/routes/portal/modals/qr.component' +import { ActionService } from 'src/app/services/action.service' +import { + MappedServiceInterface, + PluginAddress, + PluginAddressGroup, +} from '../interface.service' + +@Component({ + selector: 'section[pluginGroup]', + template: ` +
+ @if (pluginGroup().pluginPkgInfo; as pkgInfo) { + + } + {{ pluginGroup().pluginName }} + @if (pluginGroup().tableAction; as action) { + + } +
+ + @for (address of pluginGroup().addresses; track $index; let i = $index) { + + + + + + } @empty { + + + + } +
{{ address.hostnameInfo.ssl ? 'HTTPS' : 'HTTP' }} + {{ address.url }} + +
+ @if (address.hostnameInfo.metadata.kind === 'plugin') { + @if (address.hostnameInfo.metadata.removeAction) { + @if ( + pluginGroup().pluginActions[ + address.hostnameInfo.metadata.removeAction + ]; + as meta + ) { + + } + } + } + + + @if (address.hostnameInfo.metadata.kind === 'plugin') { + @if (address.hostnameInfo.metadata.overflowActions.length) { + + } + } + + + } + } +
+
+ + } + } + } + + + @if (address.hostnameInfo.metadata.kind === 'plugin') { + @for ( + actionId of address.hostnameInfo.metadata.overflowActions; + track actionId + ) { + @if (pluginGroup().pluginActions[actionId]; as meta) { + + } + } + } + + +
+
+ + {{ 'No addresses' | i18n }} + +
+ `, + styles: ` + .plugin-icon { + height: 1.25rem; + margin-right: 0.25rem; + border-radius: 100%; + } + + :host ::ng-deep { + th:first-child { + width: 5rem; + } + } + + .desktop { + display: flex; + white-space: nowrap; + } + + .url { + white-space: normal; + word-break: break-all; + display: -webkit-box; + -webkit-box-orient: vertical; + -webkit-line-clamp: 1; + overflow: hidden; + } + + .mobile { + display: none; + } + + :host-context(tui-root._mobile) { + .desktop { + display: none; + } + + .mobile { + display: block; + } + + tr { + grid-template-columns: 1fr auto; + } + + td { + width: auto !important; + align-content: center; + } + } + `, + host: { class: 'g-card' }, + imports: [ + TuiButton, + TuiDropdown, + TuiDataList, + TuiTextfield, + TableComponent, + PlaceholderComponent, + i18nPipe, + ], + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class PluginAddressesComponent { + private readonly isMobile = inject(TUI_IS_MOBILE) + private readonly dialog = inject(DialogService) + private readonly actionService = inject(ActionService) + readonly copyService = inject(CopyService) + readonly open = signal(false) + readonly overflowOpen = signal(null) + + readonly pluginGroup = input.required() + readonly packageId = input('') + readonly value = input() + + showQR(url: string) { + this.dialog + .openComponent(new PolymorpheusComponent(QRModal), { + size: 'auto', + closeable: this.isMobile, + data: url, + }) + .subscribe() + } + + runTableAction() { + const group = this.pluginGroup() + if (!group.tableAction || !group.pluginPkgInfo) return + + const iface = this.value() + if (!iface) return + + const { addressInfo } = iface + + this.actionService.present({ + pkgInfo: group.pluginPkgInfo, + actionInfo: group.tableAction, + prefill: { + urlPluginMetadata: { + packageId: this.packageId() || null, + hostId: addressInfo.hostId, + interfaceId: iface.id, + internalPort: addressInfo.internalPort, + }, + }, + }) + } + + runRowAction( + actionId: string, + 
metadata: T.ActionMetadata, + address: PluginAddress, + ) { + const group = this.pluginGroup() + if (!group.pluginPkgInfo) return + + const iface = this.value() + if (!iface) return + + const { hostnameInfo } = address + const { addressInfo } = iface + const hostMeta = hostnameInfo.metadata + + if (hostMeta.kind !== 'plugin') return + + this.actionService.present({ + pkgInfo: group.pluginPkgInfo, + actionInfo: { id: actionId, metadata }, + prefill: { + urlPluginMetadata: { + packageId: this.packageId() || null, + hostId: addressInfo.hostId, + interfaceId: iface.id, + internalPort: addressInfo.internalPort, + hostname: hostnameInfo.hostname, + port: hostnameInfo.port, + ssl: hostnameInfo.ssl, + public: hostnameInfo.public, + info: hostMeta.info, + }, + }, + }) + } +} diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/port-forward.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/port-forward.component.ts new file mode 100644 index 000000000..906da0f67 --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/port-forward.component.ts @@ -0,0 +1,187 @@ +import { + ChangeDetectionStrategy, + Component, + computed, + inject, + signal, +} from '@angular/core' +import { ErrorService, i18nPipe } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' +import { TuiButton, TuiDialogContext } from '@taiga-ui/core' +import { TuiButtonLoading } from '@taiga-ui/kit' +import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' +import { PortCheckIconComponent } from 'src/app/routes/portal/components/port-check-icon.component' +import { PortCheckWarningsComponent } from 'src/app/routes/portal/components/port-check-warnings.component' +import { TableComponent } from 'src/app/routes/portal/components/table.component' +import { ApiService } from 'src/app/services/api/embassy-api.service' +import { DnsGateway } from './dns.component' + +export type 
PortForwardValidationData = { + gateway: DnsGateway + port: number + initialResults?: { portResult: T.CheckPortRes | null } +} + +@Component({ + selector: 'port-forward-validation', + template: ` + @let gatewayName = + context.data.gateway.name || context.data.gateway.ipInfo.name; + +

{{ 'Port Forwarding' | i18n }}

+

+ {{ 'In your gateway' | i18n }} "{{ gatewayName }}", + {{ 'create this port forwarding rule' | i18n }} +

+ + @let portRes = portResult(); + + + + + + + + +
+ + {{ context.data.port }}{{ context.data.port }} + +
+ + + + @if (!isManualMode) { +
+ + +
+ } + `, + styles: ` + h2 { + margin: 2rem 0 0 0; + } + + p { + margin-top: 0.5rem; + } + + tui-icon { + font-size: 1.3rem; + vertical-align: text-bottom; + } + + .status { + width: 3.2rem; + } + + .padding-top { + padding-top: 2rem; + } + + td:last-child { + text-align: end; + } + + footer { + margin-top: 1.5rem; + } + + :host-context(tui-root._mobile) table { + thead { + display: table-header-group !important; + } + + tr { + display: table-row !important; + box-shadow: none !important; + } + + td, + th { + padding: 0.5rem 0.5rem !important; + font: var(--tui-font-text-s) !important; + color: var(--tui-text-primary) !important; + font-weight: normal !important; + } + + th { + font-weight: bold !important; + } + } + `, + changeDetection: ChangeDetectionStrategy.OnPush, + imports: [ + TuiButton, + i18nPipe, + TableComponent, + TuiButtonLoading, + PortCheckIconComponent, + PortCheckWarningsComponent, + ], +}) +export class PortForwardValidationComponent { + private readonly errorService = inject(ErrorService) + private readonly api = inject(ApiService) + + readonly context = + injectContext>() + + readonly loading = signal(false) + readonly portResult = signal(undefined) + + readonly portOk = computed(() => { + const result = this.portResult() + return ( + !!result?.openInternally && + !!result?.openExternally && + !!result?.hairpinning + ) + }) + + readonly isManualMode = !this.context.data.initialResults + + constructor() { + const initial = this.context.data.initialResults + if (initial) { + if (initial.portResult) this.portResult.set(initial.portResult) + } + } + + async testPort() { + this.loading.set(true) + + try { + const result = await this.api.checkPort({ + gateway: this.context.data.gateway.id, + port: this.context.data.port, + }) + + this.portResult.set(result) + } catch (e: any) { + this.errorService.handleError(e) + } finally { + this.loading.set(false) + } + } +} + +export const PORT_FORWARD_VALIDATION = new PolymorpheusComponent( + 
PortForwardValidationComponent, +) diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/private-dns.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/private-dns.component.ts new file mode 100644 index 000000000..dd246d442 --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/components/interfaces/addresses/private-dns.component.ts @@ -0,0 +1,180 @@ +import { + ChangeDetectionStrategy, + Component, + inject, + signal, +} from '@angular/core' +import { ErrorService, i18nPipe } from '@start9labs/shared' +import { TuiButton, TuiDialogContext, TuiIcon, TuiLoader } from '@taiga-ui/core' +import { TuiButtonLoading } from '@taiga-ui/kit' +import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' +import { TableComponent } from 'src/app/routes/portal/components/table.component' +import { ApiService } from 'src/app/services/api/embassy-api.service' +import { DnsGateway } from './dns.component' + +export type PrivateDnsValidationData = { + gateway: DnsGateway + initialResults?: { configured: boolean } +} + +@Component({ + selector: 'private-dns-validation', + template: ` + @let gatewayName = + context.data.gateway.name || context.data.gateway.ipInfo.name; + @let internalIp = context.data.gateway.ipInfo.lanIp[0] || ('Error' | i18n); + +

{{ 'DNS Server Config' | i18n }}

+

+ {{ 'Gateway' | i18n }} "{{ gatewayName }}" + {{ 'must be configured to use' | i18n }} + {{ internalIp }} + ({{ 'the LAN IP address of this server' | i18n }}) + {{ 'as its DNS server' | i18n }}. +

+ + + + + + + + +
+ @if (loading()) { + + } @else if (pass() === true) { + + } @else if (pass() === false) { + + } @else { + + } + {{ gatewayName }}{{ internalIp }} + +
+ + @if (!isManualMode) { +
+ + +
+ } + `, + styles: ` + h2 { + margin: 2rem 0 0 0; + } + + p { + margin-top: 0.5rem; + } + + tui-icon { + font-size: 1rem; + vertical-align: text-bottom; + } + + .status { + width: 3.2rem; + } + + .padding-top { + padding-top: 2rem; + } + + td:last-child { + text-align: end; + } + + footer { + margin-top: 1.5rem; + } + + :host-context(tui-root._mobile) table { + thead { + display: table-header-group !important; + } + + tr { + display: table-row !important; + box-shadow: none !important; + } + + td, + th { + padding: 0.5rem 0.5rem !important; + font: var(--tui-font-text-s) !important; + color: var(--tui-text-primary) !important; + font-weight: normal !important; + } + + th { + font-weight: bold !important; + } + } + `, + changeDetection: ChangeDetectionStrategy.OnPush, + imports: [ + TuiButton, + i18nPipe, + TableComponent, + TuiButtonLoading, + TuiIcon, + TuiLoader, + ], +}) +export class PrivateDnsValidationComponent { + private readonly errorService = inject(ErrorService) + private readonly api = inject(ApiService) + + readonly context = + injectContext>() + + readonly loading = signal(false) + readonly pass = signal(undefined) + + readonly isManualMode = !this.context.data.initialResults + + constructor() { + const initial = this.context.data.initialResults + if (initial) { + this.pass.set(initial.configured) + } + } + + async testDns() { + this.loading.set(true) + + try { + const result = await this.api.checkDns({ + gateway: this.context.data.gateway.id, + }) + + this.pass.set(result) + } catch (e: any) { + this.errorService.handleError(e) + } finally { + this.loading.set(false) + } + } +} + +export const PRIVATE_DNS_VALIDATION = new PolymorpheusComponent( + PrivateDnsValidationComponent, +) diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/gateways.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/gateways.component.ts deleted file mode 100644 index 077fa87bd..000000000 --- 
a/web/projects/ui/src/app/routes/portal/components/interfaces/gateways.component.ts +++ /dev/null @@ -1,114 +0,0 @@ -import { CommonModule } from '@angular/common' -import { - ChangeDetectionStrategy, - Component, - input, - inject, -} from '@angular/core' -import { TuiIcon, TuiTitle } from '@taiga-ui/core' -import { TuiSkeleton, TuiSwitch, TuiTooltip } from '@taiga-ui/kit' -import { FormsModule } from '@angular/forms' -import { i18nPipe, LoadingService, ErrorService } from '@start9labs/shared' -import { TuiCell } from '@taiga-ui/layout' -import { InterfaceGateway } from './interface.service' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { InterfaceComponent } from './interface.component' - -@Component({ - selector: 'section[gateways]', - template: ` -
{{ 'Gateways' | i18n }}
- @for (gateway of gateways(); track $index) { - - } @empty { - @for (_ of [0, 1]; track $index) { - - } - } - `, - styles: ` - :host { - grid-column: span 3; - } - - [tuiCell]:has([tuiTooltip]) { - background: none !important; - } - - :host-context(tui-root:not(._mobile)) { - &:has(+ section table) header { - background: transparent; - } - } - `, - host: { class: 'g-card' }, - changeDetection: ChangeDetectionStrategy.OnPush, - imports: [ - CommonModule, - FormsModule, - TuiSwitch, - i18nPipe, - TuiCell, - TuiTitle, - TuiSkeleton, - TuiIcon, - TuiTooltip, - ], -}) -export class InterfaceGatewaysComponent { - private readonly loader = inject(LoadingService) - private readonly errorService = inject(ErrorService) - private readonly api = inject(ApiService) - readonly interface = inject(InterfaceComponent) - - readonly gateways = input.required() - - async onToggle(gateway: InterfaceGateway) { - const addressInfo = this.interface.value()!.addressInfo - const pkgId = this.interface.packageId() - - const loader = this.loader.open().subscribe() - - try { - if (pkgId) { - await this.api.pkgBindingToggleGateway({ - gateway: gateway.id, - enabled: !gateway.enabled, - internalPort: addressInfo.internalPort, - host: addressInfo.hostId, - package: pkgId, - }) - } else { - await this.api.serverBindingToggleGateway({ - gateway: gateway.id, - enabled: !gateway.enabled, - internalPort: 80, - }) - } - } catch (e: any) { - this.errorService.handleError(e) - } finally { - loader.unsubscribe() - } - } -} diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/interface.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/interface.component.ts index a1deaf339..8e50d45ac 100644 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/interface.component.ts +++ b/web/projects/ui/src/app/routes/portal/components/interfaces/interface.component.ts @@ -1,26 +1,27 @@ import { ChangeDetectionStrategy, Component, input } from '@angular/core' 
import { tuiButtonOptionsProvider } from '@taiga-ui/core' import { MappedServiceInterface } from './interface.service' -import { InterfaceGatewaysComponent } from './gateways.component' -import { InterfaceTorDomainsComponent } from './tor-domains.component' -import { PublicDomainsComponent } from './public-domains/pd.component' -import { InterfacePrivateDomainsComponent } from './private-domains.component' import { InterfaceAddressesComponent } from './addresses/addresses.component' +import { PluginAddressesComponent } from './addresses/plugin.component' @Component({ selector: 'service-interface', template: ` -
-
+ @for (group of value()?.gatewayGroups; track group.gatewayId) {
-
-
-
-
-
+ } + @for (group of value()?.pluginGroups; track group.pluginId) { +
+ } `, styles: ` :host { @@ -30,33 +31,16 @@ import { InterfaceAddressesComponent } from './addresses/addresses.component' color: var(--tui-text-secondary); font: var(--tui-font-text-l); - div { - display: grid; - grid-template-columns: repeat(10, 1fr); - gap: inherit; - flex-direction: column; - } - ::ng-deep [tuiSkeleton] { width: 100%; height: 1rem; border-radius: var(--tui-radius-s); } } - - :host-context(tui-root._mobile) div { - display: flex; - } `, changeDetection: ChangeDetectionStrategy.OnPush, providers: [tuiButtonOptionsProvider({ size: 'xs' })], - imports: [ - InterfaceGatewaysComponent, - InterfaceTorDomainsComponent, - PublicDomainsComponent, - InterfacePrivateDomainsComponent, - InterfaceAddressesComponent, - ], + imports: [InterfaceAddressesComponent, PluginAddressesComponent], }) export class InterfaceComponent { readonly packageId = input('') diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/interface.service.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/interface.service.ts index 65e3055bb..4b36e3a28 100644 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/interface.service.ts +++ b/web/projects/ui/src/app/routes/portal/components/interfaces/interface.service.ts @@ -2,113 +2,91 @@ import { inject, Injectable } from '@angular/core' import { T, utils } from '@start9labs/start-sdk' import { ConfigService } from 'src/app/services/config.service' import { GatewayPlus } from 'src/app/services/gateway.service' -import { PublicDomain } from './public-domains/pd.service' -import { i18nKey, i18nPipe } from '@start9labs/shared' +import { + PrimaryStatus, + renderPkgStatus, +} from 'src/app/services/pkg-status-rendering.service' +import { toAuthorityName } from 'src/app/utils/acme' +import { getManifest } from 'src/app/utils/get-package-data' -type AddressWithInfo = { - url: string - info: T.HostnameInfo - gateway?: GatewayPlus - showSsl: boolean - masked: boolean - ui: boolean +function 
isPublicIp(h: T.HostnameInfo): boolean { + return h.public && (h.metadata.kind === 'ipv4' || h.metadata.kind === 'ipv6') } -function cmpWithRankedPredicates( - a: T, - b: T, - preds: ((x: T) => boolean)[], -): -1 | 0 | 1 { - for (const pred of preds) { - for (let [x, y, sign] of [[a, b, 1] as const, [b, a, -1] as const]) { - if (pred(y) && !pred(x)) return sign - } - } - return 0 +export function isLanIp(h: T.HostnameInfo): boolean { + return !h.public && (h.metadata.kind === 'ipv4' || h.metadata.kind === 'ipv6') } -type TorAddress = AddressWithInfo & { info: { kind: 'onion' } } -function filterTor(a: AddressWithInfo): a is TorAddress { - return a.info.kind === 'onion' -} -function cmpTor(a: TorAddress, b: TorAddress): -1 | 0 | 1 { - return cmpWithRankedPredicates(a, b, [x => !x.showSsl]) -} - -type LanAddress = AddressWithInfo & { info: { kind: 'ip'; public: false } } -function filterLan(a: AddressWithInfo): a is LanAddress { - return a.info.kind === 'ip' && !a.info.public -} -function cmpLan(host: T.Host, a: LanAddress, b: LanAddress): -1 | 0 | 1 { - return cmpWithRankedPredicates(a, b, [ - x => - x.info.hostname.kind === 'domain' && - !!host.privateDomains.find(d => d === x.info.hostname.value), // private domain - x => x.info.hostname.kind === 'local', // .local - x => x.info.hostname.kind === 'ipv4', // ipv4 - x => x.info.hostname.kind === 'ipv6', // ipv6 - // remainder: public domains accessible privately - ]) -} - -type VpnAddress = AddressWithInfo & { - info: { - kind: 'ip' - public: false - hostname: { kind: 'ipv4' | 'ipv6' | 'domain' } +function isEnabled(addr: T.DerivedAddressInfo, h: T.HostnameInfo): boolean { + if (isPublicIp(h)) { + if (h.port === null) return true + const sa = + h.metadata.kind === 'ipv6' + ? `[${h.hostname}]:${h.port}` + : `${h.hostname}:${h.port}` + return addr.enabled.includes(sa) + } else { + return !addr.disabled.some( + ([hostname, port]) => hostname === h.hostname && port === (h.port ?? 
0), + ) } } -function filterVpn(a: AddressWithInfo): a is VpnAddress { - return ( - a.info.kind === 'ip' && !a.info.public && a.info.hostname.kind !== 'local' - ) -} -function cmpVpn(host: T.Host, a: VpnAddress, b: VpnAddress): -1 | 0 | 1 { - return cmpWithRankedPredicates(a, b, [ - x => - x.info.hostname.kind === 'domain' && - !!host.privateDomains.find(d => d === x.info.hostname.value), // private domain - x => x.info.hostname.kind === 'ipv4', // ipv4 - x => x.info.hostname.kind === 'ipv6', // ipv6 - // remainder: public domains accessible privately - ]) -} -type ClearnetAddress = AddressWithInfo & { - info: { - kind: 'ip' - public: true - hostname: { kind: 'ipv4' | 'ipv6' | 'domain' } +function getGatewayIds(h: T.HostnameInfo): string[] { + switch (h.metadata.kind) { + case 'ipv4': + case 'ipv6': + case 'public-domain': + return [h.metadata.gateway] + case 'mdns': + case 'private-domain': + return h.metadata.gateways + case 'plugin': + return [] } } -function filterClearnet(a: AddressWithInfo): a is ClearnetAddress { - return a.info.kind === 'ip' && a.info.public -} -function cmpClearnet( + +function getCertificate( + h: T.HostnameInfo, host: T.Host, - a: ClearnetAddress, - b: ClearnetAddress, -): -1 | 0 | 1 { - return cmpWithRankedPredicates(a, b, [ - x => - x.info.hostname.kind === 'domain' && - x.info.gateway.id === host.publicDomains[x.info.hostname.value]?.gateway, // public domain for this gateway - x => x.gateway?.public ?? false, // public gateway - x => x.info.hostname.kind === 'ipv4', // ipv4 - x => x.info.hostname.kind === 'ipv6', // ipv6 - // remainder: private domains / domains public on other gateways - ]) + addSsl: T.AddSslOptions | null, + secure: T.Security | null, +): string { + if (!h.ssl) return '-' + + if (h.metadata.kind === 'public-domain') { + const config = host.publicDomains[h.hostname] + return config ? 
toAuthorityName(config.acme) : toAuthorityName(null) + } + + if (addSsl) return toAuthorityName(null) + if (secure?.ssl) return 'Self signed' + + return '-' } -export function getPublicDomains( - publicDomains: Record, - gateways: GatewayPlus[], -): PublicDomain[] { - return Object.entries(publicDomains).map(([fqdn, info]) => ({ - fqdn, - acme: info.acme, - gateway: gateways.find(g => g.id === info.gateway) || null, - })) +function sortDomainsFirst(a: GatewayAddress, b: GatewayAddress): number { + const isDomain = (addr: GatewayAddress) => + addr.hostnameInfo.metadata.kind === 'public-domain' || + (addr.hostnameInfo.metadata.kind === 'private-domain' && + !addr.hostnameInfo.hostname.endsWith('.local')) + return Number(isDomain(b)) - Number(isDomain(a)) +} + +function getAddressType(h: T.HostnameInfo): string { + switch (h.metadata.kind) { + case 'ipv4': + return 'IPv4' + case 'ipv6': + return 'IPv6' + case 'public-domain': + case 'private-domain': + return h.hostname + case 'mdns': + return 'mDNS' + case 'plugin': + return 'Plugin' + } } @Injectable({ @@ -116,95 +94,144 @@ export function getPublicDomains( }) export class InterfaceService { private readonly config = inject(ConfigService) - private readonly i18n = inject(i18nPipe) - getAddresses( + getGatewayGroups( serviceInterface: T.ServiceInterface, host: T.Host, gateways: GatewayPlus[], - ): MappedServiceInterface['addresses'] { - const hostnamesInfos = this.hostnameInfo(serviceInterface, host) - - const addresses = { - common: [], - uncommon: [], - } - - if (!hostnamesInfos.length) return addresses + ): GatewayAddressGroup[] { + const binding = host.bindings[serviceInterface.addressInfo.internalPort] + if (!binding) return [] + const addr = binding.addresses const masked = serviceInterface.masked const ui = serviceInterface.type === 'ui' + const { addSsl, secure } = binding.options - const allAddressesWithInfo: AddressWithInfo[] = hostnamesInfos.flatMap( - h => { - const { url, sslUrl } = 
utils.addressHostToUrl( - serviceInterface.addressInfo, - h, - ) - const info = h - const gateway = - h.kind === 'ip' - ? gateways.find(g => h.gateway.id === g.id) - : undefined - const res = [] - if (url) { - res.push({ - url, - info, - gateway, - showSsl: false, - masked, - ui, - }) - } - if (sslUrl) { - res.push({ - url: sslUrl, - info, - gateway, - showSsl: !!url, - masked, - ui, - }) - } - return res - }, - ) + const groupMap = new Map() + const gatewayMap = new Map() - const torAddrs = allAddressesWithInfo.filter(filterTor).sort(cmpTor) - const lanAddrs = allAddressesWithInfo - .filter(filterLan) - .sort((a, b) => cmpLan(host, a, b)) - const vpnAddrs = allAddressesWithInfo - .filter(filterVpn) - .sort((a, b) => cmpVpn(host, a, b)) - const clearnetAddrs = allAddressesWithInfo - .filter(filterClearnet) - .sort((a, b) => cmpClearnet(host, a, b)) - - let bestAddrs = [ - (clearnetAddrs[0]?.gateway?.public || - clearnetAddrs[0]?.info.hostname.kind === 'domain') && - clearnetAddrs[0], - lanAddrs[0], - vpnAddrs[0], - torAddrs[0], - ] - .filter(a => !!a) - .reduce((acc, x) => { - if (!acc.includes(x)) acc.push(x) - return acc - }, [] as AddressWithInfo[]) - - return { - common: bestAddrs.map(a => this.toDisplayAddress(a, host.publicDomains)), - uncommon: allAddressesWithInfo - .filter(a => !bestAddrs.includes(a)) - .map(a => this.toDisplayAddress(a, host.publicDomains)), + for (const gateway of gateways) { + groupMap.set(gateway.id, []) + gatewayMap.set(gateway.id, gateway) } + + for (const h of addr.available) { + const gatewayIds = getGatewayIds(h) + for (const gid of gatewayIds) { + const list = groupMap.get(gid) + if (!list) continue + list.push({ + enabled: isEnabled(addr, h), + type: getAddressType(h), + access: h.public ? 
'public' : 'private', + url: utils.addressHostToUrl(serviceInterface.addressInfo, h), + hostnameInfo: h, + masked, + ui, + deletable: + h.metadata.kind === 'private-domain' || + h.metadata.kind === 'public-domain', + certificate: getCertificate(h, host, addSsl, secure), + }) + } + } + + return gateways + .filter(g => (groupMap.get(g.id)?.length ?? 0) > 0) + .map(g => { + const addresses = groupMap.get(g.id)!.sort(sortDomainsFirst) + + // Derive mDNS enabled state from LAN IPs on this gateway + const lanIps = addresses.filter(a => isLanIp(a.hostnameInfo)) + for (const a of addresses) { + if (a.hostnameInfo.metadata.kind === 'mdns') { + a.enabled = lanIps.some(ip => ip.enabled) + } + } + + return { + gatewayId: g.id, + gatewayName: g.name, + addresses, + } + }) + } + + getPluginGroups( + serviceInterface: T.ServiceInterface, + host: T.Host, + allPackageData?: Record, + ): PluginAddressGroup[] { + const binding = host.bindings[serviceInterface.addressInfo.internalPort] + if (!binding) return [] + + const addr = binding.addresses + const masked = serviceInterface.masked + const groupMap = new Map() + + for (const h of addr.available) { + if (h.metadata.kind !== 'plugin') continue + + const url = utils.addressHostToUrl(serviceInterface.addressInfo, h) + const pluginId = h.metadata.packageId + + if (!groupMap.has(pluginId)) { + groupMap.set(pluginId, []) + } + + groupMap.get(pluginId)!.push({ + url, + hostnameInfo: h, + masked, + }) + } + + // Also include URL plugins that have no addresses yet + if (allPackageData) { + for (const [pkgId, pkg] of Object.entries(allPackageData)) { + if (pkg.plugin?.url && !groupMap.has(pkgId)) { + groupMap.set(pkgId, []) + } + } + } + + return Array.from(groupMap.entries()).map(([pluginId, addresses]) => { + const pluginPkg = allPackageData?.[pluginId] + const pluginActions = pluginPkg?.actions ?? {} + const tableActionId = pluginPkg?.plugin?.url?.tableAction ?? null + const tableActionMeta = tableActionId + ? 
pluginActions[tableActionId] + : undefined + const tableAction = + tableActionId && tableActionMeta + ? { id: tableActionId, metadata: tableActionMeta } + : null + + let pluginPkgInfo: PluginPkgInfo | null = null + if (pluginPkg) { + const manifest = getManifest(pluginPkg) + pluginPkgInfo = { + id: manifest.id, + title: manifest.title, + icon: pluginPkg.icon, + status: renderPkgStatus(pluginPkg).primary, + } + } + + return { + pluginId, + pluginName: + pluginPkgInfo?.title ?? + pluginId.charAt(0).toUpperCase() + pluginId.slice(1), + addresses, + tableAction, + pluginPkgInfo, + pluginActions, + } + }) } - /** ${scheme}://${username}@${host}:${externalPort}${suffix} */ launchableAddress(ui: T.ServiceInterface, host: T.Host): string { const addresses = utils.filledAddress(host, ui.addressInfo) @@ -214,9 +241,8 @@ export class InterfaceService { kind: 'domain', visibility: 'public', }) - const tor = addresses.filter({ kind: 'onion' }) const wanIp = addresses.filter({ kind: 'ipv4', visibility: 'public' }) - const bestPublic = [publicDomains, tor, wanIp].flatMap(h => + const bestPublic = [publicDomains, wanIp].flatMap(h => h.format('urlstring'), )[0] const privateDomains = addresses.filter({ @@ -235,7 +261,7 @@ export class InterfaceService { matching = addresses.nonLocal .filter({ kind: 'ipv4', - predicate: h => h.hostname.value === this.config.hostname, + predicate: h => h.hostname === this.config.hostname, }) .format('urlstring')[0] onLan = true @@ -244,7 +270,7 @@ export class InterfaceService { matching = addresses.nonLocal .filter({ kind: 'ipv6', - predicate: h => h.hostname.value === this.config.hostname, + predicate: h => h.hostname === this.config.hostname, }) .format('urlstring')[0] break @@ -254,9 +280,6 @@ export class InterfaceService { .format('urlstring')[0] onLan = true break - case 'tor': - matching = tor.format('urlstring')[0] - break case 'mdns': matching = mdns.format('urlstring')[0] onLan = true @@ -268,237 +291,50 @@ export class InterfaceService { 
if (bestPublic) return bestPublic return '' } +} - private hostnameInfo( - serviceInterface: T.ServiceInterface, - host: T.Host, - ): T.HostnameInfo[] { - let hostnameInfo = - host.hostnameInfo[serviceInterface.addressInfo.internalPort] - return ( - hostnameInfo?.filter( - h => - this.config.accessType === 'localhost' || - !( - h.kind === 'ip' && - ((h.hostname.kind === 'ipv6' && - utils.IPV6_LINK_LOCAL.contains(h.hostname.value)) || - h.gateway.id === 'lo') - ), - ) || [] - ) - } +export type GatewayAddress = { + enabled: boolean + type: string + access: 'public' | 'private' + url: string + hostnameInfo: T.HostnameInfo + masked: boolean + ui: boolean + deletable: boolean + certificate: string +} - private toDisplayAddress( - { info, url, gateway, showSsl, masked, ui }: AddressWithInfo, - publicDomains: Record, - ): DisplayAddress { - let access: DisplayAddress['access'] - let gatewayName: DisplayAddress['gatewayName'] - let type: DisplayAddress['type'] - let bullets: any[] +export type GatewayAddressGroup = { + gatewayId: string + gatewayName: string + addresses: GatewayAddress[] +} - const rootCaRequired = this.i18n.transform( - "Requires trusting your server's Root CA", - ) +export type PluginAddress = { + url: string + hostnameInfo: T.HostnameInfo + masked: boolean +} - // ** Tor ** - if (info.kind === 'onion') { - access = null - gatewayName = null - type = 'Tor' - bullets = [ - this.i18n.transform('Connections can be slow or unreliable at times'), - this.i18n.transform( - 'Public if you share the address publicly, otherwise private', - ), - this.i18n.transform('Requires using a Tor-enabled device or browser'), - ] - // Tor (SSL) - if (showSsl) { - bullets = [rootCaRequired, ...bullets] - // Tor (NON-SSL) - } else { - bullets.unshift( - this.i18n.transform( - 'Ideal for anonymous, censorship-resistant hosting and remote access', - ), - ) - } - // ** Not Tor ** - } else { - const port = info.hostname.sslPort || info.hostname.port - gatewayName = 
info.gateway.name +export type PluginPkgInfo = { + id: string + title: string + icon: string + status: PrimaryStatus +} - const gatewayLanIpv4 = gateway?.lanIpv4[0] - const isWireguard = gateway?.ipInfo.deviceType === 'wireguard' - - const localIdeal = this.i18n.transform('Ideal for local access') - const lanRequired = this.i18n.transform( - 'Requires being connected to the same Local Area Network (LAN) as your server, either physically or via VPN', - ) - const staticRequired = `${this.i18n.transform('Requires setting a static IP address for')} ${gatewayLanIpv4} ${this.i18n.transform('in your gateway')}` - const vpnAccess = this.i18n.transform('Ideal for VPN access via') - const routerWireguard = this.i18n.transform( - "your router's Wireguard server", - ) - const portForwarding = this.i18n.transform( - 'Requires port forwarding in gateway', - ) - const dnsFor = this.i18n.transform('Requires a DNS record for') - const resolvesTo = this.i18n.transform('that resolves to') - - // * Local * - if (info.hostname.kind === 'local') { - type = this.i18n.transform('Local') - access = 'private' - bullets = [ - localIdeal, - this.i18n.transform( - 'Not recommended for VPN access. VPNs do not support ".local" domains without advanced configuration', - ), - lanRequired, - rootCaRequired, - ] - // * IPv4 * - } else if (info.hostname.kind === 'ipv4') { - type = 'IPv4' - if (info.public) { - access = 'public' - bullets = [ - this.i18n.transform('Can be used for clearnet access'), - this.i18n.transform( - 'Not recommended in most cases. 
Using a public domain is more common and preferred', - ), - rootCaRequired, - ] - if (!info.gateway.public) { - bullets.push( - `${portForwarding} "${gatewayName}": ${port} -> ${port}`, - ) - } - } else { - access = 'private' - if (isWireguard) { - bullets = [`${vpnAccess} StartTunnel`, rootCaRequired] - } else { - bullets = [ - localIdeal, - `${vpnAccess} ${routerWireguard}`, - lanRequired, - rootCaRequired, - staticRequired, - ] - } - } - // * IPv6 * - } else if (info.hostname.kind === 'ipv6') { - type = 'IPv6' - access = 'private' - bullets = [ - this.i18n.transform('Can be used for local access'), - lanRequired, - rootCaRequired, - ] - // * Domain * - } else { - type = this.i18n.transform('Domain') - if (info.public) { - access = 'public' - bullets = [ - `${dnsFor} ${info.hostname.value} ${resolvesTo} ${gateway?.ipInfo.wanIp}`, - ] - - if (!info.gateway.public) { - bullets.push( - `${portForwarding} "${gatewayName}": ${port} -> ${port === 443 ? 5443 : port}`, - ) - } - - if (publicDomains[info.hostname.value]?.acme) { - bullets.unshift( - this.i18n.transform('Ideal for public access via the Internet'), - ) - } else { - bullets = [ - this.i18n.transform( - 'Can be used for personal access via the public Internet, but a VPN is more private and secure', - ), - this.i18n.transform( - `Not good for public access, since the certificate is signed by your Server's Root CA`, - ), - rootCaRequired, - ...bullets, - ] - } - } else { - access = 'private' - const ipPortBad = this.i18n.transform( - 'when using IP addresses and ports is undesirable', - ) - const customDnsRequired = `${dnsFor} ${info.hostname.value} ${resolvesTo} ${gatewayLanIpv4}` - if (isWireguard) { - bullets = [ - `${vpnAccess} StartTunnel ${ipPortBad}`, - customDnsRequired, - rootCaRequired, - ] - } else { - bullets = [ - `${localIdeal} ${ipPortBad}`, - `${vpnAccess} ${routerWireguard} ${ipPortBad}`, - customDnsRequired, - rootCaRequired, - lanRequired, - staticRequired, - ] - } - } - } - } - - if 
(showSsl) { - type = `${type} (SSL)` - - bullets.unshift( - this.i18n.transform('Should only needed for apps that enforce SSL'), - ) - } - - return { - url, - access, - gatewayName, - type, - bullets, - masked, - ui, - } - } +export type PluginAddressGroup = { + pluginId: string + pluginName: string + addresses: PluginAddress[] + tableAction: { id: string; metadata: T.ActionMetadata } | null + pluginPkgInfo: PluginPkgInfo | null + pluginActions: Record } export type MappedServiceInterface = T.ServiceInterface & { - gateways: InterfaceGateway[] - torDomains: string[] - publicDomains: PublicDomain[] - privateDomains: string[] - addresses: { - common: DisplayAddress[] - uncommon: DisplayAddress[] - } + gatewayGroups: GatewayAddressGroup[] + pluginGroups: PluginAddressGroup[] addSsl: boolean } - -export type InterfaceGateway = GatewayPlus & { - enabled: boolean -} - -export type DisplayAddress = { - type: string - access: 'public' | 'private' | null - gatewayName: string | null - url: string - bullets: i18nKey[] - masked: boolean - ui: boolean -} diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/private-domains.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/private-domains.component.ts deleted file mode 100644 index 40b78406e..000000000 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/private-domains.component.ts +++ /dev/null @@ -1,184 +0,0 @@ -import { - ChangeDetectionStrategy, - Component, - inject, - input, -} from '@angular/core' -import { - DialogService, - DocsLinkDirective, - ErrorService, - i18nPipe, - LoadingService, -} from '@start9labs/shared' -import { ISB, utils } from '@start9labs/start-sdk' -import { TuiButton, TuiTitle } from '@taiga-ui/core' -import { TuiSkeleton } from '@taiga-ui/kit' -import { TuiCell } from '@taiga-ui/layout' -import { filter } from 'rxjs' -import { - FormComponent, - FormContext, -} from 'src/app/routes/portal/components/form.component' -import { 
PlaceholderComponent } from 'src/app/routes/portal/components/placeholder.component' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { FormDialogService } from 'src/app/services/form-dialog.service' -import { configBuilderToSpec } from 'src/app/utils/configBuilderToSpec' -import { InterfaceComponent } from './interface.component' - -@Component({ - selector: 'section[privateDomains]', - template: ` -
- {{ 'Private Domains' | i18n }} - - {{ 'Documentation' | i18n }} - - -
- @for (domain of privateDomains(); track domain) { -
- {{ domain }} - -
- } @empty { - @if (privateDomains()) { - - {{ 'No private domains' | i18n }} - - } @else { - @for (_ of [0, 1]; track $index) { - - } - } - } - `, - styles: ` - :host { - grid-column: span 4; - overflow-wrap: break-word; - } - `, - host: { class: 'g-card' }, - imports: [ - TuiCell, - TuiTitle, - TuiButton, - PlaceholderComponent, - i18nPipe, - DocsLinkDirective, - TuiSkeleton, - ], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class InterfacePrivateDomainsComponent { - private readonly dialog = inject(DialogService) - private readonly formDialog = inject(FormDialogService) - private readonly loader = inject(LoadingService) - private readonly errorService = inject(ErrorService) - private readonly api = inject(ApiService) - private readonly interface = inject(InterfaceComponent) - private readonly i18n = inject(i18nPipe) - - readonly privateDomains = input.required() - - async add() { - this.formDialog.open>(FormComponent, { - label: 'New private domain', - data: { - spec: await configBuilderToSpec( - ISB.InputSpec.of({ - fqdn: ISB.Value.text({ - name: this.i18n.transform('Domain'), - description: this.i18n.transform( - 'Enter a fully qualified domain name. 
Since the domain is for private use, it can be any domain you want, even one you do not control.', - ), - required: true, - default: null, - patterns: [utils.Patterns.domain], - }), - }), - ), - buttons: [ - { - text: this.i18n.transform('Save')!, - handler: async value => this.save(value.fqdn), - }, - ], - }, - }) - } - - async remove(fqdn: string) { - this.dialog - .openConfirm({ label: 'Are you sure?', size: 's' }) - .pipe(filter(Boolean)) - .subscribe(async () => { - const loader = this.loader.open('Removing').subscribe() - - try { - if (this.interface.packageId()) { - await this.api.pkgRemovePrivateDomain({ - fqdn, - package: this.interface.packageId(), - host: this.interface.value()?.addressInfo.hostId || '', - }) - } else { - await this.api.osUiRemovePrivateDomain({ fqdn }) - } - return true - } catch (e: any) { - this.errorService.handleError(e) - return false - } finally { - loader.unsubscribe() - } - }) - } - - private async save(fqdn: string): Promise { - const loader = this.loader.open('Saving').subscribe() - - try { - if (this.interface.packageId) { - await this.api.pkgAddPrivateDomain({ - fqdn, - package: this.interface.packageId(), - host: this.interface.value()?.addressInfo.hostId || '', - }) - } else { - await this.api.osUiAddPrivateDomain({ fqdn }) - } - return true - } catch (e: any) { - this.errorService.handleError(e) - return false - } finally { - loader.unsubscribe() - } - } -} diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/dns.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/dns.component.ts deleted file mode 100644 index e11135b07..000000000 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/dns.component.ts +++ /dev/null @@ -1,167 +0,0 @@ -import { - ChangeDetectionStrategy, - Component, - computed, - inject, - signal, -} from '@angular/core' -import { FormsModule } from '@angular/forms' -import { ErrorService, i18nPipe } from 
'@start9labs/shared' -import { TuiButton, TuiDialogContext, TuiIcon } from '@taiga-ui/core' -import { - TuiButtonLoading, - TuiSwitch, - tuiSwitchOptionsProvider, -} from '@taiga-ui/kit' -import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' -import { TableComponent } from 'src/app/routes/portal/components/table.component' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { parse } from 'tldts' -import { GatewayWithId } from './pd.service' - -@Component({ - selector: 'dns', - template: ` -

{{ context.data.message }}

- - @let wanIp = context.data.gateway.ipInfo.wanIp || ('Error' | i18n); - - @if (context.data.gateway.ipInfo.deviceType !== 'wireguard') { - - } - - - @for (row of rows(); track $index) { - - - - - - - } -
- @if (pass() === true) { - - } @else if (pass() === false) { - - } - {{ ddns ? 'ALIAS' : 'A' }} - {{ row.host }}{{ ddns ? '[DDNS Address]' : wanIp }}{{ row.purpose }}
- -
- -
- `, - styles: ` - label { - display: flex; - gap: 0.75rem; - align-items: center; - margin: 1rem 0; - } - - tui-icon { - font-size: 1rem; - vertical-align: text-bottom; - } - `, - providers: [ - tuiSwitchOptionsProvider({ - appearance: () => 'primary', - icon: () => '', - }), - ], - changeDetection: ChangeDetectionStrategy.OnPush, - imports: [ - TuiButton, - i18nPipe, - TableComponent, - TuiSwitch, - FormsModule, - TuiButtonLoading, - TuiIcon, - ], -}) -export class DnsComponent { - private readonly errorService = inject(ErrorService) - private readonly api = inject(ApiService) - private readonly i18n = inject(i18nPipe) - - readonly ddns = false - - readonly context = - injectContext< - TuiDialogContext< - void, - { fqdn: string; gateway: GatewayWithId; message: string } - > - >() - - readonly loading = signal(false) - readonly pass = signal(undefined) - - readonly rows = computed<{ host: string; purpose: string }[]>(() => { - const { domain, subdomain } = parse(this.context.data.fqdn) - - if (!subdomain) { - return [ - { - host: '@', - purpose: domain!, - }, - ] - } - - const segments = subdomain.split('.').slice(1) - - const subdomains = this.i18n.transform('all subdomains of') - - return [ - { - host: subdomain, - purpose: `only ${subdomain}`, - }, - ...segments.map((_, i) => { - const parent = segments.slice(i).join('.') - return { - host: `*.${parent}`, - purpose: `${subdomains} ${parent}`, - } - }), - { - host: '*', - purpose: `${subdomains} ${domain}`, - }, - ] - }) - - async testDns() { - this.pass.set(undefined) - this.loading.set(true) - - try { - const ip = await this.api.queryDns({ - fqdn: this.context.data.fqdn, - }) - - this.pass.set(ip === this.context.data.gateway.ipInfo.wanIp) - } catch (e: any) { - this.errorService.handleError(e) - } finally { - this.loading.set(false) - } - } -} - -export const DNS = new PolymorpheusComponent(DnsComponent) diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/pd.component.ts 
b/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/pd.component.ts deleted file mode 100644 index aa9d47a65..000000000 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/pd.component.ts +++ /dev/null @@ -1,85 +0,0 @@ -import { - ChangeDetectionStrategy, - Component, - inject, - input, -} from '@angular/core' -import { DocsLinkDirective, i18nPipe } from '@start9labs/shared' -import { TuiButton } from '@taiga-ui/core' -import { TuiSkeleton } from '@taiga-ui/kit' -import { PlaceholderComponent } from 'src/app/routes/portal/components/placeholder.component' -import { TableComponent } from 'src/app/routes/portal/components/table.component' -import { PublicDomainsItemComponent } from './pd.item.component' -import { PublicDomain, PublicDomainService } from './pd.service' - -@Component({ - selector: 'section[publicDomains]', - template: ` -
- {{ 'Public Domains' | i18n }} - - {{ 'Documentation' | i18n }} - - @if (service.data()) { - - } -
- @if (publicDomains()?.length === 0) { - - {{ 'No public domains' | i18n }} - - } @else { - - @for (domain of publicDomains(); track $index) { - - } @empty { - @for (_ of [0]; track $index) { - - - - } - } -
-
{{ 'Loading' | i18n }}
-
- } - `, - styles: ` - :host { - grid-column: span 7; - } - `, - host: { class: 'g-card' }, - providers: [PublicDomainService], - imports: [ - TuiButton, - TableComponent, - PlaceholderComponent, - i18nPipe, - DocsLinkDirective, - PublicDomainsItemComponent, - TuiSkeleton, - ], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class PublicDomainsComponent { - readonly service = inject(PublicDomainService) - - readonly publicDomains = input.required() - - readonly addSsl = input.required() -} diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/pd.item.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/pd.item.component.ts deleted file mode 100644 index 2fc6b9d04..000000000 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/pd.item.component.ts +++ /dev/null @@ -1,119 +0,0 @@ -import { - ChangeDetectionStrategy, - Component, - computed, - inject, - input, -} from '@angular/core' -import { i18nPipe, i18nKey } from '@start9labs/shared' -import { - TuiButton, - TuiDataList, - TuiDropdown, - TuiTextfield, -} from '@taiga-ui/core' -import { PublicDomain, PublicDomainService } from './pd.service' -import { toAuthorityName } from 'src/app/utils/acme' - -@Component({ - selector: 'tr[publicDomain]', - template: ` - {{ publicDomain().fqdn }} - {{ publicDomain().gateway?.name }} - {{ authority() }} - - - - - - - - - - - `, - styles: ` - :host { - grid-template-columns: min-content 1fr min-content; - } - - td:nth-child(2) { - order: -1; - grid-column: span 2; - } - - td:last-child { - grid-area: 1 / 3 / 3; - align-self: center; - text-align: right; - } - - :host-context(tui-root._mobile) { - .authority { - grid-column: span 2; - } - tui-badge { - vertical-align: bottom; - margin-inline-start: 0.25rem; - } - } - `, - changeDetection: ChangeDetectionStrategy.OnPush, - imports: [TuiButton, TuiDataList, TuiDropdown, i18nPipe, TuiTextfield], -}) -export class 
PublicDomainsItemComponent { - protected readonly service = inject(PublicDomainService) - - open = false - - readonly publicDomain = input.required() - readonly addSsl = input.required() - - readonly authority = computed(() => - toAuthorityName(this.publicDomain().acme, this.addSsl()), - ) - readonly dnsMessage = computed( - () => - `Create one of the DNS records below to cause ${this.publicDomain().fqdn} to resolve to ${this.publicDomain().gateway?.ipInfo.wanIp}` as i18nKey, - ) -} diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/pd.service.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/pd.service.ts deleted file mode 100644 index 2991c8bf8..000000000 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/public-domains/pd.service.ts +++ /dev/null @@ -1,277 +0,0 @@ -import { inject, Injectable } from '@angular/core' -import { - DialogService, - ErrorService, - i18nKey, - LoadingService, - i18nPipe, -} from '@start9labs/shared' -import { toSignal } from '@angular/core/rxjs-interop' -import { ISB, T, utils } from '@start9labs/start-sdk' -import { filter, map } from 'rxjs' -import { FormComponent } from 'src/app/routes/portal/components/form.component' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { FormDialogService } from 'src/app/services/form-dialog.service' -import { configBuilderToSpec } from 'src/app/utils/configBuilderToSpec' -import { PatchDB } from 'patch-db-client' -import { DataModel } from 'src/app/services/patch-db/data-model' -import { toAuthorityName } from 'src/app/utils/acme' -import { InterfaceComponent } from '../interface.component' -import { DNS } from './dns.component' - -export type PublicDomain = { - fqdn: string - gateway: GatewayWithId | null - acme: string | null -} - -export type GatewayWithId = T.NetworkInterfaceInfo & { - id: string - ipInfo: T.IpInfo -} - -@Injectable() -export class PublicDomainService { - private 
readonly patch = inject>(PatchDB) - private readonly loader = inject(LoadingService) - private readonly errorService = inject(ErrorService) - private readonly api = inject(ApiService) - private readonly formDialog = inject(FormDialogService) - private readonly dialog = inject(DialogService) - private readonly interface = inject(InterfaceComponent) - private readonly i18n = inject(i18nPipe) - - readonly data = toSignal( - this.patch.watch$('serverInfo', 'network').pipe( - map(({ gateways, acme }) => ({ - gateways: Object.entries(gateways) - .filter(([_, g]) => g.ipInfo) - .map(([id, g]) => ({ id, ...g })) as GatewayWithId[], - authorities: Object.keys(acme).reduce>( - (obj, url) => ({ - ...obj, - [url]: toAuthorityName(url), - }), - { local: toAuthorityName(null) }, - ), - })), - ), - ) - - async add(addSsl: boolean) { - const addSpec = ISB.InputSpec.of({ - fqdn: ISB.Value.text({ - name: this.i18n.transform('Domain'), - description: this.i18n.transform( - 'Enter a fully qualified domain name. For example, if you control domain.com, you could enter domain.com or subdomain.domain.com or another.subdomain.domain.com.', - ), - required: true, - default: null, - patterns: [utils.Patterns.domain], - }).map(f => f.toLocaleLowerCase()), - ...this.gatewaySpec(), - ...(addSsl - ? this.authoritySpec() - : ({} as ReturnType)), - }) - - this.formDialog.open(FormComponent, { - label: 'Add public domain', - data: { - spec: await configBuilderToSpec(addSpec), - buttons: [ - { - text: 'Save', - handler: (input: typeof addSpec._TYPE) => - this.save(input.fqdn, input.gateway, input.authority), - }, - ], - }, - }) - } - - async edit(domain: PublicDomain, addSsl: boolean) { - const editSpec = ISB.InputSpec.of({ - ...this.gatewaySpec(), - ...(addSsl - ? 
this.authoritySpec() - : ({} as ReturnType)), - }) - - this.formDialog.open(FormComponent, { - label: 'Edit public domain', - data: { - spec: await configBuilderToSpec(editSpec), - buttons: [ - { - text: 'Save', - handler: ({ gateway, authority }: typeof editSpec._TYPE) => - this.save(domain.fqdn, gateway, authority), - }, - ], - value: { - gateway: domain.gateway!.id, - authority: domain.acme, - }, - }, - }) - } - - remove(fqdn: string) { - this.dialog - .openConfirm({ label: 'Are you sure?', size: 's' }) - .pipe(filter(Boolean)) - .subscribe(async () => { - const loader = this.loader.open('Deleting').subscribe() - - try { - if (this.interface.packageId()) { - await this.api.pkgRemovePublicDomain({ - fqdn, - package: this.interface.packageId(), - host: this.interface.value()?.addressInfo.hostId || '', - }) - } else { - await this.api.osUiRemovePublicDomain({ fqdn }) - } - return true - } catch (e: any) { - this.errorService.handleError(e) - return false - } finally { - loader.unsubscribe() - } - }) - } - - showDns(fqdn: string, gateway: GatewayWithId, message: i18nKey) { - this.dialog - .openComponent(DNS, { - label: 'DNS Records', - size: 'l', - data: { - fqdn, - gateway, - message, - }, - }) - .subscribe() - } - - private async save( - fqdn: string, - gatewayId: string, - authority?: 'local' | string, - ) { - const gateway = this.data()!.gateways.find(g => g.id === gatewayId)! - - const loader = this.loader.open('Saving').subscribe() - const params = { - fqdn, - gateway: gatewayId, - acme: !authority || authority === 'local' ? 
null : authority, - } - try { - let ip: string | null - if (this.interface.packageId()) { - ip = await this.api.pkgAddPublicDomain({ - ...params, - package: this.interface.packageId(), - host: this.interface.value()?.addressInfo.hostId || '', - }) - } else { - ip = await this.api.osUiAddPublicDomain(params) - } - - const wanIp = gateway.ipInfo.wanIp - - let message = this.i18n.transform( - 'Create one of the DNS records below.', - ) as i18nKey - - if (!ip) { - setTimeout( - () => - this.showDns( - fqdn, - gateway, - `${this.i18n.transform('No DNS record detected for')} ${fqdn}. ${message}` as i18nKey, - ), - 250, - ) - } else if (ip !== wanIp) { - setTimeout( - () => - this.showDns( - fqdn, - gateway, - `${this.i18n.transform('Invalid DNS record')}. ${fqdn} ${this.i18n.transform('resolves to')} ${ip}. ${message}` as i18nKey, - ), - 250, - ) - } else { - setTimeout( - () => - this.dialog - .openAlert( - `${fqdn} ${this.i18n.transform('resolves to')} ${wanIp}` as i18nKey, - { label: 'DNS record detected!', appearance: 'positive' }, - ) - .subscribe(), - 250, - ) - } - - return true - } catch (e: any) { - this.errorService.handleError(e) - return false - } finally { - loader.unsubscribe() - } - } - - private gatewaySpec() { - const data = this.data()! - - const gateways = data.gateways.filter( - ({ ipInfo: { deviceType } }) => - deviceType !== 'loopback' && deviceType !== 'bridge', - ) - - return { - gateway: ISB.Value.dynamicSelect(() => ({ - name: this.i18n.transform('Gateway'), - description: this.i18n.transform( - 'Select a gateway to use for this domain.', - ), - values: gateways.reduce>( - (obj, gateway) => ({ - [gateway.id]: gateway.name || gateway.ipInfo.name, - ...obj, - }), - { '~/system/gateways': this.i18n.transform('New gateway') }, - ), - default: '', - disabled: gateways - .filter(g => !g.ipInfo.wanIp || utils.CGNAT.contains(g.ipInfo.wanIp)) - .map(g => g.id), - })), - } - } - - private authoritySpec() { - const data = this.data()! 
- - return { - authority: ISB.Value.select({ - name: this.i18n.transform('Certificate Authority'), - description: this.i18n.transform( - 'Select a Certificate Authority to issue SSL/TLS certificates for this domain', - ), - values: data.authorities, - default: '', - }), - } - } -} diff --git a/web/projects/ui/src/app/routes/portal/components/interfaces/tor-domains.component.ts b/web/projects/ui/src/app/routes/portal/components/interfaces/tor-domains.component.ts deleted file mode 100644 index 001bcd71a..000000000 --- a/web/projects/ui/src/app/routes/portal/components/interfaces/tor-domains.component.ts +++ /dev/null @@ -1,193 +0,0 @@ -import { - ChangeDetectionStrategy, - Component, - inject, - input, -} from '@angular/core' -import { - DialogService, - DocsLinkDirective, - ErrorService, - i18nPipe, - LoadingService, -} from '@start9labs/shared' -import { ISB, utils } from '@start9labs/start-sdk' -import { TuiButton, TuiTitle } from '@taiga-ui/core' -import { TuiSkeleton } from '@taiga-ui/kit' -import { TuiCell } from '@taiga-ui/layout' -import { filter } from 'rxjs' -import { - FormComponent, - FormContext, -} from 'src/app/routes/portal/components/form.component' -import { PlaceholderComponent } from 'src/app/routes/portal/components/placeholder.component' -import { ApiService } from 'src/app/services/api/embassy-api.service' -import { FormDialogService } from 'src/app/services/form-dialog.service' -import { configBuilderToSpec } from 'src/app/utils/configBuilderToSpec' - -import { InterfaceComponent } from './interface.component' - -type OnionForm = { - key: string -} - -@Component({ - selector: 'section[torDomains]', - template: ` -
- {{ 'Tor Domains' | i18n }} - - {{ 'Documentation' | i18n }} - - -
- @for (domain of torDomains(); track domain) { -
- {{ domain }} - -
- } @empty { - @if (torDomains()) { - - {{ 'No Tor domains' | i18n }} - - } @else { - @for (_ of [0, 1]; track $index) { - - } - } - } - `, - styles: ` - :host { - grid-column: span 6; - overflow-wrap: break-word; - } - `, - host: { class: 'g-card' }, - imports: [ - TuiCell, - TuiTitle, - TuiButton, - PlaceholderComponent, - i18nPipe, - DocsLinkDirective, - TuiSkeleton, - ], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class InterfaceTorDomainsComponent { - private readonly dialog = inject(DialogService) - private readonly formDialog = inject(FormDialogService) - private readonly loader = inject(LoadingService) - private readonly errorService = inject(ErrorService) - private readonly api = inject(ApiService) - private readonly interface = inject(InterfaceComponent) - private readonly i18n = inject(i18nPipe) - - readonly torDomains = input.required() - - async remove(onion: string) { - this.dialog - .openConfirm({ label: 'Are you sure?', size: 's' }) - .pipe(filter(Boolean)) - .subscribe(async () => { - const loader = this.loader.open('Removing').subscribe() - const params = { onion } - - try { - if (this.interface.packageId()) { - await this.api.pkgRemoveOnion({ - ...params, - package: this.interface.packageId(), - host: this.interface.value()?.addressInfo.hostId || '', - }) - } else { - await this.api.serverRemoveOnion(params) - } - return true - } catch (e: any) { - this.errorService.handleError(e) - return false - } finally { - loader.unsubscribe() - } - }) - } - - async add() { - this.formDialog.open>(FormComponent, { - label: 'New Tor domain', - data: { - spec: await configBuilderToSpec( - ISB.InputSpec.of({ - key: ISB.Value.text({ - name: this.i18n.transform('Private Key (optional)')!, - description: this.i18n.transform( - 'Optionally provide a base64-encoded ed25519 private key for generating the Tor V3 (.onion) domain. 
If not provided, a random key will be generated.', - ), - required: false, - default: null, - patterns: [utils.Patterns.base64], - }), - }), - ), - buttons: [ - { - text: this.i18n.transform('Save')!, - handler: async value => this.save(value.key), - }, - ], - }, - }) - } - - private async save(key?: string): Promise { - const loader = this.loader.open('Saving').subscribe() - - try { - const onion = key - ? await this.api.addTorKey({ key }) - : await this.api.generateTorKey({}) - - if (this.interface.packageId()) { - await this.api.pkgAddOnion({ - onion, - package: this.interface.packageId(), - host: this.interface.value()?.addressInfo.hostId || '', - }) - } else { - await this.api.serverAddOnion({ onion }) - } - return true - } catch (e: any) { - this.errorService.handleError(e) - return false - } finally { - loader.unsubscribe() - } - } -} diff --git a/web/projects/ui/src/app/routes/portal/components/logs/logs-download.directive.ts b/web/projects/ui/src/app/routes/portal/components/logs/logs-download.directive.ts index 997f1283b..addabce0f 100644 --- a/web/projects/ui/src/app/routes/portal/components/logs/logs-download.directive.ts +++ b/web/projects/ui/src/app/routes/portal/components/logs/logs-download.directive.ts @@ -3,10 +3,9 @@ import { convertAnsi, DownloadHTMLService, ErrorService, - FetchLogsReq, - FetchLogsRes, LoadingService, } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' import { LogsComponent } from './logs.component' @Directive({ @@ -19,7 +18,7 @@ export class LogsDownloadDirective { private readonly downloadHtml = inject(DownloadHTMLService) @Input({ required: true }) - logsDownload!: (params: FetchLogsReq) => Promise + logsDownload!: (params: T.LogsParams) => Promise @HostListener('click') async download() { diff --git a/web/projects/ui/src/app/routes/portal/components/logs/logs-fetch.directive.ts b/web/projects/ui/src/app/routes/portal/components/logs/logs-fetch.directive.ts index 04a7c718a..99fd09f86 100644 --- 
a/web/projects/ui/src/app/routes/portal/components/logs/logs-fetch.directive.ts +++ b/web/projects/ui/src/app/routes/portal/components/logs/logs-fetch.directive.ts @@ -18,7 +18,7 @@ export class LogsFetchDirective { switchMap(() => from( this.component.fetchLogs({ - cursor: this.component.startCursor, + cursor: this.component.startCursor ?? undefined, before: true, limit: 400, }), diff --git a/web/projects/ui/src/app/routes/portal/components/logs/logs.component.ts b/web/projects/ui/src/app/routes/portal/components/logs/logs.component.ts index ee700cbda..4cde692b2 100644 --- a/web/projects/ui/src/app/routes/portal/components/logs/logs.component.ts +++ b/web/projects/ui/src/app/routes/portal/components/logs/logs.component.ts @@ -5,11 +5,12 @@ import { WaIntersectionObserver, } from '@ng-web-apis/intersection-observer' import { WaMutationObserver } from '@ng-web-apis/mutation-observer' -import { FetchLogsReq, FetchLogsRes, i18nPipe } from '@start9labs/shared' +import { i18nPipe } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' import { TuiButton, TuiLoader, TuiScrollbar } from '@taiga-ui/core' import { NgDompurifyPipe } from '@taiga-ui/dompurify' import { BehaviorSubject } from 'rxjs' -import { RR } from 'src/app/services/api/api.types' +import { FollowServerLogsReq } from 'src/app/services/api/api.types' import { LogsDownloadDirective } from './logs-download.directive' import { LogsFetchDirective } from './logs-fetch.directive' import { LogsPipe } from './logs.pipe' @@ -41,17 +42,17 @@ export class LogsComponent { private readonly scrollbar?: ElementRef @Input({ required: true }) followLogs!: ( - params: RR.FollowServerLogsReq, - ) => Promise + params: FollowServerLogsReq, + ) => Promise @Input({ required: true }) fetchLogs!: ( - params: FetchLogsReq, - ) => Promise + params: T.LogsParams, + ) => Promise @Input({ required: true }) context!: string scrollTop = 0 - startCursor?: string + startCursor?: string | null scroll = true loading = false 
previous: readonly string[] = [] diff --git a/web/projects/ui/src/app/routes/portal/components/logs/logs.pipe.ts b/web/projects/ui/src/app/routes/portal/components/logs/logs.pipe.ts index b8c3ad8da..233579826 100644 --- a/web/projects/ui/src/app/routes/portal/components/logs/logs.pipe.ts +++ b/web/projects/ui/src/app/routes/portal/components/logs/logs.pipe.ts @@ -1,10 +1,6 @@ import { inject, Pipe, PipeTransform } from '@angular/core' -import { - convertAnsi, - i18nPipe, - Log, - toLocalIsoString, -} from '@start9labs/shared' +import { convertAnsi, i18nPipe, toLocalIsoString } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' import { bufferTime, catchError, @@ -25,7 +21,7 @@ import { take, tap, } from 'rxjs' -import { RR } from 'src/app/services/api/api.types' +import { FollowServerLogsReq } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' import { ConnectionService } from 'src/app/services/connection.service' import { LogsComponent } from './logs.component' @@ -40,9 +36,7 @@ export class LogsPipe implements PipeTransform { private readonly i18n = inject(i18nPipe) transform( - followLogs: ( - params: RR.FollowServerLogsReq, - ) => Promise, + followLogs: (params: FollowServerLogsReq) => Promise, ): Observable { return merge( this.logs.status$.pipe( @@ -53,7 +47,7 @@ export class LogsPipe implements PipeTransform { defer(() => followLogs(this.options)).pipe( tap(r => this.logs.setCursor(r.startCursor)), switchMap(r => - this.api.openWebsocket$(r.guid, { + this.api.openWebsocket$(r.guid, { openObserver: { next: () => this.logs.status$.next('connected'), }, diff --git a/web/projects/ui/src/app/routes/portal/components/port-check-icon.component.ts b/web/projects/ui/src/app/routes/portal/components/port-check-icon.component.ts new file mode 100644 index 000000000..016581c9c --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/components/port-check-icon.component.ts @@ -0,0 +1,37 @@ 
+import { ChangeDetectionStrategy, Component, input } from '@angular/core' +import { T } from '@start9labs/start-sdk' +import { TuiIcon, TuiLoader } from '@taiga-ui/core' + +@Component({ + selector: 'port-check-icon', + template: ` + @if (loading()) { + + } @else { + @let res = result(); + @if (res) { + @if (!res.openInternally) { + + } @else if (!res.openExternally) { + + } @else { + + } + } @else { + + } + } + `, + styles: ` + tui-icon { + font-size: 1.3rem; + vertical-align: text-bottom; + } + `, + changeDetection: ChangeDetectionStrategy.OnPush, + imports: [TuiIcon, TuiLoader], +}) +export class PortCheckIconComponent { + readonly result = input() + readonly loading = input(false) +} diff --git a/web/projects/ui/src/app/routes/portal/components/port-check-warnings.component.ts b/web/projects/ui/src/app/routes/portal/components/port-check-warnings.component.ts new file mode 100644 index 000000000..1c70b319c --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/components/port-check-warnings.component.ts @@ -0,0 +1,38 @@ +import { ChangeDetectionStrategy, Component, input } from '@angular/core' +import { i18nPipe } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' + +@Component({ + selector: 'port-check-warnings', + template: ` + @let res = result(); + @if (res) { + @if (!res.openInternally) { +

+ {{ + 'Port status cannot be determined while service is not running' + | i18n + }} +

+ } + @if (res.openExternally && !res.hairpinning) { +

+ {{ + 'This address will not work from your local network due to a router hairpinning limitation' + | i18n + }} +

+ } + } + `, + styles: ` + p { + margin-top: 0.5rem; + } + `, + changeDetection: ChangeDetectionStrategy.OnPush, + imports: [i18nPipe], +}) +export class PortCheckWarningsComponent { + readonly result = input() +} diff --git a/web/projects/ui/src/app/routes/portal/portal.component.ts b/web/projects/ui/src/app/routes/portal/portal.component.ts index e0ad431ec..2f1aabd34 100644 --- a/web/projects/ui/src/app/routes/portal/portal.component.ts +++ b/web/projects/ui/src/app/routes/portal/portal.component.ts @@ -101,7 +101,7 @@ export class PortalComponent { private readonly patch = inject>(PatchDB) private readonly api = inject(ApiService) - readonly name = toSignal(this.patch.watch$('ui', 'name')) + readonly name = toSignal(this.patch.watch$('serverInfo', 'name')) readonly update = toSignal(inject(OSService).updating$) readonly bar = signal(true) diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/components/targets.component.ts b/web/projects/ui/src/app/routes/portal/routes/backups/components/targets.component.ts index 33423f4bd..75fe0b257 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/components/targets.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/components/targets.component.ts @@ -15,7 +15,7 @@ import { } from '@taiga-ui/core' import { TuiConfirmData, TUI_CONFIRM, TuiSkeleton } from '@taiga-ui/kit' import { filter, map, Subject, switchMap } from 'rxjs' -import { BackupTarget } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { GetBackupIconPipe } from '../pipes/get-backup-icon.pipe' @Component({ @@ -140,7 +140,7 @@ export class BackupsTargetsComponent { readonly delete$ = new Subject() @Input() - backupsTargets: Record | null = null + backupsTargets: Record | null = null @Output() readonly update = new EventEmitter() diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/modals/edit.component.ts 
b/web/projects/ui/src/app/routes/portal/routes/backups/modals/edit.component.ts index fc55acf35..ef86c11da 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/modals/edit.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/modals/edit.component.ts @@ -11,7 +11,8 @@ import { import { TuiBadge, TuiSwitch } from '@taiga-ui/kit' import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' import { from, map } from 'rxjs' -import { BackupJob, BackupTarget } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' +import { BackupJob } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' import { ToHumanCronPipe } from '../pipes/to-human-cron.pipe' import { BackupJobBuilder } from '../utils/job-builder' @@ -149,7 +150,7 @@ export class BackupsEditModal { selectTarget() { this.dialogs - .open(TARGET, TARGET_CREATE) + .open(TARGET, TARGET_CREATE) .subscribe(({ id }) => { this.job.targetId = id }) diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/modals/jobs.component.ts b/web/projects/ui/src/app/routes/portal/routes/backups/modals/jobs.component.ts index 72c67e8d9..970745044 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/modals/jobs.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/modals/jobs.component.ts @@ -26,7 +26,9 @@ import { DocsLinkDirective } from 'projects/shared/src/public-api' Scheduling automatic backups is an excellent way to ensure your StartOS data is safely backed up. StartOS will issue a notification whenever one of your scheduled backups succeeds or fails. - View instructions + + View instructions +

Saved Jobs diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/modals/recover.component.ts b/web/projects/ui/src/app/routes/portal/routes/backups/modals/recover.component.ts index 96e237d44..11beba750 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/modals/recover.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/modals/recover.component.ts @@ -8,7 +8,7 @@ import { TuiBlock, TuiCheckbox } from '@taiga-ui/kit' import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' import { PatchDB } from 'patch-db-client' import { take } from 'rxjs' -import { PackageBackupInfo } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { ApiService } from 'src/app/services/api/embassy-api.service' import { DataModel } from 'src/app/services/patch-db/data-model' import { ToOptionsPipe } from '../pipes/to-options.pipe' @@ -98,7 +98,7 @@ export class BackupsRecoverModal { } } - get backups(): Record { + get backups(): Record { return this.context.data.backupInfo.packageBackups } @@ -110,13 +110,12 @@ export class BackupsRecoverModal { const ids = options.filter(({ checked }) => !!checked).map(({ id }) => id) const loader = this.loader.open('Initializing').subscribe() - const { targetId, serverId, password } = this.context.data + const { targetId, password } = this.context.data try { await this.api.restorePackages({ ids, targetId, - serverId, password, }) diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/modals/target.component.ts b/web/projects/ui/src/app/routes/portal/routes/backups/modals/target.component.ts index f39d9b51d..b6db19285 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/modals/target.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/modals/target.component.ts @@ -6,7 +6,7 @@ import { signal, } from '@angular/core' import { ErrorService, Exver } from '@start9labs/shared' -import { Version } from 
'@start9labs/start-sdk' +import { T, Version } from '@start9labs/start-sdk' import { TuiButton, TuiDialogContext, @@ -19,7 +19,6 @@ import { import { TuiCell } from '@taiga-ui/layout' import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' import { PatchDB } from 'patch-db-client' -import { BackupTarget } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' import { DataModel } from 'src/app/services/patch-db/data-model' import { getServerInfo } from 'src/app/utils/get-server-info' @@ -81,7 +80,7 @@ export class BackupsTargetModal { readonly context = injectContext< - TuiDialogContext + TuiDialogContext >() readonly loading = signal(true) @@ -91,7 +90,7 @@ export class BackupsTargetModal { : 'Loading Backup Sources' serverId = '' - targets: Record = {} + targets: Record = {} async ngOnInit() { try { @@ -104,14 +103,14 @@ export class BackupsTargetModal { } } - isDisabled(target: BackupTarget): boolean { + isDisabled(target: T.BackupTarget): boolean { return ( !target.mountable || (this.context.data.type === 'restore' && !this.hasBackup(target)) ) } - hasBackup(target: BackupTarget): boolean { + hasBackup(target: T.BackupTarget): boolean { return ( target.startOs?.[this.serverId] && Version.parse(target.startOs[this.serverId].version).compare( @@ -127,7 +126,7 @@ export class BackupsTargetModal { .subscribe() } - select(target: BackupTarget, id: string) { + select(target: T.BackupTarget, id: string) { this.context.completeWith({ ...target, id }) } } diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/modals/targets.component.ts b/web/projects/ui/src/app/routes/portal/routes/backups/modals/targets.component.ts index 07f96b917..6bde58daa 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/modals/targets.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/modals/targets.component.ts @@ -3,8 +3,8 @@ import { ErrorService, LoadingService } from 
'@start9labs/shared' import { TuiButton, TuiLink, TuiNotification } from '@taiga-ui/core' import { PolymorpheusComponent } from '@taiga-ui/polymorpheus' import { FormComponent } from 'src/app/routes/portal/components/form.component' +import { T } from '@start9labs/start-sdk' import { - BackupTarget, BackupTargetType, RR, UnknownDisk, @@ -31,7 +31,9 @@ import { DocsLinkDirective } from 'projects/shared/src/public-api' backups. They can be physical drives plugged into your server, shared folders on your Local Area Network (LAN), or third party clouds such as Dropbox or Google Drive. - View instructions + + View instructions +

Unknown Physical Drives @@ -188,7 +190,7 @@ export class BackupsTargetsModal implements OnInit { } private async add( - type: BackupTargetType, + type: T.BackupTargetType, value: | RR.AddCifsBackupTargetReq | RR.AddCloudBackupTargetReq @@ -204,7 +206,7 @@ export class BackupsTargetsModal implements OnInit { } private async update( - type: BackupTargetType, + type: T.BackupTargetType, value: | RR.UpdateCifsBackupTargetReq | RR.UpdateCloudBackupTargetReq @@ -220,13 +222,13 @@ export class BackupsTargetsModal implements OnInit { } private setTargets( - saved: Record = this.targets()?.saved || {}, + saved: Record = this.targets()?.saved || {}, unknownDisks: UnknownDisk[] = this.targets()?.unknownDisks || [], ) { this.targets.set({ unknownDisks, saved }) } - private async getSpec(target: BackupTarget) { + private async getSpec(target: T.BackupTarget) { switch (target.type) { case 'cifs': return await configBuilderToSpec(cifsSpec) diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/pipes/get-display-info.pipe.ts b/web/projects/ui/src/app/routes/portal/routes/backups/pipes/get-display-info.pipe.ts index c1f2ae345..83a718bc3 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/pipes/get-display-info.pipe.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/pipes/get-display-info.pipe.ts @@ -1,5 +1,5 @@ import { Pipe, PipeTransform } from '@angular/core' -import { BackupTarget } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { DisplayInfo } from '../types/display-info' import { GetBackupIconPipe } from './get-backup-icon.pipe' @@ -9,7 +9,7 @@ import { GetBackupIconPipe } from './get-backup-icon.pipe' export class GetDisplayInfoPipe implements PipeTransform { readonly icon = new GetBackupIconPipe() - transform(target: BackupTarget): DisplayInfo { + transform(target: T.BackupTarget): DisplayInfo { const result = { name: target.name, path: `Path: ${target.path}`, diff --git 
a/web/projects/ui/src/app/routes/portal/routes/backups/pipes/has-error.pipe.ts b/web/projects/ui/src/app/routes/portal/routes/backups/pipes/has-error.pipe.ts index 32bc938e7..15cc3d516 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/pipes/has-error.pipe.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/pipes/has-error.pipe.ts @@ -1,11 +1,11 @@ import { Pipe, PipeTransform } from '@angular/core' -import { BackupReport } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' @Pipe({ name: 'hasError', }) export class HasErrorPipe implements PipeTransform { - transform(report: BackupReport): boolean { + transform(report: T.BackupReport): boolean { return ( !!report.server.error || !!Object.values(report.packages).find(({ error }) => error) diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/pipes/to-options.pipe.ts b/web/projects/ui/src/app/routes/portal/routes/backups/pipes/to-options.pipe.ts index b33e8b678..2d1796b74 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/pipes/to-options.pipe.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/pipes/to-options.pipe.ts @@ -1,10 +1,9 @@ import { inject, Pipe, PipeTransform } from '@angular/core' import { map, Observable } from 'rxjs' -import { PackageBackupInfo } from 'src/app/services/api/api.types' +import { T, Version } from '@start9labs/start-sdk' import { ConfigService } from 'src/app/services/config.service' import { PackageDataEntry } from 'src/app/services/patch-db/data-model' import { RecoverOption } from '../types/recover-option' -import { Version } from '@start9labs/start-sdk' @Pipe({ name: 'toOptions', @@ -14,7 +13,7 @@ export class ToOptionsPipe implements PipeTransform { transform( packageData$: Observable>, - packageBackups: Record = {}, + packageBackups: Record = {}, ): Observable { return packageData$.pipe( map(packageData => diff --git 
a/web/projects/ui/src/app/routes/portal/routes/backups/services/create.service.ts b/web/projects/ui/src/app/routes/portal/routes/backups/services/create.service.ts index 8561552e7..74a85dee7 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/services/create.service.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/services/create.service.ts @@ -2,7 +2,7 @@ import { inject, Injectable } from '@angular/core' import { LoadingService } from '@start9labs/shared' import { TuiDialogOptions, TuiDialogService } from '@taiga-ui/core' import { from, switchMap } from 'rxjs' -import { BackupTarget } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { ApiService } from 'src/app/services/api/embassy-api.service' import { TARGET, TARGET_CREATE } from '../modals/target.component' import { BACKUP, BACKUP_OPTIONS } from '../modals/backup.component' @@ -17,7 +17,7 @@ export class BackupsCreateService { readonly handle = () => { this.dialogs - .open(TARGET, TARGET_CREATE) + .open(TARGET, TARGET_CREATE) .pipe( switchMap(({ id }) => this.dialogs diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/services/restore.service.ts b/web/projects/ui/src/app/routes/portal/routes/backups/services/restore.service.ts index 8dfde95aa..4d1c7a4dc 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/services/restore.service.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/services/restore.service.ts @@ -22,7 +22,7 @@ import { PROMPT, PromptOptions, } from 'src/app/routes/portal/modals/prompt.component' -import { BackupTarget } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { ApiService } from 'src/app/services/api/embassy-api.service' import { RECOVER } from '../modals/recover.component' import { SERVERS } from '../modals/servers.component' @@ -41,7 +41,7 @@ export class BackupsRestoreService { readonly handle = () => { this.dialogs - .open(TARGET, 
TARGET_RESTORE) + .open(TARGET, TARGET_RESTORE) .pipe( switchMap(target => this.dialogs diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/types/recover-data.ts b/web/projects/ui/src/app/routes/portal/routes/backups/types/recover-data.ts index 71103a0c3..2063555bc 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/types/recover-data.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/types/recover-data.ts @@ -1,8 +1,8 @@ -import { BackupInfo } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' export interface RecoverData { targetId: string serverId: string - backupInfo: BackupInfo + backupInfo: T.BackupInfo password: string } diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/types/recover-option.ts b/web/projects/ui/src/app/routes/portal/routes/backups/types/recover-option.ts index 89ba8557b..75cb0fc29 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/types/recover-option.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/types/recover-option.ts @@ -1,6 +1,6 @@ -import { PackageBackupInfo } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' -export interface RecoverOption extends PackageBackupInfo { +export interface RecoverOption extends T.PackageBackupInfo { id: string checked: boolean installed: boolean diff --git a/web/projects/ui/src/app/routes/portal/routes/backups/utils/job-builder.ts b/web/projects/ui/src/app/routes/portal/routes/backups/utils/job-builder.ts index 78e99b03c..dac07545d 100644 --- a/web/projects/ui/src/app/routes/portal/routes/backups/utils/job-builder.ts +++ b/web/projects/ui/src/app/routes/portal/routes/backups/utils/job-builder.ts @@ -1,4 +1,5 @@ -import { BackupJob, BackupTarget, RR } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' +import { BackupJob, RR } from 'src/app/services/api/api.types' export class BackupJobBuilder { name: string diff --git 
a/web/projects/ui/src/app/routes/portal/routes/logs/logs.routes.ts b/web/projects/ui/src/app/routes/portal/routes/logs/logs.routes.ts index 281683fcd..c42623ea9 100644 --- a/web/projects/ui/src/app/routes/portal/routes/logs/logs.routes.ts +++ b/web/projects/ui/src/app/routes/portal/routes/logs/logs.routes.ts @@ -13,10 +13,6 @@ export const ROUTES: Routes = [ path: 'os', loadComponent: () => import('./routes/os.component'), }, - { - path: 'tor', - loadComponent: () => import('./routes/tor.component'), - }, ] export default ROUTES diff --git a/web/projects/ui/src/app/routes/portal/routes/logs/routes/kernel.component.ts b/web/projects/ui/src/app/routes/portal/routes/logs/routes/kernel.component.ts index c4509dc28..a78666946 100644 --- a/web/projects/ui/src/app/routes/portal/routes/logs/routes/kernel.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/logs/routes/kernel.component.ts @@ -1,7 +1,8 @@ import { ChangeDetectionStrategy, Component, inject } from '@angular/core' import { i18nPipe } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' import { LogsComponent } from 'src/app/routes/portal/components/logs/logs.component' -import { RR } from 'src/app/services/api/api.types' +import { FollowServerLogsReq } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' import { LogsHeaderComponent } from '../components/header.component' @@ -24,9 +25,9 @@ import { LogsHeaderComponent } from '../components/header.component' export default class SystemKernelComponent { private readonly api = inject(ApiService) - protected readonly follow = (params: RR.FollowServerLogsReq) => + protected readonly follow = (params: FollowServerLogsReq) => this.api.followKernelLogs(params) - protected readonly fetch = (params: RR.GetServerLogsReq) => + protected readonly fetch = (params: T.LogsParams) => this.api.getKernelLogs(params) } diff --git 
a/web/projects/ui/src/app/routes/portal/routes/logs/routes/os.component.ts b/web/projects/ui/src/app/routes/portal/routes/logs/routes/os.component.ts index 37d270c35..b5cebb3e6 100644 --- a/web/projects/ui/src/app/routes/portal/routes/logs/routes/os.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/logs/routes/os.component.ts @@ -1,8 +1,9 @@ import { ChangeDetectionStrategy, Component, inject } from '@angular/core' import { i18nPipe } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' import { LogsComponent } from 'src/app/routes/portal/components/logs/logs.component' import { LogsHeaderComponent } from 'src/app/routes/portal/routes/logs/components/header.component' -import { RR } from 'src/app/services/api/api.types' +import { FollowServerLogsReq } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' @Component({ @@ -24,9 +25,9 @@ import { ApiService } from 'src/app/services/api/embassy-api.service' export default class SystemOSComponent { private readonly api = inject(ApiService) - protected readonly follow = (params: RR.FollowServerLogsReq) => + protected readonly follow = (params: FollowServerLogsReq) => this.api.followServerLogs(params) - protected readonly fetch = (params: RR.GetServerLogsReq) => + protected readonly fetch = (params: T.LogsParams) => this.api.getServerLogs(params) } diff --git a/web/projects/ui/src/app/routes/portal/routes/logs/routes/outlet.component.ts b/web/projects/ui/src/app/routes/portal/routes/logs/routes/outlet.component.ts index 97d3f69cd..524a997da 100644 --- a/web/projects/ui/src/app/routes/portal/routes/logs/routes/outlet.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/logs/routes/outlet.component.ts @@ -79,12 +79,6 @@ export default class SystemLogsComponent { subtitle: 'Raw, unfiltered operating system logs', icon: '@tui.square-dashed-bottom-code', }, - { - link: 'tor', - title: 'Tor Logs', - subtitle: 'Diagnostics for the Tor 
daemon on this server', - icon: '@tui.target', - }, { link: 'kernel', title: 'Kernel Logs', diff --git a/web/projects/ui/src/app/routes/portal/routes/logs/routes/tor.component.ts b/web/projects/ui/src/app/routes/portal/routes/logs/routes/tor.component.ts deleted file mode 100644 index 45b711fa5..000000000 --- a/web/projects/ui/src/app/routes/portal/routes/logs/routes/tor.component.ts +++ /dev/null @@ -1,32 +0,0 @@ -import { ChangeDetectionStrategy, Component, inject } from '@angular/core' -import { i18nPipe } from '@start9labs/shared' -import { LogsComponent } from 'src/app/routes/portal/components/logs/logs.component' -import { LogsHeaderComponent } from 'src/app/routes/portal/routes/logs/components/header.component' -import { RR } from 'src/app/services/api/api.types' -import { ApiService } from 'src/app/services/api/embassy-api.service' - -@Component({ - template: ` - - {{ 'Diagnostics for the Tor daemon on this server' | i18n }} - - - `, - styles: ` - :host { - padding: 1rem; - } - `, - changeDetection: ChangeDetectionStrategy.OnPush, - imports: [LogsComponent, LogsHeaderComponent, i18nPipe], - host: { class: 'g-page' }, -}) -export default class SystemTorComponent { - private readonly api = inject(ApiService) - - protected readonly follow = (params: RR.FollowServerLogsReq) => - this.api.followTorLogs(params) - - protected readonly fetch = (params: RR.GetServerLogsReq) => - this.api.getTorLogs(params) -} diff --git a/web/projects/ui/src/app/routes/portal/routes/metrics/cpu.component.ts b/web/projects/ui/src/app/routes/portal/routes/metrics/cpu.component.ts index 13ff85603..11ca3a6d1 100644 --- a/web/projects/ui/src/app/routes/portal/routes/metrics/cpu.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/metrics/cpu.component.ts @@ -4,7 +4,7 @@ import { computed, input, } from '@angular/core' -import { ServerMetrics } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { DataComponent } from './data.component' 
import { i18nKey } from '@start9labs/shared' @@ -86,7 +86,7 @@ const LABELS: Record = { imports: [DataComponent], }) export class CpuComponent { - readonly value = input() + readonly value = input() readonly transform = computed( (value = this.value()?.percentageUsed?.value || '0') => diff --git a/web/projects/ui/src/app/routes/portal/routes/metrics/data.component.ts b/web/projects/ui/src/app/routes/portal/routes/metrics/data.component.ts index a2aacc1cc..7a6b0c988 100644 --- a/web/projects/ui/src/app/routes/portal/routes/metrics/data.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/metrics/data.component.ts @@ -6,7 +6,7 @@ import { } from '@angular/core' import { TuiTitle } from '@taiga-ui/core' import { TuiCell } from '@taiga-ui/layout' -import { ServerMetrics } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { ValuePipe } from './value.pipe' import { i18nKey, i18nPipe } from '@start9labs/shared' @@ -43,8 +43,8 @@ import { i18nKey, i18nPipe } from '@start9labs/shared' changeDetection: ChangeDetectionStrategy.OnPush, imports: [TuiCell, TuiTitle, ValuePipe, i18nPipe], }) -export class DataComponent { - readonly labels = input.required>() - readonly value = input() - readonly keys = computed(() => Object.keys(this.labels()) as Array) +export class DataComponent { + readonly labels = input.required>() + readonly value = input() + readonly keys = computed(() => Object.keys(this.labels()) as Array) } diff --git a/web/projects/ui/src/app/routes/portal/routes/metrics/memory.component.ts b/web/projects/ui/src/app/routes/portal/routes/metrics/memory.component.ts index 382ecd0df..2e35215c8 100644 --- a/web/projects/ui/src/app/routes/portal/routes/metrics/memory.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/metrics/memory.component.ts @@ -5,7 +5,7 @@ import { input, } from '@angular/core' import { TuiProgress } from '@taiga-ui/kit' -import { ServerMetrics } from 'src/app/services/api/api.types' +import 
{ T } from '@start9labs/start-sdk' import { DataComponent } from './data.component' import { ValuePipe } from './value.pipe' import { i18nKey } from '@start9labs/shared' @@ -40,7 +40,7 @@ const LABELS: Record = { imports: [DataComponent, TuiProgress, ValuePipe], }) export class MemoryComponent { - readonly value = input() + readonly value = input() readonly used = computed( (value = this.value()?.percentageUsed.value || '0') => diff --git a/web/projects/ui/src/app/routes/portal/routes/metrics/metrics.service.ts b/web/projects/ui/src/app/routes/portal/routes/metrics/metrics.service.ts index dfc0c33a7..fd7a43655 100644 --- a/web/projects/ui/src/app/routes/portal/routes/metrics/metrics.service.ts +++ b/web/projects/ui/src/app/routes/portal/routes/metrics/metrics.service.ts @@ -13,14 +13,14 @@ import { take, tap, } from 'rxjs' -import { ServerMetrics } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { ApiService } from 'src/app/services/api/embassy-api.service' import { ConnectionService } from 'src/app/services/connection.service' @Injectable({ providedIn: 'root', }) -export class MetricsService extends Observable { +export class MetricsService extends Observable { private readonly connection = inject(ConnectionService) private readonly api = inject(ApiService) @@ -28,7 +28,7 @@ export class MetricsService extends Observable { this.api.followServerMetrics({}), ).pipe( switchMap(({ guid, metrics }) => - this.api.openWebsocket$(guid).pipe(startWith(metrics)), + this.api.openWebsocket$(guid).pipe(startWith(metrics)), ), catchError(() => this.connection.pipe(filter(Boolean), take(1), ignoreElements()), diff --git a/web/projects/ui/src/app/routes/portal/routes/metrics/storage.component.ts b/web/projects/ui/src/app/routes/portal/routes/metrics/storage.component.ts index a761553dc..2849dc342 100644 --- a/web/projects/ui/src/app/routes/portal/routes/metrics/storage.component.ts +++ 
b/web/projects/ui/src/app/routes/portal/routes/metrics/storage.component.ts @@ -5,7 +5,7 @@ import { input, } from '@angular/core' import { TuiProgress } from '@taiga-ui/kit' -import { ServerMetrics } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { DataComponent } from './data.component' import { i18nKey } from '@start9labs/shared' @@ -41,7 +41,7 @@ const LABELS: Record = { imports: [TuiProgress, DataComponent], }) export class StorageComponent { - readonly value = input() + readonly value = input() readonly used = computed( ( diff --git a/web/projects/ui/src/app/routes/portal/routes/metrics/time.component.ts b/web/projects/ui/src/app/routes/portal/routes/metrics/time.component.ts index 73cae2664..b6f1247e7 100644 --- a/web/projects/ui/src/app/routes/portal/routes/metrics/time.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/metrics/time.component.ts @@ -49,7 +49,7 @@ import { TimeService } from 'src/app/services/time.service' docsLink iconEnd="@tui.external-link" appearance="" - path="/help/common-issues.html" + path="/start-os/faq/index.html" fragment="#clock-sync-failure" [pseudo]="true" [textContent]="'the docs' | i18n" diff --git a/web/projects/ui/src/app/routes/portal/routes/notifications/notifications.component.ts b/web/projects/ui/src/app/routes/portal/routes/notifications/notifications.component.ts index c112eff8e..b343c077f 100644 --- a/web/projects/ui/src/app/routes/portal/routes/notifications/notifications.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/notifications/notifications.component.ts @@ -17,11 +17,8 @@ import { import { TuiButton } from '@taiga-ui/core' import { filter } from 'rxjs' import { distinctUntilChanged, skip } from 'rxjs/operators' -import { - RR, - ServerNotification, - ServerNotifications, -} from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' +import { ServerNotification } from 'src/app/services/api/api.types' import { ApiService 
} from 'src/app/services/api/embassy-api.service' import { BadgeService } from 'src/app/services/badge.service' import { NotificationService } from 'src/app/services/notification.service' @@ -66,7 +63,7 @@ export default class NotificationsComponent implements OnInit { readonly service = inject(NotificationService) readonly api = inject(ApiService) readonly errorService = inject(ErrorService) - readonly notifications = signal(null) + readonly notifications = signal(null) protected readonly table = viewChild< NotificationsTableComponent> @@ -92,7 +89,7 @@ export default class NotificationsComponent implements OnInit { }) } - async getMore(params: RR.GetNotificationsReq) { + async getMore(params: T.ListNotificationParams) { try { this.notifications.set(null) this.notifications.set(await this.api.getNotifications(params)) @@ -101,7 +98,7 @@ export default class NotificationsComponent implements OnInit { } } - async remove(all: ServerNotifications) { + async remove(all: T.NotificationWithId[]) { const ids = this.table() ?.selected() @@ -119,7 +116,7 @@ export default class NotificationsComponent implements OnInit { } private init() { - this.getMore({}).then(() => { + this.getMore({ before: null, limit: null }).then(() => { const latest = this.notifications()?.at(0) if (latest) { this.service.markSeenAll(latest.id) diff --git a/web/projects/ui/src/app/routes/portal/routes/services/components/error.component.ts b/web/projects/ui/src/app/routes/portal/routes/services/components/error.component.ts index 7fe582761..3a1b2e133 100644 --- a/web/projects/ui/src/app/routes/portal/routes/services/components/error.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/services/components/error.component.ts @@ -16,7 +16,6 @@ import { getManifest } from 'src/app/utils/get-package-data' template: `
{{ 'Service Launch Error' | i18n }}

{{ error?.details }}

-

{{ error?.debug }}

{{ 'Actions' | i18n }} diff --git a/web/projects/ui/src/app/routes/portal/routes/services/components/interface-item.component.ts b/web/projects/ui/src/app/routes/portal/routes/services/components/interface-item.component.ts index 4e76f7e54..acde11acd 100644 --- a/web/projects/ui/src/app/routes/portal/routes/services/components/interface-item.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/services/components/interface-item.component.ts @@ -1,5 +1,6 @@ -import { ChangeDetectionStrategy, Component, Input } from '@angular/core' +import { ChangeDetectionStrategy, Component, input } from '@angular/core' import { T } from '@start9labs/start-sdk' +import { TuiIcon } from '@taiga-ui/core' import { TuiBadge } from '@taiga-ui/kit' @Component({ @@ -7,15 +8,19 @@ import { TuiBadge } from '@taiga-ui/kit' template: ` - {{ info.type }} + + {{ info().type }} + - {{ info.description }} + {{ info().description }} + + + `, styles: ` :host { - clip-path: inset(0 round 0.75rem); cursor: pointer; &:hover { @@ -32,13 +37,18 @@ import { TuiBadge } from '@taiga-ui/kit' font-weight: bold; } - tui-icon { + .chevron { + text-align: end; + } + + .chevron tui-icon { font-size: 1rem; + color: var(--tui-text-tertiary); } :host-context(tui-root._mobile) { display: grid; - grid-template-columns: min-content; + grid-template-columns: 1fr auto; align-items: center; padding: 1rem 0.5rem; gap: 0.5rem; @@ -46,17 +56,21 @@ import { TuiBadge } from '@taiga-ui/kit' td { padding: 0; } + + .chevron { + grid-area: 1 / 2 / 3 / 3; + } } `, changeDetection: ChangeDetectionStrategy.OnPush, - imports: [TuiBadge], + imports: [TuiBadge, TuiIcon], }) export class ServiceInterfaceItemComponent { - @Input({ required: true }) - info!: T.ServiceInterface + readonly info = input.required() + readonly link = input.required() get appearance(): string { - switch (this.info.type) { + switch (this.info().type) { case 'ui': return 'positive' case 'api': diff --git 
a/web/projects/ui/src/app/routes/portal/routes/services/components/interfaces.component.ts b/web/projects/ui/src/app/routes/portal/routes/services/components/interfaces.component.ts index a58915834..2fe9d4ade 100644 --- a/web/projects/ui/src/app/routes/portal/routes/services/components/interfaces.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/services/components/interfaces.component.ts @@ -22,6 +22,7 @@ import { PlaceholderComponent } from '../../../components/placeholder.component' {{ 'Name' | i18n }} {{ 'Type' | i18n }} {{ 'Description' | i18n }} + @@ -30,6 +31,7 @@ import { PlaceholderComponent } from '../../../components/placeholder.component' tabindex="-1" serviceInterface [info]="info" + [link]="info.routerLink" [routerLink]="info.routerLink" > @@ -53,10 +55,10 @@ import { PlaceholderComponent } from '../../../components/placeholder.component' changeDetection: ChangeDetectionStrategy.OnPush, imports: [ ServiceInterfaceItemComponent, + RouterLink, TuiTable, i18nPipe, PlaceholderComponent, - RouterLink, ], }) export class ServiceInterfacesComponent { diff --git a/web/projects/ui/src/app/routes/portal/routes/services/components/task.component.ts b/web/projects/ui/src/app/routes/portal/routes/services/components/task.component.ts index 5f9f91767..975f4de73 100644 --- a/web/projects/ui/src/app/routes/portal/routes/services/components/task.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/services/components/task.component.ts @@ -19,7 +19,12 @@ import { ServiceTasksComponent } from 'src/app/routes/portal/routes/services/com import { ActionService } from 'src/app/services/action.service' import { ApiService } from 'src/app/services/api/embassy-api.service' import { PackageDataEntry } from 'src/app/services/patch-db/data-model' -import { getInstalledBaseStatus } from 'src/app/services/pkg-status-rendering.service' +import { + ALLOWED_STATUSES, + getInstalledBaseStatus, + INACTIVE_STATUSES, + renderPkgStatus, +} from 
'src/app/services/pkg-status-rendering.service' import { getManifest } from 'src/app/utils/get-package-data' @Component({ @@ -49,6 +54,9 @@ import { getManifest } from 'src/app/utils/get-package-data' {{ task().reason || ('No reason provided' | i18n) }} + @if (disabled()) { +
{{ disabled() }}
+ } @if (task().severity !== 'critical') { @@ -66,7 +74,7 @@ import { getManifest } from 'src/app/utils/get-package-data' tuiIconButton iconStart="@tui.play" appearance="primary-success" - [disabled]="!pkg()" + [disabled]="!!disabled()" (click)="handle()" > {{ 'Run' | i18n }} @@ -113,7 +121,9 @@ import { getManifest } from 'src/app/utils/get-package-data' } } `, - host: { '[style.opacity]': 'pkg() ? null : "var(--tui-disabled-opacity)"' }, + host: { + '[style.opacity]': '!disabled() ? null : "var(--tui-disabled-opacity)"', + }, changeDetection: ChangeDetectionStrategy.OnPush, imports: [TuiButton, TuiAvatar, i18nPipe, TuiFade], }) @@ -124,6 +134,7 @@ export class ServiceTaskComponent { private readonly errorService = inject(ErrorService) private readonly loader = inject(LoadingService) private readonly tasks = inject(ServiceTasksComponent) + private readonly i18n = inject(i18nPipe) readonly task = input.required() readonly services = input.required>() @@ -135,6 +146,28 @@ export class ServiceTaskComponent { () => this.tasks.pkg().currentDependencies[this.task().packageId], ) + readonly disabled = computed(() => { + const pkg = this.pkg() + if (!pkg) return this.i18n.transform('Not installed')! + + const action = pkg.actions[this.task().actionId] + if (!action) return this.i18n.transform('Action not found')! + + const status = renderPkgStatus(pkg).primary + + if (INACTIVE_STATUSES.includes(status)) return status as string + + if (!ALLOWED_STATUSES[action.allowedStatuses].has(status)) { + return `${this.i18n.transform('Action can only be executed when service is')} ${this.i18n.transform(action.allowedStatuses === 'only-running' ? 
'Running' : 'Stopped')?.toLowerCase()}` + } + + if (typeof action.visibility === 'object') { + return action.visibility.disabled + } + + return false + }) + async dismiss() { const { packageId, replayId } = this.task() @@ -144,7 +177,7 @@ export class ServiceTaskComponent { .subscribe(async () => { const loader = this.loader.open().subscribe() try { - await this.api.clearTask({ packageId, replayId }) + await this.api.clearTask({ packageId, replayId, force: false }) } catch (e: any) { this.errorService.handleError(e) } finally { @@ -154,20 +187,21 @@ export class ServiceTaskComponent { } async handle() { + const task = this.task() const title = this.title() const pkg = this.pkg() - const metadata = pkg?.actions[this.task().actionId] + const metadata = pkg?.actions[task.actionId] if (title && pkg && metadata) { this.actionService.present({ pkgInfo: { - id: this.task().packageId, + id: task.packageId, title, status: getInstalledBaseStatus(pkg.statusInfo), icon: pkg.icon, }, - actionInfo: { id: this.task().actionId, metadata }, - requestInfo: this.task(), + actionInfo: { id: task.actionId, metadata }, + prefill: task.input?.value, }) } } diff --git a/web/projects/ui/src/app/routes/portal/routes/services/modals/action-confirm.component.ts b/web/projects/ui/src/app/routes/portal/routes/services/modals/action-confirm.component.ts new file mode 100644 index 000000000..473e205f4 --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/routes/services/modals/action-confirm.component.ts @@ -0,0 +1,59 @@ +import { ChangeDetectionStrategy, Component } from '@angular/core' +import { i18nPipe } from '@start9labs/shared' +import { TuiButton, TuiDialogContext, TuiNotification } from '@taiga-ui/core' +import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' +import { PackageActionData } from './action-input.component' + +@Component({ + template: ` +
+ +

{{ pkgInfo.title }}

+
+ +
+
+
+ + +
+ `, + styles: ` + .service-title { + display: inline-flex; + align-items: center; + margin-bottom: 1.5rem; + + img { + height: 1.25rem; + margin-right: 0.25rem; + border-radius: 100%; + } + + h4 { + margin: 0; + } + } + + footer { + margin-top: 1.5rem; + } + `, + changeDetection: ChangeDetectionStrategy.OnPush, + imports: [TuiButton, TuiNotification, i18nPipe], +}) +export class ActionConfirmModal { + readonly context = + injectContext>() + + readonly pkgInfo = this.context.data.pkgInfo + readonly warning = this.context.data.actionInfo.metadata.warning +} + +export const ACTION_CONFIRM_MODAL = new PolymorpheusComponent( + ActionConfirmModal, +) diff --git a/web/projects/ui/src/app/routes/portal/routes/services/modals/action-input.component.ts b/web/projects/ui/src/app/routes/portal/routes/services/modals/action-input.component.ts index 39362f5d2..e20b60e4a 100644 --- a/web/projects/ui/src/app/routes/portal/routes/services/modals/action-input.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/services/modals/action-input.component.ts @@ -23,7 +23,6 @@ import { FormComponent, } from 'src/app/routes/portal/components/form.component' import { InvalidService } from 'src/app/routes/portal/components/form/containers/control.directive' -import { TaskInfoComponent } from 'src/app/routes/portal/modals/config-dep.component' import { ActionService } from 'src/app/services/action.service' import { ApiService } from 'src/app/services/api/embassy-api.service' import { DataModel } from 'src/app/services/patch-db/data-model' @@ -41,7 +40,7 @@ export type PackageActionData = { id: string metadata: T.ActionMetadata } - requestInfo?: T.Task + prefill?: Record } @Component({ @@ -62,13 +61,6 @@ export type PackageActionData = { } - @if (requestInfo) { - - } - - {{ 'Reset defaults' | i18n }} + {{ 'Reset' | i18n }} } @else if (!error()) { @@ -109,14 +101,7 @@ export type PackageActionData = { } } `, - imports: [ - TuiNotification, - TuiLoader, - TuiButton, - 
TaskInfoComponent, - FormComponent, - i18nPipe, - ], + imports: [TuiNotification, TuiLoader, TuiButton, FormComponent, i18nPipe], providers: [InvalidService], }) export class ActionInputModal { @@ -131,7 +116,7 @@ export class ActionInputModal { readonly actionId = this.context.data.actionInfo.id readonly warning = this.context.data.actionInfo.metadata.warning readonly pkgInfo = this.context.data.pkgInfo - readonly requestInfo = this.context.data.requestInfo + readonly prefill = this.context.data.prefill eventId: string | null = null buttons: ActionButton[] = [ @@ -147,6 +132,7 @@ export class ActionInputModal { this.api.getActionInput({ packageId: this.pkgInfo.id, actionId: this.actionId, + prefill: this.prefill ?? null, }), ).pipe( map(res => { @@ -154,18 +140,20 @@ export class ActionInputModal { const originalValue = res.value || {} this.eventId = res.eventId + const operations = this.prefill + ? compare( + JSON.parse(JSON.stringify(originalValue)), + utils.deepMerge( + JSON.parse(JSON.stringify(originalValue)), + this.prefill, + ) as object, + ) + : null + return { spec: res.spec, originalValue, - operations: this.requestInfo?.input - ? 
compare( - JSON.parse(JSON.stringify(originalValue)), - utils.deepMerge( - JSON.parse(JSON.stringify(originalValue)), - this.requestInfo.input.value, - ) as object, - ) - : null, + operations, } }), catchError(e => { diff --git a/web/projects/ui/src/app/routes/portal/routes/services/modals/action-success/types.ts b/web/projects/ui/src/app/routes/portal/routes/services/modals/action-success/types.ts index efc515195..f8fa355e2 100644 --- a/web/projects/ui/src/app/routes/portal/routes/services/modals/action-success/types.ts +++ b/web/projects/ui/src/app/routes/portal/routes/services/modals/action-success/types.ts @@ -1,6 +1,6 @@ -import { RR } from 'src/app/services/api/api.types' +import { ActionRes } from 'src/app/services/api/api.types' -type ActionResponse = NonNullable +type ActionResponse = NonNullable type ActionResult = NonNullable export type ActionResponseWithResult = ActionResponse & { result: ActionResult } export type SingleResult = ActionResult & { type: 'single' } diff --git a/web/projects/ui/src/app/routes/portal/routes/services/routes/about.component.ts b/web/projects/ui/src/app/routes/portal/routes/services/routes/about.component.ts index 3d70470c1..f7dd4b174 100644 --- a/web/projects/ui/src/app/routes/portal/routes/services/routes/about.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/services/routes/about.component.ts @@ -116,6 +116,23 @@ export default class ServiceAboutRoute { }, ], }, + { + header: 'Links', + items: [ + ...manifest.docsUrls.map(docsUrl => ({ + name: 'Documentation', + value: docsUrl, + })), + { + name: 'Marketing', + value: manifest.marketingUrl || NOT_PROVIDED, + }, + { + name: 'Donations', + value: manifest.donationUrl || NOT_PROVIDED, + }, + ], + }, { header: 'Source Code', items: [ @@ -125,28 +142,7 @@ export default class ServiceAboutRoute { }, { name: 'StartOS package', - value: manifest.wrapperRepo, - }, - ], - }, - { - header: 'Links', - items: [ - { - name: 'Documentation', - value: manifest.docsUrl || 
NOT_PROVIDED, - }, - { - name: 'Support', - value: manifest.supportSite || NOT_PROVIDED, - }, - { - name: 'Marketing', - value: manifest.marketingSite || NOT_PROVIDED, - }, - { - name: 'Donations', - value: manifest.donationUrl || NOT_PROVIDED, + value: manifest.packageRepo, }, ], }, diff --git a/web/projects/ui/src/app/routes/portal/routes/services/routes/actions.component.ts b/web/projects/ui/src/app/routes/portal/routes/services/routes/actions.component.ts index 321423718..6b7316137 100644 --- a/web/projects/ui/src/app/routes/portal/routes/services/routes/actions.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/services/routes/actions.component.ts @@ -6,43 +6,31 @@ import { inject, } from '@angular/core' import { toSignal } from '@angular/core/rxjs-interop' -import { getPkgId, i18nPipe } from '@start9labs/shared' -import { T } from '@start9labs/start-sdk' +import { + ErrorService, + getPkgId, + i18nPipe, + LoadingService, +} from '@start9labs/shared' +import { ISB, T } from '@start9labs/start-sdk' import { TuiCell } from '@taiga-ui/layout' import { PatchDB } from 'patch-db-client' -import { map } from 'rxjs' +import { firstValueFrom, map } from 'rxjs' import { ActionService } from 'src/app/services/action.service' +import { ApiService } from 'src/app/services/api/embassy-api.service' import { DataModel } from 'src/app/services/patch-db/data-model' import { StandardActionsService } from 'src/app/services/standard-actions.service' import { getManifest } from 'src/app/utils/get-package-data' import { ServiceActionComponent } from '../components/action.component' import { + ALLOWED_STATUSES, + INACTIVE_STATUSES, PrimaryStatus, renderPkgStatus, } from 'src/app/services/pkg-status-rendering.service' - -const INACTIVE: PrimaryStatus[] = [ - 'installing', - 'updating', - 'removing', - 'restoring', - 'backing-up', -] - -const ALLOWED_STATUSES: Record> = { - 'only-running': new Set(['running']), - 'only-stopped': new Set(['stopped']), - any: new Set([ - 
'running', - 'stopped', - 'restarting', - 'restoring', - 'stopping', - 'starting', - 'backing-up', - 'task-required', - ]), -} +import { FormDialogService } from 'src/app/services/form-dialog.service' +import { FormComponent } from 'src/app/routes/portal/components/form.component' +import { configBuilderToSpec } from 'src/app/utils/configBuilderToSpec' @Component({ template: ` @@ -65,6 +53,12 @@ const ALLOWED_STATUSES: Record> = {
StartOS
+

} @@ -92,8 +86,6 @@ import { MarketplacePkgSideload, validateS9pk } from './sideload.utils' ], }) export default class SideloadComponent { - readonly isTor = inject(ConfigService).accessType === 'tor' - file: File | null = null readonly package = signal(null) readonly error = signal(null) diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/authorities/authorities.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/authorities/authorities.component.ts index 0b56e05de..95e37e4f3 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/authorities/authorities.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/authorities/authorities.component.ts @@ -21,7 +21,7 @@ import { AuthoritiesTableComponent } from './table.component' tuiIconButton size="xs" docsLink - path="/user-manual/authorities.html" + path="/start-os/user-manual/trust-ca.html" appearance="icon" iconStart="@tui.external-link" > diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/authorities/table.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/authorities/table.component.ts index bf9d62dcb..1b60722fd 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/authorities/table.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/authorities/table.component.ts @@ -7,7 +7,7 @@ import { AuthorityService } from './authority.service' selector: 'authorities-table', template: ` - + @for (authority of authorityService.authorities(); track $index) { } diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backup.service.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backup.service.ts index 4e36dc59c..71ed62a7a 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backup.service.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backup.service.ts @@ -1,8 
+1,7 @@ import { inject, Injectable, signal } from '@angular/core' import { ErrorService, getErrorMessage } from '@start9labs/shared' -import { Version } from '@start9labs/start-sdk' +import { T, Version } from '@start9labs/start-sdk' import { - BackupTarget, CifsBackupTarget, DiskBackupTarget, } from 'src/app/services/api/api.types' @@ -61,13 +60,13 @@ export class BackupService { } } - hasAnyBackup({ startOs }: BackupTarget): boolean { + hasAnyBackup({ startOs }: T.BackupTarget): boolean { return Object.values(startOs).some( s => Version.parse(s.version).compare(Version.parse('0.3.6')) !== 'less', ) } - hasThisBackup({ startOs }: BackupTarget, id: string): boolean { + hasThisBackup({ startOs }: T.BackupTarget, id: string): boolean { const item = startOs[id] return ( diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backup.types.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backup.types.ts index 38d8ff65a..75de9104e 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backup.types.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backup.types.ts @@ -1,9 +1,8 @@ +import { T } from '@start9labs/start-sdk' import { TuiDialogContext } from '@taiga-ui/core' import { - BackupInfo, CifsBackupTarget, DiskBackupTarget, - PackageBackupInfo, } from 'src/app/services/api/api.types' import { MappedBackupTarget } from './backup.service' @@ -12,7 +11,7 @@ export type BackupContext = TuiDialogContext< MappedBackupTarget > -export interface RecoverOption extends PackageBackupInfo { +export interface RecoverOption extends T.PackageBackupInfo { id: string checked: boolean installed: boolean @@ -22,6 +21,6 @@ export interface RecoverOption extends PackageBackupInfo { export interface RecoverData { targetId: string serverId: string - backupInfo: BackupInfo + backupInfo: T.BackupInfo password: string } diff --git 
a/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backups.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backups.component.ts index 0f33d4f26..604cf9918 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backups.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/backups.component.ts @@ -66,7 +66,7 @@ import { BACKUP_RESTORE } from './restore.component' this.addTarget(value), + handler: (value: T.CifsAddParams) => this.addTarget(value), }, ], }, @@ -226,7 +226,7 @@ export class BackupNetworkComponent { buttons: [ { text: this.i18n.transform('Connect'), - handler: async (value: RR.AddBackupTargetReq) => { + handler: async (value: T.CifsAddParams) => { const loader = this.loader .open('Testing connectivity to shared folder') .subscribe() @@ -272,7 +272,7 @@ export class BackupNetworkComponent { }) } - private async addTarget(v: RR.AddBackupTargetReq): Promise { + private async addTarget(v: T.CifsAddParams): Promise { const loader = this.loader .open('Testing connectivity to shared folder') .subscribe() diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/recover.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/recover.component.ts index df937a1e1..c98464756 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/recover.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/backups/recover.component.ts @@ -148,8 +148,8 @@ export class BackupsRecoverComponent { async restore(options: RecoverOption[]): Promise { const ids = options.filter(({ checked }) => !!checked).map(({ id }) => id) - const { targetId, serverId, password } = this.context.data - const params = { ids, targetId, serverId, password } + const { targetId, password } = this.context.data + const params = { ids, targetId, password } const loader = 
this.loader.open('Initializing').subscribe() try { diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/dns/dns.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/dns/dns.component.ts index 58e8170e8..f8cbcfa09 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/dns/dns.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/dns/dns.component.ts @@ -47,7 +47,7 @@ const ipv6 = tuiIconButton size="xs" docsLink - path="/user-manual/dns.html" + path="/start-os/user-manual/dns.html" appearance="icon" iconStart="@tui.external-link" > @@ -184,7 +184,9 @@ export default class SystemDnsComponent { if ( Object.values(pkgs).some(p => - Object.values(p.hosts).some(h => h?.privateDomains.length), + Object.values(p.hosts).some( + h => Object.keys(h?.privateDomains || {}).length, + ), ) ) { Object.values(gateways) diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/email/email.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/email/email.component.ts index 8d6777beb..c354778c6 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/email/email.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/email/email.component.ts @@ -1,5 +1,10 @@ import { CommonModule } from '@angular/common' -import { ChangeDetectionStrategy, Component, inject } from '@angular/core' +import { + ChangeDetectionStrategy, + Component, + inject, + signal, +} from '@angular/core' import { FormsModule, ReactiveFormsModule } from '@angular/forms' import { RouterLink } from '@angular/router' import { @@ -10,11 +15,11 @@ import { i18nPipe, LoadingService, } from '@start9labs/shared' -import { inputSpec, IST } from '@start9labs/start-sdk' +import { inputSpec } from '@start9labs/start-sdk' import { TuiButton, TuiTextfield, TuiTitle } from '@taiga-ui/core' import { TuiHeader } from '@taiga-ui/layout' import { PatchDB } from 'patch-db-client' -import { 
switchMap, tap } from 'rxjs' +import { Subscription, switchMap, tap } from 'rxjs' import { FormGroupComponent } from 'src/app/routes/portal/components/form/containers/group.component' import { ApiService } from 'src/app/services/api/embassy-api.service' import { FormService } from 'src/app/services/form.service' @@ -22,6 +27,32 @@ import { DataModel } from 'src/app/services/patch-db/data-model' import { TitleDirective } from 'src/app/services/title.service' import { configBuilderToSpec } from 'src/app/utils/configBuilderToSpec' +const PROVIDER_HINTS: Record = { + gmail: + 'Requires an App Password. Enable 2FA in your Google account, then generate an App Password.', + ses: 'Use SMTP credentials (not IAM credentials). Update the host to match your SES region.', + sendgrid: + "Username is 'apikey' (literal). Password is your SendGrid API key.", + mailgun: 'Use SMTP credentials from your Mailgun domain settings.', + protonmail: + 'Requires a Proton for Business account. Use your Proton email as username.', +} + +function detectProviderKey(host: string | undefined): string { + if (!host) return 'other' + const providers: Record = { + 'smtp.gmail.com': 'gmail', + 'smtp.sendgrid.net': 'sendgrid', + 'smtp.mailgun.org': 'mailgun', + 'smtp.protonmail.ch': 'protonmail', + } + for (const [h, key] of Object.entries(providers)) { + if (host === h) return key + } + if (host.endsWith('.amazonaws.com')) return 'ses' + return 'other' +} + @Component({ template: ` @@ -40,7 +71,7 @@ import { configBuilderToSpec } from 'src/app/utils/configBuilderToSpec' tuiIconButton size="xs" docsLink - path="/user-manual/smtp.html" + path="/start-os/user-manual/smtp.html" appearance="icon" iconStart="@tui.external-link" > @@ -52,6 +83,9 @@ import { configBuilderToSpec } from 'src/app/utils/configBuilderToSpec' @if (spec | async; as resolved) { } + @if (providerHint()) { +

{{ providerHint() }}

+ }
@if (isSaved) {
- - + + } @empty { - diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/general.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/general.component.ts index aaff817d0..dc0a79110 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/general.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/general.component.ts @@ -7,7 +7,6 @@ import { } from '@angular/core' import { toSignal } from '@angular/core/rxjs-interop' import { FormsModule } from '@angular/forms' -import { Title } from '@angular/platform-browser' import { RouterLink } from '@angular/router' import { DialogService, @@ -24,14 +23,13 @@ import { LANGUAGE_TO_CODE, LoadingService, } from '@start9labs/shared' +import { WA_WINDOW } from '@ng-web-apis/common' import { TuiResponsiveDialogService } from '@taiga-ui/addon-mobile' import { TuiAnimated } from '@taiga-ui/cdk' import { TuiAppearance, TuiButton, - tuiFadeIn, TuiIcon, - tuiScaleIn, TuiTextfield, TuiTitle, } from '@taiga-ui/core' @@ -47,14 +45,14 @@ import { PolymorpheusComponent } from '@taiga-ui/polymorpheus' import { PatchDB } from 'patch-db-client' import { filter } from 'rxjs' import { ApiService } from 'src/app/services/api/embassy-api.service' -import { ConfigService } from 'src/app/services/config.service' import { OSService } from 'src/app/services/os.service' import { DataModel } from 'src/app/services/patch-db/data-model' import { TitleDirective } from 'src/app/services/title.service' -import { SnekDirective } from './snek.directive' +import { ABOUT } from 'src/app/routes/portal/components/header/about.component' +import { SnakeDirective } from './snake.directive' import { UPDATE } from './update.component' -import { SystemWipeComponent } from './wipe.component' import { KeyboardSelectComponent } from './keyboard-select.component' +import { ServerNameDialog } from './server-name.dialog' @Component({ template: ` @@ -66,17 +64,28 
@@ import { KeyboardSelectComponent } from './keyboard-select.component' @if (server(); as server) {
- + - {{ 'Software Update' | i18n }} - - {{ server.version }} + {{ 'About this server' | i18n }} + + {{ 'Version, Root CA, and more' | i18n }} + + + +
+
+ + + + {{ 'Software Update' | i18n }} @if (os.showUpdate$ | async) { {{ 'Update available' | i18n }} } - +
- + - {{ 'Browser tab title' | i18n }} + {{ 'Server Name' | i18n }} - {{ 'Customize the name appearing in your browser tab' | i18n }} + {{ server.name }} + {{ server.hostname }}.local - +
@@ -178,18 +190,6 @@ import { KeyboardSelectComponent } from './keyboard-select.component' }
-
- - - {{ 'Restart Tor' | i18n }} - - {{ 'Restart the Tor daemon on your server' | i18n }} - - - -
@if (count > 4) {
@@ -203,10 +203,10 @@ import { KeyboardSelectComponent } from './keyboard-select.component'
} Play Snake } `, @@ -215,7 +215,7 @@ import { KeyboardSelectComponent } from './keyboard-select.component' max-inline-size: 40rem; } - .snek { + .snake { width: 1rem; opacity: 0.2; cursor: pointer; @@ -270,29 +270,30 @@ import { KeyboardSelectComponent } from './keyboard-select.component' TuiDataListWrapper, TuiTextfield, FormsModule, - SnekDirective, + SnakeDirective, TuiBadge, TuiBadgeNotification, TuiAnimated, ], }) export default class SystemGeneralComponent { - private readonly title = inject(Title) private readonly dialogs = inject(TuiResponsiveDialogService) private readonly loader = inject(LoadingService) private readonly errorService = inject(ErrorService) private readonly patch = inject>(PatchDB) private readonly api = inject(ApiService) - private readonly isTor = inject(ConfigService).accessType === 'tor' private readonly dialog = inject(DialogService) private readonly i18n = inject(i18nPipe) private readonly injector = inject(INJECTOR) + private readonly win = inject(WA_WINDOW) - wipe = false count = 0 + about() { + this.dialog.openComponent(ABOUT, { label: 'About this server' }).subscribe() + } + readonly server = toSignal(this.patch.watch$('serverInfo')) - readonly name = toSignal(this.patch.watch$('ui', 'name')) readonly score = toSignal(this.patch.watch$('ui', 'snakeHighScore')) readonly os = inject(OSService) readonly i18nService = inject(i18nService) @@ -362,52 +363,80 @@ export default class SystemGeneralComponent { } } - onTitle() { - const sub = this.dialog - .openPrompt({ - label: 'Browser tab title', - data: { - label: 'Device Name', - message: - 'This value will be displayed as the title of your browser tab.', - placeholder: 'StartOS' as i18nKey, - required: false, - buttonText: 'Save', - initialValue: this.name(), - }, - }) - .subscribe(async name => { - const loader = this.loader.open('Saving').subscribe() - const title = `${name || 'StartOS'} — ${this.i18n.transform('System')}` + onName() { + const server = this.server() + if (!server) 
return - try { - await this.api.setDbValue(['name'], name || null) - this.title.setTitle(title) - } catch (e: any) { - this.errorService.handleError(e) - } finally { - loader.unsubscribe() - sub.unsubscribe() + this.dialog + .openComponent<{ name: string; hostname: string } | null>( + new PolymorpheusComponent(ServerNameDialog, this.injector), + { + label: 'Server Name', + size: 's', + data: { initialName: server.name }, + }, + ) + .pipe( + filter( + (result): result is { name: string; hostname: string } => + result !== null, + ), + ) + .subscribe(result => { + if (this.win.location.hostname.endsWith('.local')) { + this.confirmNameChange(result) + } else { + this.saveName(result) } }) } - onTorRestart() { - this.wipe = false + private confirmNameChange(result: { name: string; hostname: string }) { this.dialog .openConfirm({ - label: this.isTor ? 'Warning' : 'Confirm', + label: 'Warning', data: { - content: new PolymorpheusComponent( - SystemWipeComponent, - this.injector, - ), - yes: 'Restart', + content: + 'You are currently connected via your .local address. Changing the hostname will require you to switch to the new .local address.', + yes: 'Save', no: 'Cancel', }, }) .pipe(filter(Boolean)) - .subscribe(() => this.resetTor(this.wipe)) + .subscribe(() => this.saveName(result, true)) + } + + private async saveName( + { name, hostname }: { name: string; hostname: string }, + wasLocal = false, + ) { + const loader = this.loader.open('Saving').subscribe() + + try { + await this.api.setHostname({ name, hostname }) + + if (wasLocal) { + const { protocol, port } = this.win.location + const newUrl = `${protocol}//${hostname}.local${port ? 
':' + port : ''}` + + this.dialog + .openConfirm({ + label: 'Hostname Changed', + data: { + content: + `${this.i18n.transform('Your server is now reachable at')} ${hostname}.local` as i18nKey, + yes: 'Open new address', + no: 'Dismiss', + }, + }) + .pipe(filter(Boolean)) + .subscribe(() => this.win.open(newUrl, '_blank')) + } + } catch (e: any) { + this.errorService.handleError(e) + } finally { + loader.unsubscribe() + } } async onRepair() { @@ -532,19 +561,6 @@ export default class SystemGeneralComponent { .subscribe(() => this.restart()) } - private async resetTor(wipeState: boolean) { - const loader = this.loader.open().subscribe() - - try { - await this.api.resetTor({ wipeState, reason: 'User triggered' }) - this.dialog.openAlert('Tor restart in progress').subscribe() - } catch (e: any) { - this.errorService.handleError(e) - } finally { - loader.unsubscribe() - } - } - private update() { this.dialogs .open(UPDATE, { diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/server-name.dialog.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/server-name.dialog.ts new file mode 100644 index 000000000..a2f11f1a5 --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/server-name.dialog.ts @@ -0,0 +1,63 @@ +import { Component } from '@angular/core' +import { FormsModule } from '@angular/forms' +import { i18nPipe, normalizeHostname } from '@start9labs/shared' +import { TuiButton, TuiDialogContext, TuiTextfield } from '@taiga-ui/core' +import { injectContext } from '@taiga-ui/polymorpheus' + +@Component({ + template: ` + + + + + @if (name.trim()) { +

{{ normalizeHostname(name) }}.local

+ } +
+ + +
+ `, + styles: ` + .hostname-preview { + color: var(--tui-text-secondary); + font: var(--tui-font-text-s); + margin-top: 0.25rem; + } + + footer { + display: flex; + gap: 1rem; + margin-top: 1.5rem; + } + `, + imports: [FormsModule, TuiButton, TuiTextfield, i18nPipe], +}) +export class ServerNameDialog { + private readonly context = + injectContext< + TuiDialogContext< + { name: string; hostname: string } | null, + { initialName: string } + > + >() + + name = this.context.data.initialName + readonly normalizeHostname = normalizeHostname + + cancel() { + this.context.completeWith(null) + } + + confirm() { + const name = this.name.trim() + this.context.completeWith({ + name, + hostname: normalizeHostname(name), + }) + } +} diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snake.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snake.component.ts new file mode 100644 index 000000000..f820911c5 --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snake.component.ts @@ -0,0 +1,504 @@ +import { + afterNextRender, + ChangeDetectionStrategy, + Component, + DestroyRef, + ElementRef, + HostListener, + inject, + signal, + viewChild, +} from '@angular/core' +import { i18nPipe } from '@start9labs/shared' +import { TuiButton, TuiDialogContext } from '@taiga-ui/core' +import { injectContext } from '@taiga-ui/polymorpheus' + +type GameState = 'ready' | 'playing' | 'dead' + +interface Point { + x: number + y: number +} + +interface Snake { + cells: Point[] + dx: number + dy: number + maxCells: number +} + +type RGB = [number, number, number] + +const HEAD_COLOR: RGB = [47, 223, 117] // #2fdf75 +const TAIL_COLOR: RGB = [20, 90, 48] // #145a30 +const GRID_W = 40 +const GRID_H = 26 +const SPEED = 45 +const STARTING_LENGTH = 4 + +const SAAS_ICONS = [ + 'adobe', + 'amazon', + 'anthropic', + 'apple', + 'atlassian', + 'box', + 'cloudflare', + 'datadog', + 'discord', + 'dropbox', + 'github', + 
'gitlab', + 'godaddy', + 'google', + 'hubspot', + 'icloud', + 'lastpass', + 'meta', + 'microsoft', + 'mongodb', + 'netflix', + 'notion', + 'onepassword', + 'openai', + 'paypal', + 'salesforce', + 'shopify', + 'slack', + 'spotify', + 'squarespace', + 'square', + 'stripe', + 'twilio', + 'wix', + 'zoom', +].map(name => `assets/img/icons/saas/${name}.svg`) + +function lerpColor(from: RGB, to: RGB, t: number): string { + const r = Math.round(from[0] + (to[0] - from[0]) * t) + const g = Math.round(from[1] + (to[1] - from[1]) * t) + const b = Math.round(from[2] + (to[2] - from[2]) * t) + return `rgb(${r},${g},${b})` +} + +@Component({ + template: ` +
+ + @if (state() === 'ready') { +
+ {{ 'Press any key or tap to start' | i18n }} + ← ↑ ↓ → +
+ } + @if (state() === 'dead') { +
+ {{ 'Game Over' | i18n }} + {{ 'Score' | i18n }}: {{ score }} + + {{ 'Press any key or tap to play again' | i18n }} + +
+ } +
+
+ {{ 'Score' | i18n }}: {{ score }} + {{ 'High score' | i18n }}: {{ highScore }} + +
+ `, + styles: ` + :host { + display: flex; + flex-direction: column; + gap: 1rem; + } + + .game-container { + position: relative; + background: #111; + border-radius: 0.5rem; + display: flex; + justify-content: center; + } + + canvas { + display: block; + } + + .overlay { + position: absolute; + inset: 0; + display: flex; + flex-direction: column; + align-items: center; + justify-content: center; + gap: 0.75rem; + background: rgba(0, 0, 0, 0.7); + border-radius: 0.5rem; + font-size: 1.125rem; + color: #fff; + } + + .game-over { + font-size: 1.5rem; + } + + .arrows { + font-size: 1.5rem; + letter-spacing: 0.5rem; + opacity: 0.5; + } + + .hint { + opacity: 0.6; + font-size: 0.875rem; + } + + footer { + display: flex; + align-items: center; + justify-content: space-between; + } + `, + changeDetection: ChangeDetectionStrategy.OnPush, + imports: [TuiButton, i18nPipe], +}) +export class SnakeComponent { + private readonly destroyRef = inject(DestroyRef) + private readonly dialog = injectContext>() + private readonly canvasRef = viewChild>('game') + + readonly state = signal('ready') + + highScore: number = this.dialog.data + score = 0 + + private grid = NaN + private canvasW = 0 + private canvasH = 0 + private ctx!: CanvasRenderingContext2D + private images: HTMLImageElement[] = [] + private currentImage: HTMLImageElement | null = null + private animationId = 0 + private lastTime = 0 + private dead = false + + private snake!: Snake + private food: Point = { x: NaN, y: NaN } + private moveQueue: string[] = [] + + constructor() { + for (const src of SAAS_ICONS) { + const img = new Image() + img.src = src + this.images.push(img) + } + + afterNextRender(() => { + this.initCanvas() + this.snake = this.createSnake() + this.spawnFood() + this.drawFrame() + this.animationId = requestAnimationFrame(t => this.loop(t)) + }) + + this.destroyRef.onDestroy(() => { + cancelAnimationFrame(this.animationId) + }) + } + + dismiss() { + this.dialog.completeWith(this.highScore) + } + + 
@HostListener('document:keydown', ['$event']) + onKeydown(e: KeyboardEvent) { + if ( + e.key === 'ArrowUp' || + e.key === 'ArrowDown' || + e.key === 'ArrowLeft' || + e.key === 'ArrowRight' + ) { + e.preventDefault() + } + + const current = this.state() + + if (current === 'ready') { + this.state.set('playing') + this.lastTime = 0 + // Queue directional input so first keypress sets direction + if (e.key.startsWith('Arrow')) { + this.moveQueue.push(e.key) + } + return + } + + if (current === 'dead' && !this.dead) { + this.restart() + return + } + + if (current === 'playing') { + this.moveQueue.push(e.key) + } + } + + @HostListener('touchstart', ['$event']) + onTouchStart(e: TouchEvent) { + const current = this.state() + + if (current === 'ready') { + this.state.set('playing') + this.lastTime = 0 + return + } + + if (current === 'dead' && !this.dead) { + this.restart() + return + } + + this.touchStart = { + x: e.touches[0]?.clientX ?? 0, + y: e.touches[0]?.clientY ?? 0, + } + } + + @HostListener('touchmove', ['$event']) + onTouchMove(e: TouchEvent) { + if (!this.touchStart || this.state() !== 'playing') return + + const xUp = e.touches[0]?.clientX ?? 0 + const yUp = e.touches[0]?.clientY ?? 0 + const xDiff = this.touchStart.x - xUp + const yDiff = this.touchStart.y - yUp + + if (Math.abs(xDiff) > Math.abs(yDiff)) { + this.moveQueue.push(xDiff > 0 ? 'ArrowLeft' : 'ArrowRight') + } else { + this.moveQueue.push(yDiff > 0 ? 'ArrowUp' : 'ArrowDown') + } + + this.touchStart = null + } + + @HostListener('window:resize') + onResize() { + this.initCanvas() + this.drawFrame() + } + + private touchStart: Point | null = null + + private initCanvas() { + const canvas = this.canvasRef()?.nativeElement + if (!canvas) return + + this.ctx = canvas.getContext('2d')! + const container = canvas.parentElement! 
+ const dpr = window.devicePixelRatio || 1 + + // Size grid based on available width, cap so canvas height stays reasonable + const maxHeight = window.innerHeight * 0.55 + this.grid = Math.min( + Math.floor(container.clientWidth / GRID_W), + Math.floor(maxHeight / GRID_H), + ) + + this.canvasW = this.grid * GRID_W + this.canvasH = this.grid * GRID_H + + canvas.width = this.canvasW * dpr + canvas.height = this.canvasH * dpr + canvas.style.width = `${this.canvasW}px` + canvas.style.height = `${this.canvasH}px` + this.ctx.scale(dpr, dpr) + } + + private createSnake(): Snake { + return { + cells: [], + dx: this.grid, + dy: 0, + maxCells: STARTING_LENGTH, + } + } + + private getStartX(): number { + return this.grid * (Math.floor(GRID_W / 2) - STARTING_LENGTH) + } + + private getStartY(): number { + return this.grid * Math.floor(GRID_H / 2) + } + + private spawnFood() { + this.food = { + x: this.randomInt(0, GRID_W) * this.grid, + y: this.randomInt(0, GRID_H) * this.grid, + } + + const img = this.images[this.randomInt(0, this.images.length)]! + this.currentImage = img.complete && img.naturalWidth ? img : null + + if (!this.currentImage) { + img.onload = () => { + this.currentImage = img + this.drawFrame() + } + } + } + + private restart() { + this.score = 0 + this.snake = this.createSnake() + this.moveQueue = [] + this.spawnFood() + this.lastTime = 0 + this.state.set('playing') + } + + private loop(timestamp: number) { + this.animationId = requestAnimationFrame(t => this.loop(t)) + + if (this.state() !== 'playing') return + + if (this.lastTime && timestamp - this.lastTime < SPEED) return + this.lastTime = timestamp + + this.update() + this.drawFrame() + } + + private update() { + // Process next queued move + if (this.moveQueue.length) { + const move = this.moveQueue.shift()! 
+ if (move === 'ArrowLeft' && this.snake.dx === 0) { + this.snake.dx = -this.grid + this.snake.dy = 0 + } else if (move === 'ArrowUp' && this.snake.dy === 0) { + this.snake.dy = -this.grid + this.snake.dx = 0 + } else if (move === 'ArrowRight' && this.snake.dx === 0) { + this.snake.dx = this.grid + this.snake.dy = 0 + } else if (move === 'ArrowDown' && this.snake.dy === 0) { + this.snake.dy = this.grid + this.snake.dx = 0 + } + } + + // Determine new head position + const prev = this.snake.cells[0] + const newHead: Point = prev + ? { x: prev.x + this.snake.dx, y: prev.y + this.snake.dy } + : { + x: this.getStartX() + this.snake.dx, + y: this.getStartY() + this.snake.dy, + } + + this.snake.cells.unshift(newHead) + + // Trim tail + while (this.snake.cells.length > this.snake.maxCells) { + this.snake.cells.pop() + } + + // Wall collision + if ( + newHead.x < 0 || + newHead.y < 0 || + newHead.x >= this.canvasW || + newHead.y >= this.canvasH + ) { + this.onDeath() + return + } + + // Self collision + for (let i = 1; i < this.snake.cells.length; i++) { + const cell = this.snake.cells[i] + if (cell && newHead.x === cell.x && newHead.y === cell.y) { + this.onDeath() + return + } + } + + // Eat food + if (newHead.x === this.food.x && newHead.y === this.food.y) { + this.score++ + this.highScore = Math.max(this.score, this.highScore) + this.snake.maxCells++ + this.spawnFood() + } + } + + private onDeath() { + this.dead = true + this.state.set('dead') + // Brief delay before accepting restart input + setTimeout(() => { + this.dead = false + }, 300) + } + + private drawFrame() { + if (!this.ctx) return + + this.ctx.clearRect(0, 0, this.canvasW, this.canvasH) + this.drawFood() + this.drawSnake() + } + + private drawFood() { + if (!this.currentImage) return + this.ctx.drawImage( + this.currentImage, + this.food.x, + this.food.y, + this.grid, + this.grid, + ) + } + + private drawSnake() { + const { cells } = this.snake + if (cells.length === 0) { + // Draw initial position in 
bottom-left corner (out of overlay text) + const x = STARTING_LENGTH * this.grid + const y = this.canvasH ? this.canvasH - this.grid * 2 : this.getStartY() + for (let i = 0; i < STARTING_LENGTH; i++) { + const t = STARTING_LENGTH > 1 ? i / (STARTING_LENGTH - 1) : 0 + this.ctx.fillStyle = lerpColor(HEAD_COLOR, TAIL_COLOR, t) + const r = i === 0 ? this.grid * 0.35 : this.grid * 0.2 + const size = this.grid - 1 + this.ctx.beginPath() + this.ctx.roundRect(x - i * this.grid + 0.5, y + 0.5, size, size, r) + this.ctx.fill() + } + return + } + + // Draw tail-first so head renders on top + for (let i = cells.length - 1; i >= 0; i--) { + const cell = cells[i] + if (!cell) continue + const t = cells.length > 1 ? i / (cells.length - 1) : 0 + this.ctx.fillStyle = lerpColor(HEAD_COLOR, TAIL_COLOR, t) + const r = i === 0 ? this.grid * 0.35 : this.grid * 0.2 + const size = this.grid - 1 + this.ctx.beginPath() + this.ctx.roundRect(cell.x + 0.5, cell.y + 0.5, size, size, r) + this.ctx.fill() + } + } + + private randomInt(min: number, max: number): number { + return Math.floor(Math.random() * (max - min)) + min + } +} diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snek.directive.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snake.directive.ts similarity index 78% rename from web/projects/ui/src/app/routes/portal/routes/system/routes/general/snek.directive.ts rename to web/projects/ui/src/app/routes/portal/routes/system/routes/general/snake.directive.ts index fc5515b41..470dcd4da 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snek.directive.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snake.directive.ts @@ -8,30 +8,31 @@ import { import { PolymorpheusComponent } from '@taiga-ui/polymorpheus' import { filter } from 'rxjs' import { ApiService } from 'src/app/services/api/embassy-api.service' -import { SnekComponent } from './snek.component' +import { SnakeComponent 
} from './snake.component' @Directive({ - selector: 'img[snek]', + selector: 'img[snake]', }) -export class SnekDirective { +export class SnakeDirective { private readonly loader = inject(LoadingService) private readonly errorService = inject(ErrorService) private readonly api = inject(ApiService) private readonly dialog = inject(DialogService) @Input() - snek = 0 + snake = 0 @HostListener('click') async onClick() { this.dialog - .openComponent(new PolymorpheusComponent(SnekComponent), { + .openComponent(new PolymorpheusComponent(SnakeComponent), { label: 'Snake!' as i18nKey, + size: 'l', closeable: false, dismissible: false, - data: this.snek, + data: this.snake, }) - .pipe(filter(score => score > this.snek)) + .pipe(filter(score => score > this.snake)) .subscribe(async score => { const loader = this.loader.open('Saving high score').subscribe() diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snek.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snek.component.ts deleted file mode 100644 index bcd3280f7..000000000 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/snek.component.ts +++ /dev/null @@ -1,295 +0,0 @@ -import { - AfterViewInit, - Component, - HostListener, - inject, - OnDestroy, - DOCUMENT, -} from '@angular/core' -import { i18nPipe, pauseFor } from '@start9labs/shared' -import { TuiButton, TuiDialogContext } from '@taiga-ui/core' -import { injectContext } from '@taiga-ui/polymorpheus' - -@Component({ - template: ` -
- -
-
- {{ 'Score' | i18n }}: {{ score }} - {{ 'High score' | i18n }}: {{ highScore }} - -
- `, - styles: ` - .canvas-center { - min-height: 50vh; - padding-top: 20px; - display: flex; - align-items: center; - justify-content: center; - } - - .footer { - display: flex; - align-items: center; - justify-content: space-between; - padding-top: 32px; - } - `, - imports: [TuiButton, i18nPipe], -}) -export class SnekComponent implements AfterViewInit, OnDestroy { - private readonly document = inject(DOCUMENT) - private readonly dialog = injectContext>() - - highScore: number = this.dialog.data - score = 0 - - private readonly speed = 45 - private readonly width = 40 - private readonly height = 26 - private grid = NaN - - private readonly startingLength = 4 - - private xDown?: number - private yDown?: number - private canvas!: HTMLCanvasElement - private image!: HTMLImageElement - private context!: CanvasRenderingContext2D - - private snake: any - private bitcoin: { x: number; y: number } = { x: NaN, y: NaN } - - private moveQueue: String[] = [] - private destroyed = false - - dismiss() { - this.dialog.completeWith(this.highScore) - } - - @HostListener('document:keydown', ['$event']) - keyEvent(e: KeyboardEvent) { - this.moveQueue.push(e.key) - } - - @HostListener('touchstart', ['$event']) - touchStart(e: TouchEvent) { - this.handleTouchStart(e) - } - - @HostListener('touchmove', ['$event']) - touchMove(e: TouchEvent) { - this.handleTouchMove(e) - } - - @HostListener('window:resize') - sizeChange() { - this.init() - } - - ngOnDestroy() { - this.destroyed = true - } - - ngAfterViewInit() { - this.init() - - this.image = new Image() - this.image.onload = () => { - requestAnimationFrame(async () => await this.loop()) - } - this.image.src = '../../../../../../assets/img/icons/bitcoin.svg' - } - - init() { - this.canvas = this.document.querySelector('canvas#game')! - this.canvas.style.border = '1px solid #e0e0e0' - this.context = this.canvas.getContext('2d')! - const container = this.document.querySelector('.canvas-center')! 
- this.grid = Math.min( - Math.floor(container.clientWidth / this.width), - Math.floor(container.clientHeight / this.height), - ) - this.snake = { - x: this.grid * (Math.floor(this.width / 2) - this.startingLength), - y: this.grid * Math.floor(this.height / 2), - // snake velocity. moves one grid length every frame in either the x or y direction - dx: this.grid, - dy: 0, - // keep track of all grids the snake body occupies - cells: [], - // length of the snake. grows when eating an bitcoin - maxCells: this.startingLength, - } - this.bitcoin = { - x: this.getRandomInt(0, this.width) * this.grid, - y: this.getRandomInt(0, this.height) * this.grid, - } - - this.canvas.width = this.grid * this.width - this.canvas.height = this.grid * this.height - this.context.imageSmoothingEnabled = false - } - - getTouches(evt: TouchEvent) { - return evt.touches - } - - handleTouchStart(evt: TouchEvent) { - const firstTouch = this.getTouches(evt)[0] - this.xDown = firstTouch?.clientX - this.yDown = firstTouch?.clientY - } - - handleTouchMove(evt: TouchEvent) { - if (!this.xDown || !this.yDown) { - return - } - - var xUp = evt.touches[0]?.clientX || 0 - var yUp = evt.touches[0]?.clientY || 0 - - var xDiff = this.xDown - xUp - var yDiff = this.yDown - yUp - - if (Math.abs(xDiff) > Math.abs(yDiff)) { - /*most significant*/ - if (xDiff > 0) { - this.moveQueue.push('ArrowLeft') - } else { - this.moveQueue.push('ArrowRight') - } - } else { - if (yDiff > 0) { - this.moveQueue.push('ArrowUp') - } else { - this.moveQueue.push('ArrowDown') - } - } - /* reset values */ - this.xDown = undefined - this.yDown = undefined - } - - // game loop - async loop() { - if (this.destroyed) return - - await pauseFor(this.speed) - - requestAnimationFrame(async () => await this.loop()) - - this.context.clearRect(0, 0, this.canvas.width, this.canvas.height) - - // move snake by its velocity - this.snake.x += this.snake.dx - this.snake.y += this.snake.dy - - if (this.moveQueue.length) { - const move = 
this.moveQueue.shift() - // left arrow key - if (move === 'ArrowLeft' && this.snake.dx === 0) { - this.snake.dx = -this.grid - this.snake.dy = 0 - } - // up arrow key - else if (move === 'ArrowUp' && this.snake.dy === 0) { - this.snake.dy = -this.grid - this.snake.dx = 0 - } - // right arrow key - else if (move === 'ArrowRight' && this.snake.dx === 0) { - this.snake.dx = this.grid - this.snake.dy = 0 - } - // down arrow key - else if (move === 'ArrowDown' && this.snake.dy === 0) { - this.snake.dy = this.grid - this.snake.dx = 0 - } - } - - // edge death - if ( - this.snake.x < 0 || - this.snake.y < 0 || - this.snake.x >= this.canvas.width || - this.snake.y >= this.canvas.height - ) { - this.death() - } - - // keep track of where snake has been. front of the array is always the head - this.snake.cells.unshift({ x: this.snake.x, y: this.snake.y }) - - // remove cells as we move away from them - if (this.snake.cells.length > this.snake.maxCells) { - this.snake.cells.pop() - } - - // draw bitcoin - this.context.fillStyle = '#ff4961' - this.context.drawImage( - this.image, - this.bitcoin.x - 1, - this.bitcoin.y - 1, - this.grid + 2, - this.grid + 2, - ) - - // draw snake one cell at a time - this.context.fillStyle = '#2fdf75' - - const firstCell = this.snake.cells[0] - - for (let index = 0; index < this.snake.cells.length; index++) { - const cell = this.snake.cells[index] - - // drawing 1 px smaller than the grid creates a grid effect in the snake body so you can see how long it is - this.context.fillRect(cell.x, cell.y, this.grid - 1, this.grid - 1) - - // snake ate bitcoin - if (cell.x === this.bitcoin.x && cell.y === this.bitcoin.y) { - this.score++ - this.highScore = Math.max(this.score, this.highScore) - this.snake.maxCells++ - - this.bitcoin.x = this.getRandomInt(0, this.width) * this.grid - this.bitcoin.y = this.getRandomInt(0, this.height) * this.grid - } - - if (index > 0) { - // check collision with all cells after this one (modified bubble sort) - // snake 
occupies same space as a body part. reset game - if ( - firstCell.x === this.snake.cells[index].x && - firstCell.y === this.snake.cells[index].y - ) { - this.death() - } - } - } - } - - death() { - this.snake.x = - this.grid * (Math.floor(this.width / 2) - this.startingLength) - this.snake.y = this.grid * Math.floor(this.height / 2) - this.snake.cells = [] - this.snake.maxCells = this.startingLength - this.snake.dx = this.grid - this.snake.dy = 0 - - this.bitcoin.x = this.getRandomInt(0, 25) * this.grid - this.bitcoin.y = this.getRandomInt(0, 25) * this.grid - this.score = 0 - } - - getRandomInt(min: number, max: number) { - return Math.floor(Math.random() * (max - min)) + min - } -} diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/wipe.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/general/wipe.component.ts deleted file mode 100644 index 0456cbed3..000000000 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/general/wipe.component.ts +++ /dev/null @@ -1,36 +0,0 @@ -import { ChangeDetectionStrategy, Component, inject } from '@angular/core' -import { FormsModule } from '@angular/forms' -import { TuiLabel } from '@taiga-ui/core' -import { TuiCheckbox } from '@taiga-ui/kit' -import { ConfigService } from 'src/app/services/config.service' -import SystemGeneralComponent from './general.component' -import { i18nPipe } from '@start9labs/shared' - -@Component({ - template: ` - @if (isTor) { -

- {{ - 'You are currently connected over Tor. If you restart the Tor daemon, you will lose connectivity until it comes back online.' - | i18n - }} -

- } -

- {{ - 'Optionally wipe state to forcibly acquire new guard nodes. It is recommended to try without wiping state first.' - | i18n - }} -

- - `, - changeDetection: ChangeDetectionStrategy.OnPush, - imports: [TuiLabel, FormsModule, TuiCheckbox, i18nPipe], -}) -export class SystemWipeComponent { - readonly isTor = inject(ConfigService).accessType === 'tor' - readonly component = inject(SystemGeneralComponent) -} diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/sessions/sessions.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/sessions/sessions.component.ts index 35977c2c4..7ff4d0026 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/sessions/sessions.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/sessions/sessions.component.ts @@ -7,9 +7,9 @@ import { } from '@angular/core' import { RouterLink } from '@angular/router' import { ErrorService, i18nPipe, LoadingService } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' import { TuiButton } from '@taiga-ui/core' import { from, map, merge, Observable, Subject } from 'rxjs' -import { Session } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' import { TitleDirective } from 'src/app/services/title.service' import { SessionsTableComponent } from './table.component' @@ -72,7 +72,7 @@ export default class SystemSessionsComponent { readonly current$ = this.sessions$.pipe( map(s => { - const current = s.sessions[s.current] + const current = s.current ? s.sessions[s.current] : undefined return current ? 
[current] : [] }), @@ -115,6 +115,6 @@ export default class SystemSessionsComponent { } } -interface SessionWithId extends Session { +interface SessionWithId extends T.Session { id: string } diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/sessions/table.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/sessions/table.component.ts index 32f467eb9..ed7eb92bf 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/sessions/table.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/sessions/table.component.ts @@ -11,7 +11,7 @@ import { FormsModule } from '@angular/forms' import { TuiIcon } from '@taiga-ui/core' import { TuiCheckbox, TuiFade, TuiSkeleton } from '@taiga-ui/kit' import { TableComponent } from 'src/app/routes/portal/components/table.component' -import { Session } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' import { PlatformInfoPipe } from './platform-info.pipe' import { i18nPipe } from '@start9labs/shared' @@ -165,11 +165,11 @@ import { i18nPipe } from '@start9labs/shared' i18nPipe, ], }) -export class SessionsTableComponent implements OnChanges { - readonly sessions = input(null) +export class SessionsTableComponent implements OnChanges { + readonly sessions = input(null) readonly single = input(false) - readonly selected = signal([]) + readonly selected = signal([]) readonly all = computed( () => !!this.selected()?.length && @@ -180,7 +180,7 @@ export class SessionsTableComponent implements OnChanges { this.selected.set([]) } - onToggle(session: T) { + onToggle(session: S) { if (this.selected().includes(session)) { this.selected.update(selected => selected.filter(s => s !== session)) } else { diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/ssh/ssh.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/ssh/ssh.component.ts index 853258973..e76e0e92f 100644 --- 
a/web/projects/ui/src/app/routes/portal/routes/system/routes/ssh/ssh.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/ssh/ssh.component.ts @@ -13,11 +13,10 @@ import { i18nPipe, LoadingService, } from '@start9labs/shared' -import { ISB } from '@start9labs/start-sdk' +import { ISB, T } from '@start9labs/start-sdk' import { TuiButton, TuiHint } from '@taiga-ui/core' import { filter, from, merge, Subject } from 'rxjs' import { FormComponent } from 'src/app/routes/portal/components/form.component' -import { SSHKey } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' import { FormDialogService } from 'src/app/services/form-dialog.service' import { TitleDirective } from 'src/app/services/title.service' @@ -40,7 +39,7 @@ import { SSHTableComponent } from './table.component' tuiIconButton size="xs" docsLink - path="/user-manual/ssh.html" + path="/start-os/user-manual/ssh.html" appearance="icon" iconStart="@tui.external-link" > @@ -101,13 +100,13 @@ export default class SystemSSHComponent { private readonly i18n = inject(i18nPipe) private readonly dialogs = inject(DialogService) - private readonly local$ = new Subject() + private readonly local$ = new Subject() readonly keys$ = merge(from(this.api.getSshKeys({})), this.local$) - protected tableKeys = viewChild>('table') + protected tableKeys = viewChild>('table') - async add(all: readonly SSHKey[]) { + async add(all: readonly T.SshKeyResponse[]) { const spec = ISB.InputSpec.of({ key: ISB.Value.text({ name: this.i18n.transform('Public Key'), @@ -150,7 +149,7 @@ export default class SystemSSHComponent { }) } - remove(all: readonly SSHKey[]) { + remove(all: readonly T.SshKeyResponse[]) { this.dialogs .openConfirm({ label: 'Are you sure?', size: 's' }) .pipe(filter(Boolean)) diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/ssh/table.component.ts 
b/web/projects/ui/src/app/routes/portal/routes/system/routes/ssh/table.component.ts index b6723c245..d7c09ed2d 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/ssh/table.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/ssh/table.component.ts @@ -11,7 +11,7 @@ import { FormsModule } from '@angular/forms' import { i18nPipe } from '@start9labs/shared' import { TuiCheckbox, TuiFade, TuiSkeleton } from '@taiga-ui/kit' import { TableComponent } from 'src/app/routes/portal/components/table.component' -import { SSHKey } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' @Component({ selector: '[keys]', @@ -151,10 +151,12 @@ import { SSHKey } from 'src/app/services/api/api.types' i18nPipe, ], }) -export class SSHTableComponent implements OnChanges { - readonly keys = input(null) +export class SSHTableComponent< + K extends T.SshKeyResponse, +> implements OnChanges { + readonly keys = input(null) - readonly selected = signal([]) + readonly selected = signal([]) readonly all = computed( () => !!this.selected()?.length && @@ -165,7 +167,7 @@ export class SSHTableComponent implements OnChanges { this.selected.set([]) } - onToggle(key: T) { + onToggle(key: K) { if (this.selected().includes(key)) { this.selected.update(selected => selected.filter(s => s !== key)) } else { diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/startos-ui/startos-ui.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/startos-ui/startos-ui.component.ts index 524eb92dc..6e46d5e20 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/startos-ui/startos-ui.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/startos-ui/startos-ui.component.ts @@ -12,10 +12,7 @@ import { TuiButton, TuiTitle } from '@taiga-ui/core' import { TuiHeader } from '@taiga-ui/layout' import { PatchDB } from 'patch-db-client' import { InterfaceComponent } from 
'src/app/routes/portal/components/interfaces/interface.component' -import { - getPublicDomains, - InterfaceService, -} from 'src/app/routes/portal/components/interfaces/interface.service' +import { InterfaceService } from 'src/app/routes/portal/components/interfaces/interface.service' import { GatewayService } from 'src/app/services/gateway.service' import { DataModel } from 'src/app/services/patch-db/data-model' import { TitleDirective } from 'src/app/services/title.service' @@ -74,35 +71,32 @@ export default class StartOsUiComponent { }, } + private readonly patch = inject>(PatchDB) + readonly network = toSignal( - inject>(PatchDB).watch$('serverInfo', 'network'), + this.patch.watch$('serverInfo', 'network'), ) + readonly allPackageData = toSignal(this.patch.watch$('packageData')) + readonly ui = computed(() => { const network = this.network() const gateways = this.gatewayService.gateways() if (!network || !gateways) return - const binding = network.host.bindings['80'] - return { ...this.iface, - addresses: this.interfaceService.getAddresses( + gatewayGroups: this.interfaceService.getGatewayGroups( this.iface, network.host, gateways, ), - gateways: gateways.map(g => ({ - enabled: - (g.public - ? binding?.net.publicEnabled.includes(g.id) - : !binding?.net.privateDisabled.includes(g.id)) ?? 
false, - ...g, - })), - torDomains: network.host.onions, - publicDomains: getPublicDomains(network.host.publicDomains, gateways), - privateDomains: network.host.privateDomains, + pluginGroups: this.interfaceService.getPluginGroups( + this.iface, + network.host, + this.allPackageData(), + ), addSsl: true, } }) diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/wifi/utils.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/wifi/utils.ts index 8f3fadd2e..7ada0635f 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/wifi/utils.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/wifi/utils.ts @@ -1,12 +1,11 @@ -import { AvailableWifi } from 'src/app/services/api/api.types' -import { RR } from 'src/app/services/api/api.types' +import { T } from '@start9labs/start-sdk' export interface WiFiForm { ssid: string password: string } -export interface Wifi extends AvailableWifi { +export interface Wifi extends T.WifiListOut { readonly connected?: boolean } @@ -15,7 +14,7 @@ export interface WifiData { available: readonly Wifi[] } -export function parseWifi(res: RR.GetWifiRes): WifiData { +export function parseWifi(res: T.WifiListInfo): WifiData { return { available: res.availableWifi, known: Object.entries(res.ssids).map(([ssid, strength]) => ({ diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/wifi/wifi.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/wifi/wifi.component.ts index 056bdbedd..d9409d96b 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/wifi/wifi.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/wifi/wifi.component.ts @@ -54,7 +54,7 @@ import { wifiSpec } from './wifi.const' tuiIconButton size="xs" docsLink - path="/user-manual/wifi.html" + path="/start-os/user-manual/wifi.html" appearance="icon" iconStart="@tui.external-link" > @@ -148,7 +148,7 @@ export default class SystemWifiComponent { 
.subscribe() try { - await this.api.enableWifi({ enable }) + await this.api.enableWifi({ enabled: enable }) } catch (e: any) { this.errorService.handleError(e) } finally { @@ -187,9 +187,8 @@ export default class SystemWifiComponent { await this.api.addWifi({ ssid, password, - priority: 0, - connect: true, }) + await this.api.connectWifi({ ssid }) } else { await this.api.connectWifi({ ssid }) } @@ -263,8 +262,6 @@ export default class SystemWifiComponent { await this.api.addWifi({ ssid, password, - priority: 0, - connect: false, }) wifi.known = wifi.known.concat({ ssid, diff --git a/web/projects/ui/src/app/routes/portal/routes/updates/item.component.ts b/web/projects/ui/src/app/routes/portal/routes/updates/item.component.ts index 0a87d2faf..09c82580b 100644 --- a/web/projects/ui/src/app/routes/portal/routes/updates/item.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/updates/item.component.ts @@ -183,6 +183,7 @@ import UpdatesComponent from './updates.component' &:last-child { white-space: nowrap; + text-align: right; } &[colspan]:only-child { diff --git a/web/projects/ui/src/app/services/action.service.ts b/web/projects/ui/src/app/services/action.service.ts index f5b4b6340..c4e19e9f5 100644 --- a/web/projects/ui/src/app/services/action.service.ts +++ b/web/projects/ui/src/app/services/action.service.ts @@ -1,12 +1,13 @@ import { inject, Injectable } from '@angular/core' import { DialogService, - ErrorService, + getErrorMessage, i18nKey, LoadingService, } from '@start9labs/shared' import { PolymorpheusComponent } from '@taiga-ui/polymorpheus' import { filter } from 'rxjs' +import { ACTION_CONFIRM_MODAL } from 'src/app/routes/portal/routes/services/modals/action-confirm.component' import { ActionInputModal, PackageActionData, @@ -21,7 +22,6 @@ import { FormDialogService } from 'src/app/services/form-dialog.service' export class ActionService { private readonly api = inject(ApiService) private readonly dialog = inject(DialogService) - private readonly 
errorService = inject(ErrorService) private readonly loader = inject(LoadingService) private readonly formDialog = inject(FormDialogService) @@ -36,14 +36,10 @@ export class ActionService { } else { if (actionInfo.metadata.warning) { this.dialog - .openConfirm({ - label: 'Warning', + .openComponent(ACTION_CONFIRM_MODAL, { + label: actionInfo.metadata.name as i18nKey, size: 's', - data: { - no: 'Cancel', - yes: 'Run', - content: actionInfo.metadata.warning as i18nKey, - }, + data, }) .pipe(filter(Boolean)) .subscribe(() => this.execute(pkgInfo.id, null, actionInfo.id)) @@ -64,9 +60,9 @@ export class ActionService { try { const res = await this.api.runAction({ packageId, - eventId, actionId, input: input ?? null, + eventId, }) if (!res) return @@ -84,7 +80,9 @@ export class ActionService { .subscribe() } } catch (e: any) { - this.errorService.handleError(e) + this.dialog + .openAlert(getErrorMessage(e) as i18nKey, { label: 'Error' }) + .subscribe() } finally { loader.unsubscribe() } diff --git a/web/projects/ui/src/app/services/api/api.fixures.ts b/web/projects/ui/src/app/services/api/api.fixures.ts index 4a30d43f6..3168326f7 100644 --- a/web/projects/ui/src/app/services/api/api.fixures.ts +++ b/web/projects/ui/src/app/services/api/api.fixures.ts @@ -2,9 +2,8 @@ import { InstalledState, PackageDataEntry, } from 'src/app/services/patch-db/data-model' -import { RR, ServerMetrics, ServerNotifications } from './api.types' +import { ActionRes } from './api.types' import { BTC_ICON, LND_ICON, PROXY_ICON, REGISTRY_ICON } from './api-icons' -import { Log } from '@start9labs/shared' import { configBuilderToSpec } from 'src/app/utils/configBuilderToSpec' import { T, ISB, IST } from '@start9labs/start-sdk' import { GetPackagesRes } from '@start9labs/marketplace' @@ -30,7 +29,7 @@ export namespace Mock { shuttingDown: false, } - export const RegistryOSUpdate: RR.CheckOsUpdateRes = { + export const RegistryOSUpdate: T.OsVersionInfoMap = { '0.4.1': { headline: 'v0.4.1', 
releaseNotes: 'Testing some release notes', @@ -232,12 +231,11 @@ export namespace Mock { }, releaseNotes: 'Taproot, Schnorr, and more.', license: 'MIT', - wrapperRepo: 'https://github.com/start9labs/bitcoind-wrapper', + packageRepo: 'https://github.com/start9labs/bitcoind-wrapper', upstreamRepo: 'https://github.com/bitcoin/bitcoin', - supportSite: 'https://bitcoin.org', - marketingSite: 'https://bitcoin.org', + marketingUrl: 'https://bitcoin.org', donationUrl: 'https://start9.com', - docsUrl: 'https://docs.start9.com', + docsUrls: ['https://docs.start9.com'], alerts: { install: 'Bitcoin can take over a week to sync.', uninstall: @@ -264,6 +262,7 @@ export namespace Mock { ram: null, }, hardwareAcceleration: false, + plugins: [], } export const MockManifestLnd: T.Manifest = { @@ -280,12 +279,11 @@ export namespace Mock { }, releaseNotes: 'Dual funded channels!', license: 'MIT', - wrapperRepo: 'https://github.com/start9labs/lnd-wrapper', + packageRepo: 'https://github.com/start9labs/lnd-wrapper', upstreamRepo: 'https://github.com/lightningnetwork/lnd', - supportSite: 'https://lightning.engineering/', - marketingSite: 'https://lightning.engineering/', + marketingUrl: 'https://lightning.engineering/', donationUrl: null, - docsUrl: 'https://docs.start9.com', + docsUrls: ['https://docs.start9.com'], alerts: { install: null, uninstall: null, @@ -324,6 +322,54 @@ export namespace Mock { ram: null, }, hardwareAcceleration: false, + plugins: [], + } + + export const MockManifestTor: T.Manifest = { + id: 'tor', + title: 'Tor', + version: '0.4.8:0', + satisfies: [], + canMigrateTo: '!', + canMigrateFrom: '*', + gitHash: 'torhash1', + description: { + short: 'An anonymous overlay network.', + long: 'Tor provides anonymous communication by directing traffic through a free, worldwide overlay network.', + }, + releaseNotes: 'Bug fixes and stability improvements.', + license: 'BSD-3-Clause', + packageRepo: 'https://github.com/start9labs/tor-wrapper', + upstreamRepo: 
'https://gitlab.torproject.org/tpo/core/tor', + marketingUrl: 'https://www.torproject.org', + donationUrl: null, + docsUrls: ['https://docs.start9.com'], + alerts: { + install: null, + uninstall: null, + restore: null, + start: null, + stop: null, + }, + osVersion: '0.2.12', + sdkVersion: '0.4.0', + dependencies: {}, + images: { + main: { + source: 'packed', + arch: ['x86_64', 'aarch64'], + emulateMissingAs: 'aarch64', + nvidiaContainer: false, + }, + }, + volumes: ['main'], + hardwareRequirements: { + device: [], + arch: null, + ram: null, + }, + hardwareAcceleration: false, + plugins: ['url-v0'], } export const MockManifestBitcoinProxy: T.Manifest = { @@ -340,12 +386,11 @@ export namespace Mock { }, releaseNotes: 'Even better support for Bitcoin and wallets!', license: 'MIT', - wrapperRepo: 'https://github.com/start9labs/btc-rpc-proxy-wrapper', + packageRepo: 'https://github.com/start9labs/btc-rpc-proxy-wrapper', upstreamRepo: 'https://github.com/Kixunil/btc-rpc-proxy', - supportSite: '', - marketingSite: '', + marketingUrl: '', donationUrl: 'https://start9.com', - docsUrl: 'https://docs.start9.com', + docsUrls: ['https://docs.start9.com'], alerts: { install: 'Testing install alert', uninstall: null, @@ -377,6 +422,7 @@ export namespace Mock { ram: null, }, hardwareAcceleration: false, + plugins: [], } export const BitcoinDep: T.DependencyMetadata = { @@ -403,14 +449,13 @@ export namespace Mock { title: 'Bitcoin Core', description: mockDescription, license: 'mit', - wrapperRepo: 'https://github.com/start9labs/bitcoind-startos', + packageRepo: 'https://github.com/start9labs/bitcoind-startos', upstreamRepo: 'https://github.com/bitcoin/bitcoin', - supportSite: 'https://bitcoin.org', - marketingSite: 'https://bitcoin.org', - docsUrl: 'https://bitcoin.org', + marketingUrl: 'https://bitcoin.org', + docsUrls: ['https://bitcoin.org'], releaseNotes: 'Even better support for Bitcoin and wallets!', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: 
'0.4.0-beta.49', gitHash: 'fakehash', icon: BTC_ICON, sourceVersion: null, @@ -437,6 +482,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, '#knots:26.1.20240325:0': { title: 'Bitcoin Knots', @@ -445,14 +491,13 @@ export namespace Mock { long: 'Bitcoin Knots is a combined Bitcoin node and wallet. Not only is it easy to use, but it also ensures bitcoins you receive are both real bitcoins and really yours.', }, license: 'mit', - wrapperRepo: 'https://github.com/start9labs/bitcoinknots-startos', + packageRepo: 'https://github.com/start9labs/bitcoinknots-startos', upstreamRepo: 'https://github.com/bitcoinknots/bitcoin', - supportSite: 'https://bitcoinknots.org', - marketingSite: 'https://bitcoinknots.org', - docsUrl: 'https://bitcoinknots.org', + marketingUrl: 'https://bitcoinknots.org', + docsUrls: ['https://bitcoinknots.org'], releaseNotes: 'Even better support for Bitcoin and wallets!', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: '0.4.0-beta.49', gitHash: 'fakehash', icon: BTC_ICON, sourceVersion: null, @@ -479,6 +524,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, }, categories: ['bitcoin', 'featured'], @@ -497,14 +543,13 @@ export namespace Mock { title: 'Bitcoin Core', description: mockDescription, license: 'mit', - wrapperRepo: 'https://github.com/start9labs/bitcoind-startos', + packageRepo: 'https://github.com/start9labs/bitcoind-startos', upstreamRepo: 'https://github.com/bitcoin/bitcoin', - supportSite: 'https://bitcoin.org', - marketingSite: 'https://bitcoin.org', - docsUrl: 'https://bitcoin.org', + marketingUrl: 'https://bitcoin.org', + docsUrls: ['https://bitcoin.org'], releaseNotes: 'Even better support for Bitcoin and wallets!', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: '0.4.0-beta.49', gitHash: 'fakehash', icon: BTC_ICON, sourceVersion: null, @@ -531,6 +576,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, 
'#knots:26.1.20240325:0': { title: 'Bitcoin Knots', @@ -539,14 +585,13 @@ export namespace Mock { long: 'Bitcoin Knots is a combined Bitcoin node and wallet. Not only is it easy to use, but it also ensures bitcoins you receive are both real bitcoins and really yours.', }, license: 'mit', - wrapperRepo: 'https://github.com/start9labs/bitcoinknots-startos', + packageRepo: 'https://github.com/start9labs/bitcoinknots-startos', upstreamRepo: 'https://github.com/bitcoinknots/bitcoin', - supportSite: 'https://bitcoinknots.org', - marketingSite: 'https://bitcoinknots.org', - docsUrl: 'https://bitcoinknots.org', + marketingUrl: 'https://bitcoinknots.org', + docsUrls: ['https://bitcoinknots.org'], releaseNotes: 'Even better support for Bitcoin and wallets!', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: '0.4.0-beta.49', gitHash: 'fakehash', icon: BTC_ICON, sourceVersion: null, @@ -573,6 +618,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, }, categories: ['bitcoin', 'featured'], @@ -593,14 +639,13 @@ export namespace Mock { title: 'LND', description: mockDescription, license: 'mit', - wrapperRepo: 'https://github.com/start9labs/lnd-startos', + packageRepo: 'https://github.com/start9labs/lnd-startos', upstreamRepo: 'https://github.com/lightningnetwork/lnd', - supportSite: 'https://lightning.engineering/slack.html', - marketingSite: 'https://lightning.engineering/', - docsUrl: 'https://lightning.engineering/', + marketingUrl: 'https://lightning.engineering/', + docsUrls: ['https://lightning.engineering/'], releaseNotes: 'Upstream release to 0.17.5', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: '0.4.0-beta.49', gitHash: 'fakehash', icon: LND_ICON, sourceVersion: null, @@ -630,6 +675,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, }, categories: ['lightning'], @@ -648,14 +694,13 @@ export namespace Mock { title: 'LND', description: mockDescription, license: 'mit', - 
wrapperRepo: 'https://github.com/start9labs/lnd-startos', + packageRepo: 'https://github.com/start9labs/lnd-startos', upstreamRepo: 'https://github.com/lightningnetwork/lnd', - supportSite: 'https://lightning.engineering/slack.html', - marketingSite: 'https://lightning.engineering/', - docsUrl: 'https://lightning.engineering/', + marketingUrl: 'https://lightning.engineering/', + docsUrls: ['https://lightning.engineering/'], releaseNotes: 'Upstream release to 0.17.4', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: '0.4.0-beta.49', gitHash: 'fakehash', icon: LND_ICON, sourceVersion: null, @@ -685,6 +730,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, }, categories: ['lightning'], @@ -707,14 +753,13 @@ export namespace Mock { title: 'Bitcoin Core', description: mockDescription, license: 'mit', - wrapperRepo: 'https://github.com/start9labs/bitcoind-startos', + packageRepo: 'https://github.com/start9labs/bitcoind-startos', upstreamRepo: 'https://github.com/bitcoin/bitcoin', - supportSite: 'https://bitcoin.org', - marketingSite: 'https://bitcoin.org', - docsUrl: 'https://bitcoin.org', + marketingUrl: 'https://bitcoin.org', + docsUrls: ['https://bitcoin.org'], releaseNotes: 'Even better support for Bitcoin and wallets!', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: '0.4.0-beta.49', gitHash: 'fakehash', icon: BTC_ICON, sourceVersion: null, @@ -741,6 +786,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, '#knots:27.1.0:0': { title: 'Bitcoin Knots', @@ -749,14 +795,13 @@ export namespace Mock { long: 'Bitcoin Knots is a combined Bitcoin node and wallet. 
Not only is it easy to use, but it also ensures bitcoins you receive are both real bitcoins and really yours.', }, license: 'mit', - wrapperRepo: 'https://github.com/start9labs/bitcoinknots-startos', + packageRepo: 'https://github.com/start9labs/bitcoinknots-startos', upstreamRepo: 'https://github.com/bitcoinknots/bitcoin', - supportSite: 'https://bitcoinknots.org', - marketingSite: 'https://bitcoinknots.org', - docsUrl: 'https://bitcoinknots.org', + marketingUrl: 'https://bitcoinknots.org', + docsUrls: [], releaseNotes: 'Even better support for Bitcoin and wallets!', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: '0.4.0-beta.49', gitHash: 'fakehash', icon: BTC_ICON, sourceVersion: null, @@ -783,6 +828,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, }, categories: ['bitcoin', 'featured'], @@ -801,14 +847,13 @@ export namespace Mock { title: 'LND', description: mockDescription, license: 'mit', - wrapperRepo: 'https://github.com/start9labs/lnd-startos', + packageRepo: 'https://github.com/start9labs/lnd-startos', upstreamRepo: 'https://github.com/lightningnetwork/lnd', - supportSite: 'https://lightning.engineering/slack.html', - marketingSite: 'https://lightning.engineering/', - docsUrl: 'https://lightning.engineering/', + marketingUrl: 'https://lightning.engineering/', + docsUrls: [], releaseNotes: 'Upstream release and minor fixes.', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: '0.4.0-beta.49', gitHash: 'fakehash', icon: LND_ICON, sourceVersion: null, @@ -838,6 +883,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, }, categories: ['lightning'], @@ -856,14 +902,13 @@ export namespace Mock { title: 'Bitcoin Proxy', description: mockDescription, license: 'mit', - wrapperRepo: 'https://github.com/Start9Labs/btc-rpc-proxy-wrappers', + packageRepo: 'https://github.com/Start9Labs/btc-rpc-proxy-wrappers', upstreamRepo: 'https://github.com/Kixunil/btc-rpc-proxy', - 
supportSite: 'https://github.com/Kixunil/btc-rpc-proxy/issues', - docsUrl: 'https://github.com/Kixunil/btc-rpc-proxy', - marketingSite: '', + docsUrls: [], + marketingUrl: '', releaseNotes: 'Upstream release and minor fixes.', osVersion: '0.3.6', - sdkVersion: '0.4.0-beta.48', + sdkVersion: '0.4.0-beta.49', gitHash: 'fakehash', icon: PROXY_ICON, sourceVersion: null, @@ -892,6 +937,7 @@ export namespace Mock { ], ], hardwareAcceleration: false, + plugins: [], }, }, categories: ['bitcoin'], @@ -899,7 +945,7 @@ export namespace Mock { }, } - export const Notifications: ServerNotifications = [ + export const Notifications: T.NotificationWithId[] = [ { id: 1, packageId: null, @@ -974,7 +1020,7 @@ export namespace Mock { }, ] - export function getMetrics(): ServerMetrics { + export function getMetrics(): T.Metrics { return { general: { temperature: { @@ -1055,7 +1101,7 @@ export namespace Mock { } } - export const ServerLogs: Log[] = [ + export const ServerLogs: T.LogEntry[] = [ { timestamp: '2022-07-28T03:52:54.808769Z', message: '****** START *****', @@ -1079,7 +1125,7 @@ export namespace Mock { }, ] - export const Sessions: RR.GetSessionsRes = { + export const Sessions: T.SessionList = { current: 'b7b1a9cef4284f00af9e9dda6e676177', sessions: { '9513226517c54ddd8107d6d7b9d8aed7': { @@ -1101,7 +1147,7 @@ export namespace Mock { }, } - export const SshKeys: RR.GetSSHKeysRes = [ + export const SshKeys: T.SshKeyResponse[] = [ { createdAt: new Date().toISOString(), alg: 'ed25519', @@ -1116,14 +1162,14 @@ export namespace Mock { }, ] - export const SshKey: RR.AddSSHKeyRes = { + export const SshKey: T.SshKeyResponse = { createdAt: new Date().toISOString(), alg: 'ed25519', hostname: 'Lucy Key', fingerprint: '44:44:7e:78:61:b4:bf:g2:de:24:15:96:4e:d4:15:53', } - export const Wifi: RR.GetWifiRes = { + export const Wifi: T.WifiListInfo = { ethernet: true, ssids: { Goosers: 50, @@ -1150,7 +1196,7 @@ export namespace Mock { ], } - export const BackupTargets: RR.GetBackupTargetsRes 
= { + export const BackupTargets: { [id: string]: T.BackupTarget } = { hsbdjhasbasda: { type: 'cifs', hostname: 'smb://192.169.10.0', @@ -1195,6 +1241,7 @@ export namespace Mock { used: 100000000000, model: null, vendor: 'SSK', + guid: null, startOs: { '1234-5678-9876-5432': { hostname: 'adjective-noun', @@ -1338,7 +1385,7 @@ export namespace Mock { // }, // ] - export const BackupInfo: RR.GetBackupInfoRes = { + export const BackupInfo: T.BackupInfo = { version: '0.3.6', timestamp: new Date().toISOString(), packageBackups: { @@ -1357,7 +1404,7 @@ export namespace Mock { }, } - export const ActionResMessage: RR.ActionRes = { + export const ActionResMessage: ActionRes = { version: '1', title: 'New Password', message: @@ -1365,7 +1412,7 @@ export namespace Mock { result: null, } - export const ActionResSingle: RR.ActionRes = { + export const ActionResSingle: ActionRes = { version: '1', title: 'New Password', message: @@ -1379,7 +1426,7 @@ export namespace Mock { }, } - export const ActionResGroup: RR.ActionRes = { + export const ActionResGroup: ActionRes = { version: '1', title: 'Properties', message: @@ -1444,6 +1491,31 @@ export namespace Mock { }, } + export const getCreateOnionServiceSpec = async (): Promise => + configBuilderToSpec( + ISB.InputSpec.of({ + ssl: ISB.Value.toggle({ + name: 'SSL', + description: 'Enable HTTPS for this onion service', + default: true, + }), + privateKey: ISB.Value.text({ + name: 'Private Key', + description: + 'Optionally provide an existing ed25519 private key to reuse a .onion address. 
Leave blank to generate a new one.', + required: false, + default: null, + masked: true, + }), + urlPluginMetadata: ISB.Value.hidden<{ + packageId: string + interfaceId: string + hostId: string + internalPort: number + }>(), + }), + ) + export const getActionInputSpec = async (): Promise => configBuilderToSpec( ISB.InputSpec.of({ @@ -2126,8 +2198,50 @@ export namespace Mock { net: { assignedPort: 80, assignedSslPort: 443, - publicEnabled: [], - privateDisabled: [], + }, + addresses: { + enabled: [], + disabled: [], + available: [ + { + ssl: true, + public: false, + hostname: 'adjective-noun.local', + port: 1234, + metadata: { + kind: 'mdns', + gateways: ['eth0', 'wlan0'], + }, + }, + { + ssl: true, + public: false, + hostname: '192.168.10.11', + port: 1234, + metadata: { kind: 'ipv4', gateway: 'wlan0' }, + }, + { + ssl: true, + public: false, + hostname: '10.0.0.2', + port: 1234, + metadata: { kind: 'ipv4', gateway: 'wlan0' }, + }, + { + ssl: true, + public: false, + hostname: 'fe80:cd00:0000:0cde:1257:0000:211e:72cd', + port: 1234, + metadata: { kind: 'ipv6', gateway: 'eth0', scopeId: 2 }, + }, + { + ssl: true, + public: false, + hostname: 'fe80:cd00:0000:0cde:1257:0000:211e:1234', + port: 1234, + metadata: { kind: 'ipv6', gateway: 'wlan0', scopeId: 3 }, + }, + ], }, options: { addSsl: null, @@ -2137,88 +2251,8 @@ export namespace Mock { }, }, publicDomains: {}, - privateDomains: [], - onions: [], - hostnameInfo: { - 80: [ - { - kind: 'ip', - gateway: { id: 'eth0', name: 'Ethernet', public: false }, - public: false, - hostname: { - kind: 'local', - value: 'adjective-noun.local', - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'local', - value: 'adjective-noun.local', - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'ipv4', - value: 
'192.168.10.11', - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'ipv4', - value: '10.0.0.2', - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'eth0', name: 'Ethernet', public: false }, - public: false, - hostname: { - kind: 'ipv6', - value: '[fe80:cd00:0000:0cde:1257:0000:211e:72cd]', - scopeId: 2, - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'ipv6', - value: '[fe80:cd00:0000:0cde:1257:0000:211e:1234]', - scopeId: 3, - port: null, - sslPort: 1234, - }, - }, - { - kind: 'onion', - hostname: { - value: 'bitcoin-p2p.onion', - port: 80, - sslPort: 443, - }, - }, - ], - }, + privateDomains: {}, + portForwards: [], }, bcdefgh: { bindings: { @@ -2227,8 +2261,11 @@ export namespace Mock { net: { assignedPort: 8332, assignedSslPort: null, - publicEnabled: [], - privateDisabled: [], + }, + addresses: { + enabled: [], + disabled: [], + available: [], }, options: { addSsl: null, @@ -2238,11 +2275,8 @@ export namespace Mock { }, }, publicDomains: {}, - privateDomains: [], - onions: [], - hostnameInfo: { - 8332: [], - }, + privateDomains: {}, + portForwards: [], }, cdefghi: { bindings: { @@ -2251,8 +2285,11 @@ export namespace Mock { net: { assignedPort: 8333, assignedSslPort: null, - publicEnabled: [], - privateDisabled: [], + }, + addresses: { + enabled: [], + disabled: [], + available: [], }, options: { addSsl: null, @@ -2262,16 +2299,15 @@ export namespace Mock { }, }, publicDomains: {}, - privateDomains: [], - onions: [], - hostnameInfo: { - 8333: [], - }, + privateDomains: {}, + portForwards: [], }, }, storeExposedDependents: [], + outboundGateway: null, registry: 'https://registry.start9.com/', developerKey: 'developer-key', + plugin: { url: null }, tasks: { 'bitcoind-config': { task: { @@ -2338,8 +2374,10 @@ export 
namespace Mock { }, hosts: {}, storeExposedDependents: [], + outboundGateway: null, registry: 'https://registry.start9.com/', developerKey: 'developer-key', + plugin: { url: null }, tasks: {}, } @@ -2444,8 +2482,10 @@ export namespace Mock { }, hosts: {}, storeExposedDependents: [], + outboundGateway: null, registry: 'https://registry.start9.com/', developerKey: 'developer-key', + plugin: { url: null }, tasks: { config: { active: true, diff --git a/web/projects/ui/src/app/services/api/api.types.ts b/web/projects/ui/src/app/services/api/api.types.ts index 565bca8e4..1f179b20a 100644 --- a/web/projects/ui/src/app/services/api/api.types.ts +++ b/web/projects/ui/src/app/services/api/api.types.ts @@ -1,531 +1,103 @@ -import { Dump } from 'patch-db-client' -import { DataModel } from 'src/app/services/patch-db/data-model' -import { - FetchLogsReq, - FetchLogsRes, - FullKeyboard, - SetLanguageParams, - StartOSDiskInfo, -} from '@start9labs/shared' import { IST, T } from '@start9labs/start-sdk' import { WebSocketSubjectConfig } from 'rxjs/webSocket' -import { - GetPackageReq, - GetPackageRes, - GetPackagesReq, - GetPackagesRes, -} from '@start9labs/marketplace' +import { GetPackageReq, GetPackagesReq } from '@start9labs/marketplace' -export namespace RR { - // websocket +// websocket - export type WebsocketConfig = Omit, 'url'> +export type WebsocketConfig = Omit, 'url'> - // state +// state - export type EchoReq = { message: string } // server.echo - export type EchoRes = string +export type ServerState = 'initializing' | 'error' | 'running' - export type ServerState = 'initializing' | 'error' | 'running' +// diagnostic - // DB - - export type SubscribePatchReq = {} - export type SubscribePatchRes = { - dump: Dump - guid: string - } - - export type SetDBValueReq = { pointer: string; value: T } // db.put.ui - export type SetDBValueRes = null - - // auth - - export type LoginReq = { - password: string - ephemeral?: boolean - } // auth.login - unauthed - export type loginRes 
= null - - export type LogoutReq = {} // auth.logout - export type LogoutRes = null - - export type ResetPasswordReq = { - oldPassword: string - newPassword: string - } // auth.reset-password - export type ResetPasswordRes = null - - // diagnostic - - export type DiagnosticErrorRes = { - code: number - message: string - data: { details: string } - } - - // init - - export type InitFollowProgressRes = { - progress: T.FullProgress - guid: string - } - - // server - - export type GetSystemTimeReq = {} // server.time - export type GetSystemTimeRes = { - now: string - uptime: number // seconds - } - - export type GetServerLogsReq = FetchLogsReq // server.logs & server.kernel-logs & net.tor.logs - export type GetServerLogsRes = FetchLogsRes - - export type FollowServerLogsReq = { - limit?: number // (optional) default is 50. Ignored if cursor provided - boot?: number | string | null // (optional) number is offset (0: current, -1 prev, +1 first), string is a specific boot id, null is all. Default is undefined - cursor?: string // the last known log. 
Websocket will return all logs since this log - } // server.logs.follow & server.kernel-logs.follow & net.tor.follow-logs - export type FollowServerLogsRes = { - startCursor: string - guid: string - } - - export type FollowServerMetricsReq = {} // server.metrics.follow - export type FollowServerMetricsRes = { - guid: string - metrics: ServerMetrics - } - - export type UpdateServerReq = { registry: string; targetVersion: string } // server.update - export type UpdateServerRes = 'updating' | 'no-updates' - - export type RestartServerReq = {} // server.restart - export type RestartServerRes = null - - export type ShutdownServerReq = {} // server.shutdown - export type ShutdownServerRes = null - - export type DiskRepairReq = {} // server.disk.repair - export type DiskRepairRes = null - - export type SetDnsReq = { - servers: string[] | null - } // net.dns.set-static - export type SetDnsRes = null - - export type QueryDnsReq = { - fqdn: string - } // net.dns.query - export type QueryDnsRes = string | null - - export type ResetTorReq = { - wipeState: boolean - reason: string - } // net.tor.reset - export type ResetTorRes = null - - export type SetKeyboardReq = FullKeyboard // server.set-keyboard - export type SetKeyboardRes = null - - export type SetLanguageReq = SetLanguageParams // server.set-language - export type SetLanguageRes = null - - // smtp - - export type SetSMTPReq = T.SmtpValue // server.set-smtp - export type SetSMTPRes = null - - export type ClearSMTPReq = {} // server.clear-smtp - export type ClearSMTPRes = null - - export type TestSMTPReq = SetSMTPReq & { to: string } // server.test-smtp - export type TestSMTPRes = null - - // sessions - - export type GetSessionsReq = {} // sessions.list - export type GetSessionsRes = { - current: string - sessions: { [hash: string]: Session } - } - - export type KillSessionsReq = { ids: string[] } // sessions.kill - export type KillSessionsRes = null - - // notification - - export type GetNotificationsReq = { - before?: 
number - limit?: number - } // notification.list - export type GetNotificationsRes = ServerNotification[] - - export type DeleteNotificationsReq = { ids: number[] } // notification.remove - export type DeleteNotificationsRes = null - - export type MarkSeenNotificationReq = DeleteNotificationsReq // notification.mark-seen - export type MarkSeenNotificationRes = null - - export type MarkSeenAllNotificationsReq = { before: number } // notification.mark-seen-before - export type MarkSeenAllNotificationsRes = null - - export type MarkUnseenNotificationReq = DeleteNotificationsReq // notification.mark-unseen - export type MarkUnseenNotificationRes = null - - // wifi - - export type GetWifiReq = {} - export type GetWifiRes = { - ssids: { - [ssid: string]: number - } - connected: string | null - country: string | null - ethernet: boolean - availableWifi: AvailableWifi[] - } - - export type AddWifiReq = { - // wifi.add - ssid: string - password: string - priority: number - connect: boolean - } - export type AddWifiRes = null - - export type EnabledWifiReq = { enable: boolean } // wifi.set-enabled - export type EnabledWifiRes = null - - export type SetWifiCountryReq = { country: string } // wifi.country.set - export type SetWifiCountryRes = null - - export type ConnectWifiReq = { ssid: string } // wifi.connect - export type ConnectWifiRes = null - - export type DeleteWifiReq = { ssid: string } // wifi.remove - export type DeleteWifiRes = null - - // ssh - - export type GetSSHKeysReq = {} // ssh.list - export type GetSSHKeysRes = SSHKey[] - - export type AddSSHKeyReq = { key: string } // ssh.add - export type AddSSHKeyRes = SSHKey - - export type DeleteSSHKeyReq = { fingerprint: string } // ssh.remove - export type DeleteSSHKeyRes = null - - // backup - - export type GetBackupTargetsReq = {} // backup.target.list - export type GetBackupTargetsRes = { [id: string]: BackupTarget } - - export type AddBackupTargetReq = { - // backup.target.cifs.add - hostname: string - path: 
string - username: string - password: string | null - } - export type AddBackupTargetRes = { [id: string]: CifsBackupTarget } - - export type UpdateBackupTargetReq = AddBackupTargetReq & { id: string } // backup.target.cifs.update - export type UpdateBackupTargetRes = AddBackupTargetRes - - export type RemoveBackupTargetReq = { id: string } // backup.target.cifs.remove - export type RemoveBackupTargetRes = null - - export type GetBackupInfoReq = { - // backup.target.info - targetId: string - serverId: string - password: string - } - export type GetBackupInfoRes = BackupInfo - - export type CreateBackupReq = { - // backup.create - targetId: string - packageIds: string[] - oldPassword: string | null - password: string - } - export type CreateBackupRes = null - - // network - - export type AddTunnelReq = { - name: string - config: string // file contents - public: boolean - } // net.tunnel.add - export type AddTunnelRes = { - id: string - } - - export type UpdateTunnelReq = { - id: string - name: string - } // net.gateway.set-name - export type UpdateTunnelRes = null - - export type RemoveTunnelReq = { id: string } // net.tunnel.remove - export type RemoveTunnelRes = null - - export type InitAcmeReq = { - provider: string - contact: string[] - } - export type InitAcmeRes = null - - export type RemoveAcmeReq = { - provider: string - } - export type RemoveAcmeRes = null - - export type AddTorKeyReq = { - // net.tor.key.add - key: string - } - export type GenerateTorKeyReq = {} // net.tor.key.generate - export type AddTorKeyRes = string // onion address *with* .onion suffix - - export type ServerBindingToggleGatewayReq = { - // server.host.binding.set-gateway-enabled - gateway: T.GatewayId - internalPort: 80 - enabled: boolean - } - export type ServerBindingToggleGatewayRes = null - - export type ServerAddOnionReq = { - // server.host.address.onion.add - onion: string // address *with* .onion suffix - } - export type AddOnionRes = null - - export type 
ServerRemoveOnionReq = ServerAddOnionReq // server.host.address.onion.remove - export type RemoveOnionRes = null - - export type OsUiAddPublicDomainReq = { - // server.host.address.domain.public.add - fqdn: string // FQDN - gateway: T.GatewayId - acme: string | null // URL. null means local Root CA - } - export type OsUiAddPublicDomainRes = QueryDnsRes - - export type OsUiRemovePublicDomainReq = { - // server.host.address.domain.public.remove - fqdn: string // FQDN - } - export type OsUiRemovePublicDomainRes = null - - export type OsUiAddPrivateDomainReq = { - // server.host.address.domain.private.add - fqdn: string // FQDN - } - export type OsUiAddPrivateDomainRes = null - - export type OsUiRemovePrivateDomainReq = { - // server.host.address.domain.private.remove - fqdn: string // FQDN - } - export type OsUiRemovePrivateDomainRes = null - - export type PkgBindingToggleGatewayReq = Omit< - ServerBindingToggleGatewayReq, - 'internalPort' - > & { - // package.host.binding.set-gateway-enabled - internalPort: number - package: T.PackageId // string - host: T.HostId // string - } - export type PkgBindingToggleGatewayRes = null - - export type PkgAddOnionReq = ServerAddOnionReq & { - // package.host.address.onion.add - package: T.PackageId // string - host: T.HostId // string - } - export type PkgRemoveOnionReq = PkgAddOnionReq // package.host.address.onion.remove - - export type PkgAddPublicDomainReq = OsUiAddPublicDomainReq & { - // package.host.address.domain.public.add - package: T.PackageId // string - host: T.HostId // string - } - export type PkgAddPublicDomainRes = OsUiAddPublicDomainRes - - export type PkgRemovePublicDomainReq = OsUiRemovePublicDomainReq & { - // package.host.address.domain.public.remove - package: T.PackageId // string - host: T.HostId // string - } - export type PkgRemovePublicDomainRes = OsUiRemovePublicDomainRes - - export type PkgAddPrivateDomainReq = OsUiAddPrivateDomainReq & { - // package.host.address.domain.private.add - package: 
T.PackageId // string - host: T.HostId // string - } - export type PkgAddPrivateDomainRes = OsUiAddPrivateDomainRes - - export type PkgRemovePrivateDomainReq = PkgAddPrivateDomainReq - export type PkgRemovePrivateDomainRes = OsUiRemovePrivateDomainRes - - export type GetPackageLogsReq = FetchLogsReq & { id: string } // package.logs - export type GetPackageLogsRes = FetchLogsRes - - export type FollowPackageLogsReq = FollowServerLogsReq & { id: string } // package.logs.follow - export type FollowPackageLogsRes = FollowServerLogsRes - - export type InstallPackageReq = T.InstallParams - export type InstallPackageRes = null - - export type CancelInstallPackageReq = { id: string } - export type CancelInstallPackageRes = null - - export type GetActionInputReq = { packageId: string; actionId: string } // package.action.get-input - export type GetActionInputRes = { - eventId: string - spec: IST.InputSpec - value: object | null - } - - export type ActionReq = { - packageId: string - eventId: string | null - actionId: string - input: object | null - } // package.action.run - export type ActionRes = (T.ActionResult & { version: '1' }) | null - - export type ClearTaskReq = { - packageId: string - replayId: string - } // package.action.clear-task - export type ClearTaskRes = null - - export type RestorePackagesReq = { - // package.backup.restore - ids: string[] - targetId: string - serverId: string - password: string - } - export type RestorePackagesRes = null - - export type StartPackageReq = { id: string } // package.start - export type StartPackageRes = null - - export type RestartPackageReq = { id: string } // package.restart - export type RestartPackageRes = null - - export type StopPackageReq = { id: string } // package.stop - export type StopPackageRes = null - - export type RebuildPackageReq = { id: string } // package.rebuild - export type RebuildPackageRes = null - - export type UninstallPackageReq = { - id: string - force: boolean - soft: boolean - } // 
package.uninstall - export type UninstallPackageRes = null - - export type SideloadPackageReq = { - manifest: T.Manifest - icon: string // base64 - } - export type SideloadPackageRes = { - upload: string - progress: string // guid - } - - // registry - - /** these are returned in ASCENDING order. the newest available version will be the LAST in the object */ - export type CheckOsUpdateReq = { registry: string; serverId: string } - export type CheckOsUpdateRes = { [version: string]: T.OsVersionInfo } - - export type GetRegistryInfoReq = { registry: string } - export type GetRegistryInfoRes = T.RegistryInfo - - export type GetRegistryPackageReq = GetPackageReq & { registry: string } - export type GetRegistryPackageRes = GetPackageRes - - export type GetRegistryPackagesReq = GetPackagesReq & { registry: string } - export type GetRegistryPackagesRes = GetPackagesRes +export type DiagnosticErrorRes = { + code: number + message: string + data: { details: string } } -interface MetricData { - value: string - unit: string +// logs + +export type FollowServerLogsReq = Omit + +// bindings + +export type ServerBindingSetAddressEnabledReq = { + // server.host.binding.set-address-enabled + internalPort: 80 + address: string // JSON-serialized HostnameInfo + enabled: boolean | null // null = reset to default } -export type ServerMetrics = { - general: { - temperature: MetricData | null - } - memory: { - total: MetricData - percentageUsed: MetricData - used: MetricData - available: MetricData - zramTotal: MetricData - zramUsed: MetricData - zramAvailable: MetricData - } - cpu: { - percentageUsed: MetricData - idle: MetricData - userSpace: MetricData - kernelSpace: MetricData - wait: MetricData - } - disk: { - capacity: MetricData - percentageUsed: MetricData - used: MetricData - available: MetricData - } +export type PkgBindingSetAddressEnabledReq = Omit< + ServerBindingSetAddressEnabledReq, + 'internalPort' +> & { + // package.host.binding.set-address-enabled + internalPort: 
number + package: T.PackageId // string + host: T.HostId // string } -export type Session = { - loggedIn: string - lastActive: string - userAgent: string +// package domains + +export type PkgAddPublicDomainReq = T.AddPublicDomainParams & { + // package.host.address.domain.public.add + package: T.PackageId // string + host: T.HostId // string } -export type BackupTarget = DiskBackupTarget | CifsBackupTarget - -export interface DiskBackupTarget { - type: 'disk' - vendor: string | null - model: string | null - logicalname: string | null - label: string | null - capacity: number - used: number | null - startOs: Record +export type PkgRemovePublicDomainReq = T.RemoveDomainParams & { + // package.host.address.domain.public.remove + package: T.PackageId // string + host: T.HostId // string } -export interface CifsBackupTarget { - type: 'cifs' - hostname: string - path: string - username: string - mountable: boolean - startOs: Record +export type PkgAddPrivateDomainReq = T.AddPrivateDomainParams & { + // package.host.address.domain.private.add + package: T.PackageId // string + host: T.HostId // string } +export type PkgRemovePrivateDomainReq = T.RemoveDomainParams & { + // package.host.address.domain.private.remove + package: T.PackageId // string + host: T.HostId // string +} + +// package logs + +export type GetPackageLogsReq = T.LogsParams & { id: string } // package.logs + +export type FollowPackageLogsReq = FollowServerLogsReq & { id: string } // package.logs.follow + +// actions + +export type GetActionInputRes = { + eventId: string + spec: IST.InputSpec + value: object | null +} + +export type ActionRes = (T.ActionResult & { version: '1' }) | null + +// registry + +export type GetRegistryPackageReq = GetPackageReq & { registry: string } + +export type GetRegistryPackagesReq = GetPackagesReq & { registry: string } + +// dns +// TODO: Replace with T.CheckDnsRes when SDK types are generated +export type CheckDnsRes = boolean + +// backup + +export type 
DiskBackupTarget = Extract +export type CifsBackupTarget = T.CifsBackupTarget & { type: 'cifs' } + export type RecoverySource = DiskRecoverySource | CifsRecoverySource export interface DiskRecoverySource { @@ -541,74 +113,28 @@ export interface CifsRecoverySource { password: string } -export type BackupInfo = { - version: string - timestamp: string - packageBackups: { - [id: string]: PackageBackupInfo - } -} +// notifications -export type PackageBackupInfo = { - title: string - version: string - osVersion: string - timestamp: string -} - -export type ServerSpecs = { - [key: string]: string | number -} - -export type SSHKey = { - createdAt: string - alg: string - hostname: string - fingerprint: string -} - -export type ServerNotifications = ServerNotification[] - -export type ServerNotification = { +export type ServerNotification = { id: number packageId: string | null createdAt: string - code: T - level: NotificationLevel + code: N + level: T.NotificationLevel title: string message: string - data: NotificationData + data: NotificationData seen: boolean } -export type NotificationLevel = 'success' | 'info' | 'warning' | 'error' - -export type NotificationData = T extends 0 +export type NotificationData = N extends 0 ? null - : T extends 1 - ? BackupReport - : T extends 2 + : N extends 1 + ? T.BackupReport + : N extends 2 ? 
string : any -export type BackupReport = { - server: { - attempted: boolean - error: string | null - } - packages: { - [id: string]: { - error: string | null - } - } -} - -export type AvailableWifi = { - ssid: string - strength: number - security: string[] -} - declare global { type Stringified = string & { [P in keyof T]: T[P] @@ -624,10 +150,6 @@ declare global { } } -export type Encrypted = { - encrypted: string -} - // @TODO 041 // export namespace RR041 { diff --git a/web/projects/ui/src/app/services/api/embassy-api.service.ts b/web/projects/ui/src/app/services/api/embassy-api.service.ts index 41d918ef0..cf660b87b 100644 --- a/web/projects/ui/src/app/services/api/embassy-api.service.ts +++ b/web/projects/ui/src/app/services/api/embassy-api.service.ts @@ -1,7 +1,29 @@ -import { MarketplacePkg } from '@start9labs/marketplace' +import { FullKeyboard, SetLanguageParams } from '@start9labs/shared' import { T } from '@start9labs/start-sdk' -import { RR } from './api.types' +import { GetPackageRes, GetPackagesRes } from '@start9labs/marketplace' +import { Dump } from 'patch-db-client' import { WebSocketSubject } from 'rxjs/webSocket' +import { DataModel } from '../patch-db/data-model' +import { + ActionRes, + CheckDnsRes, + CifsBackupTarget, + DiagnosticErrorRes, + FollowPackageLogsReq, + FollowServerLogsReq, + GetActionInputRes, + GetPackageLogsReq, + GetRegistryPackageReq, + GetRegistryPackagesReq, + PkgAddPrivateDomainReq, + PkgAddPublicDomainReq, + PkgBindingSetAddressEnabledReq, + PkgRemovePrivateDomainReq, + PkgRemovePublicDomainReq, + ServerBindingSetAddressEnabledReq, + ServerState, + WebsocketConfig, +} from './api.types' export abstract class ApiService { // http @@ -19,222 +41,204 @@ export abstract class ApiService { abstract openWebsocket$( guid: string, - config?: RR.WebsocketConfig, + config?: WebsocketConfig, ): WebSocketSubject // state - abstract echo(params: RR.EchoReq, url: string): Promise + abstract echo(params: T.EchoParams, url: string): 
Promise - abstract getState(): Promise + abstract getState(): Promise // db - abstract subscribeToPatchDB( - params: RR.SubscribePatchReq, - ): Promise + abstract subscribeToPatchDB(params: {}): Promise<{ + dump: Dump + guid: string + }> abstract setDbValue( pathArr: Array, value: T, - ): Promise + ): Promise // auth - abstract login(params: RR.LoginReq): Promise + abstract login(params: T.LoginParams): Promise - abstract logout(params: RR.LogoutReq): Promise + abstract logout(params: {}): Promise - abstract getSessions(params: RR.GetSessionsReq): Promise + abstract getSessions(params: {}): Promise - abstract killSessions(params: RR.KillSessionsReq): Promise + abstract killSessions(params: T.KillParams): Promise - abstract resetPassword( - params: RR.ResetPasswordReq, - ): Promise + abstract resetPassword(params: T.ResetPasswordParams): Promise // diagnostic - abstract diagnosticGetError(): Promise + abstract diagnosticGetError(): Promise abstract diagnosticRestart(): Promise abstract diagnosticForgetDrive(): Promise abstract diagnosticRepairDisk(): Promise - abstract diagnosticGetLogs( - params: RR.GetServerLogsReq, - ): Promise + abstract diagnosticGetLogs(params: T.LogsParams): Promise // init - abstract initFollowProgress(): Promise + abstract initFollowProgress(): Promise abstract initFollowLogs( - params: RR.FollowServerLogsReq, - ): Promise + params: FollowServerLogsReq, + ): Promise // server - abstract getSystemTime( - params: RR.GetSystemTimeReq, - ): Promise + abstract getSystemTime(params: {}): Promise - abstract getServerLogs( - params: RR.GetServerLogsReq, - ): Promise + abstract getServerLogs(params: T.LogsParams): Promise - abstract getTorLogs(params: RR.GetServerLogsReq): Promise - - abstract getKernelLogs( - params: RR.GetServerLogsReq, - ): Promise + abstract getKernelLogs(params: T.LogsParams): Promise abstract followServerLogs( - params: RR.FollowServerLogsReq, - ): Promise - - abstract followTorLogs( - params: RR.FollowServerLogsReq, - ): 
Promise + params: FollowServerLogsReq, + ): Promise abstract followKernelLogs( - params: RR.FollowServerLogsReq, - ): Promise + params: FollowServerLogsReq, + ): Promise - abstract followServerMetrics( - params: RR.FollowServerMetricsReq, - ): Promise + abstract followServerMetrics(params: {}): Promise - abstract updateServer(params: RR.UpdateServerReq): Promise + abstract updateServer(params: { + registry: string + targetVersion: string + }): Promise<'updating' | 'no-updates'> - abstract restartServer( - params: RR.RestartServerReq, - ): Promise + abstract restartServer(params: {}): Promise - abstract shutdownServer( - params: RR.ShutdownServerReq, - ): Promise + abstract shutdownServer(params: {}): Promise - abstract repairDisk(params: RR.DiskRepairReq): Promise + abstract repairDisk(params: {}): Promise abstract toggleKiosk(enable: boolean): Promise - abstract setKeyboard(params: RR.SetKeyboardReq): Promise + abstract setHostname(params: T.SetServerHostnameParams): Promise - abstract setLanguage(params: RR.SetLanguageReq): Promise + abstract setKeyboard(params: FullKeyboard): Promise - abstract setDns(params: RR.SetDnsReq): Promise + abstract setLanguage(params: SetLanguageParams): Promise - abstract queryDns(params: RR.QueryDnsReq): Promise + abstract setDns(params: T.SetStaticDnsParams): Promise - abstract resetTor(params: RR.ResetTorReq): Promise + abstract queryDns(params: T.QueryDnsParams): Promise + + abstract checkPort(params: T.CheckPortParams): Promise + + abstract checkDns(params: T.CheckDnsParams): Promise // smtp - abstract setSmtp(params: RR.SetSMTPReq): Promise + abstract setSmtp(params: T.SmtpValue): Promise - abstract clearSmtp(params: RR.ClearSMTPReq): Promise + abstract clearSmtp(params: {}): Promise - abstract testSmtp(params: RR.TestSMTPReq): Promise + abstract testSmtp(params: T.TestSmtpParams): Promise // marketplace URLs - abstract checkOSUpdate( - params: RR.CheckOsUpdateReq, - ): Promise + abstract checkOSUpdate(params: { + registry: 
string + serverId: string + }): Promise - abstract getRegistryInfo( - params: RR.GetRegistryInfoReq, - ): Promise + abstract getRegistryInfo(params: { + registry: string + }): Promise abstract getRegistryPackage( - params: RR.GetRegistryPackageReq, - ): Promise + params: GetRegistryPackageReq, + ): Promise abstract getRegistryPackages( - params: RR.GetRegistryPackagesReq, - ): Promise + params: GetRegistryPackagesReq, + ): Promise // notification abstract getNotifications( - params: RR.GetNotificationsReq, - ): Promise + params: T.ListNotificationParams, + ): Promise abstract markSeenNotifications( - params: RR.MarkSeenNotificationReq, - ): Promise + params: T.ModifyNotificationParams, + ): Promise abstract markSeenAllNotifications( - params: RR.MarkSeenAllNotificationsReq, - ): Promise + params: T.ModifyNotificationBeforeParams, + ): Promise abstract markUnseenNotifications( - params: RR.DeleteNotificationsReq, - ): Promise + params: T.ModifyNotificationParams, + ): Promise abstract deleteNotifications( - params: RR.DeleteNotificationsReq, - ): Promise + params: T.ModifyNotificationParams, + ): Promise // ** proxies ** - abstract addTunnel(params: RR.AddTunnelReq): Promise + abstract addTunnel(params: T.AddTunnelParams): Promise<{ id: string }> - abstract updateTunnel(params: RR.UpdateTunnelReq): Promise + abstract updateTunnel(params: T.RenameGatewayParams): Promise - abstract removeTunnel(params: RR.RemoveTunnelReq): Promise + abstract removeTunnel(params: T.RemoveTunnelParams): Promise + + abstract setDefaultOutbound(params: { gateway: string | null }): Promise + + abstract setServiceOutbound(params: T.SetOutboundGatewayParams): Promise // ** domains ** // wifi - abstract enableWifi(params: RR.EnabledWifiReq): Promise + abstract enableWifi(params: T.SetWifiEnabledParams): Promise - abstract setWifiCountry( - params: RR.SetWifiCountryReq, - ): Promise + abstract setWifiCountry(params: T.SetCountryParams): Promise - abstract getWifi( - params: RR.GetWifiReq, - 
timeout: number, - ): Promise + abstract getWifi(params: {}, timeout: number): Promise - abstract addWifi(params: RR.AddWifiReq): Promise + abstract addWifi(params: T.WifiAddParams): Promise - abstract connectWifi(params: RR.ConnectWifiReq): Promise + abstract connectWifi(params: T.WifiSsidParams): Promise - abstract deleteWifi(params: RR.DeleteWifiReq): Promise + abstract deleteWifi(params: T.WifiSsidParams): Promise // ssh - abstract getSshKeys(params: RR.GetSSHKeysReq): Promise + abstract getSshKeys(params: {}): Promise - abstract addSshKey(params: RR.AddSSHKeyReq): Promise + abstract addSshKey(params: T.SshAddParams): Promise - abstract deleteSshKey(params: RR.DeleteSSHKeyReq): Promise + abstract deleteSshKey(params: T.SshDeleteParams): Promise // backup - abstract getBackupTargets( - params: RR.GetBackupTargetsReq, - ): Promise + abstract getBackupTargets(params: {}): Promise<{ + [id: string]: T.BackupTarget + }> abstract addBackupTarget( - params: RR.AddBackupTargetReq, - ): Promise + params: T.CifsAddParams, + ): Promise<{ [id: string]: CifsBackupTarget }> abstract updateBackupTarget( - params: RR.UpdateBackupTargetReq, - ): Promise + params: T.CifsUpdateParams, + ): Promise<{ [id: string]: CifsBackupTarget }> - abstract removeBackupTarget( - params: RR.RemoveBackupTargetReq, - ): Promise + abstract removeBackupTarget(params: T.CifsRemoveParams): Promise - abstract getBackupInfo( - params: RR.GetBackupInfoReq, - ): Promise + abstract getBackupInfo(params: T.InfoParams): Promise - abstract createBackup(params: RR.CreateBackupReq): Promise + abstract createBackup(params: T.BackupParams): Promise // @TODO 041 @@ -286,51 +290,37 @@ export abstract class ApiService { // package - abstract getPackageLogs( - params: RR.GetPackageLogsReq, - ): Promise + abstract getPackageLogs(params: GetPackageLogsReq): Promise abstract followPackageLogs( - params: RR.FollowPackageLogsReq, - ): Promise + params: FollowPackageLogsReq, + ): Promise - abstract installPackage( - 
params: RR.InstallPackageReq, - ): Promise + abstract installPackage(params: T.InstallParams): Promise - abstract cancelInstallPackage( - params: RR.CancelInstallPackageReq, - ): Promise + abstract cancelInstallPackage(params: T.CancelInstallParams): Promise abstract getActionInput( - params: RR.GetActionInputReq, - ): Promise + params: T.GetActionInputParams, + ): Promise - abstract runAction(params: RR.ActionReq): Promise + abstract runAction(params: T.RunActionParams): Promise - abstract clearTask(params: RR.ClearTaskReq): Promise + abstract clearTask(params: T.ClearTaskParams): Promise - abstract restorePackages( - params: RR.RestorePackagesReq, - ): Promise + abstract restorePackages(params: T.RestorePackageParams): Promise - abstract startPackage(params: RR.StartPackageReq): Promise + abstract startPackage(params: T.ControlParams): Promise - abstract restartPackage( - params: RR.RestartPackageReq, - ): Promise + abstract restartPackage(params: T.ControlParams): Promise - abstract stopPackage(params: RR.StopPackageReq): Promise + abstract stopPackage(params: T.ControlParams): Promise - abstract rebuildPackage( - params: RR.RebuildPackageReq, - ): Promise + abstract rebuildPackage(params: T.RebuildParams): Promise - abstract uninstallPackage( - params: RR.UninstallPackageReq, - ): Promise + abstract uninstallPackage(params: T.UninstallParams): Promise - abstract sideloadPackage(): Promise + abstract sideloadPackage(): Promise // @TODO 041 @@ -340,65 +330,39 @@ export abstract class ApiService { // params: RR.SetServiceOutboundTunnelReq, // ): Promise - abstract initAcme(params: RR.InitAcmeReq): Promise + abstract initAcme(params: T.InitAcmeParams): Promise - abstract removeAcme(params: RR.RemoveAcmeReq): Promise + abstract removeAcme(params: T.RemoveAcmeParams): Promise - abstract addTorKey(params: RR.AddTorKeyReq): Promise - - abstract generateTorKey( - params: RR.GenerateTorKeyReq, - ): Promise - - abstract serverBindingToggleGateway( - params: 
RR.ServerBindingToggleGatewayReq, - ): Promise - - abstract serverAddOnion(params: RR.ServerAddOnionReq): Promise - - abstract serverRemoveOnion( - params: RR.ServerRemoveOnionReq, - ): Promise + abstract serverBindingSetAddressEnabled( + params: ServerBindingSetAddressEnabledReq, + ): Promise abstract osUiAddPublicDomain( - params: RR.OsUiAddPublicDomainReq, - ): Promise + params: T.AddPublicDomainParams, + ): Promise - abstract osUiRemovePublicDomain( - params: RR.OsUiRemovePublicDomainReq, - ): Promise + abstract osUiRemovePublicDomain(params: T.RemoveDomainParams): Promise - abstract osUiAddPrivateDomain( - params: RR.OsUiAddPrivateDomainReq, - ): Promise + abstract osUiAddPrivateDomain(params: T.AddPrivateDomainParams): Promise - abstract osUiRemovePrivateDomain( - params: RR.OsUiRemovePrivateDomainReq, - ): Promise + abstract osUiRemovePrivateDomain(params: T.RemoveDomainParams): Promise - abstract pkgBindingToggleGateway( - params: RR.PkgBindingToggleGatewayReq, - ): Promise - - abstract pkgAddOnion(params: RR.PkgAddOnionReq): Promise - - abstract pkgRemoveOnion( - params: RR.PkgRemoveOnionReq, - ): Promise + abstract pkgBindingSetAddressEnabled( + params: PkgBindingSetAddressEnabledReq, + ): Promise abstract pkgAddPublicDomain( - params: RR.PkgAddPublicDomainReq, - ): Promise + params: PkgAddPublicDomainReq, + ): Promise abstract pkgRemovePublicDomain( - params: RR.PkgRemovePublicDomainReq, - ): Promise + params: PkgRemovePublicDomainReq, + ): Promise - abstract pkgAddPrivateDomain( - params: RR.PkgAddPrivateDomainReq, - ): Promise + abstract pkgAddPrivateDomain(params: PkgAddPrivateDomainReq): Promise abstract pkgRemovePrivateDomain( - params: RR.PkgRemovePrivateDomainReq, - ): Promise + params: PkgRemovePrivateDomainReq, + ): Promise } diff --git a/web/projects/ui/src/app/services/api/embassy-live-api.service.ts b/web/projects/ui/src/app/services/api/embassy-live-api.service.ts index 47890f047..200792a89 100644 --- 
a/web/projects/ui/src/app/services/api/embassy-live-api.service.ts +++ b/web/projects/ui/src/app/services/api/embassy-live-api.service.ts @@ -1,21 +1,42 @@ import { DOCUMENT, Inject, Injectable } from '@angular/core' import { blake3 } from '@noble/hashes/blake3' -import { MarketplacePkg } from '@start9labs/marketplace' import { + FullKeyboard, HttpOptions, HttpService, isRpcError, RpcError, RPCOptions, + SetLanguageParams, } from '@start9labs/shared' import { T } from '@start9labs/start-sdk' +import { GetPackageRes, GetPackagesRes } from '@start9labs/marketplace' import { Dump, pathFromArray } from 'patch-db-client' import { filter, firstValueFrom, Observable } from 'rxjs' import { webSocket, WebSocketSubject } from 'rxjs/webSocket' import { PATCH_CACHE } from 'src/app/services/patch-db/patch-db-source' import { AuthService } from '../auth.service' import { DataModel } from '../patch-db/data-model' -import { RR } from './api.types' +import { + ActionRes, + CheckDnsRes, + CifsBackupTarget, + DiagnosticErrorRes, + FollowPackageLogsReq, + FollowServerLogsReq, + GetActionInputRes, + GetPackageLogsReq, + GetRegistryPackageReq, + GetRegistryPackagesReq, + PkgAddPrivateDomainReq, + PkgAddPublicDomainReq, + PkgBindingSetAddressEnabledReq, + PkgRemovePrivateDomainReq, + PkgRemovePublicDomainReq, + ServerBindingSetAddressEnabledReq, + ServerState, + WebsocketConfig, +} from './api.types' import { ApiService } from './embassy-api.service' @Injectable() @@ -49,7 +70,7 @@ export class LiveApiService extends ApiService { urls: string[], params: Record, ): Promise { - for (let url in urls) { + for (const url of urls) { try { const res = await this.httpRequest({ method: 'GET', @@ -67,7 +88,7 @@ export class LiveApiService extends ApiService { openWebsocket$( guid: string, - config: RR.WebsocketConfig = {}, + config: WebsocketConfig = {}, ): WebSocketSubject { const { location } = this.document.defaultView! const protocol = location.protocol === 'http:' ? 
'ws' : 'wss' @@ -81,59 +102,58 @@ export class LiveApiService extends ApiService { // state - async echo(params: RR.EchoReq, url: string): Promise { + async echo(params: T.EchoParams, url: string): Promise { return this.rpcRequest({ method: 'echo', params }, url) } - async getState(): Promise { + async getState(): Promise { return this.rpcRequest({ method: 'state', params: {}, timeout: 10000 }) } // db - async subscribeToPatchDB( - params: RR.SubscribePatchReq, - ): Promise { + async subscribeToPatchDB(params: {}): Promise<{ + dump: Dump + guid: string + }> { return this.rpcRequest({ method: 'db.subscribe', params }) } async setDbValue( pathArr: Array, value: T, - ): Promise { + ): Promise { const pointer = pathFromArray(pathArr) - const params: RR.SetDBValueReq = { pointer, value } + const params = { pointer, value } return this.rpcRequest({ method: 'db.put.ui', params }) } // auth - async login(params: RR.LoginReq): Promise { + async login(params: T.LoginParams): Promise { return this.rpcRequest({ method: 'auth.login', params }) } - async logout(params: RR.LogoutReq): Promise { + async logout(params: {}): Promise { return this.rpcRequest({ method: 'auth.logout', params }) } - async getSessions(params: RR.GetSessionsReq): Promise { + async getSessions(params: {}): Promise { return this.rpcRequest({ method: 'auth.session.list', params }) } - async killSessions(params: RR.KillSessionsReq): Promise { + async killSessions(params: T.KillParams): Promise { return this.rpcRequest({ method: 'auth.session.kill', params }) } - async resetPassword( - params: RR.ResetPasswordReq, - ): Promise { + async resetPassword(params: T.ResetPasswordParams): Promise { return this.rpcRequest({ method: 'auth.reset-password', params }) } // diagnostic - async diagnosticGetError(): Promise { - return this.rpcRequest({ + async diagnosticGetError(): Promise { + return this.rpcRequest({ method: 'diagnostic.error', params: {}, }) @@ -160,10 +180,8 @@ export class LiveApiService extends 
ApiService { }) } - async diagnosticGetLogs( - params: RR.GetServerLogsReq, - ): Promise { - return this.rpcRequest({ + async diagnosticGetLogs(params: T.LogsParams): Promise { + return this.rpcRequest({ method: 'diagnostic.logs', params, }) @@ -171,81 +189,62 @@ export class LiveApiService extends ApiService { // init - async initFollowProgress(): Promise { + async initFollowProgress(): Promise { return this.rpcRequest({ method: 'init.subscribe', params: {} }) } async initFollowLogs( - params: RR.FollowServerLogsReq, - ): Promise { + params: FollowServerLogsReq, + ): Promise { return this.rpcRequest({ method: 'init.logs.follow', params }) } // server - async getSystemTime( - params: RR.GetSystemTimeReq, - ): Promise { + async getSystemTime(params: {}): Promise { return this.rpcRequest({ method: 'server.time', params }) } - async getServerLogs( - params: RR.GetServerLogsReq, - ): Promise { + async getServerLogs(params: T.LogsParams): Promise { return this.rpcRequest({ method: 'server.logs', params }) } - async getTorLogs(params: RR.GetServerLogsReq): Promise { - return this.rpcRequest({ method: 'net.tor.logs', params }) - } - - async getKernelLogs( - params: RR.GetServerLogsReq, - ): Promise { + async getKernelLogs(params: T.LogsParams): Promise { return this.rpcRequest({ method: 'server.kernel-logs', params }) } async followServerLogs( - params: RR.FollowServerLogsReq, - ): Promise { + params: FollowServerLogsReq, + ): Promise { return this.rpcRequest({ method: 'server.logs.follow', params }) } - async followTorLogs( - params: RR.FollowServerLogsReq, - ): Promise { - return this.rpcRequest({ method: 'net.tor.logs.follow', params }) - } - async followKernelLogs( - params: RR.FollowServerLogsReq, - ): Promise { + params: FollowServerLogsReq, + ): Promise { return this.rpcRequest({ method: 'server.kernel-logs.follow', params }) } - async followServerMetrics( - params: RR.FollowServerMetricsReq, - ): Promise { + async followServerMetrics(params: {}): Promise { return 
this.rpcRequest({ method: 'server.metrics.follow', params }) } - async updateServer(params: RR.UpdateServerReq): Promise { + async updateServer(params: { + registry: string + targetVersion: string + }): Promise<'updating' | 'no-updates'> { return this.rpcRequest({ method: 'server.update', params }) } - async restartServer( - params: RR.RestartServerReq, - ): Promise { + async restartServer(params: {}): Promise { return this.rpcRequest({ method: 'server.restart', params }) } - async shutdownServer( - params: RR.ShutdownServerReq, - ): Promise { + async shutdownServer(params: {}): Promise { return this.rpcRequest({ method: 'server.shutdown', params }) } - async repairDisk(params: RR.RestartServerReq): Promise { + async repairDisk(params: {}): Promise { return this.rpcRequest({ method: 'disk.repair', params }) } @@ -256,46 +255,59 @@ export class LiveApiService extends ApiService { }) } - async setKeyboard(params: RR.SetKeyboardReq): Promise { + async setHostname(params: T.SetServerHostnameParams): Promise { + return this.rpcRequest({ method: 'server.set-hostname', params }) + } + + async setKeyboard(params: FullKeyboard): Promise { return this.rpcRequest({ method: 'server.set-keyboard', params }) } - async setLanguage(params: RR.SetLanguageReq): Promise { + async setLanguage(params: SetLanguageParams): Promise { return this.rpcRequest({ method: 'server.set-language', params }) } - async setDns(params: RR.SetDnsReq): Promise { + async setDns(params: T.SetStaticDnsParams): Promise { return this.rpcRequest({ method: 'net.dns.set-static', params, }) } - async queryDns(params: RR.QueryDnsReq): Promise { + async queryDns(params: T.QueryDnsParams): Promise { return this.rpcRequest({ method: 'net.dns.query', params, }) } - async resetTor(params: RR.ResetTorReq): Promise { - return this.rpcRequest({ method: 'net.tor.reset', params }) + async checkPort(params: T.CheckPortParams): Promise { + return this.rpcRequest({ + method: 'net.gateway.check-port', + params, + }) + } + + 
async checkDns(params: T.CheckDnsParams): Promise { + return this.rpcRequest({ + method: 'net.gateway.check-dns', + params, + }) } // marketplace URLs - async checkOSUpdate( - params: RR.CheckOsUpdateReq, - ): Promise { + async checkOSUpdate(params: { + registry: string + serverId: string + }): Promise { return this.rpcRequest({ method: 'registry.os.version.get', params, }) } - async getRegistryInfo( - params: RR.GetRegistryInfoReq, - ): Promise { + async getRegistryInfo(params: { registry: string }): Promise { return this.rpcRequest({ method: 'registry.info', params, @@ -303,8 +315,8 @@ export class LiveApiService extends ApiService { } async getRegistryPackage( - params: RR.GetRegistryPackageReq, - ): Promise { + params: GetRegistryPackageReq, + ): Promise { return this.rpcRequest({ method: 'registry.package.get', params, @@ -312,8 +324,8 @@ export class LiveApiService extends ApiService { } async getRegistryPackages( - params: RR.GetRegistryPackagesReq, - ): Promise { + params: GetRegistryPackagesReq, + ): Promise { return this.rpcRequest({ method: 'registry.package.get', params, @@ -323,26 +335,24 @@ export class LiveApiService extends ApiService { // notification async getNotifications( - params: RR.GetNotificationsReq, - ): Promise { + params: T.ListNotificationParams, + ): Promise { return this.rpcRequest({ method: 'notification.list', params }) } - async deleteNotifications( - params: RR.DeleteNotificationsReq, - ): Promise { + async deleteNotifications(params: T.ModifyNotificationParams): Promise { return this.rpcRequest({ method: 'notification.remove', params }) } async markSeenNotifications( - params: RR.MarkSeenNotificationReq, - ): Promise { + params: T.ModifyNotificationParams, + ): Promise { return this.rpcRequest({ method: 'notification.mark-seen', params }) } async markSeenAllNotifications( - params: RR.MarkSeenAllNotificationsReq, - ): Promise { + params: T.ModifyNotificationBeforeParams, + ): Promise { return this.rpcRequest({ method: 
'notification.mark-seen-before', params, @@ -350,118 +360,120 @@ export class LiveApiService extends ApiService { } async markUnseenNotifications( - params: RR.MarkUnseenNotificationReq, - ): Promise { + params: T.ModifyNotificationParams, + ): Promise { return this.rpcRequest({ method: 'notification.mark-unseen', params }) } // proxies - async addTunnel(params: RR.AddTunnelReq): Promise { + async addTunnel(params: T.AddTunnelParams): Promise<{ id: string }> { return this.rpcRequest({ method: 'net.tunnel.add', params }) } - async updateTunnel(params: RR.UpdateTunnelReq): Promise { + async updateTunnel(params: T.RenameGatewayParams): Promise { return this.rpcRequest({ method: 'net.gateway.set-name', params }) } - async removeTunnel(params: RR.RemoveTunnelReq): Promise { + async removeTunnel(params: T.RemoveTunnelParams): Promise { return this.rpcRequest({ method: 'net.tunnel.remove', params }) } + async setDefaultOutbound(params: T.SetDefaultOutboundParams): Promise { + return this.rpcRequest({ + method: 'net.gateway.set-default-outbound', + params, + }) + } + + async setServiceOutbound(params: T.SetOutboundGatewayParams): Promise { + return this.rpcRequest({ method: 'package.set-outbound-gateway', params }) + } + // wifi - async enableWifi(params: RR.EnabledWifiReq): Promise { + async enableWifi(params: T.SetWifiEnabledParams): Promise { return this.rpcRequest({ method: 'wifi.enable', params }) } - async getWifi( - params: RR.GetWifiReq, - timeout?: number, - ): Promise { + async getWifi(params: {}, timeout?: number): Promise { return this.rpcRequest({ method: 'wifi.get', params, timeout }) } - async setWifiCountry( - params: RR.SetWifiCountryReq, - ): Promise { + async setWifiCountry(params: T.SetCountryParams): Promise { return this.rpcRequest({ method: 'wifi.country.set', params }) } - async addWifi(params: RR.AddWifiReq): Promise { + async addWifi(params: T.WifiAddParams): Promise { return this.rpcRequest({ method: 'wifi.add', params }) } - async 
connectWifi(params: RR.ConnectWifiReq): Promise { + async connectWifi(params: T.WifiSsidParams): Promise { return this.rpcRequest({ method: 'wifi.connect', params }) } - async deleteWifi(params: RR.DeleteWifiReq): Promise { + async deleteWifi(params: T.WifiSsidParams): Promise { return this.rpcRequest({ method: 'wifi.remove', params }) } // smtp - async setSmtp(params: RR.SetSMTPReq): Promise { + async setSmtp(params: T.SmtpValue): Promise { return this.rpcRequest({ method: 'server.set-smtp', params }) } - async clearSmtp(params: RR.ClearSMTPReq): Promise { + async clearSmtp(params: {}): Promise { return this.rpcRequest({ method: 'server.clear-smtp', params }) } - async testSmtp(params: RR.TestSMTPReq): Promise { + async testSmtp(params: T.TestSmtpParams): Promise { return this.rpcRequest({ method: 'server.test-smtp', params }) } // ssh - async getSshKeys(params: RR.GetSSHKeysReq): Promise { + async getSshKeys(params: {}): Promise { return this.rpcRequest({ method: 'ssh.list', params }) } - async addSshKey(params: RR.AddSSHKeyReq): Promise { + async addSshKey(params: T.SshAddParams): Promise { return this.rpcRequest({ method: 'ssh.add', params }) } - async deleteSshKey(params: RR.DeleteSSHKeyReq): Promise { + async deleteSshKey(params: T.SshDeleteParams): Promise { return this.rpcRequest({ method: 'ssh.remove', params }) } // backup - async getBackupTargets( - params: RR.GetBackupTargetsReq, - ): Promise { + async getBackupTargets(params: {}): Promise<{ + [id: string]: T.BackupTarget + }> { return this.rpcRequest({ method: 'backup.target.list', params }) } async addBackupTarget( - params: RR.AddBackupTargetReq, - ): Promise { + params: T.CifsAddParams, + ): Promise<{ [id: string]: CifsBackupTarget }> { params.path = params.path.replace('/\\/g', '/') return this.rpcRequest({ method: 'backup.target.cifs.add', params }) } async updateBackupTarget( - params: RR.UpdateBackupTargetReq, - ): Promise { + params: T.CifsUpdateParams, + ): Promise<{ [id: string]: 
CifsBackupTarget }> { return this.rpcRequest({ method: 'backup.target.cifs.update', params }) } - async removeBackupTarget( - params: RR.RemoveBackupTargetReq, - ): Promise { + async removeBackupTarget(params: T.CifsRemoveParams): Promise { return this.rpcRequest({ method: 'backup.target.cifs.remove', params }) } - async getBackupInfo( - params: RR.GetBackupInfoReq, - ): Promise { + async getBackupInfo(params: T.InfoParams): Promise { return this.rpcRequest({ method: 'backup.target.info', params }) } - async createBackup(params: RR.CreateBackupReq): Promise { + async createBackup(params: T.BackupParams): Promise { return this.rpcRequest({ method: 'backup.create', params }) } @@ -524,77 +536,63 @@ export class LiveApiService extends ApiService { // package - async getPackageLogs( - params: RR.GetPackageLogsReq, - ): Promise { + async getPackageLogs(params: GetPackageLogsReq): Promise { return this.rpcRequest({ method: 'package.logs', params }) } async followPackageLogs( - params: RR.FollowPackageLogsReq, - ): Promise { + params: FollowPackageLogsReq, + ): Promise { return this.rpcRequest({ method: 'package.logs.follow', params }) } - async installPackage( - params: RR.InstallPackageReq, - ): Promise { + async installPackage(params: T.InstallParams): Promise { return this.rpcRequest({ method: 'package.install', params }) } - async cancelInstallPackage( - params: RR.CancelInstallPackageReq, - ): Promise { + async cancelInstallPackage(params: T.CancelInstallParams): Promise { return this.rpcRequest({ method: 'package.cancel-install', params }) } async getActionInput( - params: RR.GetActionInputReq, - ): Promise { + params: T.GetActionInputParams, + ): Promise { return this.rpcRequest({ method: 'package.action.get-input', params }) } - async runAction(params: RR.ActionReq): Promise { + async runAction(params: T.RunActionParams): Promise { return this.rpcRequest({ method: 'package.action.run', params }) } - async clearTask(params: RR.ClearTaskReq): Promise { + async 
clearTask(params: T.ClearTaskParams): Promise { return this.rpcRequest({ method: 'package.action.clear-task', params }) } - async restorePackages( - params: RR.RestorePackagesReq, - ): Promise { + async restorePackages(params: T.RestorePackageParams): Promise { return this.rpcRequest({ method: 'package.backup.restore', params }) } - async startPackage(params: RR.StartPackageReq): Promise { + async startPackage(params: T.ControlParams): Promise { return this.rpcRequest({ method: 'package.start', params }) } - async restartPackage( - params: RR.RestartPackageReq, - ): Promise { + async restartPackage(params: T.ControlParams): Promise { return this.rpcRequest({ method: 'package.restart', params }) } - async stopPackage(params: RR.StopPackageReq): Promise { + async stopPackage(params: T.ControlParams): Promise { return this.rpcRequest({ method: 'package.stop', params }) } - async rebuildPackage( - params: RR.RebuildPackageReq, - ): Promise { + async rebuildPackage(params: T.RebuildParams): Promise { return this.rpcRequest({ method: 'package.rebuild', params }) } - async uninstallPackage( - params: RR.UninstallPackageReq, - ): Promise { + async uninstallPackage(params: T.UninstallParams): Promise { return this.rpcRequest({ method: 'package.uninstall', params }) } - async sideloadPackage(): Promise { + async sideloadPackage(): Promise { return this.rpcRequest({ method: 'package.sideload', params: {}, @@ -607,141 +605,85 @@ export class LiveApiService extends ApiService { // return this.rpcRequest({ method: 'package.proxy.set-outbound', params }) // } - async removeAcme(params: RR.RemoveAcmeReq): Promise { + async removeAcme(params: T.RemoveAcmeParams): Promise { return this.rpcRequest({ method: 'net.acme.remove', params, }) } - async initAcme(params: RR.InitAcmeReq): Promise { + async initAcme(params: T.InitAcmeParams): Promise { return this.rpcRequest({ method: 'net.acme.init', params, }) } - async addTorKey(params: RR.AddTorKeyReq): Promise { + async 
serverBindingSetAddressEnabled( + params: ServerBindingSetAddressEnabledReq, + ): Promise { return this.rpcRequest({ - method: 'net.tor.key.add', - params, - }) - } - - async generateTorKey(params: RR.GenerateTorKeyReq): Promise { - return this.rpcRequest({ - method: 'net.tor.key.generate', - params, - }) - } - - async serverBindingToggleGateway( - params: RR.ServerBindingToggleGatewayReq, - ): Promise { - return this.rpcRequest({ - method: 'server.host.binding.set-gateway-enabled', - params, - }) - } - - async serverAddOnion(params: RR.ServerAddOnionReq): Promise { - return this.rpcRequest({ - method: 'server.host.address.onion.add', - params, - }) - } - - async serverRemoveOnion( - params: RR.ServerRemoveOnionReq, - ): Promise { - return this.rpcRequest({ - method: 'server.host.address.onion.remove', + method: 'server.host.binding.set-address-enabled', params, }) } async osUiAddPublicDomain( - params: RR.OsUiAddPublicDomainReq, - ): Promise { + params: T.AddPublicDomainParams, + ): Promise { return this.rpcRequest({ method: 'server.host.address.domain.public.add', params, }) } - async osUiRemovePublicDomain( - params: RR.OsUiRemovePublicDomainReq, - ): Promise { + async osUiRemovePublicDomain(params: T.RemoveDomainParams): Promise { return this.rpcRequest({ method: 'server.host.address.domain.public.remove', params, }) } - async osUiAddPrivateDomain( - params: RR.OsUiAddPrivateDomainReq, - ): Promise { + async osUiAddPrivateDomain(params: T.AddPrivateDomainParams): Promise { return this.rpcRequest({ method: 'server.host.address.domain.private.add', params, }) } - async osUiRemovePrivateDomain( - params: RR.OsUiRemovePrivateDomainReq, - ): Promise { + async osUiRemovePrivateDomain(params: T.RemoveDomainParams): Promise { return this.rpcRequest({ method: 'server.host.address.domain.private.remove', params, }) } - async pkgBindingToggleGateway( - params: RR.PkgBindingToggleGatewayReq, - ): Promise { + async pkgBindingSetAddressEnabled( + params: 
PkgBindingSetAddressEnabledReq, + ): Promise { return this.rpcRequest({ - method: 'package.host.binding.set-gateway-enabled', - params, - }) - } - - async pkgAddOnion(params: RR.PkgAddOnionReq): Promise { - return this.rpcRequest({ - method: 'package.host.address.onion.add', - params, - }) - } - - async pkgRemoveOnion( - params: RR.PkgRemoveOnionReq, - ): Promise { - return this.rpcRequest({ - method: 'package.host.address.onion.remove', + method: 'package.host.binding.set-address-enabled', params, }) } async pkgAddPublicDomain( - params: RR.PkgAddPublicDomainReq, - ): Promise { + params: PkgAddPublicDomainReq, + ): Promise { return this.rpcRequest({ method: 'package.host.address.domain.public.add', params, }) } - async pkgRemovePublicDomain( - params: RR.PkgRemovePublicDomainReq, - ): Promise { + async pkgRemovePublicDomain(params: PkgRemovePublicDomainReq): Promise { return this.rpcRequest({ method: 'package.host.address.domain.public.remove', params, }) } - async pkgAddPrivateDomain( - params: RR.PkgAddPrivateDomainReq, - ): Promise { + async pkgAddPrivateDomain(params: PkgAddPrivateDomainReq): Promise { return this.rpcRequest({ method: 'package.host.address.domain.private.add', params, @@ -749,8 +691,8 @@ export class LiveApiService extends ApiService { } async pkgRemovePrivateDomain( - params: RR.PkgRemovePrivateDomainReq, - ): Promise { + params: PkgRemovePrivateDomainReq, + ): Promise { return this.rpcRequest({ method: 'package.host.address.domain.private.remove', params, diff --git a/web/projects/ui/src/app/services/api/embassy-mock-api.service.ts b/web/projects/ui/src/app/services/api/embassy-mock-api.service.ts index c2f8d953b..2f84e1ffb 100644 --- a/web/projects/ui/src/app/services/api/embassy-mock-api.service.ts +++ b/web/projects/ui/src/app/services/api/embassy-mock-api.service.ts @@ -1,8 +1,15 @@ import { Injectable } from '@angular/core' -import { pauseFor, Log, RPCErrorDetails } from '@start9labs/shared' -import { ApiService } from 
'./embassy-api.service' +import { GetPackageRes, GetPackagesRes } from '@start9labs/marketplace' +import { + FullKeyboard, + pauseFor, + RPCErrorDetails, + SetLanguageParams, +} from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' import { AddOperation, + Dump, Operation, PatchOp, pathFromArray, @@ -10,20 +17,40 @@ import { ReplaceOperation, Revision, } from 'patch-db-client' +import { from, interval, map, shareReplay, startWith, Subject, tap } from 'rxjs' +import { WebSocketSubject } from 'rxjs/webSocket' import { + DataModel, InstallingState, PackageDataEntry, StateInfo, UpdatingState, } from 'src/app/services/patch-db/data-model' -import { CifsBackupTarget, RR } from './api.types' -import { Mock } from './api.fixures' -import { from, interval, map, shareReplay, startWith, Subject, tap } from 'rxjs' -import { mockPatchData } from './mock-patch' -import { AuthService } from '../auth.service' -import { T } from '@start9labs/start-sdk' -import { WebSocketSubject } from 'rxjs/webSocket' import { toAuthorityUrl } from 'src/app/utils/acme' +import { AuthService } from '../auth.service' +import { Mock } from './api.fixures' +import { + ActionRes, + CheckDnsRes, + CifsBackupTarget, + DiagnosticErrorRes, + FollowPackageLogsReq, + FollowServerLogsReq, + GetActionInputRes, + GetPackageLogsReq, + GetRegistryPackageReq, + GetRegistryPackagesReq, + PkgAddPrivateDomainReq, + PkgAddPublicDomainReq, + PkgBindingSetAddressEnabledReq, + PkgRemovePrivateDomainReq, + PkgRemovePublicDomainReq, + ServerBindingSetAddressEnabledReq, + ServerState, + WebsocketConfig, +} from './api.types' +import { ApiService } from './embassy-api.service' +import { mockPatchData } from './mock-patch' import markdown from './md-sample.md' @@ -86,7 +113,7 @@ export class MockApiService extends ApiService { openWebsocket$( guid: string, - config: RR.WebsocketConfig = {}, + config: WebsocketConfig = {}, ): WebSocketSubject { if (guid === 'db-guid') { return this.mockWsSource$.pipe( @@ -121,7 
+148,7 @@ export class MockApiService extends ApiService { // state - async echo(params: RR.EchoReq, url: string): Promise { + async echo(params: T.EchoParams, url: string): Promise { if (url) { const num = Math.floor(Math.random() * 10) + 1 if (num > 8) return params.message @@ -132,7 +159,7 @@ export class MockApiService extends ApiService { } private stateIndex = 0 - async getState(): Promise { + async getState(): Promise { await pauseFor(1000) this.stateIndex++ @@ -142,9 +169,10 @@ export class MockApiService extends ApiService { // db - async subscribeToPatchDB( - params: RR.SubscribePatchReq, - ): Promise { + async subscribeToPatchDB(params: {}): Promise<{ + dump: Dump + guid: string + }> { await pauseFor(2000) return { dump: { id: 1, value: mockPatchData }, @@ -155,9 +183,9 @@ export class MockApiService extends ApiService { async setDbValue( pathArr: Array, value: T, - ): Promise { + ): Promise { const pointer = pathFromArray(pathArr) - const params: RR.SetDBValueReq = { pointer, value } + const params = { pointer, value } await pauseFor(2000) const patch = [ { @@ -173,29 +201,27 @@ export class MockApiService extends ApiService { // auth - async login(params: RR.LoginReq): Promise { + async login(params: T.LoginParams): Promise { await pauseFor(2000) return null } - async logout(params: RR.LogoutReq): Promise { + async logout(params: {}): Promise { await pauseFor(2000) return null } - async getSessions(params: RR.GetSessionsReq): Promise { + async getSessions(params: {}): Promise { await pauseFor(2000) return Mock.Sessions } - async killSessions(params: RR.KillSessionsReq): Promise { + async killSessions(params: T.KillParams): Promise { await pauseFor(2000) return null } - async resetPassword( - params: RR.ResetPasswordReq, - ): Promise { + async resetPassword(params: T.ResetPasswordParams): Promise { await pauseFor(2000) return null } @@ -211,7 +237,7 @@ export class MockApiService extends ApiService { } } - async diagnosticGetError(): Promise { + async 
diagnosticGetError(): Promise { await pauseFor(1000) return { code: 15, @@ -232,15 +258,13 @@ export class MockApiService extends ApiService { await pauseFor(1000) } - async diagnosticGetLogs( - params: RR.GetServerLogsReq, - ): Promise { + async diagnosticGetLogs(params: T.LogsParams): Promise { return this.getServerLogs(params) } // init - async initFollowProgress(): Promise { + async initFollowProgress(): Promise { await pauseFor(250) return { progress: PROGRESS, @@ -248,7 +272,7 @@ export class MockApiService extends ApiService { } } - async initFollowLogs(): Promise { + async initFollowLogs(): Promise { await pauseFor(2000) return { startCursor: 'start-cursor', @@ -258,19 +282,15 @@ export class MockApiService extends ApiService { // server - async getSystemTime( - params: RR.GetSystemTimeReq, - ): Promise { + async getSystemTime(params: {}): Promise { await pauseFor(2000) return { now: new Date().toUTCString(), - uptime: 1234567, + uptime: 1234567n, } } - async getServerLogs( - params: RR.GetServerLogsReq, - ): Promise { + async getServerLogs(params: T.LogsParams): Promise { await pauseFor(2000) const entries = this.randomLogs(params.limit) @@ -281,20 +301,7 @@ export class MockApiService extends ApiService { } } - async getTorLogs(params: RR.GetServerLogsReq): Promise { - await pauseFor(2000) - const entries = this.randomLogs(params.limit) - - return { - entries, - startCursor: 'start-cursor', - endCursor: 'end-cursor', - } - } - - async getKernelLogs( - params: RR.GetServerLogsReq, - ): Promise { + async getKernelLogs(params: T.LogsParams): Promise { await pauseFor(2000) const entries = this.randomLogs(params.limit) @@ -306,18 +313,8 @@ export class MockApiService extends ApiService { } async followServerLogs( - params: RR.FollowServerLogsReq, - ): Promise { - await pauseFor(2000) - return { - startCursor: 'start-cursor', - guid: 'logs-guid', - } - } - - async followTorLogs( - params: RR.FollowServerLogsReq, - ): Promise { + params: FollowServerLogsReq, + 
): Promise { await pauseFor(2000) return { startCursor: 'start-cursor', @@ -326,8 +323,8 @@ export class MockApiService extends ApiService { } async followKernelLogs( - params: RR.FollowServerLogsReq, - ): Promise { + params: FollowServerLogsReq, + ): Promise { await pauseFor(2000) return { startCursor: 'start-cursor', @@ -335,7 +332,7 @@ export class MockApiService extends ApiService { } } - private randomLogs(limit = 1): Log[] { + private randomLogs(limit = 1): T.LogEntry[] { const arrLength = Math.ceil(limit / Mock.ServerLogs.length) const logs = new Array(arrLength) .fill(Mock.ServerLogs) @@ -344,9 +341,7 @@ export class MockApiService extends ApiService { return logs } - async followServerMetrics( - params: RR.FollowServerMetricsReq, - ): Promise { + async followServerMetrics(params: {}): Promise { await pauseFor(2000) return { guid: 'metrics-guid', @@ -354,7 +349,10 @@ export class MockApiService extends ApiService { } } - async updateServer(params?: RR.UpdateServerReq): Promise { + async updateServer(params?: { + registry: string + targetVersion: string + }): Promise<'updating' | 'no-updates'> { await pauseFor(2000) const initialProgress = { size: null, @@ -377,9 +375,7 @@ export class MockApiService extends ApiService { return 'updating' } - async restartServer( - params: RR.RestartServerReq, - ): Promise { + async restartServer(params: {}): Promise { await pauseFor(2000) const patch = [ @@ -405,9 +401,7 @@ export class MockApiService extends ApiService { return null } - async shutdownServer( - params: RR.ShutdownServerReq, - ): Promise { + async shutdownServer(params: {}): Promise { await pauseFor(2000) const patch = [ @@ -433,7 +427,7 @@ export class MockApiService extends ApiService { return null } - async repairDisk(params: RR.RestartServerReq): Promise { + async repairDisk(params: {}): Promise { await pauseFor(2000) return null } @@ -453,7 +447,27 @@ export class MockApiService extends ApiService { return null } - async setKeyboard(params: 
RR.SetKeyboardReq): Promise { + async setHostname(params: T.SetServerHostnameParams): Promise { + await pauseFor(1000) + + const patch = [ + { + op: PatchOp.REPLACE, + path: '/serverInfo/name', + value: params.name, + }, + { + op: PatchOp.REPLACE, + path: '/serverInfo/hostname', + value: params.hostname, + }, + ] + this.mockRevision(patch) + + return null + } + + async setKeyboard(params: FullKeyboard): Promise { await pauseFor(1000) const patch = [ @@ -468,7 +482,7 @@ export class MockApiService extends ApiService { return null } - async setLanguage(params: RR.SetLanguageReq): Promise { + async setLanguage(params: SetLanguageParams): Promise { await pauseFor(1000) const patch = [ @@ -483,7 +497,7 @@ export class MockApiService extends ApiService { return null } - async setDns(params: RR.SetDnsReq): Promise { + async setDns(params: T.SetStaticDnsParams): Promise { await pauseFor(2000) const patch: ReplaceOperation[] = [ @@ -498,36 +512,48 @@ export class MockApiService extends ApiService { return null } - async queryDns(params: RR.QueryDnsReq): Promise { + async queryDns(params: T.QueryDnsParams): Promise { await pauseFor(2000) return null } - async resetTor(params: RR.ResetTorReq): Promise { + async checkPort(params: T.CheckPortParams): Promise { await pauseFor(2000) - return null + + return { + ip: '0.0.0.0', + port: params.port, + openExternally: true, + openInternally: false, + hairpinning: true, + } + } + + async checkDns(params: T.CheckDnsParams): Promise { + await pauseFor(2000) + + return false } // marketplace URLs - async checkOSUpdate( - params: RR.CheckOsUpdateReq, - ): Promise { + async checkOSUpdate(params: { + registry: string + serverId: string + }): Promise { await pauseFor(2000) return Mock.RegistryOSUpdate } - async getRegistryInfo( - params: RR.GetRegistryInfoReq, - ): Promise { + async getRegistryInfo(params: { registry: string }): Promise { await pauseFor(2000) return Mock.RegistryInfo } async getRegistryPackage( - params: 
RR.GetRegistryPackageReq, - ): Promise { + params: GetRegistryPackageReq, + ): Promise { await pauseFor(2000) const { targetVersion, id } = params @@ -540,8 +566,8 @@ export class MockApiService extends ApiService { } async getRegistryPackages( - params: RR.GetRegistryPackagesReq, - ): Promise { + params: GetRegistryPackagesReq, + ): Promise { await pauseFor(2000) return Mock.RegistryPackages } @@ -549,37 +575,35 @@ export class MockApiService extends ApiService { // notification async getNotifications( - params: RR.GetNotificationsReq, - ): Promise { + params: T.ListNotificationParams, + ): Promise { await pauseFor(2000) return Mock.Notifications } - async deleteNotifications( - params: RR.DeleteNotificationsReq, - ): Promise { + async deleteNotifications(params: T.ModifyNotificationParams): Promise { await pauseFor(2000) return null } async markSeenNotifications( - params: RR.MarkSeenNotificationReq, - ): Promise { + params: T.ModifyNotificationParams, + ): Promise { await pauseFor(2000) return null } async markSeenAllNotifications( - params: RR.MarkSeenAllNotificationsReq, - ): Promise { + params: T.ModifyNotificationBeforeParams, + ): Promise { await pauseFor(2000) return null } async markUnseenNotifications( - params: RR.MarkUnseenNotificationReq, - ): Promise { + params: T.ModifyNotificationParams, + ): Promise { await pauseFor(2000) return null } @@ -587,18 +611,17 @@ export class MockApiService extends ApiService { // proxies private proxyId = 0 - async addTunnel(params: RR.AddTunnelReq): Promise { + async addTunnel(params: T.AddTunnelParams): Promise<{ id: string }> { await pauseFor(2000) const id = `wg${this.proxyId++}` - const patch: AddOperation[] = [ + const patch: AddOperation[] = [ { op: PatchOp.ADD, path: `/serverInfo/network/gateways/${id}`, value: { name: params.name, - public: params.public, secure: false, ipInfo: { name: id, @@ -610,15 +633,25 @@ export class MockApiService extends ApiService { lanIp: ['192.168.1.10'], dnsServers: [], }, + type: 
'inbound-outbound', }, }, ] + + if (params.setAsDefaultOutbound) { + (patch as any[]).push({ + op: PatchOp.REPLACE, + path: '/serverInfo/network/defaultOutbound', + value: id, + }) + } + this.mockRevision(patch) return { id } } - async updateTunnel(params: RR.UpdateTunnelReq): Promise { + async updateTunnel(params: T.RenameGatewayParams): Promise { await pauseFor(2000) const patch: ReplaceOperation[] = [ @@ -633,7 +666,7 @@ export class MockApiService extends ApiService { return null } - async removeTunnel(params: RR.RemoveTunnelReq): Promise { + async removeTunnel(params: T.RemoveTunnelParams): Promise { await pauseFor(2000) const patch: RemoveOperation[] = [ { @@ -646,15 +679,13 @@ export class MockApiService extends ApiService { return null } - // wifi - - async enableWifi(params: RR.EnabledWifiReq): Promise { + async setDefaultOutbound(params: { gateway: string | null }): Promise { await pauseFor(2000) const patch = [ { op: PatchOp.REPLACE, - path: '/serverInfo/network/wifi/enabled', - value: params.enable, + path: '/serverInfo/network/defaultOutbound', + value: params.gateway, }, ] this.mockRevision(patch) @@ -662,36 +693,64 @@ export class MockApiService extends ApiService { return null } - async setWifiCountry( - params: RR.SetWifiCountryReq, - ): Promise { + async setServiceOutbound(params: T.SetOutboundGatewayParams): Promise { + await pauseFor(2000) + const patch = [ + { + op: PatchOp.REPLACE, + path: `/packageData/${params.package}/outboundGateway`, + value: params.gateway, + }, + ] + this.mockRevision(patch) + + return null + } + + // wifi + + async enableWifi(params: T.SetWifiEnabledParams): Promise { + await pauseFor(2000) + const patch = [ + { + op: PatchOp.REPLACE, + path: '/serverInfo/network/wifi/enabled', + value: params.enabled, + }, + ] + this.mockRevision(patch) + + return null + } + + async setWifiCountry(params: T.SetCountryParams): Promise { await pauseFor(2000) return null } - async getWifi(params: RR.GetWifiReq): Promise { + async 
getWifi(params: {}, timeout: number): Promise { await pauseFor(2000) return Mock.Wifi } - async addWifi(params: RR.AddWifiReq): Promise { + async addWifi(params: T.WifiAddParams): Promise { await pauseFor(2000) return null } - async connectWifi(params: RR.ConnectWifiReq): Promise { + async connectWifi(params: T.WifiSsidParams): Promise { await pauseFor(2000) return null } - async deleteWifi(params: RR.DeleteWifiReq): Promise { + async deleteWifi(params: T.WifiSsidParams): Promise { await pauseFor(2000) return null } // smtp - async setSmtp(params: RR.SetSMTPReq): Promise { + async setSmtp(params: T.SmtpValue): Promise { await pauseFor(2000) const patch = [ { @@ -705,7 +764,7 @@ export class MockApiService extends ApiService { return null } - async clearSmtp(params: RR.ClearSMTPReq): Promise { + async clearSmtp(params: {}): Promise { await pauseFor(2000) const patch = [ { @@ -719,40 +778,40 @@ export class MockApiService extends ApiService { return null } - async testSmtp(params: RR.TestSMTPReq): Promise { + async testSmtp(params: T.TestSmtpParams): Promise { await pauseFor(2000) return null } // ssh - async getSshKeys(params: RR.GetSSHKeysReq): Promise { + async getSshKeys(params: {}): Promise { await pauseFor(2000) return Mock.SshKeys } - async addSshKey(params: RR.AddSSHKeyReq): Promise { + async addSshKey(params: T.SshAddParams): Promise { await pauseFor(2000) return Mock.SshKey } - async deleteSshKey(params: RR.DeleteSSHKeyReq): Promise { + async deleteSshKey(params: T.SshDeleteParams): Promise { await pauseFor(2000) return null } // backup - async getBackupTargets( - params: RR.GetBackupTargetsReq, - ): Promise { + async getBackupTargets(params: {}): Promise<{ + [id: string]: T.BackupTarget + }> { await pauseFor(2000) return Mock.BackupTargets } async addBackupTarget( - params: RR.AddBackupTargetReq, - ): Promise { + params: T.CifsAddParams, + ): Promise<{ [id: string]: CifsBackupTarget }> { await pauseFor(2000) const { hostname, path, username } = params 
return { @@ -768,8 +827,8 @@ export class MockApiService extends ApiService { } async updateBackupTarget( - params: RR.UpdateBackupTargetReq, - ): Promise { + params: T.CifsUpdateParams, + ): Promise<{ [id: string]: CifsBackupTarget }> { await pauseFor(2000) const { id, hostname, path, username } = params return { @@ -782,24 +841,20 @@ export class MockApiService extends ApiService { } } - async removeBackupTarget( - params: RR.RemoveBackupTargetReq, - ): Promise { + async removeBackupTarget(params: T.CifsRemoveParams): Promise { await pauseFor(2000) return null } - async getBackupInfo( - params: RR.GetBackupInfoReq, - ): Promise { + async getBackupInfo(params: T.InfoParams): Promise { await pauseFor(2000) return Mock.BackupInfo } - async createBackup(params: RR.CreateBackupReq): Promise { + async createBackup(params: T.BackupParams): Promise { await pauseFor(2000) const serverPath = '/serverInfo/statusInfo/backupProgress' - const ids = params.packageIds + const ids = params.packageIds || [] setTimeout(async () => { for (let i = 0; i < ids.length; i++) { @@ -955,9 +1010,7 @@ export class MockApiService extends ApiService { // package - async getPackageLogs( - params: RR.GetPackageLogsReq, - ): Promise { + async getPackageLogs(params: GetPackageLogsReq): Promise { await pauseFor(2000) let entries if (Math.random() < 0.2) { @@ -978,8 +1031,8 @@ export class MockApiService extends ApiService { } async followPackageLogs( - params: RR.FollowPackageLogsReq, - ): Promise { + params: FollowPackageLogsReq, + ): Promise { await pauseFor(2000) return { startCursor: 'start-cursor', @@ -987,9 +1040,7 @@ export class MockApiService extends ApiService { } } - async installPackage( - params: RR.InstallPackageReq, - ): Promise { + async installPackage(params: T.InstallParams): Promise { await pauseFor(2000) setTimeout(async () => { @@ -1026,9 +1077,7 @@ export class MockApiService extends ApiService { return null } - async cancelInstallPackage( - params: RR.CancelInstallPackageReq, 
- ): Promise { + async cancelInstallPackage(params: T.CancelInstallParams): Promise { await pauseFor(500) const patch: RemoveOperation[] = [ @@ -1043,9 +1092,21 @@ export class MockApiService extends ApiService { } async getActionInput( - params: RR.GetActionInputReq, - ): Promise { + params: T.GetActionInputParams, + ): Promise { await pauseFor(2000) + + if ( + params.packageId === 'tor' && + params.actionId === 'create-onion-service' + ) { + return { + eventId: 'ANZXNWIFRTTBZ6T52KQPZILIQQODDHXQ', + value: null, + spec: await Mock.getCreateOnionServiceSpec(), + } + } + return { eventId: 'ANZXNWIFRTTBZ6T52KQPZILIQQODDHXQ', value: Mock.MockConfig, @@ -1053,7 +1114,7 @@ export class MockApiService extends ApiService { } } - async runAction(params: RR.ActionReq): Promise { + async runAction(params: T.RunActionParams): Promise { await pauseFor(2000) const patch: ReplaceOperation<{ [key: string]: T.TaskEntry }>[] = [ @@ -1070,7 +1131,7 @@ export class MockApiService extends ApiService { // return Mock.ActionResSingle } - async clearTask(params: RR.ClearTaskReq): Promise { + async clearTask(params: T.ClearTaskParams): Promise { await pauseFor(2000) const patch: RemoveOperation[] = [ @@ -1084,9 +1145,7 @@ export class MockApiService extends ApiService { return null } - async restorePackages( - params: RR.RestorePackagesReq, - ): Promise { + async restorePackages(params: T.RestorePackageParams): Promise { await pauseFor(2000) const patch: AddOperation[] = params.ids.map(id => { setTimeout(async () => { @@ -1114,7 +1173,7 @@ export class MockApiService extends ApiService { return null } - async startPackage(params: RR.StartPackageReq): Promise { + async startPackage(params: T.ControlParams): Promise { const path = `/packageData/${params.id}/statusInfo` await pauseFor(2000) @@ -1179,9 +1238,7 @@ export class MockApiService extends ApiService { return null } - async restartPackage( - params: RR.RestartPackageReq, - ): Promise { + async restartPackage(params: T.ControlParams): 
Promise { await pauseFor(2000) const path = `/packageData/${params.id}/statusInfo` @@ -1245,7 +1302,7 @@ export class MockApiService extends ApiService { return null } - async stopPackage(params: RR.StopPackageReq): Promise { + async stopPackage(params: T.ControlParams): Promise { await pauseFor(2000) const path = `/packageData/${params.id}/statusInfo` @@ -1283,15 +1340,11 @@ export class MockApiService extends ApiService { return null } - async rebuildPackage( - params: RR.RebuildPackageReq, - ): Promise { + async rebuildPackage(params: T.RebuildParams): Promise { return this.restartPackage(params) } - async uninstallPackage( - params: RR.UninstallPackageReq, - ): Promise { + async uninstallPackage(params: T.UninstallParams): Promise { await pauseFor(2000) setTimeout(async () => { @@ -1317,7 +1370,7 @@ export class MockApiService extends ApiService { return null } - async sideloadPackage(): Promise { + async sideloadPackage(): Promise { await pauseFor(2000) return { upload: 'sideload-upload-guid', // no significance, randomly generated @@ -1341,7 +1394,7 @@ export class MockApiService extends ApiService { // return null // } - async initAcme(params: RR.InitAcmeReq): Promise { + async initAcme(params: T.InitAcmeParams): Promise { await pauseFor(2000) const patch = [ @@ -1358,7 +1411,7 @@ export class MockApiService extends ApiService { return null } - async removeAcme(params: RR.RemoveAcmeReq): Promise { + async removeAcme(params: T.RemoveAcmeParams): Promise { await pauseFor(2000) const regex = new RegExp('/', 'g') @@ -1374,107 +1427,39 @@ export class MockApiService extends ApiService { return null } - async addTorKey(params: RR.AddTorKeyReq): Promise { - await pauseFor(2000) - return 'vanityabcdefghijklmnop.onion' - } - - async generateTorKey(params: RR.GenerateTorKeyReq): Promise { - await pauseFor(2000) - return 'abcdefghijklmnopqrstuv.onion' - } - - async serverBindingToggleGateway( - params: RR.ServerBindingToggleGatewayReq, - ): Promise { + async 
serverBindingSetAddressEnabled( + params: ServerBindingSetAddressEnabledReq, + ): Promise { await pauseFor(2000) - const patch = [ - { - op: PatchOp.REPLACE, - path: `/serverInfo/network/host/bindings/${params.internalPort}/net/publicEnabled`, - value: params.enabled ? [params.gateway] : [], - }, - ] - this.mockRevision(patch) - - return null - } - - async serverAddOnion(params: RR.ServerAddOnionReq): Promise { - await pauseFor(2000) - - const patch: Operation[] = [ - { - op: PatchOp.ADD, - path: `/serverInfo/host/onions/0`, - value: params.onion, - }, - { - op: PatchOp.ADD, - path: `/serverInfo/host/hostnameInfo/80/0`, - value: { - kind: 'onion', - hostname: { - port: 80, - sslPort: 443, - value: params.onion, - }, - }, - }, - ] - this.mockRevision(patch) - - return null - } - - async serverRemoveOnion( - params: RR.ServerRemoveOnionReq, - ): Promise { - await pauseFor(2000) - - const patch: RemoveOperation[] = [ - { - op: PatchOp.REMOVE, - path: `/serverInfo/host/onions/0`, - }, - { - op: PatchOp.REMOVE, - path: `/serverInfo/host/hostnameInfo/80/-1`, - }, - ] - this.mockRevision(patch) + const basePath = `/serverInfo/network/host/bindings/${params.internalPort}/addresses` + this.mockSetAddressEnabled(basePath, params.address, params.enabled) return null } async osUiAddPublicDomain( - params: RR.OsUiAddPublicDomainReq, - ): Promise { + params: T.AddPublicDomainParams, + ): Promise { await pauseFor(2000) const patch: Operation[] = [ { op: PatchOp.ADD, - path: `/serverInfo/host/publicDomains`, + path: `/serverInfo/network/host/publicDomains`, value: { [params.fqdn]: { gateway: params.gateway, acme: params.acme }, }, }, { op: PatchOp.ADD, - path: `/serverInfo/host/hostnameInfo/80/0`, + path: `/serverInfo/network/host/bindings/80/addresses/available/-`, value: { - kind: 'ip', - gatewayId: 'eth0', + ssl: true, public: true, - hostname: { - kind: 'domain', - domain: params.fqdn, - subdomain: null, - port: null, - sslPort: 443, - }, + host: params.fqdn, + port: 443, + 
metadata: { kind: 'public-domain', gateway: params.gateway }, }, }, ] @@ -1483,19 +1468,13 @@ export class MockApiService extends ApiService { return null } - async osUiRemovePublicDomain( - params: RR.OsUiRemovePublicDomainReq, - ): Promise { + async osUiRemovePublicDomain(params: T.RemoveDomainParams): Promise { await pauseFor(2000) const patch: RemoveOperation[] = [ { op: PatchOp.REMOVE, - path: `/serverInfo/host/publicDomains/${params.fqdn}`, - }, - { - op: PatchOp.REMOVE, - path: `/serverInfo/host/hostnameInfo/80/0`, + path: `/serverInfo/network/host/publicDomains/${params.fqdn}`, }, ] this.mockRevision(patch) @@ -1503,31 +1482,24 @@ export class MockApiService extends ApiService { return null } - async osUiAddPrivateDomain( - params: RR.OsUiAddPrivateDomainReq, - ): Promise { + async osUiAddPrivateDomain(params: T.AddPrivateDomainParams): Promise { await pauseFor(2000) const patch: Operation[] = [ { - op: PatchOp.REPLACE, - path: `/serverInfo/host/privateDomains`, - value: [params.fqdn], + op: PatchOp.ADD, + path: `/serverInfo/network/host/privateDomains/${params.fqdn}`, + value: ['eth0'], }, { op: PatchOp.ADD, - path: `/serverInfo/host/hostnameInfo/80/0`, + path: `/serverInfo/network/host/bindings/80/addresses/available/-`, value: { - kind: 'ip', - gatewayId: 'eth0', + ssl: true, public: false, - hostname: { - kind: 'domain', - domain: params.fqdn, - subdomain: null, - port: null, - sslPort: 443, - }, + host: params.fqdn, + port: 443, + metadata: { kind: 'private-domain', gateways: ['eth0'] }, }, }, ] @@ -1536,84 +1508,13 @@ export class MockApiService extends ApiService { return null } - async osUiRemovePrivateDomain( - params: RR.OsUiRemovePrivateDomainReq, - ): Promise { - await pauseFor(2000) - - const patch: Operation[] = [ - { - op: PatchOp.REPLACE, - path: `/serverInfo/host/privateDomains`, - value: [], - }, - { - op: PatchOp.REMOVE, - path: `/serverInfo/host/hostnameInfo/80/0`, - }, - ] - this.mockRevision(patch) - - return null - } - - async 
pkgBindingToggleGateway( - params: RR.PkgBindingToggleGatewayReq, - ): Promise { - await pauseFor(2000) - - const patch = [ - { - op: PatchOp.REPLACE, - path: `/packageData/${params.package}/hosts/${params.host}/bindings/${params.internalPort}/net/privateDisabled`, - value: params.enabled ? [] : [params.gateway], - }, - ] - this.mockRevision(patch) - - return null - } - - async pkgAddOnion(params: RR.PkgAddOnionReq): Promise { - await pauseFor(2000) - - const patch: Operation[] = [ - { - op: PatchOp.ADD, - path: `/packageData/${params.package}/hosts/${params.host}/onions/0`, - value: params.onion, - }, - { - op: PatchOp.ADD, - path: `/packageData/${params.package}/hosts/${params.host}/hostnameInfo/80/0`, - value: { - kind: 'onion', - hostname: { - port: 80, - sslPort: 443, - value: params.onion, - }, - }, - }, - ] - this.mockRevision(patch) - - return null - } - - async pkgRemoveOnion( - params: RR.PkgRemoveOnionReq, - ): Promise { + async osUiRemovePrivateDomain(params: T.RemoveDomainParams): Promise { await pauseFor(2000) const patch: RemoveOperation[] = [ { op: PatchOp.REMOVE, - path: `/packageData/${params.package}/hosts/${params.host}/onions/0`, - }, - { - op: PatchOp.REMOVE, - path: `/packageData/${params.package}/hosts/${params.host}/hostnameInfo/80/0`, + path: `/serverInfo/network/host/privateDomains/${params.fqdn}`, }, ] this.mockRevision(patch) @@ -1621,9 +1522,20 @@ export class MockApiService extends ApiService { return null } + async pkgBindingSetAddressEnabled( + params: PkgBindingSetAddressEnabledReq, + ): Promise { + await pauseFor(2000) + + const basePath = `/packageData/${params.package}/hosts/${params.host}/bindings/${params.internalPort}/addresses` + this.mockSetAddressEnabled(basePath, params.address, params.enabled) + + return null + } + async pkgAddPublicDomain( - params: RR.PkgAddPublicDomainReq, - ): Promise { + params: PkgAddPublicDomainReq, + ): Promise { await pauseFor(2000) const patch: Operation[] = [ @@ -1636,18 +1548,13 @@ export 
class MockApiService extends ApiService { }, { op: PatchOp.ADD, - path: `/packageData/${params.package}/hosts/${params.host}/hostnameInfo/80/0`, + path: `/packageData/${params.package}/hosts/${params.host}/bindings/80/addresses/available/-`, value: { - kind: 'ip', - gatewayId: 'eth0', + ssl: true, public: true, - hostname: { - kind: 'domain', - domain: params.fqdn, - subdomain: null, - port: null, - sslPort: 443, - }, + host: params.fqdn, + port: 443, + metadata: { kind: 'public-domain', gateway: params.gateway }, }, }, ] @@ -1656,9 +1563,7 @@ export class MockApiService extends ApiService { return null } - async pkgRemovePublicDomain( - params: RR.PkgRemovePublicDomainReq, - ): Promise { + async pkgRemovePublicDomain(params: PkgRemovePublicDomainReq): Promise { await pauseFor(2000) const patch: RemoveOperation[] = [ @@ -1666,41 +1571,30 @@ export class MockApiService extends ApiService { op: PatchOp.REMOVE, path: `/packageData/${params.package}/hosts/${params.host}/publicDomains/${params.fqdn}`, }, - { - op: PatchOp.REMOVE, - path: `/packageData/${params.package}/hosts/${params.host}/hostnameInfo/80/0`, - }, ] this.mockRevision(patch) return null } - async pkgAddPrivateDomain( - params: RR.PkgAddPrivateDomainReq, - ): Promise { + async pkgAddPrivateDomain(params: PkgAddPrivateDomainReq): Promise { await pauseFor(2000) const patch: Operation[] = [ { - op: PatchOp.REPLACE, - path: `/packageData/${params.package}/hosts/${params.host}/privateDomains`, - value: [params.fqdn], + op: PatchOp.ADD, + path: `/packageData/${params.package}/hosts/${params.host}/privateDomains/${params.fqdn}`, + value: ['eth0'], }, { op: PatchOp.ADD, - path: `/packageData/${params.package}/hosts/${params.host}/hostnameInfo/80/0`, + path: `/packageData/${params.package}/hosts/${params.host}/bindings/80/addresses/available/-`, value: { - kind: 'ip', - gatewayId: 'eth0', + ssl: true, public: false, - hostname: { - kind: 'domain', - domain: params.fqdn, - subdomain: null, - port: null, - sslPort: 
443, - }, + host: params.fqdn, + port: 443, + metadata: { kind: 'private-domain', gateways: ['eth0'] }, }, }, ] @@ -1710,19 +1604,14 @@ export class MockApiService extends ApiService { } async pkgRemovePrivateDomain( - params: RR.PkgRemovePrivateDomainReq, - ): Promise { + params: PkgRemovePrivateDomainReq, + ): Promise { await pauseFor(2000) - const patch: Operation[] = [ - { - op: PatchOp.REPLACE, - path: `/packageData/${params.package}/hosts/${params.host}/privateDomains`, - value: [], - }, + const patch: RemoveOperation[] = [ { op: PatchOp.REMOVE, - path: `/packageData/${params.package}/hosts/${params.host}/hostnameInfo/80/0`, + path: `/packageData/${params.package}/hosts/${params.host}/privateDomains/${params.fqdn}`, }, ] this.mockRevision(patch) @@ -1956,6 +1845,63 @@ export class MockApiService extends ApiService { }, 1000) } + private mockSetAddressEnabled( + basePath: string, + addressJson: string, + enabled: boolean | null, + ): void { + const h: T.HostnameInfo = JSON.parse(addressJson) + const isPublicIp = + h.public && (h.metadata.kind === 'ipv4' || h.metadata.kind === 'ipv6') + + const current = this.mockData(basePath) as T.DerivedAddressInfo + + if (isPublicIp) { + if (h.port === null) return + const sa = + h.metadata.kind === 'ipv6' + ? `[${h.hostname}]:${h.port}` + : `${h.hostname}:${h.port}` + + const arr = [...current.enabled] + + if (enabled) { + if (!arr.includes(sa)) arr.push(sa) + } else { + const idx = arr.indexOf(sa) + if (idx >= 0) arr.splice(idx, 1) + } + + current.enabled = arr + this.mockRevision([ + { op: PatchOp.REPLACE, path: `${basePath}/enabled`, value: arr }, + ]) + } else { + const port = h.port ?? 
0 + const arr = current.disabled.filter( + ([dHost, dPort]) => !(dHost === h.hostname && dPort === port), + ) + + if (!enabled) { + arr.push([h.hostname, port]) + } + + current.disabled = arr + this.mockRevision([ + { op: PatchOp.REPLACE, path: `${basePath}/disabled`, value: arr }, + ]) + } + } + + private mockData(path: string): any { + const parts = path.split('/').filter(Boolean) + let obj: any = mockPatchData + for (const part of parts) { + obj = obj[part] + } + return obj + } + private async mockRevision(patch: Operation[]): Promise { const revision = { id: ++this.sequence, diff --git a/web/projects/ui/src/app/services/api/mock-patch.ts b/web/projects/ui/src/app/services/api/mock-patch.ts index b7da8d5b9..cff1c4ddf 100644 --- a/web/projects/ui/src/app/services/api/mock-patch.ts +++ b/web/projects/ui/src/app/services/api/mock-patch.ts @@ -38,8 +38,88 @@ export const mockPatchData: DataModel = { net: { assignedPort: null, assignedSslPort: 443, - publicEnabled: [], - privateDisabled: [], + }, + addresses: { + enabled: [], + disabled: [], + available: [ + { + ssl: true, + public: false, + hostname: 'adjective-noun.local', + port: 443, + metadata: { + kind: 'mdns', + gateways: ['eth0', 'wlan0'], + }, + }, + { + ssl: false, + public: false, + hostname: '10.0.0.1', + port: 80, + metadata: { kind: 'ipv4', gateway: 'eth0' }, + }, + { + ssl: false, + public: false, + hostname: '10.0.0.2', + port: 80, + metadata: { kind: 'ipv4', gateway: 'wlan0' }, + }, + { + ssl: false, + public: false, + hostname: 'fe80::cd00:0000:0cde:1257:0000:211e:72cd', + port: 80, + metadata: { kind: 'ipv6', gateway: 'eth0', scopeId: 2 }, + }, + { + ssl: false, + public: false, + hostname: 'fe80::cd00:0000:0cde:1257:0000:211e:1234', + port: 80, + metadata: { kind: 'ipv6', gateway: 'wlan0', scopeId: 3 }, + }, + { + ssl: true, + public: false, + hostname: 'my-server.home', + port: 443, + metadata: { + kind: 'private-domain', + gateways: ['eth0'], + }, + }, + { + ssl: false, + public: false, + 
hostname: + 'abc123def456ghi789jkl012mno345pqr678stu901vwx234yz567abc.onion', + port: 80, + metadata: { + kind: 'plugin', + packageId: 'tor', + removeAction: 'delete-onion-service', + overflowActions: ['regenerate-key'], + info: null, + }, + }, + { + ssl: true, + public: false, + hostname: + 'abc123def456ghi789jkl012mno345pqr678stu901vwx234yz567abc.onion', + port: 443, + metadata: { + kind: 'plugin', + packageId: 'tor', + removeAction: 'delete-onion-service', + overflowActions: ['regenerate-key'], + info: null, + }, + }, + ], }, options: { preferredExternalPort: 80, @@ -53,94 +133,22 @@ export const mockPatchData: DataModel = { }, }, publicDomains: {}, - privateDomains: [], - onions: ['myveryownspecialtoraddress'], - hostnameInfo: { - 80: [ - { - kind: 'ip', - gateway: { id: 'eth0', name: 'Ethernet', public: false }, - public: false, - hostname: { - kind: 'local', - value: 'adjective-noun.local', - port: null, - sslPort: 443, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'local', - value: 'adjective-noun.local', - port: null, - sslPort: 443, - }, - }, - { - kind: 'ip', - gateway: { id: 'eth0', name: 'Ethernet', public: false }, - public: false, - hostname: { - kind: 'ipv4', - value: '10.0.0.1', - port: null, - sslPort: 443, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'ipv4', - value: '10.0.0.2', - port: null, - sslPort: 443, - }, - }, - { - kind: 'ip', - gateway: { id: 'eth0', name: 'Ethernet', public: false }, - public: false, - hostname: { - kind: 'ipv6', - value: 'fe80::cd00:0000:0cde:1257:0000:211e:72cd', - scopeId: 2, - port: null, - sslPort: 443, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'ipv6', - value: 'fe80::cd00:0000:0cde:1257:0000:211e:1234', - scopeId: 3, - port: null, - sslPort: 443, - }, - }, - { - kind: 
'onion', - hostname: { - value: 'myveryownspecialtoraddress.onion', - port: 80, - sslPort: 443, - }, - }, - ], + privateDomains: { + 'my-server.home': ['eth0'], }, + portForwards: [ + { + src: '203.0.113.45:443', + dst: '10.0.0.1:443', + gateway: 'eth0', + }, + ], }, gateways: { eth0: { name: null, - public: null, secure: null, + type: null, ipInfo: { name: 'Wired Connection 1', scopeId: 1, @@ -154,8 +162,8 @@ export const mockPatchData: DataModel = { }, wlan0: { name: null, - public: null, secure: null, + type: null, ipInfo: { name: 'Wireless Connection 1', scopeId: 2, @@ -172,8 +180,8 @@ export const mockPatchData: DataModel = { }, wireguard1: { name: 'StartTunnel', - public: null, secure: null, + type: 'inbound-outbound', ipInfo: { name: 'wireguard1', scopeId: 2, @@ -188,7 +196,23 @@ export const mockPatchData: DataModel = { dnsServers: ['1.1.1.1'], }, }, + wireguard2: { + name: 'Mullvad VPN', + secure: null, + type: 'outbound-only', + ipInfo: { + name: 'wireguard2', + scopeId: 4, + deviceType: 'wireguard', + subnets: [], + wanIp: '198.51.100.77', + ntpServers: [], + lanIp: [], + dnsServers: ['10.64.0.1'], + }, + }, }, + defaultOutbound: 'eth0', dns: { dhcpServers: ['1.1.1.1', '8.8.8.8'], staticServers: null, @@ -208,11 +232,13 @@ export const mockPatchData: DataModel = { shuttingDown: false, backupProgress: null, }, + name: 'Random Words', hostname: 'random-words', pubkey: 'npub1sg6plzptd64u62a878hep2kev88swjh3tw00gjsfl8f237lmu63q0uf63m', caFingerprint: '63:2B:11:99:44:40:17:DF:37:FC:C3:DF:0F:3D:15', ntpSynced: false, smtp: null, + ifconfigUrl: 'https://ifconfig.co', platform: 'x86_64-nonfree', zram: true, governor: 'performance', @@ -335,8 +361,10 @@ export const mockPatchData: DataModel = { }, hosts: {}, storeExposedDependents: [], + outboundGateway: null, registry: 'https://registry.start9.com/', developerKey: 'developer-key', + plugin: { url: null }, tasks: { config: { active: true, @@ -472,13 +500,13 @@ export const mockPatchData: DataModel = { }, rpc: { 
id: 'rpc', - masked: false, + masked: true, name: 'RPC', description: 'Used by dependent services and client wallets for connecting to your node', type: 'api', addressInfo: { - username: null, + username: 'rpcuser', hostId: 'bcdefgh', internalPort: 8332, scheme: 'http', @@ -510,154 +538,210 @@ export const mockPatchData: DataModel = { 80: { enabled: true, net: { - assignedPort: 80, - assignedSslPort: 443, - publicEnabled: [], - privateDisabled: [], + assignedPort: 42080, + assignedSslPort: 42443, + }, + addresses: { + enabled: ['203.0.113.45:42443'], + disabled: [], + available: [ + { + ssl: true, + public: false, + hostname: 'adjective-noun.local', + port: 42443, + metadata: { + kind: 'mdns', + gateways: ['eth0'], + }, + }, + { + ssl: false, + public: false, + hostname: '10.0.0.1', + port: 42080, + metadata: { kind: 'ipv4', gateway: 'eth0' }, + }, + { + ssl: false, + public: false, + hostname: 'fe80::cd00:0cde:1257:211e:72cd', + port: 42080, + metadata: { kind: 'ipv6', gateway: 'eth0', scopeId: 2 }, + }, + { + ssl: true, + public: true, + hostname: '203.0.113.45', + port: 42443, + metadata: { kind: 'ipv4', gateway: 'eth0' }, + }, + { + ssl: true, + public: true, + hostname: 'bitcoin.example.com', + port: 42443, + metadata: { kind: 'public-domain', gateway: 'eth0' }, + }, + { + ssl: false, + public: false, + hostname: '192.168.10.11', + port: 42080, + metadata: { kind: 'ipv4', gateway: 'wlan0' }, + }, + { + ssl: false, + public: false, + hostname: 'fe80::cd00:0cde:1257:211e:1234', + port: 42080, + metadata: { kind: 'ipv6', gateway: 'wlan0', scopeId: 3 }, + }, + { + ssl: true, + public: false, + hostname: 'my-bitcoin.home', + port: 42443, + metadata: { + kind: 'private-domain', + gateways: ['wlan0'], + }, + }, + { + ssl: false, + public: false, + hostname: + 'xyz789abc123def456ghi789jkl012mno345pqr678stu901vwx234.onion', + port: 42080, + metadata: { + kind: 'plugin', + packageId: 'tor', + removeAction: 'delete-onion-service', + overflowActions: ['regenerate-key'], + 
info: null, + }, + }, + { + ssl: true, + public: false, + hostname: + 'xyz789abc123def456ghi789jkl012mno345pqr678stu901vwx234.onion', + port: 42443, + metadata: { + kind: 'plugin', + packageId: 'tor', + removeAction: 'delete-onion-service', + overflowActions: ['regenerate-key'], + info: null, + }, + }, + ], }, options: { - addSsl: null, - preferredExternalPort: 443, - secure: { ssl: true }, + preferredExternalPort: 42443, + addSsl: { + preferredExternalPort: 42443, + alpn: { specified: ['http/1.1', 'h2'] }, + addXForwardedHeaders: false, + }, + secure: null, }, }, }, - publicDomains: {}, - privateDomains: [], - onions: [], - hostnameInfo: { - 80: [ - { - kind: 'ip', - gateway: { id: 'eth0', name: 'Ethernet', public: false }, - public: false, - hostname: { - kind: 'local', - value: 'adjective-noun.local', - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'local', - value: 'adjective-noun.local', - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'eth0', name: 'Ethernet', public: false }, - public: false, - hostname: { - kind: 'ipv4', - value: '10.0.0.1', - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'ipv4', - value: '10.0.0.2', - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'eth0', name: 'Ethernet', public: false }, - public: false, - hostname: { - kind: 'ipv6', - value: 'fe80::cd00:0000:0cde:1257:0000:211e:72cd', - scopeId: 2, - port: null, - sslPort: 1234, - }, - }, - { - kind: 'ip', - gateway: { id: 'wlan0', name: 'Wireless', public: false }, - public: false, - hostname: { - kind: 'ipv6', - value: 'fe80::cd00:0000:0cde:1257:0000:211e:1234', - scopeId: 3, - port: null, - sslPort: 1234, - }, - }, - { - kind: 'onion', - hostname: { - value: 'bitcoin-p2p.onion', - port: 80, - sslPort: 443, - }, - 
}, - ], + publicDomains: { + 'bitcoin.example.com': { + gateway: 'eth0', + acme: null, + }, }, + privateDomains: { + 'my-bitcoin.home': ['wlan0'], + }, + portForwards: [ + { + src: '203.0.113.45:443', + dst: '10.0.0.1:443', + gateway: 'eth0', + }, + { + src: '203.0.113.45:42443', + dst: '10.0.0.1:42443', + gateway: 'eth0', + }, + ], }, bcdefgh: { bindings: { 8332: { enabled: true, net: { - assignedPort: 8332, + assignedPort: 48332, assignedSslPort: null, - publicEnabled: [], - privateDisabled: [], + }, + addresses: { + enabled: [], + disabled: [], + available: [ + { + ssl: false, + public: false, + hostname: 'adjective-noun.local', + port: 48332, + metadata: { + kind: 'mdns', + gateways: ['eth0'], + }, + }, + { + ssl: false, + public: false, + hostname: '10.0.0.1', + port: 48332, + metadata: { kind: 'ipv4', gateway: 'eth0' }, + }, + ], }, options: { addSsl: null, - preferredExternalPort: 8332, + preferredExternalPort: 48332, secure: { ssl: false }, }, }, }, publicDomains: {}, - privateDomains: [], - onions: [], - hostnameInfo: { - 8332: [], - }, + privateDomains: {}, + portForwards: [], }, cdefghi: { bindings: { 8333: { enabled: true, net: { - assignedPort: 8333, + assignedPort: 48333, assignedSslPort: null, - publicEnabled: [], - privateDisabled: [], + }, + addresses: { + enabled: [], + disabled: [], + available: [], }, options: { addSsl: null, - preferredExternalPort: 8333, + preferredExternalPort: 48333, secure: { ssl: false }, }, }, }, publicDomains: {}, - privateDomains: [], - onions: [], - hostnameInfo: { - 8333: [], - }, + privateDomains: {}, + portForwards: [], }, }, storeExposedDependents: [], + outboundGateway: null, registry: 'https://registry.start9.com/', developerKey: 'developer-key', + plugin: { url: null }, tasks: { // 'bitcoind-config': { // task: { @@ -680,5 +764,62 @@ export const mockPatchData: DataModel = { }, }, }, + tor: { + stateInfo: { + state: 'installed', + manifest: { + ...Mock.MockManifestTor, + version: '0.4.8:0', + }, + }, + s9pk: 
'/media/startos/data/package-data/archive/installed/tor.s9pk', + icon: '/assets/img/service-icons/fallback.png', + lastBackup: null, + statusInfo: { + desired: { main: 'running' }, + error: null, + health: {}, + started: new Date().toISOString(), + }, + actions: { + 'create-onion-service': { + name: 'Create Onion Service', + description: 'Register a new .onion address for a service interface', + warning: null, + visibility: 'enabled', + allowedStatuses: 'only-running', + hasInput: true, + group: null, + }, + 'delete-onion-service': { + name: 'Delete Onion Service', + description: 'Remove an existing .onion address', + warning: 'This will permanently remove the .onion address.', + visibility: 'enabled', + allowedStatuses: 'only-running', + hasInput: false, + group: null, + }, + 'regenerate-key': { + name: 'Regenerate Key', + description: 'Generate a new key pair and .onion address', + warning: + 'This will change the .onion address. Any bookmarks or links to the old address will stop working.', + visibility: 'enabled', + allowedStatuses: 'only-running', + hasInput: false, + group: null, + }, + }, + serviceInterfaces: {}, + currentDependencies: {}, + hosts: {}, + storeExposedDependents: [], + outboundGateway: null, + registry: 'https://registry.start9.com/', + developerKey: 'developer-key', + plugin: { url: { tableAction: 'create-onion-service' } }, + tasks: {}, + }, }, } diff --git a/web/projects/ui/src/app/services/config.service.ts b/web/projects/ui/src/app/services/config.service.ts index 3e9097b54..4e56e8e46 100644 --- a/web/projects/ui/src/app/services/config.service.ts +++ b/web/projects/ui/src/app/services/config.service.ts @@ -32,7 +32,6 @@ export class ConfigService { private getAccessType = utils.once(() => { if (useMocks) return mocks.maskAs if (this.hostname === 'localhost') return 'localhost' - if (this.hostname.endsWith('.onion')) return 'tor' if (this.hostname.endsWith('.local')) return 'mdns' let ip = null try { @@ -51,7 +50,7 @@ export class 
ConfigService { } isLanHttp(): boolean { - return !this.isHttps() && !['localhost', 'tor'].includes(this.accessType) + return !this.isHttps() && this.accessType !== 'localhost' } isHttps(): boolean { diff --git a/web/projects/ui/src/app/services/gateway.service.ts b/web/projects/ui/src/app/services/gateway.service.ts index 3bb61ba07..ecbdc1420 100644 --- a/web/projects/ui/src/app/services/gateway.service.ts +++ b/web/projects/ui/src/app/services/gateway.service.ts @@ -1,7 +1,7 @@ import { inject, Injectable } from '@angular/core' import { PatchDB } from 'patch-db-client' import { T, utils } from '@start9labs/start-sdk' -import { map } from 'rxjs/operators' +import { map } from 'rxjs' import { DataModel } from './patch-db/data-model' import { toSignal } from '@angular/core/rxjs-interop' @@ -12,39 +12,47 @@ export type GatewayPlus = T.NetworkInterfaceInfo & { subnets: utils.IpNet[] lanIpv4: string[] wanIp?: utils.IpAddress - public: boolean + isDefaultOutbound: boolean } @Injectable() export class GatewayService { + private readonly patch = inject>(PatchDB) + + private readonly network$ = this.patch.watch$('serverInfo', 'network') + + readonly defaultOutbound = toSignal( + this.network$.pipe(map(n => n.defaultOutbound)), + ) + readonly gateways = toSignal( - inject>(PatchDB) - .watch$('serverInfo', 'network', 'gateways') - .pipe( - map(gateways => - Object.entries(gateways) - .filter(([_, val]) => !!val?.ipInfo) - .filter( - ([_, val]) => - val?.ipInfo?.deviceType !== 'bridge' && - val?.ipInfo?.deviceType !== 'loopback', - ) - .map(([id, val]) => { - const subnets = - val.ipInfo?.subnets.map(s => utils.IpNet.parse(s)) ?? [] - const name = val.name ?? val.ipInfo!.name - return { - ...val, - id, - name, - subnets, - lanIpv4: subnets.filter(s => s.isIpv4()).map(s => s.address), - public: val.public ?? 
subnets.some(s => s.isPublic()), - wanIp: - val.ipInfo?.wanIp && utils.IpAddress.parse(val.ipInfo?.wanIp), - } as GatewayPlus - }), - ), - ), + this.network$.pipe( + map(network => { + const gateways = network.gateways + const defaultOutbound = network.defaultOutbound + return Object.entries(gateways) + .filter(([_, val]) => !!val?.ipInfo) + .filter( + ([_, val]) => + val?.ipInfo?.deviceType !== 'bridge' && + val?.ipInfo?.deviceType !== 'loopback', + ) + .map(([id, val]) => { + const subnets = + val.ipInfo?.subnets.map(s => utils.IpNet.parse(s)) ?? [] + const name = val.name ?? val.ipInfo!.name + return { + ...val, + id, + name, + subnets, + lanIpv4: subnets.filter(s => s.isIpv4()).map(s => s.address), + wanIp: + val.ipInfo?.wanIp && utils.IpAddress.parse(val.ipInfo?.wanIp), + isDefaultOutbound: id === defaultOutbound, + } as GatewayPlus + }) + }), + ), ) } diff --git a/web/projects/ui/src/app/services/marketplace.service.ts b/web/projects/ui/src/app/services/marketplace.service.ts index aa1b37eb0..b5bfec34a 100644 --- a/web/projects/ui/src/app/services/marketplace.service.ts +++ b/web/projects/ui/src/app/services/marketplace.service.ts @@ -28,7 +28,6 @@ import { switchMap, tap, } from 'rxjs' -import { RR } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' import { DataModel } from 'src/app/services/patch-db/data-model' @@ -247,7 +246,7 @@ export class MarketplaceService { version: string, url: string, ): Promise { - const params: RR.InstallPackageReq = { + const params: T.InstallParams = { id, version, registry: url, diff --git a/web/projects/ui/src/app/services/os.service.ts b/web/projects/ui/src/app/services/os.service.ts index 422ce1643..a34931915 100644 --- a/web/projects/ui/src/app/services/os.service.ts +++ b/web/projects/ui/src/app/services/os.service.ts @@ -2,16 +2,16 @@ import { inject, Injectable } from '@angular/core' import { PatchDB } from 'patch-db-client' import { BehaviorSubject, - 
distinctUntilChanged, - map, combineLatest, + distinctUntilChanged, firstValueFrom, + map, + shareReplay, } from 'rxjs' import { ApiService } from 'src/app/services/api/embassy-api.service' import { getServerInfo } from 'src/app/utils/get-server-info' import { DataModel } from './patch-db/data-model' -import { Version } from '@start9labs/start-sdk' -import { RR } from './api/api.types' +import { T, Version } from '@start9labs/start-sdk' @Injectable({ providedIn: 'root', @@ -20,20 +20,22 @@ export class OSService { private readonly api = inject(ApiService) private readonly patch = inject>(PatchDB) - osUpdate?: RR.CheckOsUpdateRes + osUpdate?: T.OsVersionInfoMap readonly updateAvailable$ = new BehaviorSubject(false) - readonly updating$ = this.patch.watch$('serverInfo', 'statusInfo').pipe( + private readonly statusInfo$ = this.patch + .watch$('serverInfo', 'statusInfo') + .pipe(shareReplay({ bufferSize: 1, refCount: true })) + + readonly updating$ = this.statusInfo$.pipe( map(status => status.updateProgress ?? 
status.updated), distinctUntilChanged(), ) - readonly backingUp$ = this.patch - .watch$('serverInfo', 'statusInfo', 'backupProgress') - .pipe( - map(obj => !!obj), - distinctUntilChanged(), - ) + readonly backingUp$ = this.statusInfo$.pipe( + map(status => !!status.backupProgress), + distinctUntilChanged(), + ) readonly updatingOrBackingUp$ = combineLatest([ this.updating$, diff --git a/web/projects/ui/src/app/services/pkg-status-rendering.service.ts b/web/projects/ui/src/app/services/pkg-status-rendering.service.ts index b0902944c..d45122d1b 100644 --- a/web/projects/ui/src/app/services/pkg-status-rendering.service.ts +++ b/web/projects/ui/src/app/services/pkg-status-rendering.service.ts @@ -2,6 +2,30 @@ import { i18nKey } from '@start9labs/shared' import { T } from '@start9labs/start-sdk' import { PackageDataEntry } from 'src/app/services/patch-db/data-model' +export const INACTIVE_STATUSES: PrimaryStatus[] = [ + 'installing', + 'updating', + 'removing', + 'restoring', + 'backing-up', + 'error', +] + +export const ALLOWED_STATUSES: Record> = { + 'only-running': new Set(['running']), + 'only-stopped': new Set(['stopped']), + any: new Set([ + 'running', + 'stopped', + 'restarting', + 'restoring', + 'stopping', + 'starting', + 'backing-up', + 'task-required', + ]), +} + export interface PackageStatus { primary: PrimaryStatus health: T.HealthStatus | null diff --git a/web/projects/ui/src/app/services/standard-actions.service.ts b/web/projects/ui/src/app/services/standard-actions.service.ts index 5cd002139..4d645e8d3 100644 --- a/web/projects/ui/src/app/services/standard-actions.service.ts +++ b/web/projects/ui/src/app/services/standard-actions.service.ts @@ -15,7 +15,6 @@ import { getAllPackages } from '../utils/get-package-data' import { hasCurrentDeps } from '../utils/has-deps' import { ApiService } from './api/embassy-api.service' import { DataModel } from './patch-db/data-model' -import { RR } from './api/api.types' @Injectable({ providedIn: 'root', @@ -78,7 
+77,7 @@ export class StandardActionsService { .subscribe(() => this.doUninstall({ id, force, soft })) } - private async doUninstall(options: RR.UninstallPackageReq) { + private async doUninstall(options: T.UninstallParams) { const loader = this.loader.open('Beginning uninstall').subscribe() try { diff --git a/web/projects/ui/src/app/services/state.service.ts b/web/projects/ui/src/app/services/state.service.ts index 162031df6..d21c0f679 100644 --- a/web/projects/ui/src/app/services/state.service.ts +++ b/web/projects/ui/src/app/services/state.service.ts @@ -26,7 +26,7 @@ import { takeUntil, tap, } from 'rxjs/operators' -import { RR } from 'src/app/services/api/api.types' +import { ServerState } from 'src/app/services/api/api.types' import { ApiService } from 'src/app/services/api/embassy-api.service' import { NetworkService } from 'src/app/services/network.service' @@ -56,7 +56,7 @@ class DisconnectedToast {} @Injectable({ providedIn: 'root', }) -export class StateService extends Observable { +export class StateService extends Observable { private readonly alerts = inject(TuiAlertService) private readonly i18n = inject(i18nPipe) private readonly api = inject(ApiService) @@ -115,7 +115,7 @@ export class StateService extends Observable { setTimeout(() => this.trigger$.next(gracefully), delay) } - private handleState(state: RR.ServerState): void { + private handleState(state: ServerState): void { switch (state) { case 'initializing': this.router.navigate(['initializing'], { replaceUrl: true }) @@ -136,7 +136,7 @@ export class StateService extends Observable { } } -export function stateNot(state: RR.ServerState[]): CanActivateFn { +export function stateNot(state: ServerState[]): CanActivateFn { return () => inject(StateService).pipe( filter(current => !current || !state.includes(current)), diff --git a/web/projects/ui/src/app/services/time.service.ts b/web/projects/ui/src/app/services/time.service.ts index bd088242a..6107a212f 100644 --- 
a/web/projects/ui/src/app/services/time.service.ts +++ b/web/projects/ui/src/app/services/time.service.ts @@ -12,14 +12,15 @@ export class TimeService { private readonly time$ = defer(() => inject(ApiService).getSystemTime({}), ).pipe( - switchMap(({ now, uptime }) => - timer(0, 1000).pipe( + switchMap(({ now, uptime }) => { + const uptimeSecs = Number(uptime) + return timer(0, 1000).pipe( map(index => ({ now: new Date(now).valueOf() + 1000 * index, - uptime: uptime + index, + uptime: uptimeSecs + index, })), - ), - ), + ) + }), shareReplay(1), ) diff --git a/web/projects/ui/src/app/utils/acme.ts b/web/projects/ui/src/app/utils/acme.ts index f72e3b922..a6b8defe8 100644 --- a/web/projects/ui/src/app/utils/acme.ts +++ b/web/projects/ui/src/app/utils/acme.ts @@ -1,11 +1,11 @@ export function toAuthorityName( url: string | null, addSsl = true, -): string | 'Local Root CA' | '-' { +): string | 'Root CA' | '-' { if (url) { return knownAuthorities.find(ca => ca.url === url)?.name || url } else { - return addSsl ? 'Local Root CA' : '-' + return addSsl ? 
'Root CA' : '-' } } diff --git a/web/projects/ui/src/app/utils/configBuilderToSpec.ts b/web/projects/ui/src/app/utils/configBuilderToSpec.ts index b2eee7dbf..2ffa249f5 100644 --- a/web/projects/ui/src/app/utils/configBuilderToSpec.ts +++ b/web/projects/ui/src/app/utils/configBuilderToSpec.ts @@ -1,7 +1,5 @@ import { ISB } from '@start9labs/start-sdk' -export async function configBuilderToSpec( - builder: ISB.InputSpec>, -) { +export async function configBuilderToSpec(builder: ISB.InputSpec) { return builder.build({} as any).then(a => a.spec) } diff --git a/web/projects/ui/src/app/utils/title-resolver.ts b/web/projects/ui/src/app/utils/title-resolver.ts index a4bed3349..21565679b 100644 --- a/web/projects/ui/src/app/utils/title-resolver.ts +++ b/web/projects/ui/src/app/utils/title-resolver.ts @@ -13,7 +13,7 @@ export async function titleResolver({ let route = inject(i18nPipe).transform(data['title']) const patch = inject>(PatchDB) - const title = await firstValueFrom(patch.watch$('ui', 'name')) + const title = await firstValueFrom(patch.watch$('serverInfo', 'name')) const id = params['pkgId'] if (id) { diff --git a/web/scripts/check-i18n.mjs b/web/scripts/check-i18n.mjs index 6868d18d4..4c12d031c 100644 --- a/web/scripts/check-i18n.mjs +++ b/web/scripts/check-i18n.mjs @@ -89,6 +89,43 @@ if (errors.length > 0) { console.error(` ${rel}:${line} "${key}"`) } console.error() +} + +// Check that all numeric keys in en.ts exist in every non-English dictionary +const enNumericKeys = new Set() +for (const match of enSource.matchAll(/^\s+'[^']+?':\s*(\d+)/gm)) { + enNumericKeys.add(Number(match[1])) +} + +const dictDir = join(root, 'projects/shared/src/i18n/dictionaries') +const otherLangs = ['de', 'es', 'fr', 'pl'] +const dictErrors = [] + +for (const lang of otherLangs) { + const dictPath = join(dictDir, `${lang}.ts`) + const dictSource = readFileSync(dictPath, 'utf-8') + const dictKeys = new Set() + + for (const match of dictSource.matchAll(/^\s*(\d+):/gm)) { + 
dictKeys.add(Number(match[1])) + } + + const missing = [...enNumericKeys].filter(k => !dictKeys.has(k)).sort((a, b) => a - b) + + if (missing.length > 0) { + dictErrors.push({ lang, missing }) + } +} + +if (dictErrors.length > 0) { + console.error(`\nMissing i18n dictionary keys:\n`) + for (const { lang, missing } of dictErrors) { + console.error(` ${lang}.ts is missing keys: ${missing.join(', ')}`) + } + console.error() +} + +if (errors.length > 0 || dictErrors.length > 0) { process.exit(1) } else { console.log('All i18n keys are valid.')
+ + @switch (gateway.ipInfo.deviceType) { + @case ('ethernet') { + + } + @case ('wireless') { + + } + @case ('wireguard') { + + } + } {{ gateway.name }} - - @if (gateway.ipInfo.deviceType; as type) { - {{ type }} ({{ - gateway.public ? ('public' | i18n) : ('private' | i18n) - }}) - } @else { - - + @if (gateway.isDefaultOutbound) { + + {{ 'default outbound' | i18n }} + } {{ gateway.lanIpv4.join(', ') }} + @if (gateway.type === 'outbound-only') { + {{ 'Outbound Only' | i18n }} + } @else { + {{ 'Inbound/Outbound' | i18n }} + } + {{ gateway.lanIpv4.join(', ') || '-' }} {{ gateway.ipInfo.wanIp || ('Error' | i18n) }} @@ -63,19 +80,27 @@ import { GatewayPlus } from 'src/app/services/gateway.service' {{ 'More' | i18n }} - + @if (gateway.type !== 'outbound-only') { + + + + } + @if (!gateway.isDefaultOutbound) { + + + + } @if (gateway.ipInfo.deviceType === 'wireguard') { - @@ -86,45 +111,55 @@ import { GatewayPlus } from 'src/app/services/gateway.service' } `, styles: ` + tui-icon { + font-size: 1.3rem; + margin-right: 0.7rem; + } + + tui-badge { + margin-left: 1rem; + } + td:last-child { - grid-area: 1 / 3 / 5; - align-self: center; text-align: right; } - .name { - width: 14rem; - } - - .type { - width: 14rem; - } - :host-context(tui-root._mobile) { - grid-template-columns: min-content 1fr min-content; - - .name { - grid-column: span 2; + td { + width: auto !important; + align-content: center; } - .type { - grid-column: span 2; - order: -1; + td:first-child { + font: var(--tui-font-text-m); + font-weight: bold; + color: var(--tui-text-primary); } - .lan, - .wan { - grid-column: span 2; + td:nth-child(2) { + grid-area: 2 / 1 / 2 / 3; + } + + td:nth-child(3), + td:nth-child(4) { + grid-area: auto / 1 / auto / 3; &::before { - content: 'LAN IP: '; color: var(--tui-text-primary); } } - .wan::before { + td:nth-child(3)::before { + content: 'LAN IP: '; + } + + td:nth-child(4)::before { content: 'WAN IP: '; } + + td:last-child { + grid-area: 1 / 3 / 6; + } } `, changeDetection: 
ChangeDetectionStrategy.OnPush, @@ -132,9 +167,11 @@ import { GatewayPlus } from 'src/app/services/gateway.service' TuiButton, TuiDropdown, TuiDataList, + TuiIcon, TuiOptGroup, TuiTextfield, i18nPipe, + TuiBadge, ], }) export class GatewaysItemComponent { @@ -149,6 +186,17 @@ export class GatewaysItemComponent { open = false + viewPortForwards() { + const { id, name } = this.gateway() + this.dialog + .openComponent(PORT_FORWARDS_MODAL, { + label: 'Port Forwards', + size: 'l', + data: { gatewayId: id, gatewayName: name }, + }) + .subscribe() + } + remove() { this.dialog .openConfirm({ label: 'Are you sure?', size: 's' }) @@ -166,6 +214,18 @@ export class GatewaysItemComponent { }) } + async setDefaultOutbound() { + const loader = this.loader.open().subscribe() + + try { + await this.api.setDefaultOutbound({ gateway: this.gateway().id }) + } catch (e: any) { + this.errorService.handleError(e) + } finally { + loader.unsubscribe() + } + } + async rename() { const { id, name } = this.gateway() const renameSpec = ISB.InputSpec.of({ diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/gateways/port-forwards.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/gateways/port-forwards.component.ts new file mode 100644 index 000000000..08fbc83ce --- /dev/null +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/gateways/port-forwards.component.ts @@ -0,0 +1,262 @@ +import { + ChangeDetectionStrategy, + Component, + inject, + signal, +} from '@angular/core' +import { toSignal } from '@angular/core/rxjs-interop' +import { ErrorService, i18nPipe } from '@start9labs/shared' +import { T } from '@start9labs/start-sdk' +import { TuiButton, TuiDialogContext } from '@taiga-ui/core' +import { TuiButtonLoading } from '@taiga-ui/kit' +import { injectContext, PolymorpheusComponent } from '@taiga-ui/polymorpheus' +import { PatchDB } from 'patch-db-client' +import { combineLatest, map } from 'rxjs' +import { PlaceholderComponent } from 
'src/app/routes/portal/components/placeholder.component' +import { PortCheckIconComponent } from 'src/app/routes/portal/components/port-check-icon.component' +import { PortCheckWarningsComponent } from 'src/app/routes/portal/components/port-check-warnings.component' +import { TableComponent } from 'src/app/routes/portal/components/table.component' +import { ApiService } from 'src/app/services/api/embassy-api.service' +import { DataModel } from 'src/app/services/patch-db/data-model' + +export type PortForwardsModalData = { + gatewayId: string + gatewayName: string +} + +type PortForwardRow = { + interfaces: string[] + externalPort: number + internalPort: number +} + +function parseSocketAddr(s: string): { ip: string; port: number } { + const lastColon = s.lastIndexOf(':') + return { + ip: s.substring(0, lastColon), + port: Number(s.substring(lastColon + 1)), + } +} + +@Component({ + selector: 'port-forwards-modal', + template: ` +

+ {{ 'Port forwarding rules required on gateway' | i18n }} + "{{ context.data.gatewayName }}" +

+ + + @for (row of rows(); track row.externalPort; let i = $index) { + + + + + + + + } @empty { + + + + } +
+ @for (iface of row.interfaces; track iface) { +
{{ iface }}
+ } + +
+ + {{ row.externalPort }}{{ row.internalPort }} + +
+ + {{ 'No port forwarding rules' | i18n }} + +
+ `, + styles: ` + p { + margin: 0 0 1rem 0; + } + + .interfaces { + white-space: nowrap; + } + + .status { + width: 3.2rem; + } + + td:last-child { + text-align: end; + } + + :host-context(tui-root._mobile) table { + thead { + display: table-header-group !important; + } + + tr { + display: table-row !important; + box-shadow: none !important; + } + + td, + th { + padding: 0.5rem 0.5rem !important; + font: var(--tui-font-text-s) !important; + color: var(--tui-text-primary) !important; + font-weight: normal !important; + } + + th { + font-weight: bold !important; + } + } + `, + changeDetection: ChangeDetectionStrategy.OnPush, + imports: [ + TuiButton, + i18nPipe, + TableComponent, + PlaceholderComponent, + PortCheckIconComponent, + PortCheckWarningsComponent, + TuiButtonLoading, + ], +}) +export class PortForwardsModalComponent { + private readonly patch = inject>(PatchDB) + private readonly api = inject(ApiService) + private readonly errorService = inject(ErrorService) + + readonly context = + injectContext>() + + readonly loading = signal>({}) + readonly results = signal>({}) + + private readonly portForwards$ = combineLatest([ + this.patch.watch$('serverInfo', 'network', 'host', 'portForwards').pipe( + map(pfs => + pfs.map(pf => ({ + ...pf, + interfaces: ['StartOS - UI'], + })), + ), + ), + this.patch.watch$('packageData').pipe( + map(pkgData => { + const rows: Array<{ + src: string + dst: string + gateway: string + interfaces: string[] + }> = [] + + for (const [pkgId, pkg] of Object.entries(pkgData)) { + const title = + pkg.stateInfo.manifest?.title ?? + pkg.stateInfo.installingInfo?.newManifest?.title ?? + pkgId + + for (const [hostId, host] of Object.entries(pkg.hosts)) { + // Find interface names pointing to this host + const ifaceNames: string[] = [] + for (const iface of Object.values(pkg.serviceInterfaces)) { + if (iface.addressInfo.hostId === hostId) { + ifaceNames.push(`${title} - ${iface.name}`) + } + } + + const label = + ifaceNames.length > 0 ? 
ifaceNames : [`${title} - ${hostId}`] + + for (const pf of host.portForwards) { + rows.push({ ...pf, interfaces: label }) + } + } + } + + return rows + }), + ), + ]).pipe( + map(([osForwards, pkgForwards]) => { + const gatewayId = this.context.data.gatewayId + const all = [...osForwards, ...pkgForwards].filter( + pf => pf.gateway === gatewayId, + ) + + // Group by (externalPort, internalPort) + const grouped = new Map() + + for (const pf of all) { + const src = parseSocketAddr(pf.src) + const dst = parseSocketAddr(pf.dst) + const key = `${src.port}:${dst.port}` + + const existing = grouped.get(key) + if (existing) { + for (const iface of pf.interfaces) { + if (!existing.interfaces.includes(iface)) { + existing.interfaces.push(iface) + } + } + } else { + grouped.set(key, { + interfaces: [...pf.interfaces], + externalPort: src.port, + internalPort: dst.port, + }) + } + } + + return [...grouped.values()].sort( + (a, b) => a.externalPort - b.externalPort, + ) + }), + ) + + readonly rows = toSignal(this.portForwards$, { initialValue: [] }) + + async testPort(index: number, port: number) { + this.loading.update(l => ({ ...l, [index]: true })) + + try { + const result = await this.api.checkPort({ + gateway: this.context.data.gatewayId, + port, + }) + + this.results.update(r => ({ ...r, [index]: result })) + } catch (e: any) { + this.errorService.handleError(e) + } finally { + this.loading.update(l => ({ ...l, [index]: false })) + } + } +} + +export const PORT_FORWARDS_MODAL = new PolymorpheusComponent( + PortForwardsModalComponent, +) diff --git a/web/projects/ui/src/app/routes/portal/routes/system/routes/gateways/table.component.ts b/web/projects/ui/src/app/routes/portal/routes/system/routes/gateways/table.component.ts index 65f205455..d7e5d7d03 100644 --- a/web/projects/ui/src/app/routes/portal/routes/system/routes/gateways/table.component.ts +++ b/web/projects/ui/src/app/routes/portal/routes/system/routes/gateways/table.component.ts @@ -13,7 +13,7 @@ import { 
GatewayService } from 'src/app/services/gateway.service'
+
{{ 'Loading' | i18n }}

( + plugin: P, + value: T, + ): Manifest extends { plugins: P[] } ? T : null { + if (this.manifest.plugins?.includes(plugin)) return value as any + return null as any + } + + /** + * Finalize the SDK and return the full set of helpers for building a StartOS service. + * + * This method is only callable after `.withManifest()` has been called (enforced at the type level). + * + * @param isReady - Type-level gate; resolves to `true` only when a manifest is bound. + * @returns An object containing all SDK utilities: actions, daemons, backups, interfaces, health checks, volumes, triggers, and more. + */ build(isReady: AnyNeverCond<[Manifest], 'Build not ready', true>) { - type NestedEffects = 'subcontainer' | 'store' | 'action' + type NestedEffects = 'subcontainer' | 'store' | 'action' | 'plugin' type InterfaceEffects = | 'getServiceInterface' | 'listServiceInterfaces' @@ -104,6 +139,7 @@ export class StartSdk { type AlreadyExposed = | 'getSslCertificate' | 'getSystemSmtp' + | 'getOutboundGateway' | 'getContainerIp' | 'getDataVersion' | 'setDataVersion' @@ -133,13 +169,19 @@ export class StartSdk { } return { + /** The bound service manifest */ manifest: this.manifest, + /** Volume path helpers derived from the manifest volume definitions */ volumes: createVolumes(this.manifest), ...startSdkEffectWrapper, + /** Persist the current data version to the StartOS effect system */ setDataVersion, + /** Retrieve the current data version from the StartOS effect system */ getDataVersion, action: { + /** Execute an action by its ID, optionally providing input */ run: actions.runAction, + /** Create a task notification for a specific package's action */ createTask: >( effects: T.Effects, packageId: T.PackageId, @@ -154,6 +196,7 @@ export class StartSdk { severity, options: options, }), + /** Create a task notification for this service's own action (uses manifest.id automatically) */ createOwnTask: >( effects: T.Effects, action: T, @@ -167,22 +210,47 @@ export class StartSdk { 
severity, options: options, }), + /** + * Clear one or more task notifications by their replay IDs + * @param effects - The effects context + * @param replayIds - One or more replay IDs of the tasks to clear + */ clearTask: (effects: T.Effects, ...replayIds: string[]) => effects.action.clearTasks({ only: replayIds }), }, + /** + * Check whether the specified (or all) dependencies are satisfied. + * @param effects - The effects context + * @param packageIds - Optional subset of dependency IDs to check; defaults to all + * @returns An object describing which dependencies are satisfied and which are not + */ checkDependencies: checkDependencies as < DependencyId extends keyof Manifest['dependencies'] & - PackageId = keyof Manifest['dependencies'] & PackageId, + T.PackageId = keyof Manifest['dependencies'] & T.PackageId, >( effects: Effects, packageIds?: DependencyId[], ) => Promise>, serviceInterface: { + /** Retrieve a single service interface belonging to this package by its ID */ getOwn: getOwnServiceInterface, + /** Retrieve a single service interface from any package */ get: getServiceInterface, + /** Retrieve all service interfaces belonging to this package */ getAllOwn: getOwnServiceInterfaces, + /** Retrieve all service interfaces, optionally filtering by package */ getAll: getServiceInterfaces, }, + /** + * Get the container IP address with reactive subscription support. + * + * Returns an object with multiple read strategies: `const()` for a value + * that retries on change, `once()` for a single read, `watch()` for an async + * generator, `onChange()` for a callback, and `waitFor()` to block until a predicate is met. + * + * @param effects - The effects context + * @param options - Optional filtering options (e.g. `containerId`) + */ getContainerIp: ( effects: T.Effects, options: Omit< @@ -275,9 +343,22 @@ export class StartSdk { }, MultiHost: { + /** + * Create a new MultiHost instance for binding ports and exporting interfaces. 
+ * @param effects - The effects context + * @param id - A unique identifier for this multi-host group + */ of: (effects: Effects, id: string) => new MultiHost({ id, effects }), }, + /** + * Return `null` if the given string is empty, otherwise return the string unchanged. + * Useful for converting empty user input into explicit null values. + */ nullIfEmpty, + /** + * Indicate that a daemon should use the container image's configured entrypoint. + * @param overrideCmd - Optional command arguments to append after the entrypoint + */ useEntrypoint: (overrideCmd?: string[]) => new T.UseEntrypoint(overrideCmd), /** @@ -392,7 +473,12 @@ export class StartSdk { run: Run<{}>, ) => Action.withoutInput(id, metadata, run), }, - inputSpecConstants: { smtpInputSpec }, + inputSpecConstants: { + smtpInputSpec, + systemSmtpSpec, + customSmtp, + smtpProviderVariants, + }, /** * @description Use this function to create a service interface. * @param effects @@ -440,19 +526,37 @@ export class StartSdk { masked: boolean }, ) => new ServiceInterfaceBuilder({ ...options, effects }), + /** + * Get the system SMTP configuration with reactive subscription support. + * @param effects - The effects context + */ getSystemSmtp: (effects: E) => new GetSystemSmtp(effects), + /** + * Get the outbound network gateway address with reactive subscription support. + * @param effects - The effects context + */ + getOutboundGateway: (effects: E) => + new GetOutboundGateway(effects), + /** + * Get an SSL certificate for the given hostnames with reactive subscription support. + * @param effects - The effects context + * @param hostnames - The hostnames to obtain a certificate for + * @param algorithm - Optional algorithm preference (e.g. 
Ed25519) + */ getSslCertificate: ( effects: E, hostnames: string[], algorithm?: T.Algorithm, ) => new GetSslCertificate(effects, hostnames, algorithm), + /** Retrieve the manifest of any installed service package by its ID */ getServiceManifest, healthCheck: { checkPortListening, checkWebUrl, runHealthScript, }, + /** Common utility patterns (e.g. hostname regex, port validators) */ patterns, /** * @description Use this function to list every Action offered by the service. Actions will be displayed in the provided order. @@ -632,21 +736,47 @@ export class StartSdk { * ``` */ setupInterfaces: setupServiceInterfaces, + /** + * Define the main entrypoint for the service. The provided function should + * configure and return a `Daemons` instance describing all long-running processes. + * @param fn - Async function that receives `effects` and returns a `Daemons` instance + */ setupMain: ( fn: (o: { effects: Effects }) => Promise>, ) => setupMain(fn), + /** Built-in trigger strategies for controlling health-check polling intervals */ trigger: { + /** Default trigger: polls at a fixed interval */ defaultTrigger, + /** Trigger with a cooldown period between checks */ cooldownTrigger, + /** Switches to a different interval after the first successful check */ changeOnFirstSuccess, + /** Uses different intervals based on success vs failure results */ successFailure, }, Mounts: { + /** + * Create an empty Mounts builder for declaring volume, asset, dependency, and backup mounts. + * @returns A new Mounts instance with no mounts configured + */ of: Mounts.of, }, Backups: { + /** + * Create a Backups configuration that backs up entire volumes by name. + * @param volumeNames - Volume IDs from the manifest to include in backups + */ ofVolumes: Backups.ofVolumes, + /** + * Create a Backups configuration from explicit sync path pairs. 
+ * @param syncs - Array of `{ dataPath, backupPath }` objects + */ ofSyncs: Backups.ofSyncs, + /** + * Create a Backups configuration with custom rsync options (e.g. exclude patterns). + * @param options - Partial sync options to override defaults + */ withOptions: Backups.withOptions, }, InputSpec: { @@ -681,11 +811,20 @@ export class StartSdk { InputSpec.of(spec), }, Daemon: { + /** + * Create a single Daemon that wraps a long-running process with automatic restart logic. + * Returns a curried function: call with `(effects, subcontainer, exec)`. + */ get of() { return Daemon.of() }, }, Daemons: { + /** + * Create a new Daemons builder for defining the service's daemon topology. + * Chain `.addDaemon()` calls to register each long-running process. + * @param effects - The effects context + */ of(effects: Effects) { return Daemons.of({ effects }) }, @@ -737,10 +876,74 @@ export class StartSdk { List, Value, Variants, + plugin: { + url: this.ifPluginEnabled('url-v0' as const, { + register: ( + effects: T.Effects, + options: { + tableAction: ActionInfo< + T.ActionId, + { + urlPluginMetadata: { + packageId: T.PackageId + interfaceId: T.ServiceInterfaceId + hostId: T.HostId + internalPort: number + } + } + > + }, + ) => + effects.plugin.url.register({ + tableAction: options.tableAction.id, + }), + exportUrl: ( + effects: T.Effects, + options: { + hostnameInfo: T.PluginHostnameInfo + removeAction: ActionInfo< + T.ActionId, + { + urlPluginMetadata: T.PluginHostnameInfo & { + interfaceId: T.ServiceInterfaceId + } + } + > | null + overflowActions: ActionInfo< + T.ActionId, + { + urlPluginMetadata: T.PluginHostnameInfo & { + interfaceId: T.ServiceInterfaceId + } + } + >[] + }, + ) => + effects.plugin.url.exportUrl({ + hostnameInfo: options.hostnameInfo, + removeAction: options.removeAction?.id ?? 
null, + overflowActions: options.overflowActions.map((a) => a.id), + }), + setupExportedUrls, // similar to setupInterfaces + }), + }, } } } +/** + * Run a one-shot command inside a temporary subcontainer. + * + * Creates a subcontainer, executes the command, and destroys the subcontainer when finished. + * Throws an {@link ExitError} if the command exits with a non-zero code or signal. + * + * @param effects - The effects context + * @param image - The container image to use + * @param command - The command to execute (string array or UseEntrypoint) + * @param options - Mount and command options + * @param name - Optional human-readable name for debugging + * @returns The stdout and stderr output of the command + */ export async function runCommand( effects: Effects, image: { imageId: keyof Manifest['images'] & T.ImageId; sharedRun?: boolean }, diff --git a/sdk/package/lib/backup/Backups.ts b/sdk/package/lib/backup/Backups.ts index 8add5dba3..5acefcb1c 100644 --- a/sdk/package/lib/backup/Backups.ts +++ b/sdk/package/lib/backup/Backups.ts @@ -5,10 +5,12 @@ import { Affine, asError } from '../util' import { ExtendedVersion, VersionRange } from '../../../base/lib' import { InitKind, InitScript } from '../../../base/lib/inits' +/** Default rsync options used for backup and restore operations */ export const DEFAULT_OPTIONS: T.SyncOptions = { delete: true, exclude: [], } +/** A single source-to-destination sync pair for backup and restore */ export type BackupSync = { dataPath: `/media/startos/volumes/${Volumes}/${string}` backupPath: `/media/startos/backup/${string}` @@ -17,8 +19,18 @@ export type BackupSync = { restoreOptions?: Partial } +/** Effects type narrowed for backup/restore contexts, preventing reuse outside that scope */ export type BackupEffects = T.Effects & Affine<'Backups'> +/** + * Configures backup and restore operations using rsync. 
+ * + * Supports syncing entire volumes or custom path pairs, with optional pre/post hooks + * for both backup and restore phases. Implements {@link InitScript} so it can be used + * as a restore-init step in `setupInit`. + * + * @typeParam M - The service manifest type + */ export class Backups implements InitScript { private constructor( private options = DEFAULT_OPTIONS, @@ -31,6 +43,11 @@ export class Backups implements InitScript { private postRestore = async (effects: BackupEffects) => {}, ) {} + /** + * Create a Backups configuration that backs up entire volumes by name. + * Each volume is synced to a corresponding directory under `/media/startos/backup/volumes/`. + * @param volumeNames - One or more volume IDs from the manifest + */ static ofVolumes( ...volumeNames: Array ): Backups { @@ -42,18 +59,31 @@ export class Backups implements InitScript { ) } + /** + * Create a Backups configuration from explicit source/destination sync pairs. + * @param syncs - Array of `{ dataPath, backupPath }` objects with optional per-sync options + */ static ofSyncs( ...syncs: BackupSync[] ) { return syncs.reduce((acc, x) => acc.addSync(x), new Backups()) } + /** + * Create an empty Backups configuration with custom default rsync options. + * Chain `.addVolume()` or `.addSync()` to add sync targets. + * @param options - Partial rsync options to override defaults (e.g. `{ exclude: ['cache'] }`) + */ static withOptions( options?: Partial, ) { return new Backups({ ...DEFAULT_OPTIONS, ...options }) } + /** + * Override the default rsync options for both backup and restore. + * @param options - Partial rsync options to merge with current defaults + */ setOptions(options?: Partial) { this.options = { ...this.options, @@ -62,6 +92,10 @@ export class Backups implements InitScript { return this } + /** + * Override rsync options used only during backup (not restore). 
+ * @param options - Partial rsync options for the backup phase + */ setBackupOptions(options?: Partial) { this.backupOptions = { ...this.backupOptions, @@ -70,6 +104,10 @@ export class Backups implements InitScript { return this } + /** + * Override rsync options used only during restore (not backup). + * @param options - Partial rsync options for the restore phase + */ setRestoreOptions(options?: Partial) { this.restoreOptions = { ...this.restoreOptions, @@ -78,26 +116,47 @@ export class Backups implements InitScript { return this } + /** + * Register a hook to run before backup rsync begins (e.g. dump a database). + * @param fn - Async function receiving backup-scoped effects + */ setPreBackup(fn: (effects: BackupEffects) => Promise) { this.preBackup = fn return this } + /** + * Register a hook to run after backup rsync completes. + * @param fn - Async function receiving backup-scoped effects + */ setPostBackup(fn: (effects: BackupEffects) => Promise) { this.postBackup = fn return this } + /** + * Register a hook to run before restore rsync begins. + * @param fn - Async function receiving backup-scoped effects + */ setPreRestore(fn: (effects: BackupEffects) => Promise) { this.preRestore = fn return this } + /** + * Register a hook to run after restore rsync completes. + * @param fn - Async function receiving backup-scoped effects + */ setPostRestore(fn: (effects: BackupEffects) => Promise) { this.postRestore = fn return this } + /** + * Add a volume to the backup set by its ID. + * @param volume - The volume ID from the manifest + * @param options - Optional per-volume rsync overrides + */ addVolume( volume: M['volumes'][number], options?: Partial<{ @@ -113,11 +172,19 @@ export class Backups implements InitScript { }) } + /** + * Add a custom sync pair to the backup set. 
+ * @param sync - A `{ dataPath, backupPath }` object with optional per-sync rsync options + */ addSync(sync: BackupSync) { this.backupSet.push(sync) return this } + /** + * Execute the backup: runs pre-hook, rsyncs all configured paths, saves the data version, then runs post-hook. + * @param effects - The effects context + */ async createBackup(effects: T.Effects) { await this.preBackup(effects as BackupEffects) for (const item of this.backupSet) { @@ -149,6 +216,10 @@ export class Backups implements InitScript { } } + /** + * Execute the restore: runs pre-hook, rsyncs all configured paths from backup to data, restores the data version, then runs post-hook. + * @param effects - The effects context + */ async restoreBackup(effects: T.Effects) { this.preRestore(effects as BackupEffects) diff --git a/sdk/package/lib/backup/setupBackups.ts b/sdk/package/lib/backup/setupBackups.ts index 7c605f849..8b31a7c65 100644 --- a/sdk/package/lib/backup/setupBackups.ts +++ b/sdk/package/lib/backup/setupBackups.ts @@ -3,6 +3,11 @@ import * as T from '../../../base/lib/types' import { _ } from '../util' import { InitScript } from '../../../base/lib/inits' +/** + * Parameters for `setupBackups`. Either: + * - An array of volume IDs to back up entirely, or + * - An async factory function that returns a fully configured {@link Backups} instance + */ export type SetupBackupsParams = | M['volumes'][number][] | ((_: { effects: T.Effects }) => Promise>) @@ -12,6 +17,15 @@ type SetupBackupsRes = { restoreInit: InitScript } +/** + * Set up backup and restore exports for the service. + * + * Returns `{ createBackup, restoreInit }` which should be exported and wired into + * the service's init and backup entry points. 
+ * + * @param options - Either an array of volume IDs or an async factory returning a Backups instance + * @returns An object with `createBackup` (the backup export) and `restoreInit` (an InitScript for restore) + */ export function setupBackups( options: SetupBackupsParams, ) { diff --git a/sdk/package/lib/health/HealthCheck.ts b/sdk/package/lib/health/HealthCheck.ts index 9f1f1088a..ec1443033 100644 --- a/sdk/package/lib/health/HealthCheck.ts +++ b/sdk/package/lib/health/HealthCheck.ts @@ -4,8 +4,8 @@ import { Trigger } from '../trigger' import { TriggerInput } from '../trigger/TriggerInput' import { defaultTrigger } from '../trigger/defaultTrigger' import { once, asError, Drop } from '../util' -import { object, unknown } from 'ts-matches' +/** Parameters for creating a health check */ export type HealthCheckParams = { id: HealthCheckId name: string @@ -14,6 +14,13 @@ export type HealthCheckParams = { fn(): Promise | HealthCheckResult } +/** + * A periodic health check that reports daemon readiness to the StartOS UI. + * + * Polls at an interval controlled by a {@link Trigger}, reporting results as + * "starting" (during the grace period), "success", or "failure". Automatically + * pauses when the daemon is stopped and resumes when restarted. + */ export class HealthCheck extends Drop { private started: number | null = null private setStarted = (started: number | null) => { @@ -92,13 +99,21 @@ export class HealthCheck extends Drop { } }) } + /** + * Create a new HealthCheck instance and begin its polling loop. 
+ * @param effects - The effects context for reporting health status + * @param options - Health check configuration (ID, name, check function, trigger, grace period) + * @returns A new HealthCheck instance + */ static of(effects: Effects, options: HealthCheckParams): HealthCheck { return new HealthCheck(effects, options) } + /** Signal that the daemon is running, enabling health check polling */ start() { if (this.started) return this.setStarted(performance.now()) } + /** Signal that the daemon has stopped, pausing health check polling */ stop() { if (!this.started) return this.setStarted(null) @@ -109,7 +124,8 @@ export class HealthCheck extends Drop { } function asMessage(e: unknown) { - if (object({ message: unknown }).test(e)) return String(e.message) + if (typeof e === 'object' && e !== null && 'message' in e) + return String((e as any).message) const value = String(e) if (value.length == null) return null return value diff --git a/sdk/package/lib/health/checkFns/HealthCheckResult.ts b/sdk/package/lib/health/checkFns/HealthCheckResult.ts index f62eacfbc..ce610bcc7 100644 --- a/sdk/package/lib/health/checkFns/HealthCheckResult.ts +++ b/sdk/package/lib/health/checkFns/HealthCheckResult.ts @@ -1,3 +1,9 @@ import { T } from '../../../../base/lib' +/** + * The result of a single health check invocation. + * + * Contains a `result` field ("success", "failure", or "starting") and an optional `message`. + * This is the unnamed variant -- the health check name is added by the framework. 
+ */ export type HealthCheckResult = Omit diff --git a/sdk/package/lib/health/checkFns/index.ts b/sdk/package/lib/health/checkFns/index.ts index cfd297324..c493171d5 100644 --- a/sdk/package/lib/health/checkFns/index.ts +++ b/sdk/package/lib/health/checkFns/index.ts @@ -3,6 +3,14 @@ export { checkPortListening } from './checkPortListening' export { HealthCheckResult } from './HealthCheckResult' export { checkWebUrl } from './checkWebUrl' +/** + * Create a promise that rejects after the specified timeout. + * Useful for racing against long-running health checks. + * + * @param ms - Timeout duration in milliseconds + * @param options.message - Custom error message (defaults to "Timed out") + * @returns A promise that never resolves, only rejects after the timeout + */ export function timeoutPromise(ms: number, { message = 'Timed out' } = {}) { return new Promise((resolve, reject) => setTimeout(() => reject(new Error(message)), ms), diff --git a/sdk/package/lib/index.ts b/sdk/package/lib/index.ts index 3526e1c05..13427fe21 100644 --- a/sdk/package/lib/index.ts +++ b/sdk/package/lib/index.ts @@ -7,7 +7,7 @@ import { ISB, IST, types, - matches, + z, utils, } from '../../base/lib' @@ -20,7 +20,7 @@ export { ISB, IST, types, - matches, + z, utils, } export { setupI18n } from './i18n' diff --git a/sdk/package/lib/mainFn/CommandController.ts b/sdk/package/lib/mainFn/CommandController.ts index e58761a6c..d8f290aa3 100644 --- a/sdk/package/lib/mainFn/CommandController.ts +++ b/sdk/package/lib/mainFn/CommandController.ts @@ -8,6 +8,15 @@ import * as cp from 'child_process' import * as fs from 'node:fs/promises' import { DaemonCommandType, ExecCommandOptions, ExecFnOptions } from './Daemons' +/** + * Low-level controller for a single running process inside a subcontainer (or as a JS function). + * + * Manages the child process lifecycle: spawning, waiting, and signal-based termination. + * Used internally by {@link Daemon} to manage individual command executions. 
+ * + * @typeParam Manifest - The service manifest type + * @typeParam C - The subcontainer type, or `null` for JS-only commands + */ export class CommandController< Manifest extends T.SDKManifest, C extends SubContainer | null, @@ -21,6 +30,13 @@ export class CommandController< ) { super() } + /** + * Factory method to create a new CommandController. + * + * Returns a curried async function: `(effects, subcontainer, exec) => CommandController`. + * If the exec spec has an `fn` property, runs the function; otherwise spawns a shell command + * in the subcontainer. + */ static of< Manifest extends T.SDKManifest, C extends SubContainer | null, @@ -130,6 +146,10 @@ export class CommandController< } } } + /** + * Wait for the command to finish. Optionally terminate after a timeout. + * @param options.timeout - Milliseconds to wait before terminating. Defaults to no timeout. + */ async wait({ timeout = NO_TIMEOUT } = {}) { if (timeout > 0) setTimeout(() => { @@ -156,6 +176,15 @@ export class CommandController< await this.subcontainer?.destroy() } } + /** + * Terminate the running command by sending a signal. + * + * Sends the specified signal (default: SIGTERM), then escalates to SIGKILL + * after the timeout expires. Destroys the subcontainer after the process exits. 
+ * + * @param options.signal - The signal to send (default: SIGTERM) + * @param options.timeout - Milliseconds before escalating to SIGKILL + */ async term({ signal = SIGTERM, timeout = this.sigtermTimeout } = {}) { try { if (!this.state.exited) { diff --git a/sdk/package/lib/mainFn/Daemon.ts b/sdk/package/lib/mainFn/Daemon.ts index a0110698d..fcbf1c9cb 100644 --- a/sdk/package/lib/mainFn/Daemon.ts +++ b/sdk/package/lib/mainFn/Daemon.ts @@ -13,10 +13,15 @@ import { Oneshot } from './Oneshot' const TIMEOUT_INCREMENT_MS = 1000 const MAX_TIMEOUT_MS = 30000 /** - * This is a wrapper around CommandController that has a state of off, where the command shouldn't be running - * and the others state of running, where it will keep a living running command + * A managed long-running process wrapper around {@link CommandController}. + * + * When started, the daemon automatically restarts its underlying command on failure + * with exponential backoff (up to 30 seconds). When stopped, the command is terminated + * gracefully. Implements {@link Drop} for automatic cleanup when the context is left. + * + * @typeParam Manifest - The service manifest type + * @typeParam C - The subcontainer type, or `null` for JS-only daemons */ - export class Daemon< Manifest extends T.SDKManifest, C extends SubContainer | null = SubContainer | null, @@ -33,9 +38,16 @@ export class Daemon< ) { super() } + /** Returns true if this daemon is a one-shot process (exits after success) */ isOneshot(): this is Oneshot { return this.oneshot } + /** + * Factory method to create a new Daemon. + * + * Returns a curried function: `(effects, subcontainer, exec) => Daemon`. + * The daemon auto-terminates when the effects context is left. + */ static of() { return | null>( effects: T.Effects, @@ -57,6 +69,12 @@ export class Daemon< return res } } + /** + * Start the daemon. If it is already running, this is a no-op. 
+ * + * The daemon will automatically restart on failure with increasing backoff + * until {@link term} is called. + */ async start() { if (this.commandController) { return @@ -105,6 +123,17 @@ export class Daemon< console.error(asError(err)) }) } + /** + * Terminate the daemon, stopping its underlying command. + * + * Sends the configured signal (default SIGTERM) and waits for the process to exit. + * Optionally destroys the subcontainer after termination. + * + * @param termOptions - Optional termination settings + * @param termOptions.signal - The signal to send (default: SIGTERM) + * @param termOptions.timeout - Milliseconds to wait before SIGKILL + * @param termOptions.destroySubcontainer - Whether to destroy the subcontainer after exit + */ async term(termOptions?: { signal?: NodeJS.Signals | undefined timeout?: number | undefined @@ -125,14 +154,20 @@ export class Daemon< this.exiting = null } } + /** Get a reference-counted handle to the daemon's subcontainer, or null if there is none */ subcontainerRc(): SubContainerRc | null { return this.subcontainer?.rc() ?? null } + /** Check whether this daemon shares the same subcontainer as another daemon */ sharesSubcontainerWith( other: Daemon | null>, ): boolean { return this.subcontainer?.guid === other.subcontainer?.guid } + /** + * Register a callback to be invoked each time the daemon's process exits. 
+ * @param fn - Callback receiving `true` on clean exit, `false` on error + */ onExit(fn: (success: boolean) => void) { this.onExitFns.push(fn) } diff --git a/sdk/package/lib/mainFn/Daemons.ts b/sdk/package/lib/mainFn/Daemons.ts index 374073518..4b6bc69c0 100644 --- a/sdk/package/lib/mainFn/Daemons.ts +++ b/sdk/package/lib/mainFn/Daemons.ts @@ -16,8 +16,15 @@ import { Daemon } from './Daemon' import { CommandController } from './CommandController' import { Oneshot } from './Oneshot' +/** Promisified version of `child_process.exec` */ export const cpExec = promisify(CP.exec) +/** Promisified version of `child_process.execFile` */ export const cpExecFile = promisify(CP.execFile) +/** + * Configuration for a daemon's health-check readiness probe. + * + * Determines how the system knows when a daemon is healthy and ready to serve. + */ export type Ready = { /** A human-readable display name for the health check. If null, the health check itself will be from the UI */ display: string | null @@ -45,6 +52,10 @@ export type Ready = { trigger?: Trigger } +/** + * Options for running a daemon as a shell command inside a subcontainer. + * Includes the command to run, optional signal/timeout, environment, user, and stdio callbacks. + */ export type ExecCommandOptions = { command: T.CommandType // Defaults to the DEFAULT_SIGTERM_TIMEOUT = 30_000ms @@ -61,6 +72,11 @@ export type ExecCommandOptions = { onStderr?: (chunk: Buffer | string | any) => void } +/** + * Options for running a daemon via an async function that may optionally return + * a command to execute in the subcontainer. The function receives an `AbortSignal` + * for cooperative cancellation. 
+ */ export type ExecFnOptions< Manifest extends T.SDKManifest, C extends SubContainer | null, @@ -73,6 +89,10 @@ export type ExecFnOptions< sigtermTimeout?: number } +/** + * The execution specification for a daemon: either an {@link ExecFnOptions} (async function) + * or an {@link ExecCommandOptions} (shell command, only valid when a subcontainer is provided). + */ export type DaemonCommandType< Manifest extends T.SDKManifest, C extends SubContainer | null, @@ -385,6 +405,13 @@ export class Daemons return null } + /** + * Gracefully terminate all daemons in reverse dependency order. + * + * Daemons with no remaining dependents are shut down first, proceeding + * until all daemons have been terminated. Falls back to a bulk shutdown + * if a dependency cycle is detected. + */ async term() { const remaining = new Set(this.healthDaemons) @@ -427,6 +454,10 @@ export class Daemons } } + /** + * Start all registered daemons and their health checks. + * @returns This `Daemons` instance, now running + */ async build() { for (const daemon of this.healthDaemons) { await daemon.updateStatus() diff --git a/sdk/package/lib/mainFn/Mounts.ts b/sdk/package/lib/mainFn/Mounts.ts index b3eb11945..653637fb8 100644 --- a/sdk/package/lib/mainFn/Mounts.ts +++ b/sdk/package/lib/mainFn/Mounts.ts @@ -49,6 +49,15 @@ type DependencyOpts = { readonly: boolean } & SharedOptions +/** + * Immutable builder for declaring filesystem mounts into a subcontainer. + * + * Supports mounting volumes, static assets, dependency volumes, and backup directories. + * Each `mount*` method returns a new `Mounts` instance (immutable builder pattern). 
+ * + * @typeParam Manifest - The service manifest type + * @typeParam Backups - Tracks whether backup mounts have been added (type-level flag) + */ export class Mounts< Manifest extends T.SDKManifest, Backups extends SharedOptions = never, @@ -60,10 +69,19 @@ export class Mounts< readonly backups: Backups[], ) {} + /** + * Create an empty Mounts builder with no mounts configured. + * @returns A new Mounts instance ready for chaining mount declarations + */ static of() { return new Mounts([], [], [], []) } + /** + * Add a volume mount from the service's own volumes. + * @param options - Volume ID, mountpoint, readonly flag, and optional subpath + * @returns A new Mounts instance with this volume added + */ mountVolume(options: VolumeOpts) { return new Mounts( [...this.volumes, options], @@ -73,6 +91,11 @@ export class Mounts< ) } + /** + * Add a read-only mount of the service's packaged static assets. + * @param options - Mountpoint and optional subpath within the assets directory + * @returns A new Mounts instance with this asset mount added + */ mountAssets(options: SharedOptions) { return new Mounts( [...this.volumes], @@ -82,6 +105,11 @@ export class Mounts< ) } + /** + * Add a mount from a dependency package's volume. + * @param options - Dependency ID, volume ID, mountpoint, readonly flag, and optional subpath + * @returns A new Mounts instance with this dependency mount added + */ mountDependency( options: DependencyOpts, ) { @@ -93,6 +121,11 @@ export class Mounts< ) } + /** + * Add a mount of the backup directory. Only valid during backup/restore operations. + * @param options - Mountpoint and optional subpath within the backup directory + * @returns A new Mounts instance with this backup mount added + */ mountBackups(options: SharedOptions) { return new Mounts< Manifest, @@ -108,6 +141,11 @@ export class Mounts< ) } + /** + * Compile all declared mounts into the low-level mount array consumed by the subcontainer runtime. 
+ * @throws If any two mounts share the same mountpoint + * @returns An array of `{ mountpoint, options }` objects + */ build(): MountArray { const mountpoints = new Set() for (let mountpoint of this.volumes diff --git a/sdk/package/lib/mainFn/index.ts b/sdk/package/lib/mainFn/index.ts index 279cfce29..36c951b2d 100644 --- a/sdk/package/lib/mainFn/index.ts +++ b/sdk/package/lib/mainFn/index.ts @@ -3,6 +3,7 @@ import { Daemons } from './Daemons' import '../../../base/lib/interfaces/ServiceInterfaceBuilder' import '../../../base/lib/interfaces/Origin' +/** Default time in milliseconds to wait for a process to exit after SIGTERM before escalating to SIGKILL */ export const DEFAULT_SIGTERM_TIMEOUT = 60_000 /** * Used to ensure that the main function is running with the valid proofs. diff --git a/sdk/package/lib/manifest/setupManifest.ts b/sdk/package/lib/manifest/setupManifest.ts index 1f78e087d..fad59950a 100644 --- a/sdk/package/lib/manifest/setupManifest.ts +++ b/sdk/package/lib/manifest/setupManifest.ts @@ -24,6 +24,15 @@ export function setupManifest< return manifest } +/** + * Build the final publishable manifest by combining the SDK manifest definition + * with version graph metadata, OS version, SDK version, and computed fields + * (migration ranges, hardware requirements, alerts, etc.). + * + * @param versions - The service's VersionGraph, used to extract the current version, release notes, and migration ranges + * @param manifest - The SDK manifest definition (from `setupManifest`) + * @returns A fully resolved Manifest ready for packaging + */ export function buildManifest< Id extends string, Version extends string, @@ -89,5 +98,6 @@ export function buildManifest< ), }, hardwareAcceleration: manifest.hardwareAcceleration ?? false, + plugins: manifest.plugins ?? 
[], } } diff --git a/sdk/package/lib/test/inputSpecBuilder.test.ts b/sdk/package/lib/test/inputSpecBuilder.test.ts index 3bbe1a048..d9b2dd4c0 100644 --- a/sdk/package/lib/test/inputSpecBuilder.test.ts +++ b/sdk/package/lib/test/inputSpecBuilder.test.ts @@ -1,4 +1,3 @@ -import { testOutput } from './output.test' import { InputSpec } from '../../../base/lib/actions/input/builder/inputSpec' import { List } from '../../../base/lib/actions/input/builder/list' import { Value } from '../../../base/lib/actions/input/builder/value' @@ -7,6 +6,12 @@ import { ValueSpec } from '../../../base/lib/actions/input/inputSpecTypes' import { setupManifest } from '../manifest/setupManifest' import { StartSdk } from '../StartSdk' +export type IfEquals = + (() => G extends T ? 1 : 2) extends () => G extends U ? 1 : 2 ? Y : N +export function testOutput(): (c: IfEquals) => null { + return () => null +} + describe('builder tests', () => { test('text', async () => { const bitcoinPropertiesBuilt: { @@ -50,8 +55,8 @@ describe('values', () => { default: false, }).build({} as any) const validator = value.validator - validator.unsafeCast(false) - testOutput()(null) + validator.parse(false) + testOutput()(null) }) test('text', async () => { const value = await Value.text({ @@ -61,9 +66,9 @@ describe('values', () => { }).build({} as any) const validator = value.validator const rawIs = value.spec - validator.unsafeCast('test text') - expect(() => validator.unsafeCast(null)).toThrowError() - testOutput()(null) + validator.parse('test text') + expect(() => validator.parse(null)).toThrowError() + testOutput()(null) }) test('text with default', async () => { const value = await Value.text({ @@ -73,9 +78,9 @@ describe('values', () => { }).build({} as any) const validator = value.validator const rawIs = value.spec - validator.unsafeCast('test text') - expect(() => validator.unsafeCast(null)).toThrowError() - testOutput()(null) + validator.parse('test text') + expect(() => 
validator.parse(null)).toThrowError() + testOutput()(null) }) test('optional text', async () => { const value = await Value.text({ @@ -85,9 +90,9 @@ describe('values', () => { }).build({} as any) const validator = value.validator const rawIs = value.spec - validator.unsafeCast('test text') - validator.unsafeCast(null) - testOutput()(null) + validator.parse('test text') + validator.parse(null) + testOutput()(null) }) test('color', async () => { const value = await Value.color({ @@ -98,8 +103,8 @@ describe('values', () => { warning: null, }).build({} as any) const validator = value.validator - validator.unsafeCast('#000000') - testOutput()(null) + validator.parse('#000000') + testOutput()(null) }) test('datetime', async () => { const value = await Value.datetime({ @@ -113,8 +118,8 @@ describe('values', () => { max: null, }).build({} as any) const validator = value.validator - validator.unsafeCast('2021-01-01') - testOutput()(null) + validator.parse('2021-01-01') + testOutput()(null) }) test('optional datetime', async () => { const value = await Value.datetime({ @@ -128,8 +133,8 @@ describe('values', () => { max: null, }).build({} as any) const validator = value.validator - validator.unsafeCast('2021-01-01') - testOutput()(null) + validator.parse('2021-01-01') + testOutput()(null) }) test('textarea', async () => { const value = await Value.textarea({ @@ -145,8 +150,8 @@ describe('values', () => { placeholder: null, }).build({} as any) const validator = value.validator - validator.unsafeCast('test text') - testOutput()(null) + validator.parse('test text') + testOutput()(null) }) test('number', async () => { const value = await Value.number({ @@ -163,8 +168,8 @@ describe('values', () => { placeholder: null, }).build({} as any) const validator = value.validator - validator.unsafeCast(2) - testOutput()(null) + validator.parse(2) + testOutput()(null) }) test('optional number', async () => { const value = await Value.number({ @@ -181,8 +186,8 @@ describe('values', () => { 
placeholder: null, }).build({} as any) const validator = value.validator - validator.unsafeCast(2) - testOutput()(null) + validator.parse(2) + testOutput()(null) }) test('select', async () => { const value = await Value.select({ @@ -196,10 +201,10 @@ describe('values', () => { warning: null, }).build({} as any) const validator = value.validator - validator.unsafeCast('a') - validator.unsafeCast('b') - expect(() => validator.unsafeCast('c')).toThrowError() - testOutput()(null) + validator.parse('a') + validator.parse('b') + expect(() => validator.parse('c')).toThrowError() + testOutput()(null) }) test('nullable select', async () => { const value = await Value.select({ @@ -213,9 +218,9 @@ describe('values', () => { warning: null, }).build({} as any) const validator = value.validator - validator.unsafeCast('a') - validator.unsafeCast('b') - testOutput()(null) + validator.parse('a') + validator.parse('b') + testOutput()(null) }) test('multiselect', async () => { const value = await Value.multiselect({ @@ -231,12 +236,12 @@ describe('values', () => { maxLength: null, }).build({} as any) const validator = value.validator - validator.unsafeCast([]) - validator.unsafeCast(['a', 'b']) + validator.parse([]) + validator.parse(['a', 'b']) - expect(() => validator.unsafeCast(['e'])).toThrowError() - expect(() => validator.unsafeCast([4])).toThrowError() - testOutput>()(null) + expect(() => validator.parse(['e'])).toThrowError() + expect(() => validator.parse([4])).toThrowError() + testOutput>()(null) }) test('object', async () => { const value = await Value.object( @@ -254,8 +259,8 @@ describe('values', () => { }), ).build({} as any) const validator = value.validator - validator.unsafeCast({ a: true }) - testOutput()(null) + validator.parse({ a: true }) + testOutput()(null) }) test('union', async () => { const value = await Value.union({ @@ -278,8 +283,8 @@ describe('values', () => { }), }).build({} as any) const validator = value.validator - validator.unsafeCast({ selection: 
'a', value: { b: false } }) - type Test = typeof validator._TYPE + validator.parse({ selection: 'a', value: { b: false } }) + type Test = typeof validator._output testOutput< Test, { @@ -306,9 +311,9 @@ describe('values', () => { default: false, })).build({} as any) const validator = value.validator - validator.unsafeCast(false) - expect(() => validator.unsafeCast(null)).toThrowError() - testOutput()(null) + validator.parse(false) + expect(() => validator.parse(null)).toThrowError() + testOutput()(null) expect(value.spec).toMatchObject({ name: 'Testing', description: null, @@ -324,9 +329,9 @@ describe('values', () => { })).build({} as any) const validator = value.validator const rawIs = value.spec - validator.unsafeCast('test text') - validator.unsafeCast(null) - testOutput()(null) + validator.parse('test text') + validator.parse(null) + testOutput()(null) expect(value.spec).toMatchObject({ name: 'Testing', required: false, @@ -340,9 +345,9 @@ describe('values', () => { default: 'this is a default value', })).build({} as any) const validator = value.validator - validator.unsafeCast('test text') - validator.unsafeCast(null) - testOutput()(null) + validator.parse('test text') + validator.parse(null) + testOutput()(null) expect(value.spec).toMatchObject({ name: 'Testing', required: false, @@ -357,9 +362,9 @@ describe('values', () => { })).build({} as any) const validator = value.validator const rawIs = value.spec - validator.unsafeCast('test text') - validator.unsafeCast(null) - testOutput()(null) + validator.parse('test text') + validator.parse(null) + testOutput()(null) expect(value.spec).toMatchObject({ name: 'Testing', required: false, @@ -375,9 +380,9 @@ describe('values', () => { warning: null, })).build({} as any) const validator = value.validator - validator.unsafeCast('#000000') - validator.unsafeCast(null) - testOutput()(null) + validator.parse('#000000') + validator.parse(null) + testOutput()(null) expect(value.spec).toMatchObject({ name: 'Testing', 
required: false, @@ -393,12 +398,11 @@ describe('values', () => { id: 'testOutput', title: '', license: '', - wrapperRepo: '', + packageRepo: '', upstreamRepo: '', - supportSite: '', - marketingSite: '', + marketingUrl: '', donationUrl: null, - docsUrl: '', + docsUrls: [], description: { short: '', long: '', @@ -433,9 +437,9 @@ describe('values', () => { } }).build({} as any) const validator = value.validator - validator.unsafeCast('2021-01-01') - validator.unsafeCast(null) - testOutput()(null) + validator.parse('2021-01-01') + validator.parse(null) + testOutput()(null) expect(value.spec).toMatchObject({ name: 'Testing', required: false, @@ -459,8 +463,8 @@ describe('values', () => { placeholder: null, })).build({} as any) const validator = value.validator - validator.unsafeCast('test text') - testOutput()(null) + validator.parse('test text') + testOutput()(null) expect(value.spec).toMatchObject({ name: 'Testing', required: false, @@ -481,10 +485,10 @@ describe('values', () => { placeholder: null, })).build({} as any) const validator = value.validator - validator.unsafeCast(2) - validator.unsafeCast(null) - expect(() => validator.unsafeCast('null')).toThrowError() - testOutput()(null) + validator.parse(2) + validator.parse(null) + expect(() => validator.parse('null')).toThrowError() + testOutput()(null) expect(value.spec).toMatchObject({ name: 'Testing', required: false, @@ -502,9 +506,9 @@ describe('values', () => { warning: null, })).build({} as any) const validator = value.validator - validator.unsafeCast('a') - validator.unsafeCast('b') - testOutput()(null) + validator.parse('a') + validator.parse('b') + testOutput()(null) expect(value.spec).toMatchObject({ name: 'Testing', }) @@ -523,12 +527,12 @@ describe('values', () => { maxLength: null, })).build({} as any) const validator = value.validator - validator.unsafeCast([]) - validator.unsafeCast(['a', 'b']) + validator.parse([]) + validator.parse(['a', 'b']) - expect(() => 
validator.unsafeCast([4])).toThrowError() - expect(() => validator.unsafeCast(null)).toThrowError() - testOutput>()(null) + expect(() => validator.parse([4])).toThrowError() + expect(() => validator.parse(null)).toThrowError() + testOutput>()(null) expect(value.spec).toMatchObject({ name: 'Testing', default: [], @@ -569,8 +573,8 @@ describe('values', () => { }), })).build({} as any) const validator = value.validator - validator.unsafeCast({ selection: 'a', value: { b: false } }) - type Test = typeof validator._TYPE + validator.parse({ selection: 'a', value: { b: false } }) + type Test = typeof validator._output testOutput< Test, | { @@ -654,8 +658,8 @@ describe('values', () => { }), })).build({} as any) const validator = value.validator - validator.unsafeCast({ selection: 'a', value: { b: false } }) - type Test = typeof validator._TYPE + validator.parse({ selection: 'a', value: { b: false } }) + type Test = typeof validator._output testOutput< Test, | { @@ -727,8 +731,8 @@ describe('Builder List', () => { ), ).build({} as any) const validator = value.validator - validator.unsafeCast([{ test: true }]) - testOutput()(null) + validator.parse([{ test: true }]) + testOutput()(null) }) test('text', async () => { const value = await Value.list( @@ -742,8 +746,8 @@ describe('Builder List', () => { ), ).build({} as any) const validator = value.validator - validator.unsafeCast(['test', 'text']) - testOutput()(null) + validator.parse(['test', 'text']) + testOutput()(null) }) describe('dynamic', () => { test('text', async () => { @@ -754,10 +758,10 @@ describe('Builder List', () => { })), ).build({} as any) const validator = value.validator - validator.unsafeCast(['test', 'text']) - expect(() => validator.unsafeCast([3, 4])).toThrowError() - expect(() => validator.unsafeCast(null)).toThrowError() - testOutput()(null) + validator.parse(['test', 'text']) + expect(() => validator.parse([3, 4])).toThrowError() + expect(() => validator.parse(null)).toThrowError() + 
testOutput()(null) expect(value.spec).toMatchObject({ name: 'test', spec: { patterns: [] }, @@ -778,10 +782,10 @@ describe('Nested nullable values', () => { }), }).build({} as any) const validator = value.validator - validator.unsafeCast({ a: null }) - validator.unsafeCast({ a: 'test' }) - expect(() => validator.unsafeCast({ a: 4 })).toThrowError() - testOutput()(null) + validator.parse({ a: null }) + validator.parse({ a: 'test' }) + expect(() => validator.parse({ a: 4 })).toThrowError() + testOutput()(null) }) test('Testing number', async () => { const value = await InputSpec.of({ @@ -801,10 +805,10 @@ describe('Nested nullable values', () => { }), }).build({} as any) const validator = value.validator - validator.unsafeCast({ a: null }) - validator.unsafeCast({ a: 5 }) - expect(() => validator.unsafeCast({ a: '4' })).toThrowError() - testOutput()(null) + validator.parse({ a: null }) + validator.parse({ a: 5 }) + expect(() => validator.parse({ a: '4' })).toThrowError() + testOutput()(null) }) test('Testing color', async () => { const value = await InputSpec.of({ @@ -818,10 +822,10 @@ describe('Nested nullable values', () => { }), }).build({} as any) const validator = value.validator - validator.unsafeCast({ a: null }) - validator.unsafeCast({ a: '5' }) - expect(() => validator.unsafeCast({ a: 4 })).toThrowError() - testOutput()(null) + validator.parse({ a: null }) + validator.parse({ a: '5' }) + expect(() => validator.parse({ a: 4 })).toThrowError() + testOutput()(null) }) test('Testing select', async () => { const value = await InputSpec.of({ @@ -848,9 +852,9 @@ describe('Nested nullable values', () => { }).build({} as any) const validator = value.validator - validator.unsafeCast({ a: 'a' }) - expect(() => validator.unsafeCast({ a: '4' })).toThrowError() - testOutput()(null) + validator.parse({ a: 'a' }) + expect(() => validator.parse({ a: '4' })).toThrowError() + testOutput()(null) }) test('Testing multiselect', async () => { const value = await InputSpec.of({ @@ 
-869,10 +873,10 @@ describe('Nested nullable values', () => { }), }).build({} as any) const validator = value.validator - validator.unsafeCast({ a: [] }) - validator.unsafeCast({ a: ['a'] }) - expect(() => validator.unsafeCast({ a: ['4'] })).toThrowError() - expect(() => validator.unsafeCast({ a: '4' })).toThrowError() - testOutput()(null) + validator.parse({ a: [] }) + validator.parse({ a: ['a'] }) + expect(() => validator.parse({ a: ['4'] })).toThrowError() + expect(() => validator.parse({ a: '4' })).toThrowError() + testOutput()(null) }) }) diff --git a/sdk/package/lib/test/makeOutput.ts b/sdk/package/lib/test/makeOutput.ts deleted file mode 100644 index b526aeca3..000000000 --- a/sdk/package/lib/test/makeOutput.ts +++ /dev/null @@ -1,428 +0,0 @@ -import { oldSpecToBuilder } from '../../scripts/oldSpecToBuilder' - -oldSpecToBuilder( - // Make the location - './lib/test/output.ts', - // Put the inputSpec here - { - mediasources: { - type: 'list', - subtype: 'enum', - name: 'Media Sources', - description: 'List of Media Sources to use with Jellyfin', - range: '[1,*)', - default: ['nextcloud'], - spec: { - values: ['nextcloud', 'filebrowser'], - 'value-names': { - nextcloud: 'NextCloud', - filebrowser: 'File Browser', - }, - }, - }, - testListUnion: { - type: 'list', - subtype: 'union', - name: 'Lightning Nodes', - description: 'List of Lightning Network node instances to manage', - range: '[1,*)', - default: ['lnd'], - spec: { - type: 'string', - 'display-as': '{{name}}', - 'unique-by': 'name', - name: 'Node Implementation', - tag: { - id: 'type', - name: 'Type', - description: - '- LND: Lightning Network Daemon from Lightning Labs\n- CLN: Core Lightning from Blockstream\n', - 'variant-names': { - lnd: 'Lightning Network Daemon (LND)', - 'c-lightning': 'Core Lightning (CLN)', - }, - }, - default: 'lnd', - variants: { - lnd: { - name: { - type: 'string', - name: 'Node Name', - description: 'Name of this node in the list', - default: 'LND Wrapper', - nullable: 
false, - }, - }, - }, - }, - }, - rpc: { - type: 'object', - name: 'RPC Settings', - description: 'RPC configuration options.', - spec: { - enable: { - type: 'boolean', - name: 'Enable', - description: 'Allow remote RPC requests.', - default: true, - }, - username: { - type: 'string', - nullable: false, - name: 'Username', - description: 'The username for connecting to Bitcoin over RPC.', - default: 'bitcoin', - masked: true, - pattern: '^[a-zA-Z0-9_]+$', - 'pattern-description': - 'Must be alphanumeric (can contain underscore).', - }, - password: { - type: 'string', - nullable: false, - name: 'RPC Password', - description: 'The password for connecting to Bitcoin over RPC.', - default: { - charset: 'a-z,2-7', - len: 20, - }, - pattern: '^[^\\n"]*$', - 'pattern-description': - 'Must not contain newline or quote characters.', - copyable: true, - masked: true, - }, - bio: { - type: 'string', - nullable: false, - name: 'Username', - description: 'The username for connecting to Bitcoin over RPC.', - default: 'bitcoin', - masked: true, - pattern: '^[a-zA-Z0-9_]+$', - 'pattern-description': - 'Must be alphanumeric (can contain underscore).', - textarea: true, - }, - advanced: { - type: 'object', - name: 'Advanced', - description: 'Advanced RPC Settings', - spec: { - auth: { - name: 'Authorization', - description: - 'Username and hashed password for JSON-RPC connections. 
RPC clients connect using the usual http basic authentication.', - type: 'list', - subtype: 'string', - default: [], - spec: { - pattern: - '^[a-zA-Z0-9_-]+:([0-9a-fA-F]{2})+\\$([0-9a-fA-F]{2})+$', - 'pattern-description': - 'Each item must be of the form ":$".', - masked: false, - }, - range: '[0,*)', - }, - serialversion: { - name: 'Serialization Version', - description: - 'Return raw transaction or block hex with Segwit or non-SegWit serialization.', - type: 'enum', - values: ['non-segwit', 'segwit'], - 'value-names': {}, - default: 'segwit', - }, - servertimeout: { - name: 'Rpc Server Timeout', - description: - 'Number of seconds after which an uncompleted RPC call will time out.', - type: 'number', - nullable: false, - range: '[5,300]', - integral: true, - units: 'seconds', - default: 30, - }, - threads: { - name: 'Threads', - description: - 'Set the number of threads for handling RPC calls. You may wish to increase this if you are making lots of calls via an integration.', - type: 'number', - nullable: false, - default: 16, - range: '[1,64]', - integral: true, - }, - workqueue: { - name: 'Work Queue', - description: - 'Set the depth of the work queue to service RPC calls. 
Determines how long the backlog of RPC requests can get before it just rejects new ones.', - type: 'number', - nullable: false, - default: 128, - range: '[8,256]', - integral: true, - units: 'requests', - }, - }, - }, - }, - }, - 'zmq-enabled': { - type: 'boolean', - name: 'ZeroMQ Enabled', - description: 'Enable the ZeroMQ interface', - default: true, - }, - txindex: { - type: 'boolean', - name: 'Transaction Index', - description: 'Enable the Transaction Index (txindex)', - default: true, - }, - wallet: { - type: 'object', - name: 'Wallet', - description: 'Wallet Settings', - spec: { - enable: { - name: 'Enable Wallet', - description: 'Load the wallet and enable wallet RPC calls.', - type: 'boolean', - default: true, - }, - avoidpartialspends: { - name: 'Avoid Partial Spends', - description: - 'Group outputs by address, selecting all or none, instead of selecting on a per-output basis. This improves privacy at the expense of higher transaction fees.', - type: 'boolean', - default: true, - }, - discardfee: { - name: 'Discard Change Tolerance', - description: - 'The fee rate (in BTC/kB) that indicates your tolerance for discarding change by adding it to the fee.', - type: 'number', - nullable: false, - default: 0.0001, - range: '[0,.01]', - integral: false, - units: 'BTC/kB', - }, - }, - }, - advanced: { - type: 'object', - name: 'Advanced', - description: 'Advanced Settings', - spec: { - mempool: { - type: 'object', - name: 'Mempool', - description: 'Mempool Settings', - spec: { - mempoolfullrbf: { - name: 'Enable Full RBF', - description: - 'Policy for your node to use for relaying and mining unconfirmed transactions. 
For details, see https://github.com/bitcoin/bitcoin/blob/master/doc/release-notes/release-notes-24.0.md#notice-of-new-option-for-transaction-replacement-policies', - type: 'boolean', - default: false, - }, - persistmempool: { - type: 'boolean', - name: 'Persist Mempool', - description: 'Save the mempool on shutdown and load on restart.', - default: true, - }, - maxmempool: { - type: 'number', - nullable: false, - name: 'Max Mempool Size', - description: - 'Keep the transaction memory pool below megabytes.', - range: '[1,*)', - integral: true, - units: 'MiB', - default: 300, - }, - mempoolexpiry: { - type: 'number', - nullable: false, - name: 'Mempool Expiration', - description: - 'Do not keep transactions in the mempool longer than hours.', - range: '[1,*)', - integral: true, - units: 'Hr', - default: 336, - }, - }, - }, - peers: { - type: 'object', - name: 'Peers', - description: 'Peer Connection Settings', - spec: { - listen: { - type: 'boolean', - name: 'Make Public', - description: - 'Allow other nodes to find your server on the network.', - default: true, - }, - onlyconnect: { - type: 'boolean', - name: 'Disable Peer Discovery', - description: 'Only connect to specified peers.', - default: false, - }, - onlyonion: { - type: 'boolean', - name: 'Disable Clearnet', - description: 'Only connect to peers over Tor.', - default: false, - }, - addnode: { - name: 'Add Nodes', - description: 'Add addresses of nodes to connect to.', - type: 'list', - subtype: 'object', - range: '[0,*)', - default: [], - spec: { - 'unique-by': null, - spec: { - hostname: { - type: 'string', - nullable: true, - name: 'Hostname', - description: 'Domain or IP address of bitcoin peer', - pattern: - 
'(^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$)|((^(([0-9a-fA-F]{1,4}:){7,7}[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,7}:|([0-9a-fA-F]{1,4}:){1,6}:[0-9a-fA-F]{1,4}|([0-9a-fA-F]{1,4}:){1,5}(:[0-9a-fA-F]{1,4}){1,2}|([0-9a-fA-F]{1,4}:){1,4}(:[0-9a-fA-F]{1,4}){1,3}|([0-9a-fA-F]{1,4}:){1,3}(:[0-9a-fA-F]{1,4}){1,4}|([0-9a-fA-F]{1,4}:){1,2}(:[0-9a-fA-F]{1,4}){1,5}|[0-9a-fA-F]{1,4}:((:[0-9a-fA-F]{1,4}){1,6})|:((:[0-9a-fA-F]{1,4}){1,7}|:)|fe80:(:[0-9a-fA-F]{0,4}){0,4}%[0-9a-zA-Z]{1,}|::(ffff(:0{1,4}){0,1}:){0,1}((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|([0-9a-fA-F]{1,4}:){1,4}:((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\\.){3,3}(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9]))$)|(^[a-z2-7]{16}\\.onion$)|(^([a-z0-9]([a-z0-9-]{0,61}[a-z0-9])?\\.)+[a-z0-9][a-z0-9-]{0,61}[a-z0-9]$))', - 'pattern-description': - "Must be either a domain name, or an IPv4 or IPv6 address. Do not include protocol scheme (eg 'http://') or port.", - masked: false, - }, - port: { - type: 'number', - nullable: true, - name: 'Port', - description: - 'Port that peer is listening on for inbound p2p connections', - range: '[0,65535]', - integral: true, - }, - }, - }, - }, - }, - }, - dbcache: { - type: 'number', - nullable: true, - name: 'Database Cache', - description: - "How much RAM to allocate for caching the TXO set. Higher values improve syncing performance, but increase your chance of using up all your system's memory or corrupting your database in the event of an ungraceful shutdown. Set this high but comfortably below your system's total RAM during IBD, then turn down to 450 (or leave blank) once the sync completes.", - warning: - 'WARNING: Increasing this value results in a higher chance of ungraceful shutdowns, which can leave your node unusable if it happens during the initial block download. Use this setting with caution. Be sure to set this back to the default (450 or leave blank) once your node is synced. 
DO NOT press the STOP button if your dbcache is large. Instead, set this number back to the default, hit save, and wait for bitcoind to restart on its own.', - range: '(0,*)', - integral: true, - units: 'MiB', - }, - pruning: { - type: 'union', - name: 'Pruning Settings', - description: - 'Blockchain Pruning Options\nReduce the blockchain size on disk\n', - warning: - 'If you set pruning to Manual and your disk is smaller than the total size of the blockchain, you MUST have something running that prunes these blocks or you may overfill your disk!\nDisabling pruning will convert your node into a full archival node. This requires a resync of the entire blockchain, a process that may take several days. Make sure you have enough free disk space or you may fill up your disk.\n', - tag: { - id: 'mode', - name: 'Pruning Mode', - description: - '- Disabled: Disable pruning\n- Automatic: Limit blockchain size on disk to a certain number of megabytes\n- Manual: Prune blockchain with the "pruneblockchain" RPC\n', - 'variant-names': { - disabled: 'Disabled', - automatic: 'Automatic', - manual: 'Manual', - }, - }, - variants: { - disabled: {}, - automatic: { - size: { - type: 'number', - nullable: false, - name: 'Max Chain Size', - description: 'Limit of blockchain size on disk.', - warning: - 'Increasing this value will require re-syncing your node.', - default: 550, - range: '[550,1000000)', - integral: true, - units: 'MiB', - }, - }, - manual: { - size: { - type: 'number', - nullable: false, - name: 'Failsafe Chain Size', - description: 'Prune blockchain if size expands beyond this.', - default: 65536, - range: '[550,1000000)', - integral: true, - units: 'MiB', - }, - }, - }, - default: 'disabled', - }, - blockfilters: { - type: 'object', - name: 'Block Filters', - description: 'Settings for storing and serving compact block filters', - spec: { - blockfilterindex: { - type: 'boolean', - name: 'Compute Compact Block Filters (BIP158)', - description: - "Generate Compact Block 
Filters during initial sync (IBD) to enable 'getblockfilter' RPC. This is useful if dependent services need block filters to efficiently scan for addresses/transactions etc.", - default: true, - }, - peerblockfilters: { - type: 'boolean', - name: 'Serve Compact Block Filters to Peers (BIP157)', - description: - "Serve Compact Block Filters as a peer service to other nodes on the network. This is useful if you wish to connect an SPV client to your node to make it efficient to scan transactions without having to download all block data. 'Compute Compact Block Filters (BIP158)' is required.", - default: false, - }, - }, - }, - bloomfilters: { - type: 'object', - name: 'Bloom Filters (BIP37)', - description: 'Setting for serving Bloom Filters', - spec: { - peerbloomfilters: { - type: 'boolean', - name: 'Serve Bloom Filters to Peers', - description: - 'Peers have the option of setting filters on each connection they make after the version handshake has completed. Bloom filters are for clients implementing SPV (Simplified Payment Verification) that want to check that block headers connect together correctly, without needing to verify the full blockchain. The client must trust that the transactions in the chain are in fact valid. 
It is highly recommended AGAINST using for anything except Bisq integration.', - warning: - 'This is ONLY for use with Bisq integration, please use Block Filters for all other applications.', - default: false, - }, - }, - }, - }, - }, - }, - { - // convert this to `start-sdk/lib` for conversions - StartSdk: './output.sdk', - }, -) diff --git a/sdk/package/lib/test/output.sdk.ts b/sdk/package/lib/test/output.sdk.ts index 17e462c81..7ebfc18ec 100644 --- a/sdk/package/lib/test/output.sdk.ts +++ b/sdk/package/lib/test/output.sdk.ts @@ -9,12 +9,11 @@ export const sdk = StartSdk.of() id: 'testOutput', title: '', license: '', - wrapperRepo: '', + packageRepo: '', upstreamRepo: '', - supportSite: '', - marketingSite: '', + marketingUrl: '', donationUrl: null, - docsUrl: '', + docsUrls: [], description: { short: '', long: '', diff --git a/sdk/package/lib/test/output.test.ts b/sdk/package/lib/test/output.test.ts deleted file mode 100644 index 146092075..000000000 --- a/sdk/package/lib/test/output.test.ts +++ /dev/null @@ -1,148 +0,0 @@ -import { inputSpecSpec, InputSpecSpec } from './output' -import * as _I from '../index' -import { camelCase } from '../../scripts/oldSpecToBuilder' -import { deepMerge } from '../../../base/lib/util' - -export type IfEquals = - (() => G extends T ? 1 : 2) extends () => G extends U ? 1 : 2 ? 
Y : N -export function testOutput(): (c: IfEquals) => null { - return () => null -} - -/// Testing the types of the input spec -testOutput()(null) -testOutput()(null) -testOutput()(null) - -testOutput()(null) -testOutput< - InputSpecSpec['rpc']['advanced']['serialversion'], - 'segwit' | 'non-segwit' ->()(null) -testOutput()(null) -testOutput< - InputSpecSpec['advanced']['peers']['addnode'][0]['hostname'], - string | null ->()(null) -testOutput< - InputSpecSpec['testListUnion'][0]['union']['value']['name'], - string ->()(null) -testOutput()( - null, -) -testOutput>()( - null, -) - -// @ts-expect-error Because enable should be a boolean -testOutput()(null) -// prettier-ignore -// @ts-expect-error Expect that the string is the one above -testOutput()(null); - -/// Here we test the output of the matchInputSpecSpec function -describe('Inputs', () => { - const validInput: InputSpecSpec = { - mediasources: ['filebrowser'], - testListUnion: [ - { - union: { selection: 'lnd', value: { name: 'string' } }, - }, - ], - rpc: { - enable: true, - bio: 'This is a bio', - username: 'test', - password: 'test', - advanced: { - auth: ['test'], - serialversion: 'segwit', - servertimeout: 6, - threads: 3, - workqueue: 9, - }, - }, - 'zmq-enabled': false, - txindex: false, - wallet: { enable: false, avoidpartialspends: false, discardfee: 0.0001 }, - advanced: { - mempool: { - maxmempool: 1, - persistmempool: true, - mempoolexpiry: 23, - mempoolfullrbf: true, - }, - peers: { - listen: true, - onlyconnect: true, - onlyonion: true, - addnode: [ - { - hostname: 'test', - port: 1, - }, - ], - }, - dbcache: 5, - pruning: { - selection: 'disabled', - value: { disabled: {} }, - }, - blockfilters: { - blockfilterindex: false, - peerblockfilters: false, - }, - bloomfilters: { peerbloomfilters: false }, - }, - } - - test('test valid input', async () => { - const { validator } = await inputSpecSpec.build({} as any) - const output = validator.unsafeCast(validInput) - 
expect(output).toEqual(validInput) - }) - test('test no longer care about the conversion of min/max and validating', async () => { - const { validator } = await inputSpecSpec.build({} as any) - validator.unsafeCast( - deepMerge({}, validInput, { rpc: { advanced: { threads: 0 } } }), - ) - }) - test('test errors should throw for number in string', async () => { - const { validator } = await inputSpecSpec.build({} as any) - expect(() => - validator.unsafeCast(deepMerge({}, validInput, { rpc: { enable: 2 } })), - ).toThrowError() - }) - test('Test that we set serialversion to something not segwit or non-segwit', async () => { - const { validator } = await inputSpecSpec.build({} as any) - expect(() => - validator.unsafeCast( - deepMerge({}, validInput, { - rpc: { advanced: { serialversion: 'testing' } }, - }), - ), - ).toThrowError() - }) -}) - -describe('camelCase', () => { - test("'EquipmentClass name'", () => { - expect(camelCase('EquipmentClass name')).toEqual('equipmentClassName') - }) - test("'Equipment className'", () => { - expect(camelCase('Equipment className')).toEqual('equipmentClassName') - }) - test("'equipment class name'", () => { - expect(camelCase('equipment class name')).toEqual('equipmentClassName') - }) - test("'Equipment Class Name'", () => { - expect(camelCase('Equipment Class Name')).toEqual('equipmentClassName') - }) - test("'hyphen-name-format'", () => { - expect(camelCase('hyphen-name-format')).toEqual('hyphenNameFormat') - }) - test("'underscore_name_format'", () => { - expect(camelCase('underscore_name_format')).toEqual('underscoreNameFormat') - }) -}) diff --git a/sdk/package/lib/util/GetServiceManifest.ts b/sdk/package/lib/util/GetServiceManifest.ts index 87b7ea5da..9f85570d2 100644 --- a/sdk/package/lib/util/GetServiceManifest.ts +++ b/sdk/package/lib/util/GetServiceManifest.ts @@ -1,5 +1,6 @@ import { Effects } from '../../../base/lib/Effects' import { Manifest, PackageId } from '../../../base/lib/osBindings' +import { AbortedError } 
from '../../../base/lib/util/AbortedError' import { DropGenerator, DropPromise } from '../../../base/lib/util/Drop' import { deepEqual } from '../../../base/lib/util/deepEqual' @@ -64,7 +65,7 @@ export class GetServiceManifest { } await waitForNext } - return new Promise((_, rej) => rej(new Error('aborted'))) + return new Promise((_, rej) => rej(new AbortedError())) } /** diff --git a/sdk/package/lib/util/GetSslCertificate.ts b/sdk/package/lib/util/GetSslCertificate.ts index df60b3b1f..b9967bf22 100644 --- a/sdk/package/lib/util/GetSslCertificate.ts +++ b/sdk/package/lib/util/GetSslCertificate.ts @@ -1,5 +1,6 @@ import { T } from '..' import { Effects } from '../../../base/lib/Effects' +import { AbortedError } from '../../../base/lib/util/AbortedError' import { DropGenerator, DropPromise } from '../../../base/lib/util/Drop' export class GetSslCertificate { @@ -50,7 +51,7 @@ export class GetSslCertificate { }) await waitForNext } - return new Promise((_, rej) => rej(new Error('aborted'))) + return new Promise((_, rej) => rej(new AbortedError())) } /** diff --git a/sdk/package/lib/util/SubContainer.ts b/sdk/package/lib/util/SubContainer.ts index 41b70f0c5..70c587eea 100644 --- a/sdk/package/lib/util/SubContainer.ts +++ b/sdk/package/lib/util/SubContainer.ts @@ -69,6 +69,14 @@ async function bind( await execFile('mount', [...args, from, to]) } +/** + * Interface representing an isolated container environment for running service processes. + * + * Provides methods for executing commands, spawning processes, mounting filesystems, + * and writing files within the container's rootfs. Comes in two flavors: + * {@link SubContainerOwned} (owns the underlying filesystem) and + * {@link SubContainerRc} (reference-counted handle to a shared container). 
+ */ export interface SubContainer< Manifest extends T.SDKManifest, Effects extends T.Effects = T.Effects, @@ -84,6 +92,11 @@ export interface SubContainer< */ subpath(path: string): string + /** + * Apply filesystem mounts (volumes, assets, dependencies, backups) to this subcontainer. + * @param mounts - The Mounts configuration to apply + * @returns This subcontainer instance for chaining + */ mount( mounts: Effects extends BackupEffects ? Mounts< @@ -96,6 +109,7 @@ export interface SubContainer< : Mounts, ): Promise + /** Destroy this subcontainer and clean up its filesystem */ destroy: () => Promise /** @@ -136,11 +150,22 @@ export interface SubContainer< stderr: string | Buffer }> + /** + * Launch a command as the init (PID 1) process of the subcontainer. + * Replaces the current leader process. + * @param command - The command and arguments to execute + * @param options - Optional environment, working directory, and user overrides + */ launch( command: string[], options?: CommandOptions, ): Promise + /** + * Spawn a command inside the subcontainer as a non-init process. + * @param command - The command and arguments to execute + * @param options - Optional environment, working directory, user, and stdio overrides + */ spawn( command: string[], options?: CommandOptions & StdioOptions, @@ -162,8 +187,13 @@ export interface SubContainer< options?: Parameters[2], ): Promise + /** + * Create a reference-counted handle to this subcontainer. + * The underlying container is only destroyed when all handles are released. + */ rc(): SubContainerRc + /** Returns true if this is an owned subcontainer (not a reference-counted handle) */ isOwned(): this is SubContainerOwned } @@ -679,6 +709,12 @@ export class SubContainerOwned< } } +/** + * A reference-counted handle to a {@link SubContainerOwned}. + * + * Multiple `SubContainerRc` instances can share one underlying subcontainer. + * The subcontainer is destroyed only when the last reference is released via `destroy()`. 
+ */ export class SubContainerRc< Manifest extends T.SDKManifest, Effects extends T.Effects = T.Effects, @@ -901,14 +937,17 @@ export type StdioOptions = { stdio?: cp.IOType } +/** UID/GID mapping for mount id-remapping (see kernel idmappings docs) */ export type IdMap = { fromId: number; toId: number; range: number } +/** Union of all mount option types supported by the subcontainer runtime */ export type MountOptions = | MountOptionsVolume | MountOptionsAssets | MountOptionsPointer | MountOptionsBackup +/** Mount options for binding a service volume into a subcontainer */ export type MountOptionsVolume = { type: 'volume' volumeId: string @@ -918,6 +957,7 @@ export type MountOptionsVolume = { idmap: IdMap[] } +/** Mount options for binding packaged static assets into a subcontainer */ export type MountOptionsAssets = { type: 'assets' subpath: string | null @@ -925,6 +965,7 @@ export type MountOptionsAssets = { idmap: { fromId: number; toId: number; range: number }[] } +/** Mount options for binding a dependency package's volume into a subcontainer */ export type MountOptionsPointer = { type: 'pointer' packageId: string @@ -934,6 +975,7 @@ export type MountOptionsPointer = { idmap: { fromId: number; toId: number; range: number }[] } +/** Mount options for binding the backup directory into a subcontainer */ export type MountOptionsBackup = { type: 'backup' subpath: string | null @@ -944,6 +986,10 @@ function wait(time: number) { return new Promise((resolve) => setTimeout(resolve, time)) } +/** + * Error thrown when a subcontainer command exits with a non-zero code or signal. + * Contains the full result including stdout, stderr, exit code, and exit signal. 
+ */ export class ExitError extends Error { constructor( readonly command: string, diff --git a/sdk/package/lib/util/fileHelper.ts b/sdk/package/lib/util/fileHelper.ts index 33d3504ad..6b428edfd 100644 --- a/sdk/package/lib/util/fileHelper.ts +++ b/sdk/package/lib/util/fileHelper.ts @@ -1,10 +1,10 @@ -import * as matches from 'ts-matches' +import { z } from 'zod' import * as YAML from 'yaml' import * as TOML from '@iarna/toml' import * as INI from 'ini' import * as T from '../../../base/lib/types' import * as fs from 'node:fs/promises' -import { asError, deepEqual } from '../../../base/lib/util' +import { AbortedError, asError, deepEqual } from '../../../base/lib/util' import { DropGenerator, DropPromise } from '../../../base/lib/util/Drop' import { PathBase } from './Volume' @@ -84,9 +84,22 @@ function filterUndefined(a: A): A { return a } -export type Transformers = { +/** + * Bidirectional transformers for converting between the raw file format and + * the application-level data type. Used with FileHelper factory methods. + * + * @typeParam Raw - The native type the file format parses to (e.g. 
`Record` for JSON) + * @typeParam Transformed - The application-level type after transformation + */ +export type Transformers< + Raw = unknown, + Transformed = unknown, + Validated extends Transformed = Transformed, +> = { + /** Transform raw parsed data into the application type */ onRead: (value: Raw) => Transformed - onWrite: (value: Transformed) => Raw + /** Transform application data back into the raw format for writing */ + onWrite: (value: Validated) => Raw } type ToPath = string | { base: PathBase; subpath: string } @@ -97,7 +110,7 @@ function toPath(path: ToPath): string { return path.base.subpath(path.subpath) } -type Validator = matches.Validator | matches.Validator +type Validator<_T, U> = z.ZodType type ReadType = { once: () => Promise @@ -276,7 +289,7 @@ export class FileHelper { await onCreated(this.path).catch((e) => console.error(asError(e))) } } - return new Promise((_, rej) => rej(new Error('aborted'))) + return new Promise((_, rej) => rej(new AbortedError())) } private readOnChange( @@ -343,6 +356,19 @@ export class FileHelper { ) } + /** + * Create a reactive reader for this file. 
+ * + * Returns an object with multiple read strategies: + * - `once()` - Read the file once and return the parsed value + * - `const(effects)` - Read once but re-read when the file changes (for use with constRetry) + * - `watch(effects)` - Async generator yielding new values on each file change + * - `onChange(effects, callback)` - Fire a callback on each file change + * - `waitFor(effects, predicate)` - Block until the file value satisfies a predicate + * + * @param map - Optional transform function applied after validation + * @param eq - Optional equality function to deduplicate watch emissions + */ read(): ReadType read( map: (value: A) => B, @@ -461,7 +487,7 @@ export class FileHelper { toFile: (dataIn: Raw) => string, fromFile: (rawData: string) => Raw, validate: (data: Transformed) => A, - transformers: Transformers | undefined, + transformers: Transformers | undefined, ) { return FileHelper.raw( path, @@ -471,7 +497,12 @@ export class FileHelper { } return toFile(inData as any as Raw) }, - fromFile, + (fileData) => { + if (transformers) { + return transformers.onRead(fromFile(fileData)) + } + return fromFile(fileData) + }, validate as (a: unknown) => A, ) } @@ -487,19 +518,19 @@ export class FileHelper { static string( path: ToPath, shape: Validator, - transformers: Transformers, + transformers: Transformers, ): FileHelper static string( path: ToPath, shape?: Validator, - transformers?: Transformers, + transformers?: Transformers, ) { return FileHelper.rawTransformed( path, (inData) => inData, (inString) => inString, (data) => - (shape || (matches.string as Validator)).unsafeCast( + (shape || (z.string() as unknown as Validator)).parse( data, ), transformers, @@ -509,16 +540,22 @@ export class FileHelper { /** * Create a File Helper for a .json file. 
*/ - static json( + static json(path: ToPath, shape: Validator): FileHelper + static json( path: ToPath, shape: Validator, - transformers?: Transformers, + transformers: Transformers, + ): FileHelper + static json( + path: ToPath, + shape: Validator, + transformers?: Transformers, ) { return FileHelper.rawTransformed( path, (inData) => JSON.stringify(inData, null, 2), (inString) => JSON.parse(inString), - (data) => shape.unsafeCast(data), + (data) => shape.parse(data), transformers, ) } @@ -533,18 +570,18 @@ export class FileHelper { static yaml>( path: ToPath, shape: Validator, - transformers: Transformers, Transformed>, + transformers: Transformers, Transformed, A>, ): FileHelper static yaml>( path: ToPath, shape: Validator, - transformers?: Transformers, Transformed>, + transformers?: Transformers, Transformed, A>, ) { return FileHelper.rawTransformed, Transformed>( path, (inData) => YAML.stringify(inData, null, 2), (inString) => YAML.parse(inString), - (data) => shape.unsafeCast(data), + (data) => shape.parse(data), transformers, ) } @@ -559,22 +596,27 @@ export class FileHelper { static toml>( path: ToPath, shape: Validator, - transformers: Transformers, Transformed>, + transformers: Transformers, Transformed, A>, ): FileHelper static toml>( path: ToPath, shape: Validator, - transformers?: Transformers, Transformed>, + transformers?: Transformers, Transformed, A>, ) { return FileHelper.rawTransformed, Transformed>( path, (inData) => TOML.stringify(inData as TOML.JsonMap), (inString) => TOML.parse(inString), - (data) => shape.unsafeCast(data), + (data) => shape.parse(data), transformers, ) } + /** + * Create a File Helper for a .ini file. + * + * Supports optional encode/decode options and custom transformers. 
+ */ static ini>( path: ToPath, shape: Validator, A>, @@ -584,23 +626,28 @@ export class FileHelper { path: ToPath, shape: Validator, options: INI.EncodeOptions & INI.DecodeOptions, - transformers: Transformers, Transformed>, + transformers: Transformers, Transformed, A>, ): FileHelper static ini>( path: ToPath, shape: Validator, options?: INI.EncodeOptions & INI.DecodeOptions, - transformers?: Transformers, Transformed>, + transformers?: Transformers, Transformed, A>, ): FileHelper { return FileHelper.rawTransformed, Transformed>( path, (inData) => INI.stringify(filterUndefined(inData), options), (inString) => INI.parse(inString, options), - (data) => shape.unsafeCast(data), + (data) => shape.parse(data), transformers, ) } + /** + * Create a File Helper for a .env file (KEY=VALUE format, one per line). + * + * Lines starting with `#` are treated as comments and ignored on read. + */ static env>( path: ToPath, shape: Validator, A>, @@ -608,12 +655,12 @@ export class FileHelper { static env>( path: ToPath, shape: Validator, - transformers: Transformers, Transformed>, + transformers: Transformers, Transformed, A>, ): FileHelper static env>( path: ToPath, shape: Validator, - transformers?: Transformers, Transformed>, + transformers?: Transformers, Transformed, A>, ) { return FileHelper.rawTransformed, Transformed>( path, @@ -632,7 +679,7 @@ export class FileHelper { return [line.slice(0, pos), line.slice(pos + 1)] }), ), - (data) => shape.unsafeCast(data), + (data) => shape.parse(data), transformers, ) } diff --git a/sdk/package/lib/version/VersionGraph.ts b/sdk/package/lib/version/VersionGraph.ts index 396497de5..84d24269e 100644 --- a/sdk/package/lib/version/VersionGraph.ts +++ b/sdk/package/lib/version/VersionGraph.ts @@ -12,6 +12,11 @@ import { import { Graph, Vertex, once } from '../util' import { IMPOSSIBLE, VersionInfo } from './VersionInfo' +/** + * Read the current data version from the effects system. 
+ * @param effects - The effects context + * @returns The parsed ExtendedVersion or VersionRange, or null if no version is set + */ export async function getDataVersion(effects: T.Effects) { const versionStr = await effects.getDataVersion() if (!versionStr) return null @@ -22,6 +27,11 @@ export async function getDataVersion(effects: T.Effects) { } } +/** + * Persist a data version to the effects system. + * @param effects - The effects context + * @param version - The version to set, or null to clear it + */ export async function setDataVersion( effects: T.Effects, version: ExtendedVersion | VersionRange | null, @@ -37,6 +47,14 @@ function isRange(v: ExtendedVersion | VersionRange): v is VersionRange { return 'satisfiedBy' in v } +/** + * Check whether two version specifiers overlap (i.e. share at least one common version). + * Works with any combination of ExtendedVersion and VersionRange. + * + * @param a - First version or range + * @param b - Second version or range + * @returns True if the two specifiers overlap + */ export function overlaps( a: ExtendedVersion | VersionRange, b: ExtendedVersion | VersionRange, @@ -49,6 +67,16 @@ export function overlaps( ) } +/** + * A directed graph of service versions and their migration paths. + * + * Builds a graph from {@link VersionInfo} definitions, then uses shortest-path + * search to find and execute migration sequences between any two versions. + * Implements both {@link InitScript} (for install/update migrations) and + * {@link UninitScript} (for uninstall/downgrade migrations). 
+ * + * @typeParam CurrentVersion - The string literal type of the current service version + */ export class VersionGraph implements InitScript, UninitScript { @@ -58,14 +86,13 @@ export class VersionGraph ExtendedVersion | VersionRange, ((opts: { effects: T.Effects }) => Promise) | undefined > + /** Dump the version graph as a human-readable string for debugging */ dump(): string { return this.graph().dump((metadata) => metadata?.toString()) } private constructor( readonly current: VersionInfo, versions: Array>, - private readonly preInstall?: InitScriptOrFn<'install'>, - private readonly uninstall?: UninitScript | UninitFn, ) { this.graph = once(() => { const graph = new Graph< @@ -167,25 +194,21 @@ export class VersionGraph static of< CurrentVersion extends string, OtherVersions extends Array>, - >(options: { - current: VersionInfo - other: OtherVersions - /** - * A script to run only on fresh install - */ - preInstall?: InitScriptOrFn<'install'> - /** - * A script to run only on uninstall - */ - uninstall?: UninitScriptOrFn - }) { - return new VersionGraph( - options.current, - options.other, - options.preInstall, - options.uninstall, - ) + >(options: { current: VersionInfo; other: OtherVersions }) { + return new VersionGraph(options.current, options.other) } + /** + * Execute the shortest migration path between two versions. + * + * Finds the shortest path in the version graph from `from` to `to`, + * executes each migration step in order, and updates the data version after each step. 
+ * + * @param options.effects - The effects context + * @param options.from - The source version or range + * @param options.to - The target version or range + * @returns The final data version after migration + * @throws If no migration path exists between the two versions + */ async migrate({ effects, from, @@ -235,6 +258,10 @@ export class VersionGraph `cannot migrate from ${from.toString()} to ${to.toString()}`, ) } + /** + * Compute the version range from which the current version can be reached via migration. + * Uses reverse breadth-first search from the current version vertex. + */ canMigrateFrom = once(() => Array.from( this.graph().reverseBreadthFirstSearch((v) => @@ -252,6 +279,10 @@ export class VersionGraph ) .normalize(), ) + /** + * Compute the version range that the current version can migrate to. + * Uses forward breadth-first search from the current version vertex. + */ canMigrateTo = once(() => Array.from( this.graph().breadthFirstSearch((v) => @@ -270,7 +301,12 @@ export class VersionGraph .normalize(), ) - async init(effects: T.Effects, kind: InitKind): Promise { + /** + * InitScript implementation: migrate from the stored data version to the current version. + * If no data version exists (fresh install), sets it to the current version. + * @param effects - The effects context + */ + async init(effects: T.Effects): Promise { const from = await getDataVersion(effects) if (from) { await this.migrate({ @@ -279,14 +315,17 @@ export class VersionGraph to: this.currentVersion(), }) } else { - kind = 'install' // implied by !dataVersion - if (this.preInstall) - if ('init' in this.preInstall) await this.preInstall.init(effects, kind) - else await this.preInstall(effects, kind) await effects.setDataVersion({ version: this.current.options.version }) } } + /** + * UninitScript implementation: migrate from the current data version to the target version. + * Used during uninstall or downgrade to prepare data for the target version. 
+ * + * @param effects - The effects context + * @param target - The target version to migrate to, or null to clear the data version + */ async uninit( effects: T.Effects, target: VersionRange | ExtendedVersion | null, @@ -300,11 +339,6 @@ export class VersionGraph to: target, }) } - } else { - if (this.uninstall) - if ('uninit' in this.uninstall) - await this.uninstall.uninit(effects, target) - else await this.uninstall(effects, target) } await setDataVersion(effects, target) } diff --git a/sdk/package/lib/version/VersionInfo.ts b/sdk/package/lib/version/VersionInfo.ts index 9a6cb4e78..64b837a83 100644 --- a/sdk/package/lib/version/VersionInfo.ts +++ b/sdk/package/lib/version/VersionInfo.ts @@ -1,8 +1,17 @@ import { ValidateExVer } from '../../../base/lib/exver' import * as T from '../../../base/lib/types' +/** + * Sentinel value indicating that a migration in a given direction is not possible. + * Use this for `migrations.up` or `migrations.down` to prevent migration. + */ export const IMPOSSIBLE: unique symbol = Symbol('IMPOSSIBLE') +/** + * Configuration options for a single service version definition. + * + * @typeParam Version - The string literal exver version number + */ export type VersionOptions = { /** The exver-compliant version number */ version: Version & ValidateExVer @@ -33,6 +42,14 @@ export type VersionOptions = { } } +/** + * Represents a single version of the service, including its release notes, + * migration scripts, and backwards-compatibility declarations. + * + * By convention, each version gets its own file (e.g. `versions/v1_0_0.ts`). 
+ * + * @typeParam Version - The string literal exver version number + */ export class VersionInfo { private _version: null | Version = null private constructor( diff --git a/sdk/package/package-lock.json b/sdk/package/package-lock.json index e89921bdc..b6bf59802 100644 --- a/sdk/package/package-lock.json +++ b/sdk/package/package-lock.json @@ -1,12 +1,12 @@ { "name": "@start9labs/start-sdk", - "version": "0.4.0-beta.48", + "version": "0.4.0-beta.55", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@start9labs/start-sdk", - "version": "0.4.0-beta.48", + "version": "0.4.0-beta.55", "license": "MIT", "dependencies": { "@iarna/toml": "^3.0.0", @@ -17,8 +17,9 @@ "ini": "^5.0.0", "isomorphic-fetch": "^3.0.0", "mime": "^4.0.7", - "ts-matches": "^6.3.2", - "yaml": "^2.7.1" + "yaml": "^2.7.1", + "zod": "^4.3.6", + "zod-deep-partial": "^1.2.0" }, "devDependencies": { "@types/jest": "^29.4.0", @@ -4815,12 +4816,6 @@ "node": ">=10" } }, - "node_modules/ts-matches": { - "version": "6.3.2", - "resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-6.3.2.tgz", - "integrity": "sha512-UhSgJymF8cLd4y0vV29qlKVCkQpUtekAaujXbQVc729FezS8HwqzepqvtjzQ3HboatIqN/Idor85O2RMwT7lIQ==", - "license": "MIT" - }, "node_modules/ts-morph": { "version": "18.0.0", "resolved": "https://registry.npmjs.org/ts-morph/-/ts-morph-18.0.0.tgz", @@ -5232,6 +5227,25 @@ "funding": { "url": "https://github.com/sponsors/sindresorhus" } + }, + "node_modules/zod": { + "version": "4.3.6", + "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", + "integrity": "sha512-rftlrkhHZOcjDwkGlnUtZZkvaPHCsDATp4pGpuOOMDaTdDDXF91wuVDJoWoPsKX/3YPQ5fHuF3STjcYyKr+Qhg==", + "license": "MIT", + "peer": true, + "funding": { + "url": "https://github.com/sponsors/colinhacks" + } + }, + "node_modules/zod-deep-partial": { + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/zod-deep-partial/-/zod-deep-partial-1.4.4.tgz", + "integrity": 
"sha512-aWkPl7hVStgE01WzbbSxCgX4O+sSpgt8JOjvFUtMTF75VgL6MhWQbiZi+AWGN85SfSTtI9gsOtL1vInoqfDVaA==", + "license": "MIT", + "peerDependencies": { + "zod": "^4.1.13" + } } } } diff --git a/sdk/package/package.json b/sdk/package/package.json index d7c5a8b7e..31265ba99 100644 --- a/sdk/package/package.json +++ b/sdk/package/package.json @@ -1,6 +1,6 @@ { "name": "@start9labs/start-sdk", - "version": "0.4.0-beta.48", + "version": "0.4.0-beta.55", "description": "Software development kit to facilitate packaging services for StartOS", "main": "./package/lib/index.js", "types": "./package/lib/index.d.ts", @@ -31,16 +31,17 @@ }, "homepage": "https://github.com/Start9Labs/start-os#readme", "dependencies": { - "isomorphic-fetch": "^3.0.0", - "mime": "^4.0.7", - "ts-matches": "^6.3.2", - "yaml": "^2.7.1", - "deep-equality-data-structures": "^2.0.0", - "ini": "^5.0.0", - "@types/ini": "^4.1.1", "@iarna/toml": "^3.0.0", "@noble/curves": "^1.8.2", - "@noble/hashes": "^1.7.2" + "@noble/hashes": "^1.7.2", + "@types/ini": "^4.1.1", + "deep-equality-data-structures": "^2.0.0", + "ini": "^5.0.0", + "isomorphic-fetch": "^3.0.0", + "mime": "^4.0.7", + "yaml": "^2.7.1", + "zod": "^4.3.6", + "zod-deep-partial": "^1.2.0" }, "prettier": { "trailingComma": "all", diff --git a/sdk/package/scripts/oldSpecToBuilder.ts b/sdk/package/scripts/oldSpecToBuilder.ts deleted file mode 100644 index c8a275d57..000000000 --- a/sdk/package/scripts/oldSpecToBuilder.ts +++ /dev/null @@ -1,384 +0,0 @@ -import * as fs from "fs" - -// https://stackoverflow.com/questions/2970525/converting-any-string-into-camel-case -export function camelCase(value: string) { - return value - .replace(/([\(\)\[\]])/g, "") - .replace(/^([A-Z])|[\s-_](\w)/g, function (match, p1, p2, offset) { - if (p2) return p2.toUpperCase() - return p1.toLowerCase() - }) -} - -export async function oldSpecToBuilder( - file: string, - inputData: Promise | any, - options?: Parameters[1], -) { - await fs.writeFile( - file, - await 
makeFileContentFromOld(inputData, options), - (err) => console.error(err), - ) -} - -function isString(x: unknown): x is string { - return typeof x === "string" -} - -export default async function makeFileContentFromOld( - inputData: Promise | any, - { StartSdk = "start-sdk", nested = true } = {}, -) { - const outputLines: string[] = [] - outputLines.push(` -import { sdk } from "${StartSdk}" -const {InputSpec, List, Value, Variants} = sdk -`) - const data = await inputData - - const namedConsts = new Set(["InputSpec", "Value", "List"]) - const inputSpecName = newConst("inputSpecSpec", convertInputSpec(data)) - outputLines.push(`export type InputSpecSpec = typeof ${inputSpecName}._TYPE;`) - - return outputLines.join("\n") - - function newConst(key: string, data: string, type?: string) { - const variableName = getNextConstName(camelCase(key)) - outputLines.push( - `export const ${variableName}${!type ? "" : `: ${type}`} = ${data};`, - ) - return variableName - } - function maybeNewConst(key: string, data: string) { - if (nested) return data - return newConst(key, data) - } - function convertInputSpecInner(data: any) { - let answer = "{" - for (const [key, value] of Object.entries(data)) { - const variableName = maybeNewConst(key, convertValueSpec(value)) - - answer += `${JSON.stringify(key)}: ${variableName},` - } - return `${answer}}` - } - - function convertInputSpec(data: any) { - return `InputSpec.of(${convertInputSpecInner(data)})` - } - function convertValueSpec(value: any): string { - switch (value.type) { - case "string": { - if (value.textarea) { - return `${rangeToTodoComment( - value?.range, - )}Value.textarea(${JSON.stringify( - { - name: value.name || null, - description: value.description || null, - warning: value.warning || null, - required: !(value.nullable || false), - default: value.default, - placeholder: value.placeholder || null, - minLength: null, - maxLength: null, - minRows: 3, - maxRows: 6, - }, - null, - 2, - )})` - } - return 
`${rangeToTodoComment(value?.range)}Value.text(${JSON.stringify( - { - name: value.name || null, - default: value.default || null, - required: !value.nullable, - description: value.description || null, - warning: value.warning || null, - masked: value.masked || false, - placeholder: value.placeholder || null, - inputmode: "text", - patterns: value.pattern - ? [ - { - regex: value.pattern, - description: value["pattern-description"], - }, - ] - : [], - minLength: null, - maxLength: null, - }, - null, - 2, - )})` - } - case "number": { - return `${rangeToTodoComment( - value?.range, - )}Value.number(${JSON.stringify( - { - name: value.name || null, - description: value.description || null, - warning: value.warning || null, - default: value.default || null, - required: !value.nullable, - min: null, - max: null, - step: null, - integer: value.integral || false, - units: value.units || null, - placeholder: value.placeholder || null, - }, - null, - 2, - )})` - } - case "boolean": { - return `Value.toggle(${JSON.stringify( - { - name: value.name || null, - default: value.default || false, - description: value.description || null, - warning: value.warning || null, - }, - null, - 2, - )})` - } - case "enum": { - const allValueNames = new Set([ - ...(value?.["values"] || []), - ...Object.keys(value?.["value-names"] || {}), - ]) - const values = Object.fromEntries( - Array.from(allValueNames) - .filter(isString) - .map((key) => [key, value?.spec?.["value-names"]?.[key] || key]), - ) - return `Value.select(${JSON.stringify( - { - name: value.name || null, - description: value.description || null, - warning: value.warning || null, - default: value.default, - values, - }, - null, - 2, - )} as const)` - } - case "object": { - const specName = maybeNewConst( - value.name + "_spec", - convertInputSpec(value.spec), - ) - return `Value.object({ - name: ${JSON.stringify(value.name || null)}, - description: ${JSON.stringify(value.description || null)}, - }, ${specName})` - } - case 
"union": { - const variants = maybeNewConst( - value.name + "_variants", - convertVariants(value.variants, value.tag["variant-names"] || {}), - ) - - return `Value.union({ - name: ${JSON.stringify(value.name || null)}, - description: ${JSON.stringify(value.tag.description || null)}, - warning: ${JSON.stringify(value.tag.warning || null)}, - default: ${JSON.stringify(value.default)}, - variants: ${variants}, - })` - } - case "list": { - if (value.subtype === "enum") { - const allValueNames = new Set([ - ...(value?.spec?.["values"] || []), - ...Object.keys(value?.spec?.["value-names"] || {}), - ]) - const values = Object.fromEntries( - Array.from(allValueNames) - .filter(isString) - .map((key: string) => [ - key, - value?.spec?.["value-names"]?.[key] ?? key, - ]), - ) - return `Value.multiselect(${JSON.stringify( - { - name: value.name || null, - minLength: null, - maxLength: null, - default: value.default ?? null, - description: value.description || null, - warning: value.warning || null, - values, - }, - null, - 2, - )})` - } - const list = maybeNewConst(value.name + "_list", convertList(value)) - return `Value.list(${list})` - } - case "pointer": { - return `/* TODO deal with point removed point "${value.name}" */null as any` - } - } - throw Error(`Unknown type "${value.type}"`) - } - - function convertList(value: any) { - switch (value.subtype) { - case "string": { - return `${rangeToTodoComment(value?.range)}List.text(${JSON.stringify( - { - name: value.name || null, - minLength: null, - maxLength: null, - default: value.default || null, - description: value.description || null, - warning: value.warning || null, - }, - null, - 2, - )}, ${JSON.stringify({ - masked: value?.spec?.masked || false, - placeholder: value?.spec?.placeholder || null, - patterns: value?.spec?.pattern - ? 
[ - { - regex: value.spec.pattern, - description: value?.spec?.["pattern-description"], - }, - ] - : [], - minLength: null, - maxLength: null, - })})` - } - // case "number": { - // return `${rangeToTodoComment(value?.range)}List.number(${JSON.stringify( - // { - // name: value.name || null, - // minLength: null, - // maxLength: null, - // default: value.default || null, - // description: value.description || null, - // warning: value.warning || null, - // }, - // null, - // 2, - // )}, ${JSON.stringify({ - // integer: value?.spec?.integral || false, - // min: null, - // max: null, - // units: value?.spec?.units || null, - // placeholder: value?.spec?.placeholder || null, - // })})` - // } - case "enum": { - return "/* error!! list.enum */" - } - case "object": { - const specName = maybeNewConst( - value.name + "_spec", - convertInputSpec(value.spec.spec), - ) - return `${rangeToTodoComment(value?.range)}List.obj({ - name: ${JSON.stringify(value.name || null)}, - minLength: ${JSON.stringify(null)}, - maxLength: ${JSON.stringify(null)}, - default: ${JSON.stringify(value.default || null)}, - description: ${JSON.stringify(value.description || null)}, - }, { - spec: ${specName}, - displayAs: ${JSON.stringify(value?.spec?.["display-as"] || null)}, - uniqueBy: ${JSON.stringify(value?.spec?.["unique-by"] || null)}, - })` - } - case "union": { - const variants = maybeNewConst( - value.name + "_variants", - convertVariants( - value.spec.variants, - value.spec["variant-names"] || {}, - ), - ) - const unionValueName = maybeNewConst( - value.name + "_union", - `${rangeToTodoComment(value?.range)} - Value.union({ - name: ${JSON.stringify(value?.spec?.tag?.name || null)}, - description: ${JSON.stringify( - value?.spec?.tag?.description || null, - )}, - warning: ${JSON.stringify(value?.spec?.tag?.warning || null)}, - default: ${JSON.stringify(value?.spec?.default || null)}, - variants: ${variants}, - }) - `, - ) - const listInputSpec = maybeNewConst( - value.name + 
"_list_inputSpec", - ` - InputSpec.of({ - "union": ${unionValueName} - }) - `, - ) - return `${rangeToTodoComment(value?.range)}List.obj({ - name:${JSON.stringify(value.name || null)}, - minLength:${JSON.stringify(null)}, - maxLength:${JSON.stringify(null)}, - default: [], - description: ${JSON.stringify(value.description || null)}, - warning: ${JSON.stringify(value.warning || null)}, - }, { - spec: ${listInputSpec}, - displayAs: ${JSON.stringify(value?.spec?.["display-as"] || null)}, - uniqueBy: ${JSON.stringify(value?.spec?.["unique-by"] || null)}, - })` - } - } - throw new Error(`Unknown subtype "${value.subtype}"`) - } - - function convertVariants( - variants: Record, - variantNames: Record, - ): string { - let answer = "Variants.of({" - for (const [key, value] of Object.entries(variants)) { - const variantSpec = maybeNewConst(key, convertInputSpec(value)) - answer += `"${key}": {name: "${ - variantNames[key] || key - }", spec: ${variantSpec}},` - } - return `${answer}})` - } - - function getNextConstName(name: string, i = 0): string { - const newName = !i ? 
name : name + i - if (namedConsts.has(newName)) { - return getNextConstName(name, i + 1) - } - namedConsts.add(newName) - return newName - } -} - -function rangeToTodoComment(range: string | undefined) { - if (!range) return "" - return `/* TODO: Convert range for this value (${range})*/` -} - -// oldSpecToBuilder( -// "./inputSpec.ts", -// // Put inputSpec here -// {}, -// ) diff --git a/sdk/package/tsconfig.json b/sdk/package/tsconfig.json index cee6dad33..d178c8212 100644 --- a/sdk/package/tsconfig.json +++ b/sdk/package/tsconfig.json @@ -16,5 +16,5 @@ "resolveJsonModule": true }, "include": ["lib/**/*"], - "exclude": ["lib/**/*.spec.ts", "lib/**/*.gen.ts", "list", "node_modules"] + "exclude": ["lib/**/*.spec.ts", "lib/**/*.test.ts", "lib/**/*.gen.ts", "list", "node_modules"] } diff --git a/web/ARCHITECTURE.md b/web/ARCHITECTURE.md new file mode 100644 index 000000000..33d92abcf --- /dev/null +++ b/web/ARCHITECTURE.md @@ -0,0 +1,98 @@ +# Web Architecture + +Angular 20 + TypeScript workspace using [Taiga UI](https://taiga-ui.dev/) component library. + +## API Layer (JSON-RPC) + +All backend communication uses JSON-RPC, not REST. + +- **`HttpService`** (`shared/src/services/http.service.ts`) — Low-level HTTP wrapper. Sends JSON-RPC POST requests via `rpcRequest()`. +- **`ApiService`** (`ui/src/app/services/api/embassy-api.service.ts`) — Abstract class defining 100+ RPC methods. Two implementations: + - `LiveApiService` — Production, calls the real backend + - `MockApiService` — Development with mocks +- **`api.types.ts`** (`ui/src/app/services/api/api.types.ts`) — Namespace `RR` with all request/response type pairs. + +**Calling an RPC endpoint from a component:** + +```typescript +private readonly api = inject(ApiService) + +async doSomething() { + await this.api.someMethod({ param: value }) +} +``` + +The live API handles `x-patch-sequence` headers — after a mutating call, it waits for the PatchDB WebSocket to catch up before resolving. 
This ensures the UI always reflects the result of the call. + +## PatchDB (Reactive State) + +The backend pushes state diffs to the frontend via WebSocket. This is the primary way components get data. + +- **`PatchDbSource`** (`ui/src/app/services/patch-db/patch-db-source.ts`) — Establishes a WebSocket subscription when authenticated. Buffers updates every 250ms. +- **`DataModel`** (`ui/src/app/services/patch-db/data-model.ts`) — TypeScript type for the full database shape (`ui`, `serverInfo`, `packageData`). +- **`PatchDB`** — Injected service. Use `watch$()` to observe specific paths. + +**Watching data in a component:** + +```typescript +private readonly patch = inject>(PatchDB) + +// Watch a specific path — returns Observable, convert to Signal with toSignal() +readonly name = toSignal(this.patch.watch$('ui', 'name')) +readonly status = toSignal(this.patch.watch$('serverInfo', 'statusInfo')) +readonly packages = toSignal(this.patch.watch$('packageData')) +``` + +**In templates:** `{{ name() }}` — signals are called as functions. + +## WebSockets + +Three WebSocket use cases, all opened via `api.openWebsocket$(guid)`: + +1. **PatchDB** — Continuous state patches (managed by `PatchDbSource`) +2. **Logs** — Streamed via `followServerLogs` / `followPackageLogs`, buffered every 1s +3. **Metrics** — Real-time server metrics via `followServerMetrics` + +## Navigation & Routing + +- **Main app** (`ui/src/app/routing.module.ts`) — NgModule-based with guards (`AuthGuard`, `UnauthGuard`, `stateNot()`), lazy loading via `loadChildren`, `PreloadAllModules`. +- **Portal routes** (`ui/src/app/routes/portal/portal.routes.ts`) — Modern array-based routes with `loadChildren` and `loadComponent`. +- **Setup wizard** (`setup-wizard/src/app/app.routes.ts`) — Standalone `loadComponent()` per step. +- Route config uses `bindToComponentInputs: true` — route params bind directly to component `@Input()`. + +## Forms + +Two patterns: + +1. 
**Dynamic (spec-driven)** — `FormService` (`ui/src/app/services/form.service.ts`) generates `FormGroup` from IST (Input Specification Type) schemas. Supports text, textarea, number, color, datetime, object, list, union, toggle, select, multiselect, file. Used for service configuration forms. + +2. **Manual** — Standard Angular `FormGroup`/`FormControl` with validators. Used for login, setup wizard, system settings. + +Form controls live in `ui/src/app/routes/portal/components/form/controls/` — each extends a base `Control` class and uses Taiga input components. + +**Dialog-based forms** use `PolymorpheusComponent` + `TuiDialogContext` for modal rendering. + +## i18n + +- **`i18nPipe`** (`shared/src/i18n/i18n.pipe.ts`) — Translates English keys to the active language. +- **Dictionaries** live in `shared/src/i18n/dictionaries/` (en, es, de, fr, pl). +- Usage in templates: `{{ 'Some English Text' | i18n }}` + +### How dictionaries work + +- **`en.ts`** is the source of truth. Keys are English strings; values are numeric IDs (e.g. `'Domain Health': 748`). +- **Other language files** (`de.ts`, `es.ts`, `fr.ts`, `pl.ts`) use those same numeric IDs as keys, mapping to translated strings (e.g. `748: 'Santé du domaine'`). +- When adding a new i18n key: + 1. Add the English string and next available numeric ID to `en.ts`. + 2. Add the same numeric ID with a proper translation to every other language file. + 3. Always provide real translations, not empty strings. + +## Services & State + +Services often extend `Observable` and expose reactive streams via DI: + +- **`ConnectionService`** — Combines network status + WebSocket readiness +- **`StateService`** — Polls server availability, manages app state (`running`, `initializing`, etc.) 
+- **`AuthService`** — Tracks `isVerified$`, triggers PatchDB start/stop +- **`PatchMonitorService`** — Starts/stops PatchDB based on auth state +- **`PatchDataService`** — Watches entire DB, updates localStorage bootstrap diff --git a/web/CLAUDE.md b/web/CLAUDE.md new file mode 100644 index 000000000..0e33fb3ee --- /dev/null +++ b/web/CLAUDE.md @@ -0,0 +1,111 @@ +# Web — Angular Frontend + +Angular 20 + TypeScript workspace using [Taiga UI](https://taiga-ui.dev/) component library. + +## Projects + +- `projects/ui/` — Main admin interface +- `projects/setup-wizard/` — Initial setup +- `projects/start-tunnel/` — VPN management UI +- `projects/shared/` — Common library (API clients, components, i18n) +- `projects/marketplace/` — Service discovery + +## Development + +```bash +npm ci +npm run start:ui # Dev server with mocks +npm run build:ui # Production build +npm run check # Type check all projects +``` + +## Golden Rules + +1. **Taiga-first.** Use Taiga components, directives, and APIs whenever possible. Avoid hand-rolled HTML/CSS unless absolutely necessary. If Taiga has a component for it, use it. + +2. **Pattern-match.** Nearly anything we build has a similar example elsewhere in this codebase. Search for existing patterns before writing new code. Copy the conventions used in neighboring components. + +3. **When unsure about Taiga, ask or look it up.** Use `WebFetch` against `https://taiga-ui.dev/llms-full.txt` to search for component usage, or ask the user. Taiga docs are authoritative. See [Taiga UI Docs](#taiga-ui-docs) below. + +## Taiga UI Docs + +Taiga provides an LLM-friendly reference at `https://taiga-ui.dev/llms-full.txt` (~2200 lines covering all components with code examples). 
Use `WebFetch` to search it when you need to look up a component, directive, or API: + +``` +WebFetch url=https://taiga-ui.dev/llms-full.txt prompt="How to use TuiTextfield with a select dropdown" +``` + +When implementing something with Taiga, **also check existing code in this project** for local patterns and conventions — Taiga usage here may have project-specific wrappers or style choices. + +## Architecture + +See [ARCHITECTURE.md](ARCHITECTURE.md) for the web architecture: API layer, PatchDB state, WebSockets, routing, forms, i18n, and services. + +## Component Conventions + +- **Standalone components** preferred (no NgModule). Use `imports` array in `@Component`. +- **`export default class`** for route components (enables direct `loadComponent` import). +- **`inject()`** function for DI (not constructor injection). +- **`signal()`** and `computed()`\*\* for local reactive state. +- **`toSignal()`** to convert Observables (e.g., PatchDB watches) to signals. +- **`ChangeDetectionStrategy.OnPush`** on almost all components. +- **`takeUntilDestroyed(inject(DestroyRef))`** for subscription cleanup. 
+ +## Common Taiga Patterns + +### Textfield + Select (dropdown) + +```html + + + + + + + +``` + +Provider to remove the X clear button: + +```typescript +providers: [tuiTextfieldOptionsProvider({ cleaner: signal(false) })] +``` + +### Buttons + +```html + + + +``` + +### Dialogs + +```typescript +// Confirmation +this.dialog.openConfirm({ label: 'Warning', data: { content: '...', yes: 'Confirm', no: 'Cancel' } }) + +// Custom component in dialog +this.dialog.openComponent(new PolymorpheusComponent(MyComponent, injector), { label: 'Title' }) +``` + +### Toggle + +```html + +``` + +### Errors & Tooltips + +```html + + +``` + +### Layout + +```html + + + +``` diff --git a/web/CONTRIBUTING.md b/web/CONTRIBUTING.md new file mode 100644 index 000000000..15756a89d --- /dev/null +++ b/web/CONTRIBUTING.md @@ -0,0 +1,115 @@ +# Contributing to StartOS Web + +For general environment setup (Node.js, cloning, etc.), see the root [CONTRIBUTING.md](../CONTRIBUTING.md). + +## Web Setup + +```sh +cd web +npm ci +npm run build:deps +``` + +#### Configure `config.json` + +```sh +cp config-sample.json config.json +``` + +- By default, "useMocks" is set to `true`. +- Use "maskAs" to mock the host from which the web UI is served. Valid values are `tor`, `local`, `localhost`, `ipv4`, `ipv6`, and `clearnet`. +- Use "maskAsHttps" to mock the protocol over which the web UI is served. `true` means https; `false` means http. + +## Development Server + +You can develop using mocks (recommended to start) or against a live server. Code changes will live reload the browser. + +### Using mocks + +```sh +npm run start:setup +npm run start:ui +``` + +### Proxying to a live server + +1. In `config.json`, set "useMocks" to `false` + +2. Copy and configure the proxy config: + +```sh +cp proxy.conf-sample.json proxy.conf.json +``` + +3. Replace every instance of `` with the hostname of your remote server + +4. 
Start the proxy dev server: + +```sh +npm run start:ui:proxy +``` + +## Translations + +### Currently supported languages + +- English +- Spanish +- Polish +- German +- French + + +### Adding a new translation + +When prompting AI to translate the English dictionary, it is recommended to only give it 50-100 entries at a time. Beyond that it struggles. Remember to sanity check the results and ensure keys/values align in the resulting dictionary. + +#### Sample AI prompt + +Translate the English dictionary below into ``. Format the result as a javascript object with the numeric values of the English dictionary as keys in the translated dictionary. These translations are for the web UI of StartOS, a graphical server operating system optimized for self-hosting. Comments may be included in the English dictionary to provide additional context. + +#### Adding to StartOS + +- In the `shared` project: + 1. Create a new file (`language.ts`) in `src/i18n/dictionaries` + 2. Update the `I18N_PROVIDERS` array in `src/i18n/i18n.providers.ts` (2 places) + 3. Update the `languages` array in `/src/i18n/i18n.service.ts` + 4. Add the name of the new language (lowercase) to the English dictionary in `src/i18n/dictionaries/en.ts`. Add the translations of the new language's name (lowercase) to ALL non-English dictionaries in `src/i18n/dictionaries/` (e.g., `es.ts`, `pl.ts`, etc.). + + If you have any doubt about the above steps, check the [French example PR](https://github.com/Start9Labs/start-os/pull/2945/files) for reference. + +- Here in this CONTRIBUTING.md: + 1. Add the language to the list of supported languages above + +### Updating the English dictionary + +#### Sample AI prompt + +Translate the English dictionary below into the languages beneath the dictionary. Format the result as a javascript object with translated language as keys, mapping to a javascript object with the numeric values of the English dictionary as keys and the translations as values. 
These translations are for the web UI of StartOS, a graphical server operating system optimized for self-hosting. Comments may be included in the English dictionary to provide additional context. + +English dictionary: + +``` +'Hello': 420, +'Goodbye': 421 +``` + +Languages: + +- Spanish +- Polish +- German +- French + +#### Adding to StartOS + +In the `shared` project, copy/paste the translations into their corresponding dictionaries in `/src/i18n/dictionaries`. diff --git a/web/README.md b/web/README.md index 7341d2907..15d32a741 100644 --- a/web/README.md +++ b/web/README.md @@ -1,155 +1,20 @@ # StartOS Web -StartOS web UIs are written in [Angular/Typescript](https://angular.io/docs) and leverage the [Ionic Framework](https://ionicframework.com/) component library. +[Angular](https://angular.dev/) + TypeScript workspace using the [Taiga UI](https://taiga-ui.dev/) component library. -StartOS conditionally serves one of three Web UIs, depending on the state of the system and user choice. +## Applications -- **setup-wizard** - UI for setting up StartOS, served on start.local. -- **ui** - primary UI for administering StartOS, served on various hosts unique to the instance. +StartOS serves one of these UIs depending on the state of the system: -Additionally, there are two libraries for shared code: +- **ui** — Primary admin interface for managing StartOS, served on hosts unique to the instance. +- **setup-wizard** — Initial setup UI, served on `start.local`. +- **start-tunnel** — VPN/tunnel management UI. -- **marketplace** - library code shared between the StartOS UI and Start9's [brochure marketplace](https://github.com/Start9Labs/brochure-marketplace). -- **shared** - library code shared between the various web UIs and marketplace lib. +## Libraries -## Environment Setup +- **shared** — Common code shared between all web UIs (API clients, components, i18n). +- **marketplace** — Library code for service discovery, shared between the StartOS UI and the marketplace. 
-#### Install NodeJS and NPM +## Contributing -- [Install nodejs](https://nodejs.org/en/) -- [Install npm](https://www.npmjs.com/get-npm) - -#### Check that your versions match the ones below - -```sh -node --version -v22.15.0 - -npm --version -v11.3.0 -``` - -#### Install and enable the Prettier extension for your text editor - -#### Clone StartOS and load submodules - -```sh -git clone https://github.com/Start9Labs/start-os.git -cd start-os -git submodule update --init --recursive -``` - -#### Move to web directory and install dependencies - -```sh -cd web -npm ci -npm run build:deps -``` - -> Note if you are on **Windows** you need to install `make` for these scripts to work. Easiest way to do so is to install [Chocolatey](https://chocolatey.org/install) and then run `choco install make`. - -#### Copy `config-sample.json` to a new file `config.json`. - -```sh -cp config-sample.json config.json -``` - -- By default, "useMocks" is set to `true`. -- Use "maskAs" to mock the host from which the web UI is served. Valid values are `tor`, `local`, `localhost`, `ipv4`, `ipv6`, and `clearnet`. -- Use "maskAsHttps" to mock the protocol over which the web UI is served. `true` means https; `false` means http. - -## Running the development server - -You can develop using mocks (recommended to start) or against a live server. Either way, any code changes will live reload the development server and refresh the browser page. 
- -### Using mocks - -#### Start the standard development server - -```sh -npm run start:setup -npm run start:ui -``` - -### Proxying to a live server - -#### In `config.json`, set "useMocks" to `false` - -#### Copy `proxy.conf-sample.json` to a new file `proxy.conf.json` - -```sh -cp proxy.conf-sample.json proxy.conf.json -``` - -#### Replace every instance of "\\" with the hostname of your remote server - -#### Start the proxy development server - -```sh -npm run start:ui:proxy -``` - -## Translations - -### Currently supported languages - -- Spanish -- Polish -- German -- French - - -### Adding a new translation - -When prompting AI to translate the English dictionary, it is recommended to only give it 50-100 entries at a time. Beyond that it struggles. Remember to sanity check the results and ensure keys/values align in the resulting dictionary. - -#### Sample AI prompt - -Translate the English dictionary below into ``. Format the result as a javascript object with the numeric values of the English dictionary as keys in the translated dictionary. These translations are for the web UI of StartOS, a graphical server operating system optimized for self-hosting. Comments may be included in the English dictionary to provide additional context. - -#### Adding to StartOS - -- In the `shared` project: - 1. Create a new file (`language.ts`) in `src/i18n/dictionaries` - 2. Update the `I18N_PROVIDERS` array in `src/i18n/i18n.providers.ts` (2 places) - 3. Update the `languages` array in `/src/i18n/i18n.service.ts` - 4. Add the name of the new language (lowercase) to the English dictionary in `src/i18n/dictionaries/en.ts`. Add the translations of the new language’s name (lowercase) to ALL non-English dictionaries in `src/i18n/dictionaries/` (e.g., `es.ts`, `pl.ts`, etc.). - - If you have any doubt about the above steps, check the [French example PR](https://github.com/Start9Labs/start-os/pull/2945/files) for reference. - -- Here in this README: - 1. 
Add the language to the list of supported languages below - -### Updating the English dictionary - -#### Sample AI prompt - -Translate the English dictionary below into the languages beneath the dictionary. Format the result as a javascript object with translated language as keys, mapping to a javascript object with the numeric values of the English dictionary as keys and the translations as values. These translations are for the web UI of StartOS, a graphical server operating system optimized for self-hosting. Comments may be included in the English dictionary to provide additional context. - -English dictionary: - -``` -'Hello': 420, -'Goodby': 421 -``` - -Languages: - -- Spanish -- Polish -- German -- French - -#### Adding to StartOS - -In the `shared` project, copy/past the translations into their corresponding dictionaries in `/src/i18n/dictionaries`. +See [CONTRIBUTING.md](CONTRIBUTING.md) for environment setup, development server instructions, and translation guides. diff --git a/web/package-lock.json b/web/package-lock.json index bb0aca4f2..869b99834 100644 --- a/web/package-lock.json +++ b/web/package-lock.json @@ -1,12 +1,12 @@ { "name": "startos-ui", - "version": "0.4.0-alpha.19", + "version": "0.4.0-alpha.20", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "startos-ui", - "version": "0.4.0-alpha.19", + "version": "0.4.0-alpha.20", "license": "MIT", "dependencies": { "@angular/animations": "^20.3.0", @@ -62,7 +62,6 @@ "pbkdf2": "^3.1.2", "rxjs": "^7.8.2", "tldts": "^7.0.11", - "ts-matches": "^6.3.2", "tslib": "^2.8.1", "uuid": "^8.3.2", "zone.js": "^0.15.0" @@ -126,8 +125,9 @@ "deep-equality-data-structures": "^1.5.0", "isomorphic-fetch": "^3.0.0", "mime": "^4.0.7", - "ts-matches": "^6.3.2", - "yaml": "^2.7.1" + "yaml": "^2.7.1", + "zod": "^4.3.6", + "zod-deep-partial": "^1.2.0" }, "devDependencies": { "@types/jest": "^29.4.0", @@ -366,13 +366,13 @@ } }, "node_modules/@angular-devkit/architect": { - "version": "0.2003.13", - 
"resolved": "https://registry.npmjs.org/@angular-devkit/architect/-/architect-0.2003.13.tgz", - "integrity": "sha512-JyH6Af6PNC1IHJToColFk1RaXDU87mpPjz7M5sWDfn8bC+KBipw6dSdRkCEuw0D9HY1lZkC9EBV9k9GhpvHjCQ==", + "version": "0.2003.16", + "resolved": "https://registry.npmjs.org/@angular-devkit/architect/-/architect-0.2003.16.tgz", + "integrity": "sha512-W7FPVhZzIeHVP/duuKepfZU66LpQ0k9YMHFhrGpzaUuHPOwKmza6+pjVvvti3g6jzT8b1uVlb+XlYgNPZ5jrPQ==", "devOptional": true, "license": "MIT", "dependencies": { - "@angular-devkit/core": "20.3.13", + "@angular-devkit/core": "20.3.16", "rxjs": "7.8.2" }, "engines": { @@ -382,9 +382,9 @@ } }, "node_modules/@angular-devkit/core": { - "version": "20.3.13", - "resolved": "https://registry.npmjs.org/@angular-devkit/core/-/core-20.3.13.tgz", - "integrity": "sha512-/D84T1Caxll3I2sRihPDR9UaWBhF50M+tAX15PdP6uSh/TxwAlLl9p7Rm1bD0mPjPercqaEKA+h9a9qLP16hug==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular-devkit/core/-/core-20.3.16.tgz", + "integrity": "sha512-6L9Lpe3lbkyz32gzqxZGVC8MhXxXht+yV+4LUsb4+6T/mG/V9lW6UTW0dhwVOS3vpWMEwpy75XHT298t7HcKEg==", "license": "MIT", "peer": true, "dependencies": { @@ -410,13 +410,13 @@ } }, "node_modules/@angular-devkit/schematics": { - "version": "20.3.13", - "resolved": "https://registry.npmjs.org/@angular-devkit/schematics/-/schematics-20.3.13.tgz", - "integrity": "sha512-hdMKY4rUTko8xqeWYGnwwDYDomkeOoLsYsP6SdaHWK7hpGvzWsT6Q/aIv8J8NrCYkLu+M+5nLiKOooweUZu3GQ==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular-devkit/schematics/-/schematics-20.3.16.tgz", + "integrity": "sha512-3K8QwTpKjnLo3hIvNzB9sTjrlkeRyMK0TxdwgTbwJseewGhXLl98oBoTCWM2ygtpskiWNpYqXJNIhoslNN65WQ==", "license": "MIT", "peer": true, "dependencies": { - "@angular-devkit/core": "20.3.13", + "@angular-devkit/core": "20.3.16", "jsonc-parser": "3.3.1", "magic-string": "0.30.17", "ora": "8.2.0", @@ -447,9 +447,9 @@ } }, "node_modules/@angular/animations": { - "version": "20.3.15", - "resolved": 
"https://registry.npmjs.org/@angular/animations/-/animations-20.3.15.tgz", - "integrity": "sha512-ikyKfhkxoqQA6JcBN0B9RaN6369sM1XYX81Id0lI58dmWCe7gYfrTp8ejqxxKftl514psQO3pkW8Gn1nJ131Gw==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/animations/-/animations-20.3.16.tgz", + "integrity": "sha512-N83/GFY5lKNyWgPV3xHHy2rb3/eP1ZLzSVI+dmMVbf3jbqwY1YPQcMiAG8UDzaILY1Dkus91kWLF8Qdr3nHAzg==", "license": "MIT", "peer": true, "dependencies": { @@ -459,18 +459,18 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/core": "20.3.15" + "@angular/core": "20.3.16" } }, "node_modules/@angular/build": { - "version": "20.3.13", - "resolved": "https://registry.npmjs.org/@angular/build/-/build-20.3.13.tgz", - "integrity": "sha512-/5pM3ZS+lLkZgA+n6TMmNV8I6t9Ow1C6Vkj6bXqWeOgFDH5LwnIEZFAKzEDBkCGos0m2gPKPcREcDD5tfp9h4g==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/build/-/build-20.3.16.tgz", + "integrity": "sha512-p1W3wwMG1Bs4tkPW7ceXO4woO1KCP28sjfpBJg32dIMW3dYSC+iWNmUkYS/wb4YEkqCV0wd6Apnd98mZjL6rNg==", "dev": true, "license": "MIT", "dependencies": { "@ampproject/remapping": "2.3.0", - "@angular-devkit/architect": "0.2003.13", + "@angular-devkit/architect": "0.2003.16", "@babel/core": "7.28.3", "@babel/helper-annotate-as-pure": "7.27.3", "@babel/helper-split-export-declaration": "7.24.7", @@ -512,7 +512,7 @@ "@angular/platform-browser": "^20.0.0", "@angular/platform-server": "^20.0.0", "@angular/service-worker": "^20.0.0", - "@angular/ssr": "^20.3.13", + "@angular/ssr": "^20.3.16", "karma": "^6.4.0", "less": "^4.2.0", "ng-packagr": "^20.0.0", @@ -578,27 +578,27 @@ } }, "node_modules/@angular/cli": { - "version": "20.3.13", - "resolved": "https://registry.npmjs.org/@angular/cli/-/cli-20.3.13.tgz", - "integrity": "sha512-G78I/HDJULloS2LSqfUfbmBlhDCbcWujIRWfuMnGsRf82TyGA2OEPe3IA/F8MrJfeOzPQim2fMyn24MqHL40Vg==", + "version": "20.3.16", + "resolved": 
"https://registry.npmjs.org/@angular/cli/-/cli-20.3.16.tgz", + "integrity": "sha512-kjGp0ywIWebWrH6U5eCRkS4Tx1D/yMe2iT7DXMfEcLc8iMSrBozEriMJppbot9ou8O2LeEH5d1Nw0efNNo78Kw==", "devOptional": true, "license": "MIT", "peer": true, "dependencies": { - "@angular-devkit/architect": "0.2003.13", - "@angular-devkit/core": "20.3.13", - "@angular-devkit/schematics": "20.3.13", + "@angular-devkit/architect": "0.2003.16", + "@angular-devkit/core": "20.3.16", + "@angular-devkit/schematics": "20.3.16", "@inquirer/prompts": "7.8.2", "@listr2/prompt-adapter-inquirer": "3.0.1", - "@modelcontextprotocol/sdk": "1.24.0", - "@schematics/angular": "20.3.13", + "@modelcontextprotocol/sdk": "1.26.0", + "@schematics/angular": "20.3.16", "@yarnpkg/lockfile": "1.1.0", "algoliasearch": "5.35.0", "ini": "5.0.0", "jsonc-parser": "3.3.1", "listr2": "9.0.1", "npm-package-arg": "13.0.0", - "pacote": "21.0.0", + "pacote": "21.0.4", "resolve": "1.22.10", "semver": "7.7.2", "yargs": "18.0.0", @@ -614,9 +614,9 @@ } }, "node_modules/@angular/common": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/common/-/common-20.3.15.tgz", - "integrity": "sha512-k4mCXWRFiOHK3bUKfWkRQQ8KBPxW8TAJuKLYCsSHPCpMz6u0eA1F0VlrnOkZVKWPI792fOaEAWH2Y4PTaXlUHw==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/common/-/common-20.3.16.tgz", + "integrity": "sha512-GRAziNlntwdnJy3F+8zCOvDdy7id0gITjDnM6P9+n2lXvtDuBLGJKU3DWBbvxcCjtD6JK/g/rEX5fbCxbUHkQQ==", "license": "MIT", "peer": true, "dependencies": { @@ -626,14 +626,14 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/core": "20.3.15", + "@angular/core": "20.3.16", "rxjs": "^6.5.3 || ^7.4.0" } }, "node_modules/@angular/compiler": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/compiler/-/compiler-20.3.15.tgz", - "integrity": "sha512-lMicIAFAKZXa+BCZWs3soTjNQPZZXrF/WMVDinm8dQcggNarnDj4UmXgKSyXkkyqK5SLfnLsXVzrX6ndVT6z7A==", + "version": "20.3.16", + "resolved": 
"https://registry.npmjs.org/@angular/compiler/-/compiler-20.3.16.tgz", + "integrity": "sha512-Pt9Ms9GwTThgzdxWBwMfN8cH1JEtQ2DK5dc2yxYtPSaD+WKmG9AVL1PrzIYQEbaKcWk2jxASUHpEWSlNiwo8uw==", "license": "MIT", "peer": true, "dependencies": { @@ -644,9 +644,9 @@ } }, "node_modules/@angular/compiler-cli": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/compiler-cli/-/compiler-cli-20.3.15.tgz", - "integrity": "sha512-8sJoxodxsfyZ8eJ5r6Bx7BCbazXYgsZ1+dE8t5u5rTQ6jNggwNtYEzkyReoD5xvP+MMtRkos3xpwq4rtFnpI6A==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/compiler-cli/-/compiler-cli-20.3.16.tgz", + "integrity": "sha512-l3xF/fXfJAl/UrNnH9Ufkr79myjMgXdHq1mmmph2UnpeqilRB1b8lC9sLBV9MipQHVn3dwocxMIvtrcryfOaXw==", "dev": true, "license": "MIT", "peer": true, @@ -668,7 +668,7 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/compiler": "20.3.15", + "@angular/compiler": "20.3.16", "typescript": ">=5.8 <6.0" }, "peerDependenciesMeta": { @@ -678,9 +678,9 @@ } }, "node_modules/@angular/core": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/core/-/core-20.3.15.tgz", - "integrity": "sha512-NMbX71SlTZIY9+rh/SPhRYFJU0pMJYW7z/TBD4lqiO+b0DTOIg1k7Pg9ydJGqSjFO1Z4dQaA6TteNuF99TJCNw==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/core/-/core-20.3.16.tgz", + "integrity": "sha512-KSFPKvOmWWLCJBbEO+CuRUXfecX2FRuO0jNi9c54ptXMOPHlK1lIojUnyXmMNzjdHgRug8ci9qDuftvC2B7MKg==", "license": "MIT", "peer": true, "dependencies": { @@ -690,7 +690,7 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/compiler": "20.3.15", + "@angular/compiler": "20.3.16", "rxjs": "^6.5.3 || ^7.4.0", "zone.js": "~0.15.0" }, @@ -704,9 +704,9 @@ } }, "node_modules/@angular/forms": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/forms/-/forms-20.3.15.tgz", - "integrity": 
"sha512-gS5hQkinq52pm/7mxz4yHPCzEcmRWjtUkOVddPH0V1BW/HMni/p4Y6k2KqKBeGb9p8S5EAp6PDxDVLOPukp3mg==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/forms/-/forms-20.3.16.tgz", + "integrity": "sha512-1yzbXpExTqATpVcqA3wGrq4ACFIP3mRxA4pbso5KoJU+/4JfzNFwLsDaFXKpm5uxwchVnj8KM2vPaDOkvtp7NA==", "license": "MIT", "peer": true, "dependencies": { @@ -716,16 +716,16 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/common": "20.3.15", - "@angular/core": "20.3.15", - "@angular/platform-browser": "20.3.15", + "@angular/common": "20.3.16", + "@angular/core": "20.3.16", + "@angular/platform-browser": "20.3.16", "rxjs": "^6.5.3 || ^7.4.0" } }, "node_modules/@angular/language-service": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/language-service/-/language-service-20.3.15.tgz", - "integrity": "sha512-oD5rvAsZYzNqdJqMTYYp6T9yITG6axTI/j64v3qxHe+Y/PlHKfNHXcjENpA+LcR5wq0wtIE+s96APykCq9ouEQ==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/language-service/-/language-service-20.3.16.tgz", + "integrity": "sha512-0A/tSQPq5geIz2mMcZA5fzzbzT39v+ADQksnfPr8htNxtkYWy+EI5+d0+++k59NuvjLY4uTBqhRTRB9b1PKrjw==", "dev": true, "license": "MIT", "engines": { @@ -733,9 +733,9 @@ } }, "node_modules/@angular/platform-browser": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/platform-browser/-/platform-browser-20.3.15.tgz", - "integrity": "sha512-TxRM/wTW/oGXv/3/Iohn58yWoiYXOaeEnxSasiGNS1qhbkcKtR70xzxW6NjChBUYAixz2ERkLURkpx3pI8Q6Dw==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/platform-browser/-/platform-browser-20.3.16.tgz", + "integrity": "sha512-YsrLS6vyS77i4pVHg4gdSBW74qvzHjpQRTVQ5Lv/OxIjJdYYYkMmjNalCNgy1ZuyY6CaLIB11ccxhrNnxfKGOQ==", "license": "MIT", "peer": true, "dependencies": { @@ -745,9 +745,9 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/animations": "20.3.15", - 
"@angular/common": "20.3.15", - "@angular/core": "20.3.15" + "@angular/animations": "20.3.16", + "@angular/common": "20.3.16", + "@angular/core": "20.3.16" }, "peerDependenciesMeta": { "@angular/animations": { @@ -756,9 +756,9 @@ } }, "node_modules/@angular/platform-browser-dynamic": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/platform-browser-dynamic/-/platform-browser-dynamic-20.3.15.tgz", - "integrity": "sha512-RizuRdBt0d6ongQ2y8cr8YsXFyjF8f91vFfpSNw+cFj+oiEmRC1txcWUlH5bPLD9qSDied8qazUi0Tb8VPQDGw==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/platform-browser-dynamic/-/platform-browser-dynamic-20.3.16.tgz", + "integrity": "sha512-5mECCV9YeKH6ue239GXRTGeDSd/eTbM1j8dDejhm5cGnPBhTxRw4o+GgSrWTYtb6VmIYdwUGBTC+wCBphiaQ2A==", "license": "MIT", "dependencies": { "tslib": "^2.3.0" @@ -767,20 +767,20 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/common": "20.3.15", - "@angular/compiler": "20.3.15", - "@angular/core": "20.3.15", - "@angular/platform-browser": "20.3.15" + "@angular/common": "20.3.16", + "@angular/compiler": "20.3.16", + "@angular/core": "20.3.16", + "@angular/platform-browser": "20.3.16" } }, "node_modules/@angular/pwa": { - "version": "20.3.13", - "resolved": "https://registry.npmjs.org/@angular/pwa/-/pwa-20.3.13.tgz", - "integrity": "sha512-k5H5Pn32SeHRMlnTZgu1wsZ4RJVNZjzu9IMVaKn8hqOwlLIqjBeKi1Mlrg/sJk6bWp3VPRzAzR9s/GJQCAWXYQ==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/pwa/-/pwa-20.3.16.tgz", + "integrity": "sha512-F4YFgklMadJ/2kE/A/usj/Mi6X/zWpvveY9TiC8JNgC4HtD1NAg8ypIk21W1Uq17nows0rVxcJGR3ZJdBvbVyQ==", "license": "MIT", "dependencies": { - "@angular-devkit/schematics": "20.3.13", - "@schematics/angular": "20.3.13", + "@angular-devkit/schematics": "20.3.16", + "@schematics/angular": "20.3.16", "parse5-html-rewriting-stream": "8.0.0" }, "engines": { @@ -789,7 +789,7 @@ "yarn": ">= 1.13.0" }, "peerDependencies": { - 
"@angular/cli": "^20.3.13" + "@angular/cli": "^20.3.16" }, "peerDependenciesMeta": { "@angular/cli": { @@ -798,9 +798,9 @@ } }, "node_modules/@angular/router": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/router/-/router-20.3.15.tgz", - "integrity": "sha512-6+qgk8swGSoAu7ISSY//GatAyCP36hEvvUgvjbZgkXLLH9yUQxdo77ij05aJ5s0OyB25q/JkqS8VTY0z1yE9NQ==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/router/-/router-20.3.16.tgz", + "integrity": "sha512-e1LiQFZaajKqc00cY5FboIrWJZSMnZ64GDp5R0UejritYrqorQQQNOqP1W85BMuY2owibMmxVfX+dJg/Mc8PuQ==", "license": "MIT", "peer": true, "dependencies": { @@ -810,16 +810,16 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/common": "20.3.15", - "@angular/core": "20.3.15", - "@angular/platform-browser": "20.3.15", + "@angular/common": "20.3.16", + "@angular/core": "20.3.16", + "@angular/platform-browser": "20.3.16", "rxjs": "^6.5.3 || ^7.4.0" } }, "node_modules/@angular/service-worker": { - "version": "20.3.15", - "resolved": "https://registry.npmjs.org/@angular/service-worker/-/service-worker-20.3.15.tgz", - "integrity": "sha512-HCptODPVWg30XJwSueOz2zqsJjQ1chSscTs7FyIQcfuCTTthO35Lvz2Gtct8/GNHel9QNvvVwA5jrLjsU4dt1A==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@angular/service-worker/-/service-worker-20.3.16.tgz", + "integrity": "sha512-qme+jz3ySWas4JRif6NVaxWStas1XmOaws6EUfpei1AAlK0aBXmuTZtF3YAQDfP6RxLQP/axE0Vm1TpYhNYahA==", "license": "MIT", "peer": true, "dependencies": { @@ -832,18 +832,18 @@ "node": "^20.19.0 || ^22.12.0 || >=24.0.0" }, "peerDependencies": { - "@angular/core": "20.3.15", + "@angular/core": "20.3.16", "rxjs": "^6.5.3 || ^7.4.0" } }, "node_modules/@babel/code-frame": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.27.1.tgz", - "integrity": "sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==", + "version": 
"7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5", "js-tokens": "^4.0.0", "picocolors": "^1.1.1" }, @@ -852,9 +852,9 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.28.5.tgz", - "integrity": "sha512-6uFXyCayocRbqhZOB+6XcuZbkMNimwfVGFji8CTZnCzOHVGvDqzvitu1re2AU5LROliz7eQPhB8CpAMvnx9EjA==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", "dev": true, "license": "MIT", "engines": { @@ -911,14 +911,14 @@ } }, "node_modules/@babel/generator": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.5.tgz", - "integrity": "sha512-3EwLFhZ38J4VyIP6WNtt2kUdW9dokXA9Cr4IVIFHuCpZ3H8/YFOl5JjZHisrn1fATPBmKKqXzDFvh9fUwHz6CQ==", + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/parser": "^7.28.5", - "@babel/types": "^7.28.5", + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", "@jridgewell/gen-mapping": "^0.3.12", "@jridgewell/trace-mapping": "^0.3.28", "jsesc": "^3.0.2" @@ -941,13 +941,13 @@ } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.27.2.tgz", - "integrity": 
"sha512-2+1thGUUWWjLTYTHZWK1n8Yga0ijBz1XAhUXcKy81rd5g6yh7hGqMp45v7cadSbEHc9G3OTv45SyneRN3ps4DQ==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.27.2", + "@babel/compat-data": "^7.28.6", "@babel/helper-validator-option": "^7.27.1", "browserslist": "^4.24.0", "lru-cache": "^5.1.1", @@ -978,29 +978,29 @@ } }, "node_modules/@babel/helper-module-imports": { - "version": "7.27.1", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.27.1.tgz", - "integrity": "sha512-0gSFWUPNXNopqtIPQvlD5WgXYI5GY2kP2cCvoT8kczjbfcfuIljTbcWrulD1CIPIX2gt1wghbDy08yE1p+/r3w==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.3.tgz", - "integrity": "sha512-gytXUbs8k2sXS9PnQptz5o0QnpLL51SwASIORY6XaBKF88nsOT0Zw9szLqlSGQDP/4TljBAD5y98p2U1fqkdsw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - 
"@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.28.3" + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" }, "engines": { "node": ">=6.9.0" @@ -1053,27 +1053,27 @@ } }, "node_modules/@babel/helpers": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", - "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4" + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/parser": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.5.tgz", - "integrity": "sha512-KKBU1VGYR7ORr3At5HAtUQ+TV3SzRCXmA/8OdDZiLDBIZxVyzXuztPjfLd3BV1PRAQGCMWWSHYhL0F8d5uHBDQ==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.28.5" + "@babel/types": "^7.29.0" }, "bin": { "parser": "bin/babel-parser.js" @@ -1083,33 +1083,33 @@ } }, "node_modules/@babel/template": { - "version": "7.27.2", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.27.2.tgz", - "integrity": "sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==", + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": 
"sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/parser": "^7.27.2", - "@babel/types": "^7.27.1" + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.5.tgz", - "integrity": "sha512-TCCj4t55U90khlYkVV/0TfkJkAkUg3jZFA3Neb7unZT8CPok7iiRfaX0F+WnqWqt7OxhOn0uBKXCw4lbL8W0aQ==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.5", + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.5", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.5", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", "debug": "^4.3.1" }, "engines": { @@ -1117,9 +1117,9 @@ } }, "node_modules/@babel/types": { - "version": "7.28.5", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.5.tgz", - "integrity": "sha512-qQ5m48eI/MFLQ5PxQj4PFaprjyCTLI37ElWMmNs0K8Lk3dVeOdNpB3ks8jc7yM5CDmVC73eMVk/trk3fgmrUpA==", + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", "dev": true, "license": "MIT", "dependencies": { @@ -1596,6 +1596,19 @@ "node": ">=18" } }, + "node_modules/@hono/node-server": { + "version": "1.19.9", + "resolved": 
"https://registry.npmjs.org/@hono/node-server/-/node-server-1.19.9.tgz", + "integrity": "sha512-vHL6w3ecZsky+8P5MD+eFfaGTyCeOHUIFYMGpQGbrBTSmNNoxv0if69rEZ5giu36weC5saFuznL411gRX7bJDw==", + "devOptional": true, + "license": "MIT", + "engines": { + "node": ">=18.14.1" + }, + "peerDependencies": { + "hono": "^4" + } + }, "node_modules/@inquirer/ansi": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@inquirer/ansi/-/ansi-1.0.2.tgz", @@ -1947,88 +1960,14 @@ } } }, - "node_modules/@isaacs/balanced-match": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/@isaacs/balanced-match/-/balanced-match-4.0.1.tgz", - "integrity": "sha512-yzMTt9lEb8Gv7zRioUilSglI0c0smZ9k5D65677DLWLtWJaXIS3CqcGyUFByYKlnUj6TkjLVs54fBl6+TiGQDQ==", - "devOptional": true, - "license": "MIT", - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/@isaacs/brace-expansion": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/@isaacs/brace-expansion/-/brace-expansion-5.0.0.tgz", - "integrity": "sha512-ZT55BDLV0yv0RBm2czMiZ+SqCGO7AvmOM3G/w2xhVPH+te0aKgFjmBvGlL1dH+ql2tgGO3MVrbb3jCKyvpgnxA==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "@isaacs/balanced-match": "^4.0.1" - }, - "engines": { - "node": "20 || >=22" - } - }, "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", - "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-9.0.0.tgz", + "integrity": "sha512-AokJm4tuBHillT+FpMtxQ60n8ObyXBatq7jD2/JA9dxbDDokKQm8KMht5ibGzLVU9IJDIKK4TPKgMHEYMn3lMg==", "devOptional": true, - "license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": 
"npm:wrap-ansi@^7.0.0" - }, + "license": "BlueOak-1.0.0", "engines": { - "node": ">=12" - } - }, - "node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", - "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", - "devOptional": true, - "license": "MIT" - }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", - "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", - "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + "node": ">=18" } }, "node_modules/@isaacs/fs-minipass": { @@ -2265,12 +2204,13 @@ } }, "node_modules/@modelcontextprotocol/sdk": { - "version": "1.24.0", - "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.24.0.tgz", - "integrity": "sha512-D8h5KXY2vHFW8zTuxn2vuZGN0HGrQ5No6LkHwlEA9trVgNdPL3TF1dSqKA7Dny6BbBYKSW/rOBDXdC8KJAjUCg==", + "version": "1.26.0", + "resolved": "https://registry.npmjs.org/@modelcontextprotocol/sdk/-/sdk-1.26.0.tgz", + "integrity": 
"sha512-Y5RmPncpiDtTXDbLKswIJzTqu2hyBKxTNsgKqKclDbhIgg1wgtf1fRuvxgTnRfcnxtvvgbIEcqUOzZrJ6iSReg==", "devOptional": true, "license": "MIT", "dependencies": { + "@hono/node-server": "^1.19.9", "ajv": "^8.17.1", "ajv-formats": "^3.0.1", "content-type": "^1.0.5", @@ -2278,13 +2218,15 @@ "cross-spawn": "^7.0.5", "eventsource": "^3.0.2", "eventsource-parser": "^3.0.0", - "express": "^5.0.1", - "express-rate-limit": "^7.5.0", - "jose": "^6.1.1", + "express": "^5.2.1", + "express-rate-limit": "^8.2.1", + "hono": "^4.11.4", + "jose": "^6.1.3", + "json-schema-typed": "^8.0.2", "pkce-challenge": "^5.0.0", "raw-body": "^3.0.0", "zod": "^3.25 || ^4.0", - "zod-to-json-schema": "^3.25.0" + "zod-to-json-schema": "^3.25.1" }, "engines": { "node": ">=18" @@ -2867,267 +2809,293 @@ } }, "node_modules/@npmcli/agent": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-3.0.0.tgz", - "integrity": "sha512-S79NdEgDQd/NGCay6TCoVzXSj74skRZIKJcpJjC5lOq34SZzyI6MqtiiWoiVWoVrTcGjNeC4ipbh1VIHlpfF5Q==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/agent/-/agent-4.0.0.tgz", + "integrity": "sha512-kAQTcEN9E8ERLVg5AsGwLNoFb+oEG6engbqAU2P43gD4JEIkNGMHdVQ096FsOAAYpZPB0RSt0zgInKIAS1l5QA==", "devOptional": true, "license": "ISC", "dependencies": { "agent-base": "^7.1.0", "http-proxy-agent": "^7.0.0", "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", + "lru-cache": "^11.2.1", "socks-proxy-agent": "^8.0.3" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/agent/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "version": "11.2.6", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", + "integrity": 
"sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", "devOptional": true, - "license": "ISC" + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } }, "node_modules/@npmcli/fs": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-4.0.0.tgz", - "integrity": "sha512-/xGlezI6xfGO9NwuJlnwz/K14qD1kCSAGtacBHnGzeAIuJGazcp45KP5NuyARXoKb7cwulAGWVsbeSxdG/cb0Q==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/fs/-/fs-5.0.0.tgz", + "integrity": "sha512-7OsC1gNORBEawOa5+j2pXN9vsicaIOH5cPXxoR6fJOmH6/EXpJB2CajXOu1fPRFun2m1lktEFX11+P89hqO/og==", "devOptional": true, "license": "ISC", "dependencies": { "semver": "^7.3.5" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/git": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-6.0.3.tgz", - "integrity": "sha512-GUYESQlxZRAdhs3UhbB6pVRNUELQOHXwK9ruDkwmCv2aZ5y0SApQzUJCg02p3A7Ue2J5hxvlk1YI53c00NmRyQ==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/git/-/git-7.0.1.tgz", + "integrity": "sha512-+XTFxK2jJF/EJJ5SoAzXk3qwIDfvFc5/g+bD274LZ7uY7LE8sTfG6Z8rOanPl2ZEvZWqNvmEdtXC25cE54VcoA==", "devOptional": true, "license": "ISC", "dependencies": { - "@npmcli/promise-spawn": "^8.0.0", - "ini": "^5.0.0", - "lru-cache": "^10.0.1", - "npm-pick-manifest": "^10.0.0", - "proc-log": "^5.0.0", + "@npmcli/promise-spawn": "^9.0.0", + "ini": "^6.0.0", + "lru-cache": "^11.2.1", + "npm-pick-manifest": "^11.0.1", + "proc-log": "^6.0.0", "promise-retry": "^2.0.1", "semver": "^7.3.5", - "which": "^5.0.0" + "which": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/@npmcli/git/node_modules/ini": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/ini/-/ini-6.0.0.tgz", + "integrity": 
"sha512-IBTdIkzZNOpqm7q3dRqJvMaldXjDHWkEDfrwGEQTs5eaQMWV+djAhR+wahyNNMAa+qpbDUhBMVt4ZKNwpPm7xQ==", + "devOptional": true, + "license": "ISC", + "engines": { + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/git/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-4.0.0.tgz", + "integrity": "sha512-FFUtZMpoZ8RqHS3XeXEmHWLA4thH+ZxCv2lOiPIn1Xc7CxrqhWzNSDzD+/chS/zbYezmiwWLdQC09JdQKmthOw==", "devOptional": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "engines": { - "node": ">=16" + "node": ">=20" } }, "node_modules/@npmcli/git/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "version": "11.2.6", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", + "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", "devOptional": true, - "license": "ISC" + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@npmcli/git/node_modules/proc-log": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.1.0.tgz", + "integrity": "sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==", + "devOptional": true, + "license": "ISC", + "engines": { + "node": "^20.17.0 || >=22.9.0" + } }, "node_modules/@npmcli/git/node_modules/which": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", - "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", + 
"version": "6.0.1", + "resolved": "https://registry.npmjs.org/which/-/which-6.0.1.tgz", + "integrity": "sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg==", "devOptional": true, "license": "ISC", "dependencies": { - "isexe": "^3.1.1" + "isexe": "^4.0.0" }, "bin": { "node-which": "bin/which.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/installed-package-contents": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-3.0.0.tgz", - "integrity": "sha512-fkxoPuFGvxyrH+OQzyTkX2LUEamrF4jZSmxjAtPPHHGO0dqsQ8tTKjnIS8SAnPHdk2I03BDtSMR5K/4loKg79Q==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/installed-package-contents/-/installed-package-contents-4.0.0.tgz", + "integrity": "sha512-yNyAdkBxB72gtZ4GrwXCM0ZUedo9nIbOMKfGjt6Cu6DXf0p8y1PViZAKDC8q8kv/fufx0WTjRBdSlyrvnP7hmA==", "devOptional": true, "license": "ISC", "dependencies": { - "npm-bundled": "^4.0.0", - "npm-normalize-package-bin": "^4.0.0" + "npm-bundled": "^5.0.0", + "npm-normalize-package-bin": "^5.0.0" }, "bin": { "installed-package-contents": "bin/index.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/node-gyp": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-4.0.0.tgz", - "integrity": "sha512-+t5DZ6mO/QFh78PByMq1fGSAub/agLJZDRfJRMeOSNCt8s9YVlTjmGpIPwPhvXTGUIJk+WszlT0rQa1W33yzNA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/node-gyp/-/node-gyp-5.0.0.tgz", + "integrity": "sha512-uuG5HZFXLfyFKqg8QypsmgLQW7smiRjVc45bqD/ofZZcR/uxEjgQU8qDPv0s9TEeMUiAAU/GC5bR6++UdTirIQ==", "devOptional": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/package-json": { - "version": "6.2.0", - "resolved": 
"https://registry.npmjs.org/@npmcli/package-json/-/package-json-6.2.0.tgz", - "integrity": "sha512-rCNLSB/JzNvot0SEyXqWZ7tX2B5dD2a1br2Dp0vSYVo5jh8Z0EZ7lS9TsZ1UtziddB1UfNUaMCc538/HztnJGA==", + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/@npmcli/package-json/-/package-json-7.0.4.tgz", + "integrity": "sha512-0wInJG3j/K40OJt/33ax47WfWMzZTm6OQxB9cDhTt5huCP2a9g2GnlsxmfN+PulItNPIpPrZ+kfwwUil7eHcZQ==", "devOptional": true, "license": "ISC", "dependencies": { - "@npmcli/git": "^6.0.0", - "glob": "^10.2.2", - "hosted-git-info": "^8.0.0", - "json-parse-even-better-errors": "^4.0.0", - "proc-log": "^5.0.0", + "@npmcli/git": "^7.0.0", + "glob": "^13.0.0", + "hosted-git-info": "^9.0.0", + "json-parse-even-better-errors": "^5.0.0", + "proc-log": "^6.0.0", "semver": "^7.5.3", "validate-npm-package-license": "^3.0.4" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/@npmcli/package-json/node_modules/hosted-git-info": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz", - "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==", + "node_modules/@npmcli/package-json/node_modules/proc-log": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.1.0.tgz", + "integrity": "sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==", "devOptional": true, "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/@npmcli/package-json/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "devOptional": true, - "license": "ISC" - }, 
"node_modules/@npmcli/promise-spawn": { - "version": "8.0.3", - "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-8.0.3.tgz", - "integrity": "sha512-Yb00SWaL4F8w+K8YGhQ55+xE4RUNdMHV43WZGsiTM92gS+lC0mGsn7I4hLug7pbao035S6bj3Y3w0cUNGLfmkg==", + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/@npmcli/promise-spawn/-/promise-spawn-9.0.1.tgz", + "integrity": "sha512-OLUaoqBuyxeTqUvjA3FZFiXUfYC1alp3Sa99gW3EUDz3tZ3CbXDdcZ7qWKBzicrJleIgucoWamWH1saAmH/l2Q==", "devOptional": true, "license": "ISC", "dependencies": { - "which": "^5.0.0" + "which": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/promise-spawn/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-4.0.0.tgz", + "integrity": "sha512-FFUtZMpoZ8RqHS3XeXEmHWLA4thH+ZxCv2lOiPIn1Xc7CxrqhWzNSDzD+/chS/zbYezmiwWLdQC09JdQKmthOw==", "devOptional": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "engines": { - "node": ">=16" + "node": ">=20" } }, "node_modules/@npmcli/promise-spawn/node_modules/which": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", - "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/which/-/which-6.0.1.tgz", + "integrity": "sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg==", "devOptional": true, "license": "ISC", "dependencies": { - "isexe": "^3.1.1" + "isexe": "^4.0.0" }, "bin": { "node-which": "bin/which.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/redact": { - 
"version": "3.2.2", - "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-3.2.2.tgz", - "integrity": "sha512-7VmYAmk4csGv08QzrDKScdzn11jHPFGyqJW39FyPgPuAp3zIaUmuCo1yxw9aGs+NEJuTGQ9Gwqpt93vtJubucg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@npmcli/redact/-/redact-4.0.0.tgz", + "integrity": "sha512-gOBg5YHMfZy+TfHArfVogwgfBeQnKbbGo3pSUyK/gSI0AVu+pEiDVcKlQb0D8Mg1LNRZILZ6XG8I5dJ4KuAd9Q==", "devOptional": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/run-script": { - "version": "9.1.0", - "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-9.1.0.tgz", - "integrity": "sha512-aoNSbxtkePXUlbZB+anS1LqsJdctG5n3UVhfU47+CDdwMi6uNTBMF9gPcQRnqghQd2FGzcwwIFBruFMxjhBewg==", + "version": "10.0.3", + "resolved": "https://registry.npmjs.org/@npmcli/run-script/-/run-script-10.0.3.tgz", + "integrity": "sha512-ER2N6itRkzWbbtVmZ9WKaWxVlKlOeBFF1/7xx+KA5J1xKa4JjUwBdb6tDpk0v1qA+d+VDwHI9qmLcXSWcmi+Rw==", "devOptional": true, "license": "ISC", "dependencies": { - "@npmcli/node-gyp": "^4.0.0", - "@npmcli/package-json": "^6.0.0", - "@npmcli/promise-spawn": "^8.0.0", - "node-gyp": "^11.0.0", - "proc-log": "^5.0.0", - "which": "^5.0.0" + "@npmcli/node-gyp": "^5.0.0", + "@npmcli/package-json": "^7.0.0", + "@npmcli/promise-spawn": "^9.0.0", + "node-gyp": "^12.1.0", + "proc-log": "^6.0.0", + "which": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/run-script/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-4.0.0.tgz", + "integrity": "sha512-FFUtZMpoZ8RqHS3XeXEmHWLA4thH+ZxCv2lOiPIn1Xc7CxrqhWzNSDzD+/chS/zbYezmiwWLdQC09JdQKmthOw==", + 
"devOptional": true, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=20" + } + }, + "node_modules/@npmcli/run-script/node_modules/proc-log": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.1.0.tgz", + "integrity": "sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==", "devOptional": true, "license": "ISC", "engines": { - "node": ">=16" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@npmcli/run-script/node_modules/which": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", - "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/which/-/which-6.0.1.tgz", + "integrity": "sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg==", "devOptional": true, "license": "ISC", "dependencies": { - "isexe": "^3.1.1" + "isexe": "^4.0.0" }, "bin": { "node-which": "bin/which.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@parcel/watcher": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.1.tgz", - "integrity": "sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher/-/watcher-2.5.6.tgz", + "integrity": "sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==", "dev": true, "hasInstallScript": true, "license": "MIT", "optional": true, "dependencies": { - "detect-libc": "^1.0.3", + "detect-libc": "^2.0.3", "is-glob": "^4.0.3", - "micromatch": "^4.0.5", - "node-addon-api": "^7.0.0" + "node-addon-api": "^7.0.0", + "picomatch": "^4.0.3" }, "engines": { "node": ">= 10.0.0" @@ -3137,25 +3105,25 @@ "url": 
"https://opencollective.com/parcel" }, "optionalDependencies": { - "@parcel/watcher-android-arm64": "2.5.1", - "@parcel/watcher-darwin-arm64": "2.5.1", - "@parcel/watcher-darwin-x64": "2.5.1", - "@parcel/watcher-freebsd-x64": "2.5.1", - "@parcel/watcher-linux-arm-glibc": "2.5.1", - "@parcel/watcher-linux-arm-musl": "2.5.1", - "@parcel/watcher-linux-arm64-glibc": "2.5.1", - "@parcel/watcher-linux-arm64-musl": "2.5.1", - "@parcel/watcher-linux-x64-glibc": "2.5.1", - "@parcel/watcher-linux-x64-musl": "2.5.1", - "@parcel/watcher-win32-arm64": "2.5.1", - "@parcel/watcher-win32-ia32": "2.5.1", - "@parcel/watcher-win32-x64": "2.5.1" + "@parcel/watcher-android-arm64": "2.5.6", + "@parcel/watcher-darwin-arm64": "2.5.6", + "@parcel/watcher-darwin-x64": "2.5.6", + "@parcel/watcher-freebsd-x64": "2.5.6", + "@parcel/watcher-linux-arm-glibc": "2.5.6", + "@parcel/watcher-linux-arm-musl": "2.5.6", + "@parcel/watcher-linux-arm64-glibc": "2.5.6", + "@parcel/watcher-linux-arm64-musl": "2.5.6", + "@parcel/watcher-linux-x64-glibc": "2.5.6", + "@parcel/watcher-linux-x64-musl": "2.5.6", + "@parcel/watcher-win32-arm64": "2.5.6", + "@parcel/watcher-win32-ia32": "2.5.6", + "@parcel/watcher-win32-x64": "2.5.6" } }, "node_modules/@parcel/watcher-android-arm64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.1.tgz", - "integrity": "sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-android-arm64/-/watcher-android-arm64-2.5.6.tgz", + "integrity": "sha512-YQxSS34tPF/6ZG7r/Ih9xy+kP/WwediEUsqmtf0cuCV5TPPKw/PQHRhueUo6JdeFJaqV3pyjm0GdYjZotbRt/A==", "cpu": [ "arm64" ], @@ -3174,9 +3142,9 @@ } }, "node_modules/@parcel/watcher-darwin-arm64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.1.tgz", - "integrity": 
"sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-arm64/-/watcher-darwin-arm64-2.5.6.tgz", + "integrity": "sha512-Z2ZdrnwyXvvvdtRHLmM4knydIdU9adO3D4n/0cVipF3rRiwP+3/sfzpAwA/qKFL6i1ModaabkU7IbpeMBgiVEA==", "cpu": [ "arm64" ], @@ -3195,9 +3163,9 @@ } }, "node_modules/@parcel/watcher-darwin-x64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.1.tgz", - "integrity": "sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-darwin-x64/-/watcher-darwin-x64-2.5.6.tgz", + "integrity": "sha512-HgvOf3W9dhithcwOWX9uDZyn1lW9R+7tPZ4sug+NGrGIo4Rk1hAXLEbcH1TQSqxts0NYXXlOWqVpvS1SFS4fRg==", "cpu": [ "x64" ], @@ -3216,9 +3184,9 @@ } }, "node_modules/@parcel/watcher-freebsd-x64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.1.tgz", - "integrity": "sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-freebsd-x64/-/watcher-freebsd-x64-2.5.6.tgz", + "integrity": "sha512-vJVi8yd/qzJxEKHkeemh7w3YAn6RJCtYlE4HPMoVnCpIXEzSrxErBW5SJBgKLbXU3WdIpkjBTeUNtyBVn8TRng==", "cpu": [ "x64" ], @@ -3237,9 +3205,9 @@ } }, "node_modules/@parcel/watcher-linux-arm-glibc": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.1.tgz", - "integrity": "sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-glibc/-/watcher-linux-arm-glibc-2.5.6.tgz", + "integrity": 
"sha512-9JiYfB6h6BgV50CCfasfLf/uvOcJskMSwcdH1PHH9rvS1IrNy8zad6IUVPVUfmXr+u+Km9IxcfMLzgdOudz9EQ==", "cpu": [ "arm" ], @@ -3258,9 +3226,9 @@ } }, "node_modules/@parcel/watcher-linux-arm-musl": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.1.tgz", - "integrity": "sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm-musl/-/watcher-linux-arm-musl-2.5.6.tgz", + "integrity": "sha512-Ve3gUCG57nuUUSyjBq/MAM0CzArtuIOxsBdQ+ftz6ho8n7s1i9E1Nmk/xmP323r2YL0SONs1EuwqBp2u1k5fxg==", "cpu": [ "arm" ], @@ -3279,9 +3247,9 @@ } }, "node_modules/@parcel/watcher-linux-arm64-glibc": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.1.tgz", - "integrity": "sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-glibc/-/watcher-linux-arm64-glibc-2.5.6.tgz", + "integrity": "sha512-f2g/DT3NhGPdBmMWYoxixqYr3v/UXcmLOYy16Bx0TM20Tchduwr4EaCbmxh1321TABqPGDpS8D/ggOTaljijOA==", "cpu": [ "arm64" ], @@ -3300,9 +3268,9 @@ } }, "node_modules/@parcel/watcher-linux-arm64-musl": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.1.tgz", - "integrity": "sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-arm64-musl/-/watcher-linux-arm64-musl-2.5.6.tgz", + "integrity": "sha512-qb6naMDGlbCwdhLj6hgoVKJl2odL34z2sqkC7Z6kzir8b5W65WYDpLB6R06KabvZdgoHI/zxke4b3zR0wAbDTA==", "cpu": [ "arm64" ], @@ -3321,9 +3289,9 @@ } }, "node_modules/@parcel/watcher-linux-x64-glibc": { - "version": 
"2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.1.tgz", - "integrity": "sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-glibc/-/watcher-linux-x64-glibc-2.5.6.tgz", + "integrity": "sha512-kbT5wvNQlx7NaGjzPFu8nVIW1rWqV780O7ZtkjuWaPUgpv2NMFpjYERVi0UYj1msZNyCzGlaCWEtzc+exjMGbQ==", "cpu": [ "x64" ], @@ -3342,9 +3310,9 @@ } }, "node_modules/@parcel/watcher-linux-x64-musl": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.1.tgz", - "integrity": "sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-linux-x64-musl/-/watcher-linux-x64-musl-2.5.6.tgz", + "integrity": "sha512-1JRFeC+h7RdXwldHzTsmdtYR/Ku8SylLgTU/reMuqdVD7CtLwf0VR1FqeprZ0eHQkO0vqsbvFLXUmYm/uNKJBg==", "cpu": [ "x64" ], @@ -3363,9 +3331,9 @@ } }, "node_modules/@parcel/watcher-win32-arm64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.1.tgz", - "integrity": "sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-arm64/-/watcher-win32-arm64-2.5.6.tgz", + "integrity": "sha512-3ukyebjc6eGlw9yRt678DxVF7rjXatWiHvTXqphZLvo7aC5NdEgFufVwjFfY51ijYEWpXbqF5jtrK275z52D4Q==", "cpu": [ "arm64" ], @@ -3384,9 +3352,9 @@ } }, "node_modules/@parcel/watcher-win32-ia32": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.1.tgz", - "integrity": "sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==", + "version": "2.5.6", + 
"resolved": "https://registry.npmjs.org/@parcel/watcher-win32-ia32/-/watcher-win32-ia32-2.5.6.tgz", + "integrity": "sha512-k35yLp1ZMwwee3Ez/pxBi5cf4AoBKYXj00CZ80jUz5h8prpiaQsiRPKQMxoLstNuqe2vR4RNPEAEcjEFzhEz/g==", "cpu": [ "ia32" ], @@ -3405,9 +3373,9 @@ } }, "node_modules/@parcel/watcher-win32-x64": { - "version": "2.5.1", - "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.1.tgz", - "integrity": "sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==", + "version": "2.5.6", + "resolved": "https://registry.npmjs.org/@parcel/watcher-win32-x64/-/watcher-win32-x64-2.5.6.tgz", + "integrity": "sha512-hbQlYcCq5dlAX9Qx+kFb0FHue6vbjlf0FrNzSKdYK2APUf7tGfGxQCk2ihEREmbR6ZMc0MVAD5RIX/41gpUzTw==", "cpu": [ "x64" ], @@ -3425,20 +3393,6 @@ "url": "https://opencollective.com/parcel" } }, - "node_modules/@parcel/watcher/node_modules/detect-libc": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", - "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", - "dev": true, - "license": "Apache-2.0", - "optional": true, - "bin": { - "detect-libc": "bin/detect-libc.js" - }, - "engines": { - "node": ">=0.10" - } - }, "node_modules/@parcel/watcher/node_modules/node-addon-api": { "version": "7.1.1", "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-7.1.1.tgz", @@ -3447,17 +3401,6 @@ "license": "MIT", "optional": true }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", - "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">=14" - } - }, "node_modules/@rollup/plugin-json": { "version": "6.1.0", "resolved": 
"https://registry.npmjs.org/@rollup/plugin-json/-/plugin-json-6.1.0.tgz", @@ -3818,9 +3761,9 @@ ] }, "node_modules/@rollup/wasm-node": { - "version": "4.54.0", - "resolved": "https://registry.npmjs.org/@rollup/wasm-node/-/wasm-node-4.54.0.tgz", - "integrity": "sha512-CeEdHzNY+ZIR6NWpIOiJuCrr6tTK7cRGeOf6GYg5f73+UwJLqn5a4d5Ovf/hOWDyHM1KcySbxHQESJ9krhe0+A==", + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/wasm-node/-/wasm-node-4.57.1.tgz", + "integrity": "sha512-b0rcJH8ykEanfgTeDtlPubhphIUOx0oaAek+3hizTaFkoC1FBSTsY0GixwB4D5HZ5r3Gt2yI9c8M13OcW/kW5A==", "dev": true, "license": "MIT", "dependencies": { @@ -3845,13 +3788,13 @@ "license": "MIT" }, "node_modules/@schematics/angular": { - "version": "20.3.13", - "resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-20.3.13.tgz", - "integrity": "sha512-ETJ1budKmrkdxojo5QP6TPr6zQZYGxtWWf8NrX1cBIS851zPCmFkKyhSFLZsoksariYF/LP8ljvm8tlcIzt/XA==", + "version": "20.3.16", + "resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-20.3.16.tgz", + "integrity": "sha512-KeOcsM5piwv/6tUKBmLD1zXTwtJlZBnR2WM/4T9ImaQbmFGe1MMHUABT5SQ3Bifv1YKCw58ImxiaQUY9sdNqEQ==", "license": "MIT", "dependencies": { - "@angular-devkit/core": "20.3.13", - "@angular-devkit/schematics": "20.3.13", + "@angular-devkit/core": "20.3.16", + "@angular-devkit/schematics": "20.3.16", "jsonc-parser": "3.3.1" }, "engines": { @@ -3861,32 +3804,32 @@ } }, "node_modules/@sigstore/bundle": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-3.1.0.tgz", - "integrity": "sha512-Mm1E3/CmDDCz3nDhFKTuYdB47EdRFRQMOE/EAbiG1MJW77/w1b3P7Qx7JSrVJs8PfwOLOVcKQCHErIwCTyPbag==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@sigstore/bundle/-/bundle-4.0.0.tgz", + "integrity": "sha512-NwCl5Y0V6Di0NexvkTqdoVfmjTaQwoLM236r89KEojGmq/jMls8S+zb7yOwAPdXvbwfKDlP+lmXgAL4vKSQT+A==", "devOptional": true, "license": "Apache-2.0", "dependencies": { - "@sigstore/protobuf-specs": "^0.4.0" + 
"@sigstore/protobuf-specs": "^0.5.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@sigstore/core": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-2.0.0.tgz", - "integrity": "sha512-nYxaSb/MtlSI+JWcwTHQxyNmWeWrUXJJ/G4liLrGG7+tS4vAz6LF3xRXqLH6wPIVUoZQel2Fs4ddLx4NCpiIYg==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-3.1.0.tgz", + "integrity": "sha512-o5cw1QYhNQ9IroioJxpzexmPjfCe7gzafd2RY3qnMpxr4ZEja+Jad/U8sgFpaue6bOaF+z7RVkyKVV44FN+N8A==", "devOptional": true, "license": "Apache-2.0", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@sigstore/protobuf-specs": { - "version": "0.4.3", - "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.4.3.tgz", - "integrity": "sha512-fk2zjD9117RL9BjqEwF7fwv7Q/P9yGsMV4MUJZ/DocaQJ6+3pKr+syBq1owU5Q5qGw5CUbXzm+4yJ2JVRDQeSA==", + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.5.0.tgz", + "integrity": "sha512-MM8XIwUjN2bwvCg1QvrMtbBmpcSHrkhFSCu1D11NyPvDQ25HEc4oG5/OcQfd/Tlf/OxmKWERDj0zGE23jQaMwA==", "devOptional": true, "license": "Apache-2.0", "engines": { @@ -3894,50 +3837,60 @@ } }, "node_modules/@sigstore/sign": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-3.1.0.tgz", - "integrity": "sha512-knzjmaOHOov1Ur7N/z4B1oPqZ0QX5geUfhrVaqVlu+hl0EAoL4o+l0MSULINcD5GCWe3Z0+YJO8ues6vFlW0Yw==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-4.1.0.tgz", + "integrity": "sha512-Vx1RmLxLGnSUqx/o5/VsCjkuN5L7y+vxEEwawvc7u+6WtX2W4GNa7b9HEjmcRWohw/d6BpATXmvOwc78m+Swdg==", "devOptional": true, "license": "Apache-2.0", "dependencies": { - "@sigstore/bundle": "^3.1.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.4.0", - "make-fetch-happen": "^14.0.2", - "proc-log": "^5.0.0", + 
"@sigstore/bundle": "^4.0.0", + "@sigstore/core": "^3.1.0", + "@sigstore/protobuf-specs": "^0.5.0", + "make-fetch-happen": "^15.0.3", + "proc-log": "^6.1.0", "promise-retry": "^2.0.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/@sigstore/sign/node_modules/proc-log": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.1.0.tgz", + "integrity": "sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==", + "devOptional": true, + "license": "ISC", + "engines": { + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@sigstore/tuf": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-3.1.1.tgz", - "integrity": "sha512-eFFvlcBIoGwVkkwmTi/vEQFSva3xs5Ot3WmBcjgjVdiaoelBLQaQ/ZBfhlG0MnG0cmTYScPpk7eDdGDWUcFUmg==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/@sigstore/tuf/-/tuf-4.0.1.tgz", + "integrity": "sha512-OPZBg8y5Vc9yZjmWCHrlWPMBqW5yd8+wFNl+thMdtcWz3vjVSoJQutF8YkrzI0SLGnkuFof4HSsWUhXrf219Lw==", "devOptional": true, "license": "Apache-2.0", "dependencies": { - "@sigstore/protobuf-specs": "^0.4.1", - "tuf-js": "^3.0.1" + "@sigstore/protobuf-specs": "^0.5.0", + "tuf-js": "^4.1.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@sigstore/verify": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-2.1.1.tgz", - "integrity": "sha512-hVJD77oT67aowHxwT4+M6PGOp+E2LtLdTK3+FC0lBO9T7sYwItDMXZ7Z07IDCvR1M717a4axbIWckrW67KMP/w==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-3.1.0.tgz", + "integrity": "sha512-mNe0Iigql08YupSOGv197YdHpPPr+EzDZmfCgMc7RPNaZTw5aLN01nBl6CHJOh3BGtnMIj83EeN4butBchc8Ag==", "devOptional": true, "license": "Apache-2.0", "dependencies": { - "@sigstore/bundle": "^3.1.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.4.1" + 
"@sigstore/bundle": "^4.0.0", + "@sigstore/core": "^3.1.0", + "@sigstore/protobuf-specs": "^0.5.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/@start9labs/argon2": { @@ -4140,9 +4093,9 @@ } }, "node_modules/@taiga-ui/i18n": { - "version": "4.66.0", - "resolved": "https://registry.npmjs.org/@taiga-ui/i18n/-/i18n-4.66.0.tgz", - "integrity": "sha512-8S9slzAxiSWCBOn2QZhTLM5rH2Lxw/vRt9sZc0xFh0lW5cv47EcRd0Hgs8MwwTwnHmitWgS7uGSlnvkFlJbOxQ==", + "version": "4.70.0", + "resolved": "https://registry.npmjs.org/@taiga-ui/i18n/-/i18n-4.70.0.tgz", + "integrity": "sha512-9G8Kp+2LvD8vepPOjAHvU9cZ7aoqp2JqkQRFQOGqv7E9y25bU7PPMx9t/sbNNmzdXodv0g/zjMsimghkrldk3Q==", "license": "Apache-2.0", "peer": true, "dependencies": { @@ -4254,22 +4207,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/@ts-morph/common/node_modules/mkdirp": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", - "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", - "license": "MIT", - "optional": true, - "bin": { - "mkdirp": "dist/cjs/src/bin.js" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/@tsconfig/node10": { "version": "1.0.12", "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.12.tgz", @@ -4309,30 +4246,56 @@ } }, "node_modules/@tufjs/models": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-3.0.1.tgz", - "integrity": "sha512-UUYHISyhCU3ZgN8yaear3cGATHb3SMuKHsQ/nVbHXcmnBf+LzQ/cQfhNG+rfaSHgqGKNEm2cOCLVLELStUQ1JA==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/@tufjs/models/-/models-4.1.0.tgz", + "integrity": "sha512-Y8cK9aggNRsqJVaKUlEYs4s7CvQ1b1ta2DVPyAimb0I2qhzjNk+A+mxvll/klL0RlfuIUei8BF7YWiua4kQqww==", "devOptional": true, "license": "MIT", "dependencies": { 
"@tufjs/canonical-json": "2.0.0", - "minimatch": "^9.0.5" + "minimatch": "^10.1.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/@tufjs/models/node_modules/balanced-match": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.2.tgz", + "integrity": "sha512-x0K50QvKQ97fdEz2kPehIerj+YTeptKF9hyYkKf6egnwmMWAkADiO0QCzSp0R5xN8FTZgYaBfSaue46Ej62nMg==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "jackspeak": "^4.2.3" + }, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/@tufjs/models/node_modules/brace-expansion": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.2.tgz", + "integrity": "sha512-Pdk8c9poy+YhOgVWw1JNN22/HcivgKWwpxKq04M/jTmHyCZn12WPJebZxdjSa5TmBqISrUSgNYU3eRORljfCCw==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^4.0.2" + }, + "engines": { + "node": "20 || >=22" } }, "node_modules/@tufjs/models/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.0.tgz", + "integrity": "sha512-ugkC31VaVg9cF0DFVoADH12k6061zNZkZON+aX8AWsR9GhPcErkcMBceb6znR8wLERM2AkkOxy2nWRLpT9Jq5w==", "devOptional": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "brace-expansion": "^2.0.1" + "brace-expansion": "^5.0.2" }, "engines": { - "node": ">=16 || 14 >=14.17" + "node": "20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -4390,9 +4353,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "22.19.3", - "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.3.tgz", - "integrity": 
"sha512-1N9SBnWYOJTrNZCdh/yJE+t910Y128BoyY+zBLWhL3r0TYzlTmFdXrPwHL9DyFZmlEXNQQolTZh3KHV31QDhyA==", + "version": "22.19.11", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.19.11.tgz", + "integrity": "sha512-BH7YwL6rA93ReqeQS1c4bsPpcfOmJasG+Fkr6Y59q83f9M1WcBRHR2vM+P9eOisYRcN3ujQoiZY8uk5W+1WL8w==", "dev": true, "license": "MIT", "peer": true, @@ -4461,13 +4424,13 @@ "license": "BSD-2-Clause" }, "node_modules/abbrev": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-3.0.1.tgz", - "integrity": "sha512-AO2ac6pjRB3SJmGJo+v5/aK6Omggp6fsLrs6wN9bd35ulu4cCwaAU9+7ZhXjeqHVkaHThLuzH0nZr0YpCDhygg==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-4.0.0.tgz", + "integrity": "sha512-a1wflyaL0tHtJSmLSOVybYhy22vRih4eduhhrkcjgrWGnRfrZtovJ2FRjxuTtkkj47O/baf0R86QU5OuYpz8fA==", "devOptional": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/accepts": { @@ -4590,9 +4553,9 @@ } }, "node_modules/ansi-escapes": { - "version": "7.2.0", - "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.2.0.tgz", - "integrity": "sha512-g6LhBsl+GBPRWGWsBtutpzBYuIIdBkLEvad5C/va/74Db018+5TZiyA26cZJAr3Rft5lprVqOIPxf5Vid6tqAw==", + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-7.3.0.tgz", + "integrity": "sha512-BvU8nYgGQBxcmMuEeUEmNTvrMVjJNSH7RgW24vXexN4Ven6qCvy4TntnvlnwnMLTVlcRQQdbRY8NKnaIoeWDNg==", "devOptional": true, "license": "MIT", "dependencies": { @@ -4747,9 +4710,9 @@ } }, "node_modules/baseline-browser-mapping": { - "version": "2.9.11", - "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.11.tgz", - "integrity": "sha512-Sg0xJUNDU1sJNGdfGWhVHX0kkZ+HWcvmVymJbj6NSgZZmW/8S9Y2HQ5euytnIgakgxN6papOAWiwDo1ctFDcoQ==", + "version": "2.9.19", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.9.19.tgz", + 
"integrity": "sha512-ipDqC8FrAl/76p2SSWKSI+H9tFwm7vYqXQrItCuiVPt26Km0jS+NzSsBWAaBusvSbQcfJG+JitdMm+wZAgTYqg==", "dev": true, "license": "Apache-2.0", "bin": { @@ -4790,9 +4753,9 @@ } }, "node_modules/body-parser": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.1.tgz", - "integrity": "sha512-nfDwkulwiZYQIGwxdy0RUmowMhKcFVcYXUU7m4QlKYim1rUtg83xm2yjZ40QjDuc291AJjjeSc9b++AWHSgSHw==", + "version": "2.2.2", + "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.2.tgz", + "integrity": "sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==", "devOptional": true, "license": "MIT", "dependencies": { @@ -4802,7 +4765,7 @@ "http-errors": "^2.0.0", "iconv-lite": "^0.7.0", "on-finished": "^2.4.1", - "qs": "^6.14.0", + "qs": "^6.14.1", "raw-body": "^3.0.1", "type-is": "^2.0.1" }, @@ -4825,8 +4788,8 @@ "version": "2.0.2", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "devOptional": true, "license": "MIT", + "optional": true, "dependencies": { "balanced-match": "^1.0.0" } @@ -4947,71 +4910,36 @@ } }, "node_modules/cacache": { - "version": "19.0.1", - "resolved": "https://registry.npmjs.org/cacache/-/cacache-19.0.1.tgz", - "integrity": "sha512-hdsUxulXCi5STId78vRVYEtDAjq99ICAUktLTeTYsLoTE6Z8dS0c8pWNCxwdrk9YfJeobDZc2Y186hD/5ZQgFQ==", + "version": "20.0.3", + "resolved": "https://registry.npmjs.org/cacache/-/cacache-20.0.3.tgz", + "integrity": "sha512-3pUp4e8hv07k1QlijZu6Kn7c9+ZpWWk4j3F8N3xPuCExULobqJydKYOTj1FTq58srkJsXvO7LbGAH4C0ZU3WGw==", "devOptional": true, "license": "ISC", "dependencies": { - "@npmcli/fs": "^4.0.0", + "@npmcli/fs": "^5.0.0", "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", + "glob": "^13.0.0", + "lru-cache": "^11.1.0", "minipass": "^7.0.3", "minipass-collect": "^2.0.1", 
"minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "p-map": "^7.0.2", - "ssri": "^12.0.0", - "tar": "^7.4.3", - "unique-filename": "^4.0.0" + "ssri": "^13.0.0", + "unique-filename": "^5.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/cacache/node_modules/chownr": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", - "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", - "devOptional": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/cacache/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "devOptional": true, - "license": "ISC" - }, - "node_modules/cacache/node_modules/tar": { - "version": "7.5.2", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz", - "integrity": "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==", - "devOptional": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.1.0", - "yallist": "^5.0.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/cacache/node_modules/yallist": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", - "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", + "version": "11.2.6", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", + "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", "devOptional": true, "license": "BlueOak-1.0.0", "engines": { - "node": ">=18" + "node": "20 
|| >=22" } }, "node_modules/call-bind": { @@ -5081,9 +5009,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001761", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001761.tgz", - "integrity": "sha512-JF9ptu1vP2coz98+5051jZ4PwQgd2ni8A+gYSN7EA7dPKIMf0pDlSUxhdmVOaV3/fYK5uWBkgSXJaRLr4+3A6g==", + "version": "1.0.30001769", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001769.tgz", + "integrity": "sha512-BCfFL1sHijQlBGWBMuJyhZUhzo7wer5sVj9hqekB/7xn0Ypy+pER/edCYQm4exbXj4WiySGp40P8UuTh6w1srg==", "dev": true, "funding": [ { @@ -5154,13 +5082,13 @@ } }, "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", + "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", "devOptional": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "engines": { - "node": ">=10" + "node": ">=18" } }, "node_modules/ci-info": { @@ -5419,9 +5347,9 @@ } }, "node_modules/core-js": { - "version": "3.47.0", - "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.47.0.tgz", - "integrity": "sha512-c3Q2VVkGAUyupsjRnaNX6u8Dq2vAdzm9iuPj5FW0fRxzlxgq9Q39MDq10IvmQSpLgHQNyQzQmOo6bgGHmH3NNg==", + "version": "3.48.0", + "resolved": "https://registry.npmjs.org/core-js/-/core-js-3.48.0.tgz", + "integrity": "sha512-zpEHTy1fjTMZCKLHUZoVeylt9XrzaIN2rbPXEt0k+q7JE5CkCZdo6bNq55bn24a69CH7ErAVLKijxJja4fw+UQ==", "hasInstallScript": true, "license": "MIT", "funding": { @@ -5436,9 +5364,9 @@ "license": "MIT" }, "node_modules/cors": { - "version": "2.8.5", - "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.5.tgz", - "integrity": 
"sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g==", + "version": "2.8.6", + "resolved": "https://registry.npmjs.org/cors/-/cors-2.8.6.tgz", + "integrity": "sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==", "devOptional": true, "license": "MIT", "dependencies": { @@ -5447,6 +5375,10 @@ }, "engines": { "node": ">= 0.10" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/express" } }, "node_modules/corser": { @@ -5621,9 +5553,9 @@ } }, "node_modules/default-browser": { - "version": "5.4.0", - "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-5.4.0.tgz", - "integrity": "sha512-XDuvSq38Hr1MdN47EDvYtx3U0MTqpCEn+F6ft8z2vYDzMrvQhVp0ui9oQdqW3MvK3vqUETglt1tVGgjLuJ5izg==", + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/default-browser/-/default-browser-5.5.0.tgz", + "integrity": "sha512-H9LMLr5zwIbSxrmvikGuI/5KGhZ8E2zH3stkMgM5LpOWDutGM2JZaj460Udnf1a+946zc7YBgrqEWwbk7zHvGw==", "dev": true, "license": "MIT", "dependencies": { @@ -5712,9 +5644,9 @@ } }, "node_modules/diff": { - "version": "4.0.2", - "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz", - "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.4.tgz", + "integrity": "sha512-X07nttJQkwkfKfvTPG/KSnE2OMdcUCao6+eXF3wmnIQRn2aPAHH3VxDbDOdegkd6JbPsXqShpvEOHfAT+nCNwQ==", "dev": true, "license": "BSD-3-Clause", "engines": { @@ -5824,7 +5756,7 @@ "version": "0.2.0", "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", - "devOptional": true, + "dev": true, "license": "MIT" }, "node_modules/ee-first": { @@ -5835,9 +5767,9 @@ "license": "MIT" }, 
"node_modules/electron-to-chromium": { - "version": "1.5.267", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.267.tgz", - "integrity": "sha512-0Drusm6MVRXSOJpGbaSVgcQsuB4hEkMpHXaVstcPmhu5LIedxs1xNK/nIxmQIU/RPC0+1/o0AVZfBTkTNJOdUw==", + "version": "1.5.286", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.286.tgz", + "integrity": "sha512-9tfDXhJ4RKFNerfjdCcZfufu49vg620741MNs26a9+bhLThdB+plgMeou98CAaHu/WATj2iHOOHTp1hWtABj2A==", "dev": true, "license": "ISC" }, @@ -6195,11 +6127,14 @@ } }, "node_modules/express-rate-limit": { - "version": "7.5.1", - "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-7.5.1.tgz", - "integrity": "sha512-7iN8iPMDzOMHPUYllBEsQdWVB6fPDMPqwjBaFrgr4Jgr/+okjvzAy+UHlYYL/Vs0OsOrMkwS6PJDkFlJwoxUnw==", + "version": "8.2.1", + "resolved": "https://registry.npmjs.org/express-rate-limit/-/express-rate-limit-8.2.1.tgz", + "integrity": "sha512-PCZEIEIxqwhzw4KF0n7QF4QqruVTcF73O5kFKUnGOyjbCCgizBBiFaYpd/fnBLUMPw/BWw9OsiN7GgrNYr7j6g==", "devOptional": true, "license": "MIT", + "dependencies": { + "ip-address": "10.0.1" + }, "engines": { "node": ">= 16" }, @@ -6460,23 +6395,6 @@ "url": "https://github.com/sponsors/ljharb" } }, - "node_modules/foreground-child": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz", - "integrity": "sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==", - "devOptional": true, - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.6", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -6632,21 +6550,18 @@ } }, "node_modules/glob": { - "version": "10.5.0", - "resolved": 
"https://registry.npmjs.org/glob/-/glob-10.5.0.tgz", - "integrity": "sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==", + "version": "13.0.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-13.0.3.tgz", + "integrity": "sha512-/g3B0mC+4x724v1TgtBlBtt2hPi/EWptsIAmXUx9Z2rvBYleQcsrmaOzd5LyL50jf/Soi83ZDJmw2+XqvH/EeA==", "devOptional": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", + "minimatch": "^10.2.0", "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" + "path-scurry": "^2.0.0" }, - "bin": { - "glob": "dist/esm/bin.mjs" + "engines": { + "node": "20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -6672,17 +6587,43 @@ "dev": true, "license": "BSD-2-Clause" }, - "node_modules/glob/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "node_modules/glob/node_modules/balanced-match": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.2.tgz", + "integrity": "sha512-x0K50QvKQ97fdEz2kPehIerj+YTeptKF9hyYkKf6egnwmMWAkADiO0QCzSp0R5xN8FTZgYaBfSaue46Ej62nMg==", "devOptional": true, - "license": "ISC", + "license": "MIT", "dependencies": { - "brace-expansion": "^2.0.1" + "jackspeak": "^4.2.3" }, "engines": { - "node": ">=16 || 14 >=14.17" + "node": "20 || >=22" + } + }, + "node_modules/glob/node_modules/brace-expansion": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.2.tgz", + "integrity": "sha512-Pdk8c9poy+YhOgVWw1JNN22/HcivgKWwpxKq04M/jTmHyCZn12WPJebZxdjSa5TmBqISrUSgNYU3eRORljfCCw==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "balanced-match": 
"^4.0.2" + }, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/glob/node_modules/minimatch": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.0.tgz", + "integrity": "sha512-ugkC31VaVg9cF0DFVoADH12k6061zNZkZON+aX8AWsR9GhPcErkcMBceb6znR8wLERM2AkkOxy2nWRLpT9Jq5w==", + "devOptional": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "brace-expansion": "^5.0.2" + }, + "engines": { + "node": "20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" @@ -6813,6 +6754,17 @@ "he": "bin/he" } }, + "node_modules/hono": { + "version": "4.11.9", + "resolved": "https://registry.npmjs.org/hono/-/hono-4.11.9.tgz", + "integrity": "sha512-Eaw2YTGM6WOxA6CXbckaEvslr2Ne4NFsKrvc0v97JD5awbmeBLO5w9Ho9L9kmKonrwF9RJlW6BxT1PVv/agBHQ==", + "devOptional": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=16.9.0" + } + }, "node_modules/hosted-git-info": { "version": "9.0.2", "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-9.0.2.tgz", @@ -6827,9 +6779,9 @@ } }, "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "11.2.4", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz", - "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==", + "version": "11.2.6", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", + "integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", "devOptional": true, "license": "BlueOak-1.0.0", "engines": { @@ -6850,9 +6802,9 @@ } }, "node_modules/htmlparser2": { - "version": "10.0.0", - "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-10.0.0.tgz", - "integrity": "sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g==", + "version": "10.1.0", + "resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-10.1.0.tgz", + 
"integrity": "sha512-VTZkM9GWRAtEpveh7MSF6SjjrpNVNNVJfFup7xTY3UpFtm67foy9HDVXneLtFVt4pMz5kZtgNcvCniNFb1hlEQ==", "dev": true, "funding": [ "https://github.com/fb55/htmlparser2?sponsor=1", @@ -6865,14 +6817,14 @@ "dependencies": { "domelementtype": "^2.3.0", "domhandler": "^5.0.3", - "domutils": "^3.2.1", - "entities": "^6.0.0" + "domutils": "^3.2.2", + "entities": "^7.0.1" } }, "node_modules/htmlparser2/node_modules/entities": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", - "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", + "version": "7.0.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-7.0.1.tgz", + "integrity": "sha512-TWrgLOFUQTH994YUyl1yT4uyavY5nNB5muff+RtWaqNVCAK408b5ZnnbNAUEWLTCpum9w6arT70i1XdQ4UeOPA==", "dev": true, "license": "BSD-2-Clause", "engines": { @@ -7102,9 +7054,9 @@ } }, "node_modules/iconv-lite": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.1.tgz", - "integrity": "sha512-2Tth85cXwGFHfvRgZWszZSvdo+0Xsqmw8k8ZwxScfcBneNUraK+dxRxRm24nszx80Y0TVio8kKLt5sLE7ZCLlw==", + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.7.2.tgz", + "integrity": "sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==", "devOptional": true, "license": "MIT", "dependencies": { @@ -7151,14 +7103,40 @@ "node": "^20.17.0 || >=22.9.0" } }, + "node_modules/ignore-walk/node_modules/balanced-match": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.2.tgz", + "integrity": "sha512-x0K50QvKQ97fdEz2kPehIerj+YTeptKF9hyYkKf6egnwmMWAkADiO0QCzSp0R5xN8FTZgYaBfSaue46Ej62nMg==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "jackspeak": "^4.2.3" + }, + "engines": { + "node": "20 || >=22" + } + }, + "node_modules/ignore-walk/node_modules/brace-expansion": { + 
"version": "5.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.2.tgz", + "integrity": "sha512-Pdk8c9poy+YhOgVWw1JNN22/HcivgKWwpxKq04M/jTmHyCZn12WPJebZxdjSa5TmBqISrUSgNYU3eRORljfCCw==", + "devOptional": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^4.0.2" + }, + "engines": { + "node": "20 || >=22" + } + }, "node_modules/ignore-walk/node_modules/minimatch": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz", - "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==", + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.0.tgz", + "integrity": "sha512-ugkC31VaVg9cF0DFVoADH12k6061zNZkZON+aX8AWsR9GhPcErkcMBceb6znR8wLERM2AkkOxy2nWRLpT9Jq5w==", "devOptional": true, "license": "BlueOak-1.0.0", "dependencies": { - "@isaacs/brace-expansion": "^5.0.0" + "brace-expansion": "^5.0.2" }, "engines": { "node": "20 || >=22" @@ -7333,9 +7311,9 @@ } }, "node_modules/ip-address": { - "version": "10.1.0", - "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.1.0.tgz", - "integrity": "sha512-XXADHxXmvT9+CRxhXg56LJovE+bmWnEWB78LB83VZTprKTmaC5QfruXocxzTZ2Kl0DNwKuBdlIhjL8LeY8Sf8Q==", + "version": "10.0.1", + "resolved": "https://registry.npmjs.org/ip-address/-/ip-address-10.0.1.tgz", + "integrity": "sha512-NWv9YLW4PoW2B7xtzaS3NCot75m6nK7Icdv0o3lfMceJVRfSoQwqD4wEH5rLwoKJwUiZ/rfpiVBhnaF0FK4HoA==", "devOptional": true, "license": "MIT", "engines": { @@ -7591,19 +7569,19 @@ } }, "node_modules/jackspeak": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz", - "integrity": "sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-4.2.3.tgz", + "integrity": 
"sha512-ykkVRwrYvFm1nb2AJfKKYPr0emF6IiXDYUaFx4Zn9ZuIH7MrzEZ3sD5RlqGXNRpHtvUHJyOnCEFxOlNDtGo7wg==", "devOptional": true, "license": "BlueOak-1.0.0", "dependencies": { - "@isaacs/cliui": "^8.0.2" + "@isaacs/cliui": "^9.0.0" + }, + "engines": { + "node": "20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" } }, "node_modules/jose": { @@ -7648,13 +7626,13 @@ } }, "node_modules/json-parse-even-better-errors": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-4.0.0.tgz", - "integrity": "sha512-lR4MXjGNgkJc7tkQ97kb2nuEMnNCyU//XYVH0MKTGcXEiSudQ5MKGKen3C5QubYy0vmq+JGitUg92uuywGEwIA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-5.0.0.tgz", + "integrity": "sha512-ZF1nxZ28VhQouRWhUcVlUIN3qwSgPuswK05s/HIaoetAoE/9tngVmCHjSxmSQPav1nd+lPtTL0YZ/2AFdR/iYQ==", "devOptional": true, "license": "MIT", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/json-schema-traverse": { @@ -7663,6 +7641,13 @@ "integrity": "sha512-NM8/P9n3XjXhIZn1lLhkFaACTOURQXjWhV4BA/RnOv8xvgqtqpAX9IO4mRQxSx1Rlo4tqzeqb0sOlruaOy3dug==", "license": "MIT" }, + "node_modules/json-schema-typed": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/json-schema-typed/-/json-schema-typed-8.0.2.tgz", + "integrity": "sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==", + "devOptional": true, + "license": "BSD-2-Clause" + }, "node_modules/json5": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", @@ -7747,9 +7732,9 @@ } }, "node_modules/libphonenumber-js": { - "version": "1.12.33", - "resolved": "https://registry.npmjs.org/libphonenumber-js/-/libphonenumber-js-1.12.33.tgz", - "integrity": 
"sha512-r9kw4OA6oDO4dPXkOrXTkArQAafIKAU71hChInV4FxZ69dxCfbwQGDPzqR5/vea94wU705/3AZroEbSoeVWrQw==", + "version": "1.12.36", + "resolved": "https://registry.npmjs.org/libphonenumber-js/-/libphonenumber-js-1.12.36.tgz", + "integrity": "sha512-woWhKMAVx1fzzUnMCyOzglgSgf6/AFHLASdOBcchYCyvWSGWt12imw3iu2hdI5d4dGZRsNWAmWiz37sDKUPaRQ==", "license": "MIT", "peer": true }, @@ -7896,9 +7881,9 @@ "license": "MIT" }, "node_modules/lint-staged/node_modules/eventemitter3": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", - "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz", + "integrity": "sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==", "dev": true, "license": "MIT" }, @@ -8071,9 +8056,9 @@ } }, "node_modules/listr2/node_modules/eventemitter3": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.1.tgz", - "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-5.0.4.tgz", + "integrity": "sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==", "devOptional": true, "license": "MIT" }, @@ -8140,9 +8125,9 @@ } }, "node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", "license": "MIT" }, 
"node_modules/log-symbols": { @@ -8312,26 +8297,36 @@ "license": "ISC" }, "node_modules/make-fetch-happen": { - "version": "14.0.3", - "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-14.0.3.tgz", - "integrity": "sha512-QMjGbFTP0blj97EeidG5hk/QhKQ3T4ICckQGLgz38QF7Vgbk6e6FTARN8KhKxyBbWn8R0HU+bnw8aSoFPD4qtQ==", + "version": "15.0.3", + "resolved": "https://registry.npmjs.org/make-fetch-happen/-/make-fetch-happen-15.0.3.tgz", + "integrity": "sha512-iyyEpDty1mwW3dGlYXAJqC/azFn5PPvgKVwXayOGBSmKLxhKZ9fg4qIan2ePpp1vJIwfFiO34LAPZgq9SZW9Aw==", "devOptional": true, "license": "ISC", "dependencies": { - "@npmcli/agent": "^3.0.0", - "cacache": "^19.0.1", + "@npmcli/agent": "^4.0.0", + "cacache": "^20.0.1", "http-cache-semantics": "^4.1.1", "minipass": "^7.0.2", - "minipass-fetch": "^4.0.0", + "minipass-fetch": "^5.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^1.0.0", - "proc-log": "^5.0.0", + "proc-log": "^6.0.0", "promise-retry": "^2.0.1", - "ssri": "^12.0.0" + "ssri": "^13.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" + } + }, + "node_modules/make-fetch-happen/node_modules/proc-log": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.1.0.tgz", + "integrity": "sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==", + "devOptional": true, + "license": "ISC", + "engines": { + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/marked": { @@ -8550,18 +8545,18 @@ } }, "node_modules/minipass-fetch": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-4.0.1.tgz", - "integrity": "sha512-j7U11C5HXigVuutxebFadoYBbd7VSdZWggSe64NVdvWNBqGAiXPL2QVCehjmw7lY1oF9gOllYbORh+hiNgfPgQ==", + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/minipass-fetch/-/minipass-fetch-5.0.1.tgz", + "integrity": 
"sha512-yHK8pb0iCGat0lDrs/D6RZmCdaBT64tULXjdxjSMAqoDi18Q3qKEUTHypHQZQd9+FYpIS+lkvpq6C/R6SbUeRw==", "devOptional": true, "license": "MIT", "dependencies": { "minipass": "^7.0.3", - "minipass-sized": "^1.0.3", + "minipass-sized": "^2.0.0", "minizlib": "^3.0.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" }, "optionalDependencies": { "encoding": "^0.1.13" @@ -8634,38 +8629,18 @@ "license": "ISC" }, "node_modules/minipass-sized": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-1.0.3.tgz", - "integrity": "sha512-MbkQQ2CTiBMlA2Dm/5cY+9SWFEN8pzzOXi6rlM5Xxq0Yqbda5ZQy9sU75a673FE9ZK0Zsbr6Y5iP6u9nktfg2g==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/minipass-sized/-/minipass-sized-2.0.0.tgz", + "integrity": "sha512-zSsHhto5BcUVM2m1LurnXY6M//cGhVaegT71OfOXoprxT6o780GZd792ea6FfrQkuU4usHZIUczAQMRUE2plzA==", "devOptional": true, "license": "ISC", "dependencies": { - "minipass": "^3.0.0" + "minipass": "^7.1.2" }, "engines": { "node": ">=8" } }, - "node_modules/minipass-sized/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "devOptional": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-sized/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "devOptional": true, - "license": "ISC" - }, "node_modules/minizlib": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-3.1.0.tgz", @@ -8680,16 +8655,19 @@ } }, "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": 
"https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "devOptional": true, + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-3.0.1.tgz", + "integrity": "sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==", "license": "MIT", + "optional": true, "bin": { - "mkdirp": "bin/cmd.js" + "mkdirp": "dist/cjs/src/bin.js" }, "engines": { "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/monaco-editor": { @@ -8943,9 +8921,9 @@ } }, "node_modules/ng-packagr/node_modules/commander": { - "version": "14.0.2", - "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.2.tgz", - "integrity": "sha512-TywoWNNRbhoD0BXs1P3ZEScW8W5iKrnbithIl0YH+uCmBd0QpPOA8yc82DS3BIE5Ma6FnBVUsJ7wVUDz4dvOWQ==", + "version": "14.0.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-14.0.3.tgz", + "integrity": "sha512-H+y0Jo/T1RZ9qPP4Eh1pkcQcLRglraJaSLoyOtHxu6AapkjWVCy2Sit1QQ4x3Dng8qDlSsZEet7g5Pq06MvTgw==", "dev": true, "license": "MIT", "engines": { @@ -8984,28 +8962,28 @@ } }, "node_modules/node-gyp": { - "version": "11.5.0", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-11.5.0.tgz", - "integrity": "sha512-ra7Kvlhxn5V9Slyus0ygMa2h+UqExPqUIkfk7Pc8QTLT956JLSy51uWFwHtIYy0vI8cB4BDhc/S03+880My/LQ==", + "version": "12.2.0", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-12.2.0.tgz", + "integrity": "sha512-q23WdzrQv48KozXlr0U1v9dwO/k59NHeSzn6loGcasyf0UnSrtzs8kRxM+mfwJSf0DkX0s43hcqgnSO4/VNthQ==", "devOptional": true, "license": "MIT", "dependencies": { "env-paths": "^2.2.0", "exponential-backoff": "^3.1.1", "graceful-fs": "^4.2.6", - "make-fetch-happen": "^14.0.3", - "nopt": "^8.0.0", - "proc-log": "^5.0.0", + "make-fetch-happen": "^15.0.0", + "nopt": "^9.0.0", + "proc-log": "^6.0.0", "semver": "^7.3.5", - "tar": 
"^7.4.3", + "tar": "^7.5.4", "tinyglobby": "^0.2.12", - "which": "^5.0.0" + "which": "^6.0.0" }, "bin": { "node-gyp": "bin/node-gyp.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/node-gyp-build-optional-packages": { @@ -9024,67 +9002,40 @@ "node-gyp-build-optional-packages-test": "build-test.js" } }, - "node_modules/node-gyp/node_modules/chownr": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-3.0.0.tgz", - "integrity": "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==", + "node_modules/node-gyp/node_modules/isexe": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-4.0.0.tgz", + "integrity": "sha512-FFUtZMpoZ8RqHS3XeXEmHWLA4thH+ZxCv2lOiPIn1Xc7CxrqhWzNSDzD+/chS/zbYezmiwWLdQC09JdQKmthOw==", "devOptional": true, "license": "BlueOak-1.0.0", "engines": { - "node": ">=18" + "node": ">=20" } }, - "node_modules/node-gyp/node_modules/isexe": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/isexe/-/isexe-3.1.1.tgz", - "integrity": "sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==", + "node_modules/node-gyp/node_modules/proc-log": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.1.0.tgz", + "integrity": "sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==", "devOptional": true, "license": "ISC", "engines": { - "node": ">=16" - } - }, - "node_modules/node-gyp/node_modules/tar": { - "version": "7.5.2", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.2.tgz", - "integrity": "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg==", - "devOptional": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/fs-minipass": "^4.0.0", - "chownr": "^3.0.0", - "minipass": "^7.1.2", - "minizlib": "^3.1.0", - "yallist": 
"^5.0.0" - }, - "engines": { - "node": ">=18" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/node-gyp/node_modules/which": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/which/-/which-5.0.0.tgz", - "integrity": "sha512-JEdGzHwwkrbWoGOlIHqQ5gtprKGOenpDHpxE9zVR1bWbOtYRyPPHMe9FaP6x61CmNaTThSkb0DAJte5jD+DmzQ==", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/which/-/which-6.0.1.tgz", + "integrity": "sha512-oGLe46MIrCRqX7ytPUf66EAYvdeMIZYn3WaocqqKZAxrBpkqHfL/qvTyJ/bTk5+AqHCjXmrv3CEWgy368zhRUg==", "devOptional": true, "license": "ISC", "dependencies": { - "isexe": "^3.1.1" + "isexe": "^4.0.0" }, "bin": { "node-which": "bin/which.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/node-gyp/node_modules/yallist": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", - "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", - "devOptional": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": ">=18" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/node-html-parser": { @@ -9212,55 +9163,55 @@ "license": "MIT" }, "node_modules/nopt": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-8.1.0.tgz", - "integrity": "sha512-ieGu42u/Qsa4TFktmaKEwM6MQH0pOWnaB3htzh0JRtx84+Mebc0cbZYN5bC+6WTZ4+77xrL9Pn5m7CV6VIkV7A==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-9.0.0.tgz", + "integrity": "sha512-Zhq3a+yFKrYwSBluL4H9XP3m3y5uvQkB/09CwDruCiRmR/UJYnn9W4R48ry0uGC70aeTPKLynBtscP9efFFcPw==", "devOptional": true, "license": "ISC", "dependencies": { - "abbrev": "^3.0.0" + "abbrev": "^4.0.0" }, "bin": { "nopt": "bin/nopt.js" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm-bundled": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-4.0.0.tgz", - "integrity": 
"sha512-IxaQZDMsqfQ2Lz37VvyyEtKLe8FsRZuysmedy/N06TU1RyVppYKXrO4xIhR0F+7ubIBox6Q7nir6fQI3ej39iA==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/npm-bundled/-/npm-bundled-5.0.0.tgz", + "integrity": "sha512-JLSpbzh6UUXIEoqPsYBvVNVmyrjVZ1fzEFbqxKkTJQkWBO3xFzFT+KDnSKQWwOQNbuWRwt5LSD6HOTLGIWzfrw==", "devOptional": true, "license": "ISC", "dependencies": { - "npm-normalize-package-bin": "^4.0.0" + "npm-normalize-package-bin": "^5.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm-install-checks": { - "version": "7.1.2", - "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-7.1.2.tgz", - "integrity": "sha512-z9HJBCYw9Zr8BqXcllKIs5nI+QggAImbBdHphOzVYrz2CB4iQ6FzWyKmlqDZua+51nAu7FcemlbTc9VgQN5XDQ==", + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/npm-install-checks/-/npm-install-checks-8.0.0.tgz", + "integrity": "sha512-ScAUdMpyzkbpxoNekQ3tNRdFI8SJ86wgKZSQZdUxT+bj0wVFpsEMWnkXP0twVe1gJyNF5apBWDJhhIbgrIViRA==", "devOptional": true, "license": "BSD-2-Clause", "dependencies": { "semver": "^7.1.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm-normalize-package-bin": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-4.0.0.tgz", - "integrity": "sha512-TZKxPvItzai9kN9H/TkmCtx/ZN/hvr3vUycjlfmH0ootY9yFBzNOpiXAdIn1Iteqsvk4lQn6B5PTrt+n6h8k/w==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/npm-normalize-package-bin/-/npm-normalize-package-bin-5.0.0.tgz", + "integrity": "sha512-CJi3OS4JLsNMmr2u07OJlhcrPxCeOeP/4xq67aWNai6TNWWbTrlNDgl8NcFKVlcBKp18GPj+EzbNIgrBfZhsag==", "devOptional": true, "license": "ISC", "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm-package-arg": { @@ -9304,111 +9255,49 @@ } }, "node_modules/npm-pick-manifest": { - "version": "10.0.0", - "resolved": 
"https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-10.0.0.tgz", - "integrity": "sha512-r4fFa4FqYY8xaM7fHecQ9Z2nE9hgNfJR+EmoKv0+chvzWkBcORX3r0FpTByP+CbOVJDladMXnPQGVN8PBLGuTQ==", + "version": "11.0.3", + "resolved": "https://registry.npmjs.org/npm-pick-manifest/-/npm-pick-manifest-11.0.3.tgz", + "integrity": "sha512-buzyCfeoGY/PxKqmBqn1IUJrZnUi1VVJTdSSRPGI60tJdUhUoSQFhs0zycJokDdOznQentgrpf8LayEHyyYlqQ==", "devOptional": true, "license": "ISC", "dependencies": { - "npm-install-checks": "^7.1.0", - "npm-normalize-package-bin": "^4.0.0", - "npm-package-arg": "^12.0.0", + "npm-install-checks": "^8.0.0", + "npm-normalize-package-bin": "^5.0.0", + "npm-package-arg": "^13.0.0", "semver": "^7.3.5" }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-pick-manifest/node_modules/hosted-git-info": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz", - "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==", - "devOptional": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-pick-manifest/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "devOptional": true, - "license": "ISC" - }, - "node_modules/npm-pick-manifest/node_modules/npm-package-arg": { - "version": "12.0.2", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz", - "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==", - "devOptional": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^8.0.0", - "proc-log": "^5.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": 
"^6.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm-registry-fetch": { - "version": "18.0.2", - "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-18.0.2.tgz", - "integrity": "sha512-LeVMZBBVy+oQb5R6FDV9OlJCcWDU+al10oKpe+nsvcHnG24Z3uM3SvJYKfGJlfGjVU8v9liejCrUR/M5HO5NEQ==", + "version": "19.1.1", + "resolved": "https://registry.npmjs.org/npm-registry-fetch/-/npm-registry-fetch-19.1.1.tgz", + "integrity": "sha512-TakBap6OM1w0H73VZVDf44iFXsOS3h+L4wVMXmbWOQroZgFhMch0juN6XSzBNlD965yIKvWg2dfu7NSiaYLxtw==", "devOptional": true, "license": "ISC", "dependencies": { - "@npmcli/redact": "^3.0.0", + "@npmcli/redact": "^4.0.0", "jsonparse": "^1.3.1", - "make-fetch-happen": "^14.0.0", + "make-fetch-happen": "^15.0.0", "minipass": "^7.0.2", - "minipass-fetch": "^4.0.0", + "minipass-fetch": "^5.0.0", "minizlib": "^3.0.1", - "npm-package-arg": "^12.0.0", - "proc-log": "^5.0.0" + "npm-package-arg": "^13.0.0", + "proc-log": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/npm-registry-fetch/node_modules/hosted-git-info": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz", - "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==", + "node_modules/npm-registry-fetch/node_modules/proc-log": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.1.0.tgz", + "integrity": "sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==", "devOptional": true, "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/npm-registry-fetch/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": 
"sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "devOptional": true, - "license": "ISC" - }, - "node_modules/npm-registry-fetch/node_modules/npm-package-arg": { - "version": "12.0.2", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz", - "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==", - "devOptional": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^8.0.0", - "proc-log": "^5.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^6.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/npm-run-path": { @@ -9587,9 +9476,9 @@ } }, "node_modules/ordered-binary": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/ordered-binary/-/ordered-binary-1.6.0.tgz", - "integrity": "sha512-IQh2aMfMIDbPjI/8a3Edr+PiOpcsB7yo8NdW7aHWVaoR/pcDldunMvnnwbk/auPGqmKeAdxtZl7MHX/QmPwhvQ==", + "version": "1.6.1", + "resolved": "https://registry.npmjs.org/ordered-binary/-/ordered-binary-1.6.1.tgz", + "integrity": "sha512-QkCdPooczexPLiXIrbVOPYkR3VO3T6v2OyKRkR1Xbhpy7/LAVXwahnRCgRp78Oe/Ehf0C/HATAxfSr6eA1oX+w==", "dev": true, "license": "MIT", "optional": true @@ -9652,33 +9541,33 @@ "version": "1.0.1", "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", - "devOptional": true, + "dev": true, "license": "BlueOak-1.0.0" }, "node_modules/pacote": { - "version": "21.0.0", - "resolved": "https://registry.npmjs.org/pacote/-/pacote-21.0.0.tgz", - "integrity": "sha512-lcqexq73AMv6QNLo7SOpz0JJoaGdS3rBFgF122NZVl1bApo2mfu+XzUBU/X/XsiJu+iUmKpekRayqQYAs+PhkA==", + "version": "21.0.4", + "resolved": "https://registry.npmjs.org/pacote/-/pacote-21.0.4.tgz", + "integrity": 
"sha512-RplP/pDW0NNNDh3pnaoIWYPvNenS7UqMbXyvMqJczosiFWTeGGwJC2NQBLqKf4rGLFfwCOnntw1aEp9Jiqm1MA==", "devOptional": true, "license": "ISC", "dependencies": { - "@npmcli/git": "^6.0.0", - "@npmcli/installed-package-contents": "^3.0.0", - "@npmcli/package-json": "^6.0.0", - "@npmcli/promise-spawn": "^8.0.0", - "@npmcli/run-script": "^9.0.0", - "cacache": "^19.0.0", + "@npmcli/git": "^7.0.0", + "@npmcli/installed-package-contents": "^4.0.0", + "@npmcli/package-json": "^7.0.0", + "@npmcli/promise-spawn": "^9.0.0", + "@npmcli/run-script": "^10.0.0", + "cacache": "^20.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", - "npm-package-arg": "^12.0.0", - "npm-packlist": "^10.0.0", - "npm-pick-manifest": "^10.0.0", - "npm-registry-fetch": "^18.0.0", - "proc-log": "^5.0.0", + "npm-package-arg": "^13.0.0", + "npm-packlist": "^10.0.1", + "npm-pick-manifest": "^11.0.1", + "npm-registry-fetch": "^19.0.0", + "proc-log": "^6.0.0", "promise-retry": "^2.0.1", - "sigstore": "^3.0.0", - "ssri": "^12.0.0", - "tar": "^6.1.11" + "sigstore": "^4.0.0", + "ssri": "^13.0.0", + "tar": "^7.4.3" }, "bin": { "pacote": "bin/index.js" @@ -9687,40 +9576,14 @@ "node": "^20.17.0 || >=22.9.0" } }, - "node_modules/pacote/node_modules/hosted-git-info": { - "version": "8.1.0", - "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-8.1.0.tgz", - "integrity": "sha512-Rw/B2DNQaPBICNXEm8balFz9a6WpZrkCGpcWFpy7nCj+NyhSdqXipmfvtmWt9xGfp0wZnBxB+iVpLmQMYt47Tw==", + "node_modules/pacote/node_modules/proc-log": { + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-6.1.0.tgz", + "integrity": "sha512-iG+GYldRf2BQ0UDUAd6JQ/RwzaQy6mXmsk/IzlYyal4A4SNFw54MeH4/tLkF4I5WoWG9SQwuqWzS99jaFQHBuQ==", "devOptional": true, "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/pacote/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "devOptional": true, - "license": "ISC" - }, - "node_modules/pacote/node_modules/npm-package-arg": { - "version": "12.0.2", - "resolved": "https://registry.npmjs.org/npm-package-arg/-/npm-package-arg-12.0.2.tgz", - "integrity": "sha512-f1NpFjNI9O4VbKMOlA5QoBq/vSQPORHcTZ2feJpFkTHJ9eQkdlmZEKSjcAhxTGInC7RlEyScT9ui67NaOsjFWA==", - "devOptional": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^8.0.0", - "proc-log": "^5.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^6.0.0" - }, - "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/pako": { @@ -9898,28 +9761,31 @@ "license": "MIT" }, "node_modules/path-scurry": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", - "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.1.tgz", + "integrity": "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==", "devOptional": true, "license": "BlueOak-1.0.0", "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + "lru-cache": "^11.0.0", + "minipass": "^7.1.2" }, "engines": { - "node": ">=16 || 14 >=14.18" + "node": "20 || >=22" }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "version": "11.2.6", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", + 
"integrity": "sha512-ESL2CrkS/2wTPfuend7Zhkzo2u0daGJ/A2VucJOgQ/C48S/zB8MMeMHSGKYpXhIjbPxfuezITkaBH1wqv00DDQ==", "devOptional": true, - "license": "ISC" + "license": "BlueOak-1.0.0", + "engines": { + "node": "20 || >=22" + } }, "node_modules/path-to-regexp": { "version": "8.3.0", @@ -10137,9 +10003,9 @@ "license": "MIT" }, "node_modules/prettier": { - "version": "3.7.4", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.7.4.tgz", - "integrity": "sha512-v6UNi1+3hSlVvv8fSaoUbggEM5VErKmmpGA7Pl3HF8V6uKY7rvClBOJlH6yNwQtfTueNkGVpOv/mtWL9L4bgRA==", + "version": "3.8.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.8.1.tgz", + "integrity": "sha512-UOnG6LftzbdaHZcKoPFtOcCKztrQ57WkHDeRD9t/PTQtmT0NHSeWWepj6pS0z/N7+08BHFDQVUrfmfMRcZwbMg==", "dev": true, "license": "MIT", "bin": { @@ -10385,9 +10251,9 @@ } }, "node_modules/qs": { - "version": "6.14.0", - "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.0.tgz", - "integrity": "sha512-YWWTjgABSKcvs/nWBi9PycY/JiPJqOD4JA6o9Sej2AtvSGarXxKC3OQSk4pAarbdQlKAh5D4FCQkJNkW+GAn3w==", + "version": "6.14.2", + "resolved": "https://registry.npmjs.org/qs/-/qs-6.14.2.tgz", + "integrity": "sha512-V/yCWTTF7VJ9hIh18Ugr2zhJMP01MY7c5kh4J870L7imm6/DIzBsNLTXzMwUA3yZ5b/KBqLx8Kp3uRvd7xSe3Q==", "devOptional": true, "license": "BSD-3-Clause", "dependencies": { @@ -10624,67 +10490,6 @@ "url": "https://github.com/sponsors/isaacs" } }, - "node_modules/rimraf/node_modules/glob": { - "version": "13.0.0", - "resolved": "https://registry.npmjs.org/glob/-/glob-13.0.0.tgz", - "integrity": "sha512-tvZgpqk6fz4BaNZ66ZsRaZnbHvP/jG3uKJvAZOwEVUL4RTA5nJeeLYfyN9/VA8NX/V3IBG+hkeuGpKjvELkVhA==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "minimatch": "^10.1.1", - "minipass": "^7.1.2", - "path-scurry": "^2.0.0" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/lru-cache": { - "version": "11.2.4", - "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.4.tgz", - "integrity": "sha512-B5Y16Jr9LB9dHVkh6ZevG+vAbOsNOYCX+sXvFWFu7B3Iz5mijW3zdbMyhsh8ANd2mSWBYdJgnqi+mL7/LrOPYg==", - "dev": true, - "license": "BlueOak-1.0.0", - "engines": { - "node": "20 || >=22" - } - }, - "node_modules/rimraf/node_modules/minimatch": { - "version": "10.1.1", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.1.1.tgz", - "integrity": "sha512-enIvLvRAFZYXJzkCYG5RKmPfrFArdLv+R+lbQ53BmIMLIry74bjKzX6iHAm8WYamJkhSSEabrWN5D97XnKObjQ==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/brace-expansion": "^5.0.0" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/rimraf/node_modules/path-scurry": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-2.0.1.tgz", - "integrity": "sha512-oWyT4gICAu+kaA7QWk/jvCHWarMKNs6pXOGWKDTr7cw4IGcUbW+PeTfbaQiLGheFRpjo6O9J0PmyMfQPjH71oA==", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^11.0.0", - "minipass": "^7.1.2" - }, - "engines": { - "node": "20 || >=22" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, "node_modules/ripemd160": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/ripemd160/-/ripemd160-2.0.3.tgz", @@ -10891,12 +10696,15 @@ } }, "node_modules/sax": { - "version": "1.4.3", - "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.3.tgz", - "integrity": "sha512-yqYn1JhPczigF94DMS+shiDMjDowYO6y9+wB/4WgO0Y19jWYk0lQ4tuG5KI7kj4FTp1wxPj5IFfcrz/s1c3jjQ==", + "version": "1.4.4", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.4.tgz", + "integrity": "sha512-1n3r/tGXO6b6VXMdFT54SHzT9ytu9yr7TaELowdYpMqY/Ao7EnlQGmAQ1+RatX7Tkkdm6hONI2owqNx2aZj5Sw==", "dev": true, "license": "BlueOak-1.0.0", - "optional": true + "optional": true, + "engines": { + "node": ">=11.0.0" + } }, "node_modules/secure-compare": { "version": 
"3.0.1", @@ -11167,21 +10975,21 @@ } }, "node_modules/sigstore": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-3.1.0.tgz", - "integrity": "sha512-ZpzWAFHIFqyFE56dXqgX/DkDRZdz+rRcjoIk/RQU4IX0wiCv1l8S7ZrXDHcCc+uaf+6o7w3h2l3g6GYG5TKN9Q==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/sigstore/-/sigstore-4.1.0.tgz", + "integrity": "sha512-/fUgUhYghuLzVT/gaJoeVehLCgZiUxPCPMcyVNY0lIf/cTCz58K/WTI7PefDarXxp9nUKpEwg1yyz3eSBMTtgA==", "devOptional": true, "license": "Apache-2.0", "dependencies": { - "@sigstore/bundle": "^3.1.0", - "@sigstore/core": "^2.0.0", - "@sigstore/protobuf-specs": "^0.4.0", - "@sigstore/sign": "^3.1.0", - "@sigstore/tuf": "^3.1.0", - "@sigstore/verify": "^2.1.0" + "@sigstore/bundle": "^4.0.0", + "@sigstore/core": "^3.1.0", + "@sigstore/protobuf-specs": "^0.5.0", + "@sigstore/sign": "^4.1.0", + "@sigstore/tuf": "^4.0.1", + "@sigstore/verify": "^3.1.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/slash": { @@ -11336,16 +11144,16 @@ "license": "BSD-3-Clause" }, "node_modules/ssri": { - "version": "12.0.0", - "resolved": "https://registry.npmjs.org/ssri/-/ssri-12.0.0.tgz", - "integrity": "sha512-S7iGNosepx9RadX82oimUkvr0Ct7IjJbEbs4mJcTxst8um95J3sDYU1RBEOvdu6oL1Wek2ODI5i4MAw+dZ6cAQ==", + "version": "13.0.1", + "resolved": "https://registry.npmjs.org/ssri/-/ssri-13.0.1.tgz", + "integrity": "sha512-QUiRf1+u9wPTL/76GTYlKttDEBWV1ga9ZXW8BG6kfdeyyM8LGPix9gROyg9V2+P0xNyF3X2Go526xKFdMZrHSQ==", "devOptional": true, "license": "ISC", "dependencies": { "minipass": "^7.0.3" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/statuses": { @@ -11406,62 +11214,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": 
"sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "devOptional": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "devOptional": true, - "license": "MIT" - }, - "node_modules/string-width-cjs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "devOptional": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/strip-ansi": { "version": "7.1.2", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz", @@ -11477,30 +11229,6 @@ "url": "https://github.com/chalk/strip-ansi?sponsor=1" } }, - 
"node_modules/strip-ansi-cjs": { - "name": "strip-ansi", - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "devOptional": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, "node_modules/strip-final-newline": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", @@ -11541,92 +11269,31 @@ } }, "node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "version": "7.5.7", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.7.tgz", + "integrity": "sha512-fov56fJiRuThVFXD6o6/Q354S7pnWMJIVlDBYijsTNx6jKSE4pvrDTs6lUnmGvNyfJwFQQwWy3owKz1ucIhveQ==", "devOptional": true, - "license": "ISC", + "license": "BlueOak-1.0.0", "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.1.0", + "yallist": "^5.0.0" }, "engines": { - "node": ">=10" - } - }, - "node_modules/tar/node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": 
"sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "devOptional": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "devOptional": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/tar/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "devOptional": true, - "license": "ISC", - "engines": { - "node": ">=8" - } - }, - "node_modules/tar/node_modules/minizlib": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", - "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/tar/node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "devOptional": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" + "node": ">=18" } }, "node_modules/tar/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": 
"sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz", + "integrity": "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw==", "devOptional": true, - "license": "ISC" + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + } }, "node_modules/tinyglobby": { "version": "0.2.14", @@ -11646,21 +11313,21 @@ } }, "node_modules/tldts": { - "version": "7.0.19", - "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.19.tgz", - "integrity": "sha512-8PWx8tvC4jDB39BQw1m4x8y5MH1BcQ5xHeL2n7UVFulMPH/3Q0uiamahFJ3lXA0zO2SUyRXuVVbWSDmstlt9YA==", + "version": "7.0.23", + "resolved": "https://registry.npmjs.org/tldts/-/tldts-7.0.23.tgz", + "integrity": "sha512-ASdhgQIBSay0R/eXggAkQ53G4nTJqTXqC2kbaBbdDwM7SkjyZyO0OaaN1/FH7U/yCeqOHDwFO5j8+Os/IS1dXw==", "license": "MIT", "dependencies": { - "tldts-core": "^7.0.19" + "tldts-core": "^7.0.23" }, "bin": { "tldts": "bin/cli.js" } }, "node_modules/tldts-core": { - "version": "7.0.19", - "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.19.tgz", - "integrity": "sha512-lJX2dEWx0SGH4O6p+7FPwYmJ/bu1JbcGJ8RLaG9b7liIgZ85itUVEPbMtWRVrde/0fnDPEPHW10ZsKW3kVsE9A==", + "version": "7.0.23", + "resolved": "https://registry.npmjs.org/tldts-core/-/tldts-core-7.0.23.tgz", + "integrity": "sha512-0g9vrtDQLrNIiCj22HSe9d4mLVG3g5ph5DZ8zCKBr4OtrspmNB6ss7hVyzArAeE88ceZocIEGkyW1Ime7fxPtQ==", "license": "MIT" }, "node_modules/to-buffer": { @@ -11726,12 +11393,6 @@ "node": ">=0.6" } }, - "node_modules/ts-matches": { - "version": "6.5.0", - "resolved": "https://registry.npmjs.org/ts-matches/-/ts-matches-6.5.0.tgz", - "integrity": "sha512-MhuobYhHYn6MlOTPAF/qk3tsRRioPac5ofYn68tc3rAJaGjsw1MsX1MOSep52DkvNJPgNV0F73zfgcQfYTVeyQ==", - "license": "MIT" - }, "node_modules/ts-morph": { "version": "23.0.0", "resolved": 
"https://registry.npmjs.org/ts-morph/-/ts-morph-23.0.0.tgz", @@ -11903,7 +11564,7 @@ "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "deprecated": "Glob versions prior to v9 are no longer supported", + "deprecated": "Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me", "dev": true, "license": "ISC", "dependencies": { @@ -12022,18 +11683,18 @@ "license": "0BSD" }, "node_modules/tuf-js": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-3.1.0.tgz", - "integrity": "sha512-3T3T04WzowbwV2FDiGXBbr81t64g1MUGGJRgT4x5o97N+8ArdhVCAF9IxFrxuSJmM3E5Asn7nKHkao0ibcZXAg==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/tuf-js/-/tuf-js-4.1.0.tgz", + "integrity": "sha512-50QV99kCKH5P/Vs4E2Gzp7BopNV+KzTXqWeaxrfu5IQJBOULRsTIS9seSsOVT8ZnGXzCyx55nYWAi4qJzpZKEQ==", "devOptional": true, "license": "MIT", "dependencies": { - "@tufjs/models": "3.0.1", - "debug": "^4.4.1", - "make-fetch-happen": "^14.0.3" + "@tufjs/models": "4.1.0", + "debug": "^4.4.3", + "make-fetch-happen": "^15.0.1" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/type-fest": { @@ -12113,29 +11774,29 @@ } }, "node_modules/unique-filename": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-4.0.0.tgz", - "integrity": "sha512-XSnEewXmQ+veP7xX2dS5Q4yZAvO40cBN2MWkJ7D/6sW4Dg6wYBNwM1Vrnz1FhH5AdeLIlUXRI9e28z1YZi71NQ==", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/unique-filename/-/unique-filename-5.0.0.tgz", + "integrity": "sha512-2RaJTAvAb4owyjllTfXzFClJ7WsGxlykkPvCr9pA//LD9goVq+m4PPAeBgNodGZ7nSrntT/auWpJ6Y5IFXcfjg==", 
"devOptional": true, "license": "ISC", "dependencies": { - "unique-slug": "^5.0.0" + "unique-slug": "^6.0.0" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/unique-slug": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-5.0.0.tgz", - "integrity": "sha512-9OdaqO5kwqR+1kVgHAhsp5vPNU0hnxRa26rBFNfNgM7M6pNtgzeBn3s/xbyCQL3dcjzOatcef6UUHpB/6MaETg==", + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/unique-slug/-/unique-slug-6.0.0.tgz", + "integrity": "sha512-4Lup7Ezn8W3d52/xBhZBVdx323ckxa7DEvd9kPQHppTkLoJXw6ltrBCyj5pnrxj0qKDxYMJ56CoxNuFCscdTiw==", "devOptional": true, "license": "ISC", "dependencies": { "imurmurhash": "^0.1.4" }, "engines": { - "node": "^18.17.0 || >=20.5.0" + "node": "^20.17.0 || >=22.9.0" } }, "node_modules/unpipe": { @@ -12414,9 +12075,9 @@ } }, "node_modules/which-typed-array": { - "version": "1.1.19", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.19.tgz", - "integrity": "sha512-rEvr90Bck4WZt9HHFC4DJMsjvu7x+r6bImz0/BrbWb7A2djJ8hnZMrWnHo9F8ssv0OMErasDhftrfROTyqSDrw==", + "version": "1.1.20", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.20.tgz", + "integrity": "sha512-LYfpUkmqwl0h9A2HL09Mms427Q1RZWuOHsukfVcKRq9q95iQxdw0ix1JQrqbcDR9PH1QDwf5Qo8OZb5lksZ8Xg==", "license": "MIT", "dependencies": { "available-typed-arrays": "^1.0.7", @@ -12448,96 +12109,6 @@ "node": ">=8" } }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": 
"https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", - "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", - "devOptional": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", - "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { - "version": "8.0.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", - "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", - "devOptional": true, - "license": "MIT" - }, - "node_modules/wrap-ansi-cjs/node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", - "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", - "devOptional": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/string-width": { - "version": "4.2.3", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", - "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - 
"is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", - "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", - "devOptional": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, "node_modules/wrap-ansi/node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", diff --git a/web/package.json b/web/package.json index 49fc3a76d..9d5818382 100644 --- a/web/package.json +++ b/web/package.json @@ -1,6 +1,6 @@ { "name": "startos-ui", - "version": "0.4.0-alpha.19", + "version": "0.4.0-alpha.20", "author": "Start9 Labs, Inc", "homepage": "https://start9.com/", "license": "MIT", @@ -86,7 +86,6 @@ "pbkdf2": "^3.1.2", "rxjs": "^7.8.2", "tldts": "^7.0.11", - "ts-matches": "^6.3.2", "tslib": "^2.8.1", "uuid": "^8.3.2", "zone.js": "^0.15.0" diff --git a/web/projects/marketplace/src/components/menu/menu.component.html b/web/projects/marketplace/src/components/menu/menu.component.html index f934c55c6..b1e2174e1 100644 --- a/web/projects/marketplace/src/components/menu/menu.component.html +++ b/web/projects/marketplace/src/components/menu/menu.component.html @@ -62,7 +62,7 @@